code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import time
import sys
import numpy as np
from local_search import kmedian_local_search
import feasibility
from kmedkpm import k_median_k_partitions_LS
import psutil
from sklearn.datasets import make_blobs
import generator
import random
test = False
def lp_ls_complete(data, color_mat, rvec, k, logfile):
############################################################################
# INPUT
# data: N X d numpy array
# color_mat: N*t numpy array representing groups' memberships
# rvec: requirements vector of t size
# k: number of clusters
# OUTPUT
# Object with stats, i.e, with "cost" that is a cost of the solution
############################################################################
(N, d) = data.shape
many_solutions_lp_only = True
command = 'linear-program'
return_solution = False
process = psutil.Process()
tstart = time.time()
time_buf = time.time()
perf_stats = feasibility.calculate(k, rvec, color_mat,
command, return_solution,
logfile, many_solutions_lp_only)
set_mappings = perf_stats["subset_map"]
solution = perf_stats["solution"]
set_to_indices = {}
for (idx, _id) in enumerate(sorted(set_mappings.keys())):
set_to_indices[idx] = _id
#end for
unique_solutions = solution if len(solution) == 0 else np.unique(np.stack(solution, axis=0), axis=0)
print('solutions: ', 0 if len(solution) == 0 else unique_solutions.shape[0])
total_cost = sys.maxsize
time_buf = time.time()
for (_, s) in enumerate(unique_solutions):
E = {}
i = 0
for (idx, e) in enumerate(s):
for _ in range(e):
#E[i] = (idx, set_to_indices[idx])
E[i] = data[set_mappings[set_to_indices[idx]], :]
i = i + 1
#end for
#end for
if k > i:
continue
statc = k_median_k_partitions_LS(E, data, None, N, d, k, is_coreset=False)
total_cost = min(total_cost, statc["cost"])
# print(set_to_indices)
kmedkpmtime = time.time() - time_buf
total_time = time.time() - tstart
stats_total = {}
opt_ls_cost = kmedian_local_search(data, k)["cost"]
stats_total['opt_ls_cost'] = opt_ls_cost
stats_total["lp_time"] = perf_stats["total_time"]
stats_total["total_time"] = total_time
stats_total["ls_time"] = kmedkpmtime
stats_total['peak_memory'] = process.memory_info().rss/(1024*1024)
stats_total['virtual_memory'] = process.memory_info().vms/(1024*1024)
stats_total['cost'] = total_cost
return stats_total
#end lp_ls_complete()
def test_lp_ls_complete():
#random number generator seeds
gen_seed = 12312321
dist_matrix_seed = random.randint(1, int(pow(2, 32)-1))
local_search_seed = random.randint(1, int(pow(2, 32)-1))
#initialize
logfile = sys.stdout
n = 100
t = 3
k = 3
d = 2
r_max = 3
r_min = 1
max_freq = 3
data, _ = make_blobs(n_samples=n, centers=k, n_features=d,
random_state=12312, cluster_std=0.8)
#generate instance and time it
time_buf = time.time()
color_mat, rvec, _ = generator.get_feasible_instance(
t,
n,
r_max,
r_min,
max_freq,
k,
gen_seed,
unique=False)
lp_ls_complete(data, color_mat, rvec, k, logfile)
#end es_fpt_3apx_complete_test()
################################################################################
if __name__ == '__main__':
test_lp_ls_complete()
| [
"feasibility.calculate",
"kmedkpm.k_median_k_partitions_LS",
"psutil.Process",
"sklearn.datasets.make_blobs",
"generator.get_feasible_instance",
"numpy.stack",
"local_search.kmedian_local_search",
"time.time"
] | [((865, 881), 'psutil.Process', 'psutil.Process', ([], {}), '()\n', (879, 881), False, 'import psutil\n'), ((895, 906), 'time.time', 'time.time', ([], {}), '()\n', (904, 906), False, 'import time\n'), ((922, 933), 'time.time', 'time.time', ([], {}), '()\n', (931, 933), False, 'import time\n'), ((952, 1056), 'feasibility.calculate', 'feasibility.calculate', (['k', 'rvec', 'color_mat', 'command', 'return_solution', 'logfile', 'many_solutions_lp_only'], {}), '(k, rvec, color_mat, command, return_solution, logfile,\n many_solutions_lp_only)\n', (973, 1056), False, 'import feasibility\n'), ((1588, 1599), 'time.time', 'time.time', ([], {}), '()\n', (1597, 1599), False, 'import time\n'), ((3053, 3142), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'n_samples': 'n', 'centers': 'k', 'n_features': 'd', 'random_state': '(12312)', 'cluster_std': '(0.8)'}), '(n_samples=n, centers=k, n_features=d, random_state=12312,\n cluster_std=0.8)\n', (3063, 3142), False, 'from sklearn.datasets import make_blobs\n'), ((3218, 3229), 'time.time', 'time.time', ([], {}), '()\n', (3227, 3229), False, 'import time\n'), ((3255, 3347), 'generator.get_feasible_instance', 'generator.get_feasible_instance', (['t', 'n', 'r_max', 'r_min', 'max_freq', 'k', 'gen_seed'], {'unique': '(False)'}), '(t, n, r_max, r_min, max_freq, k, gen_seed,\n unique=False)\n', (3286, 3347), False, 'import generator\n'), ((1982, 2048), 'kmedkpm.k_median_k_partitions_LS', 'k_median_k_partitions_LS', (['E', 'data', 'None', 'N', 'd', 'k'], {'is_coreset': '(False)'}), '(E, data, None, N, d, k, is_coreset=False)\n', (2006, 2048), False, 'from kmedkpm import k_median_k_partitions_LS\n'), ((2147, 2158), 'time.time', 'time.time', ([], {}), '()\n', (2156, 2158), False, 'import time\n'), ((2187, 2198), 'time.time', 'time.time', ([], {}), '()\n', (2196, 2198), False, 'import time\n'), ((2249, 2278), 'local_search.kmedian_local_search', 'kmedian_local_search', (['data', 'k'], {}), '(data, k)\n', (2269, 2278), False, 'from 
local_search import kmedian_local_search\n'), ((1427, 1453), 'numpy.stack', 'np.stack', (['solution'], {'axis': '(0)'}), '(solution, axis=0)\n', (1435, 1453), True, 'import numpy as np\n')] |
from django.utils.translation import ugettext_lazy as _
from mayan.apps.permissions import PermissionNamespace
namespace = PermissionNamespace(label=_('Dependencies'), name='dependencies')
permission_dependencies_view = namespace.add_permission(
label=_('View dependencies'), name='dependencies_view'
)
| [
"django.utils.translation.ugettext_lazy"
] | [((155, 172), 'django.utils.translation.ugettext_lazy', '_', (['"""Dependencies"""'], {}), "('Dependencies')\n", (156, 172), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((266, 288), 'django.utils.translation.ugettext_lazy', '_', (['"""View dependencies"""'], {}), "('View dependencies')\n", (267, 288), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
import os
import jinja2
import logging
from lib.config import Config
class Template:
def __init__(self, env_search_term="ENV"):
self.log_name = f'{Config.logger_name}.{self.__class__.__name__}'
self.log = logging.getLogger(self.log_name)
self.path = None
self.name = None
self.env_search_term = env_search_term
self.variables = self.get_variables()
def get_variables(self):
variables = {}
autosecondary = {}
for k, v in os.environ.items():
if "AUTOSECONDARY" in k:
obj = {k: v}
autosecondary.update(obj)
elif f"{self.env_search_term}_" in k:
k = k.replace(f"{self.env_search_term}_", "").replace("_", "-").lower()
obj = {k: v}
variables.update(obj)
return variables, autosecondary
def render_template(self, template, output_file):
"""
Takes template, output file and dictionary of variables.
Renders template with variables to the specified output file.
"""
self.path = os.path.dirname(template)
self.name = os.path.basename(template)
self.log.debug(f"Template path: {'Path_not_provided' if self.path is '' else self.path}")
self.log.debug(f"Template name: {self.name}")
# Remove file if exists
if os.path.exists(output_file):
self.log.info(f"Removing old file [{output_file}]")
os.remove(output_file)
# Write rendered template into file
self.log.info(f"Rendering template {template} to {output_file}")
data, autosecondary = self.variables
with open(output_file, 'w') as f:
f.write(self._load_template(self.name, self.path).render(data=data, autosecondary=autosecondary))
def _load_template(self, name, path=None):
"""
Takes template name and a path to the template directory
"""
# Guessing templates directory
if path is None or path == "":
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'templates')
self.log.info(f"Missing path to templates. Using default...")
self.log.info(f"Template path: {path}")
else:
self.log.info(f"Template path: {path}")
env = jinja2.Environment(loader=jinja2.FileSystemLoader(path))
return env.get_template(name)
| [
"logging.getLogger",
"os.path.exists",
"os.environ.items",
"os.path.dirname",
"os.path.realpath",
"os.path.basename",
"jinja2.FileSystemLoader",
"os.remove"
] | [((228, 260), 'logging.getLogger', 'logging.getLogger', (['self.log_name'], {}), '(self.log_name)\n', (245, 260), False, 'import logging\n'), ((504, 522), 'os.environ.items', 'os.environ.items', ([], {}), '()\n', (520, 522), False, 'import os\n'), ((1119, 1144), 'os.path.dirname', 'os.path.dirname', (['template'], {}), '(template)\n', (1134, 1144), False, 'import os\n'), ((1165, 1191), 'os.path.basename', 'os.path.basename', (['template'], {}), '(template)\n', (1181, 1191), False, 'import os\n'), ((1387, 1414), 'os.path.exists', 'os.path.exists', (['output_file'], {}), '(output_file)\n', (1401, 1414), False, 'import os\n'), ((1492, 1514), 'os.remove', 'os.remove', (['output_file'], {}), '(output_file)\n', (1501, 1514), False, 'import os\n'), ((2371, 2400), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (['path'], {}), '(path)\n', (2394, 2400), False, 'import jinja2\n'), ((2097, 2123), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2113, 2123), False, 'import os\n')] |
from typing import Optional
from pydantic import BaseModel, root_validator, validator
from fief.crypto.encryption import decrypt
from fief.db.types import DatabaseType
from fief.errors import APIErrorCode
from fief.schemas.generics import UUIDSchema
from fief.settings import settings
def validate_all_database_settings(cls, values):
database_type = values.get("database_type")
database_settings = [
values.get("database_host"),
values.get("database_port"),
values.get("database_username"),
values.get("database_password"),
values.get("database_name"),
]
if database_type is None and not any(database_settings):
return values
if database_type is None and any(database_settings):
raise ValueError(APIErrorCode.WORKSPACE_CREATE_MISSING_DATABASE_SETTINGS)
database_name = values.get("database_name")
if database_type == DatabaseType.SQLITE:
if database_name is None:
raise ValueError(APIErrorCode.WORKSPACE_CREATE_MISSING_DATABASE_SETTINGS)
else:
if not all(database_settings):
raise ValueError(APIErrorCode.WORKSPACE_CREATE_MISSING_DATABASE_SETTINGS)
return values
class WorkspaceCheckConnection(BaseModel):
database_type: DatabaseType
database_host: str
database_port: int
database_username: str
database_password: str
database_name: str
_validate_all_database_settings = root_validator(allow_reuse=True)(
validate_all_database_settings
)
class WorkspaceCreate(BaseModel):
name: str
database_type: Optional[DatabaseType]
database_host: Optional[str]
database_port: Optional[int]
database_username: Optional[str]
database_password: Optional[str]
database_name: Optional[str]
_validate_all_database_settings = root_validator(allow_reuse=True)(
validate_all_database_settings
)
class BaseWorkspace(UUIDSchema):
name: str
domain: str
class Workspace(BaseWorkspace):
database_type: Optional[DatabaseType]
database_host: Optional[str]
database_port: Optional[int]
database_username: Optional[str]
database_password: Optional[str]
database_name: Optional[str]
@validator(
"database_host",
"database_username",
"database_password",
"database_name",
pre=True,
)
def decrypt_database_setting(cls, value: Optional[str]) -> Optional[str]:
if value is None:
return value
return decrypt(value, settings.encryption_key)
@validator("database_port", pre=True)
def decrypt_database_port(cls, value: Optional[str]) -> Optional[int]:
if value is None:
return value
return int(decrypt(value, settings.encryption_key))
class WorkspacePublic(BaseWorkspace):
pass
| [
"fief.crypto.encryption.decrypt",
"pydantic.root_validator",
"pydantic.validator"
] | [((2224, 2323), 'pydantic.validator', 'validator', (['"""database_host"""', '"""database_username"""', '"""database_password"""', '"""database_name"""'], {'pre': '(True)'}), "('database_host', 'database_username', 'database_password',\n 'database_name', pre=True)\n", (2233, 2323), False, 'from pydantic import BaseModel, root_validator, validator\n'), ((2557, 2593), 'pydantic.validator', 'validator', (['"""database_port"""'], {'pre': '(True)'}), "('database_port', pre=True)\n", (2566, 2593), False, 'from pydantic import BaseModel, root_validator, validator\n'), ((1442, 1474), 'pydantic.root_validator', 'root_validator', ([], {'allow_reuse': '(True)'}), '(allow_reuse=True)\n', (1456, 1474), False, 'from pydantic import BaseModel, root_validator, validator\n'), ((1825, 1857), 'pydantic.root_validator', 'root_validator', ([], {'allow_reuse': '(True)'}), '(allow_reuse=True)\n', (1839, 1857), False, 'from pydantic import BaseModel, root_validator, validator\n'), ((2511, 2550), 'fief.crypto.encryption.decrypt', 'decrypt', (['value', 'settings.encryption_key'], {}), '(value, settings.encryption_key)\n', (2518, 2550), False, 'from fief.crypto.encryption import decrypt\n'), ((2739, 2778), 'fief.crypto.encryption.decrypt', 'decrypt', (['value', 'settings.encryption_key'], {}), '(value, settings.encryption_key)\n', (2746, 2778), False, 'from fief.crypto.encryption import decrypt\n')] |
# Generated by Django 3.2.12 on 2022-03-01 23:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('art', '0006_auto_20220301_1452'),
]
operations = [
migrations.AlterField(
model_name='artimage',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='art.project'),
),
]
| [
"django.db.models.ForeignKey"
] | [((369, 477), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""images"""', 'to': '"""art.project"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='images', to='art.project')\n", (386, 477), False, 'from django.db import migrations, models\n')] |
# Generated by Django 2.2.27 on 2022-04-16 16:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fehler_auth', '0002_auto_20211002_1511'),
]
operations = [
migrations.AlterField(
model_name='invite',
name='email',
field=models.EmailField(max_length=255),
),
]
| [
"django.db.models.EmailField"
] | [((340, 373), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (357, 373), False, 'from django.db import migrations, models\n')] |
from agents.agent import Agent
from random import randint
class Antifa(Agent):
def __init__(self):
super().__init__()
self.is_spy = False
def __str__(self):
return 'Basic Antifa'
def assign_mission(self, board):
number_to_assign = board.number_to_assign()
board.add_to_mission(self.seating_position)
while len(board.players_on_mission) < number_to_assign:
random_index = randint(0,board.number_of_players - 1)
if random_index not in board.players_on_mission:
board.add_to_mission(random_index)
def play_mission(self, board):
""" No other option but pass for the good guys """
return 'Pass'
def vote(self, board):
if board.stall_counter == 4:
return 1
return randint(0, 1)
| [
"random.randint"
] | [((821, 834), 'random.randint', 'randint', (['(0)', '(1)'], {}), '(0, 1)\n', (828, 834), False, 'from random import randint\n'), ((450, 489), 'random.randint', 'randint', (['(0)', '(board.number_of_players - 1)'], {}), '(0, board.number_of_players - 1)\n', (457, 489), False, 'from random import randint\n')] |
import numpy as np
import pandas as pd
import datetime
import json
class DateTimeTransformer(object):
def __init__(self):
self._new_columns = []
self._old_column = None
self._min_datetime = None
self._transforms = []
def fit(self, X, column):
self._old_column = column
self._min_datetime = np.min(X[column])
values = X[column].dt.year
if len(np.unique(values)) > 1:
self._transforms += ["year"]
new_column = column + "_Year"
self._new_columns += [new_column]
values = X[column].dt.month
if len(np.unique(values)) > 1:
self._transforms += ["month"]
new_column = column + "_Month"
self._new_columns += [new_column]
values = X[column].dt.day
if len(np.unique(values)) > 1:
self._transforms += ["day"]
new_column = column + "_Day"
self._new_columns += [new_column]
values = X[column].dt.weekday
if len(np.unique(values)) > 1:
self._transforms += ["weekday"]
new_column = column + "_WeekDay"
self._new_columns += [new_column]
values = X[column].dt.dayofyear
if len(np.unique(values)) > 1:
self._transforms += ["dayofyear"]
new_column = column + "_DayOfYear"
self._new_columns += [new_column]
values = X[column].dt.hour
if len(np.unique(values)) > 1:
self._transforms += ["hour"]
new_column = column + "_Hour"
self._new_columns += [new_column]
values = (X[column] - self._min_datetime).dt.days
if len(np.unique(values)) > 1:
self._transforms += ["days_diff"]
new_column = column + "_Days_Diff_To_Min"
self._new_columns += [new_column]
def transform(self, X):
column = self._old_column
if "year" in self._transforms:
new_column = column + "_Year"
X[new_column] = X[column].dt.year
if "month" in self._transforms:
new_column = column + "_Month"
X[new_column] = X[column].dt.month
if "day" in self._transforms:
new_column = column + "_Day"
X[new_column] = X[column].dt.day
if "weekday" in self._transforms:
new_column = column + "_WeekDay"
X[new_column] = X[column].dt.weekday
if "dayofyear" in self._transforms:
new_column = column + "_DayOfYear"
X[new_column] = X[column].dt.dayofyear
if "hour" in self._transforms:
new_column = column + "_Hour"
X[new_column] = X[column].dt.hour
if "days_diff" in self._transforms:
new_column = column + "_Days_Diff_To_Min"
X[new_column] = (X[column] - self._min_datetime).dt.days
X.drop(column, axis=1, inplace=True)
return X
def to_json(self):
return {
"new_columns": list(self._new_columns),
"old_column": self._old_column,
"min_datetime": str(self._min_datetime),
"transforms": list(self._transforms),
}
def from_json(self, data_json):
self._new_columns = data_json.get("new_columns", None)
self._old_column = data_json.get("old_column", None)
d = data_json.get("min_datetime", None)
self._min_datetime = None if d is None else pd.to_datetime(d)
self._transforms = data_json.get("transforms", [])
| [
"numpy.unique",
"pandas.to_datetime",
"numpy.min"
] | [((349, 366), 'numpy.min', 'np.min', (['X[column]'], {}), '(X[column])\n', (355, 366), True, 'import numpy as np\n'), ((3456, 3473), 'pandas.to_datetime', 'pd.to_datetime', (['d'], {}), '(d)\n', (3470, 3473), True, 'import pandas as pd\n'), ((418, 435), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (427, 435), True, 'import numpy as np\n'), ((623, 640), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (632, 640), True, 'import numpy as np\n'), ((828, 845), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (837, 845), True, 'import numpy as np\n'), ((1033, 1050), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (1042, 1050), True, 'import numpy as np\n'), ((1248, 1265), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (1257, 1265), True, 'import numpy as np\n'), ((1462, 1479), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (1471, 1479), True, 'import numpy as np\n'), ((1689, 1706), 'numpy.unique', 'np.unique', (['values'], {}), '(values)\n', (1698, 1706), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'd:\MITE12\ksapriori\gui\sales_transaction.ui'
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_FormSalesTransaction(object):
def setupUi(self, FormSalesTransaction):
FormSalesTransaction.setObjectName("FormSalesTransaction")
FormSalesTransaction.resize(989, 466)
self.groupBox = QtWidgets.QGroupBox(FormSalesTransaction)
self.groupBox.setGeometry(QtCore.QRect(10, 10, 181, 451))
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.groupBox.setFont(font)
self.groupBox.setObjectName("groupBox")
self.pushButton = QtWidgets.QPushButton(self.groupBox)
self.pushButton.setGeometry(QtCore.QRect(80, 190, 75, 23))
self.pushButton.setObjectName("pushButton")
self.widget = QtWidgets.QWidget(self.groupBox)
self.widget.setGeometry(QtCore.QRect(10, 31, 151, 140))
self.widget.setObjectName("widget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.widget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.label = QtWidgets.QLabel(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.label.setFont(font)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
self.fromDate = QtWidgets.QDateEdit(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.fromDate.setFont(font)
self.fromDate.setObjectName("fromDate")
self.verticalLayout.addWidget(self.fromDate)
self.label_2 = QtWidgets.QLabel(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.label_2)
self.toDate = QtWidgets.QDateEdit(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.toDate.setFont(font)
self.toDate.setObjectName("toDate")
self.verticalLayout.addWidget(self.toDate)
self.label_3 = QtWidgets.QLabel(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.verticalLayout.addWidget(self.label_3)
self.lineEdit = QtWidgets.QLineEdit(self.widget)
self.lineEdit.setObjectName("lineEdit")
self.verticalLayout.addWidget(self.lineEdit)
self.tableWidget = QtWidgets.QTableWidget(FormSalesTransaction)
self.tableWidget.setGeometry(QtCore.QRect(200, 20, 781, 441))
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(8)
self.tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(7, item)
self.retranslateUi(FormSalesTransaction)
QtCore.QMetaObject.connectSlotsByName(FormSalesTransaction)
def retranslateUi(self, FormSalesTransaction):
_translate = QtCore.QCoreApplication.translate
FormSalesTransaction.setWindowTitle(_translate("FormSalesTransaction", "Sales Transaction"))
self.groupBox.setTitle(_translate("FormSalesTransaction", "Filters"))
self.pushButton.setText(_translate("FormSalesTransaction", "Search"))
self.label.setText(_translate("FormSalesTransaction", "From Date"))
self.label_2.setText(_translate("FormSalesTransaction", "To Date"))
self.label_3.setText(_translate("FormSalesTransaction", "Item Code"))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("FormSalesTransaction", "Document No"))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("FormSalesTransaction", "Posting Date"))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("FormSalesTransaction", "Item Code"))
item = self.tableWidget.horizontalHeaderItem(3)
item.setText(_translate("FormSalesTransaction", "Item Label"))
item = self.tableWidget.horizontalHeaderItem(4)
item.setText(_translate("FormSalesTransaction", "Description"))
item = self.tableWidget.horizontalHeaderItem(5)
item.setText(_translate("FormSalesTransaction", "Quantity"))
item = self.tableWidget.horizontalHeaderItem(6)
item.setText(_translate("FormSalesTransaction", "Price"))
item = self.tableWidget.horizontalHeaderItem(7)
item.setText(_translate("FormSalesTransaction", "Amount"))
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtWidgets.QDateEdit",
"PyQt5.QtGui.QFont",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QGroupBox",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QVBoxLayout",
"Py... | [((513, 554), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['FormSalesTransaction'], {}), '(FormSalesTransaction)\n', (532, 554), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((638, 651), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (649, 651), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((831, 867), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (852, 867), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1012, 1044), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.groupBox'], {}), '(self.groupBox)\n', (1029, 1044), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1186, 1220), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.widget'], {}), '(self.widget)\n', (1207, 1220), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1364, 1393), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (1380, 1393), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1410, 1423), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1421, 1423), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1643, 1675), 'PyQt5.QtWidgets.QDateEdit', 'QtWidgets.QDateEdit', (['self.widget'], {}), '(self.widget)\n', (1662, 1675), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1692, 1705), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1703, 1705), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1936, 1965), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (1952, 1965), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1982, 1995), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1993, 1995), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2221, 2253), 'PyQt5.QtWidgets.QDateEdit', 'QtWidgets.QDateEdit', (['self.widget'], {}), '(self.widget)\n', (2240, 2253), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((2270, 2283), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2281, 2283), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2506, 2535), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.widget'], {}), '(self.widget)\n', (2522, 2535), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2552, 2565), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2563, 2565), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2793, 2825), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.widget'], {}), '(self.widget)\n', (2812, 2825), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2957, 3001), 'PyQt5.QtWidgets.QTableWidget', 'QtWidgets.QTableWidget', (['FormSalesTransaction'], {}), '(FormSalesTransaction)\n', (2979, 3001), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3229, 3257), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3255, 3257), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3333, 3361), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3359, 3361), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3437, 3465), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3463, 3465), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3541, 3569), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3567, 3569), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3645, 3673), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3671, 3673), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3749, 3777), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3775, 3777), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3853, 3881), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3879, 3881), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((3957, 3985), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3983, 3985), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4106, 4165), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['FormSalesTransaction'], {}), '(FormSalesTransaction)\n', (4143, 4165), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((590, 620), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(10)', '(181)', '(451)'], {}), '(10, 10, 181, 451)\n', (602, 620), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((905, 934), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(80)', '(190)', '(75)', '(23)'], {}), '(80, 190, 75, 23)\n', (917, 934), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1078, 1108), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(31)', '(151)', '(140)'], {}), '(10, 31, 151, 140)\n', (1090, 1108), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3040, 3071), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(200)', '(20)', '(781)', '(441)'], {}), '(200, 20, 781, 441)\n', (3052, 3071), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.app_name }}',
version='0.0.1',
description='{{ cookiecutter.description }}',
author='{{ cookiecutter.author }}',
author_email='{{ cookiecutter.author_email }}',
license='{{ cookiecutter.license }}',
packages=find_packages(
exclude=['docs', 'tests', 'android']
),
classifiers=[
'Development Status :: 1 - Planning',
'License :: OSI Approved :: {{ cookiecutter.license }}',
],
install_requires=[
],
options={
'app': {
'formal_name': '{{ cookiecutter.formal_name }}',
'bundle': '{{ cookiecutter.bundle }}'
},
}
)
| [
"setuptools.find_packages"
] | [((333, 384), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs', 'tests', 'android']"}), "(exclude=['docs', 'tests', 'android'])\n", (346, 384), False, 'from setuptools import setup, find_packages\n')] |
from flask import Flask, request, render_template, send_file, Response
import io
import base64
import csv
import json
import time
from collections import OrderedDict
import numpy
import pandas as pd
from numpy import genfromtxt
from flask import jsonify
from flask_cors import CORS
from LoadingNetwork import EchoWebSocket
import shutil
import gc
from tornado.wsgi import WSGIContainer
from tornado.web import Application, FallbackHandler
from tornado.websocket import WebSocketHandler
from tornado.ioloop import IOLoop
app = Flask('flasknado')
#app = Flask(__name__)
app.debug = True
CORS(app)
##initial netwrok csv data############################
rawdata = open('NetworkWithDistance.txt')
with open('NetworkWithDistance.txt') as f:
rawdata = f.readlines()
# you may also want to remove whitespace characters like `\n` at the end
# of each line
rawdata = [x.strip() for x in rawdata]
my_data = genfromtxt('networkwithdist.csv', delimiter=',')
# my_data=numpy.delete(my_data,(0),axis=0)
header = ['id', 'id_to', 'lon', 'lat', 'basinid']
frame = pd.DataFrame(my_data, columns=header)
data = []
MY_GLOBAL = []
with open('tempcsv.csv') as f:
for line in f:
temp = line.strip().split(',')
data.append(temp)
#############################
data1 = []
with open('MyFile1.txt') as f:
r = 0
for line in f:
if(r > 0):
data2 = []
# print(line)
temp = line.split("\",")
data2.append(temp[0][1:])
temp1 = temp[1].split(",[")
data2.append(temp1[0])
data2.append(temp1[1][:-2])
data1.append(data2)
r += 1
header = ['celllist', 'cellid', 'cellto']
frame_celllist = pd.DataFrame(data1, columns=header)
frame_celllist = frame_celllist.drop_duplicates()
del data1[:]
##################
data_c = []
with open('powerplant_cell_loc.csv') as f:
r = 0
for line in f:
if(r > 0):
data_cc = line.split(",")
data_c.append(data_cc)
# print(line)
r += 1
header = ['cellid', 'loc']
frame_cell = pd.DataFrame(data_c, columns=header)
frame_cell = frame_cell.drop_duplicates()
del data_c[:]
########################################################
import os
import sys
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
# class MyHTTPRequestHandler(SimpleHTTPRequestHandler):
# def translate_path(self,path):
# path = SimpleHTTPRequestHandler.translate_path(self,path)
# if os.path.isdir(path):
# for base in "index", "default":
# for ext in ".html", ".htm", ".txt":
# index = path + "/" + base + ext
# if os.path.exists(index):
# return index
# return path
# def test(HandlerClass = MyHTTPRequestHandler,
# ServerClass = BaseHTTPServer.HTTPServer):
# BaseHTTPServer.test(HandlerClass, ServerClass)
##################travesal network upstream############
'''def find_upstream(value):
gc.collect()
ii=0
li = []
temp=[]
a=frame.ix[int(value)]
temp.append(a)
#print(MY_GLOBAL)
MY_GLOBAL[:]=[]
#x=data[int(value)]
#x=frame[frame['id']==a['id_to']]
#print x
i=0
z=0
zz=0
while zz<len(temp):
item=temp[zz]
zz+=1
##print(z,len(temp))
## item=temp.pop()
## print item
#x=frame[frame['id_to']==item['id']]
x=data[int(float(item['id']))]
#print x
i=1
while i<len(x) :
# d = OrderedDict()
# xx=x.loc[x.index[i]]
xx=frame.ix[int(float(x[i]))]
# d['type'] = 'Feature'
# d['geometry'] = {
# 'type': 'MultiLineString',
# 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
# }
# d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
# }
# li.append(d)
i+=1
# ii+=1
##if ii%1000==0:
## print ii
temp.append(xx)
print(len(temp))
while z<len(temp):
item=temp[z]
z+=1
##print(z,len(temp))
## item=temp.pop()
## print item
#x=frame[frame['id_to']==item['id']]
x=data[int(float(item['id']))]
#print x
i=1
while i<len(x) :
d = OrderedDict()
#xx=x.loc[x.index[i]]
xx=frame.ix[int(float(x[i]))]
d['type'] = 'Feature'
d['geometry'] = {
'type': 'MultiLineString',
'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
}
d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
}
li.append(d)
d = OrderedDict()
#xx=x.loc[x.index[i]]
# xx=frame.ix[int(float(x[i]))]
i+=1
ii+=1
if ii%1000==0 or (ii+1)/len(temp)==1:
MY_GLOBAL.append((int)((ii+1)/(len(temp)* 1.0)*100))
## print(checkInt,ii,len(temp))
## print ii
# temp.append(xx)
#d = OrderedDict()
#d['type'] = 'FeatureCollection'
#d['features'] = li
#print li
print(ii)
return li,200'''
def find_upstream(value):
gc.collect()
ii = 0
li = []
temp = []
a = frame.ix[int(value)]
temp.append(int(value))
MY_GLOBAL[:] = []
i = 0
z = 0
zz = 0
jstring = ''
while z < len(temp):
item = frame.ix[temp[z]]
z += 1
x = data[int(float(item['id']))]
#print x
i = 1
while i < len(x):
xx = frame.ix[int(float(x[i]))]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
ii += 1
temp.append(int(float(x[i])))
i += 1
if ii % 1000 == 0:
# print(ii)
MY_GLOBAL.append((int)((ii + 1) / (200000 * 1.0) * 100))
# print(checkInt,ii,len(temp))
## print ii
# temp.append(xx)
#d = OrderedDict()
#d['type'] = 'FeatureCollection'
#d['features'] = li
#print li
# print(jstring)
MY_GLOBAL.append(100)
return jstring[:-1], 200
##################travesal network downstream############
def find_downstream(value, sourceid):
#print value,sourceid
ii = 0
li = []
temp = []
jstring = ''
# MY_GLOBAL[:]=[]
a = frame.ix[int(value)]
temp.append(a)
check = True
z = 0
while z < len(temp) and check:
item = temp[z]
z += 1
if(item['id_to'] == sourceid):
check = False
# break
## print item
# if(item['id']==sourceid):
# check=False
x = frame.ix[frame['id'] == item['id_to']]
#print x
i = 0
while i < len(x):
# d = OrderedDict()
xx = x.ix[x.index[i]]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
# d['type'] = 'Feature'
# d['geometry'] = {
# 'type': 'MultiLineString',
# 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
# }
# d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
# }
# li.append(d)
# d=OrderedDict()
i += 1
ii += 1
temp.append(xx)
# if(item['id']==sourceid):
# check=False
# MY_GLOBAL.append(100)
# d = OrderedDict()
# d['type'] = 'FeatureCollection'
# d['features'] = li
# print li
# if (check==False):
return jstring[:-1], 200
##################travesal network downstream############
def find_downstream1(value):
#print value,sourceid
ii = 0
li = []
temp = []
jstring = ''
# MY_GLOBAL[:]=[]
a = frame.ix[int(value)]
temp.append(a)
check = True
z = 0
while z < len(temp) and check:
item = temp[z]
z += 1
## print item
# if(item['id']==sourceid):
# check=False
x = frame.ix[frame['id'] == item['id_to']]
#print x
i = 0
while i < len(x):
# d = OrderedDict()
xx = x.ix[x.index[i]]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
# d['type'] = 'Feature'
# d['geometry'] = {
# 'type': 'MultiLineString',
# 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
# }
# d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
# }
# li.append(d)
# d=OrderedDict()
i += 1
ii += 1
temp.append(xx)
# if(item['id']==sourceid):
# check=False
# MY_GLOBAL.append(100)
# d = OrderedDict()
# d['type'] = 'FeatureCollection'
# d['features'] = li
# print li
# if (check==False):
return jstring[:-1], 200
#######################pp upstream#######################
def find_upstream_pp(cellid):
gc.collect()
# header=['celllist','cellid','cellto']
# header=['cellid','loc']
templi = frame_celllist[frame_celllist['cellid']
== cellid]['celllist'].tolist()
templist = templi[0][1:-1].split(",")
z = 0
jstring = ''
while z < len(templist):
curid = templist[z].strip()
# print(curid,templist)
curidloc = frame_cell[frame_cell['cellid'] == curid]['loc'].tolist()
curidloc1 = curidloc[0].split("_")
# print(curidloc1[0],curidloc1[1][:-1],curidloc[0])
z += 1
temp = frame_celllist[frame_celllist['cellid']
== curid]['cellto'].tolist()
print(temp)
temp = temp[0].split(",")
if len(temp) == 1 and temp[0][:-1] == "none":
# print(temp[0])
continue
else:
zz = 0
while zz < len(temp):
# print(temp[zz],temp)
x = temp[zz]
zz += 1
if zz == len(temp):
nextloc = frame_cell[frame_cell['cellid']
== x[:-1]]['loc'].tolist()
else:
nextloc = frame_cell[frame_cell['cellid']
== x]['loc'].tolist()
nextloc1 = nextloc[0].split("_")
# print(nextloc1[0],nextloc1[1][:-1],nextloc1)
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(curidloc1[0]) + ',' + str(curidloc1[1][:-1]) + '],[' + str(
nextloc1[0]) + ',' + str(nextloc1[1][:-1]) + ']]]},"properties": {"lat":' + str(curidloc1[1][:-1]) + ',"lon": ' + str(curidloc1[0]) + '}},'
# jstring+='{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[['+str(float(xx['lon']))+','+str(float(xx['lat']))+'],['+str(float(item['lon']))+','+str(float(item['lat']))+']]]},"properties": {"id_to": '+str(int(xx['id_to']))+',"id":'+str(int(xx['id']))+',"lat":'+str(float(xx['lat']))+',"lon": '+str(float(xx['lon']))+'}},';
return jstring[:-1], 200
#######################pp downstream#######################
def find_downstream_pp(cellid, dcellid):
gc.collect()
# header=['celllist','cellid','cellto']
# header=['cellid','loc']
print(cellid, dcellid)
templi = frame_celllist[frame_celllist['cellid']
== cellid]['celllist'].tolist()
templist = templi[0][1:-1].split(",")
z = len(templist) - 1
jstring = ''
while z > 0:
print(templist[z].strip())
curid = templist[z].strip()
if curid != str(dcellid):
z -= 1
else:
print(z)
break
while z > 0:
curid = templist[z].strip()
# print(curid,templist)
curidloc = frame_cell[frame_cell['cellid'] == curid]['loc'].tolist()
curidloc1 = curidloc[0].split("_")
# print(curidloc1[0],curidloc1[1][:-1],curidloc[0])
temp = frame_celllist[frame_celllist['cellid']
== templist[z].strip()]['cellto'].tolist()
z -= 1
print(temp)
temp = temp[0].split(",")
if len(temp) == 1 and temp[0][:-1] == "none":
# print(temp[0])
z -= 1
continue
else:
zz = 0
aaaa = 'false'
while zz < len(temp):
# print(temp[zz],temp)
x = temp[zz]
zz += 1
if zz == len(temp):
if x[:-1] == curid:
aaaa = 'true'
nextloc = frame_cell[frame_cell['cellid']
== x[:-1]]['loc'].tolist()
else:
if x == curid:
aaaa = 'true'
nextloc = frame_cell[frame_cell['cellid']
== x]['loc'].tolist()
if aaaa == 'true':
nextloc1 = nextloc[0].split("_")
# print(nextloc1[0],nextloc1[1][:-1],nextloc1)
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(curidloc1[0]) + ',' + str(curidloc1[1][:-1]) + '],[' + str(
nextloc1[0]) + ',' + str(nextloc1[1][:-1]) + ']]]},"properties": {"lat":' + str(curidloc1[1][:-1]) + ',"lon": ' + str(curidloc1[0]) + '}},'
# jstring+='{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[['+str(float(xx['lon']))+','+str(float(xx['lat']))+'],['+str(float(item['lon']))+','+str(float(item['lat']))+']]]},"properties": {"id_to": '+str(int(xx['id_to']))+',"id":'+str(int(xx['id']))+',"lat":'+str(float(xx['lat']))+',"lon": '+str(float(xx['lon']))+'}},';
print(jstring)
if len(jstring) > 0:
return jstring[:-1], 200
else:
return jstring, 200
@app.route("/", methods=['GET', 'POST'])
def index():
print(request)
return render_template('test1.html')
@app.route("/api/", methods=['GET', 'POST'])
def update():
print(request.method)
if request.method == "POST":
source = request.form["source"]
dist = request.form["dist"]
pic = request.form["pic"]
downfirst = request.form["downfirst"]
pp = request.form["pp"]
print(pp, source, dist, downfirst, pic)
if(pp == 'yes'):
upstream = request.form["upstream"]
if(upstream == 'yes'):
ucellid = request.form["ucellid"]
re, ii = find_upstream_pp(ucellid)
# print(re)
return json.dumps(re), ii
# if(upstream=='no'):
### ucellid = request.form["ucellid"]
# dcellid = request.form["dcellid"]
# re,ii=find_downstream_pp(ucellid,dcellid)
# print(re)
# if(pp=='no'):
source = request.form["source"]
dist = request.form["dist"]
pic = request.form["pic"]
downfirst = request.form["downfirst"]
#print dist
if(downfirst == 'no'):
if(source == 'yes'):
sourceid = request.form["sourceid"]
#print sourceid
import time
start = time. time()
re, ii = find_upstream(sourceid)
end = time. time()
#print ii,(end-start)
# print(re)
# print(MY_GLOBAL)
return json.dumps(re), ii
if(dist == 'yes'):
distid = request.form["distid"]
sourceid = request.form["sourceid"]
MY_GLOBAL[:] = []
#print distid,sourceid
re, ii = find_downstream(int(distid), int(sourceid))
print (re)
gc.collect()
MY_GLOBAL.append(100)
return json.dumps(re, sort_keys=False, indent=4), ii
if(downfirst == 'yes'):
if(dist == 'yes'):
distid = request.form["distid"]
sourceid = request.form["sourceid"]
MY_GLOBAL[:] = []
#print distid,sourceid
re, ii = find_downstream1(int(distid))
print (re)
gc.collect()
MY_GLOBAL.append(100)
return json.dumps(re, sort_keys=False, indent=4), ii
if(pic == 'yes'):
#print request.form
MY_GLOBAL[:] = []
start1 = request.form["dist_lat"]
start2 = request.form["dist_lon"]
goal1 = request.form["source_lat"]
goal2 = request.form["source_lon"]
fromdate = request.form["from"]
todate = request.form["to"]
import time
before = time.time()
output, str1, str2, str3 = LoadingNetwork.main(
[start1, start2], [goal1, goal2], fromdate, todate, rawdata)
#print str1,str2,str3
after = time.time()
print ("time,", after - before)
if(isinstance(output, str)):
return output, 201
else:
# gc.collect()
#print base64.b64encode(output.getvalue())
return base64.b64encode(
output.getvalue()) + "***" + str1 + "***" + str2 + "***" + str3, 200
class WebSocket(WebSocketHandler):
def on_message(self, message):
# self.write_message("Received: " + message)
# self.write_message("Received2: " + message)
# m=message.split("&")
print("Received message: " + m[0])
print("Received message: " + m[1])
print("Received message: " + m[2])
print("Received message: " + m[3])
print("Received message: " + m[4])
print("Received message: " + m[5])
print("Received message: " + m[6])
m=message[1:-1].split("&")
source = m[0].split("=")[1]
value = m[1].split("=")[1]
dist = m[2].split("=")[1]
value1 = m[3].split("=")[1]
pic = m[4].split("=")[1]
downfirst = m[5].split("=")[1]
pp = m[6].split("=")
print(pp, source, dist, downfirst, pic,value,value1)
###################################upstram##########################3
if(downfirst == 'no'):
if(source == 'yes'):
##################
gc.collect()
ii = 0
li = []
temp = []
a = frame.ix[int(value)]
temp.append(int(value))
i = 0
z = 0
zz = 0
jstring = ''
while z < len(temp):
item = frame.ix[temp[z]]
z += 1
x = data[int(float(item['id']))]
#print x
i = 1
while i < len(x):
xx = frame.ix[int(float(x[i]))]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
ii += 1
temp.append(int(float(x[i])))
i += 1
if(len(jstring)>1500000):
zz+=5
self.write_message( jstring[:-1])
self.write_message( '~'+str(zz*1.0/100))
jstring = ''
self.write_message( jstring[:-1])
self.write_message( '~1')
############################downstream#########################
if(dist == 'yes'):
########################################################################
ii = 0
li = []
temp = []
jstring = ''
# MY_GLOBAL[:]=[]
a = frame.ix[int(value1)]
temp.append(a)
check = True
z = 0
zz=0
while z < len(temp) and check:
item = temp[z]
z += 1
if(item['id_to'] == int(value)):
check = False
x = frame.ix[frame['id'] == item['id_to']]
#print x
i = 0
while i < len(x):
# d = OrderedDict()
xx = x.ix[x.index[i]]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
i += 1
ii += 1
temp.append(xx)
if(len(jstring)>150000):
zz+=5
self.write_message( jstring[:-1])
self.write_message( '~'+str(zz*1.0/100))
jstring = ''
self.write_message( jstring[:-1])
self.write_message( '~1')
##########################downfirst##############################################
if(downfirst == 'yes'):
if(dist == 'yes'):
ii = 0
li = []
temp = []
jstring = ''
# MY_GLOBAL[:]=[]
a = frame.ix[int(value1)]
temp.append(a)
z = 0
zz=0
while z < len(temp) :
item = temp[z]
z += 1
# break
## print item
# if(item['id']==sourceid):
# check=False
x = frame.ix[frame['id'] == item['id_to']]
#print x
i = 0
while i < len(x):
# d = OrderedDict()
xx = x.ix[x.index[i]]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
# d['type'] = 'Feature'
# d['geometry'] = {
# 'type': 'MultiLineString',
# 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
# }
# d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
# }
# li.append(d)
# d=OrderedDict()
i += 1
ii += 1
temp.append(xx)
# if(item['id']==sourceid):
# check=False
# MY_GLOBAL.append(100)
# d = OrderedDict()
# d['type'] = 'FeatureCollection'
# d['features'] = li
# print li
# if (check==False):
if(len(jstring)>150000):
zz+=5
self.write_message( jstring[:-1])
self.write_message( '~'+str(zz*1.0/100))
jstring = ''
self.write_message( jstring[:-1])
self.write_message( '~1')
# if(downfirst == 'yes'):
if(pic == 'yes'):
#print request.form
#"&dist_lat="+dist_lat+"&dist_lon="+dist_lon+"&source_lat="+source_lat+"&source_lon="+source_lon+"&from="+value3.value+"&to="+value4.value);
#m[6].split("=")
# start1 = request.form["dist_lat"]
# start2 = request.form["dist_lon"]
# goal1 = request.form["source_lat"]
# goal2 = request.form["source_lon"]
# fromdate = request.form["from"]
# todate = request.form["to"]
start1 = m[7].split("=")[1]
start2 = m[8].split("=")[1]
goal1 =m[9].split("=")[1]
goal2 = m[10].split("=")[1]
fromdate = m[11].split("=")[1]
todate = m[12].split("=")[1]
print(start1,start2,goal1,goal2,fromdate,todate)
import time
before = time.time()
output, str1, str2, str3 = LoadingNetwork.main(
[start1, start2], [goal1, goal2], fromdate, todate, rawdata)
#print str1,str2,str3
# print(output)
after = time.time()
print ("time,", after - before)
# if(isinstance(output, str)):
# return output, 201
# else:
# gc.collect()
#print base64.b64encode(output.getvalue())
# return base64.b64encode(
# output.getvalue()) + "***" + str1 + "***" + str2 + "***" + str3, 200
#
if __name__ == "__main__":
container = WSGIContainer(app)
server = Application([
(r'/websocket/', WebSocket),
(r'/we/', EchoWebSocket),
(r'.*', FallbackHandler, dict(fallback=container))
])
server.listen(5000)
IOLoop.instance().start()
# test()
| [
"flask.render_template",
"flask_cors.CORS",
"flask.Flask",
"json.dumps",
"tornado.ioloop.IOLoop.instance",
"gc.collect",
"time.time",
"pandas.DataFrame",
"tornado.wsgi.WSGIContainer",
"numpy.genfromtxt"
] | [((547, 565), 'flask.Flask', 'Flask', (['"""flasknado"""'], {}), "('flasknado')\n", (552, 565), False, 'from flask import Flask, request, render_template, send_file, Response\n'), ((609, 618), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (613, 618), False, 'from flask_cors import CORS\n'), ((936, 984), 'numpy.genfromtxt', 'genfromtxt', (['"""networkwithdist.csv"""'], {'delimiter': '""","""'}), "('networkwithdist.csv', delimiter=',')\n", (946, 984), False, 'from numpy import genfromtxt\n'), ((1089, 1126), 'pandas.DataFrame', 'pd.DataFrame', (['my_data'], {'columns': 'header'}), '(my_data, columns=header)\n', (1101, 1126), True, 'import pandas as pd\n'), ((1751, 1786), 'pandas.DataFrame', 'pd.DataFrame', (['data1'], {'columns': 'header'}), '(data1, columns=header)\n', (1763, 1786), True, 'import pandas as pd\n'), ((2140, 2176), 'pandas.DataFrame', 'pd.DataFrame', (['data_c'], {'columns': 'header'}), '(data_c, columns=header)\n', (2152, 2176), True, 'import pandas as pd\n'), ((5483, 5495), 'gc.collect', 'gc.collect', ([], {}), '()\n', (5493, 5495), False, 'import gc\n'), ((10571, 10583), 'gc.collect', 'gc.collect', ([], {}), '()\n', (10581, 10583), False, 'import gc\n'), ((12856, 12868), 'gc.collect', 'gc.collect', ([], {}), '()\n', (12866, 12868), False, 'import gc\n'), ((15728, 15757), 'flask.render_template', 'render_template', (['"""test1.html"""'], {}), "('test1.html')\n", (15743, 15757), False, 'from flask import Flask, request, render_template, send_file, Response\n'), ((27708, 27726), 'tornado.wsgi.WSGIContainer', 'WSGIContainer', (['app'], {}), '(app)\n', (27721, 27726), False, 'from tornado.wsgi import WSGIContainer\n'), ((18608, 18619), 'time.time', 'time.time', ([], {}), '()\n', (18617, 18619), False, 'import time\n'), ((18815, 18826), 'time.time', 'time.time', ([], {}), '()\n', (18824, 18826), False, 'import time\n'), ((27035, 27046), 'time.time', 'time.time', ([], {}), '()\n', (27044, 27046), False, 'import time\n'), ((27269, 27280), 
'time.time', 'time.time', ([], {}), '()\n', (27278, 27280), False, 'import time\n'), ((27931, 27948), 'tornado.ioloop.IOLoop.instance', 'IOLoop.instance', ([], {}), '()\n', (27946, 27948), False, 'from tornado.ioloop import IOLoop\n'), ((17046, 17057), 'time.time', 'time.time', ([], {}), '()\n', (17055, 17057), False, 'import time\n'), ((17134, 17145), 'time.time', 'time.time', ([], {}), '()\n', (17143, 17145), False, 'import time\n'), ((17606, 17618), 'gc.collect', 'gc.collect', ([], {}), '()\n', (17616, 17618), False, 'import gc\n'), ((18057, 18069), 'gc.collect', 'gc.collect', ([], {}), '()\n', (18067, 18069), False, 'import gc\n'), ((20267, 20279), 'gc.collect', 'gc.collect', ([], {}), '()\n', (20277, 20279), False, 'import gc\n'), ((16394, 16408), 'json.dumps', 'json.dumps', (['re'], {}), '(re)\n', (16404, 16408), False, 'import json\n'), ((17277, 17291), 'json.dumps', 'json.dumps', (['re'], {}), '(re)\n', (17287, 17291), False, 'import json\n'), ((17682, 17723), 'json.dumps', 'json.dumps', (['re'], {'sort_keys': '(False)', 'indent': '(4)'}), '(re, sort_keys=False, indent=4)\n', (17692, 17723), False, 'import json\n'), ((18133, 18174), 'json.dumps', 'json.dumps', (['re'], {'sort_keys': '(False)', 'indent': '(4)'}), '(re, sort_keys=False, indent=4)\n', (18143, 18174), False, 'import json\n')] |
import svgwrite
def cross(dwg, x, y, r = 5):
dwg.add(dwg.line((x - r, y), (x + r, y), stroke='red', stroke_width=.1))
dwg.add(dwg.line((x, y - r), (x, y + r), stroke='red', stroke_width=.1))
if __name__ == '__main__':
dwg = svgwrite.Drawing('test.svg', size=('150mm', '150mm'), viewBox=('0 0 150 150'))
cross(dwg, 5, 5)
cross(dwg, 145, 5)
cross(dwg, 145, 145)
cross(dwg, 5, 145)
dwg.save()
| [
"svgwrite.Drawing"
] | [((238, 314), 'svgwrite.Drawing', 'svgwrite.Drawing', (['"""test.svg"""'], {'size': "('150mm', '150mm')", 'viewBox': '"""0 0 150 150"""'}), "('test.svg', size=('150mm', '150mm'), viewBox='0 0 150 150')\n", (254, 314), False, 'import svgwrite\n')] |
from poketype import PokemonTypeIdentifier
from flask import Flask, request, make_response,jsonify
import os
id = PokemonTypeIdentifier()
app = Flask(__name__,static_url_path='/static')
@app.route('/findtype',methods=['GET'])
def classify():
poke_name=request.args.get('pokename')
results = id.predict_type(poke_name)
return jsonify({'results':results})
@app.route('/',methods=['GET'])
def root():
return app.send_static_file('index.html')
if __name__ == '__main__':
port = int(os.environ.get('PORT', 8001))
app.run(debug=True,host='0.0.0.0',port=port,use_reloader=False)
| [
"flask.request.args.get",
"poketype.PokemonTypeIdentifier",
"flask.Flask",
"os.environ.get",
"flask.jsonify"
] | [((114, 137), 'poketype.PokemonTypeIdentifier', 'PokemonTypeIdentifier', ([], {}), '()\n', (135, 137), False, 'from poketype import PokemonTypeIdentifier\n'), ((144, 186), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '"""/static"""'}), "(__name__, static_url_path='/static')\n", (149, 186), False, 'from flask import Flask, request, make_response, jsonify\n'), ((257, 285), 'flask.request.args.get', 'request.args.get', (['"""pokename"""'], {}), "('pokename')\n", (273, 285), False, 'from flask import Flask, request, make_response, jsonify\n'), ((338, 367), 'flask.jsonify', 'jsonify', (["{'results': results}"], {}), "({'results': results})\n", (345, 367), False, 'from flask import Flask, request, make_response, jsonify\n'), ((499, 527), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(8001)'], {}), "('PORT', 8001)\n", (513, 527), False, 'import os\n')] |
from django.shortcuts import render
from leads.models import Lead
from leads.serializers import LeadSerializer
from rest_framework import generics
# Create your views here.
class LeadListCreate(generics.ListCreateAPIView):
queryset = Lead.objects.all()
serializer_class = LeadSerializer
| [
"leads.models.Lead.objects.all"
] | [((239, 257), 'leads.models.Lead.objects.all', 'Lead.objects.all', ([], {}), '()\n', (255, 257), False, 'from leads.models import Lead\n')] |
#
# This file is part of LUNA.
#
import shutil
import tempfile
import argparse
from nmigen import Elaboratable
from .gateware.platform import get_appropriate_platform
def top_level_cli(fragment, *pos_args, **kwargs):
""" Runs a default CLI that assists in building and running gateware.
If the user's options resulted in the board being programmed, this returns the fragment
that was programmed onto the board. Otherwise, it returns None.
"""
parser = argparse.ArgumentParser(description="Gateware generation/upload script for '{}' gateware.".format(fragment.__class__.__name__))
parser.add_argument('--output', '-o', metavar='filename', help="Build and output a bitstream to the given file.")
parser.add_argument('--erase', '-E', action='store_true',
help="Clears the relevant FPGA's flash before performing other options.")
parser.add_argument('--upload', '-U', action='store_true',
help="Uploads the relevant design to the target hardware. Default if no options are provided.")
parser.add_argument('--flash', '-F', action='store_true',
help="Flashes the relevant design to the target hardware's configuration flash.")
parser.add_argument('--dry-run', '-D', action='store_true',
help="When provided as the only option; builds the relevant bitstream without uploading or flashing it.")
parser.add_argument('--keep-files', action='store_true',
help="Keeps the local files in the default `build` folder.")
args = parser.parse_args()
platform = get_appropriate_platform()
# If this isn't a fragment directly, interpret it as an object that will build one.
if callable(fragment):
fragment = fragment(*pos_args, **kwargs)
# If we have no other options set, build and upload the relevant file.
if (args.output is None and not args.flash and not args.erase and not args.dry_run):
args.upload = True
# Once the device is flashed, it will self-reconfigure, so we
# don't need an explicitly upload step; and it implicitly erases
# the flash, so we don't need an erase step.
if args.flash:
args.erase = False
args.upload = False
# Build the relevant gateware, uploading if requested.
build_dir = "build" if args.keep_files else tempfile.mkdtemp()
# Build the relevant files.
try:
if args.erase:
platform.toolchain_erase()
products = platform.build(fragment,
do_program=args.upload,
build_dir=build_dir
)
# If we're flashing the FPGA's flash, do so.
if args.flash:
platform.toolchain_flash(products)
# If we're outputting a file, write it.
if args.output:
bitstream = products.get("top.bit")
with open(args.output, "wb") as f:
f.write(bitstream)
# Return the fragment we're working with, for convenience.
if args.upload or args.flash:
return fragment
# Clean up any directories we've created.
finally:
if not args.keep_files:
shutil.rmtree(build_dir)
return None
| [
"tempfile.mkdtemp",
"shutil.rmtree"
] | [((2305, 2323), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (2321, 2323), False, 'import tempfile\n'), ((3117, 3141), 'shutil.rmtree', 'shutil.rmtree', (['build_dir'], {}), '(build_dir)\n', (3130, 3141), False, 'import shutil\n')] |
"""
Setup module.
"""
from setuptools import setup
from mmsim import __version__
setup(
name='mmsim',
version=__version__,
description='A simple Micromouse Maze Simulator server',
long_description="""The server can load different mazes and any client
can connect to it to ask for the current position walls, move from
one cell to another and visualize the simulated micromouse state.""",
url='https://github.com/Theseus/mmsim',
author='<NAME>',
author_email='<EMAIL>',
license='BSD License',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
],
keywords='micromouse maze server simulator',
entry_points={
'console_scripts': [
'mmsim = mmsim.commands:launch',
],
},
packages=['mmsim'],
install_requires=[
'click',
'numpy',
'pyqtgraph',
'pyqt5',
'pyzmq'],
extras_require={
'docs': [
'doc8',
'sphinx',
'sphinx_rtd_theme',
],
'lint': [
'flake8',
'flake8-bugbear',
'flake8-per-file-ignores',
'flake8-quotes',
'pep8-naming',
],
'test': [
'pytest',
'pytest-cov',
],
},
)
| [
"setuptools.setup"
] | [((83, 1230), 'setuptools.setup', 'setup', ([], {'name': '"""mmsim"""', 'version': '__version__', 'description': '"""A simple Micromouse Maze Simulator server"""', 'long_description': '"""The server can load different mazes and any client\n can connect to it to ask for the current position walls, move from\n one cell to another and visualize the simulated micromouse state."""', 'url': '"""https://github.com/Theseus/mmsim"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""BSD License"""', 'classifiers': "['Development Status :: 3 - Alpha', 'Topic :: Utilities',\n 'License :: OSI Approved :: BSD License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython']", 'keywords': '"""micromouse maze server simulator"""', 'entry_points': "{'console_scripts': ['mmsim = mmsim.commands:launch']}", 'packages': "['mmsim']", 'install_requires': "['click', 'numpy', 'pyqtgraph', 'pyqt5', 'pyzmq']", 'extras_require': "{'docs': ['doc8', 'sphinx', 'sphinx_rtd_theme'], 'lint': ['flake8',\n 'flake8-bugbear', 'flake8-per-file-ignores', 'flake8-quotes',\n 'pep8-naming'], 'test': ['pytest', 'pytest-cov']}"}), '(name=\'mmsim\', version=__version__, description=\n \'A simple Micromouse Maze Simulator server\', long_description=\n """The server can load different mazes and any client\n can connect to it to ask for the current position walls, move from\n one cell to another and visualize the simulated micromouse state."""\n , url=\'https://github.com/Theseus/mmsim\', author=\'<NAME>\', author_email\n =\'<EMAIL>\', license=\'BSD License\', classifiers=[\n \'Development Status :: 3 - Alpha\', \'Topic :: Utilities\',\n \'License :: OSI Approved :: BSD License\',\n \'Programming Language :: Python :: 3\',\n \'Programming Language :: Python :: 3.6\',\n \'Programming Language :: Python :: Implementation :: CPython\'],\n keywords=\'micromouse maze server simulator\', 
entry_points={\n \'console_scripts\': [\'mmsim = mmsim.commands:launch\']}, packages=[\n \'mmsim\'], install_requires=[\'click\', \'numpy\', \'pyqtgraph\', \'pyqt5\',\n \'pyzmq\'], extras_require={\'docs\': [\'doc8\', \'sphinx\', \'sphinx_rtd_theme\'\n ], \'lint\': [\'flake8\', \'flake8-bugbear\', \'flake8-per-file-ignores\',\n \'flake8-quotes\', \'pep8-naming\'], \'test\': [\'pytest\', \'pytest-cov\']})\n', (88, 1230), False, 'from setuptools import setup\n')] |
import argparse
import json
import sys
import os
try:
from utility_functions import load_csv_files_into_db, generate_vocabulary_load
except(ImportError):
sys.path.insert(0, os.path.abspath(os.path.join(os.path.split(__file__)[0], os.path.pardir, os.path.pardir, "src")))
from utility_functions import load_csv_files_into_db, generate_vocabulary_load
def main(vocab_directory, connection_string, schema, vocabularies=("CONCEPT",)):
    """Load the requested OHDSI vocabulary CSV files into the database.

    :param vocab_directory: directory that holds the vocabulary CSV files
    :param connection_string: SQLAlchemy-style database connection string
    :param schema: database schema to load into
    :param vocabularies: names of the vocabulary tables to load
        (default loads only CONCEPT; an immutable tuple is used instead of
        the original mutable list default)
    """
    vocab_list = generate_vocabulary_load(vocab_directory, vocabularies)
    # Each entry appears to be a (path, table_name) pair; the loader wants
    # {table_name: path} -- TODO confirm against generate_vocabulary_load.
    vocab_data_dict = {pair[1]: pair[0] for pair in vocab_list}
    load_csv_files_into_db(connection_string, vocab_data_dict, schema_ddl=None, indices_ddl=None,
                           i_print_update=1000, truncate=True, schema=schema, delimiter="\t")
if __name__ == "__main__":
    # Command-line driver: which vocabularies to load is selected by flags,
    # connection/schema come from the CLI or fall back to the JSON config.
    arg_parse_obj = argparse.ArgumentParser(description="Load concept/vocabulary files into database")
    arg_parse_obj.add_argument("-c", "--config-file-name", dest="config_file_name", help="JSON config file", default="../hi_config.json")
    arg_parse_obj.add_argument("--connection-uri", dest="connection_uri", default=None)
    arg_parse_obj.add_argument("--schema", dest="schema", default=None)
    arg_parse_obj.add_argument("--load-concept_ancestor", default=False, action="store_true", dest="load_concept_ancestor")
    arg_parse_obj.add_argument("--full-concept-files", default=False, action="store_true", dest="load_full_concept_files")
    arg_obj = arg_parse_obj.parse_args()
    print("Reading config file '%s'" % arg_obj.config_file_name)
    with open(arg_obj.config_file_name) as f:
        config = json.load(f)
    # CLI values win; config file supplies the defaults.
    if arg_obj.connection_uri is None:
        connection_uri = config["connection_uri"]
    else:
        connection_uri = arg_obj.connection_uri
    if arg_obj.schema is None:
        schema = config["schema"]
    else:
        schema = arg_obj.schema
    # --full-concept-files takes precedence over --load-concept_ancestor.
    if arg_obj.load_full_concept_files:
        vocabularies_to_load = ["CONCEPT", "CONCEPT_ANCESTOR", "CONCEPT_CLASS", "CONCEPT_RELATIONSHIP",
                                "CONCEPT_SYNONYM", "DOMAIN", "DRUG_STRENGTH", "RELATIONSHIP", "VOCABULARY"]
    elif arg_obj.load_concept_ancestor:
        vocabularies_to_load = ["CONCEPT", "CONCEPT_ANCESTOR"]
    else:
        vocabularies_to_load = ["CONCEPT"]
    main(config["json_map_directory"], connection_uri, schema, vocabularies=vocabularies_to_load)
| [
"argparse.ArgumentParser",
"os.path.split",
"utility_functions.generate_vocabulary_load",
"json.load",
"utility_functions.load_csv_files_into_db"
] | [((464, 519), 'utility_functions.generate_vocabulary_load', 'generate_vocabulary_load', (['vocab_directory', 'vocabularies'], {}), '(vocab_directory, vocabularies)\n', (488, 519), False, 'from utility_functions import load_csv_files_into_db, generate_vocabulary_load\n'), ((622, 790), 'utility_functions.load_csv_files_into_db', 'load_csv_files_into_db', (['connection_string', 'vocab_data_dict'], {'schema_ddl': 'None', 'indices_ddl': 'None', 'i_print_update': '(1000)', 'truncate': '(True)', 'schema': 'schema', 'delimiter': '"""\t"""'}), "(connection_string, vocab_data_dict, schema_ddl=None,\n indices_ddl=None, i_print_update=1000, truncate=True, schema=schema,\n delimiter='\\t')\n", (644, 790), False, 'from utility_functions import load_csv_files_into_db, generate_vocabulary_load\n'), ((860, 947), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Load concept/vocabulary files into database"""'}), "(description=\n 'Load concept/vocabulary files into database')\n", (883, 947), False, 'import argparse\n'), ((1662, 1674), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1671, 1674), False, 'import json\n'), ((212, 235), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (225, 235), False, 'import os\n')] |
#!/usr/bin/env python
# convert jpg tp png
from glob import glob
import cv2
pngs = glob('./*.jpg')
for j in pngs:
img = cv2.imread(j)
cv2.imwrite(j[:-3] + 'png', img)
# delete jpg files
import glob
import os
dir = "/Users/wangmeijie/ALLImportantProjects/FlameDetectionAPP/Models/MaskRCNN/02_26_2020/Mask_RCNN/dataset/train"
for jpgpath in glob.iglob(os.path.join(dir, '*.jpg')):
os.remove(jpgpath) | [
"cv2.imwrite",
"os.path.join",
"glob",
"cv2.imread",
"os.remove"
] | [((83, 98), 'glob', 'glob', (['"""./*.jpg"""'], {}), "('./*.jpg')\n", (87, 98), False, 'import glob\n'), ((125, 138), 'cv2.imread', 'cv2.imread', (['j'], {}), '(j)\n', (135, 138), False, 'import cv2\n'), ((143, 175), 'cv2.imwrite', 'cv2.imwrite', (["(j[:-3] + 'png')", 'img'], {}), "(j[:-3] + 'png', img)\n", (154, 175), False, 'import cv2\n'), ((363, 389), 'os.path.join', 'os.path.join', (['dir', '"""*.jpg"""'], {}), "(dir, '*.jpg')\n", (375, 389), False, 'import os\n'), ((396, 414), 'os.remove', 'os.remove', (['jpgpath'], {}), '(jpgpath)\n', (405, 414), False, 'import os\n')] |
import xml.sax
import re
import os
import json
import time
current_milli_time = lambda: int(round(time.time() * 1000))
RE_LINKS = re.compile(r'\[{2}(.*?)\]{2}', re.DOTALL | re.UNICODE)
IGNORED_NAMESPACES = [
'wikipedia', 'category', 'file', 'portal', 'template',
'mediaWiki', 'user', 'help', 'book', 'draft', 'wikiProject',
'special', 'talk', 'image','module'
]
"""MediaWiki namespaces that ought to be ignored."""
class WikiHandler(xml.sax.ContentHandler):
    """SAX content handler for a MediaWiki XML dump.

    Fills the three dictionaries passed to the constructor:
      * title2Id:  page title -> page id
      * id2Title:  page id -> page title
      * redirects: source title -> redirect target title
    Pages in IGNORED_NAMESPACES and 'list of ...' pages are skipped.
    """
    def __init__(self,title2Id,id2Title,redirects):
        # Per-element parser state.
        self.tag = ""
        self.content = ''
        self.title = ''
        self.id = -1
        # Output dictionaries, shared with (and mutated for) the caller.
        self.title2Id = title2Id
        self.id2Title = id2Title
        self.redirects = redirects
        # counter_all: pages registered so far; n: <text> elements seen.
        self.counter_all = 0
        self.attributes = {}
        self.n = 0
        # Start time in ms, used for the progress read-out below.
        self.start = current_milli_time()
    # Called when an element starts
    def startElement(self, tag, attributes):
        self.tag = tag
        self.attributes = attributes
    # Called when an element ends
    def endElement(self, tag):
        if tag == 'title':
            self.title = self.content.strip()
        elif tag == 'id':
            self.id = int(self.content)
            # Only the first <id> per title is registered; revision ids that
            # follow would otherwise overwrite the page id.
            if self.title not in self.title2Id:
                self.title2Id[self.title] = self.id
                self.id2Title[self.id] = self.title
                self.counter_all += 1
                if self.counter_all % 1000 == 0:
                    # Progress read-out with average ms per page.
                    diff = current_milli_time() - self.start
                    print('Pages processed: ' + str(self.counter_all) + ', avg t: ' + str(diff / self.counter_all), end='\r')
        elif tag == 'text':
            self.n += 1
            if not any(self.title.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) and not self.title.lower().startswith('list of'):
                self.processArticle()
        elif tag == 'redirect' and 'title' in self.attributes:
            # Explicit <redirect title="..."/> element: register it unless
            # either side is an ignored namespace or a 'list of' page.
            redirect = self.attributes['title']
            if not any(self.title.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) \
                    and not any(redirect.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) \
                    and not redirect.lower().startswith('list of') \
                    and not self.title.lower().startswith('list of'):
                self.redirects[self.title] = redirect
        self.content = ""
    # Called for character data; may fire multiple times per element,
    # so the text is accumulated.
    def characters(self, content):
        self.content += content
    def processArticle(self):
        """Extract redirect information from the current page's wikitext."""
        text = self.content.strip()
        #self.title2Id[self.title] = self.id
        if text.lower().startswith('#redirect'):
            # '#REDIRECT [[Target]]' page: map this title to the target.
            match = re.search(RE_LINKS,text)
            if match:
                redirect = match.group(1).strip()
                # Drop any '|label' part of the link.
                pos_bar = redirect.find('|')
                if pos_bar > -1:
                    redirect = redirect[:pos_bar]
                redirect = redirect.replace('_',' ')
                if not any(redirect.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) and not redirect.lower().startswith('list of'):
                    self.redirects[self.title] = redirect
        else:
            # Leading '{{redirect|X}}' hatnote templates: X redirects HERE.
            lines = text.split('\n')
            for line in lines:
                if not line.startswith('{{redirect|'):
                    break
                else:
                    line = line[11:]
                    line = line[:line.find('|')]
                    if len(line) > 0:
                        if not any(line.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) and not line.lower().startswith('list of'):
                            self.redirects[line] = self.title
if (__name__ == "__main__"):
    # Parse the Wikipedia dump named in config/config.json and write the
    # resulting lookup tables as JSON files under <outputpath>/dictionaries/.
    title2Id = {}
    id2Title = {}
    redirects = {}
    config = json.load(open('config/config.json'))
    wikipath = config['wikipath']
    outputpath = config['outputpath']
    dictionarypath = outputpath + 'dictionaries/'
    mode = 0o755
    # Fails if the directories already exist (os.mkdir, not makedirs).
    os.mkdir(outputpath, mode)
    os.mkdir(dictionarypath, mode)
    parser = xml.sax.make_parser()
    # Disable namespace processing; tags arrive as plain names.
    parser.setFeature(xml.sax.handler.feature_namespaces, 0)
    Handler = WikiHandler(title2Id,id2Title,redirects)
    parser.setContentHandler(Handler)
    parser.parse(wikipath)
    print('done')
    with open(dictionarypath + 'title2Id.json', 'w') as f:
        json.dump(title2Id, f)
    with open(dictionarypath + 'id2Title.json', 'w') as f:
        json.dump(id2Title, f)
    with open(dictionarypath + 'redirects.json', 'w') as f:
        json.dump(redirects, f)
| [
"re.compile",
"os.mkdir",
"time.time",
"json.dump",
"re.search"
] | [((132, 187), 're.compile', 're.compile', (['"""\\\\[{2}(.*?)\\\\]{2}"""', '(re.DOTALL | re.UNICODE)'], {}), "('\\\\[{2}(.*?)\\\\]{2}', re.DOTALL | re.UNICODE)\n", (142, 187), False, 'import re\n'), ((3973, 3999), 'os.mkdir', 'os.mkdir', (['outputpath', 'mode'], {}), '(outputpath, mode)\n', (3981, 3999), False, 'import os\n'), ((4004, 4034), 'os.mkdir', 'os.mkdir', (['dictionarypath', 'mode'], {}), '(dictionarypath, mode)\n', (4012, 4034), False, 'import os\n'), ((4339, 4361), 'json.dump', 'json.dump', (['title2Id', 'f'], {}), '(title2Id, f)\n', (4348, 4361), False, 'import json\n'), ((4429, 4451), 'json.dump', 'json.dump', (['id2Title', 'f'], {}), '(id2Title, f)\n', (4438, 4451), False, 'import json\n'), ((4520, 4543), 'json.dump', 'json.dump', (['redirects', 'f'], {}), '(redirects, f)\n', (4529, 4543), False, 'import json\n'), ((2681, 2706), 're.search', 're.search', (['RE_LINKS', 'text'], {}), '(RE_LINKS, text)\n', (2690, 2706), False, 'import re\n'), ((99, 110), 'time.time', 'time.time', ([], {}), '()\n', (108, 110), False, 'import time\n')] |
import io
import avro.io
try:
from avro.schema import parse
except ImportError:
from avro.schema import Parse as parse
class ApacheAvroAdapter(object):
    """Apache Avro (de)serialization adapter.

    Loads one Avro schema per supported payload type from the ``specs/``
    directory at construction time and exposes an ``encoder_*``/``decoder_*``
    method pair per type. Refactored from 12 copy-pasted ``__init__`` stanzas
    and 24 near-identical methods into a schema table plus two private
    helpers; the public interface is unchanged.
    """

    NAME = 'avro'

    # (attribute prefix, schema file) pairs; __init__ creates one
    # ``<prefix>_reader`` and one ``<prefix>_writer`` attribute for each.
    _SCHEMAS = (
        ('str', 'specs/str.avsc'),
        ('bin', 'specs/bin.avsc'),
        ('int', 'specs/int.avsc'),
        ('float', 'specs/float.avsc'),
        ('null', 'specs/null.avsc'),
        ('bool', 'specs/bool.avsc'),
        ('array', 'specs/array.avsc'),
        ('map', 'specs/map.avsc'),
        ('struct10', 'specs/struct10.avsc'),
        ('struct_map', 'specs/struct_map.avsc'),
        ('simple_list', 'specs/simple_list.avsc'),
        ('points_list', 'specs/points_list.avsc'),
    )

    def __init__(self):
        for prefix, spec_path in self._SCHEMAS:
            with open(spec_path, 'r') as f:
                schema = parse(f.read())
            setattr(self, prefix + '_reader', avro.io.DatumReader(schema))
            setattr(self, prefix + '_writer', avro.io.DatumWriter(schema))

    def _encode(self, writer, data):
        """Serialize *data* with *writer* and return the raw bytes."""
        io_stream = io.BytesIO()
        writer.write(
            data,
            avro.io.BinaryEncoder(io_stream),
        )
        return io_stream.getvalue()

    def _decode(self, reader, data):
        """Deserialize the raw bytes *data* with *reader*."""
        io_stream = io.BytesIO(data)
        return reader.read(avro.io.BinaryDecoder(io_stream))

    def encoder_string(self, data):
        return self._encode(self.str_writer, data)

    def decoder_string(self, data):
        return self._decode(self.str_reader, data)

    def encoder_bytes(self, data):
        return self._encode(self.bin_writer, data)

    def decoder_bytes(self, data):
        return self._decode(self.bin_reader, data)

    def encoder_integer(self, data):
        return self._encode(self.int_writer, data)

    def decoder_integer(self, data):
        return self._decode(self.int_reader, data)

    def encoder_float(self, data):
        return self._encode(self.float_writer, data)

    def decoder_float(self, data):
        return self._decode(self.float_reader, data)

    def encoder_null(self, data):
        return self._encode(self.null_writer, data)

    def decoder_null(self, data):
        return self._decode(self.null_reader, data)

    def encoder_boolean(self, data):
        return self._encode(self.bool_writer, data)

    def decoder_boolean(self, data):
        return self._decode(self.bool_reader, data)

    def encoder_array(self, data):
        return self._encode(self.array_writer, data)

    def decoder_array(self, data):
        return self._decode(self.array_reader, data)

    def encoder_map(self, data):
        return self._encode(self.map_writer, data)

    def decoder_map(self, data):
        return self._decode(self.map_reader, data)

    def encoder_struct_10(self, data):
        return self._encode(self.struct10_writer, data)

    def decoder_struct_10(self, data):
        return self._decode(self.struct10_reader, data)

    def encoder_struct_map(self, data):
        return self._encode(self.struct_map_writer, data)

    def decoder_struct_map(self, data):
        return self._decode(self.struct_map_reader, data)

    def encoder_simple_list(self, data):
        return self._encode(self.simple_list_writer, data)

    def decoder_simple_list(self, data):
        return self._decode(self.simple_list_reader, data)

    def encoder_points_list(self, data):
        return self._encode(self.points_list_writer, data)

    def decoder_points_list(self, data):
        return self._decode(self.points_list_reader, data)
| [
"io.BytesIO"
] | [((2775, 2787), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2785, 2787), False, 'import io\n'), ((2986, 3002), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (2996, 3002), False, 'import io\n'), ((3129, 3141), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3139, 3141), False, 'import io\n'), ((3339, 3355), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (3349, 3355), False, 'import io\n'), ((3484, 3496), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3494, 3496), False, 'import io\n'), ((3696, 3712), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (3706, 3712), False, 'import io\n'), ((3839, 3851), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3849, 3851), False, 'import io\n'), ((4051, 4067), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (4061, 4067), False, 'import io\n'), ((4195, 4207), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4205, 4207), False, 'import io\n'), ((4405, 4421), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (4415, 4421), False, 'import io\n'), ((4551, 4563), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4561, 4563), False, 'import io\n'), ((4764, 4780), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (4774, 4780), False, 'import io\n'), ((4908, 4920), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4918, 4920), False, 'import io\n'), ((5120, 5136), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (5130, 5136), False, 'import io\n'), ((5263, 5275), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (5273, 5275), False, 'import io\n'), ((5471, 5487), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (5481, 5487), False, 'import io\n'), ((5618, 5630), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (5628, 5630), False, 'import io\n'), ((5837, 5853), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (5847, 5853), False, 'import io\n'), ((5990, 6002), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (6000, 6002), False, 'import io\n'), ((6212, 6228), 'io.BytesIO', 'io.BytesIO', 
(['data'], {}), '(data)\n', (6222, 6228), False, 'import io\n'), ((6368, 6380), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (6378, 6380), False, 'import io\n'), ((6593, 6609), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (6603, 6609), False, 'import io\n'), ((6773, 6785), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (6783, 6785), False, 'import io\n'), ((6998, 7014), 'io.BytesIO', 'io.BytesIO', (['data'], {}), '(data)\n', (7008, 7014), False, 'import io\n')] |
from django import forms
from warehouse.models import Product, ProductAdditionalInformation
class ProductForm(forms.ModelForm):
    """Create/edit form for Product; product_code is shown read-only."""

    # NOTE: field_order is a Form class attribute, not a Meta option.
    # It was previously declared inside Meta, where Django silently
    # ignores it, so the intended ordering never took effect.
    field_order = [
        'product_name',
        'product_code',
        'product_quantity',
        'product_type',
        'product_id',
    ]

    # Rendered but not editable; submitted values are ignored by Django.
    product_code = forms.CharField(disabled=True)

    class Meta:
        model = Product
        fields = '__all__'
class ProductAdditionalInformationForm(forms.ModelForm):
    """Form for a product's additional-information record; the linked
    product is excluded so it can be set by the view, not the user."""
    class Meta:
        model = ProductAdditionalInformation
        # 'product' is removed from the '__all__' field set below.
        exclude = ['product']
        fields = '__all__'
"django.forms.CharField"
] | [((397, 427), 'django.forms.CharField', 'forms.CharField', ([], {'disabled': '(True)'}), '(disabled=True)\n', (412, 427), False, 'from django import forms\n')] |
from datetime import datetime, timedelta
from enum import Enum, auto
from dateutil.relativedelta import relativedelta
from .baseclasses import Constant, MethodEnum
from .formulars import days_feb, eastern_calc, thanksgiving_calc, year_start
class ConstantOption(Enum):
    """Marks which component of a Constant is variable (see Constants below).

    Members use auto(), so their concrete values depend on declaration
    order -- compare by identity, not by value.
    """
    TIME_VARIABLE = auto()
    DATE_VARIABLE = auto()
    YEAR_VARIABLE = auto()
class Constants:
    """Named date constants (holidays, seasons, relative days).

    Each YEAR_VARIABLE constant's time_value maps a year (int) to a
    datetime inside that year; TIME_VARIABLE constants ignore their
    argument and resolve relative to the current day.
    """
    # --- fixed-date holidays ---
    CHRISTMAS = Constant('christmas', ['xmas'], options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=12, day=25))
    HOLY_EVE = Constant('holy eve', options=[ConstantOption.YEAR_VARIABLE],
                        time_value=lambda year_time: datetime(year=year_time, month=12, day=24))
    SILVESTER = Constant('silvester', ['new years eve'], options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=12, day=31))
    # Eastern and Thanksgiving are movable feasts; dates come from formulars.
    EASTERN = Constant('eastern', ['easter'], options=[ConstantOption.YEAR_VARIABLE], time_value=eastern_calc)
    NICHOLAS = Constant('nicholas', ['nicholas day'], options=[ConstantOption.YEAR_VARIABLE],
                        time_value=lambda year_time: datetime(year=year_time, month=12, day=6))
    HALLOWEEN = Constant('halloween', options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=10, day=31))
    APRIL_FOOLS_DAY = Constant('april fools day', ['april fool day'], options=[ConstantOption.YEAR_VARIABLE],
                               time_value=lambda year_time: datetime(year=year_time, month=4, day=1))
    THANKSGIVING = Constant('thanksgiving', options=[ConstantOption.YEAR_VARIABLE], time_value=thanksgiving_calc)
    SAINT_PATRICKS_DAY = Constant('saint patrick\'s day',
                                  ['saint patricks day', 'st. patrick\'s day', 'saint pt. day', 'st patrick\'s day', 'st patricks day'],
                                  options=[ConstantOption.YEAR_VARIABLE],
                                  time_value=lambda year_time: datetime(year=year_time, month=3, day=17))
    VALENTINES_DAY = Constant('valentines day', ['valentine', 'valentine day'], options=[ConstantOption.YEAR_VARIABLE],
                              time_value=lambda year_time: datetime(year=year_time, month=2, day=14))
    PI_DAY = Constant("pi day", ["piday", "pi-day"], options=[ConstantOption.YEAR_VARIABLE],
                      time_value=lambda year_time: datetime(year=year_time, month=3, day=14))
    TAU_DAY = Constant("tau day", ["tauday", "tau-day"], options=[ConstantOption.YEAR_VARIABLE],
                       time_value=lambda year_time: datetime(year=year_time, month=6, day=28))
    # --- meteorological season boundaries (month-start / month-end) ---
    SUMMER_BEGIN = Constant('summer begin', ['summer', 'begin of summer', 'begin of the summer'], options=[ConstantOption.YEAR_VARIABLE],
                            time_value=lambda year_time: datetime(year=year_time, month=6, day=1))
    WINTER_BEGIN = Constant('winter begin', ['winter', 'begin of winter', 'begin of the winter'], options=[ConstantOption.YEAR_VARIABLE],
                            time_value=lambda year_time: datetime(year=year_time, month=12, day=1))
    SPRING_BEGIN = Constant('spring begin', ['spring', 'begin of spring', 'begin of the spring'], options=[ConstantOption.YEAR_VARIABLE],
                            time_value=lambda year_time: datetime(year=year_time, month=3, day=1))
    FALL_BEGIN = Constant('fall begin',
                          ['fall', 'begin of fall', 'begin of the fall', 'autumn begin', 'autumn', 'begin of autumn',
                           'begin of the autumn'],
                          options=[ConstantOption.YEAR_VARIABLE], time_value=lambda year_time: datetime(year=year_time, month=9, day=1))
    SUMMER_END = Constant('summer end', ['end of summer', 'end of the summer'], options=[ConstantOption.YEAR_VARIABLE],
                          time_value=lambda year_time: datetime(year=year_time, month=8, day=31, hour=23, minute=59, second=59))
    # February's last day depends on leap years (days_feb).
    WINTER_END = Constant('winter end', ['end of winter', 'end of the winter'], options=[ConstantOption.YEAR_VARIABLE],
                          time_value=lambda year_time: datetime(year=year_time, month=2, day=days_feb(year_time), hour=23, minute=59,
                                                                second=59))
    SPRING_END = Constant('spring end', ['end of spring', 'end of the spring'], options=[ConstantOption.YEAR_VARIABLE],
                          time_value=lambda year_time: datetime(year=year_time, month=5, day=31, hour=23, minute=59, second=59))
    FALL_END = Constant('fall end', ['end of fall', 'end of the fall', 'autumn end', 'end of autumn', 'end of the autumn'],
                        options=[ConstantOption.YEAR_VARIABLE],
                        time_value=lambda year_time: datetime(year=year_time, month=11, day=30, hour=23, minute=59, second=59))
    # --- day-part constants without a fixed time_value ---
    MORNING = Constant('morning', ['at morning'],
                       options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE])
    EVENING = Constant('evening', ['at evening'],
                       options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE])
    LUNCHTIME = Constant('lunchtime', ['lunch'], options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE])
    # advent of code always starts at midnight 1st december in SET (5 hours negative UTC offset)
    BEGIN_AOC = Constant('aoc begin',
                         ['aoc', 'begin of aoc', 'begin of the aoc', 'advent of code begin', 'advent of code', 'begin of advent of code',
                          'begin of the advent of code'],
                         options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=12, day=1, hour=0),
                         offset=-5)
    END_AOC = Constant('aoc end',
                       ['end of aoc', 'end of the aoc', 'advent of code end', 'end of advent of code', 'end of the advent of code'],
                       options=[ConstantOption.YEAR_VARIABLE],
                       time_value=lambda year_time: datetime(year=year_time, month=12, day=26, hour=0),
                       offset=-5)
    END_OF_YEAR = Constant('end of year', ['the end of year', 'the end of the year', 'end of the year'],
                           options=[ConstantOption.YEAR_VARIABLE],
                           time_value=lambda year_time: datetime(year=year_time, month=12, day=31, hour=23, minute=59, second=59))
    BEGIN_OF_YEAR = Constant('begin of year', ['the begin of year', 'the begin of the year', 'begin of the year'],
                             options=[ConstantOption.YEAR_VARIABLE], time_value=year_start)
    INFINITY = Constant('infinity', ['inf'], value=None)
    # --- relative-day constants; resolve against the current date ---
    TODAY = Constant('today', options=[ConstantOption.TIME_VARIABLE],
                     time_value=lambda _: datetime(datetime.today().year, datetime.today().month, datetime.today().day))
    TOMORROW = Constant('tomorrow', options=[ConstantOption.TIME_VARIABLE],
                        time_value=lambda _: datetime(datetime.today().year, datetime.today().month, datetime.today().day) + relativedelta(
                            days=1))
    YESTERDAY = Constant('yesterday', options=[ConstantOption.TIME_VARIABLE],
                         time_value=lambda _: datetime(datetime.today().year, datetime.today().month, datetime.today().day) - relativedelta(
                             days=1))
    NOW = Constant('now', ['at the moment', 'current time', 'current time now'], time_value=lambda _: datetime.now())
    ALL = [
        CHRISTMAS, HOLY_EVE, SILVESTER, EASTERN, NICHOLAS, HALLOWEEN, APRIL_FOOLS_DAY, THANKSGIVING, SAINT_PATRICKS_DAY, VALENTINES_DAY,
        PI_DAY, TAU_DAY,
        SUMMER_END, WINTER_END, SPRING_END, FALL_END, SUMMER_BEGIN, WINTER_BEGIN, SPRING_BEGIN, FALL_BEGIN,
        MORNING, EVENING, LUNCHTIME,
        BEGIN_AOC, END_AOC,
        END_OF_YEAR, BEGIN_OF_YEAR,
        INFINITY,
        TODAY, TOMORROW, YESTERDAY, NOW
    ]
    ALL_RELATIVE_CONSTANTS = [TODAY, TOMORROW, YESTERDAY, NOW]
class DatetimeDeltaConstants:
    """Named times of day; value 0 marks AM parts, 12 marks PM parts."""
    # time_value is a tuple containing (hours, minutes, seconds)
    MIDNIGHT = Constant('midnight', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (0, 0, 0))
    NIGHT = Constant('night', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (21, 0, 0))
    MORNING_NIGHT = Constant('morning night', value=0, options=[ConstantOption.DATE_VARIABLE],
                             time_value=lambda _: (3, 0, 0))
    DAYLIGHT_CHANGE = Constant('daylight change', ['daylight saving', 'daylight saving time'], value=0,
                               options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE],
                               time_value=lambda _: (6, 0, 0))
    SUNRISE = Constant('sunrise', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (7, 0, 0))
    MORNING = Constant('morning', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (6, 0, 0))
    BREAKFAST = Constant('breakfast', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (8, 0, 0))
    MIDDAY = Constant('midday', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (12, 0, 0))
    LUNCH = Constant('lunch', ['lunchtime'], value=12, options=[ConstantOption.DATE_VARIABLE],
                     time_value=lambda _: (12, 0, 0))
    AFTERNOON = Constant('afternoon', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (15, 0, 0))
    EVENING = Constant('evening', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (18, 0, 0))
    DINNER = Constant('dinner', ['dinnertime'], value=12, options=[ConstantOption.DATE_VARIABLE],
                      time_value=lambda _: (19, 0, 0))
    DAWN = Constant('dawn', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (6, 0, 0))
    DUSK = Constant('dusk', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (20, 0, 0))
    SUNSET = Constant('sunset', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (18, 30, 0))
    ALL = [
        MORNING, AFTERNOON, EVENING, NIGHT, MORNING_NIGHT, DAYLIGHT_CHANGE, MIDNIGHT, MIDDAY, DAWN, DUSK,
        SUNRISE, SUNSET, LUNCH, DINNER, BREAKFAST
    ]
class NumberConstants:
    """Cardinal number words one..thirty-one mapped to their int values."""
    # Presented to you by github copilot
    ONE = Constant('one', value=1)
    TWO = Constant('two', value=2)
    THREE = Constant('three', value=3)
    FOUR = Constant('four', value=4)
    FIVE = Constant('five', value=5)
    SIX = Constant('six', value=6)
    SEVEN = Constant('seven', value=7)
    EIGHT = Constant('eight', value=8)
    NINE = Constant('nine', value=9)
    TEN = Constant('ten', value=10)
    ELEVEN = Constant('eleven', value=11)
    TWELVE = Constant('twelve', value=12)
    THIRTEEN = Constant('thirteen', value=13)
    FOURTEEN = Constant('fourteen', value=14)
    FIFTEEN = Constant('fifteen', value=15)
    SIXTEEN = Constant('sixteen', value=16)
    SEVENTEEN = Constant('seventeen', value=17)
    EIGHTEEN = Constant('eighteen', value=18)
    NINETEEN = Constant('nineteen', value=19)
    TWENTY = Constant('twenty', value=20)
    TWENTY_ONE = Constant('twenty one', alias=["twentyone", "twenty-one"], value=21)
    TWENTY_TWO = Constant('twenty two', alias=["twentytwo", "twenty-two"], value=22)
    TWENTY_THREE = Constant('twenty three', alias=["twentythree", "twenty-three"], value=23)
    TWENTY_FOUR = Constant('twenty four', alias=["twentyfour", "twenty-four"], value=24)
    TWENTY_FIVE = Constant('twenty five', alias=["twentyfive", "twenty-five"], value=25)
    TWENTY_SIX = Constant('twenty six', alias=["twentysix", "twenty-six"], value=26)
    TWENTY_SEVEN = Constant('twenty seven', alias=["twentyseven", "twenty-seven"], value=27)
    TWENTY_EIGHT = Constant('twenty eight', alias=["twentyeight", "twenty-eight"], value=28)
    TWENTY_NINE = Constant('twenty nine', alias=["twentynine", "twenty-nine"], value=29)
    THIRTY = Constant('thirty', value=30)
    THIRTY_ONE = Constant('thirty one', alias=["thirtyone", "thirty-one"], value=31)
    # Reversed to avoid conflicts with other constants (one is included in twenty one)
    ALL = [ONE, TWO, THREE, FOUR, FIVE, SIX, SEVEN, EIGHT, NINE, TEN,
           ELEVEN, TWELVE, THIRTEEN, FOURTEEN, FIFTEEN, SIXTEEN, SEVENTEEN, EIGHTEEN, NINETEEN, TWENTY,
           TWENTY_ONE, TWENTY_TWO, TWENTY_THREE, TWENTY_FOUR, TWENTY_FIVE, TWENTY_SIX, TWENTY_SEVEN, TWENTY_EIGHT, TWENTY_NINE,
           THIRTY, THIRTY_ONE][::-1]
class NumberCountConstants:
    """Ordinal number words first..thirty-first (with '1st'/'1.' style
    aliases) mapped to their int values."""
    # Presented to you by github copilot
    FIRST = Constant('first', alias=['1st', '1.'], value=1)
    SECOND = Constant('second', alias=['2nd', '2.'], value=2)
    THIRD = Constant('third', alias=['3rd', '3.'], value=3)
    FOURTH = Constant('fourth', alias=['4th', '4.'], value=4)
    FIFTH = Constant('fifth', alias=['5th', '5.'], value=5)
    SIXTH = Constant('sixth', alias=['6th', '6.'], value=6)
    SEVENTH = Constant('seventh', alias=['7th', '7.'], value=7)
    EIGHTH = Constant('eighth', alias=['8th', '8.'], value=8)
    NINTH = Constant('ninth', alias=['9th', '9.'], value=9)
    TENTH = Constant('tenth', alias=['10th', '10.'], value=10)
    ELEVENTH = Constant('eleventh', alias=['11th', '11.'], value=11)
    TWELFTH = Constant('twelfth', alias=['12th', '12.'], value=12)
    THIRTEENTH = Constant('thirteenth', alias=['13th', '13.'], value=13)
    FOURTEENTH = Constant('fourteenth', alias=['14th', '14.'], value=14)
    FIFTEENTH = Constant('fifteenth', alias=['15th', '15.'], value=15)
    SIXTEENTH = Constant('sixteenth', alias=['16th', '16.'], value=16)
    SEVENTEENTH = Constant('seventeenth', alias=['17th', '17.'], value=17)
    EIGHTEENTH = Constant('eighteenth', alias=['18th', '18.'], value=18)
    NINETEENTH = Constant('nineteenth', alias=['19th', '19.'], value=19)
    TWENTIETH = Constant('twentieth', alias=['20th', '20.'], value=20)
    TWENTY_FIRST = Constant('twenty first', alias=['21st', '21.', 'twentyfirst', 'twenty-first'], value=21)
    TWENTY_SECOND = Constant('twenty second', alias=['22nd', '22.', 'twentysecond', 'twenty-second'], value=22)
    TWENTY_THIRD = Constant('twenty third', alias=['23rd', '23.', 'twentythird', 'twenty-third'], value=23)
    TWENTY_FOURTH = Constant('twenty fourth', alias=['24th', '24.', 'twentyfourth', 'twenty-fourth'], value=24)
    TWENTY_FIFTH = Constant('twenty fifth', alias=['25th', '25.', 'twentyfifth', 'twenty-fifth'], value=25)
    TWENTY_SIXTH = Constant('twenty sixth', alias=['26th', '26.', 'twentysixth', 'twenty-sixth'], value=26)
    TWENTY_SEVENTH = Constant('twenty seventh', alias=['27th', '27.', 'twentyseventh', 'twenty-seventh'], value=27)
    TWENTY_EIGHTH = Constant('twenty eighth', alias=['28th', '28.', 'twentyeighth', 'twenty-eighth'], value=28)
    TWENTY_NINTH = Constant('twenty ninth', alias=['29th', '29.', 'twentyninth', 'twenty-ninth'], value=29)
    THIRTIETH = Constant('thirtieth', alias=['30th', '30.'], value=30)
    THIRTY_FIRST = Constant('thirty first', alias=['31st', '31.', 'thirthyfirst', "thirty-first"], value=31)
    # Reversed to avoid conflicts with other constants
    ALL = [FIRST, SECOND, THIRD, FOURTH, FIFTH, SIXTH, SEVENTH, EIGHTH, NINTH, TENTH,
           ELEVENTH, TWELFTH, THIRTEENTH, FOURTEENTH, FIFTEENTH, SIXTEENTH, SEVENTEENTH, EIGHTEENTH, NINETEENTH, TWENTIETH,
           TWENTY_FIRST, TWENTY_SECOND, TWENTY_THIRD, TWENTY_FOURTH, TWENTY_FIFTH, TWENTY_SIXTH, TWENTY_SEVENTH, TWENTY_EIGHTH,
           TWENTY_NINTH,
           THIRTIETH, THIRTY_FIRST][::-1]
class DatetimeConstants:
    """Units of time and date understood by the parser.

    ``TIME`` groups the sub-day units, ``DATE`` the day-and-larger units,
    and ``ALL`` is the concatenation of both groups.
    """

    SECONDS = Constant('seconds', ['second', 'sec', 'secs'])
    MINUTES = Constant('minutes', ['minute', 'min', 'mins'])
    QUARTERS = Constant('quarters', ['quarter', 'qtr', 'qtrs'])
    HOURS = Constant('hours', ['hour'])
    DAYS = Constant('days', ['day'])
    WEEKS = Constant('weeks', ['week'])
    MONTHS = Constant('months', ['month'])
    YEARS = Constant('years', ['year'])
    OLYMPIADS = Constant('olympiads', ['olympiad'])  # 4 years
    DECADES = Constant('decades', ['decade'])  # 10 years
    CENTURIES = Constant('centuries', ['century'])  # 100 years
    MILLENNIUMS = Constant('millenniums', ['millennium'])  # 1,000 years
    MEGAANNUMS = Constant('megaannuums', ['megaannuum'])  # 1,000,000 years
    GIGAANNUMS = Constant('gigaannuums', ['gigaannuum'])  # 1,000,000,000 years

    TIME = [SECONDS, MINUTES, QUARTERS, HOURS]
    DATE = [DAYS, WEEKS, MONTHS, YEARS, DECADES, CENTURIES, MILLENNIUMS, MEGAANNUMS, GIGAANNUMS]
    ALL = [*DATE, *TIME]

    @classmethod
    def convert_from_mini_date(cls, md):
        """Map a one-letter unit abbreviation (case-insensitive) to its
        constant; unknown letters yield ``None``."""
        return {
            "s": cls.SECONDS,
            "m": cls.MINUTES,
            "h": cls.HOURS,
            "w": cls.WEEKS,
            "d": cls.DAYS,
            "y": cls.YEARS,
        }.get(md.lower())
class WeekdayConstants:
    """Weekday constants whose ``time_value`` callables map a date to the
    next occurrence of that weekday (the same day when it already matches)."""

    def _upcoming(day_index):
        # Factory so all seven weekdays share one rule; the modulo keeps the
        # shift in 0..6 days ahead of the supplied date.
        return lambda date: f"{date + timedelta((day_index - date.weekday()) % 7)}"

    MONDAY = Constant('monday', time_value=_upcoming(0))
    TUESDAY = Constant('tuesday', time_value=_upcoming(1))
    WEDNESDAY = Constant('wednesday', time_value=_upcoming(2))
    THURSDAY = Constant('thursday', time_value=_upcoming(3))
    FRIDAY = Constant('friday', time_value=_upcoming(4))
    SATURDAY = Constant('saturday', time_value=_upcoming(5))
    SUNDAY = Constant('sunday', time_value=_upcoming(6))
    ALL = [MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY]

    # Drop the factory so the class namespace matches the hand-written form.
    del _upcoming
class MonthConstants:
    """Month constants whose ``time_value`` callables build a datetime for
    the first day of that month in a given year."""

    def _start_of(month_number):
        # Factory shared by all twelve months.
        return lambda year_time: datetime(year=year_time, month=month_number, day=1)

    JANUARY = Constant('january', ['jan'], time_value=_start_of(1))
    FEBRUARY = Constant('february', ['feb'], time_value=_start_of(2))
    MARCH = Constant('march', ['mar'], time_value=_start_of(3))
    APRIL = Constant('april', ['apr'], time_value=_start_of(4))
    MAY = Constant('may', time_value=_start_of(5))  # 'may' needs no alias
    JUNE = Constant('june', ['jun'], time_value=_start_of(6))
    JULY = Constant('july', ['jul'], time_value=_start_of(7))
    AUGUST = Constant('august', ['aug'], time_value=_start_of(8))
    SEPTEMBER = Constant('september', ['sep'], time_value=_start_of(9))
    OCTOBER = Constant('october', ['oct'], time_value=_start_of(10))
    NOVEMBER = Constant('november', ['nov'], time_value=_start_of(11))
    DECEMBER = Constant('december', ['dec'], time_value=_start_of(12))
    ALL = [JANUARY, FEBRUARY, MARCH, APRIL, MAY, JUNE, JULY, AUGUST, SEPTEMBER, OCTOBER, NOVEMBER, DECEMBER]

    # Drop the factory so the class namespace matches the hand-written form.
    del _start_of
class Keywords:
    """Keyword/preposition constants recognised when parsing relative
    datetime expressions (e.g. "first monday OF next month")."""
    OF = Constant('of')
    AFTER = Constant('after')
    BEFORE = Constant('before')
    NEXT = Constant('next')
    IN = Constant('in')
    FOR = Constant('for')
    PAST = Constant('past')
    # All recognised keywords, in declaration order.
    ALL = [OF, AFTER, BEFORE, NEXT, IN, FOR, PAST]
class Method:
    """Identifiers for the distinct parsing strategies the evaluator can
    apply to an input expression."""
    ABSOLUTE_PREPOSITIONS = MethodEnum('absolute_prepositions')
    ABSOLUTE_DATE_FORMATS = MethodEnum('absolute_date_formats')
    CONSTANTS = MethodEnum('constants')
    CONSTANTS_RELATIVE_EXTENSIONS = MethodEnum('constants_relative_extensions')
    DATETIME_DELTA_CONSTANTS = MethodEnum('datetime_delta_constants')
    RELATIVE_DATETIMES = MethodEnum('relative_datetimes')
| [
"datetime.datetime",
"enum.auto",
"dateutil.relativedelta.relativedelta",
"datetime.datetime.now",
"datetime.datetime.today"
] | [((293, 299), 'enum.auto', 'auto', ([], {}), '()\n', (297, 299), False, 'from enum import Enum, auto\n'), ((320, 326), 'enum.auto', 'auto', ([], {}), '()\n', (324, 326), False, 'from enum import Enum, auto\n'), ((347, 353), 'enum.auto', 'auto', ([], {}), '()\n', (351, 353), False, 'from enum import Enum, auto\n'), ((515, 557), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(25)'}), '(year=year_time, month=12, day=25)\n', (523, 557), False, 'from datetime import datetime, timedelta\n'), ((688, 730), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(24)'}), '(year=year_time, month=12, day=24)\n', (696, 730), False, 'from datetime import datetime, timedelta\n'), ((883, 925), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(31)'}), '(year=year_time, month=12, day=31)\n', (891, 925), False, 'from datetime import datetime, timedelta\n'), ((1185, 1226), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(6)'}), '(year=year_time, month=12, day=6)\n', (1193, 1226), False, 'from datetime import datetime, timedelta\n'), ((1360, 1402), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(10)', 'day': '(31)'}), '(year=year_time, month=10, day=31)\n', (1368, 1402), False, 'from datetime import datetime, timedelta\n'), ((1574, 1614), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(4)', 'day': '(1)'}), '(year=year_time, month=4, day=1)\n', (1582, 1614), False, 'from datetime import datetime, timedelta\n'), ((2062, 2103), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(3)', 'day': '(17)'}), '(year=year_time, month=3, day=17)\n', (2070, 2103), False, 'from datetime import datetime, timedelta\n'), ((2284, 2325), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(2)', 'day': '(14)'}), '(year=year_time, month=2, day=14)\n', (2292, 2325), False, 'from 
datetime import datetime, timedelta\n'), ((2471, 2512), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(3)', 'day': '(14)'}), '(year=year_time, month=3, day=14)\n', (2479, 2512), False, 'from datetime import datetime, timedelta\n'), ((2663, 2704), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(6)', 'day': '(28)'}), '(year=year_time, month=6, day=28)\n', (2671, 2704), False, 'from datetime import datetime, timedelta\n'), ((2902, 2942), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(6)', 'day': '(1)'}), '(year=year_time, month=6, day=1)\n', (2910, 2942), False, 'from datetime import datetime, timedelta\n'), ((3139, 3180), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(1)'}), '(year=year_time, month=12, day=1)\n', (3147, 3180), False, 'from datetime import datetime, timedelta\n'), ((3377, 3417), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(3)', 'day': '(1)'}), '(year=year_time, month=3, day=1)\n', (3385, 3417), False, 'from datetime import datetime, timedelta\n'), ((3723, 3763), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(9)', 'day': '(1)'}), '(year=year_time, month=9, day=1)\n', (3731, 3763), False, 'from datetime import datetime, timedelta\n'), ((3940, 4012), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(8)', 'day': '(31)', 'hour': '(23)', 'minute': '(59)', 'second': '(59)'}), '(year=year_time, month=8, day=31, hour=23, minute=59, second=59)\n', (3948, 4012), False, 'from datetime import datetime, timedelta\n'), ((4519, 4591), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(5)', 'day': '(31)', 'hour': '(23)', 'minute': '(59)', 'second': '(59)'}), '(year=year_time, month=5, day=31, hour=23, minute=59, second=59)\n', (4527, 4591), False, 'from datetime import datetime, timedelta\n'), ((4834, 4907), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 
'month': '(11)', 'day': '(30)', 'hour': '(23)', 'minute': '(59)', 'second': '(59)'}), '(year=year_time, month=11, day=30, hour=23, minute=59, second=59)\n', (4842, 4907), False, 'from datetime import datetime, timedelta\n'), ((5766, 5815), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(1)', 'hour': '(0)'}), '(year=year_time, month=12, day=1, hour=0)\n', (5774, 5815), False, 'from datetime import datetime, timedelta\n'), ((6135, 6185), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(26)', 'hour': '(0)'}), '(year=year_time, month=12, day=26, hour=0)\n', (6143, 6185), False, 'from datetime import datetime, timedelta\n'), ((6450, 6523), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(31)', 'hour': '(23)', 'minute': '(59)', 'second': '(59)'}), '(year=year_time, month=12, day=31, hour=23, minute=59, second=59)\n', (6458, 6523), False, 'from datetime import datetime, timedelta\n'), ((7594, 7608), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7606, 7608), False, 'from datetime import datetime, timedelta\n'), ((18029, 18069), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(1)', 'day': '(1)'}), '(year=year_time, month=1, day=1)\n', (18037, 18069), False, 'from datetime import datetime, timedelta\n'), ((18145, 18185), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(2)', 'day': '(1)'}), '(year=year_time, month=2, day=1)\n', (18153, 18185), False, 'from datetime import datetime, timedelta\n'), ((18255, 18295), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(3)', 'day': '(1)'}), '(year=year_time, month=3, day=1)\n', (18263, 18295), False, 'from datetime import datetime, timedelta\n'), ((18365, 18405), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(4)', 'day': '(1)'}), '(year=year_time, month=4, day=1)\n', (18373, 18405), False, 'from datetime import datetime, 
timedelta\n'), ((18462, 18502), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(5)', 'day': '(1)'}), '(year=year_time, month=5, day=1)\n', (18470, 18502), False, 'from datetime import datetime, timedelta\n'), ((18570, 18610), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(6)', 'day': '(1)'}), '(year=year_time, month=6, day=1)\n', (18578, 18610), False, 'from datetime import datetime, timedelta\n'), ((18678, 18718), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(7)', 'day': '(1)'}), '(year=year_time, month=7, day=1)\n', (18686, 18718), False, 'from datetime import datetime, timedelta\n'), ((18790, 18830), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(8)', 'day': '(1)'}), '(year=year_time, month=8, day=1)\n', (18798, 18830), False, 'from datetime import datetime, timedelta\n'), ((18908, 18948), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(9)', 'day': '(1)'}), '(year=year_time, month=9, day=1)\n', (18916, 18948), False, 'from datetime import datetime, timedelta\n'), ((19022, 19063), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(10)', 'day': '(1)'}), '(year=year_time, month=10, day=1)\n', (19030, 19063), False, 'from datetime import datetime, timedelta\n'), ((19139, 19180), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(11)', 'day': '(1)'}), '(year=year_time, month=11, day=1)\n', (19147, 19180), False, 'from datetime import datetime, timedelta\n'), ((19256, 19297), 'datetime.datetime', 'datetime', ([], {'year': 'year_time', 'month': '(12)', 'day': '(1)'}), '(year=year_time, month=12, day=1)\n', (19264, 19297), False, 'from datetime import datetime, timedelta\n'), ((7183, 7204), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'days': '(1)'}), '(days=1)\n', (7196, 7204), False, 'from dateutil.relativedelta import relativedelta\n'), ((7439, 7460), 'dateutil.relativedelta.relativedelta', 
'relativedelta', ([], {'days': '(1)'}), '(days=1)\n', (7452, 7460), False, 'from dateutil.relativedelta import relativedelta\n'), ((6912, 6928), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (6926, 6928), False, 'from datetime import datetime, timedelta\n'), ((6935, 6951), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (6949, 6951), False, 'from datetime import datetime, timedelta\n'), ((6959, 6975), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (6973, 6975), False, 'from datetime import datetime, timedelta\n'), ((7112, 7128), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7126, 7128), False, 'from datetime import datetime, timedelta\n'), ((7135, 7151), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7149, 7151), False, 'from datetime import datetime, timedelta\n'), ((7159, 7175), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7173, 7175), False, 'from datetime import datetime, timedelta\n'), ((7368, 7384), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7382, 7384), False, 'from datetime import datetime, timedelta\n'), ((7391, 7407), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7405, 7407), False, 'from datetime import datetime, timedelta\n'), ((7415, 7431), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7429, 7431), False, 'from datetime import datetime, timedelta\n')] |
# Copyright (C) 2021 <NAME>
#
# SPDX-License-Identifier: MIT
from typing import Callable, Tuple, Union
import dolfinx.common as _common
import dolfinx.fem as _fem
import dolfinx.log as _log
import dolfinx.mesh as _mesh
import dolfinx_cuas
import numpy as np
import ufl
from dolfinx.cpp.graph import AdjacencyList_int32
from dolfinx.cpp.mesh import MeshTags_int32
from petsc4py import PETSc as _PETSc
import dolfinx_contact
import dolfinx_contact.cpp
from dolfinx_contact.helpers import (epsilon, lame_parameters,
rigid_motions_nullspace, sigma_func)
# Shorthand for the contact-kernel enumeration exposed by the C++ layer.
kt = dolfinx_contact.cpp.Kernel

# Public API of this module.
__all__ = ["nitsche_unbiased"]
def nitsche_unbiased(mesh: _mesh.Mesh, mesh_tags: list[MeshTags_int32],
                     domain_marker: MeshTags_int32,
                     surfaces: AdjacencyList_int32,
                     dirichlet: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
                     neumann: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
                     contact_pairs: list[Tuple[int, int]],
                     body_forces: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
                     physical_parameters: dict[str, Union[bool, np.float64, int]],
                     nitsche_parameters: dict[str, np.float64],
                     quadrature_degree: int = 5, form_compiler_params: dict = None, jit_params: dict = None,
                     petsc_options: dict = None, newton_options: dict = None, initial_guess=None,
                     outfile: str = None, order: int = 1) -> Tuple[_fem.Function, int, int, float]:
    """
    Use custom kernel to compute the contact problem with two elastic bodies coming into contact.

    Returns the displacement function, the number of Newton iterations, the
    number of Krylov iterations and the wall time spent in the solve.

    Parameters
    ==========
    mesh
        The input mesh
    mesh_tags
        A list of meshtags. The first element must contain the mesh_tags for all puppet surfaces,
        Dirichlet-surfaces and Neumann-surfaces
        All further elements may contain candidate_surfaces
    domain_marker
        marker for subdomains where a body force is applied
    surfaces
        Adjacency list. Links of i are meshtag values for contact surfaces in ith mesh_tag in mesh_tags
    dirichlet
        List of Dirichlet boundary conditions as pairs of (meshtag value, function), where function
        is a function to be interpolated into the dolfinx function space
    neumann
        Same as dirichlet for Neumann boundary conditions
    contact_pairs:
        list of pairs (i, j) marking the ith surface as a puppet surface and the jth surface
        as the corresponding candidate surface
    physical_parameters
        Optional dictionary with information about the linear elasticity problem.
        Valid (key, value) tuples are: ('E': float), ('nu', float), ('strain', bool)
    nitsche_parameters
        Optional dictionary with information about the Nitsche configuration.
        Valid (key, value) tuples are: ('gamma', float), ('theta', float) where theta can be -1, 0 or 1 for
        skew-symmetric, penalty like or symmetric enforcement of Nitsche conditions
    quadrature_degree
        The quadrature degree to use for the custom contact kernels
    form_compiler_params
        Parameters used in FFCX compilation of this form. Run `ffcx --help` at
        the commandline to see all available options. Takes priority over all
        other parameter values, except for `scalar_type` which is determined by
        DOLFINX.
    jit_params
        Parameters used in CFFI JIT compilation of C code generated by FFCX.
        See https://github.com/FEniCS/dolfinx/blob/main/python/dolfinx/jit.py
        for all available parameters. Takes priority over all other parameter values.
    petsc_options
        Parameters that is passed to the linear algebra backend
        PETSc. For available choices for the 'petsc_options' kwarg,
        see the `PETSc-documentation
        <https://petsc4py.readthedocs.io/en/stable/manual/ksp/>`
    newton_options
        Dictionary with Newton-solver options. Valid (key, item) tuples are:
        ("atol", float), ("rtol", float), ("convergence_criterion", "str"),
        ("max_it", int), ("error_on_nonconvergence", bool), ("relaxation_parameter", float)
    initial_guess
        A function containing an initial guess to use for the Newton-solver
    outfile
        File to append solver summary
    order
        The order of mesh and function space

    Raises
    ======
    RuntimeError
        If a required physical/Nitsche parameter is missing or the Newton
        solver does not converge.
    """
    # Replace None option dictionaries with fresh empties (avoids the
    # mutable-default-argument pitfall).
    form_compiler_params = {} if form_compiler_params is None else form_compiler_params
    jit_params = {} if jit_params is None else jit_params
    petsc_options = {} if petsc_options is None else petsc_options
    newton_options = {} if newton_options is None else newton_options

    # Validate and unpack the material description.
    strain = physical_parameters.get("strain")
    if strain is None:
        raise RuntimeError("Need to supply if problem is plane strain (True) or plane stress (False)")
    else:
        plane_strain = bool(strain)
    _E = physical_parameters.get("E")
    if _E is not None:
        E = np.float64(_E)
    else:
        raise RuntimeError("Need to supply Youngs modulus")
    if physical_parameters.get("nu") is None:
        raise RuntimeError("Need to supply Poisson's ratio")
    else:
        nu = physical_parameters.get("nu")

    # Compute lame parameters
    mu_func, lambda_func = lame_parameters(plane_strain)
    mu = mu_func(E, nu)
    lmbda = lambda_func(E, nu)
    sigma = sigma_func(mu, lmbda)

    # Nitsche parameters and variables
    theta = nitsche_parameters.get("theta")
    if theta is None:
        raise RuntimeError("Need to supply theta for Nitsche imposition of boundary conditions")
    _gamma = nitsche_parameters.get("gamma")
    if _gamma is None:
        raise RuntimeError("Need to supply Coercivity/Stabilization parameter for Nitsche condition")
    else:
        # Stabilization parameter is scaled by Young's modulus.
        gamma: np.float64 = _gamma * E
    # If True, Dirichlet conditions are imposed strongly via lifting;
    # otherwise they are imposed weakly through Nitsche terms below.
    lifting = nitsche_parameters.get("lift_bc", False)

    # Function space and FEM functions
    V = _fem.VectorFunctionSpace(mesh, ("CG", order))
    u = _fem.Function(V)
    v = ufl.TestFunction(V)
    du = ufl.TrialFunction(V)
    h = ufl.CellDiameter(mesh)
    n = ufl.FacetNormal(mesh)

    # Integration measure and ufl part of linear/bilinear form
    # metadata = {"quadrature_degree": quadrature_degree}
    dx = ufl.Measure("dx", domain=mesh, subdomain_data=domain_marker)
    ds = ufl.Measure("ds", domain=mesh, # metadata=metadata,
                     subdomain_data=mesh_tags[0])

    # Bulk elasticity terms of the Jacobian and residual forms.
    J = ufl.inner(sigma(du), epsilon(v)) * dx
    F = ufl.inner(sigma(u), epsilon(v)) * dx

    # Nitsche consistency terms on every puppet (contact) surface.
    for contact_pair in contact_pairs:
        surface_value = int(surfaces.links(0)[contact_pair[0]])
        J += - 0.5 * theta * h / gamma * ufl.inner(sigma(du) * n, sigma(v) * n) * \
            ds(surface_value)
        F += - 0.5 * theta * h / gamma * ufl.inner(sigma(u) * n, sigma(v) * n) * \
            ds(surface_value)

    # Dirichlet boundary conditions
    bcs = []
    if lifting:
        # Strong imposition: build dolfinx DirichletBC objects.
        tdim = mesh.topology.dim
        for bc in dirichlet:
            facets = mesh_tags[0].find(bc[0])
            cells = _mesh.compute_incident_entities(mesh, facets, tdim - 1, tdim)
            u_bc = _fem.Function(V)
            u_bc.interpolate(bc[1], cells)
            u_bc.x.scatter_forward()
            bcs.append(_fem.dirichletbc(u_bc, _fem.locate_dofs_topological(V, tdim - 1, facets)))
    else:
        # Weak (Nitsche) imposition: add consistency, symmetry and penalty
        # terms for each Dirichlet surface instead of BC objects.
        for bc in dirichlet:
            f = _fem.Function(V)
            f.interpolate(bc[1])
            F += - ufl.inner(sigma(u) * n, v) * ds(bc[0])\
                - theta * ufl.inner(sigma(v) * n, u - f) * \
                ds(bc[0]) + gamma / h * ufl.inner(u - f, v) * ds(bc[0])
            J += - ufl.inner(sigma(du) * n, v) * ds(bc[0])\
                - theta * ufl.inner(sigma(v) * n, du) * \
                ds(bc[0]) + gamma / h * ufl.inner(du, v) * ds(bc[0])

    # Neumann boundary conditions (surface tractions).
    for bc in neumann:
        g = _fem.Function(V)
        g.interpolate(bc[1])
        F -= ufl.inner(g, v) * ds(bc[0])

    # body forces
    for bf in body_forces:
        f = _fem.Function(V)
        f.interpolate(bf[1])
        F -= ufl.inner(f, v) * dx(bf[0])

    # Custom assembly
    # create contact class
    with _common.Timer("~Contact: Init"):
        contact = dolfinx_contact.cpp.Contact(mesh_tags, surfaces, contact_pairs,
                                              V._cpp_object, quadrature_degree=quadrature_degree)
    with _common.Timer("~Contact: Distance maps"):
        for i in range(len(contact_pairs)):
            contact.create_distance_map(i)
    # pack constants
    consts = np.array([gamma, theta])

    # Pack material parameters mu and lambda on each contact surface
    with _common.Timer("~Contact: Interpolate coeffs (mu, lmbda)"):
        # Piecewise-constant (DG0) representation of the Lame parameters.
        V2 = _fem.FunctionSpace(mesh, ("DG", 0))
        lmbda2 = _fem.Function(V2)
        lmbda2.interpolate(lambda x: np.full((1, x.shape[1]), lmbda))
        mu2 = _fem.Function(V2)
        mu2.interpolate(lambda x: np.full((1, x.shape[1]), mu))
    entities = []
    with _common.Timer("~Contact: Compute active entities"):
        for pair in contact_pairs:
            entities.append(contact.active_entities(pair[0]))
    material = []
    with _common.Timer("~Contact: Pack coeffs (mu, lmbda"):
        for i in range(len(contact_pairs)):
            material.append(dolfinx_cuas.pack_coefficients([mu2, lmbda2], entities[i]))
    # Pack celldiameter on each surface
    h_packed = []
    with _common.Timer("~Contact: Compute and pack celldiameter"):
        surface_cells = np.unique(np.hstack([entities[i][:, 0] for i in range(len(contact_pairs))]))
        h_int = _fem.Function(V2)
        expr = _fem.Expression(h, V2.element.interpolation_points)
        h_int.interpolate(expr, surface_cells)
        for i in range(len(contact_pairs)):
            h_packed.append(dolfinx_cuas.pack_coefficients([h_int], entities[i]))
    # Pack gap, normals and test functions on each surface
    gaps = []
    normals = []
    test_fns = []
    with _common.Timer("~Contact: Pack gap, normals, testfunction"):
        for i in range(len(contact_pairs)):
            gaps.append(contact.pack_gap(i))
            normals.append(contact.pack_ny(i, gaps[i]))
            test_fns.append(contact.pack_test_functions(i, gaps[i]))
    # Concatenate all coeffs
    # Row layout per facet: [mu, lmbda, h, gap, normal, test functions] —
    # presumably this order must match what the generated kernels expect;
    # verify against the kernel definition if changing.
    coeffs_const = []
    for i in range(len(contact_pairs)):
        coeffs_const.append(np.hstack([material[i], h_packed[i], gaps[i], normals[i], test_fns[i]]))

    # Generate Jacobian data structures
    J_custom = _fem.form(J, form_compiler_params=form_compiler_params, jit_params=jit_params)
    with _common.Timer("~Contact: Generate Jacobian kernel"):
        kernel_jac = contact.generate_kernel(kt.Jac)
    with _common.Timer("~Contact: Create matrix"):
        # NOTE: rebinds J from the UFL form to the PETSc matrix; the UFL
        # form lives on as J_custom.
        J = contact.create_matrix(J_custom)

    # Generate residual data structures
    F_custom = _fem.form(F, form_compiler_params=form_compiler_params, jit_params=jit_params)
    with _common.Timer("~Contact: Generate residual kernel"):
        kernel_rhs = contact.generate_kernel(kt.Rhs)
    with _common.Timer("~Contact: Create vector"):
        b = _fem.petsc.create_vector(F_custom)

    @_common.timed("~Contact: Update coefficients")
    def compute_coefficients(x, coeffs):
        # Refresh the solution-dependent part of the packed coefficients
        # (u on puppet and candidate surfaces) from the Newton iterate x.
        u.vector[:] = x.array
        u_candidate = []
        with _common.Timer("~~Contact: Pack u contact"):
            for i in range(len(contact_pairs)):
                u_candidate.append(contact.pack_u_contact(i, u._cpp_object, gaps[i]))
        u_puppet = []
        with _common.Timer("~~Contact: Pack u"):
            for i in range(len(contact_pairs)):
                u_puppet.append(dolfinx_cuas.pack_coefficients([u], entities[i]))
        for i in range(len(contact_pairs)):
            c_0 = np.hstack([coeffs_const[i], u_puppet[i], u_candidate[i]])
            coeffs[i][:, :] = c_0[:, :]

    @_common.timed("~Contact: Assemble residual")
    def compute_residual(x, b, coeffs):
        # Assemble the residual: custom contact kernels first, then the
        # standard FEM contributions.
        b.zeroEntries()
        with _common.Timer("~~Contact: Contact contributions (in assemble vector)"):
            for i in range(len(contact_pairs)):
                contact.assemble_vector(b, i, kernel_rhs, coeffs[i], consts)
        with _common.Timer("~~Contact: Standard contributions (in assemble vector)"):
            _fem.petsc.assemble_vector(b, F_custom)

        # Apply boundary condition
        if lifting:
            _fem.petsc.apply_lifting(b, [J_custom], bcs=[bcs], x0=[x], scale=-1.0)
        b.ghostUpdate(addv=_PETSc.InsertMode.ADD, mode=_PETSc.ScatterMode.REVERSE)
        # With weak (Nitsche) BCs bcs is empty, so this is a no-op then.
        _fem.petsc.set_bc(b, bcs, x, -1.0)

    @_common.timed("~Contact: Assemble matrix")
    def compute_jacobian_matrix(x, A, coeffs):
        # Assemble the Jacobian: custom contact kernels plus standard terms.
        A.zeroEntries()
        with _common.Timer("~~Contact: Contact contributions (in assemble matrix)"):
            for i in range(len(contact_pairs)):
                contact.assemble_matrix(A, [], i, kernel_jac, coeffs[i], consts)
        with _common.Timer("~~Contact: Standard contributions (in assemble matrix)"):
            _fem.petsc.assemble_matrix(A, J_custom, bcs=bcs)
        A.assemble()

    # coefficient arrays
    num_coeffs = contact.coefficients_size()
    coeffs = np.array([np.zeros((len(entities[i]), num_coeffs)) for i in range(len(contact_pairs))])
    newton_solver = dolfinx_contact.NewtonSolver(mesh.comm, J, b, coeffs)

    # Set matrix-vector computations
    newton_solver.set_residual(compute_residual)
    newton_solver.set_jacobian(compute_jacobian_matrix)
    newton_solver.set_coefficients(compute_coefficients)

    # Set rigid motion nullspace
    null_space = rigid_motions_nullspace(V)
    newton_solver.A.setNearNullSpace(null_space)

    # Set Newton solver options
    newton_solver.set_newton_options(newton_options)

    # Set initial guess
    if initial_guess is None:
        u.x.array[:] = 0
    else:
        u.x.array[:] = initial_guess.x.array[:]

    # Set Krylov solver options
    newton_solver.set_krylov_options(petsc_options)

    dofs_global = V.dofmap.index_map_bs * V.dofmap.index_map.size_global
    _log.set_log_level(_log.LogLevel.OFF)
    # Solve non-linear problem
    # NOTE(review): id(dofs_global) embeds the CPython object id, not the
    # dof count, in the timer label — looks unintended (dofs_global itself
    # was probably meant); left unchanged to keep timer keys stable.
    timing_str = f"~Contact: {id(dofs_global)} Solve Nitsche"
    with _common.Timer(timing_str):
        # Rebinds n (previously the FacetNormal) to the iteration count;
        # harmless since all forms were built above.
        n, converged = newton_solver.solve(u)
    if outfile is not None:
        viewer = _PETSc.Viewer().createASCII(outfile, "a")
        newton_solver.krylov_solver.view(viewer)
    newton_time = _common.timing(timing_str)
    if not converged:
        raise RuntimeError("Newton solver did not converge")
    u.x.scatter_forward()

    print(f"{dofs_global}\n Number of Newton iterations: {n:d}\n",
          f"Number of Krylov iterations {newton_solver.krylov_iterations}\n", flush=True)
    return u, n, newton_solver.krylov_iterations, newton_time[1]
| [
"dolfinx.common.timing",
"ufl.Measure",
"numpy.hstack",
"ufl.FacetNormal",
"numpy.array",
"dolfinx.fem.petsc.set_bc",
"dolfinx_contact.NewtonSolver",
"dolfinx.fem.petsc.apply_lifting",
"dolfinx.fem.petsc.assemble_matrix",
"dolfinx_contact.cpp.Contact",
"dolfinx.fem.Expression",
"dolfinx.fem.lo... | [((5390, 5419), 'dolfinx_contact.helpers.lame_parameters', 'lame_parameters', (['plane_strain'], {}), '(plane_strain)\n', (5405, 5419), False, 'from dolfinx_contact.helpers import epsilon, lame_parameters, rigid_motions_nullspace, sigma_func\n'), ((5487, 5508), 'dolfinx_contact.helpers.sigma_func', 'sigma_func', (['mu', 'lmbda'], {}), '(mu, lmbda)\n', (5497, 5508), False, 'from dolfinx_contact.helpers import epsilon, lame_parameters, rigid_motions_nullspace, sigma_func\n'), ((6033, 6078), 'dolfinx.fem.VectorFunctionSpace', '_fem.VectorFunctionSpace', (['mesh', "('CG', order)"], {}), "(mesh, ('CG', order))\n", (6057, 6078), True, 'import dolfinx.fem as _fem\n'), ((6087, 6103), 'dolfinx.fem.Function', '_fem.Function', (['V'], {}), '(V)\n', (6100, 6103), True, 'import dolfinx.fem as _fem\n'), ((6112, 6131), 'ufl.TestFunction', 'ufl.TestFunction', (['V'], {}), '(V)\n', (6128, 6131), False, 'import ufl\n'), ((6141, 6161), 'ufl.TrialFunction', 'ufl.TrialFunction', (['V'], {}), '(V)\n', (6158, 6161), False, 'import ufl\n'), ((6171, 6193), 'ufl.CellDiameter', 'ufl.CellDiameter', (['mesh'], {}), '(mesh)\n', (6187, 6193), False, 'import ufl\n'), ((6202, 6223), 'ufl.FacetNormal', 'ufl.FacetNormal', (['mesh'], {}), '(mesh)\n', (6217, 6223), False, 'import ufl\n'), ((6355, 6415), 'ufl.Measure', 'ufl.Measure', (['"""dx"""'], {'domain': 'mesh', 'subdomain_data': 'domain_marker'}), "('dx', domain=mesh, subdomain_data=domain_marker)\n", (6366, 6415), False, 'import ufl\n'), ((6425, 6484), 'ufl.Measure', 'ufl.Measure', (['"""ds"""'], {'domain': 'mesh', 'subdomain_data': 'mesh_tags[0]'}), "('ds', domain=mesh, subdomain_data=mesh_tags[0])\n", (6436, 6484), False, 'import ufl\n'), ((8650, 8674), 'numpy.array', 'np.array', (['[gamma, theta]'], {}), '([gamma, theta])\n', (8658, 8674), True, 'import numpy as np\n'), ((10593, 10671), 'dolfinx.fem.form', '_fem.form', (['J'], {'form_compiler_params': 'form_compiler_params', 'jit_params': 'jit_params'}), '(J, 
form_compiler_params=form_compiler_params, jit_params=jit_params)\n', (10602, 10671), True, 'import dolfinx.fem as _fem\n'), ((10938, 11016), 'dolfinx.fem.form', '_fem.form', (['F'], {'form_compiler_params': 'form_compiler_params', 'jit_params': 'jit_params'}), '(F, form_compiler_params=form_compiler_params, jit_params=jit_params)\n', (10947, 11016), True, 'import dolfinx.fem as _fem\n'), ((11236, 11282), 'dolfinx.common.timed', '_common.timed', (['"""~Contact: Update coefficients"""'], {}), "('~Contact: Update coefficients')\n", (11249, 11282), True, 'import dolfinx.common as _common\n'), ((11937, 11981), 'dolfinx.common.timed', '_common.timed', (['"""~Contact: Assemble residual"""'], {}), "('~Contact: Assemble residual')\n", (11950, 11981), True, 'import dolfinx.common as _common\n'), ((12673, 12715), 'dolfinx.common.timed', '_common.timed', (['"""~Contact: Assemble matrix"""'], {}), "('~Contact: Assemble matrix')\n", (12686, 12715), True, 'import dolfinx.common as _common\n'), ((13361, 13414), 'dolfinx_contact.NewtonSolver', 'dolfinx_contact.NewtonSolver', (['mesh.comm', 'J', 'b', 'coeffs'], {}), '(mesh.comm, J, b, coeffs)\n', (13389, 13414), False, 'import dolfinx_contact\n'), ((13666, 13692), 'dolfinx_contact.helpers.rigid_motions_nullspace', 'rigid_motions_nullspace', (['V'], {}), '(V)\n', (13689, 13692), False, 'from dolfinx_contact.helpers import epsilon, lame_parameters, rigid_motions_nullspace, sigma_func\n'), ((14129, 14166), 'dolfinx.log.set_log_level', '_log.set_log_level', (['_log.LogLevel.OFF'], {}), '(_log.LogLevel.OFF)\n', (14147, 14166), True, 'import dolfinx.log as _log\n'), ((14497, 14523), 'dolfinx.common.timing', '_common.timing', (['timing_str'], {}), '(timing_str)\n', (14511, 14523), True, 'import dolfinx.common as _common\n'), ((5086, 5100), 'numpy.float64', 'np.float64', (['_E'], {}), '(_E)\n', (5096, 5100), True, 'import numpy as np\n'), ((7974, 7990), 'dolfinx.fem.Function', '_fem.Function', (['V'], {}), '(V)\n', (7987, 7990), True, 
'import dolfinx.fem as _fem\n'), ((8119, 8135), 'dolfinx.fem.Function', '_fem.Function', (['V'], {}), '(V)\n', (8132, 8135), True, 'import dolfinx.fem as _fem\n'), ((8265, 8296), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Init"""'], {}), "('~Contact: Init')\n", (8278, 8296), True, 'import dolfinx.common as _common\n'), ((8316, 8436), 'dolfinx_contact.cpp.Contact', 'dolfinx_contact.cpp.Contact', (['mesh_tags', 'surfaces', 'contact_pairs', 'V._cpp_object'], {'quadrature_degree': 'quadrature_degree'}), '(mesh_tags, surfaces, contact_pairs, V.\n _cpp_object, quadrature_degree=quadrature_degree)\n', (8343, 8436), False, 'import dolfinx_contact\n'), ((8487, 8527), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Distance maps"""'], {}), "('~Contact: Distance maps')\n", (8500, 8527), True, 'import dolfinx.common as _common\n'), ((8754, 8811), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Interpolate coeffs (mu, lmbda)"""'], {}), "('~Contact: Interpolate coeffs (mu, lmbda)')\n", (8767, 8811), True, 'import dolfinx.common as _common\n'), ((8826, 8861), 'dolfinx.fem.FunctionSpace', '_fem.FunctionSpace', (['mesh', "('DG', 0)"], {}), "(mesh, ('DG', 0))\n", (8844, 8861), True, 'import dolfinx.fem as _fem\n'), ((8879, 8896), 'dolfinx.fem.Function', '_fem.Function', (['V2'], {}), '(V2)\n', (8892, 8896), True, 'import dolfinx.fem as _fem\n'), ((8981, 8998), 'dolfinx.fem.Function', '_fem.Function', (['V2'], {}), '(V2)\n', (8994, 8998), True, 'import dolfinx.fem as _fem\n'), ((9091, 9141), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Compute active entities"""'], {}), "('~Contact: Compute active entities')\n", (9104, 9141), True, 'import dolfinx.common as _common\n'), ((9268, 9317), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Pack coeffs (mu, lmbda"""'], {}), "('~Contact: Pack coeffs (mu, lmbda')\n", (9281, 9317), True, 'import dolfinx.common as _common\n'), ((9519, 9575), 'dolfinx.common.Timer', '_common.Timer', 
(['"""~Contact: Compute and pack celldiameter"""'], {}), "('~Contact: Compute and pack celldiameter')\n", (9532, 9575), True, 'import dolfinx.common as _common\n'), ((9694, 9711), 'dolfinx.fem.Function', '_fem.Function', (['V2'], {}), '(V2)\n', (9707, 9711), True, 'import dolfinx.fem as _fem\n'), ((9727, 9778), 'dolfinx.fem.Expression', '_fem.Expression', (['h', 'V2.element.interpolation_points'], {}), '(h, V2.element.interpolation_points)\n', (9742, 9778), True, 'import dolfinx.fem as _fem\n'), ((10070, 10128), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Pack gap, normals, testfunction"""'], {}), "('~Contact: Pack gap, normals, testfunction')\n", (10083, 10128), True, 'import dolfinx.common as _common\n'), ((10681, 10732), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Generate Jacobian kernel"""'], {}), "('~Contact: Generate Jacobian kernel')\n", (10694, 10732), True, 'import dolfinx.common as _common\n'), ((10796, 10836), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Create matrix"""'], {}), "('~Contact: Create matrix')\n", (10809, 10836), True, 'import dolfinx.common as _common\n'), ((11026, 11077), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Generate residual kernel"""'], {}), "('~Contact: Generate residual kernel')\n", (11039, 11077), True, 'import dolfinx.common as _common\n'), ((11141, 11181), 'dolfinx.common.Timer', '_common.Timer', (['"""~Contact: Create vector"""'], {}), "('~Contact: Create vector')\n", (11154, 11181), True, 'import dolfinx.common as _common\n'), ((11195, 11229), 'dolfinx.fem.petsc.create_vector', '_fem.petsc.create_vector', (['F_custom'], {}), '(F_custom)\n', (11219, 11229), True, 'import dolfinx.fem as _fem\n'), ((14269, 14294), 'dolfinx.common.Timer', '_common.Timer', (['timing_str'], {}), '(timing_str)\n', (14282, 14294), True, 'import dolfinx.common as _common\n'), ((6558, 6568), 'dolfinx_contact.helpers.epsilon', 'epsilon', (['v'], {}), '(v)\n', (6565, 6568), False, 'from 
dolfinx_contact.helpers import epsilon, lame_parameters, rigid_motions_nullspace, sigma_func\n'), ((6603, 6613), 'dolfinx_contact.helpers.epsilon', 'epsilon', (['v'], {}), '(v)\n', (6610, 6613), False, 'from dolfinx_contact.helpers import epsilon, lame_parameters, rigid_motions_nullspace, sigma_func\n'), ((7144, 7205), 'dolfinx.mesh.compute_incident_entities', '_mesh.compute_incident_entities', (['mesh', 'facets', '(tdim - 1)', 'tdim'], {}), '(mesh, facets, tdim - 1, tdim)\n', (7175, 7205), True, 'import dolfinx.mesh as _mesh\n'), ((7225, 7241), 'dolfinx.fem.Function', '_fem.Function', (['V'], {}), '(V)\n', (7238, 7241), True, 'import dolfinx.fem as _fem\n'), ((7475, 7491), 'dolfinx.fem.Function', '_fem.Function', (['V'], {}), '(V)\n', (7488, 7491), True, 'import dolfinx.fem as _fem\n'), ((8033, 8048), 'ufl.inner', 'ufl.inner', (['g', 'v'], {}), '(g, v)\n', (8042, 8048), False, 'import ufl\n'), ((8178, 8193), 'ufl.inner', 'ufl.inner', (['f', 'v'], {}), '(f, v)\n', (8187, 8193), False, 'import ufl\n'), ((10464, 10535), 'numpy.hstack', 'np.hstack', (['[material[i], h_packed[i], gaps[i], normals[i], test_fns[i]]'], {}), '([material[i], h_packed[i], gaps[i], normals[i], test_fns[i]])\n', (10473, 10535), True, 'import numpy as np\n'), ((11392, 11434), 'dolfinx.common.Timer', '_common.Timer', (['"""~~Contact: Pack u contact"""'], {}), "('~~Contact: Pack u contact')\n", (11405, 11434), True, 'import dolfinx.common as _common\n'), ((11605, 11639), 'dolfinx.common.Timer', '_common.Timer', (['"""~~Contact: Pack u"""'], {}), "('~~Contact: Pack u')\n", (11618, 11639), True, 'import dolfinx.common as _common\n'), ((11833, 11890), 'numpy.hstack', 'np.hstack', (['[coeffs_const[i], u_puppet[i], u_candidate[i]]'], {}), '([coeffs_const[i], u_puppet[i], u_candidate[i]])\n', (11842, 11890), True, 'import numpy as np\n'), ((12059, 12129), 'dolfinx.common.Timer', '_common.Timer', (['"""~~Contact: Contact contributions (in assemble vector)"""'], {}), "('~~Contact: Contact contributions 
(in assemble vector)')\n", (12072, 12129), True, 'import dolfinx.common as _common\n'), ((12269, 12340), 'dolfinx.common.Timer', '_common.Timer', (['"""~~Contact: Standard contributions (in assemble vector)"""'], {}), "('~~Contact: Standard contributions (in assemble vector)')\n", (12282, 12340), True, 'import dolfinx.common as _common\n'), ((12354, 12393), 'dolfinx.fem.petsc.assemble_vector', '_fem.petsc.assemble_vector', (['b', 'F_custom'], {}), '(b, F_custom)\n', (12380, 12393), True, 'import dolfinx.fem as _fem\n'), ((12462, 12532), 'dolfinx.fem.petsc.apply_lifting', '_fem.petsc.apply_lifting', (['b', '[J_custom]'], {'bcs': '[bcs]', 'x0': '[x]', 'scale': '(-1.0)'}), '(b, [J_custom], bcs=[bcs], x0=[x], scale=-1.0)\n', (12486, 12532), True, 'import dolfinx.fem as _fem\n'), ((12632, 12666), 'dolfinx.fem.petsc.set_bc', '_fem.petsc.set_bc', (['b', 'bcs', 'x', '(-1.0)'], {}), '(b, bcs, x, -1.0)\n', (12649, 12666), True, 'import dolfinx.fem as _fem\n'), ((12800, 12870), 'dolfinx.common.Timer', '_common.Timer', (['"""~~Contact: Contact contributions (in assemble matrix)"""'], {}), "('~~Contact: Contact contributions (in assemble matrix)')\n", (12813, 12870), True, 'import dolfinx.common as _common\n'), ((13014, 13085), 'dolfinx.common.Timer', '_common.Timer', (['"""~~Contact: Standard contributions (in assemble matrix)"""'], {}), "('~~Contact: Standard contributions (in assemble matrix)')\n", (13027, 13085), True, 'import dolfinx.common as _common\n'), ((13099, 13147), 'dolfinx.fem.petsc.assemble_matrix', '_fem.petsc.assemble_matrix', (['A', 'J_custom'], {'bcs': 'bcs'}), '(A, J_custom, bcs=bcs)\n', (13125, 13147), True, 'import dolfinx.fem as _fem\n'), ((8934, 8965), 'numpy.full', 'np.full', (['(1, x.shape[1])', 'lmbda'], {}), '((1, x.shape[1]), lmbda)\n', (8941, 8965), True, 'import numpy as np\n'), ((9033, 9061), 'numpy.full', 'np.full', (['(1, x.shape[1])', 'mu'], {}), '((1, x.shape[1]), mu)\n', (9040, 9061), True, 'import numpy as np\n'), ((9391, 9449), 
'dolfinx_cuas.pack_coefficients', 'dolfinx_cuas.pack_coefficients', (['[mu2, lmbda2]', 'entities[i]'], {}), '([mu2, lmbda2], entities[i])\n', (9421, 9449), False, 'import dolfinx_cuas\n'), ((9898, 9950), 'dolfinx_cuas.pack_coefficients', 'dolfinx_cuas.pack_coefficients', (['[h_int]', 'entities[i]'], {}), '([h_int], entities[i])\n', (9928, 9950), False, 'import dolfinx_cuas\n'), ((14388, 14403), 'petsc4py.PETSc.Viewer', '_PETSc.Viewer', ([], {}), '()\n', (14401, 14403), True, 'from petsc4py import PETSc as _PETSc\n'), ((7368, 7417), 'dolfinx.fem.locate_dofs_topological', '_fem.locate_dofs_topological', (['V', '(tdim - 1)', 'facets'], {}), '(V, tdim - 1, facets)\n', (7396, 7417), True, 'import dolfinx.fem as _fem\n'), ((11721, 11769), 'dolfinx_cuas.pack_coefficients', 'dolfinx_cuas.pack_coefficients', (['[u]', 'entities[i]'], {}), '([u], entities[i])\n', (11751, 11769), False, 'import dolfinx_cuas\n'), ((7685, 7704), 'ufl.inner', 'ufl.inner', (['(u - f)', 'v'], {}), '(u - f, v)\n', (7694, 7704), False, 'import ufl\n'), ((7875, 7891), 'ufl.inner', 'ufl.inner', (['du', 'v'], {}), '(du, v)\n', (7884, 7891), False, 'import ufl\n')] |
"""Change unique constraint on collection
Revision ID: 8e3f80979dc9
Revises: 3d5fae27a215
Create Date: 2019-12-18 13:14:56.466907
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '3d5fae27a215'
branch_labels = None
depends_on = None
def upgrade():
    """
    SELECT source_id, data_version, sample, COUNT(*) FROM collection
    WHERE transform_type IS NULL or transform_type = ''
    GROUP BY source_id, data_version, sample
    HAVING COUNT(*) > 1;
    """
    # The verification query above returned 0 rows, so the new partial
    # unique index can be created without conflicts.
    table = 'collection'
    name = 'unique_collection_identifiers'
    key_columns = ['source_id', 'data_version', 'sample']
    op.drop_constraint(name, table)
    # Uniqueness now only applies to original (non-transformed) collections.
    op.create_index(
        name,
        table,
        key_columns,
        unique=True,
        postgresql_where=sa.text("transform_type = ''"),
    )
    # Normalize NULLs so the column can be made NOT NULL.
    op.execute("UPDATE collection SET transform_type = '' WHERE transform_type IS NULL")
    op.alter_column(table, 'transform_type', nullable=False)
def downgrade():
    """Restore the original five-column unique constraint and nullable column."""
    table = 'collection'
    name = 'unique_collection_identifiers'
    op.drop_index(name, table)
    op.create_unique_constraint(name, table, [
        'source_id', 'data_version', 'sample',
        'transform_from_collection_id', 'transform_type',
    ])
    op.alter_column(table, 'transform_type', nullable=True)
    op.execute("UPDATE collection SET transform_type = NULL WHERE transform_type = ''")
| [
"sqlalchemy.text",
"alembic.op.drop_constraint",
"alembic.op.alter_column",
"alembic.op.execute",
"alembic.op.drop_index",
"alembic.op.create_unique_constraint"
] | [((560, 625), 'alembic.op.drop_constraint', 'op.drop_constraint', (['"""unique_collection_identifiers"""', '"""collection"""'], {}), "('unique_collection_identifiers', 'collection')\n", (578, 625), False, 'from alembic import op\n'), ((821, 910), 'alembic.op.execute', 'op.execute', (['"""UPDATE collection SET transform_type = \'\' WHERE transform_type IS NULL"""'], {}), '(\n "UPDATE collection SET transform_type = \'\' WHERE transform_type IS NULL")\n', (831, 910), False, 'from alembic import op\n'), ((910, 973), 'alembic.op.alter_column', 'op.alter_column', (['"""collection"""', '"""transform_type"""'], {'nullable': '(False)'}), "('collection', 'transform_type', nullable=False)\n", (925, 973), False, 'from alembic import op\n'), ((997, 1057), 'alembic.op.drop_index', 'op.drop_index', (['"""unique_collection_identifiers"""', '"""collection"""'], {}), "('unique_collection_identifiers', 'collection')\n", (1010, 1057), False, 'from alembic import op\n'), ((1062, 1235), 'alembic.op.create_unique_constraint', 'op.create_unique_constraint', (['"""unique_collection_identifiers"""', '"""collection"""', "['source_id', 'data_version', 'sample', 'transform_from_collection_id',\n 'transform_type']"], {}), "('unique_collection_identifiers', 'collection',\n ['source_id', 'data_version', 'sample', 'transform_from_collection_id',\n 'transform_type'])\n", (1089, 1235), False, 'from alembic import op\n'), ((1248, 1310), 'alembic.op.alter_column', 'op.alter_column', (['"""collection"""', '"""transform_type"""'], {'nullable': '(True)'}), "('collection', 'transform_type', nullable=True)\n", (1263, 1310), False, 'from alembic import op\n'), ((1315, 1403), 'alembic.op.execute', 'op.execute', (['"""UPDATE collection SET transform_type = NULL WHERE transform_type = \'\'"""'], {}), '(\n "UPDATE collection SET transform_type = NULL WHERE transform_type = \'\'")\n', (1325, 1403), False, 'from alembic import op\n'), ((784, 814), 'sqlalchemy.text', 'sa.text', (['"""transform_type = 
\'\'"""'], {}), '("transform_type = \'\'")\n', (791, 814), True, 'import sqlalchemy as sa\n')] |
'''Barst Measurement Computing DAQ Wrapper
==========================================
'''
from functools import partial
from pybarst.mcdaq import MCDAQChannel
from kivy.properties import NumericProperty, ObjectProperty
from moa.threads import ScheduledEventLoop
from moa.device.digital import ButtonViewPort
from cplcom.moa.device import DeviceExceptionBehavior
__all__ = ('MCDAQDevice', )
class MCDAQDevice(DeviceExceptionBehavior, ButtonViewPort, ScheduledEventLoop):
    '''A :class:`moa.device.digital.ButtonViewPort` wrapper around a
    :class:`pybarst.mcdaq.MCDAQChannel` instance which controls a Switch
    and Sense 8/8.
    For this class, :class:`moa.device.digital.ButtonViewPort.dev_map` must be
    provided upon creation and it's a dict whose keys are the property names
    and whose values are the Switch and Sense 8/8 channel numbers that the
    property controls.
    E.g. for a light switch connected to channel 3 on the Switch and Sense
    8/8 output port define the class::
        class MyMCDAQDevice(MCDAQDevice):
            light = BooleanProperty(False)
    And then create the instance with::
        dev = MyMCDAQDevice(dev_map={'light': 3})
    And then we can set it high by calling e.g.::
        dev.set_state(high=['light'])
    For an input devices it can defined similarly and the state of the property
    reflects the value of the port. A switch and sense which has both a input
    and output device still needs to create two device for each.
    '''
    # Attribute names persisted by the configuration system.
    __settings_attrs__ = ('SAS_chan', )
    # Handle returned by request_callback for the (possibly repeating) read
    # request; kept so it can be cancelled on deactivation.
    _read_event = None
    def _write_callback(self, value, mask, result):
        # Runs after a hardware write completes; ``result`` is the timestamp
        # returned by the write, while ``value``/``mask`` were bound via
        # functools.partial when the write was scheduled.
        self.timestamp = result
        # Mirror every channel selected by ``mask`` back onto its property.
        # NOTE: ``iteritems`` implies this module targets Python 2.
        for idx, name in self.chan_dev_map.iteritems():
            if mask & (1 << idx):
                setattr(self, name, bool(value & (1 << idx)))
        self.dispatch('on_data_update', self)
    def _read_callback(self, result, **kwargs):
        # Runs for every completed read; ``result`` is a (timestamp, bitmask)
        # pair covering all input channels.
        t, val = result
        self.timestamp = t
        for idx, name in self.chan_dev_map.iteritems():
            setattr(self, name, bool(val & (1 << idx)))
        self.dispatch('on_data_update', self)
    def set_state(self, high=[], low=[], **kwargs):
        # Set the named output channels high/low. The mutable defaults are
        # safe here because ``high`` and ``low`` are only iterated, never
        # mutated.
        if self.activation != 'active':
            raise TypeError('Can only set state of an active device. Device '
                            'is currently "{}"'.format(self.activation))
        if 'o' not in self.direction:
            raise TypeError('Cannot write state for a input device')
        dev_map = self.dev_map
        mask = 0
        val = 0
        # Build the write mask/value: a set bit in ``mask`` selects the
        # channel; the matching bit in ``val`` gives its new level.
        for name in high:
            idx = dev_map[name]
            val |= (1 << idx)
            mask |= (1 << idx)
        for name in low:
            mask |= (1 << dev_map[name])
        # The write runs on the internal thread; _write_callback then updates
        # the properties once the hardware acknowledges it.
        self.request_callback(
            self.chan.write, callback=partial(self._write_callback, val, mask),
            mask=mask, value=val)
    def get_state(self):
        if self.activation != 'active':
            raise TypeError('Can only read state of an active device. Device '
                            'is currently "{}"'.format(self.activation))
        # Input devices are already read continuously (see activate), so an
        # explicit one-shot read is only needed for output-only devices.
        if 'i' in self.direction:  # happens anyway
            return
        self._read_event = self.request_callback(
            self.chan.read, callback=self._read_callback)
    def activate(self, *largs, **kwargs):
        # Enter 'activating', start the worker thread and open the barst
        # channel asynchronously; 'active' is only entered once the channel
        # is open (see finish_activate below).
        kwargs['state'] = 'activating'
        if not super(MCDAQDevice, self).activate(*largs, **kwargs):
            return False
        self.start_thread()
        self.chan = MCDAQChannel(chan=self.SAS_chan, server=self.server.server)
        def finish_activate(*largs):
            self.activation = 'active'
            # Input devices poll continuously so properties track the port.
            if 'i' in self.direction:
                self._read_event = self.request_callback(
                    self.chan.read, repeat=True, callback=self._read_callback)
        self.request_callback(self._start_channel, finish_activate)
        return True
    def _start_channel(self):
        # Runs on the internal thread: open the channel and clear all output
        # lines so the device starts from a known state.
        chan = self.chan
        chan.open_channel()
        if 'o' in self.direction:
            chan.write(mask=0xFF, value=0)
    def deactivate(self, *largs, **kwargs):
        # Mirror of activate: cancel the pending read, clear outputs, then
        # stop the worker thread once the channel is shut down.
        kwargs['state'] = 'deactivating'
        if not super(MCDAQDevice, self).deactivate(*largs, **kwargs):
            return False
        self.remove_request(self.chan.read, self._read_event)
        self._read_event = None
        def finish_deactivate(*largs):
            self.activation = 'inactive'
            self.stop_thread()
        self.request_callback(self._stop_channel, finish_deactivate)
        return True
    def _stop_channel(self):
        # Runs on the internal thread during deactivation.
        if 'o' in self.direction:
            self.chan.write(mask=0xFF, value=0)
        # Stop the continuous read started in activate, flushing pending data.
        if 'i' in self.direction and self.chan.continuous:
            self.chan.cancel_read(flush=True)
    chan = ObjectProperty(None)
    '''The internal :class:`pybarst.mcdaq.MCDAQChannel` instance.
    It is read only and is automatically created.
    '''
    server = ObjectProperty(None, allownone=True)
    '''The internal barst :class:`pybarst.core.server.BarstServer`. It
    must be provided to the instance.
    '''
    SAS_chan = NumericProperty(0)
    '''The channel number of the Switch & Sense 8/8 as configured in InstaCal.
    Defaults to zero.
    '''
| [
"functools.partial",
"kivy.properties.NumericProperty",
"kivy.properties.ObjectProperty",
"pybarst.mcdaq.MCDAQChannel"
] | [((4762, 4782), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (4776, 4782), False, 'from kivy.properties import NumericProperty, ObjectProperty\n'), ((4921, 4957), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {'allownone': '(True)'}), '(None, allownone=True)\n', (4935, 4957), False, 'from kivy.properties import NumericProperty, ObjectProperty\n'), ((5091, 5109), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (5106, 5109), False, 'from kivy.properties import NumericProperty, ObjectProperty\n'), ((3495, 3554), 'pybarst.mcdaq.MCDAQChannel', 'MCDAQChannel', ([], {'chan': 'self.SAS_chan', 'server': 'self.server.server'}), '(chan=self.SAS_chan, server=self.server.server)\n', (3507, 3554), False, 'from pybarst.mcdaq import MCDAQChannel\n'), ((2799, 2839), 'functools.partial', 'partial', (['self._write_callback', 'val', 'mask'], {}), '(self._write_callback, val, mask)\n', (2806, 2839), False, 'from functools import partial\n')] |
from multiprocessing import Pool, Manager
from time import sleep
from wmi import WMI
from win32com.client import GetObject
from subprocess import Popen
from collections import Iterable
from tqdm import tqdm
from os import getpid
from sapy_script.Session import Session
session_process = None
all_processes_id = []
def _on_init(sid, p_ids):
    """Pool initializer: bind this worker process to its own SAP GUI session.

    ``sid`` is the system session id of the connection the workers should
    attach to; ``p_ids`` is a Manager-backed shared list of worker pids. The
    position of this process's pid in that list selects which session of the
    connection it owns, so each worker gets a distinct session.
    """
    p_ids.append(getpid())
    global session_process
    app = SAP.app()
    i = 0
    while True:
        # Scan the open connections for the one whose first session matches
        # the requested system session id.
        con = app.Children(i)
        if con.Children(0).Info.systemsessionid == sid:
            # The worker's index in the shared pid list picks its session.
            session = con.Children(p_ids.index(getpid()))
            session_process = Session(session)
            break
        i = i + 1
def _task_executor(task):
task['func'](task['data'])
class SAP:
    """Drive SAP GUI scripting and fan tasks out over multiple SAP sessions
    using a multiprocessing pool (one worker process per session)."""
    def __init__(self, max_sessions=16):
        self._con = None
        self._tasks = []
        self.max_sessions = max_sessions
        # Instance attribute shadows the ``session`` staticmethod below: on
        # an instance, ``self.session(i)`` wraps session *i* of the current
        # connection.
        self.session = lambda i=0: Session(self._con.Children(i))
    @staticmethod
    def app():
        """Open SAPGui and return its COM scripting engine."""
        # Launch the SAP logon pad if it is not running yet, then poll until
        # the scripting engine becomes reachable via COM.
        wmi_obj = WMI()
        sap_exists = len(wmi_obj.Win32_Process(name='saplgpad.exe')) > 0
        if not sap_exists:
            Popen(['C:\Program Files (x86)\SAP\FrontEnd\SAPgui\saplgpad.exe'])
        while True:
            try:
                #temp = GetObject("SAPGUI").GetScriptingEngine
                #temp.Change("teste 456", "", "", "", "", ".\LocalSystem", "")
                #objService.Change(,, , , , , ".\LocalSystem", "")
                return GetObject("SAPGUI").GetScriptingEngine
            except:
                sleep(1)
                pass
    def connect(self, environment, client=None, user=None, password=None, lang=None, force=False):
        """Log in to *environment*; returns True on success, False otherwise."""
        con = SAP.app().OpenConnection(environment, True)
        session = Session(con.Children(0))
        # Fill in whichever logon fields were supplied.
        if client is not None:
            session.findById("wnd[0]/usr/txtRSYST-MANDT").Text = client
        if user is not None:
            session.findById("wnd[0]/usr/txtRSYST-BNAME").Text = user
        if password is not None:
            session.findById("wnd[0]/usr/pwdRSYST-BCODE").Text = password
        if lang is not None:
            session.findById("wnd[0]/usr/txtRSYST-LANGU").Text = lang
        session.findById("wnd[0]").sendVKey(0)
        # Possible password-change popup window
        change_pwd = False
        try:
            session.findById("wnd[1]/usr/pwdRSYST-NCODE").text = ''
            session.findById("wnd[1]/usr/pwdRSYST-NCOD2").text = ''
            change_pwd = True
        except:
            pass
        if change_pwd:
            raise ValueError('Please, set a new Password')
        # Multi-logon dialog: either kick the existing SAP connection
        # (force=True) or abort this logon attempt.
        if force:
            try:
                session.findById("wnd[1]/usr/radMULTI_LOGON_OPT1").select()
                session.findById("wnd[1]/tbar[0]/btn[0]").press()
            except:
                pass
        else:
            try:
                session.findById("wnd[1]/usr/radMULTI_LOGON_OPT1").select()
                session.findById("wnd[1]").sendVKey(12)
                return False
            except:
                pass
        # Verify the connection actually succeeded.
        if session.is_connected():
            self._con = con
            return True
        self._con = None
        return False
    @property
    def connected(self):
        return self.session().is_connected()
    @staticmethod
    def session():
        # Worker-process variant: returns the session bound in _on_init.
        global session_process
        return session_process
    def sid(self):
        # System session id of this process's session.
        return self.session().Info.systemsessionid
    def logout(self):
        # '/nex' ends the SAP session immediately.
        session = self.session()
        session.findById("wnd[0]/tbar[0]/okcd").text = "/nex"
        session.findById("wnd[0]").sendVKey(0)
        del session
        self._con = None
    @property
    def number_of_sessions(self):
        return 0 if self._con is None else len(self._con.Children)
    @number_of_sessions.setter
    def number_of_sessions(self, value):
        # Grow or shrink the number of open sessions towards ``value``,
        # clamped to [1, max_sessions].
        size = self.number_of_sessions
        if size == 0:
            return
        value = min(max(int(value), 1), self.max_sessions)
        minus = value < size
        # Ascending indices when growing, descending when shrinking.
        arr = list(range(size, value))
        arr.extend(reversed(range(value, size)))
        for i in arr:
            if minus:
                # '/i' closes the current session window.
                session = self.session(i)
                session.findById("wnd[0]/tbar[0]/okcd").text = "/i"
                session.findById("wnd[0]").sendVKey(0)
            else:
                self.session().createSession()
            sleep(0.5)
    def clear_tasks(self):
        self._tasks = []
    def add_task(self, func, data):
        # Queue one task per element of ``data``, all sharing ``func``.
        for dt in data:
            self._tasks.append({'func': func, 'data': dt})
    def execute_tasks(self, resize_sessions=False):
        """Run all queued tasks across the open sessions; returns the results."""
        total = len(self._tasks)
        if total == 0:
            return
        if resize_sessions:
            self.number_of_sessions = total
        size = self.number_of_sessions
        if size == 0:
            return
        # Shared pid list lets each worker claim a unique session (_on_init).
        sess_manager = Manager().list([])
        pool = Pool(processes=self.number_of_sessions, initializer=_on_init, initargs=(self.sid(), sess_manager))
        response = list(tqdm(pool.imap_unordered(_task_executor, self._tasks)))
        pool.close()
        pool.join()
        return list(response)
    def execute_function(self, func, data, resize_sessions=False):
        # Convenience wrapper: queue, run and clear in one call. Scalars are
        # wrapped in a one-element list.
        if not isinstance(data, Iterable):
            data = [data]
        self.clear_tasks()
        self.add_task(func=func, data=data)
        response = self.execute_tasks(resize_sessions=resize_sessions)
        self.clear_tasks()
        return response
    @staticmethod
    def multi_arguments(func):
        # Adapt ``func(**kwargs)`` so it can be called with a single dict,
        # as required by the task executor.
        def convert_args(pr):
            return func(**pr)
        return convert_args
| [
"subprocess.Popen",
"wmi.WMI",
"time.sleep",
"sapy_script.Session.Session",
"os.getpid",
"multiprocessing.Manager",
"win32com.client.GetObject"
] | [((361, 369), 'os.getpid', 'getpid', ([], {}), '()\n', (367, 369), False, 'from os import getpid\n'), ((1020, 1025), 'wmi.WMI', 'WMI', ([], {}), '()\n', (1023, 1025), False, 'from wmi import WMI\n'), ((618, 634), 'sapy_script.Session.Session', 'Session', (['session'], {}), '(session)\n', (625, 634), False, 'from sapy_script.Session import Session\n'), ((1141, 1212), 'subprocess.Popen', 'Popen', (["['C:\\\\Program Files (x86)\\\\SAP\\\\FrontEnd\\\\SAPgui\\\\saplgpad.exe']"], {}), "(['C:\\\\Program Files (x86)\\\\SAP\\\\FrontEnd\\\\SAPgui\\\\saplgpad.exe'])\n", (1146, 1212), False, 'from subprocess import Popen\n'), ((4458, 4468), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (4463, 4468), False, 'from time import sleep\n'), ((4948, 4957), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (4955, 4957), False, 'from multiprocessing import Pool, Manager\n'), ((577, 585), 'os.getpid', 'getpid', ([], {}), '()\n', (583, 585), False, 'from os import getpid\n'), ((1478, 1497), 'win32com.client.GetObject', 'GetObject', (['"""SAPGUI"""'], {}), "('SAPGUI')\n", (1487, 1497), False, 'from win32com.client import GetObject\n'), ((1553, 1561), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (1558, 1561), False, 'from time import sleep\n')] |
import json
import os
import os.path as path
import re
from flask import Blueprint, request
blueprint = Blueprint("editor", __name__)

# All editable robot sources live in ./robotsrc; create it on first run.
robotsrc_path = path.join(os.getcwd(), "robotsrc")
if not path.exists(robotsrc_path):
    os.mkdir(robotsrc_path)

# (Re)create main.py on every import; a context manager guarantees the
# handle is closed even if the write fails.
main_path = path.join(robotsrc_path, 'main.py')
with open(main_path, 'w') as main_file:
    main_file.write('# DO NOT DELETE\n')

blocks_path = path.join(robotsrc_path, 'blocks.json')
@blueprint.route('/')
def get_files():
    """Return the editor state as JSON: main path, block config and projects."""
    def is_editable(name):
        # Editable files: .py / .xml sources plus blocks.json, never main.py.
        if not path.isfile(path.join(robotsrc_path, name)):
            return False
        if name == 'main.py':
            return False
        return name.endswith('.py') or name.endswith('.xml') or name == 'blocks.json'

    def load_project(name):
        with open(path.join(robotsrc_path, name), 'r') as handle:
            return {'filename': name, 'content': handle.read()}

    project_names = [name for name in os.listdir(robotsrc_path) if is_editable(name)]

    blocks = {}
    if path.exists(blocks_path):
        with open(blocks_path, 'r') as blocks_file:
            try:
                blocks = json.load(blocks_file)
            except ValueError:
                pass
    # Guarantee the block config always carries all four top-level keys.
    blocks.setdefault("requires", [])
    blocks.setdefault("header", "")
    blocks.setdefault("footer", "")
    blocks.setdefault("blocks", [])

    return json.dumps({
        'main': main_path,
        'blocks': blocks,
        'projects': [load_project(name) for name in project_names],
    })
@blueprint.route("/save/<string:filename>", methods=["POST"])
def save_file(filename):
    """Persist the POSTed request body to *filename* inside robotsrc.

    Only names containing exactly one dot are accepted, which rejects
    '..'-style traversal attempts; anything else is silently ignored.
    """
    # str.count replaces the original re.findall("\\.") — same result,
    # no regex and no invalid-escape-sequence warning.
    # NOTE(review): this dot-count check is a weak guard; Flask's <string:>
    # converter already blocks '/', but consider an extension whitelist too.
    if filename.count(".") == 1:
        with open(path.join(robotsrc_path, filename), 'w') as f:
            f.write(request.data.decode('utf-8'))
    return ""
@blueprint.route("/delete/<string:filename>", methods=["DELETE"])
def delete_file(filename):
    """Delete *filename* from robotsrc; 'blocks.json' is protected.

    Mirrors save_file: only names with exactly one dot are accepted, which
    rejects '..'-style traversal attempts.
    """
    if filename == "blocks.json":
        return ""
    # str.count replaces the original re.findall("\\.") — same result,
    # no regex and no invalid-escape-sequence warning.
    if filename.count(".") == 1:
        os.unlink(path.join(robotsrc_path, filename))
    return ""
| [
"os.path.exists",
"os.listdir",
"flask.request.data.decode",
"os.path.join",
"os.getcwd",
"os.mkdir",
"json.load",
"re.findall",
"flask.Blueprint"
] | [((106, 135), 'flask.Blueprint', 'Blueprint', (['"""editor"""', '__name__'], {}), "('editor', __name__)\n", (115, 135), False, 'from flask import Blueprint, request\n'), ((263, 298), 'os.path.join', 'path.join', (['robotsrc_path', '"""main.py"""'], {}), "(robotsrc_path, 'main.py')\n", (272, 298), True, 'import os.path as path\n'), ((401, 440), 'os.path.join', 'path.join', (['robotsrc_path', '"""blocks.json"""'], {}), "(robotsrc_path, 'blocks.json')\n", (410, 440), True, 'import os.path as path\n'), ((163, 174), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (172, 174), False, 'import os\n'), ((195, 221), 'os.path.exists', 'path.exists', (['robotsrc_path'], {}), '(robotsrc_path)\n', (206, 221), True, 'import os.path as path\n'), ((227, 250), 'os.mkdir', 'os.mkdir', (['robotsrc_path'], {}), '(robotsrc_path)\n', (235, 250), False, 'import os\n'), ((1015, 1039), 'os.path.exists', 'path.exists', (['blocks_path'], {}), '(blocks_path)\n', (1026, 1039), True, 'import os.path as path\n'), ((1707, 1734), 're.findall', 're.findall', (['"""\\\\."""', 'filename'], {}), "('\\\\.', filename)\n", (1717, 1734), False, 'import re\n'), ((2044, 2071), 're.findall', 're.findall', (['"""\\\\."""', 'filename'], {}), "('\\\\.', filename)\n", (2054, 2071), False, 'import re\n'), ((514, 539), 'os.listdir', 'os.listdir', (['robotsrc_path'], {}), '(robotsrc_path)\n', (524, 539), False, 'import os\n'), ((2108, 2142), 'os.path.join', 'path.join', (['robotsrc_path', 'filename'], {}), '(robotsrc_path, filename)\n', (2117, 2142), True, 'import os.path as path\n'), ((790, 828), 'os.path.join', 'path.join', (['robotsrc_path', 'project_path'], {}), '(robotsrc_path, project_path)\n', (799, 828), True, 'import os.path as path\n'), ((1136, 1158), 'json.load', 'json.load', (['blocks_file'], {}), '(blocks_file)\n', (1145, 1158), False, 'import json\n'), ((1771, 1805), 'os.path.join', 'path.join', (['robotsrc_path', 'filename'], {}), '(robotsrc_path, filename)\n', (1780, 1805), True, 'import os.path as 
path\n'), ((1838, 1866), 'flask.request.data.decode', 'request.data.decode', (['"""utf-8"""'], {}), "('utf-8')\n", (1857, 1866), False, 'from flask import Blueprint, request\n'), ((576, 603), 'os.path.join', 'path.join', (['robotsrc_path', 'f'], {}), '(robotsrc_path, f)\n', (585, 603), True, 'import os.path as path\n')] |
"""
Tests for dit.divergences.jensen_shannon_divergence.
"""
from nose.tools import assert_almost_equal, assert_raises
from dit import Distribution
from dit.exceptions import ditException
from dit.divergences.jensen_shannon_divergence import (
jensen_shannon_divergence as JSD,
jensen_shannon_divergence_pmf as JSD_pmf
)
def test_jsd0():
    """ Test the JSD of a distribution but with weights misspecified."""
    uniform = Distribution("AB", [0.5, 0.5])
    assert_raises(ditException, JSD, uniform, uniform)
def test_jsd1():
    """ Test the JSD of a distribution with itself """
    uniform = Distribution("AB", [0.5, 0.5])
    assert_almost_equal(JSD([uniform, uniform]), 0)
def test_jsd2():
    """ Test the JSD with half-overlapping distributions """
    first = Distribution("AB", [0.5, 0.5])
    second = Distribution("BC", [0.5, 0.5])
    divergence = JSD([first, second])
    assert_almost_equal(divergence, 0.5)
def test_jsd3():
    """ Test the JSD with disjoint distributions """
    first = Distribution("AB", [0.5, 0.5])
    second = Distribution("CD", [0.5, 0.5])
    divergence = JSD([first, second])
    assert_almost_equal(divergence, 1.0)
def test_jsd4():
    """ Test the JSD with half-overlapping distributions with weights """
    dists = [Distribution("AB", [0.5, 0.5]), Distribution("BC", [0.5, 0.5])]
    assert_almost_equal(JSD(dists, [0.25, 0.75]), 0.40563906222956625)
def test_jsd5():
    """ Test that JSD fails when more weights than dists are given """
    dists = [Distribution("AB", [0.5, 0.5]), Distribution("BC", [0.5, 0.5])]
    assert_raises(ditException, JSD, dists, [0.1, 0.6, 0.3])
def test_jsd_pmf1():
    """ Test the JSD of a distribution with itself """
    pmf = [0.5, 0.5]
    assert_almost_equal(JSD_pmf([pmf, pmf]), 0)
def test_jsd_pmf2():
    """ Test the JSD with half-overlapping distributions """
    first = [0.5, 0.5, 0.0]
    second = [0.0, 0.5, 0.5]
    assert_almost_equal(JSD_pmf([first, second]), 0.5)
def test_jsd_pmf3():
    """ Test the JSD with disjoint distributions """
    first = [0.5, 0.5, 0.0, 0.0]
    second = [0.0, 0.0, 0.5, 0.5]
    assert_almost_equal(JSD_pmf([first, second]), 1.0)
def test_jsd_pmf4():
    """ Test the JSD with half-overlapping distributions with weights """
    pmfs = [[0.5, 0.5, 0.0], [0.0, 0.5, 0.5]]
    assert_almost_equal(JSD_pmf(pmfs, [0.25, 0.75]), 0.40563906222956625)
def test_jsd_pmf5():
    """ Test that JSD fails when more weights than dists are given """
    pmfs = [[0.5, 0.5, 0.0], [0.0, 0.5, 0.5]]
    assert_raises(ditException, JSD_pmf, pmfs, [0.1, 0.6, 0.2, 0.1])
| [
"dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence_pmf",
"nose.tools.assert_almost_equal",
"dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence",
"nose.tools.assert_raises",
"dit.Distribution"
] | [((431, 461), 'dit.Distribution', 'Distribution', (['"""AB"""', '[0.5, 0.5]'], {}), "('AB', [0.5, 0.5])\n", (443, 461), False, 'from dit import Distribution\n'), ((466, 506), 'nose.tools.assert_raises', 'assert_raises', (['ditException', 'JSD', 'd1', 'd1'], {}), '(ditException, JSD, d1, d1)\n', (479, 506), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((589, 619), 'dit.Distribution', 'Distribution', (['"""AB"""', '[0.5, 0.5]'], {}), "('AB', [0.5, 0.5])\n", (601, 619), False, 'from dit import Distribution\n'), ((630, 643), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence', 'JSD', (['[d1, d1]'], {}), '([d1, d1])\n', (633, 643), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((648, 675), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(0)'], {}), '(jsd, 0)\n', (667, 675), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((764, 794), 'dit.Distribution', 'Distribution', (['"""AB"""', '[0.5, 0.5]'], {}), "('AB', [0.5, 0.5])\n", (776, 794), False, 'from dit import Distribution\n'), ((804, 834), 'dit.Distribution', 'Distribution', (['"""BC"""', '[0.5, 0.5]'], {}), "('BC', [0.5, 0.5])\n", (816, 834), False, 'from dit import Distribution\n'), ((845, 858), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence', 'JSD', (['[d1, d2]'], {}), '([d1, d2])\n', (848, 858), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((863, 892), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(0.5)'], {}), '(jsd, 0.5)\n', (882, 892), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((973, 1003), 'dit.Distribution', 'Distribution', (['"""AB"""', '[0.5, 0.5]'], {}), "('AB', [0.5, 0.5])\n", (985, 1003), False, 'from dit import Distribution\n'), ((1013, 1043), 
'dit.Distribution', 'Distribution', (['"""CD"""', '[0.5, 0.5]'], {}), "('CD', [0.5, 0.5])\n", (1025, 1043), False, 'from dit import Distribution\n'), ((1054, 1067), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence', 'JSD', (['[d1, d2]'], {}), '([d1, d2])\n', (1057, 1067), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((1072, 1101), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(1.0)'], {}), '(jsd, 1.0)\n', (1091, 1101), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((1203, 1233), 'dit.Distribution', 'Distribution', (['"""AB"""', '[0.5, 0.5]'], {}), "('AB', [0.5, 0.5])\n", (1215, 1233), False, 'from dit import Distribution\n'), ((1243, 1273), 'dit.Distribution', 'Distribution', (['"""BC"""', '[0.5, 0.5]'], {}), "('BC', [0.5, 0.5])\n", (1255, 1273), False, 'from dit import Distribution\n'), ((1284, 1311), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence', 'JSD', (['[d1, d2]', '[0.25, 0.75]'], {}), '([d1, d2], [0.25, 0.75])\n', (1287, 1311), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((1316, 1361), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(0.40563906222956625)'], {}), '(jsd, 0.40563906222956625)\n', (1335, 1361), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((1460, 1490), 'dit.Distribution', 'Distribution', (['"""AB"""', '[0.5, 0.5]'], {}), "('AB', [0.5, 0.5])\n", (1472, 1490), False, 'from dit import Distribution\n'), ((1500, 1530), 'dit.Distribution', 'Distribution', (['"""BC"""', '[0.5, 0.5]'], {}), "('BC', [0.5, 0.5])\n", (1512, 1530), False, 'from dit import Distribution\n'), ((1535, 1594), 'nose.tools.assert_raises', 'assert_raises', (['ditException', 'JSD', '[d1, d2]', '[0.1, 0.6, 0.3]'], {}), '(ditException, JSD, [d1, d2], [0.1, 0.6, 
0.3])\n', (1548, 1594), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((1702, 1719), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence_pmf', 'JSD_pmf', (['[d1, d1]'], {}), '([d1, d1])\n', (1709, 1719), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((1724, 1751), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(0)'], {}), '(jsd, 0)\n', (1743, 1751), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((1895, 1912), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence_pmf', 'JSD_pmf', (['[d1, d2]'], {}), '([d1, d2])\n', (1902, 1912), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((1917, 1946), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(0.5)'], {}), '(jsd, 0.5)\n', (1936, 1946), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((2092, 2109), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence_pmf', 'JSD_pmf', (['[d1, d2]'], {}), '([d1, d2])\n', (2099, 2109), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((2114, 2143), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(1.0)'], {}), '(jsd, 1.0)\n', (2133, 2143), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((2300, 2331), 'dit.divergences.jensen_shannon_divergence.jensen_shannon_divergence_pmf', 'JSD_pmf', (['[d1, d2]', '[0.25, 0.75]'], {}), '([d1, d2], [0.25, 0.75])\n', (2307, 2331), True, 'from dit.divergences.jensen_shannon_divergence import jensen_shannon_divergence as JSD, jensen_shannon_divergence_pmf as JSD_pmf\n'), ((2336, 2381), 'nose.tools.assert_almost_equal', 'assert_almost_equal', (['jsd', '(0.40563906222956625)'], {}), '(jsd, 
0.40563906222956625)\n', (2355, 2381), False, 'from nose.tools import assert_almost_equal, assert_raises\n'), ((2529, 2597), 'nose.tools.assert_raises', 'assert_raises', (['ditException', 'JSD_pmf', '[d1, d2]', '[0.1, 0.6, 0.2, 0.1]'], {}), '(ditException, JSD_pmf, [d1, d2], [0.1, 0.6, 0.2, 0.1])\n', (2542, 2597), False, 'from nose.tools import assert_almost_equal, assert_raises\n')] |
'''
Descripttion:
Version: 1.0
Author: ZhangHongYu
Date: 2022-02-05 18:23:00
LastEditors: ZhangHongYu
LastEditTime: 2022-05-17 16:26:12
'''
import os
import sys
import json
import argparse
from transformers import AlbertTokenizer
from pytorch_pretrained_bert import BertTokenizer, BertForMaskedLM
# Scan every CLOTH-style JSON file in the directory given on the command
# line, tokenize each sentence of its "article" with the ALBERT tokenizer,
# and report the fraction of joined token strings longer than 512 chars.
file_path = sys.argv[1]
PAD, MASK, CLS, SEP = '[PAD]', '[MASK]', '[CLS]', '[SEP]'
bert_tokenizer = AlbertTokenizer.from_pretrained('albert-xxlarge-v2')

max_len = -1     # longest joined token string seen (was `max`, shadowing the builtin)
over_limit = 0   # sentences whose joined token string exceeds 512 characters
total = 0        # total non-empty sentences inspected
for fname in os.listdir(file_path):
    if not fname.endswith(".json"):
        continue
    with open(os.path.join(file_path, fname), 'r') as f:
        record = json.load(f)   # was bound to `dict`, shadowing the builtin
    sentences = record['article'].split('.')
    for sentence in sentences:
        # CLOTH cloze blanks are written as "_"; map them to the MASK token
        sentence = sentence.replace('_', '[MASK]')
        tokens = bert_tokenizer.tokenize(sentence)
        if len(tokens) == 0:
            continue
        # ensure BERT-style sentence delimiters
        if tokens[0] != CLS:
            tokens = [CLS] + tokens
        if tokens[-1] != SEP:
            tokens.append(SEP)
        joined = ''.join(tokens)   # was bound to `str`, shadowing the builtin
        total += 1
        max_len = max(max_len, len(joined))
        if len(joined) > 512:
            over_limit += 1
# guard against an input directory with no usable sentences
print(over_limit / total if total else 0.0)
| [
"json.load",
"os.listdir",
"os.path.join",
"transformers.AlbertTokenizer.from_pretrained"
] | [((517, 569), 'transformers.AlbertTokenizer.from_pretrained', 'AlbertTokenizer.from_pretrained', (['"""albert-xxlarge-v2"""'], {}), "('albert-xxlarge-v2')\n", (548, 569), False, 'from transformers import AlbertTokenizer\n'), ((601, 622), 'os.listdir', 'os.listdir', (['file_path'], {}), '(file_path)\n', (611, 622), False, 'import os\n'), ((732, 744), 'json.load', 'json.load', (['f'], {}), '(f)\n', (741, 744), False, 'import json\n'), ((673, 702), 'os.path.join', 'os.path.join', (['file_path', 'file'], {}), '(file_path, file)\n', (685, 702), False, 'import os\n')] |
import re
from mimetypes import guess_type
from django.conf import settings
from datagrowth.processors import ExtractProcessor
from datagrowth.utils import reach
from core.constants import HIGHER_EDUCATION_LEVELS, RESTRICTED_MATERIAL_SETS
class SharekitMetadataExtraction(ExtractProcessor):
    """Extraction callables that map Sharekit API JSON nodes onto the
    metadata fields listed in SHAREKIT_EXTRACTION_OBJECTIVE."""

    # Matches any URL containing youtube.com or youtu.be (case-insensitive).
    youtube_regex = re.compile(r".*(youtube\.com|youtu\.be).*", re.IGNORECASE)

    @classmethod
    def get_record_state(cls, node):
        """Return the record's meta status, defaulting to "active"."""
        return node.get("meta", {}).get("status", "active")

    #############################
    # GENERIC
    #############################

    @classmethod
    def get_files(cls, node):
        """Return file and link entries as dicts with mime_type/url/title.

        Files lacking a mime type or URL are dropped; links are typed
        text/html and get a generated "URL <n>" title when unnamed.
        """
        files = node["attributes"].get("files", []) or []
        links = node["attributes"].get("links", []) or []
        output = [
            {
                "mime_type": file["resourceMimeType"],
                "url": file["url"],
                "title": file["fileName"]
            }
            for file in files if file["resourceMimeType"] and file["url"]
        ]
        output += [
            {
                "mime_type": "text/html",
                "url": link["url"],
                "title": link.get("urlName", None) or f"URL {ix+1}"
            }
            for ix, link in enumerate(links)
        ]
        return output

    @classmethod
    def get_url(cls, node):
        """Return the stripped URL of the first file entry, or None."""
        files = cls.get_files(node)
        if not files:
            return
        return files[0]["url"].strip()

    @classmethod
    def get_mime_type(cls, node):
        """Return the mime type of the first file entry, or None."""
        files = cls.get_files(node)
        if not files:
            return
        return files[0]["mime_type"]

    @classmethod
    def get_technical_type(cls, node):
        """Derive the technical type: explicit attribute first, then a lookup
        on the first file's mime type, then a guess from its URL."""
        technical_type = node["attributes"].get("technicalFormat", None)
        if technical_type:
            return technical_type
        files = cls.get_files(node)
        if not files:
            return
        technical_type = settings.MIME_TYPE_TO_TECHNICAL_TYPE.get(files[0]["mime_type"], None)
        if technical_type:
            return technical_type
        file_url = files[0]["url"]
        if not file_url:
            return
        mime_type, encoding = guess_type(file_url)
        return settings.MIME_TYPE_TO_TECHNICAL_TYPE.get(mime_type, "unknown")

    @classmethod
    def get_material_types(cls, node):
        """Normalize typesLearningMaterial to a list of truthy values."""
        material_types = node["attributes"].get("typesLearningMaterial", [])
        if not material_types:
            return []
        elif isinstance(material_types, list):
            return [material_type for material_type in material_types if material_type]
        else:
            return [material_types]

    @classmethod
    def get_copyright(cls, node):
        """Return the termsOfUse attribute (copyright string) or None."""
        return node["attributes"].get("termsOfUse", None)

    @classmethod
    def get_from_youtube(cls, node):
        """Return True when the record's URL points at YouTube."""
        url = cls.get_url(node)
        if not url:
            return False
        return cls.youtube_regex.match(url) is not None

    @classmethod
    def get_authors(cls, node):
        """Return authors as a list of {name, email} dicts."""
        authors = node["attributes"].get("authors", []) or []
        return [
            {
                "name": author["person"]["name"],
                "email": author["person"]["email"]
            }
            for author in authors
        ]

    @classmethod
    def get_publishers(cls, node):
        """Return publishers as a list; records tagged with an "hbovpk"
        keyword additionally get "HBO Verpleegkunde" appended."""
        publishers = node["attributes"].get("publishers", []) or []
        if isinstance(publishers, str):
            publishers = [publishers]
        keywords = node["attributes"].get("keywords", []) or []
        # Check HBOVPK tags
        hbovpk_keywords = [keyword for keyword in keywords if keyword and "hbovpk" in keyword.lower()]
        if hbovpk_keywords:
            publishers.append("HBO Verpleegkunde")
        return publishers

    @classmethod
    def get_lom_educational_levels(cls, node):
        """Return the unique non-empty educational level values."""
        educational_levels = node["attributes"].get("educationalLevels", [])
        if not educational_levels:
            return []
        return list(set([
            educational_level["value"] for educational_level in educational_levels
            if educational_level["value"]
        ]))

    @classmethod
    def get_lowest_educational_level(cls, node):
        """Map the record's educational levels to a numeric level.

        Returns -1 when no levels are present, 0 when any level falls
        outside HIGHER_EDUCATION_LEVELS, otherwise the minimum matching
        numeric level (starting from 3).
        """
        educational_levels = cls.get_lom_educational_levels(node)
        current_numeric_level = 3 if len(educational_levels) else -1
        for education_level in educational_levels:
            for higher_education_level, numeric_level in HIGHER_EDUCATION_LEVELS.items():
                if not education_level.startswith(higher_education_level):
                    continue
                # One of the records education levels matches a higher education level.
                # We re-assign current level and stop processing this education level,
                # as it shouldn't match multiple higher education levels
                current_numeric_level = min(current_numeric_level, numeric_level)
                break
            else:
                # No higher education level found inside current education level.
                # Dealing with an "other" means a lower education level than we're interested in.
                # So this record has the lowest possible level. We're done processing this seed.
                current_numeric_level = 0
                break
        return current_numeric_level

    @classmethod
    def get_ideas(cls, node):
        """Split compound vocabulary values on " - " and return unique ideas."""
        compound_ideas = [vocabulary["value"] for vocabulary in node["attributes"].get("vocabularies", [])]
        if not compound_ideas:
            return []
        ideas = []
        for compound_idea in compound_ideas:
            ideas += compound_idea.split(" - ")
        return list(set(ideas))

    @classmethod
    def get_is_restricted(cls, data):
        """Return True when the record's self link contains a restricted set id."""
        link = data["links"]["self"]
        for restricted_set in RESTRICTED_MATERIAL_SETS:
            if restricted_set in link:
                return True
        return False

    @classmethod
    def get_analysis_allowed(cls, node):
        """Return True when derivative-creating analysis is permitted."""
        # We disallow analysis for non-derivative materials as we'll create derivatives in that process
        # NB: any material that is_restricted will also have analysis_allowed set to False
        copyright = SharekitMetadataExtraction.get_copyright(node)
        return (copyright is not None and "nd" not in copyright) and copyright != "yes"

    @classmethod
    def get_is_part_of(cls, node):
        """Return the partOf attribute via a JSON-path lookup."""
        return reach("$.attributes.partOf", node)

    @classmethod
    def get_research_themes(cls, node):
        """Normalize themesResearchObject to a list (possibly empty)."""
        theme_value = node["attributes"].get("themesResearchObject", [])
        if not theme_value:
            return []
        return theme_value if isinstance(theme_value, list) else [theme_value]

    @classmethod
    def get_empty_list(cls, node):
        """Constant extractor: always an empty list."""
        return []

    @classmethod
    def get_none(cls, node):
        """Constant extractor: always None."""
        return None

    @classmethod
    def get_learning_material_themes(cls, node):
        """Normalize themesLearningMaterial to a list (possibly empty)."""
        theme_value = node["attributes"].get("themesLearningMaterial", [])
        if not theme_value:
            return []
        return theme_value if isinstance(theme_value, list) else [theme_value]
# Maps each internal metadata field either to an extraction classmethod on
# SharekitMetadataExtraction or to a JSON-path string evaluated against the
# Sharekit node. Keys prefixed with "#" (e.g. "#is_restricted") follow the
# objective's special-key convention — presumably evaluated against the raw
# record rather than the node; confirm against ExtractProcessor.
SHAREKIT_EXTRACTION_OBJECTIVE = {
    "url": SharekitMetadataExtraction.get_url,
    "files": SharekitMetadataExtraction.get_files,
    "title": "$.attributes.title",
    "language": "$.attributes.language",
    "keywords": "$.attributes.keywords",
    "description": "$.attributes.abstract",
    "mime_type": SharekitMetadataExtraction.get_mime_type,
    "technical_type": SharekitMetadataExtraction.get_technical_type,
    "material_types": SharekitMetadataExtraction.get_material_types,
    "copyright": SharekitMetadataExtraction.get_copyright,
    "copyright_description": SharekitMetadataExtraction.get_none,
    "aggregation_level": "$.attributes.aggregationlevel",
    "authors": SharekitMetadataExtraction.get_authors,
    "publishers": SharekitMetadataExtraction.get_publishers,
    "publisher_date": "$.attributes.publishedAt",
    "lom_educational_levels": SharekitMetadataExtraction.get_lom_educational_levels,
    "lowest_educational_level": SharekitMetadataExtraction.get_lowest_educational_level,
    "disciplines": SharekitMetadataExtraction.get_empty_list,
    "ideas": SharekitMetadataExtraction.get_ideas,
    "from_youtube": SharekitMetadataExtraction.get_from_youtube,
    "#is_restricted": SharekitMetadataExtraction.get_is_restricted,
    "analysis_allowed": SharekitMetadataExtraction.get_analysis_allowed,
    "is_part_of": SharekitMetadataExtraction.get_is_part_of,
    "has_parts": "$.attributes.hasParts",
    "doi": "$.attributes.doi",
    "research_object_type": "$.attributes.typeResearchObject",
    "research_themes": SharekitMetadataExtraction.get_research_themes,
    "parties": SharekitMetadataExtraction.get_empty_list,
    "learning_material_themes": SharekitMetadataExtraction.get_learning_material_themes,
    "consortium": "$.attributes.consortium"
}
| [
"core.constants.HIGHER_EDUCATION_LEVELS.items",
"datagrowth.utils.reach",
"re.compile",
"mimetypes.guess_type",
"django.conf.settings.MIME_TYPE_TO_TECHNICAL_TYPE.get"
] | [((317, 376), 're.compile', 're.compile', (['""".*(youtube\\\\.com|youtu\\\\.be).*"""', 're.IGNORECASE'], {}), "('.*(youtube\\\\.com|youtu\\\\.be).*', re.IGNORECASE)\n", (327, 376), False, 'import re\n'), ((1894, 1963), 'django.conf.settings.MIME_TYPE_TO_TECHNICAL_TYPE.get', 'settings.MIME_TYPE_TO_TECHNICAL_TYPE.get', (["files[0]['mime_type']", 'None'], {}), "(files[0]['mime_type'], None)\n", (1934, 1963), False, 'from django.conf import settings\n'), ((2134, 2154), 'mimetypes.guess_type', 'guess_type', (['file_url'], {}), '(file_url)\n', (2144, 2154), False, 'from mimetypes import guess_type\n'), ((2170, 2232), 'django.conf.settings.MIME_TYPE_TO_TECHNICAL_TYPE.get', 'settings.MIME_TYPE_TO_TECHNICAL_TYPE.get', (['mime_type', '"""unknown"""'], {}), "(mime_type, 'unknown')\n", (2210, 2232), False, 'from django.conf import settings\n'), ((6326, 6360), 'datagrowth.utils.reach', 'reach', (['"""$.attributes.partOf"""', 'node'], {}), "('$.attributes.partOf', node)\n", (6331, 6360), False, 'from datagrowth.utils import reach\n'), ((4374, 4405), 'core.constants.HIGHER_EDUCATION_LEVELS.items', 'HIGHER_EDUCATION_LEVELS.items', ([], {}), '()\n', (4403, 4405), False, 'from core.constants import HIGHER_EDUCATION_LEVELS, RESTRICTED_MATERIAL_SETS\n')] |
#
# Copyright 2019-Present Sonatype Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" test_audit.py , for all your testing of audit py needs """
import unittest
import json
from pathlib import Path
from ..audit.audit import Audit
from ..types.results_decoder import ResultsDecoder
class TestAudit(unittest.TestCase):
    """Unit tests for the Audit class."""

    def setUp(self):
        self.func = Audit()

    def test_call_audit_results_prints_output(self):
        """audit_results should report the number of vulnerabilities found
        in the canned OSS Index response fixture."""
        fixture_path = Path(__file__).parent / "ossindexresponse.txt"
        with open(fixture_path, "r") as handle:
            parsed = json.loads(handle.read(), cls=ResultsDecoder)
            self.assertEqual(
                self.func.audit_results(parsed),
                self.expected_results(),
            )

    @staticmethod
    def expected_results():
        """Number of vulnerabilities expected in the fixture file."""
        return 3
| [
"pathlib.Path"
] | [((1159, 1173), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1163, 1173), False, 'from pathlib import Path\n')] |
import torch
import random
from torch.utils.data import Dataset, DataLoader
from abc import ABC
from models.base_model import Model
from torch.utils.tensorboard import SummaryWriter
from typing import List
class BaseDataset(Dataset, ABC):
    """Abstract base dataset wiring evaluation and visualization hooks
    between a torch ``Dataset`` and a project ``Model``."""

    name = 'base'

    def __init__(self, config: dict, mode: str = 'train'):
        self.config = config
        self.mode = mode
        self.device = config['device']
        self.data_dim = config['data_dim']
        # used to disambiguate summaries when evaluating multiple datasets
        self.summary_name = self.name

    '''
    Note that dataset's __getitem__() returns (x_coord, x_feat, y_coord, y_feat, name)
    But the collated batch returns type of (SparseTensorWrapper, SparseTensorWrapper)
    '''
    def __getitem__(self, idx) \
            -> (torch.tensor, torch.tensor, torch.tensor, torch.tensor, List[str]):
        # sparse tensor and tensor should have equal size
        # Fixed: `raise NotImplemented` raises a TypeError (NotImplemented is
        # not an exception); NotImplementedError is the correct sentinel.
        raise NotImplementedError

    def __iter__(self):
        # sample items indefinitely, uniformly with replacement
        while True:
            idx = random.randint(0, len(self) - 1)
            yield self[idx]

    def collate_fn(self, batch: List) -> dict:
        # convert a list of dicts into a dict of lists, keyed like the items
        batch = {k: [d[k] for d in batch] for k in batch[0]}
        return batch

    def evaluate(self, model: Model, writer: SummaryWriter, step):
        """Run ``model.evaluate`` over this dataset and write its summaries,
        restoring the model's original train/eval mode afterwards."""
        training = model.training
        model.eval()
        data_loader = DataLoader(
            self,
            batch_size=self.config['eval_batch_size'],
            num_workers=self.config['num_workers'],
            collate_fn=self.collate_fn,
            drop_last=False,
        )
        print('')
        eval_losses = []
        for eval_step, data in enumerate(data_loader):
            mode = self.mode
            if len(self.config['eval_datasets']) != 1:
                # tag summaries per-dataset when evaluating several of them
                mode += '_' + self.summary_name
            eval_loss = model.evaluate(data, step, mode)
            eval_losses.append(eval_loss)
            print('\r[Evaluating, Step {:7}, Loss {:5}]'.format(
                eval_step, '%.3f' % eval_loss), end=''
            )
        print('')
        model.write_dict_summaries(step)
        model.train(training)

    def test(self, model: Model, writer: SummaryWriter, step):
        raise NotImplementedError()

    def _resolve_vis_indices(self):
        """Resolve the indices to visualize from the config.

        An int means "sample that many data points with equal spacing";
        a list is used as-is. ``overfit_one_ex`` (when it is a real int
        index) overrides everything with that single example.
        """
        vis_indices = self.config['vis']['indices']
        if isinstance(vis_indices, int):
            # sample data points from n data points with equal interval
            vis_indices = torch.linspace(0, len(self) - 1, vis_indices).int().tolist()
        # override to the index when in overfitting debug mode;
        # exclude bools because isinstance(False, int) is True and would
        # otherwise silently select index 0
        overfit = self.config['overfit_one_ex']
        if isinstance(overfit, int) and not isinstance(overfit, bool):
            vis_indices = torch.tensor([overfit])
        return vis_indices

    def visualize(self, model: Model, options: List, step):
        """Invoke each visualization method named in ``options`` on the model."""
        training = model.training
        model.eval()
        vis_indices = self._resolve_vis_indices()
        for option in options:
            # each option names a visualization method on the model
            if hasattr(model, option):
                getattr(model, option)(self, vis_indices, step)
            else:
                raise ValueError(
                    'model {} has no method {}'.format(
                        model.__class__.__name__, option
                    )
                )
        model.train(training)

    def visualize_test(self, model: Model, writer: SummaryWriter, step):
        """Run the model's test-time visualization on the resolved indices."""
        training = model.training
        model.eval()
        vis_indices = self._resolve_vis_indices()
        model.visualize_test(self, vis_indices, step)
        model.train(training)
| [
"torch.tensor",
"torch.linspace",
"torch.utils.data.DataLoader"
] | [((1192, 1341), 'torch.utils.data.DataLoader', 'DataLoader', (['self'], {'batch_size': "self.config['eval_batch_size']", 'num_workers': "self.config['num_workers']", 'collate_fn': 'self.collate_fn', 'drop_last': '(False)'}), "(self, batch_size=self.config['eval_batch_size'], num_workers=\n self.config['num_workers'], collate_fn=self.collate_fn, drop_last=False)\n", (1202, 1341), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((2370, 2415), 'torch.tensor', 'torch.tensor', (["[self.config['overfit_one_ex']]"], {}), "([self.config['overfit_one_ex']])\n", (2382, 2415), False, 'import torch\n'), ((3193, 3238), 'torch.tensor', 'torch.tensor', (["[self.config['overfit_one_ex']]"], {}), "([self.config['overfit_one_ex']])\n", (3205, 3238), False, 'import torch\n'), ((2189, 2226), 'torch.linspace', 'torch.linspace', (['(0)', '(n - 1)', 'vis_indices'], {}), '(0, n - 1, vis_indices)\n', (2203, 2226), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 23 08:25:37 2019
@author: AdeolaOlalekan
"""
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import BTUTOR, CNAME, Edit_User, QSUBJECT, Post
class login_form(forms.Form):
    """Login form collecting a username and a password."""
    username = forms.CharField(max_length=18)  # , help_text="Just type 'renew'")
    password1 = forms.CharField(widget=forms.PasswordInput)
    def clean_data(self):
        # NOTE(review): the username lookup is immediately overwritten, so
        # only the password value is returned — confirm this is intended.
        data = self.cleaned_data['username']
        data = self.cleaned_data['password1']
        return data
###############################################################################
class ProfileForm(forms.ModelForm):
    """Profile-editing form for Edit_User; the owning user is excluded and
    must be assigned by the view."""
    class Meta:
        model = Edit_User
        fields = ('title', 'first_name', 'last_name', 'bio', 'phone', 'city', 'country', 'organization', 'location', 'birth_date', 'department', 'photo',)
        exclude = ['user']
class SignUpForm(UserCreationForm):
    """Registration form extending Django's UserCreationForm with a
    required email address."""
    email = forms.EmailField(max_length=254, help_text='Required for a valid signup!')

    class Meta:
        model = User
        # password1/password2 are declared by UserCreationForm itself; the
        # previous '<PASSWORD>' entries were corrupted placeholders.
        fields = ('username', 'email', 'password1', 'password2')

    def save(self, commit=True):
        """Persist the new user with their email set.

        UserCreationForm.save() already hashes password1 via set_password;
        the previous direct assignment to ``user.password`` overwrote that
        hash with the raw value, storing the password in plain text.
        """
        user = super(SignUpForm, self).save(commit=False)
        user.email = self.cleaned_data['email']
        if commit:
            user.save()
        return user
class subject_class_term_Form(forms.ModelForm):
    """ModelForm selecting a BTUTOR's class and subject."""
    class Meta:
        model = BTUTOR
        fields = ('Class', 'subject',)
class class_term(forms.ModelForm):
    """ModelForm selecting a BTUTOR's class and term."""
    class Meta:
        model = BTUTOR
        fields = ('Class', 'term', )
class PostForm(forms.ModelForm):
    """ModelForm for creating a Post (account username, subject, text)."""
    class Meta:
        model = Post
        fields = ('Account_Username', 'subject', 'text')
class a_student_form_new(forms.ModelForm):
    """ModelForm entering a QSUBJECT record for a student."""
    class Meta:
        model = QSUBJECT
        fields = ('student_name','test', 'agn','atd', 'exam','tutor',)
class student_name(forms.ModelForm):
    """ModelForm capturing a CNAME student's basic identity fields."""
    class Meta:
        model = CNAME
        fields = ('last_name', 'first_name', 'gender', "birth_date",)
class new_student_name(forms.Form):
    """Search form collecting a student's surname."""
    student_name = forms.CharField(help_text="enter student's surename to search.")
    def clean_renewal_date(self):
        # NOTE(review): Django only auto-invokes clean_<fieldname>() hooks
        # and there is no 'renewal_date' field here, so this method is never
        # called during validation — confirm it should be clean_student_name().
        data = self.cleaned_data['student_name']
        return data
# | [
"django.forms.EmailField",
"django.forms.CharField"
] | [((340, 370), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(18)'}), '(max_length=18)\n', (355, 370), False, 'from django import forms\n'), ((420, 463), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.PasswordInput'}), '(widget=forms.PasswordInput)\n', (435, 463), False, 'from django import forms\n'), ((1010, 1084), 'django.forms.EmailField', 'forms.EmailField', ([], {'max_length': '(254)', 'help_text': '"""Required for a valid signup!"""'}), "(max_length=254, help_text='Required for a valid signup!')\n", (1026, 1084), False, 'from django import forms\n'), ((2214, 2278), 'django.forms.CharField', 'forms.CharField', ([], {'help_text': '"""enter student\'s surename to search."""'}), '(help_text="enter student\'s surename to search.")\n', (2229, 2278), False, 'from django import forms\n')] |
# -*- coding: utf-8 -*-
__all__ = (
"extract_directory",
"cleanup_data_directory",
"dataset_schema",
"validate_directory",
"validate_directory_against_xsd",
)
from .extract_ecospold2 import extract_ecospold2_directory
from ..filesystem import check_cache_directory, get_from_cache, cache_data
import os
def extract_directory(data_path, use_cache=True, use_mp=True):
    """Extract ecospold2 files in directory ``data_path``.

    Reads from and writes to the cache when ``use_cache`` is ``True``.
    Returns datasets in Ocelot internal format.
    """
    directory = os.path.abspath(data_path)
    # fast path: serve previously extracted data straight from the cache
    if use_cache and check_cache_directory(directory):
        print("Using cached ecospold2 data")
        return get_from_cache(directory)
    datasets = extract_ecospold2_directory(directory, use_mp)
    if use_cache:
        cache_data(datasets, directory)
    return datasets
from .cleanup import cleanup_data_directory
from .validate_ecospold2 import validate_directory_against_xsd, validate_directory
from .validate_internal import dataset_schema
| [
"os.path.abspath"
] | [((574, 600), 'os.path.abspath', 'os.path.abspath', (['data_path'], {}), '(data_path)\n', (589, 600), False, 'import os\n')] |
#!/usr/bin/env python3
import stack
import queue
opset = {'*','+','(',")"}
def readinfix(path="infix.txt"):
    """Read an infix expression file and return its characters as a list.

    Args:
        path: file to read; defaults to "infix.txt" so existing callers
            keep working (the path used to be hard-coded).

    Returns:
        A list with one string per character of the file's contents.
    """
    # `with` guarantees the handle is closed (the original leaked it)
    with open(path) as fh:
        chars = list(fh.read())
    print(chars)
    return chars
def eval():
    """Convert the infix expression from infix.txt toward postfix notation.

    Operands are emitted directly; operators are pushed on a stack and the
    top operator is emitted after each operand.

    NOTE(review): the name shadows the builtin ``eval``; kept because the
    __main__ guard calls it by this name.
    """
    evlist = readinfix()
    postlist = []
    st = stack.Stack()
    for op in evlist:
        if op in opset:
            st.push(op)
        else:
            postlist.append(op)
            print(op)
            if st.isEmpty():
                continue
            # Pop exactly once. The original called st.pop() twice (once to
            # append, once inside print), silently discarding an operator.
            top = st.pop()
            postlist.append(top)
            print(top)
    print(postlist)
if __name__ == "__main__":
    # Run the infix-to-postfix conversion when executed as a script.
    eval()
| [
"stack.Stack"
] | [((247, 260), 'stack.Stack', 'stack.Stack', ([], {}), '()\n', (258, 260), False, 'import stack\n')] |
import random
from qlazy import QState
def classical_strategy(trials=1000):
    """Play the CHSH game with the optimal deterministic classical strategy.

    Alice and Bob always answer 0, which wins whenever Charlie's bits are
    not both 1, i.e. with probability 3/4 on uniform random inputs.

    Args:
        trials: number of game rounds to simulate.

    Returns:
        The empirical win probability (also printed, as before), so that
        callers can use the result programmatically.
    """
    win_cnt = 0
    for _ in range(trials):
        # random bits chosen by the referee Charlie
        x = random.randint(0, 1)
        y = random.randint(0, 1)
        # deterministic responses by Alice and Bob
        a = 0
        b = 0
        # the pair wins iff (x AND y) equals (a + b) mod 2
        if (x and y) == (a + b) % 2:
            win_cnt += 1
    win_prob = win_cnt / trials
    print("== result of classical strategy (trials:{0:d}) ==".format(trials))
    print("* win prob. = ", win_prob)
    return win_prob
def quantum_strategy(trials=1000):
    """Play the CHSH game with the quantum strategy on a shared Bell pair
    and print the empirical win probability."""
    win_cnt = 0
    for _ in range(trials):
        # random bits by Charlie (x,y)
        x = random.randint(0, 1)
        y = random.randint(0, 1)
        # make entangled 2 qubits (one for Alice and another for Bob)
        qs = QState(2).h(0).cx(0, 1)
        # Alice measures qubit 0: Z-basis when x == 0, X-basis when x == 1
        if x == 0:
            outcome_a = qs.m([0], shots=1, angle=0.0, phase=0.0).lst
        else:
            outcome_a = qs.mx([0], shots=1).lst
        a = 0 if outcome_a == 0 else 1
        # Bob measures qubit 1 in the Ry(0.25*PI) or Ry(-0.25*PI) basis
        if y == 0:
            outcome_b = qs.m([1], shots=1, angle=0.25, phase=0.0).lst
        else:
            outcome_b = qs.m([1], shots=1, angle=-0.25, phase=0.0).lst
        b = 0 if outcome_b == 0 else 1
        # count up if win
        if (x and y) == (a + b) % 2:
            win_cnt += 1
    print("== result of quantum strategy (trials:{0:d}) ==".format(trials))
    print("* win prob. = ", win_cnt/trials)
if __name__ == '__main__':
    # Compare the classical and quantum CHSH strategies (1000 rounds each).
    classical_strategy()
    quantum_strategy()
| [
"qlazy.QState",
"random.randint"
] | [((174, 194), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (188, 194), False, 'import random\n'), ((206, 226), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (220, 226), False, 'import random\n'), ((659, 679), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (673, 679), False, 'import random\n'), ((691, 711), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (705, 711), False, 'import random\n'), ((795, 804), 'qlazy.QState', 'QState', (['(2)'], {}), '(2)\n', (801, 804), False, 'from qlazy import QState\n')] |
from decimal import Decimal
def ensure_decimal(value):
    """Coerce ``value`` to ``Decimal``, passing existing Decimals through."""
    if isinstance(value, Decimal):
        return value
    return Decimal(value)
| [
"decimal.Decimal"
] | [((109, 123), 'decimal.Decimal', 'Decimal', (['value'], {}), '(value)\n', (116, 123), False, 'from decimal import Decimal\n')] |
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K
from keras.models import Sequential,Model
from keras.layers import *
from keras.optimizers import SGD,Adam
class OurLayer(Layer):
    """Keras Layer base that can call nested layers while registering their
    weights and updates on the outer layer."""
    def reuse(self, layer, *args, **kwargs):
        """Call ``layer`` on the given inputs, building it on first use and
        merging its weights/updates into this layer's bookkeeping so they
        are trained and applied as part of the outer layer."""
        if not layer.built:
            # infer the input shape from either positional or keyword inputs
            if len(args) > 0:
                inputs = args[0]
            else:
                inputs = kwargs['inputs']
            if isinstance(inputs, list):
                input_shape = [K.int_shape(x) for x in inputs]
            else:
                input_shape = K.int_shape(inputs)
            layer.build(input_shape)
        outputs = layer.call(*args, **kwargs)
        # adopt the inner layer's weights so the optimizer can see them
        for w in layer.trainable_weights:
            if w not in self._trainable_weights:
                self._trainable_weights.append(w)
        for w in layer.non_trainable_weights:
            if w not in self._non_trainable_weights:
                self._non_trainable_weights.append(w)
        # propagate any pending state updates from the inner layer
        for u in layer.updates:
            if not hasattr(self, '_updates'):
                self._updates = []
            if u not in self._updates:
                self._updates.append(u)
        return outputs
class SelfAttention(OurLayer):
    """Multi-head self-attention: applies an inner attention layer with
    query = key = value = the input sequence."""
    def __init__(self, heads, size_per_head, key_size=None,
                 mask_right=False, **kwargs):
        # heads: number of attention heads
        # size_per_head: output dimension contributed by each head
        # key_size: key/query projection size; defaults to size_per_head
        # mask_right: forwarded to the attention layer — presumably masks
        #   rightward (future) positions; confirm against Attention_1
        super(SelfAttention, self).__init__(**kwargs)
        self.heads = heads
        self.size_per_head = size_per_head
        self.out_dim = heads * size_per_head
        self.key_size = key_size if key_size else size_per_head
        self.mask_right = mask_right
    def build(self, input_shape):
        super(SelfAttention, self).build(input_shape)
        # NOTE(review): Attention_1 is not defined in this file — it is
        # expected to come from the wildcard keras.layers import or another
        # project module; confirm it is in scope.
        self.attention = Attention_1(
            self.heads,
            self.size_per_head,
            self.key_size,
            self.mask_right
        )
    def call(self, inputs):
        # With a mask, inputs is [x, x_mask]; the same tensor serves as
        # query, key and value for the inner attention layer.
        if isinstance(inputs, list):
            x, x_mask = inputs
            o = self.reuse(self.attention, [x, x, x, x_mask, x_mask])
        else:
            x = inputs
            o = self.reuse(self.attention, [x, x, x])
        return o
    def compute_output_shape(self, input_shape):
        # output keeps (batch, time) and projects features to out_dim
        if isinstance(input_shape, list):
            return (input_shape[0][0], input_shape[0][1], self.out_dim)
        else:
            return (input_shape[0], input_shape[1], self.out_dim)
def selfattention_timeseries(nb_class, input_dim,):
    """Build a Keras model: self-attention over a time series, global
    average pooling, dropout, and a single-unit relu output head.

    NOTE(review): ``nb_class`` is never used — the head is Dense(1);
    confirm whether it should be Dense(nb_class).
    """
    model_input = Input(shape=input_dim)
    #model_input = SinCosPositionEmbedding(4)(model_input)
    O_seq = SelfAttention(16,32)(model_input)
    O_seq = GlobalAveragePooling1D()(O_seq)
    O_seq = Dropout(0.5)(O_seq)
    outputs = Dense(1,activation='relu')(O_seq)
    model = Model(inputs=model_input, outputs=outputs)
    return model
"tensorflow.keras.backend.int_shape",
"keras.models.Model"
] | [((2700, 2742), 'keras.models.Model', 'Model', ([], {'inputs': 'model_input', 'outputs': 'outputs'}), '(inputs=model_input, outputs=outputs)\n', (2705, 2742), False, 'from keras.models import Sequential, Model\n'), ((575, 594), 'tensorflow.keras.backend.int_shape', 'K.int_shape', (['inputs'], {}), '(inputs)\n', (586, 594), True, 'from tensorflow.keras import backend as K\n'), ((495, 509), 'tensorflow.keras.backend.int_shape', 'K.int_shape', (['x'], {}), '(x)\n', (506, 509), True, 'from tensorflow.keras import backend as K\n')] |
from subprocess import PIPE, Popen
# Converts celsius temps to fahrenheit
def c2f(celsius):
    """Convert a temperature from degrees Celsius to Fahrenheit."""
    return celsius * 1.8 + 32
# Gets the CPU temperature in degrees C
# Gets the CPU temperature in degrees C
def get_cpu_temperature():
    """Return the Raspberry Pi CPU temperature in degrees Celsius.

    Runs ``vcgencmd measure_temp`` and parses its output, which looks
    like ``temp=48.3'C``.
    """
    process = Popen(['vcgencmd', 'measure_temp'], stdout=PIPE)
    output, _error = process.communicate()
    # Decode the raw bytes instead of parsing the repr() of a bytes object
    # (the old str(output) approach only worked by accident on the b'...'
    # quoting) and pull the number out of "temp=48.3'C".
    text = output.decode('utf-8', errors='replace')
    return float(text.split('=')[1].split("'")[0])
def debugOutCFH(sensor, valueC, valueF, valueH):
    """Print a labelled block of temperature (C/F) and humidity readings."""
    for line in (
        f'Debug Values [{sensor}]:',
        f'C: {valueC}',
        f'F: {valueF}',
        f'H: {valueH}%',
        '',
    ):
        print(line)
def debugOutCF(sensor, valueC, valueF):
    """Print a labelled block of Celsius/Fahrenheit debug readings."""
    for line in (
        f'Debug Values [{sensor}]:',
        f'C: {valueC}',
        f'F: {valueF}',
        '',
    ):
        print(line)
| [
"subprocess.Popen"
] | [((213, 261), 'subprocess.Popen', 'Popen', (["['vcgencmd', 'measure_temp']"], {'stdout': 'PIPE'}), "(['vcgencmd', 'measure_temp'], stdout=PIPE)\n", (218, 261), False, 'from subprocess import PIPE, Popen\n')] |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
import json
from django import forms
from common.forms import BaseComponentForm, TypeCheckField
from components.component import Component
from .toolkit import configs
class GetHostList(Component):
    """
    apiLabel get host list
    apiMethod GET

    ### Functional Description

    Get host list

    ### Request Parameters

    {{ common_args_desc }}

    #### Interface Parameters

    | Field | Type | Required | Description |
    |-----------|------------|--------|------------|
    | bk_biz_id | int | Yes | Business ID |
    | ip_list | array | No | Host IP address, including ip and bk_cloud_id, bk_cloud_id represents cloud area ID |

    ### Request Parameters Example

    ```python
    {
        "bk_app_code": "esb_test",
        "bk_app_secret": "xxx",
        "bk_token": "xxx",
        "bk_biz_id": 1,
        "ip_list": [
            {
                "ip": "10.0.0.1",
                "bk_cloud_id": 0
            },
            {
                "ip": "10.0.0.2",
                "bk_cloud_id": 0
            }
        ]
    }
    ```

    ### Return Result Example

    ```python
    {
        "result": true,
        "code": 0,
        "message": "",
        "data": [
            {
                "inner_ip": "10.0.0.1",
                "bk_cloud_id": 0,
                "host_name": "db-1",
                "maintainer": "admin"
            },
            {
                "inner_ip": "10.0.0.2",
                "bk_cloud_id": 2,
                "host_name": "db-2",
                "maintainer": "admin"
            }
        ]
    }
    ```
    """

    # Name of the system to which the component belongs
    sys_name = configs.SYSTEM_NAME

    # Form Processing Parameters Validation
    class Form(BaseComponentForm):
        bk_biz_id = forms.CharField(label='Business ID', required=True)
        ip_list = TypeCheckField(label='Host IP address', promise_type=list, required=False)

        # The data returned in clean method is available through the component's form_data property
        def clean(self):
            # The first key was a corrupted '<KEY>' placeholder; the field
            # declared above is bk_biz_id.
            return self.get_cleaned_data_when_exist(keys=['bk_biz_id', 'ip_list'])

    # Component Processing Access
    def handle(self):
        """Validate input, call the backend host-list API and normalize the
        response into the component result payload."""
        # Get the data processed in Form clean
        data = self.form_data

        # Set Current Operator
        data['operator'] = self.current_user.username

        # Request System Interface
        try:
            response = self.outgoing.http_client.post(
                host=configs.host,
                path='/hcp/get_host_list/',
                data=json.dumps(data),
            )
        except Exception:
            # TODO: To delete, only fake data for testing.
            # NOTE(review): this broad except hides real transport errors;
            # narrow it once the fake fallback is removed.
            response = {
                'code': 0,
                'data': [
                    {
                        'inner_ip': '10.0.0.1',
                        'bk_cloud_id': 0,
                        'host_name': 'just_for_test',
                        'maintainer': 'admin',
                    },
                ]
            }

        # Analyze the Results
        code = response['code']
        if code == 0:
            result = {
                'result': True,
                'data': response['data'],
            }
        else:
            result = {
                'result': False,
                'message': response['message']
            }

        # Set the component return result, and payload is the actual return result of component
        self.response.payload = result
| [
"common.forms.TypeCheckField",
"json.dumps",
"django.forms.CharField"
] | [((2535, 2586), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Business ID"""', 'required': '(True)'}), "(label='Business ID', required=True)\n", (2550, 2586), False, 'from django import forms\n'), ((2605, 2679), 'common.forms.TypeCheckField', 'TypeCheckField', ([], {'label': '"""Host IP address"""', 'promise_type': 'list', 'required': '(False)'}), "(label='Host IP address', promise_type=list, required=False)\n", (2619, 2679), False, 'from common.forms import BaseComponentForm, TypeCheckField\n'), ((3309, 3325), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3319, 3325), False, 'import json\n')] |
import json
import logging
from json import JSONDecodeError
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden,
)
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from pretix.base.models import Organizer
from . import tasks
logger = logging.getLogger(__name__)
@csrf_exempt
@require_POST
def webhook(request, *args, **kwargs):
    """Receive a Google Pay passes webhook and hand it off for async processing.

    Validates the caller's user agent and content type, parses the JSON
    payload and, when it carries the expected signature envelope, schedules
    processing for the resolved organizer. Always answers with a proper
    HttpResponse subclass.
    """
    # Google is not actually sending their documented UA m(
    # if request.META['HTTP_USER_AGENT'] != 'Google-Valuables':
    if request.META['HTTP_USER_AGENT'] != "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)":
        return HttpResponseForbidden()

    if request.META.get('CONTENT_TYPE') != 'application/json':
        return HttpResponseBadRequest()

    try:
        webhook_json = json.loads(request.body.decode('utf-8'))
    except JSONDecodeError:
        # Was `return False`, which is not a valid view response; malformed
        # JSON is a client error.
        return HttpResponseBadRequest()

    if all(k in webhook_json for k in ('signature', 'intermediateSigningKey', 'protocolVersion', 'signedMessage')):
        organizer = Organizer.objects.filter(
            slug=request.resolver_match.kwargs['organizer'],
        ).first()
        if organizer is None:
            # Unknown organizer slug: nothing to process (previously this
            # crashed with an AttributeError on None).
            return HttpResponseForbidden()
        tasks.process_webhook.apply_async(
            args=(request.body.decode('utf-8'), organizer.settings.googlepaypasses_issuer_id)
        )

    return HttpResponse()
| [
"logging.getLogger",
"django.http.HttpResponseBadRequest",
"django.http.HttpResponse",
"django.http.HttpResponseForbidden",
"pretix.base.models.Organizer.objects.filter"
] | [((333, 360), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (350, 360), False, 'import logging\n'), ((1339, 1353), 'django.http.HttpResponse', 'HttpResponse', ([], {}), '()\n', (1351, 1353), False, 'from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\n'), ((686, 709), 'django.http.HttpResponseForbidden', 'HttpResponseForbidden', ([], {}), '()\n', (707, 709), False, 'from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\n'), ((789, 813), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', ([], {}), '()\n', (811, 813), False, 'from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\n'), ((1074, 1147), 'pretix.base.models.Organizer.objects.filter', 'Organizer.objects.filter', ([], {'slug': "request.resolver_match.kwargs['organizer']"}), "(slug=request.resolver_match.kwargs['organizer'])\n", (1098, 1147), False, 'from pretix.base.models import Organizer\n')] |
# Packaging script for the ``pynomial`` distribution.
import sys
from setuptools import setup, find_packages
setup(
    name='pynomial',
    version='0.0.0',
    # Auto-discover every package under the source tree.
    packages=find_packages(),
    author='<NAME>',
    author_email='<EMAIL>',
    description="python package for combinatorial problems",
    url="https://github.com/PaulDodd/pynomial.git",
    install_requires=[], # install_requires or something else?
)
| [
"setuptools.find_packages"
] | [((118, 133), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (131, 133), False, 'from setuptools import setup, find_packages\n')] |
"""``AppendableExcelDataSet`` loads/saves data from/to a local Excel file opened in append mode.
It uses pandas to handle the Excel file.
"""
from copy import deepcopy
from pathlib import Path, PurePosixPath
from typing import Any, Dict
import pandas as pd
from kedro.io.core import AbstractDataSet, DataSetError
class AppendableExcelDataSet(AbstractDataSet):
    """``AppendableExcelDataSet`` loads/saves data from/to a local Excel file opened in
    append mode. It uses pandas to handle the Excel file.
    Example adding a catalog entry with
    `YAML API <https://kedro.readthedocs.io/en/stable/05_data/\
    01_data_catalog.html#using-the-data-catalog-with-the-yaml-api>`_:
    .. code-block:: yaml
        >>> # AppendableExcelDataSet creates a new sheet for every dataset
        >>> # ExcelDataSet restricts one dataset per file as it is overwritten
        >>>
        >>> preprocessed_companies:
        >>>   type: pandas.AppendableExcelDataSet
        >>>   filepath: data/02_intermediate/preprocessed.xlsx  # assumes file already exists
        >>>   save_args:
        >>>     sheet_name: preprocessed_companies
        >>>   load_args:
        >>>     sheet_name: preprocessed_companies
        >>>
        >>> preprocessed_shuttles:
        >>>   type: pandas.AppendableExcelDataSet
        >>>   filepath: data/02_intermediate/preprocessed.xlsx
        >>>   save_args:
        >>>     sheet_name: preprocessed_shuttles
        >>>   load_args:
        >>>     sheet_name: preprocessed_shuttles
    Example using Python API:
    ::
        >>> from kedro.extras.datasets.pandas import AppendableExcelDataSet
        >>> from kedro.extras.datasets.pandas import ExcelDataSet
        >>> import pandas as pd
        >>>
        >>> data_1 = pd.DataFrame({'col1': [1, 2], 'col2': [4, 5],
        >>>                        'col3': [5, 6]})
        >>>
        >>> data_2 = pd.DataFrame({'col1': [7, 8], 'col2': [5, 7]})
        >>>
        >>> regular_ds = ExcelDataSet(filepath="/tmp/test.xlsx")
        >>> appendable_ds = AppendableExcelDataSet(
        >>>     filepath="/tmp/test.xlsx",
        >>>     save_args={"sheet_name": "my_sheet"},
        >>>     load_args={"sheet_name": "my_sheet"}
        >>> )
        >>>
        >>> regular_ds.save(data_1)
        >>> appendable_ds.save(data_2)
        >>> reloaded = appendable_ds.load()
        >>> assert data_2.equals(reloaded)
    """
    DEFAULT_LOAD_ARGS = {"engine": "openpyxl"}
    DEFAULT_SAVE_ARGS = {"index": False}

    def __init__(
        self,
        filepath: str,
        load_args: Dict[str, Any] = None,
        save_args: Dict[str, Any] = None,
    ) -> None:
        """Creates a new instance of ``AppendableExcelDataSet`` pointing to an existing local
        Excel file to be opened in append mode.
        Args:
            filepath: Filepath in POSIX format to an existing local Excel file.
            load_args: Pandas options for loading Excel files.
                Here you can find all available arguments:
                https://pandas.pydata.org/pandas-docs/stable/generated/pandas.read_excel.html
                All defaults are preserved, but "engine", which is set to "openpyxl".
            save_args: Pandas options for saving Excel files.
                Here you can find all available arguments:
                https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_excel.html
                All defaults are preserved, but "index", which is set to False.
                If you would like to specify options for the `ExcelWriter`,
                you can include them under "writer" key. Here you can
                find all available arguments:
                https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.ExcelWriter.html
                Note: `mode` option of `ExcelWriter` is set to `a` and it can not be overridden.
        """
        self._filepath = PurePosixPath(filepath)
        # Handle default load and save arguments
        self._load_args = deepcopy(self.DEFAULT_LOAD_ARGS)
        if load_args is not None:
            self._load_args.update(load_args)
        save_args = deepcopy(save_args) or {}
        self._save_args = deepcopy(self.DEFAULT_SAVE_ARGS)
        self._writer_args = save_args.pop("writer", {})  # type: Dict[str, Any]
        self._writer_args.setdefault("engine", "openpyxl")
        # ``save_args`` is guaranteed to be a dict at this point (possibly
        # empty), so the former ``if save_args is not None`` guard was dead
        # code; apply the user overrides directly.
        self._save_args.update(save_args)
        # Use only append mode
        self._writer_args["mode"] = "a"

    def _describe(self) -> Dict[str, Any]:
        return dict(
            filepath=self._filepath,
            load_args=self._load_args,
            save_args=self._save_args,
            writer_args=self._writer_args,
        )

    def _load(self) -> pd.DataFrame:
        return pd.read_excel(str(self._filepath), **self._load_args)

    def _save(self, data: pd.DataFrame) -> None:
        # pylint: disable=abstract-class-instantiated
        try:
            with pd.ExcelWriter(str(self._filepath), **self._writer_args) as writer:
                data.to_excel(writer, **self._save_args)
        except FileNotFoundError as exc:
            # Append mode requires the target workbook to already exist.
            raise DataSetError(
                f"`{self._filepath}` Excel file not found. The file cannot be opened in "
                f"append mode."
            ) from exc

    def _exists(self) -> bool:
        return Path(self._filepath.as_posix()).is_file()
| [
"pathlib.PurePosixPath",
"kedro.io.core.DataSetError",
"copy.deepcopy"
] | [((3932, 3955), 'pathlib.PurePosixPath', 'PurePosixPath', (['filepath'], {}), '(filepath)\n', (3945, 3955), False, 'from pathlib import Path, PurePosixPath\n'), ((4032, 4064), 'copy.deepcopy', 'deepcopy', (['self.DEFAULT_LOAD_ARGS'], {}), '(self.DEFAULT_LOAD_ARGS)\n', (4040, 4064), False, 'from copy import deepcopy\n'), ((4218, 4250), 'copy.deepcopy', 'deepcopy', (['self.DEFAULT_SAVE_ARGS'], {}), '(self.DEFAULT_SAVE_ARGS)\n', (4226, 4250), False, 'from copy import deepcopy\n'), ((4166, 4185), 'copy.deepcopy', 'deepcopy', (['save_args'], {}), '(save_args)\n', (4174, 4185), False, 'from copy import deepcopy\n'), ((5200, 5309), 'kedro.io.core.DataSetError', 'DataSetError', (['f"""`{self._filepath}` Excel file not found. The file cannot be opened in append mode."""'], {}), "(\n f'`{self._filepath}` Excel file not found. The file cannot be opened in append mode.'\n )\n", (5212, 5309), False, 'from kedro.io.core import AbstractDataSet, DataSetError\n')] |
from .makeFullPdf import makeFullPdf
from .parseImage import parseImage
from .makePdf import makePdf
import requests,os,os.path,sys,time,json
from bs4 import BeautifulSoup
def readComicsOnlineRu():
    """Download every configured comic from readcomicsonline.ru.

    Reads ``config.json`` (key ``readComicsOnlineRu`` maps comic slug ->
    first chapter to fetch), scrapes the chapter list for each comic,
    downloads each new chapter's pages, builds per-chapter PDFs, and
    finally merges them into one full PDF per comic.  Progress is
    persisted back into ``config.json`` after every chapter so the run
    can resume after an interruption.
    """
    # Keep retrying until config.json can be read successfully.
    while True:
        try:
            with open('config.json', 'r', encoding="utf-8") as f:
                books = json.load(f)
            library = [*books['readComicsOnlineRu']]
            # print(library)
            # return
            if not library:
                # print("No books found!")
                return
            # print("List of books >")
            # for i in library:
            #   print (" > '"+i+"' download will start from Chapter-"+books['readComicsOnlineRu'][i])
        except:
            # raise
            # print("No 'config.json' file found!")
            # return
            continue
        break
    # if not confirm():
    #   return
    # All downloads land in a sibling ``comicDownloads`` directory.
    originDirectory = os.getcwd()
    os.chdir('..')
    if not os.path.exists('comicDownloads' + os.sep):
        os.makedirs('comicDownloads' + os.sep)
    os.chdir('comicDownloads' + os.sep)
    for comicName in library:
        incompleteUrl = "https://readcomicsonline.ru/comic/" + comicName + "/"
        # Retry the index page fetch until the network cooperates.
        tryAgain = 0
        while tryAgain == 0:
            try:
                page_response = requests.get(incompleteUrl, timeout=5)
                soup = BeautifulSoup(page_response.content, "html.parser")
            except:
                print("Could not connect, trying again in 5 seconds!")
                time.sleep(5)
                continue
                # os.chdir('..')
                # os.chdir('comicMaker'+os.sep)
                # readComicsOnlineRu()
                # return
            tryAgain = 1
        # Collect chapter identifiers newer than the configured start chapter.
        chapterNum = []
        totalChaptersToDownload = 0
        for li in soup.findAll('li', attrs={'class': 'volume-0'}):
            # validChapterNum=li.find('a').contents[0].split("#")[1]
            validChapterNum = li.find('a')['href'].split(comicName + "/")[1]
            try:
                if float(validChapterNum) >= float(books['readComicsOnlineRu'][comicName]):
                    chapterNum.append(validChapterNum)
                    totalChaptersToDownload += 1
            except:
                # Non-numeric chapter ids are always downloaded.
                chapterNum.append(validChapterNum)
                totalChaptersToDownload += 1
        # Site lists newest first; reverse to download oldest -> newest.
        chapterNum.reverse()
        # print(chapterNum)
        # return
        parentDir = comicName + os.sep
        if os.path.exists(parentDir):
            print(comicName + " already exists.")
        else:
            os.makedirs(parentDir)
        print(" Opening " + comicName + " >")
        os.chdir(parentDir)
        if totalChaptersToDownload > 1:
            for i in chapterNum:
                # Persist resume point before fetching the chapter.
                books['readComicsOnlineRu'][comicName] = str(i)
                tryAgain = 0
                while tryAgain == 0:
                    try:
                        with open(originDirectory + os.sep + 'config.json', 'w', encoding="utf-8") as file:
                            json.dump(books, file, indent=4)
                    except:
                        continue
                    tryAgain = 1
                chapter = i
                currentDir = chapter.replace('.', '-') + os.sep
                if os.path.exists(currentDir):
                    print(" " + comicName + " > " + chapter.replace('.', '-') + " already exists.")
                else:
                    os.makedirs(currentDir)
                    print(" Opening " + comicName + " > " + chapter + " > (" + str(totalChaptersToDownload) + " Remaining) >")
                    os.chdir(currentDir)
                    completeUrl = incompleteUrl + i + "/"
                    parseImage.readComicsOnlineRu(comicName, completeUrl, chapter)
                    makePdf.readComicsOnlineRu(chapter)
                    os.chdir("..")
                totalChaptersToDownload -= 1
            makeFullPdf.readComicsOnlineRu(comicName)
        else:
            print(" < " + comicName + " already fully downloaded.")
        os.chdir("..")
        print(" << Download finished of " + comicName + " <")
    os.chdir(originDirectory)
    return
"os.path.exists",
"os.makedirs",
"requests.get",
"os.getcwd",
"os.chdir",
"bs4.BeautifulSoup",
"time.sleep",
"json.load",
"json.dump"
] | [((733, 744), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (742, 744), False, 'import requests, os, os.path, sys, time, json\n'), ((746, 760), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (754, 760), False, 'import requests, os, os.path, sys, time, json\n'), ((850, 885), 'os.chdir', 'os.chdir', (["('comicDownloads' + os.sep)"], {}), "('comicDownloads' + os.sep)\n", (858, 885), False, 'import requests, os, os.path, sys, time, json\n'), ((3117, 3142), 'os.chdir', 'os.chdir', (['originDirectory'], {}), '(originDirectory)\n', (3125, 3142), False, 'import requests, os, os.path, sys, time, json\n'), ((769, 810), 'os.path.exists', 'os.path.exists', (["('comicDownloads' + os.sep)"], {}), "('comicDownloads' + os.sep)\n", (783, 810), False, 'import requests, os, os.path, sys, time, json\n'), ((812, 850), 'os.makedirs', 'os.makedirs', (["('comicDownloads' + os.sep)"], {}), "('comicDownloads' + os.sep)\n", (823, 850), False, 'import requests, os, os.path, sys, time, json\n'), ((1922, 1947), 'os.path.exists', 'os.path.exists', (['parentDir'], {}), '(parentDir)\n', (1936, 1947), False, 'import requests, os, os.path, sys, time, json\n'), ((2060, 2079), 'os.chdir', 'os.chdir', (['parentDir'], {}), '(parentDir)\n', (2068, 2079), False, 'import requests, os, os.path, sys, time, json\n'), ((3049, 3063), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (3057, 3063), False, 'import requests, os, os.path, sys, time, json\n'), ((1999, 2021), 'os.makedirs', 'os.makedirs', (['parentDir'], {}), '(parentDir)\n', (2010, 2021), False, 'import requests, os, os.path, sys, time, json\n'), ((288, 300), 'json.load', 'json.load', (['f'], {}), '(f)\n', (297, 300), False, 'import requests, os, os.path, sys, time, json\n'), ((1040, 1078), 'requests.get', 'requests.get', (['incompleteUrl'], {'timeout': '(5)'}), '(incompleteUrl, timeout=5)\n', (1052, 1078), False, 'import requests, os, os.path, sys, time, json\n'), ((1090, 1141), 'bs4.BeautifulSoup', 'BeautifulSoup', 
(['page_response.content', '"""html.parser"""'], {}), "(page_response.content, 'html.parser')\n", (1103, 1141), False, 'from bs4 import BeautifulSoup\n'), ((2477, 2503), 'os.path.exists', 'os.path.exists', (['currentDir'], {}), '(currentDir)\n', (2491, 2503), False, 'import requests, os, os.path, sys, time, json\n'), ((2725, 2745), 'os.chdir', 'os.chdir', (['currentDir'], {}), '(currentDir)\n', (2733, 2745), False, 'import requests, os, os.path, sys, time, json\n'), ((2891, 2905), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (2899, 2905), False, 'import requests, os, os.path, sys, time, json\n'), ((1216, 1229), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1226, 1229), False, 'import requests, os, os.path, sys, time, json\n'), ((2597, 2620), 'os.makedirs', 'os.makedirs', (['currentDir'], {}), '(currentDir)\n', (2608, 2620), False, 'import requests, os, os.path, sys, time, json\n'), ((2332, 2364), 'json.dump', 'json.dump', (['books', 'file'], {'indent': '(4)'}), '(books, file, indent=4)\n', (2341, 2364), False, 'import requests, os, os.path, sys, time, json\n')] |
import sys
import pickle
from krips_alpha import krippendorff_alpha, nominal_metric
def get_model_labels(model_output):
    """Convert a list of model score dicts into binary string labels.

    Each entry maps to '0' when its positive score exceeds the negative
    one and '1' for the opposite; ties are printed as errors and skipped.
    Both the ``pos_score``/``neg_score`` and ``pos``/``neg`` key
    conventions are supported.
    """
    labels = []
    for entry in model_output:
        try:
            positive, negative = entry['pos_score'], entry['neg_score']
        except KeyError:
            # Fall back to the short key convention.
            positive, negative = entry['pos'], entry['neg']
        if positive > negative:
            labels.append('0')
        elif negative > positive:
            labels.append('1')
        else:
            print(entry, "error")
    return labels
annotations = pickle.load(open(sys.argv[1], 'rb'))
#print(len(annotations[0]))
alpha = krippendorff_alpha(annotations, nominal_metric)
#print(alpha)
model_output = pickle.load(open(sys.argv[2], 'rb'))
print(len(model_output))
model_labels = get_model_labels(model_output)
annotations.append(model_labels)
print(len(annotations))
model_alpha = krippendorff_alpha(annotations, nominal_metric)
print(model_alpha)
| [
"krips_alpha.krippendorff_alpha"
] | [((615, 662), 'krips_alpha.krippendorff_alpha', 'krippendorff_alpha', (['annotations', 'nominal_metric'], {}), '(annotations, nominal_metric)\n', (633, 662), False, 'from krips_alpha import krippendorff_alpha, nominal_metric\n'), ((873, 920), 'krips_alpha.krippendorff_alpha', 'krippendorff_alpha', (['annotations', 'nominal_metric'], {}), '(annotations, nominal_metric)\n', (891, 920), False, 'from krips_alpha import krippendorff_alpha, nominal_metric\n')] |
#!/usr/bin/env python
import argparse
import os
import platform
import re
import shutil
import subprocess
import sys
# Python versions this checker knows how to verify and install.
SUPPORTED_VERSIONS = ('3.6', '3.7')
# Platform detection flags used to pick the right fix suggestions.
IS_DEBIAN = platform.system() == 'Linux' and os.path.exists('/etc/debian_version')
IS_OLD_UBUNTU = (IS_DEBIAN and os.path.exists('/etc/lsb-release')
                 and re.search('RELEASE=1[46]', open('/etc/lsb-release').read()))
IS_MACOS = platform.system() == 'Darwin'
# Prefix privileged commands with sudo only when not already running as root.
SUDO = 'sudo ' if os.getuid() else ''
parser = argparse.ArgumentParser(description='Check and fix Python installation')
parser.add_argument('--autofix', action='store_true', help='Automatically fix any problems found')
parser.add_argument('--version', default=SUPPORTED_VERSIONS[0], choices=SUPPORTED_VERSIONS,
                    help='Python version to check')
args = parser.parse_args()
PY_VERSION = args.version
AUTOFIX = args.autofix
def check_sudo():
    """Verify that sudo is available; exit with guidance when it is not."""
    if run('which sudo', return_output=True):
        return
    error('! sudo is not installed.')
    print(' Please ask an administrator to install it and run this again.')
    sys.exit(1)
def check_apt():
    """Prepare apt for unattended use and make sure apt-utils is present."""
    # noninteractive suppresses debconf prompts during automated installs.
    os.environ['DEBIAN_FRONTEND'] = 'noninteractive'
    run(SUDO + 'apt-get install -y apt-utils', return_output=True)
def check_curl():
    """Ensure curl exists, suggesting an apt install on Debian systems."""
    if run('which curl', return_output=True):
        return
    error('! curl is not installed.')
    if IS_DEBIAN:
        raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y curl')
    sys.exit(1)
def check_python():
    """Verify the requested Python version is installed, else suggest a fix."""
    py3_path = run('which python' + PY_VERSION, return_output=True)
    if not py3_path:
        error('! Python ' + PY_VERSION + ' is not installed.')
        if '--version' not in sys.argv:
            print(' autopip supports Python {}.'.format(', '.join(SUPPORTED_VERSIONS))
                  + ' To check a different version, re-run using "python - --version x.y"')
        if IS_OLD_UBUNTU:
            # Older Ubuntu releases need the deadsnakes PPA for newer Pythons.
            raise AutoFixSuggestion('To install, run',
                                    (SUDO + 'apt-get update',
                                     SUDO + 'apt-get install -y software-properties-common',
                                     SUDO + 'add-apt-repository -y ppa:deadsnakes/ppa',
                                     SUDO + 'apt-get update',
                                     SUDO + 'apt-get install -y python' + PY_VERSION))
        elif IS_DEBIAN:
            raise AutoFixSuggestion('To install, run',
                                    (SUDO + 'apt-get update', SUDO + 'apt-get install -y python' + PY_VERSION))
        elif IS_MACOS:
            raise AutoFixSuggestion('To install, run', 'brew install python')
        print(' Please install Python ' + PY_VERSION
              + ' per http://docs.python-guide.org/en/latest/starting/installation/')
        sys.exit(1)
def check_pip():
    """Verify pip3 exists, targets PY_VERSION, and is at least 9.0.3."""
    if not run('which pip3', return_output=True):
        error('! pip3 is not installed.')
        if IS_DEBIAN:
            raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y python3-pip')
        elif IS_MACOS:
            raise AutoFixSuggestion('To install, run', 'curl -s https://bootstrap.pypa.io/get-pip.py | '
                                    + SUDO + 'python' + PY_VERSION)
        print(' If your package repo has a *-pip package for Python ' + PY_VERSION
              + ', then installing it from there is recommended.')
        print(' To install directly, run: curl -s https://bootstrap.pypa.io/get-pip.py | '
              + SUDO + 'python' + PY_VERSION)
        sys.exit(1)
    version_full = run('pip3 --version', return_output=True)
    # pip3 reports which interpreter it is bound to; it may belong to a
    # different Python than the one being checked.
    if 'python ' + PY_VERSION not in version_full:
        print(' ' + version_full.strip())
        error('! pip3 is pointing to another Python version and not Python ' + PY_VERSION)
        if '--version' not in sys.argv:
            print(' autopip supports Python {}.'.format(', '.join(SUPPORTED_VERSIONS))
                  + ' To check a different version, re-run using "python - --version x.y"')
        raise AutoFixSuggestion('To re-install for Python ' + PY_VERSION + ', run',
                                'curl -s https://bootstrap.pypa.io/get-pip.py | ' + SUDO + 'python' + PY_VERSION)
    version_str = version_full.split()[1]
    # Compare only major.minor.patch; _int_or keeps non-numeric parts as strings.
    version = tuple(map(_int_or, version_str.split('.', 2)))
    if version < (9, 0, 3):
        error('! Version is', version_str + ', but should be 9.0.3+')
        raise AutoFixSuggestion('To upgrade, run', SUDO + 'pip3 install pip==9.0.3')
def check_venv():
    """Verify that virtual environments can be created with both the stdlib
    ``venv`` module and the ``virtualenv`` tool.

    A scratch environment is created under /tmp and always cleaned up.
    """
    test_venv_path = '/tmp/check-python-venv-{}'.format(os.getpid())
    try:
        try:
            run('python' + PY_VERSION + ' -m venv ' + test_venv_path, stderr=subprocess.STDOUT, return_output=True,
                raises=True)
        except Exception:
            error('! Could not create virtual environment.')
            if IS_DEBIAN:
                raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y python' + PY_VERSION + '-venv')
            print(' Please make sure Python venv package is installed.')
            sys.exit(1)
    finally:
        # Remove the scratch venv even when creation failed halfway through.
        shutil.rmtree(test_venv_path, ignore_errors=True)
    try:
        try:
            run('virtualenv --python python' + PY_VERSION + ' ' + test_venv_path, stderr=subprocess.STDOUT,
                return_output=True,
                raises=True)
        except Exception as e:
            if run('which virtualenv', return_output=True):
                # virtualenv exists but failed: surface its output and give up.
                error('! Could not create virtual environment.')
                print(' ' + str(e))
                sys.exit(1)
            else:
                error('! virtualenv is not installed.')
                raise AutoFixSuggestion('To install, run', SUDO + 'pip3 install virtualenv')
    finally:
        shutil.rmtree(test_venv_path, ignore_errors=True)
def check_setuptools():
    """Verify setuptools is installed and at least version 39."""
    cmd = 'python' + PY_VERSION + ' -m easy_install --version'
    try:
        output = run(cmd, return_output=True, raises=True)
    except Exception:
        error('! setuptools is not installed.')
        raise AutoFixSuggestion('To install, run', SUDO + 'pip3 install setuptools')
    version_str = output.split()[1]
    # Non-numeric version parts stay as strings for the tuple comparison.
    parsed = tuple(_int_or(part) for part in version_str.split('.'))
    if parsed < (39,):
        error('! Version is', version_str + ', but should be 39+')
        raise AutoFixSuggestion('To upgrade, run', SUDO + 'pip3 install -U setuptools')
def check_wheel():
    """Verify wheel is installed and at least version 0.31."""
    cmd = 'python' + PY_VERSION + ' -m wheel version '
    try:
        output = run(cmd, return_output=True, raises=True)
    except Exception:
        error('! wheel is not installed.')
        raise AutoFixSuggestion('To install, run', SUDO + 'pip3 install wheel')
    version_str = output.split()[1]
    # Non-numeric version parts stay as strings for the tuple comparison.
    parsed = tuple(_int_or(part) for part in version_str.split('.'))
    if parsed < (0, 31):
        error('! Version is', version_str + ', but should be 0.31+')
        raise AutoFixSuggestion('To upgrade, run', SUDO + 'pip3 install -U wheel')
def check_python_dev():
    """Verify the Python C headers (Python.h) are available for PY_VERSION."""
    # Ask the target interpreter itself where its include directory lives.
    include_path = run('python' + PY_VERSION
                       + ' -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())"',
                       return_output=True)
    if not include_path:
        error('! Failed to get Python include path, so not sure if Python dev package is installed')
        if IS_DEBIAN:
            raise AutoFixSuggestion('To install, run', SUDO + ' apt-get install -y python' + PY_VERSION + '-dev')
        sys.exit(1)
    python_h = os.path.join(include_path.strip(), 'Python.h')
    if not os.path.exists(python_h):
        error('! Python dev package is not installed as', python_h, 'does not exist')
        if IS_DEBIAN:
            raise AutoFixSuggestion('To install, run', SUDO + 'apt-get install -y python' + PY_VERSION + '-dev')
        sys.exit(1)
def run(cmd, return_output=False, raises=False, **kwargs):
    """Echo and execute a shell command.

    When ``return_output`` is True the command's stdout is returned as a
    string (or None on failure unless ``raises`` is set); otherwise the
    command runs attached to the current stdout/stderr.
    """
    print('+ ' + str(cmd))
    if '"' in cmd or '|' in cmd:
        # Quoting or pipes need a real shell; pass the command line verbatim.
        kwargs['shell'] = True
    elif isinstance(cmd, str):
        cmd = cmd.split()
    check_call = subprocess.check_output if return_output else subprocess.check_call
    try:
        output = check_call(cmd, **kwargs)
        if isinstance(output, bytes):
            output = output.decode('utf-8')
        return output
    except Exception:
        # Best-effort mode: return None when the caller only wanted output;
        # otherwise let the failure propagate.
        if return_output and not raises:
            return
        else:
            raise
def _int_or(value):
try:
return int(value)
except Exception:
return value
def error(*msg):
    """Print an error message, colored red unless autofix mode is active."""
    text = ' '.join(str(part) for part in msg)
    echo(text, color=None if AUTOFIX else 'red')
def echo(msg, color=None):
    """Print *msg*, ANSI-colored when stdout is an interactive terminal."""
    ansi = {'red': '\033[0;31m', 'green': '\033[92m'}
    if sys.stdout.isatty() and color:
        # Unknown color names are passed through verbatim, as before.
        msg = ansi.get(color, color) + msg + '\033[0m'
    print(msg)
class AutoFixSuggestion(Exception):
    """Raised by check_* functions with a command that can fix the problem.

    The exception message is the human instruction (e.g. "To install, run")
    and ``cmd`` holds the shell command, or tuple of commands, to execute.
    """
    def __init__(self, instruction, cmd):
        super(AutoFixSuggestion, self).__init__(instruction)
        # Command(s) the autofix loop should run to resolve the issue.
        self.cmd = cmd
# Order matters: in autofix mode the prerequisites (sudo/apt/curl) are
# checked first, then the interpreter, pip and the packaging toolchain.
checks = [check_python, check_pip, check_venv, check_setuptools, check_wheel, check_python_dev]
if AUTOFIX:
    checks.insert(0, check_curl)
    if IS_DEBIAN:
        checks.insert(0, check_apt)
    if SUDO:
        checks.insert(0, check_sudo)
try:
    last_fix = None
    for check in checks:
        print('Checking ' + check.__name__.split('_', 1)[1].replace('_', ' '))
        # Re-run each check until it passes or its fix is exhausted.
        while True:
            try:
                check()
                break
            except AutoFixSuggestion as e:
                cmds = e.cmd if isinstance(e.cmd, tuple) else (e.cmd,)
                if AUTOFIX:
                    # Guard against fix loops: the same suggestion twice in
                    # a row means the fix did not work.
                    if cmds == last_fix:
                        error('! Failed to fix automatically, so you gotta fix it yourself.')
                        sys.exit(1)
                    else:
                        for cmd in cmds:
                            run(cmd, return_output=True, raises=True)
                        last_fix = cmds
                else:
                    print(' ' + str(e) + ': ' + ' && '.join(cmds) + '\n')
                    print('# Run the above suggested command(s) manually and then re-run to continue checking,')
                    print(' or re-run using "python - --autofix" to run all suggested commands automatically.')
                    sys.exit(1)
        print('')
except Exception as e:
    error('!', str(e))
    sys.exit(1)
except KeyboardInterrupt:
    # KeyboardInterrupt is not an Exception subclass, so it needs its own arm.
    sys.exit(1)
echo('Python is alive and well. Good job!', color='green')
| [
"os.path.exists",
"argparse.ArgumentParser",
"os.getuid",
"shutil.rmtree",
"platform.system",
"sys.stdout.isatty",
"os.getpid",
"sys.exit"
] | [((475, 547), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Check and fix Python installation"""'}), "(description='Check and fix Python installation')\n", (498, 547), False, 'import argparse\n'), ((200, 237), 'os.path.exists', 'os.path.exists', (['"""/etc/debian_version"""'], {}), "('/etc/debian_version')\n", (214, 237), False, 'import os\n'), ((269, 303), 'os.path.exists', 'os.path.exists', (['"""/etc/lsb-release"""'], {}), "('/etc/lsb-release')\n", (283, 303), False, 'import os\n'), ((397, 414), 'platform.system', 'platform.system', ([], {}), '()\n', (412, 414), False, 'import platform\n'), ((445, 456), 'os.getuid', 'os.getuid', ([], {}), '()\n', (454, 456), False, 'import os\n'), ((167, 184), 'platform.system', 'platform.system', ([], {}), '()\n', (182, 184), False, 'import platform\n'), ((1069, 1080), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1077, 1080), False, 'import sys\n'), ((1451, 1462), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1459, 1462), False, 'import sys\n'), ((2774, 2785), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2782, 2785), False, 'import sys\n'), ((3509, 3520), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3517, 3520), False, 'import sys\n'), ((4551, 4562), 'os.getpid', 'os.getpid', ([], {}), '()\n', (4560, 4562), False, 'import os\n'), ((5084, 5133), 'shutil.rmtree', 'shutil.rmtree', (['test_venv_path'], {'ignore_errors': '(True)'}), '(test_venv_path, ignore_errors=True)\n', (5097, 5133), False, 'import shutil\n'), ((5742, 5791), 'shutil.rmtree', 'shutil.rmtree', (['test_venv_path'], {'ignore_errors': '(True)'}), '(test_venv_path, ignore_errors=True)\n', (5755, 5791), False, 'import shutil\n'), ((7424, 7435), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7432, 7435), False, 'import sys\n'), ((7511, 7535), 'os.path.exists', 'os.path.exists', (['python_h'], {}), '(python_h)\n', (7525, 7535), False, 'import os\n'), ((7766, 7777), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', 
(7774, 7777), False, 'import sys\n'), ((8585, 8604), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (8602, 8604), False, 'import sys\n'), ((10339, 10350), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10347, 10350), False, 'import sys\n'), ((10382, 10393), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10390, 10393), False, 'import sys\n'), ((5050, 5061), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5058, 5061), False, 'import sys\n'), ((5540, 5551), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5548, 5551), False, 'import sys\n'), ((10257, 10268), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10265, 10268), False, 'import sys\n'), ((9722, 9733), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9730, 9733), False, 'import sys\n')] |
import numpy as np
from sklearn.metrics import average_precision_score as ap
from sklearn.metrics import roc_auc_score
"""
each row is an instance
each column is the prediction of a class
"""
def _score_to_rank(score_list):
rank_array = np.zeros([len(score_list)])
score_array = np.array(score_list)
idx_sorted = (-score_array).argsort()
rank_array[idx_sorted] = np.arange(len(score_list))+1
rank_list = rank_array.tolist()
return rank_list
# For clip evaluation
def auc_y_classwise(Y_target, Y_score):
    """Per-class ROC AUC, one value per column.

    Y_target: list of lists. {0, 1}
        real labels
    Y_score: list of lists. real values
        prediction values
    """
    # Y_target = np.squeeze(np.array(Y_target))
    # Y_score = np.squeeze(np.array(Y_score))
    targets = np.array(Y_target)
    scores = np.array(Y_score)
    return roc_auc_score(targets, scores, average=None)
def ap_y_classwise(Y_target, Y_score):
    """Per-class average precision, one value per column.

    Y_target: list of lists. {0, 1}
        real labels
    Y_score: list of lists. real values
        prediction values
    """
    # Y_target = np.squeeze(np.array(Y_target))
    # Y_score = np.squeeze(np.array(Y_score))
    targets = np.array(Y_target)
    scores = np.array(Y_score)
    return ap(targets, scores, average=None)
def auc(Y_target, Y_score):
    """Column-wise ROC AUC scores, skipping degenerate columns.

    Y_target: list of lists. {0, 1}
        real labels
    Y_score: list of lists. real values
        prediction values

    Returns a list with one AUC per scorable class column; columns whose
    targets contain a single class (AUC undefined) are skipped.
    """
    Y_target = np.array(Y_target)
    Y_score = np.array(Y_score)
    auc_list = []
    for i in range(Y_score.shape[1]):
        try:
            auc_list.append(roc_auc_score(Y_target[:, i], Y_score[:, i]))
        except ValueError:
            # roc_auc_score raises ValueError when only one class is present
            # in a column; narrowed from a bare ``except`` so unrelated
            # errors are no longer silently swallowed.
            continue
    return auc_list
def mean_auc(Y_target, Y_score):
    """Average of the per-class AUC values produced by ``auc``."""
    return np.mean(auc(Y_target, Y_score))
def mean_auc_y(Y_target, Y_score):
    '''
    Mean AUC along the y-axis, i.e. averaged over class columns.
    Identical to mean_auc since that is already column-wise.
    '''
    return mean_auc(Y_target, Y_score)
def mean_auc_x(Y_target, Y_score):
    '''
    Mean AUC along the x-axis: both matrices are transposed so the AUC is
    computed per instance (row) instead of per class.
    '''
    return mean_auc(np.array(Y_target).T, np.array(Y_score).T)
def mean_average_precision(Y_target, Y_score):
    """
    Mean average precision, computed row by row (per instance).

    Y_target: list of lists. {0, 1}
        real labels
    Y_score: list of lists. real values
        prediction values

    Rows whose targets are all 0 or all 1 are excluded from the average,
    since ranking precision is undefined for them.
    """
    p = float(len(Y_target))  # number of rows contributing to the mean
    temp_sum = 0
    for y_target, y_score in zip(Y_target, Y_score):
        y_target = np.array(y_target)
        y_score = np.array(y_score)
        if (y_target == 0).all() or (y_target == 1).all():
            # Degenerate row: no ranking information, drop it from the mean.
            p -= 1
            continue
        idx_target = np.nonzero(y_target > 0)[0]
        n_target = float(len(idx_target))
        rank_list = np.array(_score_to_rank(y_score))
        target_rank_list = rank_list[idx_target]
        temp_sum_2 = 0
        for target_rank in target_rank_list:
            # Precision at this positive's rank: fraction of the top
            # `target_rank` predictions that are true positives.
            mm = sum([1 for ii in idx_target
                      if rank_list[ii] <= target_rank])/float(target_rank)
            temp_sum_2 += mm
        temp_sum += temp_sum_2/n_target
    measure = temp_sum/p
    return measure
def map(Y_target, Y_score):
    """Alias for mean_average_precision (row-wise).

    NOTE(review): this shadows the built-in ``map`` within this module;
    kept for backward compatibility with existing callers.
    """
    return mean_average_precision(Y_target, Y_score)
def map_x(Y_target, Y_score):
    """Row-wise (x-axis) mAP; same as ``map`` since the base metric is row-based."""
    return mean_average_precision(Y_target, Y_score)
def map_y(Y_target, Y_score):
    """Column-wise (y-axis) mAP: transpose so each class column becomes a row."""
    return mean_average_precision(np.array(Y_target).T,
                                   np.array(Y_score).T)
| [
"numpy.mean",
"sklearn.metrics.average_precision_score",
"sklearn.metrics.roc_auc_score",
"numpy.array",
"numpy.nonzero"
] | [((289, 309), 'numpy.array', 'np.array', (['score_list'], {}), '(score_list)\n', (297, 309), True, 'import numpy as np\n'), ((780, 798), 'numpy.array', 'np.array', (['Y_target'], {}), '(Y_target)\n', (788, 798), True, 'import numpy as np\n'), ((813, 830), 'numpy.array', 'np.array', (['Y_score'], {}), '(Y_score)\n', (821, 830), True, 'import numpy as np\n'), ((846, 892), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['Y_target', 'Y_score'], {'average': 'None'}), '(Y_target, Y_score, average=None)\n', (859, 892), False, 'from sklearn.metrics import roc_auc_score\n'), ((1202, 1220), 'numpy.array', 'np.array', (['Y_target'], {}), '(Y_target)\n', (1210, 1220), True, 'import numpy as np\n'), ((1235, 1252), 'numpy.array', 'np.array', (['Y_score'], {}), '(Y_score)\n', (1243, 1252), True, 'import numpy as np\n'), ((1267, 1302), 'sklearn.metrics.average_precision_score', 'ap', (['Y_target', 'Y_score'], {'average': 'None'}), '(Y_target, Y_score, average=None)\n', (1269, 1302), True, 'from sklearn.metrics import average_precision_score as ap\n'), ((1506, 1524), 'numpy.array', 'np.array', (['Y_target'], {}), '(Y_target)\n', (1514, 1524), True, 'import numpy as np\n'), ((1539, 1556), 'numpy.array', 'np.array', (['Y_score'], {}), '(Y_score)\n', (1547, 1556), True, 'import numpy as np\n'), ((1863, 1880), 'numpy.mean', 'np.mean', (['auc_list'], {}), '(auc_list)\n', (1870, 1880), True, 'import numpy as np\n'), ((2501, 2519), 'numpy.array', 'np.array', (['y_target'], {}), '(y_target)\n', (2509, 2519), True, 'import numpy as np\n'), ((2538, 2555), 'numpy.array', 'np.array', (['y_score'], {}), '(y_score)\n', (2546, 2555), True, 'import numpy as np\n'), ((1644, 1688), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['Y_target[:, i]', 'Y_score[:, i]'], {}), '(Y_target[:, i], Y_score[:, i])\n', (1657, 1688), False, 'from sklearn.metrics import roc_auc_score\n'), ((2100, 2118), 'numpy.array', 'np.array', (['Y_target'], {}), '(Y_target)\n', (2108, 2118), True, 'import numpy as 
np\n'), ((2122, 2139), 'numpy.array', 'np.array', (['Y_score'], {}), '(Y_score)\n', (2130, 2139), True, 'import numpy as np\n'), ((2676, 2700), 'numpy.nonzero', 'np.nonzero', (['(y_target > 0)'], {}), '(y_target > 0)\n', (2686, 2700), True, 'import numpy as np\n'), ((3386, 3404), 'numpy.array', 'np.array', (['Y_target'], {}), '(Y_target)\n', (3394, 3404), True, 'import numpy as np\n'), ((3442, 3459), 'numpy.array', 'np.array', (['Y_score'], {}), '(Y_score)\n', (3450, 3459), True, 'import numpy as np\n')] |
"""
:filename transformations.py
:author <NAME>
:email <EMAIL>
from
Classes of custom transformations that are applied during the training as additional augmentation of the depth maps.
"""
import torch
import random
import numpy as np
import torch.nn.functional as F
from random import randrange
from skimage.transform import resize, warp, AffineTransform
class Normalize(object):
    """Normalization of a depth map in the value of [0, 1] for each pixel.

    NOTE(review): for the 'geom' input type, per-channel mean/std are
    computed but not yet applied (original TODO); the sample is currently
    returned unchanged for every input type.
    """
    def __init__(self, input_type):
        # Input layout selector; only 'geom' triggers the statistics pass.
        self.input_type = input_type

    def __call__(self, sample):
        # Bug fix: unpack unconditionally so non-'geom' inputs no longer
        # raise NameError on the variables used in the return value below.
        image, landmarks, label = sample['image'], sample['landmarks'], sample['label']
        if self.input_type == 'geom':
            mean, std = image.mean([1, 2]), image.std([1, 2])
            # TODO? (original): apply the normalization using mean/std.
        return {'image': image,
                'landmarks': landmarks,
                'label': label}
class ToTensor(object):
    """Convert a sample's image and landmarks into torch tensors.

    Depth-only inputs gain a channel dimension and are permuted to a
    channel-first layout; 'depth+geom' inputs already carry channels
    last and are simply permuted.
    """
    def __init__(self, input_type):
        self.input_type = input_type

    def __call__(self, sample):
        image, landmarks, label = sample['image'], sample['landmarks'], sample['label']
        tensor = torch.from_numpy(image.copy())
        if self.input_type == 'depth+geom':
            # (H, W, C) -> (C, H, W)
            tensor = tensor.permute(2, 0, 1)
        else:
            # (H, W) -> (H, 1, W) -> (1, H, W)
            tensor = tensor.unsqueeze(1).permute(1, 0, 2)
        landmark_tensor = torch.from_numpy(np.asarray(landmarks).copy())
        return {'image': tensor,
                'landmarks': landmark_tensor,
                'label': label}
class Resize(object):
    """Resize the sample's image or landmark maps to (height, width),
    depending on the configured input type."""
    def __init__(self, width, height, input_type='image'):
        assert isinstance(width, int)
        assert isinstance(height, int)
        self.width = width
        self.height = height
        self.type = input_type

    def __call__(self, sample):
        image, landmarks, label = sample['image'], sample['landmarks'], sample['label']
        out_landmarks = landmarks.copy()
        if self.type == 'image':
            image = resize(image, (self.height, self.width), anti_aliasing=True)
        if self.type == 'landmarks':
            out_landmarks = [resize(lm, (self.height, self.width), anti_aliasing=True)
                             for lm in landmarks]
        return {'image': image,
                'landmarks': out_landmarks,
                'label': label}
class RandomTranslating(object):
    """Randomly translate the sample by [-10 px, 10 px] with probability p."""
    def __init__(self, p=0.5):
        assert isinstance(p, float)
        self.p = p
    def __call__(self, sample):
        image = sample['image']
        landmarks = sample['landmarks']
        shifted = landmarks.copy()
        if np.random.rand(1) < self.p:
            # Independent x/y shifts, applied identically to image and landmarks.
            dx, dy = randrange(-10, 10), randrange(-10, 10)
            tform = AffineTransform(translation=(dx, dy))
            image = warp(image, tform.inverse)
            shifted = [warp(lm, tform.inverse) for lm in landmarks]
        return {'image': image,
                'landmarks': shifted,
                'label': sample['label']}
class RandomScaling(object):
    """Randomly scale the sample by a factor in [0.90, 1.10] with probability p."""
    def __init__(self, p=0.5):
        assert isinstance(p, float)
        self.p = p
    def __call__(self, sample):
        image = sample['image']
        landmarks = sample['landmarks']
        scaled = landmarks.copy()
        if np.random.rand(1) < self.p:
            # Same isotropic scale factor for both axes.
            factor = random.uniform(0.90, 1.10)
            tform = AffineTransform(scale=(factor, factor))
            image = warp(image, tform.inverse)
            scaled = [warp(lm, tform.inverse) for lm in landmarks]
        return {'image': image,
                'landmarks': scaled,
                'label': sample['label']}
class RandomRotation(object):
    """Randomly rotate the sample by pi/d radians, d drawn from [-32, -6) or [6, 32),
    with probability p (the stated [-11.25 deg, 11.25 deg] range is approximate)."""
    def __init__(self, p=0.5):
        assert isinstance(p, float)
        self.p = p
    def __call__(self, sample):
        image, landmarks, label = sample['image'], sample['landmarks'], sample['label']
        # One divisor from each sign, then pick one at random.
        low = randrange(-32, -6)
        high = randrange(6, 32)
        divisor = random.choice([low, high])
        if np.random.rand(1) < self.p:
            theta = np.pi / divisor
            rotated_image = self.rotate(x=image.unsqueeze(0).type(torch.FloatTensor), theta=theta)
            pieces = []
            for lm in landmarks:
                rotated = self.rotate(x=lm.unsqueeze(0).unsqueeze(0).type(torch.FloatTensor), theta=theta)
                pieces.append(rotated.squeeze(0))
            return {'image': rotated_image.squeeze(0),
                    'landmarks': torch.cat(pieces, dim=0),
                    'label': label}
        return {'image': image,
                'landmarks': landmarks,
                'label': label}
    @staticmethod
    def get_rotation_matrix(theta):
        """Return a 2x3 affine rotation matrix tensor for angle theta (radians)."""
        t = torch.tensor(theta)
        cos_t, sin_t = torch.cos(t), torch.sin(t)
        return torch.tensor([[cos_t, -sin_t, 0],
                             [sin_t, cos_t, 0]])
    def rotate(self, x, theta):
        # Batched affine-grid rotation of a (N, C, H, W) tensor.
        rot_mat = self.get_rotation_matrix(theta)[None, ...].repeat(x.shape[0], 1, 1)
        grid = F.affine_grid(rot_mat, x.size(), align_corners=False)
        return F.grid_sample(x, grid, align_corners=False)
| [
"torch.nn.functional.grid_sample",
"random.uniform",
"random.choice",
"numpy.random.rand",
"random.randrange",
"skimage.transform.AffineTransform",
"numpy.asarray",
"skimage.transform.warp",
"torch.sin",
"torch.tensor",
"torch.cos",
"skimage.transform.resize",
"torch.cat"
] | [((1466, 1487), 'numpy.asarray', 'np.asarray', (['landmarks'], {}), '(landmarks)\n', (1476, 1487), True, 'import numpy as np\n'), ((4681, 4699), 'random.randrange', 'randrange', (['(-32)', '(-6)'], {}), '(-32, -6)\n', (4690, 4699), False, 'from random import randrange\n'), ((4719, 4735), 'random.randrange', 'randrange', (['(6)', '(32)'], {}), '(6, 32)\n', (4728, 4735), False, 'from random import randrange\n'), ((4754, 4789), 'random.choice', 'random.choice', (['[rnd_num1, rnd_num2]'], {}), '([rnd_num1, rnd_num2])\n', (4767, 4789), False, 'import random\n'), ((5665, 5684), 'torch.tensor', 'torch.tensor', (['theta'], {}), '(theta)\n', (5677, 5684), False, 'import torch\n'), ((6028, 6071), 'torch.nn.functional.grid_sample', 'F.grid_sample', (['x', 'grid'], {'align_corners': '(False)'}), '(x, grid, align_corners=False)\n', (6041, 6071), True, 'import torch.nn.functional as F\n'), ((2182, 2242), 'skimage.transform.resize', 'resize', (['image', '(self.height, self.width)'], {'anti_aliasing': '(True)'}), '(image, (self.height, self.width), anti_aliasing=True)\n', (2188, 2242), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((3029, 3046), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (3043, 3046), True, 'import numpy as np\n'), ((3074, 3092), 'random.randrange', 'randrange', (['(-10)', '(10)'], {}), '(-10, 10)\n', (3083, 3092), False, 'from random import randrange\n'), ((3110, 3128), 'random.randrange', 'randrange', (['(-10)', '(10)'], {}), '(-10, 10)\n', (3119, 3128), False, 'from random import randrange\n'), ((3146, 3183), 'skimage.transform.AffineTransform', 'AffineTransform', ([], {'translation': '(n1, n2)'}), '(translation=(n1, n2))\n', (3161, 3183), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((3205, 3227), 'skimage.transform.warp', 'warp', (['image', 't.inverse'], {}), '(image, t.inverse)\n', (3209, 3227), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((3900, 
3917), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (3914, 3917), True, 'import numpy as np\n'), ((3944, 3968), 'random.uniform', 'random.uniform', (['(0.9)', '(1.1)'], {}), '(0.9, 1.1)\n', (3958, 3968), False, 'import random\n'), ((3987, 4016), 'skimage.transform.AffineTransform', 'AffineTransform', ([], {'scale': '(n, n)'}), '(scale=(n, n))\n', (4002, 4016), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((4038, 4060), 'skimage.transform.warp', 'warp', (['image', 't.inverse'], {}), '(image, t.inverse)\n', (4042, 4060), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((4802, 4819), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (4816, 4819), True, 'import numpy as np\n'), ((5248, 5283), 'torch.cat', 'torch.cat', (['rotated_landmarks'], {'dim': '(0)'}), '(rotated_landmarks, dim=0)\n', (5257, 5283), False, 'import torch\n'), ((2389, 2452), 'skimage.transform.resize', 'resize', (['landmark', '(self.height, self.width)'], {'anti_aliasing': '(True)'}), '(landmark, (self.height, self.width), anti_aliasing=True)\n', (2395, 2452), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((3350, 3375), 'skimage.transform.warp', 'warp', (['landmark', 't.inverse'], {}), '(landmark, t.inverse)\n', (3354, 3375), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((4175, 4200), 'skimage.transform.warp', 'warp', (['landmark', 't.inverse'], {}), '(landmark, t.inverse)\n', (4179, 4200), False, 'from skimage.transform import resize, warp, AffineTransform\n'), ((5716, 5732), 'torch.cos', 'torch.cos', (['theta'], {}), '(theta)\n', (5725, 5732), False, 'import torch\n'), ((5787, 5803), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (5796, 5803), False, 'import torch\n'), ((5805, 5821), 'torch.cos', 'torch.cos', (['theta'], {}), '(theta)\n', (5814, 5821), False, 'import torch\n'), ((5735, 5751), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', 
(5744, 5751), False, 'import torch\n')] |
import random
import shutil
import sys
from argparse import ArgumentParser
from os import path
from pathlib import Path
from threading import Thread
from PySide6 import __version__ as PySideVer
from PySide6.QtCore import (Property, QCoreApplication, QObject, Qt, QTimer,
Signal, Slot)
from PySide6.QtCore import __version__ as QtVer
from PySide6.QtGui import QGuiApplication, QIcon
from PySide6.QtQml import QQmlApplicationEngine
from downloader import Downloader
DOUNLOAD_COUNT = 100
MIN_SIZE = (300, 300)
IMAGE_INTERVAL = 20000
IMAGES_DIR_NAME = path.join(path.abspath(path.dirname(sys.argv[0])), 'images')
DEFAULT_KEYWORD = '女性ヘアカタログロング'
class MainModel(QObject):
    """Qt model exposing the download state and an images-folder clear action to QML."""
    is_download_changed = Signal(bool)
    def __init__(self, is_download, dirname, parent=None):
        super().__init__(parent)
        self.__is_download = is_download
        self.__dirname = dirname
    @Property(bool, notify=is_download_changed)
    def is_download(self):
        """Whether a download is currently in progress."""
        return self.__is_download
    @is_download.setter
    def is_download(self, value):
        # Only emit the change signal when the value actually changes.
        if self.__is_download == value:
            return
        self.__is_download = value
        self.is_download_changed.emit(self.__is_download)
    @Slot()
    def clear(self):
        """Delete the whole images directory."""
        shutil.rmtree(self.__dirname)
    def on_download_completed(self):
        # Slot target for DownloaderModel.download_completed.
        self.is_download = False
class DownloaderModel(QObject):
    """Qt model that runs the image download in the background and reports
    progress (0..DOUNLOAD_COUNT) to QML."""
    prog_value_changed = Signal(int)
    prog_max_changed = Signal(int)
    download_completed = Signal()
    def __init__(self, download_keyword, dirname, parent=None):
        super().__init__(parent)
        self.__prog_value = 0
        self.__download_keyword = download_keyword
        self.__dirname = dirname
        # Downloader reports progress back through the callback below.
        self.__downloader = Downloader(self.progress_download_callback)
    @Property(int, notify=prog_value_changed)
    def prog_value(self):
        """Current download progress (number of images fetched)."""
        return self.__prog_value
    @prog_value.setter
    def prog_value(self, value):
        if self.__prog_value != value:
            self.__prog_value = value
            self.prog_value_changed.emit(self.__prog_value)
    @Property(int, notify=prog_max_changed)
    def prog_max(self):
        """Maximum progress value (total images to download)."""
        return DOUNLOAD_COUNT
    @Slot()
    def start_download(self):
        """Download images on a daemon worker thread, then emit download_completed."""
        def _inner(keyword, dirname):
            self.__downloader.download_images(keyword, dirname, DOUNLOAD_COUNT, MIN_SIZE)
            self.download_completed.emit()
        # FIX: Thread.setDaemon() is deprecated since Python 3.10;
        # pass daemon=True to the constructor instead.
        th = Thread(target=_inner, args=(self.__download_keyword, self.__dirname),
                    daemon=True)
        th.start()
    def progress_download_callback(self, progress):
        # Invoked by Downloader from the worker thread.
        self.prog_value = progress
class ImageViewerModel(QObject):
    """Qt model that cycles through the downloaded images at a fixed interval,
    exposing the current image as a file:/// URL to QML."""
    image_url_changed = Signal(str)
    def __init__(self, dirname, parent=None):
        super().__init__(parent)
        self.__image_url = ''
        self.__dirname = dirname
        self.__image_list = []
        self.__timer = QTimer(self)
        self.__timer.setInterval(IMAGE_INTERVAL)
        self.__timer.timeout.connect(self.on_timeout)
    @Property(str, notify=image_url_changed)
    def image_url(self):
        """file:/// URL of the image currently on display."""
        return self.__image_url
    @image_url.setter
    def image_url(self, value):
        # Only emit when the URL actually changes.
        if self.__image_url == value:
            return
        self.__image_url = value
        self.image_url_changed.emit(self.__image_url)
    @Slot()
    def start_view(self):
        """Begin the slideshow: load the file list, show one image, start the timer."""
        self.init_image_list()
        self.random_set_image()
        self.__timer.start()
    def init_image_list(self):
        # Collect every regular file in the images directory.
        self.__image_list = [str(entry) for entry in Path(self.__dirname).iterdir()
                             if entry.is_file()]
    def random_set_image(self):
        # Pick an image without replacement; no-op once the list is exhausted.
        if not self.__image_list:
            return
        chosen = random.choice(self.__image_list)
        self.__image_list.remove(chosen)
        joined = path.join(self.__dirname, chosen).replace(path.sep, '/')
        self.image_url = f'file:///{joined}'
    def on_timeout(self):
        # Refill the pool once every image has been shown, then advance.
        if not self.__image_list:
            self.init_image_list()
        self.random_set_image()
def exist_images():
    """Return True if the images directory exists and contains at least one file.

    Uses a generator inside any() so iteration stops at the first regular file
    (the original built a full throwaway list first).
    """
    return path.isdir(IMAGES_DIR_NAME) and any(entry.is_file() for entry in Path(IMAGES_DIR_NAME).iterdir())
def initialize_qt():
    """Configure global Qt settings before the QGuiApplication is created.

    Injects the Material style via argv and enables high-DPI scaling,
    high-DPI pixmaps, and OpenGL ES rendering. Must run before
    QGuiApplication(sys.argv) is instantiated, since these are
    application-wide attributes read at construction time.
    """
    # Qt Quick Controls reads the style from the command line.
    sys.argv += ['--style', 'Material']
    QGuiApplication.setAttribute(Qt.AA_EnableHighDpiScaling)
    QCoreApplication.setAttribute(Qt.AA_UseHighDpiPixmaps)
    QCoreApplication.setAttribute(Qt.AA_UseOpenGLES)
def resource_path(relative):
    """Resolve *relative* against the PyInstaller bundle dir when frozen,
    otherwise against the current working directory."""
    base = getattr(sys, '_MEIPASS', None)
    if base is None:
        # Not running from a PyInstaller bundle.
        base = path.abspath('.')
    return path.join(base, relative)
def main():
    """Parse CLI args, set up Qt, wire the models into QML, and run the event loop."""
    print(f'PySide6=={PySideVer} Qt=={QtVer}')
    parser = ArgumentParser(description='cappuccino: Simple image viewer with download')
    parser.add_argument('download_keyword', nargs='?', default='', help='image keyword to download')
    keyword = parser.parse_args().download_keyword
    # Fall back to the default search keyword when there is nothing to show yet.
    if not keyword and not exist_images():
        keyword = DEFAULT_KEYWORD
    initialize_qt()
    app = QGuiApplication(sys.argv)
    app.setWindowIcon(QIcon(resource_path('cappuccino.ico')))
    main_model = MainModel(keyword != '', IMAGES_DIR_NAME)
    downloader_model = DownloaderModel(keyword, IMAGES_DIR_NAME)
    viewer_model = ImageViewerModel(IMAGES_DIR_NAME)
    downloader_model.download_completed.connect(main_model.on_download_completed)
    engine = QQmlApplicationEngine()
    context = engine.rootContext()
    context.setContextProperty('mmodel', main_model)
    context.setContextProperty('dmodel', downloader_model)
    context.setContextProperty('imodel', viewer_model)
    engine.load(f'file:///{resource_path("qml/Main.qml")}')
    if not engine.rootObjects():
        sys.exit(-1)
    sys.exit(app.exec())
# Run the viewer only when this file is executed directly (not on import).
if __name__ == '__main__':
    main()
| [
"PySide6.QtCore.Slot",
"downloader.Downloader",
"PySide6.QtCore.Property",
"sys.exit",
"argparse.ArgumentParser",
"pathlib.Path",
"PySide6.QtGui.QGuiApplication.setAttribute",
"PySide6.QtCore.QTimer",
"os.path.isdir",
"PySide6.QtCore.Signal",
"random.choice",
"os.path.dirname",
"PySide6.QtGu... | [((730, 742), 'PySide6.QtCore.Signal', 'Signal', (['bool'], {}), '(bool)\n', (736, 742), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((916, 958), 'PySide6.QtCore.Property', 'Property', (['bool'], {'notify': 'is_download_changed'}), '(bool, notify=is_download_changed)\n', (924, 958), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((1226, 1232), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (1230, 1232), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((1422, 1433), 'PySide6.QtCore.Signal', 'Signal', (['int'], {}), '(int)\n', (1428, 1433), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((1457, 1468), 'PySide6.QtCore.Signal', 'Signal', (['int'], {}), '(int)\n', (1463, 1468), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((1494, 1502), 'PySide6.QtCore.Signal', 'Signal', ([], {}), '()\n', (1500, 1502), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((1793, 1833), 'PySide6.QtCore.Property', 'Property', (['int'], {'notify': 'prog_value_changed'}), '(int, notify=prog_value_changed)\n', (1801, 1833), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((2093, 2131), 'PySide6.QtCore.Property', 'Property', (['int'], {'notify': 'prog_max_changed'}), '(int, notify=prog_max_changed)\n', (2101, 2131), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((2192, 2198), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (2196, 2198), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((2676, 2687), 'PySide6.QtCore.Signal', 'Signal', (['str'], {}), '(str)\n', (2682, 2687), False, 'from 
PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((3007, 3046), 'PySide6.QtCore.Property', 'Property', (['str'], {'notify': 'image_url_changed'}), '(str, notify=image_url_changed)\n', (3015, 3046), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((3298, 3304), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (3302, 3304), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((4137, 4193), 'PySide6.QtGui.QGuiApplication.setAttribute', 'QGuiApplication.setAttribute', (['Qt.AA_EnableHighDpiScaling'], {}), '(Qt.AA_EnableHighDpiScaling)\n', (4165, 4193), False, 'from PySide6.QtGui import QGuiApplication, QIcon\n'), ((4198, 4252), 'PySide6.QtCore.QCoreApplication.setAttribute', 'QCoreApplication.setAttribute', (['Qt.AA_UseHighDpiPixmaps'], {}), '(Qt.AA_UseHighDpiPixmaps)\n', (4227, 4252), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((4257, 4305), 'PySide6.QtCore.QCoreApplication.setAttribute', 'QCoreApplication.setAttribute', (['Qt.AA_UseOpenGLES'], {}), '(Qt.AA_UseOpenGLES)\n', (4286, 4305), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((4544, 4619), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""cappuccino: Simple image viewer with download"""'}), "(description='cappuccino: Simple image viewer with download')\n", (4558, 4619), False, 'from argparse import ArgumentParser\n'), ((4925, 4950), 'PySide6.QtGui.QGuiApplication', 'QGuiApplication', (['sys.argv'], {}), '(sys.argv)\n', (4940, 4950), False, 'from PySide6.QtGui import QGuiApplication, QIcon\n'), ((5301, 5324), 'PySide6.QtQml.QQmlApplicationEngine', 'QQmlApplicationEngine', ([], {}), '()\n', (5322, 5324), False, 'from PySide6.QtQml import QQmlApplicationEngine\n'), ((605, 630), 'os.path.dirname', 'path.dirname', (['sys.argv[0]'], {}), 
'(sys.argv[0])\n', (617, 630), False, 'from os import path\n'), ((1262, 1291), 'shutil.rmtree', 'shutil.rmtree', (['self.__dirname'], {}), '(self.__dirname)\n', (1275, 1291), False, 'import shutil\n'), ((1743, 1786), 'downloader.Downloader', 'Downloader', (['self.progress_download_callback'], {}), '(self.progress_download_callback)\n', (1753, 1786), False, 'from downloader import Downloader\n'), ((2413, 2482), 'threading.Thread', 'Thread', ([], {'target': '_inner', 'args': '(self.__download_keyword, self.__dirname)'}), '(target=_inner, args=(self.__download_keyword, self.__dirname))\n', (2419, 2482), False, 'from threading import Thread\n'), ((2885, 2897), 'PySide6.QtCore.QTimer', 'QTimer', (['self'], {}), '(self)\n', (2891, 2897), False, 'from PySide6.QtCore import Property, QCoreApplication, QObject, Qt, QTimer, Signal, Slot\n'), ((3649, 3681), 'random.choice', 'random.choice', (['self.__image_list'], {}), '(self.__image_list)\n', (3662, 3681), False, 'import random\n'), ((3977, 4004), 'os.path.isdir', 'path.isdir', (['IMAGES_DIR_NAME'], {}), '(IMAGES_DIR_NAME)\n', (3987, 4004), False, 'from os import path\n'), ((4385, 4418), 'os.path.join', 'path.join', (['sys._MEIPASS', 'relative'], {}), '(sys._MEIPASS, relative)\n', (4394, 4418), False, 'from os import path\n'), ((4440, 4457), 'os.path.abspath', 'path.abspath', (['"""."""'], {}), "('.')\n", (4452, 4457), False, 'from os import path\n'), ((5613, 5625), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (5621, 5625), False, 'import sys\n'), ((3500, 3520), 'pathlib.Path', 'Path', (['self.__dirname'], {}), '(self.__dirname)\n', (3504, 3520), False, 'from pathlib import Path\n'), ((3758, 3790), 'os.path.join', 'path.join', (['self.__dirname', 'image'], {}), '(self.__dirname, image)\n', (3767, 3790), False, 'from os import path\n'), ((4035, 4056), 'pathlib.Path', 'Path', (['IMAGES_DIR_NAME'], {}), '(IMAGES_DIR_NAME)\n', (4039, 4056), False, 'from pathlib import Path\n')] |
import astropy.units as u
import numpy as np
from lofti_gaia.loftitools import *
from lofti_gaia.cFunctions import calcOFTI_C
#from loftitools import *
import pickle
import time
import matplotlib.pyplot as plt
# Astroquery throws some warnings we can ignore:
import warnings
warnings.filterwarnings("ignore")
'''This module obtains measurements from Gaia EDR3 (Gaia DR2 is also available as a secondary option) and runs through the LOFTI Gaia/OFTI
wide stellar binary orbit fitting technique.
'''
class Fitter(object):
    '''Initialize the Fitter object for the binary system, and compute observational constraints
    to be used in the orbit fit. User must provide Gaia source ids, tuples of mass estimates for
    both objects, specify the number of desired orbits in posterior sample. Fit will be
    for object 2 relative to object 1.

    Attributes are tuples of (value,uncertainty) unless otherwise indicated. Attributes
    with astropy units are retrieved from Gaia archive, attributes without units are
    computed from Gaia values. All relative values are for object 2 relative to object 1.

    Args:
        sourceid1, sourceid2 (int): Gaia source ids for the two objects, fit will be for motion of \
            object 2 relative to object 1
        mass1, mass2 (tuple, flt): tuple of mass estimate for object 1 and 2, of the form (value, uncertainty)
        Norbits (int): Number of desired orbits in posterior sample. Default = 100000
        results_filename (str): Filename for fit results files. If none, results will be written to files \
            named FitResults.yr.mo.day.hr.min.s
        astrometry (dict): User-supplied astrometric measurements. Must be dictionary or table or pandas dataframe with\
            column names "sep,seperr,pa,paerr,dates" or "ra,raerr,dec,decerr,dates". May be same as the rv table. \
            Sep, deltaRA, and deltaDEC must be in arcseconds, PA in degrees, dates in decimal years. \
            Default = None
        user_rv (dict): User-supplied radial velocity measurements. Must be dictionary or table or pandas dataframe with\
            column names "rv,rverr,rv_dates". May be same as the astrometry table. Default = None.
        catalog (str): name of Gaia catalog to query. Default = 'gaiaedr3.gaia_source'
        ruwe1, ruwe2 (flt): RUWE value from Gaia archive
        ref_epoch (flt): reference epoch in decimal years. For Gaia DR2 this is 2015.5, for Gaia EDR3 it is 2016.0
        plx1, plx2 (flt): parallax from Gaia in mas
        RA1, RA2 (flt): right ascension from Gaia; RA in deg, uncertainty in mas
        Dec1, Dec2 (flt): declination from Gaia; Dec in deg, uncertainty in mas
        pmRA1, pmRA2 (flt): proper motion in RA in mas yr^-1 from Gaia
        pmDec1, pmDec2 (flt): proper motion in DEC in mas yr^-1 from Gaia
        rv1, rv2 (flt, optional): radial velocity in km s^-1 from Gaia
        rv (flt, optional): relative RV of 2 relative to 1, if both are present in Gaia
        plx (flt): weighted mean parallax for the binary system in mas
        distance (flt): distance of system in pc, computed from Gaia parallax using method \
            of Bailer-Jones et. al 2018.
        deltaRA, deltaDec (flt): relative separation in RA and Dec directions, in mas
        pmRA, pmDec (flt): relative proper motion in RA/Dec directions in km s^-1
        sep (flt): total separation vector in mas
        pa (flt): postion angle of separation vector in degrees from North
        sep_au (flt): separation in AU
        sep_km (flt): separation in km
        total_vel (flt): total velocity vector in km s^-1. If RV is available for both, \
            this is the 3d velocity vector; if not it is just the plane of sky velocity.
        total_planeofsky_vel (flt): total velocity in the plane of sky in km s^-1. \
            In the absence of RV this is equivalent to the total velocity vector.
        deltaGmag (flt): relative contrast in Gaia G magnitude. Does not include uncertainty.
        inflateProperMotionError (flt): an optional factor to multiply default gaia proper motion error by.

    Written by <NAME>, 2020
    '''
    def __init__(self, sourceid1, sourceid2, mass1, mass2, Norbits = 100000, \
            results_filename = None,
            astrometry = None,
            user_rv = None,
            catalog = 'gaiaedr3.gaia_source',
            inflateProperMotionError=1
        ):
        self.sourceid1 = sourceid1
        self.sourceid2 = sourceid2
        try:
            self.mass1 = mass1[0]
            self.mass1err = mass1[1]
            self.mass2 = mass2[0]
            self.mass2err = mass2[1]
            # Total mass with uncertainties added in quadrature.
            self.mtot = [self.mass1 + self.mass2, np.sqrt((self.mass1err**2) + (self.mass2err**2))]
        # NOTE(review): bare except hides unrelated errors; (TypeError, IndexError) would be safer.
        except:
            raise ValueError('Masses must be tuples of (value,error), ex: mass1 = (1.0,0.05)')
        self.Norbits = Norbits
        # Default output filenames are timestamped so repeated runs do not clobber each other.
        if not results_filename:
            self.results_filename = 'FitResults.'+time.strftime("%Y.%m.%d.%H.%M.%S")+'.txt'
            self.stats_filename = 'FitResults.Stats.'+time.strftime("%Y.%m.%d.%H.%M.%S")+'.txt'
        else:
            self.results_filename = results_filename
            self.stats_filename = results_filename+'.Stats.txt'
        self.astrometry = False
        # check if user supplied astrometry:
        if astrometry is not None:
            # if so, set astrometric flag to True:
            self.astrometry = True
            # store observation dates:
            self.astrometric_dates = astrometry['dates']
            # if in sep/pa, convert to ra/dec:
            if 'sep' in astrometry:
                try:
                    # Monte Carlo propagation of sep/PA uncertainties into RA/Dec offsets.
                    astr_ra = [MonteCarloIt([astrometry['sep'][i],astrometry['seperr'][i]]) * \
                                np.sin(np.radians(MonteCarloIt([astrometry['pa'][i],astrometry['paerr'][i]]))) \
                                for i in range(len(astrometry['sep']))]
                    astr_dec = [MonteCarloIt([astrometry['sep'][i],astrometry['seperr'][i]]) * \
                                np.cos(np.radians(MonteCarloIt([astrometry['pa'][i],astrometry['paerr'][i]]))) \
                                for i in range(len(astrometry['sep']))]
                    # Row 0 = mean offsets, row 1 = standard deviations, one column per epoch.
                    self.astrometric_ra = np.array([
                        [np.mean(astr_ra[i]) for i in range(len(astrometry['sep']))],
                        [np.std(astr_ra[i]) for i in range(len(astrometry['sep']))]
                    ])
                    self.astrometric_dec = np.array([
                        [np.mean(astr_dec[i]) for i in range(len(astrometry['sep']))],
                        [np.std(astr_dec[i]) for i in range(len(astrometry['sep']))]
                    ])
                # NOTE(review): bare except — any failure is reported as a key problem.
                except:
                    raise ValueError('Astrometry keys not recognized. Please provide dictionary or table or pandas dataframe with\
                column names "sep,seperr,pa,paerr,dates" or "ra,raerr,dec,decerr,dates"')
            elif 'ra' in astrometry:
                # else store the ra/dec as attributes:
                try:
                    self.astrometric_ra = np.array([astrometry['ra'], astrometry['raerr']])
                    self.astrometric_dec = np.array([astrometry['dec'], astrometry['decerr']])
                # NOTE(review): bare except — any failure is reported as a key problem.
                except:
                    raise ValueError('Astrometry keys not recognized. Please provide dictionary or table or pandas dataframe with\
                column names "sep,seperr,pa,paerr,dates" or "ra,raerr,dec,decerr,dates"')
            else:
                raise ValueError('Astrometry keys not recognized. Please provide dictionary or table or pandas dataframe with\
                column names "sep,seperr,pa,paerr,dates" or "ra,raerr,dec,decerr,dates"')
        # Check if user supplied rv:
        self.use_user_rv = False
        if user_rv is not None:
            # set user rv flag to true:
            self.use_user_rv = True
            try:
                # set attributes; multiply rv by -1 due to difference in coordinate systems:
                self.user_rv = np.array([user_rv['rv']*-1,user_rv['rverr']])
                self.user_rv_dates = np.array(user_rv['rv_dates'])
            # NOTE(review): bare except — any failure is reported as a key problem.
            except:
                raise ValueError('RV keys not recognized. Please use column names "rv,rverr,rv_dates"')
        self.catalog = catalog
        # Get Gaia measurements, compute needed constraints, and add to object:
        self.PrepareConstraints(catalog=self.catalog,inflateFactor=inflateProperMotionError)
    def edr3ToICRF(self,pmra,pmdec,ra,dec,G):
        ''' Corrects for biases in proper motion. The function is from https://arxiv.org/pdf/2103.07432.pdf

        Args:
            pmra,pmdec (float): proper motion
            ra, dec (float): right ascension and declination
            G (float): G magnitude

        Written by <NAME>, 2021
        '''
        # Correction only applies to bright sources (G < 13).
        if G>=13:
            return pmra , pmdec
        # NOTE(review): redundant local import — numpy is already imported at module level.
        import numpy as np
        def sind(x):
            return np.sin(np.radians(x))
        def cosd(x):
            return np.cos(np.radians(x))
        # Magnitude-binned spin corrections (omegaX/Y/Z in micro-arcsec/yr) from the reference paper.
        table1="""
    0.0 9.0 9.0 9.5 9.5 10.0 10.0 10.5 10.5 11.0 11.0 11.5 11.5 11.75 11.75 12.0 12.0 12.25 12.25 12.5 12.5 12.75 12.75 13.0
    18.4 33.8 -11.3 14.0 30.7 -19.4 12.8 31.4 -11.8 13.6 35.7 -10.5 16.2 50.0 2.1 19.4 59.9 0.2 21.8 64.2 1.0 17.7 65.6 -1.9 21.3 74.8 2.1 25.7 73.6 1.0 27.3 76.6 0.5
    34.9 68.9 -2.9 """
        table1 = np.fromstring(table1,sep=" ").reshape((12,5)).T
        Gmin = table1[0]
        Gmax = table1[1]
        #pick the appropriate omegaXYZ for the source's magnitude:
        omegaX = table1[2][(Gmin<=G)&(Gmax>G)][0]
        omegaY = table1[3][(Gmin<=G)&(Gmax>G)][0]
        omegaZ = table1[4][(Gmin<=G)&(Gmax>G)][0]
        pmraCorr = -1*sind(dec)*cosd(ra)*omegaX -sind(dec)*sind(ra)*omegaY + cosd(dec)*omegaZ
        pmdecCorr = sind(ra)*omegaX -cosd(ra)*omegaY
        # Convert correction from micro-arcsec/yr to mas/yr before subtracting.
        return pmra-pmraCorr/1000., pmdec-pmdecCorr/1000.
    def PrepareConstraints(self, rv=False, catalog='gaiaedr3.gaia_source', inflateFactor=1.):
        '''Retrieves parameters for both objects from Gaia EDR3 archive and computes system attributes,
        and assigns them to the Fitter object class.

        Args:
            rv (bool): flag for handling the presence or absence of RV measurements for both objects \
                in EDR3. Gets set to True if both objects have Gaia RV measurements. Default = False
            catalog (str): name of Gaia catalog to query. Default = 'gaiaedr3.gaia_source'
            inflateFactor (flt): Factor by which to inflate the errors on Gaia proper motions to \
                account for improper uncertainty estimates. Default = 1.0

        Written by <NAME>, 2020
        '''
        # Third-party dependency; imported locally so the module loads without astroquery installed.
        from astroquery.gaia import Gaia
        deg_to_mas = 3600000.
        mas_to_deg = 1./3600000.
        # Retrieve astrometric solution from Gaia EDR3
        job = Gaia.launch_job("SELECT * FROM "+catalog+" WHERE source_id = "+str(self.sourceid1))
        j = job.get_results()
        job = Gaia.launch_job("SELECT * FROM "+catalog+" WHERE source_id = "+str(self.sourceid2))
        k = job.get_results()
        if catalog == 'gaiadr2.gaia_source':
            # Retrieve RUWE from RUWE catalog for both sources and add to object state:
            job = Gaia.launch_job("SELECT * FROM gaiadr2.ruwe WHERE source_id = "+str(self.sourceid1))
            jruwe = job.get_results()
            job = Gaia.launch_job("SELECT * FROM gaiadr2.ruwe WHERE source_id = "+str(self.sourceid2))
            kruwe = job.get_results()
            self.ruwe1 = jruwe['ruwe'][0]
            self.ruwe2 = kruwe['ruwe'][0]
        else:
            # EDR3 contains ruwe in the main catalog:
            self.ruwe1 = j['ruwe'][0]
            self.ruwe2 = k['ruwe'][0]
        # Check RUWE for both objects and warn if too high:
        if self.ruwe1>1.2 or self.ruwe2>1.2:
            print('''WARNING: RUWE for one or more of your solutions is greater than 1.2. This indicates
            that the source might be an unresolved binary or experiencing acceleration
            during the observation. Orbit fit results may not be trustworthy.''')
        # reference epoch:
        self.ref_epoch = j['ref_epoch'][0]
        # parallax:
        self.plx1 = [j[0]['parallax']*u.mas, j[0]['parallax_error']*u.mas]
        self.plx2 = [k[0]['parallax']*u.mas, k[0]['parallax_error']*u.mas]
        # RA/DEC
        self.RA1 = [j[0]['ra']*u.deg, j[0]['ra_error']*mas_to_deg*u.deg]
        self.RA2 = [k[0]['ra']*u.deg, k[0]['ra_error']*mas_to_deg*u.deg]
        self.Dec1 = [j[0]['dec']*u.deg, j[0]['dec_error']*mas_to_deg*u.deg]
        self.Dec2 = [k[0]['dec']*u.deg, k[0]['dec_error']*mas_to_deg*u.deg]
        # Proper motions; corrected for the bright-star frame-rotation bias first:
        pmRACorrected1,pmDecCorrected1 = self.edr3ToICRF(j[0]['pmra'],j[0]['pmdec'],j[0]['ra'],j[0]['dec'],j[0]["phot_g_mean_mag"])
        pmRACorrected2,pmDecCorrected2 = self.edr3ToICRF(k[0]['pmra'],k[0]['pmdec'],k[0]['ra'],k[0]['dec'],k[0]["phot_g_mean_mag"])
        self.pmRA1 = [pmRACorrected1*u.mas/u.yr, j[0]['pmra_error']*u.mas/u.yr*inflateFactor]
        self.pmRA2 = [pmRACorrected2*u.mas/u.yr, k[0]['pmra_error']*u.mas/u.yr*inflateFactor]
        self.pmDec1 = [pmDecCorrected1*u.mas/u.yr, j[0]['pmdec_error']*u.mas/u.yr*inflateFactor]
        self.pmDec2 = [pmDecCorrected2*u.mas/u.yr, k[0]['pmdec_error']*u.mas/u.yr*inflateFactor]
        # See if both objects have RV's in DR2:
        if catalog == 'gaiaedr3.gaia_source':
            key = 'dr2_radial_velocity'
            error_key = 'dr2_radial_velocity_error'
        elif catalog == 'gaiadr2.gaia_source':
            key = 'radial_velocity'
            error_key = 'radial_velocity_error'
        if type(k[0][key]) == np.float64 and type(j[0][key]) == np.float64 or type(k[0][key]) == np.float32 and type(j[0][key]) == np.float32:
            rv = True
            self.rv1 = [j[0][key]*u.km/u.s,j[0][error_key]*u.km/u.s]
            self.rv2 = [k[0][key]*u.km/u.s,k[0][error_key]*u.km/u.s]
            rv1 = MonteCarloIt(self.rv1)
            rv2 = MonteCarloIt(self.rv2)
            self.rv = [ -np.mean(rv2-rv1) , np.std(rv2-rv1) ]  # km/s
            # negative to reflect change in coordinate system from RV measurements to lofti
            # pos RV = towards observer in this coord system
        else:
            self.rv = [0,0]
        # weighted mean of parallax values:
        # NOTE(review): np.average weights here are the parallax *errors*, so the noisier
        # measurement gets more weight; inverse-variance weights (1/err**2) are conventional — confirm intent.
        plx = np.average([self.plx1[0].value,self.plx2[0].value], weights = [self.plx1[1].value,self.plx2[1].value])
        plxerr = np.max([self.plx1[1].value,self.plx2[1].value])
        self.plx = [plx,plxerr]  # mas
        self.distance = distance(*self.plx)  # pc
        # Compute separations of component 2 relative to 1:
        r1 = MonteCarloIt(self.RA1)
        r2 = MonteCarloIt(self.RA2)
        d1 = MonteCarloIt(self.Dec1)
        d2 = MonteCarloIt(self.Dec2)
        # RA offset scaled by cos(mean Dec) to get an on-sky angle:
        ra = (r2*deg_to_mas - r1*deg_to_mas) * np.cos(np.radians(np.mean([self.Dec1[0].value,self.Dec2[0].value])))
        dec = ((d2 - d1)*u.deg).to(u.mas).value
        self.deltaRA = [np.mean(ra),np.std(ra)]  # mas
        self.deltaDec = [np.mean(dec),np.std(dec)]  # mas
        # compute relative proper motion:
        pr1 = MonteCarloIt(self.pmRA1)
        pr2 = MonteCarloIt(self.pmRA2)
        pd1 = MonteCarloIt(self.pmDec1)
        pd2 = MonteCarloIt(self.pmDec2)
        pmRA = [np.mean(pr2 - pr1), np.std(pr2-pr1)]  # mas/yr
        pmDec = [np.mean(pd2 - pd1), np.std(pd2 - pd1)]  # mas/yr
        self.pmRA = masyr_to_kms(pmRA,self.plx)  # km/s
        self.pmDec = masyr_to_kms(pmDec,self.plx)  # km/s
        # Compute separation/position angle:
        r, p = to_polar(r1,r2,d1,d2)
        self.sep = tuple([np.mean(r).value, np.std(r).value])  # mas
        self.pa = tuple([np.mean(p).value, np.std(p).value])  # deg
        self.sep_au = tuple([((self.sep[0]/1000)*self.distance[0]), ((self.sep[1]/1000)*self.distance[0])])
        self.sep_km = tuple([ self.sep_au[0]*u.au.to(u.km) , self.sep_au[1]*u.au.to(u.km)])
        # compute total velocities:
        if rv:
            # 3D velocity when relative RV is available.
            self.total_vel = [ add_in_quad([self.pmRA[0],self.pmDec[0],self.rv[0]]) ,
                               add_in_quad([self.pmRA[1],self.pmDec[1],self.rv[1]]) ]  # km/s
            self.total_planeofsky_vel = [ add_in_quad([self.pmRA[0],self.pmDec[0]]) ,
                                          add_in_quad([self.pmRA[1],self.pmDec[1]]) ]  # km/s
        else:
            self.total_vel = [ add_in_quad([self.pmRA[0],self.pmDec[0]]) ,
                               add_in_quad([self.pmRA[1],self.pmDec[1]]) ]  # km/s
            self.total_planeofsky_vel = self.total_vel.copy()  # km/s
        # compute deltamag:
        self.deltaGmag = j[0]['phot_g_mean_mag'] - k[0]['phot_g_mean_mag']
class FitOrbit(object):
    ''' Object for performing an orbit fit. Takes attributes from Fitter class.
    ex: orbits = FitOrbit(fitterobject)
    Args:
        fitterobject (Fitter object): Fitter object initialized from the Fitter class
        write_stats (bool): If True, write out summary statistics of orbit sample at \
            conclusion of fit. Default = True.
        write_results (bool): If True, write out the fit results to a pickle file \
            in addition to the text file created during the fit. Default = True.
        deltaRA, deltaDec (flt): relative separation in RA and Dec directions, in mas
        pmRA, pmDec (flt): relative proper motion in RA/Dec directions in km s^-1
        rv (flt, optional): relative RV of 2 relative to 1, if both are present in Gaia EDR3
        mtot_init (flt): initial total system mass in Msun from user input
        distance (flt): distance of system in pc, computed from Gaia parallax using method of Bailer-Jones et. al 2018.
        sep (flt): separation vector in mas
        pa (flt): postion angle of separation vector in degrees from North
        ref_epoch (flt): epoch of the measurement, 2016.0 for Gaia EDR3 and 2015.5 for Gaia DR2.
        Norbits (int): number of desired orbit samples
        write_stats (bool): if True, write summary of sample statistics to human-readable file at end of run. Default = True
        write_results (bool): if True, write out current state of sample orbits in pickle file in periodic intervals during \
            run, and again at the end of the run. RECOMMENDED. Default = True
        results_filename (str): name of file for saving pickled results to disk. If not supplied, \
            defaul name is FitResults.y.mo.d.h.m.s.pkl, saved in same directory as fit was run.
        stats_filename (str): name of file for saving human-readable file of stats of sample results. If not supplied, \
            defaul name is FitResults.Stats.y.mo.d.h.m.s.pkl, saved in same directory as fit was run.
        run_time (flt): run time for the last fit. astropy units object
    Written by <NAME>, 2020
    '''
    def __init__(self, fitterobject, write_stats = True, write_results = True, python_version=False, \
                    use_pm_cross_term = False, corr_coeff = None):
        # establish fit parameters by copying them off the Fitter object:
        self.deltaRA = fitterobject.deltaRA
        self.deltaDec = fitterobject.deltaDec
        self.pmRA = fitterobject.pmRA
        self.pmDec = fitterobject.pmDec
        self.rv = fitterobject.rv
        self.mtot_init = fitterobject.mtot
        self.distance = fitterobject.distance
        self.sep = fitterobject.sep
        self.pa = fitterobject.pa
        self.ref_epoch = fitterobject.ref_epoch
        self.Norbits = fitterobject.Norbits
        self.write_results = write_results
        self.write_stats = write_stats
        self.results_filename = fitterobject.results_filename
        self.stats_filename = fitterobject.stats_filename
        self.astrometry = fitterobject.astrometry
        # user-supplied astrometry is optional; only copy it if present:
        if self.astrometry:
            self.astrometric_ra = fitterobject.astrometric_ra
            self.astrometric_dec = fitterobject.astrometric_dec
            self.astrometric_dates = fitterobject.astrometric_dates
        # user-supplied radial velocities are optional as well:
        self.use_user_rv = fitterobject.use_user_rv
        if self.use_user_rv:
            self.user_rv = fitterobject.user_rv
            self.user_rv_dates = fitterobject.user_rv_dates
        # run orbit fitter immediately on construction:
        self.fitorbit(python_fitOFTI=python_version, use_pm_cross_term = use_pm_cross_term, corr_coeff = corr_coeff)
    def fitorbit(self, save_results_every_X_loops = 100, python_fitOFTI=False, use_pm_cross_term = False, corr_coeff = None):
        '''Run the OFTI fitting run on the Fitter object. Called when FitOrbit object
        is created.
        Args:
            save_results_every_X_loops (int): on every Xth loop, save status of the \
                orbit sample arrays to a pickle file, if write_results = True (Default)
            python_fitOFTI (bool): If True, fit using python only without using C Kepler's equation solver. Default = False
            use_pm_cross_term (bool): If True, include the proper motion correlation cross term in the Chi^2 computation \
                Default = False
            corr_coeff (flt): proper motion correlation coefficient; used only when use_pm_cross_term = True
        Written by <NAME>, 2020
        '''
        # write header:
        print('Saving orbits in',self.results_filename)
        k = open(self.results_filename, 'w')
        output_file_header = '# sma [arcsec] period [yrs] orbit phase t_0 [yr] ecc incl [deg]\
 argp [deg] lan [deg] m_tot [Msun] dist [pc] chi^2 ln(prob) ln(randn)'
        k.write(output_file_header + "\n")
        k.close()
        import time as tm
        ########### Perform initial run to get initial chi-squared: #############
        # Draw random orbits:
        #parameters = a,T,const,to,e,i,w,O,m1,dist
        numSamples = 10000
        parameters_init = draw_samples(numSamples, self.mtot_init, self.distance, self.ref_epoch)
        # Compute positions and velocities:
        if(python_fitOFTI):
            X,Y,Z,Xdot,Ydot,Zdot,Xddot,Yddot,Zddot,parameters=calc_OFTI(parameters_init,self.ref_epoch,self.sep,self.pa)
        else:
            # C solver path: rows 0-8 of returnArray are positions/velocities,
            # rows 9+ are the (scaled and rotated) orbital parameters.
            returnArray = np.zeros((19,numSamples))
            returnArray = calcOFTI_C(parameters_init,self.ref_epoch,self.sep,self.pa,returnArray.copy())
            X,Y,Z,Xdot,Ydot,Zdot,Xddot,Yddot,Zddot = returnArray[0:9]
            parameters = returnArray[9:]
        # Compute chi squared against the Gaia observables; include RV row only when available:
        if self.rv[0] != 0:
            model = np.array([Y,X,Ydot,Xdot,Zdot])
            data = np.array([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec, self.rv])
        else:
            model = np.array([Y,X,Ydot,Xdot])
            data = np.array([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec])
        chi2 = ComputeChi2(data,model)
        if use_pm_cross_term:
            # correlated-proper-motion cross term (rows 2/3 are pmRA/pmDec):
            chi2 -= ( 2 * corr_coeff * (data[2][0] - model[2]) * (data[3][0] - model[3]) ) / (data[2][1] * data[3][1])
        if self.astrometry:
            p = parameters.copy()
            a,T,const,to,e,i,w,O,m1,dist = p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9]
            chi2_astr = np.zeros(10000)
            # Calculate predicted positions at astr observation dates for each orbit:
            for j in range(self.astrometric_ra.shape[1]):
                # for each date, compute XYZ for each 10000 trial orbit. We can
                # skip scale and rotate because that was accomplished in the calc_OFTI call above.
                X1,Y1,Z1,E1 = calc_XYZ(a,T,to,e,i,w,O,self.astrometric_dates[j])
                # Place astrometry into data array where: data[0][0]=ra obs, data[0][1]=ra err, etc:
                data = np.array([self.astrometric_ra[:,j], self.astrometric_dec[:,j]])
                # place corresponding predicited positions at that date for each trial orbit in arcsec:
                model = np.array([Y1*1000,X1*1000])
                # compute chi2 for trial orbits at that date and add to the total chi2 sum:
                chi2_astr += ComputeChi2(data,model)
            chi2 = chi2 + chi2_astr
        if self.use_user_rv:
            p = parameters.copy()
            a,T,const,to,e,i,w,O,m1,dist = p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9]
            chi2_rv = np.zeros(10000)
            for j in range(self.user_rv.shape[1]):
                # compute ecc anomaly at that date:
                X1,Y1,Z1,E1 = calc_XYZ(a,T,to,e,i,w,O,self.user_rv_dates[j])
                # compute velocities at that ecc anom:
                Xdot,Ydot,Zdot = calc_velocities(a,T,to,e,i,w,O,dist,E1)
                # compute chi2:
                chi2_rv += ComputeChi2(np.array([self.user_rv[:,j]]),np.array([Zdot]))
            chi2 = chi2 + chi2_rv
        print('inital chi min',np.nanmin(chi2))
        self.chi_min = np.nanmin(chi2)
        # Accept/reject:
        accepted, lnprob, lnrand = AcceptOrReject(chi2,self.chi_min)
        # count number accepted:
        number_orbits_accepted = np.size(accepted)
        # tack on chi2, log probability, log random unif number to parameters array:
        parameters = np.concatenate((parameters,chi2[None,:],lnprob[None,:],lnrand[None,:]), axis = 0)
        # transpose:
        parameters=np.transpose(parameters)
        # write results to file:
        k = open(self.results_filename, 'a')
        for params in parameters[accepted]:
            string = ' '.join([str(p) for p in params])
            k.write(string + "\n")
        k.close()
        ###### start loop ########
        # initialize:
        loop_count = 0
        start=tm.time()
        while number_orbits_accepted < self.Norbits:
            # Draw random orbits:
            numSamples = 10000
            parameters_init = draw_samples(numSamples, self.mtot_init, self.distance, self.ref_epoch)
            # Compute positions and velocities and new parameters array with scaled and rotated values:
            if(python_fitOFTI):
                X,Y,Z,Xdot,Ydot,Zdot,Xddot,Yddot,Zddot,parameters=calc_OFTI(parameters_init,self.ref_epoch,self.sep,self.pa)
            else:
                returnArray = np.zeros((19,numSamples))
                returnArray = calcOFTI_C(parameters_init,self.ref_epoch,self.sep,self.pa,returnArray.copy())
                X,Y,Z,Xdot,Ydot,Zdot,Xddot,Yddot,Zddot = returnArray[0:9]
                parameters = returnArray[9:]
                returnArray = None
            # compute chi2 for orbits using Gaia observations:
            if self.rv[0] != 0:
                model = np.array([Y,X,Ydot,Xdot,Zdot])
                data = np.array([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec, self.rv])
            else:
                model = np.array([Y,X,Ydot,Xdot])
                data = np.array([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec])
            chi2 = ComputeChi2(data,model)
            if use_pm_cross_term:
                # BUG FIX: the initial-run cross term above is scaled by corr_coeff,
                # but this in-loop copy omitted it, so the loop used a different
                # chi^2 definition than the initial run. Apply the same factor here.
                chi2 -= ( 2 * corr_coeff * (data[2][0] - model[2]) * (data[3][0] - model[3]) ) / (data[2][1] * data[3][1])
            # add user astrometry if given:
            if self.astrometry:
                p = parameters.copy()
                a,T,const,to,e,i,w,O,m1,dist = p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9]
                chi2_astr = np.zeros(10000)
                # Calculate predicted positions at astr observation dates for each orbit:
                for j in range(self.astrometric_ra.shape[1]):
                    # for each date, compute XYZ for each 10000 trial orbit. We can
                    # skip scale and rotate because that was accomplished in the calc_OFTI call above.
                    X1,Y1,Z1,E1 = calc_XYZ(a,T,to,e,i,w,O,self.astrometric_dates[j])
                    # Place astrometry into data array where: data[0][0]=ra obs, data[0][1]=ra err, etc:
                    data = np.array([self.astrometric_ra[:,j], self.astrometric_dec[:,j]])
                    # place corresponding predicited positions at that date for each trial orbit:
                    model = np.array([Y1*1000,X1*1000])
                    # compute chi2 for trial orbits at that date and add to the total chi2 sum:
                    chi2_astr += ComputeChi2(data,model)
                chi2 = chi2 + chi2_astr
            # add user rv if given:
            if self.use_user_rv:
                p = parameters.copy()
                a,T,const,to,e,i,w,O,m1,dist = p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9]
                chi2_rv = np.zeros(10000)
                for j in range(self.user_rv.shape[1]):
                    # compute ecc anomaly at that date:
                    X1,Y1,Z1,E1 = calc_XYZ(a,T,to,e,i,w,O,self.user_rv_dates[j])
                    # compute velocities at that ecc anom:
                    Xdot,Ydot,Zdot = calc_velocities(a,T,to,e,i,w,O,dist,E1)
                    # compute chi2:
                    chi2_rv += ComputeChi2(np.array([self.user_rv[:,j]]),np.array([Zdot]))
                chi2 = chi2 + chi2_rv
            # Accept/reject:
            accepted, lnprob, lnrand = AcceptOrReject(chi2,self.chi_min)
            if np.size(accepted) == 0:
                pass
            else:
                # count num accepted
                p = parameters.copy()
                a,T,const,to,e,i,w,O,m1,dist = p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9]
                # NOTE(review): sampleResults is computed but never used below;
                # kept as-is in case calc_XYZ has side effects — TODO confirm and remove.
                sampleResults = calc_XYZ(a,T,to,e,i/180*np.pi,w/180*np.pi,O/180*np.pi,2016.0)
                number_orbits_accepted += np.size(accepted)
                parameters = np.concatenate((parameters,chi2[None,:],lnprob[None,:],lnrand[None,:]), axis = 0)
                parameters=np.transpose(parameters)
                k = open(self.results_filename, 'a')
                for params in parameters[accepted]:
                    string = ' '.join([str(p) for p in params])
                    k.write(string + "\n")
                k.close()
            if np.nanmin(chi2) < self.chi_min:
                # If there is a new min chi2:
                self.chi_min = np.nanmin(chi2)
                #print('found new chi min:',self.chi_min)
                # re-evaluate to accept/reject with new chi_min:
                if number_orbits_accepted != 0:
                    dat = np.loadtxt(open(self.results_filename,"r"),delimiter=' ',ndmin=2)
                    lnprob = -(dat[:,10]-self.chi_min)/2.0
                    dat[:,11] = lnprob
                    accepted_retest = np.where(lnprob > dat[:,12])
                    q = open(self.results_filename, 'w')
                    q.write(output_file_header + "\n")
                    for data in dat[accepted_retest]:
                        string = ' '.join([str(d) for d in data])
                        q.write(string + "\n")
                    q.close()
                    dat2 = np.loadtxt(open(self.results_filename,"r"),delimiter=' ',ndmin=2)
                    number_orbits_accepted=dat2.shape[0]
            loop_count += 1
            #print('loop count',loop_count)
            update_progress(number_orbits_accepted,self.Norbits)
        # one last accept/reject with final chi_min value:
        dat = np.loadtxt(open(self.results_filename,"r"),delimiter=' ',ndmin=2)
        lnprob = -(dat[:,10]-self.chi_min)/2.0
        dat[:,11] = lnprob
        accepted_retest = np.where(lnprob > dat[:,12])
        q = open(self.results_filename, 'w')
        q.write(output_file_header + "\n")
        for data in dat[accepted_retest]:
            string = ' '.join([str(d) for d in data])
            q.write(string + "\n")
        q.close()
        # when finished, upload results and store in object:
        dat = np.loadtxt(open(self.results_filename,"r"),delimiter=' ',ndmin=2)
        number_orbits_accepted=dat.shape[0]
        print('Final Norbits:', number_orbits_accepted)
        # intialise results object and store accepted orbits:
        if self.rv[0] != 0:
            self.results = Results(orbits = dat, limit_lan = False, limit_aop = False)
        else:
            self.results = Results(orbits = dat, limit_lan = True, limit_aop = False)
        self.results.Update(self.results.orbits)
        # pickle dump the results attribute:
        if self.write_results:
            self.results.SaveResults(self.results_filename.replace(".txt", ".pkl"), write_text_file = False)
        stop = tm.time()
        self.results.run_time = (stop - start)*u.s
        # compute stats and write to file:
        self.results.stats = Stats(orbits = self.results.orbits, write_to_file = self.write_stats, filename = self.stats_filename)
class Results(object):
    '''A class for storing and manipulating the results of the orbit fit.
    Args:
        orbits (Norbits x 13 array): array of accepted orbits from \
            OFTI fit in the same order as the following attributes
        sma (1 x Norbits array): semi-major axis in arcsec
        period (1 x Norbits array): period in years
        orbit_fraction (1 x Norbits array): fraction of orbit past periastron \
            passage the observation (2016) occured on. Values: [0,1)
        t0 (1 x Norbits array): date of periastron passage in decimal years
        ecc (1 x Norbits array): eccentricity
        inc (1 x Norbits array): inclination relative to plane of the sky in deg
        aop (1 x Norbits array): arguement of periastron in deg
        lan (1 x Norbits array): longitude of ascending node in deg
        mtot (1 x Norbits array): total system mass in Msun
        distance (1 x Norbits array): distance to system in parsecs
        chi2 (1 x Norbits array): chi^2 value for the orbit
        lnprob (1 x Norbits array): log probability of orbit
        lnrand (1 x Norbits array): log of random "dice roll" for \
            orbit acceptance
        limit_aop, limit_lan (bool): In the absence of radial velocity info, \
            there is a degeneracy between arg of periastron and long of ascending \
            node. Common practice is to limit one to the interval [0,180] deg. \
            By default, lofti limits lan to this interval if rv = False. The user can \
            choose to limit aop instead by setting limit_aop = True, limit_lan = False. \
            The orbits[:,6] (aop) and orbits[:,7] (lan) arrays preserve the original values. \
    Written by <NAME>, 2020
    '''
    def __init__(self, orbits = [], limit_aop = False, limit_lan = True):
        # NOTE: the mutable default is never mutated here, so it is safe.
        self.orbits = orbits
        self.limit_lan = limit_lan
        self.limit_aop = limit_aop
    def Update(self, orbits):
        '''Take elements of the "orbits" attribute and populate
        the orbital element attributes
        Args:
            orbits (arr): orbits array from Results class
        Written by <NAME>, 2020
        '''
        self.sma = orbits[:,0]
        self.period = orbits[:,1]
        self.orbit_fraction = orbits[:,2]
        self.t0 = orbits[:,3]
        self.ecc = orbits[:,4]
        self.inc = orbits[:,5]
        self.aop = orbits[:,6]
        if self.limit_aop:
            self.aop = limit_to_180deg(self.aop)
        # wrap lan to [0,360) before optionally limiting to [0,180):
        self.lan = orbits[:,7] % 360
        if self.limit_lan:
            self.lan = limit_to_180deg(self.lan)
        self.mtot = orbits[:,8]
        self.distance = orbits[:,9]
        self.chi2 = orbits[:,10]
        self.lnprob = orbits[:,11]
        self.lnrand = orbits[:,12]
    def SaveResults(self, filename, write_text_file = False, text_filename = None):
        '''Save the orbits and orbital parameters attributes in a pickle file
        Args:
            filename (str): filename for pickle file
            write_text_file (bool): if True, also write out the accepted orbits to a \
                human readable text file
            text_filename (bool): if write_to_text = True, specifify filename for text file
        Written by <NAME>, 2020
        '''
        pickle.dump(self, open( filename, "wb" ) )
        # write results to file:
        if write_text_file:
            k = open(text_filename, 'a')
            for params in self.orbits:
                string = ' '.join([str(p) for p in params])
                k.write(string + "\n")
            k.close()
    def LoadResults(self, filename, append = False):
        '''Read in the orbits and orbital parameters attributes from a pickle file
        Args:
            filename (str): filename of pickle file to load
            append (bool): if True, append read in orbit samples to another Results \
                object. Default = False.
        Written by <NAME>, 2020
        '''
        # SECURITY NOTE: pickle.load executes arbitrary code from the file;
        # only load result files you created yourself.
        results_in = pickle.load( open( filename, "rb" ) )
        if append == False:
            self.orbits = results_in.orbits
            self.Update(self.orbits)
        else:
            self.orbits = np.vstack((self.orbits,results_in.orbits))
            self.Update(self.orbits)
    # plotting results:
    def PlotHists(self):
        '''Plot 1-d histograms of orbital elements 'sma','ecc','inc','aop','lan','t0' from fit results.
        Written by <NAME>, 2020
        '''
        # BUG FIX: original tested `len(self.sma < 50)`, which is the length of a
        # boolean array (== len(self.sma)) and is truthy for any non-empty sample,
        # so bins was always 50 and the 'fd' branch was unreachable. The intended
        # test compares the sample size to 50:
        if len(self.sma) < 50:
            bins = 50
        else:
            bins = 'fd'
        fig = plt.figure(figsize=(30, 5.5))
        params = np.array([self.sma,self.ecc,self.inc,self.aop,self.lan,self.t0])
        names = np.array(['sma','ecc','inc','aop','lan','t0'])
        for i in range(len(params)):
            ax = plt.subplot2grid((1,len(params)), (0,i))
            plt.hist(params[i],bins=bins,edgecolor='none',alpha=0.8)
            plt.tick_params(axis='both', left=False, top=False, right=False, bottom=True, \
                labelleft=False, labeltop=False, labelright=False, labelbottom=True)
            plt.xticks(rotation=45, fontsize = 20)
            plt.xlabel(names[i], fontsize = 25)
        plt.tight_layout()
        return fig
    def PlotOrbits(self, color = True, colorbar = True, ref_epoch = 2016.0, size = 100, plot3d = False, cmap = 'viridis',xlim=False,ylim=False):
        '''Plot a random selection of orbits from the sample in the plane of the sky.
        Args:
            color (bool): if True, plot orbit tracks using a colormap scale to orbit fraction (phase) \
                past observation date. If False, orbit tracks will be black. Default = True
            colorbar (bool): if True and color = True, plot colorbar for orbit phase
            ref_epoch (flt): reference epoch for drawing orbits. Default = 2016.0
            size (int): Number of orbits to plot. Default = 100
            plot3d (bool): If True, return a plot of orbits in 3D space. Default = False
            cmap (str): colormap for orbit phase plot
        Written by <NAME>, 2020
        '''
        # Random selection of orbits to plot:
        if len(self.sma) > size:
            # if there are more orbits than desired size, randomly select orbits from
            # the posterior sample:
            ind = np.random.choice(range(0,len(self.sma)),replace=False,size=size)
        else:
            # if there are fewer orbits than desired size, take all of them:
            ind = np.random.choice(range(0,len(self.sma)),replace=False,size=len(self.sma))
        from numpy import tan, arctan, sqrt, cos, sin, arccos
        # label for colormap axis:
        colorlabel = 'Phase'
        # create figure:
        fig = plt.figure(figsize = (7.5, 6.))
        plt.grid(ls=':')
        # invert X axis for RA:
        plt.gca().invert_xaxis()
        if plot3d:
            # Make 3d axis object:
            ax = fig.add_subplot(111, projection='3d')
            # plot central star:
            ax.scatter(0,0,0,color='orange',marker='*',s=300,zorder=10)
            ax.set_zlabel('Z (")',fontsize=20)
        else:
            # plot central star:
            plt.scatter(0,0,color='orange',marker='*',s=300,zorder=10)
        # For each orbit in the random selection from the posterior samples:
        for a,T,to,e,i,w,O in zip(self.sma[ind],self.period[ind],self.t0[ind],self.ecc[ind],np.radians(self.inc[ind]),\
                        np.radians(self.aop[ind]),np.radians(self.lan[ind])):
            # define an array of times along orbit:
            times = np.linspace(ref_epoch,ref_epoch+T,5000)
            X,Y,Z = np.array([]),np.array([]),np.array([])
            E = np.array([])
            # Compute eccentric anomaly at each time via Kepler's equation:
            for t in times:
                n = (2*np.pi)/T
                M = n*(t-to)
                nextE = [danby_solve(eccentricity_anomaly, varM,vare, 0.001) for varM,vare in zip([M],[e])]
                E = np.append(E,nextE)
            # convert eccentric anomaly to sky-plane positions (vectorized over E):
            r1 = a*(1.-e*cos(E))
            f1 = sqrt(1.+e)*sin(E/2.)
            f2 = sqrt(1.-e)*cos(E/2.)
            f = 2.*np.arctan2(f1,f2)
            r = (a*(1.-e**2))/(1.+(e*cos(f)))
            X1 = r * ( cos(O)*cos(w+f) - sin(O)*sin(w+f)*cos(i) )
            Y1 = r * ( sin(O)*cos(w+f) + cos(O)*sin(w+f)*cos(i) )
            Z1 = r * sin(w+f) * sin(i)
            X,Y,Z = np.append(X,X1),np.append(Y,Y1),np.append(Z,Z1)
            # Plot the X,Y(Z) positions:
            if not plot3d:
                if color:
                    plt.scatter(Y,X,c=((times-ref_epoch)/T),cmap=cmap,s=3,lw=0)
                    plt.gca().set_aspect('equal', adjustable='datalim')
                else:
                    plt.plot(Y,X, color='black',alpha=0.3)
                    plt.gca().set_aspect('equal', adjustable='datalim')
            if plot3d:
                from mpl_toolkits.mplot3d import Axes3D
                if color:
                    ax.scatter(Y,X,Z,c=((times-ref_epoch)/T),cmap=cmap,s=3,lw=0)
                else:
                    ax.plot(Y,X,Z, color='black',alpha=0.3)
        # plot colorbar:
        if not plot3d:
            if color:
                if colorbar == True:
                    cb = plt.colorbar().set_label(colorlabel, fontsize=20)
                    plt.gca().tick_params(labelsize=14)
        plt.ylabel('Dec (")',fontsize=20)
        plt.xlabel('RA (")',fontsize=20)
        plt.gca().tick_params(labelsize=14)
        if(xlim):
            plt.xlim(xlim)
        if(ylim):
            plt.ylim(ylim)
        return fig
    def PlotSepPA(self, ref_epoch = 2016.0, size = 100, timespan = [20,20], orbitcolor = 'skyblue'):
        '''Plot a random selection of orbits from the sample in separation and position angle as
        a function of time.
        Args:
            ref_epoch (flt): reference epoch for drawing orbits. Default = 2016.0
            size (int): Number of orbits to plot. Default = 100
            timespan (tuple, int): number of years before [0] and after [1] the ref epoch to \
                plot sep and pa
            orbitcolor (str): color to use to plot the orbits
        Written by <NAME>, 2020
        '''
        # Random selection of orbits to plot:
        if len(self.sma) > size:
            # if there are more orbits than desired size, randomly select orbits from
            # the posterior sample:
            ind = np.random.choice(range(0,len(self.sma)),replace=False,size=size)
        else:
            # if there are fewer orbits than desired size, take all of them:
            ind = np.random.choice(range(0,len(self.sma)),replace=False,size=len(self.sma))
        from numpy import tan, arctan, sqrt, cos, sin, arccos
        # make figure
        fig = plt.figure(figsize = (8, 10))
        # define subplots:
        plt.subplot(2,1,1)
        plt.gca().tick_params(labelsize=14)
        plt.grid(ls=':')
        # define times to compute sep/pa:
        tmin,tmax = ref_epoch - timespan[0],ref_epoch + timespan[1]
        t = np.linspace(tmin,tmax,2000)
        date_ticks = np.arange(tmin,tmax,10)
        # for each selected orbit from the sample:
        for a,T,to,e,i,w,O in zip(self.sma[ind],self.period[ind],self.t0[ind],self.ecc[ind],np.radians(self.inc[ind]),\
                        np.radians(self.aop[ind]),np.radians(self.lan[ind])):
            X = np.array([])
            Y = np.array([])
            # compute X,Y at each time point:
            X1,Y1 = orbits_for_plotting(a,T,to,e,i,w,O,t)
            X = np.append(X, X1)
            Y = np.append(Y,Y1)
            # compute sep:
            r=np.sqrt((X**2)+(Y**2))
            # plot sep in mas:
            plt.plot(t,r*1000,color=orbitcolor,alpha=0.5)
        plt.ylabel(r'$\rho$ (mas)',fontsize=20)
        # next suplot:
        plt.subplot(2,1,2)
        plt.grid(ls=':')
        # for each selected orbit from the sample:
        for a,T,to,e,i,w,O in zip(self.sma[ind],self.period[ind],self.t0[ind],self.ecc[ind],np.radians(self.inc[ind]),\
                        np.radians(self.aop[ind]),np.radians(self.lan[ind])):
            X = np.array([])
            Y = np.array([])
            X1,Y1 = orbits_for_plotting(a,T,to,e,i,w,O,t)
            X = np.append(X, X1)
            Y = np.append(Y,Y1)
            # compute pa:
            theta=np.arctan2(X,-Y)
            theta=(np.degrees(theta)+270.)%360
            # plot it:
            plt.plot(t,theta,color=orbitcolor,alpha=0.5)
        plt.ylabel(r'P.A. (deg)',fontsize=19)
        plt.xlabel('Years',fontsize=19)
        plt.gca().tick_params(labelsize=14)
        plt.tight_layout()
        return fig
class Stats(object):
    '''Summary statistics for every parameter of an orbit-fit sample.
    For each orbital parameter a StatsSubclass is attached, so a stat is
    reached as stats.param.stat.
    Examples:
        stats.sma.mean = mean of semimajor axis
        stats.ecc.ci68 = 68% confidence interval for eccentricity
        stats.aop.std = standard deviation of arg of periastron
    Args:
        orbits (Norbits x 13 array): array of accepted orbits from \
            OFTI fit in the same order as the following attributes
        param.mean (flt): mean of parameter computed using np.mean
        param.median (flt): np.median of parameter
        param.mode (flt): mode of parameter
        param.std (flt): standard deviation from np.std
        param.ci68 (tuple,flt): 68% minimum credible interval of form (lower bound, upper bound)
        param.ci95 (tuple,flt): 95% minimum credible interval
        write_to_file (bool): If True, write stats to a human-readbale text file.
        filename (str): filename for saving stats file. If not supplied, default \
            name is FitResults.Stats.y.mo.d.h.m.s.pkl, saved in same directory as fit was run.
    Written by <NAME>, 2020
    '''
    def __init__(self, orbits = [], write_to_file = False, filename = None):
        self.orbits = orbits
        # Column layout of the orbits array: (attribute name, human-readable label).
        # One StatsSubclass per column is attached as an attribute.
        layout = [('sma', 'sma'), ('period', 'period'),
                  ('orbit_fraction', 'orbit fraction'), ('t0', 't0'),
                  ('ecc', 'ecc'), ('inc', 'inc'), ('aop', 'aop'),
                  ('lan', 'lan'), ('mtot', 'mtot'), ('distance', 'distance')]
        for col, (attr, _) in enumerate(layout):
            setattr(self, attr, StatsSubclass(self.orbits[:, col]))
        if write_to_file:
            # Fall back to a timestamped default filename when none was given:
            if not filename:
                filename = 'FitResults.Stats.'+time.strftime("%Y.%m.%d.%H.%M.%S")+'.txt'
            out = open(filename, 'w')
            out.write('Parameter Mean Median Mode Std 68% Min Cred Int 95% Min Cred Int' + "\n")
            # One formatted row per parameter, in column order:
            for attr, label in layout:
                out.write(make_parameter_string(getattr(self, attr), label) + "\n")
            out.close()
class StatsSubclass(Stats):
    '''Container holding the summary statistics of a single parameter array.
    Args:
        array (arr): array for which to compute statistics
    '''
    def __init__(self, array):
        # compute_statistics returns (mean, median, mode, std, ci68, ci95):
        summary = compute_statistics(array)
        (self.mean, self.median, self.mode,
         self.std, self.ci68, self.ci95) = summary
| [
"numpy.radians",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.hist",
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"numpy.array",
"numpy.arctan2",
"numpy.nanmin",
"numpy.sin",
"numpy.arange",
"numpy.mean",
"numpy.where",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.max",
"n... | [((275, 308), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (298, 308), False, 'import warnings\n'), ((14422, 14529), 'numpy.average', 'np.average', (['[self.plx1[0].value, self.plx2[0].value]'], {'weights': '[self.plx1[1].value, self.plx2[1].value]'}), '([self.plx1[0].value, self.plx2[0].value], weights=[self.plx1[1].\n value, self.plx2[1].value])\n', (14432, 14529), True, 'import numpy as np\n'), ((14542, 14590), 'numpy.max', 'np.max', (['[self.plx1[1].value, self.plx2[1].value]'], {}), '([self.plx1[1].value, self.plx2[1].value])\n', (14548, 14590), True, 'import numpy as np\n'), ((24848, 24863), 'numpy.nanmin', 'np.nanmin', (['chi2'], {}), '(chi2)\n', (24857, 24863), True, 'import numpy as np\n'), ((25025, 25042), 'numpy.size', 'np.size', (['accepted'], {}), '(accepted)\n', (25032, 25042), True, 'import numpy as np\n'), ((25149, 25239), 'numpy.concatenate', 'np.concatenate', (['(parameters, chi2[None, :], lnprob[None, :], lnrand[None, :])'], {'axis': '(0)'}), '((parameters, chi2[None, :], lnprob[None, :], lnrand[None, :]\n ), axis=0)\n', (25163, 25239), True, 'import numpy as np\n'), ((25271, 25295), 'numpy.transpose', 'np.transpose', (['parameters'], {}), '(parameters)\n', (25283, 25295), True, 'import numpy as np\n'), ((25632, 25641), 'time.time', 'tm.time', ([], {}), '()\n', (25639, 25641), True, 'import time as tm\n'), ((31428, 31457), 'numpy.where', 'np.where', (['(lnprob > dat[:, 12])'], {}), '(lnprob > dat[:, 12])\n', (31436, 31457), True, 'import numpy as np\n'), ((32467, 32476), 'time.time', 'tm.time', ([], {}), '()\n', (32474, 32476), True, 'import time as tm\n'), ((37267, 37296), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(30, 5.5)'}), '(figsize=(30, 5.5))\n', (37277, 37296), True, 'import matplotlib.pyplot as plt\n'), ((37314, 37383), 'numpy.array', 'np.array', (['[self.sma, self.ecc, self.inc, self.aop, self.lan, self.t0]'], {}), '([self.sma, self.ecc, self.inc, self.aop, 
self.lan, self.t0])\n', (37322, 37383), True, 'import numpy as np\n'), ((37395, 37446), 'numpy.array', 'np.array', (["['sma', 'ecc', 'inc', 'aop', 'lan', 't0']"], {}), "(['sma', 'ecc', 'inc', 'aop', 'lan', 't0'])\n", (37403, 37446), True, 'import numpy as np\n'), ((37894, 37912), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (37910, 37912), True, 'import matplotlib.pyplot as plt\n'), ((39443, 39473), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(7.5, 6.0)'}), '(figsize=(7.5, 6.0))\n', (39453, 39473), True, 'import matplotlib.pyplot as plt\n'), ((39483, 39499), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'ls': '""":"""'}), "(ls=':')\n", (39491, 39499), True, 'import matplotlib.pyplot as plt\n'), ((42049, 42083), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Dec (")"""'], {'fontsize': '(20)'}), '(\'Dec (")\', fontsize=20)\n', (42059, 42083), True, 'import matplotlib.pyplot as plt\n'), ((42091, 42124), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""RA (")"""'], {'fontsize': '(20)'}), '(\'RA (")\', fontsize=20)\n', (42101, 42124), True, 'import matplotlib.pyplot as plt\n'), ((43471, 43498), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 10)'}), '(figsize=(8, 10))\n', (43481, 43498), True, 'import matplotlib.pyplot as plt\n'), ((43536, 43556), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (43547, 43556), True, 'import matplotlib.pyplot as plt\n'), ((43607, 43623), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'ls': '""":"""'}), "(ls=':')\n", (43615, 43623), True, 'import matplotlib.pyplot as plt\n'), ((43746, 43775), 'numpy.linspace', 'np.linspace', (['tmin', 'tmax', '(2000)'], {}), '(tmin, tmax, 2000)\n', (43757, 43775), True, 'import numpy as np\n'), ((43795, 43820), 'numpy.arange', 'np.arange', (['tmin', 'tmax', '(10)'], {}), '(tmin, tmax, 10)\n', (43804, 43820), True, 'import numpy as np\n'), ((44452, 44492), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\rho$ 
(mas)"""'], {'fontsize': '(20)'}), "('$\\\\rho$ (mas)', fontsize=20)\n", (44462, 44492), True, 'import matplotlib.pyplot as plt\n'), ((44524, 44544), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (44535, 44544), True, 'import matplotlib.pyplot as plt\n'), ((44551, 44567), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'ls': '""":"""'}), "(ls=':')\n", (44559, 44567), True, 'import matplotlib.pyplot as plt\n'), ((45190, 45227), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""P.A. (deg)"""'], {'fontsize': '(19)'}), "('P.A. (deg)', fontsize=19)\n", (45200, 45227), True, 'import matplotlib.pyplot as plt\n'), ((45236, 45268), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Years"""'], {'fontsize': '(19)'}), "('Years', fontsize=19)\n", (45246, 45268), True, 'import matplotlib.pyplot as plt\n'), ((45320, 45338), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (45336, 45338), True, 'import matplotlib.pyplot as plt\n'), ((15108, 15119), 'numpy.mean', 'np.mean', (['ra'], {}), '(ra)\n', (15115, 15119), True, 'import numpy as np\n'), ((15120, 15130), 'numpy.std', 'np.std', (['ra'], {}), '(ra)\n', (15126, 15130), True, 'import numpy as np\n'), ((15171, 15183), 'numpy.mean', 'np.mean', (['dec'], {}), '(dec)\n', (15178, 15183), True, 'import numpy as np\n'), ((15184, 15195), 'numpy.std', 'np.std', (['dec'], {}), '(dec)\n', (15190, 15195), True, 'import numpy as np\n'), ((15425, 15443), 'numpy.mean', 'np.mean', (['(pr2 - pr1)'], {}), '(pr2 - pr1)\n', (15432, 15443), True, 'import numpy as np\n'), ((15445, 15462), 'numpy.std', 'np.std', (['(pr2 - pr1)'], {}), '(pr2 - pr1)\n', (15451, 15462), True, 'import numpy as np\n'), ((15491, 15509), 'numpy.mean', 'np.mean', (['(pd2 - pd1)'], {}), '(pd2 - pd1)\n', (15498, 15509), True, 'import numpy as np\n'), ((15511, 15528), 'numpy.std', 'np.std', (['(pd2 - pd1)'], {}), '(pd2 - pd1)\n', (15517, 15528), True, 'import numpy as np\n'), ((22195, 22221), 'numpy.zeros', 'np.zeros', 
(['(19, numSamples)'], {}), '((19, numSamples))\n', (22203, 22221), True, 'import numpy as np\n'), ((22529, 22563), 'numpy.array', 'np.array', (['[Y, X, Ydot, Xdot, Zdot]'], {}), '([Y, X, Ydot, Xdot, Zdot])\n', (22537, 22563), True, 'import numpy as np\n'), ((22579, 22650), 'numpy.array', 'np.array', (['[self.deltaRA, self.deltaDec, self.pmRA, self.pmDec, self.rv]'], {}), '([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec, self.rv])\n', (22587, 22650), True, 'import numpy as np\n'), ((22685, 22713), 'numpy.array', 'np.array', (['[Y, X, Ydot, Xdot]'], {}), '([Y, X, Ydot, Xdot])\n', (22693, 22713), True, 'import numpy as np\n'), ((22730, 22792), 'numpy.array', 'np.array', (['[self.deltaRA, self.deltaDec, self.pmRA, self.pmDec]'], {}), '([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec])\n', (22738, 22792), True, 'import numpy as np\n'), ((23175, 23190), 'numpy.zeros', 'np.zeros', (['(10000)'], {}), '(10000)\n', (23183, 23190), True, 'import numpy as np\n'), ((24299, 24314), 'numpy.zeros', 'np.zeros', (['(10000)'], {}), '(10000)\n', (24307, 24314), True, 'import numpy as np\n'), ((24807, 24822), 'numpy.nanmin', 'np.nanmin', (['chi2'], {}), '(chi2)\n', (24816, 24822), True, 'import numpy as np\n'), ((36882, 36925), 'numpy.vstack', 'np.vstack', (['(self.orbits, results_in.orbits)'], {}), '((self.orbits, results_in.orbits))\n', (36891, 36925), True, 'import numpy as np\n'), ((37549, 37608), 'matplotlib.pyplot.hist', 'plt.hist', (['params[i]'], {'bins': 'bins', 'edgecolor': '"""none"""', 'alpha': '(0.8)'}), "(params[i], bins=bins, edgecolor='none', alpha=0.8)\n", (37557, 37608), True, 'import matplotlib.pyplot as plt\n'), ((37618, 37769), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'axis': '"""both"""', 'left': '(False)', 'top': '(False)', 'right': '(False)', 'bottom': '(True)', 'labelleft': '(False)', 'labeltop': '(False)', 'labelright': '(False)', 'labelbottom': '(True)'}), "(axis='both', left=False, top=False, right=False, bottom=\n True, 
labelleft=False, labeltop=False, labelright=False, labelbottom=True)\n", (37633, 37769), True, 'import matplotlib.pyplot as plt\n'), ((37799, 37835), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'rotation': '(45)', 'fontsize': '(20)'}), '(rotation=45, fontsize=20)\n', (37809, 37835), True, 'import matplotlib.pyplot as plt\n'), ((37850, 37883), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['names[i]'], {'fontsize': '(25)'}), '(names[i], fontsize=25)\n', (37860, 37883), True, 'import matplotlib.pyplot as plt\n'), ((39885, 39948), 'matplotlib.pyplot.scatter', 'plt.scatter', (['(0)', '(0)'], {'color': '"""orange"""', 'marker': '"""*"""', 's': '(300)', 'zorder': '(10)'}), "(0, 0, color='orange', marker='*', s=300, zorder=10)\n", (39896, 39948), True, 'import matplotlib.pyplot as plt\n'), ((40113, 40138), 'numpy.radians', 'np.radians', (['self.inc[ind]'], {}), '(self.inc[ind])\n', (40123, 40138), True, 'import numpy as np\n'), ((40161, 40186), 'numpy.radians', 'np.radians', (['self.aop[ind]'], {}), '(self.aop[ind])\n', (40171, 40186), True, 'import numpy as np\n'), ((40187, 40212), 'numpy.radians', 'np.radians', (['self.lan[ind]'], {}), '(self.lan[ind])\n', (40197, 40212), True, 'import numpy as np\n'), ((40287, 40330), 'numpy.linspace', 'np.linspace', (['ref_epoch', '(ref_epoch + T)', '(5000)'], {}), '(ref_epoch, ref_epoch + T, 5000)\n', (40298, 40330), True, 'import numpy as np\n'), ((40402, 40414), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (40410, 40414), True, 'import numpy as np\n'), ((42198, 42212), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xlim'], {}), '(xlim)\n', (42206, 42212), True, 'import matplotlib.pyplot as plt\n'), ((42243, 42257), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ylim'], {}), '(ylim)\n', (42251, 42257), True, 'import matplotlib.pyplot as plt\n'), ((43962, 43987), 'numpy.radians', 'np.radians', (['self.inc[ind]'], {}), '(self.inc[ind])\n', (43972, 43987), True, 'import numpy as np\n'), ((44010, 44035), 'numpy.radians', 'np.radians', 
(['self.aop[ind]'], {}), '(self.aop[ind])\n', (44020, 44035), True, 'import numpy as np\n'), ((44036, 44061), 'numpy.radians', 'np.radians', (['self.lan[ind]'], {}), '(self.lan[ind])\n', (44046, 44061), True, 'import numpy as np\n'), ((44080, 44092), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (44088, 44092), True, 'import numpy as np\n'), ((44109, 44121), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (44117, 44121), True, 'import numpy as np\n'), ((44242, 44258), 'numpy.append', 'np.append', (['X', 'X1'], {}), '(X, X1)\n', (44251, 44258), True, 'import numpy as np\n'), ((44275, 44291), 'numpy.append', 'np.append', (['Y', 'Y1'], {}), '(Y, Y1)\n', (44284, 44291), True, 'import numpy as np\n'), ((44332, 44356), 'numpy.sqrt', 'np.sqrt', (['(X ** 2 + Y ** 2)'], {}), '(X ** 2 + Y ** 2)\n', (44339, 44356), True, 'import numpy as np\n'), ((44398, 44448), 'matplotlib.pyplot.plot', 'plt.plot', (['t', '(r * 1000)'], {'color': 'orbitcolor', 'alpha': '(0.5)'}), '(t, r * 1000, color=orbitcolor, alpha=0.5)\n', (44406, 44448), True, 'import matplotlib.pyplot as plt\n'), ((44711, 44736), 'numpy.radians', 'np.radians', (['self.inc[ind]'], {}), '(self.inc[ind])\n', (44721, 44736), True, 'import numpy as np\n'), ((44759, 44784), 'numpy.radians', 'np.radians', (['self.aop[ind]'], {}), '(self.aop[ind])\n', (44769, 44784), True, 'import numpy as np\n'), ((44785, 44810), 'numpy.radians', 'np.radians', (['self.lan[ind]'], {}), '(self.lan[ind])\n', (44795, 44810), True, 'import numpy as np\n'), ((44829, 44841), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (44837, 44841), True, 'import numpy as np\n'), ((44858, 44870), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (44866, 44870), True, 'import numpy as np\n'), ((44945, 44961), 'numpy.append', 'np.append', (['X', 'X1'], {}), '(X, X1)\n', (44954, 44961), True, 'import numpy as np\n'), ((44978, 44994), 'numpy.append', 'np.append', (['Y', 'Y1'], {}), '(Y, Y1)\n', (44987, 44994), True, 'import numpy as np\n'), ((45038, 
45055), 'numpy.arctan2', 'np.arctan2', (['X', '(-Y)'], {}), '(X, -Y)\n', (45048, 45055), True, 'import numpy as np\n'), ((45137, 45184), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'theta'], {'color': 'orbitcolor', 'alpha': '(0.5)'}), '(t, theta, color=orbitcolor, alpha=0.5)\n', (45145, 45184), True, 'import matplotlib.pyplot as plt\n'), ((47352, 47485), 'numpy.array', 'np.array', (['[self.sma, self.period, self.orbit_fraction, self.t0, self.ecc, self.inc,\n self.aop, self.lan, self.mtot, self.distance]'], {}), '([self.sma, self.period, self.orbit_fraction, self.t0, self.ecc,\n self.inc, self.aop, self.lan, self.mtot, self.distance])\n', (47360, 47485), True, 'import numpy as np\n'), ((47511, 47614), 'numpy.array', 'np.array', (["['sma', 'period', 'orbit fraction', 't0', 'ecc', 'inc', 'aop', 'lan',\n 'mtot', 'distance']"], {}), "(['sma', 'period', 'orbit fraction', 't0', 'ecc', 'inc', 'aop',\n 'lan', 'mtot', 'distance'])\n", (47519, 47614), True, 'import numpy as np\n'), ((4688, 4736), 'numpy.sqrt', 'np.sqrt', (['(self.mass1err ** 2 + self.mass2err ** 2)'], {}), '(self.mass1err ** 2 + self.mass2err ** 2)\n', (4695, 4736), True, 'import numpy as np\n'), ((8026, 8074), 'numpy.array', 'np.array', (["[user_rv['rv'] * -1, user_rv['rverr']]"], {}), "([user_rv['rv'] * -1, user_rv['rverr']])\n", (8034, 8074), True, 'import numpy as np\n'), ((8109, 8138), 'numpy.array', 'np.array', (["user_rv['rv_dates']"], {}), "(user_rv['rv_dates'])\n", (8117, 8138), True, 'import numpy as np\n'), ((8954, 8967), 'numpy.radians', 'np.radians', (['x'], {}), '(x)\n', (8964, 8967), True, 'import numpy as np\n'), ((9016, 9029), 'numpy.radians', 'np.radians', (['x'], {}), '(x)\n', (9026, 9029), True, 'import numpy as np\n'), ((14141, 14158), 'numpy.std', 'np.std', (['(rv2 - rv1)'], {}), '(rv2 - rv1)\n', (14147, 14158), True, 'import numpy as np\n'), ((23720, 23785), 'numpy.array', 'np.array', (['[self.astrometric_ra[:, j], self.astrometric_dec[:, j]]'], {}), '([self.astrometric_ra[:, j], 
self.astrometric_dec[:, j]])\n', (23728, 23785), True, 'import numpy as np\n'), ((23912, 23944), 'numpy.array', 'np.array', (['[Y1 * 1000, X1 * 1000]'], {}), '([Y1 * 1000, X1 * 1000])\n', (23920, 23944), True, 'import numpy as np\n'), ((26180, 26206), 'numpy.zeros', 'np.zeros', (['(19, numSamples)'], {}), '((19, numSamples))\n', (26188, 26206), True, 'import numpy as np\n'), ((26588, 26622), 'numpy.array', 'np.array', (['[Y, X, Ydot, Xdot, Zdot]'], {}), '([Y, X, Ydot, Xdot, Zdot])\n', (26596, 26622), True, 'import numpy as np\n'), ((26642, 26713), 'numpy.array', 'np.array', (['[self.deltaRA, self.deltaDec, self.pmRA, self.pmDec, self.rv]'], {}), '([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec, self.rv])\n', (26650, 26713), True, 'import numpy as np\n'), ((26756, 26784), 'numpy.array', 'np.array', (['[Y, X, Ydot, Xdot]'], {}), '([Y, X, Ydot, Xdot])\n', (26764, 26784), True, 'import numpy as np\n'), ((26805, 26867), 'numpy.array', 'np.array', (['[self.deltaRA, self.deltaDec, self.pmRA, self.pmDec]'], {}), '([self.deltaRA, self.deltaDec, self.pmRA, self.pmDec])\n', (26813, 26867), True, 'import numpy as np\n'), ((27296, 27311), 'numpy.zeros', 'np.zeros', (['(10000)'], {}), '(10000)\n', (27304, 27311), True, 'import numpy as np\n'), ((28523, 28538), 'numpy.zeros', 'np.zeros', (['(10000)'], {}), '(10000)\n', (28531, 28538), True, 'import numpy as np\n'), ((29175, 29192), 'numpy.size', 'np.size', (['accepted'], {}), '(accepted)\n', (29182, 29192), True, 'import numpy as np\n'), ((29546, 29563), 'numpy.size', 'np.size', (['accepted'], {}), '(accepted)\n', (29553, 29563), True, 'import numpy as np\n'), ((29593, 29683), 'numpy.concatenate', 'np.concatenate', (['(parameters, chi2[None, :], lnprob[None, :], lnrand[None, :])'], {'axis': '(0)'}), '((parameters, chi2[None, :], lnprob[None, :], lnrand[None, :]\n ), axis=0)\n', (29607, 29683), True, 'import numpy as np\n'), ((29702, 29726), 'numpy.transpose', 'np.transpose', (['parameters'], {}), '(parameters)\n', (29714, 
29726), True, 'import numpy as np\n'), ((30001, 30016), 'numpy.nanmin', 'np.nanmin', (['chi2'], {}), '(chi2)\n', (30010, 30016), True, 'import numpy as np\n'), ((30110, 30125), 'numpy.nanmin', 'np.nanmin', (['chi2'], {}), '(chi2)\n', (30119, 30125), True, 'import numpy as np\n'), ((39540, 39549), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39547, 39549), True, 'import matplotlib.pyplot as plt\n'), ((40347, 40359), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (40355, 40359), True, 'import numpy as np\n'), ((40360, 40372), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (40368, 40372), True, 'import numpy as np\n'), ((40373, 40385), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (40381, 40385), True, 'import numpy as np\n'), ((40685, 40704), 'numpy.append', 'np.append', (['E', 'nextE'], {}), '(E, nextE)\n', (40694, 40704), True, 'import numpy as np\n'), ((40754, 40767), 'numpy.sqrt', 'sqrt', (['(1.0 + e)'], {}), '(1.0 + e)\n', (40758, 40767), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40765, 40777), 'numpy.sin', 'sin', (['(E / 2.0)'], {}), '(E / 2.0)\n', (40768, 40777), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40792, 40805), 'numpy.sqrt', 'sqrt', (['(1.0 - e)'], {}), '(1.0 - e)\n', (40796, 40805), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40803, 40815), 'numpy.cos', 'cos', (['(E / 2.0)'], {}), '(E / 2.0)\n', (40806, 40815), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40832, 40850), 'numpy.arctan2', 'np.arctan2', (['f1', 'f2'], {}), '(f1, f2)\n', (40842, 40850), True, 'import numpy as np\n'), ((41060, 41066), 'numpy.sin', 'sin', (['i'], {}), '(i)\n', (41063, 41066), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((41087, 41103), 'numpy.append', 'np.append', (['X', 'X1'], {}), '(X, X1)\n', (41096, 41103), True, 'import numpy as np\n'), ((41103, 41119), 'numpy.append', 'np.append', (['Y', 'Y1'], {}), '(Y, Y1)\n', (41112, 
41119), True, 'import numpy as np\n'), ((41119, 41135), 'numpy.append', 'np.append', (['Z', 'Z1'], {}), '(Z, Z1)\n', (41128, 41135), True, 'import numpy as np\n'), ((42132, 42141), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (42139, 42141), True, 'import matplotlib.pyplot as plt\n'), ((43563, 43572), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (43570, 43572), True, 'import matplotlib.pyplot as plt\n'), ((45276, 45285), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (45283, 45285), True, 'import matplotlib.pyplot as plt\n'), ((4963, 4997), 'time.strftime', 'time.strftime', (['"""%Y.%m.%d.%H.%M.%S"""'], {}), "('%Y.%m.%d.%H.%M.%S')\n", (4976, 4997), False, 'import time\n'), ((5059, 5093), 'time.strftime', 'time.strftime', (['"""%Y.%m.%d.%H.%M.%S"""'], {}), "('%Y.%m.%d.%H.%M.%S')\n", (5072, 5093), False, 'import time\n'), ((9394, 9424), 'numpy.fromstring', 'np.fromstring', (['table1'], {'sep': '""" """'}), "(table1, sep=' ')\n", (9407, 9424), True, 'import numpy as np\n'), ((14122, 14140), 'numpy.mean', 'np.mean', (['(rv2 - rv1)'], {}), '(rv2 - rv1)\n', (14129, 14140), True, 'import numpy as np\n'), ((14985, 15034), 'numpy.mean', 'np.mean', (['[self.Dec1[0].value, self.Dec2[0].value]'], {}), '([self.Dec1[0].value, self.Dec2[0].value])\n', (14992, 15034), True, 'import numpy as np\n'), ((15774, 15784), 'numpy.mean', 'np.mean', (['r'], {}), '(r)\n', (15781, 15784), True, 'import numpy as np\n'), ((15792, 15801), 'numpy.std', 'np.std', (['r'], {}), '(r)\n', (15798, 15801), True, 'import numpy as np\n'), ((15853, 15863), 'numpy.mean', 'np.mean', (['p'], {}), '(p)\n', (15860, 15863), True, 'import numpy as np\n'), ((15871, 15880), 'numpy.std', 'np.std', (['p'], {}), '(p)\n', (15877, 15880), True, 'import numpy as np\n'), ((16063, 16076), 'astropy.units.au.to', 'u.au.to', (['u.km'], {}), '(u.km)\n', (16070, 16076), True, 'import astropy.units as u\n'), ((16094, 16107), 'astropy.units.au.to', 'u.au.to', (['u.km'], {}), '(u.km)\n', (16101, 16107), 
True, 'import astropy.units as u\n'), ((24694, 24724), 'numpy.array', 'np.array', (['[self.user_rv[:, j]]'], {}), '([self.user_rv[:, j]])\n', (24702, 24724), True, 'import numpy as np\n'), ((24724, 24740), 'numpy.array', 'np.array', (['[Zdot]'], {}), '([Zdot])\n', (24732, 24740), True, 'import numpy as np\n'), ((27869, 27934), 'numpy.array', 'np.array', (['[self.astrometric_ra[:, j], self.astrometric_dec[:, j]]'], {}), '([self.astrometric_ra[:, j], self.astrometric_dec[:, j]])\n', (27877, 27934), True, 'import numpy as np\n'), ((28059, 28091), 'numpy.array', 'np.array', (['[Y1 * 1000, X1 * 1000]'], {}), '([Y1 * 1000, X1 * 1000])\n', (28067, 28091), True, 'import numpy as np\n'), ((30544, 30573), 'numpy.where', 'np.where', (['(lnprob > dat[:, 12])'], {}), '(lnprob > dat[:, 12])\n', (30552, 30573), True, 'import numpy as np\n'), ((41049, 41059), 'numpy.sin', 'sin', (['(w + f)'], {}), '(w + f)\n', (41052, 41059), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((41249, 41315), 'matplotlib.pyplot.scatter', 'plt.scatter', (['Y', 'X'], {'c': '((times - ref_epoch) / T)', 'cmap': 'cmap', 's': '(3)', 'lw': '(0)'}), '(Y, X, c=(times - ref_epoch) / T, cmap=cmap, s=3, lw=0)\n', (41260, 41315), True, 'import matplotlib.pyplot as plt\n'), ((41423, 41463), 'matplotlib.pyplot.plot', 'plt.plot', (['Y', 'X'], {'color': '"""black"""', 'alpha': '(0.3)'}), "(Y, X, color='black', alpha=0.3)\n", (41431, 41463), True, 'import matplotlib.pyplot as plt\n'), ((45074, 45091), 'numpy.degrees', 'np.degrees', (['theta'], {}), '(theta)\n', (45084, 45091), True, 'import numpy as np\n'), ((7071, 7120), 'numpy.array', 'np.array', (["[astrometry['ra'], astrometry['raerr']]"], {}), "([astrometry['ra'], astrometry['raerr']])\n", (7079, 7120), True, 'import numpy as np\n'), ((7164, 7215), 'numpy.array', 'np.array', (["[astrometry['dec'], astrometry['decerr']]"], {}), "([astrometry['dec'], astrometry['decerr']])\n", (7172, 7215), True, 'import numpy as np\n'), ((28946, 28976), 
'numpy.array', 'np.array', (['[self.user_rv[:, j]]'], {}), '([self.user_rv[:, j]])\n', (28954, 28976), True, 'import numpy as np\n'), ((28976, 28992), 'numpy.array', 'np.array', (['[Zdot]'], {}), '([Zdot])\n', (28984, 28992), True, 'import numpy as np\n'), ((40729, 40735), 'numpy.cos', 'cos', (['E'], {}), '(E)\n', (40732, 40735), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40887, 40893), 'numpy.cos', 'cos', (['f'], {}), '(f)\n', (40890, 40893), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40919, 40925), 'numpy.cos', 'cos', (['O'], {}), '(O)\n', (40922, 40925), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40926, 40936), 'numpy.cos', 'cos', (['(w + f)'], {}), '(w + f)\n', (40929, 40936), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40953, 40959), 'numpy.cos', 'cos', (['i'], {}), '(i)\n', (40956, 40959), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40985, 40991), 'numpy.sin', 'sin', (['O'], {}), '(O)\n', (40988, 40991), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40992, 41002), 'numpy.cos', 'cos', (['(w + f)'], {}), '(w + f)\n', (40995, 41002), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((41019, 41025), 'numpy.cos', 'cos', (['i'], {}), '(i)\n', (41022, 41025), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((47678, 47712), 'time.strftime', 'time.strftime', (['"""%Y.%m.%d.%H.%M.%S"""'], {}), "('%Y.%m.%d.%H.%M.%S')\n", (47691, 47712), False, 'import time\n'), ((40937, 40943), 'numpy.sin', 'sin', (['O'], {}), '(O)\n', (40940, 40943), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((40944, 40954), 'numpy.sin', 'sin', (['(w + f)'], {}), '(w + f)\n', (40947, 40954), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((41003, 41009), 'numpy.cos', 'cos', (['O'], {}), '(O)\n', (41006, 41009), False, 'from numpy import tan, arctan, sqrt, cos, sin, 
arccos\n'), ((41010, 41020), 'numpy.sin', 'sin', (['(w + f)'], {}), '(w + f)\n', (41013, 41020), False, 'from numpy import tan, arctan, sqrt, cos, sin, arccos\n'), ((41329, 41338), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (41336, 41338), True, 'import matplotlib.pyplot as plt\n'), ((41482, 41491), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (41489, 41491), True, 'import matplotlib.pyplot as plt\n'), ((41934, 41948), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (41946, 41948), True, 'import matplotlib.pyplot as plt\n'), ((42004, 42013), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (42011, 42013), True, 'import matplotlib.pyplot as plt\n'), ((6241, 6260), 'numpy.mean', 'np.mean', (['astr_ra[i]'], {}), '(astr_ra[i])\n', (6248, 6260), True, 'import numpy as np\n'), ((6327, 6345), 'numpy.std', 'np.std', (['astr_ra[i]'], {}), '(astr_ra[i])\n', (6333, 6345), True, 'import numpy as np\n'), ((6492, 6512), 'numpy.mean', 'np.mean', (['astr_dec[i]'], {}), '(astr_dec[i])\n', (6499, 6512), True, 'import numpy as np\n'), ((6579, 6598), 'numpy.std', 'np.std', (['astr_dec[i]'], {}), '(astr_dec[i])\n', (6585, 6598), True, 'import numpy as np\n')] |
from src.data_curation.dataset_manager import get_meta
import pandas as pd
def view_all():
    """Return the full dataset metadata table (a pandas DataFrame)."""
    return get_meta()
def get_docs(id):
    """Load the ``docs`` payload of the dataset row with the given id.

    The parameter name ``id`` shadows the builtin but is kept for
    interface compatibility with existing callers.
    """
    meta = get_meta()
    docs_json = meta[meta["id"] == id]["docs"].iloc[0]
    return pd.read_json(docs_json)
return df
| [
"src.data_curation.dataset_manager.get_meta",
"pandas.read_json"
] | [((104, 114), 'src.data_curation.dataset_manager.get_meta', 'get_meta', ([], {}), '()\n', (112, 114), False, 'from src.data_curation.dataset_manager import get_meta\n'), ((144, 154), 'src.data_curation.dataset_manager.get_meta', 'get_meta', ([], {}), '()\n', (152, 154), False, 'from src.data_curation.dataset_manager import get_meta\n'), ((210, 228), 'pandas.read_json', 'pd.read_json', (['docs'], {}), '(docs)\n', (222, 228), True, 'import pandas as pd\n')] |
# -*- coding: utf-8 -*-
"""
Reading the django.db.backends.mysql.base source shows that Django does not
pool MySQL connections: every database operation opens a new connection and
closes it when the query finishes. Worse, setting the documented
CONN_MAX_AGE option (meant to reuse connections) causes each new connection
to stay open after its query instead of being closed, so under high
concurrency "too many connections" errors appear quickly. This module
therefore overrides the MySQL backend to provide real connection pooling.
"""
from django.core.exceptions import ImproperlyConfigured
import queue
import threading
try:
    import MySQLdb as Database
except ImportError as err:
    raise ImproperlyConfigured(
        'Error loading MySQLdb module.\n'
        'Did you install mysqlclient?'
    ) from err
from django.db.backends.mysql.base import *
from django.db.backends.mysql.base import DatabaseWrapper as _DatabaseWrapper
# Pool capacity per database alias when the DATABASES setting does not
# supply a DB_POOL_SIZE entry.
DEFAULT_DB_POOL_SIZE = 5
class DatabaseWrapper(_DatabaseWrapper):
    """
    Never set the CONN_MAX_AGE connection parameter when using this backend;
    otherwise connections are not promptly released back to the pool after
    use, which can exhaust (block) the connection pool.
    """
    # One ConnectPool per database alias, shared by all wrapper instances.
    connect_pools = {}
    # Resolved lazily from the DATABASES alias settings on first use.
    pool_size = None
    # Guards pool creation and connection release across threads.
    mutex = threading.Lock()

    def get_new_connection(self, conn_params):
        with self.mutex:
            # Read the DB_POOL_SIZE entry from the DATABASES settings dict.
            if not self.pool_size:
                self.pool_size = self.settings_dict.get('DB_POOL_SIZE') or DEFAULT_DB_POOL_SIZE
            if self.alias not in self.connect_pools:
                self.connect_pools[self.alias] = ConnectPool(conn_params, self.pool_size)
            return self.connect_pools[self.alias].get_connection()

    def _close(self):
        with self.mutex:
            # Override the default close: when a query finishes, release the
            # connection back to the pool instead of closing it.
            if self.connection is not None:
                with self.wrap_database_errors:
                    return self.connect_pools[self.alias].release_connection(self.connection)
class ConnectPool(object):
    """A minimal MySQL connection pool.

    Connections are created lazily until ``pool_size`` is reached; after
    that, callers block on the internal queue until another thread returns
    a connection via :meth:`release_connection`.
    """

    def __init__(self, conn_params, pool_size):
        self.conn_params = conn_params
        self.pool_size = pool_size
        self.connect_count = 0
        self.connects = queue.Queue()

    def get_connection(self):
        """Return a live connection, creating one if the pool has capacity."""
        if self.connect_count < self.pool_size:
            self.connect_count = self.connect_count + 1
            try:
                return Database.connect(**self.conn_params)
            except Exception:
                # Bug fix: roll the counter back so a failed connect does
                # not permanently shrink the pool's capacity.
                self.connect_count = self.connect_count - 1
                raise
        conn = self.connects.get()
        try:
            # Validate the pooled connection; removing this ping is faster,
            # but keeping it avoids handing out stale/dead connections.
            conn.ping()
        except Exception:
            conn = Database.connect(**self.conn_params)
        return conn

    def release_connection(self, conn):
        """Return a connection to the pool for reuse."""
        self.connects.put(conn)
| [
"threading.Lock",
"queue.Queue",
"django.core.exceptions.ImproperlyConfigured",
"MySQLdb.connect"
] | [((891, 907), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (905, 907), False, 'import threading\n'), ((444, 535), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['"""Error loading MySQLdb module.\nDid you install mysqlclient?"""'], {}), '(\n """Error loading MySQLdb module.\nDid you install mysqlclient?""")\n', (464, 535), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((1856, 1869), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (1867, 1869), False, 'import queue\n'), ((2024, 2060), 'MySQLdb.connect', 'Database.connect', ([], {}), '(**self.conn_params)\n', (2040, 2060), True, 'import MySQLdb as Database\n'), ((2214, 2250), 'MySQLdb.connect', 'Database.connect', ([], {}), '(**self.conn_params)\n', (2230, 2250), True, 'import MySQLdb as Database\n')] |
from toee import *
import char_class_utils
###################################################
def GetConditionName():
    """Name of the condition registered for this prestige class."""
    return "Archmage"
def GetSpellCasterConditionName():
    """Name of the spellcasting condition for this prestige class."""
    return "Archmage Spellcasting"
def GetCategory():
    """Category label shown in the class selection UI."""
    return "Core 3.5 Ed Prestige Classes"
def GetClassDefinitionFlags():
    """Class definition flags (CDF_CoreClass comes from the toee module)."""
    return CDF_CoreClass
def GetClassHelpTopic():
    """Help-system topic tag for this class."""
    return "TAG_ARCHMAGES"
# Engine stat enum identifying the Archmage class.
classEnum = stat_level_archmage
###################################################
# No bonus feats are granted by this class.
class_feats = {
}
# Skills that count as class skills for skill-point costs.
class_skills = (skill_alchemy, skill_concentration, skill_craft, skill_knowledge_all, skill_profession, skill_search, skill_spellcraft)
def IsEnabled():
    """Return 1 so the class is selectable in game."""
    return 1
def GetHitDieType():
    """Hit die size used for each class level (d4)."""
    return 4
def GetSkillPtsPerLevel():
    """Skill points gained per class level."""
    return 2
def GetBabProgression():
    """Base attack bonus progression (non-martial, from the toee module)."""
    return base_attack_bonus_type_non_martial
def IsFortSaveFavored():
    """Fortitude is not a favored save for this class."""
    return 0
def IsRefSaveFavored():
    """Reflex is not a favored save for this class."""
    return 0
def IsWillSaveFavored():
    """Will is the favored save for this class."""
    return 1
def GetSpellListType():
    """Spell list type (arcane, from the toee module)."""
    return spell_list_type_arcane
def GetSpellSourceType():
    """Spell source type (arcane, from the toee module)."""
    return spell_source_type_arcane
def IsClassSkill(skillEnum):
    """Return whether skillEnum is one of this class's class skills."""
    return char_class_utils.IsClassSkill(class_skills, skillEnum)
def IsClassFeat(featEnum):
    """Return whether featEnum is granted by this class."""
    return char_class_utils.IsClassFeat(class_feats, featEnum)
def GetClassFeats():
    """Return the (empty) mapping of feats granted per class level."""
    return class_feats
def IsAlignmentCompatible( alignment):
    """Any alignment may take this class; always return 1."""
    return 1
def CanCastArcaneLvl7(obj):
    """Return 1 if obj can cast 7th-level arcane spells, else 0.

    TODO: generalize to support other arcane spellcasting classes.
    """
    # Sorcerers reach 7th-level spells at class level 14, wizards at 13.
    if obj.stat_level_get(stat_level_sorcerer) >= 14:
        return 1
    if obj.stat_level_get(stat_level_wizard) >= 13:
        return 1
    # Fix: previously fell through and implicitly returned None; return 0
    # explicitly to match the 0/1 convention used throughout this file.
    return 0
def HasSpellFocusInTwoSchool( obj ):
    """Return 1 if obj has Spell Focus in at least two schools, else 0.

    Scans the contiguous run of Spell Focus feat enums (abjuration through
    transmutation) and counts how many of them the character has taken.
    """
    school_feats = range(feat_spell_focus_abjuration, feat_spell_focus_transmutation + 1)
    owned = [feat for feat in school_feats if obj.has_feat(feat)]
    if len(owned) >= 2:
        return 1
    return 0
def ObjMeetsPrereqs( obj ):
    """Prerequisite check for taking an Archmage level.

    Work in progress: the early return below disables the class for now.
    Only Spellcraft is checked for skill ranks because Escape Artist,
    Decipher Script and Knowledge (Arcana) are not implemented in ToEE.
    """
    return 0  # WIP: class disabled until prerequisites are finalized

    # --- unreachable until the WIP guard above is removed ---
    if obj.skill_ranks_get(skill_spellcraft) < 15:
        return 0
    if not obj.has_feat(feat_skill_focus_spellcraft):
        return 0
    if not CanCastArcaneLvl7(obj):
        return 0
    if not HasSpellFocusInTwoSchool(obj):
        return 0
    return 1
"char_class_utils.IsClassSkill",
"char_class_utils.IsClassFeat"
] | [((1043, 1097), 'char_class_utils.IsClassSkill', 'char_class_utils.IsClassSkill', (['class_skills', 'skillEnum'], {}), '(class_skills, skillEnum)\n', (1072, 1097), False, 'import char_class_utils\n'), ((1134, 1185), 'char_class_utils.IsClassFeat', 'char_class_utils.IsClassFeat', (['class_feats', 'featEnum'], {}), '(class_feats, featEnum)\n', (1162, 1185), False, 'import char_class_utils\n')] |
from django.contrib import admin
from parents.models import Guardian
admin.site.register(Guardian)
| [
"django.contrib.admin.site.register"
] | [((71, 100), 'django.contrib.admin.site.register', 'admin.site.register', (['Guardian'], {}), '(Guardian)\n', (90, 100), False, 'from django.contrib import admin\n')] |
import falcon.testing
import pytest
import json
from displacy_service.server import APP, MODELS
model = MODELS[0]
@pytest.fixture()
def api():
    # falcon's TestClient exercises the WSGI app in-process (no real server).
    return falcon.testing.TestClient(APP)
def test_deps(api):
    # POST /dep returns a dependency parse; we only check the token texts.
    result = api.simulate_post(
        path='/dep',
        body='{{"text": "This is a test.", "model": "{model}", "collapse_punctuation": false, "collapse_phrases": false}}'.format(model=model)
    )
    result = json.loads(result.text)
    words = [w['text'] for w in result['words']]
    assert words == ["This", "is", "a", "test", "."]
def test_ents(api):
    # POST /ent returns named entities with character offsets into the text.
    result = api.simulate_post(
        path='/ent',
        body='{{"text": "What a great company Google is.", "model": "{model}"}}'.format(model=model))
    ents = json.loads(result.text)
    assert ents == [
        {"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents(api):
    # POST /sents splits the text into sentence strings.
    sentences = api.simulate_post(
        path='/sents',
        body='{{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "{model}"}}'.format(model=model)
    )
    assert sentences.json == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
def test_sents_dep(api):
    # POST /sents_dep combines sentence splitting with a per-sentence
    # dependency parse; check sentences, token texts, and the full arc set.
    sentence_parse = api.simulate_post(
        path='/sents_dep',
        body='{{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "{model}", "collapse_punctuation": false, "collapse_phrases": false}}'.format(model=model)
    )
    sentences = [sp["sentence"] for sp in sentence_parse.json]
    assert sentences == [
        "This a test that should split into sentences!",
        "This is the second.",
        "Is this the third?",
    ]
    words = [[w["text"] for w in sp["dep_parse"]["words"]] for sp in sentence_parse.json]
    assert words == [
        ["This", "a", "test", "that", "should", "split", "into", "sentences", "!"],
        ["This", "is", "the", "second", "."],
        ["Is", "this", "the", "third", "?"],
    ]
    # Arc offsets are document-wide token indices, not per-sentence ones.
    arcs = [[arc for arc in sp['dep_parse']['arcs']] for sp in sentence_parse.json]
    assert arcs == [[{'start': 0, 'end': 2, 'label': 'det', 'text': 'This', 'dir': 'left'},
                     {'start': 1, 'end': 2, 'label': 'det', 'text': 'a', 'dir': 'left'},
                     {'start': 2, 'end': 2, 'label': 'ROOT', 'text': 'test', 'dir': 'root'},
                     {'start': 3, 'end': 5, 'label': 'nsubj', 'text': 'that', 'dir': 'left'},
                     {'start': 4, 'end': 5, 'label': 'aux', 'text': 'should', 'dir': 'left'},
                     {'start': 2, 'end': 5, 'label': 'relcl', 'text': 'split', 'dir': 'right'},
                     {'start': 5, 'end': 6, 'label': 'prep', 'text': 'into', 'dir': 'right'},
                     {'start': 6, 'end': 7, 'label': 'pobj', 'text': 'sentences', 'dir': 'right'},
                     {'start': 2, 'end': 8, 'label': 'punct', 'text': '!', 'dir': 'right'}],
                    [{'start': 9, 'end': 10, 'label': 'nsubj', 'text': 'This', 'dir': 'left'},
                     {'start': 10, 'end': 10, 'label': 'ROOT', 'text': 'is', 'dir': 'root'},
                     {'start': 11, 'end': 12, 'label': 'det', 'text': 'the', 'dir': 'left'},
                     {'start': 10, 'end': 12, 'label': 'attr', 'text': 'second', 'dir': 'right'},
                     {'start': 10, 'end': 13, 'label': 'punct', 'text': '.', 'dir': 'right'}],
                    [{'start': 14, 'end': 14, 'label': 'ROOT', 'text': 'Is', 'dir': 'root'},
                     {'start': 14, 'end': 15, 'label': 'nsubj', 'text': 'this', 'dir': 'right'},
                     {'start': 16, 'end': 17, 'label': 'det', 'text': 'the', 'dir': 'left'},
                     {'start': 14, 'end': 17, 'label': 'attr', 'text': 'third', 'dir': 'right'},
                     {'start': 14, 'end': 18, 'label': 'punct', 'text': '?', 'dir': 'right'}]]
@pytest.mark.parametrize('endpoint, expected_message', [
    ('/dep', 'Dependency parsing failed'),
    ('/ent', 'Text parsing failed'),
    ('/sents', 'Sentence tokenization failed'),
    ('/sents_dep', 'Sentence tokenization and Dependency parsing failed'),
])
def test_bad_model_error_handling(endpoint, expected_message, api):
    # Every endpoint must surface an endpoint-specific error title and the
    # underlying "can't find model" description for an unknown model name.
    response = api.simulate_post(
        path=endpoint,
        body='{"text": "Here is some text for testing.", "model": "fake_model"}'
    )
    assert expected_message == response.json['title']
    assert "Can't find model 'fake_model'." in response.json["description"]
| [
"pytest.fixture",
"pytest.mark.parametrize",
"json.loads"
] | [((120, 136), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (134, 136), False, 'import pytest\n'), ((3917, 4172), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""endpoint, expected_message"""', "[('/dep', 'Dependency parsing failed'), ('/ent', 'Text parsing failed'), (\n '/sents', 'Sentence tokenization failed'), ('/sents_dep',\n 'Sentence tokenization and Dependency parsing failed')]"], {}), "('endpoint, expected_message', [('/dep',\n 'Dependency parsing failed'), ('/ent', 'Text parsing failed'), (\n '/sents', 'Sentence tokenization failed'), ('/sents_dep',\n 'Sentence tokenization and Dependency parsing failed')])\n", (3940, 4172), False, 'import pytest\n'), ((427, 450), 'json.loads', 'json.loads', (['result.text'], {}), '(result.text)\n', (437, 450), False, 'import json\n'), ((741, 764), 'json.loads', 'json.loads', (['result.text'], {}), '(result.text)\n', (751, 764), False, 'import json\n')] |
#!/usr/bin/env python
import random
import argparse
def generate_passwords(password_file_path):
    """Write a Django-style settings file containing a random SECRET_KEY.

    The key is 50 characters drawn from lowercase letters, digits and the
    punctuation set ``_-!*`` using a cryptographically secure RNG.

    Args:
        password_file_path: Destination path of the generated file.
    """
    # Bug fix: the previous alphabet omitted 'w' ("...uvxyz") and listed
    # '0' twice ("01234567890"), reducing and biasing the key material.
    chars = 'abcdefghijklmnopqrstuvwxyz0123456789_-!*'
    rng = random.SystemRandom()
    secret_key = ''.join(rng.choice(chars) for _ in range(50))
    # Context manager guarantees the file is closed even on write errors.
    with open(password_file_path, 'w') as password_file:
        password_file.write("SECRET_KEY = '%s'\n" % secret_key)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Positional argument: destination path for the generated settings file.
    parser.add_argument('password_file_path', help='Where password file will be placed')
    args = parser.parse_args()
    generate_passwords(args.password_file_path)
| [
"random.SystemRandom",
"argparse.ArgumentParser"
] | [((412, 437), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (435, 437), False, 'import argparse\n'), ((228, 249), 'random.SystemRandom', 'random.SystemRandom', ([], {}), '()\n', (247, 249), False, 'import random\n')] |
# Generated by Django 3.2.5 on 2022-02-18 08:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds an optional link from a search card
    # template to the connection type it applies to. SET_NULL keeps the
    # template when the referenced connection type is deleted.

    dependencies = [
        ("dataset", "0001_initial"),
        ("cueSearch", "0004_auto_20220217_0217"),
    ]

    operations = [
        migrations.AddField(
            model_name="searchcardtemplate",
            name="connectionType",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="dataset.connectiontype",
            ),
        ),
    ]
| [
"django.db.models.ForeignKey"
] | [((426, 546), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""dataset.connectiontype"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='dataset.connectiontype')\n", (443, 546), False, 'from django.db import migrations, models\n')] |
from __future__ import annotations
from django.template.loader import render_to_string
from .conf import settings
def django_sys_indicator_tag() -> str:
    """Render the system-indicator banner HTML for the configured colour."""
    template = 'django_sys_indicator/system_indicator.html'
    palette = settings.SYSTEM_INDICATOR_COLORS
    try:
        fill, border = palette[settings.SYSTEM_INDICATOR_COLOR]
    except KeyError:
        # Unknown colour key configured -- fall back to red.
        fill, border = palette['red']
    context = {
        'label': settings.SYSTEM_INDICATOR_LABEL,
        'color': fill,
        'border_color': border,
    }
    return render_to_string(template, context)
| [
"django.template.loader.render_to_string"
] | [((484, 609), 'django.template.loader.render_to_string', 'render_to_string', (['template_name', "{'label': settings.SYSTEM_INDICATOR_LABEL, 'color': color, 'border_color':\n border_color}"], {}), "(template_name, {'label': settings.SYSTEM_INDICATOR_LABEL,\n 'color': color, 'border_color': border_color})\n", (500, 609), False, 'from django.template.loader import render_to_string\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
import os
# In[2]:
#function to get current directory
def getCurrentDirectory():
    """Return the entry names of the parent directory ('../') as a list."""
    parent = '../'
    return os.listdir(parent)
# In[3]:
#function to read csv file
def readCsvFile(path):
    """Load a CSV file into a DataFrame.

    low_memory=False reads the whole file at once so column dtypes are
    inferred consistently instead of per-chunk.
    """
    return pd.read_csv(path, low_memory=False)
# In[4]:
#function to filter Data
def filterData(data,column,value):
    """Return the rows of ``data`` whose ``column`` equals ``value``.

    Args:
        data: pandas DataFrame to filter.
        column: name of the column to test.
        value: scalar value the column must equal.

    Returns:
        A (possibly empty) DataFrame keeping the original index.
    """
    # Fix: the original local was named "filterData", shadowing the
    # function itself inside its own body.
    matching_rows = data.loc[data[column] == value]
    return matching_rows
# In[5]:
#function to get count of a value
def getCount(data, column, columnName):
    """Group *data* by *column* and return the per-group row counts.

    The result is a DataFrame containing the group keys plus a
    *columnName* column holding each group's size.
    """
    sizes = data.groupby(column).size()
    return pd.DataFrame({columnName: sizes}).reset_index()
# In[7]:
#function to sort
def sortValue(data, column, ascBoolean):
    """Return *data* sorted on *column*.

    *ascBoolean* selects ascending (True) or descending (False) order.
    """
    return data.sort_values(column, ascending=ascBoolean)
# In[ ]:
| [
"os.listdir",
"pandas.read_csv"
] | [((193, 210), 'os.listdir', 'os.listdir', (['"""../"""'], {}), "('../')\n", (203, 210), False, 'import os\n'), ((321, 356), 'pandas.read_csv', 'pd.read_csv', (['path'], {'low_memory': '(False)'}), '(path, low_memory=False)\n', (332, 356), True, 'import pandas as pd\n')] |
import sys
from collections.abc import Mapping  # NOTE(review): appears unused in this script — confirm before removing
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
# Use a service account
cred = credentials.Certificate('./service-account-key.json')
firebase_admin.initialize_app(cred)
db = firestore.client()
# Read document ids from stdin (one per line) and print each document's
# "totalEarnings" field from the 'db_pilot_test' collection as "id, value".
for row in sys.stdin:
    id = row.strip()  # NOTE: shadows the builtin id(); kept as-is
    sub = db.document(u'db_pilot_test', id).get()
    if sub.exists:
        #print (f'it exists, {sub.id}')
        #print(f'{sub.id}, {sub.to_dict().get("totalEarnings") or 0}')
        # Probe first: to_dict() can return None for some snapshots, which
        # raises AttributeError on .get() — report those separately.
        try:
            sub.to_dict().get("totalEarnings")
        except AttributeError:
            print (f'Attribute error, {sub.id}')
        else:
            print(f'{sub.id}, {sub.to_dict().get("totalEarnings")}')
    #else:
        #print (f'nope it does not, {sub.id}')
| [
"firebase_admin.firestore.client",
"firebase_admin.credentials.Certificate",
"firebase_admin.initialize_app"
] | [((180, 233), 'firebase_admin.credentials.Certificate', 'credentials.Certificate', (['"""./service-account-key.json"""'], {}), "('./service-account-key.json')\n", (203, 233), False, 'from firebase_admin import credentials\n'), ((234, 269), 'firebase_admin.initialize_app', 'firebase_admin.initialize_app', (['cred'], {}), '(cred)\n', (263, 269), False, 'import firebase_admin\n'), ((277, 295), 'firebase_admin.firestore.client', 'firestore.client', ([], {}), '()\n', (293, 295), False, 'from firebase_admin import firestore\n')] |
import tensorflow as tf
import numpy as np
import math
# Parameter
order_num=2;
class Program:
    """Builds a differentiable 'program' tensor from per-object parameter
    channels (Theta) combined according to a program-order tensor.

    NOTE(review): only a fixed combination of the six Theta channels is
    implemented (the four p_1..p_4 products blended by program_order); a
    generic tf.cond-based interpreter is kept below as dead code because,
    per the author's comment, tf.cond caused problems.
    """
    def __init__(self,sess,state_dim,obj_num,fea_size,Theta,program_order,postfix):
        # Cache configuration/handles, then compile the program tensor
        # immediately for the given Theta / program_order.
        self.sess = sess;
        self.state_dim = state_dim;
        self.fea_size=fea_size;
        self.obj_num=obj_num;
        self.order_num=order_num;
        self.Theta=Theta;
        self.program_order=program_order;
        self.postfix=postfix;
        self.p = self.compile_order();
    def compile_order(self):
        """Build the program tensor from self.Theta / self.program_order.

        Side effect: self.Theta is reshaped to [-1, 6, obj_num] and then
        unstacked into a list of 6 per-channel tensors.
        """
        self.Theta=tf.reshape(self.Theta,[-1,self.obj_num,6]);
        self.Theta=tf.transpose(self.Theta,perm=[0,2,1]);
        self.Theta=tf.unstack(self.Theta,6,1);
        # temporary ordering
        # p_i = channel product + bias channel Theta[5]
        p_1=tf.multiply(self.Theta[0],self.Theta[3]);
        p_1=p_1+self.Theta[5];
        p_2=tf.multiply(self.Theta[1],self.Theta[3]);
        p_2=p_2+self.Theta[5];
        p_3=tf.multiply(self.Theta[0],self.Theta[4]);
        p_3=p_3+self.Theta[5];
        p_4=tf.multiply(self.Theta[1],self.Theta[4]);
        p_4=p_4+self.Theta[5];
        # Blend the four candidate programs, weighted by the (broadcast)
        # program_order components.
        program_order2=tf.unstack(self.program_order,(self.obj_num-1),1);
        p=tf.multiply(tf.stack([program_order2[0]]*(self.obj_num),1),p_1)+tf.multiply(tf.stack([program_order2[1]]*(self.obj_num),1),p_2)+tf.multiply(tf.stack([program_order2[2]]*(self.obj_num),1),p_3)+tf.multiply(tf.stack([program_order2[3]]*(self.obj_num),1),p_4);
        # Currently tf.cond makes problems
        """
        program_order2=tf.unstack(self.program_order,self.order_num,1);
        for i in range(self.order_num):
            program_order2[i]=tf.unstack(program_order2[i],3,1);
        for i in range(self.order_num):
            for k in range(9):
                for l in range(k+1,9):
                    # not=1, and=2, or=3
                    p=tf.cond(tf.equal(program_order2[i][0],1)&tf.equal(program_order2[i][1],k),lambda:1-self.Theta[k],lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],1)&tf.equal(program_order2[i][1],-1),lambda:1-p,lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],2)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],l),lambda:tf.multiply(self.Theta[k],self.Theta[l]),lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],2)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],-1),lambda:tf.multiply(self.Theta[k],p),lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],3)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],l),lambda:self.Theta[k]+self.Theta[l]-tf.multiply(self.Theta[k],self.Theta[l]),lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],3)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],l),lambda:self.Theta[k]+p-tf.multiply(self.Theta[k],p),lambda:p);
        """
        return p;
    def run_target_nets(self,Theta,program_order):
        """Same combination as compile_order, but over explicitly supplied
        Theta / program_order tensors (for target networks) without
        mutating any instance state.
        """
        Theta=tf.reshape(Theta,[-1,self.obj_num,6]);
        Theta=tf.transpose(Theta,perm=[0,2,1]);
        Theta=tf.unstack(Theta,6,1);
        # temporary ordering
        p_1=tf.multiply(Theta[0],Theta[3]);
        p_1=p_1+Theta[5];
        p_2=tf.multiply(Theta[1],Theta[3]);
        p_2=p_2+Theta[5];
        p_3=tf.multiply(Theta[0],Theta[4]);
        p_3=p_3+Theta[5];
        p_4=tf.multiply(Theta[1],Theta[4]);
        p_4=p_4+Theta[5];
        program_order2=tf.unstack(program_order,(self.obj_num-1),1);
        p=tf.multiply(tf.stack([program_order2[0]]*(self.obj_num),1),p_1)+tf.multiply(tf.stack([program_order2[1]]*(self.obj_num),1),p_2)+tf.multiply(tf.stack([program_order2[2]]*(self.obj_num),1),p_3)+tf.multiply(tf.stack([program_order2[3]]*(self.obj_num),1),p_4);
        # Currently tf.cond makes problems
        """
        # Currently tf.cond makes problems
        program_order2=tf.unstack(program_order,self.order_num,1);
        for i in range(self.order_num):
            program_order2[i]=tf.unstack(program_order2[i],3,1);
        for i in range(self.order_num):
            for k in range(9):
                for l in range(k+1,9):
                    # not=1, and=2, or=3
                    p=tf.cond(tf.equal(program_order2[i][0],1)&tf.equal(program_order2[i][1],k),lambda:1-Theta[k],lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],1)&tf.equal(program_order2[i][1],-1),lambda:1-p,lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],2)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],l),lambda:tf.multiply(Theta[k],Theta[l]),lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],2)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],-1),lambda:tf.multiply(Theta[k],p),lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],3)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],l),lambda:Theta[k]+Theta[l]-tf.multiply(Theta[k],Theta[l]),lambda:p);
                    p=tf.cond(tf.equal(program_order2[i][0],3)&tf.equal(program_order2[i][1],k)&tf.equal(program_order2[i][2],l),lambda:Theta[k]+p-tf.multiply(Theta[k],p),lambda:p);
        """
        return p;
| [
"tensorflow.unstack",
"tensorflow.transpose",
"tensorflow.multiply",
"tensorflow.reshape",
"tensorflow.stack"
] | [((482, 527), 'tensorflow.reshape', 'tf.reshape', (['self.Theta', '[-1, self.obj_num, 6]'], {}), '(self.Theta, [-1, self.obj_num, 6])\n', (492, 527), True, 'import tensorflow as tf\n'), ((541, 581), 'tensorflow.transpose', 'tf.transpose', (['self.Theta'], {'perm': '[0, 2, 1]'}), '(self.Theta, perm=[0, 2, 1])\n', (553, 581), True, 'import tensorflow as tf\n'), ((595, 623), 'tensorflow.unstack', 'tf.unstack', (['self.Theta', '(6)', '(1)'], {}), '(self.Theta, 6, 1)\n', (605, 623), True, 'import tensorflow as tf\n'), ((656, 697), 'tensorflow.multiply', 'tf.multiply', (['self.Theta[0]', 'self.Theta[3]'], {}), '(self.Theta[0], self.Theta[3])\n', (667, 697), True, 'import tensorflow as tf\n'), ((733, 774), 'tensorflow.multiply', 'tf.multiply', (['self.Theta[1]', 'self.Theta[3]'], {}), '(self.Theta[1], self.Theta[3])\n', (744, 774), True, 'import tensorflow as tf\n'), ((810, 851), 'tensorflow.multiply', 'tf.multiply', (['self.Theta[0]', 'self.Theta[4]'], {}), '(self.Theta[0], self.Theta[4])\n', (821, 851), True, 'import tensorflow as tf\n'), ((887, 928), 'tensorflow.multiply', 'tf.multiply', (['self.Theta[1]', 'self.Theta[4]'], {}), '(self.Theta[1], self.Theta[4])\n', (898, 928), True, 'import tensorflow as tf\n'), ((975, 1026), 'tensorflow.unstack', 'tf.unstack', (['self.program_order', '(self.obj_num - 1)', '(1)'], {}), '(self.program_order, self.obj_num - 1, 1)\n', (985, 1026), True, 'import tensorflow as tf\n'), ((2674, 2714), 'tensorflow.reshape', 'tf.reshape', (['Theta', '[-1, self.obj_num, 6]'], {}), '(Theta, [-1, self.obj_num, 6])\n', (2684, 2714), True, 'import tensorflow as tf\n'), ((2723, 2758), 'tensorflow.transpose', 'tf.transpose', (['Theta'], {'perm': '[0, 2, 1]'}), '(Theta, perm=[0, 2, 1])\n', (2735, 2758), True, 'import tensorflow as tf\n'), ((2767, 2790), 'tensorflow.unstack', 'tf.unstack', (['Theta', '(6)', '(1)'], {}), '(Theta, 6, 1)\n', (2777, 2790), True, 'import tensorflow as tf\n'), ((2823, 2854), 'tensorflow.multiply', 'tf.multiply', 
(['Theta[0]', 'Theta[3]'], {}), '(Theta[0], Theta[3])\n', (2834, 2854), True, 'import tensorflow as tf\n'), ((2885, 2916), 'tensorflow.multiply', 'tf.multiply', (['Theta[1]', 'Theta[3]'], {}), '(Theta[1], Theta[3])\n', (2896, 2916), True, 'import tensorflow as tf\n'), ((2947, 2978), 'tensorflow.multiply', 'tf.multiply', (['Theta[0]', 'Theta[4]'], {}), '(Theta[0], Theta[4])\n', (2958, 2978), True, 'import tensorflow as tf\n'), ((3009, 3040), 'tensorflow.multiply', 'tf.multiply', (['Theta[1]', 'Theta[4]'], {}), '(Theta[1], Theta[4])\n', (3020, 3040), True, 'import tensorflow as tf\n'), ((3082, 3128), 'tensorflow.unstack', 'tf.unstack', (['program_order', '(self.obj_num - 1)', '(1)'], {}), '(program_order, self.obj_num - 1, 1)\n', (3092, 3128), True, 'import tensorflow as tf\n'), ((1236, 1283), 'tensorflow.stack', 'tf.stack', (['([program_order2[3]] * self.obj_num)', '(1)'], {}), '([program_order2[3]] * self.obj_num, 1)\n', (1244, 1283), True, 'import tensorflow as tf\n'), ((3338, 3385), 'tensorflow.stack', 'tf.stack', (['([program_order2[3]] * self.obj_num)', '(1)'], {}), '([program_order2[3]] * self.obj_num, 1)\n', (3346, 3385), True, 'import tensorflow as tf\n'), ((1172, 1219), 'tensorflow.stack', 'tf.stack', (['([program_order2[2]] * self.obj_num)', '(1)'], {}), '([program_order2[2]] * self.obj_num, 1)\n', (1180, 1219), True, 'import tensorflow as tf\n'), ((3274, 3321), 'tensorflow.stack', 'tf.stack', (['([program_order2[2]] * self.obj_num)', '(1)'], {}), '([program_order2[2]] * self.obj_num, 1)\n', (3282, 3321), True, 'import tensorflow as tf\n'), ((1044, 1091), 'tensorflow.stack', 'tf.stack', (['([program_order2[0]] * self.obj_num)', '(1)'], {}), '([program_order2[0]] * self.obj_num, 1)\n', (1052, 1091), True, 'import tensorflow as tf\n'), ((1108, 1155), 'tensorflow.stack', 'tf.stack', (['([program_order2[1]] * self.obj_num)', '(1)'], {}), '([program_order2[1]] * self.obj_num, 1)\n', (1116, 1155), True, 'import tensorflow as tf\n'), ((3146, 3193), 
'tensorflow.stack', 'tf.stack', (['([program_order2[0]] * self.obj_num)', '(1)'], {}), '([program_order2[0]] * self.obj_num, 1)\n', (3154, 3193), True, 'import tensorflow as tf\n'), ((3210, 3257), 'tensorflow.stack', 'tf.stack', (['([program_order2[1]] * self.obj_num)', '(1)'], {}), '([program_order2[1]] * self.obj_num, 1)\n', (3218, 3257), True, 'import tensorflow as tf\n')] |
#!/usr/bin/env python
# ===========================================================================
# Copyright 2017 `<NAME>`
# Email: ttungl at gmail dot com
# Heterogeneous Architecture Configurations Generator for Multi2Sim simulator
# (aka, `HeteroArchGen4M2S`)
# `HeteroArchGen4M2S` is free software, which is freely to be
# redistributed and modified it under the terms of
# the GNU General Public License as published by
# the Free Software Foundation.
# For more details `http://www.gnu.org/licenses`
# `HeteroArchGen4M2S` is written to help you configure M2S
# easily, but with no warranty and no merchantability.
# ============================================================================
#
# `create_cpuconfig` is part of M2S configuration files.
# ==========================================================
# Description: This generates `x86_cpuconfig` file for M2S
# Input:
# Output:
# Note: Each core can contain several threads.
# ==========================================================
# E.g.,
# num_of_cores = 16 : number of cores in the CPUs
# num_of_threads = 1 : number of threads in each core
# ROB_size = 128 : number of in-flight instructions allowed
# pipelines_size = 4: decode/dispatch/issue/commit width
# bimod_size = 4096 : Size of local predictor (larger size means less aliasing in history table)
# bpred_size = 1024 : Size of global predictor (larger size means longer global history register)
# ==========================================================
import math # to roundup the float numbers.
# benchmark, fast_forward: binary flag, enables fastforward past sequential portion of benchmark
def create_cpuconfig( num_of_cores,
                      cpu_frequency,
                      num_of_threads,
                      ROB_size,
                      pipelines_size,
                      bimod_size,
                      bpred_size):
    """Generate the `configs/x86_cpuconfig` file for Multi2Sim.

    :param num_of_cores: number of CPU cores (must be >= 4)
    :param cpu_frequency: core clock frequency
    :param num_of_threads: hardware threads per core (>= 0)
    :param ROB_size: number of in-flight instructions allowed
    :param pipelines_size: decode/dispatch/issue/commit width
    :param bimod_size: local predictor size
    :param bpred_size: global predictor size
    :raises AssertionError: on invalid core/thread counts
    """
    # Check inputs validation
    assert(num_of_cores>=4), "Error! Number of CPU cores must be at least 4."
    assert(num_of_threads>=0), "Error! Number of threads should be at least zero."
    # Adapted the additional parameters from M2StoMcPAT of Caleb (Univ. Maryland College Park)
    IQ_ratio = 0.4           # size of instruction (issue) queue w.r.t. ROB
    LSQ_ratio = 0.5          # size of LSQ w.r.t. ROB
    RF_ratio = 1             # size of register file w.r.t. ROB
    RF_int_ratio = 0.666666  # (2/3) ratio of int vs FP registers in the RF
    Fetch_Queue_size = 64    # queue holding instructions fetched from I$ waiting to be decoded
    history_size = 8         # size of the local history table entries
    # Fix: use a context manager so the file is closed even if a write
    # fails mid-way (the original opened/closed manually and could leak
    # the handle on an exception).
    with open('configs/x86_cpuconfig', 'w') as f:
        # General
        f.write("[ General ]\n")
        f.write(" Cores = %0.f\n" % num_of_cores)
        f.write(" Threads = %0.f\n" % num_of_threads)
        f.write(" Frequency = %0.f\n" % cpu_frequency)
        f.write("\n")
        # Pipeline
        f.write("[ Pipeline ]\n")
        f.write(" DecodeWidth = %0.f\n" % pipelines_size)
        f.write(" DispatchWidth = %0.f\n" % pipelines_size)
        f.write(" IssueWidth = %0.f\n" % pipelines_size)
        f.write(" CommitWidth = %0.f\n" % pipelines_size)
        f.write("\n")
        # Queues
        f.write("[ Queues ]\n")
        f.write(" FetchQueueSize = %0.f\n" % Fetch_Queue_size)
        f.write(" RobSize = %0.f\n" % ROB_size)
        f.write(" IqSize = %0.f\n" % (IQ_ratio*ROB_size))
        f.write(" LsqSize = %0.f\n" % (LSQ_ratio*ROB_size))
        f.write(" RfIntSize = %0.f\n" % (RF_ratio*(RF_int_ratio)*ROB_size))
        f.write(" RfFpSize = %0.f\n" % (RF_ratio*(1-RF_int_ratio)*ROB_size))
        f.write("\n")
        # FunctionalUnits
        f.write("[ FunctionalUnits ]\n")
        f.write(" IntAdd.Count = %0.f\n" % pipelines_size)
        f.write(" IntMult.Count = %0.f\n" % (pipelines_size/4))
        f.write(" IntDiv.Count = %0.f\n" % math.ceil(pipelines_size/8+0.55))  # added 0.55 to roundup the float number.
        f.write(" EffAddr.Count = %0.f\n" % pipelines_size)
        f.write(" Logic.Count = %0.f\n" % pipelines_size)
        f.write(" FpSimple.Count = %0.f\n" % pipelines_size)
        f.write(" FpAdd.Count = %0.f\n" % pipelines_size)
        f.write(" FpMult.Count = %0.f\n" % (pipelines_size/4))
        f.write(" FpDiv.Count = %0.f\n" % math.ceil(pipelines_size/8+0.55))  # added 0.55 to roundup the float number.
        f.write(" FpComplex.Count = %0.f\n" % math.ceil(pipelines_size/8+0.55))  # added 0.55 to roundup the float number.
        f.write("\n")
        # BranchPredictor
        f.write("[ BranchPredictor ]\n")
        f.write(" Kind = Combined\n")
        f.write(" Bimod.Size = %0.f\n" % bimod_size)
        f.write(" Choice.Size = %0.f\n" % bimod_size)
        f.write(" TwoLevel.L1Size = %0.f\n" % bpred_size)
        f.write(" TwoLevel.L2Size = 1\n")
        f.write(" TwoLevel.HistorySize = %0.f\n" % history_size)
        f.write(" BTB.Sets = 1024\n")
        f.write(" BTB.Assoc = 1")
## Tested
# def main():
# create_cpuconfig(16, 1, 128, 4, 4096, 1024, 1);
# print "This %s file is just executed!" % __file__
# if __name__ == "__main__": main()
| [
"math.ceil"
] | [((3581, 3617), 'math.ceil', 'math.ceil', (['(pipelines_size / 8 + 0.55)'], {}), '(pipelines_size / 8 + 0.55)\n', (3590, 3617), False, 'import math\n'), ((3963, 3999), 'math.ceil', 'math.ceil', (['(pipelines_size / 8 + 0.55)'], {}), '(pipelines_size / 8 + 0.55)\n', (3972, 3999), False, 'import math\n'), ((4079, 4115), 'math.ceil', 'math.ceil', (['(pipelines_size / 8 + 0.55)'], {}), '(pipelines_size / 8 + 0.55)\n', (4088, 4115), False, 'import math\n')] |
import os
from sqlalchemy import (Column, DateTime, Integer, MetaData, String, Text, Table,
create_engine)
from sqlalchemy.sql import func
from databases import Database
# Connection string comes from the environment; None when unset.
DATABASE_URL = os.getenv("DATABASE_URL")
# SQLAlchemy
engine = create_engine(DATABASE_URL)
metadata = MetaData()
# Schema of the 'movies' table. created_date defaults to the
# database-side now() and may not be NULL.
movies = Table(
    "movies",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("release_year", String()),
    Column("title", String()),
    Column("origin_ethnicity", String()),
    Column("director", String()),
    Column("cast", String()),
    Column("genre", String()),
    Column("wiki_page", String()),
    Column("plot", Text()),
    Column("created_date", DateTime, default=func.now(), nullable=False),
)
# databases query builder
database = Database(DATABASE_URL) | [
"databases.Database",
"os.getenv",
"sqlalchemy.Text",
"sqlalchemy.sql.func.now",
"sqlalchemy.create_engine",
"sqlalchemy.MetaData",
"sqlalchemy.String",
"sqlalchemy.Column"
] | [((212, 237), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (221, 237), False, 'import os\n'), ((261, 288), 'sqlalchemy.create_engine', 'create_engine', (['DATABASE_URL'], {}), '(DATABASE_URL)\n', (274, 288), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((300, 310), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (308, 310), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((784, 806), 'databases.Database', 'Database', (['DATABASE_URL'], {}), '(DATABASE_URL)\n', (792, 806), False, 'from databases import Database\n'), ((360, 399), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (366, 399), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((428, 436), 'sqlalchemy.String', 'String', ([], {}), '()\n', (434, 436), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((459, 467), 'sqlalchemy.String', 'String', ([], {}), '()\n', (465, 467), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((501, 509), 'sqlalchemy.String', 'String', ([], {}), '()\n', (507, 509), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((535, 543), 'sqlalchemy.String', 'String', ([], {}), '()\n', (541, 543), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((565, 573), 'sqlalchemy.String', 'String', ([], {}), '()\n', (571, 573), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((596, 604), 'sqlalchemy.String', 'String', ([], {}), '()\n', (602, 604), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, 
String, Text, Table, create_engine\n'), ((631, 639), 'sqlalchemy.String', 'String', ([], {}), '()\n', (637, 639), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((661, 667), 'sqlalchemy.Text', 'Text', ([], {}), '()\n', (665, 667), False, 'from sqlalchemy import Column, DateTime, Integer, MetaData, String, Text, Table, create_engine\n'), ((715, 725), 'sqlalchemy.sql.func.now', 'func.now', ([], {}), '()\n', (723, 725), False, 'from sqlalchemy.sql import func\n')] |
from collections import OrderedDict
from asyncio import TimeoutError, wait
from concurrent.futures import FIRST_COMPLETED
import time
from discord import TextChannel, DMChannel
from discord.errors import Forbidden, NotFound
from objects.context import Context
class PaginatorABC:
    """Base reaction-driven paginator for Discord messages.

    Holds the list of pages, the emoji -> callback table (self.events) and
    the session loop; subclasses register their emoji callbacks in
    self.events before run() is called.
    """
    def __init__(self, bot, looped=True, timeout=180, additional_time=20):
        self.bot = bot
        self.looped = looped
        self.timeout = timeout
        self.additional_time = additional_time
        self.index = 0
        self._pages = []
        self.current_page = {}
        # emoji -> coroutine callback; OrderedDict so reactions are added
        # to the message in registration order
        self.events = OrderedDict()
        self.target_users = []
        self.closed = False
    def add_page(self, **kwargs):
        # kwargs are the send/edit fields of the page; the first page
        # added becomes the current one.
        page = kwargs
        self._pages.append(page)
        if len(self._pages) == 1:
            self.current_page = page
    def switch_to_next_page(self):
        # At the last page either wrap around (looped) or stay put.
        if self.index == len(self._pages) - 1:
            if not self.looped:
                return self.current_page
            self.index = 0
        else:
            self.index += 1
        return self._pages[self.index]
    def switch_to_prev_page(self):
        # Mirror of switch_to_next_page for the left edge.
        if self.index == 0:
            if not self.looped:
                return self.current_page
            self.index = len(self._pages) - 1
        else:
            self.index -= 1
        return self._pages[self.index]
    def switch_to_page(self, index):
        # Out-of-range indices silently fall back to the first page.
        if len(self._pages) > index and index >= 0:
            self.index = index
        else:
            self.index = 0
        self.current_page = self._pages[self.index]
        return self.current_page
    async def init_reactions(self, force=False):
        # A single-page session has nothing to paginate: mark it closed
        # unless force is set (used by UpdatingPaginator).
        if len(self._pages) <= 1 and not force:
            self.closed = True
            return
        try:
            for emoji in self.events.keys():
                await self.target_message.add_reaction(emoji)
        except Exception:
            # Best-effort: missing permissions etc. are ignored.
            pass
    async def _reaction_add_callback(self, reaction, user):
        # Dispatch to the registered emoji handler, then try to remove the
        # user's reaction so the control stays clickable.
        await self.events[str(reaction)](reaction, user)
        try:
            await self.target_message.remove_reaction(reaction, user)
        except NotFound:
            # The message is gone - end the session.
            self.closed = True
        except Exception:
            pass
    async def _reaction_remove_callback(self, reaction, user):
        await self.events[str(reaction)](reaction, user)
    async def run(self, target, **kwargs):
        """
        Runs paginator session
        parameters:
        :target:
            Message or Context object attach paginator to
        :target_user: (default: None or ctx author if ctx passed as target)
            user wait actions from. Can be User or Member object
        :target_users: (default: [])
            list of users wait actions from. Can be User or Member object list
        :force_run: (default: False)
            force run paginator even if missing pages
        :events: (default: {})
            dict of events to wait as keys and their callbacks as values
            !events should be lambda functions creating actual coroutine on call!
            callbacks are coroutines receiving event result(s)
        """
        if isinstance(target, Context):
            self.target_message = await target.send(**self.current_page)
            if self.target_message is None:
                return await self.cleanup()
            target_user = kwargs.pop('target_user', target.author)
        else:
            self.target_message = target
            target_user = kwargs.pop('target_user', None)
        target_users = kwargs.pop('target_users', [])
        force_run = kwargs.pop('force_run', False)
        events = kwargs.pop('events', {})
        if target_user is None and len(target_users) == 0:
            raise ValueError('No user objects passed')
        if target_user is not None:
            if len(target_users) != 0:
                raise ValueError('Use either target_user or target_users, not both')
            target_users.append(target_user)
        self.target_users = target_users
        def check(reaction, user):
            return all((
                any(user == u for u in target_users),
                reaction.message.id == self.target_message.id,
                str(reaction.emoji) in self.events
            ))
        self.start_time = time.time()
        time_left = self.timeout
        manage_messages_permission = \
            self.target_message.guild and self.target_message.channel.permissions_for(self.target_message.guild.me).manage_messages
        await self.init_reactions(force=force_run)
        while time_left >= 0 and not self.closed:
            # Fresh coroutines must be created every iteration: awaited
            # coroutines cannot be reused.
            reaction_add_event = self.bot.wait_for('reaction_add', check=check)
            _events = { l(): c for l, c in events.items() }
            _events[reaction_add_event] = self._reaction_add_callback
            if not manage_messages_permission:
                # Without manage-messages we cannot strip the user's
                # reaction, so also listen for its removal.
                reaction_remove_event = self.bot.wait_for('reaction_remove', check=check)
                _events[reaction_remove_event] = self._reaction_remove_callback
            done, _ = await wait(
                _events.keys(), loop=self.bot.loop,
                timeout=time_left, return_when=FIRST_COMPLETED
            )
            if not done:
                # timeout
                break
            else:
                for task in done:
                    # NOTE(review): relies on the private Task._coro
                    # attribute to map finished tasks back to callbacks.
                    cb = _events[task._coro]
                    task_result = task.result()
                    if task_result is None:
                        continue
                    if type(task_result) is tuple:
                        results = task_result
                    else:
                        results = [task_result]
                    await cb(*results)
            # Each handled event extends the session lifetime.
            self.start_time += self.additional_time
            time_left = self.timeout - (time.time() - self.start_time)
        await self.cleanup()
    async def cleanup(self):
        # Remove the control reactions (best-effort) and mark the
        # session finished.
        try:
            await self.target_message.clear_reactions()
        except Exception:
            pass
        self.closed = True
    def __len__(self):
        return len(self._pages)
class Paginator(PaginatorABC):
    """
    Basic paginator class.
    Requires PermissionAddReactions to work
    """
    def __init__(self, *args,
            emoji_go_left='◀', emoji_go_right='▶',
            emoji_use_index='🔢', **kwargs
    ):
        super().__init__(*args, **kwargs)
        # Registration order defines the order reactions appear under
        # the message: left, index-jump, right.
        self.events[emoji_go_left] = self.on_go_left
        self.events[emoji_use_index] = self.on_use_index
        self.events[emoji_go_right] = self.on_go_right
    async def on_go_left(self, reaction, user):
        # Non-looped sessions ignore "left" on the first page.
        if not self.looped and self.index == 0:
            return
        await self.bot.edit_message(
            self.target_message, **self.switch_to_prev_page())
    async def on_go_right(self, reaction, user):
        # Non-looped sessions ignore "right" on the last page.
        if not self.looped and self.index == len(self._pages) - 1:
            return
        await self.bot.edit_message(
            self.target_message, **self.switch_to_next_page())
    async def on_use_index(self, reaction, user):
        # Ask the user for a page number (10 seconds to answer) and jump
        # to it; prompt and answer are deleted afterwards.
        index_request_message = None
        index_response_message = None
        def check(message):
            return all((
                message.author == user,
                message.channel == self.target_message.channel,
                message.content.isdigit()
            ))
        try:
            index_request_message = await self.target_message.channel.send('Please, send number of page you want to go')
            index_response_message = await self.bot.wait_for('message', timeout=10, check=check)
            index = int(index_response_message.content) - 1
            if index != self.index:
                await self.bot.edit_message(self.target_message, **self.switch_to_page(index))
        except TimeoutError:
            pass
        finally:
            if index_request_message is not None:
                await index_request_message.delete()
            if index_response_message is not None:
                try:
                    await index_response_message.delete()
                except Exception:
                    pass
class SelectionPaginator(Paginator):
    """Paginator that also waits for the user to type a number between 1
    and num_elements; run() returns the chosen number (or None)."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.choice = None
        self.num_elements = 0
    async def _check_choice(self, msg):
        # A valid choice ends the session; an invalid one goes to the
        # (overridable) invalid-choice hook.
        if await self.check_choice(msg):
            self.closed = True
            await self.on_valid_choice(msg)
        else:
            await self.on_invalid_choice(msg)
    async def check_choice(self, msg):
        return msg.content.isdigit() and 0 < int(msg.content) <= self.num_elements
    async def on_invalid_choice(self, msg):
        # Hook for subclasses; invalid input is silently ignored by default.
        pass
    async def on_valid_choice(self, msg):
        self.choice = int(msg.content)
        await self.bot.delete_message(msg)
    async def run(self, target, num_elements, **kwargs):
        # Register a message-wait event alongside the reaction events of
        # the base paginator, then return the recorded choice.
        self.num_elements = num_elements
        def check(msg):
            return all((
                msg.author in (self.target_users),
                msg.channel == target.channel
            ))
        message_event_lambda = lambda: self.bot.wait_for('message', check=check)
        await super().run(
            target,
            events={ message_event_lambda: self._check_choice }, **kwargs
        )
        return self.choice
class UpdatingPaginator(PaginatorABC):
    """Paginator whose page is produced by an update coroutine; the
    'new' emoji refreshes it and 'back' steps through the history of
    previously rendered pages."""
    def __init__(self, *args, emoji_update='🆕', emoji_go_back='🔙', timeout=60, additional_time=30, **kwargs):
        super().__init__(
            *args, timeout=timeout, additional_time=additional_time, **kwargs)
        self.events[emoji_update] = self.on_update
        self.emoji_go_back = emoji_go_back
        # History of rendered pages, consumed by on_go_back.
        self.backup_pages = []
        self.first_page_switch = True
        self.last_time_popped = False
    async def run(self, target, update_func, **kwargs):
        # update_func(paginator, *update_args, **update_kwargs) must
        # return the send/edit kwargs of a page (or None/raise -> empty).
        self.update_func = update_func
        self.update_args = kwargs.pop('update_args', ())
        self.update_kwargs = kwargs.pop('update_kwargs', {})
        fields = await self.get_fields()
        if not fields:
            return
        self.add_page(**fields)
        await super().run(target, force_run=True, **kwargs)
    async def on_update(self, reaction, user):
        fields = await self.get_fields()
        if not fields:
            return
        await self.bot.edit_message(self.target_message, **fields)
        if self.first_page_switch:
            # The back button only appears once there is history to
            # return to.
            self.first_page_switch = False
            self.events[self.emoji_go_back] = self.on_go_back
            await self.init_reactions(force=True)
        self.last_time_popped = False
    async def on_go_back(self, reaction, user):
        # Pop the current page off the history (unless we already hit the
        # bottom last time) and re-show the previous one.
        if not self.last_time_popped:
            self.backup_pages.pop()
        if len(self.backup_pages) > 1:
            fields = self.backup_pages.pop()
            await self.bot.edit_message(self.target_message, **fields)
        else:
            await self.bot.edit_message(
                self.target_message, **self.backup_pages[0])
            self.last_time_popped = True
    async def get_fields(self):
        # Best-effort: any exception from the update coroutine yields an
        # empty page instead of crashing the session.
        try:
            fields = await self.update_func(
                self, *self.update_args, **self.update_kwargs)
            fields = {} if fields is None else fields
        except Exception:
            fields = {}
        self.backup_pages.append(fields)
        return fields
"collections.OrderedDict",
"time.time"
] | [((595, 608), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (606, 608), False, 'from collections import OrderedDict\n'), ((4360, 4371), 'time.time', 'time.time', ([], {}), '()\n', (4369, 4371), False, 'import time\n'), ((5881, 5892), 'time.time', 'time.time', ([], {}), '()\n', (5890, 5892), False, 'import time\n')] |
from __future__ import unicode_literals
import os
import boto3
import sure # noqa
from freezegun import freeze_time
from unittest import SkipTest
from moto import mock_managedblockchain, settings
from . import helpers
@mock_managedblockchain
def test_vote_on_proposal_one_member_total_yes():
    """A single-member network: one YES vote approves the proposal outright."""
    client = boto3.client("managedblockchain", region_name="us-east-1")

    # Set up a one-member network.
    network = client.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = network["NetworkId"]
    member_id = network["MemberId"]

    # Raise a proposal and confirm it starts out in progress.
    proposal_id = client.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )["ProposalId"]
    details = client.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    details["Proposal"]["NetworkId"].should.equal(network_id)
    details["Proposal"]["Status"].should.equal("IN_PROGRESS")

    # Cast the only possible vote.
    client.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    votes = client.list_proposal_votes(NetworkId=network_id, ProposalId=proposal_id)
    votes["ProposalVotes"][0]["MemberId"].should.equal(member_id)

    # With 100% YES the proposal is approved immediately.
    details = client.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    details["Proposal"]["Status"].should.equal("APPROVED")
    details["Proposal"]["YesVoteCount"].should.equal(1)
    details["Proposal"]["NoVoteCount"].should.equal(0)
    details["Proposal"]["OutstandingVoteCount"].should.equal(0)
@mock_managedblockchain
def test_vote_on_proposal_one_member_total_no():
    """A single-member network: one NO vote rejects the proposal outright."""
    client = boto3.client("managedblockchain", region_name="us-east-1")

    # Set up a one-member network.
    network = client.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = network["NetworkId"]
    member_id = network["MemberId"]

    # Raise a proposal and confirm it starts out in progress.
    proposal_id = client.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )["ProposalId"]
    details = client.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    details["Proposal"]["NetworkId"].should.equal(network_id)
    details["Proposal"]["Status"].should.equal("IN_PROGRESS")

    # Cast the only possible vote.
    client.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="NO",
    )
    votes = client.list_proposal_votes(NetworkId=network_id, ProposalId=proposal_id)
    votes["ProposalVotes"][0]["MemberId"].should.equal(member_id)

    # With 100% NO the proposal is rejected immediately.
    details = client.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    details["Proposal"]["Status"].should.equal("REJECTED")
    details["Proposal"]["YesVoteCount"].should.equal(0)
    details["Proposal"]["NoVoteCount"].should.equal(1)
    details["Proposal"]["OutstandingVoteCount"].should.equal(0)
@mock_managedblockchain
def test_vote_on_proposal_yes_greater_than():
    """With a GREATER_THAN 50% threshold, a 1 yes / 1 no split is not strictly
    above 50%, so the proposal ends up REJECTED."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    votingpolicy = {
        "ApprovalThresholdPolicy": {
            "ThresholdPercentage": 50,
            "ProposalDurationInHours": 24,
            "ThresholdComparator": "GREATER_THAN",
        }
    }
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes (approves the first proposal, which issues an invitation)
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation
    response = conn.list_invitations()
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Create the member
    response = conn.create_member(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False, "Test Member 2"
        ),
    )
    member_id2 = response["MemberId"]
    # Create another proposal
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes with member 1
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("IN_PROGRESS")
    # Vote no with member 2
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id2,
        Vote="NO",
    )
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["Status"].should.equal("REJECTED")
@mock_managedblockchain
def test_vote_on_proposal_no_greater_than():
    """With a GREATER_THAN 50% threshold, two NO votes (0% yes) reject the proposal."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    votingpolicy = {
        "ApprovalThresholdPolicy": {
            "ThresholdPercentage": 50,
            "ProposalDurationInHours": 24,
            "ThresholdComparator": "GREATER_THAN",
        }
    }
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes (approves the first proposal, which issues an invitation)
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation
    response = conn.list_invitations()
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Create the member
    response = conn.create_member(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False, "Test Member 2"
        ),
    )
    member_id2 = response["MemberId"]
    # Create another proposal
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote no with member 1
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="NO",
    )
    # Vote no with member 2
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id2,
        Vote="NO",
    )
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("REJECTED")
@mock_managedblockchain
def test_vote_on_proposal_expiredproposal():
    """Voting after the proposal's 1-hour duration has elapsed should raise,
    and the proposal status should flip to EXPIRED."""
    if os.environ.get("TEST_SERVER_MODE", "false").lower() == "true":
        raise SkipTest("Cant manipulate time in server mode")
    votingpolicy = {
        "ApprovalThresholdPolicy": {
            "ThresholdPercentage": 50,
            "ProposalDurationInHours": 1,
            "ThresholdComparator": "GREATER_THAN_OR_EQUAL_TO",
        }
    }
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    with freeze_time("2015-01-01 12:00:00"):
        # Create network - need a good network
        response = conn.create_network(
            Name="testnetwork1",
            Framework="HYPERLEDGER_FABRIC",
            FrameworkVersion="1.2",
            FrameworkConfiguration=helpers.default_frameworkconfiguration,
            VotingPolicy=votingpolicy,
            MemberConfiguration=helpers.default_memberconfiguration,
        )
        network_id = response["NetworkId"]
        member_id = response["MemberId"]
        response = conn.create_proposal(
            NetworkId=network_id,
            MemberId=member_id,
            Actions=helpers.default_policy_actions,
        )
        proposal_id = response["ProposalId"]
    # One month later - well past the 1 hour proposal duration
    with freeze_time("2015-02-01 12:00:00"):
        # Vote yes - should set status to expired
        response = conn.vote_on_proposal.when.called_with(
            NetworkId=network_id,
            ProposalId=proposal_id,
            VoterMemberId=member_id,
            Vote="YES",
        ).should.throw(
            Exception,
            "Proposal {0} is expired and you cannot vote on it.".format(proposal_id),
        )
        # Get proposal details - should be EXPIRED
        response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
        response["Proposal"]["Status"].should.equal("EXPIRED")
@mock_managedblockchain
def test_vote_on_proposal_status_check():
    """Once a proposal reaches APPROVED (2 of 3 yes votes), a late vote from the
    third member must raise and must not create an extra invitation."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    # Create 2 more members
    # (each approved proposal issues one invitation)
    for counter in range(2, 4):
        response = conn.create_proposal(
            NetworkId=network_id,
            MemberId=member_id,
            Actions=helpers.default_policy_actions,
        )
        proposal_id = response["ProposalId"]
        # Vote yes
        response = conn.vote_on_proposal(
            NetworkId=network_id,
            ProposalId=proposal_id,
            VoterMemberId=member_id,
            Vote="YES",
        )
    memberidlist = [None, None, None]
    memberidlist[0] = member_id
    for counter in range(2, 4):
        # Get the invitation
        response = conn.list_invitations()
        invitation_id = helpers.select_invitation_id_for_network(
            response["Invitations"], network_id, "PENDING"
        )[0]
        # Create the member
        response = conn.create_member(
            InvitationId=invitation_id,
            NetworkId=network_id,
            MemberConfiguration=helpers.create_member_configuration(
                "testmember" + str(counter),
                "admin",
                "Admin12345",
                False,
                "Test Member " + str(counter),
            ),
        )
        member_id = response["MemberId"]
        memberidlist[counter - 1] = member_id
    # Should be no more pending invitations
    response = conn.list_invitations()
    pendinginvs = helpers.select_invitation_id_for_network(
        response["Invitations"], network_id, "PENDING"
    )
    pendinginvs.should.have.length_of(0)
    # Create another proposal
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes with member 1
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=memberidlist[0],
        Vote="YES",
    )
    # Vote yes with member 2
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=memberidlist[1],
        Vote="YES",
    )
    # Get proposal details - now approved (2 yes, 1 outstanding)
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("APPROVED")
    # Should be one pending invitation
    response = conn.list_invitations()
    pendinginvs = helpers.select_invitation_id_for_network(
        response["Invitations"], network_id, "PENDING"
    )
    pendinginvs.should.have.length_of(1)
    # Vote with member 3 - should throw an exception and not create a new invitation
    response = conn.vote_on_proposal.when.called_with(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=memberidlist[2],
        Vote="YES",
    ).should.throw(Exception, "and you cannot vote on it")
    # Should still be one pending invitation
    response = conn.list_invitations()
    pendinginvs = helpers.select_invitation_id_for_network(
        response["Invitations"], network_id, "PENDING"
    )
    pendinginvs.should.have.length_of(1)
@mock_managedblockchain
def test_vote_on_proposal_badnetwork():
    """Voting against a nonexistent network must fail with a 'not found' error."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    response = conn.vote_on_proposal.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
        ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
        # "m-<PASSWORD>" was a redaction artifact; use a well-formed member id
        # matching the pattern used elsewhere in this file. The value is
        # irrelevant here because the network lookup fails first.
        VoterMemberId="m-ABCDEFGHIJKLMNOP0123456789",
        Vote="YES",
    ).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_vote_on_proposal_badproposal():
    """Voting on a nonexistent proposal in a real network must fail with 'not found'."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    response = conn.vote_on_proposal.when.called_with(
        NetworkId=network_id,
        ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
        # "m-<PASSWORD>" was a redaction artifact; use a well-formed member id.
        # The value is irrelevant here because the proposal lookup fails first.
        VoterMemberId="m-ABCDEFGHIJKLMNOP0123456789",
        Vote="YES",
    ).should.throw(Exception, "Proposal p-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_vote_on_proposal_badmember():
    """Voting with a nonexistent member id must fail with a 'not found' error."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    network_response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = network_response["NetworkId"]
    member_id = network_response["MemberId"]
    # A real proposal, so only the voter's member id is invalid
    proposal_response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = proposal_response["ProposalId"]
    response = conn.vote_on_proposal.when.called_with(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId="m-ABCDEFGHIJKLMNOP0123456789",
        Vote="YES",
    ).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_vote_on_proposal_badvote():
    """A vote value other than YES/NO must be rejected as an invalid request."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    response = conn.vote_on_proposal.when.called_with(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="FOO",
    ).should.throw(Exception, "Invalid request body")
@mock_managedblockchain
def test_vote_on_proposal_alreadyvoted():
    """A member may vote only once per proposal; a second vote must raise."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    votingpolicy = {
        "ApprovalThresholdPolicy": {
            "ThresholdPercentage": 50,
            "ProposalDurationInHours": 24,
            "ThresholdComparator": "GREATER_THAN",
        }
    }
    # Create network - need a good network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    member_id = response["MemberId"]
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Vote yes (approves the first proposal, which issues an invitation)
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Get the invitation
    response = conn.list_invitations()
    invitation_id = response["Invitations"][0]["InvitationId"]
    # Create the member
    response = conn.create_member(
        InvitationId=invitation_id,
        NetworkId=network_id,
        MemberConfiguration=helpers.create_member_configuration(
            "testmember2", "admin", "Admin12345", False, "Test Member 2"
        ),
    )
    # Create another proposal
    response = conn.create_proposal(
        NetworkId=network_id,
        MemberId=member_id,
        Actions=helpers.default_policy_actions,
    )
    proposal_id = response["ProposalId"]
    # Get proposal details
    response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
    response["Proposal"]["NetworkId"].should.equal(network_id)
    response["Proposal"]["Status"].should.equal("IN_PROGRESS")
    # Vote yes with member 1
    response = conn.vote_on_proposal(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    )
    # Vote yes with member 1 again
    response = conn.vote_on_proposal.when.called_with(
        NetworkId=network_id,
        ProposalId=proposal_id,
        VoterMemberId=member_id,
        Vote="YES",
    ).should.throw(
        Exception,
        "Member {0} has already voted on proposal {1}.".format(member_id, proposal_id),
    )
@mock_managedblockchain
def test_list_proposal_votes_badnetwork():
    """Listing votes for a nonexistent network must fail with a 'not found' error."""
    client = boto3.client("managedblockchain", region_name="us-east-1")
    list_call = client.list_proposal_votes.when.called_with(
        NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
        ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
    )
    response = list_call.should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_list_proposal_votes_badproposal():
    """Listing votes for a nonexistent proposal in a real network must fail with 'not found'."""
    conn = boto3.client("managedblockchain", region_name="us-east-1")
    # Create network
    response = conn.create_network(
        Name="testnetwork1",
        Framework="HYPERLEDGER_FABRIC",
        FrameworkVersion="1.2",
        FrameworkConfiguration=helpers.default_frameworkconfiguration,
        VotingPolicy=helpers.default_votingpolicy,
        MemberConfiguration=helpers.default_memberconfiguration,
    )
    network_id = response["NetworkId"]
    # (removed unused local `member_id` — the member is never referenced here)
    response = conn.list_proposal_votes.when.called_with(
        NetworkId=network_id, ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
    ).should.throw(Exception, "Proposal p-ABCDEFGHIJKLMNOP0123456789 not found")
| [
"freezegun.freeze_time",
"boto3.client",
"unittest.SkipTest",
"os.environ.get"
] | [((309, 367), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (321, 367), False, 'import boto3\n'), ((2051, 2109), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (2063, 2109), False, 'import boto3\n'), ((3788, 3846), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (3800, 3846), False, 'import boto3\n'), ((6362, 6420), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (6374, 6420), False, 'import boto3\n'), ((9116, 9174), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (9128, 9174), False, 'import boto3\n'), ((10620, 10678), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (10632, 10678), False, 'import boto3\n'), ((14403, 14461), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (14415, 14461), False, 'import boto3\n'), ((14835, 14893), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (14847, 14893), False, 'import boto3\n'), ((15659, 15717), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (15671, 15717), False, 'import boto3\n'), ((16705, 16763), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), 
"('managedblockchain', region_name='us-east-1')\n", (16717, 16763), False, 'import boto3\n'), ((17710, 17768), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (17722, 17768), False, 'import boto3\n'), ((20250, 20308), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (20262, 20308), False, 'import boto3\n'), ((20630, 20688), 'boto3.client', 'boto3.client', (['"""managedblockchain"""'], {'region_name': '"""us-east-1"""'}), "('managedblockchain', region_name='us-east-1')\n", (20642, 20688), False, 'import boto3\n'), ((8837, 8884), 'unittest.SkipTest', 'SkipTest', (['"""Cant manipulate time in server mode"""'], {}), "('Cant manipulate time in server mode')\n", (8845, 8884), False, 'from unittest import SkipTest\n'), ((9185, 9219), 'freezegun.freeze_time', 'freeze_time', (['"""2015-01-01 12:00:00"""'], {}), "('2015-01-01 12:00:00')\n", (9196, 9219), False, 'from freezegun import freeze_time\n'), ((9924, 9958), 'freezegun.freeze_time', 'freeze_time', (['"""2015-02-01 12:00:00"""'], {}), "('2015-02-01 12:00:00')\n", (9935, 9958), False, 'from freezegun import freeze_time\n'), ((8760, 8803), 'os.environ.get', 'os.environ.get', (['"""TEST_SERVER_MODE"""', '"""false"""'], {}), "('TEST_SERVER_MODE', 'false')\n", (8774, 8803), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# (c) Copyright IBM Corp. 2018. All Rights Reserved.
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import os
import logging
import time
import shlex
import subprocess
import json
import chardet
import winrm
import re
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from resilient_circuits.template_functions import render
class FunctionComponent(ResilientComponent):
    """Component that implements Resilient function 'utilities_shell_command'."""

    def __init__(self, opts):
        """Constructor provides access to the configuration options."""
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get("fn_utilities", {})

    @handler("reload")
    def _reload(self, event, opts):
        """Configuration options have changed, save new values."""
        self.options = opts.get("fn_utilities", {})

    @function("utilities_shell_command")
    def _shell_command_function(self, event, *args, **kwargs):
        """Function: Runs a shell command, locally or on a remote Windows host.

        kwargs:
            shell_command: name of a command configured in [fn_utilities];
                for remote execution the format is
                ``remote_command_name:remote_computer_name``.
            shell_remote: boolean, True to run over WinRM.
            shell_param1..shell_param3: optional parameters substituted into
                the configured command template (escaped per
                ``shell_escaping``, "sh" or "ps").

        Yields a FunctionResult with the command line, timing, exit code,
        stdout/stderr text and (when parseable) their JSON decodings, or a
        FunctionError on any failure.
        """
        try:
            # Get the function parameters:
            shell_command = kwargs.get('shell_command')  # text
            shell_remote = kwargs.get("shell_remote")  # boolean
            shell_param1 = kwargs.get("shell_param1")  # text
            shell_param2 = kwargs.get("shell_param2")  # text
            shell_param3 = kwargs.get("shell_param3")  # text

            log = logging.getLogger(__name__)
            log.info("shell_command: %s", shell_command)
            log.info("shell_remote: %s", shell_remote)
            log.info("shell_param1: %s", shell_param1)
            log.info("shell_param2: %s", shell_param2)
            log.info("shell_param3: %s", shell_param3)

            # Options keys are lowercase, so the shell command name needs to be lowercase
            if shell_command:
                shell_command = shell_command.lower()

            # Escape the input parameters ("sh" or "ps" style) before templating
            escaping = self.options.get("shell_escaping", "sh")
            escaped_args = {
                "shell_param1": render(u"{{shell_param1|%s}}" % escaping, kwargs),
                "shell_param2": render(u"{{shell_param2|%s}}" % escaping, kwargs),
                "shell_param3": render(u"{{shell_param3|%s}}" % escaping, kwargs)
            }

            # If running a remote script, get the remote computer and the remote command
            if shell_remote:
                colon_split = shell_command.split(':')
                if len(colon_split) != 2:
                    raise ValueError("Remote commands must be of the format remote_command_name:remote_computer_name, "
                                     "'%s' was specified" % shell_command)
                else:
                    shell_command = colon_split[0].strip()
                    if self.options.get(colon_split[1]) is None:
                        raise ValueError('The remote computer %s is not configured' % colon_split[1])
                    else:
                        remote = self.options.get(colon_split[1]).strip()
                        # Remote computer configurations are wrapped in parentheses ()
                        if remote.startswith('(') and remote.endswith(')'):
                            remote = remote[1:-1]
                        else:
                            raise ValueError('Remote computer configurations must be wrapped in parentheses (), '
                                             "%s was specified" % remote)
                        # Get remote credentials (username:password@server).
                        # NOTE: the original message had two %s placeholders for a
                        # single argument, which raised TypeError instead of the
                        # intended ValueError; fixed to a single placeholder.
                        remote_config = re.split(':|@', remote)
                        if len(remote_config) != 3:
                            raise ValueError('Remote machine must be of the format username:password@server, '
                                             "'%s' was specified" % remote)
                        else:
                            remote_user = remote_config[0]
                            remote_password = remote_config[1]
                            remote_server = remote_config[2]

            # Check if command is configured
            if shell_command not in self.options:
                if ':' in shell_command:
                    raise ValueError("Syntax for a remote command '%s' was used but remote_shell was set to False"
                                     % shell_command)
                raise ValueError('%s command not configured' % shell_command)

            shell_command_base = self.options[shell_command].strip()

            # Remote commands must wrap a path with []
            if shell_command_base.startswith('[') and shell_command_base.endswith(']'):
                if shell_remote:
                    extension = shell_command_base[1:-1].strip().split('.')[-1]
                    if extension not in self.options.get('remote_powershell_extensions'):
                        raise ValueError("The specified file must be have extension %s but %s was specified" %
                                         (str(self.options.get('remote_powershell_extensions')), extension))
                    # Format shell parameters: missing params become $null
                    shell_command_base = shell_command_base[1:-1].strip()
                    if shell_param1:
                        shell_command_base = shell_command_base + ' "{{shell_param1}}"'
                    else:
                        shell_command_base = shell_command_base + ' $null'
                    if shell_param2:
                        shell_command_base = shell_command_base + ' "{{shell_param2}}"'
                    else:
                        shell_command_base = shell_command_base + ' $null'
                    if shell_param3:
                        shell_command_base = shell_command_base + ' "{{shell_param3}}"'
                    else:
                        shell_command_base = shell_command_base + ' $null'
                else:
                    raise ValueError("A remote command '%s' was specified but shell_remote was set to False"
                                     % shell_command)
            elif shell_remote:
                raise ValueError('A remote command must specify a remote path wrapped in square brackets [], '
                                 "'%s' was specified" % shell_command)

            if shell_command_base.startswith('(') and shell_command_base.endswith(')') and not shell_remote:
                raise ValueError('Please specify a valid shell command that is not wrapped in parentheses or brackets'
                                 'when shell_remote is False')

            commandline = render(shell_command_base, escaped_args)

            if shell_remote:
                session = winrm.Session(remote_server,
                                      auth=(remote_user, remote_password),
                                      transport=self.options.get('remote_auth_transport'))
                tstart = time.time()
                if escaping == "sh":
                    r = session.run_cmd(commandline)
                elif escaping == "ps":
                    r = session.run_ps(commandline)
                else:
                    # Previously an unrecognized escaping style fell through and
                    # raised NameError on 'r'; fail with an explicit message instead.
                    raise ValueError('shell_escaping must be "sh" or "ps", "%s" was specified' % escaping)
                retcode = r.status_code
                stdoutdata = r.std_out
                stderrdata = r.std_err
                tend = time.time()
            else:
                commandline = os.path.expandvars(commandline)
                # Set up the environment
                env = os.environ.copy()
                # Execute the command line process (NOT in its own shell)
                cmd = shlex.split(commandline, posix=True)
                tstart = time.time()
                call = subprocess.Popen(cmd,
                                        shell=False,
                                        stderr=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        env=env)
                stdoutdata, stderrdata = call.communicate()
                retcode = call.returncode
                tend = time.time()

            # Guess the output encoding; chardet may return None for empty output
            encoding = chardet.detect(stdoutdata)["encoding"] or "utf-8"
            result = stdoutdata.decode(encoding)
            result_json = None
            try:
                # Let's see if the output can be decoded as JSON
                result_json = json.loads(result)
            except ValueError:
                # Not JSON - leave result_json as None.
                # (bare 'except:' narrowed; json raises JSONDecodeError, a ValueError)
                pass
            output = stderrdata.decode(encoding)
            output_json = None
            try:
                # Let's see if the output can be decoded as JSON
                output_json = json.loads(output)
            except ValueError:
                pass

            results = {
                "commandline": commandline,
                "start": int(tstart * 1000.0),
                "end": int(tend * 1000.0),
                "elapsed": int((tend - tstart) * 1000.0),
                "exitcode": retcode,  # Nonzero exit code indicates error
                "stdout": result,
                "stderr": output,
                "stdout_json": result_json,  # May be null
                "stderr_json": output_json  # May be null
            }
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
| [
"logging.getLogger",
"re.split",
"resilient_circuits.handler",
"json.loads",
"os.path.expandvars",
"shlex.split",
"subprocess.Popen",
"os.environ.copy",
"chardet.detect",
"resilient_circuits.FunctionError",
"resilient_circuits.template_functions.render",
"resilient_circuits.function",
"time.... | [((784, 801), 'resilient_circuits.handler', 'handler', (['"""reload"""'], {}), "('reload')\n", (791, 801), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((962, 997), 'resilient_circuits.function', 'function', (['"""utilities_shell_command"""'], {}), "('utilities_shell_command')\n", (970, 997), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((1497, 1524), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1514, 1524), False, 'import logging\n'), ((6512, 6552), 'resilient_circuits.template_functions.render', 'render', (['shell_command_base', 'escaped_args'], {}), '(shell_command_base, escaped_args)\n', (6518, 6552), False, 'from resilient_circuits.template_functions import render\n'), ((2145, 2194), 'resilient_circuits.template_functions.render', 'render', (["(u'{{shell_param1|%s}}' % escaping)", 'kwargs'], {}), "(u'{{shell_param1|%s}}' % escaping, kwargs)\n", (2151, 2194), False, 'from resilient_circuits.template_functions import render\n'), ((2228, 2277), 'resilient_circuits.template_functions.render', 'render', (["(u'{{shell_param2|%s}}' % escaping)", 'kwargs'], {}), "(u'{{shell_param2|%s}}' % escaping, kwargs)\n", (2234, 2277), False, 'from resilient_circuits.template_functions import render\n'), ((2311, 2360), 'resilient_circuits.template_functions.render', 'render', (["(u'{{shell_param3|%s}}' % escaping)", 'kwargs'], {}), "(u'{{shell_param3|%s}}' % escaping, kwargs)\n", (2317, 2360), False, 'from resilient_circuits.template_functions import render\n'), ((3551, 3574), 're.split', 're.split', (['""":|@"""', 'remote'], {}), "(':|@', remote)\n", (3559, 3574), False, 'import re\n'), ((6833, 6844), 'time.time', 'time.time', ([], {}), '()\n', (6842, 6844), False, 'import time\n'), ((7167, 7178), 'time.time', 'time.time', ([], {}), '()\n', (7176, 7178), False, 'import 
time\n'), ((7227, 7258), 'os.path.expandvars', 'os.path.expandvars', (['commandline'], {}), '(commandline)\n', (7245, 7258), False, 'import os\n'), ((7322, 7339), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (7337, 7339), False, 'import os\n'), ((7437, 7473), 'shlex.split', 'shlex.split', (['commandline'], {'posix': '(True)'}), '(commandline, posix=True)\n', (7448, 7473), False, 'import shlex\n'), ((7499, 7510), 'time.time', 'time.time', ([], {}), '()\n', (7508, 7510), False, 'import time\n'), ((7534, 7630), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(False)', 'stderr': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'env': 'env'}), '(cmd, shell=False, stderr=subprocess.PIPE, stdout=\n subprocess.PIPE, env=env)\n', (7550, 7630), False, 'import subprocess\n'), ((7911, 7922), 'time.time', 'time.time', ([], {}), '()\n', (7920, 7922), False, 'import time\n'), ((8189, 8207), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (8199, 8207), False, 'import json\n'), ((8442, 8460), 'json.loads', 'json.loads', (['output'], {}), '(output)\n', (8452, 8460), False, 'import json\n'), ((9011, 9034), 'resilient_circuits.FunctionResult', 'FunctionResult', (['results'], {}), '(results)\n', (9025, 9034), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n'), ((7947, 7973), 'chardet.detect', 'chardet.detect', (['stdoutdata'], {}), '(stdoutdata)\n', (7961, 7973), False, 'import chardet\n'), ((9079, 9094), 'resilient_circuits.FunctionError', 'FunctionError', ([], {}), '()\n', (9092, 9094), False, 'from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError\n')] |
"""
script to run mc sims on the three associations techniques when the tracks origin are equal. Used to calculate the
total number of correctly associating tracks and total # falsly not associating tracks from the same target.
"""
import numpy as np
from stonesoup.types.state import GaussianState
from data_association.CountingAssociator import CountingAssociator
from data_association.bar_shalom_hypothesis_associators import HypothesisTestDependenceAssociator, \
HypothesisTestIndependenceAssociator
from trackers.kf_dependent_fusion_async_sensors import KalmanFilterDependentFusionAsyncSensors
from utils import open_object
from utils.scenario_generator import generate_scenario_3
# Monte Carlo configuration: seeds [start_seed, end_seed) give one run each.
start_seed = 0
end_seed = 5  # normally 500
num_mc_iterations = end_seed - start_seed

# params
save_fig = False

# scenario parameters
sigma_process_list = [0.3]  # [0.05, 0.05, 0.05, 0.5, 0.5, 0.5, 3, 3, 3]
sigma_meas_radar_list = [50]  # [5, 30, 200, 5, 30, 200, 5, 30, 200]
sigma_meas_ais_list = [10]  # [10] * 9
radar_meas_rate = 1  # relevant radar meas rates: 1
ais_meas_rate_list = [6]  # relevant AIS meas rates: 2 - 12
timesteps = 200

# associator params
association_distance_threshold = 10
consecutive_hits_confirm_association = 3
consecutive_misses_end_association = 2

# dicts to store final results for printing in a latex friendly way
Pc_overall = {}  # Pc is the percentage of correctly associating tracks that originate from the same target
something_else_overall = {}

stats = []
for sigma_process, sigma_meas_radar, sigma_meas_ais, ais_meas_rate in zip(sigma_process_list, sigma_meas_radar_list,
                                                                          sigma_meas_ais_list, ais_meas_rate_list):
    for seed in range(start_seed, end_seed):
        # generate scenario (written to the "temp" folder, not a permanent save)
        generate_scenario_3(seed=seed, permanent_save=False, radar_meas_rate=radar_meas_rate,
                            ais_meas_rate=ais_meas_rate, sigma_process=sigma_process,
                            sigma_meas_radar=sigma_meas_radar, sigma_meas_ais=sigma_meas_ais,
                            timesteps=timesteps)

        folder = "temp"  # temp instead of seed, as it is not a permanent save

        # load ground truth and the measurements
        data_folder = "../scenarios/scenario3/" + folder + "/"
        ground_truth = open_object.open_object(data_folder + "ground_truth.pk1")
        measurements_radar = open_object.open_object(data_folder + "measurements_radar.pk1")
        measurements_ais = open_object.open_object(data_folder + "measurements_ais.pk1")

        # load start_time
        start_time = open_object.open_object(data_folder + "start_time.pk1")

        # prior covariance scaled by the measurement/process noise magnitudes
        initial_covar = np.diag([sigma_meas_radar * sigma_meas_ais, sigma_meas_radar * sigma_process,
                                 sigma_meas_radar * sigma_meas_ais, sigma_meas_radar * sigma_process]) ** 2
        prior = GaussianState([1, 1.1, -1, 0.9], initial_covar, timestamp=start_time)

        kf_dependent_fusion = KalmanFilterDependentFusionAsyncSensors(start_time, prior,
                                                                sigma_process_radar=sigma_process,
                                                                sigma_process_ais=sigma_process,
                                                                sigma_meas_radar=sigma_meas_radar,
                                                                sigma_meas_ais=sigma_meas_ais)

        tracks_fused_dependent, tracks_radar, tracks_ais = kf_dependent_fusion.track_async(
            start_time, measurements_radar, measurements_ais, fusion_rate=1)

        # use the CountingAssociator to evaluate whether the tracks are associated
        associator = CountingAssociator(association_distance_threshold, consecutive_hits_confirm_association,
                                        consecutive_misses_end_association)

        num_correct_associations = 0
        num_false_mis_associations = 0
        for i in range(1, len(tracks_radar)):
            # use the associator to check the association on the track prefixes
            associated = associator.associate_tracks(tracks_radar[:i], tracks_ais[:i])
            if associated:
                num_correct_associations += 1
            else:
                num_false_mis_associations += 1

        # save the number of correct associations and false mis associations in a dict
        stats_individual = {'seed': seed, 'num_correct_associations': num_correct_associations,
                            'num_false_mis_associations': num_false_mis_associations}
        stats.append(stats_individual)
        # todo count the number of associations that turn out to be correct

# calc the #correct_associations and #false_mis_associations
# (generator expressions instead of building throwaway lists inside sum)
tot_num_correct_associations = sum(stat['num_correct_associations'] for stat in stats)
tot_num_false_mis_associations = sum(stat['num_false_mis_associations'] for stat in stats)
print("")
| [
"utils.open_object.open_object",
"stonesoup.types.state.GaussianState",
"numpy.diag",
"data_association.CountingAssociator.CountingAssociator",
"trackers.kf_dependent_fusion_async_sensors.KalmanFilterDependentFusionAsyncSensors",
"utils.scenario_generator.generate_scenario_3"
] | [((1806, 2051), 'utils.scenario_generator.generate_scenario_3', 'generate_scenario_3', ([], {'seed': 'seed', 'permanent_save': '(False)', 'radar_meas_rate': 'radar_meas_rate', 'ais_meas_rate': 'ais_meas_rate', 'sigma_process': 'sigma_process', 'sigma_meas_radar': 'sigma_meas_radar', 'sigma_meas_ais': 'sigma_meas_ais', 'timesteps': 'timesteps'}), '(seed=seed, permanent_save=False, radar_meas_rate=\n radar_meas_rate, ais_meas_rate=ais_meas_rate, sigma_process=\n sigma_process, sigma_meas_radar=sigma_meas_radar, sigma_meas_ais=\n sigma_meas_ais, timesteps=timesteps)\n', (1825, 2051), False, 'from utils.scenario_generator import generate_scenario_3\n'), ((2337, 2394), 'utils.open_object.open_object', 'open_object.open_object', (["(data_folder + 'ground_truth.pk1')"], {}), "(data_folder + 'ground_truth.pk1')\n", (2360, 2394), False, 'from utils import open_object\n'), ((2424, 2487), 'utils.open_object.open_object', 'open_object.open_object', (["(data_folder + 'measurements_radar.pk1')"], {}), "(data_folder + 'measurements_radar.pk1')\n", (2447, 2487), False, 'from utils import open_object\n'), ((2515, 2576), 'utils.open_object.open_object', 'open_object.open_object', (["(data_folder + 'measurements_ais.pk1')"], {}), "(data_folder + 'measurements_ais.pk1')\n", (2538, 2576), False, 'from utils import open_object\n'), ((2625, 2680), 'utils.open_object.open_object', 'open_object.open_object', (["(data_folder + 'start_time.pk1')"], {}), "(data_folder + 'start_time.pk1')\n", (2648, 2680), False, 'from utils import open_object\n'), ((2924, 2993), 'stonesoup.types.state.GaussianState', 'GaussianState', (['[1, 1.1, -1, 0.9]', 'initial_covar'], {'timestamp': 'start_time'}), '([1, 1.1, -1, 0.9], initial_covar, timestamp=start_time)\n', (2937, 2993), False, 'from stonesoup.types.state import GaussianState\n'), ((3025, 3225), 'trackers.kf_dependent_fusion_async_sensors.KalmanFilterDependentFusionAsyncSensors', 'KalmanFilterDependentFusionAsyncSensors', (['start_time', 'prior'], 
{'sigma_process_radar': 'sigma_process', 'sigma_process_ais': 'sigma_process', 'sigma_meas_radar': 'sigma_meas_radar', 'sigma_meas_ais': 'sigma_meas_ais'}), '(start_time, prior,\n sigma_process_radar=sigma_process, sigma_process_ais=sigma_process,\n sigma_meas_radar=sigma_meas_radar, sigma_meas_ais=sigma_meas_ais)\n', (3064, 3225), False, 'from trackers.kf_dependent_fusion_async_sensors import KalmanFilterDependentFusionAsyncSensors\n'), ((3773, 3901), 'data_association.CountingAssociator.CountingAssociator', 'CountingAssociator', (['association_distance_threshold', 'consecutive_hits_confirm_association', 'consecutive_misses_end_association'], {}), '(association_distance_threshold,\n consecutive_hits_confirm_association, consecutive_misses_end_association)\n', (3791, 3901), False, 'from data_association.CountingAssociator import CountingAssociator\n'), ((2722, 2877), 'numpy.diag', 'np.diag', (['[sigma_meas_radar * sigma_meas_ais, sigma_meas_radar * sigma_process, \n sigma_meas_radar * sigma_meas_ais, sigma_meas_radar * sigma_process]'], {}), '([sigma_meas_radar * sigma_meas_ais, sigma_meas_radar *\n sigma_process, sigma_meas_radar * sigma_meas_ais, sigma_meas_radar *\n sigma_process])\n', (2729, 2877), True, 'import numpy as np\n')] |
import json
import operator
import logging
import re
import time
from socket import socket, AF_INET, SOCK_DGRAM
from functools import reduce
logger = logging.getLogger(__name__)
def ip():
    """Return the host's default outbound IPv4 address.

    Opens a UDP socket and "connects" it to an arbitrary external address;
    no packet is sent, but the OS picks the default route, whose local
    address is then read back with getsockname().

    Raises:
        RuntimeError: if the local address cannot be determined.
    """
    s = socket(AF_INET, SOCK_DGRAM)
    try:
        s.connect(('172.16.31.10', 9))
        return s.getsockname()[0]
    except OSError as e:
        # Bug fix: the previous `except socket.error` raised AttributeError,
        # because `socket` here is the class from `from socket import socket`,
        # not the module. socket.error is an alias of OSError since Python 3.3.
        raise RuntimeError("Cannot determine host IP") from e
    finally:
        s.close()  # close deterministically instead of relying on `del s`
def serialize(data):
    """Serialize a dict to a JSON string.

    Returns the JSON text on success; logs a warning and returns '{}' for
    any non-dict input or serialization failure (best-effort contract kept).
    """
    try:
        if not isinstance(data, dict):  # isinstance also accepts dict subclasses
            raise TypeError('Must be a dict')
        return json.dumps(data)
    except Exception as e:
        # logger.warn is deprecated since Python 3.3 -> logger.warning
        logger.warning('Cannot serialize: %s [%s]', data, e)
        return '{}'
def unserialize(serialized):
    """Parse a JSON string into a dict.

    Returns {} for empty input, non-dict JSON payloads, or parse errors
    (logging a warning in the failure cases).
    """
    if not serialized:
        return {}
    try:
        data = json.loads(serialized)
        if not isinstance(data, dict):  # reject lists/scalars, keep dict contract
            raise TypeError('Not a dict')
        return data
    except Exception as e:
        # logger.warn is deprecated since Python 3.3 -> logger.warning
        logger.warning('Cannot unserialize: %s [%s]', serialized, e)
        return {}
def dict_get_path(the_dict, path):
    """Look up a dotted *path* (e.g. 'a.b.c') in a nested dict.

    Returns the value at the path, or None when any component is missing
    or a non-indexable value is hit along the way.
    """
    try:
        return reduce(operator.getitem, [the_dict] + path.split('.'))
    except (KeyError, IndexError, TypeError, AttributeError):
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; now only lookup-related failures map to None.
        return None
def dict_set_path(the_dict, path, value):
    """Assign *value* at the dotted *path* inside *the_dict*.

    Intermediate components that are missing, or that are not plain dicts,
    are (re)created as empty dicts along the way.
    """
    keys = path.split('.')
    node = the_dict
    for key in keys[:-1]:
        if key not in node or type(node[key]) != dict:
            node[key] = {}
        node = node[key]
    node[keys[-1]] = value
def dict_filter(the_dict, field_or_fields=None):
    """Project *the_dict* through one dotted path or a list of paths.

    - None: return the dict unchanged.
    - list of paths: return {path: value} for every path (missing -> None).
    - str path: return the single looked-up value (missing -> None).
    Raises TypeError for any other selector type.
    """
    if field_or_fields is None:
        return the_dict
    if type(field_or_fields) == list:
        # One projection entry per requested dotted path.
        return {path: dict_get_path(the_dict, path) for path in field_or_fields}
    if isinstance(field_or_fields, str):
        return dict_get_path(the_dict, field_or_fields)
    raise TypeError('Invalid type for field path: %s' % type(field_or_fields))
def get_operator(op):
    """Map a comparison symbol ('==', '!=', '>=', ...) to its function.

    Raises ValueError for an unknown symbol.
    """
    table = {
        "==": operator.eq,
        "=": operator.eq,
        "!=": operator.ne,
        ">=": operator.ge,
        "<=": operator.le,
        ">": operator.gt,
        "<": operator.lt,
    }
    if op not in table:
        raise ValueError('Unknown operator: %s' % op)
    return table[op]
def match_predicates(predicates, the_dict):
    """Return True when *the_dict* satisfies every predicate.

    Each predicate is a dict with 'path' (dotted lookup key), 'op'
    (binary comparison function) and 'value' (expected operand).  Both
    sides are coerced to int when both convert cleanly, so '5' == 5.
    """
    for pred in predicates:
        actual = dict_get_path(the_dict, pred['path'])
        expected = pred['value']
        # A missing field can only match a predicate that expects None.
        if actual is None and expected is not None:
            return False
        try:
            # Assign together: if either conversion fails, neither changes.
            actual, expected = int(actual), int(expected)
        except (ValueError, TypeError):
            pass
        if not pred['op'](actual, expected):
            return False
    return True
def create_filter(filters):
    """Compile a comma-separated filter expression into a predicate function.

    Each comma-separated term is either ``path<op>value`` (op one of
    >=, <=, !=, =, <, >), bare ``path`` meaning "field exists", or
    ``!path`` meaning "field does not exist".  Whitespace is stripped.
    Returns a callable ``f(the_dict) -> bool``; an empty/None *filters*
    yields an always-true predicate.
    """
    if not filters:
        return lambda a_dict: True
    predicates = []
    for f in filters.replace(' ', '').split(','):
        predicate = {}
        # Group 1: optional '!' + path, group 2: operator, group 3: value.
        # NOTE(review): passing maxsplit positionally to re.split is
        # deprecated since Python 3.13 -- consider maxsplit=2.
        match = re.split('(!?[^><!=]+)(?:(>=|<=|!=|=|<|>)(.*))?', f, 2)
        predicate['path'] = match[1]
        if match[2]:
            predicate['op'] = get_operator(match[2])
            predicate['value'] = match[3]
        else:
            # predicate with not operator/value means "fields exists"
            if predicate['path'][0] == '!':
                predicate['path'] = predicate['path'][1:]
                # '!path': field must look up to None (absent).
                predicate['op'] = operator.is_
            else:
                # bare 'path': field must look up to something non-None.
                predicate['op'] = operator.is_not
            predicate['value'] = None
        predicates.append(predicate)
    return lambda the_dict: match_predicates(predicates, the_dict)
class ColorizingStreamHandler(logging.StreamHandler):
    """StreamHandler that prefixes each line with a timestamp and a level
    tag, colorized with ANSI escapes when the target stream is a TTY.

    Fixes over the previous revision:
    - ``logging.StreamHandler.__init__`` is now called, so ``self.stream``,
      ``self.formatter`` and the handler lock exist (``format`` previously
      raised AttributeError on ``self.formatter``).
    - ``colorize`` referenced undefined names (``bg``/``fg``/``bold``) and
      raised NameError; it now unpacks them from ``level_map``.
    """

    def __init__(self):
        super().__init__()  # was missing: base class sets up stream/lock/formatter
        self.colors = ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white']
        self.color_map = dict([(x, self.colors.index(x)) for x in self.colors])
        # levelno -> (background color, foreground color, 4-char label)
        self.level_map = {
            logging.DEBUG: (None, 'blue', " DBG"),
            logging.INFO: (None, 'green', "INFO"),
            logging.WARNING: (None, 'yellow', "WARN"),
            logging.ERROR: (None, 'red', " ERR"),
            logging.CRITICAL: ('red', 'white', "CRIT")
        }
        self.csi = '\x1b['      # ANSI Control Sequence Introducer
        self.reset = '\x1b[0m'  # SGR reset

    @property
    def is_tty(self):
        """True when the underlying stream reports being a terminal."""
        isatty = getattr(self.stream, 'isatty', None)
        return isatty and isatty()

    def colorize(self, message, record):
        """Wrap *message* in ANSI SGR codes matching *record*'s level."""
        if record.levelno in self.level_map:
            # Bug fix: bg/fg were previously undefined names (NameError).
            bg, fg, _label = self.level_map[record.levelno]
            params = []
            if bg in self.color_map:
                params.append(str(self.color_map[bg] + 40))  # background: 40-47
            if fg in self.color_map:
                params.append(str(self.color_map[fg] + 30))  # foreground: 30-37
            params.append('1')  # bold, mirroring format()'s "1m"
            if params:
                message = ''.join((self.csi, ';'.join(params),
                                   'm', message, self.reset))
        return message

    def format(self, record):
        """Format *record*; every output line gets a timestamp + level prefix."""
        message = logging.StreamHandler.format(self, record)
        # Build the prefix
        params = []
        levelno = record.levelno
        if levelno not in self.level_map:
            levelno = logging.WARNING  # unknown levels render as WARN
        bg, fg, level = self.level_map[levelno]
        if bg in self.color_map:
            params.append(str(self.color_map[bg] + 40))
        if fg in self.color_map:
            params.append(str(self.color_map[fg] + 30))
        params.append("1m")  # bold attribute plus the SGR terminator 'm'
        level = "[%s]" % level
        return "\n".join(["%s %s: %s" % (
            time.strftime("%Y-%m-%dT%H:%M:%S"),
            self.is_tty and params and ''.join((self.csi, ';'.join(params),
                                                level, self.reset)) or level,
            line)
            for line in message.split('\n')])
| [
"logging.getLogger",
"re.split",
"json.loads",
"socket.socket",
"json.dumps",
"time.strftime",
"logging.StreamHandler.format"
] | [((151, 178), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (168, 178), False, 'import logging\n'), ((238, 265), 'socket.socket', 'socket', (['AF_INET', 'SOCK_DGRAM'], {}), '(AF_INET, SOCK_DGRAM)\n', (244, 265), False, 'from socket import socket, AF_INET, SOCK_DGRAM\n'), ((590, 606), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (600, 606), False, 'import json\n'), ((808, 830), 'json.loads', 'json.loads', (['serialized'], {}), '(serialized)\n', (818, 830), False, 'import json\n'), ((2966, 3021), 're.split', 're.split', (['"""(!?[^><!=]+)(?:(>=|<=|!=|=|<|>)(.*))?"""', 'f', '(2)'], {}), "('(!?[^><!=]+)(?:(>=|<=|!=|=|<|>)(.*))?', f, 2)\n", (2974, 3021), False, 'import re\n'), ((5033, 5075), 'logging.StreamHandler.format', 'logging.StreamHandler.format', (['self', 'record'], {}), '(self, record)\n', (5061, 5075), False, 'import logging\n'), ((5585, 5619), 'time.strftime', 'time.strftime', (['"""%Y-%m-%dT%H:%M:%S"""'], {}), "('%Y-%m-%dT%H:%M:%S')\n", (5598, 5619), False, 'import time\n')] |
#!/usr/bin/env python3
#
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import argparse
import Libraries.arguments as ar
import Libraries.tools.general as gt
import Libraries.tools.zabbix as zt
import Classes.AppConfig as AppConfig
import requests
import copy
def run():
    """Sync the DHT server list from the TON network config into Zabbix.

    Fetches the public network config, indexes its static DHT nodes by
    ``ip.port``, fetches the current Zabbix hosts in the DHT group, then
    adds missing hosts, updates changed keys, and deletes hosts no longer
    present in the network config.  Exits the process with a status code.
    """
    description = 'Fetches list of dht servers from network config and performs sync with zabbix'
    parser = argparse.ArgumentParser(formatter_class = argparse.RawDescriptionHelpFormatter,
                                     description = description)
    ar.set_standard_args(parser, "other")
    cfg = AppConfig.AppConfig(parser.parse_args())
    stats = {
        "nodes": 0,
        "hosts_known": 0,
        "hosts_updated": 0,
        "hosts_added": 0,
        "hosts_disabled": 0
    }
    cfg.log.log(os.path.basename(__file__), 3, "Fetching network config.")
    try:
        rs = requests.get(cfg.config["configs"]["global_public"]).json()
    except Exception as e:
        cfg.log.log(os.path.basename(__file__), 1, "Could not retrieve network config: " + str(e))
        sys.exit(1)
    if len(rs["dht"]["static_nodes"]["nodes"]) > 0:
        nodes = {}
        # We identify DHT nodes by ip:port combination
        #
        for element in rs["dht"]["static_nodes"]["nodes"]:
            nodes["{}.{}".format(gt.dec2ip(element["addr_list"]["addrs"][0]["ip"]),element["addr_list"]["addrs"][0]["port"])] = element
    else:
        cfg.log.log(os.path.basename(__file__), 1, "Network config contains no nodes")
        sys.exit(1)
    stats["nodes"] = len(nodes)
    cfg.log.log(os.path.basename(__file__), 3, "Retrieved {} DHT servers.".format(stats["nodes"]))
    cfg.log.log(os.path.basename(__file__), 3, "Fetching list of hosts in zabbix.")
    rs = zt.fetch_hosts(cfg, [cfg.config["mapping"]["groups"]["ton_public_dht_servers"]])
    if rs is None:
        cfg.log.log(os.path.basename(__file__), 1, "Could not fetch list of hosts.")
        sys.exit(1)
    # Again, we identify hosts by ip:port
    hdata = {}
    for element in rs:
        # Port is stored as the {$DHT.PORT} macro on the Zabbix host.
        port = next((chunk for chunk in element["macros"] if chunk["macro"] == "{$DHT.PORT}"), None)
        if port:
            hdata["{}.{}".format(element["interfaces"][0]["ip"], port["value"])] = element
    stats["hosts_known"] = len(hdata)
    cfg.log.log(os.path.basename(__file__), 3, "Retrieved {} hosts.".format(stats["hosts_known"]))
    # Scan nodes from network config, add or update key as needed
    #
    for element in nodes:
        if element not in hdata:
            # 2130706433 == 0x7F000001 == 127.0.0.1: skip localhost entries.
            if nodes[element]["addr_list"]["addrs"][0]["ip"] != 2130706433:
                cfg.log.log(os.path.basename(__file__), 3, "Adding node {}.".format(element))
                rs = add_node(cfg,nodes[element])
                if not rs:
                    cfg.log.log(os.path.basename(__file__), 1, "Could not add host.")
                    sys.exit(1)
                stats["hosts_added"] += 1
        else:
            # Work on a deep copy so the update is only sent when something changed.
            host = copy.deepcopy(hdata[element])
            key = next((chunk for chunk in host["macros"] if chunk["macro"] == "{$DHT.KEY}"), None)
            if not key or key["value"] != nodes[element]["id"]["key"]:
                zt.set_macro(host["macros"], "{$DHT.KEY}", str(nodes[element]["id"]["key"]))
            if host != hdata[element]:
                cfg.log.log(os.path.basename(__file__), 3, "Updating node {}.".format(element))
                zt.update_host(cfg, host, hdata[element])
                stats["hosts_updated"] += 1
    # Scan nodes from zabbix, remove if unknown
    #
    for host in hdata:
        if host not in nodes:
            zt.delete_host(cfg, hdata[host])
    sys.exit(0)
def add_node(cfg, server_data):
    """Create a Zabbix host for one DHT node and return its new host id.

    Builds a ``host.create`` JSON-RPC payload from *server_data* (one node
    entry of the network config) and the configured group/template ids,
    then posts it via the Zabbix API.  Exits the process on API failure.
    """
    cfg.log.log(os.path.basename(__file__), 3, "Adding host with KEY {}".format(server_data["id"]["key"]))
    groups = [
        cfg.config["mapping"]["groups"]["ton_public_dht_servers"]
    ]
    templates = [
        cfg.config["mapping"]["templates"]["ton_dht_server"]
    ]
    payload = {
        "jsonrpc": "2.0",
        "method": "host.create",
        "params": {
            # Host is named after the decimal-decoded IP and port.
            "host": "TON DHT node {}.{}".format(gt.dec2ip(server_data["addr_list"]["addrs"][0]["ip"]),server_data["addr_list"]["addrs"][0]["port"]),
            "interfaces":
                [
                    {
                        "type": 1,
                        "main": 1,
                        "useip": 1,
                        "ip": gt.dec2ip(server_data["addr_list"]["addrs"][0]["ip"]),
                        "dns": "",
                        "port": "10050"
                    }
                ],
            "tags": [
                {
                    "tag": "c_network",
                    "value": cfg.config["net"]
                },
                {
                    "tag": "c_stage",
                    "value": "prod"
                },
                {
                    "tag": "c_origin",
                    "value": "dht_sync"
                }
            ],
            "macros":
                [
                    {
                        "macro": "{$DHT.KEY}",
                        "value": server_data["id"]["key"]
                    },
                    {
                        "macro": "{$DHT.PORT}",
                        "value": str(server_data["addr_list"]["addrs"][0]["port"])
                    },
                    {
                        # Sync timestamp so stale hosts can be identified later.
                        "macro": "{$UPDATED}",
                        "value": str(gt.get_timestamp())
                    }
                ],
            "groups": [],
            "templates": []
        },
        "auth": cfg.config["zabbix"]["api_token"],
        "id": 1
    }
    for element in groups:
        payload["params"]["groups"].append({"groupid": element})
    for element in templates:
        payload["params"]["templates"].append({"templateid": element})
    rs = zt.execute_api_query(cfg, payload)
    if not rs:
        cfg.log.log(os.path.basename(__file__), 1, "Failed to add host with KEY {}".format(server_data["id"]["key"]))
        sys.exit(1)
    return rs["result"]["hostids"][0]
if __name__ == '__main__':
    run()
| [
"sys.exit",
"copy.deepcopy",
"Libraries.tools.zabbix.fetch_hosts",
"argparse.ArgumentParser",
"Libraries.tools.general.dec2ip",
"Libraries.arguments.set_standard_args",
"requests.get",
"os.path.realpath",
"os.path.basename",
"Libraries.tools.zabbix.execute_api_query",
"Libraries.tools.general.ge... | [((437, 544), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'description': 'description'}), '(formatter_class=argparse.\n RawDescriptionHelpFormatter, description=description)\n', (460, 544), False, 'import argparse\n'), ((584, 621), 'Libraries.arguments.set_standard_args', 'ar.set_standard_args', (['parser', '"""other"""'], {}), "(parser, 'other')\n", (604, 621), True, 'import Libraries.arguments as ar\n'), ((1802, 1887), 'Libraries.tools.zabbix.fetch_hosts', 'zt.fetch_hosts', (['cfg', "[cfg.config['mapping']['groups']['ton_public_dht_servers']]"], {}), "(cfg, [cfg.config['mapping']['groups']['ton_public_dht_servers']]\n )\n", (1816, 1887), True, 'import Libraries.tools.zabbix as zt\n'), ((3699, 3710), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3707, 3710), False, 'import sys\n'), ((5903, 5937), 'Libraries.tools.zabbix.execute_api_query', 'zt.execute_api_query', (['cfg', 'payload'], {}), '(cfg, payload)\n', (5923, 5937), True, 'import Libraries.tools.zabbix as zt\n'), ((839, 865), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (855, 865), False, 'import os\n'), ((1563, 1574), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1571, 1574), False, 'import sys\n'), ((1625, 1651), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1641, 1651), False, 'import os\n'), ((1725, 1751), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1741, 1751), False, 'import os\n'), ((1995, 2006), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2003, 2006), False, 'import sys\n'), ((2352, 2378), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (2368, 2378), False, 'import os\n'), ((3760, 3786), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (3776, 3786), False, 'import os\n'), ((6079, 6090), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', 
(6087, 6090), False, 'import sys\n'), ((95, 121), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (111, 121), False, 'import os\n'), ((1114, 1125), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1122, 1125), False, 'import sys\n'), ((1488, 1514), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1504, 1514), False, 'import os\n'), ((1922, 1948), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1938, 1948), False, 'import os\n'), ((3009, 3038), 'copy.deepcopy', 'copy.deepcopy', (['hdata[element]'], {}), '(hdata[element])\n', (3022, 3038), False, 'import copy\n'), ((3661, 3693), 'Libraries.tools.zabbix.delete_host', 'zt.delete_host', (['cfg', 'hdata[host]'], {}), '(cfg, hdata[host])\n', (3675, 3693), True, 'import Libraries.tools.zabbix as zt\n'), ((5973, 5999), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (5989, 5999), False, 'import os\n'), ((920, 972), 'requests.get', 'requests.get', (["cfg.config['configs']['global_public']"], {}), "(cfg.config['configs']['global_public'])\n", (932, 972), False, 'import requests\n'), ((1027, 1053), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1043, 1053), False, 'import os\n'), ((3455, 3496), 'Libraries.tools.zabbix.update_host', 'zt.update_host', (['cfg', 'host', 'hdata[element]'], {}), '(cfg, host, hdata[element])\n', (3469, 3496), True, 'import Libraries.tools.zabbix as zt\n'), ((4168, 4221), 'Libraries.tools.general.dec2ip', 'gt.dec2ip', (["server_data['addr_list']['addrs'][0]['ip']"], {}), "(server_data['addr_list']['addrs'][0]['ip'])\n", (4177, 4221), True, 'import Libraries.tools.general as gt\n'), ((1355, 1404), 'Libraries.tools.general.dec2ip', 'gt.dec2ip', (["element['addr_list']['addrs'][0]['ip']"], {}), "(element['addr_list']['addrs'][0]['ip'])\n", (1364, 1404), True, 'import Libraries.tools.general as gt\n'), ((2672, 2698), 'os.path.basename', 'os.path.basename', 
(['__file__'], {}), '(__file__)\n', (2688, 2698), False, 'import os\n'), ((2921, 2932), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2929, 2932), False, 'import sys\n'), ((3371, 3397), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (3387, 3397), False, 'import os\n'), ((4471, 4524), 'Libraries.tools.general.dec2ip', 'gt.dec2ip', (["server_data['addr_list']['addrs'][0]['ip']"], {}), "(server_data['addr_list']['addrs'][0]['ip'])\n", (4480, 4524), True, 'import Libraries.tools.general as gt\n'), ((2847, 2873), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (2863, 2873), False, 'import os\n'), ((5501, 5519), 'Libraries.tools.general.get_timestamp', 'gt.get_timestamp', ([], {}), '()\n', (5517, 5519), True, 'import Libraries.tools.general as gt\n')] |
from numpy import exp, pi, cos, sin, tan
from ....Functions.Geometry.inter_line_circle import inter_line_circle
def _comp_point_coordinate(self):
    """Compute the point coordinates needed to plot the Slot.

    All points are complex numbers (x + 1j*y) in the lamination frame.
    The hole is centered on the real axis at Z0 = Rext - H0; the left
    half (Z3..Z10) and right half (Z17..Z24) are mirror constructions
    rotated by +/-(pi/2 - alpha) around Z2 / Z25 respectively.

    Parameters
    ----------
    self : HoleM51
        A HoleM51 object

    Returns
    -------
    point_dict: dict
        A dict of the slot coordinates
    """
    Rext = self.get_Rext()
    # comp point coordinate (in complex)
    alpha = self.comp_alpha()
    # Chord width of the hole at radius Rext - H1.
    Wslot = 2 * sin(self.W1 / 2) * (Rext - self.H1)
    L = 0.5 * (Wslot - self.W0) / cos(alpha)  # ||P2,P5||
    # Center of the hole
    Z0 = Rext - self.H0
    # Z2/Z25: upper/lower corners of the central W0-wide section.
    Z2 = Z0 + 1j * self.W0 / 2
    Z25 = Z0 - 1j * self.W0 / 2
    Z15 = Z25 - self.H2
    Z1 = Z2 - 1j * self.W2
    Z26 = Z1 - 1j * self.W3
    Z12 = Z2 - self.H2
    Z13 = Z12 - 1j * self.W2
    Z14 = Z13 - 1j * self.W3
    Z11 = Z12 + 1j * tan(alpha / 2) * self.H2
    Z16 = Z15 - 1j * tan(alpha / 2) * self.H2
    # Draw the left side with center P2, and X axis =(P2,P5), Y axis=(P2,P10)
    Z3 = self.W4 * exp(1j * (pi / 2 - alpha)) + Z2
    Z4 = (self.W4 + self.W5) * exp(1j * (pi / 2 - alpha)) + Z2
    Z5 = (Rext - self.H1) * exp(1j * self.W1 / 2)
    Z10 = (1j * self.H2) * exp(1j * (pi / 2 - alpha)) + Z2
    Z9 = (1j * self.H2 + self.W4) * exp(1j * (pi / 2 - alpha)) + Z2
    Z8 = (1j * self.H2 + self.W4 + self.W5) * exp(1j * (pi / 2 - alpha)) + Z2
    Z7 = (1j * self.H2 + L) * exp(1j * (pi / 2 - alpha)) + Z2
    # Draw the right side with center P25, X axis (P25,P23), Y axis(P25,P17)
    Z24 = self.W6 * exp(-1j * (pi / 2 - alpha)) + Z25
    Z23 = (self.W6 + self.W7) * exp(-1j * (pi / 2 - alpha)) + Z25
    Z22 = (Rext - self.H1) * exp(-1j * self.W1 / 2)
    Z17 = (-1j * self.H2) * exp(-1j * (pi / 2 - alpha)) + Z25
    Z18 = (-1j * self.H2 + self.W6) * exp(-1j * (pi / 2 - alpha)) + Z25
    Z19 = (-1j * self.H2 + self.W6 + self.W7) * exp(-1j * (pi / 2 - alpha)) + Z25
    Z20 = (-1j * self.H2 + L) * exp(-1j * (pi / 2 - alpha)) + Z25
    # Z6 is the intersection of the line [Z7,Z10] and Circle centre
    # (0,0) radius Rext - H1
    Zint = inter_line_circle(Z7, Z10, Rext - self.H1)
    # Select the point with Re(Z) > 0
    if Zint[0].real > 0:
        Z6 = Zint[0]
    else:
        Z6 = Zint[1]
    # Z21 mirrors Z6 across the real axis (symmetric lower half).
    Z21 = Z6.conjugate()
    point_dict = dict()
    point_dict["Z1"] = Z1
    point_dict["Z2"] = Z2
    point_dict["Z3"] = Z3
    point_dict["Z4"] = Z4
    point_dict["Z5"] = Z5
    point_dict["Z6"] = Z6
    point_dict["Z7"] = Z7
    point_dict["Z8"] = Z8
    point_dict["Z9"] = Z9
    point_dict["Z10"] = Z10
    point_dict["Z11"] = Z11
    point_dict["Z12"] = Z12
    point_dict["Z13"] = Z13
    point_dict["Z14"] = Z14
    point_dict["Z15"] = Z15
    point_dict["Z16"] = Z16
    point_dict["Z17"] = Z17
    point_dict["Z18"] = Z18
    point_dict["Z19"] = Z19
    point_dict["Z20"] = Z20
    point_dict["Z21"] = Z21
    point_dict["Z22"] = Z22
    point_dict["Z23"] = Z23
    point_dict["Z24"] = Z24
    point_dict["Z25"] = Z25
    point_dict["Z26"] = Z26
    return point_dict
| [
"numpy.exp",
"numpy.sin",
"numpy.cos",
"numpy.tan"
] | [((565, 575), 'numpy.cos', 'cos', (['alpha'], {}), '(alpha)\n', (568, 575), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1175, 1198), 'numpy.exp', 'exp', (['(1.0j * self.W1 / 2)'], {}), '(1.0j * self.W1 / 2)\n', (1178, 1198), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1691, 1715), 'numpy.exp', 'exp', (['(-1.0j * self.W1 / 2)'], {}), '(-1.0j * self.W1 / 2)\n', (1694, 1715), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((495, 511), 'numpy.sin', 'sin', (['(self.W1 / 2)'], {}), '(self.W1 / 2)\n', (498, 511), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1052, 1080), 'numpy.exp', 'exp', (['(1.0j * (pi / 2 - alpha))'], {}), '(1.0j * (pi / 2 - alpha))\n', (1055, 1080), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1115, 1143), 'numpy.exp', 'exp', (['(1.0j * (pi / 2 - alpha))'], {}), '(1.0j * (pi / 2 - alpha))\n', (1118, 1143), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1224, 1252), 'numpy.exp', 'exp', (['(1.0j * (pi / 2 - alpha))'], {}), '(1.0j * (pi / 2 - alpha))\n', (1227, 1252), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1292, 1320), 'numpy.exp', 'exp', (['(1.0j * (pi / 2 - alpha))'], {}), '(1.0j * (pi / 2 - alpha))\n', (1295, 1320), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1370, 1398), 'numpy.exp', 'exp', (['(1.0j * (pi / 2 - alpha))'], {}), '(1.0j * (pi / 2 - alpha))\n', (1373, 1398), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1432, 1460), 'numpy.exp', 'exp', (['(1.0j * (pi / 2 - alpha))'], {}), '(1.0j * (pi / 2 - alpha))\n', (1435, 1460), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1562, 1591), 'numpy.exp', 'exp', (['(-1.0j * (pi / 2 - alpha))'], {}), '(-1.0j * (pi / 2 - alpha))\n', (1565, 1591), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1628, 1657), 'numpy.exp', 'exp', (['(-1.0j * (pi / 2 - alpha))'], {}), '(-1.0j * (pi / 2 - alpha))\n', (1631, 1657), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1742, 1771), 
'numpy.exp', 'exp', (['(-1.0j * (pi / 2 - alpha))'], {}), '(-1.0j * (pi / 2 - alpha))\n', (1745, 1771), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1814, 1843), 'numpy.exp', 'exp', (['(-1.0j * (pi / 2 - alpha))'], {}), '(-1.0j * (pi / 2 - alpha))\n', (1817, 1843), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1896, 1925), 'numpy.exp', 'exp', (['(-1.0j * (pi / 2 - alpha))'], {}), '(-1.0j * (pi / 2 - alpha))\n', (1899, 1925), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((1962, 1991), 'numpy.exp', 'exp', (['(-1.0j * (pi / 2 - alpha))'], {}), '(-1.0j * (pi / 2 - alpha))\n', (1965, 1991), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((883, 897), 'numpy.tan', 'tan', (['(alpha / 2)'], {}), '(alpha / 2)\n', (886, 897), False, 'from numpy import exp, pi, cos, sin, tan\n'), ((929, 943), 'numpy.tan', 'tan', (['(alpha / 2)'], {}), '(alpha / 2)\n', (932, 943), False, 'from numpy import exp, pi, cos, sin, tan\n')] |
# Exercises 1-4: interactive demonstrations of division/modulo, float
# limits/overflow, type conversions, and range().  Output-only section.
print("\n")
print("PythonExercises-v2 by <NAME>")
print("\n")
print("=== EXERCISE 1 ===")
print("\n")
print("(a) 5 / 3 = " + str(5 / 3))
print("=> with python3 you can receive a float even if you divide two \
integers")
print("\n")
print("(b) 5 % 3 = " + str(5 % 3))
print("=> % is the modulus which divides left hand operand by right hand \
operand and returns remainder")
print("\n")
print("(c) 5.0 / 3 = " + str(5.0 / 3))
print("=> outputs a float number.. there is no difference if a plain 5 or 5.0 \
is used")
print("\n")
print("(d) 5 / 3.0 = " + str(5 / 3.0))
print("=> outputs a float number.. there is no difference if a plain 3 or 3.0 \
is used")
print("\n")
print("(e) 5.2 % 3 = " + str(5.2 % 3))
print("=> % is the modulus which divides left hand operand by right hand \
operand and returns remainder")
print("\n")
print("=== EXERCISE 2 ===")
print("\n")
print("(a) 2000.3 ** 200 = ...")
try:
    print(str(2000.3 ** 200))
except OverflowError as e:
    print("=> The python3 interpreter throws a OverflowError " + str(e))
print("\n")
print("(b) 1.0 + 1.0 - 1.0 = " + str(1.0 + 1.0 - 1.0))
print("=> Addition and substraction of float values which results in another \
float value")
print("\n")
print("(c) 1.0 + 1.0e20 - 1.0e20 = " + str(1.0 + 1.0e20 - 1.0e20))
print("=> 1.0 + 1.0e20 is rounded as close as possible, which is 1.0e20 and \
after substraction of it again it results in 0.0")
print("\n")
print("=== EXERCISE 3 ===")
print("\n")
print("(a) float(123) = " + str(float(123)))
print("=> Takes the integer value 123 as input and casts it to the float \
value 123.0")
print("\n")
print("(b) float('123') = " + str(float('123')))
print("=> Takes the string '123' as input and casts it to the float value \
123.0")
print("\n")
print("(c) float('123.23') = " + str(float('123.23')))
print("=> Takes the string '123.23' as input and casts it to the float value \
123.23")
print("\n")
print("(d) int(123.23) = " + str(int(123.23)))
print("=> Takes the float 123.23 as input and casts it to the integer value \
123")
print("\n")
print("(e) int('123.23') = ...")
try:
    int('123.23')
except ValueError as e:
    print("=> The int() function can't cast a string to float to int and thus \
throws a ValueError (" + str(e) + ")")
print("\n")
# Bug fix: the label advertises int(float('123.23')) but the code previously
# computed int(float(123.23)) with a float literal; now it matches the label.
print("(f) int(float('123.23')) = " + str(int(float('123.23'))))
print("=> As we cast the string to float first, we can use it as a input to \
the int() function and receive a integer")
print("\n")
print("(g) str(12) = " + str(12))
print("=> Takes the integer 12 as input and casts it to the string '12'")
print("\n")
print("(h) str(12.2) = " + str(12.2))
print("=> Takes the float 12.2 as input and casts it to the string '12.2'")
print("\n")
print("(i) bool('a') = " + str(bool('a')))
print("=> Because an actual value (the character 'a') is passed to the bool() \
function, True is returned")
print("\n")
print("(j) bool(0) = " + str(bool(0)))
print("=> The boolean value False equals 0 in python, thus False is returned")
print("\n")
print("(k) bool(0.1) = " + str(bool(0.1)))
print("=> Because a value != 0 is provided in the bool() function, \
it returns True")
print("\n")
print("=== EXERCISE 4 ===")
print("\n")
print("range(5) = {}".format(range(5)))
print("=> range(5) returns a sequence of integers from 0 to 4. for i in \
range(5) is consequently iterating over the sequence of integers")
print("\n")
print("type(range(5)) = {}".format(type(range(5))))
print("=> The type function returns an object's class. For range(5) the class \
range is returned")
print("\n")
print("=== EXERCISE 5 ===")
print("\n")
def div_by_number(numbers_list, max_found):
    """Print the first integers divisible by any divisor in *numbers_list*.

    Walks x = 1, 2, ... and prints x once per divisor that divides it,
    stopping once at least *max_found* matches have been printed (an x
    divisible by several divisors is printed/counted once per divisor,
    matching the original behavior).

    Returns the list of printed values (new; previously returned None) so
    results can also be inspected programmatically.
    """
    found = []
    # Guard (bug fix): an empty divisor list (or non-positive quota) would
    # otherwise spin forever without ever finding a match.
    if max_found <= 0 or not numbers_list:
        return found
    x = 1
    while len(found) < max_found:
        for divisor in numbers_list:
            if x % divisor == 0:
                print(x)
                found.append(x)
        x += 1
    return found
# Demo: print the first 20 hits for the divisors 5, 7 and 11.
numbers_list = [5, 7, 11]
print("div_by_number({}, 20)\n".format(numbers_list))
div_by_number(numbers_list, 20)
print("\n")
print("=== EXERCISE 6 ===")
print("\n")
print("(a) & (b)\n")
def is_prime(n):
    """Primality test via trial division over 6k +/- 1 candidates.

    Returns True for primes, False for 0, 1 and all composites.
    """
    if n < 2:
        return False
    if n < 4:
        return True  # 2 and 3 are prime
    if n % 2 == 0 or n % 3 == 0:
        return False
    # Every prime > 3 is of the form 6k +/- 1; test those pairs only.
    divisor = 5
    while divisor * divisor <= n:
        if n % divisor == 0 or n % (divisor + 2) == 0:
            return False
        divisor += 6
    return True
# Spot-check is_prime on small values and one larger candidate.
print("is_prime(0) = {}\n".format(is_prime(0)))
print("is_prime(1) = {}\n".format(is_prime(1)))
print("is_prime(3) = {}\n".format(is_prime(3)))
print("is_prime(7) = {}\n".format(is_prime(7)))
print("is_prime(8) = {}\n".format(is_prime(8)))
print("is_prime(112331) = {}".format(is_prime(112331)))
def primes_up_to(n):
    """Return all primes strictly below *n*, in ascending order."""
    return [candidate for candidate in range(n) if is_prime(candidate)]
# Exercise 6 (c): all primes below 100.
print("\n(c) primes_up_to(100) = {}".format(primes_up_to(100)))
def first_primes(n):
    """Return the first *n* primes, in ascending order."""
    found = []
    candidate = 0
    while len(found) < n:
        if is_prime(candidate):
            found.append(candidate)
        candidate += 1
    return found
# Exercise 6 (d) and exercise 7 header.
print("\n(d) first_primes(12) = {}".format(first_primes(12)))
print("\n")
print("=== EXERCISE 7 ===")
print("\n")
print("(a) print_elements(elements_list)\n")
def print_elements(elements):
for element in elements:
print(element)
# Mixed-type sample list shared by the exercise 7 demos.
elements_list = [12, "abc", 92.2, "hello"]
print_elements(elements_list)
print("\n(b) print_elements_reverse(elements_list)\n")
def print_elements_reverse(elements):
for element in elements[::-1]:
print(element)
# Exercise 7 (b) demo and (c) header.
print_elements_reverse(elements_list)
print("\n(c) len_elements(elements_list)\n")
def len_elements(elements):
    """Count the items in *elements* without using len()."""
    return sum(1 for _ in elements)
print("len_elements(elements_list) = {}".format(len_elements(elements_list)))
print("\n")
print("=== EXERCISE 8 ===")
# Aliasing vs. copying: b = a shares the object, c = a[:] is a shallow copy.
a = [12, "abc", 92.2, "hello"]
print("\n")
print("(a) a = {}".format(a))
print("\n(b) b = a")
b = a
print("\n(c) b[1] = 'changed'")
b[1] = "changed"
print("\n(d) a = {}".format(a))
print("=> b is binding to the same object as a, so when b[1] was changed \
a[1] also shows the change")
print("\n(e) c = a[:]")
c = a[:]
print("\n(f) c[2] = 'also changed'")
c[2] = "also changed"
print("\n(g) a = {}".format(a))
print("=> A copy of the list a was created with a[:] and assigned to c, thus \
a[2] did not change when c[2] changed")
def set_first_elem_to_zero(l):
    """Zero out the first element of *l* in place; return the same list.

    Empty lists are returned unchanged.
    """
    if l:
        l[0] = 0
    return l
# In-place mutation demo: the function changes the caller's list even though
# the return value is not assigned (same binding).
numbers = [12, 21, 214, 3]
print("\n...")
print("\nnumbers = {}".format(numbers))
print("set_first_elem_to_zero(numbers) = \
{}".format(set_first_elem_to_zero(numbers)))
print("numbers = {}".format(numbers))
print("=> The original list also changed, even though we did not assign \
the returned list to it (same binding)")
print("\n")
print("=== EXERCISE 9 ===")
elements = [[1,3], [3,6]]
print("\n")
print("elements = {}".format(elements))
def flat_list(l):
    """Flatten one level of nesting: [[1, 3], [3, 6]] -> [1, 3, 3, 6].

    Replaces the previous ``flat_list = lambda ...`` assignment (PEP 8
    E731: use def, which also gives the function a proper __name__).
    """
    return [element for sublist in l for element in sublist]
print("flat_list(elements) = {}".format(flat_list(elements)))
print("\n")
print("=== EXERCISE 10 ===")
# Plot sin(t-2)^2 * e^(-t^2) over [0, 2).
import matplotlib.pyplot as plt
import numpy as np
t = np.arange(0.0, 2.0, 0.01)
s = np.sin(t - 2) ** 2 * np.e ** (-t ** 2)
fig, ax = plt.subplots()
ax.plot(t, s)
ax.set(xlabel='x', ylabel='y',
       title='Exercise 10')
# NOTE(review): with an interactive backend this blocks until the plot
# window is closed.
plt.show()
print("\n")
print("See Figure_1.png")
print("\n")
print("=== EXERCISE 11 ===")
def product_iteration(numbers):
product = 0
if len(numbers) > 0:
product = numbers.pop()
for number in numbers:
product = product * number
return product
from functools import reduce
def product_recursive(numbers):
if len(numbers) > 0:
return reduce((lambda x, y: x * y), numbers)
else:
return 0
numbers = [21, 12, 10, 128, 2]
empty_list = []
print("\n")
print("product_iteration(numbers) = {}".format(product_iteration(numbers)))
print("product_iteration(empty_list) = \
{}".format(product_iteration(empty_list)))
numbers = [21, 12, 10, 128, 2]
print("\n")
print("product_recursive(numbers) = {}".format(product_recursive(numbers)))
print("product_recursive(empty_list) = \
{}".format(product_recursive(empty_list)))
print("\n")
print("=== EXERCISE 12 ===")
print("\n\nGood to know!")
print("\n")
print("=== EXERCISE 13 ===")
def read_file(filename):
with open(filename, 'r') as myfile:
data=myfile.read().replace('\n', '')
return data
file_content = read_file("emails.txt")
print("\n\nread_file('emails.txt')\n\n{}".format(file_content))
import re
def extract_email(string):
match = re.findall(r'[\w\.-]+@[\w\.-]+\.\w+', string)
return match
print("\nextract_email(file_content)\
\n\n{}".format(extract_email(file_content))) | [
"functools.reduce",
"numpy.sin",
"re.findall",
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((7097, 7122), 'numpy.arange', 'np.arange', (['(0.0)', '(2.0)', '(0.01)'], {}), '(0.0, 2.0, 0.01)\n', (7106, 7122), True, 'import numpy as np\n'), ((7177, 7191), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (7189, 7191), True, 'import matplotlib.pyplot as plt\n'), ((7266, 7276), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7274, 7276), True, 'import matplotlib.pyplot as plt\n'), ((8547, 8597), 're.findall', 're.findall', (['"""[\\\\w\\\\.-]+@[\\\\w\\\\.-]+\\\\.\\\\w+"""', 'string'], {}), "('[\\\\w\\\\.-]+@[\\\\w\\\\.-]+\\\\.\\\\w+', string)\n", (8557, 8597), False, 'import re\n'), ((7127, 7140), 'numpy.sin', 'np.sin', (['(t - 2)'], {}), '(t - 2)\n', (7133, 7140), True, 'import numpy as np\n'), ((7660, 7695), 'functools.reduce', 'reduce', (['(lambda x, y: x * y)', 'numbers'], {}), '(lambda x, y: x * y, numbers)\n', (7666, 7695), False, 'from functools import reduce\n')] |
import dash
import dash_core_components as dcc
import dash_bootstrap_components as dbc
import dash_html_components as html
import pandas as pd
import plotly.express as px
import plotly.graph_objs as go
from datetime import date
import dash_loading_spinners as dls
from dash.dependencies import Input, Output, ClientsideFunction, State
from app import app
import requests
# Sensor columns offered in the y-axis dropdown of the comparison scatter
# plot (consumed by card_4 and the update_figure3 callback below).
features = ["Screw Speed", "Gas Flow Rate", "Steam Pressure", "Oven-Home Temperature",
            "Water Temperature", "Oxygen_pct", "Oven-Home Pressure", "Combustion Air Pressure",
            "Temperature before prear", "Temperature after prear", "Burner Position", "Burner_pct",
            "Borra Flow Rate_kgh", "Cisco Flow Rate_kgh"]
# Tab card: steam-generation time series, wrapped in a loading spinner.
cardtab_1 = dbc.Card([
    html.Div(
        # NOTE(review): this id is reused by cardtab_2 and card_5 below;
        # Dash component ids must be unique -- verify before wiring it to
        # a callback.
        id='output-container-date-picker-range',
        className="month-container",
    ),
    dls.Hash(
        dcc.Graph(id="graph-steam", className="graph-card"),
        size=160,
        speed_multiplier=0.8,
        debounce=200,
    ),
])
# Tab card: histogram of the steam flow-rate distribution.
cardtab_2 = dbc.Card([
    html.Div(
        # NOTE(review): id duplicated across cardtab_1 and card_5 -- Dash
        # requires unique component ids; verify before relying on it.
        id='output-container-date-picker-range',
        className="month-container",
    ),
    dls.Hash(
        dcc.Graph(id="graph-distribution", className="graph-card"),
        size=160,
        speed_multiplier=0.8,
        debounce=200,
    ),
])
# Controls card: date-range picker plus the daily/hourly frequency selector.
card_3 = dbc.Card([
    dbc.Col([
        dbc.Col([
            html.P("Select date range that you want to see:"),
            # Range limited to the span of the available dataset.
            dcc.DatePickerRange(
                id='my-date-picker-range',
                min_date_allowed=date(2020, 10, 1),
                max_date_allowed=date(2021, 6, 30),
                initial_visible_month=date(2020, 10, 1),
                end_date=date(2021, 6, 30),
                clearable=True,
                with_portal=True,
                month_format="MMMM, YYYY",
                number_of_months_shown=3,
            ),
        ]),
        html.Hr(),
        dbc.Col([
            html.P("Select the data frequency:"),
            # Values map to the API tables queried in update_figure.
            dbc.RadioItems(
                id='frequency-radioitems',
                labelStyle={"display": "inline-block"},
                options=[
                    {"label": "Daily", "value": "data_daily"},
                    {"label": "Hourly", "value": "data_hourly"},
                ],
                value="data_daily",
                style={"color": "black"},
            ),
        ]),
    ]),
])
# Controls card for the comparison plot: y-axis feature dropdown and an
# efficiency band slider (values echoed into the 'range-efficiency' label).
card_4 = dbc.Card([
    dbc.Col([
        dbc.FormGroup([
            dbc.Label("Y - Axis"),
            dcc.Dropdown(
                id="y-variable",
                options=[{"label": col, "value": col} for col in features],
                value="Gas Flow Rate",
            ),
        ]),
        html.H6("Efficiency Range"),
        dcc.RangeSlider(
            id='slider-efficiency',
            min=0,
            max=1.00,
            step=0.01,
            value=[0, 1.00],
        ),
        html.P(id='range-efficiency'),
    ]),
])
# Card holding the steam-vs-feature comparison scatter plot.
card_5 = dbc.Card([
    html.Div(
        # NOTE(review): id duplicated across cardtab_1 and cardtab_2 --
        # Dash requires unique component ids; verify before relying on it.
        id='output-container-date-picker-range',
        className="month-container",
    ),
    dls.Hash(
        dcc.Graph(id="graph-comparison", className="graph-card"),
        size=160,
        speed_multiplier=0.8,
        debounce=200,
    ),
])
# Page layout: header (logo + title), then a graphs column (tabbed time
# series / distribution plus the date controls) and a data column
# (comparison controls plus the scatter plot).
layout = [
    html.Div([
        html.Img(
            src="/assets/images/Buencafe-logo.png",
            className="corr-icon",
        ),
        html.H2(
            "Steam Analytics",
            className="content-title",
        ),
        html.Div(children=[
            html.Div([
                dbc.Tabs(
                    [
                        dbc.Tab(cardtab_1, label="Time series"),
                        dbc.Tab(cardtab_2, label="Distribution"),
                    ],
                    id="card-tabs",
                    card=True,
                    active_tab="tab-1",
                ),
                card_3,
            ], className="graph_col_1"),
            html.Div(children=[
                card_4,
                card_5,
            ], className="data_col_2"),
        ], className="wrapper__steam-data"),
    ], className="wrapper__steam"),
]
@app.callback(
    Output('graph-steam','figure'),
    [Input('my-date-picker-range', 'start_date'),
    Input('my-date-picker-range', 'end_date'),
    Input('frequency-radioitems', 'value')]
)
def update_figure(start_date, end_date, value_radio):
    """
    Redraw the steam-generation time series.

    Fetches the daily or hourly table (per the frequency radio buttons)
    from the data API and plots the "Steam Flow Rate" column between
    ``start_date`` and ``end_date``. On any failure (network, JSON shape,
    plotting) an empty figure with the same styling is returned so the
    dashboard stays responsive.
    """
    # Shared styling for both the populated and the fallback figure.
    layout_kwargs = dict(
        title='Steam Generation',
        xaxis_title='Date',
        yaxis_title='Steam (Kg/hour)',
        transition_duration=500,
        paper_bgcolor='rgba(0,0,0,0)',
        plot_bgcolor='rgba(0,0,0,0)')
    try:
        if value_radio == "data_daily":
            payload = {"query": "SELECT * FROM daily"}
            petition = requests.post(
                'https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data',
                payload)
            data = pd.DataFrame(petition.json()['body'])
            # Daily rows carry calendar dates only.
            data['Time'] = pd.to_datetime(data['Time']).dt.date.astype("datetime64[ns]")
            data.set_index(["Time"], inplace=True)
        elif value_radio == "data_hourly":
            payload = {"query": "SELECT * FROM hourly"}
            petition = requests.post(
                'https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data',
                payload)
            data = pd.DataFrame(petition.json()['body'])
            data['Time'] = pd.to_datetime(data['Time'])
            data.set_index(["Time"], inplace=True)
        fig = go.Figure()
        fig.add_trace(go.Scatter(
            x=data.loc[start_date:end_date].index,
            y=data.loc[start_date:end_date]["Steam Flow Rate"],
            mode="lines",
            name="Steam"))
        fig.update_layout(**layout_kwargs)
        return fig
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is broad enough for this fallback.
        fig = go.Figure()
        fig.update_layout(**layout_kwargs)
        return fig
@app.callback(
    Output('graph-distribution','figure'),
    [Input('my-date-picker-range', 'start_date'),
    Input('my-date-picker-range', 'end_date')]
)
def update_figure2(start_date, end_date):
    """
    Redraw the histogram of the daily "Steam Flow Rate" distribution for
    the selected date range. Falls back to an empty, identically styled
    histogram when the fetch or plot fails.
    """
    # Shared styling for both the populated and the fallback figure.
    layout_kwargs = dict(
        title='Steam Flow Rate Distribution',
        xaxis_title='Steam (Kg/hour)',
        yaxis_title='Count',
        transition_duration=500,
        paper_bgcolor='rgba(0,0,0,0)',
        plot_bgcolor='rgba(0,0,0,0)')
    try:
        payload = {"query": "SELECT * FROM daily"}
        petition = requests.post(
            'https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data',
            payload)
        df = pd.DataFrame(petition.json()['body'])
        # Daily rows carry calendar dates only.
        df['Time'] = pd.to_datetime(df['Time']).dt.date.astype("datetime64[ns]")
        df.set_index(["Time"], inplace=True)
        fig = px.histogram(df.loc[start_date:end_date], x="Steam Flow Rate", nbins=100)
    except Exception:
        # Was a bare ``except:``; narrowed so system-exiting exceptions
        # still propagate.
        fig = px.histogram()
    fig.update_layout(**layout_kwargs)
    return fig
@app.callback(
    [Output("graph-comparison", "figure"),
    Output("range-efficiency", "children")],
    [Input("y-variable", "value"),
    Input("slider-efficiency", "value"),]
)
def update_figure3(feature, efficiency):
    """
    Redraw the scatter plot comparing steam flow rate against the selected
    feature, restricted to rows whose 'Efficiency' lies strictly inside the
    ``efficiency`` slider band. Also returns the band as display text for
    the 'range-efficiency' label. Falls back to an empty, identically
    styled figure when the fetch or plot fails.
    """
    # The label text does not depend on the fetch, so compute it once.
    range_efficiency = str(efficiency[0]) + " - " + str(efficiency[1])
    # Shared styling for both the populated and the fallback figure.
    layout_kwargs = dict(
        title='Steam Flow Rate Comparison',
        xaxis_title='Steam (Kg/hour)',
        yaxis_title=feature,
        transition_duration=500,
        paper_bgcolor='rgba(0,0,0,0)',
        plot_bgcolor='rgba(0,0,0,0)')
    try:
        payload = {"query": "SELECT * FROM hourly"}
        petition = requests.post(
            'https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data',
            payload)
        df2 = pd.DataFrame(petition.json()['body'])
        df2['Time'] = pd.to_datetime(df2['Time']).dt.date.astype("datetime64[ns]")
        df2.set_index(["Time"], inplace=True)
        # Build the efficiency-band mask once instead of evaluating the
        # same boolean expression twice.
        mask = (df2['Efficiency'] < efficiency[1]) & (df2['Efficiency'] > efficiency[0])
        fig = px.scatter(
            x=df2[mask]["Steam Flow Rate"],
            y=df2[mask][feature]
        )
    except Exception:
        # Was a bare ``except:``; narrowed so system-exiting exceptions
        # still propagate.
        fig = px.scatter()
    fig.update_layout(**layout_kwargs)
    return fig, range_efficiency
"requests.post",
"plotly.graph_objs.Scatter",
"dash.dependencies.Input",
"dash_bootstrap_components.Label",
"pandas.to_datetime",
"dash_html_components.Div",
"plotly.express.scatter",
"dash.dependencies.Output",
"dash_html_components.H6",
"datetime.date",
"dash_html_components.Hr",
"dash_html_... | [((5590, 5621), 'dash.dependencies.Output', 'Output', (['"""graph-steam"""', '"""figure"""'], {}), "('graph-steam', 'figure')\n", (5596, 5621), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((8142, 8180), 'dash.dependencies.Output', 'Output', (['"""graph-distribution"""', '"""figure"""'], {}), "('graph-distribution', 'figure')\n", (8148, 8180), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((746, 824), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""output-container-date-picker-range"""', 'className': '"""month-container"""'}), "(id='output-container-date-picker-range', className='month-container')\n", (754, 824), True, 'import dash_html_components as html\n'), ((1093, 1171), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""output-container-date-picker-range"""', 'className': '"""month-container"""'}), "(id='output-container-date-picker-range', className='month-container')\n", (1101, 1171), True, 'import dash_html_components as html\n'), ((3389, 3467), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""output-container-date-picker-range"""', 'className': '"""month-container"""'}), "(id='output-container-date-picker-range', className='month-container')\n", (3397, 3467), True, 'import dash_html_components as html\n'), ((7226, 7237), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (7235, 7237), True, 'import plotly.graph_objs as go\n'), ((5627, 5670), 'dash.dependencies.Input', 'Input', (['"""my-date-picker-range"""', '"""start_date"""'], {}), "('my-date-picker-range', 'start_date')\n", (5632, 5670), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((5676, 5717), 'dash.dependencies.Input', 'Input', (['"""my-date-picker-range"""', '"""end_date"""'], {}), "('my-date-picker-range', 'end_date')\n", (5681, 5717), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((5723, 5761), 
'dash.dependencies.Input', 'Input', (['"""frequency-radioitems"""', '"""value"""'], {}), "('frequency-radioitems', 'value')\n", (5728, 5761), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((8570, 8664), 'requests.post', 'requests.post', (['"""https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data"""', 'payload'], {}), "('https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data'\n , payload)\n", (8583, 8664), False, 'import requests\n'), ((8715, 8737), 'pandas.DataFrame', 'pd.DataFrame', (['test_var'], {}), '(test_var)\n', (8727, 8737), True, 'import pandas as pd\n'), ((9068, 9141), 'plotly.express.histogram', 'px.histogram', (['df.loc[start_date:end_date]'], {'x': '"""Steam Flow Rate"""', 'nbins': '(100)'}), "(df.loc[start_date:end_date], x='Steam Flow Rate', nbins=100)\n", (9080, 9141), True, 'import plotly.express as px\n'), ((8186, 8229), 'dash.dependencies.Input', 'Input', (['"""my-date-picker-range"""', '"""start_date"""'], {}), "('my-date-picker-range', 'start_date')\n", (8191, 8229), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((8235, 8276), 'dash.dependencies.Input', 'Input', (['"""my-date-picker-range"""', '"""end_date"""'], {}), "('my-date-picker-range', 'end_date')\n", (8240, 8276), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((10346, 10440), 'requests.post', 'requests.post', (['"""https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data"""', 'payload'], {}), "('https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data'\n , payload)\n", (10359, 10440), False, 'import requests\n'), ((10492, 10514), 'pandas.DataFrame', 'pd.DataFrame', (['test_var'], {}), '(test_var)\n', (10504, 10514), True, 'import pandas as pd\n'), ((10716, 10926), 'plotly.express.scatter', 'px.scatter', ([], {'x': "df2[(df2['Efficiency'] < efficiency[1]) & (df2['Efficiency'] > efficiency[0])][\n 'Steam Flow Rate']", 'y': 
"df2[(df2['Efficiency'] < efficiency[1]) & (df2['Efficiency'] > efficiency[0])][\n feature]"}), "(x=df2[(df2['Efficiency'] < efficiency[1]) & (df2['Efficiency'] >\n efficiency[0])]['Steam Flow Rate'], y=df2[(df2['Efficiency'] <\n efficiency[1]) & (df2['Efficiency'] > efficiency[0])][feature])\n", (10726, 10926), True, 'import plotly.express as px\n'), ((9892, 9928), 'dash.dependencies.Output', 'Output', (['"""graph-comparison"""', '"""figure"""'], {}), "('graph-comparison', 'figure')\n", (9898, 9928), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((9934, 9972), 'dash.dependencies.Output', 'Output', (['"""range-efficiency"""', '"""children"""'], {}), "('range-efficiency', 'children')\n", (9940, 9972), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((9980, 10008), 'dash.dependencies.Input', 'Input', (['"""y-variable"""', '"""value"""'], {}), "('y-variable', 'value')\n", (9985, 10008), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((10014, 10049), 'dash.dependencies.Input', 'Input', (['"""slider-efficiency"""', '"""value"""'], {}), "('slider-efficiency', 'value')\n", (10019, 10049), False, 'from dash.dependencies import Input, Output, ClientsideFunction, State\n'), ((890, 941), 'dash_core_components.Graph', 'dcc.Graph', ([], {'id': '"""graph-steam"""', 'className': '"""graph-card"""'}), "(id='graph-steam', className='graph-card')\n", (899, 941), True, 'import dash_core_components as dcc\n'), ((1237, 1295), 'dash_core_components.Graph', 'dcc.Graph', ([], {'id': '"""graph-distribution"""', 'className': '"""graph-card"""'}), "(id='graph-distribution', className='graph-card')\n", (1246, 1295), True, 'import dash_core_components as dcc\n'), ((3533, 3589), 'dash_core_components.Graph', 'dcc.Graph', ([], {'id': '"""graph-comparison"""', 'className': '"""graph-card"""'}), "(id='graph-comparison', className='graph-card')\n", (3542, 3589), True, 'import 
dash_core_components as dcc\n'), ((3854, 3925), 'dash_html_components.Img', 'html.Img', ([], {'src': '"""/assets/images/Buencafe-logo.png"""', 'className': '"""corr-icon"""'}), "(src='/assets/images/Buencafe-logo.png', className='corr-icon')\n", (3862, 3925), True, 'import dash_html_components as html\n'), ((3973, 4026), 'dash_html_components.H2', 'html.H2', (['"""Steam Analytics"""'], {'className': '"""content-title"""'}), "('Steam Analytics', className='content-title')\n", (3980, 4026), True, 'import dash_html_components as html\n'), ((6354, 6448), 'requests.post', 'requests.post', (['"""https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data"""', 'payload'], {}), "('https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data'\n , payload)\n", (6367, 6448), False, 'import requests\n'), ((6522, 6544), 'pandas.DataFrame', 'pd.DataFrame', (['test_var'], {}), '(test_var)\n', (6534, 6544), True, 'import pandas as pd\n'), ((7260, 7394), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'x': 'data.loc[start_date:end_date].index', 'y': "data.loc[start_date:end_date]['Steam Flow Rate']", 'mode': '"""lines"""', 'name': '"""Steam"""'}), "(x=data.loc[start_date:end_date].index, y=data.loc[start_date:\n end_date]['Steam Flow Rate'], mode='lines', name='Steam')\n", (7270, 7394), True, 'import plotly.graph_objs as go\n'), ((7815, 7826), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (7824, 7826), True, 'import plotly.graph_objs as go\n'), ((9512, 9526), 'plotly.express.histogram', 'px.histogram', ([], {}), '()\n', (9524, 9526), True, 'import plotly.express as px\n'), ((11516, 11528), 'plotly.express.scatter', 'px.scatter', ([], {}), '()\n', (11526, 11528), True, 'import plotly.express as px\n'), ((2095, 2104), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (2102, 2104), True, 'import dash_html_components as html\n'), ((3079, 3106), 'dash_html_components.H6', 'html.H6', (['"""Efficiency Range"""'], {}), "('Efficiency Range')\n", (3086, 3106), 
True, 'import dash_html_components as html\n'), ((3120, 3206), 'dash_core_components.RangeSlider', 'dcc.RangeSlider', ([], {'id': '"""slider-efficiency"""', 'min': '(0)', 'max': '(1.0)', 'step': '(0.01)', 'value': '[0, 1.0]'}), "(id='slider-efficiency', min=0, max=1.0, step=0.01, value=[0,\n 1.0])\n", (3135, 3206), True, 'import dash_core_components as dcc\n'), ((3312, 3341), 'dash_html_components.P', 'html.P', ([], {'id': '"""range-efficiency"""'}), "(id='range-efficiency')\n", (3318, 3341), True, 'import dash_html_components as html\n'), ((6925, 7019), 'requests.post', 'requests.post', (['"""https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data"""', 'payload'], {}), "('https://k8nmzco6tb.execute-api.us-east-1.amazonaws.com/dev/data'\n , payload)\n", (6938, 7019), False, 'import requests\n'), ((7080, 7102), 'pandas.DataFrame', 'pd.DataFrame', (['test_var'], {}), '(test_var)\n', (7092, 7102), True, 'import pandas as pd\n'), ((7130, 7158), 'pandas.to_datetime', 'pd.to_datetime', (["data['Time']"], {}), "(data['Time'])\n", (7144, 7158), True, 'import pandas as pd\n'), ((1489, 1538), 'dash_html_components.P', 'html.P', (['"""Select date range that you want to see:"""'], {}), "('Select date range that you want to see:')\n", (1495, 1538), True, 'import dash_html_components as html\n'), ((2144, 2180), 'dash_html_components.P', 'html.P', (['"""Select the data frequency:"""'], {}), "('Select the data frequency:')\n", (2150, 2180), True, 'import dash_html_components as html\n'), ((2236, 2475), 'dash_bootstrap_components.RadioItems', 'dbc.RadioItems', ([], {'id': '"""frequency-radioitems"""', 'labelStyle': "{'display': 'inline-block'}", 'options': "[{'label': 'Daily', 'value': 'data_daily'}, {'label': 'Hourly', 'value':\n 'data_hourly'}]", 'value': '"""data_daily"""', 'style': "{'color': 'black'}"}), "(id='frequency-radioitems', labelStyle={'display':\n 'inline-block'}, options=[{'label': 'Daily', 'value': 'data_daily'}, {\n 'label': 'Hourly', 'value': 
'data_hourly'}], value='data_daily', style=\n {'color': 'black'})\n", (2250, 2475), True, 'import dash_bootstrap_components as dbc\n'), ((2749, 2770), 'dash_bootstrap_components.Label', 'dbc.Label', (['"""Y - Axis"""'], {}), "('Y - Axis')\n", (2758, 2770), True, 'import dash_bootstrap_components as dbc\n'), ((2788, 2904), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""y-variable"""', 'options': "[{'label': col, 'value': col} for col in features]", 'value': '"""Gas Flow Rate"""'}), "(id='y-variable', options=[{'label': col, 'value': col} for col in\n features], value='Gas Flow Rate')\n", (2800, 2904), True, 'import dash_core_components as dcc\n'), ((5129, 5188), 'dash_html_components.Div', 'html.Div', ([], {'children': '[card_4, card_5]', 'className': '"""data_col_2"""'}), "(children=[card_4, card_5], className='data_col_2')\n", (5137, 5188), True, 'import dash_html_components as html\n'), ((8759, 8785), 'pandas.to_datetime', 'pd.to_datetime', (["df['Time']"], {}), "(df['Time'])\n", (8773, 8785), True, 'import pandas as pd\n'), ((10537, 10564), 'pandas.to_datetime', 'pd.to_datetime', (["df2['Time']"], {}), "(df2['Time'])\n", (10551, 10564), True, 'import pandas as pd\n'), ((6572, 6600), 'pandas.to_datetime', 'pd.to_datetime', (["data['Time']"], {}), "(data['Time'])\n", (6586, 6600), True, 'import pandas as pd\n'), ((1699, 1716), 'datetime.date', 'date', (['(2020)', '(10)', '(1)'], {}), '(2020, 10, 1)\n', (1703, 1716), False, 'from datetime import date\n'), ((1755, 1772), 'datetime.date', 'date', (['(2021)', '(6)', '(30)'], {}), '(2021, 6, 30)\n', (1759, 1772), False, 'from datetime import date\n'), ((1816, 1833), 'datetime.date', 'date', (['(2020)', '(10)', '(1)'], {}), '(2020, 10, 1)\n', (1820, 1833), False, 'from datetime import date\n'), ((1864, 1881), 'datetime.date', 'date', (['(2021)', '(6)', '(30)'], {}), '(2021, 6, 30)\n', (1868, 1881), False, 'from datetime import date\n'), ((4797, 4836), 'dash_bootstrap_components.Tab', 'dbc.Tab', 
(['cardtab_1'], {'label': '"""Time series"""'}), "(cardtab_1, label='Time series')\n", (4804, 4836), True, 'import dash_bootstrap_components as dbc\n'), ((4858, 4898), 'dash_bootstrap_components.Tab', 'dbc.Tab', (['cardtab_2'], {'label': '"""Distribution"""'}), "(cardtab_2, label='Distribution')\n", (4865, 4898), True, 'import dash_bootstrap_components as dbc\n')] |
# Copyright 2021 Touca, Inc. Subject to Apache-2.0 License.
from ._types import IntegerType, VectorType, ToucaType
from datetime import datetime, timedelta
from enum import Enum
from typing import Dict, Tuple
class ResultCategory(Enum):
    """Category of a captured entry: a regular check or an assertion."""

    Check = 1
    Assert = 2
class ResultEntry:
    """
    Couples a captured ``ToucaType`` value with the result category
    (check vs. assertion) it should be reported under.

    We are intentionally not using ``@dataclass`` to ensure the core library
    has no dependency on ``dataclasses`` module. This may change in the future.
    """

    def __init__(self, typ: ResultCategory, val: ToucaType):
        """
        Creates an entry given its value and the category it belongs to.

        :param typ: category of the entry
        :param val: captured value of the entry
        """
        self.typ, self.val = typ, val
class Case:
    """
    Collects every check, assertion, and performance measurement captured
    for a single test case, and serializes them for submission.
    """

    def __init__(self, **kwargs):
        # Metadata such as team/suite/version/name, supplied by the client.
        self._meta = kwargs
        self._results: Dict[str, ResultEntry] = dict()
        # Start (_tics) and stop (_tocs) timestamps keyed by metric name.
        self._tics: Dict[str, datetime] = dict()
        self._tocs: Dict[str, datetime] = dict()

    def check(self, key: str, value: ToucaType):
        """
        Logs a given value as a test result for the declared test case
        and associates it with the specified key.

        :param key: name to be associated with the logged test result
        :param value: value to be logged as a test result
        """
        self._results[key] = ResultEntry(typ=ResultCategory.Check, val=value)

    def assume(self, key: str, value: ToucaType):
        """
        Logs a given value as an assertion for the declared test case
        and associates it with the specified key.

        :param key: name to be associated with the logged test result
        :param value: value to be logged as a test result
        """
        self._results[key] = ResultEntry(typ=ResultCategory.Assert, val=value)

    def add_array_element(self, key: str, value: ToucaType):
        """
        Adds a given value to a list of results for the declared
        test case which is associated with the specified key.

        Could be considered as a helper utility function.
        This method is particularly helpful to log a list of items as they
        are found:

        .. code-block:: python

            for number in numbers:
                if is_prime(number):
                    touca.add_array_element("prime numbers", number)
                    touca.add_hit_count("number of primes")

        This pattern can be considered as a syntactic sugar for the following
        alternative:

        .. code-block:: python

            primes = []
            for number in numbers:
                if is_prime(number):
                    primes.append(number)
            if primes:
                touca.check("prime numbers", primes)
                touca.check("number of primes", len(primes))

        The items added to the list are not required to be of the same type.

        :raises RuntimeError:
            if specified key is already associated with
            a test result which was not iterable
        :param key: name to be associated with the logged test result
        :param value: element to be appended to the array
        :see also: :py:meth:`~check`
        """
        if key not in self._results:
            # First element: start a fresh vector under the Check category.
            self._results[key] = ResultEntry(typ=ResultCategory.Check, val=VectorType())
        vec = self._results.get(key)
        if vec.typ is not ResultCategory.Check or not isinstance(vec.val, VectorType):
            raise RuntimeError("specified key has a different type")
        vec.val.add(value)

    def add_hit_count(self, key: str):
        """
        Increments value of key every time it is executed.
        creates the key with initial value of one if it does not exist.

        Could be considered as a helper utility function.
        This method is particularly helpful to track variables whose values
        are determined in loops with indeterminate execution cycles.

        :raises RuntimeError:
            if specified key is already associated with
            a test result which was not an integer
        :param key: name to be associated with the logged test result
        :see also: :py:meth:`~check`
        """
        if key not in self._results:
            self._results[key] = ResultEntry(
                typ=ResultCategory.Check, val=IntegerType(1)
            )
            return
        value = self._results.get(key)
        if value.typ is not ResultCategory.Check or not isinstance(
            value.val, IntegerType
        ):
            raise RuntimeError("specified key has a different type")
        value.val._value += 1

    def add_metric(self, key: str, milliseconds: int):
        """
        Adds an already obtained measurement to the list of captured
        performance benchmarks.

        Useful for logging a metric that is measured without using this SDK.

        :param key: name to be associated with this performance benchmark
        :param milliseconds: duration of this measurement in milliseconds
        """
        value = datetime.now()
        self._tics[key] = value
        # Record a synthetic stop timestamp exactly `milliseconds` later.
        self._tocs[key] = value + timedelta(milliseconds=milliseconds)

    def start_timer(self, key: str):
        """
        Starts timing an event with the specified name.

        Measurement of the event is only complete when function
        :py:meth:`~stop_timer` is later called for the specified name.

        :param key: name to be associated with the performance metric
        """
        self._tics[key] = datetime.now()

    def stop_timer(self, key: str):
        """
        Stops timing an event with the specified name.

        Expects function :py:meth:`~start_timer` to have been called previously
        with the specified name.

        :param key: name to be associated with the performance metric
        """
        if key in self._tics:
            self._tocs[key] = datetime.now()

    def _metrics(self) -> Tuple[str, ToucaType]:
        # Generator over completed (started and stopped) measurements.
        for key, tic in self._tics.items():
            if key not in self._tocs:
                continue
            # Use the full elapsed time in milliseconds. The previous
            # implementation read ``timedelta.microseconds``, which only
            # holds the sub-second component and silently dropped whole
            # seconds from any measurement of one second or longer.
            diff = (self._tocs.get(key) - tic) / timedelta(milliseconds=1)
            yield key, IntegerType(int(diff))

    def _metadata(self) -> Dict[str, str]:
        # Every field falls back to "unknown" so serialization never fails
        # on partially configured clients.
        return {
            "teamslug": self._meta.get("team") or "unknown",
            "testsuite": self._meta.get("suite") or "unknown",
            "version": self._meta.get("version") or "unknown",
            "testcase": self._meta.get("name") or "unknown",
            "builtAt": datetime.now().isoformat(),
        }

    def json(self):
        """Return a JSON-friendly dict of metadata, results, assertions,
        and metrics for this test case."""
        return {
            "metadata": self._metadata(),
            "results": [
                {"key": k, "value": v.val.json()}
                for k, v in self._results.items()
                if v.typ is ResultCategory.Check
            ],
            "assertions": [
                {"key": k, "value": v.val.json()}
                for k, v in self._results.items()
                if v.typ is ResultCategory.Assert
            ],
            "metrics": [{"key": k, "value": v.json()} for k, v in self._metrics()],
        }

    def serialize(self) -> bytearray:
        """Serialize this test case into the Touca FlatBuffers schema."""
        from flatbuffers import Builder
        import touca._schema as schema

        # Map internal categories to their schema counterparts.
        dicts = {
            ResultCategory.Check: schema.ResultType.Check,
            ResultCategory.Assert: schema.ResultType.Assert,
        }
        builder = Builder(1024)

        # Metadata table: all strings must be created before the table starts.
        metadata = {k: builder.CreateString(v) for k, v in self._metadata().items()}
        schema.MetadataStart(builder)
        schema.MetadataAddTeamslug(builder, metadata.get("teamslug"))
        schema.MetadataAddTestsuite(builder, metadata.get("testsuite"))
        schema.MetadataAddVersion(builder, metadata.get("version"))
        schema.MetadataAddTestcase(builder, metadata.get("testcase"))
        schema.MetadataAddBuiltAt(builder, metadata.get("builtAt"))
        fbs_metadata = schema.MetadataEnd(builder)

        # Results vector (checks and assertions share one entry list).
        result_entries = []
        for k, v in self._results.items():
            # Use the bound method for consistency with the metadata dict
            # above (was the unbound ``Builder.CreateString(builder, k)``).
            fbs_key = builder.CreateString(k)
            fbs_value = v.val.serialize(builder)
            schema.ResultStart(builder)
            schema.ResultAddKey(builder, fbs_key)
            schema.ResultAddValue(builder, fbs_value)
            schema.ResultAddTyp(builder, dicts.get(v.typ))
            result_entries.append(schema.ResultEnd(builder))
        schema.ResultsStartEntriesVector(builder, len(result_entries))
        # FlatBuffers vectors are built back-to-front.
        for item in reversed(result_entries):
            builder.PrependUOffsetTRelative(item)
        fbs_result_entries = builder.EndVector()
        schema.ResultsStart(builder)
        schema.ResultsAddEntries(builder, fbs_result_entries)
        fbs_results = schema.ResultsEnd(builder)

        # Metrics vector, built the same way.
        metric_entries = []
        for k, v in self._metrics():
            fbs_key = builder.CreateString(k)
            fbs_value = v.serialize(builder)
            schema.MetricStart(builder)
            schema.MetricAddKey(builder, fbs_key)
            schema.MetricAddValue(builder, fbs_value)
            metric_entries.append(schema.MetricEnd(builder))
        schema.MetricsStartEntriesVector(builder, len(metric_entries))
        for item in reversed(metric_entries):
            builder.PrependUOffsetTRelative(item)
        fbs_metric_entries = builder.EndVector()
        schema.MetricsStart(builder)
        schema.MetricsAddEntries(builder, fbs_metric_entries)
        fbs_metrics = schema.MetricsEnd(builder)

        # Root message tying metadata, results, and metrics together.
        schema.MessageStart(builder)
        schema.MessageAddMetadata(builder, fbs_metadata)
        schema.MessageAddResults(builder, fbs_results)
        schema.MessageAddMetrics(builder, fbs_metrics)
        fbs_message = schema.MessageEnd(builder)
        builder.Finish(fbs_message)
        return builder.Output()
| [
"touca._schema.MessageAddMetadata",
"touca._schema.MetricStart",
"touca._schema.MetricAddValue",
"flatbuffers.Builder",
"datetime.timedelta",
"touca._schema.MetadataEnd",
"touca._schema.ResultAddKey",
"touca._schema.ResultStart",
"touca._schema.MetadataStart",
"touca._schema.MetricsEnd",
"touca.... | [((5964, 5978), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5976, 5978), False, 'from datetime import datetime, timedelta\n'), ((6440, 6454), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6452, 6454), False, 'from datetime import datetime, timedelta\n'), ((8316, 8329), 'flatbuffers.Builder', 'Builder', (['(1024)'], {}), '(1024)\n', (8323, 8329), False, 'from flatbuffers import Builder\n'), ((8424, 8453), 'touca._schema.MetadataStart', 'schema.MetadataStart', (['builder'], {}), '(builder)\n', (8444, 8453), True, 'import touca._schema as schema\n'), ((8825, 8852), 'touca._schema.MetadataEnd', 'schema.MetadataEnd', (['builder'], {}), '(builder)\n', (8843, 8852), True, 'import touca._schema as schema\n'), ((9519, 9547), 'touca._schema.ResultsStart', 'schema.ResultsStart', (['builder'], {}), '(builder)\n', (9538, 9547), True, 'import touca._schema as schema\n'), ((9556, 9609), 'touca._schema.ResultsAddEntries', 'schema.ResultsAddEntries', (['builder', 'fbs_result_entries'], {}), '(builder, fbs_result_entries)\n', (9580, 9609), True, 'import touca._schema as schema\n'), ((9632, 9658), 'touca._schema.ResultsEnd', 'schema.ResultsEnd', (['builder'], {}), '(builder)\n', (9649, 9658), True, 'import touca._schema as schema\n'), ((10256, 10284), 'touca._schema.MetricsStart', 'schema.MetricsStart', (['builder'], {}), '(builder)\n', (10275, 10284), True, 'import touca._schema as schema\n'), ((10293, 10346), 'touca._schema.MetricsAddEntries', 'schema.MetricsAddEntries', (['builder', 'fbs_metric_entries'], {}), '(builder, fbs_metric_entries)\n', (10317, 10346), True, 'import touca._schema as schema\n'), ((10369, 10395), 'touca._schema.MetricsEnd', 'schema.MetricsEnd', (['builder'], {}), '(builder)\n', (10386, 10395), True, 'import touca._schema as schema\n'), ((10405, 10433), 'touca._schema.MessageStart', 'schema.MessageStart', (['builder'], {}), '(builder)\n', (10424, 10433), True, 'import touca._schema as schema\n'), ((10442, 10490), 
'touca._schema.MessageAddMetadata', 'schema.MessageAddMetadata', (['builder', 'fbs_metadata'], {}), '(builder, fbs_metadata)\n', (10467, 10490), True, 'import touca._schema as schema\n'), ((10499, 10545), 'touca._schema.MessageAddResults', 'schema.MessageAddResults', (['builder', 'fbs_results'], {}), '(builder, fbs_results)\n', (10523, 10545), True, 'import touca._schema as schema\n'), ((10554, 10600), 'touca._schema.MessageAddMetrics', 'schema.MessageAddMetrics', (['builder', 'fbs_metrics'], {}), '(builder, fbs_metrics)\n', (10578, 10600), True, 'import touca._schema as schema\n'), ((10623, 10649), 'touca._schema.MessageEnd', 'schema.MessageEnd', (['builder'], {}), '(builder)\n', (10640, 10649), True, 'import touca._schema as schema\n'), ((6045, 6088), 'datetime.timedelta', 'timedelta', ([], {'microseconds': '(milliseconds * 1000)'}), '(microseconds=milliseconds * 1000)\n', (6054, 6088), False, 'from datetime import datetime, timedelta\n'), ((6816, 6830), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6828, 6830), False, 'from datetime import datetime, timedelta\n'), ((8947, 8979), 'flatbuffers.Builder.CreateString', 'Builder.CreateString', (['builder', 'k'], {}), '(builder, k)\n', (8967, 8979), False, 'from flatbuffers import Builder\n'), ((9041, 9068), 'touca._schema.ResultStart', 'schema.ResultStart', (['builder'], {}), '(builder)\n', (9059, 9068), True, 'import touca._schema as schema\n'), ((9081, 9118), 'touca._schema.ResultAddKey', 'schema.ResultAddKey', (['builder', 'fbs_key'], {}), '(builder, fbs_key)\n', (9100, 9118), True, 'import touca._schema as schema\n'), ((9131, 9172), 'touca._schema.ResultAddValue', 'schema.ResultAddValue', (['builder', 'fbs_value'], {}), '(builder, fbs_value)\n', (9152, 9172), True, 'import touca._schema as schema\n'), ((9747, 9779), 'flatbuffers.Builder.CreateString', 'Builder.CreateString', (['builder', 'k'], {}), '(builder, k)\n', (9767, 9779), False, 'from flatbuffers import Builder\n'), ((9837, 9864), 
'touca._schema.MetricStart', 'schema.MetricStart', (['builder'], {}), '(builder)\n', (9855, 9864), True, 'import touca._schema as schema\n'), ((9877, 9914), 'touca._schema.MetricAddKey', 'schema.MetricAddKey', (['builder', 'fbs_key'], {}), '(builder, fbs_key)\n', (9896, 9914), True, 'import touca._schema as schema\n'), ((9927, 9968), 'touca._schema.MetricAddValue', 'schema.MetricAddValue', (['builder', 'fbs_value'], {}), '(builder, fbs_value)\n', (9948, 9968), True, 'import touca._schema as schema\n'), ((9266, 9291), 'touca._schema.ResultEnd', 'schema.ResultEnd', (['builder'], {}), '(builder)\n', (9282, 9291), True, 'import touca._schema as schema\n'), ((10003, 10028), 'touca._schema.MetricEnd', 'schema.MetricEnd', (['builder'], {}), '(builder)\n', (10019, 10028), True, 'import touca._schema as schema\n'), ((7435, 7449), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7447, 7449), False, 'from datetime import datetime, timedelta\n')] |
from datetime import datetime
import sqlalchemy as sa
from flask_sqlalchemy import Model
from sqlalchemy import ForeignKey
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship
from app.main.extensions import db
class BaseMixin(Model):
    """Common persistence helpers shared by all models.

    Both helpers commit immediately and roll the session back on failure so
    a failed flush cannot poison later requests; the original exception is
    always re-raised for the caller to handle.
    """
    def save_to_db(self):
        """Add this instance to the session and commit."""
        try:
            db.session.add(self)
            db.session.commit()
        except Exception:  # was a bare `except:` — be explicit (PEP 8 / E722)
            db.session.rollback()
            raise
    def delete_from_db(self):
        """Delete this instance from the session and commit."""
        try:
            db.session.delete(self)
            db.session.commit()
        except Exception:
            db.session.rollback()
            raise
class UserMixin(BaseMixin, Model):
    """Adds audit columns (created/updated timestamps and user references)
    to any model via SQLAlchemy declared attributes.
    NOTE(review): `updateAt` looks like a typo for `updatedAt`, but renaming
    would change the database column — confirm before touching it.
    """
    @declared_attr
    def createdAt(cls):
        # Set automatically on insert.
        return sa.Column(sa.DateTime, default=datetime.utcnow)
    @declared_attr
    def updateAt(cls):
        # No default/onupdate here — presumably maintained by application code.
        return sa.Column(sa.DateTime)
    @declared_attr
    def createdBy_id(cls):
        # Creator is mandatory.
        return sa.Column(sa.Integer, ForeignKey('users.id'),
                nullable=False)
    @declared_attr
    def updatedBy_id(cls):
        # Last editor is optional (nullable by default).
        return sa.Column(sa.Integer, ForeignKey('users.id'))
    @declared_attr
    def createdBy(cls):
        # Explicit foreign_keys needed because two FKs point at users.id.
        return relationship(
            'UserModel', foreign_keys=[cls.createdBy_id])
    @declared_attr
    def updatedBy(cls):
        return relationship(
            'UserModel', foreign_keys=[cls.updatedBy_id])
| [
"sqlalchemy.orm.relationship",
"app.main.extensions.db.session.rollback",
"app.main.extensions.db.session.add",
"sqlalchemy.ForeignKey",
"sqlalchemy.Column",
"app.main.extensions.db.session.delete",
"app.main.extensions.db.session.commit"
] | [((728, 775), 'sqlalchemy.Column', 'sa.Column', (['sa.DateTime'], {'default': 'datetime.utcnow'}), '(sa.DateTime, default=datetime.utcnow)\n', (737, 775), True, 'import sqlalchemy as sa\n'), ((834, 856), 'sqlalchemy.Column', 'sa.Column', (['sa.DateTime'], {}), '(sa.DateTime)\n', (843, 856), True, 'import sqlalchemy as sa\n'), ((1173, 1231), 'sqlalchemy.orm.relationship', 'relationship', (['"""UserModel"""'], {'foreign_keys': '[cls.createdBy_id]'}), "('UserModel', foreign_keys=[cls.createdBy_id])\n", (1185, 1231), False, 'from sqlalchemy.orm import relationship\n'), ((1304, 1362), 'sqlalchemy.orm.relationship', 'relationship', (['"""UserModel"""'], {'foreign_keys': '[cls.updatedBy_id]'}), "('UserModel', foreign_keys=[cls.updatedBy_id])\n", (1316, 1362), False, 'from sqlalchemy.orm import relationship\n'), ((331, 351), 'app.main.extensions.db.session.add', 'db.session.add', (['self'], {}), '(self)\n', (345, 351), False, 'from app.main.extensions import db\n'), ((364, 383), 'app.main.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (381, 383), False, 'from app.main.extensions import db\n'), ((508, 531), 'app.main.extensions.db.session.delete', 'db.session.delete', (['self'], {}), '(self)\n', (525, 531), False, 'from app.main.extensions import db\n'), ((544, 563), 'app.main.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (561, 563), False, 'from app.main.extensions import db\n'), ((941, 963), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (951, 963), False, 'from sqlalchemy import ForeignKey\n'), ((1090, 1112), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (1100, 1112), False, 'from sqlalchemy import ForeignKey\n'), ((412, 433), 'app.main.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (431, 433), False, 'from app.main.extensions import db\n'), ((592, 613), 'app.main.extensions.db.session.rollback', 'db.session.rollback', 
([], {}), '()\n', (611, 613), False, 'from app.main.extensions import db\n')] |
#-------------------------------------------------------------------------------
# This function defines the sea level change timeseries for marine ice sheet problem.
# *Default = sinusoidal tidal cycle if 'tides' with 1m amplitude if 'tides' turned 'on', OR...
# = zero if 'tides' turned 'off'
#-------------------------------------------------------------------------------
import numpy as np
from params import t_final,nt_per_year,tides
def sl_change(t):
    """Sea-level change at time t (seconds).

    Returns a unit-amplitude sinusoid with a period of half a day when the
    'tides' switch is 'on'; otherwise returns zero (no sea-level change for
    the long-time marine problem).
    """
    if tides != 'on':
        return 0.0
    # 3.154e7 s/yr / 12 / 30 gives half a day -> two tidal cycles per day.
    half_day = 3.154e7 / 12.0 / 30.0
    return np.sin(4 * np.pi * t / half_day)
| [
"numpy.sin"
] | [((505, 555), 'numpy.sin', 'np.sin', (['(4 * np.pi * t / (31540000.0 / 12.0 / 30.0))'], {}), '(4 * np.pi * t / (31540000.0 / 12.0 / 30.0))\n', (511, 555), True, 'import numpy as np\n')] |
from typing import List, Dict
from ts.c_syntax import CSyntax
from ts import Tree
from cfa import CFANode, CFA, CFAEdge
from cfa import LocalisedCFA, LocalisedNode
from .tweet_handler import TweetHandler
from .decoration_strategy import StandardDecorationStrategy, DecorationStrategy
from .conversion_strategy import ConversionStrategy
class LocationDecorator():
    """Decorates a CFA with source 'locations' harvested from tweet nodes.

    Collaborators (tweet handler, decoration strategy, edge converter) default
    to their standard implementations when not supplied.
    """
    def __init__(
        self,
        tree: Tree,
        conversion_strategy: ConversionStrategy = None,
        tweet_handler: TweetHandler = None,
        decoration_strategy: DecorationStrategy = None
    ) -> None:
        self.tree: Tree = tree
        self._syntax = CSyntax()
        # Each optional collaborator falls back to its standard implementation.
        self.tweet_handler = tweet_handler if tweet_handler is not None else TweetHandler(self.tree)
        self.decoration_strategy = decoration_strategy if decoration_strategy is not None else StandardDecorationStrategy(self.tweet_handler)
        self.edge_converter = conversion_strategy if conversion_strategy is not None else ConversionStrategy()
    def map_node_to_location(self, cfa: CFA[CFANode]) -> Dict[CFANode, str]:
        """Collect the location tweets found in *cfa*.
        NOTE(review): despite the name and annotation, the dict built here
        maps extracted location text -> tweet node (not node -> location);
        confirm the intended direction with the callers.
        """
        location_tweets = self.tweet_handler.get_all_location_tweet_nodes(cfa)
        result: Dict[CFANode, str] = dict()
        for tweet in location_tweets:
            location = self.tweet_handler.extract_location_text_from_tweet(tweet.node)
            result[location] = tweet
        return result
    def decorate(self, cfa: CFA[CFANode]) -> LocalisedCFA:
        """Convert *cfa* to a localised CFA and propagate tweet locations to
        every reachable node (with a fix-up pass for switch cases)."""
        localised_cfa: LocalisedCFA = self.convert_cfa_to_localised(cfa)
        # Step 1: Seed locations at tweet
        self.decoration_strategy.decorate_initial_locations(localised_cfa)
        # Step 2: Propagate seeds downwards
        # Depth-first walk: frontier is used as a stack (pop from the end).
        frontier: List[LocalisedNode] = list()
        visited: List[LocalisedNode] = list()
        frontier.append(localised_cfa.root)
        while len(frontier) > 0:
            cfa_node = frontier.pop(-1)
            location = cfa_node.location
            visited.append(cfa_node)
            for edge in localised_cfa.outgoing_edges(cfa_node):
                self.decoration_strategy.decorate_frontier(frontier, visited, location, edge)
        # Step 3: Fixes where TWEETS comes after construct
        for cfa_node in localised_cfa.nodes:
            # Case 1: Switch cases propagation
            if self._syntax.is_switch_case(cfa_node.node):
                outgoings = localised_cfa.outgoing(cfa_node)
                # We can assume that each case is followed by a location tweet
                cfa_node.location = outgoings[0].location
        return localised_cfa
    def convert_cfa_to_localised(self, cfa: CFA[CFANode]) -> LocalisedCFA:
        """Rebuild *cfa* as a LocalisedCFA with equivalent nodes and edges."""
        # Step 1: Convert all CFANodes to Localised CFA Nodes (CFANode -> Localised CFA Node)
        converted_nodes: Dict[CFANode, LocalisedNode] = dict()
        for cfa_node in cfa.nodes:
            converted_nodes[cfa_node] = LocalisedNode(cfa_node.node)
        localised_cfa = LocalisedCFA(
            converted_nodes[cfa.root]
        )
        # Step 2: Reconstruct all edges
        # Both outgoing and ingoing edges are converted; converted_edges tracks
        # edges already handled so none is duplicated.
        converted_edges: List[CFAEdge[CFANode]] = list()
        for cfa_node in cfa.nodes:
            self.edge_converter.convert_edges(
                cfa.outgoing_edges(cfa_node),
                converted_edges,
                localised_cfa,
                converted_nodes
            )
            self.edge_converter.convert_edges(
                cfa.ingoing_edges(cfa_node),
                converted_edges,
                localised_cfa,
                converted_nodes
            )
        return localised_cfa | [
"cfa.LocalisedNode",
"cfa.LocalisedCFA",
"ts.c_syntax.CSyntax"
] | [((641, 650), 'ts.c_syntax.CSyntax', 'CSyntax', ([], {}), '()\n', (648, 650), False, 'from ts.c_syntax import CSyntax\n'), ((2945, 2984), 'cfa.LocalisedCFA', 'LocalisedCFA', (['converted_nodes[cfa.root]'], {}), '(converted_nodes[cfa.root])\n', (2957, 2984), False, 'from cfa import LocalisedCFA, LocalisedNode\n'), ((2891, 2919), 'cfa.LocalisedNode', 'LocalisedNode', (['cfa_node.node'], {}), '(cfa_node.node)\n', (2904, 2919), False, 'from cfa import LocalisedCFA, LocalisedNode\n')] |
# Copyright 2020 Graphcore Ltd.
from pathlib import Path
import pytest
# NOTE: The import below is dependent on 'pytest.ini' in the root of
# the repository
from examples_tests.test_util import SubProcessChecker
working_path = Path(__file__).parent
class TestBuildAndRun(SubProcessChecker):
    """Builds both tutorial variants with make and checks that each binary
    runs to completion, matching the expected output lines."""
    def setUp(self):
        ''' Compile the start here and complete versions of the tutorial code '''
        self.run_command("make clean", working_path, [])
        self.run_command("make all", working_path, [])
    def tearDown(self):
        # Leave the working tree clean after each test.
        self.run_command("make clean", working_path, [])
    @pytest.mark.category1
    def test_run_start_here(self):
        ''' Check that the start here version of the tutorial code runs '''
        self.run_command("./tut3_start_here",
                         working_path,
                         ["Program complete"])
    @pytest.mark.category1
    def test_run_complete(self):
        ''' Check that the complete version of the tutorial code runs '''
        # The complete binary lives in the sibling 'complete' directory.
        self.run_command("../complete/tut3_complete",
                         working_path.parent.joinpath("complete"),
                         ["Program complete",
                          "v2: {7,6,4.5,2.5}"])
| [
"pathlib.Path"
] | [((229, 243), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (233, 243), False, 'from pathlib import Path\n')] |
from PIL import Image
import imagehash
from dotenv import load_dotenv
load_dotenv()
import google_streetview.api
import os
from shapely.geometry import shape, Polygon, Point
def get_point_photo(coords, download_folder):
    """Download Street View images for every coordinate, one per compass heading.

    For each location string in *coords* a 640x640 picture is fetched at
    headings 0, 90, 180 and 270 degrees; images for heading H are written to
    ``download_folder + "_H"``.

    The original implementation repeated the same parameter block four times,
    differing only in 'heading'; this loop produces the same requests and the
    same output folders.
    """
    api_key = os.getenv("GOOGLE_API_KEY")
    for heading in ('0', '90', '180', '270'):
        # Define parameters for street view api (one request per coordinate).
        params = [{
            'size': '640x640',  # max 640x640 pixels
            'location': coord,
            'fov': '90',
            'heading': heading,
            'pitch': '0',
            'key': api_key
        } for coord in coords]
        # Create a results object and download images for this heading.
        results = google_streetview.api.results(params)
        results.download_links(download_folder + "_" + heading)
# NOTE: probably a bug while saving different polygons
def convert_to_linestring(data, progress_bar):
features = data['features']
j = 0
progr = 0
for feature in features:
geometry = feature['geometry']
coords = []
string_coords = []
multipoly = shape(geometry)
progr += 1
# print('Fetching data.', end='')
for pol in multipoly:
for i in list(pol.exterior.coords):
if (round(Point(i).y, 4), round(Point(i).x, 4)) in coords:
continue
temp_coords_x = round(Point(i).x, 4)
temp_coords_y = round(Point(i).y, 4)
coords.append((temp_coords_y, temp_coords_x))
string_coords.append('{},{}'.format(round(Point(i).y, 4), round(Point(i).x, 4)))
j = j + 1
# print(string_coords)
# print(progr)
progress_bar.UpdateBar((progr/len(features)) * 100)
get_point_photo(string_coords, os.path.join('downloads', str(j)))
def get_images_folder(root_folder):
    """Return the paths of the per-heading download folders inside *root_folder*.

    Entries are the children of *root_folder* whose names end in one of the
    heading suffixes produced by get_point_photo (_0, _90, _180, _270).

    Bug fix: the entries were previously joined onto ``os.getcwd()`` even
    though they were listed from *root_folder*, which produced non-existent
    paths whenever *root_folder* was not the current working directory.
    """
    heading_suffixes = ("_0", "_90", "_180", "_270")
    return [os.path.join(root_folder, entry)
            for entry in os.listdir(root_folder)
            if entry.endswith(heading_suffixes)]
# TODO: update using pillow
# from PIL import Image
# import imagehash
# import os
import shutil
import sys
def remove_duplicates(img_folder, img_format='.jpg'):
    """Delete perceptually-duplicate images in *img_folder*.

    Uses imagehash's average hash, so visually identical pictures (not just
    byte-identical ones) compare equal. For every group of duplicates one
    copy is kept and the rest are removed.
    """
    # Get images in folder
    images = [img for img in os.listdir(img_folder) if img.endswith(img_format)]
    # Get a dict containing image's hashes
    hashes = { image: imagehash.average_hash(Image.open(os.path.join(img_folder, image))) for image in images }
    # Get a list of images to remove (TODO this should be refactored)
    # O(n^2) pairwise comparison; the `i in to_remove or j in to_remove`
    # guard keeps one representative per duplicate group.
    to_remove = []
    for i in hashes.keys():
        for j in hashes.keys():
            if hashes[i] == hashes[j] and i != j:
                if i in to_remove or j in to_remove:
                    continue
                to_remove.append(j)
    to_remove = list(set(to_remove))
    for index in to_remove:
        os.remove(os.path.join(img_folder, index))
# # remove empty dirs
def remove_empty_dir(path):
    """Remove *path* if it is an empty directory; print a notice otherwise.

    Bug fix: the original attempted ``os.rmdir`` precisely when the directory
    still CONTAINED .jpg files, so rmdir always raised OSError (directory not
    empty) and nothing was ever removed. Per the function name and its caller
    (remove_empty_dirs), the intent is to drop directories that are empty.
    """
    try:
        if not os.listdir(path):
            os.rmdir(path)
    except OSError as e:
        print('unremoved - {}'.format(e))
def remove_empty_dirs(path):
    """Walk *path* bottom-up and try to remove every (empty) sub-directory."""
    # topdown=False walks children before parents, so a directory emptied by
    # earlier removals can itself be removed when its parent is visited.
    for parent, subdirs, _files in os.walk(path, topdown=False):
        candidates = [os.path.realpath(os.path.join(parent, name)) for name in subdirs]
        for candidate in candidates:
            remove_empty_dir(candidate)
| [
"os.listdir",
"os.getenv",
"os.path.join",
"os.getcwd",
"dotenv.load_dotenv",
"os.rmdir",
"shapely.geometry.Point",
"shapely.geometry.shape",
"os.walk"
] | [((70, 83), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (81, 83), False, 'from dotenv import load_dotenv\n'), ((3663, 3691), 'os.walk', 'os.walk', (['path'], {'topdown': '(False)'}), '(path, topdown=False)\n', (3670, 3691), False, 'import os\n'), ((1797, 1812), 'shapely.geometry.shape', 'shape', (['geometry'], {}), '(geometry)\n', (1802, 1812), False, 'from shapely.geometry import shape, Polygon, Point\n'), ((399, 426), 'os.getenv', 'os.getenv', (['"""GOOGLE_API_KEY"""'], {}), "('GOOGLE_API_KEY')\n", (408, 426), False, 'import os\n'), ((591, 618), 'os.getenv', 'os.getenv', (['"""GOOGLE_API_KEY"""'], {}), "('GOOGLE_API_KEY')\n", (600, 618), False, 'import os\n'), ((785, 812), 'os.getenv', 'os.getenv', (['"""GOOGLE_API_KEY"""'], {}), "('GOOGLE_API_KEY')\n", (794, 812), False, 'import os\n'), ((979, 1006), 'os.getenv', 'os.getenv', (['"""GOOGLE_API_KEY"""'], {}), "('GOOGLE_API_KEY')\n", (988, 1006), False, 'import os\n'), ((2470, 2481), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2479, 2481), False, 'import os\n'), ((2498, 2521), 'os.listdir', 'os.listdir', (['root_folder'], {}), '(root_folder)\n', (2508, 2521), False, 'import os\n'), ((2829, 2851), 'os.listdir', 'os.listdir', (['img_folder'], {}), '(img_folder)\n', (2839, 2851), False, 'import os\n'), ((3355, 3386), 'os.path.join', 'os.path.join', (['img_folder', 'index'], {}), '(img_folder, index)\n', (3367, 3386), False, 'import os\n'), ((3525, 3539), 'os.rmdir', 'os.rmdir', (['path'], {}), '(path)\n', (3533, 3539), False, 'import os\n'), ((2974, 3005), 'os.path.join', 'os.path.join', (['img_folder', 'image'], {}), '(img_folder, image)\n', (2986, 3005), False, 'import os\n'), ((3502, 3518), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (3512, 3518), False, 'import os\n'), ((3757, 3784), 'os.path.join', 'os.path.join', (['root', 'dirname'], {}), '(root, dirname)\n', (3769, 3784), False, 'import os\n'), ((2028, 2036), 'shapely.geometry.Point', 'Point', (['i'], {}), '(i)\n', (2033, 2036), 
False, 'from shapely.geometry import shape, Polygon, Point\n'), ((2069, 2077), 'shapely.geometry.Point', 'Point', (['i'], {}), '(i)\n', (2074, 2077), False, 'from shapely.geometry import shape, Polygon, Point\n'), ((1939, 1947), 'shapely.geometry.Point', 'Point', (['i'], {}), '(i)\n', (1944, 1947), False, 'from shapely.geometry import shape, Polygon, Point\n'), ((1961, 1969), 'shapely.geometry.Point', 'Point', (['i'], {}), '(i)\n', (1966, 1969), False, 'from shapely.geometry import shape, Polygon, Point\n'), ((2180, 2188), 'shapely.geometry.Point', 'Point', (['i'], {}), '(i)\n', (2185, 2188), False, 'from shapely.geometry import shape, Polygon, Point\n'), ((2202, 2210), 'shapely.geometry.Point', 'Point', (['i'], {}), '(i)\n', (2207, 2210), False, 'from shapely.geometry import shape, Polygon, Point\n')] |
import dateutil
import yaml
import feedparser as fp
import newspaper as np
from medios.medio import Medio
from medios.diarios.noticia import Noticia
from bd.entidades import Kiosco
class Diario(Medio):
    """Newspaper reader: loads feed configuration from YAML, fetches the RSS
    feeds and downloads each new article as a Noticia into self.noticias."""
    def __init__(self, etiqueta):
        Medio.__init__(self, etiqueta)
        self.noticias = []       # downloaded Noticia objects
        self.feeds = {}          # category tag -> feed URL
        self.feed_noticias = ""  # single general-news feed URL, if configured
        self.categorias = []     # category tags for this paper
        self.configurar()
    def configurar(self):
        """Load this paper's feeds/categories from medios/diarios/config.yaml."""
        with open('medios/diarios/config.yaml', 'r') as stream:
            try:
                config = yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                print(exc)
        for diario in config['diarios']:
            if diario['tag'] != self.etiqueta:
                continue
            if 'feed_noticias' in diario:
                self.feed_noticias = diario['feed_noticias']
            if 'categorias' in diario:
                self.categorias = diario['categorias']
            if 'feeds' in diario:
                # Per-feed tags replace any previously configured categories.
                self.categorias = []
                for feed in diario['feeds']:
                    self.feeds[feed['tag']] = feed['url']
                    self.categorias.append(feed['tag'])
    def leer(self):
        """Fetch every feed and download the articles not already stored in
        the Kiosco database (deduplicated by URL)."""
        kiosco = Kiosco()
        print("leyendo '" + self.etiqueta + "'...")
        for tag, url_feed in self.feeds.items():
            for url_noticia, fecha in self.reconocer_urls_y_fechas_noticias(url_feed=url_feed):
                if kiosco.bd.noticias.find(filter={'diario':self.etiqueta, 'url':url_noticia}).count() > 0: # if the noticia (url) already exists, skip the download
                    continue
                noticia = self.nueva_noticia(url=url_noticia, categoria=tag, diario=self.etiqueta)
                if noticia == None:
                    continue
                if noticia.fecha == None:
                    # Fall back to the feed entry's publication date.
                    noticia.fecha = fecha
                self.noticias.append(noticia)
    def limpiar_texto(self, texto):
        # Hook for subclasses to clean article text; identity by default.
        return texto
    def reconocer_urls_y_fechas_noticias(self, url_feed):
        """Return (url, date) pairs for every entry in the RSS feed."""
        urls_y_fechas = []
        for entrada in fp.parse(url_feed).entries:
            fecha = self.parsear_fecha(entrada)
            urls_y_fechas.append((entrada.link, fecha))
        return urls_y_fechas
    def nueva_noticia(self, url, categoria, diario):
        """Download and parse one article; returns None if scraping fails.
        Note: `np` is the `newspaper` package in this module, not numpy."""
        articulo = np.Article(url=url, language='es')
        try:
            articulo.download()
            articulo.parse()
        except:
            return None
        return Noticia(fecha=articulo.publish_date, url=url, diario=diario, categoria=categoria, titulo=articulo.title, texto=self.limpiar_texto(articulo.text))
    def parsear_fecha(self, entrada):
        # Parse the RFC-822 style date carried by the feed entry.
        return dateutil.parser.parse(entrada.published) | [
"dateutil.parser.parse",
"feedparser.parse",
"yaml.safe_load",
"newspaper.Article",
"medios.medio.Medio.__init__",
"bd.entidades.Kiosco"
] | [((248, 278), 'medios.medio.Medio.__init__', 'Medio.__init__', (['self', 'etiqueta'], {}), '(self, etiqueta)\n', (262, 278), False, 'from medios.medio import Medio\n'), ((1241, 1249), 'bd.entidades.Kiosco', 'Kiosco', ([], {}), '()\n', (1247, 1249), False, 'from bd.entidades import Kiosco\n'), ((2349, 2383), 'newspaper.Article', 'np.Article', ([], {'url': 'url', 'language': '"""es"""'}), "(url=url, language='es')\n", (2359, 2383), True, 'import newspaper as np\n'), ((2714, 2754), 'dateutil.parser.parse', 'dateutil.parser.parse', (['entrada.published'], {}), '(entrada.published)\n', (2735, 2754), False, 'import dateutil\n'), ((2115, 2133), 'feedparser.parse', 'fp.parse', (['url_feed'], {}), '(url_feed)\n', (2123, 2133), True, 'import feedparser as fp\n'), ((550, 572), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (564, 572), False, 'import yaml\n')] |
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
# from scipy.stats import entropy
import sys
sys.path.append('../')
import cloudtropy
# ---- Synthetic data: a uniform background plus four Gaussian clusters ----
gen_dim = 2
gen_N = 300
lims = (-2,6)
scale = 0.2
X = np.random.uniform(low=lims[0],high=lims[1],size=(10000,2)) # background
# Four clusters of gen_N points each, centred on the corners of a square.
X = np.concatenate([X,scale*np.random.randn(gen_N,gen_dim)+np.array([0,0])] )
X = np.concatenate([X,scale*np.random.randn(gen_N,gen_dim)+np.array([4,0])] )
X = np.concatenate([X,scale*np.random.randn(gen_N,gen_dim)+np.array([0,4])] )
X = np.concatenate([X,scale*np.random.randn(gen_N,gen_dim)+np.array([4,4])] )
# ---- Input parameters for the cloudtropy grid / smoothing ----
N_grid = 80
delta_c = 0.35
# grid,pmf = cloudtropy.pmf(X,N=N_grid,delta_c=delta_c,lims=[(-2,6),(-2,6)])
grid,pmf = cloudtropy.pmf(X,d=0.1,delta_c=delta_c,lims=[(-2,6),(-2,6)])
entropy = cloudtropy.entropy(X,base=2,N=N_grid,delta_c=delta_c,lims=[(-3,7),(-3,7)])
# Entropy computed with a fixed cell size d should agree with the N-grid value.
print(cloudtropy.entropy(X,base=2,d=0.1,delta_c=delta_c,lims=[(-3,7),(-3,7)]))
############## All in one
# Three panels in one figure: scatter, 3D PMF surface, and PMF contours.
fig = plt.figure(figsize=(14,3))
#
ax1 = fig.add_subplot(1,4,1)
# levels = np.linspace(0,flat_pmf.max(),40)
ax1.scatter(X[:,0], X[:,1],s=1,alpha=0.1,color='k')
ax1.set_xlabel('x'),ax1.set_ylabel('y')
# NOTE(review): set_xlim is called twice; the second call was probably meant
# to be set_ylim.
ax1.set_xlim(lims),ax1.set_xlim(lims)
ax1.axis('equal')
#
ax2 = fig.add_subplot(1,3,2,projection='3d')
ax2.plot_surface(grid[0], grid[1], pmf,cmap='coolwarm', edgecolor='none',shade='interp')
ax2.set_xlabel('x'),ax2.set_ylabel('y')#,ax.set_zlabel('PMF',rotation=90)
ax2.view_init(elev=60, azim=-45)
#
ax3 = fig.add_subplot(1,3,3)
cs = ax3.contourf(grid[0], grid[1], pmf, levels=np.linspace(0,pmf.max(),40), cmap='Purples_r')
ax3.set_xlabel('x'),ax3.set_ylabel('y')
ax3.set_title('Entropy = %.3f'%entropy)
ax3.set_xlim(lims),ax3.set_xlim(lims),
ax3.axis('equal')
cbar = fig.colorbar(cs)
#
plt.tight_layout()
# plt.savefig('all.pdf')
plt.savefig('all.png',dpi=400)
############## Separate
# Same three panels, saved as individual figures.
fig = plt.figure(figsize=(4,3))
#
ax1 = fig.add_subplot(1,1,1)
# levels = np.linspace(0,flat_pmf.max(),40)
ax1.scatter(X[:,0], X[:,1],s=1,alpha=0.1,color='k')
ax1.set_xlabel('x'),ax1.set_ylabel('y')
ax1.set_xlim(lims),ax1.set_xlim(lims)
ax1.axis('equal')
plt.savefig('scatter.png',dpi=400)
#
fig = plt.figure(figsize=(4,3))
#
ax2 = fig.add_subplot(1,1,1,projection='3d')
ax2.plot_surface(grid[0], grid[1], pmf,cmap='coolwarm', edgecolor='none',shade='interp')
ax2.set_xlabel('x'),ax2.set_ylabel('y')#,ax.set_zlabel('PMF',rotation=90)
ax2.view_init(elev=60, azim=-45)
plt.savefig('surf.png',dpi=400)
#
fig = plt.figure(figsize=(4,3))
#
ax3 = fig.add_subplot(1,1,1)
cs = ax3.contourf(grid[0], grid[1], pmf, levels=np.linspace(0,pmf.max(),40), cmap='Purples_r')
# ax3.set_xlabel('x'),ax3.set_ylabel('y')
# ax3.set_title('Entropy = %.3f'%entropy)
ax3.set_xlim(lims),ax3.set_xlim(lims),
ax3.axis('equal')
cbar = fig.colorbar(cs)
#
plt.tight_layout()
# plt.savefig('all.pdf')
plt.savefig('contour_simple.png',dpi=400)
| [
"matplotlib.pyplot.savefig",
"cloudtropy.entropy",
"cloudtropy.pmf",
"numpy.array",
"matplotlib.pyplot.figure",
"numpy.random.randn",
"matplotlib.pyplot.tight_layout",
"numpy.random.uniform",
"sys.path.append"
] | [((133, 155), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (148, 155), False, 'import sys\n'), ((239, 300), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': 'lims[0]', 'high': 'lims[1]', 'size': '(10000, 2)'}), '(low=lims[0], high=lims[1], size=(10000, 2))\n', (256, 300), True, 'import numpy as np\n'), ((761, 827), 'cloudtropy.pmf', 'cloudtropy.pmf', (['X'], {'d': '(0.1)', 'delta_c': 'delta_c', 'lims': '[(-2, 6), (-2, 6)]'}), '(X, d=0.1, delta_c=delta_c, lims=[(-2, 6), (-2, 6)])\n', (775, 827), False, 'import cloudtropy\n'), ((832, 917), 'cloudtropy.entropy', 'cloudtropy.entropy', (['X'], {'base': '(2)', 'N': 'N_grid', 'delta_c': 'delta_c', 'lims': '[(-3, 7), (-3, 7)]'}), '(X, base=2, N=N_grid, delta_c=delta_c, lims=[(-3, 7), (-3,\n 7)])\n', (850, 917), False, 'import cloudtropy\n'), ((1023, 1050), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(14, 3)'}), '(figsize=(14, 3))\n', (1033, 1050), True, 'import matplotlib.pyplot as plt\n'), ((1805, 1823), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1821, 1823), True, 'import matplotlib.pyplot as plt\n'), ((1849, 1880), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""all.png"""'], {'dpi': '(400)'}), "('all.png', dpi=400)\n", (1860, 1880), True, 'import matplotlib.pyplot as plt\n'), ((1912, 1938), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 3)'}), '(figsize=(4, 3))\n', (1922, 1938), True, 'import matplotlib.pyplot as plt\n'), ((2161, 2196), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""scatter.png"""'], {'dpi': '(400)'}), "('scatter.png', dpi=400)\n", (2172, 2196), True, 'import matplotlib.pyplot as plt\n'), ((2204, 2230), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 3)'}), '(figsize=(4, 3))\n', (2214, 2230), True, 'import matplotlib.pyplot as plt\n'), ((2473, 2505), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""surf.png"""'], {'dpi': '(400)'}), "('surf.png', dpi=400)\n", (2484, 2505), True, 
'import matplotlib.pyplot as plt\n'), ((2513, 2539), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 3)'}), '(figsize=(4, 3))\n', (2523, 2539), True, 'import matplotlib.pyplot as plt\n'), ((2832, 2850), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2848, 2850), True, 'import matplotlib.pyplot as plt\n'), ((2876, 2918), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""contour_simple.png"""'], {'dpi': '(400)'}), "('contour_simple.png', dpi=400)\n", (2887, 2918), True, 'import matplotlib.pyplot as plt\n'), ((915, 993), 'cloudtropy.entropy', 'cloudtropy.entropy', (['X'], {'base': '(2)', 'd': '(0.1)', 'delta_c': 'delta_c', 'lims': '[(-3, 7), (-3, 7)]'}), '(X, base=2, d=0.1, delta_c=delta_c, lims=[(-3, 7), (-3, 7)])\n', (933, 993), False, 'import cloudtropy\n'), ((370, 386), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (378, 386), True, 'import numpy as np\n'), ((448, 464), 'numpy.array', 'np.array', (['[4, 0]'], {}), '([4, 0])\n', (456, 464), True, 'import numpy as np\n'), ((526, 542), 'numpy.array', 'np.array', (['[0, 4]'], {}), '([0, 4])\n', (534, 542), True, 'import numpy as np\n'), ((604, 620), 'numpy.array', 'np.array', (['[4, 4]'], {}), '([4, 4])\n', (612, 620), True, 'import numpy as np\n'), ((339, 370), 'numpy.random.randn', 'np.random.randn', (['gen_N', 'gen_dim'], {}), '(gen_N, gen_dim)\n', (354, 370), True, 'import numpy as np\n'), ((417, 448), 'numpy.random.randn', 'np.random.randn', (['gen_N', 'gen_dim'], {}), '(gen_N, gen_dim)\n', (432, 448), True, 'import numpy as np\n'), ((495, 526), 'numpy.random.randn', 'np.random.randn', (['gen_N', 'gen_dim'], {}), '(gen_N, gen_dim)\n', (510, 526), True, 'import numpy as np\n'), ((573, 604), 'numpy.random.randn', 'np.random.randn', (['gen_N', 'gen_dim'], {}), '(gen_N, gen_dim)\n', (588, 604), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# _*_ encoding: utf-8 _*_
"""simplex.py: Simplex algorithm with rational coefficients"""
import numpy as np
import fractions as frac
__author__ = "<NAME>"
__email__ = "<EMAIL>"
class RestrictedSimplex(object):
    """Simplex solver for linear programs whose basic solution is feasible.

    Pivot selection is pluggable:

    * ``leaving_index(ratios)`` picks the leaving row among the candidate
      ratios (falsy entries mark rows that cannot leave the basis).
    * ``entering_index(coeffs)`` picks the entering column among the
      (clamped) objective coefficients.
    """
    def __init__(self, leaving_index=None, entering_index=None):
        if not leaving_index:
            def func(l):
                # Default rule: index of the smallest truthy entry, or 0
                # when every entry is falsy (no candidate row).
                m = 0
                # Bug fix: test the bound BEFORE indexing. The original
                # `while not l[m] and m < len(l)` evaluated l[m] first and
                # raised IndexError once m reached len(l).
                while m < len(l) and not l[m]:
                    m += 1
                if m == len(l):
                    return 0
                for i in range(len(l)):
                    if l[i] and l[m] > l[i]:
                        m = i
                return m
            leaving_index = func
        if not entering_index:
            def func(l):
                # Dantzig's rule: column with the most negative coefficient.
                return l.index(min(l))
            entering_index = func
        self.leaving_index = leaving_index
        self.entering_index = entering_index
    def __call__(self, lin_p, recursion_limit=100):
        """ Runs a restricted version of the simplex algorithm
        Runs simplex algorithm on linear programs having feasible basic
        solution. It takes in an integer to limit the number of iterations.
        :return: the basic solution and its (maximal) objective value.
        """
        a = lin_p.table
        # Bug fix: `has_feasible_basic` is a method (see Simplex.is_feasible,
        # which calls it); without the parentheses the bound method was always
        # truthy and this guard could never fire.
        if not lin_p.has_feasible_basic():
            raise TypeError("Linear program doesn't have feasible base solution")
        n = 0
        while any(a[0, :-1] < 0) and n < recursion_limit:
            # Clamp positive coefficients to 0 so only improving columns
            # are eligible for the entering rule.
            entering_choices = [i for i in map(lambda x: 0 if x > 0 else x,
                                               a[0, :-1])]
            e = self.entering_index(entering_choices)
            # Ratio test: only rows with a positive pivot column entry qualify.
            leaving_choices = [None]*lin_p.shape[0]
            for i in range(lin_p.shape[0]):
                if a[i+1, e] > 0:
                    leaving_choices[i] = (a[i+1, -1]/a[i+1, e])
            if not [i for i in leaving_choices if i]:
                raise OverflowError("Linear program unbounded | check model and state.")
            else:
                l = 1 + self.leaving_index(leaving_choices)
                lin_p.pivot(e, l)
            n += 1
        form = "Basic solution = " + \
               "(" + "{}, " * (lin_p.shape[1] - 1) + "{})" + \
               " with objective value = {}."
        print(form.format(*lin_p.basic_solution(), lin_p.table[0, -1]), end="\n\n")
        return lin_p.basic_solution(), lin_p.table[0, -1]
class Simplex(RestrictedSimplex):
    """Two-phase simplex: phase one builds a feasible basic solution via an
    auxiliary linear program, phase two runs the restricted simplex on it."""
    def is_feasible(self, lin_p):
        """ Checks if linear program is feasible.
        Has side effect: transforms linear program if not basic feasible
        into an equivalent linear program having basic feasible solution.
        :return: boolean.
        """
        print(" ### Checking feasibility of linear program", lin_p, sep="\n\n")
        if lin_p.has_feasible_basic():
            print(" ### Input linear program has feasible basic solution", end="\n\n")
            return True
        print(" ### Basic solution is not feasible: using auxiliary linear program in next step", end="\n\n")
        # Save the original objective row; the auxiliary program replaces it.
        gain_fun = np.copy(lin_p.table[0])
        # Add the auxiliary variable x0 as a new first column with objective 1.
        lin_p.shape = (lin_p.shape[0], lin_p.shape[1] + 1)
        lin_p.table = np.insert(lin_p.table, 0, frac.Fraction(-1, 1), axis=1)
        lin_p.table[0] = np.hstack((np.ones(1, dtype=frac.Fraction),
                                    np.zeros(lin_p.shape[1], dtype=frac.Fraction)))
        lin_p.basic = [i+1 for i in lin_p.basic]
        # Pivot on the most negative right-hand side to gain feasibility.
        l = 1 + np.argmin(lin_p.table[1:, -1])
        lin_p.pivot(0, l)  # Now program has feasible basic solution
        if RestrictedSimplex.__call__(self, lin_p)[1] == 0:
            print(" ### Input linear program is thus feasible", end="\n\n")
            if 0 in lin_p.basic:
                # Drive x0 out of the basis before dropping its column.
                l = lin_p.basic.index(0)
                e = 0
                # Bug fix: `lin_p.shape` is a tuple (see shape[0]/shape[1]
                # everywhere else), and `e < tuple` raises TypeError on
                # Python 3 — compare against the column count instead.
                while e < lin_p.shape[1] and lin_p.table[l, e] == 0:
                    # There is at least one e with this property
                    # (the program would be unbounded otherwise).
                    e += 1
                lin_p.pivot(e, l)  # 0 not basic anymore
            # Remove the x0 column and restore 0-based basic indices.
            lin_p.basic = [i-1 for i in lin_p.basic]
            lin_p.table = lin_p.table[:, 1:]
            lin_p.shape = (lin_p.shape[0], lin_p.shape[1] - 1)
            # Restore the original objective, rewritten in terms of the basis.
            lin_p.table[0] = gain_fun
            for i in lin_p.basic:
                lin_p.table[0, :] = lin_p.table[0, :] - \
                                    lin_p.table[0, i] * \
                                    lin_p.table[1 + lin_p.basic.index(i), :]
            lin_p.table[0, -1] = -lin_p.table[0, -1]
            return True
        else:
            return False
    def __call__(self, lin_p, recursion_limit=100):
        """ Simplex algorithm.
        :return: a linear program whose basic solution has maximal objective
        value.
        """
        if self.is_feasible(lin_p):
            # Phase two: run the restricted simplex with the same pivot rules.
            simplex = RestrictedSimplex(self.leaving_index,
                                          self.entering_index)
            print(" ### Getting back to linear program equivalent to input with feasible basic solution", end="\n\n")
            return simplex(lin_p, recursion_limit=recursion_limit)
        else:
            raise Exception("Linear program is not feasible.")
| [
"numpy.copy",
"numpy.ones",
"fractions.Fraction",
"numpy.zeros",
"numpy.argmin"
] | [((3105, 3128), 'numpy.copy', 'np.copy', (['lin_p.table[0]'], {}), '(lin_p.table[0])\n', (3112, 3128), True, 'import numpy as np\n'), ((3237, 3257), 'fractions.Fraction', 'frac.Fraction', (['(-1)', '(1)'], {}), '(-1, 1)\n', (3250, 3257), True, 'import fractions as frac\n'), ((3486, 3516), 'numpy.argmin', 'np.argmin', (['lin_p.table[1:, -1]'], {}), '(lin_p.table[1:, -1])\n', (3495, 3516), True, 'import numpy as np\n'), ((3303, 3334), 'numpy.ones', 'np.ones', (['(1)'], {'dtype': 'frac.Fraction'}), '(1, dtype=frac.Fraction)\n', (3310, 3334), True, 'import numpy as np\n'), ((3372, 3417), 'numpy.zeros', 'np.zeros', (['lin_p.shape[1]'], {'dtype': 'frac.Fraction'}), '(lin_p.shape[1], dtype=frac.Fraction)\n', (3380, 3417), True, 'import numpy as np\n')] |
"""
This program helps identify smithsonian unit codes which are not yet added to
the smithsonian sub-provider dictionary
"""
import logging
from textwrap import dedent
import requests
from airflow.providers.postgres.hooks.postgres import PostgresHook
from common.loader import provider_details as prov
from providers.provider_api_scripts import smithsonian
logger = logging.getLogger(__name__)
DELAY = smithsonian.DELAY
API_KEY = smithsonian.API_KEY
API_ROOT = smithsonian.API_ROOT
UNITS_ENDPOINT = smithsonian.UNITS_ENDPOINT
PARAMS = {"api_key": API_KEY, "q": "online_media_type:Images"}
SUB_PROVIDERS = prov.SMITHSONIAN_SUB_PROVIDERS
SI_UNIT_CODE_TABLE = "smithsonian_new_unit_codes"
def initialise_unit_code_table(postgres_conn_id, unit_code_table):
    """Create the unit-code tracking table if needed, then empty it.

    Args:
        postgres_conn_id: Airflow connection id used to build the Postgres hook.
        unit_code_table: name of the table that records new/outdated unit codes.
    """
    postgres = PostgresHook(postgres_conn_id=postgres_conn_id)
    # Create table to store new unit codes if it does not exist.
    # (The originals were bare triple-quoted strings — no-op statements —
    # converted here to real comments.)
    postgres.run(
        dedent(
            f"""
            CREATE TABLE IF NOT EXISTS public.{unit_code_table} (
              new_unit_code character varying(80),
              action character varying(40)
            );
            """
        )
    )
    # Delete old unit code entries so each run starts from a clean slate.
    postgres.run(
        dedent(
            f"""
            DELETE FROM public.{unit_code_table};
            """
        )
    )
def get_new_and_outdated_unit_codes(unit_code_set, sub_prov_dict=SUB_PROVIDERS):
    """Compare live unit codes with the sub-provider dictionary.

    Args:
        unit_code_set: set of unit codes reported by the Smithsonian API.
        sub_prov_dict: mapping of sub-provider name -> set of unit codes.

    Returns:
        Tuple ``(new_unit_codes, outdated_unit_codes)``: codes present in the
        API but absent from the dictionary, and vice versa.
    """
    # Union of every unit code currently known to the sub-provider mapping.
    sub_provider_unit_code_set = set().union(*sub_prov_dict.values())
    new_unit_codes = unit_code_set - sub_provider_unit_code_set
    outdated_unit_codes = sub_provider_unit_code_set - unit_code_set
    if new_unit_codes:
        logger.info(
            f"The new unit codes {new_unit_codes} must be added to "
            f"the SMITHSONIAN_SUB_PROVIDERS dictionary"
        )
    if outdated_unit_codes:
        logger.info(
            f"The outdated unit codes {outdated_unit_codes} must be "
            f"deleted from the SMITHSONIAN_SUB_PROVIDERS dictionary"
        )
    return new_unit_codes, outdated_unit_codes
def alert_unit_codes_from_api(
    postgres_conn_id,
    unit_code_table="smithsonian_new_unit_codes",
    units_endpoint=UNITS_ENDPOINT,
    query_params=PARAMS,
):
    """Record differences between API unit codes and SUB_PROVIDERS.

    Queries the Smithsonian units endpoint, computes which unit codes are new
    (unknown to the sub-provider dictionary) or outdated (no longer returned
    by the API), records them in *unit_code_table*, and raises when a human
    must update the SMITHSONIAN_SUB_PROVIDERS dictionary.

    Raises:
        Exception: when any new or outdated unit codes were found.
    """
    response = requests.get(units_endpoint, params=query_params)
    unit_code_set = set(response.json().get("response", {}).get("terms", []))
    new_unit_codes, outdated_unit_codes = get_new_and_outdated_unit_codes(unit_code_set)
    initialise_unit_code_table(postgres_conn_id, unit_code_table)
    postgres = PostgresHook(postgres_conn_id=postgres_conn_id)
    # Populate the table with new unit codes.
    for new_unit_code in new_unit_codes:
        postgres.run(
            dedent(
                f"""
                INSERT INTO public.{unit_code_table}
                (new_unit_code, action)
                VALUES (
                  '{new_unit_code}', 'add'
                );
                """
            )
        )
    # Populate the table with outdated unit codes.
    for outdated_unit_code in outdated_unit_codes:
        postgres.run(
            dedent(
                f"""
                INSERT INTO public.{unit_code_table}
                (new_unit_code, action)
                VALUES (
                  '{outdated_unit_code}', 'delete'
                );
                """
            )
        )
    # Human intervention is needed: the recorded rows in unit_code_table
    # describe exactly what to add/delete in SMITHSONIAN_SUB_PROVIDERS.
    if new_unit_codes or outdated_unit_codes:
        raise Exception(
            "Please check the smithsonian_new_unit_codes table for necessary "
            "updates to the SMITHSONIAN_SUB_PROVIDERS dictionary"
        )
| [
"logging.getLogger",
"airflow.providers.postgres.hooks.postgres.PostgresHook",
"requests.get",
"textwrap.dedent"
] | [((371, 398), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (388, 398), False, 'import logging\n'), ((776, 823), 'airflow.providers.postgres.hooks.postgres.PostgresHook', 'PostgresHook', ([], {'postgres_conn_id': 'postgres_conn_id'}), '(postgres_conn_id=postgres_conn_id)\n', (788, 823), False, 'from airflow.providers.postgres.hooks.postgres import PostgresHook\n'), ((2342, 2391), 'requests.get', 'requests.get', (['units_endpoint'], {'params': 'query_params'}), '(units_endpoint, params=query_params)\n', (2354, 2391), False, 'import requests\n'), ((2642, 2689), 'airflow.providers.postgres.hooks.postgres.PostgresHook', 'PostgresHook', ([], {'postgres_conn_id': 'postgres_conn_id'}), '(postgres_conn_id=postgres_conn_id)\n', (2654, 2689), False, 'from airflow.providers.postgres.hooks.postgres import PostgresHook\n'), ((929, 1118), 'textwrap.dedent', 'dedent', (['f"""\n CREATE TABLE IF NOT EXISTS public.{unit_code_table} (\n new_unit_code character varying(80),\n action character varying(40)\n );\n """'], {}), '(\n f"""\n CREATE TABLE IF NOT EXISTS public.{unit_code_table} (\n new_unit_code character varying(80),\n action character varying(40)\n );\n """\n )\n', (935, 1118), False, 'from textwrap import dedent\n'), ((1213, 1283), 'textwrap.dedent', 'dedent', (['f"""\n DELETE FROM public.{unit_code_table};\n """'], {}), '(f"""\n DELETE FROM public.{unit_code_table};\n """)\n', (1219, 1283), False, 'from textwrap import dedent\n'), ((2825, 3047), 'textwrap.dedent', 'dedent', (['f"""\n INSERT INTO public.{unit_code_table}\n (new_unit_code, action)\n VALUES (\n \'{new_unit_code}\', \'add\'\n );\n """'], {}), '(\n f"""\n INSERT INTO public.{unit_code_table}\n (new_unit_code, action)\n VALUES (\n \'{new_unit_code}\', \'add\'\n );\n """\n )\n', (2831, 3047), False, 'from textwrap import dedent\n'), ((3228, 3458), 'textwrap.dedent', 'dedent', (['f"""\n INSERT INTO public.{unit_code_table}\n (new_unit_code, action)\n VALUES (\n 
\'{outdated_unit_code}\', \'delete\'\n );\n """'], {}), '(\n f"""\n INSERT INTO public.{unit_code_table}\n (new_unit_code, action)\n VALUES (\n \'{outdated_unit_code}\', \'delete\'\n );\n """\n )\n', (3234, 3458), False, 'from textwrap import dedent\n')] |
import logging
import json
import re
import sys
import gdrivefs.conf
_logger = logging.getLogger(__name__)
# TODO(dustin): Make these individual functions.
class _DriveUtility(object):
    """General utility functions loosely related to GD."""

    # Default extensions for mime-types.
    # NOTE: this is a class-level dict shared by every instance;
    # __load_mappings() merges user overrides into it in place.
    # TODO(dustin): !! Move this to the config directory.
    default_extensions = {
        'text/plain': 'txt',
        'application/vnd.ms-excel': 'xls',
        'application/vnd.ms-powerpoint': 'ppt',
        'application/pdf': 'pdf',
        'audio/mpeg': 'mp3',
        'image/png': 'png',
        'video/x-flv': 'flv'
    }

    local_character_set = sys.getfilesystemencoding()

    def __init__(self):
        self.__load_mappings()

    def __load_mappings(self):
        """Merge user-configured extension overrides into the defaults.

        Allows someone to set file-extensions for mime-types rather than
        relying on Python's educated guesses. Missing mapping files are
        tolerated.
        """
        extension_mapping_filepath = \
            gdrivefs.conf.Conf.get('extension_mapping_filepath')

        try:
            with open(extension_mapping_filepath, 'r') as f:
                # BUG FIX: dict has no extend(); update() merges the loaded
                # mapping (the old code raised AttributeError, which the
                # IOError handler did not catch).
                self.default_extensions.update(json.load(f))
        except IOError:
            _logger.info("No extension-mapping was found.")

    def get_first_mime_type_by_extension(self, extension):
        """Return the first mime-type mapped to *extension*, or None."""
        for mime_type, mapped_extension in self.default_extensions.items():
            if mapped_extension == extension:
                return mime_type
        return None

    def translate_filename_charset(self, original_filename):
        """Convert the given filename to the correct character set."""
        # fusepy doesn't support the Python 2.x Unicode type. Expect a native
        # string (anything but a byte string), so this is now the identity.
        return original_filename

    def make_safe_for_filename(self, text):
        """Remove any filename-invalid characters."""
        # Raw string avoids invalid-escape warnings; pattern value unchanged.
        return re.sub(r'[^a-z0-9\-_\.]+', '', text)
# Module-level singleton shared by all importers of this module.
utility = _DriveUtility()
| [
"logging.getLogger",
"re.sub",
"sys.getfilesystemencoding",
"json.load"
] | [((81, 108), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (98, 108), False, 'import logging\n'), ((1633, 1660), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (1658, 1660), False, 'import sys\n'), ((3938, 3975), 're.sub', 're.sub', (['"""[^a-z0-9\\\\-_\\\\.]+"""', '""""""', 'text'], {}), "('[^a-z0-9\\\\-_\\\\.]+', '', text)\n", (3944, 3975), False, 'import re\n'), ((2565, 2577), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2574, 2577), False, 'import json\n')] |
import FWCore.ParameterSet.Config as cms

# EDProducer "HEBRecHitGPUtoSoA": converts HEB RecHits from the GPU product
# into a structure-of-arrays representation, consuming the output of the
# HEBRecHitGPUProd module (presumably HGCAL scintillator rechits — confirm).
HEBRecHitGPUtoSoAProd = cms.EDProducer('HEBRecHitGPUtoSoA',
    HEBRecHitGPUTok = cms.InputTag('HEBRecHitGPUProd'))
| [
"FWCore.ParameterSet.Config.InputTag"
] | [((159, 191), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""HEBRecHitGPUProd"""'], {}), "('HEBRecHitGPUProd')\n", (171, 191), True, 'import FWCore.ParameterSet.Config as cms\n')] |
import matplotlib.pyplot as plt
from collections import Counter
def line_graph(plt):
    """Draw a line chart of the growth of the world wide web.

    Args:
        plt: a matplotlib.pyplot-compatible module or object; all drawing
            calls are issued against it.
    """
    # Observation years since 2000 (source: Internet Live Stats).
    x_values = [2000, 2002, 2005, 2007, 2010, 2012, 2014, 2015]
    # Total number of websites online, in millions.
    y_values = [17, 38, 64, 121, 206, 697, 968, 863]

    # Line chart: years on the x-axis, website counts on the y-axis.
    plt.plot(x_values, y_values, color='blue', marker='o', linestyle='solid',
             linewidth=2)

    # Pin the visible ranges on both axes.
    plt.xlim(2000, 2015)
    plt.ylim(10, 1000)

    # Title and y-axis label.
    plt.title("Total number of websites online")
    plt.ylabel("Websites (millions)")
    plt.show()


if __name__ == "__main__":
    line_graph(plt)
| [
"matplotlib.pyplot.title",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.show"
] | [((409, 496), 'matplotlib.pyplot.plot', 'plt.plot', (['years', 'websites'], {'color': '"""blue"""', 'marker': '"""o"""', 'linestyle': '"""solid"""', 'linewidth': '(2)'}), "(years, websites, color='blue', marker='o', linestyle='solid',\n linewidth=2)\n", (417, 496), True, 'import matplotlib.pyplot as plt\n'), ((549, 569), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(2000)', '(2015)'], {}), '(2000, 2015)\n', (557, 569), True, 'import matplotlib.pyplot as plt\n'), ((574, 592), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(10)', '(1000)'], {}), '(10, 1000)\n', (582, 592), True, 'import matplotlib.pyplot as plt\n'), ((629, 673), 'matplotlib.pyplot.title', 'plt.title', (['"""Total number of websites online"""'], {}), "('Total number of websites online')\n", (638, 673), True, 'import matplotlib.pyplot as plt\n'), ((711, 744), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Websites (millions)"""'], {}), "('Websites (millions)')\n", (721, 744), True, 'import matplotlib.pyplot as plt\n'), ((750, 760), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (758, 760), True, 'import matplotlib.pyplot as plt\n')] |
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mock import patch, call, Mock, PropertyMock
import requests_mock
import os, sys
test_path=os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
class TestSyncOLTDevice(unittest.TestCase):
    """Unit tests for the OLT-device pull step of the vOLT synchronizer.

    Each test mocks the voltha REST API with requests_mock and checks which
    model saves/deletes the pull step performs.
    """

    def setUp(self):
        """Build a mock model accessor, a mock VOLTService, and canned voltha payloads."""
        global DeferredException
        self.sys_path_save = sys.path
        # Setting up the config module
        from xosconfig import Config
        config = os.path.join(test_path, "../test_config.yaml")
        Config.clear()
        Config.init(config, "synchronizer-config-schema.yaml")
        # END Setting up the config module
        from xossynchronizer.mock_modelaccessor_build import mock_modelaccessor_config
        # Generate a fake model accessor from the listed xproto definitions.
        mock_modelaccessor_config(test_path, [("olt-service", "volt.xproto"),
                                              ("vsg", "vsg.xproto"),
                                              ("rcord", "rcord.xproto"),])
        import xossynchronizer.modelaccessor
        # NOTE(review): builtin reload() is Python 2 only; Python 3 needs
        # importlib.reload — confirm the target interpreter.
        reload(xossynchronizer.modelaccessor) # in case nose2 loaded it in a previous test
        from xossynchronizer.modelaccessor import model_accessor
        self.model_accessor = model_accessor
        from pull_olts import OLTDevicePullStep
        # import all class names to globals (so tests can reference e.g. OLTDevice)
        for (k, v) in model_accessor.all_model_classes.items():
            globals()[k] = v
        self.sync_step = OLTDevicePullStep
        # mock volt service: carries the voltha connection parameters.
        self.volt_service = Mock()
        self.volt_service.id = "volt_service_id"
        self.volt_service.voltha_url = "voltha_url"
        self.volt_service.voltha_user = "voltha_user"
        self.volt_service.voltha_pass = "<PASSWORD>"
        self.volt_service.voltha_port = 1234
        # mock voltha responses
        # Canned /api/v1/devices payload: one enabled, active simulated OLT.
        self.devices = {
            "items": [
                {
                    "id": "test_id",
                    "type": "simulated_olt",
                    "host_and_port": "172.17.0.1:50060",
                    "admin_state": "ENABLED",
                    "oper_status": "ACTIVE",
                    "serial_number": "serial_number",
                }
            ]
        }
        # Canned /api/v1/logical_devices payload tying the OLT to an
        # openflow id and datapath id (55334486016 == 0xce2314000).
        self.logical_devices = {
            "items": [
                {
                    "root_device_id": "test_id",
                    "id": "of_id",
                    "datapath_id": "55334486016"
                }
            ]
        }
        # Canned /api/v1/devices/test_id/ports payload: one PON + one NNI port.
        self.ports = {
            "items": [
                {
                    "label": "PON port",
                    "port_no": 1,
                    "type": "PON_OLT",
                    "admin_state": "ENABLED",
                    "oper_status": "ACTIVE"
                },
                {
                    "label": "NNI facing Ethernet port",
                    "port_no": 2,
                    "type": "ETHERNET_NNI",
                    "admin_state": "ENABLED",
                    "oper_status": "ACTIVE"
                }
            ]
        }

    def tearDown(self):
        # Undo any sys.path changes made while loading the mock accessor.
        sys.path = self.sys_path_save

    @requests_mock.Mocker()
    def test_missing_volt_service(self, m):
        # With no VOLTService configured, the pull step performs no HTTP calls.
        self.assertFalse(m.called)

    @requests_mock.Mocker()
    def test_pull(self, m):
        """Pulling a device absent from the local models saves OLT + ports."""
        with patch.object(VOLTService.objects, "all") as olt_service_mock, \
                patch.object(OLTDevice, "save") as mock_olt_save, \
                patch.object(PONPort, "save") as mock_pon_save, \
                patch.object(NNIPort, "save") as mock_nni_save:
            olt_service_mock.return_value = [self.volt_service]
            m.get("http://voltha_url:1234/api/v1/devices", status_code=200, json=self.devices)
            m.get("http://voltha_url:1234/api/v1/devices/test_id/ports", status_code=200, json=self.ports)
            m.get("http://voltha_url:1234/api/v1/logical_devices", status_code=200, json=self.logical_devices)
            self.sync_step(model_accessor=self.model_accessor).pull_records()
            # TODO how to assert this?
            # self.assertEqual(existing_olt.admin_state, "ENABLED")
            # self.assertEqual(existing_olt.oper_status, "ACTIVE")
            # self.assertEqual(existing_olt.volt_service_id, "volt_service_id")
            # self.assertEqual(existing_olt.device_id, "test_id")
            # self.assertEqual(existing_olt.of_id, "of_id")
            # self.assertEqual(existing_olt.dp_id, "of:0000000ce2314000")
            mock_olt_save.assert_called()
            mock_pon_save.assert_called()
            mock_nni_save.assert_called()

    @requests_mock.Mocker()
    def test_pull_existing(self, m):
        """An already-known OLT (enacted >= updated) is refreshed and re-saved."""
        existing_olt = Mock()
        existing_olt.admin_state = "ENABLED"
        existing_olt.enacted = 2
        existing_olt.updated = 1
        with patch.object(VOLTService.objects, "all") as olt_service_mock, \
                patch.object(OLTDevice.objects, "filter") as mock_get, \
                patch.object(PONPort, "save") as mock_pon_save, \
                patch.object(NNIPort, "save") as mock_nni_save, \
                patch.object(existing_olt, "save") as mock_olt_save:
            olt_service_mock.return_value = [self.volt_service]
            mock_get.return_value = [existing_olt]
            m.get("http://voltha_url:1234/api/v1/devices", status_code=200, json=self.devices)
            m.get("http://voltha_url:1234/api/v1/devices/test_id/ports", status_code=200, json=self.ports)
            m.get("http://voltha_url:1234/api/v1/logical_devices", status_code=200, json=self.logical_devices)
            self.sync_step(model_accessor=self.model_accessor).pull_records()
            self.assertEqual(existing_olt.admin_state, "ENABLED")
            self.assertEqual(existing_olt.oper_status, "ACTIVE")
            self.assertEqual(existing_olt.volt_service_id, "volt_service_id")
            self.assertEqual(existing_olt.device_id, "test_id")
            self.assertEqual(existing_olt.of_id, "of_id")
            self.assertEqual(existing_olt.dp_id, "of:0000000ce2314000")
            mock_olt_save.assert_called()
            mock_pon_save.assert_called()
            mock_nni_save.assert_called()

    @requests_mock.Mocker()
    def test_pull_existing_do_not_sync(self, m):
        """A dirty local OLT (updated > enacted) is not overwritten; ports still are."""
        existing_olt = Mock()
        existing_olt.enacted = 1
        existing_olt.updated = 2
        existing_olt.device_id = "test_id"
        with patch.object(VOLTService.objects, "all") as olt_service_mock, \
                patch.object(OLTDevice.objects, "filter") as mock_get, \
                patch.object(PONPort, "save") as mock_pon_save, \
                patch.object(NNIPort, "save") as mock_nni_save, \
                patch.object(existing_olt, "save") as mock_olt_save:
            olt_service_mock.return_value = [self.volt_service]
            mock_get.return_value = [existing_olt]
            m.get("http://voltha_url:1234/api/v1/devices", status_code=200, json=self.devices)
            m.get("http://voltha_url:1234/api/v1/devices/test_id/ports", status_code=200, json=self.ports)
            m.get("http://voltha_url:1234/api/v1/logical_devices", status_code=200, json=self.logical_devices)
            self.sync_step(model_accessor=self.model_accessor).pull_records()
            mock_olt_save.assert_not_called()
            mock_pon_save.assert_called()
            mock_nni_save.assert_called()

    @requests_mock.Mocker()
    def test_pull_deleted_object(self, m):
        """A local OLT no longer reported by voltha is deleted."""
        existing_olt = Mock()
        existing_olt.enacted = 2
        existing_olt.updated = 1
        existing_olt.device_id = "test_id"
        # voltha reports no devices at all.
        m.get("http://voltha_url:1234/api/v1/devices", status_code=200, json={"items": []})
        with patch.object(VOLTService.objects, "all") as olt_service_mock, \
                patch.object(OLTDevice.objects, "get_items") as mock_get, \
                patch.object(existing_olt, "delete") as mock_olt_delete:
            olt_service_mock.return_value = [self.volt_service]
            mock_get.return_value = [existing_olt]
            self.sync_step(model_accessor=self.model_accessor).pull_records()
            mock_olt_delete.assert_called()
if __name__ == "__main__":
unittest.main() | [
"xosconfig.Config.clear",
"xossynchronizer.mock_modelaccessor_build.mock_modelaccessor_config",
"mock.Mock",
"requests_mock.Mocker",
"os.path.join",
"os.path.realpath",
"xossynchronizer.modelaccessor.model_accessor.all_model_classes.items",
"mock.patch.object",
"xosconfig.Config.init",
"unittest.m... | [((3610, 3632), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (3630, 3632), False, 'import requests_mock\n'), ((3722, 3744), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (3742, 3744), False, 'import requests_mock\n'), ((5094, 5116), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (5114, 5116), False, 'import requests_mock\n'), ((6694, 6716), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (6714, 6716), False, 'import requests_mock\n'), ((7903, 7925), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (7923, 7925), False, 'import requests_mock\n'), ((8701, 8716), 'unittest.main', 'unittest.main', ([], {}), '()\n', (8714, 8716), False, 'import unittest\n'), ((740, 766), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (756, 766), False, 'import os, sys\n'), ((1002, 1048), 'os.path.join', 'os.path.join', (['test_path', '"""../test_config.yaml"""'], {}), "(test_path, '../test_config.yaml')\n", (1014, 1048), False, 'import os, sys\n'), ((1057, 1071), 'xosconfig.Config.clear', 'Config.clear', ([], {}), '()\n', (1069, 1071), False, 'from xosconfig import Config\n'), ((1080, 1134), 'xosconfig.Config.init', 'Config.init', (['config', '"""synchronizer-config-schema.yaml"""'], {}), "(config, 'synchronizer-config-schema.yaml')\n", (1091, 1134), False, 'from xosconfig import Config\n'), ((1274, 1399), 'xossynchronizer.mock_modelaccessor_build.mock_modelaccessor_config', 'mock_modelaccessor_config', (['test_path', "[('olt-service', 'volt.xproto'), ('vsg', 'vsg.xproto'), ('rcord',\n 'rcord.xproto')]"], {}), "(test_path, [('olt-service', 'volt.xproto'), (\n 'vsg', 'vsg.xproto'), ('rcord', 'rcord.xproto')])\n", (1299, 1399), False, 'from xossynchronizer.mock_modelaccessor_build import mock_modelaccessor_config\n'), ((1861, 1901), 'xossynchronizer.modelaccessor.model_accessor.all_model_classes.items', 'model_accessor.all_model_classes.items', ([], 
{}), '()\n', (1899, 1901), False, 'from xossynchronizer.modelaccessor import model_accessor\n'), ((2033, 2039), 'mock.Mock', 'Mock', ([], {}), '()\n', (2037, 2039), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((5178, 5184), 'mock.Mock', 'Mock', ([], {}), '()\n', (5182, 5184), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((6789, 6795), 'mock.Mock', 'Mock', ([], {}), '()\n', (6793, 6795), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((7992, 7998), 'mock.Mock', 'Mock', ([], {}), '()\n', (7996, 7998), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((3787, 3827), 'mock.patch.object', 'patch.object', (['VOLTService.objects', '"""all"""'], {}), "(VOLTService.objects, 'all')\n", (3799, 3827), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((3867, 3898), 'mock.patch.object', 'patch.object', (['OLTDevice', '"""save"""'], {}), "(OLTDevice, 'save')\n", (3879, 3898), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((3935, 3964), 'mock.patch.object', 'patch.object', (['PONPort', '"""save"""'], {}), "(PONPort, 'save')\n", (3947, 3964), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((4001, 4030), 'mock.patch.object', 'patch.object', (['NNIPort', '"""save"""'], {}), "(NNIPort, 'save')\n", (4013, 4030), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((5310, 5350), 'mock.patch.object', 'patch.object', (['VOLTService.objects', '"""all"""'], {}), "(VOLTService.objects, 'all')\n", (5322, 5350), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((5390, 5431), 'mock.patch.object', 'patch.object', (['OLTDevice.objects', '"""filter"""'], {}), "(OLTDevice.objects, 'filter')\n", (5402, 5431), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((5463, 5492), 'mock.patch.object', 'patch.object', (['PONPort', '"""save"""'], {}), "(PONPort, 'save')\n", (5475, 5492), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((5529, 5558), 
'mock.patch.object', 'patch.object', (['NNIPort', '"""save"""'], {}), "(NNIPort, 'save')\n", (5541, 5558), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((5595, 5629), 'mock.patch.object', 'patch.object', (['existing_olt', '"""save"""'], {}), "(existing_olt, 'save')\n", (5607, 5629), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((6919, 6959), 'mock.patch.object', 'patch.object', (['VOLTService.objects', '"""all"""'], {}), "(VOLTService.objects, 'all')\n", (6931, 6959), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((6999, 7040), 'mock.patch.object', 'patch.object', (['OLTDevice.objects', '"""filter"""'], {}), "(OLTDevice.objects, 'filter')\n", (7011, 7040), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((7072, 7101), 'mock.patch.object', 'patch.object', (['PONPort', '"""save"""'], {}), "(PONPort, 'save')\n", (7084, 7101), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((7138, 7167), 'mock.patch.object', 'patch.object', (['NNIPort', '"""save"""'], {}), "(NNIPort, 'save')\n", (7150, 7167), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((7204, 7238), 'mock.patch.object', 'patch.object', (['existing_olt', '"""save"""'], {}), "(existing_olt, 'save')\n", (7216, 7238), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((8215, 8255), 'mock.patch.object', 'patch.object', (['VOLTService.objects', '"""all"""'], {}), "(VOLTService.objects, 'all')\n", (8227, 8255), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((8295, 8339), 'mock.patch.object', 'patch.object', (['OLTDevice.objects', '"""get_items"""'], {}), "(OLTDevice.objects, 'get_items')\n", (8307, 8339), False, 'from mock import patch, call, Mock, PropertyMock\n'), ((8371, 8407), 'mock.patch.object', 'patch.object', (['existing_olt', '"""delete"""'], {}), "(existing_olt, 'delete')\n", (8383, 8407), False, 'from mock import patch, call, Mock, PropertyMock\n')] |
import os
import sys
from collections import defaultdict
import datetime
import pickle
import re
import time
import json
from selenium import webdriver
def main():
    """Scrape messenger.com once and update the local activity databases.

    Uses a persisted login cookie (data/logincookies.pkl) to authenticate,
    extracts the "lastActiveTimes" and "shortProfiles" structures from the
    page source, and merges them into the JSON files under data/.
    """
    driver = webdriver.Chrome()  # Optional argument, if not specified will search path.
    try:
        # First load establishes the domain so cookies can be attached.
        driver.get('https://www.messenger.com')
        # Load login cookies saved by init.py (fix: file handle now closed).
        with open('data/logincookies.pkl', 'rb') as f:
            cookies = pickle.load(f)
        for c in cookies:
            driver.add_cookie(c)
        # Reload authenticated and grab the page source once.
        # (fix: page_source is already text; matching str patterns against the
        # old utf-8 encoded bytes breaks on Python 3.)
        driver.get('https://www.messenger.com')
        source = driver.page_source
    finally:
        # Always release the browser, even if the scrape fails mid-way.
        driver.quit()

    # Get last active times and append any changed timestamps to the history.
    last_active_re = re.compile(r'lastActiveTimes":(.*),"chatNotif')
    latest = json.loads(last_active_re.findall(source)[0])
    with open('data/lastActiveTimes.json', 'r') as f:
        history = defaultdict(lambda: [0], json.load(f))
    for user_id in latest:
        # Only record a timestamp when it differs from the last one seen.
        if latest[user_id] != history[user_id][-1]:
            history[user_id].append(latest[user_id])
    with open('data/lastActiveTimes.json', 'w') as f:
        json.dump(history, f)

    # Maintain an up-to-date database of friends' short profiles.
    profiles_re = re.compile(r'shortProfiles":(.*),"nearby')
    profiles = json.loads(profiles_re.findall(source)[0])
    with open('data/shortProfiles.json', 'r') as f:
        profile_db = json.load(f)
    profile_db.update(profiles)
    with open('data/shortProfiles.json', 'w') as f:
        json.dump(profile_db, f)
# Refuse to run without a saved login cookie (produced by init.py).
if not os.path.exists('data/logincookies.pkl'):
    print ('missing cookie. Have you run init.py?')
    sys.exit()
# Scrape forever: one pass every ten minutes, logging each pass's wall-clock
# time to data/lastScrapeTime.txt.
while True:
    main()
    with open('data/lastScrapeTime.txt','a') as f:
        f.write(str(datetime.datetime.now())+'\n')
    time.sleep(600)
| [
"os.path.exists",
"re.compile",
"selenium.webdriver.Chrome",
"time.sleep",
"datetime.datetime.now",
"sys.exit"
] | [((178, 196), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (194, 196), False, 'from selenium import webdriver\n'), ((613, 659), 're.compile', 're.compile', (['"""lastActiveTimes":(.*),"chatNotif"""'], {}), '(\'lastActiveTimes":(.*),"chatNotif\')\n', (623, 659), False, 'import re\n'), ((981, 1022), 're.compile', 're.compile', (['"""shortProfiles":(.*),"nearby"""'], {}), '(\'shortProfiles":(.*),"nearby\')\n', (991, 1022), False, 'import re\n'), ((1218, 1257), 'os.path.exists', 'os.path.exists', (['"""data/logincookies.pkl"""'], {}), "('data/logincookies.pkl')\n", (1232, 1257), False, 'import os\n'), ((1315, 1325), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1323, 1325), False, 'import sys\n'), ((1457, 1472), 'time.sleep', 'time.sleep', (['(600)'], {}), '(600)\n', (1467, 1472), False, 'import time\n'), ((1422, 1445), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1443, 1445), False, 'import datetime\n')] |
# encoding: utf-8
import asyncio
import logging
import random
from typing import Optional, Tuple
import discord
from discord.ext import commands
import lifesaver
from lifesaver.utils.formatting import truncate
from lifesaver.utils.timing import Timer, format_seconds
log = logging.getLogger(__name__)
SendVerdict = Tuple[bool, Optional[Exception]]
def bold_timer(timer: Timer) -> str:
    """Render a timer for display, bolding durations longer than one second."""
    if timer.duration <= 1:
        return str(timer)
    return f"**{timer}**"
class Health(lifesaver.Cog):
    """Cog exposing latency diagnostics: a simple ping and a gateway RTT probe."""

    def __init__(self, bot, *args, **kwargs):
        super().__init__(bot, *args, **kwargs)
        # message nonce -> asyncio.Event, set when the gateway echoes our send.
        self.rtt_sends = {}
        # message id -> asyncio.Event, set when the gateway echoes our edit.
        self.rtt_edits = {}

    @commands.Cog.listener()
    async def on_message_edit(self, message, _message):
        # Gateway acknowledged an edit we are timing: release the waiter.
        event = self.rtt_edits.get(message.id)
        if event:
            log.debug("RTT: Received edit_rx for %d.", message.id)
            event.set()
            del self.rtt_edits[message.id]

    @commands.Cog.listener()
    async def on_message(self, message):
        # Gateway delivered a message sent with a tracked nonce: release the waiter.
        event = self.rtt_sends.get(message.nonce)
        if event:
            log.debug(
                "RTT: Received send_rx for %d (nonce=%d).", message.id, message.nonce
            )
            event.set()
            del self.rtt_sends[message.nonce]

    @lifesaver.command(aliases=["p"])
    @commands.cooldown(1, 1, type=commands.BucketType.guild)
    async def ping(self, ctx: lifesaver.commands.Context):
        """Pings the bot."""
        await ctx.send("Pong!")

    @lifesaver.command()
    @commands.cooldown(3, 4, type=commands.BucketType.guild)
    async def rtt(self, ctx: lifesaver.commands.Context):
        """Measures API and gateway latency.

        "TX" refers to the time it takes for the HTTP request to be sent, and
        for a response to be received and processed.

        "RX" refers to the time it takes for the gateway to dispatch an action,
        for example "Edit RX" refers to the time between editing a message with
        the API and the gateway dispatching a MESSAGE_UPDATE packet.
        """
        # Random nonce so on_message can match our send among unrelated traffic.
        nonce = random.randint(1000, 10000)
        send_failed: SendVerdict = (False, None)
        edit_failed: SendVerdict = (False, None)

        # Send a message, and wait for it to come back.
        with Timer() as send:
            event = asyncio.Event()
            self.rtt_sends[nonce] = event
            with Timer() as send_tx:
                try:
                    message = await ctx.send("RTT: `\N{LOWER HALF BLOCK}`", nonce=nonce)
                except discord.HTTPException as error:
                    # NOTE(review): if the send fails, `message` is never
                    # bound and the edit phase below raises NameError —
                    # confirm intended behavior.
                    send_failed = (True, error)
            with Timer() as send_rx:
                await event.wait()

        # Edit a message, and wait for it to come back.
        with Timer() as edit:
            event = asyncio.Event()
            self.rtt_edits[message.id] = event
            with Timer() as edit_tx:
                try:
                    await message.edit(content="RTT: `\N{FULL BLOCK}`")
                except discord.HTTPException as error:
                    edit_failed = (True, error)
            with Timer() as edit_rx:
                await event.wait()

        # Average the two TX and the two RX measurements for the footer.
        avg_rx = (send_rx.duration + edit_rx.duration) / 2
        avg_tx = (send_tx.duration + edit_tx.duration) / 2
        # "Slow" (red embed) when either full round trip exceeded one second.
        slow = send.duration > 1 or edit.duration > 1

        def format_transfer(timer, tx, rx):
            # Render one round trip; components over a second are bolded.
            timer = bold_timer(timer)
            tx = bold_timer(tx)
            rx = bold_timer(rx)
            return f"RTT: {timer}\n\nTX: {tx}\nRX: {rx}"

        if slow:
            color = discord.Color.red()
        else:
            color = discord.Color.green()

        embed = discord.Embed(title="RTT Results", color=color)
        embed.add_field(
            name="MESSAGE_CREATE", value=format_transfer(send, send_tx, send_rx),
        )
        embed.add_field(
            name="MESSAGE_UPDATE", value=format_transfer(edit, edit_tx, edit_rx),
        )
        embed.set_footer(
            text=f"Avg. TX: {format_seconds(avg_tx)}, RX: {format_seconds(avg_rx)}",
        )

        # Report any HTTP failures encountered during the measurements.
        failures = {"Send": send_failed, "Edit": edit_failed}
        if any(result[0] for (name, result) in failures.items()):
            content = "\n".join(
                f"{name}: Failed with HTTP {result[1].code}: {truncate(result[1].message, 100)}"  # type: ignore
                for (name, result) in failures.items()
                if result[0] is not False
            )
            embed.add_field(name="Failures", value=content, inline=False)

        await message.edit(content="", embed=embed)
def setup(bot):
    """discord.py extension entry point: attach the Health cog to *bot*."""
    bot.add_cog(Health(bot))
| [
"logging.getLogger",
"discord.ext.commands.Cog.listener",
"lifesaver.utils.formatting.truncate",
"lifesaver.utils.timing.Timer",
"asyncio.Event",
"discord.Color.green",
"lifesaver.command",
"discord.ext.commands.cooldown",
"lifesaver.utils.timing.format_seconds",
"discord.Embed",
"random.randint... | [((277, 304), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (294, 304), False, 'import logging\n'), ((671, 694), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (692, 694), False, 'from discord.ext import commands\n'), ((956, 979), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (977, 979), False, 'from discord.ext import commands\n'), ((1288, 1320), 'lifesaver.command', 'lifesaver.command', ([], {'aliases': "['p']"}), "(aliases=['p'])\n", (1305, 1320), False, 'import lifesaver\n'), ((1326, 1381), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(1)'], {'type': 'commands.BucketType.guild'}), '(1, 1, type=commands.BucketType.guild)\n', (1343, 1381), False, 'from discord.ext import commands\n'), ((1508, 1527), 'lifesaver.command', 'lifesaver.command', ([], {}), '()\n', (1525, 1527), False, 'import lifesaver\n'), ((1533, 1588), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(3)', '(4)'], {'type': 'commands.BucketType.guild'}), '(3, 4, type=commands.BucketType.guild)\n', (1550, 1588), False, 'from discord.ext import commands\n'), ((2080, 2107), 'random.randint', 'random.randint', (['(1000)', '(10000)'], {}), '(1000, 10000)\n', (2094, 2107), False, 'import random\n'), ((3683, 3730), 'discord.Embed', 'discord.Embed', ([], {'title': '"""RTT Results"""', 'color': 'color'}), "(title='RTT Results', color=color)\n", (3696, 3730), False, 'import discord\n'), ((2277, 2284), 'lifesaver.utils.timing.Timer', 'Timer', ([], {}), '()\n', (2282, 2284), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((2314, 2329), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (2327, 2329), False, 'import asyncio\n'), ((2766, 2773), 'lifesaver.utils.timing.Timer', 'Timer', ([], {}), '()\n', (2771, 2773), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((2803, 2818), 'asyncio.Event', 'asyncio.Event', ([], {}), 
'()\n', (2816, 2818), False, 'import asyncio\n'), ((3590, 3609), 'discord.Color.red', 'discord.Color.red', ([], {}), '()\n', (3607, 3609), False, 'import discord\n'), ((3644, 3665), 'discord.Color.green', 'discord.Color.green', ([], {}), '()\n', (3663, 3665), False, 'import discord\n'), ((2390, 2397), 'lifesaver.utils.timing.Timer', 'Timer', ([], {}), '()\n', (2395, 2397), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((2641, 2648), 'lifesaver.utils.timing.Timer', 'Timer', ([], {}), '()\n', (2646, 2648), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((2884, 2891), 'lifesaver.utils.timing.Timer', 'Timer', ([], {}), '()\n', (2889, 2891), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((3118, 3125), 'lifesaver.utils.timing.Timer', 'Timer', ([], {}), '()\n', (3123, 3125), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((4021, 4043), 'lifesaver.utils.timing.format_seconds', 'format_seconds', (['avg_tx'], {}), '(avg_tx)\n', (4035, 4043), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((4051, 4073), 'lifesaver.utils.timing.format_seconds', 'format_seconds', (['avg_rx'], {}), '(avg_rx)\n', (4065, 4073), False, 'from lifesaver.utils.timing import Timer, format_seconds\n'), ((4312, 4344), 'lifesaver.utils.formatting.truncate', 'truncate', (['result[1].message', '(100)'], {}), '(result[1].message, 100)\n', (4320, 4344), False, 'from lifesaver.utils.formatting import truncate\n')] |
import pytest
from app import create_app
@pytest.fixture
def client():
    """Test client bound to a freshly created application instance."""
    return create_app().test_client()
| [
"app.create_app"
] | [((83, 95), 'app.create_app', 'create_app', ([], {}), '()\n', (93, 95), False, 'from app import create_app\n')] |
from django.contrib import admin
from api.models import Answer, Question, User
from django import forms
class AnswerAdmin(admin.ModelAdmin):
    # NOTE(review): ``model`` is an InlineModelAdmin attribute, not a
    # ModelAdmin option — harmless but presumably unintended; confirm.
    model = Answer
class QuestionAdmin(admin.ModelAdmin):
    # NOTE(review): ``model`` is an InlineModelAdmin attribute, not a
    # ModelAdmin option — harmless but presumably unintended; confirm.
    model = Question
# class UserForm(forms.ModelForm):
# password = forms.CharField(widget=forms.PasswordInput)
#
# def __init__(self, *args, **kwargs):
# super(UserForm, self).__init__(*args, **kwargs)
#
# class Meta:
# model = User
class UserAdmin(admin.ModelAdmin):
    # NOTE(review): ``model`` is an InlineModelAdmin attribute, not a
    # ModelAdmin option — harmless but presumably unintended; confirm.
    model = User
# Register each API model with the admin site under its ModelAdmin class.
admin.site.register(Answer, AnswerAdmin)
admin.site.register(Question, QuestionAdmin)
admin.site.register(User, UserAdmin)
| [
"django.contrib.admin.site.register"
] | [((524, 564), 'django.contrib.admin.site.register', 'admin.site.register', (['Answer', 'AnswerAdmin'], {}), '(Answer, AnswerAdmin)\n', (543, 564), False, 'from django.contrib import admin\n'), ((565, 609), 'django.contrib.admin.site.register', 'admin.site.register', (['Question', 'QuestionAdmin'], {}), '(Question, QuestionAdmin)\n', (584, 609), False, 'from django.contrib import admin\n'), ((610, 646), 'django.contrib.admin.site.register', 'admin.site.register', (['User', 'UserAdmin'], {}), '(User, UserAdmin)\n', (629, 646), False, 'from django.contrib import admin\n')] |
"""Parsing utilities for moreos."""
import re
import attr
@attr.s(frozen=True)
class ABNF:
    """Container of regular expressions both raw and compiled for parsing."""
    # From https://tools.ietf.org/html/rfc2616#section-2.2
    ctl = control_characters = "\x7f\x00-\x1f"
    digit = "0-9"
    separators = r"\[\]\(\)<>@,;:\\\"/?={}\s"
    # A token is any run of characters excluding controls and separators.
    token = f"[^{ctl}{separators}]+"
    # RFC1123 date components
    wkday = "(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)"
    month = "(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)"
    time = f"[{digit}]{{2}}:[{digit}]{{2}}:[{digit}]{{2}}"
    date1 = f"[{digit}]{{1,2}} {month} [{digit}]{{4}}"
    # NOTE(sigmavirus24) This allows some nonsense but it's a decent
    # high-level check
    rfc1123_date = f"{wkday}, {date1} {time} GMT"
    # From https://tools.ietf.org/html/rfc1034#section-3.5, enhanced by
    # https://tools.ietf.org/html/rfc1123#section-2.1
    letter = "A-Za-z"
    let_dig = f"{letter}{digit}"
    let_dig_hyp = f"{let_dig}-"
    ldh_str = f"[{let_dig_hyp}]+"
    # This is where the update from rfc1123#section2.1 is relevant
    label = f"[{let_dig}](?:(?:{ldh_str})?[{let_dig}])?"
    subdomain = f"\\.?(?:{label}\\.)*(?:{label})"
    # From https://tools.ietf.org/html/rfc6265#section-3.1
    # NOTE: \x5b = [, \x5d = ] so let's escape those directly
    cookie_octet = "[\x21\x23-\x2b\\\x2d-\x3a\x3c-\\[\\]-\x7e]"
    cookie_value = f'(?:{cookie_octet}*|"{cookie_octet}*")'
    cookie_name = token
    # name=value pair with named capture groups for both sides.
    cookie_pair = f"(?P<name>{cookie_name})=(?P<value>{cookie_value})"
    _any_char_except_ctls_or_semicolon = f"[^;{ctl}]+"
    extension_av = _any_char_except_ctls_or_semicolon
    httponly_av = "(?P<httponly>HttpOnly)"
    secure_av = "(?P<secure>Secure)"
    path_value = _any_char_except_ctls_or_semicolon
    path_av = f"Path=(?P<path>{path_value})"
    domain_value = subdomain
    domain_av = f"Domain=(?P<domain>{domain_value})"
    non_zero_digit = "1-9"
    max_age_av = f"Max-Age=(?P<max_age>[{non_zero_digit}][{digit}]*)"
    sane_cookie_date = rfc1123_date
    expires_av = f"Expires=(?P<expires>{sane_cookie_date})"
    samesite_value = "(?:Strict|Lax|None)"
    samesite_av = f"SameSite=(?P<samesite>{samesite_value})"
    # Any single Set-Cookie attribute-value alternative.
    cookie_av = (
        f"(?:{expires_av}|{max_age_av}|{domain_av}|{path_av}|"
        f"{secure_av}|{httponly_av}|{samesite_av}|{extension_av})"
    )
    set_cookie_string = f"{cookie_pair}(?:; {cookie_av})*"
    # Not specified in either RFC
    client_cookie_string = f"(?:({cookie_name})=({cookie_value}))(?:; )?"
    # Pre-compiled version of the above abnf
    separators_re = re.compile(f"[{separators}]+")
    control_characters_re = re.compile(f"[{ctl}]+")
    cookie_name_re = token_re = re.compile(token)
    cookie_value_re = re.compile(cookie_value)
    set_cookie_string_re = re.compile(set_cookie_string)
    client_cookie_string_re = re.compile(client_cookie_string)
| [
"attr.s",
"re.compile"
] | [((62, 81), 'attr.s', 'attr.s', ([], {'frozen': '(True)'}), '(frozen=True)\n', (68, 81), False, 'import attr\n'), ((2595, 2625), 're.compile', 're.compile', (['f"""[{separators}]+"""'], {}), "(f'[{separators}]+')\n", (2605, 2625), False, 'import re\n'), ((2654, 2677), 're.compile', 're.compile', (['f"""[{ctl}]+"""'], {}), "(f'[{ctl}]+')\n", (2664, 2677), False, 'import re\n'), ((2710, 2727), 're.compile', 're.compile', (['token'], {}), '(token)\n', (2720, 2727), False, 'import re\n'), ((2750, 2774), 're.compile', 're.compile', (['cookie_value'], {}), '(cookie_value)\n', (2760, 2774), False, 'import re\n'), ((2802, 2831), 're.compile', 're.compile', (['set_cookie_string'], {}), '(set_cookie_string)\n', (2812, 2831), False, 'import re\n'), ((2862, 2894), 're.compile', 're.compile', (['client_cookie_string'], {}), '(client_cookie_string)\n', (2872, 2894), False, 'import re\n')] |
# STL imports
import random
import logging
import string
import time
import datetime
import random
import struct
import sys
from functools import wraps
# Third party imports
import numpy as np
import faker
from faker.providers import BaseProvider
# Quiet faker's verbose internal logger and make the working directory importable.
logging.getLogger('faker').setLevel(logging.ERROR)
sys.path.append('.')
def gen_vectors(num, dim):
    """Generate ``num`` random float vectors, each of length ``dim``.

    Values are uniform floats in [0, 1) from ``random.random``.
    """
    vectors = []
    for _ in range(num):
        vectors.append([random.random() for _ in range(dim)])
    return vectors
def gen_single_vector(dim):
    """Return one random vector of length ``dim``, wrapped in an outer list."""
    vector = [random.random() for _ in range(dim)]
    return [vector]
def gen_vector(nb, d, seed=None):
    """Generate ``nb`` random float32 vectors of dimension ``d``.

    Args:
        nb: number of vectors to generate.
        d: dimensionality of each vector.
        seed: optional ``numpy.random.RandomState`` to draw from.  Defaults
            to a fresh ``RandomState(1234)`` per call, so repeated calls
            with the default are reproducible.

    Returns:
        Nested Python list of floats (``nb`` rows of ``d`` values).

    Note:
        The previous signature used ``seed=np.random.RandomState(1234)``,
        a single stateful default evaluated once at definition time; every
        default call advanced the shared generator, so results drifted
        between calls.  Using a ``None`` sentinel fixes that while staying
        backward compatible for callers that pass their own ``seed``.
    """
    if seed is None:
        seed = np.random.RandomState(1234)
    xb = seed.rand(nb, d).astype("float32")
    return xb.tolist()
def gen_unique_str(str=None):
    """Return a random 8-char alphanumeric tag, optionally appended to *str*.

    The parameter name ``str`` (shadowing the builtin) is kept for
    interface compatibility with existing callers.
    """
    alphabet = string.ascii_letters + string.digits
    prefix = "".join(random.choice(alphabet) for _ in range(8))
    if str is None:
        return prefix
    return str + "_" + prefix
def get_current_day():
    """Today's local date formatted as ``YYYY-MM-DD``."""
    now = time.localtime()
    return time.strftime('%Y-%m-%d', now)
def get_last_day(day):
    """Date ``day`` days in the past, formatted as ``YYYY-MM-DD``."""
    return (datetime.datetime.now() - datetime.timedelta(days=day)).strftime('%Y-%m-%d')
def get_next_day(day):
    """Date ``day`` days in the future, formatted as ``YYYY-MM-DD``."""
    return (datetime.datetime.now() + datetime.timedelta(days=day)).strftime('%Y-%m-%d')
def gen_long_str(num):
    """Build a string of ``num`` characters drawn randomly from ``"tomorrow"``.

    Args:
        num: desired length of the generated string.

    Returns:
        The generated string.

    Note:
        The original implementation accumulated into a local (which also
        shadowed the ``string`` module) and never returned it, so every
        call produced ``None``.  Returning the built string is backward
        compatible for callers that discarded the old (useless) result.
    """
    chars = [random.choice('tomorrow') for _ in range(num)]
    return ''.join(chars)
class FakerProvider(BaseProvider):
    """Custom faker provider: random collection names, names and dimensions."""
    def collection_name(self):
        suffix = random.randint(1000, 9999)
        return 'collection_names' + str(suffix)
    def name(self):
        suffix = random.randint(1000, 9999)
        return 'name' + str(suffix)
    def dim(self):
        return random.randint(0, 999)
# Module-level faker instance, extended with the custom provider above.
fake = faker.Faker()
fake.add_provider(FakerProvider)
def collection_name_factory():
    """Return a random collection name from the module-level ``fake`` instance."""
    return fake.collection_name()
def records_factory(dimension, nq):
    """Generate ``nq`` random float vectors, each of length ``dimension``."""
    rows = []
    for _ in range(nq):
        rows.append([random.random() for _ in range(dimension)])
    return rows
def binary_records_factory(dim, nq):
    """Generate ``nq`` random binary vectors of ``dim`` bits each.

    Each vector is packed into ``dim // 8`` bytes.
    """
    # uint8 values range is [0, 256), so we specify the high range is 256.
    raw = np.random.randint(256, size=[nq, dim // 8], dtype="uint8")
    return [bytes(row) for row in raw]
def integer_factory(nq):
    """Generate ``nq`` random integers in the inclusive range [0, 128]."""
    values = []
    for _ in range(nq):
        values.append(random.randint(0, 128))
    return values
def time_it(func):
    """Decorator that prints the wall-clock duration of each call to *func*."""
    @wraps(func)
    def inner(*args, **kwargs):
        start = time.perf_counter()
        result = func(*args, **kwargs)
        elapsed = time.perf_counter() - start
        print(f"[{func.__name__}][{elapsed:.4}s]")
        return result
    return inner
| [
"logging.getLogger",
"random.choice",
"time.perf_counter",
"functools.wraps",
"datetime.timedelta",
"faker.Faker",
"datetime.datetime.now",
"numpy.random.randint",
"random.random",
"time.localtime",
"sys.path.append",
"random.randint",
"numpy.random.RandomState"
] | [((301, 321), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (316, 321), False, 'import sys\n'), ((1553, 1566), 'faker.Faker', 'faker.Faker', ([], {}), '()\n', (1564, 1566), False, 'import faker\n'), ((532, 559), 'numpy.random.RandomState', 'np.random.RandomState', (['(1234)'], {}), '(1234)\n', (553, 559), True, 'import numpy as np\n'), ((1905, 1963), 'numpy.random.randint', 'np.random.randint', (['(256)'], {'size': '[nq, dim // 8]', 'dtype': '"""uint8"""'}), "(256, size=[nq, dim // 8], dtype='uint8')\n", (1922, 1963), True, 'import numpy as np\n'), ((2121, 2132), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2126, 2132), False, 'from functools import wraps\n'), ((249, 275), 'logging.getLogger', 'logging.getLogger', (['"""faker"""'], {}), "('faker')\n", (266, 275), False, 'import logging\n'), ((872, 888), 'time.localtime', 'time.localtime', ([], {}), '()\n', (886, 888), False, 'import time\n'), ((925, 948), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (946, 948), False, 'import datetime\n'), ((951, 979), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'day'}), '(days=day)\n', (969, 979), False, 'import datetime\n'), ((1051, 1074), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1072, 1074), False, 'import datetime\n'), ((1077, 1105), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'day'}), '(days=day)\n', (1095, 1105), False, 'import datetime\n'), ((1223, 1248), 'random.choice', 'random.choice', (['"""tomorrow"""'], {}), "('tomorrow')\n", (1236, 1248), False, 'import random\n'), ((1521, 1543), 'random.randint', 'random.randint', (['(0)', '(999)'], {}), '(0, 999)\n', (1535, 1543), False, 'import random\n'), ((2052, 2074), 'random.randint', 'random.randint', (['(0)', '(128)'], {}), '(0, 128)\n', (2066, 2074), False, 'import random\n'), ((2179, 2198), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2196, 2198), False, 'import time\n'), ((364, 379), 
'random.random', 'random.random', ([], {}), '()\n', (377, 379), False, 'import random\n'), ((465, 480), 'random.random', 'random.random', ([], {}), '()\n', (478, 480), False, 'import random\n'), ((682, 733), 'random.choice', 'random.choice', (['(string.ascii_letters + string.digits)'], {}), '(string.ascii_letters + string.digits)\n', (695, 733), False, 'import random\n'), ((1718, 1733), 'random.random', 'random.random', ([], {}), '()\n', (1731, 1733), False, 'import random\n'), ((2252, 2271), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2269, 2271), False, 'import time\n'), ((1381, 1407), 'random.randint', 'random.randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (1395, 1407), False, 'import random\n'), ((1458, 1484), 'random.randint', 'random.randint', (['(1000)', '(9999)'], {}), '(1000, 9999)\n', (1472, 1484), False, 'import random\n')] |
import logging
from typing import Any
from _pytest._io import TerminalWriter
from _pytest.logging import ColoredLevelFormatter
def test_coloredlogformatter() -> None:
    """ColoredLevelFormatter colorizes the levelname field only when the
    terminal writer reports markup support."""
    logfmt = "%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s"
    record = logging.LogRecord(
        name="dummy",
        level=logging.INFO,
        pathname="dummypath",
        lineno=10,
        msg="Test Message",
        args=(),
        exc_info=None,
    )
    class ColorConfig:
        class option:
            pass
    tw = TerminalWriter()
    tw.hasmarkup = True
    formatter = ColoredLevelFormatter(tw, logfmt)
    output = formatter.format(record)
    # \x1b[32m ... \x1b[0m wraps INFO in green when markup is on.
    assert output == (
        "dummypath 10 \x1b[32mINFO \x1b[0m Test Message"
    )
    tw.hasmarkup = False
    formatter = ColoredLevelFormatter(tw, logfmt)
    output = formatter.format(record)
    # Without markup support the levelname is emitted uncolored.
    assert output == ("dummypath 10 INFO Test Message")
def test_multiline_message() -> None:
    """PercentStyleMultiline indents continuation lines of a multi-line
    message depending on the style's auto-indent flag and the per-record
    ``auto_indent`` override (bool, str, or int column)."""
    from _pytest.logging import PercentStyleMultiline
    logfmt = "%(filename)-25s %(lineno)4d %(levelname)-8s %(message)s"
    record: Any = logging.LogRecord(
        name="dummy",
        level=logging.INFO,
        pathname="dummypath",
        lineno=10,
        msg="Test Message line1\nline2",
        args=(),
        exc_info=None,
    )
    # this is called by logging.Formatter.format
    record.message = record.getMessage()
    ai_on_style = PercentStyleMultiline(logfmt, True)
    output = ai_on_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\n"
        " line2"
    )
    ai_off_style = PercentStyleMultiline(logfmt, False)
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\nline2"
    )
    ai_none_style = PercentStyleMultiline(logfmt, None)
    output = ai_none_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\nline2"
    )
    # Per-record auto_indent overrides the style-level setting.
    record.auto_indent = False
    output = ai_on_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\nline2"
    )
    record.auto_indent = True
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\n"
        " line2"
    )
    record.auto_indent = "False"
    output = ai_on_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\nline2"
    )
    record.auto_indent = "True"
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\n"
        " line2"
    )
    # bad string values default to False
    record.auto_indent = "junk"
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\nline2"
    )
    # anything other than string or int will default to False
    record.auto_indent = dict()
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\nline2"
    )
    # a numeric string or int selects an explicit indent column
    record.auto_indent = "5"
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\n line2"
    )
    record.auto_indent = 5
    output = ai_off_style.format(record)
    assert output == (
        "dummypath 10 INFO Test Message line1\n line2"
    )
def test_colored_short_level() -> None:
    """The color escape wraps only the (possibly truncated) levelname —
    here ``%(levelname).1s`` yields just the single character 'I'."""
    logfmt = "%(levelname).1s %(message)s"
    record = logging.LogRecord(
        name="dummy",
        level=logging.INFO,
        pathname="dummypath",
        lineno=10,
        msg="Test Message",
        args=(),
        exc_info=None,
    )
    class ColorConfig:
        class option:
            pass
    tw = TerminalWriter()
    tw.hasmarkup = True
    formatter = ColoredLevelFormatter(tw, logfmt)
    output = formatter.format(record)
    # the I (of INFO) is colored
    assert output == ("\x1b[32mI\x1b[0m Test Message")
| [
"_pytest.logging.PercentStyleMultiline",
"_pytest._io.TerminalWriter",
"logging.LogRecord",
"_pytest.logging.ColoredLevelFormatter"
] | [((255, 387), 'logging.LogRecord', 'logging.LogRecord', ([], {'name': '"""dummy"""', 'level': 'logging.INFO', 'pathname': '"""dummypath"""', 'lineno': '(10)', 'msg': '"""Test Message"""', 'args': '()', 'exc_info': 'None'}), "(name='dummy', level=logging.INFO, pathname='dummypath',\n lineno=10, msg='Test Message', args=(), exc_info=None)\n", (272, 387), False, 'import logging\n'), ((520, 536), '_pytest._io.TerminalWriter', 'TerminalWriter', ([], {}), '()\n', (534, 536), False, 'from _pytest._io import TerminalWriter\n'), ((577, 610), '_pytest.logging.ColoredLevelFormatter', 'ColoredLevelFormatter', (['tw', 'logfmt'], {}), '(tw, logfmt)\n', (598, 610), False, 'from _pytest.logging import ColoredLevelFormatter\n'), ((798, 831), '_pytest.logging.ColoredLevelFormatter', 'ColoredLevelFormatter', (['tw', 'logfmt'], {}), '(tw, logfmt)\n', (819, 831), False, 'from _pytest.logging import ColoredLevelFormatter\n'), ((1133, 1281), 'logging.LogRecord', 'logging.LogRecord', ([], {'name': '"""dummy"""', 'level': 'logging.INFO', 'pathname': '"""dummypath"""', 'lineno': '(10)', 'msg': '"""Test Message line1\nline2"""', 'args': '()', 'exc_info': 'None'}), '(name=\'dummy\', level=logging.INFO, pathname=\'dummypath\',\n lineno=10, msg="""Test Message line1\nline2""", args=(), exc_info=None)\n', (1150, 1281), False, 'import logging\n'), ((1447, 1482), '_pytest.logging.PercentStyleMultiline', 'PercentStyleMultiline', (['logfmt', '(True)'], {}), '(logfmt, True)\n', (1468, 1482), False, 'from _pytest.logging import PercentStyleMultiline\n'), ((1699, 1735), '_pytest.logging.PercentStyleMultiline', 'PercentStyleMultiline', (['logfmt', '(False)'], {}), '(logfmt, False)\n', (1720, 1735), False, 'from _pytest.logging import PercentStyleMultiline\n'), ((1903, 1938), '_pytest.logging.PercentStyleMultiline', 'PercentStyleMultiline', (['logfmt', 'None'], {}), '(logfmt, None)\n', (1924, 1938), False, 'from _pytest.logging import PercentStyleMultiline\n'), ((3820, 3952), 'logging.LogRecord', 
'logging.LogRecord', ([], {'name': '"""dummy"""', 'level': 'logging.INFO', 'pathname': '"""dummypath"""', 'lineno': '(10)', 'msg': '"""Test Message"""', 'args': '()', 'exc_info': 'None'}), "(name='dummy', level=logging.INFO, pathname='dummypath',\n lineno=10, msg='Test Message', args=(), exc_info=None)\n", (3837, 3952), False, 'import logging\n'), ((4085, 4101), '_pytest._io.TerminalWriter', 'TerminalWriter', ([], {}), '()\n', (4099, 4101), False, 'from _pytest._io import TerminalWriter\n'), ((4142, 4175), '_pytest.logging.ColoredLevelFormatter', 'ColoredLevelFormatter', (['tw', 'logfmt'], {}), '(tw, logfmt)\n', (4163, 4175), False, 'from _pytest.logging import ColoredLevelFormatter\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'tabresmatcher.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_tabresmatcher(object):
    """pyuic5-generated UI definition for the RES-matcher tab.

    Generated code — regenerate from ``tabresmatcher.ui`` rather than
    editing by hand (see the file header warning).
    """
    def setupUi(self, tabresmatcher):
        """Create and lay out all child widgets on *tabresmatcher*."""
        tabresmatcher.setObjectName("tabresmatcher")
        tabresmatcher.resize(697, 570)
        self.horizontalLayout = QtWidgets.QHBoxLayout(tabresmatcher)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.vL_match_1 = QtWidgets.QVBoxLayout()
        self.vL_match_1.setObjectName("vL_match_1")
        self.l_match_res = QtWidgets.QLabel(tabresmatcher)
        self.l_match_res.setObjectName("l_match_res")
        self.vL_match_1.addWidget(self.l_match_res)
        self.lV_match_res = QtWidgets.QListView(tabresmatcher)
        self.lV_match_res.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
        self.lV_match_res.setObjectName("lV_match_res")
        self.vL_match_1.addWidget(self.lV_match_res)
        self.hL_match_pB1 = QtWidgets.QHBoxLayout()
        self.hL_match_pB1.setObjectName("hL_match_pB1")
        self.pB_match_choose_res = QtWidgets.QPushButton(tabresmatcher)
        self.pB_match_choose_res.setObjectName("pB_match_choose_res")
        self.hL_match_pB1.addWidget(self.pB_match_choose_res)
        self.l_solve_count = QtWidgets.QLabel(tabresmatcher)
        self.l_solve_count.setObjectName("l_solve_count")
        self.hL_match_pB1.addWidget(self.l_solve_count)
        self.vL_match_1.addLayout(self.hL_match_pB1)
        self.horizontalLayout.addLayout(self.vL_match_1)
        self.line = QtWidgets.QFrame(tabresmatcher)
        self.line.setFrameShape(QtWidgets.QFrame.VLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.horizontalLayout.addWidget(self.line)
        self.vL_match_2 = QtWidgets.QVBoxLayout()
        self.vL_match_2.setContentsMargins(-1, 25, -1, -1)
        self.vL_match_2.setObjectName("vL_match_2")
        self.hL_match_pdb = QtWidgets.QHBoxLayout()
        self.hL_match_pdb.setObjectName("hL_match_pdb")
        self.pB_pdb = QtWidgets.QPushButton(tabresmatcher)
        self.pB_pdb.setObjectName("pB_pdb")
        self.hL_match_pdb.addWidget(self.pB_pdb)
        self.l_pdb = QtWidgets.QLabel(tabresmatcher)
        self.l_pdb.setObjectName("l_pdb")
        self.hL_match_pdb.addWidget(self.l_pdb)
        self.vL_match_2.addLayout(self.hL_match_pdb)
        self.fL_match_1 = QtWidgets.QFormLayout()
        self.fL_match_1.setObjectName("fL_match_1")
        self.l_old_algorithm = QtWidgets.QLabel(tabresmatcher)
        self.l_old_algorithm.setObjectName("l_old_algorithm")
        self.fL_match_1.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.l_old_algorithm)
        self.l_loss_atom = QtWidgets.QLabel(tabresmatcher)
        self.l_loss_atom.setObjectName("l_loss_atom")
        self.fL_match_1.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.l_loss_atom)
        self.sB_loss_atom = QtWidgets.QSpinBox(tabresmatcher)
        self.sB_loss_atom.setObjectName("sB_loss_atom")
        self.fL_match_1.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.sB_loss_atom)
        self.l_threshold = QtWidgets.QLabel(tabresmatcher)
        self.l_threshold.setObjectName("l_threshold")
        self.fL_match_1.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.l_threshold)
        self.cB_threshold = QtWidgets.QComboBox(tabresmatcher)
        self.cB_threshold.setObjectName("cB_threshold")
        self.cB_threshold.addItem("")
        self.cB_threshold.addItem("")
        self.cB_threshold.addItem("")
        self.cB_threshold.addItem("")
        self.cB_threshold.addItem("")
        self.cB_threshold.addItem("")
        self.cB_threshold.addItem("")
        self.fL_match_1.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.cB_threshold)
        self.l_dBS_threshold = QtWidgets.QLabel(tabresmatcher)
        self.l_dBS_threshold.setObjectName("l_dBS_threshold")
        self.fL_match_1.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.l_dBS_threshold)
        self.dSB_threshold = QtWidgets.QDoubleSpinBox(tabresmatcher)
        self.dSB_threshold.setObjectName("dSB_threshold")
        self.fL_match_1.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.dSB_threshold)
        self.rB_old_algorithm = QtWidgets.QRadioButton(tabresmatcher)
        self.rB_old_algorithm.setText("")
        self.rB_old_algorithm.setObjectName("rB_old_algorithm")
        self.fL_match_1.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.rB_old_algorithm)
        self.vL_match_2.addLayout(self.fL_match_1)
        self.hL_match_thickness = QtWidgets.QHBoxLayout()
        self.hL_match_thickness.setObjectName("hL_match_thickness")
        self.l_match_thick = QtWidgets.QLabel(tabresmatcher)
        self.l_match_thick.setObjectName("l_match_thick")
        self.hL_match_thickness.addWidget(self.l_match_thick)
        self.cB_thick_x = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_thick_x.setObjectName("cB_thick_x")
        self.hL_match_thickness.addWidget(self.cB_thick_x)
        self.cB_thick_y = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_thick_y.setObjectName("cB_thick_y")
        self.hL_match_thickness.addWidget(self.cB_thick_y)
        self.cB_thick_z = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_thick_z.setObjectName("cB_thick_z")
        self.hL_match_thickness.addWidget(self.cB_thick_z)
        self.vL_match_2.addLayout(self.hL_match_thickness)
        self.gL_match_output = QtWidgets.QGridLayout()
        self.gL_match_output.setObjectName("gL_match_output")
        self.cB_Ra = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Ra.setChecked(True)
        self.cB_Ra.setObjectName("cB_Ra")
        self.gL_match_output.addWidget(self.cB_Ra, 4, 1, 1, 1)
        self.cB_Rb = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Rb.setChecked(True)
        self.cB_Rb.setObjectName("cB_Rb")
        self.gL_match_output.addWidget(self.cB_Rb, 4, 2, 1, 1)
        self.l_match_output = QtWidgets.QLabel(tabresmatcher)
        self.l_match_output.setObjectName("l_match_output")
        self.gL_match_output.addWidget(self.l_match_output, 1, 0, 1, 1)
        self.cB_Nm = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Nm.setChecked(True)
        self.cB_Nm.setObjectName("cB_Nm")
        self.gL_match_output.addWidget(self.cB_Nm, 1, 2, 1, 1)
        self.cB_Tm = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Tm.setChecked(True)
        self.cB_Tm.setObjectName("cB_Tm")
        self.gL_match_output.addWidget(self.cB_Tm, 1, 1, 1, 1)
        self.cB_R1 = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_R1.setChecked(True)
        self.cB_R1.setObjectName("cB_R1")
        self.gL_match_output.addWidget(self.cB_R1, 5, 1, 1, 1)
        self.cB_Alpha = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Alpha.setChecked(True)
        self.cB_Alpha.setObjectName("cB_Alpha")
        self.gL_match_output.addWidget(self.cB_Alpha, 5, 3, 1, 1)
        self.cB_Rweak = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Rweak.setChecked(True)
        self.cB_Rweak.setObjectName("cB_Rweak")
        self.gL_match_output.addWidget(self.cB_Rweak, 5, 2, 1, 1)
        self.cB_Rw = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Rw.setChecked(True)
        self.cB_Rw.setObjectName("cB_Rw")
        self.gL_match_output.addWidget(self.cB_Rw, 1, 3, 1, 1)
        self.cB_Rc = QtWidgets.QCheckBox(tabresmatcher)
        self.cB_Rc.setChecked(True)
        self.cB_Rc.setObjectName("cB_Rc")
        self.gL_match_output.addWidget(self.cB_Rc, 4, 3, 1, 1)
        self.vL_match_2.addLayout(self.gL_match_output)
        self.hL_match_sort = QtWidgets.QHBoxLayout()
        self.hL_match_sort.setObjectName("hL_match_sort")
        self.l_match_sort = QtWidgets.QLabel(tabresmatcher)
        self.l_match_sort.setObjectName("l_match_sort")
        self.hL_match_sort.addWidget(self.l_match_sort)
        self.lE_match_sort = QtWidgets.QLineEdit(tabresmatcher)
        self.lE_match_sort.setInputMask("")
        self.lE_match_sort.setMaxLength(32767)
        self.lE_match_sort.setObjectName("lE_match_sort")
        self.hL_match_sort.addWidget(self.lE_match_sort)
        self.vL_match_2.addLayout(self.hL_match_sort)
        spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.vL_match_2.addItem(spacerItem)
        self.hL_match_start = QtWidgets.QHBoxLayout()
        self.hL_match_start.setObjectName("hL_match_start")
        self.pB_match_start = QtWidgets.QPushButton(tabresmatcher)
        self.pB_match_start.setObjectName("pB_match_start")
        self.hL_match_start.addWidget(self.pB_match_start)
        self.l_match_start = QtWidgets.QLabel(tabresmatcher)
        self.l_match_start.setObjectName("l_match_start")
        self.hL_match_start.addWidget(self.l_match_start)
        self.vL_match_2.addLayout(self.hL_match_start)
        self.bar_match = QtWidgets.QProgressBar(tabresmatcher)
        self.bar_match.setProperty("value", 0)
        self.bar_match.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.bar_match.setObjectName("bar_match")
        self.vL_match_2.addWidget(self.bar_match)
        self.horizontalLayout.addLayout(self.vL_match_2)
        self.retranslateUi(tabresmatcher)
        self.cB_threshold.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(tabresmatcher)
    def retranslateUi(self, tabresmatcher):
        """Assign all user-visible (translatable) widget texts."""
        _translate = QtCore.QCoreApplication.translate
        tabresmatcher.setWindowTitle(_translate("tabresmatcher", "Form"))
        self.l_match_res.setText(_translate("tabresmatcher", "RES文件"))
        self.pB_match_choose_res.setText(_translate("tabresmatcher", "选择RES文件"))
        self.l_solve_count.setText(_translate("tabresmatcher", "已选0个"))
        self.pB_pdb.setText(_translate("tabresmatcher", "选择待搜索结构(pdb)"))
        self.l_pdb.setText(_translate("tabresmatcher", "未选择"))
        self.l_old_algorithm.setText(_translate("tabresmatcher", "使用旧算法"))
        self.l_loss_atom.setText(_translate("tabresmatcher", "可损失原子数"))
        self.l_threshold.setText(_translate("tabresmatcher", "汇报阈值基于"))
        self.cB_threshold.setCurrentText(_translate("tabresmatcher", "无"))
        self.cB_threshold.setItemText(0, _translate("tabresmatcher", "无"))
        self.cB_threshold.setItemText(1, _translate("tabresmatcher", "Tm(匹配上次数)"))
        self.cB_threshold.setItemText(2, _translate("tabresmatcher", "Nm(匹配上原子数)"))
        self.cB_threshold.setItemText(3, _translate("tabresmatcher", "Rwm(质量加权匹配比例)"))
        self.cB_threshold.setItemText(4, _translate("tabresmatcher", "Rwe2(电子加权匹配比例)"))
        self.cB_threshold.setItemText(5, _translate("tabresmatcher", "Ram(元素匹配相似度)"))
        self.cB_threshold.setItemText(6, _translate("tabresmatcher", "Rc(坐标匹配相似度)"))
        self.l_dBS_threshold.setText(_translate("tabresmatcher", "汇报阈值"))
        self.l_match_thick.setText(_translate("tabresmatcher", "晶胞加层"))
        self.cB_thick_x.setText(_translate("tabresmatcher", "x"))
        self.cB_thick_y.setText(_translate("tabresmatcher", "y"))
        self.cB_thick_z.setText(_translate("tabresmatcher", "z"))
        self.cB_Ra.setText(_translate("tabresmatcher", "Ra"))
        self.cB_Rb.setText(_translate("tabresmatcher", "Rb"))
        self.l_match_output.setText(_translate("tabresmatcher", "输出指标"))
        self.cB_Nm.setText(_translate("tabresmatcher", "Nm"))
        self.cB_Tm.setText(_translate("tabresmatcher", "Tm"))
        self.cB_R1.setText(_translate("tabresmatcher", "R1"))
        self.cB_Alpha.setText(_translate("tabresmatcher", "Alpha"))
        self.cB_Rweak.setText(_translate("tabresmatcher", "Rweak"))
        self.cB_Rw.setText(_translate("tabresmatcher", "Rw"))
        self.cB_Rc.setText(_translate("tabresmatcher", "Rc"))
        self.l_match_sort.setText(_translate("tabresmatcher", "排序规则"))
        self.lE_match_sort.setText(_translate("tabresmatcher", "-Tm,-Nm"))
        self.pB_match_start.setText(_translate("tabresmatcher", "开始匹配"))
        self.l_match_start.setText(_translate("tabresmatcher", "未开始匹配"))
| [
"PyQt5.QtWidgets.QListView",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtWidgets.QSpinBox",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtWidgets.QDoubleSpinBox",
"PyQt5.QtWidgets.QSpacerItem",
"PyQt5.QtWidgets.QRadioButton",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QFrame",
"PyQt5.QtWidg... | [((535, 571), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['tabresmatcher'], {}), '(tabresmatcher)\n', (556, 571), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((662, 685), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (683, 685), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((765, 796), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (781, 796), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((931, 965), 'PyQt5.QtWidgets.QListView', 'QtWidgets.QListView', (['tabresmatcher'], {}), '(tabresmatcher)\n', (950, 965), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1193, 1216), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (1214, 1216), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1308, 1344), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['tabresmatcher'], {}), '(tabresmatcher)\n', (1329, 1344), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1506, 1537), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (1522, 1537), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1782, 1813), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['tabresmatcher'], {}), '(tabresmatcher)\n', (1798, 1813), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2045, 2068), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (2066, 2068), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2208, 2231), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (2229, 2231), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2310, 2346), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['tabresmatcher'], {}), '(tabresmatcher)\n', (2331, 2346), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2461, 2492), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], 
{}), '(tabresmatcher)\n', (2477, 2492), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2662, 2685), 'PyQt5.QtWidgets.QFormLayout', 'QtWidgets.QFormLayout', ([], {}), '()\n', (2683, 2685), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2769, 2800), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (2785, 2800), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2982, 3013), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (2998, 3013), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3184, 3217), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (3202, 3217), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3390, 3421), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (3406, 3421), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3592, 3626), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (3611, 3626), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4069, 4100), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (4085, 4100), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4284, 4323), 'PyQt5.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (4308, 4323), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4504, 4541), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['tabresmatcher'], {}), '(tabresmatcher)\n', (4526, 4541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4826, 4849), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (4847, 4849), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4947, 4978), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (4963, 4978), False, 
'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5125, 5159), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (5144, 5159), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5297, 5331), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (5316, 5331), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5469, 5503), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (5488, 5503), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5705, 5728), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (5726, 5728), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5812, 5846), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (5831, 5846), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6009, 6043), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (6028, 6043), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6215, 6246), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (6231, 6246), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6400, 6434), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (6419, 6434), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6597, 6631), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (6616, 6631), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6794, 6828), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (6813, 6828), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6994, 7028), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (7013, 7028), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((7206, 7240), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (7225, 7240), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7415, 7449), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (7434, 7449), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7612, 7646), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['tabresmatcher'], {}), '(tabresmatcher)\n', (7631, 7646), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7873, 7896), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (7894, 7896), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7983, 8014), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (7999, 8014), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8156, 8190), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['tabresmatcher'], {}), '(tabresmatcher)\n', (8175, 8190), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8472, 8570), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(20)', '(40)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Expanding'], {}), '(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Expanding)\n', (8493, 8570), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8640, 8663), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (8661, 8663), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8754, 8790), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['tabresmatcher'], {}), '(tabresmatcher)\n', (8775, 8790), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8939, 8970), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['tabresmatcher'], {}), '(tabresmatcher)\n', (8955, 8970), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9167, 9204), 'PyQt5.QtWidgets.QProgressBar', 
'QtWidgets.QProgressBar', (['tabresmatcher'], {}), '(tabresmatcher)\n', (9189, 9204), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9608, 9660), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['tabresmatcher'], {}), '(tabresmatcher)\n', (9645, 9660), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
# -*- coding: utf-8 -*-
"""Export the Bioregistry."""
import click
@click.command()
@click.pass_context
def export(ctx: click.Context):
    """Export the Bioregistry."""
    from .prefix_maps import generate_contexts
    from .rdf_export import export_rdf
    from .sssom_export import export_sssom
    from .tables_export import export_tables
    from .tsv_export import export_tsv
    from .warnings_export import export_warnings
    from .yaml_export import export_yaml

    # Invoke every exporter sub-command, keeping the original order.
    exporters = (
        export_warnings,
        export_rdf,
        export_tsv,
        export_yaml,
        export_sssom,
        export_tables,
        generate_contexts,
    )
    for exporter in exporters:
        ctx.invoke(exporter)


if __name__ == "__main__":
    export()
| [
"click.command"
] | [((72, 87), 'click.command', 'click.command', ([], {}), '()\n', (85, 87), False, 'import click\n')] |
# Copyright (c) 2020-2021, <NAME>
# License: MIT License
from typing import (
TYPE_CHECKING,
List,
Iterable,
Tuple,
Optional,
Dict,
Sequence,
)
import math
import itertools
from ezdxf.math import (
Vec3,
Z_AXIS,
OCS,
Matrix44,
BoundingBox,
ConstructionEllipse,
cubic_bezier_from_ellipse,
Bezier4P,
Bezier3P,
BSpline,
reverse_bezier_curves,
bulge_to_arc,
)
from ezdxf.query import EntityQuery
from .path import Path
from .commands import Command
from . import converter
if TYPE_CHECKING:
from ezdxf.eztypes import Vertex, Layout, EntityQuery
__all__ = [
"bbox",
"fit_paths_into_box",
"transform_paths",
"transform_paths_to_ocs",
"render_lwpolylines",
"render_polylines2d",
"render_polylines3d",
"render_lines",
"render_hatches",
"render_mpolygons",
"render_splines_and_polylines",
"add_bezier4p",
"add_bezier3p",
"add_ellipse",
"add_2d_polyline",
"add_spline",
"to_multi_path",
"single_paths",
]
MAX_DISTANCE = 0.01
MIN_SEGMENTS = 4
G1_TOL = 1e-4
IS_CLOSE_TOL = 1e-10
def to_multi_path(paths: Iterable[Path]) -> Path:
    """Returns a multi-path object from all given paths and their sub-paths.

    .. versionadded:: 0.17

    """
    combined = Path()
    for source in paths:
        combined.extend_multi_path(source)
    return combined
def single_paths(paths: Iterable[Path]) -> Iterable[Path]:
    """Yields all given paths and their sub-paths as single path objects.

    .. versionadded:: 0.17

    """
    for candidate in paths:
        if not candidate.has_sub_paths:
            yield candidate
        else:
            yield from candidate.sub_paths()
def transform_paths(paths: Iterable[Path], m: Matrix44) -> List[Path]:
    """Transform multiple :class:`Path` objects at once by transformation
    matrix `m`. Returns a list of the transformed :class:`Path` objects.

    Implementation note: all paths are flattened into two parallel streams
    (`vertices` and `commands`), all vertices are transformed by a single
    call of :meth:`Matrix44.transform_vertices`, and the paths are rebuilt
    from the transformed vertex stream afterwards.

    Args:
        paths: iterable of :class:`Path` objects
        m: transformation matrix of type :class:`~ezdxf.math.Matrix44`

    """

    def decompose(path: Path):
        # Append the start point and all command vertices of `path` to the
        # shared `vertices` stream; record one command entry per vertex so
        # both streams stay in lockstep (len(cmd) is the vertex count).
        vertices.append(path.start)
        commands.append(Command.START_PATH)
        for cmd in path:
            commands.extend(itertools.repeat(cmd.type, len(cmd)))
            vertices.extend(cmd)

    def rebuild(vertices):
        # localize variables:
        # NOTE: this unpacking relies on the Command enum defining exactly
        # these five members in this order.
        start_path, line_to, curve3_to, curve4_to, move_to = Command
        path = None
        collect = []  # buffers the vertices of multi-vertex commands
        for vertex, cmd in zip(vertices, commands):
            if cmd == start_path:
                # a START_PATH entry finishes the previous path (if any)
                if path is not None:
                    transformed_paths.append(path)
                path = Path(vertex)
            elif cmd == line_to:
                path.line_to(vertex)
            elif cmd == curve3_to:
                # CURVE3_TO carries 2 vertices
                collect.append(vertex)
                if len(collect) == 2:
                    path.curve3_to(collect[0], collect[1])
                    collect.clear()
            elif cmd == curve4_to:
                # CURVE4_TO carries 3 vertices
                collect.append(vertex)
                if len(collect) == 3:
                    path.curve4_to(collect[0], collect[1], collect[2])
                    collect.clear()
            elif cmd == move_to:
                path.move_to(vertex)
        if path is not None:
            # append the last (still open) path
            transformed_paths.append(path)

    # shared state mutated by the closures above:
    vertices = []
    commands = []
    transformed_paths = []
    for path in paths:
        decompose(path)
    if len(commands):
        rebuild(m.transform_vertices(vertices))
    return transformed_paths
def transform_paths_to_ocs(paths: Iterable[Path], ocs: OCS) -> List[Path]:
    """Transform multiple :class:`Path` objects at once from WCS to OCS.
    Returns a list of the transformed :class:`Path` objects.

    Args:
        paths: iterable of :class:`Path` objects
        ocs: OCS transformation of type :class:`~ezdxf.math.OCS`

    """
    # the transposed OCS matrix transforms from WCS into OCS
    matrix = ocs.matrix.copy()
    matrix.transpose()
    return transform_paths(paths, matrix)
def bbox(
    paths: Iterable[Path], flatten=0.01, segments: int = 16
) -> BoundingBox:
    """Returns the :class:`~ezdxf.math.BoundingBox` for the given paths.

    Args:
        paths: iterable of :class:`~ezdxf.path.Path` objects
        flatten: value != 0 for bounding box calculation from the flattened
            path and value == 0 for bounding box from the control vertices.
            Default value is 0.01 as max flattening distance.
        segments: minimal segment count for flattening

    """
    result = BoundingBox()
    for path in paths:
        if not flatten:
            # fast but approximate: control vertices only
            result.extend(path.control_vertices())
        else:
            result.extend(
                path.flattening(distance=abs(flatten), segments=segments)
            )
    return result
def fit_paths_into_box(
    paths: Iterable[Path],
    size: Tuple[float, float, float],
    uniform: bool = True,
    source_box: BoundingBox = None,
) -> List[Path]:
    """Scale the given `paths` to fit into a box of the given `size`,
    so that all path vertices are inside these borders.

    If `source_box` is ``None`` the default source bounding box is
    calculated from the control points of the `paths`.

    `Note:` a target size of 0 in one dimension projects the `paths` into
    the plane of the remaining two dimensions, e.g. a z-size of 0 projects
    into the xy-plane.

    Args:
        paths: iterable of :class:`~ezdxf.path.Path` objects
        size: target box size as tuple of x-, y- and z-size values
        uniform: ``True`` for uniform scaling
        source_box: pass precalculated source bounding box, or ``None`` to
            calculate the default source bounding box from the control vertices

    """
    path_list = list(paths)
    if not path_list:
        return path_list
    src_box = bbox(path_list, flatten=0) if source_box is None else source_box
    if not src_box.has_data or src_box.size == (0, 0, 0):
        # nothing to scale
        return path_list
    target_size = Vec3(size)
    if min(target_size) < 0 or target_size == (0, 0, 0):
        raise ValueError("invalid target size")
    scale_factors = (
        _get_uniform_scaling(src_box.size, target_size)
        if uniform
        else _get_non_uniform_scaling(src_box.size, target_size)
    )
    return transform_paths(path_list, Matrix44.scale(*scale_factors))
def _get_uniform_scaling(current_size: Vec3, target_size: Vec3):
TOL = 1e-6
scale_x = math.inf
if current_size.x > TOL and target_size.x > TOL:
scale_x = target_size.x / current_size.x
scale_y = math.inf
if current_size.y > TOL and target_size.y > TOL:
scale_y = target_size.y / current_size.y
scale_z = math.inf
if current_size.z > TOL and target_size.z > TOL:
scale_z = target_size.z / current_size.z
uniform_scale = min(scale_x, scale_y, scale_z)
if uniform_scale is math.inf:
raise ArithmeticError("internal error")
scale_x = uniform_scale if target_size.x > TOL else 0
scale_y = uniform_scale if target_size.y > TOL else 0
scale_z = uniform_scale if target_size.z > TOL else 0
return scale_x, scale_y, scale_z
def _get_non_uniform_scaling(current_size: Vec3, target_size: Vec3):
TOL = 1e-6
scale_x = 1.0
if current_size.x > TOL:
scale_x = target_size.x / current_size.x
scale_y = 1.0
if current_size.y > TOL:
scale_y = target_size.y / current_size.y
scale_z = 1.0
if current_size.z > TOL:
scale_z = target_size.z / current_size.z
return scale_x, scale_y, scale_z
# Path to entity converter and render utilities:
def render_lwpolylines(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    distance: float = MAX_DISTANCE,
    segments: int = MIN_SEGMENTS,
    extrusion: "Vertex" = Z_AXIS,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as
    :class:`~ezdxf.entities.LWPolyline` entities.

    All vertices are projected onto the plane normal to the given
    `extrusion` vector (default: WCS z-axis); the plane elevation is the
    distance from the WCS origin to the start point of the first path.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        distance: maximum distance, see :meth:`Path.flattening`
        segments: minimum segment count per Bézier curve
        extrusion: extrusion vector for all paths
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.16

    """
    created = list(
        converter.to_lwpolylines(
            paths,
            distance=distance,
            segments=segments,
            extrusion=extrusion,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def render_polylines2d(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    distance: float = 0.01,
    segments: int = 4,
    extrusion: "Vertex" = Z_AXIS,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as 2D
    :class:`~ezdxf.entities.Polyline` entities.

    All vertices are projected onto the plane normal to the given
    `extrusion` vector (default: WCS z-axis); the plane elevation is the
    distance from the WCS origin to the start point of the first path.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        distance: maximum distance, see :meth:`Path.flattening`
        segments: minimum segment count per Bézier curve
        extrusion: extrusion vector for all paths
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.16

    """
    created = list(
        converter.to_polylines2d(
            paths,
            distance=distance,
            segments=segments,
            extrusion=extrusion,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def render_hatches(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    edge_path: bool = True,
    distance: float = MAX_DISTANCE,
    segments: int = MIN_SEGMENTS,
    g1_tol: float = G1_TOL,
    extrusion: "Vertex" = Z_AXIS,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as
    :class:`~ezdxf.entities.Hatch` entities.

    All vertices are projected onto the plane normal to the given
    `extrusion` vector (default: WCS z-axis); the plane elevation is the
    distance from the WCS origin to the start point of the first path.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        edge_path: ``True`` for edge paths build of LINE and SPLINE edges,
            ``False`` for only LWPOLYLINE paths as boundary paths
        distance: maximum distance, see :meth:`Path.flattening`
        segments: minimum segment count per Bézier curve to flatten polyline paths
        g1_tol: tolerance for G1 continuity check to separate SPLINE edges
        extrusion: extrusion vector for all paths
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.16

    """
    created = list(
        converter.to_hatches(
            paths,
            edge_path=edge_path,
            distance=distance,
            segments=segments,
            g1_tol=g1_tol,
            extrusion=extrusion,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def render_mpolygons(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    distance: float = MAX_DISTANCE,
    segments: int = MIN_SEGMENTS,
    extrusion: "Vertex" = Z_AXIS,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as
    :class:`~ezdxf.entities.MPolygon` entities.

    The MPOLYGON entity supports only polyline boundary paths, therefore
    all curves are approximated.

    All vertices are projected onto the plane normal to the given
    `extrusion` vector (default: WCS z-axis); the plane elevation is the
    distance from the WCS origin to the start point of the first path.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        distance: maximum distance, see :meth:`Path.flattening`
        segments: minimum segment count per Bézier curve to flatten polyline paths
        extrusion: extrusion vector for all paths
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.17

    """
    created = list(
        converter.to_mpolygons(
            paths,
            distance=distance,
            segments=segments,
            extrusion=extrusion,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def render_polylines3d(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    distance: float = MAX_DISTANCE,
    segments: int = MIN_SEGMENTS,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as 3D
    :class:`~ezdxf.entities.Polyline` entities.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        distance: maximum distance, see :meth:`Path.flattening`
        segments: minimum segment count per Bézier curve
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.16

    """
    created = list(
        converter.to_polylines3d(
            paths,
            distance=distance,
            segments=segments,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def render_lines(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    distance: float = MAX_DISTANCE,
    segments: int = MIN_SEGMENTS,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as
    :class:`~ezdxf.entities.Line` entities.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        distance: maximum distance, see :meth:`Path.flattening`
        segments: minimum segment count per Bézier curve
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.16

    """
    created = list(
        converter.to_lines(
            paths,
            distance=distance,
            segments=segments,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def render_splines_and_polylines(
    layout: "Layout",
    paths: Iterable[Path],
    *,
    g1_tol: float = G1_TOL,
    dxfattribs: Optional[Dict] = None
) -> EntityQuery:
    """Render the given `paths` into `layout` as :class:`~ezdxf.entities.Spline`
    and 3D :class:`~ezdxf.entities.Polyline` entities.

    Args:
        layout: the modelspace, a paperspace layout or a block definition
        paths: iterable of :class:`Path` objects
        g1_tol: tolerance for G1 continuity check
        dxfattribs: additional DXF attribs

    Returns:
        created entities in an :class:`~ezdxf.query.EntityQuery` object

    .. versionadded:: 0.16

    """
    created = list(
        converter.to_splines_and_polylines(
            paths,
            g1_tol=g1_tol,
            dxfattribs=dxfattribs,
        )
    )
    for entity in created:
        layout.add_entity(entity)
    return EntityQuery(created)
def add_ellipse(
    path: Path, ellipse: ConstructionEllipse, segments=1, reset=True
) -> None:
    """Add an elliptical arc as multiple cubic Bèzier-curves to the given
    `path`.

    Use the :meth:`~ezdxf.math.ConstructionEllipse.from_arc` constructor of
    class :class:`~ezdxf.math.ConstructionEllipse` to add circular arcs.

    The connection point to the given `path` is auto-detected; if neither
    the start- nor the end point of the ellipse is close to the path end
    point, a line from the path end point to the ellipse start point is
    added automatically (see :func:`add_bezier4p`).

    By default the start of an **empty** path is set to the start point of
    the ellipse; set argument `reset` to ``False`` to prevent this behavior.

    Args:
        path: :class:`~ezdxf.path.Path` object
        ellipse: ellipse parameters as :class:`~ezdxf.math.ConstructionEllipse`
            object
        segments: count of Bèzier-curve segments, at least one segment for
            each quarter (pi/2), ``1`` for as few as possible.
        reset: set start point to start of ellipse if path is empty

    """
    if abs(ellipse.param_span) < 1e-9:
        return  # degenerate arc, nothing to render
    if reset and len(path) == 0:
        path.start = ellipse.start_point
    add_bezier4p(path, cubic_bezier_from_ellipse(ellipse, segments))
def add_bezier4p(path: Path, curves: Iterable[Bezier4P]) -> None:
    """Add multiple cubic Bèzier-curves to the given `path`.

    The connection point is auto-detected: if neither the start- nor the
    end point of the curve chain is close to the path end point, a line
    from the path end point to the start point of the first curve is added
    automatically.

    .. versionchanged:: 0.16.2
        add linear Bézier curve segments as LINE_TO commands

    """
    curve_list = list(curves)
    if not curve_list:
        return
    chain_end = curve_list[-1].control_points[-1]
    if path.end.isclose(chain_end):
        # path continues at the end of the curve chain: flip the chain
        curve_list = reverse_bezier_curves(curve_list)
    for curve in curve_list:
        start, ctrl1, ctrl2, end = curve.control_points
        if not start.isclose(path.end):
            path.line_to(start)
        if start.isclose(ctrl1) and end.isclose(ctrl2):
            # degenerate (linear) curve: add as LINE_TO command
            path.line_to(end)
        else:
            path.curve4_to(end, ctrl1, ctrl2)
def add_bezier3p(path: Path, curves: Iterable[Bezier3P]) -> None:
    """Add multiple quadratic Bèzier-curves to the given `path`.

    The connection point is auto-detected: if neither the start- nor the
    end point of the curve chain is close to the path end point, a line
    from the path end point to the start point of the first curve is added
    automatically.

    .. versionchanged:: 0.16.2
        add linear Bézier curve segments as LINE_TO commands

    """
    curve_list = list(curves)
    if not curve_list:
        return
    chain_end = curve_list[-1].control_points[-1]
    if path.end.isclose(chain_end):
        # path continues at the end of the curve chain: flip the chain
        curve_list = reverse_bezier_curves(curve_list)
    for curve in curve_list:
        start, ctrl, end = curve.control_points
        if not start.isclose(path.end, abs_tol=0):  # only rel_tol=1e-9
            path.line_to(start)
        # degenerate (linear) curves as LINE_TO commands, use only rel_tol=1e-9
        if start.isclose(ctrl, abs_tol=0) or end.isclose(ctrl, abs_tol=0):
            path.line_to(end)
        else:
            path.curve3_to(end, ctrl)
def add_2d_polyline(
    path: Path,
    points: Iterable[Sequence[float]],
    close: bool,
    ocs: OCS,
    elevation: float,
) -> None:
    """Internal API to add 2D polylines which may include bulges to an
    **empty** path.

    Args:
        path: **empty** :class:`Path` object, raises :class:`ValueError`
            if the path is not empty
        points: iterable of (x, y, bulge) values
        close: ``True`` to close the polyline from the last to the first
            vertex
        ocs: :class:`OCS` transformation, applied at the end
        elevation: OCS elevation, passed to :meth:`Path.to_wcs`
    """

    def bulge_to(p1: Vec3, p2: Vec3, bulge: float):
        # Render the bulge (circular arc) from p1 to p2 as cubic Bézier
        # curves and append them to `path`.
        if p1.isclose(p2, rel_tol=IS_CLOSE_TOL, abs_tol=0):
            return  # degenerate arc, no extent
        center, start_angle, end_angle, radius = bulge_to_arc(p1, p2, bulge)
        ellipse = ConstructionEllipse.from_arc(
            center,
            radius,
            Z_AXIS,
            math.degrees(start_angle),
            math.degrees(end_angle),
        )
        curves = list(cubic_bezier_from_ellipse(ellipse))
        curve0 = curves[0]
        cp0 = curve0.control_points[0]
        if cp0.isclose(p2, rel_tol=IS_CLOSE_TOL, abs_tol=0):
            # the curves start at p2: reverse them so they continue the
            # path at p1
            curves = reverse_bezier_curves(curves)
        add_bezier4p(path, curves)

    if len(path):
        raise ValueError("Requires an empty path.")
    prev_point = None  # previously processed polyline vertex
    prev_bulge = 0  # bulge value attached to the previous vertex
    for x, y, bulge in points:
        # Bulge values near 0 but != 0 cause crashes! #329
        if abs(bulge) < 1e-6:
            bulge = 0
        point = Vec3(x, y)
        if prev_point is None:
            # the first vertex defines the path start point
            path.start = point
            prev_point = point
            prev_bulge = bulge
            continue
        if prev_bulge:
            bulge_to(prev_point, point, prev_bulge)
        else:
            path.line_to(point)
        prev_point = point
        prev_bulge = bulge
    if close and not path.start.isclose(
        path.end, rel_tol=IS_CLOSE_TOL, abs_tol=0
    ):
        # close the polyline, honoring the bulge value of the last vertex
        if prev_bulge:
            bulge_to(path.end, path.start, prev_bulge)
        else:
            path.line_to(path.start)
    if ocs.transform or elevation:
        # transform all path vertices from OCS to WCS at once
        path.to_wcs(ocs, elevation)
def add_spline(path: Path, spline: BSpline, level=4, reset=True) -> None:
    """Add a B-spline as multiple cubic Bèzier-curves.

    Non-rational B-splines of 3rd degree get a perfect conversion to cubic
    Bézier curves with a minimal count of curve segments; all other
    B-splines require much more curve segments for approximation.

    The connection point to the given `path` is auto-detected; if neither
    the start- nor the end point of the B-spline is close to the path end
    point, a connecting line is added automatically (see
    :meth:`add_bezier4p`).

    By default the start of an **empty** path is set to the start point of
    the spline; set argument `reset` to ``False`` to prevent this behavior.

    Args:
        path: :class:`~ezdxf.path.Path` object
        spline: B-spline parameters as :class:`~ezdxf.math.BSpline` object
        level: subdivision level of approximation segments
        reset: set start point to start of spline if path is empty

    """
    if reset and len(path) == 0:
        path.start = spline.point(0)
    if spline.degree == 3 and spline.is_clamped and not spline.is_rational:
        # exact conversion of clamped non-rational cubic B-splines
        bezier_curves = [
            Bezier4P(cpts) for cpts in spline.bezier_decomposition()
        ]
    else:
        bezier_curves = spline.cubic_bezier_approximation(level=level)
    add_bezier4p(path, bezier_curves)
| [
"ezdxf.eztypes.EntityQuery",
"ezdxf.math.bulge_to_arc",
"math.degrees",
"ezdxf.math.reverse_bezier_curves",
"ezdxf.math.BoundingBox",
"ezdxf.math.cubic_bezier_from_ellipse",
"ezdxf.math.Bezier4P",
"ezdxf.math.Vec3",
"ezdxf.math.Matrix44.scale"
] | [((4463, 4476), 'ezdxf.math.BoundingBox', 'BoundingBox', ([], {}), '()\n', (4474, 4476), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((5929, 5939), 'ezdxf.math.Vec3', 'Vec3', (['size'], {}), '(size)\n', (5933, 5939), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((6230, 6256), 'ezdxf.math.Matrix44.scale', 'Matrix44.scale', (['sx', 'sy', 'sz'], {}), '(sx, sy, sz)\n', (6244, 6256), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((8959, 8983), 'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['lwpolylines'], {}), '(lwpolylines)\n', (8970, 8983), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((10366, 10390), 'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['polylines2d'], {}), '(polylines2d)\n', (10377, 10390), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((12119, 12139), 'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['hatches'], {}), '(hatches)\n', (12130, 12139), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((13646, 13667), 'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['polygons'], {}), '(polygons)\n', (13657, 13667), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((14673, 14697), 'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['polylines3d'], {}), '(polylines3d)\n', (14684, 14697), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((15659, 15677), 'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['lines'], {}), '(lines)\n', (15670, 15677), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((16575, 16596), 
'ezdxf.eztypes.EntityQuery', 'EntityQuery', (['entities'], {}), '(entities)\n', (16586, 16596), False, 'from ezdxf.eztypes import Vertex, Layout, EntityQuery\n'), ((17877, 17921), 'ezdxf.math.cubic_bezier_from_ellipse', 'cubic_bezier_from_ellipse', (['ellipse', 'segments'], {}), '(ellipse, segments)\n', (17902, 17921), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((18604, 18633), 'ezdxf.math.reverse_bezier_curves', 'reverse_bezier_curves', (['curves'], {}), '(curves)\n', (18625, 18633), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((19677, 19706), 'ezdxf.math.reverse_bezier_curves', 'reverse_bezier_curves', (['curves'], {}), '(curves)\n', (19698, 19706), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((20546, 20573), 'ezdxf.math.bulge_to_arc', 'bulge_to_arc', (['p1', 'p2', 'bulge'], {}), '(p1, p2, bulge)\n', (20558, 20573), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((21310, 21320), 'ezdxf.math.Vec3', 'Vec3', (['x', 'y'], {}), '(x, y)\n', (21314, 21320), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((20694, 20719), 'math.degrees', 'math.degrees', (['start_angle'], {}), '(start_angle)\n', (20706, 20719), False, 'import math\n'), ((20733, 20756), 'math.degrees', 'math.degrees', (['end_angle'], {}), '(end_angle)\n', (20745, 20756), False, 'import 
math\n'), ((20790, 20824), 'ezdxf.math.cubic_bezier_from_ellipse', 'cubic_bezier_from_ellipse', (['ellipse'], {}), '(ellipse)\n', (20815, 20824), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((20974, 21003), 'ezdxf.math.reverse_bezier_curves', 'reverse_bezier_curves', (['curves'], {}), '(curves)\n', (20995, 21003), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n'), ((23155, 23171), 'ezdxf.math.Bezier4P', 'Bezier4P', (['points'], {}), '(points)\n', (23163, 23171), False, 'from ezdxf.math import Vec3, Z_AXIS, OCS, Matrix44, BoundingBox, ConstructionEllipse, cubic_bezier_from_ellipse, Bezier4P, Bezier3P, BSpline, reverse_bezier_curves, bulge_to_arc\n')] |
import model
from model import whole_foods_sale
from model import aldis_au_sale
from model import aldis_us_sale
from model import aldis_uk_sale
def go(inputs, store_name):
if store_name == 'WholeFoods':
final_df = whole_foods_sale.items_on_sale()
elif store_name == 'Aldi AU':
final_df = aldis_au_sale.items_on_sale()
elif store_name == 'Aldi US':
final_df = aldis_us_sale.items_on_sale()
elif store_name == 'Aldi UK':
final_df = aldis_uk_sale.items_on_sale()
return final_df.to_html()
| [
"model.aldis_au_sale.items_on_sale",
"model.aldis_us_sale.items_on_sale",
"model.aldis_uk_sale.items_on_sale",
"model.whole_foods_sale.items_on_sale"
] | [((219, 251), 'model.whole_foods_sale.items_on_sale', 'whole_foods_sale.items_on_sale', ([], {}), '()\n', (249, 251), False, 'from model import whole_foods_sale\n'), ((296, 325), 'model.aldis_au_sale.items_on_sale', 'aldis_au_sale.items_on_sale', ([], {}), '()\n', (323, 325), False, 'from model import aldis_au_sale\n'), ((370, 399), 'model.aldis_us_sale.items_on_sale', 'aldis_us_sale.items_on_sale', ([], {}), '()\n', (397, 399), False, 'from model import aldis_us_sale\n'), ((444, 473), 'model.aldis_uk_sale.items_on_sale', 'aldis_uk_sale.items_on_sale', ([], {}), '()\n', (471, 473), False, 'from model import aldis_uk_sale\n')] |
import json
import logging
import mqtt.callbacks as mqtt_util
import paho.mqtt.client as mqtt
logger = logging.getLogger('pyledserver.PyLEDClient')
logger.setLevel(logging.DEBUG)
class PyLEDClient(mqtt.Client):
def __init__(self, client_id, credentials, mqtt_topic, led_strip):
logger.debug('Creating client: {}'.format(client_id))
# create and associate callbacks
super().__init__(client_id=client_id, clean_session=False)
self.callback = mqtt_util.CallbackContainer(led_strip)
self.on_message = self.callback.on_message
self.on_publish = self.callback.on_publish
self.on_subscribe = self.callback.on_subscribe
self.on_connect = self.callback.on_connect
self.on_disconnect = self.callback.on_disconnect
# assign user credentials to client
self.username_pw_set(credentials.mqtt_username, credentials.mqtt_password)
# connect to MQTT server and subscribe to topic
logger.info('Connecting to server {}:{}'.format(credentials.mqtt_url, credentials.mqtt_port))
self.connect(credentials.mqtt_url, int(credentials.mqtt_port))
self.subscribe(mqtt_topic, 0)
success = {'message': 'gradient',
'args': {}}
# publish connection message to ensure successful connection
self.publish(mqtt_topic, json.dumps(success, ensure_ascii=True))
@property
def is_connected(self):
return self.callback.is_connected | [
"logging.getLogger",
"mqtt.callbacks.CallbackContainer",
"json.dumps"
] | [((105, 149), 'logging.getLogger', 'logging.getLogger', (['"""pyledserver.PyLEDClient"""'], {}), "('pyledserver.PyLEDClient')\n", (122, 149), False, 'import logging\n'), ((480, 518), 'mqtt.callbacks.CallbackContainer', 'mqtt_util.CallbackContainer', (['led_strip'], {}), '(led_strip)\n', (507, 518), True, 'import mqtt.callbacks as mqtt_util\n'), ((1357, 1395), 'json.dumps', 'json.dumps', (['success'], {'ensure_ascii': '(True)'}), '(success, ensure_ascii=True)\n', (1367, 1395), False, 'import json\n')] |
from pilco.policies.policy import Policy
import tensorflow as tf
class TransformedPolicy(Policy):
def __init__(self,
policy,
transform,
name="sine_bounded_action_policy",
**kwargs):
super().__init__(state_dim=policy.state_dim,
action_dim=policy.action_dim,
name=name,
dtype=policy.dtype,
**kwargs)
self.policy = policy
self.transform = transform
@property
def parameters(self):
return self.policy.parameters
@property
def action_indices(self):
return tf.range(self.state_dim, self.state_dim + self.action_dim)
def reset(self):
self.policy.reset()
def match_moments(self, state_loc, state_cov, joint_result=True):
# We first match the moments through the base policy
loc, cov = self.policy.match_moments(state_loc, state_cov)
loc, cov = self.transform.match_moments(loc=loc,
cov=cov,
indices=self.action_indices)
return loc, cov
def call(self, state):
full_vec = tf.concat([state, [self.policy(state)]], axis=0)
return self.transform(full_vec, indices=self.action_indices)[self.state_dim:]
| [
"tensorflow.range"
] | [((687, 745), 'tensorflow.range', 'tf.range', (['self.state_dim', '(self.state_dim + self.action_dim)'], {}), '(self.state_dim, self.state_dim + self.action_dim)\n', (695, 745), True, 'import tensorflow as tf\n')] |
# Copyright 2021 The MathWorks, Inc.
"""Tests for functions in matlab_desktop_proxy/util/mwi_validators.py
"""
import pytest, os, tempfile, socket, random
import matlab_desktop_proxy
from matlab_desktop_proxy.util import mwi_validators
from matlab_desktop_proxy import mwi_environment_variables as mwi_env
from matlab_desktop_proxy.util.mwi_exceptions import NetworkLicensingError
def test_validate_mlm_license_file_for_invalid_string(monkeypatch):
"""Check if validator raises expected exception"""
# Delete the environment variables if they do exist
env_name = mwi_env.get_env_name_network_license_manager()
invalid_string = "/Invalid/String/"
monkeypatch.setenv(env_name, invalid_string)
nlm_conn_str = os.getenv(env_name)
with pytest.raises(NetworkLicensingError) as e_info:
conn_str = mwi_validators.validate_mlm_license_file(nlm_conn_str)
assert invalid_string in str(e_info.value)
def test_validate_mlm_license_file_for_valid_server_syntax(monkeypatch):
"""Check if port@hostname passes validation"""
env_name = mwi_env.get_env_name_network_license_manager()
license_manager_address = "1234@1.2_any-alphanumeric"
monkeypatch.setenv(env_name, license_manager_address)
conn_str = mwi_validators.validate_mlm_license_file(os.getenv(env_name))
assert conn_str == license_manager_address
def test_validate_mlm_license_file_for_valid_server_triad_syntax(monkeypatch):
"""Check if port@hostname passes validation"""
env_name = mwi_env.get_env_name_network_license_manager()
license_manager_address = (
"1234@1.2_any-alphanumeric,1234@1.2_any-alphanumeric,1234@1.2_any-alphanumeric"
)
monkeypatch.setenv(env_name, license_manager_address)
conn_str = mwi_validators.validate_mlm_license_file(os.getenv(env_name))
assert conn_str == license_manager_address
def test_validate_mlm_license_file_None():
"""Test to check if validate_mlm_license_file() returns None when nlm_conn_str is None."""
assert mwi_validators.validate_mlm_license_file(None) is None
def test_get_with_environment_variables(monkeypatch):
"""Check if path to license file passes validation"""
env_name = mwi_env.get_env_name_network_license_manager()
fd, path = tempfile.mkstemp()
monkeypatch.setenv(env_name, path)
try:
conn_str = mwi_validators.validate_mlm_license_file(os.getenv(env_name))
assert conn_str == str(path)
finally:
os.remove(path)
def test_validate_app_port_is_free_false():
"""Test to validate if supplied app port is free"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("", 0))
port = s.getsockname()[1]
with pytest.raises(SystemExit) as e:
mwi_validators.validate_app_port_is_free(port)
assert e.value.code == 1
s.close()
def test_validate_app_port_is_free_true():
"""Test to validate if supplied app port is free"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("", 0))
port = s.getsockname()[1]
s.close()
assert mwi_validators.validate_app_port_is_free(port) == port
def test_validate_app_port_None():
"""Tests if validated app port is None when MWI_APP_PORT env variable is not set.
If validated app port is None implies a random free port will be used at launch.
"""
assert mwi_validators.validate_app_port_is_free(None) is None
def test_validate_env_config_true():
"""Validate the default config which is used in this package."""
config = mwi_validators.validate_env_config(
matlab_desktop_proxy.get_default_config_name()
)
assert isinstance(config, dict)
def test_validate_env_config_false():
"""Passing a non existent config should raise SystemExit exception"""
with pytest.raises(SystemExit) as e:
config = mwi_validators.validate_env_config(str(random.randint(10, 100)))
assert e.value.code == 1
def test_get_configs():
"""Test to check if atleast 1 env config is discovered.
When this package is installed, we will have a default config.
"""
configs = mwi_validators.__get_configs()
assert len(configs.keys()) >= 1
@pytest.mark.parametrize(
"base_url, validated_base_url",
[
("", ""),
("/bla", "/bla"),
("/bla/", "/bla"),
],
ids=[
"Launch integration at root",
"Launch at custom path",
"Launch at custom with suffix: /",
],
)
def test_validate_base_url(base_url, validated_base_url):
"""Tests multiple base_urls which will beparsed and validated successfully.
Args:
base_url (str): base_url
validated_base_url (str): validated base_url
"""
assert mwi_validators.validate_base_url(base_url) == validated_base_url
def test_validate_base_url_no_prefix_error():
"""Test to check base_url will throw error when a prefix / is not present in it.[summary]"""
with pytest.raises(SystemExit) as e:
mwi_validators.validate_base_url("matlab/")
assert e.value.code == 1
| [
"socket.socket",
"matlab_desktop_proxy.mwi_environment_variables.get_env_name_network_license_manager",
"os.getenv",
"matlab_desktop_proxy.util.mwi_validators.validate_mlm_license_file",
"matlab_desktop_proxy.get_default_config_name",
"pytest.mark.parametrize",
"pytest.raises",
"matlab_desktop_proxy.u... | [((4168, 4376), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""base_url, validated_base_url"""', "[('', ''), ('/bla', '/bla'), ('/bla/', '/bla')]"], {'ids': "['Launch integration at root', 'Launch at custom path',\n 'Launch at custom with suffix: /']"}), "('base_url, validated_base_url', [('', ''), ('/bla',\n '/bla'), ('/bla/', '/bla')], ids=['Launch integration at root',\n 'Launch at custom path', 'Launch at custom with suffix: /'])\n", (4191, 4376), False, 'import pytest, os, tempfile, socket, random\n'), ((578, 624), 'matlab_desktop_proxy.mwi_environment_variables.get_env_name_network_license_manager', 'mwi_env.get_env_name_network_license_manager', ([], {}), '()\n', (622, 624), True, 'from matlab_desktop_proxy import mwi_environment_variables as mwi_env\n'), ((733, 752), 'os.getenv', 'os.getenv', (['env_name'], {}), '(env_name)\n', (742, 752), False, 'import pytest, os, tempfile, socket, random\n'), ((1072, 1118), 'matlab_desktop_proxy.mwi_environment_variables.get_env_name_network_license_manager', 'mwi_env.get_env_name_network_license_manager', ([], {}), '()\n', (1116, 1118), True, 'from matlab_desktop_proxy import mwi_environment_variables as mwi_env\n'), ((1506, 1552), 'matlab_desktop_proxy.mwi_environment_variables.get_env_name_network_license_manager', 'mwi_env.get_env_name_network_license_manager', ([], {}), '()\n', (1550, 1552), True, 'from matlab_desktop_proxy import mwi_environment_variables as mwi_env\n'), ((2196, 2242), 'matlab_desktop_proxy.mwi_environment_variables.get_env_name_network_license_manager', 'mwi_env.get_env_name_network_license_manager', ([], {}), '()\n', (2240, 2242), True, 'from matlab_desktop_proxy import mwi_environment_variables as mwi_env\n'), ((2258, 2276), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (2274, 2276), False, 'import pytest, os, tempfile, socket, random\n'), ((2590, 2639), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), 
'(socket.AF_INET, socket.SOCK_STREAM)\n', (2603, 2639), False, 'import pytest, os, tempfile, socket, random\n'), ((2938, 2987), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (2951, 2987), False, 'import pytest, os, tempfile, socket, random\n'), ((4097, 4127), 'matlab_desktop_proxy.util.mwi_validators.__get_configs', 'mwi_validators.__get_configs', ([], {}), '()\n', (4125, 4127), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((762, 798), 'pytest.raises', 'pytest.raises', (['NetworkLicensingError'], {}), '(NetworkLicensingError)\n', (775, 798), False, 'import pytest, os, tempfile, socket, random\n'), ((829, 883), 'matlab_desktop_proxy.util.mwi_validators.validate_mlm_license_file', 'mwi_validators.validate_mlm_license_file', (['nlm_conn_str'], {}), '(nlm_conn_str)\n', (869, 883), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((1291, 1310), 'os.getenv', 'os.getenv', (['env_name'], {}), '(env_name)\n', (1300, 1310), False, 'import pytest, os, tempfile, socket, random\n'), ((1793, 1812), 'os.getenv', 'os.getenv', (['env_name'], {}), '(env_name)\n', (1802, 1812), False, 'import pytest, os, tempfile, socket, random\n'), ((2012, 2058), 'matlab_desktop_proxy.util.mwi_validators.validate_mlm_license_file', 'mwi_validators.validate_mlm_license_file', (['None'], {}), '(None)\n', (2052, 2058), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((2464, 2479), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (2473, 2479), False, 'import pytest, os, tempfile, socket, random\n'), ((2699, 2724), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (2712, 2724), False, 'import pytest, os, tempfile, socket, random\n'), ((2739, 2785), 'matlab_desktop_proxy.util.mwi_validators.validate_app_port_is_free', 'mwi_validators.validate_app_port_is_free', (['port'], {}), '(port)\n', (2779, 2785), False, 'from matlab_desktop_proxy.util 
import mwi_validators\n'), ((3063, 3109), 'matlab_desktop_proxy.util.mwi_validators.validate_app_port_is_free', 'mwi_validators.validate_app_port_is_free', (['port'], {}), '(port)\n', (3103, 3109), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((3345, 3391), 'matlab_desktop_proxy.util.mwi_validators.validate_app_port_is_free', 'mwi_validators.validate_app_port_is_free', (['None'], {}), '(None)\n', (3385, 3391), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((3565, 3611), 'matlab_desktop_proxy.get_default_config_name', 'matlab_desktop_proxy.get_default_config_name', ([], {}), '()\n', (3609, 3611), False, 'import matlab_desktop_proxy\n'), ((3778, 3803), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (3791, 3803), False, 'import pytest, os, tempfile, socket, random\n'), ((4700, 4742), 'matlab_desktop_proxy.util.mwi_validators.validate_base_url', 'mwi_validators.validate_base_url', (['base_url'], {}), '(base_url)\n', (4732, 4742), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((4919, 4944), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (4932, 4944), False, 'import pytest, os, tempfile, socket, random\n'), ((4959, 5002), 'matlab_desktop_proxy.util.mwi_validators.validate_base_url', 'mwi_validators.validate_base_url', (['"""matlab/"""'], {}), "('matlab/')\n", (4991, 5002), False, 'from matlab_desktop_proxy.util import mwi_validators\n'), ((2385, 2404), 'os.getenv', 'os.getenv', (['env_name'], {}), '(env_name)\n', (2394, 2404), False, 'import pytest, os, tempfile, socket, random\n'), ((3866, 3889), 'random.randint', 'random.randint', (['(10)', '(100)'], {}), '(10, 100)\n', (3880, 3889), False, 'import pytest, os, tempfile, socket, random\n')] |