code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import os
import tkinter as tk
from .textpad import TextPad
from ..tab_view import AbstractTabView
class TabbedTextpad(AbstractTabView):
    """Tab view whose tabs each host a TextPad widget inside a tk.Frame.

    New tabs are created programmatically via :meth:`add_tab` or with
    Ctrl+N; right-clicking a tab label opens a Save / Save As popup menu.
    """

    NEW_TAB_BASENAME = "new%d"

    def __init__(self, parent, *args, **kwargs):
        super().__init__(parent, *args, **kwargs)
        self.set_options()
        self.add_tab()

    def add_tab(self, event=None, widget=None, text=None, **kwargs):
        """Add a tab; with no explicit widget, create a default TextPad tab.

        :return: ``(new_tab, frame, pad)`` for a default tab, otherwise
                 whatever the parent class's ``add_tab`` returns.
        """
        if widget is None:
            # BUG FIX: the original passed text=None here, silently dropping
            # any caller-supplied tab title.
            return self._add_default_tab(text=text, **kwargs)
        return super().add_tab(widget=widget, text=text)

    def bind_keys(self):
        super().bind_keys()
        for key in ['<Control-n>', '<Control-N>']:
            self.bind(key, self.add_tab)

    def bind_child_keys(self, child):
        # Mirror bind_keys() on the child widget so the shortcut also works
        # while the child has keyboard focus.
        for key in ['<Control-n>', '<Control-N>']:
            child.bind(key, self.add_tab)

    def get_widget(self, index, widget='!textpad'):
        return super().get_widget(index, widget=widget)

    def on_right_click(self, event=None):
        """Open the save popup when a tab label hosting a textpad is clicked."""
        if event.widget.identify(event.x, event.y) == 'label':
            index = event.widget.index('@%d,%d' % (event.x, event.y))
            frame = self.get_container(index)
            if '!textpad' in frame.children:
                popup = _TextpadTabPopup(self, index)
                popup.tk_popup(event.x_root, event.y_root)

    def save_tab(self, index):
        """Save the pad at *index* and retitle the tab with the file name."""
        pad = self.get_widget(index)
        # NOTE(review): assumes 'save_file' always returns a path string even
        # when the user cancels the dialog -- confirm; os.path.split(None)
        # would raise here.
        path = pad.functions['save_file']()
        self.tab(self.tabs()[index], text=os.path.split(path)[-1])

    def save_tab_as(self, index):
        """Save-as the pad at *index* and retitle the tab with the file name."""
        pad = self.get_widget(index)
        path = pad.functions['save_file_as']()
        self.tab(self.tabs()[index], text=os.path.split(path)[-1])

    def set_options(self):
        # Disable tear-off entries for all menus (affects the tab popup).
        self.option_add('*tearOff', False)

    def _add_default_tab(self, text=None, frame_kwargs=None,
                         textpad_kwargs=None, tab_kwargs=None):
        """Create a Frame + TextPad pair and register it as a new tab.

        BUG FIX: the original used mutable ``{}`` defaults, which are shared
        across calls; ``None`` sentinels are used instead.

        :return: ``(new_tab, frame, pad)``.
        """
        child = tk.Frame(self, **(frame_kwargs or {}))
        new_tab = super().add_tab(widget=child, text=text,
                                  tab_kwargs=tab_kwargs or {})
        pad = TextPad(child, **(textpad_kwargs or {}))
        pad.pack(expand=True, fill=tk.BOTH)
        self.bind_child_keys(pad)
        return new_tab, child, pad
class _TextpadTabPopup(tk.Menu):
    """Right-click context menu offering Save / Save As for one tab."""

    def __init__(self, parent, tab_index):
        super().__init__(parent)
        self.parent = parent
        self.tab_index = tab_index
        # Build the two menu entries from a small table instead of two calls.
        for entry_label, entry_handler in (("Save", self.save_tab),
                                           ("Save As", self.save_tab_as)):
            self.add_command(label=entry_label, command=entry_handler)

    def save_tab(self, event=None):
        """Delegate saving of the associated tab to the parent view."""
        self.parent.save_tab(self.tab_index)

    def save_tab_as(self, event=None):
        """Delegate save-as of the associated tab to the parent view."""
        self.parent.save_tab_as(self.tab_index)
if __name__ == "__main__":
    # Manual smoke test: show a window holding the tabbed pad with one
    # extra tab added.
    app_root = tk.Tk()
    pad_view = TabbedTextpad(app_root)
    pad_view.pack(expand=1, fill='both')
    pad_view.add_tab()
    app_root.mainloop()
| [
"tkinter.Tk",
"tkinter.Frame",
"os.path.split"
] | [((2799, 2806), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (2804, 2806), True, 'import tkinter as tk\n'), ((1946, 1976), 'tkinter.Frame', 'tk.Frame', (['self'], {}), '(self, **frame_kwargs)\n', (1954, 1976), True, 'import tkinter as tk\n'), ((1533, 1552), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (1546, 1552), False, 'import os\n'), ((1727, 1746), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (1740, 1746), False, 'import os\n')] |
from utils.data_set.classes_correlation_between_data_sets import ClassesCorrelationBetweenDataSets
from utils.data_set.initial_data_set import InitialDataSet
from graphics.input.class_identifiers_input_f import ClassIdentifiersInputF
from graphics.widgets.qna_f import QnAF
from tkinter import filedialog
from tkinter import messagebox
import file_experts.file_expert as f_expert
import constants.input_constants as const
import tkinter as tk
class InitialDataSetPickerF(tk.Frame):
    """
    - Use to get input regarding the relations between the new data set's
      class identifiers and the identifiers from the initial data set.
    """
    def __init__(self,
                 parent,
                 classes: [],
                 picker_id: int,
                 valid_input_eh,
                 invalid_input_eh,
                 disabled=False):
        """
        :param parent: Parent.
        :param classes: List with the class names.
        :param picker_id: Numeric id appended to the form's question title.
        :param valid_input_eh: Method that will be called when the form is
                               completed.
        :param invalid_input_eh: Method that will be called after each user
                                 input if the form is not/no longer
                                 completed.
        :param disabled: - Default: False;
                         - If True all the widgets will be disabled.
        """
        tk.Frame.__init__(self,
                          parent,
                          padx=const.IDSP_FRAME_PADX,
                          pady=const.IDSP_FRAME_PADY,
                          relief='sunken',
                          bd=5)
        self._invalid_input_eh = invalid_input_eh
        self._valid_input_eh = valid_input_eh
        self._picker_id = picker_id
        self._classes = classes
        # True only after the child identifiers form reports completion.
        self._valid = False
        self._var_initial_data_set = tk.IntVar()
        # Remembers which data-set button was chosen last, so a cancelled
        # selection can be rolled back.
        self._previous_selection = const.IDSP_NO_SELECTION_VAL
        self._data_set_details = InitialDataSet()
        self._create_frames()
        self._place_widgets()
        if disabled:
            self.disable()
    #########################################################################
    # Widget creation and placement
    def _create_frames(self):
        """ Creates the frames."""
        question = self._create_question_text()
        self._qna_initial_data_set = QnAF(
            parent=self,
            frame_variable=self._var_initial_data_set,
            question_text=question,
            answers_text=[
                const.IDSP_CIFAR10_BTN_TEXT,
                const.IDSP_IMAGENET_BTN_TEXT,
                const.IDSP_FROM_IMAGES_BTN_TEXT
            ],
            answers_eh=[
                self._cifar_10_eh,
                self._imagenet_eh,
                self._from_images_eh,
            ]
        )
        self._cii_class_input = ClassIdentifiersInputF(
            parent=self,
            valid_input_eh=self._valid_cii_form_eh,
            invalid_input_eh=self._invalid_cii_form_eh,
            disabled=False
        )
        self._cii_class_input.update_classes(class_names=self._classes)
        self._lbl_instruction = tk.Label(
            self,
            padx=const.IDSP_WIDGETS_PADX,
            pady=const.IDSP_WIDGETS_PADY,
            text=const.IDSP_INSTRUCTION_TEXT,
            font=const.IDSP_FONT
        )
    def _place_widgets(self):
        """ Places the widgets."""
        self._qna_initial_data_set.pack(side='top',
                                        fill='both',
                                        expand=True)
        self._cii_class_input.pack(side='top',
                                   fill='both',
                                   expand=True)
        self._lbl_instruction.pack(side='top',
                                   fill='both',
                                   expand=True)
    #########################################################################
    # Event handling
    def _cifar_10_eh(self):
        """
        - Called when the user selects cifar 10 as the initial data set.
        - Asks which Cifar10 splits (train/test) to use, then records the
          choice and exposes the Cifar10 identifiers to the child form.
        """
        self._data_set_details.cifar10_train_ds = messagebox.askyesno(
            title=const.IDSP_CIFAR10_TRAIN_DS_TITLE,
            message=const.IDSP_CIFAR10_TRAIN_DS_MSG
        )
        if self._data_set_details.cifar10_train_ds:
            self._data_set_details.cifar10_test_ds = messagebox.askyesno(
                title=const.IDSP_CIFAR10_TEST_DS_TITLE,
                message=const.IDSP_CIFAR10_TEST_DS_MSG
            )
        else:
            # If the train split was declined, the test split is forced on so
            # at least one split is always selected.
            self._data_set_details.cifar10_test_ds = True
        self._data_set_details.cifar10 = True
        self._data_set_details.imagenet = False
        self._data_set_details.images = False
        self._data_set_details.identifiers = const.IDSP_CIFAR10_IDENTIFIERS
        self._cii_class_input.update_data_set_identifier(
            self._data_set_details.identifiers)
        # Sets the previous selection value to the current button.
        self._previous_selection = const.IDSP_CIFAR10_VAL
        # Until the user selects the identifiers for each class the form is
        # invalid.
        self._invalid_cii_form_eh()
        self._lbl_instruction.forget()
    def _imagenet_eh(self):
        """
        - Called when the user selects ImageNet as the initial data set.
        - Not yet implemented: shows an info box and restores the previous
          button state.
        """
        # TODO
        messagebox.showinfo(
            title=const.IDSP_IMAGENET_TITLE,
            message=const.IDSP_IMAGENET_MSG
        )
        if self._previous_selection != const.IDSP_NO_SELECTION_VAL:
            self._qna_initial_data_set.select_button(
                self._previous_selection)
        else:
            self._qna_initial_data_set.deselect_button(
                const.IDSP_IMAGENET_VAL)
        """
        self._data_set_details.cifar10 = False
        self._data_set_details.imagenet = True
        self._data_set_details.images = False
        """
    def _from_images_eh(self):
        """
        - Called when the user wants to use folders with images as initial
          data set.
        """
        # NOTE(review): the title/initialdir constants look swapped
        # (title gets ..._INITIAL_DIR, initialdir gets ..._TITLE) -- confirm.
        self._data_set_details.data_set_location = filedialog.askdirectory(
            title=const.IDSP_FROM_IMAGES_INITIAL_DIR,
            initialdir=const.IDSP_FROM_IMAGES_TITLE
        )
        selected_path = self._data_set_details.data_set_location
        # Checks if the user has selected a directory.
        if isinstance(selected_path, str) and selected_path != '':
            # Checks if the received path is a directory.
            if f_expert.is_directory(selected_path):
                # Gets a list with all the visible subdirectories.
                self._data_set_details.identifiers = \
                    f_expert.get_directories(selected_path)
                # Updates the identifier list
                self._cii_class_input.update_data_set_identifier(
                    self._data_set_details.identifiers)
                # Sets the previous selection value to the current button.
                self._previous_selection = const.IDSP_FROM_IMAGES_VAL
                # Saving details about the initial data set.
                self._data_set_details.cifar10 = False
                self._data_set_details.imagenet = False
                self._data_set_details.images = True
                # Until the user selects the identifiers for each class the
                # form is invalid.
                self._invalid_cii_form_eh()
                self._lbl_instruction.forget()
        else:
            # If the user did not select a directory.
            if self._previous_selection != const.IDSP_NO_SELECTION_VAL:
                # If another initial data set was selected before, that
                # button is selected again.
                self._qna_initial_data_set.select_button(
                    self._previous_selection)
            else:
                # If no initial data set was selected before, the current
                # button is deselected.
                self._qna_initial_data_set.deselect_button(
                    const.IDSP_FROM_IMAGES_VAL)
    def _valid_cii_form_eh(self):
        """
        - Called when the form is completed.
        - Stores the child form's correlation input and locks the widgets.
        """
        self._data_set_details.class_correlation = \
            self._cii_class_input.get_input()
        self._valid = True
        self._valid_input_eh()
        self.disable()
    def _invalid_cii_form_eh(self):
        """
        - Called when the form is not completed.
        - Resets the correlation to an empty object and notifies the owner.
        """
        self._data_set_details.class_correlation = \
            ClassesCorrelationBetweenDataSets()
        self._valid = False
        self._invalid_input_eh()
    def _create_question_text(self):
        """
        - Creates form title.
        """
        return const.IDSP_QUESTION_TEXT + ' #' + str(self._picker_id)
    #########################################################################
    # Public methods
    def update_classes(
            self,
            classes: []):
        """Replaces the class-name list and propagates it to the child form."""
        self._classes = classes
        self._cii_class_input.update_classes(classes)
    def get_input(self):
        """
        :return: - InitialDataSet instance describing the selected initial
                   data set, its identifiers and the class correlation
                   gathered so far.
        """
        return self._data_set_details
    def update_picker_id(
            self,
            new_id):
        """Updates the picker id and refreshes the displayed question title."""
        self._picker_id = new_id
        self._qna_initial_data_set.update_question_text(self._create_question_text())
    def form_is_valid(self):
        """
        :return: - True if the form is valid.
                 - False otherwise.
        """
        return self._valid
    # ~~~~~~~~~~~~~~~~~~~~~Enable / disable the widget.~~~~~~~~~~~~~~~~~~~~~~
    def enable(self):
        """ Enables all the widgets."""
        # A completed form stays locked: re-enabling is only allowed while
        # the form is still invalid.
        if not self._valid:
            self._lbl_instruction.config(state='normal')
            self._qna_initial_data_set.enable()
            self._cii_class_input.enable()
        else:
            self.disable()
    def disable(self):
        """ Disables all the widgets."""
        self._cii_class_input.disable()
        self._qna_initial_data_set.disable()
        self._lbl_instruction.config(state='disabled')
    # ~~~~~~~~~~~~~~~~~~~~~Enable / disable Cifar10 button.~~~~~~~~~~~~~~~~~~
    def enable_cifar10(self):
        """ Enables Cifar10 button."""
        # 0 is the index of the "Cifar10" button.
        self._qna_initial_data_set.enable_button(0)
    def disable_cifar10(self):
        """ Disables Cifar10 button."""
        # 0 is the index of the "Cifar10" button.
        self._qna_initial_data_set.disable_button(0)
    # ~~~~~~~~~~~~~~~~~~~~~Enable / disable ImageNet button.~~~~~~~~~~~~~~~~~
    def enable_imagenet(self):
        """ Enables ImageNet button."""
        # 1 is the index of the "ImageNet" button.
        self._qna_initial_data_set.enable_button(1)
    def disable_imagenet(self):
        """ Disables ImageNet button."""
        # 1 is the index of the "ImageNet" button.
        self._qna_initial_data_set.disable_button(1)
    # ~~~~~~~~~~~~~~~~~~~~~Enable / disable from images button.~~~~~~~~~~~~~~
    def enable_from_images(self):
        """ Enables From images button."""
        # 2 is the index of the "From images" button.
        self._qna_initial_data_set.enable_button(2)
    def disable_from_images(self):
        """ Disables From images button."""
        # 2 is the index of the "From images" button.
        self._qna_initial_data_set.disable_button(2)
    #########################################################################
| [
"tkinter.IntVar",
"tkinter.filedialog.askdirectory",
"tkinter.Frame.__init__",
"graphics.widgets.qna_f.QnAF",
"tkinter.messagebox.askyesno",
"graphics.input.class_identifiers_input_f.ClassIdentifiersInputF",
"utils.data_set.initial_data_set.InitialDataSet",
"utils.data_set.classes_correlation_between_... | [((1437, 1552), 'tkinter.Frame.__init__', 'tk.Frame.__init__', (['self', 'parent'], {'padx': 'const.IDSP_FRAME_PADX', 'pady': 'const.IDSP_FRAME_PADY', 'relief': '"""sunken"""', 'bd': '(5)'}), "(self, parent, padx=const.IDSP_FRAME_PADX, pady=const.\n IDSP_FRAME_PADY, relief='sunken', bd=5)\n", (1454, 1552), True, 'import tkinter as tk\n'), ((1910, 1921), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (1919, 1921), True, 'import tkinter as tk\n'), ((2019, 2035), 'utils.data_set.initial_data_set.InitialDataSet', 'InitialDataSet', ([], {}), '()\n', (2033, 2035), False, 'from utils.data_set.initial_data_set import InitialDataSet\n'), ((2414, 2693), 'graphics.widgets.qna_f.QnAF', 'QnAF', ([], {'parent': 'self', 'frame_variable': 'self._var_initial_data_set', 'question_text': 'question', 'answers_text': '[const.IDSP_CIFAR10_BTN_TEXT, const.IDSP_IMAGENET_BTN_TEXT, const.\n IDSP_FROM_IMAGES_BTN_TEXT]', 'answers_eh': '[self._cifar_10_eh, self._imagenet_eh, self._from_images_eh]'}), '(parent=self, frame_variable=self._var_initial_data_set, question_text=\n question, answers_text=[const.IDSP_CIFAR10_BTN_TEXT, const.\n IDSP_IMAGENET_BTN_TEXT, const.IDSP_FROM_IMAGES_BTN_TEXT], answers_eh=[\n self._cifar_10_eh, self._imagenet_eh, self._from_images_eh])\n', (2418, 2693), False, 'from graphics.widgets.qna_f import QnAF\n'), ((2910, 3049), 'graphics.input.class_identifiers_input_f.ClassIdentifiersInputF', 'ClassIdentifiersInputF', ([], {'parent': 'self', 'valid_input_eh': 'self._valid_cii_form_eh', 'invalid_input_eh': 'self._invalid_cii_form_eh', 'disabled': '(False)'}), '(parent=self, valid_input_eh=self._valid_cii_form_eh,\n invalid_input_eh=self._invalid_cii_form_eh, disabled=False)\n', (2932, 3049), False, 'from graphics.input.class_identifiers_input_f import ClassIdentifiersInputF\n'), ((3212, 3346), 'tkinter.Label', 'tk.Label', (['self'], {'padx': 'const.IDSP_WIDGETS_PADX', 'pady': 'const.IDSP_WIDGETS_PADY', 'text': 
'const.IDSP_INSTRUCTION_TEXT', 'font': 'const.IDSP_FONT'}), '(self, padx=const.IDSP_WIDGETS_PADX, pady=const.IDSP_WIDGETS_PADY,\n text=const.IDSP_INSTRUCTION_TEXT, font=const.IDSP_FONT)\n', (3220, 3346), True, 'import tkinter as tk\n'), ((4203, 4309), 'tkinter.messagebox.askyesno', 'messagebox.askyesno', ([], {'title': 'const.IDSP_CIFAR10_TRAIN_DS_TITLE', 'message': 'const.IDSP_CIFAR10_TRAIN_DS_MSG'}), '(title=const.IDSP_CIFAR10_TRAIN_DS_TITLE, message=const.\n IDSP_CIFAR10_TRAIN_DS_MSG)\n', (4222, 4309), False, 'from tkinter import messagebox\n'), ((5476, 5566), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', ([], {'title': 'const.IDSP_IMAGENET_TITLE', 'message': 'const.IDSP_IMAGENET_MSG'}), '(title=const.IDSP_IMAGENET_TITLE, message=const.\n IDSP_IMAGENET_MSG)\n', (5495, 5566), False, 'from tkinter import messagebox\n'), ((6239, 6349), 'tkinter.filedialog.askdirectory', 'filedialog.askdirectory', ([], {'title': 'const.IDSP_FROM_IMAGES_INITIAL_DIR', 'initialdir': 'const.IDSP_FROM_IMAGES_TITLE'}), '(title=const.IDSP_FROM_IMAGES_INITIAL_DIR,\n initialdir=const.IDSP_FROM_IMAGES_TITLE)\n', (6262, 6349), False, 'from tkinter import filedialog\n'), ((8670, 8705), 'utils.data_set.classes_correlation_between_data_sets.ClassesCorrelationBetweenDataSets', 'ClassesCorrelationBetweenDataSets', ([], {}), '()\n', (8703, 8705), False, 'from utils.data_set.classes_correlation_between_data_sets import ClassesCorrelationBetweenDataSets\n'), ((4445, 4549), 'tkinter.messagebox.askyesno', 'messagebox.askyesno', ([], {'title': 'const.IDSP_CIFAR10_TEST_DS_TITLE', 'message': 'const.IDSP_CIFAR10_TEST_DS_MSG'}), '(title=const.IDSP_CIFAR10_TEST_DS_TITLE, message=const.\n IDSP_CIFAR10_TEST_DS_MSG)\n', (4464, 4549), False, 'from tkinter import messagebox\n'), ((6642, 6678), 'file_experts.file_expert.is_directory', 'f_expert.is_directory', (['selected_path'], {}), '(selected_path)\n', (6663, 6678), True, 'import file_experts.file_expert as f_expert\n'), ((6822, 6861), 
'file_experts.file_expert.get_directories', 'f_expert.get_directories', (['selected_path'], {}), '(selected_path)\n', (6846, 6861), True, 'import file_experts.file_expert as f_expert\n')] |
from .npd_wraper import npd
from datetime import datetime
import pandas as pd
class field(npd):
    """Accessors for the NPD REST 'field' datasets, returned as DataFrames."""

    def get_field_production_monthly(self):
        """Return monthly field production, indexed by a month-start date."""
        dataset_url = self.npd_path + "field/production-monthly-by-field"
        frame = self._get_dataframe_data(dataset_url)
        # Build one timestamp per row from its year/month columns (day = 1).
        frame["Date"] = frame.apply(
            lambda row: datetime(int(row['prfYear']), int(row['prfMonth']), 1),
            axis=1)
        frame["Date"] = pd.to_datetime(frame.Date)
        frame.set_index("Date", inplace=True)
        wanted = ["prfInformationCarrier", "prfPrdOilNetMillSm3",
                  "prfPrdGasNetBillSm3", "prfPrdNGLNetMillSm3",
                  "prfPrdCondensateNetMillSm3", "prfPrdOeNetMillSm3",
                  "prfPrdProducedWaterInFieldMillSm3"]
        return frame[wanted]

    def get_field_production_yearly(self):
        """Return yearly production data, indexed by year."""
        dataset_url = self.npd_path + "field/production-yearly-by-field"
        return self._get_dataframe_data(dataset_url).set_index('prfYear')

    def get_field_cumulative_production(self):
        """Return cumulative (total) yearly production, indexed by year."""
        dataset_url = self.npd_path + "field/production-yearly-total"
        return self._get_dataframe_data(dataset_url).set_index('prfYear')

    def get_field_description(self):
        """Return the field description dataset."""
        return self._get_dataframe_data(self.npd_path + "field/description")

    def get_field_inplace_volume(self):
        """Return in-place volumes per field."""
        return self._get_dataframe_data(self.npd_path + "field/in-place-volumes")

    def get_field_licenses(self):
        """Return field licensees."""
        return self._get_dataframe_data(self.npd_path + "field/licensees")

    def get_field_operators(self):
        """Return field operators."""
        return self._get_dataframe_data(self.npd_path + "field/operators")

    def get_field_overview(self):
        """Return the field overview dataset."""
        return self._get_dataframe_data(self.npd_path + "field/overview")

    def get_field_owners(self):
        """Return field owners."""
        return self._get_dataframe_data(self.npd_path + "field/owners")

    def get_field_reserves(self):
        """Return field reserves."""
        return self._get_dataframe_data(self.npd_path + "field/reserves")

    def get_field_status(self):
        """Return field status records."""
        return self._get_dataframe_data(self.npd_path + "field/status")

    def get_field_investments(self):
        """Return yearly investments per field."""
        return self._get_dataframe_data(self.npd_path + "investments/yearly-by-field")
| [
"pandas.to_datetime"
] | [((442, 465), 'pandas.to_datetime', 'pd.to_datetime', (['df.Date'], {}), '(df.Date)\n', (456, 465), True, 'import pandas as pd\n')] |
import unittest
from strblackout import blackout
class TestBlackout(unittest.TestCase):
    """Unit tests for strblackout.blackout()."""

    def test_blackout_default(self):
        # No masking requested: the input comes back unchanged.
        result = blackout("123456789")
        self.assertEqual(result, "123456789")

    def test_blackout_left(self):
        # Mask the first five characters.
        result = blackout("123456789", left=5)
        self.assertEqual(result, "*****6789")

    def test_blackout_right(self):
        # Mask the last three characters.
        result = blackout("123456789", right=3)
        self.assertEqual(result, "123456***")

    def test_blackout_replacement(self):
        # A custom replacement character is honored.
        result = blackout("123456789", left=3, replacement="x")
        self.assertEqual(result, "xxx456789")

    def test_blackout_short_text(self):
        # Requested mask widths exceeding the text length mask everything.
        result = blackout("123", left=10, right=20)
        self.assertEqual(result, "***")
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| [
"unittest.main",
"strblackout.blackout"
] | [((668, 683), 'unittest.main', 'unittest.main', ([], {}), '()\n', (681, 683), False, 'import unittest\n'), ((152, 173), 'strblackout.blackout', 'blackout', (['"""123456789"""'], {}), "('123456789')\n", (160, 173), False, 'from strblackout import blackout\n'), ((248, 277), 'strblackout.blackout', 'blackout', (['"""123456789"""'], {'left': '(5)'}), "('123456789', left=5)\n", (256, 277), False, 'from strblackout import blackout\n'), ((353, 383), 'strblackout.blackout', 'blackout', (['"""123456789"""'], {'right': '(3)'}), "('123456789', right=3)\n", (361, 383), False, 'from strblackout import blackout\n'), ((465, 511), 'strblackout.blackout', 'blackout', (['"""123456789"""'], {'left': '(3)', 'replacement': '"""x"""'}), "('123456789', left=3, replacement='x')\n", (473, 511), False, 'from strblackout import blackout\n'), ((592, 626), 'strblackout.blackout', 'blackout', (['"""123"""'], {'left': '(10)', 'right': '(20)'}), "('123', left=10, right=20)\n", (600, 626), False, 'from strblackout import blackout\n')] |
# -*- coding: utf-8 -*-
import unittest
import os
import pickle
import pandas as pd
import numpy as np
from td_query import ROOT_PATH
from td_query.data_manipulate import data_manipulate_instance as instance
from teradata import UdaExec
class TestDataManipulate(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Open the shared Teradata connection once before any test runs."""
        print("**************************************** setUpClass ****************************************")
        instance.init()
        print(instance.teradata)
    @classmethod
    def tearDownClass(cls):
        """Runs once after the whole suite; only logs a marker."""
        print("************************************** tearDownClass ***************************************")
    def setUp(self):
        # Per-test marker only; no fixtures are created here.
        print("****** setUp *******")
    def tearDown(self):
        # Per-test marker only; nothing to clean up.
        print("***** tearDown *****")
    def _example(self):
        """Disabled by the leading underscore: fetch and print a sample query.

        The commented-out lines regenerate the local pickle fixture.
        """
        df = instance.query_sample()
        # with open(ROOT_PATH + '/external/df_dispatch_bna.pickle', 'wb') as f: # save
        #     pickle.dump(df, f)
        print(df)
def _calculate(self):
def percent(x, y):
return round(x/y*100, 2)
total = 115554
print(
percent(2877, total),
percent(3909, total),
percent(23030, total),
percent(18840, total),
percent(66898, total),
)
    def _query(self):
        """Disabled check: fetch and print the top 10 rows of the bad table."""
        query = '''select top 10 * from pp_scratch_risk.ms_auto_trend_us_bad;'''
        df = instance.query(query)
        print(df)
def _query_table_schema(self):
dest_db = "pp_scratch_risk"
dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
result_cursor = instance.teradata.execute("show select * from {}.{};".format(dest_db, dest_table))
last_row = result_cursor.fetchall()
print(last_row)
def _query_table_top_rows(self):
table = "pp_scratch_risk.ms_auto_trend_us_bad"
df = instance.query_table_top_rows(table)
print(df)
def _insert_to_table(self):
cols = ['id', 'name', 'phone']
data = [
(1, "jy", "1888"),
(2, "jy", "1999"),
]
df = pd.DataFrame.from_records(data, columns=cols)
df_name_is_jy = df[df['name']=='jy']
df = df.append([df_name_is_jy]*2, ignore_index=True)
print(pd.concat([df_name_is_jy]*2, ignore_index=True))
# print(df)
print("-------------")
database = "pp_scratch_risk"
table = "jy_test"
instance.insert_to_table(df, database, table)
query = '''select * from {}.{};'''.format(database, table)
result_df = instance.query(query)
print(result_df)
    def _create_table_from_src_table(self):
        """Disabled check: clone a table definition from a source table."""
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us2_1_3'
        dest_db = "pp_scratch_risk"
        dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
        instance.create_table_from_src_table_schema(src_db, src_table, dest_db, dest_table)
    def _drop_table(self):
        """Disabled check: drop the cloned scratch table."""
        dest_db = "pp_scratch_risk"
        dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
        instance.drop_table(dest_db, dest_table)
    def _transalte_100_63_22_14_1(self):
        """Disabled check: translate a 2-rule hyperloop set to SQL and print it.

        NOTE(review): 'transalte' is a typo for 'translate'; the method names
        are kept unchanged so manual invocation keeps working.
        """
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C')",
            "(SELLER_CONSUMER_SEG == 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (amt2 != 'c-1h') & (amt2 != 'e-<50')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
    def _transalte_30_20_3_4_1(self):
        """Disabled check: translate a 4-rule hyperloop set to SQL and print it."""
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial') & (IS_ULP_TRANS_T_F >= 0.5)",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial') & (IS_ULP_TRANS_T_F < 0.5)",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
    def _transalte_30_20_3_4_1_nloss(self):
        """Disabled check: translate the 18-rule net-loss hyperloop set to SQL."""
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (amt2 != 'e-<50') & (dc_string != '10002') & (amt2 != 'd-50') & (SUB_FLOW != 'MS Mobile Money Request - Invoicing') & (SUB_FLOW != 'MS Money Request') & (SUB_FLOW != 'MS Mobile Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (amt2 != 'c-1h') & (dc_string == '10010')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial') & (SELLER_CONSUMER_SEG == 'C') & (amt2 != 'c-1h') & (amt2 != 'd-50') & (amt2 != 'e-<50')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '10008') & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial') & (SUB_FLOW == 'MS Send Money Internal') & (dc_string == '10002')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial') & (IS_ULP_TRANS_T_F >= 0.5)",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (amt2 != 'e-<50') & (dc_string != '10002') & (amt2 != 'd-50') & (SUB_FLOW != 'MS Mobile Money Request - Invoicing') & (SUB_FLOW != 'MS Money Request') & (SUB_FLOW != 'MS Mobile Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (amt2 != 'c-1h') & (dc_string != '10010') & (SUB_FLOW == 'MS Send Money Internal') & (amt2 != 'a-1k') & (RCVR_CNTRY_CODE != 'CA ') & (SELLER_SEG != '04 YS')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '10008') & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 != 'a-1k') & (IS_ULP_TRANS_T_F >= 0.5) & (amt2 == 'b-5h')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial') & (SELLER_CONSUMER_SEG != 'C') & (SUB_FLOW == 'MS Send Money Internal') & (amt2 == 'a-1k')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '10008') & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial') & (SUB_FLOW == 'MS Send Money Internal') & (dc_string != '10002') & (SELLER_SEG == '04 YS') & (dc_string == '10010')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '10008') & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial') & (SUB_FLOW == 'MS Send Money Internal') & (dc_string != '10002') & (SELLER_SEG == '04 YS') & (dc_string != '10010')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (amt2 != 'e-<50') & (dc_string != '10002') & (amt2 != 'd-50') & (SUB_FLOW != 'MS Mobile Money Request - Invoicing') & (SUB_FLOW != 'MS Money Request') & (SUB_FLOW != 'MS Mobile Money Request') & (IS_ULP_TRANS_T_F < 0.5) & (amt2 != 'c-1h') & (dc_string == '10003')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial') & (IS_ULP_TRANS_T_F < 0.5)",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (amt2 != 'e-<50') & (dc_string == '10002') & (amt2 == 'a-1k')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW == 'MS Money Request - Invoicing')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (amt2 != 'e-<50') & (dc_string != '10002') & (amt2 != 'd-50') & (SUB_FLOW == 'MS Mobile Money Request - Invoicing') & (amt2 != 'c-1h') & (RCVR_CNTRY_CODE != 'CA ') & (dc_string != '<missing>') & (amt2 == 'a-1k')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (amt2 != 'e-<50') & (dc_string != '10002') & (amt2 != 'd-50') & (SUB_FLOW != 'MS Mobile Money Request - Invoicing') & (SUB_FLOW != 'MS Money Request') & (SUB_FLOW != 'MS Mobile Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (amt2 != 'c-1h') & (dc_string != '10010') & (SUB_FLOW == 'MS Send Money Internal') & (amt2 == 'a-1k') & (RCVR_CNTRY_CODE != 'CA ') & (dc_string != '<missing>')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
    def _transalte_164_89_5_8_1(self):
        """Disabled check: translate a 3-rule hyperloop set to SQL and print it."""
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (dc_string != '10005') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (dc_string != '10005') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG != '04 YS')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (IS_ULP_TRANS_T_F >= 0.5) & (amt2 != 'c-1h') & (amt2 != 'e-<50')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
    def _transalte_1000_500_100_50_1(self):
        """Disabled check: translate a 6-rule hyperloop set to SQL and print it."""
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (dc_string != '12123') & (SELLER_CONSUMER_SEG == 'C') & (amt2 == 'a-1k') & (SELLER_SEG == '04 YS') & (dc_string == '10008') & (SUB_FLOW == 'MS Send Money Internal')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (dc_string != '12123') & (SELLER_CONSUMER_SEG == 'C') & (amt2 == 'a-1k') & (SELLER_SEG == '04 YS') & (dc_string == '10008') & (SUB_FLOW != 'MS Send Money Internal')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW == 'MS Mobile Cons App Send Money - Commercial') & (IS_ULP_TRANS_T_F >= 0.5)",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (dc_string != '12123') & (SELLER_CONSUMER_SEG == 'C') & (amt2 == 'a-1k') & (SELLER_SEG != '04 YS')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (dc_string != '12123') & (SELLER_CONSUMER_SEG == 'C') & (amt2 == 'a-1k') & (SELLER_SEG == '04 YS') & (dc_string != '10008')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (SUB_FLOW != 'MS Money Request - Invoicing') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
    def _transalte_1_1_1_1_1(self):
        """Disabled check: translate a 4-rule hyperloop set to SQL and print it."""
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (SELLER_SEG == '04 YS') & (amt2 != 'e-<50') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'd-50')",
            "(SELLER_CONSUMER_SEG == 'Y') & (SELLER_SEG == '04 YS') & (amt2 != 'e-<50') & (SUB_FLOW == 'MS Send Money Internal') & (IS_ULP_TRANS_T_F >= 0.5) & (RCVR_CNTRY_CODE != 'CA ') & (dc_string == '10008')",
            "(SELLER_CONSUMER_SEG != 'Y') & (SELLER_SEG != '04 YS') & (amt2 != 'e-<50') & (SUB_FLOW != 'MS Mobile Cons App Send Money - Commercial') & (amt2 == 'a-1k') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>')",
            "(SELLER_CONSUMER_SEG == 'Y') & (SELLER_SEG != '04 YS') & (SUB_FLOW != 'MS Send Money Internal') & (SUB_FLOW == 'MS Mobile Money Request - Invoicing API')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
    def _transalte_mix(self):
        """Disabled check: translate a mixed 5-rule hyperloop set to SQL."""
        rules = [
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (dc_string != '10005') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS') & (dc_string == '10008') & (SUB_FLOW == 'MS Send Money Internal')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (dc_string != '10005') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS') & (dc_string == '10008') & (SUB_FLOW != 'MS Send Money Internal')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (dc_string != '10005') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG != '04 YS')",
            "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (dc_string != '10005') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS') & (dc_string != '10008')",
            "(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (IS_ULP_TRANS_T_F >= 0.5) & (amt2 != 'c-1h') & (amt2 != 'e-<50')",
        ]
        result = instance.translate_hyperloop_rules_to_sql(rules)
        print(result)
def _transalte_tpv(self):
rules = [
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS') & (SUB_FLOW == 'MS Send Money Internal') & (dc_string == '10008')",
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG == '04 YS') & (SUB_FLOW != 'MS Send Money Internal')",
"(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (SUB_FLOW != 'MS Mobile Money Request') & (SUB_FLOW != 'MS Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10002') & (amt2 != 'c-1h')",
"(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (SUB_FLOW != 'MS Mobile Money Request') & (SUB_FLOW != 'MS Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '10002') & (amt2 != 'c-1h') & (SUB_FLOW == 'MS Send Money Internal') & (RCVR_CNTRY_CODE != 'CA ') & (dc_string != '12122') & (dc_string != '10010') & (SELLER_SEG != '04 YS') & (amt2 != 'e-<50')",
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C') & (SELLER_SEG != '04 YS')",
"(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (IS_ULP_TRANS_T_F >= 0.5)"
]
result = instance.translate_hyperloop_rules_to_sql(rules)
print(result)
def _transalte_tpv2(self):
rules = [
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (amt2 != 'e-<50') & (SELLER_CONSUMER_SEG == 'C') & (dc_string != '12123') & (amt2 == 'a-1k') & (SELLER_SEG == '04 YS') & (SUB_FLOW == 'MS Send Money Internal') & (dc_string == '10008')",
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (amt2 != 'e-<50') & (SELLER_CONSUMER_SEG == 'C') & (dc_string != '12123') & (amt2 == 'a-1k') & (SELLER_SEG == '04 YS') & (SUB_FLOW != 'MS Send Money Internal')",
"(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 != 'c-1h') & (dc_string != '10005') & (amt2 != 'd-50') & (amt2 != 'e-<50') & (SELLER_CONSUMER_SEG == 'C') & (dc_string != '12123') & (amt2 == 'a-1k') & (SELLER_SEG != '04 YS')",
"(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (SUB_FLOW != 'MS Mobile Money Request') & (SUB_FLOW != 'MS Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10002') & (amt2 != 'c-1h')",
"(SELLER_CONSUMER_SEG == 'Y') & (dc_string != '10008') & (SUB_FLOW != 'MS Mobile Money Request') & (SUB_FLOW != 'MS Money Request') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '10002') & (amt2 != 'c-1h') & (SUB_FLOW == 'MS Send Money Internal') & (RCVR_CNTRY_CODE != 'CA ') & (dc_string != '12122') & (dc_string != '10010') & (SELLER_SEG != '04 YS') & (amt2 != 'e-<50')",
"(SELLER_CONSUMER_SEG == 'Y') & (dc_string == '10008') & (amt2 == 'a-1k') & (IS_ULP_TRANS_T_F >= 0.5)"
]
result = instance.translate_hyperloop_rules_to_sql(rules)
print(result)
def _duplicate_rows_to_new_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
dest_db = "pp_scratch_risk"
weight_a = 900
weight_b = 400
weight_c = 9
weight_d = 16
weight_e = 1
dest_table = "ms_auto_trend_us2_1_3_{}_{}_{}_{}_{}".format(weight_a, weight_b, weight_c, weight_d, weight_e)
instance.duplicate_rows_to_new_table(src_db, src_table, dest_db, dest_table, weight_a, weight_b, weight_c, weight_d, weight_e)
def _duplicate_rows_from_bad_and_sample_from_good_into_new_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us'
dest_db = "pp_scratch_risk"
bad_scale = 1
good_scale = 3
weight_a = 52
weight_b = 16
weight_c = 23
weight_d = 5
weight_e = 4
dest_table = "ms_auto_trend_us_{}_{}__{}_{}_{}_{}_{}_v2".format(bad_scale, good_scale, weight_a, weight_b, weight_c, weight_d, weight_e)
instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table(src_db, src_table, dest_db, dest_table,
bad_scale, good_scale,
weight_a, weight_b, weight_c, weight_d, weight_e)
def _generate_hl_job_json(self):
training_table = "ms_auto_trend_us2_1_3"
testing_table = "ms_auto_trend_us_t"
instance.generate_hl_job_json(training_table, testing_table, template_name='hl_job_template_na.json')
def _add_weight_col_to_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
# weight_a = 0.312
# weight_b = 0.140
# weight_c = 0.011
# weight_d = 0.011
# weight_e = 0.001
weight_a = 10 * 30
weight_b = 8 * 20
weight_c = 4.6 * 3
weight_d = 3.7 * 4
weight_e = 1 * 1
instance.add_weight_col_to_table(src_db, src_table, weight_a, weight_b, weight_c, weight_d, weight_e)
def _update_weight_col_in_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
src_col = 'PMT_USD_AMT'
instance.update_weight_col_in_table(src_db, src_table, src_col)
def _update_custom_weight_col_in_table(self):
src_db = "pp_scratch_risk"
src_table = 'ms_auto_trend_us2_1_3'
src_col = 'PMT_USD_AMT'
instance.update_custom_weight_col_in_table(src_db, src_table, src_col) | [
"pandas.DataFrame.from_records",
"td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql",
"td_query.data_manipulate.data_manipulate_instance.generate_hl_job_json",
"td_query.data_manipulate.data_manipulate_instance.create_table_from_src_table_schema",
"td_query.data_manipulate.d... | [((445, 460), 'td_query.data_manipulate.data_manipulate_instance.init', 'instance.init', ([], {}), '()\n', (458, 460), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((811, 834), 'td_query.data_manipulate.data_manipulate_instance.query_sample', 'instance.query_sample', ([], {}), '()\n', (832, 834), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((1402, 1423), 'td_query.data_manipulate.data_manipulate_instance.query', 'instance.query', (['query'], {}), '(query)\n', (1416, 1423), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((1854, 1890), 'td_query.data_manipulate.data_manipulate_instance.query_table_top_rows', 'instance.query_table_top_rows', (['table'], {}), '(table)\n', (1883, 1890), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((2083, 2128), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['data'], {'columns': 'cols'}), '(data, columns=cols)\n', (2108, 2128), True, 'import pandas as pd\n'), ((2420, 2465), 'td_query.data_manipulate.data_manipulate_instance.insert_to_table', 'instance.insert_to_table', (['df', 'database', 'table'], {}), '(df, database, table)\n', (2444, 2465), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((2553, 2574), 'td_query.data_manipulate.data_manipulate_instance.query', 'instance.query', (['query'], {}), '(query)\n', (2567, 2574), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((2827, 2914), 'td_query.data_manipulate.data_manipulate_instance.create_table_from_src_table_schema', 'instance.create_table_from_src_table_schema', (['src_db', 'src_table', 'dest_db', 'dest_table'], {}), '(src_db, src_table, dest_db,\n dest_table)\n', (2870, 2914), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((3042, 3082), 
'td_query.data_manipulate.data_manipulate_instance.drop_table', 'instance.drop_table', (['dest_db', 'dest_table'], {}), '(dest_db, dest_table)\n', (3061, 3082), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((3457, 3505), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (3498, 3505), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((4406, 4454), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (4447, 4454), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((9326, 9374), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (9367, 9374), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((10022, 10070), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (10063, 10070), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((11820, 11868), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (11861, 11868), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((12748, 12796), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (12789, 12796), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((13996, 14044), 
'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (14037, 14044), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((15498, 15546), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (15539, 15546), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((17325, 17373), 'td_query.data_manipulate.data_manipulate_instance.translate_hyperloop_rules_to_sql', 'instance.translate_hyperloop_rules_to_sql', (['rules'], {}), '(rules)\n', (17366, 17373), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((17791, 17921), 'td_query.data_manipulate.data_manipulate_instance.duplicate_rows_to_new_table', 'instance.duplicate_rows_to_new_table', (['src_db', 'src_table', 'dest_db', 'dest_table', 'weight_a', 'weight_b', 'weight_c', 'weight_d', 'weight_e'], {}), '(src_db, src_table, dest_db, dest_table,\n weight_a, weight_b, weight_c, weight_d, weight_e)\n', (17827, 17921), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((18412, 18601), 'td_query.data_manipulate.data_manipulate_instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table', 'instance.duplicate_rows_from_bad_and_sample_from_good_into_new_table', (['src_db', 'src_table', 'dest_db', 'dest_table', 'bad_scale', 'good_scale', 'weight_a', 'weight_b', 'weight_c', 'weight_d', 'weight_e'], {}), '(src_db,\n src_table, dest_db, dest_table, bad_scale, good_scale, weight_a,\n weight_b, weight_c, weight_d, weight_e)\n', (18480, 18601), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((18887, 18993), 'td_query.data_manipulate.data_manipulate_instance.generate_hl_job_json', 'instance.generate_hl_job_json', (['training_table', 
'testing_table'], {'template_name': '"""hl_job_template_na.json"""'}), "(training_table, testing_table, template_name=\n 'hl_job_template_na.json')\n", (18916, 18993), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((19384, 19489), 'td_query.data_manipulate.data_manipulate_instance.add_weight_col_to_table', 'instance.add_weight_col_to_table', (['src_db', 'src_table', 'weight_a', 'weight_b', 'weight_c', 'weight_d', 'weight_e'], {}), '(src_db, src_table, weight_a, weight_b,\n weight_c, weight_d, weight_e)\n', (19416, 19489), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((19649, 19712), 'td_query.data_manipulate.data_manipulate_instance.update_weight_col_in_table', 'instance.update_weight_col_in_table', (['src_db', 'src_table', 'src_col'], {}), '(src_db, src_table, src_col)\n', (19684, 19712), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((19883, 19953), 'td_query.data_manipulate.data_manipulate_instance.update_custom_weight_col_in_table', 'instance.update_custom_weight_col_in_table', (['src_db', 'src_table', 'src_col'], {}), '(src_db, src_table, src_col)\n', (19925, 19953), True, 'from td_query.data_manipulate import data_manipulate_instance as instance\n'), ((2249, 2298), 'pandas.concat', 'pd.concat', (['([df_name_is_jy] * 2)'], {'ignore_index': '(True)'}), '([df_name_is_jy] * 2, ignore_index=True)\n', (2258, 2298), True, 'import pandas as pd\n')] |
import csv

# Map from ticker to its raw ad-expenditure CSV line.  The line keeps its
# trailing newline so it can terminate the merged output row.
adex_info = {}
# Map from site URL to ticker.
url_to_ticker = {}
HEADER = "Url,Date,PageviewsPerMillion,PageviewsPerUser,Rank,ReachPerMillion,gvkey,datadate,fyear,tic,conm,curcd,revt,sale,xad,exch\n"

# Populate adex_info.  DictReader consumes the header line itself, so parsed
# data row i (0-based) corresponds to raw line i + 1.  The original used
# content[i], pairing every ticker with the previous row's line and dropping
# the first data row entirely.
with open("data/ad-ex/batch2.csv", "r") as adex, open("data/ad-ex/batch2.csv", "r") as adex1:
    content = adex.readlines()
    reader = csv.DictReader(adex1)
    for i, row in enumerate(reader):
        adex_info[row["tic"]] = content[i + 1]

# Populate the URL -> ticker map.
with open("data/consolidated-sites-tickers.csv", "r") as csvFile:
    reader = csv.DictReader(csvFile, fieldnames=("url", "ticker"))
    for row in reader:
        url_to_ticker[row["url"]] = row["ticker"]

# Consolidate: append the matching ad-ex line to each raw web-traffic row.
# DictReader consumes web_traffic's header, so the raw handle must skip its
# header too before zipping; the original did not, pairing each parsed row
# with the previous raw line (wrong ticker per line) and losing the first
# data row.
with open("data/web-traffic/2019.csv", "r") as web_traffic, open("data/web-traffic/2019.csv", "r") as web_traffic1:
    with open("data/total-info-2019.csv", "w") as output:
        output.write(HEADER)
        reader = csv.DictReader(web_traffic)
        next(web_traffic1)  # skip the raw header line to stay aligned
        for line, raw_line in zip(reader, web_traffic1):
            ticker = url_to_ticker[line["Url"]]
            output.write(raw_line.rstrip('\n') + "," + adex_info[ticker])
| [
"csv.DictReader"
] | [((423, 444), 'csv.DictReader', 'csv.DictReader', (['adex1'], {}), '(adex1)\n', (437, 444), False, 'import csv\n'), ((704, 757), 'csv.DictReader', 'csv.DictReader', (['csvFile'], {'fieldnames': "('url', 'ticker')"}), "(csvFile, fieldnames=('url', 'ticker'))\n", (718, 757), False, 'import csv\n'), ((1067, 1094), 'csv.DictReader', 'csv.DictReader', (['web_traffic'], {}), '(web_traffic)\n', (1081, 1094), False, 'import csv\n')] |
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from oslo_serialization import jsonutils
from zaqar import bootstrap
from zaqar.conf import default
from zaqar.conf import drivers_transport_websocket
from zaqar.conf import transport
from zaqar import tests as testing
class TestBase(testing.TestBase):
    """Common fixture for websocket transport tests.

    Registers the option groups the websocket transport reads, boots a
    Zaqar stack from ``config_file``, and exposes the transport and API
    handles to subclasses.
    """

    config_file = None

    def setUp(self):
        super(TestBase, self).setUp()
        if not self.config_file:
            self.skipTest("No config specified")
        # Register every option group the transport stack needs, then keep
        # handles on the group views subclasses use.
        for opts, group in ((default.ALL_OPTS, None),
                            (transport.ALL_OPTS, transport.GROUP_NAME),
                            (drivers_transport_websocket.ALL_OPTS,
                             drivers_transport_websocket.GROUP_NAME)):
            self.conf.register_opts(opts, group=group)
        self.transport_cfg = self.conf[transport.GROUP_NAME]
        self.ws_cfg = self.conf[drivers_transport_websocket.GROUP_NAME]
        self.conf.unreliable = True
        self.conf.admin_mode = True
        # Boot the stack and guarantee storage/control drivers are closed
        # even when a test fails.
        self.boot = bootstrap.Bootstrap(self.conf)
        self.addCleanup(self.boot.storage.close)
        self.addCleanup(self.boot.control.close)
        self.transport = self.boot.transport
        self.api = self.boot.api

    def tearDown(self):
        # Pooled deployments persist registry state; wipe it between tests.
        if self.conf.pooling:
            self.boot.control.pools_controller.drop_all()
            self.boot.control.catalogue_controller.drop_all()
        super(TestBase, self).tearDown()
class TestBaseFaulty(TestBase):
    """Base fixture for fault-injection tests.

    This test ensures we aren't letting any exceptions go unhandled.
    """
class V1Base(TestBase):
    """Base class for V1 API tests.

    Holds helpers specific to version 1 of the API; currently empty.
    """
    pass
class V1BaseFaulty(TestBaseFaulty):
    """Base class for V1 API faulty tests.

    Holds helpers specific to V1 exception testing; currently empty.
    """
    pass
class V1_1Base(TestBase):
    """Base class for V1.1 API tests.

    Holds helpers specific to version 1.1 of the API.
    """

    def _empty_message_list(self, body):
        # The first frame of the response must decode to a document whose
        # 'messages' list is empty.
        messages = jsonutils.loads(body[0])['messages']
        self.assertEqual([], messages)
class V1_1BaseFaulty(TestBaseFaulty):
    """Base class for V1.1 API faulty tests.

    Holds helpers specific to V1.1 exception testing; currently empty.
    """
    pass
class V2Base(V1_1Base):
    """Base class for V2 API tests.

    Holds helpers specific to version 2 of the API; inherits the V1.1 helpers.
    """
class V2BaseFaulty(V1_1BaseFaulty):
    """Base class for V2 API faulty tests.

    Holds helpers specific to V2 exception testing; inherits the V1.1 helpers.
    """
| [
"zaqar.bootstrap.Bootstrap",
"oslo_serialization.jsonutils.loads"
] | [((1543, 1573), 'zaqar.bootstrap.Bootstrap', 'bootstrap.Bootstrap', (['self.conf'], {}), '(self.conf)\n', (1562, 1573), False, 'from zaqar import bootstrap\n'), ((2569, 2593), 'oslo_serialization.jsonutils.loads', 'jsonutils.loads', (['body[0]'], {}), '(body[0])\n', (2584, 2593), False, 'from oslo_serialization import jsonutils\n')] |
import math
import sys
import argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=("""This loan calculator can compute the following. \
\n-----------------------------------------------
- Your loans annuity monthly payment amount.
- Your number of monthly payments due.
- Your loan principal.
- Your differentiated payments."""))
parser.add_argument("-t", "--type",
help="You need to choose from either option: \
- annuity: for annuity options. \
- diff: for the differentiated payments.")
parser.add_argument("-l", "--principal", type=float,
help="The amount being borrowed.")
parser.add_argument("-p", "--payment", type=float,
help="The monthly payment due on the loan.")
parser.add_argument("-n", "--periods", type=float,
help="The time between the first payment on a loan and \
its maturity.")
parser.add_argument("-i", "--interest", type=float,
help="The amount charged on top of the principal.")
args = parser.parse_args()
# Validate the parsed arguments; any violation prints the canonical error
# message and exits.  (`not in` already rejects None, so the former explicit
# `is None` clause was redundant.)
if args.type not in ["annuity", "diff"]:
    print("Incorrect parameters")
    exit()
elif args.type == "diff" and args.payment:
    # Differentiated payments change every month; a fixed payment is invalid.
    print("Incorrect parameters")
    exit()
elif len(sys.argv) != 5:
    # Exactly four options are required: the type plus three numeric values.
    print("Incorrect parameters")
    exit()
elif args.interest is None:
    # The interest rate can never be derived, so it is always mandatory.
    print("Incorrect parameters")
    exit()
else:
    # Reject negative numeric values.  Skip values that are not numbers
    # (e.g. "--type=annuity" appearing after the first position) instead of
    # crashing with ValueError as the original did.
    for argument in sys.argv[2:]:
        value = argument.split("=")[-1]
        try:
            is_negative = float(value) < 0
        except ValueError:
            continue
        if is_negative:
            print("Incorrect parameters")
            exit()
# Differentiated payments: each month repays an equal share of the principal
# plus interest on the outstanding balance.
if args.type == "diff":
    principal = args.principal
    periods = args.periods
    rate = args.interest / (12 * 100)  # monthly interest rate
    total_paid = 0
    for month in range(1, int(periods) + 1):
        payment = math.ceil(principal / periods + rate * (principal - principal * (month - 1) / periods))
        total_paid += payment
        print(f"Month {month}: payment is {payment}")
    overpayment = int(total_paid - principal)
    if overpayment > 0:
        print(f"\nOverpayment = {overpayment}")
# Annuity calculations: depending on which three parameters were supplied,
# compute the repayment term, the monthly annuity payment, or the principal.
if args.type == "annuity":
    if args.principal and args.payment and args.interest:
        # Term in months: n = log_{1+r}(payment / (payment - r * principal)).
        loan = args.principal
        payment = args.payment
        interest = args.interest
        rate = interest / (12 * 100)
        base = payment / (payment - rate * loan)
        months = math.log(base, 1 + rate)
        y = math.floor(months / 12)
        m = math.ceil(months % 12)
        if m == 12:
            y += 1
            m = 0
        # Grammar-aware report.  The original if/elif chain had no branch
        # for y == 1 and m == 1 and printed nothing for e.g. 13 months.
        year_word = "year" if y == 1 else "years"
        month_word = "month" if m == 1 else "months"
        if y > 0 and m > 0:
            print(f"It will take {y} {year_word} and {m} {month_word} to repay this loan!")
        elif y > 0:
            print(f"It will take {y} {year_word} to repay this loan!")
        else:
            print(f"It will take {m} {month_word} to repay this loan!")
        total_payments = int(payment * ((y * 12) + m))
        overpayment = int(total_payments - loan)
        if overpayment > 0:
            print(f"Overpayment = {overpayment}")
    # Monthly annuity payment from principal, periods and interest.
    elif args.principal and args.periods and args.interest:
        loan = args.principal
        n = args.periods
        rate = args.interest / (12 * 100)
        annuity = math.ceil(loan * ((rate * ((1 + rate) ** n)) / (((1 + rate) ** n) - 1)))
        print(f"Your annuity payment = {annuity}!")
        overpayment = int(annuity * n - loan)
        if overpayment > 0:
            print(f"Overpayment = {overpayment}")
    # Loan principal from payment, periods and interest.
    elif args.payment and args.periods and args.interest:
        a = args.payment
        n = args.periods
        i = args.interest
        if i == 0:
            # Zero interest: principal is simply payment * periods and there
            # is no overpayment.  (The original left the paid total at 0
            # here, which reported the whole repaid amount as a bogus
            # overpayment, and printed a stray leading newline.)
            principal = math.floor(a * n)
        else:
            r = i / (12 * 100)
            principal = math.floor(a / ((r * ((1 + r) ** n)) / (((1 + r) ** n) - 1)))
        print(f"Your loan principal = {principal}!")
        overpayment = int((a * n) - principal)
        if overpayment > 0:
            print(f"Overpayment = {overpayment}")
| [
"math.floor",
"math.ceil",
"argparse.ArgumentParser",
"math.log"
] | [((49, 404), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'description': '"""This loan calculator can compute the following. \n-----------------------------------------------\n - Your loans annuity monthly payment amount.\n - Your number of monthly payments due.\n - Your loan principal.\n - Your differentiated payments."""'}), '(formatter_class=argparse.\n RawDescriptionHelpFormatter, description=\n """This loan calculator can compute the following. \n-----------------------------------------------\n - Your loans annuity monthly payment amount.\n - Your number of monthly payments due.\n - Your loan principal.\n - Your differentiated payments."""\n )\n', (72, 404), False, 'import argparse\n'), ((1970, 1982), 'math.ceil', 'math.ceil', (['d'], {}), '(d)\n', (1979, 1982), False, 'import math\n'), ((2514, 2538), 'math.log', 'math.log', (['base', '(1 + rate)'], {}), '(base, 1 + rate)\n', (2522, 2538), False, 'import math\n'), ((2551, 2574), 'math.floor', 'math.floor', (['(months / 12)'], {}), '(months / 12)\n', (2561, 2574), False, 'import math\n'), ((2587, 2609), 'math.ceil', 'math.ceil', (['(months % 12)'], {}), '(months % 12)\n', (2596, 2609), False, 'import math\n'), ((3928, 3946), 'math.ceil', 'math.ceil', (['annuity'], {}), '(annuity)\n', (3937, 3946), False, 'import math\n'), ((4388, 4405), 'math.floor', 'math.floor', (['(a * n)'], {}), '(a * n)\n', (4398, 4405), False, 'import math\n'), ((4626, 4647), 'math.floor', 'math.floor', (['principal'], {}), '(principal)\n', (4636, 4647), False, 'import math\n')] |
import pandas as pd
from ppandas import PDataFrame
# Load the populational CSV, drop the Gender column, and visualise the
# PDataFrame built over the Age variable with 600 samples.
census = pd.read_csv("testing/populational1.csv")
census = census.drop(columns=["Gender"])
p_frame = PDataFrame.from_populational_data(["Age"], census, 600)
p_frame.visualise(show_tables=True)
"ppandas.PDataFrame.from_populational_data",
"pandas.read_csv"
] | [((59, 99), 'pandas.read_csv', 'pd.read_csv', (['"""testing/populational1.csv"""'], {}), "('testing/populational1.csv')\n", (70, 99), True, 'import pandas as pd\n'), ((141, 193), 'ppandas.PDataFrame.from_populational_data', 'PDataFrame.from_populational_data', (["['Age']", 'df1', '(600)'], {}), "(['Age'], df1, 600)\n", (174, 193), False, 'from ppandas import PDataFrame\n')] |
import ctypes
from pypen.drawing.color import Color
from pypen.utils.math import TAU
from pypen.settings import default_settings
import cairo
from pyglet import gl, image
class PyPen():
    """Cairo-backed renderer that bridges PyPen's drawing primitives to a
    user sketch module.

    The sketch's ``settings`` object supplies the surface dimensions and the
    sticky fill/stroke style; every primitive draws through one shared cairo
    context and is also re-exported onto the sketch module itself.
    """

    def __init__(self, user_sketch):
        self.user_sketch = user_sketch
        self.surface_data = None
        self.surface = None
        self.context = None
        self.update_settings()
        self._fix_primitive_functions()

    def _fix_primitive_functions(self):
        # Re-export the drawing primitives as plain functions on the user's
        # sketch module so sketches can call circle(...), line(...) etc.
        # without holding a PyPen reference.
        for name in ("fill_screen", "clear_screen", "clear", "begin_shape",
                     "vertex", "end_shape", "rectangle", "circle", "ellipse",
                     "arc", "triangle", "line", "rotate", "translate",
                     "scale", "save", "restore", "reset_style"):
            setattr(self.user_sketch, name, getattr(self, name))

    def _fill(self, unparsed_fill_color):
        """Fill the current path; "" keeps the previous sticky fill color."""
        if unparsed_fill_color != "":
            self.user_sketch.settings.fill_color = unparsed_fill_color
        fill_color = Color.from_user_input(self.user_sketch.settings.fill_color)
        self.context.set_source_rgba(*fill_color.rgba())
        self.context.fill()

    def _stroke(self, unparsed_stroke_color, unparsed_stroke_width):
        """Stroke the current path, preserving it so a fill can follow.

        "" keeps the sticky stroke color; a negative width keeps the sticky
        stroke width.
        """
        if unparsed_stroke_color != "":
            self.user_sketch.settings.stroke_color = unparsed_stroke_color
        if unparsed_stroke_width >= 0:
            self.user_sketch.settings.stroke_width = unparsed_stroke_width
        stroke_color = Color.from_user_input(self.user_sketch.settings.stroke_color)
        stroke_width = self.user_sketch.settings.stroke_width
        self.context.set_line_width(stroke_width)
        self.context.set_source_rgba(*stroke_color.rgba())
        self.context.stroke_preserve()

    def rotate(self, angle=0):
        """Rotate the current transformation by *angle* (radians)."""
        self.context.rotate(angle)

    def translate(self, x=0, y=0):
        """Shift the origin of the current transformation by (x, y)."""
        self.context.translate(x, y)

    def scale(self, factor=1):
        """Scale the current transformation uniformly by *factor*.

        Bug fix: cairo's Context.scale() requires separate x and y factors;
        the previous single-argument call raised TypeError at runtime.
        """
        self.context.scale(factor, factor)

    def save(self):
        """Push the current graphics state (transform, style) on cairo's stack."""
        self.context.save()

    def restore(self):
        """Pop the most recently saved graphics state."""
        self.context.restore()

    def reset_style(self):
        """Restore the sticky fill/stroke style to the library defaults."""
        self.user_sketch.settings.fill_color = default_settings.fill_color
        self.user_sketch.settings.stroke_color = default_settings.stroke_color
        self.user_sketch.settings.stroke_width = default_settings.stroke_width

    def update_settings(self):
        """(Re)create the cairo surface/context and the GL texture to match
        the sketch's current width/height settings."""
        # ARGB32 uses 4 bytes per pixel, so the buffer and stride are width * 4.
        self.surface_data = (ctypes.c_ubyte * (self.user_sketch.settings.width * self.user_sketch.settings.height * 4))()
        self.surface = cairo.ImageSurface.create_for_data(
            self.surface_data,
            cairo.FORMAT_ARGB32,
            self.user_sketch.settings.width,
            self.user_sketch.settings.height,
            self.user_sketch.settings.width * 4)
        self.context = cairo.Context(self.surface)
        self.texture = image.Texture.create_for_size(
            gl.GL_TEXTURE_2D,
            self.user_sketch.settings.width,
            self.user_sketch.settings.height,
            gl.GL_RGBA)

    def clear_screen(self):
        """Fill the whole surface with the default background color."""
        self.fill_screen("default_background_color")

    def clear(self):
        """Alias for :meth:`clear_screen`."""
        self.clear_screen()

    def fill_screen(self, color="default_background_color"):
        """Flood the entire surface with *color*."""
        background_color = Color.from_user_input(color)
        self.context.save()
        # Scale so a unit rectangle covers the whole surface.
        self.context.scale(self.user_sketch.settings.width, self.user_sketch.settings.height)
        self.context.rectangle(0, 0, 1, 1)
        self.context.set_source_rgba(*background_color.rgba())
        self.context.fill()
        self.context.restore()

    def begin_shape(self):
        """Start a custom polygon; the next vertex() becomes its first point."""
        self.user_sketch.settings._shape_begun = True

    def vertex(self, x, y):
        """Add a point to the polygon started by begin_shape()."""
        if self.user_sketch.settings._shape_begun:
            # First vertex: move there and remember it so end_shape() can
            # close the path back to it.
            self.context.move_to(x, y)
            self.user_sketch.settings._starting_point = (x, y)
            self.user_sketch.settings._shape_begun = False
        else:
            self.context.line_to(x, y)

    def end_shape(self, fill_color="", stroke_color="", stroke_width=-1):
        """Close the polygon back to its first vertex, then stroke and fill it."""
        if self.user_sketch.settings._starting_point is not None:
            starting_point = self.user_sketch.settings._starting_point
            self.context.line_to(starting_point[0], starting_point[1])
            self.user_sketch.settings._starting_point = None
        self._stroke(stroke_color, stroke_width)
        self._fill(fill_color)

    def rectangle(self, x, y, width, height, fill_color="", stroke_color="", stroke_width=-1):
        """Draw an axis-aligned rectangle with top-left corner (x, y)."""
        self.context.rectangle(x, y, width, height)
        self._stroke(stroke_color, stroke_width)
        self._fill(fill_color)

    def circle(self, x, y, radius, fill_color="", stroke_color="", stroke_width=-1):
        """Draw a full circle centered at (x, y)."""
        self.context.arc(x, y, radius, 0, TAU)
        self._stroke(stroke_color, stroke_width)
        self._fill(fill_color)

    def ellipse(self, x, y, width, height, fill_color="", stroke_color="", stroke_width=-1):
        """Draw an ellipse centered at (x, y) with the given radii."""
        # Squash the y axis so a circle of radius `width` becomes the ellipse;
        # the center's y must be pre-divided to land back on (x, y).
        ratio = height / width
        self.save()
        self.context.scale(1, ratio)
        self.context.arc(x, y / ratio, width, 0, TAU)
        self.restore()
        self._stroke(stroke_color, stroke_width)
        self._fill(fill_color)

    def arc(self, x, y, radius, start_angle, stop_angle, fill_color="", stroke_color="", stroke_width=-1):
        """Draw a circular arc centered at (x, y) between the two angles."""
        self.context.arc(x, y, radius, start_angle, stop_angle)
        self._stroke(stroke_color, stroke_width)
        self._fill(fill_color)

    def triangle(self, x1_or_x, y1_or_y, x2_or_width, y2_or_height, x3_or_p=0.5, y3=None, fill_color="", stroke_color="", stroke_width=-1):
        """Draw a triangle.

        Either pass three explicit vertices (x1, y1, x2, y2, x3, y3), or —
        when y3 is omitted — a bounding box (x, y, width, height) plus *p*,
        the relative position of the apex along the base.
        """
        if y3 is not None:
            x1 = x1_or_x
            y1 = y1_or_y
            x2 = x2_or_width
            y2 = y2_or_height
            x3 = x3_or_p
        else:
            x = x1_or_x
            y = y1_or_y
            width = x2_or_width
            height = y2_or_height
            p = x3_or_p
            x1 = x - width / 2
            y1 = y + height / 2
            x2 = x + width / 2
            # Bug fix: the base must be horizontal; the original computed
            # y2 from x instead of y.
            y2 = y + height / 2
            x3 = (x2 - x1) * p + x1
            y3 = y - height / 2
        self.context.move_to(x1, y1)
        self.context.line_to(x2, y2)
        self.context.line_to(x3, y3)
        self.context.line_to(x1, y1)
        self._stroke(stroke_color, stroke_width)
        self._fill(fill_color)

    def line(self, x1, y1, x2, y2, stroke_color="", stroke_width=-1):
        """Draw a straight segment from (x1, y1) to (x2, y2)."""
        self.context.move_to(x1, y1)
        self.context.line_to(x2, y2)
        self._stroke(stroke_color, stroke_width)
| [
"cairo.Context",
"cairo.ImageSurface.create_for_data",
"pypen.drawing.color.Color.from_user_input",
"pyglet.image.Texture.create_for_size"
] | [((1563, 1622), 'pypen.drawing.color.Color.from_user_input', 'Color.from_user_input', (['self.user_sketch.settings.fill_color'], {}), '(self.user_sketch.settings.fill_color)\n', (1584, 1622), False, 'from pypen.drawing.color import Color\n'), ((2032, 2093), 'pypen.drawing.color.Color.from_user_input', 'Color.from_user_input', (['self.user_sketch.settings.stroke_color'], {}), '(self.user_sketch.settings.stroke_color)\n', (2053, 2093), False, 'from pypen.drawing.color import Color\n'), ((3054, 3241), 'cairo.ImageSurface.create_for_data', 'cairo.ImageSurface.create_for_data', (['self.surface_data', 'cairo.FORMAT_ARGB32', 'self.user_sketch.settings.width', 'self.user_sketch.settings.height', '(self.user_sketch.settings.width * 4)'], {}), '(self.surface_data, cairo.FORMAT_ARGB32,\n self.user_sketch.settings.width, self.user_sketch.settings.height, self\n .user_sketch.settings.width * 4)\n', (3088, 3241), False, 'import cairo\n'), ((3488, 3515), 'cairo.Context', 'cairo.Context', (['self.surface'], {}), '(self.surface)\n', (3501, 3515), False, 'import cairo\n'), ((3539, 3670), 'pyglet.image.Texture.create_for_size', 'image.Texture.create_for_size', (['gl.GL_TEXTURE_2D', 'self.user_sketch.settings.width', 'self.user_sketch.settings.height', 'gl.GL_RGBA'], {}), '(gl.GL_TEXTURE_2D, self.user_sketch.settings.\n width, self.user_sketch.settings.height, gl.GL_RGBA)\n', (3568, 3670), False, 'from pyglet import gl, image\n'), ((3887, 3915), 'pypen.drawing.color.Color.from_user_input', 'Color.from_user_input', (['color'], {}), '(color)\n', (3908, 3915), False, 'from pypen.drawing.color import Color\n')] |
# coding: utf-8
""" MIT License """
'''
<NAME> & <NAME>
<NAME> & <NAME>
---
Description:
Function designed to evaluate all parameters provided to the gp and identify the best parameters.
Saves all fitness of individuals by logging them into csv files which will then be evaluated on plots.py
---
Copyright (c) 2018
'''
# libraries and dependencies
# ---------------------------------------------------------------------------- #
from evolution import Evolution
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import classifier
import random
import utils
import csv
import os
# ---------------------------------------------------------------------------- #
if __name__ == '__main__':
    # import data
    X, y = utils.load_data(
        filename='data_trimmed.csv',
        clean=False,
        normalize=True,
        resample=2 # (2) to downsample the negative cases
    )
    # concatenate selected features with their target values
    dataset = np.column_stack((X, y))
    # Hyper-parameter grids; the indices used below pick one fixed setting
    # per run rather than sweeping the whole grid.
    popsize = [100, 250, 500, 1000]
    GenMax = [50, 100, 250, 500]
    mutRate = [0.01, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
    crRate = [0.01, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6]# 0.7, 0.8, 0.9]
    reps = 5
    states = 1
    i = 6
    evo = Evolution(
        dataset = dataset.tolist(), # data samples
        popsize = popsize[3], # initial population size
        hofsize = 10, # the number of best individual to track
        cx = crRate[i], # crossover rate
        mut = mutRate[6], # mutation rate
        maxgen = GenMax[2], # max number of generations
    )
    # Per-repetition logbook statistics: one row per rep, one column per
    # generation (GenMax[2]+1 includes generation 0).
    logs = pd.DataFrame()
    gen = np.zeros((reps, GenMax[2]+1))
    nevals = np.zeros((reps, GenMax[2]+1))
    avg = np.zeros((reps, GenMax[2]+1))
    mini = np.zeros((reps, GenMax[2]+1))
    maxi = np.zeros((reps, GenMax[2]+1))
    for l in range(reps):
        # NOTE(review): this seeds with the constant `reps` (5) on every
        # iteration, so all repetitions run with the same RNG state and
        # averaging over reps adds no information — presumably `l` was
        # intended; confirm before changing, results files depend on it.
        np.random.seed(reps)
        pop, logbook, hof= evo.run()
        gen[l][:] = logbook.select('gen')
        nevals[l][:] = logbook.select('nevals')
        avg[l][:] = logbook.select('avg')
        mini[l][:] = logbook.select('min')
        maxi[l][:] = logbook.select('max')
    # Average each statistic over the repetitions, generation by generation.
    AvgEval = []
    Avg = []
    AvgMin = []
    AvgMax = []
    for n in range(GenMax[2]+1):
        totalEval = 0
        totalAvg = 0
        totalMin = 0
        totalMax = 0
        for m in range(reps):
            totalEval += nevals[m][n]
            totalAvg += avg[m][n]
            totalMin += mini[m][n]
            totalMax += maxi[m][n]
        AvgEval.append(totalEval/reps)
        Avg.append(totalAvg/reps)
        AvgMin.append(totalMin/reps)
        AvgMax.append(totalMax/reps)
    # `l` here is the loop variable left over from the reps loop (last rep);
    # all rows of `gen` are identical so any row would do.
    logs['gen'] = gen[l][:]
    logs['nEval'] = AvgEval
    logs['Avg Fitness'] = Avg
    logs['Avg Min'] = AvgMin
    logs['Avg Max'] = AvgMax
    #print(logs)
    # Persist the averaged fitness curves for later plotting (plots.py).
    cwd = os.getcwd()
    pth_to_save = cwd + "/results/mutEphemeralAll.6_cxOnePoint_.6_selDoubleTournament_codeBloatOn.csv"
    logs.to_csv(pth_to_save)
    print('Done')
"utils.load_data",
"numpy.column_stack",
"os.getcwd",
"numpy.zeros",
"numpy.random.seed",
"pandas.DataFrame"
] | [((777, 866), 'utils.load_data', 'utils.load_data', ([], {'filename': '"""data_trimmed.csv"""', 'clean': '(False)', 'normalize': '(True)', 'resample': '(2)'}), "(filename='data_trimmed.csv', clean=False, normalize=True,\n resample=2)\n", (792, 866), False, 'import utils\n'), ((1019, 1042), 'numpy.column_stack', 'np.column_stack', (['(X, y)'], {}), '((X, y))\n', (1034, 1042), True, 'import numpy as np\n'), ((1733, 1747), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1745, 1747), True, 'import pandas as pd\n'), ((1759, 1790), 'numpy.zeros', 'np.zeros', (['(reps, GenMax[2] + 1)'], {}), '((reps, GenMax[2] + 1))\n', (1767, 1790), True, 'import numpy as np\n'), ((1802, 1833), 'numpy.zeros', 'np.zeros', (['(reps, GenMax[2] + 1)'], {}), '((reps, GenMax[2] + 1))\n', (1810, 1833), True, 'import numpy as np\n'), ((1842, 1873), 'numpy.zeros', 'np.zeros', (['(reps, GenMax[2] + 1)'], {}), '((reps, GenMax[2] + 1))\n', (1850, 1873), True, 'import numpy as np\n'), ((1883, 1914), 'numpy.zeros', 'np.zeros', (['(reps, GenMax[2] + 1)'], {}), '((reps, GenMax[2] + 1))\n', (1891, 1914), True, 'import numpy as np\n'), ((1928, 1959), 'numpy.zeros', 'np.zeros', (['(reps, GenMax[2] + 1)'], {}), '((reps, GenMax[2] + 1))\n', (1936, 1959), True, 'import numpy as np\n'), ((2991, 3002), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3000, 3002), False, 'import os\n'), ((1997, 2017), 'numpy.random.seed', 'np.random.seed', (['reps'], {}), '(reps)\n', (2011, 2017), True, 'import numpy as np\n')] |
"""
Example script on how to use the esp32serial library.
For the documentation. open a python and type:
>>> import esp32serial
>>> help(esp32serial.ESP32Serial)
"""
# import the library
import esp32serial
# create a connection
esp32 = esp32serial.ESP32Serial("/dev/ttyACM0")
# get an observable or parameter, converting it inline to float
pressure = float(esp32.get("pressure"))
# set a parameter to a value
result = esp32.set("intparameter", 3)
# this is just an example on what to expect and a possible reaction
if result != 'OK':
raise Exception("setting intparameter failed")
| [
"esp32serial.ESP32Serial"
] | [((239, 278), 'esp32serial.ESP32Serial', 'esp32serial.ESP32Serial', (['"""/dev/ttyACM0"""'], {}), "('/dev/ttyACM0')\n", (262, 278), False, 'import esp32serial\n')] |
import os
import tempfile
import unittest

from flask import Flask

from flask_csp.csp import csp_default, create_csp_header, csp_header
class CspTestFunctions(unittest.TestCase):
    """Unit tests for the flask_csp helper functions."""

    def setUp(self):
        # mkstemp() returns (fd, path); close the OS-level descriptor so
        # every test run does not leak one.
        fd, path = tempfile.mkstemp()
        os.close(fd)
        self.dh = csp_default()
        self.dh.default_file = path

    def test_create_csp_header(self):
        """ test dict -> csp header """
        # assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual throughout.
        self.assertEqual(create_csp_header({'foo': 'bar', 'lorem': 'ipsum'}),
                         'foo bar; lorem ipsum')

    def test_default_empty_exception(self):
        """ test empty default file """
        with self.assertRaises(Exception):
            self.dh.read()

    def test_default_read_write(self):
        """ test read/write to default """
        self.dh.update()  # update() with no args seeds the empty file with defaults
        t = self.dh.read()
        self.assertEqual(t['default-src'], "'self'")
        self.dh.update({'default-src': "'none'", 'script-src': "'self'"})  # test update
        t = self.dh.read()
        self.assertEqual(t['default-src'], "'none'")
        self.assertEqual(t['script-src'], "'self'")

    def test_included_json_file(self):
        """ make sure included json file is readable / writeable """
        h = csp_default()
        ret = h.read()
        self.assertIn("default-src", ret)
        h.update({'default-src': "'self'"})
        ret = h.read()
        self.assertEqual(ret['default-src'], "'self'")
class CspTestDefaultDecorator(unittest.TestCase):
    """The decorator used with no arguments must emit the default policy."""

    def setUp(self):
        self.app = Flask(__name__)

        @self.app.route('/')
        @csp_header()
        def index():
            return "test"

    def test_csp_header(self):
        with self.app.test_client() as client:
            response = client.get('/')
        policy = response.headers.get('Content-Security-Policy')
        assert "default-src 'self'" in policy
class CspTestCustomDecoratorUpdate(unittest.TestCase):
    """A custom dict passed to the decorator must override the policy."""

    def setUp(self):
        self.app = Flask(__name__)

        @self.app.route('/')
        @csp_header({'default-src': "'none'", 'script-src': "'self'"})
        def index():
            return "test"

    def test_csp_header(self):
        with self.app.test_client() as client:
            policy = client.get('/').headers.get('Content-Security-Policy')
        assert "default-src 'none'" in policy
        assert "script-src 'self'" in policy
class CspTestCustomDecoratorRemove(unittest.TestCase):
    """An empty value for a directive must remove it from the policy."""

    def setUp(self):
        self.app = Flask(__name__)

        @self.app.route('/')
        @csp_header({'default-src': ''})
        def index():
            return "hi"

    def test_csp_header(self):
        with self.app.test_client() as client:
            policy = client.get('/').headers.get('Content-Security-Policy')
        assert "default-src" not in policy
class CspTestReadOnly(unittest.TestCase):
    """The report-only flag must switch the policy to the Report-Only header
    and must not leak into the emitted directives."""

    def setUp(self):
        self.app = Flask(__name__)

        @self.app.route('/')
        @csp_header({'report-only': True})
        def index():
            return "hi"

    def test_csp_header(self):
        with self.app.test_client() as client:
            header = client.get('/').headers.get('Content-Security-Policy-Report-Only')
        assert "default-src" in header
        assert "report-only" not in header
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| [
"flask_csp.csp.csp_header",
"flask.Flask",
"flask_csp.csp.csp_default",
"flask_csp.csp.create_csp_header",
"unittest.main",
"tempfile.mkstemp"
] | [((3071, 3086), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3084, 3086), False, 'import unittest\n'), ((224, 242), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (240, 242), False, 'import tempfile\n'), ((255, 268), 'flask_csp.csp.csp_default', 'csp_default', ([], {}), '()\n', (266, 268), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n'), ((1080, 1093), 'flask_csp.csp.csp_default', 'csp_default', ([], {}), '()\n', (1091, 1093), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n'), ((1377, 1392), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1382, 1392), False, 'from flask import Flask\n'), ((1419, 1431), 'flask_csp.csp.csp_header', 'csp_header', ([], {}), '()\n', (1429, 1431), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n'), ((1777, 1792), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1782, 1792), False, 'from flask import Flask\n'), ((1819, 1880), 'flask_csp.csp.csp_header', 'csp_header', (['{\'default-src\': "\'none\'", \'script-src\': "\'self\'"}'], {}), '({\'default-src\': "\'none\'", \'script-src\': "\'self\'"})\n', (1829, 1880), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n'), ((2306, 2321), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (2311, 2321), False, 'from flask import Flask\n'), ((2348, 2379), 'flask_csp.csp.csp_header', 'csp_header', (["{'default-src': ''}"], {}), "({'default-src': ''})\n", (2358, 2379), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n'), ((2672, 2687), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (2677, 2687), False, 'from flask import Flask\n'), ((2714, 2747), 'flask_csp.csp.csp_header', 'csp_header', (["{'report-only': True}"], {}), "({'report-only': True})\n", (2724, 2747), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n'), ((391, 442), 
'flask_csp.csp.create_csp_header', 'create_csp_header', (["{'foo': 'bar', 'lorem': 'ipsum'}"], {}), "({'foo': 'bar', 'lorem': 'ipsum'})\n", (408, 442), False, 'from flask_csp.csp import csp_default, create_csp_header, csp_header\n')] |
# Generated by Django 3.1.6 on 2021-02-11 11:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add contact and opening-hours fields to the ``Setting`` model:
    address, close time, e-mail, open time and phone number."""

    dependencies = [
        ('core', '0003_auto_20210210_2324'),
    ]

    operations = [
        # All fields are nullable so the migration applies cleanly to
        # existing ``Setting`` rows.
        migrations.AddField(
            model_name='setting',
            name='site_address',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Address'),
        ),
        migrations.AddField(
            model_name='setting',
            name='site_closetime',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='setting',
            name='site_email',
            field=models.EmailField(max_length=254, null=True),
        ),
        migrations.AddField(
            model_name='setting',
            name='site_opentime',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='setting',
            name='site_phone',
            # NOTE(review): IntegerField drops leading zeros and cannot hold a
            # '+' prefix; a CharField is usually preferred for phone numbers.
            # Left unchanged because this migration may already be applied.
            field=models.IntegerField(null=True, verbose_name='Phone Number'),
        ),
    ]
| [
"django.db.models.DateTimeField",
"django.db.models.EmailField",
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((338, 417), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'null': '(True)', 'verbose_name': '"""Address"""'}), "(blank=True, max_length=255, null=True, verbose_name='Address')\n", (354, 417), False, 'from django.db import migrations, models\n'), ((546, 577), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (566, 577), False, 'from django.db import migrations, models\n'), ((702, 746), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (719, 746), False, 'from django.db import migrations, models\n'), ((874, 905), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (894, 905), False, 'from django.db import migrations, models\n'), ((1030, 1089), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'verbose_name': '"""Phone Number"""'}), "(null=True, verbose_name='Phone Number')\n", (1049, 1089), False, 'from django.db import migrations, models\n')] |
import torch
import torch.nn.functional as F
import numpy as np
from utils import *
from core.config import config
from train.BaseTrainer import BaseTrainer
class PartNetTrainer(BaseTrainer):
    """Trainer for a part-based re-ID network.

    Builds the optimizer/LR schedule from the global config, extracts
    L2-normalised global and local features for each test loader, and
    evaluates mAP / rank-1 through ``evaluate.eval_part_result``.
    """
    def __init__(self):
        super(PartNetTrainer, self).__init__()
    def build_opt_and_lr(self, model):
        """Create optimizer and LR scheduler from config's 'optm_config'.

        The ResNet backbone gets its own parameter group; on market1501 its
        learning rate is scaled down by 0.1 (presumably because it is
        pretrained).  Outside debug mode the base LR is multiplied by the
        number of configured GPUs.
        """
        if config.get('debug'):
            lr_mul = 1
        else:
            # NOTE(review): assumes config 'gpus' is a sized collection.
            lr_mul = len(config.get('gpus'))
        ocfg = config.get('optm_config')
        # Split parameters into backbone (resnet) vs. everything else,
        # matching by object identity.
        ignored_params = list(map(id, model.module.resnet.parameters()))
        base_params = filter(lambda p: id(p) not in ignored_params, model.parameters())
        cfg = config.get('dataset_config')
        if cfg['train_name'] == 'market1501':
            new_p_mul = 0.1
        else:
            new_p_mul = 1.
        param_groups = [
            {'params': model.module.resnet.parameters(), 'lr': ocfg['lr'] * new_p_mul * lr_mul},
            {'params': base_params}
        ]
        if ocfg['name'] == 'SGD':
            optimizer = torch.optim.SGD(param_groups, ocfg['lr'] * lr_mul,
                                        momentum=ocfg['momentum'],
                                        weight_decay=ocfg['weight_decay'])
        else:
            # Any other optimizer name falls back to Adam.
            optimizer = torch.optim.Adam(param_groups, ocfg['lr'] * lr_mul,
                                         weight_decay=ocfg['weight_decay'])
        if 'multistep' in ocfg and ocfg['multistep']:
            lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,
                                                                ocfg['step'],
                                                                gamma=ocfg['gamma'],
                                                                last_epoch=-1)
        else:
            # CosineAnnealingWarmUp presumably comes from the utils star-import.
            lr_scheduler = CosineAnnealingWarmUp(optimizer,
                                                 T_0=5,
                                                 T_end=ocfg.get('epochs'),
                                                 warmup_factor=ocfg.get('warmup_factor'),
                                                 last_epoch=-1)
        self.logger.write(optimizer)
        return optimizer, lr_scheduler
    def extract(self, test_data, model):
        """Extract features for every loader in ``test_data``.

        Returns a dict keyed like ``test_data`` with concatenated, L2
        normalised 'global_features'/'local_features', raw 'local_parts'
        and the image 'path' list.  With config 'with_flip' enabled the
        features of each image and its horizontal mirror are averaged and
        re-normalised.
        """
        model.eval()
        res = {}
        for p, val_loader in test_data.items():
            global_features, local_features, local_parts = [], [], []
            with torch.no_grad():
                paths = []
                for i, (input, _, path) in enumerate(val_loader):
                    # print(input[0])
                    input = input.cuda(non_blocking=True)
                    # compute output
                    global_feature, local_feature, local_part = model(input)
                    global_feature = F.normalize(global_feature, p=2, dim=-1)
                    local_feature = F.normalize(local_feature, p=2, dim=-1)
                    if config.get('with_flip'):
                        # flip(3) mirrors the width dimension of an NCHW batch.
                        input_ = input.flip(3)
                        global_feature_, local_feature_, _ = model(input_)
                        global_feature_ = F.normalize(global_feature_, p=2, dim=-1)
                        local_feature_ = F.normalize(local_feature_, p=2, dim=-1)
                        global_feature = (global_feature + global_feature_) / 2
                        local_feature = (local_feature + local_feature_) / 2
                        global_feature = F.normalize(global_feature, p=2, dim=-1)
                        local_feature = F.normalize(local_feature, p=2, dim=-1)
                    global_features.append(global_feature)
                    local_features.append(local_feature)
                    local_parts.append(local_part)
                    paths.extend(path)
            global_features = torch.cat(global_features, dim=0)
            local_features = torch.cat(local_features, dim=0)
            local_parts = torch.cat(local_parts, dim=0)
            print(global_features.size(), local_features.size(), local_parts.size())
            res[p] = {
                'global_features': global_features,
                'local_features': local_features,
                'local_parts': local_parts,
                'path': paths
            }
        return res
    def eval_status(self, epoch):
        """Whether to evaluate at ``epoch``: every epoch on market1501,
        otherwise only during the final 10 epochs of training."""
        cfg = config.get('dataset_config')
        if cfg['train_name'] == 'market1501':
            return True
        else:
            ocfg = config.get('optm_config')
            return ocfg.get('epochs') - 10 <= epoch <= ocfg.get('epochs')
    def eval_result(self, **kwargs):
        """Compute (mAP, rank-1) from extracted feature info via the
        ``evaluate`` module (presumably provided by the utils star-import)."""
        info = kwargs.get('info')
        return evaluate.eval_part_result(info, use_pcb_format=config.get('use_pcb_format', True), logger=self.logger)
    def extract_and_eval(self, test_loader, model):
        """Convenience wrapper: extract features, then evaluate them."""
        res = self.extract(test_loader, model)
        mAP, rank_1 = self.eval_result(info=res)
        return mAP, rank_1
if __name__ == '__main__':
    # Entry point: run the full train/validation loop defined by BaseTrainer.
    trainer = PartNetTrainer()
    trainer.train_or_val()
| [
"torch.optim.Adam",
"torch.optim.SGD",
"core.config.config.get",
"torch.optim.lr_scheduler.MultiStepLR",
"torch.nn.functional.normalize",
"torch.no_grad",
"torch.cat"
] | [((320, 339), 'core.config.config.get', 'config.get', (['"""debug"""'], {}), "('debug')\n", (330, 339), False, 'from core.config import config\n'), ((438, 463), 'core.config.config.get', 'config.get', (['"""optm_config"""'], {}), "('optm_config')\n", (448, 463), False, 'from core.config import config\n'), ((639, 667), 'core.config.config.get', 'config.get', (['"""dataset_config"""'], {}), "('dataset_config')\n", (649, 667), False, 'from core.config import config\n'), ((4318, 4346), 'core.config.config.get', 'config.get', (['"""dataset_config"""'], {}), "('dataset_config')\n", (4328, 4346), False, 'from core.config import config\n'), ((1025, 1142), 'torch.optim.SGD', 'torch.optim.SGD', (['param_groups', "(ocfg['lr'] * lr_mul)"], {'momentum': "ocfg['momentum']", 'weight_decay': "ocfg['weight_decay']"}), "(param_groups, ocfg['lr'] * lr_mul, momentum=ocfg['momentum'\n ], weight_decay=ocfg['weight_decay'])\n", (1040, 1142), False, 'import torch\n'), ((1256, 1347), 'torch.optim.Adam', 'torch.optim.Adam', (['param_groups', "(ocfg['lr'] * lr_mul)"], {'weight_decay': "ocfg['weight_decay']"}), "(param_groups, ocfg['lr'] * lr_mul, weight_decay=ocfg[\n 'weight_decay'])\n", (1272, 1347), False, 'import torch\n'), ((1466, 1568), 'torch.optim.lr_scheduler.MultiStepLR', 'torch.optim.lr_scheduler.MultiStepLR', (['optimizer', "ocfg['step']"], {'gamma': "ocfg['gamma']", 'last_epoch': '(-1)'}), "(optimizer, ocfg['step'], gamma=ocfg[\n 'gamma'], last_epoch=-1)\n", (1502, 1568), False, 'import torch\n'), ((4450, 4475), 'core.config.config.get', 'config.get', (['"""optm_config"""'], {}), "('optm_config')\n", (4460, 4475), False, 'from core.config import config\n'), ((403, 421), 'core.config.config.get', 'config.get', (['"""gpus"""'], {}), "('gpus')\n", (413, 421), False, 'from core.config import config\n'), ((2390, 2405), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2403, 2405), False, 'import torch\n'), ((3763, 3796), 'torch.cat', 'torch.cat', (['global_features'], {'dim': 
'(0)'}), '(global_features, dim=0)\n', (3772, 3796), False, 'import torch\n'), ((3830, 3862), 'torch.cat', 'torch.cat', (['local_features'], {'dim': '(0)'}), '(local_features, dim=0)\n', (3839, 3862), False, 'import torch\n'), ((3893, 3922), 'torch.cat', 'torch.cat', (['local_parts'], {'dim': '(0)'}), '(local_parts, dim=0)\n', (3902, 3922), False, 'import torch\n'), ((4684, 4718), 'core.config.config.get', 'config.get', (['"""use_pcb_format"""', '(True)'], {}), "('use_pcb_format', True)\n", (4694, 4718), False, 'from core.config import config\n'), ((2747, 2787), 'torch.nn.functional.normalize', 'F.normalize', (['global_feature'], {'p': '(2)', 'dim': '(-1)'}), '(global_feature, p=2, dim=-1)\n', (2758, 2787), True, 'import torch.nn.functional as F\n'), ((2824, 2863), 'torch.nn.functional.normalize', 'F.normalize', (['local_feature'], {'p': '(2)', 'dim': '(-1)'}), '(local_feature, p=2, dim=-1)\n', (2835, 2863), True, 'import torch.nn.functional as F\n'), ((2888, 2911), 'core.config.config.get', 'config.get', (['"""with_flip"""'], {}), "('with_flip')\n", (2898, 2911), False, 'from core.config import config\n'), ((3077, 3118), 'torch.nn.functional.normalize', 'F.normalize', (['global_feature_'], {'p': '(2)', 'dim': '(-1)'}), '(global_feature_, p=2, dim=-1)\n', (3088, 3118), True, 'import torch.nn.functional as F\n'), ((3160, 3200), 'torch.nn.functional.normalize', 'F.normalize', (['local_feature_'], {'p': '(2)', 'dim': '(-1)'}), '(local_feature_, p=2, dim=-1)\n', (3171, 3200), True, 'import torch.nn.functional as F\n'), ((3399, 3439), 'torch.nn.functional.normalize', 'F.normalize', (['global_feature'], {'p': '(2)', 'dim': '(-1)'}), '(global_feature, p=2, dim=-1)\n', (3410, 3439), True, 'import torch.nn.functional as F\n'), ((3480, 3519), 'torch.nn.functional.normalize', 'F.normalize', (['local_feature'], {'p': '(2)', 'dim': '(-1)'}), '(local_feature, p=2, dim=-1)\n', (3491, 3519), True, 'import torch.nn.functional as F\n')] |
from unittest.mock import patch
from django.core.management import call_command
from django.core.management.base import CommandError
from orchestra.tests.helpers import OrchestraTestCase
class MigrateCertificationsTestCase(OrchestraTestCase):
    """Tests for the ``migrate_certifications`` management command."""

    patch_path = ('orchestra.management.commands.'
                  'migrate_certifications.migrate_certifications')

    @patch(patch_path)
    def test_options(self, mock_migrate):
        # With no arguments the command is a usage error and must not reach
        # the migration helper.
        with self.assertRaises(CommandError):
            call_command('migrate_certifications')
        mock_migrate.assert_not_called()

        # With both workflow slugs and a certification list, the helper must
        # be invoked exactly once with those arguments.
        # (Fixed the 'ntest_...' typo so the slug matches the assertion.)
        call_command('migrate_certifications',
                     'test_source_workflow_slug',
                     'test_destination_workflow_slug',
                     certifications=['test_cert_1', 'test_cert_2'])
        # Bug fix: ``called_once_with`` is not a Mock assertion — it is an
        # auto-created attribute that silently passes.  Use the real
        # ``assert_called_once_with``.
        mock_migrate.assert_called_once_with(
            'test_source_workflow_slug',
            'test_destination_workflow_slug',
            ['test_cert_1', 'test_cert_2']
        )
| [
"unittest.mock.patch",
"django.core.management.call_command"
] | [((371, 388), 'unittest.mock.patch', 'patch', (['patch_path'], {}), '(patch_path)\n', (376, 388), False, 'from unittest.mock import patch\n'), ((623, 780), 'django.core.management.call_command', 'call_command', (['"""migrate_certifications"""', '"""test_source_workflow_slug"""', '"""ntest_destination_workflow_slug"""'], {'certifications': "['test_cert_1', 'test_cert_2']"}), "('migrate_certifications', 'test_source_workflow_slug',\n 'ntest_destination_workflow_slug', certifications=['test_cert_1',\n 'test_cert_2'])\n", (635, 780), False, 'from django.core.management import call_command\n'), ((515, 553), 'django.core.management.call_command', 'call_command', (['"""migrate_certifications"""'], {}), "('migrate_certifications')\n", (527, 553), False, 'from django.core.management import call_command\n')] |
"""
Most recently tested against PySAM 2.1.4
"""
from pathlib import Path
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import PySAM.Singleowner as Singleowner
import time
import multiprocessing
from itertools import product
import PySAM.Pvsamv1 as Pvsamv1
# Weather file shipped with the test data (PSM v3 TMY for Blythe, CA).
solar_resource_file = Path(__file__).parent / "tests" / "blythe_ca_33.617773_-114.588261_psmv3_60_tmy.csv"
def gcr_func(gcr, cost_per_land_area):
    """
    Returns the Internal Rate of Return of a default PV single owner project given modified ground-coverage-ratio (GCR)
    and cost per land area
    Args:
        gcr: ratio, between 0.1 - 1
        cost_per_land_area: $
    Returns: IRR
    """
    # set up base: PV performance model + single-owner financial model
    a = Pvsamv1.default("FlatPlatePVSingleowner")
    a.SolarResource.solar_resource_file = solar_resource_file
    b = Singleowner.default("FlatPlatePVSingleowner")
    # set up shading (self-shaded rows, 12x2 module layout)
    a.Shading.subarray1_shade_mode = 1
    a.Layout.subarray1_nmodx = 12
    a.Layout.subarray1_nmody = 2
    a.SystemDesign.subarray1_gcr = float(gcr)
    # Land area = total module area / GCR; the 0.0002471 factor presumably
    # converts m^2 to acres — TODO confirm against PySAM units.
    land_area = a.CECPerformanceModelWithModuleDatabase.cec_area * (a.SystemDesign.subarray1_nstrings
                * a.SystemDesign.subarray1_modules_per_string) / gcr * 0.0002471
    a.execute(0)
    # total_installed_cost = total_direct_cost + permitting_total + engr_total + grid_total + landprep_total + sales_tax_total + land_total
    # Add the land cost on top of the default installed cost
    # (the *1000 scaling presumably converts units — TODO confirm).
    b.SystemCosts.total_installed_cost += cost_per_land_area * land_area * 1000
    # Feed the simulated generation into the financial model.
    b.SystemOutput.system_pre_curtailment_kwac = a.Outputs.gen
    b.SystemOutput.gen = a.Outputs.gen
    b.execute(0)
    return b.Outputs.analysis_period_irr
# Grid of GCR and land-cost values to sweep (scaled to 0.1..0.9 below).
gcrs = np.arange(1, 10)
costs = np.arange(1, 10)

if __name__ == '__main__':
    # Bug fix: everything past the Pool must live inside this guard.  With
    # the 'spawn' start method, worker processes re-import this module; any
    # top-level code referencing `results` (which is only defined in the
    # parent) would raise NameError in the workers.
    multi1 = time.process_time()
    with multiprocessing.Pool(processes=4) as pool:
        # Evaluate IRR over the Cartesian product of gcr x cost values.
        results = pool.starmap(gcr_func, product(gcrs / 10, repeat=2))
    multi2 = time.process_time()
    print("multi process time:", multi2 - multi1, "\n")

    # Reshape the flat result list into a (cost, gcr) grid for plotting.
    results = np.array([results])
    results = np.reshape(results, (-1, 9))
    X, Y = np.meshgrid(gcrs, costs)

    # 3D surface of IRR vs GCR and land cost.
    fig = plt.figure()
    ax = Axes3D(fig)
    ax.plot_surface(X, Y, results)
    plt.title("Internal Rate of Return")
    plt.xlabel("GCR")
    plt.ylabel("$ / land area")
    plt.show()

    # Contour view of the same data.
    plt.contour(X, Y, results)
    plt.title("Internal Rate of Return")
    plt.xlabel("GCR")
    plt.ylabel("$ / land area")
    plt.show()
| [
"PySAM.Pvsamv1.default",
"PySAM.Singleowner.default",
"numpy.reshape",
"matplotlib.pyplot.title",
"matplotlib.pyplot.ylabel",
"pathlib.Path",
"matplotlib.pyplot.xlabel",
"itertools.product",
"numpy.array",
"matplotlib.pyplot.figure",
"mpl_toolkits.mplot3d.Axes3D",
"matplotlib.pyplot.contour",
... | [((1671, 1687), 'numpy.arange', 'np.arange', (['(1)', '(10)'], {}), '(1, 10)\n', (1680, 1687), True, 'import numpy as np\n'), ((1696, 1712), 'numpy.arange', 'np.arange', (['(1)', '(10)'], {}), '(1, 10)\n', (1705, 1712), True, 'import numpy as np\n'), ((1723, 1742), 'time.process_time', 'time.process_time', ([], {}), '()\n', (1740, 1742), False, 'import time\n'), ((1904, 1923), 'time.process_time', 'time.process_time', ([], {}), '()\n', (1921, 1923), False, 'import time\n'), ((1987, 2006), 'numpy.array', 'np.array', (['[results]'], {}), '([results])\n', (1995, 2006), True, 'import numpy as np\n'), ((2017, 2045), 'numpy.reshape', 'np.reshape', (['results', '(-1, 9)'], {}), '(results, (-1, 9))\n', (2027, 2045), True, 'import numpy as np\n'), ((2054, 2078), 'numpy.meshgrid', 'np.meshgrid', (['gcrs', 'costs'], {}), '(gcrs, costs)\n', (2065, 2078), True, 'import numpy as np\n'), ((2085, 2097), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2095, 2097), True, 'import matplotlib.pyplot as plt\n'), ((2103, 2114), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (2109, 2114), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((2146, 2182), 'matplotlib.pyplot.title', 'plt.title', (['"""Internal Rate of Return"""'], {}), "('Internal Rate of Return')\n", (2155, 2182), True, 'import matplotlib.pyplot as plt\n'), ((2183, 2200), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""GCR"""'], {}), "('GCR')\n", (2193, 2200), True, 'import matplotlib.pyplot as plt\n'), ((2201, 2228), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$ / land area"""'], {}), "('$ / land area')\n", (2211, 2228), True, 'import matplotlib.pyplot as plt\n'), ((2229, 2239), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2237, 2239), True, 'import matplotlib.pyplot as plt\n'), ((2241, 2267), 'matplotlib.pyplot.contour', 'plt.contour', (['X', 'Y', 'results'], {}), '(X, Y, results)\n', (2252, 2267), True, 'import matplotlib.pyplot as plt\n'), ((2268, 2304), 
'matplotlib.pyplot.title', 'plt.title', (['"""Internal Rate of Return"""'], {}), "('Internal Rate of Return')\n", (2277, 2304), True, 'import matplotlib.pyplot as plt\n'), ((2305, 2322), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""GCR"""'], {}), "('GCR')\n", (2315, 2322), True, 'import matplotlib.pyplot as plt\n'), ((2323, 2350), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$ / land area"""'], {}), "('$ / land area')\n", (2333, 2350), True, 'import matplotlib.pyplot as plt\n'), ((2351, 2361), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2359, 2361), True, 'import matplotlib.pyplot as plt\n'), ((739, 780), 'PySAM.Pvsamv1.default', 'Pvsamv1.default', (['"""FlatPlatePVSingleowner"""'], {}), "('FlatPlatePVSingleowner')\n", (754, 780), True, 'import PySAM.Pvsamv1 as Pvsamv1\n'), ((852, 897), 'PySAM.Singleowner.default', 'Singleowner.default', (['"""FlatPlatePVSingleowner"""'], {}), "('FlatPlatePVSingleowner')\n", (871, 897), True, 'import PySAM.Singleowner as Singleowner\n'), ((1780, 1813), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {'processes': '(4)'}), '(processes=4)\n', (1800, 1813), False, 'import multiprocessing\n'), ((329, 343), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (333, 343), False, 'from pathlib import Path\n'), ((1864, 1892), 'itertools.product', 'product', (['(gcrs / 10)'], {'repeat': '(2)'}), '(gcrs / 10, repeat=2)\n', (1871, 1892), False, 'from itertools import product\n')] |
# Copyright 2013 New Dream Network, LLC (DreamHost)
# Copyright 2015 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import rpc as n_rpc
import oslo_messaging
class LbaasAgentApi(object):
    """Agent side of the Agent to Plugin RPC API."""

    # API version history:
    #   1.0 - initial version

    def __init__(self, topic, context, host):
        self.context = context
        self.host = host
        target = oslo_messaging.Target(topic=topic, version='1.0')
        self.client = n_rpc.get_client(target)

    def _call(self, method, **kwargs):
        """Prepare a client context and synchronously invoke *method*."""
        cctxt = self.client.prepare()
        return cctxt.call(self.context, method, **kwargs)

    def get_ready_devices(self):
        return self._call('get_ready_devices', host=self.host)

    def get_loadbalancer(self, loadbalancer_id):
        return self._call('get_loadbalancer',
                          loadbalancer_id=loadbalancer_id)

    def loadbalancer_deployed(self, loadbalancer_id):
        return self._call('loadbalancer_deployed',
                          loadbalancer_id=loadbalancer_id)

    def update_status(self, obj_type, obj_id, provisioning_status=None,
                      operating_status=None):
        return self._call('update_status', obj_type=obj_type,
                          obj_id=obj_id,
                          provisioning_status=provisioning_status,
                          operating_status=operating_status)

    def loadbalancer_destroyed(self, loadbalancer_id):
        return self._call('loadbalancer_destroyed',
                          loadbalancer_id=loadbalancer_id)

    def plug_vip_port(self, port_id):
        return self._call('plug_vip_port', port_id=port_id,
                          host=self.host)

    def unplug_vip_port(self, port_id):
        return self._call('unplug_vip_port', port_id=port_id,
                          host=self.host)

    def update_loadbalancer_stats(self, loadbalancer_id, stats):
        return self._call('update_loadbalancer_stats',
                          loadbalancer_id=loadbalancer_id, stats=stats)
| [
"neutron.common.rpc.get_client",
"oslo_messaging.Target"
] | [((963, 1012), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': 'topic', 'version': '"""1.0"""'}), "(topic=topic, version='1.0')\n", (984, 1012), False, 'import oslo_messaging\n'), ((1035, 1059), 'neutron.common.rpc.get_client', 'n_rpc.get_client', (['target'], {}), '(target)\n', (1051, 1059), True, 'from neutron.common import rpc as n_rpc\n')] |
'''
Main Driver program for PiCa, a Raspberry pi based car --- Get it? "Driver"
Responsible for starting video feed in thread, handling web socket communication and moving the car
Program by <NAME>, started on the fifteenth of December 2017
'''
#Start with imports
from Car2 import Car
import socket
import subprocess
import picamera
import thread
import os
def sendvid(f=24, v=8160):
    """Stream the Pi camera feed over HTTP via raspivid piped into cvlc.

    Command adapted from
    http://www.raspberry-projects.com/pi/pi-hardware/raspberry-pi-camera/streaming-video-using-vlc-player
    Remove '-vf' if your video is upside down.

    Args:
        f: frames per second (int).
        v: TCP port to serve the stream on (int).
    """
    # Bug fix: f and v are ints, and the original built the command with
    # "..." + f + "..." which raises TypeError (str + int).  Format them in.
    command = ("raspivid -o - -t0 -hf -vf -w 640 -h 480 -n -fps %d"
               "|cvlc -vvv stream:///dev/stdin"
               " --sout '#standard{access=http,mux=ts,dst=:%d}' :demux=h264"
               % (f, v))
    subprocess.Popen(command, shell=True)
# Start video feed in a different thread.
# Uncomment to start video:
#thread.start_new_thread(sendvid, ())

# Receive the steering angle from the Android app over a TCP socket and
# forward it to the car.
PORT = int(0) # Port to communicate over; 0 lets the OS pick any free port
HOST = '0.0.0.0' # Listen on all interfaces (accept from any device)
# GPIO pin assignments for the motor driver (board numbering assumed --
# TODO confirm against Car2.Car's expected pin scheme).
pin1=38
pin2=40
pin3=36
pin4=32
myCar = Car(pin1,pin2,pin3,pin4)
#Global variables
serversocket = socket.socket()
serversocket.bind((HOST, PORT))
# Specify to only listen to one device
serversocket.listen(1)
print("Socket now listening at port " + str(serversocket.getsockname()[1]))
# Get and display input
# NOTE(review): this prints the literal letter "n"; possibly "\n" was intended.
print("n")
while True:
    # Accept one connection per steering command from the app.
    connection, address = serversocket.accept()
    # Read up to 10 bytes; [2:] drops the first two characters -- presumably a
    # protocol prefix sent by the app (TODO confirm against the app's sender).
    receivedstring = connection.recv(10).decode('utf-8')[2:]
    if (receivedstring == '-1'):
        # Sentinel value from the app: power off the Pi.
        print("shutdown")
        subprocess.Popen("sudo shutdown now", shell=True)
    print(receivedstring)
    myCar.steer(receivedstring)
| [
"subprocess.Popen",
"Car2.Car",
"socket.socket"
] | [((1116, 1143), 'Car2.Car', 'Car', (['pin1', 'pin2', 'pin3', 'pin4'], {}), '(pin1, pin2, pin3, pin4)\n', (1119, 1143), False, 'from Car2 import Car\n'), ((1176, 1191), 'socket.socket', 'socket.socket', ([], {}), '()\n', (1189, 1191), False, 'import socket\n'), ((762, 799), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'shell': '(True)'}), '(command, shell=True)\n', (778, 799), False, 'import subprocess\n'), ((1614, 1663), 'subprocess.Popen', 'subprocess.Popen', (['"""sudo shutdown now"""'], {'shell': '(True)'}), "('sudo shutdown now', shell=True)\n", (1630, 1663), False, 'import subprocess\n')] |
from req import WebRequestHandler
from req import Service
import tornado
class WebProductHandler(WebRequestHandler):
    """Handler for product pages: list, add form, single-product view, fast view."""

    @tornado.gen.coroutine
    def get(self, action=None, product_id=None):
        """Dispatch a GET request on the product URLs.

        Args:
            action: None (render the product list), 'add', 'show' or 'fast'.
            product_id: product id, used only when action == 'show'.
        """
        print(action)  # debug trace of the requested action
        if action is None:
            err, data = yield from Service.Product.get_product({'id': self.id})
            if err:
                # Bug fix: was self.wrire_error(500) (typo), which raised
                # AttributeError instead of rendering an error page.
                self.write_error(500)
            else:
                self.render('product/get_product.html', data=data)
        elif action == 'add':
            # NOTE(review): the fetched (err, data) pair is unused here;
            # confirm whether the add page actually needs this lookup.
            err, data = yield from Service.Product.get_product({'id': self.id})
            self.render('product/add_product.html')
        elif action == 'show':
            err, data = yield from Service.Product.get_product_by_id({'id': product_id})
            if err:
                self.write_error(500, err)
            else:
                self.render('product/show_product.html', data=data)
        elif action == 'fast':
            self.render('product/fast_product.html')
| [
"req.Service.Product.get_product_by_id",
"req.Service.Product.get_product"
] | [((278, 322), 'req.Service.Product.get_product', 'Service.Product.get_product', (["{'id': self.id}"], {}), "({'id': self.id})\n", (305, 322), False, 'from req import Service\n'), ((515, 559), 'req.Service.Product.get_product', 'Service.Product.get_product', (["{'id': self.id}"], {}), "({'id': self.id})\n", (542, 559), False, 'from req import Service\n'), ((678, 731), 'req.Service.Product.get_product_by_id', 'Service.Product.get_product_by_id', (["{'id': product_id}"], {}), "({'id': product_id})\n", (711, 731), False, 'from req import Service\n')] |
#
# MIT License
#
# Copyright (c) 2022 GT4SD team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import argparse
import configparser
from typing import Any, Dict, Optional
import sentencepiece as _sentencepiece
from pytorch_lightning import Trainer
from ..ml.models import ARCHITECTURE_FACTORY
from .utils import convert_string_to_class
# sentencepiece has to be loaded before lightning to avoid segfaults
_sentencepiece
def parse_arguments_from_config(conf_file: Optional[str] = None) -> argparse.Namespace:
    """Parse arguments from a configuration file, merging in CLI overrides.

    Each non-general section of the config file names a model; that model's
    class contributes its own CLI arguments (suffixed with "_<section>").
    Pytorch Lightning Trainer arguments and a few run-level options are
    appended last.

    Args:
        conf_file: configuration file. Defaults to None, i.e. use the default
            configuration in ./config/config.ini (or the --conf_file CLI flag).

    Returns:
        the parsed arguments as a namespace; per-model options are grouped
        into one dict attribute per config section, and the section names are
        listed under "model_list".
    """
    parser = argparse.ArgumentParser()
    # open config.ini file, either from parser or default file
    parser.add_argument(
        "--conf_file",
        type=str,
        help=("config file for the defaults value"),
        default="./config/config.ini",
    )
    # Read config file (explicit argument wins over the --conf_file CLI flag)
    args, remaining_argv = parser.parse_known_args()
    config = configparser.ConfigParser()
    if conf_file:
        config.read(conf_file)
    else:
        config.read(args.conf_file)
    # Section names that are NOT model names
    general_config_classes = ["general", "trainer", "default"]
    # Record the list of model section names in the result
    result: Dict[str, Any] = dict()
    result["model_list"] = [
        i for i in list(config.keys()) if i.lower() not in general_config_classes
    ]
    for key in [*config.keys()]:
        # Go through all model parameters, replacing config-file values with
        # any that were explicitly parsed from the command line.
        if key.lower() not in general_config_classes:
            model_type = config[key]["type"]
            params_from_configfile = dict(config[key])
            # Let the model class register its own CLI arguments for this section
            model = ARCHITECTURE_FACTORY[model_type.lower()]
            parser = model.add_model_specific_args(parser, key)
            args, _ = parser.parse_known_args()
            args_dictionary = vars(args)
            params_from_configfile["name"] = key
            # Convert raw config strings into typed Python values
            for i in params_from_configfile:
                params_from_configfile[i] = convert_string_to_class(
                    params_from_configfile[i]
                )
            # CLI args for this model are named "<param>_<key>"; strip the
            # "_<key>" suffix and override the config-file values with them.
            params_from_configfile.update(
                {
                    k[: -len(key) - 1]: v
                    for k, v in args_dictionary.items()
                    if v is not None and k.endswith("_" + key)
                }
            )
            result[key] = params_from_configfile
        elif key.lower() == "trainer" or key.lower() == "general":
            # General/trainer sections are flattened into the top-level result
            params_from_configfile = dict(config[key])
            for i in params_from_configfile:
                params_from_configfile[i] = convert_string_to_class(
                    params_from_configfile[i]
                )
            result.update(params_from_configfile)
    # Add the Pytorch Lightning Trainer arguments
    parser = Trainer.add_argparse_args(parser)
    # Run-level options (basename is used as the name of the run)
    parser.add_argument("--basename", type=str)
    parser.add_argument("--batch_size", type=int)
    parser.add_argument("--num_workers", type=int)
    parser.add_argument("--lr", type=float)
    parser.add_argument("--validation_split", type=float, default=None)
    parser.add_argument("--validation_indices_file", type=str)
    args_dictionary = vars(parser.parse_args(remaining_argv))
    result.update({k: v for k, v in args_dictionary.items() if v is not None})
    result_namespace = argparse.Namespace(**result)
    return result_namespace
| [
"pytorch_lightning.Trainer.add_argparse_args",
"configparser.ConfigParser",
"argparse.ArgumentParser",
"argparse.Namespace"
] | [((1777, 1802), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1800, 1802), False, 'import argparse\n'), ((2121, 2148), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (2146, 2148), False, 'import configparser\n'), ((3988, 4021), 'pytorch_lightning.Trainer.add_argparse_args', 'Trainer.add_argparse_args', (['parser'], {}), '(parser)\n', (4013, 4021), False, 'from pytorch_lightning import Trainer\n'), ((4560, 4588), 'argparse.Namespace', 'argparse.Namespace', ([], {}), '(**result)\n', (4578, 4588), False, 'import argparse\n')] |
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2021 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys, os, argparse
sys.path.append(os.path.join(sys.path[0],'../..'))
import TermTk as ttk
def demoColorPicker(root=None):
    """Build the color-picker demo window and return its containing frame.

    The window holds three columns of TTkColorButtonPicker widgets:
    column x=0 with four background colors, x=10 with four foreground
    colors, and x=20 with four white backgrounds.
    """
    frame = ttk.TTkFrame(parent=root, border=False)
    winCP = ttk.TTkWindow(parent=frame, pos=(0, 0), size=(30, 16),
                          title="Test Color Pickers", border=True)
    # (x position, color constructor, hex colors top-to-bottom) per column;
    # replaces 12 near-identical hand-written calls.
    columns = [
        (0,  ttk.TTkColor.bg, ['#88ffff', '#ff88ff', '#ffff88', '#8888ff']),
        (10, ttk.TTkColor.fg, ['#00ffff', '#ff00ff', '#ffff00', '#0000ff']),
        (20, ttk.TTkColor.bg, ['#ffffff', '#ffffff', '#ffffff', '#ffffff']),
    ]
    for x, makeColor, hexColors in columns:
        for row, hexColor in enumerate(hexColors):
            # Buttons are stacked every 3 rows (each is 3 cells tall).
            ttk.TTkColorButtonPicker(parent=winCP, pos=(x, row * 3), size=(8, 3),
                                     border=True, color=makeColor(hexColor))
    # win2_1 = ttk.TTkColorDialogPicker(parent=frame,pos = (3,3), size=(110,40), title="Test Color Picker", border=True)
    return frame
def main():
    """Parse CLI flags and run the color-picker demo in a TermTk main loop."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', help='Full Screen', action='store_true')
    fullscreen = parser.parse_args().f

    ttk.TTkLog.use_default_file_logging()
    root = ttk.TTk()
    if fullscreen:
        # Full screen: the root terminal itself hosts the demo.
        root.setLayout(ttk.TTkGridLayout())
        container = root
    else:
        # Windowed: the demo lives inside a bordered TTkWindow.
        container = ttk.TTkWindow(parent=root, pos=(0, 0), size=(120, 50),
                                  title="Test Color Picker", border=True,
                                  layout=ttk.TTkGridLayout())
    demoColorPicker(container)
    root.mainloop()
# Script entry point: run the demo only when executed directly.
if __name__ == "__main__":
    main()
"argparse.ArgumentParser",
"TermTk.TTkGridLayout",
"TermTk.TTkColor.fg",
"TermTk.TTkLog.use_default_file_logging",
"os.path.join",
"TermTk.TTkColor.bg",
"TermTk.TTk",
"TermTk.TTkFrame",
"TermTk.TTkWindow"
] | [((1178, 1212), 'os.path.join', 'os.path.join', (['sys.path[0]', '"""../.."""'], {}), "(sys.path[0], '../..')\n", (1190, 1212), False, 'import sys, os, argparse\n'), ((1280, 1319), 'TermTk.TTkFrame', 'ttk.TTkFrame', ([], {'parent': 'root', 'border': '(False)'}), '(parent=root, border=False)\n', (1292, 1319), True, 'import TermTk as ttk\n'), ((1333, 1433), 'TermTk.TTkWindow', 'ttk.TTkWindow', ([], {'parent': 'frame', 'pos': '(0, 0)', 'size': '(30, 16)', 'title': '"""Test Color Pickers"""', 'border': '(True)'}), "(parent=frame, pos=(0, 0), size=(30, 16), title=\n 'Test Color Pickers', border=True)\n", (1346, 1433), True, 'import TermTk as ttk\n'), ((2975, 3000), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2998, 3000), False, 'import sys, os, argparse\n'), ((3108, 3145), 'TermTk.TTkLog.use_default_file_logging', 'ttk.TTkLog.use_default_file_logging', ([], {}), '()\n', (3143, 3145), True, 'import TermTk as ttk\n'), ((3158, 3167), 'TermTk.TTk', 'ttk.TTk', ([], {}), '()\n', (3165, 3167), True, 'import TermTk as ttk\n'), ((1514, 1540), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#88ffff"""'], {}), "('#88ffff')\n", (1529, 1540), True, 'import TermTk as ttk\n'), ((1629, 1655), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#ff88ff"""'], {}), "('#ff88ff')\n", (1644, 1655), True, 'import TermTk as ttk\n'), ((1744, 1770), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#ffff88"""'], {}), "('#ffff88')\n", (1759, 1770), True, 'import TermTk as ttk\n'), ((1859, 1885), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#8888ff"""'], {}), "('#8888ff')\n", (1874, 1885), True, 'import TermTk as ttk\n'), ((1974, 2000), 'TermTk.TTkColor.fg', 'ttk.TTkColor.fg', (['"""#00ffff"""'], {}), "('#00ffff')\n", (1989, 2000), True, 'import TermTk as ttk\n'), ((2089, 2115), 'TermTk.TTkColor.fg', 'ttk.TTkColor.fg', (['"""#ff00ff"""'], {}), "('#ff00ff')\n", (2104, 2115), True, 'import TermTk as ttk\n'), ((2204, 2230), 'TermTk.TTkColor.fg', 'ttk.TTkColor.fg', 
(['"""#ffff00"""'], {}), "('#ffff00')\n", (2219, 2230), True, 'import TermTk as ttk\n'), ((2319, 2345), 'TermTk.TTkColor.fg', 'ttk.TTkColor.fg', (['"""#0000ff"""'], {}), "('#0000ff')\n", (2334, 2345), True, 'import TermTk as ttk\n'), ((2434, 2460), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#ffffff"""'], {}), "('#ffffff')\n", (2449, 2460), True, 'import TermTk as ttk\n'), ((2549, 2575), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#ffffff"""'], {}), "('#ffffff')\n", (2564, 2575), True, 'import TermTk as ttk\n'), ((2664, 2690), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#ffffff"""'], {}), "('#ffffff')\n", (2679, 2690), True, 'import TermTk as ttk\n'), ((2779, 2805), 'TermTk.TTkColor.bg', 'ttk.TTkColor.bg', (['"""#ffffff"""'], {}), "('#ffffff')\n", (2794, 2805), True, 'import TermTk as ttk\n'), ((3206, 3225), 'TermTk.TTkGridLayout', 'ttk.TTkGridLayout', ([], {}), '()\n', (3223, 3225), True, 'import TermTk as ttk\n'), ((3383, 3402), 'TermTk.TTkGridLayout', 'ttk.TTkGridLayout', ([], {}), '()\n', (3400, 3402), True, 'import TermTk as ttk\n')] |
# Parse Newick-formatted file into tree of { 'kids': [], 'label': '', 'length': '' }
import logging
from utils import die
def skipSpaces(treeString, offset):
    """Return the index of the first non-whitespace character at or after offset."""
    end = len(treeString)
    while offset < end and treeString[offset].isspace():
        offset += 1
    return offset
def parseQuoted(treeString, offset):
    """Read a quoted, possibly backslash-escaped label in treeString at offset.

    offset must point at the opening quote (' or ").  Returns (label, offset)
    where offset is past the closing quote and any trailing whitespace.
    Calls die() if the closing quote is missing.
    """
    label = ''
    quoteChar = treeString[offset]
    offset += 1
    labelStart = offset
    # Bug fix: both bounds checks use '<' instead of '!=' so that an escape
    # ('\\') as the final character cannot push offset past the end and
    # trigger an IndexError on the next iteration.
    while (offset < len(treeString) and treeString[offset] != quoteChar):
        if (treeString[offset] == '\\'):
            offset += 1  # skip the escaped character too
        offset += 1
    # Bug fix: when input ends without a closing quote, the original indexed
    # treeString[len(treeString)] (IndexError) instead of reporting the error.
    if (offset >= len(treeString) or treeString[offset] != quoteChar):
        die("Missing end-" + quoteChar + " after '" + treeString + "'")
    else:
        label = treeString[labelStart:offset]
        offset = skipSpaces(treeString, offset + 1)
    return (label, offset)
def terminatesLabel(treeString, offset):
    """Return True if treeString+offset is empty or starts w/char that would terminate a label"""
    if offset == len(treeString):
        return True
    return treeString[offset] in (',', ')', ';', ':')
def parseLabel(treeString, offset):
    """Read a possibly quoted, possibly backslash-escaped node label
    terminated by one of [,):;].  Returns (label, offset-past-label)."""
    if offset == len(treeString):
        return ('', offset)
    if treeString[offset] in ("'", '"'):
        # Quoted label: delegate to the quote-aware parser.
        return parseQuoted(treeString, offset)
    start = offset
    while not terminatesLabel(treeString, offset):
        if treeString[offset] == '\\':
            offset += 1  # keep the escaped character in the label
        offset += 1
    return (treeString[start:offset], skipSpaces(treeString, offset))
def parseLength(treeString, offset):
    """If treeString[offset] is ':', parse the branch length that follows and
    return (lengthString, newOffset); otherwise return ('', offset)."""
    if (offset == len(treeString) or treeString[offset] != ':'):
        return ('', offset)
    # Branch length follows the ':'
    offset = skipSpaces(treeString, offset + 1)
    # Bug fix: guard against the string ending right after ':' -- the original
    # indexed treeString[offset] unconditionally and could raise IndexError.
    if (offset == len(treeString) or not treeString[offset].isdigit()):
        die("Expected number to follow ':' but instead got '" +
            treeString[offset:offset+100] + "'")
    lengthStart = offset
    # Accept digits, '.', scientific-notation 'E'/'e', and '-' (exponent sign).
    while (offset != len(treeString) and
           (treeString[offset].isdigit() or treeString[offset] == '.' or
            treeString[offset] == 'E' or treeString[offset] == 'e' or
            treeString[offset] == '-')):
        offset += 1
    lengthStr = treeString[lengthStart:offset]
    offset = skipSpaces(treeString, offset)
    return (lengthStr, offset)
def parseBranch(treeString, offset, internalNode):
    """Recursively parse Newick branch (x, y, z)[label][:length] from treeString at offset.

    Returns (branch, newOffset, internalNode) where branch is a dict with
    'kids', 'label', 'length' and a unique 'inode' number for internal nodes.
    internalNode is the running counter of internal nodes seen so far.
    Mutually recursive with parseString().  Calls die() on malformed input.
    """
    if (treeString[offset] != '('):
        die("parseBranch called on treeString that doesn't begin with '(': '" +
            treeString + "'")
    branchStart = offset
    internalNode += 1
    branch = { 'kids': [], 'label': '', 'length': '', 'inode': internalNode }
    offset = skipSpaces(treeString, offset + 1)
    # Parse comma-separated children until the matching ')' (or end/';').
    while (offset != len(treeString) and treeString[offset] != ')' and treeString[offset] != ';'):
        (child, offset, internalNode) = parseString(treeString, offset, internalNode)
        branch['kids'].append(child)
        if (treeString[offset] == ','):
            offset = skipSpaces(treeString, offset + 1)
    if (offset == len(treeString)):
        die("Input ended before ')' for '" + treeString[branchStart:branchStart+100] + "'")
    if (treeString[offset] == ')'):
        offset = skipSpaces(treeString, offset + 1)
    else:
        die("Can't find ')' matching '" + treeString[branchStart:branchStart+100] + "', " +
            "instead got '" + treeString[offset:offset+100] + "'")
    # An internal node may carry its own label and branch length after ')'.
    (branch['label'], offset) = parseLabel(treeString, offset)
    (branch['length'], offset) = parseLength(treeString, offset)
    return (branch, offset, internalNode)
def parseString(treeString, offset=0, internalNode=0):
    """Recursively parse a Newick tree from treeString.

    Returns (node, newOffset, internalNode).  Mutually recursive with
    parseBranch() for internal '(...)' nodes; bare labels become leaves.
    """
    offset = skipSpaces(treeString, offset)
    if treeString[offset] == '(':
        # Internal node: hand off to the branch parser.
        return parseBranch(treeString, offset, internalNode)
    # Leaf: a label with an optional :length suffix.
    (label, offset) = parseLabel(treeString, offset)
    (length, offset) = parseLength(treeString, offset)
    return ({'kids': None, 'label': label, 'length': length}, offset, internalNode)
def parseFile(treeFile):
    """Read a Newick file and return its tree object (None for an empty file).

    Only the first line of the file is parsed.  Calls die() if the line is
    not ';'-terminated after the tree text.
    """
    # The file is only needed for one readline; the 'with' block closes it.
    # (The original also called treeF.close() inside the with block, which
    # is redundant -- the context manager already closes the file.)
    with open(treeFile, 'r') as treeF:
        line1 = treeF.readline().strip()
    if (line1 == ''):
        return None
    (tree, offset, internalNode) = parseString(line1)
    if (offset != len(line1) and line1[offset] != ';'):
        die("Tree terminated without ';' before '" + line1[offset:offset+100] + "'")
    return tree
def treeToString(node, pretty=False, indent=0):
    """Return a Newick string encoding node and its descendants, optionally
    pretty-printing with newlines and indentation.  The result is not
    ';'-terminated; the caller must add that."""
    if not node:
        return ''
    # Label and optional :length form the suffix shared by leaves and branches.
    suffix = ''
    if node['label']:
        suffix += node['label']
    if node['length']:
        suffix += ':' + node['length']
    if not node['kids']:
        return suffix
    childIndent = indent + 1
    separator = ',\n' + ' ' * childIndent if pretty else ','
    children = separator.join(
        treeToString(kid, pretty, childIndent) for kid in node['kids'])
    return '(' + children + ')' + suffix
def printTree(tree, pretty=False, indent=0):
    """Print out Newick text encoding tree, optionally pretty-printing with
    newlines and indentation."""
    text = treeToString(tree, pretty, indent)
    print(text + ';')
def leafNames(node):
    """Return a list of labels of all leaf descendants of node."""
    if not node['kids']:
        return [node['label']]
    names = []
    for kid in node['kids']:
        names.extend(leafNames(kid))
    return names
def treeIntersectIds(node, idLookup, sampleSet, lookupFunc=None):
    """For each leaf in node, attempt to look up its label in idLookup; replace if found.
    Prune nodes with no matching leaves.  Store new leaf labels in sampleSet.
    If lookupFunc is given, it is passed two arguments (label, idLookup) and returns a
    possibly empty list of matches.

    Returns the (mutated) pruned node, or None when nothing matched.
    """
    if (node['kids']):
        # Internal node: recurse into children and drop the ones with no matches.
        prunedKids = []
        for kid in (node['kids']):
            kidIntersected = treeIntersectIds(kid, idLookup, sampleSet, lookupFunc)
            if (kidIntersected):
                prunedKids.append(kidIntersected)
        if (len(prunedKids) > 1):
            node['kids'] = prunedKids
        elif (len(prunedKids) == 1):
            # Collapse a single-child internal node to that child.
            node = prunedKids[0]
        else:
            node = None
    else:
        # Leaf: look up the label, prune the leaf if not found.
        label = node['label']
        if (lookupFunc):
            matchList = lookupFunc(node['label'], idLookup)
        elif label in idLookup:
            matchList = idLookup[label]
        else:
            matchList = []
        if (not matchList):
            logging.info("No match for leaf '" + label + "'")
            node = None
        else:
            if (len(matchList) != 1):
                # Bug fix: logging.warn is a deprecated alias; use warning().
                # NOTE(review): on a non-unique match the label is NOT replaced
                # but matchList[0] is still added to sampleSet -- confirm intent.
                logging.warning("Non-unique match for leaf '" + label + "': ['" +
                                "', '".join(matchList) + "']")
            else:
                logging.debug(label + ' --> ' + matchList[0])
                node['label'] = matchList[0]
            sampleSet.add(matchList[0])
    return node
| [
"utils.die",
"logging.info",
"logging.debug"
] | [((686, 749), 'utils.die', 'die', (['(\'Missing end-\' + quoteChar + " after \'" + treeString + "\'")'], {}), '(\'Missing end-\' + quoteChar + " after \'" + treeString + "\'")\n', (689, 749), False, 'from utils import die\n'), ((2911, 3004), 'utils.die', 'die', (['("parseBranch called on treeString that doesn\'t begin with \'(\': \'" +\n treeString + "\'")'], {}), '("parseBranch called on treeString that doesn\'t begin with \'(\': \'" +\n treeString + "\'")\n', (2914, 3004), False, 'from utils import die\n'), ((3549, 3639), 'utils.die', 'die', (['("Input ended before \')\' for \'" + treeString[branchStart:branchStart + 100] +\n "\'")'], {}), '("Input ended before \')\' for \'" + treeString[branchStart:branchStart + \n 100] + "\'")\n', (3552, 3639), False, 'from utils import die\n'), ((3739, 3885), 'utils.die', 'die', (['("Can\'t find \')\' matching \'" + treeString[branchStart:branchStart + 100] +\n "\', " + "instead got \'" + treeString[offset:offset + 100] + "\'")'], {}), '("Can\'t find \')\' matching \'" + treeString[branchStart:branchStart + 100] +\n "\', " + "instead got \'" + treeString[offset:offset + 100] + "\'")\n', (3742, 3885), False, 'from utils import die\n'), ((2146, 2245), 'utils.die', 'die', (['("Expected number to follow \':\' but instead got \'" + treeString[offset:\n offset + 100] + "\'")'], {}), '("Expected number to follow \':\' but instead got \'" + treeString[offset:\n offset + 100] + "\'")\n', (2149, 2245), False, 'from utils import die\n'), ((4882, 4960), 'utils.die', 'die', (['("Tree terminated without \';\' before \'" + line1[offset:offset + 100] + "\'")'], {}), '("Tree terminated without \';\' before \'" + line1[offset:offset + 100] + "\'")\n', (4885, 4960), False, 'from utils import die\n'), ((7368, 7417), 'logging.info', 'logging.info', (['("No match for leaf \'" + label + "\'")'], {}), '("No match for leaf \'" + label + "\'")\n', (7380, 7417), False, 'import logging\n'), ((7667, 7712), 'logging.debug', 'logging.debug', (["(label + 
' --> ' + matchList[0])"], {}), "(label + ' --> ' + matchList[0])\n", (7680, 7712), False, 'import logging\n')] |
# Generate a random 10-character password and print it (in red), preceded by
# an ASCII-art banner from ascii.txt (in green); reset colors afterwards.
from random import choice

digits = '0123456789'
chars = 'abcdefghijklmn' + \
        'opqrstuvwxyz'
up = chars.upper()
special = '_!$%&?ù'
# Renamed from 'all' so we do not shadow the builtin all().
alphabet = digits + chars + up + special

password = ''.join(
    choice(alphabet) for _ in range(10)
)

# Use a context manager so the file is closed even if read() fails.
with open('ascii.txt', 'r') as f:
    file_contents = f.read()

print("\x1b[1;32m ")    # ANSI: bright green
print(file_contents)
print("\033[0;31m")     # ANSI: red
print(password)
print("\033[0;37;40m")  # ANSI: white on black (reset)
| [
"random.choice"
] | [((204, 215), 'random.choice', 'choice', (['all'], {}), '(all)\n', (210, 215), False, 'from random import choice\n')] |
from app import db
from app import login
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from datetime import datetime
from hashlib import md5
@login.user_loader
def load_user(id):
    """Flask-Login callback: resolve a session-stored id to its User row."""
    user_id = int(id)
    return User.query.get(user_id)
# Many-to-many: users contributing to projects.
project_contributors = db.Table('project_contributors', db.Model.metadata,
    db.Column('contributors', db.Integer, db.ForeignKey('user.id')),
    db.Column('project', db.Integer, db.ForeignKey('project.id'))
)
# Many-to-many: users upvoting projects.
project_upvoters = db.Table('project_upvoters', db.Model.metadata,
    db.Column('project', db.Integer, db.ForeignKey('project.id')),
    db.Column('upvoters', db.Integer, db.ForeignKey('user.id'))
)
# Many-to-many: comments attached to projects.
project_comment = db.Table('project_comments', db.Model.metadata,
    db.Column('project', db.Integer, db.ForeignKey('project.id')),
    db.Column('comment', db.Integer, db.ForeignKey('comment.id'))
)
class User(UserMixin, db.Model):
    """A registered user: login credentials, profile details and activity."""
    __tablename__ = 'user'
    # Login details
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), index=True, unique=True, nullable=False)
    email = db.Column(db.String(128), index=True, unique=True, nullable=False)
    password_hash = db.Column(db.String(128))  # never store plaintext passwords
    # Profile details
    first_name = db.Column(db.String(32))
    last_name = db.Column(db.String(32))
    college = db.Column(db.String(64))
    website = db.Column(db.String(128))
    about_me = db.Column(db.Text)
    social_github = db.Column(db.String(128))
    social_linked = db.Column(db.String(128))
    social_twitter = db.Column(db.String(128))
    # Posts / activity
    comments = db.relationship('Comment', backref='author', lazy='dynamic')
    upvotes = db.relationship('Project', secondary=project_upvoters, backref='upvoters')

    def set_password(self, password):
        """Hash *password* and store it on the user."""
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def avatar(self, size):
        """Return the user's Gravatar URL (identicon fallback) at *size* pixels."""
        digest = md5(self.email.lower().encode('utf-8')).hexdigest()
        return f'https://www.gravatar.com/avatar/{digest}?d=identicon&s={size}'

    def __repr__(self):
        return f'<User {self.username}>'
class Project(db.Model):
    """A project with contributors, upvotes and comments."""
    __tablename__ = 'project'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), nullable=False)
    summary = db.Column(db.String(256))
    website = db.Column(db.String(128))
    description = db.Column(db.Text)
    future_scope = db.Column(db.String(32))
    short_term_goal = db.Column(db.Text)
    # Up to three free-form category tags
    category_primary = db.Column(db.String(32))
    category_secondary = db.Column(db.String(32))
    category_tertiary = db.Column(db.String(32))
    comments = db.relationship('Comment', secondary=project_comment, backref='project')
    contributors = db.relationship('User', secondary=project_contributors, backref='projects')

    #rank = %COUNT(upvotes) WHERE PROJECT = self.id
    def rank(self):
        # TODO: intended to rank the project by upvote count (see note above)
        raise NotImplementedError

    def upvote_number(self):
        # TODO: count of upvoters for this project
        raise NotImplementedError

    def upvote_user_list(self):
        # TODO: list of users who upvoted this project
        raise NotImplementedError

    def __repr__(self):
        return f'<Project {self.name}>'
class Comment(db.Model):
    """A user's comment; linked to projects via the project_comments table."""
    __tablename__ = 'comment'
    id = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.Text, nullable=False)
    timestamp = db.Column(db.DateTime, default=datetime.utcnow)  # set at insert time
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))

    def __repr__(self):
        return f'<Comment {self.author} - {self.timestamp}>'
# NOTE(review): this duplicates the load_user defined earlier in this module;
# this later registration is the one Flask-Login keeps.  Confirm one copy
# can be removed.
@login.user_loader
def load_user(id):
    return User.query.get(int(id))
| [
"app.db.String",
"werkzeug.security.generate_password_hash",
"app.db.Column",
"app.db.ForeignKey",
"app.db.relationship",
"werkzeug.security.check_password_hash"
] | [((983, 1022), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (992, 1022), False, 'from app import db\n'), ((1428, 1446), 'app.db.Column', 'db.Column', (['db.Text'], {}), '(db.Text)\n', (1437, 1446), False, 'from app import db\n'), ((1615, 1675), 'app.db.relationship', 'db.relationship', (['"""Comment"""'], {'backref': '"""author"""', 'lazy': '"""dynamic"""'}), "('Comment', backref='author', lazy='dynamic')\n", (1630, 1675), False, 'from app import db\n'), ((1691, 1765), 'app.db.relationship', 'db.relationship', (['"""Project"""'], {'secondary': 'project_upvoters', 'backref': '"""upvoters"""'}), "('Project', secondary=project_upvoters, backref='upvoters')\n", (1706, 1765), False, 'from app import db\n'), ((2286, 2325), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (2295, 2325), False, 'from app import db\n'), ((2476, 2494), 'app.db.Column', 'db.Column', (['db.Text'], {}), '(db.Text)\n', (2485, 2494), False, 'from app import db\n'), ((2561, 2579), 'app.db.Column', 'db.Column', (['db.Text'], {}), '(db.Text)\n', (2570, 2579), False, 'from app import db\n'), ((2742, 2814), 'app.db.relationship', 'db.relationship', (['"""Comment"""'], {'secondary': 'project_comment', 'backref': '"""project"""'}), "('Comment', secondary=project_comment, backref='project')\n", (2757, 2814), False, 'from app import db\n'), ((2834, 2909), 'app.db.relationship', 'db.relationship', (['"""User"""'], {'secondary': 'project_contributors', 'backref': '"""projects"""'}), "('User', secondary=project_contributors, backref='projects')\n", (2849, 2909), False, 'from app import db\n'), ((3278, 3317), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (3287, 3317), False, 'from app import db\n'), ((3329, 3363), 'app.db.Column', 'db.Column', (['db.Text'], {'nullable': '(False)'}), '(db.Text, nullable=False)\n', 
(3338, 3363), False, 'from app import db\n'), ((3380, 3427), 'app.db.Column', 'db.Column', (['db.DateTime'], {'default': 'datetime.utcnow'}), '(db.DateTime, default=datetime.utcnow)\n', (3389, 3427), False, 'from app import db\n'), ((395, 419), 'app.db.ForeignKey', 'db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (408, 419), False, 'from app import db\n'), ((459, 486), 'app.db.ForeignKey', 'db.ForeignKey', (['"""project.id"""'], {}), "('project.id')\n", (472, 486), False, 'from app import db\n'), ((595, 622), 'app.db.ForeignKey', 'db.ForeignKey', (['"""project.id"""'], {}), "('project.id')\n", (608, 622), False, 'from app import db\n'), ((663, 687), 'app.db.ForeignKey', 'db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (676, 687), False, 'from app import db\n'), ((795, 822), 'app.db.ForeignKey', 'db.ForeignKey', (['"""project.id"""'], {}), "('project.id')\n", (808, 822), False, 'from app import db\n'), ((862, 889), 'app.db.ForeignKey', 'db.ForeignKey', (['"""comment.id"""'], {}), "('comment.id')\n", (875, 889), False, 'from app import db\n'), ((1048, 1061), 'app.db.String', 'db.String', (['(64)'], {}), '(64)\n', (1057, 1061), False, 'from app import db\n'), ((1126, 1140), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1135, 1140), False, 'from app import db\n'), ((1213, 1227), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1222, 1227), False, 'from app import db\n'), ((1278, 1291), 'app.db.String', 'db.String', (['(32)'], {}), '(32)\n', (1287, 1291), False, 'from app import db\n'), ((1319, 1332), 'app.db.String', 'db.String', (['(32)'], {}), '(32)\n', (1328, 1332), False, 'from app import db\n'), ((1358, 1371), 'app.db.String', 'db.String', (['(64)'], {}), '(64)\n', (1367, 1371), False, 'from app import db\n'), ((1397, 1411), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1406, 1411), False, 'from app import db\n'), ((1478, 1492), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1487, 1492), False, 
'from app import db\n'), ((1525, 1539), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1534, 1539), False, 'from app import db\n'), ((1572, 1586), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (1581, 1586), False, 'from app import db\n'), ((1834, 1866), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (1856, 1866), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((1927, 1976), 'werkzeug.security.check_password_hash', 'check_password_hash', (['self.password_hash', 'password'], {}), '(self.password_hash, password)\n', (1946, 1976), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((2347, 2360), 'app.db.String', 'db.String', (['(64)'], {}), '(64)\n', (2356, 2360), False, 'from app import db\n'), ((2402, 2416), 'app.db.String', 'db.String', (['(256)'], {}), '(256)\n', (2411, 2416), False, 'from app import db\n'), ((2442, 2456), 'app.db.String', 'db.String', (['(128)'], {}), '(128)\n', (2451, 2456), False, 'from app import db\n'), ((2524, 2537), 'app.db.String', 'db.String', (['(32)'], {}), '(32)\n', (2533, 2537), False, 'from app import db\n'), ((2613, 2626), 'app.db.String', 'db.String', (['(32)'], {}), '(32)\n', (2622, 2626), False, 'from app import db\n'), ((2663, 2676), 'app.db.String', 'db.String', (['(32)'], {}), '(32)\n', (2672, 2676), False, 'from app import db\n'), ((2712, 2725), 'app.db.String', 'db.String', (['(32)'], {}), '(32)\n', (2721, 2725), False, 'from app import db\n'), ((3464, 3488), 'app.db.ForeignKey', 'db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (3477, 3488), False, 'from app import db\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the ProductWish model linking a contact to a wished product."""
    # Both referenced apps must have their initial schema applied first.
    dependencies = [
        ('contact', '0001_initial'),
        ('product', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='ProductWish',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # Optional free-text details about the wish.
                ('_details', models.TextField(null=True, verbose_name='Details', blank=True)),
                ('create_date', models.DateTimeField(verbose_name='Creation Date')),
                # Deleting a contact or product cascades to its wishes.
                ('contact', models.ForeignKey(related_name='wishlist', verbose_name='Contact', to='contact.Contact', on_delete=models.CASCADE)),
                ('product', models.ForeignKey(related_name='wishes', verbose_name='Product', to='product.Product', on_delete=models.CASCADE)),
            ],
            options={
                'verbose_name': 'Product Wish',
                'verbose_name_plural': 'Product Wishes',
            },
            bases=(models.Model,),
        ),
    ]
| [
"django.db.models.DateTimeField",
"django.db.models.AutoField",
"django.db.models.TextField",
"django.db.models.ForeignKey"
] | [((377, 470), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (393, 470), False, 'from django.db import models, migrations\n'), ((498, 561), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""Details"""', 'blank': '(True)'}), "(null=True, verbose_name='Details', blank=True)\n", (514, 561), False, 'from django.db import models, migrations\n'), ((596, 646), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""Creation Date"""'}), "(verbose_name='Creation Date')\n", (616, 646), False, 'from django.db import models, migrations\n'), ((677, 796), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""wishlist"""', 'verbose_name': '"""Contact"""', 'to': '"""contact.Contact"""', 'on_delete': 'models.CASCADE'}), "(related_name='wishlist', verbose_name='Contact', to=\n 'contact.Contact', on_delete=models.CASCADE)\n", (694, 796), False, 'from django.db import models, migrations\n'), ((822, 939), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""wishes"""', 'verbose_name': '"""Product"""', 'to': '"""product.Product"""', 'on_delete': 'models.CASCADE'}), "(related_name='wishes', verbose_name='Product', to=\n 'product.Product', on_delete=models.CASCADE)\n", (839, 939), False, 'from django.db import models, migrations\n')] |
import sys
import time
import traceback
import subprocess
# Periodically snapshot '/tmp' and '.redirector-data' from each pod named on
# the command line: tar the directory inside the pod and copy the archive out
# whenever it has grown since the last check.  A pod is abandoned after 20
# consecutive-or-total failures.
pods = [{
    "name": pod,
    "num_errors": 0,
    "last_tmp_tar_size": 0,
    "last_.redirector-data_tar_size": 0
} for pod in sys.argv[1:]]
while True:
    time.sleep(10)
    for pod in pods:
        if pod["num_errors"] >= 20:
            continue  # too many failures; stop polling this pod
        for dirname in ['.redirector-data', '/tmp']:
            # '/tmp' cannot appear verbatim in a local file name; map it to 'tmp'
            # so it matches the "last_tmp_tar_size" tracking key above.
            if dirname == '/tmp':
                filename = 'tmp'
            else:
                filename = dirname
            kwargs = {"pod_name": pod["name"], "filename": filename, "dirname": dirname}
            # Create the tar inside the pod.
            # BUG FIX: the command templates used a literal "(unknown)" instead
            # of the {filename} placeholder, so the archive name never matched
            # the stat/cp commands or the size-tracking keys (KeyError below).
            returncode, output = subprocess.getstatusoutput(
                "kubectl exec {pod_name} -- tar cvf {filename}.tar {dirname}".format(**kwargs))
            if returncode != 0:
                print(output)
                print('{pod_name} failed to tar {dirname}'.format(**kwargs))
                pod["num_errors"] += 1
                continue
            # Ask the pod for the archive size so we only copy when it grew.
            returncode, tarsize = subprocess.getstatusoutput(
                "kubectl exec {pod_name} -- stat --format=%s {filename}.tar".format(**kwargs))
            if returncode != 0:
                print('{pod_name}: failed to get {filename}.tar size'.format(**kwargs))
                pod["num_errors"] += 1
                continue
            try:
                tarsize = int(tarsize)
            except ValueError:  # narrowed from bare Exception: only int() can fail here
                traceback.print_exc()
                pod["num_errors"] += 1
                continue
            if tarsize > pod["last_{filename}_tar_size".format(**kwargs)]:
                print("Copying new tar for pod {pod_name} {filename} ({tarsize} bytes)".format(tarsize=tarsize, **kwargs))
                returncode, output = subprocess.getstatusoutput(
                    "kubectl cp {pod_name}:{filename}.tar redirector-data-{pod_name}-{filename}.tar".format(**kwargs))
                pod["last_{filename}_tar_size".format(**kwargs)] = tarsize
| [
"traceback.print_exc",
"time.sleep"
] | [((219, 233), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (229, 233), False, 'import time\n'), ((1348, 1369), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1367, 1369), False, 'import traceback\n')] |
'''
@Author: <NAME> <EMAIL>
'''
import sys
from abc import *
class Config:
    """Base crawler configuration.

    Subclass and implement GetSeeds, HandleData and ValidUrl; set
    UserAgentString before starting the crawler
    (http://tools.ietf.org/html/rfc2616#section-14.43).
    """
    __metaclass__ = ABCMeta  # NOTE: Python-2 style declaration; ignored on Python 3.

    def __init__(self):
        # Number of url data fetching threads allowed.
        self.MaxWorkerThreads = 8
        # Timeout (seconds) for getting the next url from the frontier.
        self.FrontierTimeOut = 60
        # Timeout (seconds) for acquiring a free worker thread.
        self.WorkerTimeOut = 60
        # Timeout (seconds) for getting data from the output queue.
        self.OutBufferTimeOut = 60
        # Timeout (seconds) for fetching a single url.
        self.UrlFetchTimeOut = 2
        # User-Agent string this crawler identifies itself as.
        # BUG FIX: was the name-mangled ``self.__UserAgentString``, which made
        # ``ValidateConfig``'s ``self.UserAgentString`` access raise
        # AttributeError instead of validating the value.
        self.UserAgentString = None
        # Resume fetching from the last point of closure; False restarts
        # from the seed urls.
        self.Resumable = True
        # Number of times to retry fetching a url if it fails.
        self.MaxRetryDownloadOnFail = 5
        # Politeness delay enforced between requests.
        # http://en.wikipedia.org/wiki/Web_crawler#Politeness_policy
        self.PolitenessDelay = 300
        # Shelve file storing current crawler state (used when Resumable).
        self.PersistentFile = "Persistent.shelve"
        # Approximate number of documents to fetch; -1 means unlimited.
        self.NoOfDocToFetch = -1
        # Max link depth from the seed urls; -1 means unlimited.
        self.MaxDepth = -1
        # Max page size in bytes (only enforceable when the server sends
        # Content-Length).
        self.MaxPageSize = 1048576
        # Max output queue size; 0 means unbounded.  A bound caps memory/loss
        # on crash at the cost of slower crawling.
        self.MaxQueueSize = 0
        # Ignore robots.txt rules -- only with the host's permission.
        self.IgnoreRobotRule = False
        # False -> breadth-first traversal, True -> depth-first.
        self.DepthFirstTraversal = False

    def ValidateConfig(self):
        '''Validates the config to see if everything is in order. No need to extend this'''
        # BUG FIX: the original condition ``a != "" or a != "Set This Value!"``
        # was a tautology (always true); check membership instead.
        try:
            assert self.UserAgentString not in (None, "", "Set This Value!")
        except AssertionError:
            print("Set value of UserAgentString")
            sys.exit(1)
        try:
            assert self.MaxWorkerThreads != 0
        except AssertionError:
            print("MaxWorkerThreads cannot be 0")
            sys.exit(1)

    @abstractmethod
    def GetSeeds(self):
        '''Returns the first set of urls to start crawling from'''
        return ["Sample Url 1", "Sample Url 2", "Etc"]

    @abstractmethod
    def HandleData(self, parsedData):
        '''Function to handle url data. Guaranteed to be Thread safe.
        parsedData = {"url" : "url", "text" : "text data from html", "html" : "raw html data"}
        Advisable to make this function light. Data can be massaged later. Storing data probably is more important'''
        print(parsedData["url"])

    def AllowedSchemes(self, scheme):
        '''Function that allows the schemes/protocols in the set.'''
        # Accepts both str and bytes schemes, case-insensitively.
        return scheme.lower() in set(["http", "https", "ftp", b"http", b"https", b"ftp"])

    @abstractmethod
    def ValidUrl(self, url):
        '''Function to determine if the url is a valid url that should be fetched or not.'''
        # BUG FIX: removed the unreachable site-specific filtering code that
        # followed this unconditional return (it referenced ``urlparse`` and
        # ``re`` which were never imported).
        return True

    def GetTextData(self, htmlData):
        '''Function to clean up html raw data and get the text from it. Keep it small.
        Not thread safe, returns an object that will go into the parsedData["text"] field for HandleData function above'''
        # NOTE(review): nltk.clean_html was removed in modern NLTK and now
        # raises NotImplementedError -- confirm the installed version.
        import nltk
        return nltk.clean_html(htmlData)

    def ExtractNextLinks(self, url, rawData, outputLinks):
        '''Function to extract the next links to iterate over. No need to validate the links. They get validated at the ValudUrl function when added to the frontier
        Add the output links to the outputLinks parameter (has to be a list). Return Bool signifying success of extracting the links.
        rawData for url will not be stored if this function returns False. If there are no links but the rawData is still valid and has to be saved return True
        Keep this default implementation if you need all the html links from rawData'''
        from lxml import html, etree
        try:
            htmlParse = html.document_fromstring(rawData)
            htmlParse.make_links_absolute(url)
        except etree.ParserError:
            return False
        except etree.XMLSyntaxError:
            return False
        for element, attribute, link, pos in htmlParse.iterlinks():
            outputLinks.append(link)
        return True

    def GetAuthenticationData(self):
        ''' Function that returns dict(top_level_url : tuple(username, password)) for basic authentication purposes'''
        return {}
"lxml.html.document_fromstring",
"nltk.clean_html",
"sys.exit"
] | [((4731, 4756), 'nltk.clean_html', 'nltk.clean_html', (['htmlData'], {}), '(htmlData)\n', (4746, 4756), False, 'import nltk\n'), ((5450, 5483), 'lxml.html.document_fromstring', 'html.document_fromstring', (['rawData'], {}), '(rawData)\n', (5474, 5483), False, 'from lxml import html, etree\n'), ((2992, 3003), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3000, 3003), False, 'import sys\n'), ((3164, 3175), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3172, 3175), False, 'import sys\n')] |
from viroconcom.fitting import Fit
from viroconcom.contours import IFormContour
import numpy as np
# Seeded RNG so the example is reproducible.
prng = np.random.RandomState(42)
# Significant wave height: 1000 draws from a Weibull(shape=1.5)
# distribution, scaled by 3.
sample_0 = prng.weibull(1.5, 1000) * 3
# Spectral peak period: lognormal(mu=2, sigma=0.2) noise around a level
# that grows exponentially with significant wave height.
sample_1 = []
for hs in sample_0:
    sample_1.append(0.1 + 1.5 * np.exp(0.2 * hs) + prng.lognormal(2, 0.2))
# Bivariate probabilistic model to be fitted: a Weibull marginal for wave
# height and a lognormal for peak period whose *scale* parameter depends on
# variable 0 (wave height) through a 3-parameter exponential ('exp3').
# Dependencies are listed in the order (shape, location, scale).
dist_description_0 = {'name': 'Weibull',
                      'dependency': (None, None, None),
                      'width_of_intervals': 2}
dist_description_1 = {'name': 'Lognormal',
                      'dependency': (None, None, 0),
                      'functions': (None, None, 'exp3')}
# Maximum-likelihood fit of the joint model.
my_fit = Fit((sample_0, sample_1),
             (dist_description_0, dist_description_1))
# Environmental contour: 25-year return period, 3-hour sea states,
# 100 points along the contour.
iform_contour = IFormContour(my_fit.mul_var_dist, 25, 3, 100)
| [
"numpy.exp",
"viroconcom.fitting.Fit",
"viroconcom.contours.IFormContour",
"numpy.random.RandomState"
] | [((107, 132), 'numpy.random.RandomState', 'np.random.RandomState', (['(42)'], {}), '(42)\n', (128, 132), True, 'import numpy as np\n'), ((1458, 1525), 'viroconcom.fitting.Fit', 'Fit', (['(sample_0, sample_1)', '(dist_description_0, dist_description_1)'], {}), '((sample_0, sample_1), (dist_description_0, dist_description_1))\n', (1461, 1525), False, 'from viroconcom.fitting import Fit\n'), ((1720, 1765), 'viroconcom.contours.IFormContour', 'IFormContour', (['my_fit.mul_var_dist', '(25)', '(3)', '(100)'], {}), '(my_fit.mul_var_dist, 25, 3, 100)\n', (1732, 1765), False, 'from viroconcom.contours import IFormContour\n'), ((483, 502), 'numpy.exp', 'np.exp', (['(0.2 * point)'], {}), '(0.2 * point)\n', (489, 502), True, 'import numpy as np\n')] |
from django.contrib import admin
from .models import Product, ContainerType, RateSlab
class RateSlabInline(admin.TabularInline):
    # Edit RateSlab rows inline on the parent ContainerType admin page.
    model = RateSlab
    # Number of empty extra forms shown for adding new slabs.
    extra = 3
class ContainerTypeAdmin(admin.ModelAdmin):
    # Show the related rate slabs inline when editing a ContainerType.
    inlines = [RateSlabInline]
# Register your models here.
admin.site.register(Product)  # plain ModelAdmin
admin.site.register(ContainerType, ContainerTypeAdmin)  # with inline rate slabs
| [
"django.contrib.admin.site.register"
] | [((275, 303), 'django.contrib.admin.site.register', 'admin.site.register', (['Product'], {}), '(Product)\n', (294, 303), False, 'from django.contrib import admin\n'), ((304, 358), 'django.contrib.admin.site.register', 'admin.site.register', (['ContainerType', 'ContainerTypeAdmin'], {}), '(ContainerType, ContainerTypeAdmin)\n', (323, 358), False, 'from django.contrib import admin\n')] |
import numpy as np
from numpy.testing import assert_array_equal
from seai_deap import dim
def test_calculate_building_volume() -> None:
    """Four floors of unit area and unit height give a volume of 4."""
    floors = ("ground", "first", "second", "third")
    kwargs = {
        f"{floor}_floor_{measure}": np.array(1)
        for measure in ("area", "height")
        for floor in floors
    }
    result = dim.calculate_building_volume(**kwargs)
    assert_array_equal(result, np.array(4))
def test_calculate_total_floor_area() -> None:
    """Four floors of unit area sum to a total area of 4."""
    areas = {
        f"{floor}_floor_area": np.array(1)
        for floor in ("ground", "first", "second", "third")
    }
    result = dim.calculate_total_floor_area(**areas)
    assert_array_equal(result, np.array(4))
| [
"numpy.array",
"numpy.testing.assert_array_equal"
] | [((162, 173), 'numpy.array', 'np.array', (['(4)'], {}), '(4)\n', (170, 173), True, 'import numpy as np\n'), ((546, 589), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['output', 'expected_output'], {}), '(output, expected_output)\n', (564, 589), False, 'from numpy.testing import assert_array_equal\n'), ((662, 673), 'numpy.array', 'np.array', (['(4)'], {}), '(4)\n', (670, 673), True, 'import numpy as np\n'), ((887, 930), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['output', 'expected_output'], {}), '(output, expected_output)\n', (905, 930), False, 'from numpy.testing import assert_array_equal\n'), ((245, 256), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (253, 256), True, 'import numpy as np\n'), ((283, 294), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (291, 294), True, 'import numpy as np\n'), ((322, 333), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (330, 333), True, 'import numpy as np\n'), ((360, 371), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (368, 371), True, 'import numpy as np\n'), ((401, 412), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (409, 412), True, 'import numpy as np\n'), ((441, 452), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (449, 452), True, 'import numpy as np\n'), ((482, 493), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (490, 493), True, 'import numpy as np\n'), ((522, 533), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (530, 533), True, 'import numpy as np\n'), ((748, 759), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (756, 759), True, 'import numpy as np\n'), ((786, 797), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (794, 797), True, 'import numpy as np\n'), ((825, 836), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (833, 836), True, 'import numpy as np\n'), ((863, 874), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (871, 874), True, 'import numpy as np\n')] |
from collections import namedtuple
import graphene
import datetime
import json
from .new_models import Agent, Community, Collection
def _json_object_hook(d):
return namedtuple('X', d.keys())(*d.values())
def json2obj(data):
    # Parse a JSON string into nested namedtuples so fields can be read
    # via attribute access instead of dict indexing.
    return json.loads(data, object_hook=_json_object_hook)
class AgentSchema(graphene.ObjectType):
    """GraphQL type exposing an Agent and its graph relations."""
    name = graphene.String(required=True)
    dateTimeAdded = graphene.DateTime()
    knows = graphene.List(graphene.String)
    belongs = graphene.List(graphene.String)
    tags = graphene.List(graphene.String)
    email = graphene.String(required=False)
    loves = graphene.String(required=False)
    hates = graphene.String(required=False)
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # NOTE(review): 'name' is popped *after* super() already consumed the
        # full kwargs, so it must always be present; KeyError otherwise.
        self.name = kwargs.pop('name')
        self.agent = Agent(name=self.name)
    def resolve_knows(self, info):
        # Re-fetch the agent so the relation list is current at query time.
        _agent = Agent(name=self.name).fetch()
        return _agent.knows
    def resolve_belongs(self, info):
        _agent = Agent(name=self.name).fetch()
        return _agent.belongs
class CreateAgent(graphene.Mutation):
    """Mutation that creates an Agent and links its relations."""
    class Arguments:
        # Mirrors the AgentSchema fields; only 'name' is mandatory.
        name = graphene.String(required=True)
        dateTimeAdded = graphene.DateTime()
        knows = graphene.List(graphene.String)
        belongs = graphene.List(graphene.String)
        tags = graphene.List(graphene.String)
        email = graphene.String(required=False)
        loves = graphene.String(required=False)
        hates = graphene.String(required=False)
    success = graphene.Boolean()
    agent = graphene.Field(lambda: AgentSchema)
    def mutate(self, info, **kwargs):
        agent = Agent(**kwargs)
        agent.save()
        # Wire up the 'knows'/'belongs' edges after the node is persisted.
        agent._link_connections()
        agent._link_communities()
        return CreateAgent(agent=agent, success=True)
class CommunitySchema(graphene.ObjectType):
    """GraphQL type for a Community."""
    name = graphene.String()
    description = graphene.String()
    def __init__(self, **kwargs):
        # NOTE(review): '_id' must be present in kwargs; it is stripped
        # before graphene sees the remaining fields.  KeyError otherwise.
        self._id = kwargs.pop('_id')
        super().__init__(**kwargs)
class CreateCommunity(graphene.Mutation):
    """Mutation that creates and persists a Community."""
    class Arguments:
        name = graphene.String(required=True)
        description = graphene.String()
    success = graphene.Boolean()
    community = graphene.Field(lambda: CommunitySchema)
    def mutate(self, info, **kwargs):
        community = Community(**kwargs)
        community.save()
        return CreateCommunity(community=community, success=True)
class CollectionSchema(graphene.ObjectType):
    """GraphQL type for a Collection."""
    name = graphene.String()
    description = graphene.String()
    def __init__(self, **kwargs):
        # NOTE(review): '_id' must be present in kwargs; KeyError otherwise.
        self._id = kwargs.pop('_id')
        super().__init__(**kwargs)
class CreateCollection(graphene.Mutation):
    """Mutation that creates and persists a Collection."""
    class Arguments:
        name = graphene.String(required=True)
        description = graphene.String()
    success = graphene.Boolean()
    collection = graphene.Field(lambda: CollectionSchema)
    def mutate(self, info, **kwargs):
        collection = Collection(**kwargs)
        collection.save()
        # BUG FIX: the result was passed as ``community=`` although this
        # mutation declares a ``collection`` output field, which raised a
        # TypeError (unexpected keyword argument) at runtime.
        return CreateCollection(collection=collection, success=True)
class Query(graphene.ObjectType):
    """Root query: look up agents, communities and collections by name."""
    agent = graphene.Field(lambda: AgentSchema, name=graphene.String(required=True))
    community = graphene.Field(lambda: CommunitySchema, name=graphene.String())
    collection = graphene.Field(lambda: CollectionSchema, name=graphene.String())
    def resolve_agent(self, info, name):
        # Load the agent and hand its fields to the schema type.
        agent = Agent(name=name)
        return AgentSchema(**agent.as_dict())
class Mutations(graphene.ObjectType):
    """Root mutation type aggregating all create-mutations."""
    create_agent = CreateAgent.Field()
    create_community = CreateCommunity.Field()
    create_collection = CreateCollection.Field()
# Executable schema; auto_camelcase disabled so field names keep snake_case.
schema = graphene.Schema(query=Query, mutation=Mutations, auto_camelcase=False)
| [
"graphene.String",
"json.loads",
"graphene.List",
"graphene.Field",
"graphene.DateTime",
"graphene.Schema",
"graphene.Boolean"
] | [((3648, 3718), 'graphene.Schema', 'graphene.Schema', ([], {'query': 'Query', 'mutation': 'Mutations', 'auto_camelcase': '(False)'}), '(query=Query, mutation=Mutations, auto_camelcase=False)\n', (3663, 3718), False, 'import graphene\n'), ((245, 292), 'json.loads', 'json.loads', (['data'], {'object_hook': '_json_object_hook'}), '(data, object_hook=_json_object_hook)\n', (255, 292), False, 'import json\n'), ((346, 376), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (361, 376), False, 'import graphene\n'), ((397, 416), 'graphene.DateTime', 'graphene.DateTime', ([], {}), '()\n', (414, 416), False, 'import graphene\n'), ((429, 459), 'graphene.List', 'graphene.List', (['graphene.String'], {}), '(graphene.String)\n', (442, 459), False, 'import graphene\n'), ((474, 504), 'graphene.List', 'graphene.List', (['graphene.String'], {}), '(graphene.String)\n', (487, 504), False, 'import graphene\n'), ((516, 546), 'graphene.List', 'graphene.List', (['graphene.String'], {}), '(graphene.String)\n', (529, 546), False, 'import graphene\n'), ((559, 590), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (574, 590), False, 'import graphene\n'), ((603, 634), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (618, 634), False, 'import graphene\n'), ((647, 678), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (662, 678), False, 'import graphene\n'), ((1509, 1527), 'graphene.Boolean', 'graphene.Boolean', ([], {}), '()\n', (1525, 1527), False, 'import graphene\n'), ((1540, 1576), 'graphene.Field', 'graphene.Field', (['(lambda : AgentSchema)'], {}), '(lambda : AgentSchema)\n', (1554, 1576), False, 'import graphene\n'), ((1848, 1865), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1863, 1865), False, 'import graphene\n'), ((1884, 1901), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1899, 1901), False, 
'import graphene\n'), ((2175, 2193), 'graphene.Boolean', 'graphene.Boolean', ([], {}), '()\n', (2191, 2193), False, 'import graphene\n'), ((2210, 2250), 'graphene.Field', 'graphene.Field', (['(lambda : CommunitySchema)'], {}), '(lambda : CommunitySchema)\n', (2224, 2250), False, 'import graphene\n'), ((2478, 2495), 'graphene.String', 'graphene.String', ([], {}), '()\n', (2493, 2495), False, 'import graphene\n'), ((2514, 2531), 'graphene.String', 'graphene.String', ([], {}), '()\n', (2529, 2531), False, 'import graphene\n'), ((2806, 2824), 'graphene.Boolean', 'graphene.Boolean', ([], {}), '()\n', (2822, 2824), False, 'import graphene\n'), ((2842, 2883), 'graphene.Field', 'graphene.Field', (['(lambda : CollectionSchema)'], {}), '(lambda : CollectionSchema)\n', (2856, 2883), False, 'import graphene\n'), ((1133, 1163), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1148, 1163), False, 'import graphene\n'), ((1188, 1207), 'graphene.DateTime', 'graphene.DateTime', ([], {}), '()\n', (1205, 1207), False, 'import graphene\n'), ((1224, 1254), 'graphene.List', 'graphene.List', (['graphene.String'], {}), '(graphene.String)\n', (1237, 1254), False, 'import graphene\n'), ((1273, 1303), 'graphene.List', 'graphene.List', (['graphene.String'], {}), '(graphene.String)\n', (1286, 1303), False, 'import graphene\n'), ((1319, 1349), 'graphene.List', 'graphene.List', (['graphene.String'], {}), '(graphene.String)\n', (1332, 1349), False, 'import graphene\n'), ((1366, 1397), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (1381, 1397), False, 'import graphene\n'), ((1414, 1445), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (1429, 1445), False, 'import graphene\n'), ((1462, 1493), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (1477, 1493), False, 'import graphene\n'), ((2089, 2119), 'graphene.String', 
'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (2104, 2119), False, 'import graphene\n'), ((2142, 2159), 'graphene.String', 'graphene.String', ([], {}), '()\n', (2157, 2159), False, 'import graphene\n'), ((2720, 2750), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (2735, 2750), False, 'import graphene\n'), ((2773, 2790), 'graphene.String', 'graphene.String', ([], {}), '()\n', (2788, 2790), False, 'import graphene\n'), ((3147, 3177), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (3162, 3177), False, 'import graphene\n'), ((3240, 3257), 'graphene.String', 'graphene.String', ([], {}), '()\n', (3255, 3257), False, 'import graphene\n'), ((3322, 3339), 'graphene.String', 'graphene.String', ([], {}), '()\n', (3337, 3339), False, 'import graphene\n')] |
from pathlib import Path
from util.plot_utils import plot_logs, plot_precision_recall
def main():
    """Plot the training curves for a single experiment output directory."""
    run_dir = Path('output/21_09_2021_with_corss_en_loss')
    plot_logs([run_dir])
# Run the plot when executed as a script.
if __name__ == '__main__':
    main()
"util.plot_utils.plot_logs",
"pathlib.Path"
] | [((117, 161), 'pathlib.Path', 'Path', (['"""output/21_09_2021_with_corss_en_loss"""'], {}), "('output/21_09_2021_with_corss_en_loss')\n", (121, 161), False, 'from pathlib import Path\n'), ((200, 226), 'util.plot_utils.plot_logs', 'plot_logs', (['logs_paths_list'], {}), '(logs_paths_list)\n', (209, 226), False, 'from util.plot_utils import plot_logs, plot_precision_recall\n')] |
import argparse
from multiprocessing import Queue, Process
from logging import getLogger
def main():
    """Parse command-line arguments and dispatch to the chosen sub-command.

    Sub-commands: ``qkoubot`` (main bot), ``stream`` (Twitter stream
    processing) and ``dailyjob`` (one-shot daily tweet job).  Logging and
    config options shared by all sub-commands live on ``common_parser``.
    """
    parser = argparse.ArgumentParser(
        description="QkouBot is an application for KIT students. This automatically collect and "
                    "redistribute information and cancellation of lectures. QkouBot detect update of information "
                    "and tweet it."
    )
    # Options common to every sub-command (attached via parents=[...]).
    common_parser = argparse.ArgumentParser(add_help=False)
    common_parser.add_argument("-v",
                               "--verbose",
                               dest="verbose",
                               default=False,
                               help="Default logging output is above WARNING level to stderr. "
                                    "If this option enabled, logging output is above INFO level to stdout by default."
                                    "You can change its level using `-l` or `--log-level` option.",
                               action="store_true"
                               )
    common_parser.add_argument("-l",
                               "--log-level",
                               dest="log_level",
                               default=[2],
                               type=int,
                               nargs=1,
                               choices=[1, 2, 3],
                               help="Choose a log level. 1: debug, 2: info, 3: warning. Default value is 2"
                               )
    common_parser.add_argument("--ini",
                               dest="ini",
                               type=str,
                               help="Read `*.ini` file and overwrite environment variables."
                               )
    common_parser.add_argument("--file-log-enable",
                               dest="file_log",
                               default=False,
                               help="Enable logging to `*.log` file. "
                                    "These files are save into `log` directory by default.",
                               action="store_true"
                               )
    common_parser.add_argument("--log-path",
                               dest="log_path",
                               default="log",
                               type=str,
                               help="Specify location of `*.log` file."
                               )
    sub_parser = parser.add_subparsers(help="sub commands help")
    bot_parser = sub_parser.add_parser("qkoubot", help="Start QkouBot command", parents=[common_parser])
    bot_parser.add_argument("-t",
                            "--tweet",
                            dest="tweet",
                            default=False,
                            help="Enable tweet update of any information.",
                            action="store_true"
                            )
    bot_parser.add_argument("--without-failure",
                            dest="without_f",
                            default=True,
                            action="store_false",
                            help="Tweet update of information, but do not tweet login failure information."
                            )
    bot_parser.set_defaults(func=bot)
    stream_parser = sub_parser.add_parser("stream", help="Start stream processing", parents=[common_parser])
    stream_parser.set_defaults(func=stream)
    today_job_parser = sub_parser.add_parser("dailyjob", help="Run daily job", parents=[common_parser])
    today_job_parser.add_argument("-t",
                                  "--tweet",
                                  dest="tweet",
                                  default=False,
                                  help="Enable tweet update of any information.",
                                  action="store_true"
                                  )
    today_job_parser.set_defaults(func=daily_job)
    args = parser.parse_args()
    if args.ini is not None:
        # `--ini` overrides environment variables before anything launches.
        config_parse(args.ini)
    # Each sub-command registered its handler via set_defaults(func=...).
    args.func(args)
def bot(args):
    """Run the main bot: log listener process, tweet process and cron loop."""
    from log_modules import log_listener_process, configure_queue_logger
    from qkoubot import cron_process, TweetProcess
    log_queue = Queue()
    tweet_queue = Queue()
    # All processes log through log_queue; a dedicated listener writes it out.
    log_listener = Process(target=log_listener_process,
                           args=(log_queue, args.log_level[0], args.verbose, args.file_log, args.log_path, "bot"),
                           name="LogListenerProcess")
    log_listener.start()
    configure_queue_logger(queue=log_queue)
    logger = getLogger("Manage")
    logger.info("launching on QkouBot")
    tweet_process = TweetProcess(tweet_queue)
    tweet_process.tweetable = args.tweet
    tweet_process.start()
    try:
        # Blocks here: periodically scrapes and pushes updates to tweet_queue.
        cron_process(args.without_f, tweet_queue)
    except KeyboardInterrupt:
        from static import TESTING
        if TESTING:
            from qkoubot.models import Base, engine
            logger.info("Dropping Database ...")
            Base.metadata.drop_all(engine)
            logger.info("Complete.")
    except (AssertionError, FileNotFoundError, KeyError) as e:
        logger.exception(e.args)
    except Exception as e:
        logger.exception(e.args)
    finally:
        # ``None`` is the sentinel telling the worker processes to exit.
        tweet_queue.put(None)
        tweet_process.join()
        log_queue.put(None)
        log_listener.join()
        exit()
def stream(args):
    """Run the Twitter stream receiver plus the stream-processing loop."""
    from log_modules import log_listener_process, configure_queue_logger
    from qkoubot import stream_process, GetAuth, StreamReceiverProcess
    log_queue = Queue(-1)
    log_listener = Process(target=log_listener_process,
                           args=(log_queue, args.log_level[0], args.verbose, args.file_log, args.log_path, "stream"),
                           name="LogListenerProcess")
    log_listener.start()
    configure_queue_logger(queue=log_queue)
    logger = getLogger("Manage")
    logger.info("launching on QkouBot Stream Process")
    status_queue = Queue()
    auth = GetAuth()
    # The receiver pushes incoming statuses onto status_queue for processing.
    stream_receive_process = StreamReceiverProcess(status_queue, auth)
    stream_receive_process.start()
    try:
        stream_process(status_queue=status_queue, auth=auth)
    except KeyboardInterrupt:
        from static import TESTING
        if TESTING:
            from qkoubot.models import Base, engine
            Base.metadata.drop_all(engine)
            logger.info("Database was dropped.")
    except (AssertionError, FileNotFoundError, KeyError) as e:
        logger.exception(e.args)
    except Exception as e:
        logger.exception(e.args)
    finally:
        # ``None`` is the sentinel telling the log listener to exit.
        log_queue.put(None)
        log_listener.join()
        exit()
def daily_job(args):
    """Tweet the day's lecture cancellations once, then exit."""
    import time
    # Grace period before starting -- presumably waiting for dependent
    # services to come up; TODO confirm why 30 seconds.
    time.sleep(30)
    from log_modules import log_listener_process, configure_queue_logger
    from qkoubot import today_cancel_tweet, TweetProcess
    log_queue = Queue()
    tweet_queue = Queue()
    log_listener = Process(target=log_listener_process,
                           args=(log_queue, args.log_level[0], args.verbose, args.file_log, args.log_path, "daily"),
                           name="LogListenerProcess")
    log_listener.start()
    configure_queue_logger(log_queue)
    logger = getLogger(__name__)
    tweet_process = TweetProcess(tweet_queue)
    tweet_process.tweetable = args.tweet
    tweet_process.start()
    try:
        today_cancel_tweet(tweet_queue)
    except KeyboardInterrupt:
        pass
    except Exception as e:
        logger.exception(e)
    finally:
        # ``None`` is the sentinel telling the worker processes to exit.
        tweet_queue.put(None)
        tweet_process.join()
        log_queue.put(None)
        log_listener.join()
        exit()
def config_parse(path: str) -> None:
    """Load an ``*.ini`` file and copy its values into ``os.environ``.

    Every key is upper-cased before being exported.  Raises
    FileNotFoundError when *path* does not exist, and KeyError when the
    file contains a section or key that is not in the whitelist below.
    """
    import os
    from configparser import ConfigParser
    if not os.path.exists(path):
        raise FileNotFoundError(path)
    # Whitelist of environment variable names allowed per section.
    allowed = {
        "mysql": ["MYSQL_USERNAME", "MYSQL_PASSWORD", "MYSQL_HOST", "MYSQL_DATABASE_NAME"],
        "shibboleth": ["SHIBBOLETH_USERNAME", "SHIBBOLETH_PASSWORD"],
        "twitter": ["CONSUMER_KEY", "CONSUMER_SECRET", "ACCESS_TOKEN", "ACCESS_SECRET"],
        "other": ["TESTING", "SCRAPING_INTERVAL", "LOGIN_FAILURE_TWEET_INTERVAL",
                  "DAILY_TWEET_HOUR", "SQLITE_PATH", "LOG_LOCATION"],
    }
    parser = ConfigParser()
    parser.read(path)
    for section in parser.sections():
        if section not in allowed:
            raise KeyError(section + " is invalid section.")
        valid_keys = allowed[section]
        for key, value in parser.items(section):
            env_name = key.upper()
            if env_name not in valid_keys:
                raise KeyError(key + " is invalid key name.")
            os.environ[env_name] = value
# Command-line entry point.
if __name__ == '__main__':
    main()
| [
"logging.getLogger",
"qkoubot.StreamReceiverProcess",
"os.path.exists",
"configparser.ConfigParser",
"argparse.ArgumentParser",
"qkoubot.stream_process",
"multiprocessing.Process",
"log_modules.configure_queue_logger",
"qkoubot.TweetProcess",
"qkoubot.today_cancel_tweet",
"time.sleep",
"qkoubo... | [((116, 345), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""QkouBot is an application for KIT students. This automatically collect and redistribute information and cancellation of lectures. QkouBot detect update of information and tweet it."""'}), "(description=\n 'QkouBot is an application for KIT students. This automatically collect and redistribute information and cancellation of lectures. QkouBot detect update of information and tweet it.'\n )\n", (139, 345), False, 'import argparse\n'), ((416, 455), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (439, 455), False, 'import argparse\n'), ((4182, 4189), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (4187, 4189), False, 'from multiprocessing import Queue, Process\n'), ((4208, 4215), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (4213, 4215), False, 'from multiprocessing import Queue, Process\n'), ((4235, 4395), 'multiprocessing.Process', 'Process', ([], {'target': 'log_listener_process', 'args': "(log_queue, args.log_level[0], args.verbose, args.file_log, args.log_path,\n 'bot')", 'name': '"""LogListenerProcess"""'}), "(target=log_listener_process, args=(log_queue, args.log_level[0],\n args.verbose, args.file_log, args.log_path, 'bot'), name=\n 'LogListenerProcess')\n", (4242, 4395), False, 'from multiprocessing import Queue, Process\n'), ((4470, 4509), 'log_modules.configure_queue_logger', 'configure_queue_logger', ([], {'queue': 'log_queue'}), '(queue=log_queue)\n', (4492, 4509), False, 'from log_modules import log_listener_process, configure_queue_logger\n'), ((4523, 4542), 'logging.getLogger', 'getLogger', (['"""Manage"""'], {}), "('Manage')\n", (4532, 4542), False, 'from logging import getLogger\n'), ((4603, 4628), 'qkoubot.TweetProcess', 'TweetProcess', (['tweet_queue'], {}), '(tweet_queue)\n', (4615, 4628), False, 'from qkoubot import today_cancel_tweet, TweetProcess\n'), ((5500, 5509), 
'multiprocessing.Queue', 'Queue', (['(-1)'], {}), '(-1)\n', (5505, 5509), False, 'from multiprocessing import Queue, Process\n'), ((5529, 5692), 'multiprocessing.Process', 'Process', ([], {'target': 'log_listener_process', 'args': "(log_queue, args.log_level[0], args.verbose, args.file_log, args.log_path,\n 'stream')", 'name': '"""LogListenerProcess"""'}), "(target=log_listener_process, args=(log_queue, args.log_level[0],\n args.verbose, args.file_log, args.log_path, 'stream'), name=\n 'LogListenerProcess')\n", (5536, 5692), False, 'from multiprocessing import Queue, Process\n'), ((5767, 5806), 'log_modules.configure_queue_logger', 'configure_queue_logger', ([], {'queue': 'log_queue'}), '(queue=log_queue)\n', (5789, 5806), False, 'from log_modules import log_listener_process, configure_queue_logger\n'), ((5820, 5839), 'logging.getLogger', 'getLogger', (['"""Manage"""'], {}), "('Manage')\n", (5829, 5839), False, 'from logging import getLogger\n'), ((5914, 5921), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (5919, 5921), False, 'from multiprocessing import Queue, Process\n'), ((5933, 5942), 'qkoubot.GetAuth', 'GetAuth', ([], {}), '()\n', (5940, 5942), False, 'from qkoubot import stream_process, GetAuth, StreamReceiverProcess\n'), ((5972, 6013), 'qkoubot.StreamReceiverProcess', 'StreamReceiverProcess', (['status_queue', 'auth'], {}), '(status_queue, auth)\n', (5993, 6013), False, 'from qkoubot import stream_process, GetAuth, StreamReceiverProcess\n'), ((6631, 6645), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (6641, 6645), False, 'import time\n'), ((6792, 6799), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (6797, 6799), False, 'from multiprocessing import Queue, Process\n'), ((6818, 6825), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (6823, 6825), False, 'from multiprocessing import Queue, Process\n'), ((6845, 7007), 'multiprocessing.Process', 'Process', ([], {'target': 'log_listener_process', 'args': "(log_queue, args.log_level[0], 
args.verbose, args.file_log, args.log_path,\n 'daily')", 'name': '"""LogListenerProcess"""'}), "(target=log_listener_process, args=(log_queue, args.log_level[0],\n args.verbose, args.file_log, args.log_path, 'daily'), name=\n 'LogListenerProcess')\n", (6852, 7007), False, 'from multiprocessing import Queue, Process\n'), ((7082, 7115), 'log_modules.configure_queue_logger', 'configure_queue_logger', (['log_queue'], {}), '(log_queue)\n', (7104, 7115), False, 'from log_modules import log_listener_process, configure_queue_logger\n'), ((7129, 7148), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (7138, 7148), False, 'from logging import getLogger\n'), ((7169, 7194), 'qkoubot.TweetProcess', 'TweetProcess', (['tweet_queue'], {}), '(tweet_queue)\n', (7181, 7194), False, 'from qkoubot import today_cancel_tweet, TweetProcess\n'), ((8445, 8459), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (8457, 8459), False, 'from configparser import ConfigParser\n'), ((4713, 4754), 'qkoubot.cron_process', 'cron_process', (['args.without_f', 'tweet_queue'], {}), '(args.without_f, tweet_queue)\n', (4725, 4754), False, 'from qkoubot import cron_process, TweetProcess\n'), ((6066, 6118), 'qkoubot.stream_process', 'stream_process', ([], {'status_queue': 'status_queue', 'auth': 'auth'}), '(status_queue=status_queue, auth=auth)\n', (6080, 6118), False, 'from qkoubot import stream_process, GetAuth, StreamReceiverProcess\n'), ((7279, 7310), 'qkoubot.today_cancel_tweet', 'today_cancel_tweet', (['tweet_queue'], {}), '(tweet_queue)\n', (7297, 7310), False, 'from qkoubot import today_cancel_tweet, TweetProcess\n'), ((7658, 7678), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (7672, 7678), False, 'import os\n'), ((4953, 4983), 'qkoubot.models.Base.metadata.drop_all', 'Base.metadata.drop_all', (['engine'], {}), '(engine)\n', (4975, 4983), False, 'from qkoubot.models import Base, engine\n'), ((6268, 6298), 'qkoubot.models.Base.metadata.drop_all', 
'Base.metadata.drop_all', (['engine'], {}), '(engine)\n', (6290, 6298), False, 'from qkoubot.models import Base, engine\n')] |
#!/usr/bin/python3
"""
AUTHOR: <NAME> - <EMAIL>
"""
# Imports
import json
import maxminddb
import redis
import re
import random
import io
from const import META, PORTMAP
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from sys import exit
from time import localtime, sleep, strftime
from os import getuid
from sys import exit
# start the Redis server if it isn't started already.
# $ redis-server
# default port is 6379
# make sure system can use a lot of memory and overcommit memory
redis_ip = '127.0.0.1'  # Redis must already be running here on the default port (6379)
redis_instance = None  # set by main() once connected
# required input paths
syslog_path = '/var/log/fortigate.log'  # FortiGate syslog file that main() tails
db_path = '/home/ubuntu/Desktop/Work_dir/geoip-attack-map/DataServerDB/GeoLite2-City.mmdb'  # MaxMind GeoLite2 city database
# ip for headquarters
hq_ip = '192.168.127.12' # Need to add destination IP here
# stats
server_start_time = strftime("%d-%m-%Y %H:%M:%S", localtime()) # local time
event_count = 0  # total number of geolocated traffic events
continents_tracked = {}  # continent name -> hit count
countries_tracked = {}  # country name -> hit count
country_to_code = {}  # country name -> ISO code
ip_to_code = {}  # source IP -> ISO code
ips_tracked = {}  # source IP -> hit count
unknowns = {}  # lookup key -> count of events missing that key
# Create clean dictionary using unclean db dictionary contents
def clean_db(unclean):
    """Flatten a raw MaxMind record into {lookup_name: value} using META.

    Each META entry names a top-level tag plus a path of nested keys; any
    missing tag or path step yields None for that lookup.
    """
    flattened = {}
    for meta in META:
        cursor = None
        if meta['tag'] in unclean:
            cursor = unclean[meta['tag']]
            for step in meta['path']:
                if step not in cursor:
                    cursor = None
                    break
                cursor = cursor[step]
        flattened[meta['lookup']] = cursor
    return flattened
def connect_redis(redis_ip):
    """Return a StrictRedis client bound to *redis_ip* on the default port/db."""
    return redis.StrictRedis(host=redis_ip, port=6379, db=0)
def get_msg_type():
    """Message-type label attached to every published event."""
    return "Traffic"
# Check to see if packet is using an interesting TCP/UDP protocol based on source or destination port
def get_port_service(src_port, dst_port, service):
    """Map a known source/destination port to its service name.

    The source port wins when both are known; otherwise the service string
    reported by the firewall is returned unchanged.
    """
    for port in (int(src_port), int(dst_port)):
        if port in PORTMAP:
            return PORTMAP[port]
    return service
def get_tcp_udp_proto(protocol):
    """Translate an IP protocol number (int or numeric string) to its name.

    Unknown numbers come back as plain ints; input that cannot be converted
    to int (non-numeric strings, None, ...) is returned untouched.
    """
    names = {1: "ICMP", 6: "TCP", 17: "UDP"}
    try:
        number = int(protocol)
    except (ValueError, AttributeError, TypeError):
        # not numeric at all -- hand it back unchanged
        return protocol
    return names.get(number, number)
def find_hq_lat_long(hq_ip):
    """Resolve the headquarters IP to a {'dst_lat', 'dst_long'} dict.

    Exits the whole program when the IP cannot be found in the GeoIP DB.
    """
    raw_record = parse_maxminddb(db_path, hq_ip)
    if not raw_record:
        print('Please provide a valid IP address for headquarters')
        exit()
    record = clean_db(raw_record)
    return {
        'dst_lat': record['latitude'],
        'dst_long': record['longitude'],
    }
def parse_maxminddb(db_path, ip):
    """Return the raw MaxMind record for *ip*, or False on a bad address.

    A missing database file is fatal: the collector cannot run without it.
    """
    try:
        geo_reader = maxminddb.open_database(db_path)
        record = geo_reader.get(ip)
        geo_reader.close()
        return record
    except ValueError:
        # malformed IP address string
        return False
    except FileNotFoundError:
        print('DB not found')
        print('SHUTTING DOWN')
        exit()
def parse_syslog(line):
    """Split one FortiGate syslog line into a {key: value} dict.

    Tokens look like key=value; double-quoted values may contain spaces and
    keep their surrounding quotes.  Tokens without exactly one '=' (bare
    words, key=a=b) are silently skipped.
    """
    parsed = {}
    # a token is any run of non-space chars, where quoted spans count as one chunk
    for token in re.findall(r'(?:[^\s"]|"(?:[^"])*")+', line):
        if '=' not in token:
            continue
        try:
            key, value = token.split('=')
        except ValueError:
            # more than one '=' in the token -- ambiguous, skip it
            continue
        parsed[key] = value
    return parsed
def shutdown_and_report_stats():
    """Print every accumulated counter, then terminate the process."""
    print('\nSHUTDOWN')
    # Report stats tracked
    print('\nREPORTING STATS...')
    print('\nEvent Count: {}'.format(event_count))  # report event count
    report_sections = [
        ('\nContinent Stats...', continents_tracked),
        ('\nCountry Stats...', countries_tracked),
        ('\nCountries to iso_codes...', country_to_code),
        ('\nIP Stats...', ips_tracked),
        ('\nIPs to iso_codes...', ip_to_code),
        ('\nUnknowns...', unknowns),
    ]
    for header, table in report_sections:
        print(header)
        for entry in table:
            print('{}: {}'.format(entry, table[entry]))
    exit()
def merge_dicts(*args):
    """Combine any number of dicts; later arguments win on duplicate keys."""
    merged = {}
    for mapping in args:
        merged.update(mapping)
    return merged
def track_flags(super_dict, tracking_dict, key1, key2):
    """Record the mapping super_dict[key1] -> super_dict[key2] exactly once.

    Used to associate e.g. a country name with its ISO code, or a source IP
    with its ISO code.  Nothing is stored unless both keys are present, and
    an existing entry is never overwritten.  Mutates *tracking_dict* in
    place; always returns None.
    """
    if key1 not in super_dict or key2 not in super_dict:
        return None
    name = super_dict[key1]
    # Bug fix: the original tested `key1 in tracking_dict` -- the literal
    # field name ('country', 'srcip'), which is never a key of tracking_dict,
    # so the "already tracked" guard never fired and the entry was rewritten
    # on every event.  Test the looked-up value instead.
    if name in tracking_dict:
        return None
    tracking_dict[name] = super_dict[key2]
def track_stats(super_dict, tracking_dict, key):
    """Increment tracking_dict[super_dict[key]] by one.

    When *key* is absent from super_dict the miss is counted in the
    module-level `unknowns` dict instead.
    """
    if key not in super_dict:
        unknowns[key] = unknowns.get(key, 0) + 1
        return
    value = super_dict[key]
    tracking_dict[value] = tracking_dict.get(value, 0) + 1
def main():
    """Tail the FortiGate syslog, geolocate each traffic event, and publish
    an enriched JSON record to the 'attack-map-production' Redis channel.

    Runs forever; interrupted with Ctrl-C (handled by the __main__ guard).
    """
    global db_path, log_file_out, redis_ip, redis_instance, syslog_path, hq_ip
    # NOTE(review): 'postal_codes_tracked' and 'unknown' are declared global
    # but never defined at module level -- presumably leftovers; confirm.
    global continents_tracked, countries_tracked, ips_tracked, postal_codes_tracked, event_count, unknown, ip_to_code, country_to_code
    # Connect to Redis
    redis_instance = connect_redis(redis_ip)
    # Find HQ lat/long
    hq_dict = find_hq_lat_long(hq_ip)
    # TO DO: placeholder CVE tag -- computed once, so every event carries the
    # same random value for the lifetime of the process.
    cve_attack = 'CVE:{}:{}'.format(
        random.randrange(1, 2000),
        random.randrange(100, 1000)
    )
    # # Follow/parse/format/publish syslog data
    with io.open(syslog_path, "r", encoding='ISO-8859-1') as syslog_file:
        # skip everything already in the file; only new lines are processed
        syslog_file.readlines()
        while True:
            where = syslog_file.tell()
            line = syslog_file.readline()
            if not line:
                # nothing new yet: wait briefly and rewind to the same offset
                sleep(.1)
                syslog_file.seek(where)
            else:
                syslog_data_dict = parse_syslog(line)
                if syslog_data_dict:
                    if syslog_data_dict['type'] == 'traffic':
                        ip_db_unclean = parse_maxminddb(db_path, syslog_data_dict['srcip'])
                        if ip_db_unclean:
                            event_count += 1
                            ip_db_clean = clean_db(ip_db_unclean)
                            msg_type = {'msg_type': get_msg_type()}
                            msg_type2 = {
                                'msg_type2': get_port_service(syslog_data_dict['srcport'], syslog_data_dict['dstport'], syslog_data_dict['service'])}
                            msg_type3 = {'msg_type3': cve_attack} # TO DO
                            proto = {'protocol': get_tcp_udp_proto(syslog_data_dict['proto'])}
                            # later dicts win on duplicate keys (syslog fields last)
                            super_dict = merge_dicts(
                                hq_dict,
                                ip_db_clean,
                                msg_type,
                                msg_type2,
                                msg_type3,
                                proto,
                                syslog_data_dict
                            )
                            # Track Stats
                            track_stats(super_dict, continents_tracked, 'continent')
                            track_stats(super_dict, countries_tracked, 'country')
                            track_stats(super_dict, ips_tracked, 'srcip')
                            event_time = strftime("%d-%m-%Y %H:%M:%S", localtime()) # local time
                            track_flags(super_dict, country_to_code, 'country', 'iso_code')
                            track_flags(super_dict, ip_to_code, 'srcip', 'iso_code')
                            # Append stats to super_dict
                            super_dict['event_count'] = event_count
                            super_dict['continents_tracked'] = continents_tracked
                            super_dict['countries_tracked'] = countries_tracked
                            super_dict['ips_tracked'] = ips_tracked
                            super_dict['unknowns'] = unknowns
                            super_dict['event_time'] = event_time
                            super_dict['country_to_code'] = country_to_code
                            super_dict['ip_to_code'] = ip_to_code
                            # publish the enriched event for the map front-end
                            json_data = json.dumps(super_dict)
                            redis_instance.publish('attack-map-production', json_data)
                            print('Event Count: {}'.format(event_count))
                            print('------------------------')
                        else:
                            # source IP not in the GeoIP DB: skip the event
                            continue
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C: dump the accumulated counters, then exit.
        shutdown_and_report_stats()
| [
"time.localtime",
"maxminddb.open_database",
"random.randrange",
"json.dumps",
"io.open",
"time.sleep",
"redis.StrictRedis",
"sys.exit",
"re.findall"
] | [((848, 859), 'time.localtime', 'localtime', ([], {}), '()\n', (857, 859), False, 'from time import localtime, sleep, strftime\n'), ((1517, 1566), 'redis.StrictRedis', 'redis.StrictRedis', ([], {'host': 'redis_ip', 'port': '(6379)', 'db': '(0)'}), '(host=redis_ip, port=6379, db=0)\n', (1534, 1566), False, 'import redis\n'), ((3550, 3594), 're.findall', 're.findall', (['"""(?:[^\\\\s"]|"(?:[^"])*")+"""', 'line'], {}), '(\'(?:[^\\\\s"]|"(?:[^"])*")+\', line)\n', (3560, 3594), False, 'import re\n'), ((4786, 4792), 'sys.exit', 'exit', ([], {}), '()\n', (4790, 4792), False, 'from sys import exit\n'), ((2991, 2997), 'sys.exit', 'exit', ([], {}), '()\n', (2995, 2997), False, 'from sys import exit\n'), ((3060, 3092), 'maxminddb.open_database', 'maxminddb.open_database', (['db_path'], {}), '(db_path)\n', (3083, 3092), False, 'import maxminddb\n'), ((6007, 6032), 'random.randrange', 'random.randrange', (['(1)', '(2000)'], {}), '(1, 2000)\n', (6023, 6032), False, 'import random\n'), ((6042, 6069), 'random.randrange', 'random.randrange', (['(100)', '(1000)'], {}), '(100, 1000)\n', (6058, 6069), False, 'import random\n'), ((6134, 6182), 'io.open', 'io.open', (['syslog_path', '"""r"""'], {'encoding': '"""ISO-8859-1"""'}), "(syslog_path, 'r', encoding='ISO-8859-1')\n", (6141, 6182), False, 'import io\n'), ((3273, 3279), 'sys.exit', 'exit', ([], {}), '()\n', (3277, 3279), False, 'from sys import exit\n'), ((6373, 6383), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (6378, 6383), False, 'from time import localtime, sleep, strftime\n'), ((8881, 8903), 'json.dumps', 'json.dumps', (['super_dict'], {}), '(super_dict)\n', (8891, 8903), False, 'import json\n'), ((8010, 8021), 'time.localtime', 'localtime', ([], {}), '()\n', (8019, 8021), False, 'from time import localtime, sleep, strftime\n')] |
import numpy as np
import skimage as ski
import os
from matplotlib import pyplot as plt
from skimage.feature import blob_dog, blob_log, blob_doh
from skimage.color import rgb2gray
from math import sqrt
# Default parameters for skimage.feature.blob_log (Laplacian of Gaussian).
log_defaults = {
    'min_s': 1,  # smallest blob sigma searched
    'max_s': 30,  # largest blob sigma searched
    'num_s': 10,  # number of sigma steps between min and max
    'thresh':0.1,  # minimum blob intensity accepted
    'overlap': 0.5,  # blobs overlapping more than this fraction are merged
    'log_scale': False,
    'exclude_border': False
    }
def run_log(image, plot_im=False, verbose=False, log_params=log_defaults):
    """Detect blobs in *image* with the Laplacian-of-Gaussian method.

    Returns an (N, 3) array of (row, col, radius); when plot_im is True a
    matplotlib figure showing the detections is returned first, as
    (fig, blobs).
    """
    if verbose == True:
        print(log_params)
    # Find blobs with Laplacian of Gaussian
    detected = blob_log(
        image,
        min_sigma=log_params['min_s'],
        max_sigma=log_params['max_s'],
        num_sigma=log_params['num_s'],
        threshold=log_params['thresh'],
        overlap=log_params['overlap'],
        log_scale=log_params['log_scale'],
        exclude_border=log_params['exclude_border'],
    )
    if len(detected) == 0:
        print('No Blobs')
    # blob_log reports sigma in column 2; the blob radius is sigma * sqrt(2)
    detected[:, 2] = detected[:, 2] * sqrt(2)
    if plot_im == True:
        # Overlay the detections on the image for a visual sanity check.
        fig, (ax_raw, ax_marked) = plt.subplots(1, 2, figsize=(20, 10), sharex=True, sharey=True)
        ax_raw.imshow(image)
        ax_marked.imshow(image)
        for row, col, radius in detected:
            ax_marked.add_patch(plt.Circle((col, row), radius, color='r', linewidth=2, fill=False))
        plt.tight_layout()
        plt.show()
        # Return fig and blobs for counting blobs
        return fig, detected
    return detected
class cell_counts:
    """Pair one image with its detected blobs and derive density statistics.

    Blobs with radius <= 2 pixels are discarded as noise at construction
    time; all areas are reported in square microns unless noted otherwise.
    """

    def __init__(self, name, image, blobs, pixels_per_micron, log_params):
        self.id = os.path.basename(name)[0:5]
        self.name = name
        self.image = image
        # keep only blobs whose radius (column 2) exceeds 2 pixels
        self.blobs = blobs[blobs[:, 2] > 2]
        self.pixels_per_micron = pixels_per_micron
        self.log_params = log_params

    @property
    def num_cells(self):
        """Number of blobs that survived the size filter."""
        return len(self.blobs)

    @property
    def im_area(self):
        """Total image area in square microns."""
        microns_per_pixel = 1 / self.pixels_per_micron
        return self.image.shape[0] * self.image.shape[1] * microns_per_pixel ** 2

    @property
    def slice_area(self):
        """Area (um^2) of the tissue slice itself.

        Pixels with value > 1 count as tissue; the threshold is 1 rather
        than 0 because nominally black pixels occasionally carry value 1.
        This avoids having to crop images to the slice by hand.
        """
        tissue_pixel_count = self.image[self.image > 1].size
        microns_per_pixel = 1 / self.pixels_per_micron
        return tissue_pixel_count * microns_per_pixel ** 2

    @property
    def cells_per_um2(self):
        """Cell density per square micron of slice."""
        return self.num_cells / self.slice_area

    @property
    def cells_per_mm2(self):
        """Cell density per square millimetre of slice."""
        return self.cells_per_um2 * 1e6

    @property
    def percent_slice(self):
        """Slice area as a percentage of the full image area."""
        return 100 * self.slice_area / self.im_area

    def to_dict(self):
        """Dump inputs and every derived statistic into one dict."""
        return {
            'id': self.id,
            'name': self.name,
            'image': self.image,
            'blobs': self.blobs,
            'pixels_per_micron': self.pixels_per_micron,
            'num_cells': self.num_cells,
            'im_area': self.im_area,
            'slice_area': self.slice_area,
            'cells_per_um2': self.cells_per_um2,
            'cells_per_mm2': self.cells_per_mm2,
            'percent_slice': self.percent_slice,
            'LOG_params': self.log_params
        }

    def overlay(self, return_fig=False):
        """Show the image with detected blobs circled; optionally return the figure."""
        fig, (ax_raw, ax_marked) = plt.subplots(1, 2, figsize=(20, 10), sharex=True, sharey=True)
        ax_raw.imshow(self.image)
        ax_marked.imshow(self.image)
        for row, col, radius in self.blobs:
            ax_marked.add_patch(plt.Circle((col, row), radius, color='r', linewidth=2, fill=False))
        plt.tight_layout()
        plt.show()
        if return_fig == True:
            return fig
        return
def collect_cell_counts(
    image_directory,
    log_params = log_defaults,
    testi = 0,
    verbose = False,
    pixels_per_micron = 1.5
    ):
    """Run LoG blob detection over every *.tif in *image_directory*.

    Each image is converted to 8-bit greyscale, blob-detected with
    run_log, and wrapped in a cell_counts object.

    Args:
        image_directory: folder scanned (non-recursively) for .tif files.
        log_params: parameter dict forwarded to run_log / blob_log.
        testi: if > 0, only the first `testi` images are processed.
        verbose: print parameters and per-image progress.
        pixels_per_micron: image scale forwarded to cell_counts.

    Returns:
        list of cell_counts objects, one per processed image.
    """
    images = ski.io.ImageCollection(os.path.join(image_directory, '*.tif'))
    # For testing, allow the check of first set of images up to i = testi
    if testi > 0:
        images = images[0:testi]
    # Verbose
    if verbose == True:
        print ('LOG parameters are:')
        print (log_params)
        print()
        print ('The first 5 files are:')
        print (images.files[0:5])
        print ('...')
        print ('The last 5 files are:')
        print (images.files[-5:])
        print()
    # Run
    counted = []
    for i, image in enumerate(images):
        if verbose == True:
            print('i is:', i)
            print("Current file is:")
            print(images.files[i])
            print()
        """
        Commenting out for training
        if verbose == False:
            if i%10 == 0:
                print('Current index:', i)
        """
        # 8-bit greyscale input keeps blob_log thresholds comparable across images
        greyscale_im = rgb2gray(image)
        image8 = ski.img_as_ubyte(greyscale_im)
        blobs_log = run_log(image8, plot_im = False, log_params = log_params)
        clob = cell_counts(
            name = images.files[i],
            image = image8,
            blobs = blobs_log,
            pixels_per_micron= pixels_per_micron,
            log_params = log_params
        )
        counted.append(clob)
    return counted
def clob_to_dict(clob):
    """Reduce a cell_counts object to the summary fields used for export.

    Heavy members (the image array, the raw blob list) are deliberately
    left out; the file name is stripped of its extension.
    """
    summary = {
        'id': clob.id,
        'name': os.path.basename(clob.name)[:-4],
        'num_cells': clob.num_cells,
        'slice_area': clob.slice_area,
        'cells_per_um2': clob.cells_per_um2,
        'cells_per_mm2': clob.cells_per_mm2,
        'percent_slice': clob.percent_slice
    }
    return summary
def extract_panda(clob_list):
    """Build a pandas DataFrame of summary rows, one per cell_counts object.

    Bug fix: this module never imported pandas, so the original `pd` was an
    undefined name and the function raised NameError on first call.  The
    import is kept local so the module's import block is untouched.
    """
    import pandas as pd
    rows = [clob_to_dict(clob) for clob in clob_list]
    return pd.DataFrame(rows)
"skimage.feature.blob_log",
"skimage.color.rgb2gray",
"matplotlib.pyplot.Circle",
"math.sqrt",
"os.path.join",
"os.path.basename",
"matplotlib.pyplot.tight_layout",
"skimage.img_as_ubyte",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((561, 825), 'skimage.feature.blob_log', 'blob_log', (['image'], {'min_sigma': "log_params['min_s']", 'max_sigma': "log_params['max_s']", 'num_sigma': "log_params['num_s']", 'threshold': "log_params['thresh']", 'overlap': "log_params['overlap']", 'log_scale': "log_params['log_scale']", 'exclude_border': "log_params['exclude_border']"}), "(image, min_sigma=log_params['min_s'], max_sigma=log_params['max_s'\n ], num_sigma=log_params['num_s'], threshold=log_params['thresh'],\n overlap=log_params['overlap'], log_scale=log_params['log_scale'],\n exclude_border=log_params['exclude_border'])\n", (569, 825), False, 'from skimage.feature import blob_dog, blob_log, blob_doh\n'), ((1038, 1045), 'math.sqrt', 'sqrt', (['(2)'], {}), '(2)\n', (1042, 1045), False, 'from math import sqrt\n'), ((1145, 1207), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(20, 10)', 'sharex': '(True)', 'sharey': '(True)'}), '(1, 2, figsize=(20, 10), sharex=True, sharey=True)\n', (1157, 1207), True, 'from matplotlib import pyplot as plt\n'), ((1429, 1447), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1445, 1447), True, 'from matplotlib import pyplot as plt\n'), ((1456, 1466), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1464, 1466), True, 'from matplotlib import pyplot as plt\n'), ((4060, 4122), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(20, 10)', 'sharex': '(True)', 'sharey': '(True)'}), '(1, 2, figsize=(20, 10), sharex=True, sharey=True)\n', (4072, 4122), True, 'from matplotlib import pyplot as plt\n'), ((4355, 4373), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4371, 4373), True, 'from matplotlib import pyplot as plt\n'), ((4382, 4392), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4390, 4392), True, 'from matplotlib import pyplot as plt\n'), ((4670, 4708), 'os.path.join', 'os.path.join', (['image_directory', '"""*.tif"""'], {}), "(image_directory, 
'*.tif')\n", (4682, 4708), False, 'import os\n'), ((5577, 5592), 'skimage.color.rgb2gray', 'rgb2gray', (['image'], {}), '(image)\n', (5585, 5592), False, 'from skimage.color import rgb2gray\n'), ((5610, 5640), 'skimage.img_as_ubyte', 'ski.img_as_ubyte', (['greyscale_im'], {}), '(greyscale_im)\n', (5626, 5640), True, 'import skimage as ski\n'), ((1334, 1391), 'matplotlib.pyplot.Circle', 'plt.Circle', (['(x, y)', 'r'], {'color': '"""r"""', 'linewidth': '(2)', 'fill': '(False)'}), "((x, y), r, color='r', linewidth=2, fill=False)\n", (1344, 1391), True, 'from matplotlib import pyplot as plt\n'), ((1683, 1705), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (1699, 1705), False, 'import os\n'), ((4260, 4317), 'matplotlib.pyplot.Circle', 'plt.Circle', (['(x, y)', 'r'], {'color': '"""r"""', 'linewidth': '(2)', 'fill': '(False)'}), "((x, y), r, color='r', linewidth=2, fill=False)\n", (4270, 4317), True, 'from matplotlib import pyplot as plt\n'), ((6137, 6164), 'os.path.basename', 'os.path.basename', (['clob.name'], {}), '(clob.name)\n', (6153, 6164), False, 'import os\n')] |
# Copyright 2020 BULL SAS All rights reserved #
from collections import Counter
from logflow.logsparser.Pattern import Pattern
from loguru import logger
from typing import Dict, List
class Cardinality:
    """Pattern detection over all log lines sharing one length (word count).

    Args:
        counter_general (dict): maps a tokenized log line (sequence of words)
            to its number of occurrences across the whole dataset.
        cardinality (int): number of words per line in this group.
    """
    def __init__(self, counter_general : dict, cardinality : int):
        self.counter_general = counter_general
        self.cardinality = cardinality
        # dict_words[position][word] -> how many lines carry `word` at `position`
        self.dict_words : Dict[int, Dict[str, int]] = {}
        for i in range(self.cardinality):
            self.dict_words.setdefault(i, {})
        self.list_pattern : List[Pattern] = []
    def counter_word(self):
        """Count the number of words according to their place in the log.
        """
        for entry in self.counter_general:
            position = 0
            for word in entry:
                self.dict_words[position].setdefault(word,0)
                # weight by how many times this exact line occurred
                self.dict_words[position][word] += self.counter_general[entry]
                position += 1
    def detect_patterns(self):
        """Detect the pattern based on the maximum number of similar words.
        """
        for entry in self.counter_general:
            comparison_vector = [0]*self.cardinality
            position = 0
            entry = list(entry)
            # Once the dict_words is created, we get the number of entries with the same word by only one access to the dictionnary.
            for word in entry:
                comparison_vector[position] += self.dict_words[position][word]
                position += 1
            # We take the best subset of the similar words, i.e [10,10,2,2,2] keeps 2 as best subset.
            # NOTE(review): most_common(1) picks the most *frequent* count value
            # in the vector (ties broken by first occurrence), not the largest.
            best_subset_words_number = Counter(comparison_vector).most_common(1)[0][0] # [(value, nb_value)]
            # We compute the index of the words kept
            best_subset_words_index = [i for i, e in enumerate(comparison_vector) if e == best_subset_words_number]
            # And the words theirself.
            best_subset_words_value = [entry[i] for i in best_subset_words_index]
            self.list_pattern.append(Pattern(self.cardinality, best_subset_words_value, best_subset_words_index))
        # deduplicate patterns (Pattern must be hashable for this)
        self.list_pattern = list(set(self.list_pattern))
        # logger.debug("Cardinality: " + str(self.cardinality) + " found " + str(len(self.list_pattern)) + " patterns")
    def order_pattern(self):
        """Order the pattern by size to have a fast association between lines and patterns.

        Assumes self.dict_patterns already exists (created by compute()).
        """
        for pattern in self.list_pattern:
            self.dict_patterns.setdefault(len(pattern), [])
            self.dict_patterns[len(pattern)].append(pattern)
    def compute(self) -> Dict[int, List[Pattern]]:
        """Start the workflow for the multithreading implementation.

        Returns:
            (dict): the dict of patterns detected, keyed by pattern length.
        """
        self.counter_word()
        self.detect_patterns()
        self.dict_patterns : Dict[int, List[Pattern]]= {}
        self.order_pattern()
        return self.dict_patterns
| [
"logflow.logsparser.Pattern.Pattern",
"collections.Counter"
] | [((2261, 2336), 'logflow.logsparser.Pattern.Pattern', 'Pattern', (['self.cardinality', 'best_subset_words_value', 'best_subset_words_index'], {}), '(self.cardinality, best_subset_words_value, best_subset_words_index)\n', (2268, 2336), False, 'from logflow.logsparser.Pattern import Pattern\n'), ((1864, 1890), 'collections.Counter', 'Counter', (['comparison_vector'], {}), '(comparison_vector)\n', (1871, 1890), False, 'from collections import Counter\n')] |
from django.db import models
from django.contrib.auth.models import User
from cloudinary.models import CloudinaryField
from django.dispatch import receiver
from django.db.models.signals import post_save
class Project(models.Model):
    '''Model class for Projects that a user posts'''
    # owner of the project; deleting the user cascades to their projects
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='project')
    title = models.CharField(max_length=50)
    project_image = CloudinaryField('image')  # screenshot hosted on Cloudinary
    description = models.TextField()
    live_link = models.CharField(max_length=100)  # URL of the deployed project
    def __str__(self):
        return '{} project {}'.format(self.user.username, self.title)
class Profile(models.Model):
    '''Model class for User profile (one-to-one with the auth User)'''
    user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile')
    profile_picture = CloudinaryField('image')  # avatar hosted on Cloudinary
    profile_bio = models.TextField()
    contact_info = models.EmailField()  # public contact e-mail shown on the profile
    def __str__(self):
        return self.user.username
# @receiver(post_save, sender=User)
# def create_profile(sender, instance, created, **kwargs):
# if created:
# Profile.objects.create(user=instance)
class Rating(models.Model):
    '''Model class for rating values a user gives a project
    (design / usability / content scores).'''
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='rating')
    project = models.ForeignKey(Project, on_delete=models.CASCADE, related_name='projectrating')
    design = models.IntegerField()
    usability = models.IntegerField()
    content = models.IntegerField()

    def __str__(self):
        # Bug fix: the original returned self.user.design -- the auth User has
        # no 'design' attribute (AttributeError), and __str__ must return str.
        return '{} rating for {}'.format(self.user.username, self.project.title)
"django.db.models.OneToOneField",
"django.db.models.EmailField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"cloudinary.models.CloudinaryField",
"django.db.models.CharField"
] | [((294, 367), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""project"""'}), "(User, on_delete=models.CASCADE, related_name='project')\n", (311, 367), False, 'from django.db import models\n'), ((380, 411), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (396, 411), False, 'from django.db import models\n'), ((432, 456), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""image"""'], {}), "('image')\n", (447, 456), False, 'from cloudinary.models import CloudinaryField\n'), ((475, 493), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (491, 493), False, 'from django.db import models\n'), ((510, 542), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (526, 542), False, 'from django.db import models\n'), ((717, 793), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""profile"""'}), "(User, on_delete=models.CASCADE, related_name='profile')\n", (737, 793), False, 'from django.db import models\n'), ((816, 840), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""image"""'], {}), "('image')\n", (831, 840), False, 'from cloudinary.models import CloudinaryField\n'), ((859, 877), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (875, 877), False, 'from django.db import models\n'), ((897, 916), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (914, 916), False, 'from django.db import models\n'), ((1218, 1290), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""rating"""'}), "(User, on_delete=models.CASCADE, related_name='rating')\n", (1235, 1290), False, 'from django.db import models\n'), ((1305, 1392), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Project'], {'on_delete': 
'models.CASCADE', 'related_name': '"""projectrating"""'}), "(Project, on_delete=models.CASCADE, related_name=\n 'projectrating')\n", (1322, 1392), False, 'from django.db import models\n'), ((1401, 1422), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1420, 1422), False, 'from django.db import models\n'), ((1439, 1460), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1458, 1460), False, 'from django.db import models\n'), ((1475, 1496), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1494, 1496), False, 'from django.db import models\n')] |
from gui.gui import Main

if __name__ == '__main__':
    # Launch the GUI only when run as a script, not when imported.
    Main()
| [
"gui.gui.Main"
] | [((26, 32), 'gui.gui.Main', 'Main', ([], {}), '()\n', (30, 32), False, 'from gui.gui import Main\n')] |
import pdb
import numpy
import geometry.conversions
import geometry.helpers
import geometry.quaternion
import geodesy.conversions
import environments.earth
import spherical_geometry.vector
import spherical_geometry.great_circle_arc
def line_distance(point_1, point_2, ignore_alt=True):
    """Compute the straight line distance between two points on the earth

    Arguments:
        point_1: numpy (1,3) array giving lat/lon/alt of the first point
        point_2: numpy (1,3) array giving lat/lon/alt of the second point
        ignore_alt: optional, ignore the altitude component, defaults True

    Returns:
        r: distance between the points (m)
    """
    delta_ned = geodesy.conversions.lla_to_ned(point_1, point_2)
    if ignore_alt:
        # zero out the down component so only horizontal distance remains
        delta_ned = delta_ned * numpy.array([1.0, 1.0, 0.0], ndmin=2)
    return numpy.linalg.norm(delta_ned)
def arc_length(point_1, point_2, ignore_alt=True):
    """Compute the great circle arc length between two points on a sphere

    Arguments:
        point_1: numpy (1,3) array giving lat/lon/alt of the first point
        point_2: numpy (1,3) array giving lat/lon/alt of the second point
        ignore_alt: optional, kept for interface symmetry (the unit-sphere
            computation never uses altitude), defaults True

    Returns:
        arc_length: the great circle arc angle (radians)
    """
    vec_1 = geodesy.conversions.lla_to_vector(point_1)
    vec_2 = geodesy.conversions.lla_to_vector(point_2)
    return spherical_geometry.great_circle_arc.length(
        vec_1, vec_2, degrees=False)
def arc_distance(point_1, point_2, r=None, ignore_alt=True):
    """Compute the great circle distance between two points on a sphere

    Arguments:
        point_1: numpy (1,3) array giving lat/lon/alt of the first point
        point_2: numpy (1,3) array giving lat/lon/alt of the second point
        r: radius of the sphere we're on. Defaults to the earth
        ignore_alt: optional, ignore the altitude component, defaults True

    Returns:
        arc_distance: the great circle distance (m)
    """
    theta = arc_length(point_1, point_2, ignore_alt=ignore_alt)
    radius = environments.earth.constants['r0'] if r is None else r
    return theta * radius
def great_circle_direction(point_1, point_2):
    """Compute the direction for a great circle arc

    Arguments:
        point_1: the starting point of the great circle. The direction will be
            given in a NED frame at this point. Numpy (3,) array in radians, lla
        point_2: the other end of the great circle. can specify a numpy (3,)
            array for a single computation or a numpy (n,3) array for a series
            of computations.

    Returns:
        r_hat: the initial direction of the great circle starting from point_1
    """
    if point_2.ndim > 1:
        # vectorized case: one recursive call per destination point
        directions = numpy.zeros(point_2.shape)
        for idx, coord in enumerate(point_2):
            directions[idx] = great_circle_direction(point_1, coord)
        return directions
    xyz_1 = geodesy.conversions.lla_to_xyz(point_1)
    xyz_2 = geodesy.conversions.lla_to_xyz(point_2)
    khat_xyz = geometry.conversions.to_unit_vector(xyz_1)
    delta = xyz_2 - xyz_1
    delta_hat = geometry.conversions.to_unit_vector(delta)
    # Project the chord direction onto the plane tangent to the sphere at
    # point_1: k x (d x k) removes the radial component of delta_hat.
    r_xyz = numpy.cross(khat_xyz, numpy.cross(delta_hat, khat_xyz))
    # Express that tangent direction in the local NED frame at point_1.
    r_hat = geodesy.conversions.xyz_to_ned(
        r_xyz + xyz_1, numpy.array(point_1, ndmin=2))[0]
    return geometry.conversions.to_unit_vector(r_hat)
def distance_on_great_circle(start_point, direction, distance):
    """Locate the point a specified distance along a great circle.

    NOTE: This assumes a spherical earth. The position error introduced is
    fairly small (~15 km over a 13000 km path), but the altitude it produces
    is unreliable. DO NOT USE THE ALTITUDE FROM THIS FUNCTION, ESPECIALLY
    OVER ANY MEANINGFUL DISTANCE.

    Arguments:
        start_point: the starting point of the great circle. The direction is
            given in a NED frame at this point. Numpy (3,) array in radians, lla
        direction: a NED vector indicating the direction of the great circle
        distance: the length of the great circle arc (m)

    Returns:
        end_point: the end of a great circle path of length <distance> from
            <start_point> with initial <direction>
    """
    start_xyz = geodesy.conversions.lla_to_xyz(start_point)
    ned_hat = geometry.conversions.to_unit_vector(direction)
    delta_xyz = geodesy.conversions.ned_to_xyz(
        ned_hat, numpy.array(start_point, ndmin=2))
    # Rotation axis is normal to the plane containing the start point and
    # the departure direction; negated to rotate in the travel direction.
    axis = -geometry.conversions.to_unit_vector(
        numpy.cross(start_xyz, delta_xyz))
    # Arc length over radius gives the central angle to rotate through.
    angle = distance / environments.earth.constants['r0']
    rotation = geometry.quaternion.Quaternion()
    rotation.from_axis_and_rotation(axis, angle)
    return geodesy.conversions.xyz_to_lla(rotation.rot(start_xyz))
| [
"numpy.array",
"numpy.zeros",
"numpy.cross",
"numpy.linalg.norm"
] | [((810, 831), 'numpy.linalg.norm', 'numpy.linalg.norm', (['dX'], {}), '(dX)\n', (827, 831), False, 'import numpy\n'), ((760, 797), 'numpy.array', 'numpy.array', (['[1.0, 1.0, 0.0]'], {'ndmin': '(2)'}), '([1.0, 1.0, 0.0], ndmin=2)\n', (771, 797), False, 'import numpy\n'), ((2757, 2783), 'numpy.zeros', 'numpy.zeros', (['point_2.shape'], {}), '(point_2.shape)\n', (2768, 2783), False, 'import numpy\n'), ((3209, 3241), 'numpy.cross', 'numpy.cross', (['delta_hat', 'khat_xyz'], {}), '(delta_hat, khat_xyz)\n', (3220, 3241), False, 'import numpy\n'), ((4469, 4502), 'numpy.array', 'numpy.array', (['start_point'], {'ndmin': '(2)'}), '(start_point, ndmin=2)\n', (4480, 4502), False, 'import numpy\n'), ((3310, 3339), 'numpy.array', 'numpy.array', (['point_1'], {'ndmin': '(2)'}), '(point_1, ndmin=2)\n', (3321, 3339), False, 'import numpy\n'), ((4571, 4604), 'numpy.cross', 'numpy.cross', (['start_xyz', 'delta_xyz'], {}), '(start_xyz, delta_xyz)\n', (4582, 4604), False, 'import numpy\n')] |
import doctest
import unittest
from zope.site.folder import Folder
from zope.site.testing import siteSetUp, siteTearDown, checker
from zope.site.tests.test_site import TestSiteManagerContainer
def setUp(test=None):
    """Test/doctest fixture: delegate to zope.site.testing.siteSetUp."""
    siteSetUp()
def tearDown(test=None):
    """Test/doctest fixture: delegate to zope.site.testing.siteTearDown."""
    siteTearDown()
class FolderTest(TestSiteManagerContainer):
    """Run the inherited site-manager-container tests against Folder."""
    def makeTestObject(self):
        # Hook used by TestSiteManagerContainer to obtain the object under test.
        return Folder()
def test_suite():
    """Assemble this module's unittest suite: unit tests plus doctests."""
    flags = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
    suites = [
        unittest.defaultTestLoader.loadTestsFromName(__name__),
        doctest.DocTestSuite('zope.site.folder',
                             setUp=setUp, tearDown=tearDown),
        doctest.DocFileSuite("folder.txt",
                             setUp=setUp, tearDown=tearDown,
                             checker=checker, optionflags=flags),
    ]
    return unittest.TestSuite(suites)
| [
"doctest.DocTestSuite",
"doctest.DocFileSuite",
"zope.site.testing.siteSetUp",
"zope.site.folder.Folder",
"unittest.defaultTestLoader.loadTestsFromName",
"zope.site.testing.siteTearDown"
] | [((224, 235), 'zope.site.testing.siteSetUp', 'siteSetUp', ([], {}), '()\n', (233, 235), False, 'from zope.site.testing import siteSetUp, siteTearDown, checker\n'), ((267, 281), 'zope.site.testing.siteTearDown', 'siteTearDown', ([], {}), '()\n', (279, 281), False, 'from zope.site.testing import siteSetUp, siteTearDown, checker\n'), ((374, 382), 'zope.site.folder.Folder', 'Folder', ([], {}), '()\n', (380, 382), False, 'from zope.site.folder import Folder\n'), ((503, 557), 'unittest.defaultTestLoader.loadTestsFromName', 'unittest.defaultTestLoader.loadTestsFromName', (['__name__'], {}), '(__name__)\n', (547, 557), False, 'import unittest\n'), ((567, 639), 'doctest.DocTestSuite', 'doctest.DocTestSuite', (['"""zope.site.folder"""'], {'setUp': 'setUp', 'tearDown': 'tearDown'}), "('zope.site.folder', setUp=setUp, tearDown=tearDown)\n", (587, 639), False, 'import doctest\n'), ((678, 785), 'doctest.DocFileSuite', 'doctest.DocFileSuite', (['"""folder.txt"""'], {'setUp': 'setUp', 'tearDown': 'tearDown', 'checker': 'checker', 'optionflags': 'flags'}), "('folder.txt', setUp=setUp, tearDown=tearDown, checker=\n checker, optionflags=flags)\n", (698, 785), False, 'import doctest\n')] |
import requests
import os

# Fails fast with a KeyError here if the key is not configured.
scrape_key = os.environ['SCRAPE_KEY']

payload = {'key': scrape_key}
headers = {'content-type': 'application/json', 'Accept-Charset': 'UTF-8'}

# Always pass a timeout to requests: without one a stalled server makes
# this script hang forever (requests has no default timeout).
response = requests.put('https://apw.locrian24.now.sh/api/scrape',
                        headers=headers, json=payload, timeout=30)
print(response)
| [
"requests.put"
] | [((181, 271), 'requests.put', 'requests.put', (['"""https://apw.locrian24.now.sh/api/scrape"""'], {'headers': 'headers', 'json': 'payload'}), "('https://apw.locrian24.now.sh/api/scrape', headers=headers,\n json=payload)\n", (193, 271), False, 'import requests\n')] |
# Author: <NAME>
# Datetime: 2021/9/14
# Copyright belongs to the author.
# Please indicate the source for reprinting.
import sys
import webbrowser
import time
from typing import List
import tkinter
from tkinter import ttk
from tkinter.scrolledtext import ScrolledText
from qgui.manager import BLACK, FONT
from qgui.banner_tools import BaseBarTool
from qgui.third_party.collapsing_frame import CollapsingFrame
from qgui.notebook_tools import BaseNotebookTool
from qgui.os_tools import StdOutWrapper, DataCache
from qgui.base_tools import ArgInfo
TITLE_BG_COLOR = BLACK
# ToDo 主题部分可考虑通过增加warmup来解决
class _Backbone:
    """Shared foundation of every top-level UI region; holds common state."""

    def __init__(self, f_style="primary"):
        """
        NOTE: callers are responsible for placing ``self.frame`` (place/pack
        etc.); an unplaced frame is never displayed.

        :param f_style: ttk style prefix used for this region's frame.
        """
        # Regions position themselves with ``place`` throughout.
        self.style = f_style
        # Shared argument registry for the whole window.
        self.global_info = ArgInfo()

    def build(self, master, global_info):
        """Create the backing ttk frame under *master* and adopt *global_info*."""
        self.global_info = global_info
        self.frame = ttk.Frame(master, style=self.style + ".TFrame")
class BaseNavigation(_Backbone):
    """Basic frame of the navigation bar on the left-hand side."""
    def __init__(self, style="primary"):
        super(BaseNavigation, self).__init__(f_style=style)
        # Registered tab widgets (not used by the code visible here).
        self.tabs = dict()
    def add_about(self,
                  author: str = "未知作者",
                  version: str = "0.0.1",
                  github_url: str = None,
                  other_info: List[str] = None):
        """Append a collapsible "about" section with author/version details.

        :param author: author name rendered in the section.
        :param version: version string rendered in the section.
        :param github_url: optional URL; when given, a clickable label is
            added that opens it in the default web browser.
        :param other_info: optional extra lines, rendered one label each.
        """
        bus_cf = CollapsingFrame(self.frame)
        bus_cf.pack(fill='x', pady=0)
        bus_frm = ttk.Frame(bus_cf, padding=5)
        bus_frm.columnconfigure(1, weight=1)
        bus_cf.add(bus_frm, title="相关信息", style='secondary.TButton')
        ttk.Label(bus_frm, text=f"作者:\t{author}", style="TLabel", justify="left", wraplength=160).pack(anchor="nw")
        ttk.Label(bus_frm, text=f"版本:\t{version}", style="TLabel", justify="left", wraplength=160).pack(anchor="nw")
        if other_info:
            for line in other_info:
                ttk.Label(bus_frm, text=line, style="TLabel").pack(anchor="nw")
        if github_url:
            def github_callback(event):
                # Bound to left-click on the label below.
                webbrowser.open_new(github_url)
            github_label = ttk.Label(bus_frm, text=f"> 进入GitHub", style="info.TLabel", justify="left")
            github_label.pack(anchor="nw")
            github_label.bind("<Button-1>", github_callback)
    def add_info(self,
                 title: str,
                 info: str):
        """Append a collapsible section showing a single block of text.

        :param title: button title of the collapsible section.
        :param info: text rendered inside the section (wrapped at 160 px).
        """
        bus_cf = CollapsingFrame(self.frame)
        bus_cf.pack(fill='x', pady=0)
        bus_frm = ttk.Frame(bus_cf, padding=5)
        bus_frm.columnconfigure(1, weight=1)
        bus_cf.add(bus_frm, title=title, style='secondary.TButton', justify="left")
        ttk.Label(bus_frm, text=info, style="TLabel", wraplength=160).pack(anchor="nw")
    def build(self, master, global_info):
        """Create the frame and pin it along the left edge of the window."""
        super(BaseNavigation, self).build(master, global_info)
        self.frame.place(x=0, y=50, width=180, height=470)
class BaseNoteBook(_Backbone):
    """Basic frame of the central Notebook (tab) area plus the console log."""
    def __init__(self,
                 style="primary",
                 tab_names: List[str] = None,
                 stdout=None):
        """
        :param style: ttk style prefix for the frames.
        :param tab_names: optional tab titles; a single default tab is
            created in :meth:`build` when omitted.
        :param stdout: stream that redirected output is mirrored to;
            defaults to the current ``sys.stdout``.
        """
        super(BaseNoteBook, self).__init__(f_style=style)
        self.tab_names = tab_names
        self.nb_frames = list()
        # Running count of lines in the console text widget; starts at 2
        # because build() inserts one line and tk text indices are 1-based.
        self.line_len = 2
        if not stdout:
            stdout = sys.stdout
        self.stdout = stdout
        # Redirect stdout/stderr so everything printed is also appended to
        # the console text area via _write_log_callback.
        sys.stdout = StdOutWrapper(self.stdout, callback=self._write_log_callback)
        sys.stderr = StdOutWrapper(self.stdout, callback=self._write_log_callback)
        # Cache of images shown in the console (presumably to keep
        # PhotoImage references alive -- TODO confirm).
        self.image_cache = DataCache()
    def add_tool(self, tool: BaseNotebookTool, to_notebook=True):
        """Build *tool* and register its arguments in the global registry.

        :param tool: the notebook tool to instantiate.
        :param to_notebook: when True the widget is parented to the tab frame
            selected by ``tool.tab_index``; otherwise it is built detached
            (used by :meth:`print_tool`).
        :return: the frame returned by ``tool.build``.
        :raises ValueError: if ``tool.tab_index`` is out of range.
        """
        if tool.tab_index >= len(self.nb_frames):
            raise ValueError(f"设置的index大小越界,当前页面数量为{len(self.nb_frames)},分别为:{self.nb_frames},而"
                             f"您设置的index为{tool.tab_index},超过了当前页面数量。")
        if to_notebook:
            frame = self.nb_frames[tool.tab_index]
            tool_frame = tool.build(master=frame, global_info=self.global_info)
        else:
            tool_frame = tool.build(global_info=self.global_info)
        tool_info = tool.get_arg_info()
        self.global_info += tool_info
        return tool_frame
    def build(self, master, global_info):
        """Create the notebook, its tab frames and the console log area."""
        super(BaseNoteBook, self).build(master, global_info)
        self.frame.place(x=182, y=55, width=750, height=460)
        self.nb = ttk.Notebook(self.frame)
        self.nb.pack(side="top", fill="both")
        if self.tab_names:
            for tab_name in self.tab_names:
                sub_frame = ttk.Frame(self.nb)
                sub_frame.pack(anchor="nw", expand="yes")
                self.nb_frames.append(sub_frame)
                self.nb.add(sub_frame, text=tab_name)
        else:
            # No names supplied: create one default tab.
            sub_frame = ttk.Frame(self.nb)
            sub_frame.pack(anchor="nw", expand="yes")
            self.nb_frames.append(sub_frame)
            self.nb.add(sub_frame, text="主程序控制台")
        self.global_info += ArgInfo(name="QGUI-BaseNoteBook",
                                    set_func=self._select_notebook_callback,
                                    get_func=lambda: print("BaseNoteBook不支持get"))
        # Console output area below the notebook.
        self.console_frame = ttk.Frame(self.frame,
                                       style=self.style + ".TFrame")
        self.console_frame.pack(side="top", fill='both', expand="yes")
        # Console title label.
        self.title = ttk.Label(self.console_frame,
                               font=(FONT, 15),
                               style=self.style + ".Inverse.TLabel",
                               text="控制台日志",
                               justify="left")
        self.title.pack(side="top", fill="x", padx=10, pady=5)
        # Scrollable, read-only log text widget.
        self.text_area = ScrolledText(self.console_frame,
                                     highlightcolor=master.style.colors.primary,
                                     highlightbackground=master.style.colors.border,
                                     highlightthickness=1)
        self.text_area.pack(fill="both", expand="yes")
        self.text_area.insert("end", "控制台链接成功\n")
        self.text_area.configure(state="disable")
    def print_tool(self, tool: BaseNotebookTool):
        """Render *tool* inline inside the console text area."""
        self.text_area.configure(state="normal")
        self.text_area.window_create("end", window=self.add_tool(tool, to_notebook=False))
        self.text_area.configure(state="disable")
        # Empty print moves the log cursor past the embedded widget.
        print("")
    def print_image(self, image):
        """Render an image inline in the console, scaled to 128 px max side.

        :param image: a file path or an already-opened PIL image.
        """
        from PIL import Image, ImageTk
        if isinstance(image, str):
            image = Image.open(image)
        w, h = image.size
        # Scale so the longest side becomes 128 px, preserving aspect ratio.
        scale = 128 / max(w, h)
        w *= scale
        h *= scale
        image = image.resize((int(w), int(h)))
        image = ImageTk.PhotoImage(image)
        # Keep a reference to the PhotoImage in the cache.
        self.image_cache += image
        self.text_area.configure(state="normal")
        self.text_area.image_create("end", image=image)
        self.text_area.configure(state="disable")
        print("")
    def _select_notebook_callback(self, index):
        """set_func hook for the arg registry: switch the visible tab."""
        self.nb.select(index)
    def _write_log_callback(self, text):
        """Append redirected stdout/stderr *text* to the console widget."""
        self.text_area.configure(state="normal")
        # Adapt "\r"-style progress bars: overwrite the last console line
        # instead of appending a new one.
        if "\r" in text:
            self.text_area.delete(str(self.line_len) + ".0", str(self.line_len) + ".end")
            self.line_len -= 1
            text = text[text.index("\r") + 1:] + " "
        if len(text) > 0 and text != "\n":
            # Prefix each non-empty line with an HH:MM:SS timestamp.
            text = time.strftime("%H:%M:%S", time.localtime()) + "\t" + text
        self.text_area.insert("end", text)
        self.line_len += 1
        self.text_area.configure(state="disable")
        self.text_area.see("end")
class BaseBanner(_Backbone):
    """Basic frame of the banner (title bar) across the top of the window."""
    def __init__(self,
                 title: str = "QGUI测试程序",
                 style="primary"):
        """
        :param title: window title text rendered at the right of the banner.
        :param style: ttk style prefix for the banner frame.
        """
        super(BaseBanner, self).__init__(f_style=style)
        # Image resources for banner tools (unused in the code visible here).
        self.img_info = dict()
        self.title = title
    def add_tool(self, tool: BaseBarTool):
        """Add a small tool/button component to the banner.

        :param tool: the bar tool to build into this banner.
        """
        tool.build(master=self.frame, global_info=self.global_info)
        tool_info = tool.get_arg_info()
        self.global_info += tool_info
    def build(self, master, global_info):
        """Create the banner frame, a spacer strip and the main title label."""
        super(BaseBanner, self).build(master, global_info)
        self.frame.place(x=0, y=0, width=940, height=50)
        # Placeholder/spacer strip beside the title.
        black = tkinter.Frame(self.frame,
                              height=10,
                              bg=TITLE_BG_COLOR)
        black.pack(side="right", anchor="se")
        # Main title.
        title = ttk.Label(self.frame,
                          font=(FONT, 25),
                          text=self.title,
                          style=self.style + ".Inverse.TLabel")
        title.pack(side="right", anchor="se", padx=5, pady=3)
if __name__ == '__main__':
    # Library module: nothing to do when executed directly.
    pass
| [
"PIL.Image.open",
"tkinter.ttk.Frame",
"tkinter.ttk.Label",
"webbrowser.open_new",
"qgui.os_tools.StdOutWrapper",
"qgui.third_party.collapsing_frame.CollapsingFrame",
"qgui.base_tools.ArgInfo",
"tkinter.scrolledtext.ScrolledText",
"tkinter.ttk.Notebook",
"time.localtime",
"qgui.os_tools.DataCach... | [((888, 897), 'qgui.base_tools.ArgInfo', 'ArgInfo', ([], {}), '()\n', (895, 897), False, 'from qgui.base_tools import ArgInfo\n'), ((962, 1009), 'tkinter.ttk.Frame', 'ttk.Frame', (['master'], {'style': "(self.style + '.TFrame')"}), "(master, style=self.style + '.TFrame')\n", (971, 1009), False, 'from tkinter import ttk\n'), ((1458, 1485), 'qgui.third_party.collapsing_frame.CollapsingFrame', 'CollapsingFrame', (['self.frame'], {}), '(self.frame)\n', (1473, 1485), False, 'from qgui.third_party.collapsing_frame import CollapsingFrame\n'), ((1543, 1571), 'tkinter.ttk.Frame', 'ttk.Frame', (['bus_cf'], {'padding': '(5)'}), '(bus_cf, padding=5)\n', (1552, 1571), False, 'from tkinter import ttk\n'), ((2479, 2506), 'qgui.third_party.collapsing_frame.CollapsingFrame', 'CollapsingFrame', (['self.frame'], {}), '(self.frame)\n', (2494, 2506), False, 'from qgui.third_party.collapsing_frame import CollapsingFrame\n'), ((2564, 2592), 'tkinter.ttk.Frame', 'ttk.Frame', (['bus_cf'], {'padding': '(5)'}), '(bus_cf, padding=5)\n', (2573, 2592), False, 'from tkinter import ttk\n'), ((3779, 3840), 'qgui.os_tools.StdOutWrapper', 'StdOutWrapper', (['self.stdout'], {'callback': 'self._write_log_callback'}), '(self.stdout, callback=self._write_log_callback)\n', (3792, 3840), False, 'from qgui.os_tools import StdOutWrapper, DataCache\n'), ((3862, 3923), 'qgui.os_tools.StdOutWrapper', 'StdOutWrapper', (['self.stdout'], {'callback': 'self._write_log_callback'}), '(self.stdout, callback=self._write_log_callback)\n', (3875, 3923), False, 'from qgui.os_tools import StdOutWrapper, DataCache\n'), ((3952, 3963), 'qgui.os_tools.DataCache', 'DataCache', ([], {}), '()\n', (3961, 3963), False, 'from qgui.os_tools import StdOutWrapper, DataCache\n'), ((4772, 4796), 'tkinter.ttk.Notebook', 'ttk.Notebook', (['self.frame'], {}), '(self.frame)\n', (4784, 4796), False, 'from tkinter import ttk\n'), ((5599, 5650), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.frame'], {'style': 
"(self.style + '.TFrame')"}), "(self.frame, style=self.style + '.TFrame')\n", (5608, 5650), False, 'from tkinter import ttk\n'), ((5796, 5914), 'tkinter.ttk.Label', 'ttk.Label', (['self.console_frame'], {'font': '(FONT, 15)', 'style': "(self.style + '.Inverse.TLabel')", 'text': '"""控制台日志"""', 'justify': '"""left"""'}), "(self.console_frame, font=(FONT, 15), style=self.style +\n '.Inverse.TLabel', text='控制台日志', justify='left')\n", (5805, 5914), False, 'from tkinter import ttk\n'), ((6137, 6287), 'tkinter.scrolledtext.ScrolledText', 'ScrolledText', (['self.console_frame'], {'highlightcolor': 'master.style.colors.primary', 'highlightbackground': 'master.style.colors.border', 'highlightthickness': '(1)'}), '(self.console_frame, highlightcolor=master.style.colors.primary,\n highlightbackground=master.style.colors.border, highlightthickness=1)\n', (6149, 6287), False, 'from tkinter.scrolledtext import ScrolledText\n'), ((7120, 7145), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['image'], {}), '(image)\n', (7138, 7145), False, 'from PIL import Image, ImageTk\n'), ((8706, 8761), 'tkinter.Frame', 'tkinter.Frame', (['self.frame'], {'height': '(10)', 'bg': 'TITLE_BG_COLOR'}), '(self.frame, height=10, bg=TITLE_BG_COLOR)\n', (8719, 8761), False, 'import tkinter\n'), ((8898, 8995), 'tkinter.ttk.Label', 'ttk.Label', (['self.frame'], {'font': '(FONT, 25)', 'text': 'self.title', 'style': "(self.style + '.Inverse.TLabel')"}), "(self.frame, font=(FONT, 25), text=self.title, style=self.style +\n '.Inverse.TLabel')\n", (8907, 8995), False, 'from tkinter import ttk\n'), ((2200, 2275), 'tkinter.ttk.Label', 'ttk.Label', (['bus_frm'], {'text': 'f"""> 进入GitHub"""', 'style': '"""info.TLabel"""', 'justify': '"""left"""'}), "(bus_frm, text=f'> 进入GitHub', style='info.TLabel', justify='left')\n", (2209, 2275), False, 'from tkinter import ttk\n'), ((5161, 5179), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.nb'], {}), '(self.nb)\n', (5170, 5179), False, 'from tkinter import ttk\n'), ((6943, 
6960), 'PIL.Image.open', 'Image.open', (['image'], {}), '(image)\n', (6953, 6960), False, 'from PIL import Image, ImageTk\n'), ((1695, 1788), 'tkinter.ttk.Label', 'ttk.Label', (['bus_frm'], {'text': 'f"""作者:\t{author}"""', 'style': '"""TLabel"""', 'justify': '"""left"""', 'wraplength': '(160)'}), "(bus_frm, text=f'作者:\\t{author}', style='TLabel', justify='left',\n wraplength=160)\n", (1704, 1788), False, 'from tkinter import ttk\n'), ((1811, 1905), 'tkinter.ttk.Label', 'ttk.Label', (['bus_frm'], {'text': 'f"""版本:\t{version}"""', 'style': '"""TLabel"""', 'justify': '"""left"""', 'wraplength': '(160)'}), "(bus_frm, text=f'版本:\\t{version}', style='TLabel', justify='left',\n wraplength=160)\n", (1820, 1905), False, 'from tkinter import ttk\n'), ((2140, 2171), 'webbrowser.open_new', 'webbrowser.open_new', (['github_url'], {}), '(github_url)\n', (2159, 2171), False, 'import webbrowser\n'), ((2731, 2792), 'tkinter.ttk.Label', 'ttk.Label', (['bus_frm'], {'text': 'info', 'style': '"""TLabel"""', 'wraplength': '(160)'}), "(bus_frm, text=info, style='TLabel', wraplength=160)\n", (2740, 2792), False, 'from tkinter import ttk\n'), ((4943, 4961), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.nb'], {}), '(self.nb)\n', (4952, 4961), False, 'from tkinter import ttk\n'), ((1996, 2041), 'tkinter.ttk.Label', 'ttk.Label', (['bus_frm'], {'text': 'line', 'style': '"""TLabel"""'}), "(bus_frm, text=line, style='TLabel')\n", (2005, 2041), False, 'from tkinter import ttk\n'), ((7839, 7855), 'time.localtime', 'time.localtime', ([], {}), '()\n', (7853, 7855), False, 'import time\n')] |
import infer_organism
import subprocess as sp
print(infer_organism.infer(
file_1="./first_mate.fastq",
min_match=2,factor=1,
transcript_fasta="transcripts.fasta.zip"
))
print(infer_organism.infer(
file_1="./SRR13496438.fastq.gz",
min_match=2,factor=1,
transcript_fasta="transcripts.fasta.zip"
))
'''
print(infer_read_orientation.infer(
file_1="./files/SRR13496438.fastq.gz",
fasta="transcripts.fasta.zip",
organism="oaries"
))
import subprocess as sp
file_1 = "./files/SRR13496438.fastq.gz"
quant_single = "kallisto quant -i transcripts.idx -o output" + \
" -l 100 -s 300 --single " + file_1
result = sp.run(quant_single, shell=True,capture_output=True, text=True)
print(result.stderr)
print(result.returncode)
'''
| [
"infer_organism.infer"
] | [((56, 174), 'infer_organism.infer', 'infer_organism.infer', ([], {'file_1': '"""./first_mate.fastq"""', 'min_match': '(2)', 'factor': '(1)', 'transcript_fasta': '"""transcripts.fasta.zip"""'}), "(file_1='./first_mate.fastq', min_match=2, factor=1,\n transcript_fasta='transcripts.fasta.zip')\n", (76, 174), False, 'import infer_organism\n'), ((184, 306), 'infer_organism.infer', 'infer_organism.infer', ([], {'file_1': '"""./SRR13496438.fastq.gz"""', 'min_match': '(2)', 'factor': '(1)', 'transcript_fasta': '"""transcripts.fasta.zip"""'}), "(file_1='./SRR13496438.fastq.gz', min_match=2, factor=1,\n transcript_fasta='transcripts.fasta.zip')\n", (204, 306), False, 'import infer_organism\n')] |
# Copyright 2017 Neosapience, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
import unittest
import darkon
import tensorflow as tf
import numpy as np
_classes = 2
def nn_graph(activation):
    """Build a tiny conv -> dense -> softmax graph on the default TF graph.

    The graph is created for its side effects; only the input placeholder
    is returned to the caller (used for feed_dict keys).
    """
    x = tf.placeholder(tf.float32, (1, 2, 2, 3), 'x_placeholder')
    # Label placeholder is created as part of the graph but not returned.
    y = tf.placeholder(tf.int32, name='y_placeholder', shape=[1, 2])
    with tf.name_scope('conv1'):
        conv_out = tf.layers.conv2d(inputs=x,
                                    filters=10,
                                    kernel_size=[2, 2],
                                    padding="same",
                                    activation=activation)
    with tf.name_scope('fc2'):
        dense_out = tf.layers.dense(tf.layers.flatten(conv_out), _classes)
    logits = tf.nn.softmax(dense_out)
    return x
class GradcamGuidedBackprop(unittest.TestCase):
    """Check guided-backprop gradient gating for many activation functions.

    Pattern note: each ``test_*`` method only records which activation to use
    and whether guided backprop is enabled; the shared graph construction and
    the actual assertions live in :meth:`tearDown`, which unittest runs after
    every test method.
    """
    def setUp(self):
        # Start each test from a clean graph so ops cannot leak between tests.
        tf.reset_default_graph()
    def tearDown(self):
        """Build the graph and assert the gradient behavior the test selected.

        When guided backprop is enabled the gradient flowing back from
        ``y = sum(-max(featuremap) * 1e2)`` is asserted to be all zeros;
        otherwise it must be non-zero somewhere.
        """
        x = nn_graph(activation=self.activation_fn)
        image = np.random.uniform(size=(2, 2, 3))
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            gradcam_ops = darkon.Gradcam.candidate_featuremap_op_names(sess)
            if self.enable_guided_backprop:
                # Constructing Gradcam presumably registers the guided
                # backprop gradient overrides as a side effect; the instance
                # itself is unused -- TODO confirm against darkon docs.
                _ = darkon.Gradcam(x, _classes, gradcam_ops[-1])
            g = tf.get_default_graph()
            from_ts = g.get_operation_by_name(gradcam_ops[-1]).outputs
            to_ts = g.get_operation_by_name(gradcam_ops[-2]).outputs
            max_output = tf.reduce_max(from_ts, axis=3)
            # Large negative scale drives the incoming gradients negative,
            # which guided backprop is expected to gate to zero.
            y = tf.reduce_sum(-max_output * 1e2)
            grad = tf.gradients(y, to_ts)[0]
            grad_val = sess.run(grad, feed_dict={x: np.expand_dims(image, 0)})
            if self.enable_guided_backprop:
                self.assertTrue(not np.any(grad_val))
            else:
                self.assertTrue(np.any(grad_val))
    def test_relu(self):
        self.activation_fn = tf.nn.relu
        self.enable_guided_backprop = False
    def test_relu_guided(self):
        self.activation_fn = tf.nn.relu
        self.enable_guided_backprop = True
    def test_tanh(self):
        self.activation_fn = tf.nn.tanh
        self.enable_guided_backprop = False
    def test_tanh_guided(self):
        self.activation_fn = tf.nn.tanh
        self.enable_guided_backprop = True
    def test_sigmoid(self):
        self.activation_fn = tf.nn.sigmoid
        self.enable_guided_backprop = False
    def test_sigmoid_guided(self):
        self.activation_fn = tf.nn.sigmoid
        self.enable_guided_backprop = True
    def test_relu6(self):
        self.activation_fn = tf.nn.relu6
        self.enable_guided_backprop = False
    def test_relu6_guided(self):
        self.activation_fn = tf.nn.relu6
        self.enable_guided_backprop = True
    def test_elu(self):
        self.activation_fn = tf.nn.elu
        self.enable_guided_backprop = False
    def test_elu_guided(self):
        self.activation_fn = tf.nn.elu
        self.enable_guided_backprop = True
    def test_selu(self):
        self.activation_fn = tf.nn.selu
        self.enable_guided_backprop = False
    def test_selu_guided(self):
        self.activation_fn = tf.nn.selu
        self.enable_guided_backprop = True
    def test_softplus(self):
        self.activation_fn = tf.nn.softplus
        self.enable_guided_backprop = False
    def test_test_softplus_guided(self):
        self.activation_fn = tf.nn.softplus
        self.enable_guided_backprop = True
    def test_softsign(self):
        self.activation_fn = tf.nn.softsign
        self.enable_guided_backprop = False
    def test_softsign_guided(self):
        self.activation_fn = tf.nn.softsign
        self.enable_guided_backprop = True
| [
"tensorflow.reset_default_graph",
"tensorflow.layers.flatten",
"darkon.Gradcam.candidate_featuremap_op_names",
"darkon.Gradcam",
"tensorflow.reduce_sum",
"tensorflow.placeholder",
"tensorflow.Session",
"numpy.any",
"tensorflow.reduce_max",
"tensorflow.global_variables_initializer",
"tensorflow.l... | [((799, 856), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '(1, 2, 2, 3)', '"""x_placeholder"""'], {}), "(tf.float32, (1, 2, 2, 3), 'x_placeholder')\n", (813, 856), True, 'import tensorflow as tf\n'), ((865, 925), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'name': '"""y_placeholder"""', 'shape': '[1, 2]'}), "(tf.int32, name='y_placeholder', shape=[1, 2])\n", (879, 925), True, 'import tensorflow as tf\n'), ((1275, 1293), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['top'], {}), '(top)\n', (1288, 1293), True, 'import tensorflow as tf\n'), ((936, 958), 'tensorflow.name_scope', 'tf.name_scope', (['"""conv1"""'], {}), "('conv1')\n", (949, 958), True, 'import tensorflow as tf\n'), ((977, 1078), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', ([], {'inputs': 'x', 'filters': '(10)', 'kernel_size': '[2, 2]', 'padding': '"""same"""', 'activation': 'activation'}), "(inputs=x, filters=10, kernel_size=[2, 2], padding='same',\n activation=activation)\n", (993, 1078), True, 'import tensorflow as tf\n'), ((1146, 1166), 'tensorflow.name_scope', 'tf.name_scope', (['"""fc2"""'], {}), "('fc2')\n", (1159, 1166), True, 'import tensorflow as tf\n'), ((1186, 1211), 'tensorflow.layers.flatten', 'tf.layers.flatten', (['conv_1'], {}), '(conv_1)\n', (1203, 1211), True, 'import tensorflow as tf\n'), ((1226, 1260), 'tensorflow.layers.dense', 'tf.layers.dense', (['flatten', '_classes'], {}), '(flatten, _classes)\n', (1241, 1260), True, 'import tensorflow as tf\n'), ((1386, 1410), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (1408, 1410), True, 'import tensorflow as tf\n'), ((1504, 1537), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '(2, 2, 3)'}), '(size=(2, 2, 3))\n', (1521, 1537), True, 'import numpy as np\n'), ((1552, 1564), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1562, 1564), True, 'import tensorflow as tf\n'), ((1657, 1707), 'darkon.Gradcam.candidate_featuremap_op_names', 
'darkon.Gradcam.candidate_featuremap_op_names', (['sess'], {}), '(sess)\n', (1701, 1707), False, 'import darkon\n'), ((1835, 1857), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (1855, 1857), True, 'import tensorflow as tf\n'), ((2024, 2054), 'tensorflow.reduce_max', 'tf.reduce_max', (['from_ts'], {'axis': '(3)'}), '(from_ts, axis=3)\n', (2037, 2054), True, 'import tensorflow as tf\n'), ((2071, 2105), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(-max_output * 100.0)'], {}), '(-max_output * 100.0)\n', (2084, 2105), True, 'import tensorflow as tf\n'), ((1595, 1628), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1626, 1628), True, 'import tensorflow as tf\n'), ((1773, 1817), 'darkon.Gradcam', 'darkon.Gradcam', (['x', '_classes', 'gradcam_ops[-1]'], {}), '(x, _classes, gradcam_ops[-1])\n', (1787, 1817), False, 'import darkon\n'), ((2124, 2146), 'tensorflow.gradients', 'tf.gradients', (['y', 'to_ts'], {}), '(y, to_ts)\n', (2136, 2146), True, 'import tensorflow as tf\n'), ((2378, 2394), 'numpy.any', 'np.any', (['grad_val'], {}), '(grad_val)\n', (2384, 2394), True, 'import numpy as np\n'), ((2202, 2226), 'numpy.expand_dims', 'np.expand_dims', (['image', '(0)'], {}), '(image, 0)\n', (2216, 2226), True, 'import numpy as np\n'), ((2310, 2326), 'numpy.any', 'np.any', (['grad_val'], {}), '(grad_val)\n', (2316, 2326), True, 'import numpy as np\n')] |
import os
import pwd
import sys
import argparse
import subprocess
try:
# Location of run_with_reloader in the latest version of Werkzeug
from werkzeug._reloader import run_with_reloader
except ImportError:
# Old location of run_with_reloader
from werkzeug.serving import run_with_reloader
def get_command():
    '''
    Derive the shell command to (re)run from the command-line arguments.
    Exits with status 2 (after printing usage) when no command was given.
    '''
    parser = argparse.ArgumentParser(
        description='Automatically re-run Python commands when files change.')
    parser.add_argument(
        '--command',
        '-c',
        action='store',
        default=None,
        help='Specify any other shell command as a single string.')
    parser.add_argument(
        "args",
        nargs=argparse.REMAINDER,
        help='Arguments as if you were passing them to the `python` command.')
    options = parser.parse_args(sys.argv[1:])
    # Explicit --command wins; otherwise treat remaining args as a python call.
    if options.command:
        return options.command
    if options.args:
        return "python " + " ".join(options.args)
    parser.print_usage()
    sys.stderr.write('repyt: error: too few arguments.\n')
    sys.exit(2)
def get_shell():
    '''
    Return the user's shell: $SHELL when set, else the passwd-entry shell.
    '''
    login_shell = pwd.getpwuid(os.getuid()).pw_shell
    return os.environ.get('SHELL', login_shell)
def get_files():
    '''
    Return every file path under the current directory, recursively.
    '''
    return [
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in os.walk('.')
        for filename in filenames
    ]
def main():
    # Command-line entry point for setup.py install/develop.
    command = get_command()
    shell = get_shell()

    def rerun():
        # Re-execute the user's command in their shell on every reload.
        return subprocess.call(command, shell=True, executable=shell)

    run_with_reloader(rerun, extra_files=get_files())
| [
"argparse.ArgumentParser",
"os.getuid",
"os.path.join",
"sys.stderr.write",
"subprocess.call",
"sys.exit",
"os.walk"
] | [((403, 502), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Automatically re-run Python commands when files change."""'}), "(description=\n 'Automatically re-run Python commands when files change.')\n", (426, 502), False, 'import argparse\n'), ((1368, 1380), 'os.walk', 'os.walk', (['"""."""'], {}), "('.')\n", (1375, 1380), False, 'import os\n'), ((1038, 1092), 'sys.stderr.write', 'sys.stderr.write', (['"""repyt: error: too few arguments.\n"""'], {}), "('repyt: error: too few arguments.\\n')\n", (1054, 1092), False, 'import sys\n'), ((1101, 1112), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1109, 1112), False, 'import sys\n'), ((1683, 1737), 'subprocess.call', 'subprocess.call', (['command'], {'shell': '(True)', 'executable': 'shell'}), '(command, shell=True, executable=shell)\n', (1698, 1737), False, 'import subprocess\n'), ((1226, 1237), 'os.getuid', 'os.getuid', ([], {}), '()\n', (1235, 1237), False, 'import os\n'), ((1444, 1472), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (1456, 1472), False, 'import os\n')] |
import torch
from collections import namedtuple
from itertools import product
# Module-wide default device: first CUDA GPU when available, else CPU.
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
def get_num_correct(preds, labels):
    """
    Count how many predictions match their labels.
    Args:
        preds: predictions tensor with shape (batch_size, num_classes)
        labels: class-index labels tensor with shape (batch_size,)
    Returns:
        int: number of correct predictions across the batch
    """
    predicted_classes = preds.argmax(dim=1)
    return (predicted_classes == labels).sum().item()
class RunBuilder():
    """Expand a mapping of hyper-parameter lists into run configurations."""
    @staticmethod
    def get_runs(params):
        """
        Build the cartesian product of all hyper-parameter values.
        Args:
            params (OrderedDict): hyper-parameter name -> list of values
        Returns:
            list: one ``run`` named tuple per value combination
        """
        Run = namedtuple('run', params.keys())
        return [Run(*combo) for combo in product(*params.values())]
def get_all_preds(model, loader):
    """
    Run *model* over every batch in *loader* and return all predictions
    concatenated along dim 0 (as a CPU tensor).
    """
    batch_preds = []
    # Inference only: disable autograd so the original version's ever-growing
    # graph (built through repeated torch.cat on tracked tensors) is avoided.
    with torch.no_grad():
        for batch in loader:
            images = batch[0].to(device)
            # Move each batch's output to CPU so concatenation also works
            # when the model runs on CUDA (the original CPU-initialised
            # accumulator raised a device-mismatch error there).
            batch_preds.append(model(images).cpu())
    if not batch_preds:
        # Preserve the original behaviour for an empty loader.
        return torch.tensor([])
    # Concatenate once instead of once per batch (avoids quadratic copying).
    return torch.cat(batch_preds, dim=0)
def get_mean_std(loader):
    """
    Compute the per-channel mean and std of a dataset.
    Accumulates E[X] and E[X^2] batch by batch and uses
    VAR[X] = E[X**2] - E[X]**2.
    """
    sum_means = 0
    sum_sq_means = 0
    batch_count = 0
    for data, _ in loader:
        sum_means = sum_means + torch.mean(data, dim=[0, 2, 3])
        sum_sq_means = sum_sq_means + torch.mean(data ** 2, dim=[0, 2, 3])
        batch_count += 1
    mean = sum_means / batch_count
    std = (sum_sq_means / batch_count - mean ** 2) ** 0.5
    return mean, std
| [
"torch.mean",
"torch.tensor",
"torch.cuda.is_available",
"torch.cat"
] | [((1123, 1139), 'torch.tensor', 'torch.tensor', (['[]'], {}), '([])\n', (1135, 1139), False, 'import torch\n'), ((113, 138), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (136, 138), False, 'import torch\n'), ((1252, 1288), 'torch.cat', 'torch.cat', (['(all_preds, preds)'], {'dim': '(0)'}), '((all_preds, preds), dim=0)\n', (1261, 1288), False, 'import torch\n'), ((1539, 1570), 'torch.mean', 'torch.mean', (['data'], {'dim': '[0, 2, 3]'}), '(data, dim=[0, 2, 3])\n', (1549, 1570), False, 'import torch\n'), ((1603, 1639), 'torch.mean', 'torch.mean', (['(data ** 2)'], {'dim': '[0, 2, 3]'}), '(data ** 2, dim=[0, 2, 3])\n', (1613, 1639), False, 'import torch\n')] |
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskManager(object):
    """
    Manage all running tasks and refresh gauge values.
    """
    def __init__(self, refresh_period=30, refresh_enable=True, loop=None):
        """
        :param refresh_period: seconds between refresher runs.
        :param refresh_enable: when False, add_refresher() raises.
        :param loop: event loop to schedule on; defaults to the current one.
        """
        self._loop = loop or asyncio.get_event_loop()
        # Currently-running tasks; each removes itself when done.
        self.tasks = []
        self._refresh_enable = refresh_enable
        self._refresh_period = refresh_period
        # Background task driving refresh(); created lazily by add_refresher.
        self._refresh_task = None
        self._refreshers = []
        # Guards _refreshers / _refresh_task / _close coordination.
        self._refresh_lock = asyncio.Lock()
        self._close = False
    def set_refresh_period(self, period):
        # Takes effect after the current sleep finishes.
        self._refresh_period = period
    def add_task(self, coro):
        """Schedule *coro* and track it until completion."""
        if self._close:
            raise Exception("Cant add task for closed manager.")
        task = asyncio.ensure_future(coro, loop=self._loop)
        self.tasks.append(task)
        # Self-cleaning: drop the task from the list once it finishes.
        task.add_done_callback(self.tasks.remove)
    async def add_refresher(self, refresh_async_func: callable):
        """Register an async callable to be invoked every refresh period."""
        if not self._refresh_enable:
            raise Exception('Refresh disable in this manager. Use refresh_enable=True in constructor.')
        async with self._refresh_lock:
            if self._close:
                raise Exception("Cant add refresh function in closed manager.")
            self._refreshers.append(refresh_async_func)
            # Start the background refresh loop on first registration.
            if self._refresh_task is None:
                self._refresh_task = asyncio.ensure_future(
                    self.refresh(), loop=self._loop
                )
    async def refresh(self):
        """Background loop: periodically await every registered refresher."""
        while self._close is False:
            await asyncio.sleep(self._refresh_period)
            async with self._refresh_lock:
                for refresher in self._refreshers:
                    await refresher()
    async def wait_tasks(self):
        """Wait for all tracked tasks; exceptions are collected, not raised."""
        await asyncio.gather(
            *self.tasks,
            return_exceptions=True,
            loop=self._loop
        )
    async def close(self):
        """Stop accepting work, drain running tasks, cancel the refresher."""
        self._close = True
        await self.wait_tasks()
        async with self._refresh_lock:
            if self._refresh_task:
                self._refresh_task.cancel() | [
"logging.getLogger",
"asyncio.sleep",
"asyncio.Lock",
"asyncio.ensure_future",
"asyncio.gather",
"asyncio.get_event_loop"
] | [((40, 67), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (57, 67), False, 'import logging\n'), ((507, 521), 'asyncio.Lock', 'asyncio.Lock', ([], {}), '()\n', (519, 521), False, 'import asyncio\n'), ((766, 810), 'asyncio.ensure_future', 'asyncio.ensure_future', (['coro'], {'loop': 'self._loop'}), '(coro, loop=self._loop)\n', (787, 810), False, 'import asyncio\n'), ((273, 297), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (295, 297), False, 'import asyncio\n'), ((1775, 1843), 'asyncio.gather', 'asyncio.gather', (['*self.tasks'], {'return_exceptions': '(True)', 'loop': 'self._loop'}), '(*self.tasks, return_exceptions=True, loop=self._loop)\n', (1789, 1843), False, 'import asyncio\n'), ((1560, 1595), 'asyncio.sleep', 'asyncio.sleep', (['self._refresh_period'], {}), '(self._refresh_period)\n', (1573, 1595), False, 'import asyncio\n')] |
import numpy as np
import sys
def micrograph2np(width,shift):
r = int(width/shift-1)
#I = np.load("../DATA_SETS/004773_ProtRelionRefine3D/kino.micrograph.numpy.npy")
I = np.load("../DATA_SETS/004773_ProtRelionRefine3D/full_micrograph.stack_0001.numpy.npy")
I = (I-I.mean())/I.std()
N = int(I.shape[0]/shift)
M = int(I.shape[1]/shift)
S=[]
for i in range(N-r):
for j in range(M-r):
x1 = i*shift
x2 = x1+width
y1 = j*shift
y2 = y1+width
w = I[x1:x2,y1:y2]
S.append(w)
S = np.array(S)
np.save("../DATA_SETS/004773_ProtRelionRefine3D/fraction_micrograph.numpy", S)
| [
"numpy.array",
"numpy.load",
"numpy.save"
] | [((180, 276), 'numpy.load', 'np.load', (['"""../DATA_SETS/004773_ProtRelionRefine3D/full_micrograph.stack_0001.numpy.npy"""'], {}), "(\n '../DATA_SETS/004773_ProtRelionRefine3D/full_micrograph.stack_0001.numpy.npy'\n )\n", (187, 276), True, 'import numpy as np\n'), ((541, 552), 'numpy.array', 'np.array', (['S'], {}), '(S)\n', (549, 552), True, 'import numpy as np\n'), ((555, 633), 'numpy.save', 'np.save', (['"""../DATA_SETS/004773_ProtRelionRefine3D/fraction_micrograph.numpy"""', 'S'], {}), "('../DATA_SETS/004773_ProtRelionRefine3D/fraction_micrograph.numpy', S)\n", (562, 633), True, 'import numpy as np\n')] |
# python3
import re
report = open("финансы 31.10.2017 по 20.11.2017", 'r')
lines = report.readlines()
foundHeader = False
cardName = ""
operationDateTime = ""
for line in lines:
if not foundHeader:
if line.find('Фінансові трансакції за рахунком') != -1:
foundHeader = True
else:
if line.find('Блокування по карті') != -1:
foundHeader = False
elif line.find("Ім'я на карті:") != -1:
cardName = line[15:-1]
else:
ro = re.search("\d\d\.\d\d\.\d\d \d\d:\d\d", line)
if ro:
operationDateTime = ro.group(0);
else:
ro = re.search("(\d\d\d\d\d\d) +(.+) +(UAH) +(-*\d*,\d\d) +(UAH) +(-*\d*,\d\d)", line)
if ro:
operation = ro.group(2)
summ = ro.group(4)
separator = ';'
print(operationDateTime[:8].replace('.', '/'), separator, summ, separator, cardName, separator, operation)
| [
"re.search"
] | [((441, 498), 're.search', 're.search', (['"""\\\\d\\\\d\\\\.\\\\d\\\\d\\\\.\\\\d\\\\d \\\\d\\\\d:\\\\d\\\\d"""', 'line'], {}), "('\\\\d\\\\d\\\\.\\\\d\\\\d\\\\.\\\\d\\\\d \\\\d\\\\d:\\\\d\\\\d', line)\n", (450, 498), False, 'import re\n'), ((552, 655), 're.search', 're.search', (['"""(\\\\d\\\\d\\\\d\\\\d\\\\d\\\\d) +(.+) +(UAH) +(-*\\\\d*,\\\\d\\\\d) +(UAH) +(-*\\\\d*,\\\\d\\\\d)"""', 'line'], {}), "(\n '(\\\\d\\\\d\\\\d\\\\d\\\\d\\\\d) +(.+) +(UAH) +(-*\\\\d*,\\\\d\\\\d) +(UAH) +(-*\\\\d*,\\\\d\\\\d)'\n , line)\n", (561, 655), False, 'import re\n')] |
import os
import warnings
import torch.backends.cudnn as cudnn
warnings.filterwarnings("ignore")
from torch.utils.data import DataLoader
from decaps import CapsuleNet
from torch.optim import Adam
import numpy as np
from config import options
import torch
import torch.nn.functional as F
from utils.eval_utils import binary_cls_compute_metrics
import torch.nn as nn
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
theta_c = 0.5 # crop region with attention values higher than this
theta_d = 0.5 # drop region with attention values higher than this
def log_string(out_str):
LOG_FOUT.write(out_str + '\n')
LOG_FOUT.flush()
print(out_str)
@torch.no_grad()
def evaluate():
capsule_net.eval()
test_loss = np.zeros(4)
targets, predictions_raw, predictions_crop, predictions_drop, predictions_combined = [], [], [], [], []
outputs_raw, outputs_crop, outputs_combined = [], [], []
with torch.no_grad():
for batch_id, (data, target) in enumerate(test_loader):
data, target = data.cuda(), target.cuda()
target_ohe = F.one_hot(target, options.num_classes)
y_pred_raw, x_reconst, output, attention_map, _, c_maps, out_vec_raw = capsule_net(data, target_ohe)
loss = capsule_loss(output, target)
targets += [target_ohe]
outputs_raw += [output]
predictions_raw += [y_pred_raw]
test_loss[0] += loss
##################################
# Object Localization and Refinement
##################################
bbox_coords = []
upsampled_attention_map = F.upsample_bilinear(attention_map, size=(data.size(2), data.size(3)))
crop_mask = upsampled_attention_map > theta_c
crop_images = []
for batch_index in range(crop_mask.size(0)):
nonzero_indices = torch.nonzero(crop_mask[batch_index, 0, ...])
height_min = nonzero_indices[:, 0].min()
height_max = nonzero_indices[:, 0].max()
width_min = nonzero_indices[:, 1].min()
width_max = nonzero_indices[:, 1].max()
bbox_coord = np.array([height_min, height_max, width_min, width_max])
bbox_coords.append(bbox_coord)
crop_images.append(F.upsample_bilinear(
data[batch_index:batch_index + 1, :, height_min:height_max, width_min:width_max],
size=options.img_h))
crop_images = torch.cat(crop_images, dim=0)
y_pred_crop, _, output_crop, _, _, c_maps_crop, out_vec_crop = capsule_net(crop_images, target_ohe)
loss = capsule_loss(output_crop, target)
predictions_crop += [y_pred_crop]
outputs_crop += [output_crop]
test_loss[1] += loss
# final prediction
output_combined = (output + output_crop) / 2
outputs_combined += [output_combined]
y_pred_combined = output_combined.argmax(dim=1)
y_pred_combined_ohe = F.one_hot(y_pred_combined, options.num_classes)
test_loss[3] += capsule_loss(output_combined, target)
predictions_combined += [y_pred_combined_ohe]
##################################
# Attention Dropping
##################################
drop_mask = F.upsample_bilinear(attention_map, size=(data.size(2), data.size(3))) <= theta_d
drop_images = data * drop_mask.float()
# drop images forward
y_pred_drop, _, output_drop, _, _, c_maps_drop, out_vec_drop = capsule_net(drop_images.cuda(), target_ohe)
loss = capsule_loss(output_crop, target)
predictions_drop += [y_pred_drop]
test_loss[2] += loss
test_loss /= (batch_id + 1)
metrics_raw = binary_cls_compute_metrics(torch.cat(outputs_raw).cpu(), torch.cat(targets).cpu())
metrics_crop = binary_cls_compute_metrics(torch.cat(outputs_crop).cpu(), torch.cat(targets).cpu())
metrics_combined = binary_cls_compute_metrics(torch.cat(outputs_combined).cpu(), torch.cat(targets).cpu())
# display
log_string(" - (Raw) loss: {0:.4f}, acc: {1:.02%}, auc: {2:.02%}"
.format(test_loss[0], metrics_raw['acc'], metrics_raw['auc']))
log_string(" - (Crop) loss: {0:.4f}, acc: {1:.02%}, auc: {2:.02%}"
.format(test_loss[1], metrics_crop['acc'], metrics_crop['auc']))
log_string(" - (Combined) loss: {0:.4f}, acc: {1:.02%}, auc: {2:.02%}"
.format(test_loss[2], metrics_combined['acc'], metrics_combined['auc']))
if __name__ == '__main__':
##################################
# Initialize saving directory
##################################
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
iter_num = options.load_model_path.split('/')[-1].split('.')[0]
save_dir = os.path.dirname(os.path.dirname(options.load_model_path))
img_dir = os.path.join(save_dir, 'imgs')
if not os.path.exists(img_dir):
os.makedirs(img_dir)
viz_dir = os.path.join(img_dir, iter_num+'_crop_{}'.format(theta_c))
if not os.path.exists(viz_dir):
os.makedirs(viz_dir)
LOG_FOUT = open(os.path.join(save_dir, 'log_inference.txt'), 'w')
LOG_FOUT.write(str(options) + '\n')
# bkp of inference
os.system('cp {}/inference.py {}'.format(BASE_DIR, save_dir))
##################################
# Create the model
##################################
capsule_net = CapsuleNet(options)
log_string('Model Generated.')
log_string("Number of trainable parameters: {}".format(sum(param.numel() for param in capsule_net.parameters())))
##################################
# Use cuda
##################################
cudnn.benchmark = True
capsule_net.cuda()
capsule_net = nn.DataParallel(capsule_net)
##################################
# Load the trained model
##################################
ckpt = options.load_model_path
checkpoint = torch.load(ckpt)
state_dict = checkpoint['state_dict']
# Load weights
capsule_net.load_state_dict(state_dict)
log_string('Model successfully loaded from {}'.format(ckpt))
if 'feature_center' in checkpoint:
feature_center = checkpoint['feature_center'].to(torch.device("cuda"))
log_string('feature_center loaded from {}'.format(ckpt))
##################################
# Loss and Optimizer
##################################
if options.loss_type == 'margin':
from utils.loss_utils import MarginLoss
capsule_loss = MarginLoss(options)
elif options.loss_type == 'spread':
from utils.loss_utils import SpreadLoss
capsule_loss = SpreadLoss(options)
elif options.loss_type == 'cross-entropy':
capsule_loss = nn.CrossEntropyLoss()
if options.add_decoder:
from utils.loss_utils import ReconstructionLoss
reconst_loss = ReconstructionLoss()
optimizer = Adam(capsule_net.parameters(), lr=options.lr, betas=(options.beta1, 0.999))
# scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=2, gamma=0.9)
##################################
# Load dataset
##################################
if options.data_name == 'mnist':
from dataset.mnist import MNIST as data
os.system('cp {}/dataset/mnist.py {}'.format(BASE_DIR, save_dir))
elif options.data_name == 'fashion_mnist':
from dataset.fashion_mnist import FashionMNIST as data
os.system('cp {}/dataset/fashion_mnist.py {}'.format(BASE_DIR, save_dir))
elif options.data_name == 't_mnist':
from dataset.mnist_translate import MNIST as data
os.system('cp {}/dataset/mnist_translate.py {}'.format(BASE_DIR, save_dir))
elif options.data_name == 'c_mnist':
from dataset.mnist_clutter import MNIST as data
os.system('cp {}/dataset/mnist_clutter.py {}'.format(BASE_DIR, save_dir))
elif options.data_name == 'cub':
from dataset.dataset_CUB import CUB as data
os.system('cp {}/dataset/dataset_CUB.py {}'.format(BASE_DIR, save_dir))
elif options.data_name == 'chexpert':
from dataset.chexpert_dataset import CheXpertDataSet as data
os.system('cp {}/dataset/chexpert_dataset.py {}'.format(BASE_DIR, save_dir))
test_dataset = data(mode='test')
test_loader = DataLoader(test_dataset, batch_size=options.batch_size,
shuffle=False, num_workers=options.workers, drop_last=False)
##################################
# TESTING
##################################
log_string('')
log_string('Start Testing')
evaluate()
| [
"torch.nn.CrossEntropyLoss",
"utils.loss_utils.ReconstructionLoss",
"numpy.array",
"torch.nn.functional.upsample_bilinear",
"utils.loss_utils.MarginLoss",
"os.path.exists",
"dataset.chexpert_dataset.CheXpertDataSet.cuda",
"config.options.load_model_path.split",
"dataset.chexpert_dataset.CheXpertData... | [((63, 96), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (86, 96), False, 'import warnings\n'), ((650, 665), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (663, 665), False, 'import torch\n'), ((722, 733), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (730, 733), True, 'import numpy as np\n'), ((5041, 5071), 'os.path.join', 'os.path.join', (['save_dir', '"""imgs"""'], {}), "(save_dir, 'imgs')\n", (5053, 5071), False, 'import os\n'), ((5596, 5615), 'decaps.CapsuleNet', 'CapsuleNet', (['options'], {}), '(options)\n', (5606, 5615), False, 'from decaps import CapsuleNet\n'), ((5931, 5959), 'torch.nn.DataParallel', 'nn.DataParallel', (['capsule_net'], {}), '(capsule_net)\n', (5946, 5959), True, 'import torch.nn as nn\n'), ((6120, 6136), 'torch.load', 'torch.load', (['ckpt'], {}), '(ckpt)\n', (6130, 6136), False, 'import torch\n'), ((8452, 8469), 'dataset.chexpert_dataset.CheXpertDataSet', 'data', ([], {'mode': '"""test"""'}), "(mode='test')\n", (8456, 8469), True, 'from dataset.chexpert_dataset import CheXpertDataSet as data\n'), ((8488, 8608), 'torch.utils.data.DataLoader', 'DataLoader', (['test_dataset'], {'batch_size': 'options.batch_size', 'shuffle': '(False)', 'num_workers': 'options.workers', 'drop_last': '(False)'}), '(test_dataset, batch_size=options.batch_size, shuffle=False,\n num_workers=options.workers, drop_last=False)\n', (8498, 8608), False, 'from torch.utils.data import DataLoader\n'), ((913, 928), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (926, 928), False, 'import torch\n'), ((4858, 4883), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (4873, 4883), False, 'import os\n'), ((4985, 5025), 'os.path.dirname', 'os.path.dirname', (['options.load_model_path'], {}), '(options.load_model_path)\n', (5000, 5025), False, 'import os\n'), ((5083, 5106), 'os.path.exists', 'os.path.exists', (['img_dir'], {}), '(img_dir)\n', 
(5097, 5106), False, 'import os\n'), ((5116, 5136), 'os.makedirs', 'os.makedirs', (['img_dir'], {}), '(img_dir)\n', (5127, 5136), False, 'import os\n'), ((5221, 5244), 'os.path.exists', 'os.path.exists', (['viz_dir'], {}), '(viz_dir)\n', (5235, 5244), False, 'import os\n'), ((5254, 5274), 'os.makedirs', 'os.makedirs', (['viz_dir'], {}), '(viz_dir)\n', (5265, 5274), False, 'import os\n'), ((5296, 5339), 'os.path.join', 'os.path.join', (['save_dir', '"""log_inference.txt"""'], {}), "(save_dir, 'log_inference.txt')\n", (5308, 5339), False, 'import os\n'), ((6705, 6724), 'utils.loss_utils.MarginLoss', 'MarginLoss', (['options'], {}), '(options)\n', (6715, 6724), False, 'from utils.loss_utils import MarginLoss\n'), ((7057, 7077), 'utils.loss_utils.ReconstructionLoss', 'ReconstructionLoss', ([], {}), '()\n', (7075, 7077), False, 'from utils.loss_utils import ReconstructionLoss\n'), ((1073, 1111), 'torch.nn.functional.one_hot', 'F.one_hot', (['target', 'options.num_classes'], {}), '(target, options.num_classes)\n', (1082, 1111), True, 'import torch.nn.functional as F\n'), ((2515, 2544), 'torch.cat', 'torch.cat', (['crop_images'], {'dim': '(0)'}), '(crop_images, dim=0)\n', (2524, 2544), False, 'import torch\n'), ((3065, 3112), 'torch.nn.functional.one_hot', 'F.one_hot', (['y_pred_combined', 'options.num_classes'], {}), '(y_pred_combined, options.num_classes)\n', (3074, 3112), True, 'import torch.nn.functional as F\n'), ((6404, 6424), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (6416, 6424), False, 'import torch\n'), ((6837, 6856), 'utils.loss_utils.SpreadLoss', 'SpreadLoss', (['options'], {}), '(options)\n', (6847, 6856), False, 'from utils.loss_utils import SpreadLoss\n'), ((1021, 1032), 'dataset.chexpert_dataset.CheXpertDataSet.cuda', 'data.cuda', ([], {}), '()\n', (1030, 1032), True, 'from dataset.chexpert_dataset import CheXpertDataSet as data\n'), ((1881, 1926), 'torch.nonzero', 'torch.nonzero', (['crop_mask[batch_index, 0, ...]'], {}), 
'(crop_mask[batch_index, 0, ...])\n', (1894, 1926), False, 'import torch\n'), ((2184, 2240), 'numpy.array', 'np.array', (['[height_min, height_max, width_min, width_max]'], {}), '([height_min, height_max, width_min, width_max])\n', (2192, 2240), True, 'import numpy as np\n'), ((6927, 6948), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (6946, 6948), True, 'import torch.nn as nn\n'), ((2324, 2450), 'torch.nn.functional.upsample_bilinear', 'F.upsample_bilinear', (['data[batch_index:batch_index + 1, :, height_min:height_max, width_min:width_max\n ]'], {'size': 'options.img_h'}), '(data[batch_index:batch_index + 1, :, height_min:\n height_max, width_min:width_max], size=options.img_h)\n', (2343, 2450), True, 'import torch.nn.functional as F\n'), ((3894, 3916), 'torch.cat', 'torch.cat', (['outputs_raw'], {}), '(outputs_raw)\n', (3903, 3916), False, 'import torch\n'), ((3924, 3942), 'torch.cat', 'torch.cat', (['targets'], {}), '(targets)\n', (3933, 3942), False, 'import torch\n'), ((4000, 4023), 'torch.cat', 'torch.cat', (['outputs_crop'], {}), '(outputs_crop)\n', (4009, 4023), False, 'import torch\n'), ((4031, 4049), 'torch.cat', 'torch.cat', (['targets'], {}), '(targets)\n', (4040, 4049), False, 'import torch\n'), ((4111, 4138), 'torch.cat', 'torch.cat', (['outputs_combined'], {}), '(outputs_combined)\n', (4120, 4138), False, 'import torch\n'), ((4146, 4164), 'torch.cat', 'torch.cat', (['targets'], {}), '(targets)\n', (4155, 4164), False, 'import torch\n'), ((4900, 4934), 'config.options.load_model_path.split', 'options.load_model_path.split', (['"""/"""'], {}), "('/')\n", (4929, 4934), False, 'from config import options\n'), ((1674, 1686), 'dataset.chexpert_dataset.CheXpertDataSet.size', 'data.size', (['(2)'], {}), '(2)\n', (1683, 1686), True, 'from dataset.chexpert_dataset import CheXpertDataSet as data\n'), ((1688, 1700), 'dataset.chexpert_dataset.CheXpertDataSet.size', 'data.size', (['(3)'], {}), '(3)\n', (1697, 1700), True, 'from 
dataset.chexpert_dataset import CheXpertDataSet as data\n'), ((3431, 3443), 'dataset.chexpert_dataset.CheXpertDataSet.size', 'data.size', (['(2)'], {}), '(2)\n', (3440, 3443), True, 'from dataset.chexpert_dataset import CheXpertDataSet as data\n'), ((3445, 3457), 'dataset.chexpert_dataset.CheXpertDataSet.size', 'data.size', (['(3)'], {}), '(3)\n', (3454, 3457), True, 'from dataset.chexpert_dataset import CheXpertDataSet as data\n')] |
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(name='personal_assistant',
version='0.0.1',
author='<NAME>, <NAME>, <NAME>',
author_email='<EMAIL>, <EMAIL>, <EMAIL>',
description='Console script for working with Contacts lists, Notes and sorting files in the folders',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/Personal-Assistant-Project/ProjectHelper',
keywords="personal assistant helper",
license='MIT',
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points={'console_scripts': [
'helper-folder = ProjectHelper.main:main']},
packages=setuptools.find_packages(),
include_package_data=True,
python_requires=">=3.6",
)
| [
"setuptools.find_packages"
] | [((1098, 1124), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (1122, 1124), False, 'import setuptools\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Take a list of genome positions and return the dinucleotides around it.
For each position, will generate a list of + strand dinucleotides and - strand
dinucleotides.
Created: 2017-07-27 12:02
Last modified: 2017-10-18 00:17
"""
from __future__ import print_function
import os
import sys
import bz2
import gzip
from datetime import timedelta as _td
import logging as _log
import pandas as pd
import fyrd
from Bio import SeqIO as seqio
hg18 = "/godot/genomes/human/hg18"
hg19 = "/godot/genomes/human/hg19"
###############################################################################
# Core Algorithm #
###############################################################################
def get_dinucleotides(positions, genome_file, base=0, return_as='list'):
"""Return a list of all + and - strand dinucleotides around each position.
Will loop through each chromosome and search all positions in that
chromosome in one batch. Lookup is serial per chromosome.
Args:
positions (dict): Dictionary of {chrom->positons}
genome_file (str): Location of a genome fasta file or directory of
files. If directory, file names must be
<chrom_name>.fa[.gz]. Gzipped OK.
base (int): Either 0 or 1, base of positions in your list
return_as (str): dict: Return a dictionary of:
{chrom->{postion->{'ref': str, '+': tuple, '-': tuple}}}
list: just returns two lists with no positions.
df: return DataFrame
Returns:
(list, list): + strand dinucleotides, - strand dinucleotides. Returns
a dict or instead if requested through return_as.
"""
if os.path.isdir(genome_file):
chroms = positions.keys()
files = []
for chrom in chroms:
files.append(get_fasta_file(genome_file, chrom))
if return_as == 'df':
final = []
elif return_as == 'dict':
final = {}
else:
final = ([], [])
for chrom, fl in zip(chroms, files):
pos = {chrom: positions[chrom]}
res = get_dinucleotides(pos, fl, base, return_as)
if return_as == 'df':
final.append(res)
elif return_as == 'dict':
final.update(res)
else:
plus, minus = res
final[0] += plus
final[1] += minus
if return_as == 'df':
print('Converting to dataframe')
final = pd.concat(final)
return final
done = []
results = {} if return_as in ('dict', 'df') else ([], [])
with open_zipped(genome_file) as fasta_file:
for chrom in seqio.parse(fasta_file, 'fasta'):
if chrom.id not in positions:
continue
else:
done.append(chrom.id)
if return_as in ('dict', 'df'):
results[chrom.id] = {}
for pos in positions[chrom.id]:
pos = pos-base
ref = chrom[pos]
plus1 = chrom[pos-1:pos+1]
plus2 = chrom[pos:pos+2]
minus1 = plus1.reverse_complement()
minus2 = plus2.reverse_complement()
if return_as in ('dict', 'df'):
results[chrom.id][pos] = {
'ref': ref,
'+': (seq(plus1), seq(plus2)),
'-': (seq(minus1), seq(minus2))}
else:
results[0] += [plus1, plus2]
results[1] += [minus1, minus2]
if len(done) != len(positions.keys()):
print('The following chromosomes were not in files: {}'
.format([i for i in positions if i not in done]))
if return_as == 'df':
print('Converting to dataframe')
results = dict_to_df(results, base)
return results
def dict_to_df(results, base):
"""Convert results dictionary into a DataFrame."""
dfs = []
for chrom, data in results.items():
nuc_lookup = pd.DataFrame.from_dict(data, orient='index')
nuc_lookup['chrom'] = chrom
nuc_lookup['position'] = nuc_lookup.index.to_series().astype(int) + base
nuc_lookup['snp'] = nuc_lookup.chrom.astype(str) + '.' + nuc_lookup.position.astype(str)
nuc_lookup.set_index('snp', drop=True, inplace=True)
dfs.append(nuc_lookup)
result = pd.concat(dfs)
dfs = None
result = result[['ref', '+', '-']]
result.sort_index()
result.index.name = None
return result
###############################################################################
# Parallelization #
###############################################################################
def get_dinucleotides_parallel(positions, genome_file, base=0, return_as='list'):
"""Return a list of all + and - strand dinucleotides around each position.
Will loop through each chromosome and search all positions in that
chromosome in one batch. Lookup is parallel per chromosome.
Args:
positions (dict): Dictionary of {chrom->positons}
genome_file (str): Location of a genome fasta file or directory of
files. If directory, file names must be
<chrom_name>.fa[.gz]. Gzipped OK. Directory is
preferred in parallel mode.
base (int): Either 0 or 1, base of positions in your list
return_as (str): dict: Return a dictionary of:
{chrom->{postion->{'ref': str, '+': tuple, '-': tuple}}}
list: just returns two lists with no positions.
df: return DataFrame
Returns:
(list, list): + strand dinucleotides, - strand dinucleotides. Returns
a dict or instead if requested through return_as.
"""
outs = []
for chrom in positions.keys():
if os.path.isdir(genome_file):
fa_file = get_fasta_file(genome_file, chrom)
if not os.path.isfile(fa_file):
raise FileNotFoundError('{} not found.'.format(genome_file))
mins = int(len(positions[chrom])/2000)+45
time = str(_td(minutes=mins))
outs.append(
fyrd.submit(
get_dinucleotides,
({chrom: positions[chrom]}, fa_file, base, return_as),
cores=1, mem='6GB', time=time,
)
)
if return_as == 'df':
final = []
elif return_as == 'dict':
final = {}
else:
final = ([], [])
fyrd.wait(outs)
print('Getting results')
for out in outs:
res = out.get()
if return_as == 'df':
if isinstance(res, dict):
res = dict_to_df(res, base)
final.append(res)
elif return_as == 'dict':
final.update(res)
else:
plus, minus = res
final[0] += plus
final[1] += minus
if return_as == 'df':
print('Joining dataframe')
final = pd.concat(final)
return final
###############################################################################
# Helper Functions #
###############################################################################
def seq(sequence):
"""Convert Bio.Seq object to string."""
return str(sequence.seq.upper())
def get_fasta_file(directory, name):
"""Look in directory for name.fa or name.fa.gz and return path."""
fa_file = os.path.join(directory, name + '.fa')
gz_file = fa_file + '.gz'
if os.path.isfile(fa_file):
genome_file = fa_file
elif os.path.isfile(gz_file):
genome_file = fa_file
else:
raise FileNotFoundError(
'No {f}.fa or {f}.fa.gz file found in {d}'.format(
f=name, d=directory
)
)
return genome_file
def open_zipped(infile, mode='r'):
""" Return file handle of file regardless of zipped or not
Text mode enforced for compatibility with python2 """
mode = mode[0] + 't'
p2mode = mode
if hasattr(infile, 'write'):
return infile
if isinstance(infile, str):
if infile.endswith('.gz'):
return gzip.open(infile, mode)
if infile.endswith('.bz2'):
if hasattr(bz2, 'open'):
return bz2.open(infile, mode)
else:
return bz2.BZ2File(infile, p2mode)
return open(infile, p2mode)
###############################################################################
# Run On Files #
###############################################################################
def parse_location_file(infile, base=None):
"""Get a compatible dictionary from an input file.
Args:
infile (str): Path to a bed, vcf, or tsv. If tsv should be chrom\\tpos.
Filetype detected by extension. Gzipped/B2zipped OK.
base (int): Force base of file, if not set, bed/tsv assumed base 0,
vcf assumed base-1
Returns:
dict: A dict of {chrom->pos}
"""
if not isinstance(base, int):
base = 1 if 'vcf' in infile.split('.') else 0
out = {}
for chrom, pos in tsv_bed_vcf(infile, base):
if chrom not in out:
out[chrom] = []
out[chrom].append(pos)
return out
def tsv_bed_vcf(infile, base=0):
"""Interator for generic tsv, yields column1, column2 for every line.
column1 is assumed to be string, column2 is converted to int and base is
subtracted from it.
"""
with open_zipped(infile) as fin:
for line in fin:
if line.startswith('#'):
continue
f = line.rstrip().split('\t')
yield f[0], int(f[1])-base
| [
"fyrd.submit",
"gzip.open",
"os.path.join",
"pandas.DataFrame.from_dict",
"os.path.isfile",
"bz2.BZ2File",
"os.path.isdir",
"bz2.open",
"Bio.SeqIO.parse",
"datetime.timedelta",
"pandas.concat",
"fyrd.wait"
] | [((1884, 1910), 'os.path.isdir', 'os.path.isdir', (['genome_file'], {}), '(genome_file)\n', (1897, 1910), False, 'import os\n'), ((4644, 4658), 'pandas.concat', 'pd.concat', (['dfs'], {}), '(dfs)\n', (4653, 4658), True, 'import pandas as pd\n'), ((6878, 6893), 'fyrd.wait', 'fyrd.wait', (['outs'], {}), '(outs)\n', (6887, 6893), False, 'import fyrd\n'), ((7858, 7895), 'os.path.join', 'os.path.join', (['directory', "(name + '.fa')"], {}), "(directory, name + '.fa')\n", (7870, 7895), False, 'import os\n'), ((7933, 7956), 'os.path.isfile', 'os.path.isfile', (['fa_file'], {}), '(fa_file)\n', (7947, 7956), False, 'import os\n'), ((2898, 2930), 'Bio.SeqIO.parse', 'seqio.parse', (['fasta_file', '"""fasta"""'], {}), "(fasta_file, 'fasta')\n", (2909, 2930), True, 'from Bio import SeqIO as seqio\n'), ((4280, 4324), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['data'], {'orient': '"""index"""'}), "(data, orient='index')\n", (4302, 4324), True, 'import pandas as pd\n'), ((6234, 6260), 'os.path.isdir', 'os.path.isdir', (['genome_file'], {}), '(genome_file)\n', (6247, 6260), False, 'import os\n'), ((7355, 7371), 'pandas.concat', 'pd.concat', (['final'], {}), '(final)\n', (7364, 7371), True, 'import pandas as pd\n'), ((7997, 8020), 'os.path.isfile', 'os.path.isfile', (['gz_file'], {}), '(gz_file)\n', (8011, 8020), False, 'import os\n'), ((2713, 2729), 'pandas.concat', 'pd.concat', (['final'], {}), '(final)\n', (2722, 2729), True, 'import pandas as pd\n'), ((6334, 6357), 'os.path.isfile', 'os.path.isfile', (['fa_file'], {}), '(fa_file)\n', (6348, 6357), False, 'import os\n'), ((6501, 6518), 'datetime.timedelta', '_td', ([], {'minutes': 'mins'}), '(minutes=mins)\n', (6504, 6518), True, 'from datetime import timedelta as _td\n'), ((6553, 6673), 'fyrd.submit', 'fyrd.submit', (['get_dinucleotides', '({chrom: positions[chrom]}, fa_file, base, return_as)'], {'cores': '(1)', 'mem': '"""6GB"""', 'time': 'time'}), "(get_dinucleotides, ({chrom: positions[chrom]}, fa_file, 
base,\n return_as), cores=1, mem='6GB', time=time)\n", (6564, 6673), False, 'import fyrd\n'), ((8589, 8612), 'gzip.open', 'gzip.open', (['infile', 'mode'], {}), '(infile, mode)\n', (8598, 8612), False, 'import gzip\n'), ((8709, 8731), 'bz2.open', 'bz2.open', (['infile', 'mode'], {}), '(infile, mode)\n', (8717, 8731), False, 'import bz2\n'), ((8773, 8800), 'bz2.BZ2File', 'bz2.BZ2File', (['infile', 'p2mode'], {}), '(infile, p2mode)\n', (8784, 8800), False, 'import bz2\n')] |
# -*- coding: utf-8 -*-
from gevent import monkey, sleep, spawn
monkey.patch_all()
import logging.config
from datetime import datetime
from gevent.event import Event
from restkit import ResourceError
from retrying import retry
from reports.brokers.databridge.base_worker import BaseWorker
from reports.brokers.databridge.constants import retry_mult
from reports.brokers.databridge.journal_msg_ids import (DATABRIDGE_INFO, DATABRIDGE_SYNC_SLEEP,
DATABRIDGE_TENDER_PROCESS, DATABRIDGE_WORKER_DIED)
from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders
logger = logging.getLogger(__name__)
class Scanner(BaseWorker):
    """Data bridge worker that streams tender ids from the tenders API.

    Two greenlets cooperate: the *backward* one walks the feed in
    descending order and publishes the initial sync point, while the
    *forward* one waits for that point and then follows newly published
    tenders.  Accepted tender ids are pushed onto
    ``filtered_tender_ids_queue`` for downstream workers.
    """

    def __init__(self, tenders_sync_client, filtered_tender_ids_queue, services_not_available,
                 sleep_change_value, delay=15):
        """
        :param tenders_sync_client: client exposing ``sync_tenders``
        :param filtered_tender_ids_queue: queue that receives accepted tender ids
        :param services_not_available: event that blocks work while services are down
        :param sleep_change_value: shared adaptive back-off between requests
        :param delay: base number of seconds to sleep between sync requests
        """
        super(Scanner, self).__init__(services_not_available)
        self.start_time = datetime.now()
        self.delay = delay
        # init clients
        self.tenders_sync_client = tenders_sync_client
        # init queues for workers
        self.filtered_tender_ids_queue = filtered_tender_ids_queue
        # blocker: forward sync waits on this until backward sync publishes offsets
        self.initialization_event = Event()
        self.sleep_change_value = sleep_change_value

    @retry(stop_max_attempt_number=5, wait_exponential_multiplier=retry_mult)
    def initialize_sync(self, params=None, direction=None):
        """Perform the first sync request for *direction* and coordinate both feeds.

        The backward pass records ``self.initial_sync_point`` and wakes the
        forward pass; the forward pass blocks until that point exists.
        """
        if direction == "backward":
            self.initialization_event.clear()
            assert params['descending']
            response = self.tenders_sync_client.sync_tenders(params,
                                                             extra_headers={'X-Client-Request-ID': generate_req_id()})
            # set values in reverse order due to 'descending' option
            self.initial_sync_point = {'forward_offset': response.prev_page.offset,
                                       'backward_offset': response.next_page.offset}
            self.initialization_event.set()  # wake up forward worker
            logger.info("Initial sync point {}".format(self.initial_sync_point))
            return response
        else:
            assert 'descending' not in params
            self.initialization_event.wait()  # block until the backward pass has run
            params['offset'] = self.initial_sync_point['forward_offset']
            logger.info("Starting forward sync from offset {}".format(params['offset']))
            return self.tenders_sync_client.sync_tenders(params,
                                                         extra_headers={'X-Client-Request-ID': generate_req_id()})

    def get_tenders(self, params=None, direction=""):
        """Yield tenders from the feed, paging until ``more_tenders`` says stop.

        Bug fix: ``params`` previously defaulted to a mutable ``{}`` that this
        generator mutates (``params['offset']``), so state leaked between
        calls; a fresh dict is now created per call instead.
        """
        params = {} if params is None else params
        response = self.initialize_sync(params=params, direction=direction)
        while more_tenders(params, response):
            tenders = response.data if response else []
            params['offset'] = response.next_page.offset
            for tender in tenders:
                if self.should_process_tender(tender):
                    yield tender
                else:
                    logger.info('Skipping tender {}'.format(tender['id']),
                                extra=journal_context({"MESSAGE_ID": DATABRIDGE_INFO},
                                                      params={"TENDER_ID": tender['id']}))
            logger.info('Sleep {} sync...'.format(direction),
                        extra=journal_context({"MESSAGE_ID": DATABRIDGE_SYNC_SLEEP}))
            sleep(self.delay + self.sleep_change_value.time_between_requests)
            try:
                response = self.tenders_sync_client.sync_tenders(params, extra_headers={
                    'X-Client-Request-ID': generate_req_id()})
                self.sleep_change_value.decrement()
            except ResourceError as error:  # renamed from ``re`` to avoid shadowing the stdlib module name
                if error.status_int == 429:
                    # Rate limited: back off and retry with the same offset.
                    self.sleep_change_value.increment()
                    logger.info("Received 429, will sleep for {}".format(self.sleep_change_value.time_between_requests))
                else:
                    raise error

    def should_process_tender(self, tender):
        """Filtering hook; accept every tender by default."""
        # Adding conditions for filtering tenders.
        return True

    def get_tenders_forward(self):
        """Greenlet body: follow the feed forward from the initial sync point."""
        self.services_not_available.wait()
        logger.info('Start forward data sync worker...')
        params = {'mode': '_all_'}
        try:
            self.put_tenders_to_process(params, "forward")
        except Exception as e:
            logger.warning('Forward worker died!', extra=journal_context({"MESSAGE_ID": DATABRIDGE_WORKER_DIED}, {}))
            logger.exception("Message: {}".format(e.message))
        else:
            logger.warning('Forward data sync finished!')

    def get_tenders_backward(self):
        """Greenlet body: walk the feed backward; return True on a clean finish."""
        self.services_not_available.wait()
        logger.info('Start backward data sync worker...')
        params = {'descending': 1, 'mode': '_all_'}
        try:
            self.put_tenders_to_process(params, "backward")
        except Exception as e:
            logger.warning('Backward worker died!', extra=journal_context({"MESSAGE_ID": DATABRIDGE_WORKER_DIED}, {}))
            logger.exception("Message: {}".format(e.message))
            return False
        else:
            logger.info('Backward data sync finished.')
            return True

    def put_tenders_to_process(self, params, direction):
        """Push every accepted tender id onto the shared queue."""
        for tender in self.get_tenders(params=params, direction=direction):
            self.filtered_tender_ids_queue.put(tender['id'])

    def _start_jobs(self):
        """Spawn both sync greenlets and return them keyed by name."""
        return {'get_tenders_backward': spawn(self.get_tenders_backward),
                'get_tenders_forward': spawn(self.get_tenders_forward)}

    def check_and_revive_jobs(self):
        """Restart any worker greenlet that died without producing a value."""
        for name, job in self.immortal_jobs.items():
            if job.dead and not job.value:
                self.revive_job(name)
| [
"gevent.event.Event",
"gevent.sleep",
"gevent.monkey.patch_all",
"reports.brokers.databridge.utils.journal_context",
"reports.brokers.databridge.utils.generate_req_id",
"datetime.datetime.now",
"reports.brokers.databridge.utils.more_tenders",
"retrying.retry",
"gevent.spawn"
] | [((65, 83), 'gevent.monkey.patch_all', 'monkey.patch_all', ([], {}), '()\n', (81, 83), False, 'from gevent import monkey, sleep, spawn\n'), ((1316, 1388), 'retrying.retry', 'retry', ([], {'stop_max_attempt_number': '(5)', 'wait_exponential_multiplier': 'retry_mult'}), '(stop_max_attempt_number=5, wait_exponential_multiplier=retry_mult)\n', (1321, 1388), False, 'from retrying import retry\n'), ((971, 985), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (983, 985), False, 'from datetime import datetime\n'), ((1249, 1256), 'gevent.event.Event', 'Event', ([], {}), '()\n', (1254, 1256), False, 'from gevent.event import Event\n'), ((2767, 2797), 'reports.brokers.databridge.utils.more_tenders', 'more_tenders', (['params', 'response'], {}), '(params, response)\n', (2779, 2797), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n'), ((3470, 3535), 'gevent.sleep', 'sleep', (['(self.delay + self.sleep_change_value.time_between_requests)'], {}), '(self.delay + self.sleep_change_value.time_between_requests)\n', (3475, 3535), False, 'from gevent import monkey, sleep, spawn\n'), ((5846, 5878), 'gevent.spawn', 'spawn', (['self.get_tenders_backward'], {}), '(self.get_tenders_backward)\n', (5851, 5878), False, 'from gevent import monkey, sleep, spawn\n'), ((5919, 5950), 'gevent.spawn', 'spawn', (['self.get_tenders_forward'], {}), '(self.get_tenders_forward)\n', (5924, 5950), False, 'from gevent import monkey, sleep, spawn\n'), ((3402, 3456), 'reports.brokers.databridge.utils.journal_context', 'journal_context', (["{'MESSAGE_ID': DATABRIDGE_SYNC_SLEEP}"], {}), "({'MESSAGE_ID': DATABRIDGE_SYNC_SLEEP})\n", (3417, 3456), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n'), ((1739, 1756), 'reports.brokers.databridge.utils.generate_req_id', 'generate_req_id', ([], {}), '()\n', (1754, 1756), False, 'from reports.brokers.databridge.utils import generate_req_id, 
journal_context, more_tenders\n'), ((2603, 2620), 'reports.brokers.databridge.utils.generate_req_id', 'generate_req_id', ([], {}), '()\n', (2618, 2620), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n'), ((4514, 4573), 'reports.brokers.databridge.utils.journal_context', 'journal_context', (["{'MESSAGE_ID': DATABRIDGE_WORKER_DIED}", '{}'], {}), "({'MESSAGE_ID': DATABRIDGE_WORKER_DIED}, {})\n", (4529, 4573), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n'), ((5061, 5120), 'reports.brokers.databridge.utils.journal_context', 'journal_context', (["{'MESSAGE_ID': DATABRIDGE_WORKER_DIED}", '{}'], {}), "({'MESSAGE_ID': DATABRIDGE_WORKER_DIED}, {})\n", (5076, 5120), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n'), ((3170, 3258), 'reports.brokers.databridge.utils.journal_context', 'journal_context', (["{'MESSAGE_ID': DATABRIDGE_INFO}"], {'params': "{'TENDER_ID': tender['id']}"}), "({'MESSAGE_ID': DATABRIDGE_INFO}, params={'TENDER_ID':\n tender['id']})\n", (3185, 3258), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n'), ((3685, 3702), 'reports.brokers.databridge.utils.generate_req_id', 'generate_req_id', ([], {}), '()\n', (3700, 3702), False, 'from reports.brokers.databridge.utils import generate_req_id, journal_context, more_tenders\n')] |
"""Using flask to expose the main entry point as it makes it easier to
expose an input thread.
Individual threads will boot manually or attached through manual setup
"""
from multiprocessing import Process, Queue
import flask
from flask import Flask
from core.run import run_core, thread_run
from log import log
app = Flask(__name__)
global_prc = None
proc_q = None
def main():
    """Run the Flask development server on localhost:9000."""
    app.run(host='127.0.0.1', port=9000, debug=True)
def proc_start():
    """Ensure the worker process exists, creating it on first call.

    Returns the (possibly just-created) process object.
    """
    global global_prc
    global proc_q
    if global_prc is None:
        # First call: boot the worker with empty options.
        global_prc, proc_q = thread_run(proc_q, {})
    return global_prc
def proc_stop():
    """Ask the worker process to terminate and wait for it; always True."""
    global global_prc
    if global_prc is not None:
        proc_q.put_nowait('kill')
        log('sent kill command, waiting for death.')
        global_prc.join()
        log('Stop complete')
        global_prc = None
    return True
@app.route("/start")
def start():
proc_start()
return "Run main thread!"
@app.route("/")
def index_page():
proc_start()
return "first page. Welcome."
@app.route('/put/<sentence>')
def put_string(sentence):
    # Forward the URL fragment to the worker via the queue and echo it back.
    proc_q.put(sentence)
    return sentence
@app.route("/stop")
def stop():
proc_stop()
return "kill thread"
if __name__ == '__main__':
main()
| [
"core.run.thread_run",
"log.log",
"flask.Flask"
] | [((331, 346), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (336, 346), False, 'from flask import Flask\n'), ((661, 688), 'core.run.thread_run', 'thread_run', (['proc_q', 'options'], {}), '(proc_q, options)\n', (671, 688), False, 'from core.run import run_core, thread_run\n'), ((844, 888), 'log.log', 'log', (['"""sent kill command, waiting for death."""'], {}), "('sent kill command, waiting for death.')\n", (847, 888), False, 'from log import log\n'), ((917, 937), 'log.log', 'log', (['"""Stop complete"""'], {}), "('Stop complete')\n", (920, 937), False, 'from log import log\n')] |
import sys
from skimage import color, data
import matplotlib.pyplot as plt
from hogpylib.hog import HistogramOfGradients
def main(args=None):
    """Compute HOG features with hogpylib and scikit-image and plot both side by side."""
    from skimage.feature import hog

    pixels_per_cell = (8, 8)
    cells_per_block = (2, 2)
    n_bins = 9  # orientation bins, shared by both implementations
    visualise = True

    grey = color.rgb2gray(data.astronaut())

    custom_hog = HistogramOfGradients(pixels_per_cell=pixels_per_cell,
                                      cells_per_block=cells_per_block,
                                      num_of_bins=n_bins,
                                      visualise=visualise)
    hog_features, image_custom = custom_hog.compute_features(grey)

    hog_features_check, image_scikit = hog(grey,
                                           orientations=n_bins,
                                           pixels_per_cell=pixels_per_cell,
                                           cells_per_block=cells_per_block,
                                           block_norm='L2',
                                           visualize=visualise)

    fig, axes = plt.subplots(1, 3, figsize=(12, 4))
    panels = ((grey, 'Input Image'),
              (image_custom, 'Custom HOG'),
              (image_scikit, 'Scikit HOG'))
    for ax, (img, title) in zip(axes, panels):
        ax.axis('off')
        ax.imshow(img, cmap=plt.get_cmap('gray'))
        ax.set_title(title)
    plt.show()
if __name__ == "__main__":
sys.exit(main())
| [
"hogpylib.hog.HistogramOfGradients",
"skimage.data.astronaut",
"matplotlib.pyplot.get_cmap",
"skimage.feature.hog",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((455, 595), 'hogpylib.hog.HistogramOfGradients', 'HistogramOfGradients', ([], {'pixels_per_cell': 'PIXELS_PER_CELL', 'cells_per_block': 'CELLS_PER_BLOCK', 'num_of_bins': 'NUMBER_OF_BINS', 'visualise': 'VISUALISE'}), '(pixels_per_cell=PIXELS_PER_CELL, cells_per_block=\n CELLS_PER_BLOCK, num_of_bins=NUMBER_OF_BINS, visualise=VISUALISE)\n', (475, 595), False, 'from hogpylib.hog import HistogramOfGradients\n'), ((817, 965), 'skimage.feature.hog', 'hog', (['orig_img'], {'orientations': 'ORIENTATIONS', 'pixels_per_cell': 'PIXELS_PER_CELL', 'cells_per_block': 'CELLS_PER_BLOCK', 'block_norm': '"""L2"""', 'visualize': 'VISUALISE'}), "(orig_img, orientations=ORIENTATIONS, pixels_per_cell=PIXELS_PER_CELL,\n cells_per_block=CELLS_PER_BLOCK, block_norm='L2', visualize=VISUALISE)\n", (820, 965), False, 'from skimage.feature import hog\n'), ((1225, 1260), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {'figsize': '(12, 4)'}), '(1, 3, figsize=(12, 4))\n', (1237, 1260), True, 'import matplotlib.pyplot as plt\n'), ((1591, 1601), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1599, 1601), True, 'import matplotlib.pyplot as plt\n'), ((347, 363), 'skimage.data.astronaut', 'data.astronaut', ([], {}), '()\n', (361, 363), False, 'from skimage import color, data\n'), ((1353, 1373), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (1365, 1373), True, 'import matplotlib.pyplot as plt\n'), ((1440, 1460), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (1452, 1460), True, 'import matplotlib.pyplot as plt\n'), ((1533, 1553), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (1545, 1553), True, 'import matplotlib.pyplot as plt\n')] |
from trajminer import TrajectoryData
from trajminer.preprocessing import TrajectorySegmenter
data = TrajectoryData(attributes=['poi', 'hour', 'rating'],
data=[[['Bakery', 8, 8.6], ['Work', 9, 8.9],
['Restaurant', 12, 7.7], ['Bank', 12, 5.6],
['Work', 13, 8.9], ['Home', 19, 0]],
[['Home', 8, 0], ['Mall', 10, 9.3],
['Home', 19, 0], ['Pub', 21, 9.5]]],
tids=[20, 24],
labels=[1, 2])
class TestTrajectorySegmenter(object):
    # NOTE(review): all four segmentation tests below construct the segmenter
    # with thresholds=None; the two "*_subset_thresholds" tests look like
    # copy-paste duplicates of the "*_no_thresholds" ones and presumably
    # should pass a real thresholds mapping -- TODO confirm against the
    # TrajectorySegmenter API.
    def test_missing(self):
        # Placeholder -- not implemented yet.
        assert True
    def test_ignore_missing(self):
        # Placeholder -- not implemented yet.
        assert True
    def test_strict_no_thresholds(self):
        # Smoke test: mode='strict' with no thresholds must not raise.
        segmenter = TrajectorySegmenter(attributes=data.get_attributes(),
                                        thresholds=None, mode='strict',
                                        n_jobs=1)
        print(segmenter.fit_transform(data))
        assert True  # TO-DO: assert on the segmented output
    def test_strict_subset_thresholds(self):
        # Smoke test: currently identical to test_strict_no_thresholds.
        segmenter = TrajectorySegmenter(attributes=data.get_attributes(),
                                        thresholds=None, mode='strict',
                                        n_jobs=1)
        print(segmenter.fit_transform(data))
        assert True  # TO-DO: assert on the segmented output
    def test_any_no_thresholds(self):
        # Smoke test: mode='any' with no thresholds must not raise.
        segmenter = TrajectorySegmenter(attributes=data.get_attributes(),
                                        thresholds=None, mode='any',
                                        n_jobs=1)
        print(segmenter.fit_transform(data))
        assert True  # TO-DO: assert on the segmented output
    def test_any_subset_thresholds(self):
        # Smoke test: currently identical to test_any_no_thresholds.
        segmenter = TrajectorySegmenter(attributes=data.get_attributes(),
                                        thresholds=None, mode='any',
                                        n_jobs=1)
        print(segmenter.fit_transform(data))
        assert True  # TO-DO: assert on the segmented output
| [
"trajminer.TrajectoryData"
] | [((102, 397), 'trajminer.TrajectoryData', 'TrajectoryData', ([], {'attributes': "['poi', 'hour', 'rating']", 'data': "[[['Bakery', 8, 8.6], ['Work', 9, 8.9], ['Restaurant', 12, 7.7], ['Bank', \n 12, 5.6], ['Work', 13, 8.9], ['Home', 19, 0]], [['Home', 8, 0], ['Mall',\n 10, 9.3], ['Home', 19, 0], ['Pub', 21, 9.5]]]", 'tids': '[20, 24]', 'labels': '[1, 2]'}), "(attributes=['poi', 'hour', 'rating'], data=[[['Bakery', 8, \n 8.6], ['Work', 9, 8.9], ['Restaurant', 12, 7.7], ['Bank', 12, 5.6], [\n 'Work', 13, 8.9], ['Home', 19, 0]], [['Home', 8, 0], ['Mall', 10, 9.3],\n ['Home', 19, 0], ['Pub', 21, 9.5]]], tids=[20, 24], labels=[1, 2])\n", (116, 397), False, 'from trajminer import TrajectoryData\n')] |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError
from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional
from models.profile import User
class LoginForm(FlaskForm):
    """Sign-in form: username, password and a remember-me checkbox."""
    # NOTE(review): no labels or DataRequired validators on these fields,
    # so empty submissions pass WTForms validation -- confirm intentional.
    username = StringField()
    password = PasswordField()
    remember_me = BooleanField('Keep me logged in')
class RegisterForm(FlaskForm):
    """Registration form with uniqueness checks against the User collection."""

    # NOTE(review): bound once at class-definition time; the per-request
    # .filter() calls below still query the database each time.
    users_in_db = User.objects
    name_rule = Regexp('^[A-Za-z0-9_.]*$', 0, 'User names must have only letters, numbers dots or underscores')
    username = StringField('Username', validators=[DataRequired(), Length(1, 64), name_rule])
    email = StringField('Email', validators=[DataRequired(), Length(1, 128), Email()])
    password = PasswordField('Password', validators=[DataRequired(), EqualTo('password2', message='Does not match')])
    # Fixed label: was the placeholder '<PASSWORD>'.
    password2 = PasswordField('Confirm password', validators=[DataRequired()])
    register_submit = SubmitField('Register')

    def validate_username(self, field):
        """Reject usernames already present in the database."""
        if self.users_in_db.filter(username=field.data).count() > 0:
            raise ValidationError('Username already in use')

    def validate_email(self, field):
        """Reject e-mail addresses that are already registered."""
        # Fixed message grammar: was 'Email already in registered'.
        if self.users_in_db.filter(email=field.data).count() > 0:
            raise ValidationError('Email already registered')
| [
"wtforms.validators.Email",
"wtforms.validators.DataRequired",
"wtforms.PasswordField",
"wtforms.BooleanField",
"wtforms.SubmitField",
"wtforms.StringField",
"wtforms.validators.EqualTo",
"wtforms.validators.Length",
"wtforms.validators.Regexp",
"wtforms.ValidationError"
] | [((292, 305), 'wtforms.StringField', 'StringField', ([], {}), '()\n', (303, 305), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError\n'), ((321, 336), 'wtforms.PasswordField', 'PasswordField', ([], {}), '()\n', (334, 336), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError\n'), ((355, 388), 'wtforms.BooleanField', 'BooleanField', (['"""Keep me logged in"""'], {}), "('Keep me logged in')\n", (367, 388), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError\n'), ((469, 568), 'wtforms.validators.Regexp', 'Regexp', (['"""^[A-Za-z0-9_.]*$"""', '(0)', '"""User names must have only letters, numbers dots or underscores"""'], {}), "('^[A-Za-z0-9_.]*$', 0,\n 'User names must have only letters, numbers dots or underscores')\n", (475, 568), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((959, 982), 'wtforms.SubmitField', 'SubmitField', (['"""Register"""'], {}), "('Register')\n", (970, 982), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError\n'), ((1111, 1153), 'wtforms.ValidationError', 'ValidationError', (['"""Username already in use"""'], {}), "('Username already in use')\n", (1126, 1153), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError\n'), ((1276, 1322), 'wtforms.ValidationError', 'ValidationError', (['"""Email already in registered"""'], {}), "('Email already in registered')\n", (1291, 1322), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError\n'), ((616, 630), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (628, 630), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((632, 645), 'wtforms.validators.Length', 'Length', (['(1)', '(64)'], {}), '(1, 64)\n', 
(638, 645), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((704, 718), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (716, 718), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((720, 734), 'wtforms.validators.Length', 'Length', (['(1)', '(128)'], {}), '(1, 128)\n', (726, 734), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((736, 743), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (741, 743), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((799, 813), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (811, 813), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((815, 861), 'wtforms.validators.EqualTo', 'EqualTo', (['"""password2"""'], {'message': '"""Does not match"""'}), "('password2', message='Does not match')\n", (822, 861), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n'), ((920, 934), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (932, 934), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo, URL, Optional\n')] |
# -*- coding: utf-8 -*-
import obs
import os,sys,time,datetime
import logging
class _ListAll(object):
    """Iterator base class that pages through a truncated listing.

    Subclasses implement :meth:`_listresult`, which must append fetched
    entries to ``self.entity`` and update ``self.is_Truncated`` /
    ``self.marker`` from the service response.
    """

    def __init__(self, marker=''):
        self.marker = marker        # continuation marker for the next page
        self.is_Truncated = True    # False once the service reports the final page
        self.entity = []            # buffered entries not yet yielded

    def _listresult(self):
        """Fetch the next page into ``self.entity`` (subclass responsibility)."""
        # Bug fix: the original ``raise NotImplemented`` raised a TypeError,
        # because NotImplemented is a constant, not an exception class.
        raise NotImplementedError

    def __iter__(self):
        return self

    def next(self):
        """Return the next buffered entry, fetching more pages as needed."""
        while True:
            if self.entity:
                return self.entity.pop(0)
            if not self.is_Truncated:
                raise StopIteration()
            self._listresult()

    # Python 3 iterator protocol alias (the original code targets Python 2).
    __next__ = next
self._listresult()
class ListAllObjects(_ListAll):
    """Page through every object (and common prefix) under a bucket/prefix.

    Each page is fetched with ``listObjects``; plain objects and common
    prefixes (directories) are merged into ``self.entity`` sorted by key.
    """
    def __init__(self,service,bucket_name, prefix='', marker='', delimiter='', max_keys=1000):
        super(ListAllObjects,self).__init__(marker)
        self.service = service        # OBS client exposing listObjects
        self.bucket_name = bucket_name
        self.prefix = prefix
        self.marker = marker
        self.delimiter = delimiter
        self.max_keys = max_keys      # page size per listObjects call
    def _listresult(self):
        # Fetch one page and buffer its entries for the iterator in _ListAll.
        logger = logging.getLogger('pyftpdlib')
        result = self.service.listObjects(self.bucket_name, prefix=self.prefix,marker=self.marker, max_keys=self.max_keys, delimiter=self.delimiter)
        logger.info("listresult %r" % result.body)
        for content in result.body.contents:
            self.entity.append(content)
        # Directories come back as common prefixes with no object metadata.
        for dir in result.body.commonPrefixs:
            self.entity.append(EnhanceObjectInfo(dir.prefix, None, None, None, None))
        self.entity.sort(key=lambda obj : obj.key)
        self.is_Truncated = result.body.is_truncated
        self.marker = result.body.next_marker
# This class is used when there is no property when getting a directory
# or when you need to customize some properties of the object.
class EnhanceObjectInfo(object):
    """Listing entry used for directory prefixes (which carry no object
    metadata) or when object properties must be filled in manually."""

    def __init__(self, key, lastModified, etag, size, storageClass):
        self.key = key                    # object key or directory prefix
        self.lastModified = lastModified  # last-modified time; None for a prefix
        self.etag = etag                  # HTTP ETag
        self.size = size                  # content length
        self.storageClass = storageClass  # e.g. STANDARD, WARM or COLD

    def is_prefix(self):
        """A directory prefix is recognised by its missing lastModified."""
        return self.lastModified is None
class RequestResult(object):
    """Thin wrapper capturing the interesting fields of an HTTP response."""

    def __init__(self, resp):
        """Copy status, headers and request id off *resp*, keeping a pointer to it."""
        self.resp = resp              # underlying HTTP response object
        self.status = resp.status     # HTTP status code
        self.headers = resp.headers   # HTTP response headers
        # OBS-unique id of this request; quote it to cloud technical support
        # when investigating 5xx errors.
        self.request_id = resp.requestId
| [
"logging.getLogger"
] | [((968, 998), 'logging.getLogger', 'logging.getLogger', (['"""pyftpdlib"""'], {}), "('pyftpdlib')\n", (985, 998), False, 'import logging\n')] |
#!/usr/bin/env python
# coding: utf-8
"""
Classification Using Hidden Markov Model
========================================
This is a demonstration using the implemented Hidden Markov model to classify multiple targets.
We will attempt to classify 3 targets in an undefined region.
Our sensor will be all-seeing, and provide us with indirect observations of the targets such that,
using the implemented Hidden Markov Model (HMM), we should hopefully successfully classify exactly
3 targets correctly.
"""
# %%
# All Stone Soup imports will be given in order of usage.
from datetime import datetime, timedelta
import numpy as np
# %%
# Ground Truth
# ^^^^^^^^^^^^
# The targets may take one of three discrete hidden classes: 'bike', 'car' and 'bus'.
# It will be assumed that the targets cannot transition from one class to another, hence an
# identity transition matrix is given to the :class:`~.CategoricalTransitionModel`.
#
# A :class:`~.CategoricalState` class is used to store information on the classification/category
# of the targets. The state vector will define a categorical distribution over the 3 possible
# classes, whereby each component defines the probability that a target is of the corresponding
# class. For example, the state vector (0.2, 0.3, 0.5), with category names ('bike', 'car', 'bus')
# indicates that a target has a 20% probability of being class 'bike', a 30% probability of being
# class 'car' etc.
# It does not make sense to have a true target being a distribution over the possible classes, and
# therefore the true categorical states will have binary state vectors indicating a specific class
# (i.e. a '1' at one state vector index, and '0's elsewhere).
# The :class:`~.CategoricalGroundTruthState` class inherits directly from the base
# :class:`~.CategoricalState` class.
#
# While the category will remain the same, a :class:`~.CategoricalTransitionModel` is used here
# for the sake of demonstration.
#
# The category and timings for one of the ground truth paths will be printed.
from stonesoup.models.transition.categorical import CategoricalTransitionModel
from stonesoup.types.groundtruth import CategoricalGroundTruthState
from stonesoup.types.groundtruth import GroundTruthPath
category_transition = CategoricalTransitionModel(transition_matrix=np.eye(3),
transition_covariance=0.1 * np.eye(3))
start = datetime.now()
hidden_classes = ['bike', 'car', 'bus']
# Generating ground truth
ground_truths = list()
for i in range(1, 4):
    state_vector = np.zeros(3)  # create a vector with 3 zeroes
    state_vector[np.random.choice(3, 1, p=[1/3, 1/3, 1/3])] = 1  # pick a random class out of the 3
    ground_truth_state = CategoricalGroundTruthState(state_vector,
                                                timestamp=start,
                                                category_names=hidden_classes)
    ground_truth = GroundTruthPath([ground_truth_state], id=f"GT{i}")
    # Evolve each truth once per second for 10 steps using the (noisy)
    # categorical transition model.
    for _ in range(10):
        new_vector = category_transition.function(ground_truth[-1],
                                                   noise=True,
                                                   time_interval=timedelta(seconds=1))
        new_state = CategoricalGroundTruthState(
            new_vector,
            timestamp=ground_truth[-1].timestamp + timedelta(seconds=1),
            category_names=hidden_classes
        )
        ground_truth.append(new_state)
    ground_truths.append(ground_truth)
# Print one row per timestep with every target's true category.
for states in np.vstack(ground_truths).T:
    print(f"{states[0].timestamp:%H:%M:%S}", end="")
    for state in states:
        print(f" -- {state.category}", end="")
    print()
# %%
# Measurement
# ^^^^^^^^^^^
# Using a Hidden markov model, it is assumed the hidden class of a target cannot be directly
# observed, and instead indirect observations are taken. In this instance, observations of the
# targets' sizes are taken ('small' or 'large'), which have direct implications as to the targets'
# hidden classes, and this relationship is modelled by the `emission matrix` of the
# :class:`~.CategoricalMeasurementModel`, which is used by the :class:`~.CategoricalSensor` to
# provide :class:`~.CategoricalDetection` types.
# We will model this such that a 'bike' has a very small chance of being observed as a 'big'
# target. Similarly, a 'bus' will tend to appear as 'large'. Whereas, a 'car' has equal chance of
# being observed as either.
from stonesoup.models.measurement.categorical import CategoricalMeasurementModel
from stonesoup.sensor.categorical import CategoricalSensor
E = np.array([[0.99, 0.01],   # P(small | bike)  P(large | bike)
              [0.5, 0.5],     # P(small | car)   P(large | car)
              [0.01, 0.99]])  # P(small | bus)   P(large | bus)
model = CategoricalMeasurementModel(ndim_state=3,
                                    emission_matrix=E,
                                    emission_covariance=0.1 * np.eye(2),
                                    mapping=[0, 1, 2])
eo = CategoricalSensor(measurement_model=model,
                       category_names=['small', 'large'])
# Generating measurements: one batch per timestep; timestep 5 is left empty
# on purpose so the tracker must rely on prediction alone there.
measurements = list()
for index, states in enumerate(np.vstack(ground_truths).T):
    if index == 5:
        measurements_at_time = set()  # Give tracker chance to use prediction instead
    else:
        measurements_at_time = eo.measure(states)
    timestamp = next(iter(states)).timestamp
    measurements.append((timestamp, measurements_at_time))
    print(f"{timestamp:%H:%M:%S} -- {[meas.category for meas in measurements_at_time]}")
# %%
# Tracking Components
# ^^^^^^^^^^^^^^^^^^^
# %%
# Predictor
# ---------
# A :class:`~.HMMPredictor` specifically uses :class:`~.CategoricalTransitionModel` types to
# predict.
from stonesoup.predictor.categorical import HMMPredictor
predictor = HMMPredictor(category_transition)
# %%
# Updater
# -------
from stonesoup.updater.categorical import HMMUpdater
updater = HMMUpdater()
# %%
# Hypothesiser
# ------------
# A :class:`~.CategoricalHypothesiser` is used for calculating categorical hypotheses.
# It utilises the :class:`~.ObservationAccuracy` measure: a multi-dimensional extension of an
# 'accuracy' score, essentially providing a measure of the similarity between two categorical
# distributions.
from stonesoup.hypothesiser.categorical import CategoricalHypothesiser
hypothesiser = CategoricalHypothesiser(predictor=predictor, updater=updater)
# %%
# Data Associator
# ---------------
# We will use a standard :class:`~.GNNWith2DAssignment` data associator.
from stonesoup.dataassociator.neighbour import GNNWith2DAssignment
data_associator = GNNWith2DAssignment(hypothesiser)
# %%
# Prior
# -----
# As we are tracking in a categorical state space, we should initiate with a categorical state for
# the prior. Equal probability is given to all 3 of the possible hidden classes that a target
# might take (the category names are also provided here).
from stonesoup.types.state import CategoricalState
prior = CategoricalState([1 / 3, 1 / 3, 1 / 3], category_names=hidden_classes)
# %%
# Initiator
# ---------
# For each unassociated detection, a new track will be initiated. In this instance we use a
# :class:`~.SimpleCategoricalInitiator`, which specifically handles categorical state priors.
from stonesoup.initiator.categorical import SimpleCategoricalInitiator
initiator = SimpleCategoricalInitiator(prior_state=prior, measurement_model=None)
# %%
# Deleter
# -------
# We can use a standard :class:`~.UpdateTimeStepsDeleter`.
from stonesoup.deleter.time import UpdateTimeStepsDeleter
deleter = UpdateTimeStepsDeleter(2)
# %%
# Tracker
# -------
# We can use a standard :class:`~.MultiTargetTracker`.
from stonesoup.tracker.simple import MultiTargetTracker
tracker = MultiTargetTracker(initiator, deleter, measurements, data_associator, updater)
# %%
# Tracking
# ^^^^^^^^
tracks = set()
for time, ctracks in tracker:
    tracks.update(ctracks)  # collect every track the tracker ever reported
print(f"Number of tracks: {len(tracks)}")
for track in tracks:
    # Certainty is the probability mass on the most likely class, as a percentage.
    certainty = track.state_vector[np.argmax(track.state_vector)][0] * 100
    print(f"id: {track.id} -- category: {track.category} -- certainty: {certainty}%")
    for state in track:
        _time = state.timestamp.strftime('%H:%M')
        # Trim the repr of the state's type down to a short readable name.
        _type = str(type(state)).replace("class 'stonesoup.types.", "").strip("<>'. ")
        state_string = f"{_time} -- {_type} -- {state.category}"
        try:
            meas_string = f"associated measurement: {state.hypothesis.measurement.category}"
        except AttributeError:
            # No hypothesis/measurement attached to this state (e.g. a prediction).
            pass
        else:
            state_string += f" -- {meas_string}"
        print(state_string)
    print()
# %%
# Metric
# ^^^^^^
# Determining tracking accuracy.
# In calculating how many targets were classified correctly, only tracks with the highest
# classification certainty are considered. In the situation where probabilities are equal, a
# random classification is chosen.
excess_tracks = len(tracks) - len(ground_truths)  # target value = 0
# Rank tracks by the certainty of their most likely class, best first.
sorted_tracks = sorted(tracks,
                       key=lambda track: track.state_vector[np.argmax(track.state_vector)][0],
                       reverse=True)
best_tracks = sorted_tracks[:3]
true_classifications = [ground_truth.category for ground_truth in ground_truths]
track_classifications = [track.category for track in best_tracks]
num_correct_classifications = 0  # target value = num ground truths
# Greedy one-to-one matching: each true classification consumes at most one
# track classification, so duplicates are not double counted.
for true_classification in true_classifications:
    for i in range(len(track_classifications)):
        if track_classifications[i] == true_classification:
            num_correct_classifications += 1
            del track_classifications[i]
            break
print(f"Excess tracks: {excess_tracks}")
print(f"No. correct classifications: {num_correct_classifications}")
| [
"numpy.array",
"stonesoup.hypothesiser.categorical.CategoricalHypothesiser",
"datetime.timedelta",
"numpy.vstack",
"stonesoup.initiator.categorical.SimpleCategoricalInitiator",
"stonesoup.tracker.simple.MultiTargetTracker",
"stonesoup.types.state.CategoricalState",
"stonesoup.predictor.categorical.HMM... | [((2407, 2421), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2419, 2421), False, 'from datetime import datetime, timedelta\n'), ((4603, 4653), 'numpy.array', 'np.array', (['[[0.99, 0.01], [0.5, 0.5], [0.01, 0.99]]'], {}), '([[0.99, 0.01], [0.5, 0.5], [0.01, 0.99]])\n', (4611, 4653), True, 'import numpy as np\n'), ((4957, 5034), 'stonesoup.sensor.categorical.CategoricalSensor', 'CategoricalSensor', ([], {'measurement_model': 'model', 'category_names': "['small', 'large']"}), "(measurement_model=model, category_names=['small', 'large'])\n", (4974, 5034), False, 'from stonesoup.sensor.categorical import CategoricalSensor\n'), ((5780, 5813), 'stonesoup.predictor.categorical.HMMPredictor', 'HMMPredictor', (['category_transition'], {}), '(category_transition)\n', (5792, 5813), False, 'from stonesoup.predictor.categorical import HMMPredictor\n'), ((5904, 5916), 'stonesoup.updater.categorical.HMMUpdater', 'HMMUpdater', ([], {}), '()\n', (5914, 5916), False, 'from stonesoup.updater.categorical import HMMUpdater\n'), ((6332, 6393), 'stonesoup.hypothesiser.categorical.CategoricalHypothesiser', 'CategoricalHypothesiser', ([], {'predictor': 'predictor', 'updater': 'updater'}), '(predictor=predictor, updater=updater)\n', (6355, 6393), False, 'from stonesoup.hypothesiser.categorical import CategoricalHypothesiser\n'), ((6595, 6628), 'stonesoup.dataassociator.neighbour.GNNWith2DAssignment', 'GNNWith2DAssignment', (['hypothesiser'], {}), '(hypothesiser)\n', (6614, 6628), False, 'from stonesoup.dataassociator.neighbour import GNNWith2DAssignment\n'), ((6962, 7032), 'stonesoup.types.state.CategoricalState', 'CategoricalState', (['[1 / 3, 1 / 3, 1 / 3]'], {'category_names': 'hidden_classes'}), '([1 / 3, 1 / 3, 1 / 3], category_names=hidden_classes)\n', (6978, 7032), False, 'from stonesoup.types.state import CategoricalState\n'), ((7333, 7402), 'stonesoup.initiator.categorical.SimpleCategoricalInitiator', 
'SimpleCategoricalInitiator', ([], {'prior_state': 'prior', 'measurement_model': 'None'}), '(prior_state=prior, measurement_model=None)\n', (7359, 7402), False, 'from stonesoup.initiator.categorical import SimpleCategoricalInitiator\n'), ((7557, 7582), 'stonesoup.deleter.time.UpdateTimeStepsDeleter', 'UpdateTimeStepsDeleter', (['(2)'], {}), '(2)\n', (7579, 7582), False, 'from stonesoup.deleter.time import UpdateTimeStepsDeleter\n'), ((7731, 7809), 'stonesoup.tracker.simple.MultiTargetTracker', 'MultiTargetTracker', (['initiator', 'deleter', 'measurements', 'data_associator', 'updater'], {}), '(initiator, deleter, measurements, data_associator, updater)\n', (7749, 7809), False, 'from stonesoup.tracker.simple import MultiTargetTracker\n'), ((2554, 2565), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2562, 2565), True, 'import numpy as np\n'), ((2724, 2818), 'stonesoup.types.groundtruth.CategoricalGroundTruthState', 'CategoricalGroundTruthState', (['state_vector'], {'timestamp': 'start', 'category_names': 'hidden_classes'}), '(state_vector, timestamp=start, category_names=\n hidden_classes)\n', (2751, 2818), False, 'from stonesoup.types.groundtruth import CategoricalGroundTruthState\n'), ((2940, 2990), 'stonesoup.types.groundtruth.GroundTruthPath', 'GroundTruthPath', (['[ground_truth_state]'], {'id': 'f"""GT{i}"""'}), "([ground_truth_state], id=f'GT{i}')\n", (2955, 2990), False, 'from stonesoup.types.groundtruth import GroundTruthPath\n'), ((3524, 3548), 'numpy.vstack', 'np.vstack', (['ground_truths'], {}), '(ground_truths)\n', (3533, 3548), True, 'import numpy as np\n'), ((2299, 2308), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (2305, 2308), True, 'import numpy as np\n'), ((2616, 2663), 'numpy.random.choice', 'np.random.choice', (['(3)', '(1)'], {'p': '[1 / 3, 1 / 3, 1 / 3]'}), '(3, 1, p=[1 / 3, 1 / 3, 1 / 3])\n', (2632, 2663), True, 'import numpy as np\n'), ((5138, 5162), 'numpy.vstack', 'np.vstack', (['ground_truths'], {}), '(ground_truths)\n', (5147, 
5162), True, 'import numpy as np\n'), ((2387, 2396), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (2393, 2396), True, 'import numpy as np\n'), ((4885, 4894), 'numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (4891, 4894), True, 'import numpy as np\n'), ((3210, 3230), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (3219, 3230), False, 'from datetime import datetime, timedelta\n'), ((8010, 8039), 'numpy.argmax', 'np.argmax', (['track.state_vector'], {}), '(track.state_vector)\n', (8019, 8039), True, 'import numpy as np\n'), ((3356, 3376), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (3365, 3376), False, 'from datetime import datetime, timedelta\n'), ((9055, 9084), 'numpy.argmax', 'np.argmax', (['track.state_vector'], {}), '(track.state_vector)\n', (9064, 9084), True, 'import numpy as np\n')] |
from resources import analyze_str, calculate, CommandHandler, History
from os import system, path, chdir
if __name__ == '__main__':
    # Best-effort self-update: cd to the repository root and pull before
    # starting the REPL.
    print("Checking for possible updates...")
    repo_root = path.dirname(path.dirname(__file__))
    chdir(repo_root)
    system("git pull")
    print("\n")

    commands = {
        "exit": ["--exit", "-e"],
        "help": ["--help", "-h"],
        "history": ["--history", "-hi"]
    }
    history: History = History()
    handler: CommandHandler = CommandHandler(commands, history)

    prompt = ("Input expression, that you wish to be calculated or command, "
              "that you wish to be executed. For help type '-h' or '--help'.\n$ ")
    input_str: str = input(prompt)
    # Read-eval loop: record every line, parse it, evaluate, prompt again.
    while True:
        history.add(input_str)
        calculate(analyze_str(input_str.strip(), handler))
        input_str = input("\n$ ")
| [
"resources.calculate",
"resources.History",
"resources.CommandHandler",
"os.path.dirname",
"os.system"
] | [((297, 315), 'os.system', 'system', (['"""git pull"""'], {}), "('git pull')\n", (303, 315), False, 'from os import system, path, chdir\n'), ((489, 498), 'resources.History', 'History', ([], {}), '()\n', (496, 498), False, 'from resources import analyze_str, calculate, CommandHandler, History\n'), ((529, 562), 'resources.CommandHandler', 'CommandHandler', (['commands', 'history'], {}), '(commands, history)\n', (543, 562), False, 'from resources import analyze_str, calculate, CommandHandler, History\n'), ((868, 889), 'resources.calculate', 'calculate', (['expression'], {}), '(expression)\n', (877, 889), False, 'from resources import analyze_str, calculate, CommandHandler, History\n'), ((266, 288), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (278, 288), False, 'from os import system, path, chdir\n')] |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""library to standardize data."""
from tensorflow.compat.v1.io import gfile
from tqdm import tqdm
import string
import json
printable = set(string.printable)
def add_dot(utt):
    """Return *utt* stripped of surrounding whitespace, ensuring it ends
    with sentence punctuation.

    A trailing '.' is appended unless the text already ends in '.' or '?'
    (any other terminator, including '!', still gets a '.', matching the
    original behaviour).

    Robustness fix: the original indexed ``utt.strip()[-1]`` and raised
    IndexError on empty or whitespace-only input; such input now yields
    the empty string.  The repeated ``.strip()`` calls were also hoisted.
    """
    text = utt.strip()
    if not text:
        return text
    if text[-1] in ('.', '?'):
        return text
    return text + '.'
def standardize_message(utterances, time_stamp=None):
  """Merge consecutive utterances spoken by the same talker into one turn.

  Sometimes time_stamp could be None.
  For example
    <t1> how are you. <t2> I am good. <t2> And you? <eod> <t1>
  becomes
    <t1> how are you. <t2> I am good. And you? <eod> <t1>

  Returns (merged_utterances, merged_time_stamps).  The timestamp kept for
  a merged turn is that of its last constituent utterance; the timestamp
  list is empty when time_stamp is falsy.
  """
  merged = []
  merged_times = []
  for idx, utt in enumerate(utterances):
    if not utt.strip():
      continue
    # The talker is everything before the first ':'.
    talker, _, rest = utt.partition(':')
    sentence = rest.strip()
    if not sentence:
      continue
    if merged and talker == merged[-1].partition(':')[0]:
      # Same speaker as the previous turn: glue the sentence on.
      merged[-1] += ' ' + add_dot(sentence)
      if time_stamp:
        merged_times[-1] = time_stamp[idx]
    else:
      merged.append(add_dot(utt))
      if time_stamp:
        merged_times.append(time_stamp[idx])
  return merged, merged_times
def delete_non_ascii(s):
  """Drop every character of *s* not in the module-level `printable` set."""
  return ''.join(ch for ch in s if ch in printable)
def load_and_drop(data_file, kb_file, drop_incorrect=True, verbose=False):
  """Load paired dialogue/KB JSONL files, filtering incorrect samples.

  Lines shorter than 10 characters on either side are skipped, non-ASCII
  characters are removed, and (when ``drop_incorrect``) samples whose
  'correct_sample' field is present and falsy are dropped.

  Args:
    data_file: path of the dialogue JSONL file.
    kb_file: path of the knowledge-base JSONL file, read in lockstep.
    drop_incorrect: keep only samples not explicitly marked incorrect.
    verbose: print a kept/total summary when True.

  Returns:
    (loaded_data, loaded_kb): two parallel lists of parsed JSON objects.
  """
  fin_data = gfile.GFile(data_file)
  fin_kb = gfile.GFile(kb_file)
  total_in_file = 0
  loaded_data = []
  loaded_kb = []
  try:
    for line1 in tqdm(fin_data, desc='loading data'):
      if len(line1.strip()) < 10:
        continue
      line2 = fin_kb.readline()
      if len(line2.strip()) < 10:
        continue
      line1 = delete_non_ascii(line1)
      line2 = delete_non_ascii(line2)
      data_obj = json.loads(line1)
      kb_obj = json.loads(line2)
      if (not drop_incorrect) or (
          'correct_sample' not in data_obj) or data_obj['correct_sample']:
        loaded_data.append(data_obj)
        loaded_kb.append(kb_obj)
      total_in_file += 1
  finally:
    # Bug fix: the file handles were previously never closed.
    fin_data.close()
    fin_kb.close()
  if verbose:
    # Bug fix: guard against ZeroDivisionError when no line survived the
    # length filters (total_in_file == 0).
    ratio = len(loaded_data) * 1.0 / total_in_file if total_in_file else 0.0
    print(('loaded: ', len(loaded_data), '/', total_in_file, '=', ratio))
  return loaded_data, loaded_kb
def load_and_drop_stream(data_file,
                         kb_file,
                         drop_incorrect=True,
                         verbose=False):
  """Yield (data_obj, kb_obj) pairs, filtering incorrect samples.

  Lazy counterpart of load_and_drop: parsed JSON objects are yielded one
  pair at a time instead of accumulated in lists.  kb_obj is None when
  kb_file does not exist.  Pairs are dropped when the data line is shorter
  than 10 characters, when the matching kb line is shorter than 10
  characters, or (when drop_incorrect) when the sample is explicitly
  marked with a falsy 'correct_sample' field.
  """
  if verbose:
    print('loading stream')
  fin_data = gfile.GFile(data_file)
  # The kb file is optional; without it every yielded kb_obj is None.
  if gfile.exists(kb_file):
    fin_kb = gfile.GFile(kb_file)
  else:
    fin_kb = None
  if verbose: print("gfile loaded: ", fin_data)
  for line1 in fin_data:
    if verbose:
      print(line1)
    # Degenerate data line: skip without consuming a kb line.
    if len(line1.strip()) < 10:
      continue
    line1 = delete_non_ascii(line1)
    data_obj = json.loads(line1)
    if fin_kb:
      # One kb line is consumed per surviving data line, keeping the two
      # files in lockstep; a degenerate kb line discards the whole pair.
      line2 = fin_kb.readline()
      if len(line2.strip()) < 10:
        continue
      line2 = delete_non_ascii(line2)
      kb_obj = json.loads(line2)
    else:
      kb_obj = None
    if (not drop_incorrect) or (
        'correct_sample' not in data_obj) or data_obj['correct_sample']:
      yield data_obj, kb_obj
def standardize_and_drop(data_file,
                         kb_file,
                         drop_incorrect=True,
                         verbose=False):
  """Load, filter and standardize paired dialogue/KB files in one pass.

  Incorrect samples are dropped (see load_and_drop) and adjacent
  utterances from the same talker are merged in place via
  standardize_message.
  """
  loaded_data, loaded_kb = load_and_drop(data_file, kb_file, drop_incorrect,
                                         verbose)
  for data_obj in tqdm(loaded_data, desc='standardizing data'):
    dialogue = data_obj.get('dialogue')
    if not dialogue:
      continue
    timestamps = data_obj.get('timestamps')
    new_diag, new_time = standardize_message(dialogue, timestamps)
    data_obj['dialogue'] = new_diag
    if new_time:
      data_obj['timestamps'] = new_time
      assert len(data_obj['dialogue']) == len(data_obj['timestamps'])
  return loaded_data, loaded_kb
| [
"tensorflow.compat.v1.io.gfile.GFile",
"json.loads",
"tqdm.tqdm",
"tensorflow.compat.v1.io.gfile.exists"
] | [((2085, 2107), 'tensorflow.compat.v1.io.gfile.GFile', 'gfile.GFile', (['data_file'], {}), '(data_file)\n', (2096, 2107), False, 'from tensorflow.compat.v1.io import gfile\n'), ((2119, 2139), 'tensorflow.compat.v1.io.gfile.GFile', 'gfile.GFile', (['kb_file'], {}), '(kb_file)\n', (2130, 2139), False, 'from tensorflow.compat.v1.io import gfile\n'), ((2211, 2246), 'tqdm.tqdm', 'tqdm', (['fin_data'], {'desc': '"""loading data"""'}), "(fin_data, desc='loading data')\n", (2215, 2246), False, 'from tqdm import tqdm\n'), ((3156, 3178), 'tensorflow.compat.v1.io.gfile.GFile', 'gfile.GFile', (['data_file'], {}), '(data_file)\n', (3167, 3178), False, 'from tensorflow.compat.v1.io import gfile\n'), ((3184, 3205), 'tensorflow.compat.v1.io.gfile.exists', 'gfile.exists', (['kb_file'], {}), '(kb_file)\n', (3196, 3205), False, 'from tensorflow.compat.v1.io import gfile\n'), ((4225, 4269), 'tqdm.tqdm', 'tqdm', (['loaded_data'], {'desc': '"""standardizing data"""'}), "(loaded_data, desc='standardizing data')\n", (4229, 4269), False, 'from tqdm import tqdm\n'), ((2460, 2477), 'json.loads', 'json.loads', (['line1'], {}), '(line1)\n', (2470, 2477), False, 'import json\n'), ((2491, 2508), 'json.loads', 'json.loads', (['line2'], {}), '(line2)\n', (2501, 2508), False, 'import json\n'), ((3220, 3240), 'tensorflow.compat.v1.io.gfile.GFile', 'gfile.GFile', (['kb_file'], {}), '(kb_file)\n', (3231, 3240), False, 'from tensorflow.compat.v1.io import gfile\n'), ((3473, 3490), 'json.loads', 'json.loads', (['line1'], {}), '(line1)\n', (3483, 3490), False, 'import json\n'), ((3647, 3664), 'json.loads', 'json.loads', (['line2'], {}), '(line2)\n', (3657, 3664), False, 'import json\n')] |
import datetime
from .celery import celery
from backend.news import hot_topics
from backend.cache import sadd
from backend.utils import time_now_formatted
@celery.task(bind=True)
def store_hot_topics(a):
    """Celery task: add the current hot topics to a cached set.

    With ``bind=True`` the first argument ``a`` is the bound task instance
    (conventionally named ``self``); it is unused here.
    """
    # time_now_formatted('PESTO_SYSTEM_HOT_TOPICS') presumably yields a
    # time-stamped cache key -- confirm against backend.utils.  Each value
    # returned by hot_topics() is added to that set via sadd.
    sadd(time_now_formatted('PESTO_SYSTEM_HOT_TOPICS'), hot_topics())
"backend.news.hot_topics",
"backend.utils.time_now_formatted"
] | [((215, 260), 'backend.utils.time_now_formatted', 'time_now_formatted', (['"""PESTO_SYSTEM_HOT_TOPICS"""'], {}), "('PESTO_SYSTEM_HOT_TOPICS')\n", (233, 260), False, 'from backend.utils import time_now_formatted\n'), ((262, 274), 'backend.news.hot_topics', 'hot_topics', ([], {}), '()\n', (272, 274), False, 'from backend.news import hot_topics\n')] |
# Generated by Django 3.1.1 on 2020-09-07 19:50
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.1.1): removes the
    # `time_stamp` and `user` fields from the `slackpost` model.
    # Do not edit the operations by hand once applied.

    dependencies = [
        ('actions', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='slackpost',
            name='time_stamp',
        ),
        migrations.RemoveField(
            model_name='slackpost',
            name='user',
        ),
    ]
| [
"django.db.migrations.RemoveField"
] | [((216, 281), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""slackpost"""', 'name': '"""time_stamp"""'}), "(model_name='slackpost', name='time_stamp')\n", (238, 281), False, 'from django.db import migrations\n'), ((326, 385), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""slackpost"""', 'name': '"""user"""'}), "(model_name='slackpost', name='user')\n", (348, 385), False, 'from django.db import migrations\n')] |
import os
import sys
import zipfile
import django
# Make the project root importable, point Django at the settings module,
# and initialise Django before the model imports below.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from museum_site.models import * # noqa: E402
from museum_site.constants import * # noqa: E402
def main():
    """Interactively suggest and apply Details to File records.

    For every File with pk >= the entered starting PK, the zip archive is
    listed, detail suggestions are derived from the file extensions via
    EXTENSION_HINTS, and the operator chooses which suggested Detail IDs
    to attach to the File.
    """
    start = input("Starting PK: ")
    if start:
        start = int(start)
    else:
        start = 1

    qs = File.objects.filter(pk__gte=start).order_by("id")
    dqs = Detail.objects.all()

    # Cache all Detail rows by id for quick lookup below.
    possible_details = {}
    for d in dqs:
        possible_details[d.id] = d

    for f in qs:
        os.system("clear")
        hints = []
        hint_ids = []
        with zipfile.ZipFile(SITE_ROOT + f.download_url(), "r") as zf:
            file_list = zf.namelist()
            file_list.sort()

            # Get suggested details based on the file list
            unknown_extensions = []  # NOTE(review): populated nowhere, unused
            for name in file_list:
                # Uppercased extension; bare names (no dot) use the name itself.
                ext = os.path.splitext(os.path.basename(name).upper())
                if ext[1] == "":
                    ext = ext[0]
                else:
                    ext = ext[1]

                if ext in EXTENSION_HINTS:
                    suggest = (EXTENSION_HINTS[ext][1])
                    hints.append((name, EXTENSION_HINTS[ext][0], suggest))
                    hint_ids += EXTENSION_HINTS[ext][1]
                elif ext == "":  # Folders hit this
                    continue

        hint_ids = set(hint_ids)

        # Current details
        details = list(f.details.all())

        # Analysis
        print("#{} - '{}' [{}]".format(f.id, f.title, f.filename))
        print("=" * 80)
        print("CURRENT DETAILS ({}):".format(len(details)))
        current_detail_ids = []
        for d in details:
            current_detail_ids.append(d.id)
            print(d.detail, end=", ")
        print("\n")

        # Tabular listing of every extension-based suggestion.
        print(
            "+-FILENAME--------------------+-TYPE---------------------"
            "+-DETAIL---------------"
        )

        for h in hints:
            fname = ("| " + h[0] + " ")[:30] + "|"
            ftype = (h[1] + " ")[:25] + "|"

            if h[2]:
                suggest = (
                    possible_details.get(h[2][0], "?{}".format(h[2][0]))
                )
            else:
                suggest = ""
            print(fname, ftype, suggest)
        print("+" + ("-" * 79) + "\n")

        # Suggestions not already attached to the file.
        print("DETAILS TO ADD:")
        to_add = ""
        for h in hint_ids:
            if possible_details[h].id not in current_detail_ids:
                to_add += str(possible_details[h].id) + ","
                print(possible_details[h])

        if to_add:
            print("\nEnter comma separated detail IDs to add. (ie '15,29,23')")
            print("Leave blank to apply all suggested details.")
            print("Enter '0' to make no changes")
            choice = input("CHOICE: ")

            if choice == "0":
                continue
            elif choice == "":
                apply_ids = to_add
            else:
                apply_ids = choice

            # Apply
            ids = apply_ids.split(",")
            for detail_id in ids:
                if not detail_id:
                    # Trailing comma in to_add produces one empty entry.
                    break
                f.details.add(Detail.objects.get(pk=int(detail_id)))
                print(" - Added detail", possible_details[int(detail_id)].detail.upper())
            f.save()
            print("Saved!")
        else:
            print("No details to add.")
            input("Press Enter to continue.")


if __name__ == '__main__':
    main()
| [
"os.environ.setdefault",
"django.setup",
"os.path.basename",
"os.path.abspath",
"os.system"
] | [((129, 195), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""museum.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'museum.settings')\n", (150, 195), False, 'import os\n'), ((196, 210), 'django.setup', 'django.setup', ([], {}), '()\n', (208, 210), False, 'import django\n'), ((623, 641), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (632, 641), False, 'import os\n'), ((100, 125), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (115, 125), False, 'import os\n'), ((972, 994), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (988, 994), False, 'import os\n')] |
# pynadjust module for classes used across adj, apu and xyz files
import geodepy.convert as gc
class Station(object):
    """A station record shared across the pynadjust adj/apu/xyz readers.

    All attributes default to None and are filled in by the file parsers.
    The sd_*, hpu/vpu, smaj/smin/brg, vcv and covariances fields are
    presumably adjustment uncertainty quantities following DynAdjust
    column naming -- confirm against the parsers that populate them.
    """

    def __init__(self, name=None, description=None, con=None, lat=None, lon=None, ohgt=None, ehgt=None,
                 sd_e=None, sd_n=None, sd_u=None, hpu=None, vpu=None, smaj=None, smin=None, brg=None, vcv=None,
                 covariances=None):
        self.name = name
        self.description = description
        self.con = con
        self.lat = lat
        self.lon = lon
        self.ohgt = ohgt
        self.ehgt = ehgt
        self.sd_e = sd_e
        self.sd_n = sd_n
        self.sd_u = sd_u
        self.hpu = hpu
        self.vpu = vpu
        self.smaj = smaj
        self.smin = smin
        self.brg = brg
        self.vcv = vcv
        self.covariances = covariances

    def xyz(self):
        # Convert (lat, lon, ellipsoidal height) to Cartesian XYZ via GeodePy.
        return gc.llh2xyz(self.lat, self.lon, self.ehgt)

    def grid(self):
        # Convert (lat, lon) to a map-grid coordinate via GeodePy.
        return gc.geo2grid(self.lat, self.lon)
class DynaMetadata(object):
    """Plain container for the metadata of a DynAdjust run."""

    def __init__(self, epoch, reference_frame, geoid_model, version):
        # Value object: keep each field exactly as supplied.
        self.version = version
        self.geoid_model = geoid_model
        self.reference_frame = reference_frame
        self.epoch = epoch
class Switches(object):
    """Flag set tracking which file section is currently being parsed."""

    def __init__(self, stns=False, msrs=False, header=False):
        self.stns = stns
        self.msrs = msrs
        self.header = header

    def reset(self):
        # Clear every section flag in one go.
        self.stns = self.msrs = self.header = False
"geodepy.convert.geo2grid",
"geodepy.convert.llh2xyz"
] | [((847, 888), 'geodepy.convert.llh2xyz', 'gc.llh2xyz', (['self.lat', 'self.lon', 'self.ehgt'], {}), '(self.lat, self.lon, self.ehgt)\n', (857, 888), True, 'import geodepy.convert as gc\n'), ((925, 956), 'geodepy.convert.geo2grid', 'gc.geo2grid', (['self.lat', 'self.lon'], {}), '(self.lat, self.lon)\n', (936, 956), True, 'import geodepy.convert as gc\n')] |
import urllib.request
from bs4 import BeautifulSoup
from assets import data
from assets import functions
from models.BaffleBoard import BaffleBoard
page = functions.scrape_url(data.WEB_LINK)

tableHead = page.find('span', {"id": "Yo-kai_Watch_3"})
table = tableHead.find_parent().find_next_sibling()
tableRows = table.find_all('tr')

# Walk every data row of the baffle-board table (row 1 is the header).
# Bug fix: the original compared ints with `is` / `is not`, which only
# works by accident for CPython's small-int cache; use ==/!= instead.
for rowCount, row in enumerate(tableRows, start=1):
    if rowCount == 1:
        continue
    rowData = row.find_all('td')
    # Bug fix: the clue/solution/effect locals used to leak between rows
    # (or be unbound on the first row when a column was missing); reset
    # them for every row so short rows yield empty strings instead.
    location = solution = effect = ''
    clue_1 = clue_2 = clue_3 = ''
    for dataCount, dataCol in enumerate(rowData, start=1):
        if dataCount == 1:
            # Column 1: location.
            location = dataCol.text
        elif dataCount == 2:
            # Column 2: up to three clues as <li> items.
            for clueCount, clue in enumerate(dataCol.find_all('li'), start=1):
                if clueCount == 1:
                    clue_1 = clue.text
                elif clueCount == 2:
                    clue_2 = clue.text
                elif clueCount == 3:
                    clue_3 = clue.text
        elif dataCount == 3:
            # Column 3: solution.
            solution = dataCol.text
        elif dataCount == 4:
            # Column 4: effect.
            effect = dataCol.text

    baffleBoard = BaffleBoard(location.rstrip(), clue_1.rstrip(),
                              clue_2.rstrip(), clue_3.rstrip(),
                              solution.rstrip(), effect.rstrip())
    test = functions.add_object_json_to_file(baffleBoard, "test.json")
    print(test)
| [
"assets.functions.add_object_json_to_file",
"assets.functions.scrape_url"
] | [((157, 192), 'assets.functions.scrape_url', 'functions.scrape_url', (['data.WEB_LINK'], {}), '(data.WEB_LINK)\n', (177, 192), False, 'from assets import functions\n'), ((1660, 1719), 'assets.functions.add_object_json_to_file', 'functions.add_object_json_to_file', (['baffleBoard', '"""test.json"""'], {}), "(baffleBoard, 'test.json')\n", (1693, 1719), False, 'from assets import functions\n')] |
#! /usr/bin/env python
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) <NAME> <<EMAIL>>
# Copyright (C) <NAME> <<EMAIL>>
# This program is published under a GPLv2 license
# scapy.contrib.description = GMLAN Utilities
# scapy.contrib.status = loads
import time
from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, \
GMLAN_TD, GMLAN_PM, GMLAN_RMBA
from scapy.config import conf
from scapy.contrib.isotp import ISOTPSocket
from scapy.error import warning, log_loading
from scapy.utils import PeriodicSenderThread
__all__ = ["GMLAN_TesterPresentSender", "GMLAN_InitDiagnostics",
"GMLAN_GetSecurityAccess", "GMLAN_RequestDownload",
"GMLAN_TransferData", "GMLAN_TransferPayload",
"GMLAN_ReadMemoryByAddress", "GMLAN_BroadcastSocket"]
log_loading.info("\"conf.contribs['GMLAN']"
                 "['treat-response-pending-as-answer']\" set to True). This "
                 "is required by the GMLAN-Utils module to operate "
                 "correctly.")
# NOTE(review): the log message above claims the option is set to True,
# yet both assignments below set it to False -- confirm which is intended.
try:
    conf.contribs['GMLAN']['treat-response-pending-as-answer'] = False
except KeyError:
    # conf.contribs has no 'GMLAN' entry yet: create it with the option set.
    conf.contribs['GMLAN'] = {'treat-response-pending-as-answer': False}
class GMLAN_TesterPresentSender(PeriodicSenderThread):
    def __init__(self, sock, pkt=None, interval=2):
        """ Thread to send TesterPresent messages packets periodically

        Bug fix: the default packet used to be ``pkt=GMLAN(...)`` in the
        signature, i.e. a single packet object built once at class
        definition time and shared by every instance (the classic mutable
        default argument pitfall).  A fresh packet is now created per
        instance; callers passing their own ``pkt`` are unaffected.

        Args:
            sock: socket where packet is sent periodically
            pkt: packet to send (defaults to a fresh GMLAN TesterPresent)
            interval: interval between two packets
        """
        if pkt is None:
            pkt = GMLAN(service="TesterPresent")
        PeriodicSenderThread.__init__(self, sock, pkt, interval)
def _check_response(resp, verbose):
if resp is None:
if verbose:
print("Timeout.")
return False
if verbose:
resp.show()
return resp.sprintf("%GMLAN.service%") != "NegativeResponse"
def _send_and_check_response(sock, req, timeout, verbose):
    """Send *req* on *sock* and evaluate the reply via _check_response."""
    if verbose:
        print("Sending %s" % repr(req))
    answer = sock.sr1(req, timeout=timeout, verbose=0)
    return _check_response(answer, verbose)
def GMLAN_InitDiagnostics(sock, broadcastsocket=None, timeout=None,
                          verbose=None, retry=0):
    """Send messages to put an ECU into a diagnostic/programming state.

    Sequence per attempt: DisableNormalCommunication (unicast, or broadcast
    without waiting for an answer), ReportProgrammingState, ProgrammingMode
    requestProgrammingMode, then ProgrammingMode enableProgrammingMode (no
    response expected).

    Args:
        sock: socket to send the message on.
        broadcastsocket: socket for broadcasting. If provided some message
                   will be sent as broadcast. Recommended when used on a
                   network with several ECUs.
        timeout: timeout for sending, receiving or sniffing packages.
        verbose: set verbosity level
        retry: number of retries in case of failure.

    Returns true on success.
    """
    if verbose is None:
        verbose = conf.verb
    retry = abs(retry)

    while retry >= 0:
        retry -= 1

        # Step 1: DisableNormalCommunication
        p = GMLAN(service="DisableNormalCommunication")
        if broadcastsocket is None:
            if not _send_and_check_response(sock, p, timeout, verbose):
                continue
        else:
            # Broadcast is fire-and-forget: no response is checked.
            if verbose:
                print("Sending %s as broadcast" % repr(p))
            broadcastsocket.send(p)
        time.sleep(0.05)

        # Step 2: ReportProgrammedState
        p = GMLAN(service="ReportProgrammingState")
        if not _send_and_check_response(sock, p, timeout, verbose):
            continue
        # Step 3: ProgrammingMode requestProgramming
        p = GMLAN() / GMLAN_PM(subfunction="requestProgrammingMode")
        if not _send_and_check_response(sock, p, timeout, verbose):
            continue
        time.sleep(0.05)

        # Step 4: InitiateProgramming enableProgramming
        # No response expected
        p = GMLAN() / GMLAN_PM(subfunction="enableProgrammingMode")
        if verbose:
            print("Sending %s" % repr(p))
        sock.send(p)
        time.sleep(0.05)
        return True
    return False
def GMLAN_GetSecurityAccess(sock, keyFunction, level=1, timeout=None,
                            verbose=None, retry=0):
    """Authenticate on ECU. Implements the Seed-Key procedure.

    A seed is requested at the (odd) access *level*; the caller-supplied
    ``keyFunction`` turns the seed into the key, which is sent back at
    ``level + 1``.  A seed of 0 means the ECU is already unlocked.

    Args:
        sock: socket to send the message on.
        keyFunction: function implementing the key algorithm.
        level: level of access (must be odd).
        timeout: timeout for sending, receiving or sniffing packages.
        verbose: set verbosity level
        retry: number of retries in case of failure.

    Returns true on success.
    """
    if verbose is None:
        verbose = conf.verb
    retry = abs(retry)

    if level % 2 == 0:
        warning("Parameter Error: Level must be an odd number.")
        return False

    while retry >= 0:
        retry -= 1
        # Request the seed for this access level.
        request = GMLAN() / GMLAN_SA(subfunction=level)
        if verbose:
            print("Requesting seed..")
        resp = sock.sr1(request, timeout=timeout, verbose=0)
        if not _check_response(resp, verbose):
            if verbose:
                print("Negative Response.")
            continue

        seed = resp.securitySeed
        if seed == 0:
            if verbose:
                print("ECU security already unlocked. (seed is 0x0000)")
            return True

        # Answer with the computed key at level + 1.
        keypkt = GMLAN() / GMLAN_SA(subfunction=level + 1,
                                       securityKey=keyFunction(seed))
        if verbose:
            print("Responding with key..")
        resp = sock.sr1(keypkt, timeout=timeout, verbose=0)
        if resp is None:
            if verbose:
                print("Timeout.")
            continue
        if verbose:
            resp.show()
        if resp.sprintf("%GMLAN.service%") == "SecurityAccessPositiveResponse":  # noqa: E501
            if verbose:
                print("SecurityAccess granted.")
            return True
        # Invalid Key
        elif resp.sprintf("%GMLAN.service%") == "NegativeResponse" and \
                resp.sprintf("%GMLAN.returnCode%") == "InvalidKey":
            if verbose:
                print("Key invalid")
            continue
    return False
def GMLAN_RequestDownload(sock, length, timeout=None, verbose=None, retry=0):
    """Send RequestDownload message.

    Usually used before calling TransferData.

    Args:
        sock: socket to send the message on.
        length: value for the message's parameter 'unCompressedMemorySize'.
        timeout: timeout for sending, receiving or sniffing packages.
        verbose: set verbosity level.
        retry: number of retries in case of failure.

    Returns true on success.
    """
    if verbose is None:
        verbose = conf.verb
    # One initial attempt plus `retry` retries.
    for remaining in range(abs(retry), -1, -1):
        request = GMLAN() / GMLAN_RD(memorySize=length)
        answer = sock.sr1(request, timeout=timeout, verbose=0)
        if _check_response(answer, verbose):
            return True
        if remaining and verbose:
            print("Retrying..")
    return False
def GMLAN_TransferData(sock, addr, payload, maxmsglen=None, timeout=None,
                       verbose=None, retry=0):
    """Send TransferData message.

    Usually used after calling RequestDownload.  The payload is written in
    chunks of at most ``maxmsglen`` bytes; each chunk gets its own retry
    budget and the whole transfer fails as soon as one chunk exhausts it.

    Args:
        sock: socket to send the message on.
        addr: destination memory address on the ECU.
        payload: data to be sent.
        maxmsglen: maximum length of a single iso-tp message. (default:
                   maximum length)
        timeout: timeout for sending, receiving or sniffing packages.
        verbose: set verbosity level.
        retry: number of retries in case of failure.

    Returns true on success.
    """
    if verbose is None:
        verbose = conf.verb
    retry = abs(retry)
    startretry = retry

    scheme = conf.contribs['GMLAN']['GMLAN_ECU_AddressingScheme']

    # The address must fit into the configured addressing-scheme width.
    if addr < 0 or addr >= 2**(8 * scheme):
        warning("Error: Invalid address " + hex(addr) + " for scheme " +
                str(scheme))
        return False

    # max size of dataRecord according to gmlan protocol
    if maxmsglen is None or maxmsglen <= 0 or maxmsglen > (4093 - scheme):
        maxmsglen = (4093 - scheme)

    for i in range(0, len(payload), maxmsglen):
        # Fresh retry budget for every chunk.
        retry = startretry
        while True:
            if len(payload[i:]) > maxmsglen:
                transdata = payload[i:i + maxmsglen]
            else:
                transdata = payload[i:]
            pkt = GMLAN() / GMLAN_TD(startingAddress=addr + i,
                                      dataRecord=transdata)
            resp = sock.sr1(pkt, timeout=timeout, verbose=0)
            if _check_response(resp, verbose):
                break
            retry -= 1
            if retry >= 0:
                if verbose:
                    print("Retrying..")
            else:
                return False

    return True
def GMLAN_TransferPayload(sock, addr, payload, maxmsglen=None, timeout=None,
                          verbose=None, retry=0):
    """Send data by using GMLAN services.

    Args:
        sock: socket to send the data on.
        addr: destination memory address on the ECU.
        payload: data to be sent.
        maxmsglen: maximum length of a single iso-tp message. (default:
                   maximum length)
        timeout: timeout for sending, receiving or sniffing packages.
        verbose: set verbosity level.
        retry: number of retries in case of failure.

    Returns true on success.
    """
    # RequestDownload must succeed before TransferData is attempted;
    # `and` short-circuits exactly like the original early returns.
    return (GMLAN_RequestDownload(sock, len(payload), timeout=timeout,
                                  verbose=verbose, retry=retry)
            and GMLAN_TransferData(sock, addr, payload, maxmsglen=maxmsglen,
                                   timeout=timeout, verbose=verbose,
                                   retry=retry))
def GMLAN_ReadMemoryByAddress(sock, addr, length, timeout=None,
                              verbose=None, retry=0):
    """Read data from ECU memory.

    Args:
        sock: socket to send the data on.
        addr: source memory address on the ECU.
        length: bytes to read
        timeout: timeout for sending, receiving or sniffing packages.
        verbose: set verbosity level.
        retry: number of retries in case of failure.

    Returns the bytes read, or None on invalid arguments / no response.
    """
    if verbose is None:
        verbose = conf.verb
    retry = abs(retry)

    scheme = conf.contribs['GMLAN']['GMLAN_ECU_AddressingScheme']

    # The address must fit into the configured addressing-scheme width.
    if addr < 0 or addr >= 2**(8 * scheme):
        warning("Error: Invalid address " + hex(addr) + " for scheme " +
                str(scheme))
        return None

    # max size of dataRecord according to gmlan protocol
    if length <= 0 or length > (4094 - scheme):
        warning("Error: Invalid length " + hex(length) + " for scheme " +
                str(scheme) + ". Choose between 0x1 and " + hex(4094 - scheme))
        return None

    while retry >= 0:
        # ReadMemoryByAddress request (one attempt per loop iteration)
        pkt = GMLAN() / GMLAN_RMBA(memoryAddress=addr, memorySize=length)
        resp = sock.sr1(pkt, timeout=timeout, verbose=0)
        if _check_response(resp, verbose):
            return resp.dataRecord
        retry -= 1
        if retry >= 0 and verbose:
            print("Retrying..")
    return None
def GMLAN_BroadcastSocket(interface):
    """Returns a GMLAN broadcast socket using interface."""
    broadcast = ISOTPSocket(interface, sid=0x101, did=0x0,
                            basecls=GMLAN, extended_addr=0xfe)
    return broadcast
| [
"scapy.error.warning",
"scapy.contrib.automotive.gm.gmlan.GMLAN_PM",
"scapy.error.log_loading.info",
"time.sleep",
"scapy.contrib.automotive.gm.gmlan.GMLAN",
"scapy.contrib.automotive.gm.gmlan.GMLAN_TD",
"scapy.contrib.automotive.gm.gmlan.GMLAN_SA",
"scapy.contrib.automotive.gm.gmlan.GMLAN_RMBA",
"s... | [((852, 1025), 'scapy.error.log_loading.info', 'log_loading.info', (['""""conf.contribs[\'GMLAN\'][\'treat-response-pending-as-answer\']" set to True). This is required by the GMLAN-Utils module to operate correctly."""'], {}), '(\n \'"conf.contribs[\\\'GMLAN\\\'][\\\'treat-response-pending-as-answer\\\']" set to True). This is required by the GMLAN-Utils module to operate correctly.\'\n )\n', (868, 1025), False, 'from scapy.error import warning, log_loading\n'), ((11447, 11519), 'scapy.contrib.isotp.ISOTPSocket', 'ISOTPSocket', (['interface'], {'sid': '(257)', 'did': '(0)', 'basecls': 'GMLAN', 'extended_addr': '(254)'}), '(interface, sid=257, did=0, basecls=GMLAN, extended_addr=254)\n', (11458, 11519), False, 'from scapy.contrib.isotp import ISOTPSocket\n'), ((1330, 1360), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {'service': '"""TesterPresent"""'}), "(service='TesterPresent')\n", (1335, 1360), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((1623, 1679), 'scapy.utils.PeriodicSenderThread.__init__', 'PeriodicSenderThread.__init__', (['self', 'sock', 'pkt', 'interval'], {}), '(self, sock, pkt, interval)\n', (1652, 1679), False, 'from scapy.utils import PeriodicSenderThread\n'), ((2945, 2988), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {'service': '"""DisableNormalCommunication"""'}), "(service='DisableNormalCommunication')\n", (2950, 2988), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((3263, 3279), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (3273, 3279), False, 'import time\n'), ((3325, 3364), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {'service': '"""ReportProgrammingState"""'}), "(service='ReportProgrammingState')\n", (3330, 3364), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((3665, 
3681), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (3675, 3681), False, 'import time\n'), ((3921, 3937), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (3931, 3937), False, 'import time\n'), ((4639, 4695), 'scapy.error.warning', 'warning', (['"""Parameter Error: Level must be an odd number."""'], {}), "('Parameter Error: Level must be an odd number.')\n", (4646, 4695), False, 'from scapy.error import warning, log_loading\n'), ((3511, 3518), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (3516, 3518), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((3521, 3567), 'scapy.contrib.automotive.gm.gmlan.GMLAN_PM', 'GMLAN_PM', ([], {'subfunction': '"""requestProgrammingMode"""'}), "(subfunction='requestProgrammingMode')\n", (3529, 3567), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((3774, 3781), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (3779, 3781), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((3784, 3829), 'scapy.contrib.automotive.gm.gmlan.GMLAN_PM', 'GMLAN_PM', ([], {'subfunction': '"""enableProgrammingMode"""'}), "(subfunction='enableProgrammingMode')\n", (3792, 3829), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((4778, 4785), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (4783, 4785), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((4788, 4815), 'scapy.contrib.automotive.gm.gmlan.GMLAN_SA', 'GMLAN_SA', ([], {'subfunction': 'level'}), '(subfunction=level)\n', (4796, 4815), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((5267, 5274), 
'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (5272, 5274), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((6756, 6763), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (6761, 6763), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((6766, 6793), 'scapy.contrib.automotive.gm.gmlan.GMLAN_RD', 'GMLAN_RD', ([], {'memorySize': 'length'}), '(memorySize=length)\n', (6774, 6793), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((11039, 11046), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (11044, 11046), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((11049, 11098), 'scapy.contrib.automotive.gm.gmlan.GMLAN_RMBA', 'GMLAN_RMBA', ([], {'memoryAddress': 'addr', 'memorySize': 'length'}), '(memoryAddress=addr, memorySize=length)\n', (11059, 11098), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((8475, 8482), 'scapy.contrib.automotive.gm.gmlan.GMLAN', 'GMLAN', ([], {}), '()\n', (8480, 8482), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n'), ((8485, 8541), 'scapy.contrib.automotive.gm.gmlan.GMLAN_TD', 'GMLAN_TD', ([], {'startingAddress': '(addr + i)', 'dataRecord': 'transdata'}), '(startingAddress=addr + i, dataRecord=transdata)\n', (8493, 8541), False, 'from scapy.contrib.automotive.gm.gmlan import GMLAN, GMLAN_SA, GMLAN_RD, GMLAN_TD, GMLAN_PM, GMLAN_RMBA\n')] |
"""
Create a 3D mask from labelled objects
"""
import os
import xarray as xr
def create_mask_from_objects(objects):
    """Return a boolean mask that is True wherever the object label
    differs from the 0 background."""
    mask = objects != 0
    return mask
if __name__ == "__main__":
import argparse
argparser = argparse.ArgumentParser(description=__doc__)
argparser.add_argument("object_file", type=str)
args = argparser.parse_args()
object_file = args.object_file.replace(".nc", "")
if "objects" not in object_file:
raise Exception()
base_name, mask_name = object_file.split(".objects.")
fn_objects = "{}.nc".format(object_file)
if not os.path.exists(fn_objects):
raise Exception("Couldn't find objects file `{}`".format(fn_objects))
objects = xr.open_dataarray(fn_objects, decode_times=False)
ds = create_mask_from_objects(objects=objects)
ds.attrs["input_name"] = object_file
ds.attrs["mask_name"] = mask_name
out_filename = "{}.mask_3d.objects.{}.nc".format(
base_name.replace("/", "__"), mask_name
)
ds.to_netcdf(out_filename)
print("Wrote output to `{}`".format(out_filename))
| [
"os.path.exists",
"xarray.open_dataarray",
"argparse.ArgumentParser"
] | [((209, 253), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (232, 253), False, 'import argparse\n'), ((697, 746), 'xarray.open_dataarray', 'xr.open_dataarray', (['fn_objects'], {'decode_times': '(False)'}), '(fn_objects, decode_times=False)\n', (714, 746), True, 'import xarray as xr\n'), ((577, 603), 'os.path.exists', 'os.path.exists', (['fn_objects'], {}), '(fn_objects)\n', (591, 603), False, 'import os\n')] |
from app.article import Articles
from flask import render_template,request,redirect,url_for
from . import main
from ..request import get_article,get_news
from app.request import search_article
# Views
@main.route('/')
def index():
    """Render the home page, or redirect to search when a query was submitted."""
    all_news = get_news()
    title = 'Home-See your news'
    # Renamed from `search_article`, which shadowed the helper imported from
    # app.request at module level.
    article_query = request.args.get('article_query')
    if article_query:
        return redirect(url_for('main.search', news_name=article_query))
    return render_template('index.html', title=title, all_news=all_news)
@main.route('/news/<id>')
def articles(id):
    """Show the articles for one news source, or redirect to search on a query."""
    articles = get_article(id)
    query = request.args.get('article_query')
    if query:
        return redirect(url_for('main.search', news_name=query))
    return render_template('news.html', articles=articles)
@main.route('/search/<news_name>')
def search(news_name):
    """Display articles matching a search term (spaces become `+` for the API)."""
    news_name_format = "+".join(news_name.split(" "))
    searched_news = search_article(news_name_format)
    title = f'search results for {news_name}'
    return render_template('search.html', title=title, all_news=searched_news)
"flask.render_template",
"flask.request.args.get",
"app.request.search_article",
"flask.url_for"
] | [((392, 425), 'flask.request.args.get', 'request.args.get', (['"""article_query"""'], {}), "('article_query')\n", (408, 425), False, 'from flask import render_template, request, redirect, url_for\n'), ((906, 939), 'flask.request.args.get', 'request.args.get', (['"""article_query"""'], {}), "('article_query')\n", (922, 939), False, 'from flask import render_template, request, redirect, url_for\n'), ((1340, 1372), 'app.request.search_article', 'search_article', (['news_name_format'], {}), '(news_name_format)\n', (1354, 1372), False, 'from app.request import search_article\n'), ((1430, 1497), 'flask.render_template', 'render_template', (['"""search.html"""'], {'title': 'title', 'all_news': 'searched_news'}), "('search.html', title=title, all_news=searched_news)\n", (1445, 1497), False, 'from flask import render_template, request, redirect, url_for\n'), ((572, 633), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': 'title', 'all_news': 'all_news'}), "('index.html', title=title, all_news=all_news)\n", (587, 633), False, 'from flask import render_template, request, redirect, url_for\n'), ((1055, 1102), 'flask.render_template', 'render_template', (['"""news.html"""'], {'articles': 'articles'}), "('news.html', articles=articles)\n", (1070, 1102), False, 'from flask import render_template, request, redirect, url_for\n'), ((473, 521), 'flask.url_for', 'url_for', (['"""main.search"""'], {'news_name': 'search_article'}), "('main.search', news_name=search_article)\n", (480, 521), False, 'from flask import render_template, request, redirect, url_for\n'), ((985, 1030), 'flask.url_for', 'url_for', (['"""main.search"""'], {'news_name': 'search_news'}), "('main.search', news_name=search_news)\n", (992, 1030), False, 'from flask import render_template, request, redirect, url_for\n')] |
import sqlite3
import time
class sqliteConnector:
    """Thin SQLite persistence layer for users and quotes.

    All values are bound through `?` placeholders so user-supplied strings
    (names, emails, passwords, quote text) cannot inject SQL; the previous
    implementation interpolated them straight into the statements.  Column
    names in the generic update helpers cannot be bound as parameters and
    must therefore come from trusted code only.
    """

    def __init__(self, dbpath):
        """Open (or create) the SQLite database at `dbpath`."""
        self.__dbpath = dbpath
        self.__connection = sqlite3.connect(self.__dbpath)  # opens/creates the file
        self.__cursor = self.__connection.cursor()

    def add_user(self, username, usermail, userpass):
        """Insert a new user row; `regdate` is the current local time."""
        self.__cursor.execute(
            "INSERT INTO User (username, email, regdate, pass_hash) VALUES (?, ?, ?, ?);",
            (username, usermail, time.ctime(), userpass))
        self.__connection.commit()  # should be done to finish operation

    def is_available_user(self, username, usermail):
        """Return True when neither the username nor the email is taken."""
        self.__cursor.execute(
            "SELECT * FROM User WHERE username=? OR email=?;",
            (username, usermail))
        return len(self.__cursor.fetchall()) == 0

    def get_user(self, username):
        """Return the row whose username or email equals `username`, or None."""
        self.__cursor.execute(
            "SELECT * FROM User WHERE username=? OR email=?;",
            (username, username))
        return self.__cursor.fetchone()

    def drop(self):
        """Delete both tables if they exist."""
        self.__cursor.execute("DROP TABLE IF EXISTS User;")
        self.__cursor.execute("DROP TABLE IF EXISTS Quote;")
        self.__connection.commit()

    def create(self):
        """Create the User and Quote tables."""
        self.__cursor.execute(
            "CREATE TABLE User (username varchar,email varchar,regdate datetime,pass_hash varchar,is_admin boolean default false)")
        self.__cursor.execute(
            "CREATE TABLE Quote (id integer PRIMARY KEY AUTOINCREMENT,"
            "quote_text text,author text,username varchar,publication_date datetime);")
        self.__connection.commit()

    def match_password(self, username, password):
        """Return True when `password` matches the stored hash for `username`."""
        self.__cursor.execute(
            "SELECT * FROM User WHERE username=? AND pass_hash=?;",
            (username, password))
        return len(self.__cursor.fetchall()) != 0

    def is_admin(self, username):
        """Return True when the user exists and is flagged as admin."""
        self.__cursor.execute(
            "SELECT is_admin FROM User WHERE username=?;", (username,))
        data = self.__cursor.fetchall()
        if len(data) == 0:
            return False
        # The flag is stored as text; only the literal string 'true' counts.
        return data[0][0] == 'true'

    def update_user(self, username, fieldname, fieldvalue):
        """Set one column of a user row.

        `fieldname` is interpolated as an identifier (placeholders cannot
        bind column names) and must never come from user input.
        """
        self.__cursor.execute(
            "UPDATE User SET {}=? WHERE username=?;".format(fieldname),
            (fieldvalue, username))
        self.__connection.commit()

    def delete_user(self, username):
        """Remove the user row for `username`."""
        self.__cursor.execute(
            "DELETE FROM User WHERE username=?;", (username,))
        self.__connection.commit()

    def add_quote(self, text, author, username):
        """Insert a quote and return the stored row (the newest match)."""
        self.__cursor.execute(
            "INSERT INTO Quote (quote_text, author, username, publication_date) VALUES (?, ?, ?, ?);",
            (text, author, username, time.ctime()))
        self.__connection.commit()
        self.__cursor.execute(
            "SELECT * FROM Quote WHERE quote_text=? AND author=?;",
            (text, author))
        data = self.__cursor.fetchall()
        return data[-1]

    def get_random_quote(self):
        """Return one random quote row, or None when the table is empty."""
        self.__cursor.execute("SELECT * FROM Quote ORDER BY RANDOM() LIMIT 1;")
        data = self.__cursor.fetchone()
        if not data:
            return None
        return data

    def get_quote_by_id(self, id):
        """Return the quote row with primary key `id`, or None."""
        self.__cursor.execute("SELECT * FROM Quote WHERE id=?;", (id,))
        data = self.__cursor.fetchone()
        if not data:
            return None
        return data

    def get_quotes_by_user(self, username):
        """Return all quote rows posted by `username`, or None when there are none."""
        self.__cursor.execute(
            "SELECT * FROM Quote WHERE username=?;", (username,))
        data = self.__cursor.fetchall()
        if len(data) == 0:
            return None
        return data

    def update_quote_field(self, quote_id, field, value):
        """Set one column of a quote row; `field` must be a trusted identifier."""
        self.__cursor.execute(
            "UPDATE Quote SET {}=? WHERE id=?;".format(field),
            (value, quote_id))
        self.__connection.commit()

    def delete_quote(self, quote_id):
        """Remove the quote row with primary key `quote_id`."""
        self.__cursor.execute(
            "DELETE FROM Quote WHERE id=?;", (quote_id,))
        self.__connection.commit()

    def get_user_by_quote(self, quote_id):
        """Return the username that posted quote `quote_id`, or None."""
        self.__cursor.execute(
            "SELECT username FROM Quote WHERE id=?;", (quote_id,))
        data = self.__cursor.fetchone()
        if not data:
            return None
        return data[0]

    def reset(self):
        """Drop and recreate both tables, discarding all data."""
        self.drop()
        self.create()
| [
"time.ctime",
"sqlite3.connect"
] | [((143, 173), 'sqlite3.connect', 'sqlite3.connect', (['self.__dbpath'], {}), '(self.__dbpath)\n', (158, 173), False, 'import sqlite3\n'), ((484, 496), 'time.ctime', 'time.ctime', ([], {}), '()\n', (494, 496), False, 'import time\n'), ((2732, 2744), 'time.ctime', 'time.ctime', ([], {}), '()\n', (2742, 2744), False, 'import time\n')] |
#!/usr/bin/env python3
import base64
import os
import sys
import datetime
import dotenv
import requests
from flask import Flask, render_template, session, redirect, url_for, request, flash
sys.path.append(os.path.abspath('src'))
from utils import utc_to_local
from client import Client
import runner
# Run from the repository root so relative paths (secrets.env, data/) resolve.
os.chdir(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
dotenv.load_dotenv('secrets.env')
app = Flask(__name__)
app.secret_key = os.environ['SECRET_KEY']
# Fail fast if the Todoist OAuth app credentials are missing from the environment.
assert 'CLIENT_ID' in os.environ
assert 'CLIENT_SECRET' in os.environ
@app.template_filter()
def format_datetime(value, format='%d.%m.%Y %H:%M:%S'):
    """Jinja filter: render a UTC datetime in the session user's local timezone."""
    if not value:
        return '-'
    tz_cfg = session['timezone']
    tz = datetime.timezone(
        datetime.timedelta(hours=tz_cfg['hours'], minutes=tz_cfg['minutes']),
        tz_cfg['timezone']
    )
    return utc_to_local(value, tz).strftime(format)
@app.template_test()
def prio_label(value):
    """Jinja test: true for label names beginning with 'prio'."""
    prefix = 'prio'
    return value.startswith(prefix)
@app.context_processor
def inject_now():
    """Expose the current UTC time and the debug flag to every template."""
    return dict(now=datetime.datetime.utcnow(), debug=app.debug)
@app.route('/')
def index():
    """Landing page; authenticated users are sent straight to their config."""
    if 'userid' not in session:
        return render_template('index.html')
    return redirect(url_for('config'))
@app.route('/config')
def config():
    """Render the per-user assistant configuration page."""
    if 'userid' not in session:
        return redirect(url_for('index'))
    with Client() as client:
        current_config = client.get_config(session['userid'])
        enabled = {}
        # An assistant counts as enabled only if its config exists and says so.
        for assistant in runner.ASSISTANTS:
            enabled[assistant] = assistant in current_config and current_config[assistant]['enabled']
        projects = client.get_projects(session['userid'])
        labels = client.get_labels(session['userid'])
        templates = client.get_templates(session['userid'])
    return render_template('config.html', config=current_config, enabled=enabled, projects=projects, labels=labels, templates=templates)
@app.route('/config/update/<assistant>', methods=['POST'])
def update_config(assistant):
    """Persist posted settings for one assistant, filtered by its whitelist."""
    if 'userid' not in session:
        return redirect(url_for('index'))
    if assistant not in runner.ASSISTANTS:
        flash('Unknown assistant ' + assistant)
        return redirect(url_for('config'))
    assistant_mod = runner.ASSISTANTS[assistant]
    with Client() as client:
        if 'enabled' in request.form:
            client.set_enabled(session['userid'], assistant, request.form['enabled'] == 'true')
        update = {}
        for key in assistant_mod.CONFIG_WHITELIST:
            if key in request.form:
                # Coerce to int or str as declared by the assistant module.
                intstr = int if key in assistant_mod.CONFIG_INT else str
                if key in assistant_mod.CONFIG_LIST:
                    # List fields: convert each posted item, dropping empty entries.
                    update[key] = list(filter(bool, map(intstr, request.form.getlist(key))))
                else:
                    update[key] = intstr(request.form[key])
        if update:
            client.update_config(session['userid'], {assistant: update})
    return redirect(url_for('config'))
@app.route('/template/start', methods=['POST'])
def start_template():
    """Instantiate a task template into the chosen project."""
    if 'userid' not in session:
        return redirect(url_for('index'))
    if 'template_id' not in request.form or 'project_id' not in request.form:
        flash('Missing argument')
        return redirect(url_for('config'))
    try:
        # Both ids arrive as strings; reject anything non-numeric.
        template_id = int(request.form['template_id'])
        project_id = int(request.form['project_id'])
    except ValueError:
        flash('Invalid template or project')
        return redirect(url_for('config'))
    with Client() as client:
        res = client.start_template(session['userid'], template_id, project_id)
        if res != 'ok':
            flash('Starting template failed: ' + res)
    return redirect(url_for('config'))
@app.route('/config/telegram_disconnect', methods=['POST'])
def telegram_disconnect():
    """Unlink the current user's Telegram account."""
    if 'userid' not in session:
        return redirect(url_for('index'))
    uid = session['userid']
    with Client() as client:
        client.telegram_disconnect(uid)
    return redirect(url_for('config'))
@app.route('/config/telegram_connect', methods=['POST'])
def telegram_connect():
    """Link a Telegram account using the code the bot handed to the user."""
    if 'userid' not in session:
        return redirect(url_for('index'))
    if 'code' not in request.form:
        return redirect(url_for('config'))
    with Client() as client:
        result = client.telegram_connect(session['userid'], request.form['code'])
        if result != 'ok':
            flash('Connecting Telegram account failed: ' + result)
    return redirect(url_for('config'))
@app.route('/login', methods=['POST'])
def login():
    """Debug-only login backdoor: forges a session without going through OAuth.

    Guarded by `app.debug`, so it is a no-op redirect in production.
    """
    if not app.debug:
        return redirect(url_for('index'))
    session['userid'] = request.form['userid']
    session['full_name'] = '<NAME>'
    session['avatar'] = ''
    # Fixed timezone for the fake account.
    session['timezone'] = {
        'timezone': 'Europe/Zurich',
        'hours': 2,
        'minutes': 0,
    }
    return redirect(url_for('config'))
@app.route('/logout', methods=['POST'])
def logout():
    """Drop the whole session and return to the landing page."""
    session.clear()
    destination = url_for('index')
    return redirect(destination)
@app.route('/oauth/redirect')
def oauth_redirect():
    """Start the Todoist OAuth flow.

    A random state token is stored in the session and echoed back by the
    provider, protecting the callback against CSRF.
    """
    state = base64.urlsafe_b64encode(os.urandom(32)).decode()
    session['OAUTH_STATE'] = state
    return redirect(
        'https://todoist.com/oauth/authorize?client_id={}&scope={}&state={}'.format(
            os.environ['CLIENT_ID'],
            'data:read_write',
            state
        )
    )
@app.route('/oauth/callback')
def oauth_callback():
    """Finish the OAuth flow: verify state, exchange the code, store the token."""
    def fail(msg):
        # Shared error path: surface the message and return to the landing page.
        flash(msg)
        return redirect(url_for('index'))
    if 'error' in request.args:
        return fail('OAauth failed: ' + request.args['error'])
    state = request.args.get('state')
    if not state or state != session.get('OAUTH_STATE'):
        return fail('Invalid OAuth state')
    code = request.args.get('code')
    if not code:
        return fail('Missing OAuth code')
    # Exchange the one-time authorization code for a long-lived access token.
    res = requests.post('https://todoist.com/oauth/access_token', data={
        'client_id': os.environ['CLIENT_ID'],
        'client_secret': os.environ['CLIENT_SECRET'],
        'code': code,
    }).json()
    if 'error' in res:
        return fail('OAuth failed: ' + res['error'])
    token = res['access_token']
    # Pull the user's profile so the session can show name/avatar/timezone.
    userinfo = requests.post('https://api.todoist.com/sync/v8/sync', data={
        'token': token,
        'sync_token': '*',
        'resource_types': '["user"]',
    }).json()['user']
    userid = userinfo['id']
    with Client() as client:
        if not client.account_exists(userid):
            return fail('Account is not known. Ask the admin to add your userid: ' + str(userid))
        res = client.set_token(userid, token)
        if res != 'ok':
            return fail('Setting token failed: ' + res)
    session['userid'] = userid
    session['full_name'] = userinfo['full_name']
    session['avatar'] = userinfo['avatar_big']
    session['timezone'] = userinfo['tz_info']
    return redirect(url_for('config'))
@app.route('/telegram/hook/<token>', methods=['POST'])
def telegram_hook(token):
    """Receive a raw Telegram update for the bot identified by `token`."""
    payload = request.json
    with Client() as client:
        client.telegram_update(token, payload)
    return ''
@app.route('/todoist/hook', methods=['POST'])
def todoist_hook():
    """Webhook endpoint for Todoist event deliveries."""
    # TODO check hmac
    # NOTE(review): without HMAC verification anyone can post forged events to
    # this endpoint -- confirm and implement before exposing it publicly.
    with Client() as client:
        client.todoist_hook(request.headers['X-Todoist-Delivery-Id'], request.json)
    return ''
if __name__ == '__main__':
    # Development server only; run behind a real WSGI server in production.
    app.run(host='0.0.0.0', port=8000, debug=True)
| [
"flask.render_template",
"flask.request.args.get",
"requests.post",
"flask.flash",
"flask.session.get",
"flask.Flask",
"datetime.datetime.utcnow",
"os.urandom",
"flask.request.form.getlist",
"utils.utc_to_local",
"dotenv.load_dotenv",
"flask.url_for",
"client.Client",
"os.path.abspath",
... | [((392, 425), 'dotenv.load_dotenv', 'dotenv.load_dotenv', (['"""secrets.env"""'], {}), "('secrets.env')\n", (410, 425), False, 'import dotenv\n'), ((433, 448), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (438, 448), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((208, 230), 'os.path.abspath', 'os.path.abspath', (['"""src"""'], {}), "('src')\n", (223, 230), False, 'import os\n'), ((1197, 1226), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (1212, 1226), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4503, 4518), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (4516, 4518), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((5076, 5101), 'flask.request.args.get', 'request.args.get', (['"""state"""'], {}), "('state')\n", (5092, 5101), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((5201, 5225), 'flask.request.args.get', 'request.args.get', (['"""code"""'], {}), "('code')\n", (5217, 5225), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((704, 803), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': "session['timezone']['hours']", 'minutes': "session['timezone']['minutes']"}), "(hours=session['timezone']['hours'], minutes=session[\n 'timezone']['minutes'])\n", (722, 803), False, 'import datetime\n'), ((1043, 1069), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1067, 1069), False, 'import datetime\n'), ((1336, 1344), 'client.Client', 'Client', ([], {}), '()\n', (1342, 1344), False, 'from client import Client\n'), ((1721, 1850), 'flask.render_template', 'render_template', (['"""config.html"""'], {'config': 'current_config', 'enabled': 'enabled', 'projects': 'projects', 'labels': 'labels', 'templates': 
'templates'}), "('config.html', config=current_config, enabled=enabled,\n projects=projects, labels=labels, templates=templates)\n", (1736, 1850), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((2045, 2084), 'flask.flash', 'flash', (["('Unknown assistant ' + assistant)"], {}), "('Unknown assistant ' + assistant)\n", (2050, 2084), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((2174, 2182), 'client.Client', 'Client', ([], {}), '()\n', (2180, 2182), False, 'from client import Client\n'), ((2729, 2746), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (2736, 2746), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((2962, 2987), 'flask.flash', 'flash', (['"""Missing argument"""'], {}), "('Missing argument')\n", (2967, 2987), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3229, 3237), 'client.Client', 'Client', ([], {}), '()\n', (3235, 3237), False, 'from client import Client\n'), ((3403, 3420), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (3410, 3420), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3582, 3590), 'client.Client', 'Client', ([], {}), '()\n', (3588, 3590), False, 'from client import Client\n'), ((3667, 3684), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (3674, 3684), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3909, 3917), 'client.Client', 'Client', ([], {}), '()\n', (3915, 3917), False, 'from client import Client\n'), ((4092, 4109), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (4099, 4109), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4427, 4444), 'flask.url_for', 'url_for', (['"""config"""'], {}), 
"('config')\n", (4434, 4444), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4536, 4552), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (4543, 4552), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4934, 4944), 'flask.flash', 'flash', (['msg'], {}), '(msg)\n', (4939, 4944), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((5751, 5759), 'client.Client', 'Client', ([], {}), '()\n', (5757, 5759), False, 'from client import Client\n'), ((6183, 6200), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (6190, 6200), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((6291, 6299), 'client.Client', 'Client', ([], {}), '()\n', (6297, 6299), False, 'from client import Client\n'), ((6461, 6469), 'client.Client', 'Client', ([], {}), '()\n', (6467, 6469), False, 'from client import Client\n'), ((855, 884), 'utils.utc_to_local', 'utc_to_local', (['value', 'timezone'], {}), '(value, timezone)\n', (867, 884), False, 'from utils import utc_to_local\n'), ((1170, 1187), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (1177, 1187), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((1312, 1328), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (1319, 1328), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((1985, 2001), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (1992, 2001), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((2103, 2120), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (2110, 2120), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((2867, 2883), 'flask.url_for', 
'url_for', (['"""index"""'], {}), "('index')\n", (2874, 2883), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3006, 3023), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (3013, 3023), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3149, 3185), 'flask.flash', 'flash', (['"""Invalid template or project"""'], {}), "('Invalid template or project')\n", (3154, 3185), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3344, 3385), 'flask.flash', 'flash', (["('Starting template failed: ' + res)"], {}), "('Starting template failed: ' + res)\n", (3349, 3385), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3558, 3574), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (3565, 3574), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3816, 3832), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (3823, 3832), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((3884, 3901), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (3891, 3901), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4023, 4074), 'flask.flash', 'flash', (["('Connecting Telegram account failed: ' + res)"], {}), "('Connecting Telegram account failed: ' + res)\n", (4028, 4074), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4202, 4218), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (4209, 4218), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4963, 4979), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (4970, 4979), False, 'from flask import Flask, 
render_template, session, redirect, url_for, request, flash\n'), ((5128, 5154), 'flask.session.get', 'session.get', (['"""OAUTH_STATE"""'], {}), "('OAUTH_STATE')\n", (5139, 5154), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((5283, 5451), 'requests.post', 'requests.post', (['"""https://todoist.com/oauth/access_token"""'], {'data': "{'client_id': os.environ['CLIENT_ID'], 'client_secret': os.environ[\n 'CLIENT_SECRET'], 'code': code}"}), "('https://todoist.com/oauth/access_token', data={'client_id':\n os.environ['CLIENT_ID'], 'client_secret': os.environ['CLIENT_SECRET'],\n 'code': code})\n", (5296, 5451), False, 'import requests\n'), ((362, 387), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (377, 387), False, 'import os\n'), ((3204, 3221), 'flask.url_for', 'url_for', (['"""config"""'], {}), "('config')\n", (3211, 3221), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n'), ((4642, 4656), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (4652, 4656), False, 'import os\n'), ((5569, 5698), 'requests.post', 'requests.post', (['"""https://api.todoist.com/sync/v8/sync"""'], {'data': '{\'token\': token, \'sync_token\': \'*\', \'resource_types\': \'["user"]\'}'}), '(\'https://api.todoist.com/sync/v8/sync\', data={\'token\': token,\n \'sync_token\': \'*\', \'resource_types\': \'["user"]\'})\n', (5582, 5698), False, 'import requests\n'), ((2551, 2576), 'flask.request.form.getlist', 'request.form.getlist', (['key'], {}), '(key)\n', (2571, 2576), False, 'from flask import Flask, render_template, session, redirect, url_for, request, flash\n')] |
from flask import Flask, jsonify, request
from blockchain.chain import BlockChain
from argparse import ArgumentParser
# Module-level singletons: the WSGI app and the chain it serves.
app = Flask(__name__)
blockchain = BlockChain()
@app.route('/chain', methods=['GET'])
def get_chain():
    """Return the full chain as JSON: its id, every block, and its length."""
    blocks = [blk.to_json() for blk in blockchain.chain.values()]
    return jsonify({
        'id': blockchain.chain_id,
        'chain': blocks,
        'length': len(blockchain)
    })
@app.route('/chain/block', methods=['POST'])
def add_block():
    """Append the posted transaction as a new block.

    On failure, return a JSON error body with status 400 instead of the old
    behavior: a bare `except` that printed and then crashed with a NameError
    because `blk_id` was never assigned.
    """
    transaction = request.get_json()
    try:
        blk_id = blockchain.append(transaction)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 400
    return jsonify({'block_id': blk_id})
@app.route('/chain/block/<string:block_id>', methods=['GET'])
def get_block(block_id):
    """Look up one block by id and return it together with chain metadata."""
    blk = blockchain[block_id]
    return jsonify({
        'chain_id': blockchain.chain_id,
        'block_id': blk.block_id,
        'block': blk.to_json()
    })
def parse_args():
    """Build and evaluate the command-line interface for the node."""
    parser = ArgumentParser()
    parser.add_argument('-H', '--host', default='127.0.0.1', type=str, help='host name')
    parser.add_argument('-p', '--port', default=5000, type=int, help='port to listen on')
    parser.add_argument('-d', '--debug', action='store_true', help='debug mode')
    return parser.parse_args()
if __name__ == '__main__':
    args = parse_args()
    # Mirror the debug flag onto both Flask switches so tracebacks show in dev.
    app.debug = args.debug
    app.testing = args.debug
    app.run(host=args.host, port=args.port)
"argparse.ArgumentParser",
"flask.Flask",
"flask.request.get_json",
"blockchain.chain.BlockChain",
"flask.jsonify"
] | [((127, 142), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (132, 142), False, 'from flask import Flask, jsonify, request\n'), ((157, 169), 'blockchain.chain.BlockChain', 'BlockChain', ([], {}), '()\n', (167, 169), False, 'from blockchain.chain import BlockChain\n'), ((399, 414), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (406, 414), False, 'from flask import Flask, jsonify, request\n'), ((497, 515), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (513, 515), False, 'from flask import Flask, jsonify, request\n'), ((665, 680), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (672, 680), False, 'from flask import Flask, jsonify, request\n'), ((939, 954), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (946, 954), False, 'from flask import Flask, jsonify, request\n'), ((991, 1007), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (1005, 1007), False, 'from argparse import ArgumentParser\n')] |
#! *-* coding: utf-8 *-*
#!/usr/bin/env python
"""
A simple scraper for recording the power supply of velodyne LiDAR,
by getting the `diag.json` files.
@author <NAME>
@file rc_velo_vol.py
"""
import argparse
import math
import time
import requests
import json
import logging
import os
from volt_temp import Volt_temp
# Diagnostic JSON endpoints of the two LiDAR units on the sensor network.
url_1 = 'http://192.168.100.201/cgi/diag.json'
url_2 = 'http://192.168.100.202/cgi/diag.json'
## For test only: a local server handing out a canned example diag.json.
url_3 = 'http://127.0.0.1:8000/example_diag.json'
url_4 = 'http://127.0.0.1:8000/example_diag.json'
# Sleep a period after getting one diag, in seconds.
sleep_prd = 1.0
def volt_temp_logger(volts, lidar_id):
    """
    Log one LiDAR's supply voltage at a severity that matches its level.
    """
    # Round the voltage into xx.xx before bucketing.
    volts = round(volts, 2)
    msg = 'Lidar:{} voltage:{}'.format(lidar_id, volts)
    if 11.5 <= volts <= 12.5:
        logger.info(msg)
    elif 10.0 <= volts < 11.5:
        logger.warning(msg)
    elif 9.0 <= volts < 10.0:
        logger.error(msg)
    else:
        logger.critical(msg)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Velodyne LiDAR voltage logger.')
    parser.add_argument('--num', type=int, help='Num of LiDARs', default=2)
    parser.add_argument('--mode', choices=['run', 'test'], default='run')
    parser.add_argument('--version', action='version', version='%(prog)s alpha 1.0')
    # Was `args = args = parser.parse_args()` -- a duplicated assignment.
    args = parser.parse_args()

    if args.mode == 'test':
        url_lidar_1 = url_3
        url_lidar_2 = url_4
    else:
        url_lidar_1 = url_1
        url_lidar_2 = url_2

    # Configure a file logger under data/logs/, one timestamped file per run.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    rq = time.strftime('velo_volt-%Y%m%d%H%M', time.localtime(time.time()))
    log_path = os.path.join(os.getcwd(), 'data', 'logs')
    log_name = os.path.join(log_path, rq + '.log')
    logfile = log_name
    # Check path exists or not
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    fh = logging.FileHandler(logfile, mode='w')
    fh.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    volt_temp_parser = Volt_temp()
    while True:
        # Poll each LiDAR's diag.json periodically, parse it, and log the
        # input supply voltage at the matching severity.
        req = requests.get(url_lidar_1, timeout=0.20)
        js = req.json()['volt_temp']
        volt_temp_parser.parse(js)
        volt_temp_logger(js['bot']['pwr_v_in'], 201)
        if args.num >= 2:
            # TODO: Not yet support more than two LiDARs
            req = requests.get(url_lidar_2, timeout=0.20)
            js = req.json()['volt_temp']
            volt_temp_parser.parse(js)
            volt_temp_logger(js['bot']['pwr_v_in'], 202)
        time.sleep(sleep_prd)
| [
"logging.getLogger",
"volt_temp.Volt_temp",
"os.path.exists",
"argparse.ArgumentParser",
"os.makedirs",
"logging.Formatter",
"os.path.join",
"requests.get",
"os.getcwd",
"time.sleep",
"logging.FileHandler",
"time.time"
] | [((1230, 1299), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Velodyne LiDAR voltage logger."""'}), "(description='Velodyne LiDAR voltage logger.')\n", (1253, 1299), False, 'import argparse\n'), ((1793, 1812), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1810, 1812), False, 'import logging\n'), ((1995, 2030), 'os.path.join', 'os.path.join', (['log_path', "(rq + '.log')"], {}), "(log_path, rq + '.log')\n", (2007, 2030), False, 'import os\n'), ((2208, 2246), 'logging.FileHandler', 'logging.FileHandler', (['logfile'], {'mode': '"""w"""'}), "(logfile, mode='w')\n", (2227, 2246), False, 'import logging\n'), ((2294, 2392), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s"""'], {}), "(\n '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s')\n", (2311, 2392), False, 'import logging\n'), ((2469, 2480), 'volt_temp.Volt_temp', 'Volt_temp', ([], {}), '()\n', (2478, 2480), False, 'from volt_temp import Volt_temp\n'), ((1951, 1962), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1960, 1962), False, 'import os\n'), ((2096, 2120), 'os.path.exists', 'os.path.exists', (['log_path'], {}), '(log_path)\n', (2110, 2120), False, 'import os\n'), ((2177, 2198), 'os.makedirs', 'os.makedirs', (['log_path'], {}), '(log_path)\n', (2188, 2198), False, 'import os\n'), ((2604, 2642), 'requests.get', 'requests.get', (['url_lidar_1'], {'timeout': '(0.2)'}), '(url_lidar_1, timeout=0.2)\n', (2616, 2642), False, 'import requests\n'), ((3055, 3076), 'time.sleep', 'time.sleep', (['sleep_prd'], {}), '(sleep_prd)\n', (3065, 3076), False, 'import time\n'), ((1909, 1920), 'time.time', 'time.time', ([], {}), '()\n', (1918, 1920), False, 'import time\n'), ((2870, 2908), 'requests.get', 'requests.get', (['url_lidar_2'], {'timeout': '(0.2)'}), '(url_lidar_2, timeout=0.2)\n', (2882, 2908), False, 'import requests\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from torchvision.models import resnet50, resnet101, resnext101_32x8d
import torch
import torch.nn as nn
import random
import numpy as np
class EncoderCNN(nn.Module):
    """Image encoder: a pretrained CNN trunk whose spatial feature map is
    projected to `embed_size` channels and flattened to (B, embed_size, H*W)."""

    def __init__(self, embed_size, dropout=0.5, image_model='resnet50', pretrained=True):
        """Load the pretrained model and replace top fc layer.

        Args:
            embed_size: number of output channels of the encoder.
            dropout: dropout probability inside the 1x1 projection head.
            image_model: one of 'resnet50', 'resnet101', 'resnext101_32x8d'.
            pretrained: load ImageNet weights for the backbone.
        """
        super(EncoderCNN, self).__init__()
        backbones = {
            'resnet50': resnet50,
            'resnet101': resnet101,
            'resnext101_32x8d': resnext101_32x8d,
        }
        if image_model not in backbones:
            # Previously an unknown name raised a KeyError from the
            # `globals()[image_model]` lookup before the intended ValueError;
            # validate up front so callers get one explicit, catchable error.
            raise ValueError('Invalid image_model {}'.format(image_model))
        pretrained_net = backbones[image_model](pretrained=pretrained)
        # Drop the average-pooling and fc head; keep the convolutional trunk.
        modules = list(pretrained_net.children())[:-2]
        self.pretrained_net = nn.Sequential(*modules)
        in_dim = pretrained_net.fc.in_features
        if in_dim == embed_size:
            self.last_module = None
        else:
            # 1x1 conv projection to the requested embedding width.
            self.last_module = nn.Sequential(
                nn.Conv2d(in_dim, embed_size, kernel_size=1, padding=0, bias=False),
                nn.Dropout(dropout), nn.BatchNorm2d(embed_size, momentum=0.01), nn.ReLU())

    def forward(self, images, keep_cnn_gradients=False):
        """Extract feature vectors from input images.

        Returns a tensor of shape (batch, channels, H*W), or None when
        `images` is None.
        """
        if images is None:
            return None
        # Get encoder output
        if keep_cnn_gradients:
            raw_conv_feats = self.pretrained_net(images)
        else:
            # Run the backbone without autograd; saves memory when not fine-tuning.
            with torch.no_grad():
                raw_conv_feats = self.pretrained_net(images)
        # Apply last_module to change the number of channels in the encoder output
        if self.last_module is not None:
            features = self.last_module(raw_conv_feats)
        else:
            features = raw_conv_feats
        # (B, C, H, W) -> (B, C, H*W)
        features = features.view(features.size(0), features.size(1), -1)
        return features
| [
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.Sequential",
"torch.nn.Conv2d",
"torch.no_grad"
] | [((901, 924), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (914, 924), True, 'import torch.nn as nn\n'), ((1118, 1185), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_dim', 'embed_size'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(in_dim, embed_size, kernel_size=1, padding=0, bias=False)\n', (1127, 1185), True, 'import torch.nn as nn\n'), ((1203, 1222), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (1213, 1222), True, 'import torch.nn as nn\n'), ((1224, 1265), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['embed_size'], {'momentum': '(0.01)'}), '(embed_size, momentum=0.01)\n', (1238, 1265), True, 'import torch.nn as nn\n'), ((1267, 1276), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1274, 1276), True, 'import torch.nn as nn\n'), ((1594, 1609), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1607, 1609), False, 'import torch\n')] |
from setuptools import setup
setup(name='rl_dobot',
version='0.1',
) | [
"setuptools.setup"
] | [((29, 66), 'setuptools.setup', 'setup', ([], {'name': '"""rl_dobot"""', 'version': '"""0.1"""'}), "(name='rl_dobot', version='0.1')\n", (34, 66), False, 'from setuptools import setup\n')] |
from django import template
import logging
from django.conf import settings
from django.template.defaultfilters import stringfilter
from django_cradmin import css_icon_map
register = template.Library()
log = logging.getLogger(__name__)
@register.simple_tag
@stringfilter
def cradmin_icon(iconkey):
"""
Returns the css class for an icon configured with the
given key in ``DJANGO_CRADMIN_CSS_ICON_MAP``.
"""
iconmap = getattr(settings, 'DJANGO_CRADMIN_CSS_ICON_MAP', css_icon_map.FONT_AWESOME)
icon_classes = iconmap.get(iconkey, '')
if not icon_classes:
log.warn('No icon named "%s" in settings.DJANGO_CRADMIN_ICONMAP.', iconkey)
return icon_classes
| [
"logging.getLogger",
"django.template.Library"
] | [((187, 205), 'django.template.Library', 'template.Library', ([], {}), '()\n', (203, 205), False, 'from django import template\n'), ((212, 239), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (229, 239), False, 'import logging\n')] |
from os.path import join, exists
import datatable as dt
from rs_datasets.data_loader import download_dataset
from rs_datasets.generic_dataset import Dataset, safe
class YooChoose(Dataset):
def __init__(self, path: str = None):
"""
:param path: folder which is used to download dataset to
if it does not contain dataset files.
If files are found, load them.
"""
super().__init__(path)
folder = join(self.data_folder, 'yoochoose')
if not exists(folder):
self._download(folder)
self.log = dt.fread(
join(folder, 'yoochoose-clicks.dat'),
columns=['session_id', 'ts', 'item_id', 'category']
).to_pandas()
self.purchases = dt.fread(
join(folder, 'yoochoose-buys.dat'),
columns=['session_id', 'ts', 'item_id', 'price', 'quantity']
).to_pandas()
self.test = dt.fread(
join(folder, 'yoochoose-test.dat'),
columns=['session_id', 'ts', 'item_id', 'category']
).to_pandas()
@safe
def _download(self, path):
self.logger.info('Downloading YooChoose Dataset...')
url = 'https://s3-eu-west-1.amazonaws.com/yc-rdata/yoochoose-data.7z'
download_dataset(url, join(self.data_folder, 'yoochoose.7z'))
| [
"os.path.exists",
"os.path.join"
] | [((465, 500), 'os.path.join', 'join', (['self.data_folder', '"""yoochoose"""'], {}), "(self.data_folder, 'yoochoose')\n", (469, 500), False, 'from os.path import join, exists\n'), ((516, 530), 'os.path.exists', 'exists', (['folder'], {}), '(folder)\n', (522, 530), False, 'from os.path import join, exists\n'), ((1289, 1327), 'os.path.join', 'join', (['self.data_folder', '"""yoochoose.7z"""'], {}), "(self.data_folder, 'yoochoose.7z')\n", (1293, 1327), False, 'from os.path import join, exists\n'), ((609, 645), 'os.path.join', 'join', (['folder', '"""yoochoose-clicks.dat"""'], {}), "(folder, 'yoochoose-clicks.dat')\n", (613, 645), False, 'from os.path import join, exists\n'), ((781, 815), 'os.path.join', 'join', (['folder', '"""yoochoose-buys.dat"""'], {}), "(folder, 'yoochoose-buys.dat')\n", (785, 815), False, 'from os.path import join, exists\n'), ((955, 989), 'os.path.join', 'join', (['folder', '"""yoochoose-test.dat"""'], {}), "(folder, 'yoochoose-test.dat')\n", (959, 989), False, 'from os.path import join, exists\n')] |
from ..cards import Card, Deck
from scipy.stats import bernoulli
from random import randint, shuffle
class Pile(object) :
def __init__(self):
self.cards = []
self.owner = None
def __getitem__(self, index):
if index < len(self.cards) :
return self.cards[-1 - index]
raise IndexError
def append(self, card, owner) :
self.cards.append(card)
if card._rank >= 9 :
self.owner = owner
def putUnder(self, card) :
newCards = [card]
for cards in self.cards :
newCards.append( cards )
self.cards = newCards
def slapable(self) :
if len(self.cards) < 2 :
return False
elif len(self.cards) == 2 :
if self.cards[-1]._rank == self.cards[-2]._rank :
return True
else :
return False
elif len(self.cards) >= 3 :
if self.cards[-1]._rank == self.cards[-2]._rank :
return True
elif self.cards[-1]._rank == self.cards[-3]._rank :
return True
else :
return False
def len(self):
return len(self.cards)
def give2owner(self):
if self.owner == None :
return False
def cardxPlayed(x) :
return len(self.cards) >= x-7 and self.cards[7-x]._rank == x
def afterxNothing(x) :
notFaceCard = True
for k in range(1,x-7) :
notFaceCard *= self.cards[-k]._rank < 9
return notFaceCard
for k in range(9,13) :
if cardxPlayed(k) :
if afterxNothing(k) :
return True
else :
return False
return False
class ErsPlayer(object) :
def __init__(self, speed = 9) :
self.speed = [1, speed % 11]
self.pile = Pile()
self.name = 'Player'
def Slap(self, pile) :
if pile.slapable() :
return randint( self.speed[0], self.speed[1] )
else :
return 0
def receiveCard(self, card):
self.pile.append( card, None )
def receivePile(self, pile):
for card in pile.cards :
self.pile.putUnder( card )
def deal(self):
return self.pile.cards.pop()
class ErsDummy(ErsPlayer) :
def __init__(self, p = 1) :
ErsPlayer.__init__(self)
self.propensityToSlap = p
self.name = 'Dummy'
def Slap(self, pile) :
if self.pile.len() == 0 or pile.len() < 2:
return 0
else :
if bernoulli.rvs(self.propensityToSlap) :
return 10
else :
return 0
class EgyptionRatScrew(object) :
def __init__(self, numberOfPlayers = 1, numberOfDummies = 1, shuffleOrder = False) :
self.nPlayers = numberOfPlayers + numberOfDummies
self._nPlayers = numberOfPlayers
self._nDummies = numberOfDummies
self._players = [ErsPlayer() for k in range(numberOfPlayers)]
self._players.extend( [ErsDummy() for k in range(numberOfDummies)] )
if shuffleOrder :
shuffle(self._players)
self.__deck = Deck()
self._whoDeals = randint(0,self.nPlayers-1)
self.pile = Pile()
self.PRINTSTATUS = False
while self.__deck._cardsRemaining > 0 :
for player in self._players :
if self.__deck._cardsRemaining > 0 :
player.receiveCard( self.__deck.deal() )
else :
break
def printStatus(self, n = 0, wonPile = None) :
if self.PRINTSTATUS :
if n == 0:
for k in range(self._nPlayers):
print("Player has %s cards: %s" \
% (self._players[k].pile.len(), self._players[k].pile.cards[-3:] ) )
for k in range(self.nPlayers-self._nDummies, self.nPlayers):
print("Dummy has %s cards: %s" \
% (self._players[k].pile.len(), self._players[k].pile.cards[-3:] ) )
elif n == 1 :
print("Person %s deals" % (self._whoDeals) )
elif n == 2 :
print("The pile has %s cards: %s" % (self.pile.len(), self.pile.cards[-6:]) )
elif n == 3 :
print("It was slapable and person %s won the pile!" % (wonPile) )
elif n == 4 :
print("The owner is %s" % (self.pile.owner) )
elif n == 5 :
print("Player %s wins the pile" % (self.pile.owner))
def action(self, PRINTSTATUS = False) :
while self._players[self._whoDeals].pile.len() == 0 :
self.nPlayers -= 1
if self.nPlayers == 1 :
return True
self._whoDeals = (self._whoDeals + 1) % self.nPlayers
self.printStatus(0)
self.printStatus(1)
self.pile.append( self._players[self._whoDeals].deal(), self._whoDeals )
self.printStatus(2)
peopleSlap = self.peopleSlapping()
if self.pile.slapable() :
tmp = []
for k in range( self.nPlayers ):
if peopleSlap[k] == max(peopleSlap) :
tmp.append(k)
if len(tmp) > 0 :
k = tmp[randint(0, len(tmp) - 1 )]
self.printStatus(3,k)
self._players[k].receivePile( self.pile )
self.pile = Pile()
self._whoDeals = k
else :
for k in range(self.nPlayers) :
if peopleSlap[k] > 0 :
self.pile.putUnder( self._players[k].deal() )
GIVEPILE = self.pile.give2owner()
self.printStatus(2)
self.printStatus(4)
if GIVEPILE :
self.printStatus(5)
self._players[self.pile.owner].receivePile( self.pile )
self._whoDeals = self.pile.owner
self.pile = Pile()
else :
if self.pile.len() > 0 :
if self.pile.cards[-1]._rank >= 9 :
self._whoDeals = (self.pile.owner + 1) % self.nPlayers
else :
if self.pile.owner != None :
self._whoDeals = (self.pile.owner + 1) % self.nPlayers
else :
self._whoDeals = (self._whoDeals + 1) % self.nPlayers
def peopleSlapping(self) :
myTuple = []
for player in self._players :
myTuple.append( player.Slap( self.pile ) )
return tuple(myTuple)
| [
"scipy.stats.bernoulli.rvs",
"random.shuffle",
"random.randint"
] | [((3337, 3366), 'random.randint', 'randint', (['(0)', '(self.nPlayers - 1)'], {}), '(0, self.nPlayers - 1)\n', (3344, 3366), False, 'from random import randint, shuffle\n'), ((2066, 2103), 'random.randint', 'randint', (['self.speed[0]', 'self.speed[1]'], {}), '(self.speed[0], self.speed[1])\n', (2073, 2103), False, 'from random import randint, shuffle\n'), ((2696, 2732), 'scipy.stats.bernoulli.rvs', 'bernoulli.rvs', (['self.propensityToSlap'], {}), '(self.propensityToSlap)\n', (2709, 2732), False, 'from scipy.stats import bernoulli\n'), ((3257, 3279), 'random.shuffle', 'shuffle', (['self._players'], {}), '(self._players)\n', (3264, 3279), False, 'from random import randint, shuffle\n')] |
from dagster import pipeline, ModeDefinition
from .resources import query_vm_resources
from .utilization import (
query_cpu_utilization, normalize_cpu_utilization,
query_mem_utilization, normalize_mem_utilization,
query_disk_utilization, normalize_disk_utilization,
default_azure_monitor_context
)
from .specifications import load_compute_specs
from .recommended import get_recommendations
from .right_size import right_size_engine, advisor_validator
from .output import write_operation_inventory, right_size_report, write_html_report
@pipeline(
mode_defs=[ModeDefinition(
resource_defs={'azure_monitor': default_azure_monitor_context}
)]
)
def rightsize_pipeline():
vm_resources = query_vm_resources()
cpu_utilization = query_cpu_utilization(vm_resources)
mem_utilization = query_mem_utilization(vm_resources)
disk_utilization = query_disk_utilization(vm_resources)
compute_specs = load_compute_specs()
cpu_utilization = normalize_cpu_utilization(utilization=cpu_utilization, compute_specs=compute_specs, resources=vm_resources)
mem_utilization = normalize_mem_utilization(utilization=mem_utilization, compute_specs=compute_specs, resources=vm_resources)
disk_utilization = normalize_disk_utilization(utilization=disk_utilization, compute_specs=compute_specs, resources=vm_resources)
recommendations = get_recommendations()
right_size_advisor_analysis = advisor_validator(cpu_utilization=cpu_utilization, mem_utilization=mem_utilization, disk_utilization=disk_utilization,
compute_specs=compute_specs, advisor_recommendations=recommendations, resources=vm_resources)
right_size_local_analysis = right_size_engine(cpu_utilization=cpu_utilization, mem_utilization=mem_utilization, disk_utilization=disk_utilization,
compute_specs=compute_specs, resources=vm_resources)
write_operation_inventory(analysis=right_size_local_analysis, resources=vm_resources)
report_notebook = right_size_report(advisor_analysis=right_size_advisor_analysis, local_analysis=right_size_local_analysis, resources=vm_resources,
compute_specs=compute_specs, cpu_utilization=cpu_utilization, mem_utilization=mem_utilization,
disk_utilization=disk_utilization)
write_html_report(report_notebook=report_notebook) | [
"dagster.ModeDefinition"
] | [((578, 656), 'dagster.ModeDefinition', 'ModeDefinition', ([], {'resource_defs': "{'azure_monitor': default_azure_monitor_context}"}), "(resource_defs={'azure_monitor': default_azure_monitor_context})\n", (592, 656), False, 'from dagster import pipeline, ModeDefinition\n')] |
"""
Read in and validate associations.
"""
### IMPORTS
import csv
import re
### CONSTANTS & DEFINES
FIRST_CAP_RE = re.compile ('(.)([A-Z][a-z]+)')
OTHER_CAP_RE = re.compile ('([a-z0-9])([A-Z])')
UNDERSCORE_RE = re.compile ('_+')
DATA_FLD_NAMES = (
'snp_id',
'snp_locn_chr',
'snp_locn_posn',
'snp_base_wild',
'snp_base_var',
'cpg_id',
'cpg_locn_chr',
'cpg_locn_posn',
'stat_beta',
'stat_stderr',
'stat_pval',
)
### CODE ###
def camel_to_snakecase (name):
s1 = FIRST_CAP_RE.sub (r'\1_\2', name)
return OTHER_CAP_RE.sub (r'\1_\2', s1).lower()
class AssocReader (csv.DictReader):
def __init__ (self, hndl):
super().__init__ (hndl)
# super(self.__class__, self).__init__()
self.csv_rdr = csv.DictReader (hndl)
@property
def fieldnames (self):
if self._fieldnames is None:
try:
tmp_fieldnames = next (self.reader)
san_fieldnames = self.sanitize_fieldnames (tmp_fieldnames)
self.check_fieldnames (san_fieldnames)
self._fieldnames = san_fieldnames
except StopIteration:
pass
self.line_num = self.reader.line_num
return self._fieldnames
def sanitize_fieldnames (self, fld_names):
fld_names = [s.strip() for s in fld_names]
fld_names = [s.replace (' ', '_') for s in fld_names]
fld_names = [s.replace ('.', '_') for s in fld_names]
# fld_names = [camel_to_snakecase (s) for s in fld_names]
fld_names = [UNDERSCORE_RE.sub (r'_', s) for s in fld_names]
fld_names = [s.lower() for s in fld_names]
fld_names = [s.strip() for s in fld_names]
return fld_names
def check_fieldnames (self, fld_names):
for f in DATA_FLD_NAMES:
assert f in fld_names, \
"required field '%s' missing from input fields" % f
### END ###
| [
"csv.DictReader",
"re.compile"
] | [((120, 150), 're.compile', 're.compile', (['"""(.)([A-Z][a-z]+)"""'], {}), "('(.)([A-Z][a-z]+)')\n", (130, 150), False, 'import re\n'), ((167, 198), 're.compile', 're.compile', (['"""([a-z0-9])([A-Z])"""'], {}), "('([a-z0-9])([A-Z])')\n", (177, 198), False, 'import re\n'), ((216, 232), 're.compile', 're.compile', (['"""_+"""'], {}), "('_+')\n", (226, 232), False, 'import re\n'), ((758, 778), 'csv.DictReader', 'csv.DictReader', (['hndl'], {}), '(hndl)\n', (772, 778), False, 'import csv\n')] |
#!/usr/local/bin/python
from __future__ import print_function
import sys
import time
import serial
import pylt
pusb = dict()
ver = "Prologix GPIB-USB Controller version 6.95"
hwset = (
"addr",
"auto",
"eoi",
"eos",
"eot_enable",
"eot_char",
"read_tmo_ms"
)
def def_set(setting):
setting["auto"] = 0
setting["eoi"] = 1
setting["eos"] = 0
setting["eot_enable"] = 0
setting["eot_char"] = 0
setting["read_tmo_ms"] = 500
setting["rd_mode"] = "eoi"
setting["autocr"] = 1
class prologix_usb(object):
def __init__(self, name):
self.name = name
self.debug_fd = open("_." + name, "w")
self.debug("====", "=============================")
self.ser = serial.Serial("/dev/" + name, 115200, timeout = 0.5)
self.version_check()
self.curset = dict()
self.rd_settings()
d = dict()
def_set(d)
self.set(d)
pusb[name] = self
def debug(self, pfx, str):
print((self.name, "%.6f" % time.time(), pfx, str),
file=self.debug_fd)
self.debug_fd.flush()
def version_check(self):
self.ser.write("\r")
self.cmd("++mode 1")
self.cmd("++auto 0")
self.cmd("++addr 0")
self.cmd("++savecfg 0")
self.cmd("++ifc")
while True:
x = self.ask("++ver")
if x == ver:
break;
assert x == ver
def ask(self, str):
self.cmd(str)
x = self.ser.readline()
x = x.strip("\r\n")
self.debug("{r", x)
return (x)
def rd_settings(self):
for i in hwset:
self.curset[i] = self.ask("++" + i)
def cmd(self, str):
assert str[0:2] == "++"
self.debug("}w", str)
self.ser.write(str + "\r")
def rd_eoi(self):
self.cmd("++read eoi")
x = self.ser.readline()
self.debug("<eoi<", x)
return (x)
def rd_chr(self, chr):
self.cmd("++read %d" % chr)
x = self.ser.readline()
self.debug("<%d<" % chr, x)
return (x)
def rd_bin(self, nbr, eoi = True):
if eoi:
self.cmd("++read eoi")
else:
self.cmd("++read")
x = self.ser.read(nbr)
x = bytearray(x)
self.debug("<%d/%d<" % (nbr, len(x)), x)
return (x)
def wr(self, str):
assert str[0:2] != "++"
self.debug(">", str)
self.ser.write(str + "\r")
def set(self, settings):
for i in hwset:
if i not in settings:
continue
if str(settings[i]) == self.curset[i]:
continue
self.cmd("++" + i + " %d" % settings[i])
self.curset[i] = "%d" % settings[i]
if "read_tmo_ms" in settings:
to = settings["read_tmo_ms"]
self.ser.timeout = (to + 500) * 1e-3
def spoll(self):
self.cmd("++spoll")
while True:
a = self.ser.readline()
self.debug("<sp<", a)
if a.strip().isdigit():
break
return(int(a))
def trigger(self):
self.cmd("++trg")
def clear(self):
self.cmd("++clr")
class gpib_dev(pylt.pylt):
def __init__(self, name, adr):
if not name in pusb:
x = prologix_usb(name)
self.pusb = pusb[name]
self.debug_fd = self.pusb.debug_fd
pylt.pylt.__init__(self)
self.setting = dict()
def_set(self.setting)
self.setting["addr"] = adr
def wr(self, str):
self.pusb.set(self.setting)
self.pusb.wr(str)
def rd_eoi(self, tmo=None, fail=True):
self.pusb.set(self.setting)
x = self.pusb.rd_eoi()
if self.setting["autocr"]:
x = x.strip("\r\n")
return (x)
def rd_chr(self, chr=10, tmo=None, fail=True):
self.pusb.set(self.setting)
x = self.pusb.rd_chr(chr)
if self.setting["autocr"]:
x = x.strip("\r\n")
return (x)
def rd_bin(self, cnt=1, tmo=None, fail=True):
self.pusb.set(self.setting)
x = self.pusb.rd_bin(cnt)
return (x)
def rd(self, tmo=None, fail=True):
m = self.setting["rd_mode"]
if m == "eoi":
return self.rd_eoi()
else:
return self.rd_chr(m)
def attr(self, name, val):
self.setting[name] = val
def spoll(self):
self.pusb.set(self.setting)
return(self.pusb.spoll())
def trigger(self):
self.pusb.set(self.setting)
return(self.pusb.trigger())
def clear(self):
self.pusb.set(self.setting)
self.pusb.clear()
| [
"pylt.pylt.__init__",
"serial.Serial",
"time.time"
] | [((680, 730), 'serial.Serial', 'serial.Serial', (["('/dev/' + name)", '(115200)'], {'timeout': '(0.5)'}), "('/dev/' + name, 115200, timeout=0.5)\n", (693, 730), False, 'import serial\n'), ((2825, 2849), 'pylt.pylt.__init__', 'pylt.pylt.__init__', (['self'], {}), '(self)\n', (2843, 2849), False, 'import pylt\n'), ((918, 929), 'time.time', 'time.time', ([], {}), '()\n', (927, 929), False, 'import time\n')] |
import time
import sys
import zmq
import pickle
from plico.utils.decorator import cacheResult, override, returnsNone
from plico.utils.logger import Logger
from plico.utils.barrier import Barrier, FunctionPredicate, BarrierTimeout
from plico.utils.constants import Constants
from plico.rpc.abstract_remote_procedure_call import \
AbstractRemoteProcedureCall
if sys.version_info[0] >= 3:
pickle_options = {'encoding': 'latin1'}
else:
pickle_options = {}
class ZmqRpcTimeoutError(Exception):
pass
class ZmqRemoteProcedureCall(AbstractRemoteProcedureCall):
def __init__(self, timeModule=time):
self._context = zmq.Context()
self._logger = Logger.of("ZmqRPC")
self._timeMod = timeModule
@override
@returnsNone
def publishPickable(self, socket, pickableObject):
resPickled = pickle.dumps(pickableObject, Constants.PICKLE_PROTOCOL)
self._logger.debug("sending %s (pickle size: %d)" %
(str(pickableObject), len(resPickled)))
socket.send(resPickled, zmq.NOBLOCK)
@override
def receivePickable(self, socket, timeoutInSec=10):
toBeReturned = self.receiveWithTimeout(socket, timeoutInSec)
retObj = pickle.loads(toBeReturned, **pickle_options)
if isinstance(retObj, Exception):
raise retObj
else:
return retObj
@override
@returnsNone
def sendCameraFrame(self, socket, frame):
self.publishPickable(socket, frame)
@override
def recvCameraFrame(self, socket, timeoutInSec=10):
return self.receivePickable(socket, timeoutInSec)
def publisherSocket(self,
port,
connect=False,
host='*',
hwm=1):
'''
Create a PUB-style socket for data publishers.
If <connect> is true, connects to a XPUB/XSUB forwarding device.
'''
socket = self._context.socket(zmq.PUB)
socket.setsockopt(zmq.SNDHWM, hwm)
socket.setsockopt(zmq.LINGER, 0)
try:
if connect:
socket.connect(self.tcpAddress(host, port))
else:
socket.bind(self.tcpAddress(host, port))
except Exception as e:
newMsg = str("%s %s:%d" % (str(e), host, port))
raise (type(e))(newMsg)
return socket
def tcpAddress(self, host, port):
return "tcp://%s:%d" % (host, port)
def subscriberSocket(self, host, port, filt=b'', conflate=False):
'''
Create a SUB-style socket for data receivers
'''
socket = self._context.socket(zmq.SUB)
if conflate:
socket.setsockopt(zmq.CONFLATE, 1)
socket.connect(self.tcpAddress(host, port))
socket.setsockopt(zmq.SUBSCRIBE, filt)
return socket
def xpubsubSockets(self, hostSub, portSub, hostPub, portPub):
'''
Creates frontend and backend for a XPUB/XSUB forwarding device
'''
frontend_addr = self.tcpAddress(hostSub, portSub)
backend_addr = self.tcpAddress(hostPub, portPub)
frontendSocket = self._context.socket(zmq.SUB)
frontendSocket.bind(frontend_addr)
frontendSocket.setsockopt(zmq.SUBSCRIBE, b'')
backendSocket = self._context.socket(zmq.PUB)
backendSocket.bind(backend_addr)
return frontendSocket, backendSocket
@cacheResult
def replySocket(self, port, host='*'):
'''
Create a REP-style socket for servers
'''
try:
socket = self._context.socket(zmq.REP)
socket.bind(self.tcpAddress(host, port))
except Exception as e:
newMsg = str("%s %s:%d" % (str(e), host, port))
raise (type(e))(newMsg)
return socket
@cacheResult
def requestSocket(self, host, port):
'''
Create a REQ-style socket for clients
'''
socket = self._context.socket(zmq.REQ)
socket.connect(self.tcpAddress(host, port))
return socket
def _discardPendingAnswers(self, socket):
try:
buf = socket.recv(zmq.NOBLOCK)
self._logger.debug("got pending buffer %d bytes (%s)" %
(len(buf), buf))
except zmq.ZMQError:
return
except Exception as e:
self._logger.debug("got Exception %s" % str(e))
def _hasSendMultiPartSucceded(self, socket, multiPartMsg):
try:
self._discardPendingAnswers(socket)
socket.send_multipart(multiPartMsg, zmq.NOBLOCK)
return True
except zmq.ZMQError as e:
self._logger.debug("trapped ZMQError on cmd %s: %s" %
(multiPartMsg[0].decode(), str(e)))
return False
def _sendMultiPartWithBarrierTimeout(self, socket,
multiPartMsg, timeoutSec):
try:
Barrier(timeoutSec, 0.1, self._timeMod).waitFor(
FunctionPredicate.create(self._hasSendMultiPartSucceded,
socket, multiPartMsg))
except BarrierTimeout as e:
raise ZmqRpcTimeoutError(str(e))
def sendRequest(self, socket, cmd, args=(), timeout=10):
'''
Perform client request/reply
Request is a ZMQ multipart message:
- command string
- pickled argument list
Reply is a pickled object
'''
self._logger.debug("sending request %s %s" % (cmd, args))
t0 = time.time()
self._sendMultiPartWithBarrierTimeout(
socket,
[cmd.encode(),
pickle.dumps(args, Constants.PICKLE_PROTOCOL)],
timeout)
toBeReturned = self.receiveWithTimeout(socket, timeout)
retObj = pickle.loads(toBeReturned, **pickle_options)
self._logger.debug("%s received %s in %.3fs" % (
cmd, str(retObj), time.time() - t0))
if isinstance(retObj, Exception):
raise retObj
else:
return retObj
def receiveWithTimeout(self, socket, timeoutInSeconds=1):
poller = zmq.Poller()
poller.register(socket, zmq.POLLIN)
msgs = dict(poller.poll(timeoutInSeconds * 1000))
if socket in msgs and msgs[socket] == zmq.POLLIN:
return socket.recv(zmq.NOBLOCK)
else:
self._logger.debug("raising ZmqRpcTimeoutError")
raise ZmqRpcTimeoutError()
@override
def handleRequest(self, obj, socket, multi=False):
'''
Handle one or more requests on a REP socket, with the format
sent by sendRequest()
'''
while 1:
try:
msg = socket.recv_multipart(zmq.NOBLOCK)
method = msg[0].decode()
try:
args = pickle.loads(msg[1], **pickle_options)
except ValueError as err:
self._logger.notice('Request %s failed. Caught %s %s' %
(method, type(err), str(err)))
self._sendAnswer(socket, err)
self._logger.debug("received request %s %s" %
(method, str(args)))
try:
res = getattr(obj, method).__call__(*args)
except Exception as e:
self._logger.notice('Request %s %s failed. Caught %s %s' %
(method, str(args), type(e), str(e)))
res = e
self._sendAnswer(socket, res)
except zmq.ZMQError:
return
except Exception as e:
self._logger.error("Unknown error %s" % str(e))
if not multi:
break
def _sendAnswer(self, socket, answer):
resPickled = pickle.dumps(answer, Constants.PICKLE_PROTOCOL)
self._logger.debug("answering %s (pickle size: %d)" %
(str(answer), len(resPickled)))
socket.send(resPickled, zmq.NOBLOCK)
| [
"plico.utils.logger.Logger.of",
"plico.utils.barrier.FunctionPredicate.create",
"pickle.dumps",
"zmq.Poller",
"time.time",
"pickle.loads",
"plico.utils.barrier.Barrier",
"zmq.Context"
] | [((640, 653), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (651, 653), False, 'import zmq\n'), ((677, 696), 'plico.utils.logger.Logger.of', 'Logger.of', (['"""ZmqRPC"""'], {}), "('ZmqRPC')\n", (686, 696), False, 'from plico.utils.logger import Logger\n'), ((840, 895), 'pickle.dumps', 'pickle.dumps', (['pickableObject', 'Constants.PICKLE_PROTOCOL'], {}), '(pickableObject, Constants.PICKLE_PROTOCOL)\n', (852, 895), False, 'import pickle\n'), ((1225, 1269), 'pickle.loads', 'pickle.loads', (['toBeReturned'], {}), '(toBeReturned, **pickle_options)\n', (1237, 1269), False, 'import pickle\n'), ((5601, 5612), 'time.time', 'time.time', ([], {}), '()\n', (5610, 5612), False, 'import time\n'), ((5870, 5914), 'pickle.loads', 'pickle.loads', (['toBeReturned'], {}), '(toBeReturned, **pickle_options)\n', (5882, 5914), False, 'import pickle\n'), ((6208, 6220), 'zmq.Poller', 'zmq.Poller', ([], {}), '()\n', (6218, 6220), False, 'import zmq\n'), ((7930, 7977), 'pickle.dumps', 'pickle.dumps', (['answer', 'Constants.PICKLE_PROTOCOL'], {}), '(answer, Constants.PICKLE_PROTOCOL)\n', (7942, 7977), False, 'import pickle\n'), ((5060, 5138), 'plico.utils.barrier.FunctionPredicate.create', 'FunctionPredicate.create', (['self._hasSendMultiPartSucceded', 'socket', 'multiPartMsg'], {}), '(self._hasSendMultiPartSucceded, socket, multiPartMsg)\n', (5084, 5138), False, 'from plico.utils.barrier import Barrier, FunctionPredicate, BarrierTimeout\n'), ((5720, 5765), 'pickle.dumps', 'pickle.dumps', (['args', 'Constants.PICKLE_PROTOCOL'], {}), '(args, Constants.PICKLE_PROTOCOL)\n', (5732, 5765), False, 'import pickle\n'), ((4995, 5034), 'plico.utils.barrier.Barrier', 'Barrier', (['timeoutSec', '(0.1)', 'self._timeMod'], {}), '(timeoutSec, 0.1, self._timeMod)\n', (5002, 5034), False, 'from plico.utils.barrier import Barrier, FunctionPredicate, BarrierTimeout\n'), ((6912, 6950), 'pickle.loads', 'pickle.loads', (['msg[1]'], {}), '(msg[1], **pickle_options)\n', (6924, 6950), False, 'import pickle\n'), 
((6002, 6013), 'time.time', 'time.time', ([], {}), '()\n', (6011, 6013), False, 'import time\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from typing import (
Any,
Mapping,
MutableMapping,
MutableSequence,
Sequence,
Text,
)
import click
import pathlib
from numbers import Integral, Real
from frozendict import FrozenOrderedDict
def load(f) -> MutableMapping:
p = pathlib.PurePath(f.name)
if p.suffix.casefold() in ['.yml', '.yaml']:
return load_yaml(f)
if p.suffix.casefold() == '.json':
return load_json(f)
if p.suffix.casefold() == '.properties':
return load_props(f)
raise TypeError("unknown file type")
def load_yaml(f) -> MutableMapping:
try:
from ruamel.yaml import YAML
except ImportError:
import yaml
kwargs = {
'Loader': yaml.SafeLoader,
}
else:
yaml = YAML()
kwargs = {}
return yaml.load(f, **kwargs)
def load_json(f) -> MutableMapping:
import json
return json.load(f)
def load_props(f) -> MutableMapping:
import jprops
return jprops.load_properties(f)
def save(f, d: Mapping) -> None:
p = pathlib.PurePath(f.name)
if p.suffix.casefold() in ['.yml', '.yaml']:
return save_yaml(f, d)
if p.suffix.casefold() == '.json':
return save_json(f, d)
if p.suffix.casefold() == '.properties':
return save_props(f, d)
raise TypeError("unknown file type")
def save_yaml(f, d: Mapping) -> None:
try:
from ruamel.yaml import YAML
except ImportError:
import yaml
class _IndentDumper(yaml.SafeDumper):
def increase_indent(self, flow=False, indentless=False):
return super().increase_indent(flow, False)
kwargs = {
'Dumper': _IndentDumper,
'encoding': 'utf-8',
}
else:
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
kwargs = {}
yaml.dump(d, f, **kwargs)
def save_json(f, d: Mapping) -> None:
import json
f.write(json.dumps(d, ensure_ascii=False, indent=2).encode('utf-8'))
def save_props(f, d: Mapping) -> None:
import jprops
jprops.store_properties(f, d)
def merge(value1: Any, value2: Any) -> Any:
if value2 is None:
return value1
if value1 is None:
return value2
if isinstance(value1, Mapping):
if not isinstance(value1, MutableMapping):
raise TypeError("mapping must be mutable")
if not isinstance(value2, Mapping):
raise TypeError("can only merge another mapping into mapping")
merge_dict(value1, value2)
return value1
if isinstance(value1, Sequence) and not isinstance(value1, Text):
if not isinstance(value1, MutableSequence):
raise TypeError("sequence must be mutable")
if not isinstance(value2, Sequence):
raise TypeError("can only merge another sequence into sequence")
merge_list(value1, value2)
return value1
return merge_simple(value1, value2)
def merge_dict(d1: MutableMapping, d2: Mapping) -> None:
for key, value in d2.items():
if key not in d1:
d1[key] = value
continue
d1[key] = merge(d1[key], value)
def deep_freeze(obj: Any) -> Any:
if obj is None:
return obj
if isinstance(obj, bool):
return bool(obj)
if isinstance(obj, Integral):
return int(obj)
if isinstance(obj, Real):
return float(obj)
if isinstance(obj, Text):
return str(obj)
if isinstance(obj, Mapping):
if not isinstance(obj, MutableMapping):
return FrozenOrderedDict(obj)
return FrozenOrderedDict((deep_freeze(key), deep_freeze(value))
for key, value in obj.items())
if isinstance(obj, Sequence):
if not isinstance(obj, MutableSequence):
return tuple(obj)
return tuple(deep_freeze(item) for item in obj)
raise TypeError("unsupported type")
def merge_list(l1: MutableSequence, l2: Sequence) -> None:
member = set(deep_freeze(item) for item in l1)
for value in l2:
frozen = deep_freeze(value)
if frozen in member:
continue
l1.append(value)
member.add(frozen)
def merge_simple(value1: Any, value2: Any) -> Any:
if isinstance(value1, bool) and isinstance(value2, bool):
return value2
if isinstance(value1, Integral) and isinstance(value2, Integral):
return value2
if isinstance(value1, Real) and isinstance(value2, Real):
return value2
if isinstance(value1, Text) and isinstance(value2, Text):
return value2
raise TypeError("unsupported type or type combination")
@click.command()
@click.argument('destination', type=click.Path(dir_okay=False, writable=True))
@click.argument('merge-files', nargs=-1, type=click.File(mode='rb', lazy=True))
def main(destination, merge_files):
try:
with open(destination, mode='rb') as f:
d = load(f)
except FileNotFoundError:
d = None
for merge_file in merge_files:
m = load(merge_file)
d = merge(d, m)
with open(destination, mode='wb') as f:
save(f, d)
if __name__ == '__main__':
main()
| [
"yaml.dump",
"jprops.store_properties",
"json.dumps",
"click.File",
"yaml.load",
"ruamel.yaml.YAML",
"pathlib.PurePath",
"yaml.indent",
"click.Path",
"frozendict.FrozenOrderedDict",
"json.load",
"click.command",
"jprops.load_properties"
] | [((4765, 4780), 'click.command', 'click.command', ([], {}), '()\n', (4778, 4780), False, 'import click\n'), ((307, 331), 'pathlib.PurePath', 'pathlib.PurePath', (['f.name'], {}), '(f.name)\n', (323, 331), False, 'import pathlib\n'), ((862, 884), 'yaml.load', 'yaml.load', (['f'], {}), '(f, **kwargs)\n', (871, 884), False, 'import yaml\n'), ((951, 963), 'json.load', 'json.load', (['f'], {}), '(f)\n', (960, 963), False, 'import json\n'), ((1033, 1058), 'jprops.load_properties', 'jprops.load_properties', (['f'], {}), '(f)\n', (1055, 1058), False, 'import jprops\n'), ((1103, 1127), 'pathlib.PurePath', 'pathlib.PurePath', (['f.name'], {}), '(f.name)\n', (1119, 1127), False, 'import pathlib\n'), ((1924, 1949), 'yaml.dump', 'yaml.dump', (['d', 'f'], {}), '(d, f, **kwargs)\n', (1933, 1949), False, 'import yaml\n'), ((2144, 2173), 'jprops.store_properties', 'jprops.store_properties', (['f', 'd'], {}), '(f, d)\n', (2167, 2173), False, 'import jprops\n'), ((823, 829), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (827, 829), False, 'from ruamel.yaml import YAML\n'), ((1839, 1845), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (1843, 1845), False, 'from ruamel.yaml import YAML\n'), ((1854, 1898), 'yaml.indent', 'yaml.indent', ([], {'mapping': '(2)', 'sequence': '(4)', 'offset': '(2)'}), '(mapping=2, sequence=4, offset=2)\n', (1865, 1898), False, 'import yaml\n'), ((4817, 4858), 'click.Path', 'click.Path', ([], {'dir_okay': '(False)', 'writable': '(True)'}), '(dir_okay=False, writable=True)\n', (4827, 4858), False, 'import click\n'), ((4906, 4938), 'click.File', 'click.File', ([], {'mode': '"""rb"""', 'lazy': '(True)'}), "(mode='rb', lazy=True)\n", (4916, 4938), False, 'import click\n'), ((3652, 3674), 'frozendict.FrozenOrderedDict', 'FrozenOrderedDict', (['obj'], {}), '(obj)\n', (3669, 3674), False, 'from frozendict import FrozenOrderedDict\n'), ((2019, 2062), 'json.dumps', 'json.dumps', (['d'], {'ensure_ascii': '(False)', 'indent': '(2)'}), '(d, ensure_ascii=False, 
indent=2)\n', (2029, 2062), False, 'import json\n')] |
from cs229_project_scenario import ObstacleAvoidanceScenario
import numpy as np
import gym
import time
from matplotlib import pyplot as plt
# Demo driver: run 10 episodes of the obstacle-avoidance scenario with a
# fixed zero control input, accumulate the per-episode reward, and save a
# reward plot.
if __name__ == "__main__":
    oas = ObstacleAvoidanceScenario()
    #oas = gym.make()
    dt = 0.1  # wall-clock pause between rendered steps, in seconds
    u = (0., 0.)  # constant control; per the example comment below this is (angular velocity, linear velocity)
    total_reward = 0.
    plot_reward = []  # one cumulative reward per episode
    for k in range(10):
        total_reward = 0.
        oas.reset()
        while True:
            state, reward, if_reset, non_defined = oas.step(u) # move one time step and get the tuple of data
            oas.render() # Test case
            # while time.time() - t < env.dt:
            #     pass
            # We should apply the reinforcement method here!
            # For example, policy = ReinforcementLearning() # (angular velocity, linear velocity)
            # oas.ego.set_control(policy)
            total_reward += oas._get_reward()
            if if_reset:
                # Episode finished: close the scenario and record its reward.
                oas.close()
                print("The total reward for this episode is: ", total_reward)
                plot_reward.append(total_reward)
                break
            time.sleep(dt)
    plt.plot(plot_reward)
    plt.savefig('Reward_demo')
"cs229_project_scenario.ObstacleAvoidanceScenario",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.savefig",
"time.sleep"
] | [((185, 212), 'cs229_project_scenario.ObstacleAvoidanceScenario', 'ObstacleAvoidanceScenario', ([], {}), '()\n', (210, 212), False, 'from cs229_project_scenario import ObstacleAvoidanceScenario\n'), ((1131, 1152), 'matplotlib.pyplot.plot', 'plt.plot', (['plot_reward'], {}), '(plot_reward)\n', (1139, 1152), True, 'from matplotlib import pyplot as plt\n'), ((1158, 1184), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Reward_demo"""'], {}), "('Reward_demo')\n", (1169, 1184), True, 'from matplotlib import pyplot as plt\n'), ((1111, 1125), 'time.sleep', 'time.sleep', (['dt'], {}), '(dt)\n', (1121, 1125), False, 'import time\n')] |
"""
Implement Fuzzy set relations like - Max-Min and Max-Product
"""
import sys
from textwrap import dedent
from typing import List
from .fuzzy_sets import max_min, max_product
def get_output(x: List[List[float]]) -> str:
    """Render a relation matrix as a bracketed string, one row per line."""
    rows = (str(row) for row in x)
    body = "\n ".join(rows)
    return "[" + body + "]"
def main():
    """Interactively read two fuzzy relations and compose them.

    Prompts for the operation (Max-Min or Max-Product), the m/n dimensions
    of both relations, and the relation entries (rows separated by commas,
    values by whitespace).  Exits with status 1 on inconsistent dimensions
    or an unknown operation choice.
    """
    choices = ["Max-Min", "Max-Product"]
    choice = int(
        input(
            dedent(
                """
                1. Max-Min
                2. Max-Product
                Enter operation: """
            )
        )
    )
    ma, na = [
        int(i) for i in input("Enter m n dimensions for relation1: ").split()
    ]
    mb, nb = [
        int(i) for i in input("Enter m n dimensions for relation2: ").split()
    ]
    a = []
    for i in input("\nEnter relation1 [[e11...e1n],[em1...emn]]: ").split(","):
        a.append([float(x) for x in i.split()])
    b = []
    for i in input("Enter relation2 [[e11...e1n],[em1...emn]]: ").split(","):
        b.append([float(x) for x in i.split()])
    # Composition requires relation1's columns to equal relation2's rows,
    # and the entered rows must match the declared dimensions.
    if (
        (na != mb)
        or (ma != len(a))
        or (mb != len(b))
        or any(len(x) != na for x in a)
        or any(len(x) != nb for x in b)
    ):
        print("Invalid dimensions!")
        sys.exit(1)
    if choice == 1:
        result = max_min(a, b)
    elif choice == 2:
        result = max_product(a, b)
    else:
        print("Invalid choice!")
        sys.exit(1)
    print(f"\nResult: {choices[choice-1]}")
    print(f"{get_output(result)}")
print(f"\nResult: {choices[choice-1]}")
print(f"{get_output(result)}")
if __name__ == "__main__":
main()
| [
"textwrap.dedent",
"sys.exit"
] | [((1230, 1241), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1238, 1241), False, 'import sys\n'), ((387, 503), 'textwrap.dedent', 'dedent', (['"""\n 1. Max-Min\n 2. Max-Product\n Enter operation: """'], {}), '(\n """\n 1. Max-Min\n 2. Max-Product\n Enter operation: """\n )\n', (393, 503), False, 'from textwrap import dedent\n'), ((1402, 1413), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1410, 1413), False, 'import sys\n')] |
#!/usr/bin/env python3
import numpy as np
import os
import sys
import argparse
import glob
import time
import onnx
import onnxruntime
import cv2
import caffe
from cvi_toolkit.model import OnnxModel
from cvi_toolkit.utils.yolov3_util import preprocess, postprocess_v2, postprocess_v3, postprocess_v4_tiny, draw
def check_files(args):
    """Validate that the CLI-supplied model and image files exist.

    Prints which path is missing and exits the process with status 1, so
    the caller never attempts inference on absent files.

    Args:
        args: parsed CLI namespace with `model_def` and `input_file` paths.
    """
    for path in (args.model_def, args.input_file):
        if not os.path.isfile(path):
            # Use %-interpolation: the original passed the path as a second
            # print() argument, so the literal "%s" was printed unsubstituted.
            print("cannot find the file %s" % path)
            sys.exit(1)
python/cvi_toolkit/inference/onnx/run_onnx_detector_yolo.py
def parse_args():
    """Build the CLI parser, parse sys.argv, and validate the file paths.

    Returns:
        argparse.Namespace with the detector configuration.  Exits via
        check_files() if the model or image file does not exist.
    """
    parser = argparse.ArgumentParser(description='Eval YOLO networks.')
    parser.add_argument('--model_def', type=str, default='',
                        help="Model definition file")
    parser.add_argument("--net_input_dims", default='416,416',
                        help="'height,width' dimensions of net input tensors.")
    parser.add_argument("--input_file", type=str, default='',
                        help="Input image for testing")
    parser.add_argument("--label_file", type=str, default='',
                        help="coco lable file in txt format")
    parser.add_argument("--draw_image", type=str, default='',
                        help="Draw results on image")
    parser.add_argument("--dump_blobs",
                        help="Dump all blobs into a file in npz format")
    parser.add_argument("--obj_threshold", type=float, default=0.3,
                        help="Object confidence threshold")
    parser.add_argument("--nms_threshold", type=float, default=0.5,
                        help="NMS threshold")
    parser.add_argument("--batch_size", type=int, default=1,
                        help="Set batch size")
    parser.add_argument("--yolov3", type=int, default=1,
                        help="yolov2 or yolov3")
    parser.add_argument("--yolov4-tiny", type=int, default=0,
                        help="set to yolov4")
    args = parser.parse_args()
    check_files(args)
    return args
def main(argv):
    """Run YOLO inference with ONNX Runtime on a single image.

    Preprocesses the input image, replicates it to `batch_size`, runs the
    ONNX model, post-processes the detections (YOLOv3 or YOLOv4-tiny head),
    and optionally draws boxes and/or dumps every intermediate tensor to an
    npz file.
    """
    args = parse_args()
    # Make Detector
    net_input_dims = [int(s) for s in args.net_input_dims.split(',')]
    obj_threshold = float(args.obj_threshold)
    nms_threshold = float(args.nms_threshold)
    yolov3 = True if args.yolov3 else False
    yolov4_tiny = True if args.yolov4_tiny else False
    print("net_input_dims", net_input_dims)
    print("obj_threshold", obj_threshold)
    print("nms_threshold", nms_threshold)
    print("yolov3", yolov3)
    print("yolov4_tiny", yolov4_tiny)
    image = cv2.imread(args.input_file)
    image_x = preprocess(image, net_input_dims)
    image_x = np.expand_dims(image_x, axis=0)
    inputs = image_x
    # Replicate the single preprocessed image to fill the requested batch.
    for i in range(1, args.batch_size):
        inputs = np.append(inputs, image_x, axis=0)
    # NOTE(review): input_shape is computed but never used below.
    input_shape = np.array([net_input_dims[0], net_input_dims[1]], dtype=np.float32).reshape(1, 2)
    ort_session = onnxruntime.InferenceSession(args.model_def)
    ort_inputs = {'input': inputs}
    ort_outs = ort_session.run(None, ort_inputs)
    out_feat = {}
    if yolov4_tiny:
        batched_predictions = postprocess_v4_tiny(ort_outs, image.shape, net_input_dims,
                                obj_threshold, nms_threshold, args.batch_size)
    else:
        # Map the three output feature maps to their YOLOv3 layer names.
        out_feat['layer82-conv'] = ort_outs[0]
        out_feat['layer94-conv'] = ort_outs[1]
        out_feat['layer106-conv'] = ort_outs[2]
        batched_predictions = postprocess_v3(out_feat, image.shape, net_input_dims,
                                obj_threshold, nms_threshold, False, args.batch_size)
    print(batched_predictions[0])
    if args.draw_image:
        image = draw(image, batched_predictions[0], args.label_file)
        cv2.imwrite(args.draw_image, image)
    if args.dump_blobs:
        # Second pass to dump all intermediate outputs; see
        # https://github.com/microsoft/onnxruntime/issues/1455
        output_keys = []
        for i in range(len(ort_outs)):
            output_keys.append('output_{}'.format(i))
        model = onnx.load(args.model_def)
        # tested against commit c3cea486d of https://github.com/microsoft/onnxruntime.git
        for x in model.graph.node:
            _intermediate_tensor_name = list(x.output)
            intermediate_tensor_name = ",".join(_intermediate_tensor_name)
            intermediate_layer_value_info = onnx.helper.ValueInfoProto()
            intermediate_layer_value_info.name = intermediate_tensor_name
            # Promote every node output to a graph output so ORT returns it.
            model.graph.output.append(intermediate_layer_value_info)
            output_keys.append(intermediate_layer_value_info.name + '_' + x.op_type)
        dump_all_onnx = "dump_all.onnx"
        if not os.path.exists(dump_all_onnx):
            onnx.save(model, dump_all_onnx)
        else:
            print("{} is exitsed!".format(dump_all_onnx))
        print("dump multi-output onnx all tensor at ", dump_all_onnx)
        # dump every inferenced tensor
        ort_session = onnxruntime.InferenceSession(dump_all_onnx)
        ort_outs = ort_session.run(None, ort_inputs)
        tensor_all_dict = dict(zip(output_keys, map(np.ndarray.flatten, ort_outs)))
        tensor_all_dict['input'] = inputs
        np.savez(args.dump_blobs, **tensor_all_dict)
        print("dump all tensor at ", args.dump_blobs)
if __name__ == '__main__':
main(sys.argv)
| [
"cv2.imwrite",
"cvi_toolkit.utils.yolov3_util.draw",
"numpy.savez",
"os.path.exists",
"onnx.save",
"argparse.ArgumentParser",
"onnxruntime.InferenceSession",
"os.path.isfile",
"cvi_toolkit.utils.yolov3_util.preprocess",
"numpy.append",
"numpy.array",
"onnx.helper.ValueInfoProto",
"onnx.load"... | [((669, 727), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Eval YOLO networks."""'}), "(description='Eval YOLO networks.')\n", (692, 727), False, 'import argparse\n'), ((2620, 2647), 'cv2.imread', 'cv2.imread', (['args.input_file'], {}), '(args.input_file)\n', (2630, 2647), False, 'import cv2\n'), ((2662, 2695), 'cvi_toolkit.utils.yolov3_util.preprocess', 'preprocess', (['image', 'net_input_dims'], {}), '(image, net_input_dims)\n', (2672, 2695), False, 'from cvi_toolkit.utils.yolov3_util import preprocess, postprocess_v2, postprocess_v3, postprocess_v4_tiny, draw\n'), ((2711, 2742), 'numpy.expand_dims', 'np.expand_dims', (['image_x'], {'axis': '(0)'}), '(image_x, axis=0)\n', (2725, 2742), True, 'import numpy as np\n'), ((2971, 3015), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['args.model_def'], {}), '(args.model_def)\n', (2999, 3015), False, 'import onnxruntime\n'), ((346, 376), 'os.path.isfile', 'os.path.isfile', (['args.model_def'], {}), '(args.model_def)\n', (360, 376), False, 'import os\n'), ((443, 454), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (451, 454), False, 'import sys\n'), ((467, 498), 'os.path.isfile', 'os.path.isfile', (['args.input_file'], {}), '(args.input_file)\n', (481, 498), False, 'import os\n'), ((566, 577), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (574, 577), False, 'import sys\n'), ((2819, 2853), 'numpy.append', 'np.append', (['inputs', 'image_x'], {'axis': '(0)'}), '(inputs, image_x, axis=0)\n', (2828, 2853), True, 'import numpy as np\n'), ((3169, 3278), 'cvi_toolkit.utils.yolov3_util.postprocess_v4_tiny', 'postprocess_v4_tiny', (['ort_outs', 'image.shape', 'net_input_dims', 'obj_threshold', 'nms_threshold', 'args.batch_size'], {}), '(ort_outs, image.shape, net_input_dims, obj_threshold,\n nms_threshold, args.batch_size)\n', (3188, 3278), False, 'from cvi_toolkit.utils.yolov3_util import preprocess, postprocess_v2, postprocess_v3, postprocess_v4_tiny, 
draw\n'), ((3489, 3600), 'cvi_toolkit.utils.yolov3_util.postprocess_v3', 'postprocess_v3', (['out_feat', 'image.shape', 'net_input_dims', 'obj_threshold', 'nms_threshold', '(False)', 'args.batch_size'], {}), '(out_feat, image.shape, net_input_dims, obj_threshold,\n nms_threshold, False, args.batch_size)\n', (3503, 3600), False, 'from cvi_toolkit.utils.yolov3_util import preprocess, postprocess_v2, postprocess_v3, postprocess_v4_tiny, draw\n'), ((3707, 3759), 'cvi_toolkit.utils.yolov3_util.draw', 'draw', (['image', 'batched_predictions[0]', 'args.label_file'], {}), '(image, batched_predictions[0], args.label_file)\n', (3711, 3759), False, 'from cvi_toolkit.utils.yolov3_util import preprocess, postprocess_v2, postprocess_v3, postprocess_v4_tiny, draw\n'), ((3768, 3803), 'cv2.imwrite', 'cv2.imwrite', (['args.draw_image', 'image'], {}), '(args.draw_image, image)\n', (3779, 3803), False, 'import cv2\n'), ((4079, 4104), 'onnx.load', 'onnx.load', (['args.model_def'], {}), '(args.model_def)\n', (4088, 4104), False, 'import onnx\n'), ((4987, 5030), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['dump_all_onnx'], {}), '(dump_all_onnx)\n', (5015, 5030), False, 'import onnxruntime\n'), ((5218, 5262), 'numpy.savez', 'np.savez', (['args.dump_blobs'], {}), '(args.dump_blobs, **tensor_all_dict)\n', (5226, 5262), True, 'import numpy as np\n'), ((2872, 2938), 'numpy.array', 'np.array', (['[net_input_dims[0], net_input_dims[1]]'], {'dtype': 'np.float32'}), '([net_input_dims[0], net_input_dims[1]], dtype=np.float32)\n', (2880, 2938), True, 'import numpy as np\n'), ((4397, 4425), 'onnx.helper.ValueInfoProto', 'onnx.helper.ValueInfoProto', ([], {}), '()\n', (4423, 4425), False, 'import onnx\n'), ((4710, 4739), 'os.path.exists', 'os.path.exists', (['dump_all_onnx'], {}), '(dump_all_onnx)\n', (4724, 4739), False, 'import os\n'), ((4753, 4784), 'onnx.save', 'onnx.save', (['model', 'dump_all_onnx'], {}), '(model, dump_all_onnx)\n', (4762, 4784), False, 'import onnx\n')] |
import pickle
import time
import numpy as np
from macrel import graphs
from macrel import vast11data as vast
INTERVAL_SEC = 900.0
N = len(vast.NODES)
SERVICES = {
1: "Mux",
17: "Quote",
21: "FTP",
22: "SSH",
23: "Telnet",
25: "SMTP",
53: "DNS",
80: "HTTP",
88: "Kerberos",
123: "NTP",
135: "DCE",
139: "NETBIOS",
255: "Reserved",
389: "LDAP",
443: "HTTPS",
445: "Microsoft-DS",
464: "kpasswd",
481: "ph",
}
tally_map = {port: graphs.ConnectionTally(N) for port in SERVICES.keys()}
other_tally = graphs.ConnectionTally(N)
def add_tally(event):
    """Record one firewall event in the per-port connection tallies.

    Events whose source or destination IP is not a known VAST node are
    ignored.  Ports without a dedicated service tally fall back to the
    shared `other_tally`.
    """
    src = vast.NODE_BY_IP.get(event.source_ip)
    dest = vast.NODE_BY_IP.get(event.dest_ip)
    if not src or not dest: return
    port = event.dest_port
    tally = tally_map.get(port, other_tally)
    tally.connect(src.id, dest.id, event.conn_built)
start_times = []
snapshots = {port: [] for port in SERVICES}
snapshots["other"] = []
snapshots["all"] = []
def take_snapshot():
    """Freeze the current tallies into sparse matrices for this interval.

    Appends the interval's start time and one sparse matrix per tracked
    port; "all" is the element-wise sum over the per-port tallies.
    NOTE(review): the "other" tally is snapshotted separately but is NOT
    added into "all" -- confirm that is intended.
    """
    start_times.append(start_time)
    all_totals = None
    for port, tally in tally_map.items():
        totals = tally.to_sparse_matrix()
        snapshots[port].append(totals)
        if all_totals is None:
            all_totals = totals.copy()
        else:
            all_totals += totals
    snapshots["other"].append(other_tally.to_sparse_matrix())
    snapshots["all"].append(all_totals)
# Parse all firewall events and tally connections in fixed 15-minute
# intervals, snapshotting the tallies at every interval boundary.
parser = vast.FWEventParser()
events = parser.parse_all_fw_events()
first_event = next(events)
start_time = first_event.time
tt = time.gmtime(start_time)
start_time -= (tt.tm_min * 60) # align to hour
end_time = start_time + INTERVAL_SEC
t = first_event.time
# Emit (empty) snapshots for intervals that elapse before the first event.
while t > end_time:
    take_snapshot()
    start_time = end_time
    end_time = start_time + INTERVAL_SEC
add_tally(first_event)
for event in events:
    t = event.time
    # Close out every interval that ended before this event's timestamp.
    while t > end_time:
        take_snapshot()
        start_time = end_time
        end_time = start_time + INTERVAL_SEC
    add_tally(event)
take_snapshot()  # final, possibly partial interval
data = dict(
    services = SERVICES,
    start_times = np.asarray(start_times),
    snapshots = snapshots,
)
pickle.dump(data, open("vast11-connections-by-port.pickle", "wb"))
| [
"macrel.vast11data.FWEventParser",
"numpy.asarray",
"macrel.vast11data.NODE_BY_IP.get",
"time.gmtime",
"macrel.graphs.ConnectionTally"
] | [((575, 600), 'macrel.graphs.ConnectionTally', 'graphs.ConnectionTally', (['N'], {}), '(N)\n', (597, 600), False, 'from macrel import graphs\n'), ((1344, 1364), 'macrel.vast11data.FWEventParser', 'vast.FWEventParser', ([], {}), '()\n', (1362, 1364), True, 'from macrel import vast11data as vast\n'), ((1466, 1489), 'time.gmtime', 'time.gmtime', (['start_time'], {}), '(start_time)\n', (1477, 1489), False, 'import time\n'), ((506, 531), 'macrel.graphs.ConnectionTally', 'graphs.ConnectionTally', (['N'], {}), '(N)\n', (528, 531), False, 'from macrel import graphs\n'), ((631, 667), 'macrel.vast11data.NODE_BY_IP.get', 'vast.NODE_BY_IP.get', (['event.source_ip'], {}), '(event.source_ip)\n', (650, 667), True, 'from macrel import vast11data as vast\n'), ((676, 710), 'macrel.vast11data.NODE_BY_IP.get', 'vast.NODE_BY_IP.get', (['event.dest_ip'], {}), '(event.dest_ip)\n', (695, 710), True, 'from macrel import vast11data as vast\n'), ((1944, 1967), 'numpy.asarray', 'np.asarray', (['start_times'], {}), '(start_times)\n', (1954, 1967), True, 'import numpy as np\n')] |
# Copyright (c) 2017-2021, <NAME>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Tests scrypt implementations using hypothesis"""
import sys
import unittest
from hypothesis import given, settings
from hypothesis.strategies import (
binary, integers, none, one_of, sampled_from, text)
from .common import (
SCRYPT_MCF_PREFIX_7, SCRYPT_MCF_PREFIX_s1,
SCRYPT_MCF_PREFIX_DEFAULT, SCRYPT_MCF_PREFIX_ANY)
# Strategies for producing parameters
def valid_pass():
    """Any bytes value is a valid scrypt password."""
    return binary()
def valid_mcf_pass():
    """MCF passwords: bytes or text without NUL characters."""
    return one_of(binary().filter(lambda b: b'\0' not in b),
                  text().filter(lambda b: u'\0' not in b))
def valid_salt():
    """Any bytes value is a valid raw salt."""
    return binary()
def valid_mcf_salt():
    """MCF salts: 1-16 bytes, or None to let the library choose."""
    return one_of(binary(min_size=1, max_size=16), none())
def valid_olen():
    """Output lengths: 1 byte up to 1 MiB."""
    return integers(min_value=1, max_value=2**20)
def mcf_prefix():
    """One of the supported MCF format prefixes."""
    return sampled_from([
        SCRYPT_MCF_PREFIX_7,
        SCRYPT_MCF_PREFIX_s1,
        SCRYPT_MCF_PREFIX_DEFAULT,
        SCRYPT_MCF_PREFIX_ANY,
    ])
class ScryptTests(unittest.TestCase):
    """Tests an scrypt implementation from module"""
    # Hooks that load_scrypt_suite / subclasses may override to prepare
    # and restore module-level state around each test.
    set_up_lambda = lambda self:None
    tear_down_lambda = lambda self:None
    # `module` is the implementation under test; `ref` is a previously
    # verified implementation used for cross-checking, if any.
    module = None
    ref = None
    def setUp(self):
        if not self.module:
            self.skipTest('module not tested')
        self.set_up_lambda()
    def tearDown(self):
        self.tear_down_lambda()
    def invalidPass(self, pw):
        # Derive a password that differs from pw, whether bytes or text.
        try:
            return pw + b'_'
        except TypeError:
            return pw + u'_'
    @given(valid_pass(), valid_salt(), valid_olen())
    @settings(deadline=500)
    def test_scrypt(self, pw, salt, olen):
        # Raw scrypt: output length honored, agrees with the reference,
        # and changing password or salt changes the hash.
        h1 = self.module.scrypt(pw, salt, 2, 2, 2, olen)
        self.assertEqual(olen, len(h1))
        if (self.ref):
            h2 = self.ref.scrypt(pw, salt, 2, 2, 2, olen)
            self.assertEqual(h1, h2)
        if olen >= 16: # short hashes can collide
            h2 = self.module.scrypt(self.invalidPass(pw), salt, 2, 2, 2, olen)
            h3 = self.module.scrypt(pw, salt + b'_', 2, 2, 2, olen)
            self.assertNotEqual(h1, h2)
            self.assertNotEqual(h1, h3)
    @given(valid_mcf_pass(), valid_mcf_salt(), mcf_prefix())
    @settings(deadline=500)
    def test_mcf_scrypt(self, pw, salt, prefix):
        # MCF round trip: the produced string verifies with the right
        # password (also under the reference) and rejects a wrong one.
        m = self.module.scrypt_mcf(pw, salt, 2, 2, 2, prefix)
        self.assertTrue(self.module.scrypt_mcf_check(m, pw))
        self.assertFalse(self.module.scrypt_mcf_check(m, self.invalidPass(pw)))
        if (self.ref):
            self.assertTrue(self.ref.scrypt_mcf_check(m, pw))
            self.assertFalse(self.ref.scrypt_mcf_check(m, self.invalidPass(pw)))
            if salt and prefix != SCRYPT_MCF_PREFIX_ANY:
                m2 = self.ref.scrypt_mcf(pw, salt, 2, 2, 2, prefix)
                self.assertEqual(m, m2)
def load_scrypt_suite(name, module, ref=None):
    """Create a ScryptTests subclass bound to `module`/`ref` and load its tests."""
    attrs = {'module': module, 'ref': ref}
    case_cls = type(name, (ScryptTests,), attrs)
    return unittest.defaultTestLoader.loadTestsFromTestCase(case_cls)
# Build a suite covering every available backend; each backend is
# cross-checked against the first implementation that imported
# successfully (tracked in `ref`).
if __name__ == "__main__":
    suite = unittest.TestSuite()
    ref = None
    try:
        from . import hashlibscrypt
        suite.addTest(load_scrypt_suite('hashlibscryptTests', hashlibscrypt, ref))
        ref = hashlibscrypt
    except ImportError:
        suite.addTest(load_scrypt_suite('hashlibscryptTests', None, ref))
    try:
        from . import pylibscrypt
        suite.addTest(load_scrypt_suite('pylibscryptTests', pylibscrypt, ref))
        ref = ref or pylibscrypt
    except ImportError:
        suite.addTest(load_scrypt_suite('pylibscryptTests', None, ref))
    try:
        from . import pyscrypt
        suite.addTest(load_scrypt_suite('pyscryptTests', pyscrypt, ref))
        ref = ref or pyscrypt
    except ImportError:
        suite.addTest(load_scrypt_suite('pyscryptTests', None, ref))
    try:
        from . import pylibsodium
        suite.addTest(load_scrypt_suite('pylibsodiumTests',
                                         pylibsodium, ref))
        from . import pylibscrypt
        loader = unittest.defaultTestLoader
        # Also exercise pylibsodium's pure-Python fallback path by
        # temporarily disabling its low-level scrypt binding.
        def set_up_ll(self):
            if not self.module._scrypt_ll:
                self.skipTest('no ll')
            self.tmp_ll = self.module._scrypt_ll
            self.tmp_scr = self.module.scr_mod
            self.module._scrypt_ll = None
            self.module.scr_mod = pylibscrypt
        def tear_down_ll(self):
            self.module._scrypt_ll = self.tmp_ll
            self.module.scr_mod = self.tmp_scr
        tmp = type(
            'pylibsodiumFallbackTests', (ScryptTests,),
            {
                'module': pylibsodium,
                'fast': False, # supports only large parameters
                'set_up_lambda': set_up_ll,
                'tear_down_lambda': tear_down_ll,
            }
        )
        suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(tmp))
    except ImportError:
        suite.addTest(load_scrypt_suite('pylibsodiumTests', None, ref))
    try:
        from . import pypyscrypt_inline as pypyscrypt
        suite.addTest(load_scrypt_suite('pypyscryptTests', pypyscrypt, ref))
    except ImportError:
        suite.addTest(load_scrypt_suite('pypyscryptTests', None, ref))
    result = unittest.TextTestRunner().run(suite)
    sys.exit(not result.wasSuccessful())
| [
"unittest.defaultTestLoader.loadTestsFromTestCase",
"hypothesis.strategies.binary",
"hypothesis.strategies.sampled_from",
"unittest.TestSuite",
"hypothesis.strategies.text",
"hypothesis.strategies.none",
"hypothesis.strategies.integers",
"hypothesis.settings",
"unittest.TextTestRunner"
] | [((1162, 1170), 'hypothesis.strategies.binary', 'binary', ([], {}), '()\n', (1168, 1170), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((1344, 1352), 'hypothesis.strategies.binary', 'binary', ([], {}), '()\n', (1350, 1352), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((1465, 1505), 'hypothesis.strategies.integers', 'integers', ([], {'min_value': '(1)', 'max_value': '(2 ** 20)'}), '(min_value=1, max_value=2 ** 20)\n', (1473, 1505), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((1534, 1645), 'hypothesis.strategies.sampled_from', 'sampled_from', (['[SCRYPT_MCF_PREFIX_7, SCRYPT_MCF_PREFIX_s1, SCRYPT_MCF_PREFIX_DEFAULT,\n SCRYPT_MCF_PREFIX_ANY]'], {}), '([SCRYPT_MCF_PREFIX_7, SCRYPT_MCF_PREFIX_s1,\n SCRYPT_MCF_PREFIX_DEFAULT, SCRYPT_MCF_PREFIX_ANY])\n', (1546, 1645), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((2255, 2277), 'hypothesis.settings', 'settings', ([], {'deadline': '(500)'}), '(deadline=500)\n', (2263, 2277), False, 'from hypothesis import given, settings\n'), ((2880, 2902), 'hypothesis.settings', 'settings', ([], {'deadline': '(500)'}), '(deadline=500)\n', (2888, 2902), False, 'from hypothesis import given, settings\n'), ((3617, 3672), 'unittest.defaultTestLoader.loadTestsFromTestCase', 'unittest.defaultTestLoader.loadTestsFromTestCase', (['tests'], {}), '(tests)\n', (3665, 3672), False, 'import unittest\n'), ((3714, 3734), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (3732, 3734), False, 'import unittest\n'), ((1394, 1425), 'hypothesis.strategies.binary', 'binary', ([], {'min_size': '(1)', 'max_size': '(16)'}), '(min_size=1, max_size=16)\n', (1400, 1425), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((1427, 1433), 'hypothesis.strategies.none', 'none', ([], {}), '()\n', 
(1431, 1433), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((5490, 5543), 'unittest.defaultTestLoader.loadTestsFromTestCase', 'unittest.defaultTestLoader.loadTestsFromTestCase', (['tmp'], {}), '(tmp)\n', (5538, 5543), False, 'import unittest\n'), ((5891, 5916), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (5914, 5916), False, 'import unittest\n'), ((1212, 1220), 'hypothesis.strategies.binary', 'binary', ([], {}), '()\n', (1218, 1220), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n'), ((1273, 1279), 'hypothesis.strategies.text', 'text', ([], {}), '()\n', (1277, 1279), False, 'from hypothesis.strategies import binary, integers, none, one_of, sampled_from, text\n')] |
# -*- coding: utf-8 -*-
import scrapy
import json
import time
from filtering.items import FilteringItem
class AliTechSpider(scrapy.Spider):
    """Scrape article listings from the Alibaba Tech organization page on Zhihu."""
    name = 'ali_tech'
    alias = '阿里技术'  # site display name ("Alibaba Tech")
    group = '技术'  # category ("Technology")
    def start_requests(self):
        # Mobile Chrome user agent; presumably required to get the mobile
        # page layout / avoid blocking -- TODO confirm.
        headers = {
            'User-Agent':'Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36'}
        urls = ['https://www.zhihu.com/org/a-li-ji-zhu']
        for url in urls:
            yield scrapy.Request(url=url, headers=headers, callback=self.parse)
    def parse(self, response):
        # Each article card exposes schema.org itemprop metadata.
        for i in response.css('div.ContentItem.ArticleItem'):
            item = FilteringItem()
            item['title'] = i.css('[itemprop=headline]::attr(content)').get()
            item['url'] = 'https:' + i.css('a::attr(href)').get()
            t = i.css('[itemprop=datePublished]::attr(content)').get()
            # NOTE(review): the timestamp ends in "Z" (UTC) but time.mktime
            # interprets the struct_time as local time -- confirm the
            # resulting offset is acceptable.
            t = time.mktime(time.strptime(t, "%Y-%m-%dT%H:%M:%S.000Z"))
            item['time'] = int(t)
            item['site'] = self.alias
            item['group'] = self.group
            yield item
| [
"filtering.items.FilteringItem",
"time.strptime",
"scrapy.Request"
] | [((693, 708), 'filtering.items.FilteringItem', 'FilteringItem', ([], {}), '()\n', (706, 708), False, 'from filtering.items import FilteringItem\n'), ((518, 579), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'url', 'headers': 'headers', 'callback': 'self.parse'}), '(url=url, headers=headers, callback=self.parse)\n', (532, 579), False, 'import scrapy\n'), ((952, 994), 'time.strptime', 'time.strptime', (['t', '"""%Y-%m-%dT%H:%M:%S.000Z"""'], {}), "(t, '%Y-%m-%dT%H:%M:%S.000Z')\n", (965, 994), False, 'import time\n')] |
import types
import threading
import time
__all__ = 'suspend', 'suspending', 'start', 'Continuation',
_CONT_REQUEST = object()
@types.coroutine
def suspend(fn, *args):
    """Suspend the currently running coroutine and invoke FN with the continuation.

    The first yield hands _CONT_REQUEST up to the driver, which sends back
    a Continuation.  FN receives that continuation (plus *args); the
    coroutine then stays suspended at the second yield until the
    continuation is invoked, and returns the value it was resumed with.
    """
    cont = yield _CONT_REQUEST
    fn(cont, *args)
    cont_retval = yield
    return cont_retval
class suspending:
    """Provide continuation to the code entering the context manager,
    and suspend on exit.
    async with corocc.suspending() as cont:
        ... # store cont somewhere, or call it
    # if cont() was not invoked inside the async with block,
    # suspension happens at this point
    # invocation of cont() resumes coroutine here.
    """
    __slots__ = ('_cont',)
    @types.coroutine
    def __aenter__(self):
        # Request a continuation from the driver and expose it to the block.
        cont = yield _CONT_REQUEST
        self._cont = cont
        return cont
    @types.coroutine
    def __aexit__(self, _t, v, _tb):
        # if there is an exception, raise it immediately, don't wait
        # until resumption
        if v is not None:
            raise v
        # Suspend here; the value passed to cont() lands on cont.result.
        self._cont.result = yield
class Continuation:
    """
    Object that can be invoked to continue a suspended coroutine.
    Continuations are one-shot, invoking a continuation more than once
    results in a RuntimeError.
    """
    # using __slots__ actually makes a noticable impact on performance
    __slots__ = ('_driver', '_invoked_in', '_contval', '_coro_deliver',
                 'result')
    def __cont(self, val, coro_deliver):
        # Continue the coroutine, or store the intended continuation value and
        # let the caller continue it. coro_deliver is coro.send if the
        # coroutine resumes normally, or coro.throw if it resumes with an
        # exception.
        if self._invoked_in is not None:
            raise RuntimeError("coroutine already resumed")
        here = threading.current_thread()
        self._invoked_in = here
        driver = self._driver
        if driver.step_thread is not here:
            # resume the coroutine with the provided value
            # (busy-wait until the driver on the other thread has finished
            # delivering the continuation to the suspension point)
            while driver.coro_running:
                time.sleep(.0001)
            driver.step(coro_deliver, val)
        else:
            # if cont() was invoked from inside suspend, do not step,
            # just return to the current step and let it resume
            self._contval = val
            self._coro_deliver = coro_deliver
    def __call__(self, value=None, __cont=__cont):
        """Continue the coroutine with the provided value, or None."""
        # __cont is bound as a default argument, presumably to avoid an
        # attribute lookup per call -- it must stay that way.
        __cont(self, value, self._driver.coro_send)
    def throw(self, e, __cont=__cont):
        """Continue the coroutine with the provided exception."""
        __cont(self, e, self._driver.coro_throw)
class _Driver:
    # Internal runner that advances a coroutine step by step, handing out a
    # fresh one-shot Continuation at every suspension point.
    __slots__ = ('coro', 'coro_send', 'coro_throw', 'resumefn',
                 'future', 'start_data', 'coro_running', 'step_thread')
    def step(self, coro_deliver, contval):
        # Run a step of the coroutine, i.e. execute it until a suspension or
        # completion, whichever happens first.
        here = self.step_thread = threading.current_thread()
        while True:
            if not self.resumefn(coro_deliver, contval):
                return
            cont = Continuation()
            cont._driver = self
            cont._invoked_in = None
            self._resume_with_cont(cont)
            if cont._invoked_in is not here:
                # The continuation was not invoked, or was invoked in a
                # different thread. This step is done, it's now up to cont to
                # call us again. Set step_thread to a non-thread value, so
                # that cont knows it has to call step() regardless of which
                # thread it's invoked in.
                self.step_thread = None
                return
            # The continuation was invoked immediately, so the suspension
            # didn't really happen. Resume the coroutine with the provided
            # value.
            contval = cont._contval
            coro_deliver = cont._coro_deliver
    def _resume_with_cont(self, cont):
        # Deliver the continuation object to the yield inside suspend().
        self.coro_running = True
        try:
            ret = self.coro_send(cont)
        except StopIteration:
            raise AssertionError("suspend didn't yield")
        finally:
            self.coro_running = False
        if ret is _CONT_REQUEST:
            raise RuntimeError("nested suspending() inside in the same coroutine "
                               "is not allowed")
    @staticmethod
    def resume_simple(coro_deliver, val):
        # Resume the coroutine with VAL. coro_deliver is either
        # self._coro.send or self._coro.throw.
        #
        # Return whether the coroutine can be further invoked, i.e. false if
        # completed. Coroutine's result is ignored and exceptions raised by
        # the coroutine are propagated to the caller.
        try:
            coro_deliver(val)
            return True
        except StopIteration:
            return False
    def resume_catching(self, coro_deliver, val):
        # Like resume_simple, but if the coroutine completes, store result
        # into self.future. If the coroutine execution raises, store
        # exception into self.future.
        try:
            coro_deliver(val)
        except StopIteration as e:
            self.future.set_result(e.value)
            return False
        except Exception as e:
            self.future.set_exception(e)
            return False
        return True
def start(coro, *, future=None, data=None):
    """
    Start executing CORO, allowing it to suspend itself and continue
    execution.

    Args:
        coro: the coroutine object to drive.
        future: optional future; when given, the coroutine's result or
            exception is stored on it instead of propagating to the caller.
        data: arbitrary value stored on the driver as `start_data`.
    """
    d = _Driver()
    d.coro = coro
    # cached for efficiency
    d.coro_send = coro.send
    d.coro_throw = coro.throw
    d.future = future
    d.start_data = data
    if future is None:
        d.resumefn = d.resume_simple
    else:
        d.resumefn = d.resume_catching
    d.coro_running = False
    d.step(coro.send, None)
| [
"threading.current_thread",
"time.sleep"
] | [((1901, 1927), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (1925, 1927), False, 'import threading\n'), ((3119, 3145), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (3143, 3145), False, 'import threading\n'), ((2147, 2165), 'time.sleep', 'time.sleep', (['(0.0001)'], {}), '(0.0001)\n', (2157, 2165), False, 'import time\n')] |
import sys
from pathlib import Path
import os
import torch
from torch.optim import Adam
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from networks.critic import Critic
from networks.actor import NoisyActor, CategoricalActor, GaussianActor
base_dir = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(base_dir))
from common.buffer import Replay_buffer as buffer
def get_trajectory_property():
    """Names of the extra per-transition fields stored in the replay buffer."""
    extra_fields = ["action"]
    return extra_fields
def weights_init_(m):
    """Xavier-uniform init for nn.Linear weights; biases are zeroed.

    Intended for use with `module.apply(weights_init_)`; non-Linear
    modules are left untouched.
    """
    if not isinstance(m, nn.Linear):
        return
    torch.nn.init.xavier_uniform_(m.weight, gain=1)
    torch.nn.init.constant_(m.bias, 0)
def update_params(optim, loss, clip=False, param_list=False, retain_graph=False):
    """Take one optimizer step on `loss`, optionally clipping gradient norms.

    When `clip` is anything other than False, each entry of `param_list`
    has its gradient norm clipped to `clip` before the step.
    """
    optim.zero_grad()
    loss.backward(retain_graph=retain_graph)
    clipping_enabled = clip is not False
    if clipping_enabled:
        for group in param_list:
            torch.nn.utils.clip_grad_norm_(group, clip)
    optim.step()
class SAC(object):
    """Soft Actor-Critic agent with three policy variants.

    The variant is selected from the config: ``'discrete'`` (categorical
    policy, twin Q-networks), ``'deterministic'`` (noisy actor with a
    target policy, single critic), or ``'gaussian'`` (continuous
    stochastic policy, single critic). Discrete action spaces always use
    the categorical variant.
    """

    def __init__(self, args):
        # Dimensions and core hyper-parameters, all from the parsed args.
        self.state_dim = args.obs_space
        self.action_dim = args.action_space
        self.gamma = args.gamma
        self.tau = args.tau
        self.action_continuous = args.action_continuous
        self.batch_size = args.batch_size
        self.hidden_size = args.hidden_size
        self.actor_lr = args.a_lr
        self.critic_lr = args.c_lr
        self.alpha_lr = args.alpha_lr
        self.buffer_size = args.buffer_capacity
        # Discrete action spaces force a categorical policy; continuous ones
        # pick between 'deterministic' and 'gaussian' via the config.
        self.policy_type = 'discrete' if (not self.action_continuous) else args.policy_type #deterministic or gaussian policy
        self.device = 'cpu'
        given_critic = Critic #need to set a default value
        self.preset_alpha = args.alpha
        if self.policy_type == 'deterministic':
            # Deterministic variant: no entropy tuning; actor + target actor,
            # single (state, action) critic + target critic.
            self.tune_entropy = False
            hid_layer = args.num_hid_layer
            self.policy = NoisyActor(state_dim = self.state_dim, hidden_dim=self.hidden_size, out_dim=1,
                                   num_hidden_layer=hid_layer).to(self.device)
            self.policy_target = NoisyActor(state_dim = self.state_dim, hidden_dim=self.hidden_size, out_dim=1,
                                   num_hidden_layer=hid_layer).to(self.device)
            self.policy_target.load_state_dict(self.policy.state_dict())
            # Critic consumes the concatenated (state, action) vector.
            self.q1 = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q1.apply(weights_init_)
            self.q1_target = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q1_target.load_state_dict(self.q1.state_dict())
            self.critic_optim = Adam(self.q1.parameters(), lr = self.critic_lr)
        elif self.policy_type == 'discrete':
            # Discrete SAC: categorical policy, twin critics (clipped
            # double-Q) mapping state -> one Q-value per action.
            self.tune_entropy = args.tune_entropy
            self.target_entropy_ratio = args.target_entropy_ratio
            self.policy = CategoricalActor(self.state_dim, self.hidden_size, self.action_dim).to(self.device)
            hid_layer = args.num_hid_layer
            self.q1 = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q1.apply(weights_init_)
            self.q2 = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q2.apply(weights_init_)
            self.q1_target = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q2_target = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q1_target.load_state_dict(self.q1.state_dict())
            self.q2_target.load_state_dict(self.q2.state_dict())
            # One optimizer drives both critics.
            self.critic_optim = Adam(list(self.q1.parameters()) + list(self.q2.parameters()), lr=self.critic_lr)
        elif self.policy_type == 'gaussian':
            # Gaussian variant: stochastic continuous policy, single
            # (state, action) critic + target critic, no target policy.
            self.tune_entropy = args.tune_entropy
            self.target_entropy_ratio = args.target_entropy_ratio
            self.policy = GaussianActor(self.state_dim, self.hidden_size, 1, tanh = False).to(self.device)
            #self.policy_target = GaussianActor(self.state_dim, self.hidden_size, 1, tanh = False).to(self.device)
            hid_layer = args.num_hid_layer
            self.q1 = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q1.apply(weights_init_)
            self.critic_optim = Adam(self.q1.parameters(), lr = self.critic_lr)
            self.q1_target = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device)
            self.q1_target.load_state_dict(self.q1.state_dict())
        else:
            raise NotImplementedError
        # Epsilon schedule parameters (set from args; not read elsewhere in
        # this class -- presumably consumed by the surrounding framework).
        self.eps = args.epsilon
        self.eps_end = args.epsilon_end
        self.eps_delay = 1 / (args.max_episodes * 100)
        # Target networks are refreshed every `target_replace_iter` learn steps.
        self.learn_step_counter = 0
        self.target_replace_iter = args.target_replace
        self.policy_optim = Adam(self.policy.parameters(), lr = self.actor_lr)
        trajectory_property = get_trajectory_property()
        self.memory = buffer(self.buffer_size, trajectory_property)
        self.memory.init_item_buffers()
        if self.tune_entropy:
            # Automatic temperature tuning: target entropy is a fraction of
            # the maximum entropy log(|A|); alpha is optimized through
            # log_alpha to keep it positive.
            self.target_entropy = -np.log(1./self.action_dim) * self.target_entropy_ratio
            self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device)
            #self.alpha = self.log_alpha.exp()
            self.alpha = torch.tensor([self.preset_alpha])
            self.alpha_optim = Adam([self.log_alpha], lr=self.alpha_lr)
        else:
            self.alpha = torch.tensor([self.preset_alpha]) # coefficiency for entropy term

    def choose_action(self, state, train = True):
        """Select an action for `state` and return it as ``{'action': ...}``.

        In training mode the sampled (exploratory) action is also recorded
        into the replay buffer via add_experience; in evaluation mode the
        policy's deterministic output is used instead.
        """
        state = torch.tensor(state, dtype=torch.float).view(1, -1)
        if self.policy_type == 'discrete':
            if train:
                action, _, _, _ = self.policy.sample(state)
                action = action.item()
                self.add_experience({"action": action})
            else:
                _, _, _, action = self.policy.sample(state)
                action = action.item()
            return {'action': action}
        elif self.policy_type == 'deterministic':
            if train:
                _,_,_,action = self.policy.sample(state)
                action = action.item()
                self.add_experience({"action": action})
            else:
                _,_,_,action = self.policy.sample(state)
                action = action.item()
            return {'action':action}
        elif self.policy_type == 'gaussian':
            if train:
                action, _, _ = self.policy.sample(state)
                # Continuous sample is a (1, action_dim) tensor; drop the
                # batch dimension before storing.
                action = action.detach().numpy().squeeze(1)
                self.add_experience({"action": action})
            else:
                _, _, action = self.policy.sample(state)
                action = action.item()
            return {'action':action}
        else:
            raise NotImplementedError

    def add_experience(self, output):
        """Insert each (key, value) pair of `output` into the replay buffer
        for agent 0 (single-agent setup)."""
        agent_id = 0
        for k, v in output.items():
            self.memory.insert(k, agent_id, v)

    def critic_loss(self, current_state, batch_action, next_state, reward, mask):
        """Clipped double-Q critic loss for the discrete variant.

        `mask` is (1 - done). The soft next-state value is the expectation,
        under the next-action distribution, of min(Q1', Q2') - alpha*log pi.
        Returns (qf1_loss, qf2_loss) as MSE against the soft Bellman target.
        """
        with torch.no_grad():
            next_state_action, next_state_pi, next_state_log_pi, _ = self.policy.sample(next_state)
            #qf1_next_target, qf2_next_target = self.critic_target(next_state)
            qf1_next_target = self.q1_target(next_state)
            qf2_next_target = self.q2_target(next_state)
            min_qf_next_target = next_state_pi * (torch.min(qf1_next_target, qf2_next_target) - self.alpha
                                                  * next_state_log_pi)  # V function
            min_qf_next_target = min_qf_next_target.sum(dim=1, keepdim=True)
            next_q_value = reward + mask * self.gamma * (min_qf_next_target)
        #qf1, qf2 = self.critic(current_state)  # Two Q-functions to mitigate positive bias in the policy improvement step, [batch, action_num]
        qf1 = self.q1(current_state)
        qf2 = self.q2(current_state)
        qf1 = qf1.gather(1, batch_action.long())
        qf2 = qf2.gather(1, batch_action.long()) #[batch, 1] , pick the action-value for the given batched actions
        qf1_loss = torch.mean((qf1 - next_q_value).pow(2))
        qf2_loss = torch.mean((qf2 - next_q_value).pow(2))
        return qf1_loss, qf2_loss

    def policy_loss(self, current_state):
        """Policy improvement loss for the discrete variant.

        Minimizes the expectation, over the categorical action distribution,
        of alpha*log pi - min(Q1, Q2); critics are frozen via no_grad so
        only the policy receives gradients.
        Returns (policy_loss, detached action probs, detached log probs).
        """
        with torch.no_grad():
            #qf1_pi, qf2_pi = self.critic(current_state)
            qf1_pi = self.q1(current_state)
            qf2_pi = self.q2(current_state)
            min_qf_pi = torch.min(qf1_pi, qf2_pi)
        pi, prob, log_pi, _ = self.policy.sample(current_state)
        inside_term = self.alpha.detach() * log_pi - min_qf_pi  # [batch, action_dim]
        policy_loss = ((prob * inside_term).sum(1)).mean()
        return policy_loss, prob.detach(), log_pi.detach()

    def alpha_loss(self, action_prob, action_logprob):
        """Temperature (alpha) loss for automatic entropy tuning.

        Returns (alpha_loss, current_alpha). When tuning is disabled the
        loss is a constant zero and the preset alpha is returned.
        """
        if self.tune_entropy:
            entropies = -torch.sum(action_prob * action_logprob, dim=1, keepdim=True)  #[batch, 1]
            entropies = entropies.detach()
            alpha_loss = -torch.mean(self.log_alpha * (self.target_entropy - entropies))
            alpha_logs = self.log_alpha.exp().detach()
        else:
            alpha_loss = torch.tensor(0.).to(self.device)
            alpha_logs = self.alpha.detach().clone()
        return alpha_loss, alpha_logs

    def learn(self):
        """Sample a batch from the replay buffer and run one update step.

        The concrete update depends on self.policy_type; target networks
        are refreshed every `target_replace_iter` steps (hard copy for the
        discrete variant, Polyak averaging with `tau` otherwise).
        """
        data = self.memory.sample(self.batch_size)
        transitions = {
            "o_0": np.array(data['states']),
            "o_next_0": np.array(data['states_next']),
            "r_0": np.array(data['rewards']).reshape(-1, 1),
            "u_0": np.array(data['action']),
            "d_0": np.array(data['dones']).reshape(-1, 1),
        }
        obs = torch.tensor(transitions["o_0"], dtype=torch.float)
        obs_ = torch.tensor(transitions["o_next_0"], dtype=torch.float)
        action = torch.tensor(transitions["u_0"], dtype=torch.long).view(self.batch_size, -1)
        reward = torch.tensor(transitions["r_0"], dtype=torch.float)
        done = torch.tensor(transitions["d_0"], dtype=torch.float)
        if self.policy_type == 'discrete':
            qf1_loss, qf2_loss = self.critic_loss(obs, action, obs_, reward, (1-done))
            policy_loss, prob, log_pi = self.policy_loss(obs)
            alpha_loss, alpha_logs = self.alpha_loss(prob, log_pi)
            qf_loss = qf1_loss + qf2_loss
            update_params(self.critic_optim,qf_loss)
            update_params(self.policy_optim, policy_loss)
            if self.tune_entropy:
                update_params(self.alpha_optim, alpha_loss)
                self.alpha = self.log_alpha.exp().detach()
            if self.learn_step_counter % self.target_replace_iter == 0:
                #self.critic_target.load_state_dict(self.critic.state_dict())
                self.q1_target.load_state_dict(self.q1.state_dict())
                self.q2_target.load_state_dict(self.q2.state_dict())
            self.learn_step_counter += 1
        elif self.policy_type == 'deterministic':
            # NOTE(review): learn_step_counter is only incremented in the
            # 'discrete' branch above, so in this branch (and the gaussian
            # one) the modulo test below is always true and target networks
            # are soft-updated on every call -- confirm this is intended.
            current_q = self.q1(torch.cat([obs, action], 1))
            target_next_action = self.policy_target(obs_)
            target_next_q = self.q1_target(torch.cat([obs_, target_next_action], 1))
            next_q_value = reward + (1-done) * self.gamma * target_next_q
            qf_loss = F.mse_loss(current_q, next_q_value.detach())
            self.critic_optim.zero_grad()
            qf_loss.backward()
            self.critic_optim.step()
            _, _, _, current_action = self.policy.sample(obs)
            qf_pi = self.q1(torch.cat([obs, current_action], 1))
            policy_loss = -qf_pi.mean()
            self.policy_optim.zero_grad()
            policy_loss.backward()
            self.policy_optim.step()
            if self.learn_step_counter % self.target_replace_iter == 0:
                # Polyak averaging of critic and policy target networks.
                for param, target_param in zip(self.q1.parameters(), self.q1_target.parameters()):
                    target_param.data.copy_(self.tau * param.data + (1.-self.tau) * target_param.data)
                for param, target_param in zip(self.policy.parameters(), self.policy_target.parameters()):
                    target_param.data.copy_(self.tau * param.data + (1.-self.tau) * target_param.data)
        elif self.policy_type == 'gaussian':
            # Continuous actions are stored as floats, so re-cast the batch
            # (the long-typed `action` above is only valid for discrete).
            action = torch.tensor(transitions["u_0"], dtype=torch.float).view(self.batch_size, -1)
            with torch.no_grad():
                # next_action, next_action_logprob, _ = self.policy_target.sample(obs_)
                next_action, next_action_logprob, _ = self.policy.sample(obs_)
                target_next_q = self.q1_target(
                    torch.cat([obs_, next_action], 1)) - self.alpha * next_action_logprob
                next_q_value = reward + (1 - done) * self.gamma * target_next_q
            qf1 = self.q1(torch.cat([obs, action], 1))
            qf_loss = F.mse_loss(qf1, next_q_value)
            self.critic_optim.zero_grad()
            qf_loss.backward()
            self.critic_optim.step()
            pi, log_pi, _ = self.policy.sample(obs)
            qf_pi = self.q1(torch.cat([obs, pi], 1))
            policy_loss = ((self.alpha * log_pi) - qf_pi).mean()
            self.policy_optim.zero_grad()
            policy_loss.backward()
            self.policy_optim.step()
            if self.tune_entropy:
                alpha_loss = -(self.log_alpha * (log_pi + self.target_entropy).detach()).mean()
                self.alpha_optim.zero_grad()
                alpha_loss.backward()
                self.alpha_optim.step()
                self.alpha = self.log_alpha.exp()
            else:
                pass
            if self.learn_step_counter % self.target_replace_iter == 0:
                for param, target_param in zip(self.q1.parameters(), self.q1_target.parameters()):
                    target_param.data.copy_(self.tau * param.data + (1. - self.tau) * target_param.data)
                # for param, target_param in zip(self.policy.parameters(), self.policy_target.parameters()):
                #     target_param.data.copy_(self.tau * param.data + (1.-self.tau) * target_param.data)
        else:
            raise NotImplementedError

    def save(self, save_path, episode):
        """Save the policy weights to save_path/trained_model/actor_<episode>.pth,
        creating the directory if needed."""
        base_path = os.path.join(save_path, 'trained_model')
        if not os.path.exists(base_path):
            os.makedirs(base_path)
        model_actor_path = os.path.join(base_path, "actor_" + str(episode) + ".pth")
        torch.save(self.policy.state_dict(), model_actor_path)

    def load(self, file):
        """Load policy weights from `file` (a state-dict checkpoint path)."""
        self.policy.load_state_dict(torch.load(file))
| [
"torch.nn.init.constant_",
"torch.nn.utils.clip_grad_norm_",
"numpy.log",
"torch.min",
"numpy.array",
"torch.sum",
"networks.actor.NoisyActor",
"os.path.exists",
"networks.actor.CategoricalActor",
"pathlib.Path",
"torch.nn.init.xavier_uniform_",
"torch.mean",
"networks.actor.GaussianActor",
... | [((569, 616), 'torch.nn.init.xavier_uniform_', 'torch.nn.init.xavier_uniform_', (['m.weight'], {'gain': '(1)'}), '(m.weight, gain=1)\n', (598, 616), False, 'import torch\n'), ((625, 659), 'torch.nn.init.constant_', 'torch.nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (648, 659), False, 'import torch\n'), ((5158, 5203), 'common.buffer.Replay_buffer', 'buffer', (['self.buffer_size', 'trajectory_property'], {}), '(self.buffer_size, trajectory_property)\n', (5164, 5203), True, 'from common.buffer import Replay_buffer as buffer\n'), ((9964, 10015), 'torch.tensor', 'torch.tensor', (["transitions['o_0']"], {'dtype': 'torch.float'}), "(transitions['o_0'], dtype=torch.float)\n", (9976, 10015), False, 'import torch\n'), ((10031, 10087), 'torch.tensor', 'torch.tensor', (["transitions['o_next_0']"], {'dtype': 'torch.float'}), "(transitions['o_next_0'], dtype=torch.float)\n", (10043, 10087), False, 'import torch\n'), ((10199, 10250), 'torch.tensor', 'torch.tensor', (["transitions['r_0']"], {'dtype': 'torch.float'}), "(transitions['r_0'], dtype=torch.float)\n", (10211, 10250), False, 'import torch\n'), ((10266, 10317), 'torch.tensor', 'torch.tensor', (["transitions['d_0']"], {'dtype': 'torch.float'}), "(transitions['d_0'], dtype=torch.float)\n", (10278, 10317), False, 'import torch\n'), ((14511, 14551), 'os.path.join', 'os.path.join', (['save_path', '"""trained_model"""'], {}), "(save_path, 'trained_model')\n", (14523, 14551), False, 'import os\n'), ((876, 915), 'torch.nn.utils.clip_grad_norm_', 'torch.nn.utils.clip_grad_norm_', (['i', 'clip'], {}), '(i, clip)\n', (906, 915), False, 'import torch\n'), ((5394, 5448), 'torch.zeros', 'torch.zeros', (['(1)'], {'requires_grad': '(True)', 'device': 'self.device'}), '(1, requires_grad=True, device=self.device)\n', (5405, 5448), False, 'import torch\n'), ((5521, 5554), 'torch.tensor', 'torch.tensor', (['[self.preset_alpha]'], {}), '([self.preset_alpha])\n', (5533, 5554), False, 'import torch\n'), ((5586, 5626), 
'torch.optim.Adam', 'Adam', (['[self.log_alpha]'], {'lr': 'self.alpha_lr'}), '([self.log_alpha], lr=self.alpha_lr)\n', (5590, 5626), False, 'from torch.optim import Adam\n'), ((5666, 5699), 'torch.tensor', 'torch.tensor', (['[self.preset_alpha]'], {}), '([self.preset_alpha])\n', (5678, 5699), False, 'import torch\n'), ((7291, 7306), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7304, 7306), False, 'import torch\n'), ((8549, 8564), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8562, 8564), False, 'import torch\n'), ((8736, 8761), 'torch.min', 'torch.min', (['qf1_pi', 'qf2_pi'], {}), '(qf1_pi, qf2_pi)\n', (8745, 8761), False, 'import torch\n'), ((9693, 9717), 'numpy.array', 'np.array', (["data['states']"], {}), "(data['states'])\n", (9701, 9717), True, 'import numpy as np\n'), ((9743, 9772), 'numpy.array', 'np.array', (["data['states_next']"], {}), "(data['states_next'])\n", (9751, 9772), True, 'import numpy as np\n'), ((9854, 9878), 'numpy.array', 'np.array', (["data['action']"], {}), "(data['action'])\n", (9862, 9878), True, 'import numpy as np\n'), ((14567, 14592), 'os.path.exists', 'os.path.exists', (['base_path'], {}), '(base_path)\n', (14581, 14592), False, 'import os\n'), ((14606, 14628), 'os.makedirs', 'os.makedirs', (['base_path'], {}), '(base_path)\n', (14617, 14628), False, 'import os\n'), ((14840, 14856), 'torch.load', 'torch.load', (['file'], {}), '(file)\n', (14850, 14856), False, 'import torch\n'), ((5801, 5839), 'torch.tensor', 'torch.tensor', (['state'], {'dtype': 'torch.float'}), '(state, dtype=torch.float)\n', (5813, 5839), False, 'import torch\n'), ((9144, 9204), 'torch.sum', 'torch.sum', (['(action_prob * action_logprob)'], {'dim': '(1)', 'keepdim': '(True)'}), '(action_prob * action_logprob, dim=1, keepdim=True)\n', (9153, 9204), False, 'import torch\n'), ((9292, 9354), 'torch.mean', 'torch.mean', (['(self.log_alpha * (self.target_entropy - entropies))'], {}), '(self.log_alpha * (self.target_entropy - entropies))\n', (9302, 9354), 
False, 'import torch\n'), ((10105, 10155), 'torch.tensor', 'torch.tensor', (["transitions['u_0']"], {'dtype': 'torch.long'}), "(transitions['u_0'], dtype=torch.long)\n", (10117, 10155), False, 'import torch\n'), ((281, 295), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (285, 295), False, 'from pathlib import Path\n'), ((1849, 1957), 'networks.actor.NoisyActor', 'NoisyActor', ([], {'state_dim': 'self.state_dim', 'hidden_dim': 'self.hidden_size', 'out_dim': '(1)', 'num_hidden_layer': 'hid_layer'}), '(state_dim=self.state_dim, hidden_dim=self.hidden_size, out_dim=1,\n num_hidden_layer=hid_layer)\n', (1859, 1957), False, 'from networks.actor import NoisyActor, CategoricalActor, GaussianActor\n'), ((2042, 2150), 'networks.actor.NoisyActor', 'NoisyActor', ([], {'state_dim': 'self.state_dim', 'hidden_dim': 'self.hidden_size', 'out_dim': '(1)', 'num_hidden_layer': 'hid_layer'}), '(state_dim=self.state_dim, hidden_dim=self.hidden_size, out_dim=1,\n num_hidden_layer=hid_layer)\n', (2052, 2150), False, 'from networks.actor import NoisyActor, CategoricalActor, GaussianActor\n'), ((5310, 5339), 'numpy.log', 'np.log', (['(1.0 / self.action_dim)'], {}), '(1.0 / self.action_dim)\n', (5316, 5339), True, 'import numpy as np\n'), ((7652, 7695), 'torch.min', 'torch.min', (['qf1_next_target', 'qf2_next_target'], {}), '(qf1_next_target, qf2_next_target)\n', (7661, 7695), False, 'import torch\n'), ((9450, 9467), 'torch.tensor', 'torch.tensor', (['(0.0)'], {}), '(0.0)\n', (9462, 9467), False, 'import torch\n'), ((9793, 9818), 'numpy.array', 'np.array', (["data['rewards']"], {}), "(data['rewards'])\n", (9801, 9818), True, 'import numpy as np\n'), ((9899, 9922), 'numpy.array', 'np.array', (["data['dones']"], {}), "(data['dones'])\n", (9907, 9922), True, 'import numpy as np\n'), ((11299, 11326), 'torch.cat', 'torch.cat', (['[obs, action]', '(1)'], {}), '([obs, action], 1)\n', (11308, 11326), False, 'import torch\n'), ((11431, 11471), 'torch.cat', 'torch.cat', (['[obs_, 
target_next_action]', '(1)'], {}), '([obs_, target_next_action], 1)\n', (11440, 11471), False, 'import torch\n'), ((11816, 11851), 'torch.cat', 'torch.cat', (['[obs, current_action]', '(1)'], {}), '([obs, current_action], 1)\n', (11825, 11851), False, 'import torch\n'), ((13135, 13164), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['qf1', 'next_q_value'], {}), '(qf1, next_q_value)\n', (13145, 13164), True, 'import torch.nn.functional as F\n'), ((2924, 2991), 'networks.actor.CategoricalActor', 'CategoricalActor', (['self.state_dim', 'self.hidden_size', 'self.action_dim'], {}), '(self.state_dim, self.hidden_size, self.action_dim)\n', (2940, 2991), False, 'from networks.actor import NoisyActor, CategoricalActor, GaussianActor\n'), ((12656, 12671), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (12669, 12671), False, 'import torch\n'), ((13084, 13111), 'torch.cat', 'torch.cat', (['[obs, action]', '(1)'], {}), '([obs, action], 1)\n', (13093, 13111), False, 'import torch\n'), ((13357, 13380), 'torch.cat', 'torch.cat', (['[obs, pi]', '(1)'], {}), '([obs, pi], 1)\n', (13366, 13380), False, 'import torch\n'), ((4033, 4095), 'networks.actor.GaussianActor', 'GaussianActor', (['self.state_dim', 'self.hidden_size', '(1)'], {'tanh': '(False)'}), '(self.state_dim, self.hidden_size, 1, tanh=False)\n', (4046, 4095), False, 'from networks.actor import NoisyActor, CategoricalActor, GaussianActor\n'), ((12560, 12611), 'torch.tensor', 'torch.tensor', (["transitions['u_0']"], {'dtype': 'torch.float'}), "(transitions['u_0'], dtype=torch.float)\n", (12572, 12611), False, 'import torch\n'), ((12908, 12941), 'torch.cat', 'torch.cat', (['[obs_, next_action]', '(1)'], {}), '([obs_, next_action], 1)\n', (12917, 12941), False, 'import torch\n')] |
import subprocess
import importlib
import os
import inspect
import logging
import pydub
import pydub.playback
from src import utils
from src.handlers import get_festival_tts, get_greeting
# Named logger used for alarm build/play events (e.g. TTS fallback notices).
event_logger = logging.getLogger("eventLogger")
class AlarmBuilder:
    """Builds and plays the alarm: a greeting, the enabled content sections
    rendered through their handler modules, TTS audio (with a beep
    fallback), and an optional radio stream."""

    def __init__(self, config):
        # config: parsed alarm configuration; provides dict-style section
        # access and helpers such as get_enabled_sections.
        self.config = config

    def build(self):
        """Loop through the configuration file for enabled content sections
        and generate content.
        Return:
            the alarm audio produced by the TTS client's setup() for the
            joined content text.
        """
        # Initialize content with greeting
        contents = []
        contents.append(self.generate_greeting())
        # For each content section get the handler module and create the appropriate
        # content parser
        content_sections = self.config.get_enabled_sections("content")
        for section in content_sections:
            class_ = self.get_content_parser_class(content_sections[section])
            parser = class_(content_sections[section])
            # call build to run the parser and store output
            try:
                parser.build()
            except KeyError as e:
                # Misconfigured section: report it but keep building the rest.
                print("Error: missing key {} in configuration file.".format(e))
            contents.append(parser.get())
        # Add ending phrase from the config file
        contents.append(self.config["main"].get("end", ""))
        for section in contents:
            print(section)
        # Initialize TTS client with the generated content.
        # NOTE: self.tts_client is created here and is required by play().
        content_text = "\n".join(contents)
        self.tts_client = self.get_tts_client()
        audio = self.tts_client.setup(content_text)
        return audio

    def play(self, audio):
        """Play an alarm. Either play a pre-built alarm via the configured TTS client
        or play a beeping sound effect.

        Must be called after build(), which sets self.tts_client.
        Args:
            audio (pydub.AudioSegment): the alarm audio to play.
        """
        # If no network connection is detected, or TTS is not enabled, play beep
        tts_enabled = self.config["main"]["TTS"]
        if not self.config._testnet() or not tts_enabled:
            AlarmBuilder.play_beep()
            return
        # Play the alarm: use the client's own play method if it exists,
        # otherwise fall back to pydub playback.
        try:
            self.tts_client.play(audio)
        except AttributeError:
            pydub.playback.play(audio)

    def build_and_play(self):
        """Build and play an alarm.
        This is provided as a CLI interface for playing the alarm.
        Since the alarm is built on the go, there may be a few seconds delay on play.
        """
        audio = self.build()
        self.play(audio)
        # Play the radio stream if enabled
        if self.config["radio"]["enabled"]:
            self.play_radio()

    def generate_greeting(self):
        """Generate a greeting using get_greeting.py handler.
        Return:
            the greeting as string.
        """
        section = self.config["content"]["greeting"]
        # An explicit alarm time in [main] overrides the scheduled one.
        alarm_time_override = self.config["main"].get("alarm_time")
        greeter = get_greeting.Greeting(section, alarm_time_override)
        greeter.build()
        return greeter.get()

    def get_tts_client(self):
        """Determine which TTS engine to use based on the enabled tts sections
        in the config file. First enabled section is used.
        Falls back to Festival TTS when no engine is enabled.
        """
        # Valid config can only have 1 enabled TTS engine. Note that
        # response is a wrapper containing dicionary with the top level TTS key.
        section_wrapper = self.config.get_enabled_sections("TTS")
        # Instantiate the correct class
        if section_wrapper:
            section = list(section_wrapper.values())[0]
            class_ = self.get_content_parser_class(section)
            # read the path to the keyfile if provided/applicable
            credentials = section.get("credentials")
            client = class_(credentials=credentials)
        # Default to Festival TTS
        else:
            event_logger.info("No TTS engine specified in config, using Festival")
            client = get_festival_tts.FestivalTTSManager()
        return client

    def get_content_parser_class(self, section):
        """Given config file section name, return the class matching the handler."""
        # use importlib to dynamically import the correct module within
        # the 'handlers' package. [:-3] strips the 3-char extension
        # (presumably '.py') from the configured handler filename.
        path_to_module = "src.handlers.{}".format(section["handler"][:-3])
        handler_module = importlib.import_module(path_to_module)
        # Inspect the handler module for classes and return the first class.
        # NOTE(review): getmembers returns members sorted by name and includes
        # imported classes -- assumes each handler module exposes exactly one
        # relevant class; confirm when adding new handlers.
        class_ = inspect.getmembers(handler_module, inspect.isclass)[0][1]
        return class_

    def play_radio(self):
        """Open a stream to the default radio station using cvlc."""
        default_station = self.config["radio"]["default"]
        url = self.config["radio"]["urls"][default_station]
        args = self.config["radio"].get("args", "")
        cmd = "/usr/bin/cvlc {} {}".format(url, args).split()
        # Run the command via Popen directly to open the stream as a child process without
        # waiting for it to finish.
        subprocess.Popen(cmd)

    @staticmethod
    def play_beep():
        """Play a beeping sound effect.
        Return:
            the path of the sound file that was played.
        """
        path = os.path.join(utils.BASE, "resources", "Cool-alarm-tone-notification-sound.mp3")
        beep = pydub.AudioSegment.from_mp3(path)
        pydub.playback.play(beep)
        return path
| [
"logging.getLogger",
"inspect.getmembers",
"importlib.import_module",
"pydub.playback.play",
"subprocess.Popen",
"pydub.AudioSegment.from_mp3",
"src.handlers.get_greeting.Greeting",
"os.path.join",
"src.handlers.get_festival_tts.FestivalTTSManager"
] | [((207, 239), 'logging.getLogger', 'logging.getLogger', (['"""eventLogger"""'], {}), "('eventLogger')\n", (224, 239), False, 'import logging\n'), ((3068, 3119), 'src.handlers.get_greeting.Greeting', 'get_greeting.Greeting', (['section', 'alarm_time_override'], {}), '(section, alarm_time_override)\n', (3089, 3119), False, 'from src.handlers import get_festival_tts, get_greeting\n'), ((4491, 4530), 'importlib.import_module', 'importlib.import_module', (['path_to_module'], {}), '(path_to_module)\n', (4514, 4530), False, 'import importlib\n'), ((5171, 5192), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {}), '(cmd)\n', (5187, 5192), False, 'import subprocess\n'), ((5291, 5370), 'os.path.join', 'os.path.join', (['utils.BASE', '"""resources"""', '"""Cool-alarm-tone-notification-sound.mp3"""'], {}), "(utils.BASE, 'resources', 'Cool-alarm-tone-notification-sound.mp3')\n", (5303, 5370), False, 'import os\n'), ((5386, 5419), 'pydub.AudioSegment.from_mp3', 'pydub.AudioSegment.from_mp3', (['path'], {}), '(path)\n', (5413, 5419), False, 'import pydub\n'), ((5428, 5453), 'pydub.playback.play', 'pydub.playback.play', (['beep'], {}), '(beep)\n', (5447, 5453), False, 'import pydub\n'), ((4089, 4126), 'src.handlers.get_festival_tts.FestivalTTSManager', 'get_festival_tts.FestivalTTSManager', ([], {}), '()\n', (4124, 4126), False, 'from src.handlers import get_festival_tts, get_greeting\n'), ((2338, 2364), 'pydub.playback.play', 'pydub.playback.play', (['audio'], {}), '(audio)\n', (2357, 2364), False, 'import pydub\n'), ((4626, 4677), 'inspect.getmembers', 'inspect.getmembers', (['handler_module', 'inspect.isclass'], {}), '(handler_module, inspect.isclass)\n', (4644, 4677), False, 'import inspect\n')] |
# Copyright 2016, 2019 <NAME>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test Factory to make fake objects for testing
"""
import factory
from factory.fuzzy import FuzzyChoice, FuzzyInteger
from services.models import Shopcart
class ShopcartFactory(factory.Factory):
    """ Creates fake shopcarts that you don't have to feed """
    class Meta:
        # The model class each factory invocation instantiates.
        model = Shopcart
    # Each field draws one value at random from its pool of choices,
    # so every generated Shopcart is a plausible mix of known values.
    product_id = FuzzyChoice(choices=[1001,2002,3003,4747,9999])
    customer_id = FuzzyChoice(choices=[1000,2000,3000,8000])
    product_name = FuzzyChoice(choices=["a","b","d","c","e"])
    product_price = FuzzyChoice(choices=[10.01,200.2,30,4747,999])
    # Random integer in [0, 10] inclusive.
    quantity = FuzzyInteger(0, 10, step=1)
| [
"factory.fuzzy.FuzzyChoice",
"factory.fuzzy.FuzzyInteger"
] | [((919, 970), 'factory.fuzzy.FuzzyChoice', 'FuzzyChoice', ([], {'choices': '[1001, 2002, 3003, 4747, 9999]'}), '(choices=[1001, 2002, 3003, 4747, 9999])\n', (930, 970), False, 'from factory.fuzzy import FuzzyChoice, FuzzyInteger\n'), ((985, 1030), 'factory.fuzzy.FuzzyChoice', 'FuzzyChoice', ([], {'choices': '[1000, 2000, 3000, 8000]'}), '(choices=[1000, 2000, 3000, 8000])\n', (996, 1030), False, 'from factory.fuzzy import FuzzyChoice, FuzzyInteger\n'), ((1047, 1093), 'factory.fuzzy.FuzzyChoice', 'FuzzyChoice', ([], {'choices': "['a', 'b', 'd', 'c', 'e']"}), "(choices=['a', 'b', 'd', 'c', 'e'])\n", (1058, 1093), False, 'from factory.fuzzy import FuzzyChoice, FuzzyInteger\n'), ((1110, 1160), 'factory.fuzzy.FuzzyChoice', 'FuzzyChoice', ([], {'choices': '[10.01, 200.2, 30, 4747, 999]'}), '(choices=[10.01, 200.2, 30, 4747, 999])\n', (1121, 1160), False, 'from factory.fuzzy import FuzzyChoice, FuzzyInteger\n'), ((1172, 1199), 'factory.fuzzy.FuzzyInteger', 'FuzzyInteger', (['(0)', '(10)'], {'step': '(1)'}), '(0, 10, step=1)\n', (1184, 1199), False, 'from factory.fuzzy import FuzzyChoice, FuzzyInteger\n')] |
# Information: https://clover.coex.tech/en/simple_offboard.html#navigateglobal
import rospy
from clover import srv
from std_srvs.srv import Trigger
import math
rospy.init_node('flight')
# Service proxies for the Clover simple_offboard / landing ROS services.
get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry)
navigate = rospy.ServiceProxy('navigate', srv.Navigate)
navigate_global = rospy.ServiceProxy('navigate_global', srv.NavigateGlobal)
set_position = rospy.ServiceProxy('set_position', srv.SetPosition)
set_velocity = rospy.ServiceProxy('set_velocity', srv.SetVelocity)
set_attitude = rospy.ServiceProxy('set_attitude', srv.SetAttitude)
set_rates = rospy.ServiceProxy('set_rates', srv.SetRates)
land = rospy.ServiceProxy('land', Trigger)
# https://clover.coex.tech/en/snippets.html#wait_arrival
def wait_arrival(tolerance=0.2):
    """Block until the drone is within `tolerance` meters of its current
    navigate target (polled every 0.2 s), or ROS shuts down."""
    while not rospy.is_shutdown():
        telem = get_telemetry(frame_id='navigate_target')
        distance = math.sqrt(telem.x ** 2 + telem.y ** 2 + telem.z ** 2)
        if distance < tolerance:
            return
        rospy.sleep(0.2)
start = get_telemetry()
# Without a global position fix, lat is NaN and navigate_global cannot work.
if math.isnan(start.lat):
    raise Exception('No global position, install and configure GPS sensor: https://clover.coex.tech/gps')
print('Start point global position: lat={}, lon={}'.format(start.lat, start.lon))
print('Take off 3 meters')
navigate(x=0, y=0, z=3, frame_id='body', auto_arm=True)
wait_arrival()
print('Fly 1 arcsecond to the North (approx. 30 meters)')
# yaw=math.inf -- per the Clover API this keeps the current yaw unchanged.
navigate_global(lat=start.lat+1.0/60/60, lon=start.lon, z=start.z+3, yaw=math.inf, speed=5)
wait_arrival()
print('Fly to home position')
navigate_global(lat=start.lat, lon=start.lon, z=start.z+3, yaw=math.inf, speed=5)
wait_arrival()
print('Land')
land()
| [
"rospy.is_shutdown",
"rospy.init_node",
"rospy.ServiceProxy",
"math.sqrt",
"rospy.sleep",
"math.isnan"
] | [((162, 187), 'rospy.init_node', 'rospy.init_node', (['"""flight"""'], {}), "('flight')\n", (177, 187), False, 'import rospy\n'), ((205, 258), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""get_telemetry"""', 'srv.GetTelemetry'], {}), "('get_telemetry', srv.GetTelemetry)\n", (223, 258), False, 'import rospy\n'), ((270, 314), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""navigate"""', 'srv.Navigate'], {}), "('navigate', srv.Navigate)\n", (288, 314), False, 'import rospy\n'), ((333, 390), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""navigate_global"""', 'srv.NavigateGlobal'], {}), "('navigate_global', srv.NavigateGlobal)\n", (351, 390), False, 'import rospy\n'), ((406, 457), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""set_position"""', 'srv.SetPosition'], {}), "('set_position', srv.SetPosition)\n", (424, 457), False, 'import rospy\n'), ((473, 524), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""set_velocity"""', 'srv.SetVelocity'], {}), "('set_velocity', srv.SetVelocity)\n", (491, 524), False, 'import rospy\n'), ((540, 591), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""set_attitude"""', 'srv.SetAttitude'], {}), "('set_attitude', srv.SetAttitude)\n", (558, 591), False, 'import rospy\n'), ((604, 649), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""set_rates"""', 'srv.SetRates'], {}), "('set_rates', srv.SetRates)\n", (622, 649), False, 'import rospy\n'), ((657, 692), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""land"""', 'Trigger'], {}), "('land', Trigger)\n", (675, 692), False, 'import rospy\n'), ((1027, 1048), 'math.isnan', 'math.isnan', (['start.lat'], {}), '(start.lat)\n', (1037, 1048), False, 'import math\n'), ((798, 817), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (815, 817), False, 'import rospy\n'), ((981, 997), 'rospy.sleep', 'rospy.sleep', (['(0.2)'], {}), '(0.2)\n', (992, 997), False, 'import rospy\n'), ((888, 941), 'math.sqrt', 'math.sqrt', (['(telem.x ** 2 + telem.y ** 2 + telem.z ** 2)'], {}), 
'(telem.x ** 2 + telem.y ** 2 + telem.z ** 2)\n', (897, 941), False, 'import math\n')] |
"""starts a sync remote server
"""
import os
import getpass
import pathlib
import logging
import click
from . import cli
import paramiko
import paramiko.sftp_client
import syncro.support as support
import syncro.cli as cli
# Module-level logger, named after this module for hierarchical configuration.
logger = logging.getLogger(__name__)
def add_arguments(parser):
    """Register this tool's CLI options on *parser* (an ArgumentParser)."""
    parser.add_argument("host")
    current_user = getpass.getuser()
    parser.add_argument("-u", "--username", default=current_user)
    parser.add_argument("-p", "--password")
def process_options(options):
    """Hook for post-parse validation/normalisation of options; currently a no-op."""
    pass
def main(options):
    """Deploy and start the remote sync server over an SSH transport.

    NOTE(review): this function is dead code — it is shadowed by the
    click-based ``main`` defined below, and it references ``transport``
    which is never defined here (calling it would raise NameError).
    Confirm whether it can be deleted or should be merged into the
    click entry point.
    """
    host, port, username = options.host, 22, options.username
    startup_delay_s = 2
    print(support.remote(transport, ["ls", "-la",])[1])
    #print(support.remote(transport, ["/bin/echo", "$$",]))
    #print(support.remote(transport, ["/bin/echo", "$$",]))
    # Open an SFTP channel on the (undefined) transport.
    sftp = paramiko.sftp_client.SFTPClient.from_transport(transport)
    # transfer the remote server
    sftp.put(pathlib.Path(__file__).parent / "remote.py", "remote.py")
    # connect the secure end points
    support.shell(transport)
@click.command()
@click.argument("host")
@click.option('--password', hide_input=True)
@click.option('--username', default=lambda: getpass.getuser())
@cli.standard(quiet=True)
def main(host, username, password):
    """Connect to HOST over SSH and list the remote home directory."""
    logger.debug("A")
    logger.info("B")
    logger.warning("C")
    port = 22
    # Security: never echo the password — credentials must not reach stdout.
    print("one", username)
    client = paramiko.client.SSHClient()
    client.load_system_host_keys()
    client.load_host_keys(pathlib.Path("~/.ssh/known_hosts").expanduser())
    client.connect(host, port, username=username, password=password)
    try:
        transport = client.get_transport()
        # Keep-alive every 2s so idle sessions are not dropped.
        transport.set_keepalive(2)
        print(support.remote(transport, ["ls", "-la",])[1])
    finally:
        # Always release the SSH connection, even if the remote command fails.
        client.close()
# @cli.add_logging()
# def two(*args, **kwargs):
# print("two", args, kwargs)
#
# @cli.add_logging(1, b=2)
# def three(*args, **kwargs):
# print("three", args, kwargs)
if __name__ == '__main__':
    # Script entry point: run the click-based command-line interface.
    main()
| [
"logging.getLogger",
"click.argument",
"paramiko.sftp_client.SFTPClient.from_transport",
"syncro.support.shell",
"pathlib.Path",
"click.option",
"paramiko.client.SSHClient",
"getpass.getuser",
"click.command",
"syncro.support.remote",
"syncro.cli.standard"
] | [((237, 264), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (254, 264), False, 'import logging\n'), ((1014, 1029), 'click.command', 'click.command', ([], {}), '()\n', (1027, 1029), False, 'import click\n'), ((1031, 1053), 'click.argument', 'click.argument', (['"""host"""'], {}), "('host')\n", (1045, 1053), False, 'import click\n'), ((1055, 1098), 'click.option', 'click.option', (['"""--password"""'], {'hide_input': '(True)'}), "('--password', hide_input=True)\n", (1067, 1098), False, 'import click\n'), ((1163, 1187), 'syncro.cli.standard', 'cli.standard', ([], {'quiet': '(True)'}), '(quiet=True)\n', (1175, 1187), True, 'import syncro.cli as cli\n'), ((782, 839), 'paramiko.sftp_client.SFTPClient.from_transport', 'paramiko.sftp_client.SFTPClient.from_transport', (['transport'], {}), '(transport)\n', (828, 839), False, 'import paramiko\n'), ((986, 1010), 'syncro.support.shell', 'support.shell', (['transport'], {}), '(transport)\n', (999, 1010), True, 'import syncro.support as support\n'), ((1373, 1400), 'paramiko.client.SSHClient', 'paramiko.client.SSHClient', ([], {}), '()\n', (1398, 1400), False, 'import paramiko\n'), ((379, 396), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (394, 396), False, 'import getpass\n'), ((604, 644), 'syncro.support.remote', 'support.remote', (['transport', "['ls', '-la']"], {}), "(transport, ['ls', '-la'])\n", (618, 644), True, 'import syncro.support as support\n'), ((1662, 1702), 'syncro.support.remote', 'support.remote', (['transport', "['ls', '-la']"], {}), "(transport, ['ls', '-la'])\n", (1676, 1702), True, 'import syncro.support as support\n'), ((1143, 1160), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (1158, 1160), False, 'import getpass\n'), ((887, 909), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (899, 909), False, 'import pathlib\n'), ((1462, 1496), 'pathlib.Path', 'pathlib.Path', (['"""~/.ssh/known_hosts"""'], {}), "('~/.ssh/known_hosts')\n", 
(1474, 1496), False, 'import pathlib\n')] |
from django.urls import path
from . import views as user_views
# URL routes for the users app; names are used for reverse() lookups.
urlpatterns = [
    # Staff-only account registration page.
    path('staff_admin/register/', user_views.register, name='register'),
    # Logged-in user's profile page.
    path('profile/', user_views.profile, name='profile'),
    # Edit the set of meal categories.
    path(
        'update_meal_categories/', user_views.edit_meal_cats,
        name='edit_meal_cats'),
    # First-login setup flow for new users.
    path('new_user_setup', user_views.new_user_setup, name='new_user_setup')
]
| [
"django.urls.path"
] | [((84, 151), 'django.urls.path', 'path', (['"""staff_admin/register/"""', 'user_views.register'], {'name': '"""register"""'}), "('staff_admin/register/', user_views.register, name='register')\n", (88, 151), False, 'from django.urls import path\n'), ((157, 209), 'django.urls.path', 'path', (['"""profile/"""', 'user_views.profile'], {'name': '"""profile"""'}), "('profile/', user_views.profile, name='profile')\n", (161, 209), False, 'from django.urls import path\n'), ((215, 301), 'django.urls.path', 'path', (['"""update_meal_categories/"""', 'user_views.edit_meal_cats'], {'name': '"""edit_meal_cats"""'}), "('update_meal_categories/', user_views.edit_meal_cats, name=\n 'edit_meal_cats')\n", (219, 301), False, 'from django.urls import path\n'), ((319, 391), 'django.urls.path', 'path', (['"""new_user_setup"""', 'user_views.new_user_setup'], {'name': '"""new_user_setup"""'}), "('new_user_setup', user_views.new_user_setup, name='new_user_setup')\n", (323, 391), False, 'from django.urls import path\n')] |
"""The module for running tgcf in past mode.
- past mode can only operate with a user account.
- past mode deals with all existing messages.
"""
import asyncio
import logging
import time
from telethon import TelegramClient
from telethon.errors.rpcerrorlist import FloodWaitError
from telethon.tl.custom.message import Message
from telethon.tl.patched import MessageService
from tgcf import config
from tgcf import storage as st
from tgcf.config import API_HASH, API_ID, CONFIG, SESSION, write_config
from tgcf.plugins import apply_plugins
from tgcf.utils import send_message
async def forward_job() -> None:
    """Forward all existing messages in the concerned chats.

    For every configured (source, destinations) pair, replays the source's
    history starting at the stored offset, applies the plugin chain, forwards
    the result to each destination, and persists progress after every message
    so an interrupted run resumes where it stopped.
    """
    async with TelegramClient(SESSION, API_ID, API_HASH) as client:
        config.from_to = await config.load_from_to(client, config.CONFIG.forwards)
        client: TelegramClient
        for from_to, forward in zip(config.from_to.items(), config.CONFIG.forwards):
            src, dest = from_to
            last_id = 0
            forward: config.Forward
            logging.info(f"Forwarding messages from {src} to {dest}")
            async for message in client.iter_messages(
                src, reverse=True, offset_id=forward.offset
            ):
                message: Message
                event = st.DummyEvent(message.chat_id, message.id)
                event_uid = st.EventUid(event)
                if forward.end and last_id >= forward.end:
                    logging.info(f"reached end id {forward.end}")
                    break
                # Service messages (joins, pins, ...) have no forwardable content.
                if isinstance(message, MessageService):
                    continue
                try:
                    tm = await apply_plugins(message)
                    if not tm:
                        continue
                    st.stored[event_uid] = {}
                    if message.is_reply:
                        r_event = st.DummyEvent(
                            message.chat_id, message.reply_to_msg_id
                        )
                        r_event_uid = st.EventUid(r_event)
                    for d in dest:
                        # Preserve reply threading when the replied-to message
                        # was itself forwarded earlier.
                        if message.is_reply and r_event_uid in st.stored:
                            tm.reply_to = st.stored.get(r_event_uid).get(d)
                        fwded_msg = await send_message(d, tm)
                        st.stored[event_uid].update({d: fwded_msg.id})
                    tm.clear()
                    last_id = message.id
                    logging.info(f"forwarding message with id = {last_id}")
                    # Persist progress so a restart resumes after this message.
                    forward.offset = last_id
                    write_config(CONFIG)
                    # BUG FIX: time.sleep() here blocked the whole asyncio
                    # event loop; use the non-blocking coroutine sleep.
                    await asyncio.sleep(CONFIG.past.delay)
                    logging.info(f"slept for {CONFIG.past.delay} seconds")
                except FloodWaitError as fwe:
                    logging.info(f"Sleeping for {fwe}")
                    await asyncio.sleep(delay=fwe.seconds)
                except Exception as err:
                    # Best-effort: log the failure and continue with the
                    # next message rather than aborting the whole job.
                    logging.exception(err)
| [
"tgcf.config.from_to.items",
"tgcf.plugins.apply_plugins",
"tgcf.config.write_config",
"tgcf.storage.stored.get",
"tgcf.storage.DummyEvent",
"logging.info",
"time.sleep",
"logging.exception",
"tgcf.utils.send_message",
"asyncio.sleep",
"tgcf.storage.EventUid",
"tgcf.config.load_from_to",
"te... | [((693, 734), 'telethon.TelegramClient', 'TelegramClient', (['SESSION', 'API_ID', 'API_HASH'], {}), '(SESSION, API_ID, API_HASH)\n', (707, 734), False, 'from telethon import TelegramClient\n'), ((777, 828), 'tgcf.config.load_from_to', 'config.load_from_to', (['client', 'config.CONFIG.forwards'], {}), '(client, config.CONFIG.forwards)\n', (796, 828), False, 'from tgcf import config\n'), ((896, 918), 'tgcf.config.from_to.items', 'config.from_to.items', ([], {}), '()\n', (916, 918), False, 'from tgcf import config\n'), ((1049, 1106), 'logging.info', 'logging.info', (['f"""Forwarding messages from {src} to {dest}"""'], {}), "(f'Forwarding messages from {src} to {dest}')\n", (1061, 1106), False, 'import logging\n'), ((1294, 1336), 'tgcf.storage.DummyEvent', 'st.DummyEvent', (['message.chat_id', 'message.id'], {}), '(message.chat_id, message.id)\n', (1307, 1336), True, 'from tgcf import storage as st\n'), ((1365, 1383), 'tgcf.storage.EventUid', 'st.EventUid', (['event'], {}), '(event)\n', (1376, 1383), True, 'from tgcf import storage as st\n'), ((1464, 1509), 'logging.info', 'logging.info', (['f"""reached end id {forward.end}"""'], {}), "(f'reached end id {forward.end}')\n", (1476, 1509), False, 'import logging\n'), ((2461, 2516), 'logging.info', 'logging.info', (['f"""forwarding message with id = {last_id}"""'], {}), "(f'forwarding message with id = {last_id}')\n", (2473, 2516), False, 'import logging\n'), ((2582, 2602), 'tgcf.config.write_config', 'write_config', (['CONFIG'], {}), '(CONFIG)\n', (2594, 2602), False, 'from tgcf.config import API_HASH, API_ID, CONFIG, SESSION, write_config\n'), ((2623, 2652), 'time.sleep', 'time.sleep', (['CONFIG.past.delay'], {}), '(CONFIG.past.delay)\n', (2633, 2652), False, 'import time\n'), ((2673, 2727), 'logging.info', 'logging.info', (['f"""slept for {CONFIG.past.delay} seconds"""'], {}), "(f'slept for {CONFIG.past.delay} seconds')\n", (2685, 2727), False, 'import logging\n'), ((1673, 1695), 'tgcf.plugins.apply_plugins', 
'apply_plugins', (['message'], {}), '(message)\n', (1686, 1695), False, 'from tgcf.plugins import apply_plugins\n'), ((1882, 1937), 'tgcf.storage.DummyEvent', 'st.DummyEvent', (['message.chat_id', 'message.reply_to_msg_id'], {}), '(message.chat_id, message.reply_to_msg_id)\n', (1895, 1937), True, 'from tgcf import storage as st\n'), ((2030, 2050), 'tgcf.storage.EventUid', 'st.EventUid', (['r_event'], {}), '(r_event)\n', (2041, 2050), True, 'from tgcf import storage as st\n'), ((2795, 2830), 'logging.info', 'logging.info', (['f"""Sleeping for {fwe}"""'], {}), "(f'Sleeping for {fwe}')\n", (2807, 2830), False, 'import logging\n'), ((2951, 2973), 'logging.exception', 'logging.exception', (['err'], {}), '(err)\n', (2968, 2973), False, 'import logging\n'), ((2278, 2297), 'tgcf.utils.send_message', 'send_message', (['d', 'tm'], {}), '(d, tm)\n', (2290, 2297), False, 'from tgcf.utils import send_message\n'), ((2857, 2889), 'asyncio.sleep', 'asyncio.sleep', ([], {'delay': 'fwe.seconds'}), '(delay=fwe.seconds)\n', (2870, 2889), False, 'import asyncio\n'), ((2202, 2228), 'tgcf.storage.stored.get', 'st.stored.get', (['r_event_uid'], {}), '(r_event_uid)\n', (2215, 2228), True, 'from tgcf import storage as st\n')] |
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import pickle
import numpy as np
from IPython import embed
from matplotlib import pylab as plt
import shared
import mmd_experiment
def process_mmd_experiment(width_class):
    """Plot MMD^2 between GP and NN draws against network width.

    Loads the pickled results for *width_class*, averages the squared MMDs
    over repeats, and saves/shows an errorbar plot (two standard errors)
    with the calibration MMD drawn as a dashed reference line.
    """
    results_file_name = mmd_experiment.results_file_stub + "_" + width_class + ".pickle"
    # Use a context manager so the file handle is not leaked.
    with open(results_file_name, 'rb') as results_file:
        results = pickle.load(results_file)
    callibration_mmds = np.loadtxt('results/callibration_mmds.csv')
    mean_callibration = np.mean(callibration_mmds)

    mmd_squareds = results['mmd_squareds']
    hidden_layer_numbers = results['hidden_layer_numbers']
    hidden_unit_numbers = results['hidden_unit_numbers']
    num_repeats = mmd_squareds.shape[2]

    mean_mmds = np.mean(mmd_squareds, axis=2)
    # Standard error of the mean across repeats.
    std_mmds = np.std(mmd_squareds, axis=2) / np.sqrt(num_repeats)

    plt.figure()
    for index, hidden_layer_number in enumerate(hidden_layer_numbers):
        if hidden_layer_number == 1:
            layer_string = ' hidden layer'
        else:
            layer_string = ' hidden layers'
        line_name = str(hidden_layer_number) + layer_string
        plt.errorbar(hidden_unit_numbers, mean_mmds[:, index], yerr=2. * std_mmds[:, index], label=line_name)
    plt.xlabel('Number of hidden units per layer')
    plt.xlim([0, 60])
    plt.ylabel('MMD SQUARED(GP, NN)')
    plt.ylim([0., 0.02])
    # Dashed line: MMD^2 level expected from sampling noise alone.
    plt.axhline(y=mean_callibration, color='r', linestyle='--')
    plt.legend()
    output_file_name = "../figures/mmds_" + width_class + ".pdf"
    plt.savefig(output_file_name)
    # NOTE(review): removed leftover IPython embed() debugging call that
    # blocked the script in an interactive shell before showing the plot.
    plt.show()
if __name__ == '__main__':
    # Require exactly one positional argument naming a known width class.
    if len(sys.argv)!=2 or sys.argv[1] not in shared.valid_width_classes:
        print("Usage: ", sys.argv[0], " <width_class>")
        sys.exit(-1)
    process_mmd_experiment(sys.argv[1])
| [
"numpy.mean",
"matplotlib.pylab.xlim",
"matplotlib.pylab.savefig",
"numpy.sqrt",
"matplotlib.pylab.errorbar",
"sys.exit",
"matplotlib.pylab.figure",
"matplotlib.pylab.axhline",
"matplotlib.pylab.ylim",
"matplotlib.pylab.legend",
"IPython.embed",
"matplotlib.pylab.xlabel",
"matplotlib.pylab.s... | [((930, 973), 'numpy.loadtxt', 'np.loadtxt', (['"""results/callibration_mmds.csv"""'], {}), "('results/callibration_mmds.csv')\n", (940, 973), True, 'import numpy as np\n'), ((998, 1024), 'numpy.mean', 'np.mean', (['callibration_mmds'], {}), '(callibration_mmds)\n', (1005, 1024), True, 'import numpy as np\n'), ((1250, 1279), 'numpy.mean', 'np.mean', (['mmd_squareds'], {'axis': '(2)'}), '(mmd_squareds, axis=2)\n', (1257, 1279), True, 'import numpy as np\n'), ((1364, 1376), 'matplotlib.pylab.figure', 'plt.figure', ([], {}), '()\n', (1374, 1376), True, 'from matplotlib import pylab as plt\n'), ((1791, 1837), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""Number of hidden units per layer"""'], {}), "('Number of hidden units per layer')\n", (1801, 1837), True, 'from matplotlib import pylab as plt\n'), ((1842, 1859), 'matplotlib.pylab.xlim', 'plt.xlim', (['[0, 60]'], {}), '([0, 60])\n', (1850, 1859), True, 'from matplotlib import pylab as plt\n'), ((1863, 1896), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['"""MMD SQUARED(GP, NN)"""'], {}), "('MMD SQUARED(GP, NN)')\n", (1873, 1896), True, 'from matplotlib import pylab as plt\n'), ((1901, 1922), 'matplotlib.pylab.ylim', 'plt.ylim', (['[0.0, 0.02]'], {}), '([0.0, 0.02])\n', (1909, 1922), True, 'from matplotlib import pylab as plt\n'), ((1925, 1984), 'matplotlib.pylab.axhline', 'plt.axhline', ([], {'y': 'mean_callibration', 'color': '"""r"""', 'linestyle': '"""--"""'}), "(y=mean_callibration, color='r', linestyle='--')\n", (1936, 1984), True, 'from matplotlib import pylab as plt\n'), ((1989, 2001), 'matplotlib.pylab.legend', 'plt.legend', ([], {}), '()\n', (1999, 2001), True, 'from matplotlib import pylab as plt\n'), ((2071, 2100), 'matplotlib.pylab.savefig', 'plt.savefig', (['output_file_name'], {}), '(output_file_name)\n', (2082, 2100), True, 'from matplotlib import pylab as plt\n'), ((2105, 2112), 'IPython.embed', 'embed', ([], {}), '()\n', (2110, 2112), False, 'from IPython import embed\n'), 
((2117, 2127), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (2125, 2127), True, 'from matplotlib import pylab as plt\n'), ((1299, 1327), 'numpy.std', 'np.std', (['mmd_squareds'], {'axis': '(2)'}), '(mmd_squareds, axis=2)\n', (1305, 1327), True, 'import numpy as np\n'), ((1334, 1354), 'numpy.sqrt', 'np.sqrt', (['num_repeats'], {}), '(num_repeats)\n', (1341, 1354), True, 'import numpy as np\n'), ((1684, 1791), 'matplotlib.pylab.errorbar', 'plt.errorbar', (['hidden_unit_numbers', 'mean_mmds[:, index]'], {'yerr': '(2.0 * std_mmds[:, index])', 'label': 'line_name'}), '(hidden_unit_numbers, mean_mmds[:, index], yerr=2.0 * std_mmds[\n :, index], label=line_name)\n', (1696, 1791), True, 'from matplotlib import pylab as plt\n'), ((2294, 2306), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2302, 2306), False, 'import sys\n')] |
from config import config
import os
# Sanity check: show the DB URL from the environment versus the loaded config.
print('env DBURL:', os.environ.get('DB_URL'))
print('config:', config.db_url)
| [
"os.environ.get"
] | [((57, 81), 'os.environ.get', 'os.environ.get', (['"""DB_URL"""'], {}), "('DB_URL')\n", (71, 81), False, 'import os\n')] |