index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
3,179
|
smartgang/KViewer
|
refs/heads/master
|
/complex.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'complex.ui'
#
# Created: Sat Jul 14 20:48:01 2018
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
    """pyside-uic generated UI scaffold for 'complex.ui'.

    Do not edit by hand — regeneration from the .ui file overwrites this.
    Call setupUi() on a QMainWindow to build the widget tree, then
    retranslateUi() (called from setupUi) installs all user-visible strings.
    """

    def setupUi(self, MainWindow):
        # Build the full widget hierarchy; geometry values come from the .ui file.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(803, 600)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Outer tab widget; first page hosts a nested tab widget.
        self.tabWidget = QtGui.QTabWidget(self.centralwidget)
        self.tabWidget.setGeometry(QtCore.QRect(0, 0, 801, 551))
        self.tabWidget.setObjectName("tabWidget")
        self.tab = QtGui.QWidget()
        self.tab.setObjectName("tab")
        self.tabWidget_2 = QtGui.QTabWidget(self.tab)
        self.tabWidget_2.setGeometry(QtCore.QRect(0, 0, 801, 531))
        self.tabWidget_2.setObjectName("tabWidget_2")
        # Inner page 1: a tree widget with one top-level item and one child.
        self.tab_3 = QtGui.QWidget()
        self.tab_3.setObjectName("tab_3")
        self.treeWidget = QtGui.QTreeWidget(self.tab_3)
        self.treeWidget.setGeometry(QtCore.QRect(0, 0, 791, 501))
        self.treeWidget.setObjectName("treeWidget")
        item_0 = QtGui.QTreeWidgetItem(self.treeWidget)
        item_1 = QtGui.QTreeWidgetItem(item_0)
        self.tabWidget_2.addTab(self.tab_3, "")
        # Inner page 2: a date editor stacked above a calendar.
        self.tab_4 = QtGui.QWidget()
        self.tab_4.setObjectName("tab_4")
        self.verticalLayoutWidget = QtGui.QWidget(self.tab_4)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 791, 501))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.dateEdit = QtGui.QDateEdit(self.verticalLayoutWidget)
        self.dateEdit.setObjectName("dateEdit")
        self.verticalLayout.addWidget(self.dateEdit)
        self.calendarWidget = QtGui.QCalendarWidget(self.verticalLayoutWidget)
        self.calendarWidget.setObjectName("calendarWidget")
        self.verticalLayout.addWidget(self.calendarWidget)
        self.tabWidget_2.addTab(self.tab_4, "")
        self.tabWidget.addTab(self.tab, "")
        # Outer page 2: radio-button group, dial+LCD group, font combo,
        # centered label and a progress bar.
        self.tab_2 = QtGui.QWidget()
        self.tab_2.setObjectName("tab_2")
        self.groupBox = QtGui.QGroupBox(self.tab_2)
        self.groupBox.setGeometry(QtCore.QRect(20, 10, 73, 92))
        self.groupBox.setObjectName("groupBox")
        self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.radioButton = QtGui.QRadioButton(self.groupBox)
        self.radioButton.setObjectName("radioButton")
        self.verticalLayout_2.addWidget(self.radioButton)
        self.radioButton_2 = QtGui.QRadioButton(self.groupBox)
        self.radioButton_2.setObjectName("radioButton_2")
        self.verticalLayout_2.addWidget(self.radioButton_2)
        self.radioButton_3 = QtGui.QRadioButton(self.groupBox)
        self.radioButton_3.setObjectName("radioButton_3")
        self.verticalLayout_2.addWidget(self.radioButton_3)
        self.groupBox_2 = QtGui.QGroupBox(self.tab_2)
        self.groupBox_2.setGeometry(QtCore.QRect(440, 30, 321, 151))
        self.groupBox_2.setObjectName("groupBox_2")
        self.layoutWidget = QtGui.QWidget(self.groupBox_2)
        self.layoutWidget.setGeometry(QtCore.QRect(60, 30, 172, 102))
        self.layoutWidget.setObjectName("layoutWidget")
        self.horizontalLayout = QtGui.QHBoxLayout(self.layoutWidget)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.dial = QtGui.QDial(self.layoutWidget)
        self.dial.setObjectName("dial")
        self.horizontalLayout.addWidget(self.dial)
        self.lcdNumber = QtGui.QLCDNumber(self.layoutWidget)
        self.lcdNumber.setObjectName("lcdNumber")
        self.horizontalLayout.addWidget(self.lcdNumber)
        self.fontComboBox = QtGui.QFontComboBox(self.tab_2)
        self.fontComboBox.setGeometry(QtCore.QRect(60, 230, 381, 22))
        self.fontComboBox.setObjectName("fontComboBox")
        self.label = QtGui.QLabel(self.tab_2)
        self.label.setGeometry(QtCore.QRect(60, 290, 381, 71))
        self.label.setScaledContents(False)
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setWordWrap(False)
        self.label.setObjectName("label")
        self.progressBar = QtGui.QProgressBar(self.tab_2)
        self.progressBar.setGeometry(QtCore.QRect(60, 480, 661, 23))
        self.progressBar.setProperty("value", 24)
        self.progressBar.setObjectName("progressBar")
        self.tabWidget.addTab(self.tab_2, "")
        # Outer page 3: empty layout placeholder (drawing page).
        self.tab_5 = QtGui.QWidget()
        self.tab_5.setObjectName("tab_5")
        self.verticalLayoutWidget_2 = QtGui.QWidget(self.tab_5)
        self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(-1, -1, 791, 531))
        self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
        self.verticalLayout_3 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
        self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.tabWidget.addTab(self.tab_5, "")
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 803, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        # Initially selected tabs as saved in the .ui file.
        self.tabWidget.setCurrentIndex(2)
        self.tabWidget_2.setCurrentIndex(1)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        # Install all translatable user-visible strings (some are Chinese).
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
        self.treeWidget.headerItem().setText(0, QtGui.QApplication.translate("MainWindow", "第一列", None, QtGui.QApplication.UnicodeUTF8))
        self.treeWidget.headerItem().setText(1, QtGui.QApplication.translate("MainWindow", "New Column", None, QtGui.QApplication.UnicodeUTF8))
        # Temporarily disable sorting while setting item texts (uic idiom).
        __sortingEnabled = self.treeWidget.isSortingEnabled()
        self.treeWidget.setSortingEnabled(False)
        self.treeWidget.topLevelItem(0).setText(0, QtGui.QApplication.translate("MainWindow", "子条目一", None, QtGui.QApplication.UnicodeUTF8))
        self.treeWidget.topLevelItem(0).child(0).setText(0, QtGui.QApplication.translate("MainWindow", "子条目一一", None, QtGui.QApplication.UnicodeUTF8))
        self.treeWidget.setSortingEnabled(__sortingEnabled)
        self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_3), QtGui.QApplication.translate("MainWindow", "树", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_4), QtGui.QApplication.translate("MainWindow", "日历", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), QtGui.QApplication.translate("MainWindow", "Tab 1", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("MainWindow", "功能选择", None, QtGui.QApplication.UnicodeUTF8))
        self.radioButton.setText(QtGui.QApplication.translate("MainWindow", "默认", None, QtGui.QApplication.UnicodeUTF8))
        self.radioButton_2.setText(QtGui.QApplication.translate("MainWindow", "重置", None, QtGui.QApplication.UnicodeUTF8))
        self.radioButton_3.setText(QtGui.QApplication.translate("MainWindow", "选项3", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_2.setTitle(QtGui.QApplication.translate("MainWindow", "移动刻度盘", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), QtGui.QApplication.translate("MainWindow", "Tab 2", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_5), QtGui.QApplication.translate("MainWindow", "绘图", None, QtGui.QApplication.UnicodeUTF8))
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,180
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/ATR.py
|
# -*- coding: utf-8 -*-
from IndexerBase import IndexerBase
import numpy as np
class ATR(IndexerBase):
    """ATR (Average True Range) indicator plotted on a pyqtgraph plot.

    Fix vs. original: removed the dead loop counter ``i`` in draw_indexer
    (initialized and incremented but never read).
    """

    indexer_name = 'ATR'
    indexer_name_list = ['ATR', 'TR']
    default_para_dic = {
        'N': 26,
    }

    def __init__(self, raw_data, plt):
        super(ATR, self).__init__(raw_data, plt)
        # Indicator/parameter names can depend on the parameters, so they are
        # (re)assigned per instance (original comment, translated).
        self.indexer_name_list = ['ATR', 'TR']
        self.indexer_color_dic = {
            'ATR': 'blue',
            'TR': 'magenta'
        }

    def calculate_indexer_value(self):
        """Fill indexer_value_dic with the TR and N-period ATR series."""
        n = self.para_dic['N']
        closeshift1 = self.raw_data.close.shift(1).fillna(0)
        # True-range candidates: high-low, |high-prev_close|, |low-prev_close|.
        self.raw_data['c'] = self.raw_data.high - self.raw_data.low
        self.raw_data['d'] = np.abs(self.raw_data.high - closeshift1)
        self.raw_data['b'] = np.abs(self.raw_data.low - closeshift1)
        self.raw_data['TR'] = self.raw_data[['c', 'd', 'b']].max(axis=1)
        # Project-specific sign convention: TR is negated on up bars
        # (open < close); ATR is |rolling mean| of that signed series.
        self.raw_data.loc[self.raw_data['open'] < self.raw_data['close'], 'TR'] = 0 - self.raw_data['TR']
        self.raw_data['ATR'] = np.abs(self.raw_data['TR'].rolling(window=n).mean())
        self.indexer_value_dic['TR'] = self.raw_data['TR'].tolist()
        self.indexer_value_dic['ATR'] = self.raw_data['ATR'].tolist()

    def draw_indexer(self):
        """Create one curve per series on the plot and load its data."""
        for indexer_name, values in self.indexer_value_dic.items():
            # First letter of the color name is used as the pen code
            # ('b' for blue, 'm' for magenta).
            c = self.indexer_color_dic[indexer_name][0]
            self.plt_dic[indexer_name] = self.plt.plot(name=indexer_name, pen=c)
            self.plt_dic[indexer_name].setData(values)

    def re_draw_indexer(self):
        """Refresh existing curves with the current indicator values."""
        for pname, values in self.indexer_value_dic.items():
            self.plt_dic[pname].setData(values)

    def get_polar_value(self, start_pos, end_pos):
        """Return (max, min) over both series within [start_pos, end_pos)."""
        max_v = max(max(self.indexer_value_dic['ATR'][start_pos:end_pos]),
                    max(self.indexer_value_dic['TR'][start_pos:end_pos]))
        min_v = min(min(self.indexer_value_dic['ATR'][start_pos:end_pos]),
                    min(self.indexer_value_dic['TR'][start_pos:end_pos]))
        return max_v, min_v
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,181
|
smartgang/KViewer
|
refs/heads/master
|
/DataInterface/DataInterface.py
|
# -*- coding: utf-8 -*-
import pandas as pd
import time
import os
# 读取中文路径
Collection_Path = unicode('D:\\002 MakeLive\DataCollection\\', 'utf-8')
PUBLIC_DATA_PATH = unicode('D:\\002 MakeLive\DataCollection\public data\\', 'utf-8')
RAW_DATA_PATH = unicode('D:\\002 MakeLive\DataCollection\\raw data\\', 'utf-8')
TICKS_DATA_PATH = unicode('D:\\002 MakeLive\DataCollection\\ticks data\\', 'utf-8')
BAR_DATA_PATH = unicode('D:\\002 MakeLive\DataCollection\\bar data\\', 'utf-8')
VOLUME_DATA_PATH = unicode('D:\\002 MakeLive\DataCollection\\volume data\\', 'utf-8')
TICKS_DATA_START_DATE = '2017-8-17' # 包含了8-17日
LAST_CONCAT_DATA = '2017-10-17' # 记录上次汇总数据的时间,不包含当天(要再加上一天,要不然后面truncate会不对)
DATA_TYPE_PUBLIC=1
DATA_TYPE_RAW=2
DATA_TYPE_TICKS=3
def getTradedates(exchangeid='SHFE', startdate='2016-01-01', enddate='2017-12-30'):
    """Return the trade dates of one exchange within [startdate, enddate].

    Source file is 'TradeDates.csv' in the public-data folder.
    """
    startutc = float(time.mktime(time.strptime(startdate + ' 00:00:00', "%Y-%m-%d %H:%M:%S")))
    endutc = float(time.mktime(time.strptime(enddate + ' 23:59:59', "%Y-%m-%d %H:%M:%S")))
    all_dates = pd.read_csv(PUBLIC_DATA_PATH + 'TradeDates.csv', index_col='exchange_id')
    in_range = (all_dates['utc_time'] >= startutc) & (all_dates['utc_time'] < endutc)
    selected = all_dates.loc[in_range]
    selected = selected.loc[exchangeid, :]
    selected.reset_index(inplace=True)
    # Drop the legacy CSV row-number column.
    selected.drop('Unnamed: 0', inplace=True, axis=1)
    return selected
def generatDailyClose(dailyK):
    """Build a per-trade-day frame of close, previous close and last utc_time.

    Note: adds a 'date' column to the passed-in dailyK frame (side effect
    kept for compatibility with existing callers).
    """
    # Trade day = first 10 chars ("YYYY-MM-DD") of the bar timestamp string.
    dailyK['date'] = dailyK['strtime'].str.slice(0, 10)
    close_by_day = dailyK['close'].groupby(dailyK['date'])
    utc_by_day = dailyK['utc_time'].groupby(dailyK['date'])
    daily = pd.DataFrame(close_by_day.last())
    # Previous day's close; 0 for the first day.
    daily['preclose'] = daily['close'].shift(1).fillna(0)
    daily['utc_time'] = utc_by_day.last()
    return daily
# ---------------------------------------------------------------------------------------------
def getBarData(symbol='SHFE.RB', K_MIN=60, starttime='2017-05-01 00:00:00', endtime='2018-01-01 00:00:00'):
    """Load one bar CSV and keep rows strictly inside (starttime, endtime)."""
    filename = BAR_DATA_PATH + symbol + '\\' + symbol + ' ' + str(K_MIN) + '.csv'
    bars = pd.read_csv(filename)
    startutc = float(time.mktime(time.strptime(starttime, "%Y-%m-%d %H:%M:%S")))
    endutc = float(time.mktime(time.strptime(endtime, "%Y-%m-%d %H:%M:%S")))
    window = (bars['utc_time'] > startutc) & (bars['utc_time'] < endutc)
    bars = bars.loc[window]
    # Renumber the legacy 'Unnamed: 0' column to a fresh 0..n-1 sequence.
    bars['Unnamed: 0'] = range(0, bars.shape[0])
    bars.reset_index(drop=True, inplace=True)
    return bars
def getBarBySymbol(domain_symbol, symbol, bar_type, starttime=None, endtime=None):
    """Load the bar CSV of one contract under its main-contract folder.

    Fix vs. original: removed leftover debug prints (``print (1)``,
    ``print (filename)``, ``print (2)``, ``print (starttime, endtime)``).

    :param starttime: optional inclusive lower bound, "%Y-%m-%d %H:%M:%S".
    :param endtime: optional inclusive upper bound, same format.
    """
    filename = BAR_DATA_PATH + domain_symbol + '\\' + symbol + ' ' + str(bar_type) + '.csv'
    df = pd.read_csv(filename)
    if starttime:
        startutc = float(time.mktime(time.strptime(starttime, "%Y-%m-%d %H:%M:%S")))
        df = df.loc[df['utc_time'] >= startutc]
    if endtime:
        endutc = float(time.mktime(time.strptime(endtime, "%Y-%m-%d %H:%M:%S")))
        df = df.loc[df['utc_time'] <= endutc]
    df.reset_index(drop=True, inplace=True)
    return df
def getBarBySymbolList(domain_symbol, symbollist, bar_type, startdate=None, enddate=None, cols=None):
    """Load the bar CSV of every contract in symbollist.

    Returns {symbol: DataFrame}; optional date bounds and column subset.
    """
    startutc = None
    endutc = None
    if startdate:
        # Drop rows before the start of startdate.
        startutc = float(time.mktime(time.strptime(startdate + " 00:00:00", "%Y-%m-%d %H:%M:%S")))
    if enddate:
        # Drop rows after the end of enddate.
        endutc = float(time.mktime(time.strptime(enddate + " 23:59:59", "%Y-%m-%d %H:%M:%S")))
    bardic = {}
    for symbol in symbollist:
        filename = BAR_DATA_PATH + domain_symbol + '\\' + symbol + ' ' + str(bar_type) + '.csv'
        bardf = pd.read_csv(filename)
        if cols:
            bardf = bardf[cols]
        if startutc:
            bardf = bardf.loc[bardf['utc_time'] >= startutc]
        if endutc:
            bardf = bardf.loc[bardf['utc_time'] <= endutc]
        bardic[symbol] = bardf.reset_index(drop=True)
    return bardic
def getBarDicAfterDomain(symbolinfo, bar_type, cols=None):
    # Load bar data for every contract of a main-contract series, returned as
    # {symbol: DataFrame}. Each frame is pre-filtered to the contract's own
    # main period and to the symbolinfo's overall utc range.
    domain_symbol = symbolinfo.domain_symbol
    symbollist = symbolinfo.getSymbolList()
    bardic = {}
    startutc, endutc = symbolinfo.getUtcRange()
    for symbol in symbollist:
        # NOTE(review): domain_utc_end is unpacked but never used here.
        domain_utc_start, domain_utc_end = symbolinfo.getSymbolDomainUtc(symbol)
        filename = BAR_DATA_PATH + domain_symbol + '\\' + symbol + ' ' + str(bar_type) + '.csv'
        if cols:
            bardf = pd.read_csv(filename)[cols]
        else:
            bardf = pd.read_csv(filename)
        # Only keep data from the contract's main period onward, to reduce
        # the total data volume (original comment, translated).
        bardf = bardf.loc[bardf['utc_time'] >= domain_utc_start]
        if startutc:
            bardf = bardf.loc[bardf['utc_time'] >= startutc]
        if endutc:
            bardf = bardf.loc[bardf['utc_time'] <= endutc]
        bardic[symbol] = bardf
    return bardic
def getDomainbarByDomainSymbol(symbollist, bardic, symbolDomaindic):
    """Stitch per-contract bars into one continuous main-contract series.

    For each symbol, keep only the bars inside its [start, end] main-period
    utc window from symbolDomaindic, then concatenate in symbollist order.
    Assumes symbols on both sides match — no validation (original comment).

    Fix vs. original: removed the dead ``domain_bar = pd.DataFrame()``
    initializer (immediately overwritten by pd.concat) and the large block
    of commented-out timing/sort code.
    """
    barlist = []
    for symbol in symbollist:
        symbol_domain_start, symbol_domain_end = symbolDomaindic[symbol]
        bars = bardic[symbol]
        in_window = (bars['utc_time'] >= symbol_domain_start) & (bars['utc_time'] <= symbol_domain_end)
        barlist.append(bars.loc[in_window])
    # No sort on utc_time: windows are assumed ordered and non-overlapping
    # (the original dropped the sort because it was too slow).
    domain_bar = pd.concat(barlist)
    domain_bar.reset_index(drop=True, inplace=True)
    return domain_bar
def getVolumeData(symbol='SHFE.RB', K_MIN=60, starttime='2017-05-01 00:00:00', endtime='2018-01-01 00:00:00'):
    """Load one volume CSV and keep rows strictly inside (starttime, endtime)."""
    filename = VOLUME_DATA_PATH + symbol + '\\' + symbol + ' ' + str(K_MIN) + '_volume.csv'
    volume = pd.read_csv(filename)
    startutc = float(time.mktime(time.strptime(starttime, "%Y-%m-%d %H:%M:%S")))
    endutc = float(time.mktime(time.strptime(endtime, "%Y-%m-%d %H:%M:%S")))
    window = (volume['utc_time'] > startutc) & (volume['utc_time'] < endutc)
    volume = volume.loc[window]
    # Renumber the legacy 'Unnamed: 0' column to a fresh 0..n-1 sequence.
    volume['Unnamed: 0'] = range(0, volume.shape[0])
    volume.reset_index(drop=True, inplace=True)
    return volume
def getTickDateBySymbolDate(domain_symbol='SHFE.RB', symbol='RB1810', date='2018-08-09'):
    """Load one day's tick CSV for a contract under its main-contract folder."""
    file_name = BAR_DATA_PATH + "%s\\TICK_%s\\Tick_Data_%s_%s.csv" % (domain_symbol, symbol, symbol, date)
    return pd.read_csv(file_name)
# NOTE(review): dead code — a module-level triple-quoted string holding a
# legacy getTickData() implementation. It is evaluated as a no-op string
# expression and never executed as code; kept verbatim below.
'''
def getTickData(symbol='SHFE.RB',K_MIN=60,startdate='2017-05-01',enddate='2018-01-01'):
filename=TICKS_DATA_PATH+symbol+'\\'+symbol+'ticks '+str(K_MIN)+'.csv'
df=pd.read_csv(filename)
starttime=startdate+" 00:00:00"
endtime= enddate+" 00:00:00"
startutc = float(time.mktime(time.strptime(starttime, "%Y-%m-%d %H:%M:%S")))
endutc = float(time.mktime(time.strptime(endtime,"%Y-%m-%d %H:%M:%S")))
df=df.loc[(df['utc_time']>startutc) & (df['utc_time']<endutc)]
df['Unnamed: 0'] = range(0, df.shape[0])
df.drop('Unnamed: 0.1.1',drop=True,inplace=True)
df.reset_index(drop=True,inplace=True)
#print 'get data success '+symbol+str(K_MIN)+startdate
return df
'''
def getTickByDate(symbol='SHFE.RB', tradedate='2017-08-07'):
    """Load the tick CSV of one symbol for a single trade date."""
    tick_file = TICKS_DATA_PATH + symbol + '\\' + symbol + tradedate + 'ticks.csv'
    return pd.read_csv(tick_file)
def getContractSwaplist(symbol):
    """Load the virtual-contract swap table ('<symbol>ContractSwap.csv').

    Fix vs. original: removed the unreachable ``pass`` after the return.
    NOTE(review): 'vitualContract' looks like a typo for 'virtualContract',
    but it must match the on-disk folder name, so it is kept as-is.
    """
    datapath = Collection_Path + 'vitualContract\\'
    df = pd.read_csv(datapath + symbol + 'ContractSwap.csv')
    return df
# ----------------------------------------------------------
def getCurrentPath():
    """Return the absolute path of the current working directory."""
    return os.path.abspath(os.curdir)
def getUpperPath(uppernume=1):
    """Return the absolute path `uppernume` directory levels above the CWD."""
    relative = '/'.join('..' for _ in range(uppernume))
    return os.path.abspath(relative)
# -------------------------------------------------------------
def getPriceTick(symbol):
    """Look up a symbol's minimum price increment in Contract.xlsx.

    Fix vs. original: ``DataFrame.ix`` is deprecated and removed in modern
    pandas; ``.loc`` is the exact label-based equivalent for string keys.
    """
    contract = pd.read_excel(PUBLIC_DATA_PATH + 'Contract.xlsx', index_col='Contract')
    return contract.loc[symbol, 'price_tick']
def getMultiplier(symbol):
    """Look up a symbol's contract multiplier in Contract.xlsx.

    Fix vs. original: deprecated/removed ``.ix`` replaced with the
    label-equivalent ``.loc``.
    """
    contract = pd.read_excel(PUBLIC_DATA_PATH + 'Contract.xlsx', index_col='Contract')
    return contract.loc[symbol, 'multiplier']
def getMarginRatio(symbol):
    """Look up a symbol's margin ratio in Contract.xlsx.

    Fix vs. original: deprecated/removed ``.ix`` replaced with the
    label-equivalent ``.loc``.
    """
    contract = pd.read_excel(PUBLIC_DATA_PATH + 'Contract.xlsx', index_col='Contract')
    return contract.loc[symbol, 'margin_ratio']
def getSlip(symbol):
    """Look up a symbol's configured slippage in Contract.xlsx.

    Fix vs. original: deprecated/removed ``.ix`` replaced with the
    label-equivalent ``.loc``.
    """
    contract = pd.read_excel(PUBLIC_DATA_PATH + 'Contract.xlsx', index_col='Contract')
    return contract.loc[symbol, 'slip']
class SymbolInfo:
    # Contract-information class: loads static metadata for one main-contract
    # series (price tick, multiplier, margin, fees) plus the list of real
    # contracts and their main-period windows.
    #
    # Fee-type constants: fixed fee per hand vs. rate-based fee.
    POUNDGE_TYPE_HAND = u'hand'
    POUNDGE_TYPE_RATE = u'rate'
    # NOTE(review): the bare string below is a misplaced class docstring
    # ("contract information class"); kept verbatim.
    '''合约信息类'''

    def __init__(self, domain_symbol, startdate=None, enddate=None):
        self.domain_symbol = domain_symbol
        # Static metadata indexed by main-contract symbol, plus the map of
        # real contracts belonging to each main-contract series.
        contract = pd.read_excel(PUBLIC_DATA_PATH + 'domainMap.xlsx', index_col='symbol')
        contractMapDf = pd.read_csv(PUBLIC_DATA_PATH + 'contractMap.csv', index_col='symbol')
        self.start_utc = None
        self.end_utc = None
        # Contracts mapped to this main-contract series.
        self.contractMap = contractMapDf.loc[contractMapDf['domain_symbol'] == domain_symbol]
        if startdate:
            # Drop contracts whose main period ended before startdate
            # (original comment, translated).
            self.start_utc = float(time.mktime(time.strptime(startdate + " 00:00:00", "%Y-%m-%d %H:%M:%S")))
            self.contractMap = self.contractMap.loc[self.contractMap['domain_end_utc'] > self.start_utc]
        if enddate:
            # Drop contracts whose main period started after enddate
            # (original comment, translated).
            self.end_utc = float(time.mktime(time.strptime(enddate + " 23:59:59", "%Y-%m-%d %H:%M:%S")))
            self.contractMap = self.contractMap.loc[self.contractMap['domain_start_utc'] < self.end_utc]
        self.contractMap = self.contractMap.sort_values('domain_start_utc')  # order by main-period start
        # NOTE(review): DataFrame.ix is deprecated/removed in modern pandas;
        # left unchanged here for the pandas version this project targets.
        self.active = contract.ix[domain_symbol, 'active']  # active flag
        self.priceTick = contract.ix[domain_symbol, 'price_tick']
        self.multiplier = contract.ix[domain_symbol, 'multiplier']
        self.marginRatio = contract.ix[domain_symbol, 'margin_ratio']
        self.slip = contract.ix[domain_symbol, 'slip']
        self.poundageType = contract.ix[domain_symbol, 'poundage_type']
        self.poundageFee = contract.ix[domain_symbol, 'poundage_fee']
        self.poundageRate = contract.ix[domain_symbol, 'poundage_rate']

    def getPriceTick(self):
        # Minimum price increment.
        return self.priceTick

    def getMultiplier(self):
        # Contract multiplier.
        return self.multiplier

    def getMarginRatio(self):
        # Margin ratio.
        return self.marginRatio

    def getSlip(self):
        # Configured slippage.
        return self.slip

    def getPoundage(self):
        # Fee configuration: (type, fixed fee, rate).
        return self.poundageType, self.poundageFee, self.poundageRate

    def getSymbolList(self):
        # Real contract symbols of this series, ordered by main-period start.
        return self.contractMap.index.tolist()

    def getSymbolDomainUtc(self, symbol):
        # (start_utc, end_utc) of the contract's main period.
        return self.contractMap.ix[symbol, 'domain_start_utc'], self.contractMap.ix[symbol, 'domain_end_utc']

    def getSymbolDomainTime(self, symbol):
        # (start_date, end_date) of the contract's main period, as strings.
        return self.contractMap.ix[symbol, 'domain_start_date'], self.contractMap.ix[symbol, 'domain_end_date']

    def getSymbolLifeDate(self, symbol):
        # Lifecycle of the contract: (listed_date, maturity_date)
        # (original comment, translated).
        return self.contractMap.ix[symbol, 'listed_date'], self.contractMap.ix[symbol, 'maturity_date']

    def getUtcRange(self):
        # Overall (start_utc, end_utc) filter supplied at construction time.
        return self.start_utc, self.end_utc

    def getSymbolDomainDic(self):
        # {symbol: [main_start_utc, main_end_utc]} for every contract.
        domainDic = {}
        symbolList = self.getSymbolList()
        for symbol in symbolList:
            s, e = self.getSymbolDomainUtc(symbol)
            domainDic[symbol] = [s, e]
        return domainDic

    def amendSymbolDomainDicByOpr(self, oprdf, closeutc_col='closeutc'):
        # Adjust the symbol->domain map using the given operations frame:
        # at a contract switch an open position can keep the previous contract
        # effectively alive past its main-period end, so the windows must be
        # corrected. Note: some periods may contain no operations, so the
        # opr symbol list can be a subset of symbolList
        # (original comments, translated).
        oprgrouped = oprdf.groupby('symbol')
        symbol_last_utc_list = oprgrouped[closeutc_col].last()
        opr_symbol_list = symbol_last_utc_list.index.tolist()
        symbol_last_utc = None
        domainDic = {}
        symbolList = self.getSymbolList()
        for symbol in symbolList:
            s, e = self.getSymbolDomainUtc(symbol)
            if symbol_last_utc:
                # If the previous contract's last operation ran past its
                # main-period end, start this contract one second after that
                # close time — avoids fetching two contracts' data for the
                # same instant (original comment, translated).
                s = symbol_last_utc + 1
            if symbol in opr_symbol_list:
                symbol_last_utc = symbol_last_utc_list[symbol]
                if symbol_last_utc and symbol_last_utc > e:
                    e = symbol_last_utc
            else:
                symbol_last_utc = None
            domainDic[symbol] = [s, e]
        return domainDic

    def isActive(self):
        # Whether the series is flagged active in domainMap.xlsx.
        return self.active
class TickDataSupplier:
    # Pre-loads all tick data of one symbol over a date range into memory,
    # then serves time-window slices of it. (Python 2 module: note the
    # print statement in __init__.)

    def __init__(self, symbol, startdate, enddate):
        self.startdate = startdate
        self.enddate = enddate
        self.startdateutc = float(time.mktime(time.strptime(startdate + ' 00:00:00', "%Y-%m-%d %H:%M:%S")))
        self.enddateutc = float(time.mktime(time.strptime(enddate + ' 23:59:59', "%Y-%m-%d %H:%M:%S")))
        self.symbol = symbol
        # symbol is "<exchange>.<security id>", e.g. "SHFE.RB".
        self.exchange, self.secid = symbol.split('.', 1)
        self.datelist = getTradedates(self.exchange, self.startdate, self.enddate)['strtime']
        self.tickdatadf = pd.DataFrame()
        # Concatenate one per-day tick CSV for every trade date in the range.
        for d in self.datelist:
            print 'Collecting tick data:', d
            self.tickdatadf = pd.concat([self.tickdatadf, getTickByDate(self.symbol, d)])

    def getTickData(self, starttime, endtime):
        # Slice the cached ticks to (starttime, endtime), exclusive bounds;
        # times are "%Y-%m-%d %H:%M:%S" strings.
        startutc = float(time.mktime(time.strptime(starttime, "%Y-%m-%d %H:%M:%S")))
        endutc = float(time.mktime(time.strptime(endtime, "%Y-%m-%d %H:%M:%S")))
        '''
        df.index=pd.to_datetime(df['utc_time'],unit='s')
        df = df.tz_localize(tz='PRC')
        df=df.truncate(before=startdate)
        '''
        df = self.tickdatadf.loc[(self.tickdatadf['utc_time'] > startutc) & (self.tickdatadf['utc_time'] < endutc)]
        # Renumber the legacy 'Unnamed: 0' column on the slice.
        # NOTE(review): assigning into a .loc slice may trigger pandas'
        # SettingWithCopyWarning — confirm intended.
        df['Unnamed: 0'] = range(0, df.shape[0])
        # df.drop('Unnamed: 0.1.1', inplace=True, axis=1)
        df.reset_index(drop=True, inplace=True)
        return df

    def getTickDataByUtc(self, startutc, endutc):
        # Same as getTickData() but with raw utc bounds (exclusive).
        df = self.tickdatadf.loc[(self.tickdatadf['utc_time'] > startutc) & (self.tickdatadf['utc_time'] < endutc)]
        df['Unnamed: 0'] = range(0, df.shape[0])
        # df.drop('Unnamed: 0.1.1', inplace=True, axis=1)
        df.reset_index(drop=True, inplace=True)
        return df

    def getDateRange(self):
        # (startdate, enddate) as the strings given at construction.
        return self.startdate, self.enddate

    def getDateUtcRange(self):
        # (start, end) of the range as utc floats.
        return self.startdateutc, self.enddateutc

    def getSymbol(self):
        return self.symbol

    def getDateList(self):
        # Trade dates (strtime column) covered by the cache.
        return self.datelist
def symbolInfoTest():
    # Manual smoke test: build a SymbolInfo for rebar (SHFE.RB) and print a
    # few views of it plus the head of each contract's hourly bars.
    # (Python 2 print statements.)
    domain_symbol = 'SHFE.RB'
    symbolinfo = SymbolInfo(domain_symbol)
    symbollist = symbolinfo.getSymbolList()
    print symbolinfo.getSymbolDomainDic()
    print symbolinfo.isActive()
    bardic = getBarBySymbolList(domain_symbol, symbollist, 3600)
    for symbol in symbollist:
        print bardic[symbol].head(5)
# ========================================================================================
if __name__ == '__main__':
    # Ad-hoc manual test entry point; alternative experiments kept commented.
    # df=getBarData("SHFE.RB",K_MIN=600,starttime='2011-10-08 00:00:00',endtime='2013-03-20 00:00:00')
    # df=getTradedates('SHFE','2017-10-01','2017-12-12')
    # ticksupplier = TickDataSupplier('SHFE.RB', '2017-10-01', '2017-12-10')
    # df1 = ticksupplier.getTickData('2017-10-01 00:00:00', '2017-12-03 22:10:15')
    # print df1.head(10)
    # print df1.tail(10)
    symbolInfoTest()
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,182
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/RSI.py
|
# -*- coding: utf-8 -*-
from IndexerBase import IndexerBase
import numpy as np
import talib
class RSI(IndexerBase):
    """RSI indicator computed via TA-Lib and drawn on a pyqtgraph plot.

    Fix vs. original: removed the dead loop counter ``i`` in draw_indexer
    (initialized and incremented but never read).
    """

    indexer_name = 'RSI'
    indexer_name_list = ['RSI']
    default_para_dic = {
        'N': 5,
    }

    def __init__(self, raw_data, plt):
        super(RSI, self).__init__(raw_data, plt)
        # Indicator/parameter names can depend on the parameters, so they are
        # (re)assigned per instance (original comment, translated).
        self.indexer_name_list = ['RSI']
        self.indexer_color_dic = {
            'RSI': 'blue'
        }

    def calculate_indexer_value(self):
        """Compute the N-period RSI over the close series."""
        n = self.para_dic['N']
        close_array = np.array(self.raw_data['close'].values, dtype='float')
        self.indexer_value_dic['RSI'] = talib.RSI(close_array, n)

    def draw_indexer(self):
        """Create the RSI curve on the plot and load its data."""
        for indexer_name, values in self.indexer_value_dic.items():
            # First letter of the color name is used as the pen code ('b').
            c = self.indexer_color_dic[indexer_name][0]
            self.plt_dic[indexer_name] = self.plt.plot(name=indexer_name, pen=c)
            self.plt_dic[indexer_name].setData(values)

    def re_draw_indexer(self):
        """Refresh the existing curve with the current values."""
        for pname, values in self.indexer_value_dic.items():
            self.plt_dic[pname].setData(values)

    def get_polar_value(self, start_pos, end_pos):
        """Return (max, min) of RSI within [start_pos, end_pos).

        NOTE(review): talib.RSI produces leading NaNs; if the slice includes
        them, max/min propagate NaN — confirm callers avoid that range.
        """
        max_v = max(self.indexer_value_dic['RSI'][start_pos:end_pos])
        min_v = min(self.indexer_value_dic['RSI'][start_pos:end_pos])
        return max_v, min_v
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,183
|
smartgang/KViewer
|
refs/heads/master
|
/untitled.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'untitled.ui'
#
# Created: Wed Jul 11 23:51:04 2018
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui
class Ui_MainWindow(object):
    """pyside-uic generated UI scaffold for 'untitled.ui'.

    NOTE(review): the file imports ``from PyQt5 import QtCore, QtGui`` but
    this generated code is PySide-style — under PyQt5, QWidget/QVBoxLayout/
    QTableWidget etc. live in QtWidgets (not QtGui) and
    ``QtGui.QApplication.UnicodeUTF8`` does not exist, so this will fail at
    runtime as imported. The original generator header says pyside-uic;
    the import line was presumably changed by hand — confirm and fix at
    module level.
    """

    def setupUi(self, MainWindow):
        # Build the widget tree: a 4x6 table above two push buttons.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(641, 405)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.tableWidget = QtGui.QTableWidget(self.centralwidget)
        self.tableWidget.setRowCount(4)
        self.tableWidget.setColumnCount(6)
        self.tableWidget.setObjectName("tableWidget")
        # NOTE(review): the row/column counts are set twice — a known quirk
        # of uic output; harmless.
        self.tableWidget.setColumnCount(6)
        self.tableWidget.setRowCount(4)
        # Header items for the first two rows and first three columns.
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(2, item)
        self.verticalLayout.addWidget(self.tableWidget)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton_2 = QtGui.QPushButton(self.centralwidget)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.pushButton = QtGui.QPushButton(self.centralwidget)
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout.addWidget(self.pushButton)
        self.verticalLayout.addLayout(self.horizontalLayout)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 641, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        # Install all translatable user-visible strings.
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.verticalHeaderItem(0).setText(QtGui.QApplication.translate("MainWindow", "1st row", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.verticalHeaderItem(1).setText(QtGui.QApplication.translate("MainWindow", "2nd row", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(0).setText(QtGui.QApplication.translate("MainWindow", "1st col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(1).setText(QtGui.QApplication.translate("MainWindow", "2nd col", None, QtGui.QApplication.UnicodeUTF8))
        self.tableWidget.horizontalHeaderItem(2).setText(QtGui.QApplication.translate("MainWindow", "3rd col", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_2.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton.setText(QtGui.QApplication.translate("MainWindow", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,184
|
smartgang/KViewer
|
refs/heads/master
|
/kviewer_app.py
|
# -*- coding: utf-8 -*-
import kviewer2
from indexer import Indexer_MA
import parameter2
from PyQt5 import QtCore, QtWidgets, QtGui
import sys
import time
import pyqtgraph as pg
import pandas as pd
import numpy as np
#import tushare as ts
import datetime
from matplotlib.pylab import date2num
#import DATA_CONSTANTS as DC
"""
'b': QtGui.QColor(0,0,255,255),
'g': QtGui.QColor(0,255,0,255),
'r': QtGui.QColor(255,0,0,255),
'c': QtGui.QColor(0,255,255,255),
'm': QtGui.QColor(255,0,255,255),
'y': QtGui.QColor(255,255,0,255),
'k': QtGui.QColor(0,0,0,255),
'w': QtGui.QColor(255,255,255,255),
'd': QtGui.QColor(150,150,150,255),
'l': QtGui.QColor(200,200,200,255),
's': QtGui.QColor(100,100,150,255),"""
color_list = ['w', 'y', 'c','r','g']
class MainWindow(object):
def __init__(self):
    # Build the whole application: Qt app + main window + two pyqtgraph
    # plots (candles on top, close line below with a range selector), then
    # enter the Qt event loop (this constructor blocks until exit).
    # NOTE(review): self.chart/self.chart2/update_plt1/updateRegion/
    # mouseMoved are defined elsewhere in this class, outside this chunk.
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    self.ui = kviewer2.Ui_MainWindow()
    self.ui.setupUi(MainWindow)
    self.ma_para = []
    self.ma_data = []
    self.ma_plot_dic = {}
    # Prepare data (original comment, translated).
    hist_data = pd.read_excel('RB1810_2018-06-19_1m.xlsx')
    #hist_data = DC.getBarBySymbol('SHFE.RB', 'RB1805', 3600)
    self.t = range(hist_data.shape[0])
    self.date_list = hist_data['strtime'].tolist()
    self.open = hist_data.open.tolist()
    self.high = hist_data.high.tolist()
    self.low = hist_data.low.tolist()
    self.close = hist_data.close.tolist()
    self.prepare_indexer_para()
    # (index, open, close, low, high) tuples for the candlestick chart.
    packdate = zip(self.t, self.open, self.close, self.low, self.high)
    self.plt1 = self.chart(self.date_list, packdate)
    self.plt2 = self.chart2(self.t, self.close)
    self.plt1.addLegend()
    """
    i=0
    for d in self.ma_para:
    pname='ma%d'%d
    self.ma_plot_dic[pname]=self.plt1.plot(name=pname,pen=color_list[i])
    i+=1
    self.prepare_indexer_data()
    """
    self.ma_indexer = Indexer_MA(self.plt1, hist_data, [self.ui.lineEdit_ma_n1, self.ui.lineEdit_ma_n2,
                                                        self.ui.lineEdit_ma_n3, self.ui.lineEdit_ma_n4,
                                                        self.ui.lineEdit_ma_n5])
    self.ma_indexer.draw()
    #self.label = QtWidgets.QLabel()
    # Add the vertical crosshair line (original comment, translated).
    self.vLine = pg.InfiniteLine(angle=90, movable=False)
    self.plt1.addItem(self.vLine, ignoreBounds=True)
    # self.vb = self.plt1.viewRect()
    # Range-selection region shown on the second (lower) plot
    # (original comment, translated).
    self.region = pg.LinearRegionItem()
    self.region.setZValue(10)
    self.region.sigRegionChanged.connect(self.update_plt1)
    self.plt1.sigRangeChanged.connect(self.updateRegion)
    self.region.setRegion([0, 100])
    self.plt2.addItem(self.region, ignoreBounds=True)
    #self.ui.verticalLayout.addWidget(self.label)
    self.ui.verticalLayout.addWidget(self.plt1)
    self.ui.verticalLayout.addWidget(self.plt2)
    proxy = pg.SignalProxy(self.plt1.scene().sigMouseMoved, rateLimit=60, slot=self.mouseMoved)
    MainWindow.show()
    sys.exit(app.exec_())
def prepare_indexer_para(self):
# 准备指标参数
"""
self.ma_para = {
'N1': 5,
'N2': 10,
'N3': 15,
'N4': 30,
'N5': 50
}
"""
self.ma_para = [5,10,20,30,50]
self.macd_para = {
'short': 5,
'long': 10,
'M': 9
}
self.kdj_para = {
'N': 9,
'M1': 3,
'M2': 3
}
self.dmi_para = {
'N': 14,
'M': 6
}
def prepare_indexer_data(self):
# ma数据
for d in self.ma_para:
data_name = 'ma%d' % d
data = pd.Series(self.close).rolling(d).mean()
self.ma_plot_dic[data_name].setData(data)
def set_ma_para(self):
self.ma_para[0]+=3
data = pd.Series(self.close).rolling(self.ma_para[0]).mean()
self.ma_plot_dic['ma5'].setData(data)
def chart(self,date_list, data_list):
item = CandlestickItem(data_list)
axis = DateAxis(date_strings=date_list, orientation='bottom')
plt = pg.PlotWidget()
plt.axisItems = {'bottom':axis}
plt.addItem(item, )
plt.showGrid(x=True, y=True)
return plt
def chart2(self,x,y):
plt = pg.PlotWidget()
plt.addLegend() # 加上图标
plt.plot(x=x,y=y, pen="w", name='close')
return plt
def update_plt1(self):
self.region.setZValue(10)
minX, maxX = self.region.getRegion()
#Y轴自适应
int_minY = max(0,int(minX))
int_maxY = max(1, int(maxX))
minY = min(self.low[int_minY:int_maxY]) - 5
maxY = max(self.high[int_minY:int_maxY]) +5
self.plt1.setYRange(minY, maxY)
self.plt1.setXRange(minX, maxX, padding=0)
def updateRegion(self,window, viewRange):
rgn = viewRange[0]
self.region.setRegion(rgn)
def mouseMoved(self,event):
pos = event[0] ## using signal proxy turns original arguments into a tuple
if self.plt1.sceneBoundingRect().contains(pos):
a = self.plt1.boundingRect().getRect()
minx, maxx = self.region.getRegion()
knum = maxx-minx
# (pos.x()-35)表示鼠标点距离左边框的位置
# (a[2]-35)/knum表示每一根K线占用的像素点数量
# 上面两者两除即为鼠标位置点的K线序号,+minx就是在整个数据列表中的位置
rx = int((pos.x()-35)/((a[2]-35)/knum)+minx)
index = rx
if index > 0 and index < len(self.t):
open = self.open[index]
close = self.close[index]
if open > close:
c = 'green'
elif open < close:
c = 'red'
else:
c = 'black'
self.ui.label_point.setText(
"""
<span style='color: %s'>open=%0.1f,high=%0.1f,low=%0.1f,close=%0.1f</span>,%s
""" % (
c,self.open[index], self.high[index], self.low[index],self.close[index],self.date_list[index]))
self.ui.label_para.setText(
self.ma_indexer.get_indexer_value_text(index)
)
self.vLine.setPos(index)
def set_parameter(self):
# 从参数页获取参数
pass
class DateAxis(pg.AxisItem):
    """Axis that maps integer tick positions to pre-formatted date strings.

    The chart uses the bar index as its x coordinate; this axis replaces the
    numeric tick labels with the corresponding entry from ``date_strings``.
    """
    def __init__(self, date_strings, orientation):
        pg.AxisItem.__init__(self, orientation)
        self.date_strings = date_strings  # one label per bar index
        self.len = len(self.date_strings)

    def tickStrings(self, values, scale, spacing):
        """Return the date string for each tick; out-of-range ticks get ''."""
        # NOTE: an older timestamp-based implementation (strftime on epoch
        # seconds) was removed here as dead code.
        return [self.date_strings[int(x)] if 0 <= int(x) < self.len else ''
                for x in values]
## CandlestickItem: minimal GraphicsObject subclass -- pyqtgraph only
## requires paint() and boundingRect() (see QGraphicsItem documentation).
class CandlestickItem(pg.GraphicsObject):
    """Draws OHLC candlesticks into a cached QPicture for fast repainting."""
    def __init__(self, data):
        pg.GraphicsObject.__init__(self)
        # data: iterable of (t, open, close, low, high) tuples.
        # BUG FIX: materialize the iterable -- the caller passes a zip(),
        # which on Python 3 is a one-shot, non-subscriptable iterator.
        self.data = list(data)
        self.generatePicture()

    def generatePicture(self):
        # Pre-computing a QPicture lets paint() simply replay the drawing
        # instead of re-issuing every primitive on each repaint.
        self.picture = QtGui.QPicture()
        p = QtGui.QPainter(self.picture)
        p.setPen(pg.mkPen('w'))
        # Candle half-ish width from the spacing of the first two bars;
        # BUG FIX: fall back to a constant when fewer than two bars exist.
        if len(self.data) > 1:
            w = (self.data[1][0] - self.data[0][0]) / 3.
        else:
            w = 1.0 / 3.0
        for (t, open_, close_, low_, high_) in self.data:
            # High-low wick.
            p.drawLine(QtCore.QPointF(t, low_), QtCore.QPointF(t, high_))
            # Body colour: green for a down bar, red otherwise (CN convention).
            if open_ > close_:
                p.setBrush(pg.mkBrush('g'))
            else:
                p.setBrush(pg.mkBrush('r'))
            p.drawRect(QtCore.QRectF(t - w, open_, w * 2, close_ - open_))
        p.end()

    def paint(self, p, *args):
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        ## boundingRect _must_ indicate the entire area that will be drawn on
        ## or else we will get artifacts and possibly crashing.
        ## (QPicture does all the work of computing the bounding rect for us.)
        return QtCore.QRectF(self.picture.boundingRect())
if __name__=='__main__':
    # Constructing MainWindow starts the Qt event loop and blocks until exit.
    MainWindow()
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,185
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/EMA.py
|
# -*- coding: utf-8 -*-
from IndexerBase import IndexerBase
class EMA(IndexerBase):
    """Exponential moving averages of the close price, one curve per parameter."""
    indexer_name = 'EMA'
    # Curve names depend on the parameter values, so they are rebuilt per instance.
    indexer_name_list = []
    default_para_dic = {
        'N1': 5,
        'N2': 10,
        'N3': 15,
        'N4': 30,
        'N5': 50
    }

    def __init__(self, raw_data, plt):
        self.indexer_name_list = ["EMA%d" % value for value in self.default_para_dic.values()]
        super(EMA, self).__init__(raw_data, plt)
        self.indexer_color_dic = {
            'N1': 'blue',
            'N2': 'magenta',
            'N3': 'cyan',
            'N4': 'red',
            'N5': 'green'
        }

    def calculate_indexer_value(self):
        """Recompute one EMA series per parameter from the close column."""
        # Rebind (rather than del + mutate) so stale series from a previous
        # parameter set cannot linger.
        self.indexer_name_list = []
        self.indexer_value_dic = {}
        for para_name, para_value in self.para_dic.items():
            indexer_name = "EMA%d" % para_value
            self.indexer_name_list.append(indexer_name)
            self.indexer_value_dic[indexer_name] = self.raw_data['close'].ewm(span=para_value, adjust=False).mean().tolist()

    def draw_indexer(self):
        """Create one plot curve per parameter and fill it with data."""
        for pname, value in self.para_dic.items():
            indexer_name = "EMA%d" % value
            # Single-letter pyqtgraph pen code: first letter of the colour name.
            c = self.indexer_color_dic[pname][0]
            self.plt_dic[pname] = self.plt.plot(name=pname, pen=c)
            self.plt_dic[pname].setData(self.indexer_value_dic[indexer_name])

    def re_draw_indexer(self):
        """Refresh existing curves after the values changed."""
        for pname, value in self.para_dic.items():
            indexer_name = "EMA%d" % value
            self.plt_dic[pname].setData(self.indexer_value_dic[indexer_name])

    def get_polar_value(self, start_pos, end_pos):
        """Return (max, min) over all EMA curves within [start_pos, end_pos)."""
        max_v = 0
        min_v = 99999
        for value_list in self.indexer_value_dic.values():
            max_v = max(max_v, max(value_list[start_pos:end_pos]))
            min_v = min(min_v, min(value_list[start_pos:end_pos]))
        return max_v, min_v

    def get_indexer_value_text(self, pos):
        """Return an HTML snippet with each EMA value at bar *pos*."""
        t = self.indexer_name + '('
        for para_name, para_value in self.para_dic.items():
            t += '%s_%d ' % (para_name, para_value)
        t += ')'
        for para_name, para_value in self.para_dic.items():
            indexer_name = 'EMA%d' % para_value
            c = self.indexer_color_dic[para_name]
            t += "<span style='color: %s'>%s=%0.2f </span>" % (c, indexer_name, self.indexer_value_dic[indexer_name][pos])
        return t
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,186
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/IndexerBase.py
|
# -*- coding: utf-8 -*-
"""
指标类,用于管理指标相内容:
"""
class IndexerBase(object):
    """Base class for chart indicators.

    Holds the raw bar data (a pandas DataFrame), the parameter dict seeded
    from ``default_para_dic``, the computed indicator series and the
    pyqtgraph curves drawn from them.  Subclasses implement the
    calculate/draw hooks.
    """
    color_list = ['blue', 'magenta', 'cyan', 'red', 'green']
    indexer_name = ''
    indexer_name_list = []
    default_para_dic = {}
    para_dic = {}
    indexer_color_dic = {}  # display colour of each indicator curve

    def __init__(self, raw_data, plt):
        self.raw_data = raw_data
        # Instance-level copy so editing parameters never touches the class default.
        self.para_dic = dict(self.default_para_dic)
        self.indexer_value_dic = {}
        self.plt = plt
        self.plt_dic = {}
        self.value_num = self.raw_data.shape[0]  # number of bars

    def calculate_indexer_value(self):
        """Hook: recompute the indicator series from ``self.raw_data``."""
        pass

    def draw_indexer(self):
        """Hook: create the plot curves for the first draw."""
        pass

    def re_draw_indexer(self):
        """Hook: refresh existing curves after a recompute."""
        pass

    def get_polar_value(self, start_pos, end_pos):
        """Hook: return (max, min) of the indicator inside [start_pos, end_pos)."""
        pass

    def close_sub_plt(self):
        """Close every curve created by this indicator."""
        for plt in self.plt_dic.values():
            plt.close()

    def set_para_dic(self, para_dic):
        """Overwrite current parameters (only keys already present)."""
        for para_name in self.para_dic.keys():
            self.para_dic[para_name] = para_dic[para_name]

    def get_para_dic(self):
        return self.para_dic

    def update_raw_data(self, raw_data):
        """Swap in new bar data, then recompute and redraw."""
        self.raw_data = raw_data
        self.calculate_indexer_value()
        self.re_draw_indexer()

    def update_parameter(self, para_dic):
        """Apply new parameters; recompute/redraw only if something changed."""
        changed = False
        for para_name in self.default_para_dic.keys():
            if self.para_dic[para_name] != para_dic[para_name]:
                self.para_dic[para_name] = para_dic[para_name]
                changed = True
        if changed:
            self.calculate_indexer_value()
            self.re_draw_indexer()

    def get_indexer_value_text(self, pos):
        """Return an HTML snippet with each indicator value at bar *pos*."""
        t = self.indexer_name + '('
        for para_name, para_value in self.para_dic.items():
            t += '%s_%d ' % (para_name, para_value)
        t += ')'
        for para_name, c in self.indexer_color_dic.items():
            t += "<span style='color: %s'>%s=%0.2f </span>" % (c, para_name, self.indexer_value_dic[para_name][pos])
        return t
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,187
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/__init__.py
|
# -*- coding: utf-8 -*-
from IndexerWidget import IndexerWidget
from MA import MA
from MACD import MACD
from RSI import RSI
from ATR import ATR
from HullMacd import HULL_MACD
from EMA import EMA
from HullRsi import HULL_RSI
from KDJ import KDJ
from DMI import DMI
# Registry: indicator display name -> indicator class.
indexer_mapping_dic = {
    'MA': MA,
    'EMA': EMA,
    'MACD': MACD,
    'HULL_MACD': HULL_MACD,
    'RSI': RSI,
    'ATR': ATR,
    'HULL_RSI': HULL_RSI,
    'KDJ': KDJ,
    'DMI': DMI
}


def get_all_indexer_para_name():
    """Map each indicator name to the names of its default parameters."""
    return {indexer_name: indexer.default_para_dic.keys()
            for indexer_name, indexer in indexer_mapping_dic.items()}


def get_all_indexer_para_dic():
    """Map each indicator name to its default parameter dict."""
    return {indexer_name: indexer.default_para_dic
            for indexer_name, indexer in indexer_mapping_dic.items()}
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,188
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/KDJ.py
|
# -*- coding: utf-8 -*-
from IndexerBase import IndexerBase
import numpy as np
import talib
class KDJ(IndexerBase):
    """Stochastic oscillator (KDJ) indicator with K, D and J curves."""
    indexer_name = 'KDJ'
    indexer_name_list = ['K', 'D', 'J']
    default_para_dic = {
        'N': 9,
        'M1': 3,
        'M2': 3
    }

    def __init__(self, raw_data, plt):
        super(KDJ, self).__init__(raw_data, plt)
        self.indexer_name_list = ['K', 'D', 'J']  # fixed, unlike MA/EMA
        self.indexer_color_dic = {
            'K': 'blue',
            'D': 'magenta',
            'J': 'cyan'
        }

    def calculate_indexer_value(self):
        """Recompute K, D and J from the high/low/close columns."""
        n = self.para_dic['N']
        m1 = self.para_dic['M1']
        m2 = self.para_dic['M2']
        # Rolling extremes; leading NaNs are filled with the raw low/high.
        low_list = self.raw_data['low'].rolling(n).min().fillna(self.raw_data['low'])
        high_list = self.raw_data['high'].rolling(n).max().fillna(self.raw_data['high'])
        rsv = (self.raw_data['close'] - low_list) / (high_list - low_list) * 100
        # Classic KDJ smoothing via exponentially weighted means.
        kdj_k = rsv.ewm(alpha=1.0 / m1, adjust=False).mean()
        kdj_d = kdj_k.ewm(alpha=1.0 / m2, adjust=False).mean()
        kdj_j = 3 * kdj_k - 2 * kdj_d
        # CONSISTENCY FIX: store plain lists like the other indicators
        # (EMA/MACD call .tolist()) instead of pandas Series.
        self.indexer_value_dic['K'] = kdj_k.tolist()
        self.indexer_value_dic['D'] = kdj_d.tolist()
        self.indexer_value_dic['J'] = kdj_j.tolist()

    def draw_indexer(self):
        """Create one curve per series and fill it with data."""
        for indexer_name, values in self.indexer_value_dic.items():
            # Single-letter pyqtgraph pen code: first letter of the colour name.
            c = self.indexer_color_dic[indexer_name][0]
            self.plt_dic[indexer_name] = self.plt.plot(name=indexer_name, pen=c)
            self.plt_dic[indexer_name].setData(values)

    def re_draw_indexer(self):
        """Refresh existing curves after a recompute."""
        for pname, values in self.indexer_value_dic.items():
            self.plt_dic[pname].setData(values)

    def get_polar_value(self, start_pos, end_pos):
        """Return (max, min) over K, D and J within [start_pos, end_pos)."""
        max_v = max(max(self.indexer_value_dic['K'][start_pos:end_pos]),
                    max(self.indexer_value_dic['D'][start_pos:end_pos]),
                    max(self.indexer_value_dic['J'][start_pos:end_pos]))
        min_v = min(min(self.indexer_value_dic['K'][start_pos:end_pos]),
                    min(self.indexer_value_dic['D'][start_pos:end_pos]),
                    min(self.indexer_value_dic['J'][start_pos:end_pos]))
        return max_v, min_v
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,189
|
smartgang/KViewer
|
refs/heads/master
|
/parameter.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'parameter.ui'
#
# Created: Sat Jul 21 16:49:46 2018
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
    """pyside-uic generated parameter dialog.

    Layout (top to bottom): common parameters (contract, period, start/end
    date), backtest-file picker, MA parameters, a placeholder MACD group,
    and an OK/Cancel button row.

    NOTE: generated from parameter.ui -- edit the .ui file, not this class.
    """
    def setupUi(self, Form):
        """Create and lay out all widgets on *Form*."""
        Form.setObjectName("Form")
        Form.resize(398, 494)
        self.verticalLayout = QtGui.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        # --- group "common parameters": contract, period, start/end time ---
        self.groupBox = QtGui.QGroupBox(Form)
        self.groupBox.setObjectName("groupBox")
        self.gridLayoutWidget_2 = QtGui.QWidget(self.groupBox)
        self.gridLayoutWidget_2.setGeometry(QtCore.QRect(9, 9, 351, 61))
        self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
        self.gridLayout_2 = QtGui.QGridLayout(self.gridLayoutWidget_2)
        self.gridLayout_2.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        # Contract line edit.
        self.lineEdit = QtGui.QLineEdit(self.gridLayoutWidget_2)
        self.lineEdit.setObjectName("lineEdit")
        self.gridLayout_2.addWidget(self.lineEdit, 0, 2, 1, 1)
        self.label_2 = QtGui.QLabel(self.gridLayoutWidget_2)
        self.label_2.setObjectName("label_2")
        self.gridLayout_2.addWidget(self.label_2, 0, 3, 1, 1)
        self.label = QtGui.QLabel(self.gridLayoutWidget_2)
        self.label.setObjectName("label")
        self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
        # Period combo box (seconds); item texts set in retranslateUi().
        self.comboBox = QtGui.QComboBox(self.gridLayoutWidget_2)
        self.comboBox.setObjectName("comboBox")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.gridLayout_2.addWidget(self.comboBox, 0, 4, 1, 1)
        self.label_3 = QtGui.QLabel(self.gridLayoutWidget_2)
        self.label_3.setObjectName("label_3")
        self.gridLayout_2.addWidget(self.label_3, 1, 0, 1, 1)
        self.label_4 = QtGui.QLabel(self.gridLayoutWidget_2)
        self.label_4.setObjectName("label_4")
        self.gridLayout_2.addWidget(self.label_4, 1, 3, 1, 1)
        # Start/end date editors.
        self.dateEdit_2 = QtGui.QDateEdit(self.gridLayoutWidget_2)
        self.dateEdit_2.setObjectName("dateEdit_2")
        self.gridLayout_2.addWidget(self.dateEdit_2, 1, 4, 1, 1)
        self.dateEdit = QtGui.QDateEdit(self.gridLayoutWidget_2)
        self.dateEdit.setObjectName("dateEdit")
        self.gridLayout_2.addWidget(self.dateEdit, 1, 2, 1, 1)
        self.verticalLayout.addWidget(self.groupBox)
        # --- group "backtest file": pick button plus file-name label ---
        self.groupBox_5 = QtGui.QGroupBox(Form)
        self.groupBox_5.setObjectName("groupBox_5")
        self.pushButton_3 = QtGui.QPushButton(self.groupBox_5)
        self.pushButton_3.setGeometry(QtCore.QRect(20, 20, 75, 23))
        self.pushButton_3.setObjectName("pushButton_3")
        self.label_11 = QtGui.QLabel(self.groupBox_5)
        self.label_11.setGeometry(QtCore.QRect(110, 30, 54, 12))
        self.label_11.setObjectName("label_11")
        self.verticalLayout.addWidget(self.groupBox_5)
        # --- group "MA parameters": N1..N5 line edits and algorithm combo ---
        self.groupBox_2 = QtGui.QGroupBox(Form)
        self.groupBox_2.setObjectName("groupBox_2")
        self.gridLayoutWidget = QtGui.QWidget(self.groupBox_2)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 10, 351, 80))
        self.gridLayoutWidget.setObjectName("gridLayoutWidget")
        self.gridLayout = QtGui.QGridLayout(self.gridLayoutWidget)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.label_5 = QtGui.QLabel(self.gridLayoutWidget)
        self.label_5.setObjectName("label_5")
        self.gridLayout.addWidget(self.label_5, 0, 0, 1, 1)
        self.label_7 = QtGui.QLabel(self.gridLayoutWidget)
        self.label_7.setObjectName("label_7")
        self.gridLayout.addWidget(self.label_7, 0, 4, 1, 1)
        self.lineEdit_ma_n3 = QtGui.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n3.setObjectName("lineEdit_ma_n3")
        self.gridLayout.addWidget(self.lineEdit_ma_n3, 0, 5, 1, 1)
        self.lineEdit_ma_n2 = QtGui.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n2.setObjectName("lineEdit_ma_n2")
        self.gridLayout.addWidget(self.lineEdit_ma_n2, 0, 3, 1, 1)
        self.label_6 = QtGui.QLabel(self.gridLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.gridLayout.addWidget(self.label_6, 0, 2, 1, 1)
        self.lineEdit_ma_n1 = QtGui.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n1.setObjectName("lineEdit_ma_n1")
        self.gridLayout.addWidget(self.lineEdit_ma_n1, 0, 1, 1, 1)
        self.label_8 = QtGui.QLabel(self.gridLayoutWidget)
        self.label_8.setObjectName("label_8")
        self.gridLayout.addWidget(self.label_8, 1, 0, 1, 1)
        self.lineEdit_ma_n4 = QtGui.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n4.setObjectName("lineEdit_ma_n4")
        self.gridLayout.addWidget(self.lineEdit_ma_n4, 1, 1, 1, 1)
        self.label_9 = QtGui.QLabel(self.gridLayoutWidget)
        self.label_9.setObjectName("label_9")
        self.gridLayout.addWidget(self.label_9, 1, 2, 1, 1)
        self.lineEdit_ma_n5 = QtGui.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n5.setObjectName("lineEdit_ma_n5")
        self.gridLayout.addWidget(self.lineEdit_ma_n5, 1, 3, 1, 1)
        self.label_10 = QtGui.QLabel(self.gridLayoutWidget)
        self.label_10.setObjectName("label_10")
        self.gridLayout.addWidget(self.label_10, 1, 4, 1, 1)
        # Algorithm selector (MA / EMA).
        self.comboBox_ma = QtGui.QComboBox(self.gridLayoutWidget)
        self.comboBox_ma.setObjectName("comboBox_ma")
        self.comboBox_ma.addItem("")
        self.comboBox_ma.addItem("")
        self.gridLayout.addWidget(self.comboBox_ma, 1, 5, 1, 1)
        self.verticalLayout.addWidget(self.groupBox_2)
        # --- group "MACD parameters": placeholder, no widgets yet ---
        self.groupBox_3 = QtGui.QGroupBox(Form)
        self.groupBox_3.setObjectName("groupBox_3")
        self.verticalLayout.addWidget(self.groupBox_3)
        # --- OK / Cancel button row ---
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton = QtGui.QPushButton(Form)
        self.pushButton.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout.addWidget(self.pushButton)
        self.pushButton_2 = QtGui.QPushButton(Form)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Assign all user-visible (Chinese) strings."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("Form", "公共参数", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Form", "周期", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Form", "合约", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(0, QtGui.QApplication.translate("Form", "0", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(1, QtGui.QApplication.translate("Form", "60", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(2, QtGui.QApplication.translate("Form", "300", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(3, QtGui.QApplication.translate("Form", "600", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(4, QtGui.QApplication.translate("Form", "900", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(5, QtGui.QApplication.translate("Form", "1800", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox.setItemText(6, QtGui.QApplication.translate("Form", "3600", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("Form", "开始时间", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("Form", "结束时间", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_5.setTitle(QtGui.QApplication.translate("Form", "回测文件", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_3.setText(QtGui.QApplication.translate("Form", "PushButton", None, QtGui.QApplication.UnicodeUTF8))
        self.label_11.setText(QtGui.QApplication.translate("Form", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_2.setTitle(QtGui.QApplication.translate("Form", "MA参数", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("Form", "N1", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("Form", "N3", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("Form", "N2", None, QtGui.QApplication.UnicodeUTF8))
        self.label_8.setText(QtGui.QApplication.translate("Form", "N4", None, QtGui.QApplication.UnicodeUTF8))
        self.label_9.setText(QtGui.QApplication.translate("Form", "N5", None, QtGui.QApplication.UnicodeUTF8))
        self.label_10.setText(QtGui.QApplication.translate("Form", "算法", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox_ma.setItemText(0, QtGui.QApplication.translate("Form", "MA", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBox_ma.setItemText(1, QtGui.QApplication.translate("Form", "EMA", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_3.setTitle(QtGui.QApplication.translate("Form", "MACD参数", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton.setText(QtGui.QApplication.translate("Form", "确定", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_2.setText(QtGui.QApplication.translate("Form", "取消", None, QtGui.QApplication.UnicodeUTF8))
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,190
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/IndexerWidget.py
|
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from Indexer import *
class IndexerWidget(QWidget):
    """Indicator-parameter dialog.

    A list of indicator names on the left drives a QStackedWidget of
    per-indicator parameter forms on the right.  Pressing OK collects every
    value and emits ``para_changed(selected_indexer, all_para_dic)``.
    """
    signal_para_changed = pyqtSignal(str, dict, name='para_changed')

    def __init__(self, all_indexer_para_dic, current_indexer_name):
        super(IndexerWidget, self).__init__()
        self.setWindowTitle('设置指标参数')
        self.indexer_para_dic = all_indexer_para_dic
        self.leftlist = QListWidget()
        self.para_line_edit_dic = {}
        self.stack_dic = {}
        self.indexer_pos_dic = {}  # position of each indicator in leftlist
        i = 0
        current_indexer_pos = 0
        self.stack = QStackedWidget(self)
        for indexer_name in self.indexer_para_dic.keys():
            self.leftlist.insertItem(i, indexer_name)
            stack_widget = QWidget()
            layout = QFormLayout()
            indexer_para_dic = self.indexer_para_dic[indexer_name]
            line_edit_dic = {}
            for name, value in indexer_para_dic.items():
                # One integer line edit per parameter, pre-filled with the
                # current value.
                le = QLineEdit()
                le.setValidator(QIntValidator())
                le.setMaxLength(2)
                le.setText(str(value))
                line_edit_dic[name] = le
                layout.addRow(name, le)
            self.para_line_edit_dic[indexer_name] = line_edit_dic
            stack_widget.setLayout(layout)
            self.stack_dic[indexer_name] = stack_widget
            self.stack.addWidget(stack_widget)
            if indexer_name == current_indexer_name:
                current_indexer_pos = i
            i += 1
        # BUG FIX: child layouts must be created without a parent widget --
        # a widget can own only one layout, so passing ``self`` to each one
        # triggered Qt "already has a layout" conflicts.
        main_box = QVBoxLayout()
        hbox = QHBoxLayout()
        hbox.addWidget(self.leftlist)
        hbox.addWidget(self.stack)
        main_box.addLayout(hbox)
        btn_layout = self.setup_button()
        main_box.addLayout(btn_layout)
        # BUG FIX: install the top-level layout, not just the button row.
        self.setLayout(main_box)
        self.leftlist.currentRowChanged.connect(self.display)
        self.leftlist.setCurrentRow(current_indexer_pos)

    def setup_button(self):
        """Build and return the OK / Cancel button row."""
        vbox = QHBoxLayout()
        btn_ok = QPushButton('OK')
        btn_cancel = QPushButton('Cancel')  # BUG FIX: was misspelled "Cancle"
        btn_ok.clicked.connect(self.get_user_para)
        btn_cancel.clicked.connect(self.close)
        vbox.addWidget(btn_ok)
        vbox.addWidget(btn_cancel)
        return vbox

    def get_user_para(self):
        """Collect ints from every line edit, emit para_changed and close."""
        all_para_dic = {}
        for indexer_name, line_edit_dic in self.para_line_edit_dic.items():
            para_dic = {}
            for para_name, line_edit in line_edit_dic.items():
                para_dic[para_name] = int(line_edit.text())
            all_para_dic[indexer_name] = para_dic
        selected_indexer = self.leftlist.currentItem().text()
        self.signal_para_changed.emit(selected_indexer, all_para_dic)
        self.close()

    def display(self, i):
        """Show the parameter page matching the selected list row."""
        self.stack.setCurrentIndex(i)
class test1():
    """Tiny receiver used to demo the para_changed signal."""
    def __init__(self, name):
        self.name = name

    def receive_para_changed(self, selected, para_dic):
        # BUG FIX: renamed the second parameter from ``dict``, which
        # shadowed the builtin (it is only ever passed positionally).
        print (self.name, selected, para_dic)
if __name__ == '__main__':
    app = QApplication(sys.argv)
    all_indexer_para_dic = get_all_indexer_para_dic()
    # BUG FIX: IndexerWidget requires the currently selected indicator name
    # as its second argument; calling it with one argument raises TypeError.
    demo = IndexerWidget(all_indexer_para_dic, 'MA')
    c1 = test1('test1')
    demo.signal_para_changed.connect(c1.receive_para_changed)
    demo.show()
    sys.exit(app.exec_())
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,191
|
smartgang/KViewer
|
refs/heads/master
|
/nullWindow.py
|
# -*- coding: utf-8 -*-
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Generated UI: a 4x6 table above two buttons, inside a QMainWindow."""
    def setupUi(self, MainWindow):
        """Create and lay out all widgets on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(641, 405)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)
        self.tableWidget.setRowCount(4)
        self.tableWidget.setColumnCount(6)
        self.tableWidget.setObjectName("tableWidget")
        # NOTE(review): row/column counts are set twice -- harmless
        # generated-code redundancy.
        self.tableWidget.setColumnCount(6)
        self.tableWidget.setRowCount(4)
        # Header items for the first two rows / three columns only; their
        # visible text is assigned in retranslateUi().
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(2, item)
        self.verticalLayout.addWidget(self.tableWidget)
        # Button row under the table.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout.addWidget(self.pushButton)
        self.verticalLayout.addLayout(self.horizontalLayout)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 641, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Assign all user-visible strings (PyQt5-style translate calls)."""
        MainWindow.setWindowTitle(QtWidgets.QApplication.translate("MainWindow", "MainWindow"))
        self.tableWidget.verticalHeaderItem(0).setText(QtWidgets.QApplication.translate("MainWindow", "1st row"))
        self.tableWidget.verticalHeaderItem(1).setText(QtWidgets.QApplication.translate("MainWindow", "2nd row"))
        self.tableWidget.horizontalHeaderItem(0).setText(QtWidgets.QApplication.translate("MainWindow", "1st col"))
        self.tableWidget.horizontalHeaderItem(1).setText(QtWidgets.QApplication.translate("MainWindow", "2nd col"))
        self.tableWidget.horizontalHeaderItem(2).setText(QtWidgets.QApplication.translate("MainWindow", "3rd col"))
        self.pushButton_2.setText(QtWidgets.QApplication.translate("MainWindow", "PushButton"))
        self.pushButton.setText(QtWidgets.QApplication.translate("MainWindow", "PushButton"))
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,192
|
smartgang/KViewer
|
refs/heads/master
|
/MainFrame.py
|
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtWidgets import *
from KViewer_new import KViewer
class KViewerMainWindow(QMainWindow):
    """Main window: one KViewer chart widget per tab, with toolbar actions
    to add and remove tabs."""
    def __init__(self, parent=None):
        super(KViewerMainWindow, self).__init__(parent)
        self.resize(800, 600)
        self.setWindowTitle('KViewer')
        self.tab_widget = QTabWidget()
        self.tab_num = 0  # running counter used only for tab labels
        tab1 = self.new_tab()
        self.tab_widget.addTab(tab1, 'tab%d' % self.tab_num)
        self.tab_widget.setTabText(0, 'contract 0')
        self.main_layout = QVBoxLayout()
        self.main_layout.addWidget(self.tab_widget)
        # BUG FIX: QMainWindow manages its own layout, so setLayout() was
        # ignored (with a runtime warning); setCentralWidget() is the
        # correct way to install the tab widget.
        self.setCentralWidget(self.tab_widget)
        tb = self.addToolBar("增删")
        add = QAction("增加", self)
        add.triggered.connect(self.add_tab)
        tb.addAction(add)
        rem = QAction("删除", self)
        rem.triggered.connect(self.remove_tab)
        tb.addAction(rem)

    def new_tab(self):
        """Create the KViewer widget that fills a new tab."""
        return KViewer()

    def add_tab(self):
        """Toolbar slot: append a new tab and label it with the next number."""
        tab1 = self.new_tab()
        self.tab_num += 1
        # BUG FIX: use the index returned by addTab -- after removals,
        # tab_num no longer matches the new tab's real position.
        idx = self.tab_widget.addTab(tab1, 'tab %d' % self.tab_num)
        self.tab_widget.setTabText(idx, 'contract%d' % self.tab_num)

    def remove_tab(self):
        """Toolbar slot: remove the currently selected tab, if any."""
        i = self.tab_widget.currentIndex()
        print ("current index:%d" % i)
        print ("tab_num:%d" % self.tab_num)
        # BUG FIX: currentIndex() is -1 when there are no tabs; removing
        # then was a silent no-op that drove tab_num negative.
        if i < 0:
            return
        self.tab_widget.removeTab(i)
        self.tab_num -= 1
# Standalone entry point: launch the tabbed viewer as its own application.
if __name__ == "__main__":
    app = QApplication(sys.argv)
    win = KViewerMainWindow()
    win.show()
    # exec_() blocks until the last window closes; its return code is
    # forwarded to the process exit status.
    sys.exit(app.exec_())
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,193
|
smartgang/KViewer
|
refs/heads/master
|
/Indexer/MACD.py
|
# -*- coding: utf-8 -*-
from IndexerBase import IndexerBase
import pyqtgraph as pg
class MACD(IndexerBase):
    """MACD indicator: DIF/DEA curves plus a HIST bar chart.

    HIST bars are drawn red for values >= 0 and green for values < 0.
    Values are computed from the 'close' column of raw_data and stored in
    self.indexer_value_dic (provided by IndexerBase).
    """

    indexer_name = 'MACD'
    # For some indicators the name list depends on the parameters; MACD's
    # is fixed, but it is still (re)assigned per instance in __init__ for
    # consistency with the other indicator classes.
    indexer_name_list = ['DIF', 'DEA', 'HIST']
    default_para_dic = {
        'Short': 12,
        'Mid': 9,
        'Long': 26,
    }

    def __init__(self, raw_data, plt):
        """raw_data: bar data with a 'close' column; plt: pyqtgraph plot target."""
        super(MACD, self).__init__(raw_data, plt)
        self.indexer_name_list = ['DIF', 'DEA', 'HIST']
        # Bar items are remembered so re_draw_indexer() can remove them
        # from the plot before adding the replacements.
        self.hist_item_up = None
        self.hist_item_down = None
        self.indexer_color_dic = {
            'DIF': 'blue',
            'DEA': 'magenta',
            'HIST': 'red'
        }

    def calculate_indexer_value(self):
        """Compute DIF, DEA and HIST from the close series.

        DIF  = EMA(close, Short) - EMA(close, Long)
        DEA  = EMA(DIF, Mid)
        HIST = 2 * (DIF - DEA)
        """
        closedata = self.raw_data['close']
        short = self.para_dic['Short']
        long1 = self.para_dic['Long']
        mid = self.para_dic['Mid']
        sema = closedata.ewm(span=short, adjust=False).mean()
        lema = closedata.ewm(span=long1, adjust=False).mean()
        data_dif = sema - lema
        data_dea = data_dif.ewm(span=mid, adjust=False).mean()
        data_bar = (data_dif - data_dea) * 2
        self.indexer_value_dic['DIF'] = data_dif.tolist()
        self.indexer_value_dic['DEA'] = data_dea.tolist()
        self.indexer_value_dic['HIST'] = data_bar.tolist()

    @staticmethod
    def _make_hist_items(values):
        """Build the (up, down) BarGraphItems for the HIST series.

        Splits values by sign: red bars for values >= 0, green for < 0.
        Shared by draw_indexer() and re_draw_indexer().
        """
        up_num, up_value = [], []
        down_num, down_value = [], []
        for n, v in enumerate(values):
            if v >= 0:
                up_num.append(n)
                up_value.append(v)
            else:
                down_num.append(n)
                down_value.append(v)
        up_item = pg.BarGraphItem(x=up_num, height=up_value, width=0.3, brush='r')
        down_item = pg.BarGraphItem(x=down_num, height=down_value, width=0.3, brush='g')
        return up_item, down_item

    def draw_indexer(self):
        """Initial draw: add HIST bar items and plot the DIF/DEA curves."""
        for indexer_name, values in self.indexer_value_dic.items():
            if indexer_name == 'HIST':
                self.hist_item_up, self.hist_item_down = self._make_hist_items(values)
                self.plt.addItem(self.hist_item_up)
                self.plt.addItem(self.hist_item_down)
            else:
                # pyqtgraph pens accept single-letter color codes; take the
                # first letter of the configured color name ('b', 'm', ...).
                c = self.indexer_color_dic[indexer_name][0]
                self.plt_dic[indexer_name] = self.plt.plot(name=indexer_name, pen=c)
                self.plt_dic[indexer_name].setData(values)

    def re_draw_indexer(self):
        """Redraw after values changed: swap the bar items, update curves in place."""
        for pname, values in self.indexer_value_dic.items():
            if pname == 'HIST':
                self.plt.removeItem(self.hist_item_up)
                self.plt.removeItem(self.hist_item_down)
                self.hist_item_up, self.hist_item_down = self._make_hist_items(values)
                self.plt.addItem(self.hist_item_up)
                self.plt.addItem(self.hist_item_down)
            else:
                self.plt_dic[pname].setData(values)

    def get_polar_value(self, start_pos, end_pos):
        """Return (max, min) across DIF/DEA/HIST within [start_pos, end_pos)."""
        slices = [self.indexer_value_dic[name][start_pos:end_pos]
                  for name in ('DIF', 'DEA', 'HIST')]
        max_v = max(max(s) for s in slices)
        min_v = min(min(s) for s in slices)
        return max_v, min_v
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,194
|
smartgang/KViewer
|
refs/heads/master
|
/parameter2.py
|
# -*- coding: utf-8 -*-
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """pyuic-generated parameter dialog.

    Contains a common-parameters group (contract/period/date range), a
    backtest-file group, an MA-parameters group, an (empty) MACD group,
    and an OK/Cancel button row.  Generated code: statement order matters
    for widget registration and stacking — do not reorder.
    """

    def setupUi(self, Form):
        """Build the widget tree on *Form* (generated by pyuic)."""
        Form.setObjectName("Form")
        Form.resize(398, 494)
        self.verticalLayout = QtWidgets.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        # --- common-parameters group: contract, period, start/end dates ---
        self.groupBox = QtWidgets.QGroupBox(Form)
        self.groupBox.setObjectName("groupBox")
        self.gridLayoutWidget_2 = QtWidgets.QWidget(self.groupBox)
        self.gridLayoutWidget_2.setGeometry(QtCore.QRect(9, 9, 351, 61))
        self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.gridLayoutWidget_2)
        self.gridLayout_2.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.lineEdit = QtWidgets.QLineEdit(self.gridLayoutWidget_2)
        self.lineEdit.setObjectName("lineEdit")
        self.gridLayout_2.addWidget(self.lineEdit, 0, 2, 1, 1)
        self.label_2 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_2.setObjectName("label_2")
        self.gridLayout_2.addWidget(self.label_2, 0, 3, 1, 1)
        self.label = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label.setObjectName("label")
        self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
        # Period combo box; the 7 item texts are filled in retranslateUi().
        self.comboBox = QtWidgets.QComboBox(self.gridLayoutWidget_2)
        self.comboBox.setObjectName("comboBox")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.comboBox.addItem("")
        self.gridLayout_2.addWidget(self.comboBox, 0, 4, 1, 1)
        self.label_3 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_3.setObjectName("label_3")
        self.gridLayout_2.addWidget(self.label_3, 1, 0, 1, 1)
        self.label_4 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_4.setObjectName("label_4")
        self.gridLayout_2.addWidget(self.label_4, 1, 3, 1, 1)
        self.dateEdit_2 = QtWidgets.QDateEdit(self.gridLayoutWidget_2)
        self.dateEdit_2.setObjectName("dateEdit_2")
        self.gridLayout_2.addWidget(self.dateEdit_2, 1, 4, 1, 1)
        self.dateEdit = QtWidgets.QDateEdit(self.gridLayoutWidget_2)
        self.dateEdit.setObjectName("dateEdit")
        self.gridLayout_2.addWidget(self.dateEdit, 1, 2, 1, 1)
        self.verticalLayout.addWidget(self.groupBox)
        # --- backtest-file group: picker button plus filename label ---
        self.groupBox_5 = QtWidgets.QGroupBox(Form)
        self.groupBox_5.setObjectName("groupBox_5")
        self.pushButton_3 = QtWidgets.QPushButton(self.groupBox_5)
        self.pushButton_3.setGeometry(QtCore.QRect(20, 20, 75, 23))
        self.pushButton_3.setObjectName("pushButton_3")
        self.label_11 = QtWidgets.QLabel(self.groupBox_5)
        self.label_11.setGeometry(QtCore.QRect(110, 30, 54, 12))
        self.label_11.setObjectName("label_11")
        self.verticalLayout.addWidget(self.groupBox_5)
        # --- MA-parameters group: N1..N5 line edits and algorithm combo ---
        self.groupBox_2 = QtWidgets.QGroupBox(Form)
        self.groupBox_2.setObjectName("groupBox_2")
        self.gridLayoutWidget = QtWidgets.QWidget(self.groupBox_2)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 10, 351, 80))
        self.gridLayoutWidget.setObjectName("gridLayoutWidget")
        self.gridLayout = QtWidgets.QGridLayout(self.gridLayoutWidget)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.label_5 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_5.setObjectName("label_5")
        self.gridLayout.addWidget(self.label_5, 0, 0, 1, 1)
        self.label_7 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_7.setObjectName("label_7")
        self.gridLayout.addWidget(self.label_7, 0, 4, 1, 1)
        self.lineEdit_ma_n3 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n3.setObjectName("lineEdit_ma_n3")
        self.gridLayout.addWidget(self.lineEdit_ma_n3, 0, 5, 1, 1)
        self.lineEdit_ma_n2 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n2.setObjectName("lineEdit_ma_n2")
        self.gridLayout.addWidget(self.lineEdit_ma_n2, 0, 3, 1, 1)
        self.label_6 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.gridLayout.addWidget(self.label_6, 0, 2, 1, 1)
        self.lineEdit_ma_n1 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n1.setObjectName("lineEdit_ma_n1")
        self.gridLayout.addWidget(self.lineEdit_ma_n1, 0, 1, 1, 1)
        self.label_8 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_8.setObjectName("label_8")
        self.gridLayout.addWidget(self.label_8, 1, 0, 1, 1)
        self.lineEdit_ma_n4 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n4.setObjectName("lineEdit_ma_n4")
        self.gridLayout.addWidget(self.lineEdit_ma_n4, 1, 1, 1, 1)
        self.label_9 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_9.setObjectName("label_9")
        self.gridLayout.addWidget(self.label_9, 1, 2, 1, 1)
        self.lineEdit_ma_n5 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n5.setObjectName("lineEdit_ma_n5")
        self.gridLayout.addWidget(self.lineEdit_ma_n5, 1, 3, 1, 1)
        self.label_10 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_10.setObjectName("label_10")
        self.gridLayout.addWidget(self.label_10, 1, 4, 1, 1)
        # MA algorithm combo (MA / EMA); texts filled in retranslateUi().
        self.comboBox_ma = QtWidgets.QComboBox(self.gridLayoutWidget)
        self.comboBox_ma.setObjectName("comboBox_ma")
        self.comboBox_ma.addItem("")
        self.comboBox_ma.addItem("")
        self.gridLayout.addWidget(self.comboBox_ma, 1, 5, 1, 1)
        self.verticalLayout.addWidget(self.groupBox_2)
        # --- MACD-parameters group (currently empty placeholder) ---
        self.groupBox_3 = QtWidgets.QGroupBox(Form)
        self.groupBox_3.setObjectName("groupBox_3")
        self.verticalLayout.addWidget(self.groupBox_3)
        # --- OK / Cancel button row ---
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton = QtWidgets.QPushButton(Form)
        self.pushButton.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout.addWidget(self.pushButton)
        self.pushButton_2 = QtWidgets.QPushButton(Form)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Assign all user-visible strings (generated by pyuic)."""
        Form.setWindowTitle(QtWidgets.QApplication.translate("Form", "Form"))
        self.groupBox.setTitle(QtWidgets.QApplication.translate("Form", "公共参数"))
        self.label_2.setText(QtWidgets.QApplication.translate("Form", "周期"))
        self.label.setText(QtWidgets.QApplication.translate("Form", "合约"))
        self.comboBox.setItemText(0, QtWidgets.QApplication.translate("Form", "0"))
        self.comboBox.setItemText(1, QtWidgets.QApplication.translate("Form", "60"))
        self.comboBox.setItemText(2, QtWidgets.QApplication.translate("Form", "300"))
        self.comboBox.setItemText(3, QtWidgets.QApplication.translate("Form", "600"))
        self.comboBox.setItemText(4, QtWidgets.QApplication.translate("Form", "900"))
        self.comboBox.setItemText(5, QtWidgets.QApplication.translate("Form", "1800"))
        self.comboBox.setItemText(6, QtWidgets.QApplication.translate("Form", "3600"))
        self.label_3.setText(QtWidgets.QApplication.translate("Form", "开始时间"))
        self.label_4.setText(QtWidgets.QApplication.translate("Form", "结束时间"))
        self.groupBox_5.setTitle(QtWidgets.QApplication.translate("Form", "回测文件"))
        self.pushButton_3.setText(QtWidgets.QApplication.translate("Form", "PushButton"))
        self.label_11.setText(QtWidgets.QApplication.translate("Form", "TextLabel"))
        self.groupBox_2.setTitle(QtWidgets.QApplication.translate("Form", "MA参数"))
        self.label_5.setText(QtWidgets.QApplication.translate("Form", "N1"))
        self.label_7.setText(QtWidgets.QApplication.translate("Form", "N3"))
        self.label_6.setText(QtWidgets.QApplication.translate("Form", "N2"))
        self.label_8.setText(QtWidgets.QApplication.translate("Form", "N4"))
        self.label_9.setText(QtWidgets.QApplication.translate("Form", "N5"))
        self.label_10.setText(QtWidgets.QApplication.translate("Form", "算法"))
        self.comboBox_ma.setItemText(0, QtWidgets.QApplication.translate("Form", "MA"))
        self.comboBox_ma.setItemText(1, QtWidgets.QApplication.translate("Form", "EMA"))
        self.groupBox_3.setTitle(QtWidgets.QApplication.translate("Form", "MACD参数"))
        self.pushButton.setText(QtWidgets.QApplication.translate("Form", "确定"))
        self.pushButton_2.setText(QtWidgets.QApplication.translate("Form", "取消"))
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,195
|
smartgang/KViewer
|
refs/heads/master
|
/KViewer_new.py
|
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from Indexer import *
import pyqtgraph as pg
import pandas as pd
from ChildGraph import ChildGraph
import DataInterface.DataInterface as DI
class KViewer(QWidget):
    """K-line viewer widget.

    Layout, top to bottom: a settings bar (exchange/symbol/contract/period/
    date range plus buttons), the main candlestick graph, one or more child
    indicator graphs, and a range-control plot whose LinearRegionItem
    selects the visible bar range for every graph.
    """

    def __init__(self):
        super(KViewer, self).__init__()
        self.raw_data = None
        # Only the outermost layout may take `self` as parent; giving the
        # sub-layouts a parent too made Qt warn that the widget already has
        # a layout (they are nested via addLayout() below instead).
        self.main_layout = QVBoxLayout(self)
        self.setting_layout = QHBoxLayout()
        self.child_graph_layout = QVBoxLayout()
        self.region = pg.LinearRegionItem()
        self.region_minx = 0  # current bounds of the region control box
        self.region_maxx = 0
        self.range_control_plt = pg.PlotWidget()
        self.setting_view_btn = QPushButton('显示')
        self.setting_add_child_btn = QPushButton('增加子图')
        self.setting_remove_child_btn = QPushButton('删除子图')
        self.setting_end_date = QDateEdit()
        self.setting_start_date = QDateEdit()
        self.setting_bar_type_cb = QComboBox()
        self.setting_symbol_edit = QLineEdit()  # product / symbol
        self.setting_exchange_cb = QComboBox()
        self.setting_contract_edit = QLineEdit()  # contract
        self.setting_dic = {}
        # setup_child_graph() can run many times (once per click of the view
        # button); this flag keeps sigRegionChanged from being connected to
        # the same slot repeatedly.
        self._region_connected = False
        self.setup_ui()
        self.setup_range_control_view()
        self.main_child_graph = ChildGraph(False)
        self.main_child_graph.main_child_plt_changed.connect(self.main_child_plt_changed)
        self.child_graph_list = []
        second_child_graph = ChildGraph(True)
        self.child_graph_list.append(second_child_graph)
        self.child_graph_layout.addWidget(self.main_child_graph, stretch=2)
        self.child_graph_layout.addWidget(second_child_graph, stretch=2)
        self.child_graph_layout.addWidget(self.range_control_plt, stretch=1)
        self.main_layout.addLayout(self.setting_layout)
        self.main_layout.addLayout(self.child_graph_layout)
        self.setLayout(self.main_layout)

    def setup_ui(self):
        """Build the static UI (currently only the settings header)."""
        self.setup_ui_header()

    def setup_ui_header(self):
        """Populate the settings bar and wire its buttons."""
        self.setting_layout.addWidget(QLabel('交易所'))
        self.setting_exchange_cb.addItems(['SHFE', 'DCE', 'CZCE', 'CFFEX'])
        self.setting_layout.addWidget(self.setting_exchange_cb)
        self.setting_layout.addWidget(QLabel('品种'))
        self.setting_layout.addWidget(self.setting_symbol_edit)
        self.setting_layout.addWidget(QLabel('合约'))
        self.setting_layout.addWidget(self.setting_contract_edit)
        self.setting_layout.addWidget(QLabel('周期'))
        self.setting_bar_type_cb.addItems(['0', '60', '300', '600', '900', '1800', '3600'])
        self.setting_layout.addWidget(self.setting_bar_type_cb)
        self.setting_start_date.setDisplayFormat("yyyy-MM-dd")
        self.setting_end_date.setDisplayFormat("yyyy-MM-dd")
        self.setting_layout.addWidget(QLabel('开始日期'))
        self.setting_layout.addWidget(self.setting_start_date)
        self.setting_layout.addWidget(QLabel('结束日期'))
        self.setting_layout.addWidget(self.setting_end_date)
        self.setting_view_btn.clicked.connect(self.get_setting)
        self.setting_layout.addWidget(self.setting_view_btn)
        self.setting_layout.addWidget(self.setting_add_child_btn)
        self.setting_layout.addWidget(self.setting_remove_child_btn)
        self.setting_add_child_btn.clicked.connect(self.add_child_graph)
        self.setting_remove_child_btn.clicked.connect(self.remove_child_graph)

    def setup_range_control_view(self):
        """Add the draggable range-selection region to the bottom plot."""
        self.region.setZValue(10)
        self.range_control_plt.addItem(self.region)

    def get_setting(self):
        """Read the settings bar into setting_dic, then (re)load the graphs."""
        self.setting_dic['exchange'] = self.setting_exchange_cb.currentText()
        self.setting_dic['symbol'] = self.setting_symbol_edit.text()
        self.setting_dic['contract'] = self.setting_contract_edit.text()
        self.setting_dic['period'] = int(self.setting_bar_type_cb.currentText())
        self.setting_dic['start_date'] = self.setting_start_date.date().toString("yyyy-MM-dd")
        self.setting_dic['end_date'] = self.setting_end_date.date().toString("yyyy-MM-dd")
        self.setup_child_graph()

    def setup_child_graph(self):
        """Fetch bar data for the current settings and feed every graph."""
        domain_symbol = '.'.join([self.setting_dic['exchange'], self.setting_dic['symbol']])
        contract = self.setting_dic['contract']
        bar_type = self.setting_dic['period']
        start_date = self.setting_dic['start_date']
        end_date = self.setting_dic['end_date']
        self.raw_data = DI.getBarBySymbol(domain_symbol, contract, bar_type,
                                          start_date + ' 09:00:00', end_date + ' 15:00:00')
        self.main_child_graph.set_raw_data(self.raw_data)
        for second_child_graph in self.child_graph_list:
            second_child_graph.set_raw_data(self.raw_data)
        self.range_control_plt.plot(self.raw_data['close'], pen="w", name='close')
        if not self._region_connected:
            # Connect once; reconnecting on every data load would invoke
            # set_child_range multiple times per region drag.
            self.region.sigRegionChanged.connect(self.set_child_range)
            self._region_connected = True
        self.region.setRegion([0, 100])

    def set_child_range(self):
        """Propagate the region selection to every graph's visible range."""
        minX, maxX = self.region.getRegion()
        self.main_child_graph.update_visual_range(int(minX), int(maxX))
        for second_child_graph in self.child_graph_list:
            second_child_graph.update_visual_range(int(minX), int(maxX))

    def update_region(self, window, viewRange):
        """Keep the region in sync when the main graph itself is panned/zoomed."""
        rgn = viewRange[0]
        self.region.setRegion(rgn)
        self.region_minx, self.region_maxx = self.region.getRegion()

    def main_child_plt_changed(self):
        """Hook up range-sync and mouse tracking once the main plot exists."""
        self.main_child_graph.plt.sigRangeChanged.connect(self.update_region)
        self.proxy = pg.SignalProxy(self.main_child_graph.plt.scene().sigMouseMoved,
                                    rateLimit=60, slot=self.mouseMoved)

    def mouseMoved(self, event):
        """Map the mouse x position to a bar index and update the graph labels."""
        pos = event[0]  # SignalProxy wraps the original arguments in a tuple
        if self.main_child_graph.plt.sceneBoundingRect().contains(pos):
            a = self.main_child_graph.plt.boundingRect().getRect()
            knum = self.region_maxx - self.region_minx
            # (pos.x() - 35) is the distance from the left edge (35 px axis
            # margin); (a[2] - 35) / knum is the pixel width of one bar.
            # Dividing the two gives the bar offset under the cursor, and
            # adding region_minx yields the index into the full data list.
            rx = int((pos.x() - 35) / ((a[2] - 35) / knum) + self.region_minx)
            index = rx
            self.main_child_graph.set_indexer_label(index)
            for second_child_graph in self.child_graph_list:
                second_child_graph.set_indexer_label(index)

    def add_child_graph(self):
        """Append a new child indicator graph above the range-control plot."""
        second_child_graph = ChildGraph(True)
        self.child_graph_list.append(second_child_graph)
        if self.raw_data is not None:
            second_child_graph.set_raw_data(self.raw_data)
        self.child_graph_layout.insertWidget(len(self.child_graph_list),
                                             second_child_graph, stretch=2)

    def remove_child_graph(self):
        """Remove the most recently added child graph, if any."""
        if self.child_graph_list:
            graph = self.child_graph_list.pop()
            self.child_graph_layout.removeWidget(graph)
            graph.deleteLater()
# Standalone entry point: run a single KViewer widget as its own application.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    demo = KViewer()
    demo.show()
    # exec_() blocks until the window closes; its return code becomes the
    # process exit status.
    sys.exit(app.exec_())
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,196
|
smartgang/KViewer
|
refs/heads/master
|
/kviewer2.py
|
# -*- coding: utf-8 -*-
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Build the main-window widget tree (generated by pyuic).

        Layout: an indicator-checkbox group with draw/settings buttons on
        top, then a tab widget with a chart tab (tab_plot) and a parameter
        tab (tab_para) holding the MACD/MA/KDJ/DMI parameter groups.
        Generated code: statement order matters — do not reorder.
        """
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
        self.centralwidget.setSizePolicy(sizePolicy)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.gridLayout = QtWidgets.QGridLayout()
        self.gridLayout.setObjectName("gridLayout")
        # --- indicator on/off checkbox group (MA/DMI/MACD/KDJ + 2 spares) ---
        self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox.setObjectName("groupBox")
        self.checkBox_ma = QtWidgets.QCheckBox(self.groupBox)
        self.checkBox_ma.setGeometry(QtCore.QRect(40, 10, 71, 16))
        self.checkBox_ma.setObjectName("checkBox_ma")
        self.checkBox_dmi = QtWidgets.QCheckBox(self.groupBox)
        self.checkBox_dmi.setGeometry(QtCore.QRect(40, 30, 71, 16))
        self.checkBox_dmi.setObjectName("checkBox_dmi")
        self.checkBox_macd = QtWidgets.QCheckBox(self.groupBox)
        self.checkBox_macd.setGeometry(QtCore.QRect(140, 10, 71, 16))
        self.checkBox_macd.setObjectName("checkBox_macd")
        self.checkBox_kdj = QtWidgets.QCheckBox(self.groupBox)
        self.checkBox_kdj.setGeometry(QtCore.QRect(250, 10, 71, 16))
        self.checkBox_kdj.setObjectName("checkBox_kdj")
        self.checkBox_5 = QtWidgets.QCheckBox(self.groupBox)
        self.checkBox_5.setGeometry(QtCore.QRect(140, 30, 71, 16))
        self.checkBox_5.setObjectName("checkBox_5")
        self.checkBox_6 = QtWidgets.QCheckBox(self.groupBox)
        self.checkBox_6.setGeometry(QtCore.QRect(250, 30, 71, 16))
        self.checkBox_6.setObjectName("checkBox_6")
        self.gridLayout.addWidget(self.groupBox, 0, 0, 2, 1)
        # --- draw and set-parameters buttons next to the checkbox group ---
        self.pushButton_draw = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_draw.setObjectName("pushButton_draw")
        self.gridLayout.addWidget(self.pushButton_draw, 1, 1, 1, 1)
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setObjectName("pushButton")
        self.gridLayout.addWidget(self.pushButton, 0, 1, 1, 1)
        self.horizontalLayout.addLayout(self.gridLayout)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        # --- tab widget: chart tab (tab_plot) and parameter tab (tab_para) ---
        self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
        self.tabWidget.setObjectName("tabWidget")
        self.tab_plot = QtWidgets.QWidget()
        self.tab_plot.setObjectName("tab_plot")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.tab_plot)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setContentsMargins(-1, -1, -1, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Info labels above the plot area: parameters, cursor point, file.
        self.horizontalLayout_plot_field = QtWidgets.QHBoxLayout()
        self.horizontalLayout_plot_field.setObjectName("horizontalLayout_plot_field")
        self.label_para = QtWidgets.QLabel(self.tab_plot)
        self.label_para.setFrameShape(QtWidgets.QFrame.Box)
        self.label_para.setObjectName("label_para")
        self.horizontalLayout_plot_field.addWidget(self.label_para)
        self.label_point = QtWidgets.QLabel(self.tab_plot)
        self.label_point.setFrameShape(QtWidgets.QFrame.Box)
        self.label_point.setObjectName("label_point")
        self.horizontalLayout_plot_field.addWidget(self.label_point)
        self.label_file = QtWidgets.QLabel(self.tab_plot)
        self.label_file.setFrameShape(QtWidgets.QFrame.Box)
        self.label_file.setObjectName("label_file")
        self.horizontalLayout_plot_field.addWidget(self.label_file)
        self.verticalLayout.addLayout(self.horizontalLayout_plot_field)
        self.verticalLayout_3.addLayout(self.verticalLayout)
        self.tabWidget.addTab(self.tab_plot, "")
        self.tab_para = QtWidgets.QWidget()
        self.tab_para.setObjectName("tab_para")
        # --- MACD parameter group: Short / Long / M line edits ---
        self.groupBox_3 = QtWidgets.QGroupBox(self.tab_para)
        self.groupBox_3.setGeometry(QtCore.QRect(400, 110, 361, 91))
        self.groupBox_3.setObjectName("groupBox_3")
        self.label = QtWidgets.QLabel(self.groupBox_3)
        self.label.setGeometry(QtCore.QRect(30, 40, 41, 16))
        self.label.setObjectName("label")
        self.lineEdit_macd_short = QtWidgets.QLineEdit(self.groupBox_3)
        self.lineEdit_macd_short.setGeometry(QtCore.QRect(80, 40, 41, 20))
        self.lineEdit_macd_short.setObjectName("lineEdit_macd_short")
        self.label_2 = QtWidgets.QLabel(self.groupBox_3)
        self.label_2.setGeometry(QtCore.QRect(150, 40, 31, 16))
        self.label_2.setObjectName("label_2")
        self.lineEdit_macd_long = QtWidgets.QLineEdit(self.groupBox_3)
        self.lineEdit_macd_long.setGeometry(QtCore.QRect(190, 40, 41, 20))
        self.lineEdit_macd_long.setObjectName("lineEdit_macd_long")
        self.label_3 = QtWidgets.QLabel(self.groupBox_3)
        self.label_3.setGeometry(QtCore.QRect(260, 40, 21, 16))
        self.label_3.setObjectName("label_3")
        self.lineEdit_macd_m = QtWidgets.QLineEdit(self.groupBox_3)
        self.lineEdit_macd_m.setGeometry(QtCore.QRect(280, 40, 41, 20))
        self.lineEdit_macd_m.setObjectName("lineEdit_macd_m")
        # --- MA parameter group: N1..N5 line edits and algorithm combo ---
        self.groupBox_2 = QtWidgets.QGroupBox(self.tab_para)
        self.groupBox_2.setGeometry(QtCore.QRect(20, 110, 371, 90))
        self.groupBox_2.setObjectName("groupBox_2")
        self.gridLayoutWidget = QtWidgets.QWidget(self.groupBox_2)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 10, 351, 80))
        self.gridLayoutWidget.setObjectName("gridLayoutWidget")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.gridLayoutWidget)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.label_5 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_5.setObjectName("label_5")
        self.gridLayout_2.addWidget(self.label_5, 0, 0, 1, 1)
        self.label_7 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_7.setObjectName("label_7")
        self.gridLayout_2.addWidget(self.label_7, 0, 4, 1, 1)
        self.lineEdit_ma_n3 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n3.setObjectName("lineEdit_ma_n3")
        self.gridLayout_2.addWidget(self.lineEdit_ma_n3, 0, 5, 1, 1)
        self.lineEdit_ma_n2 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n2.setObjectName("lineEdit_ma_n2")
        self.gridLayout_2.addWidget(self.lineEdit_ma_n2, 0, 3, 1, 1)
        self.label_6 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.gridLayout_2.addWidget(self.label_6, 0, 2, 1, 1)
        self.lineEdit_ma_n1 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n1.setObjectName("lineEdit_ma_n1")
        self.gridLayout_2.addWidget(self.lineEdit_ma_n1, 0, 1, 1, 1)
        self.label_8 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_8.setObjectName("label_8")
        self.gridLayout_2.addWidget(self.label_8, 1, 0, 1, 1)
        self.lineEdit_ma_n4 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n4.setObjectName("lineEdit_ma_n4")
        self.gridLayout_2.addWidget(self.lineEdit_ma_n4, 1, 1, 1, 1)
        self.label_9 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_9.setObjectName("label_9")
        self.gridLayout_2.addWidget(self.label_9, 1, 2, 1, 1)
        self.lineEdit_ma_n5 = QtWidgets.QLineEdit(self.gridLayoutWidget)
        self.lineEdit_ma_n5.setObjectName("lineEdit_ma_n5")
        self.gridLayout_2.addWidget(self.lineEdit_ma_n5, 1, 3, 1, 1)
        self.label_10 = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label_10.setObjectName("label_10")
        self.gridLayout_2.addWidget(self.label_10, 1, 4, 1, 1)
        # MA algorithm combo (MA / EMA); texts filled in retranslateUi().
        self.comboBox_ma = QtWidgets.QComboBox(self.gridLayoutWidget)
        self.comboBox_ma.setObjectName("comboBox_ma")
        self.comboBox_ma.addItem("")
        self.comboBox_ma.addItem("")
        self.gridLayout_2.addWidget(self.comboBox_ma, 1, 5, 1, 1)
        # --- backtest-file group: open button plus filename label ---
        self.groupBox_5 = QtWidgets.QGroupBox(self.tab_para)
        self.groupBox_5.setGeometry(QtCore.QRect(400, 10, 361, 91))
        self.groupBox_5.setObjectName("groupBox_5")
        self.pushButton_opr_file = QtWidgets.QPushButton(self.groupBox_5)
        self.pushButton_opr_file.setGeometry(QtCore.QRect(30, 40, 75, 23))
        self.pushButton_opr_file.setObjectName("pushButton_opr_file")
        self.label_opr = QtWidgets.QLabel(self.groupBox_5)
        self.label_opr.setGeometry(QtCore.QRect(130, 40, 54, 12))
        self.label_opr.setObjectName("label_opr")
        # --- common-parameters group: contract, period combo, date range ---
        self.groupBox_4 = QtWidgets.QGroupBox(self.tab_para)
        self.groupBox_4.setGeometry(QtCore.QRect(20, 10, 371, 90))
        self.groupBox_4.setObjectName("groupBox_4")
        self.gridLayoutWidget_2 = QtWidgets.QWidget(self.groupBox_4)
        self.gridLayoutWidget_2.setGeometry(QtCore.QRect(9, 9, 351, 61))
        self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.gridLayoutWidget_2)
        self.gridLayout_3.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
        self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.lineEdit_contract = QtWidgets.QLineEdit(self.gridLayoutWidget_2)
        self.lineEdit_contract.setObjectName("lineEdit_contract")
        self.gridLayout_3.addWidget(self.lineEdit_contract, 0, 2, 1, 1)
        self.label_4 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_4.setObjectName("label_4")
        self.gridLayout_3.addWidget(self.label_4, 0, 3, 1, 1)
        self.label_12 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_12.setObjectName("label_12")
        self.gridLayout_3.addWidget(self.label_12, 0, 0, 1, 1)
        # Bar-period combo; the 7 item texts are filled in retranslateUi().
        self.comboBox_bar = QtWidgets.QComboBox(self.gridLayoutWidget_2)
        self.comboBox_bar.setObjectName("comboBox_bar")
        self.comboBox_bar.addItem("")
        self.comboBox_bar.addItem("")
        self.comboBox_bar.addItem("")
        self.comboBox_bar.addItem("")
        self.comboBox_bar.addItem("")
        self.comboBox_bar.addItem("")
        self.comboBox_bar.addItem("")
        self.gridLayout_3.addWidget(self.comboBox_bar, 0, 4, 1, 1)
        self.label_13 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_13.setObjectName("label_13")
        self.gridLayout_3.addWidget(self.label_13, 1, 0, 1, 1)
        self.label_14 = QtWidgets.QLabel(self.gridLayoutWidget_2)
        self.label_14.setObjectName("label_14")
        self.gridLayout_3.addWidget(self.label_14, 1, 3, 1, 1)
        self.dateEdit_end = QtWidgets.QDateEdit(self.gridLayoutWidget_2)
        self.dateEdit_end.setDateTime(QtCore.QDateTime(QtCore.QDate(2018, 6, 30), QtCore.QTime(0, 0, 0)))
        self.dateEdit_end.setObjectName("dateEdit_end")
        self.gridLayout_3.addWidget(self.dateEdit_end, 1, 4, 1, 1)
        self.dateEdit_start = QtWidgets.QDateEdit(self.gridLayoutWidget_2)
        self.dateEdit_start.setObjectName("dateEdit_start")
        self.gridLayout_3.addWidget(self.dateEdit_start, 1, 2, 1, 1)
        self.pushButton_set_para = QtWidgets.QPushButton(self.tab_para)
        self.pushButton_set_para.setGeometry(QtCore.QRect(360, 330, 75, 23))
        self.pushButton_set_para.setObjectName("pushButton_set_para")
        # --- KDJ parameter group: N / M1 / M2 line edits ---
        self.groupBox_6 = QtWidgets.QGroupBox(self.tab_para)
        self.groupBox_6.setGeometry(QtCore.QRect(20, 210, 371, 80))
        self.groupBox_6.setObjectName("groupBox_6")
        self.lineEdit_kdj_n = QtWidgets.QLineEdit(self.groupBox_6)
        self.lineEdit_kdj_n.setGeometry(QtCore.QRect(40, 30, 51, 20))
        self.lineEdit_kdj_n.setObjectName("lineEdit_kdj_n")
        self.lineEdit_kdj_m1 = QtWidgets.QLineEdit(self.groupBox_6)
        self.lineEdit_kdj_m1.setGeometry(QtCore.QRect(160, 30, 51, 20))
        self.lineEdit_kdj_m1.setObjectName("lineEdit_kdj_m1")
        self.lineEdit_kdj_m2 = QtWidgets.QLineEdit(self.groupBox_6)
        self.lineEdit_kdj_m2.setGeometry(QtCore.QRect(270, 30, 51, 20))
        self.lineEdit_kdj_m2.setObjectName("lineEdit_kdj_m2")
        self.label_11 = QtWidgets.QLabel(self.groupBox_6)
        self.label_11.setGeometry(QtCore.QRect(20, 30, 21, 16))
        self.label_11.setObjectName("label_11")
        self.label_15 = QtWidgets.QLabel(self.groupBox_6)
        self.label_15.setGeometry(QtCore.QRect(140, 30, 21, 16))
        self.label_15.setObjectName("label_15")
        self.label_16 = QtWidgets.QLabel(self.groupBox_6)
        self.label_16.setGeometry(QtCore.QRect(250, 30, 21, 16))
        self.label_16.setObjectName("label_16")
        # --- DMI parameter group: N / M line edits ---
        self.groupBox_7 = QtWidgets.QGroupBox(self.tab_para)
        self.groupBox_7.setGeometry(QtCore.QRect(400, 210, 361, 81))
        self.groupBox_7.setObjectName("groupBox_7")
        self.lineEdit_dmi_n = QtWidgets.QLineEdit(self.groupBox_7)
        self.lineEdit_dmi_n.setGeometry(QtCore.QRect(70, 30, 41, 20))
        self.lineEdit_dmi_n.setObjectName("lineEdit_dmi_n")
        self.lineEdit_dmi_m = QtWidgets.QLineEdit(self.groupBox_7)
        self.lineEdit_dmi_m.setGeometry(QtCore.QRect(190, 30, 41, 20))
        self.lineEdit_dmi_m.setObjectName("lineEdit_dmi_m")
        self.label_17 = QtWidgets.QLabel(self.groupBox_7)
        self.label_17.setGeometry(QtCore.QRect(40, 30, 31, 16))
        self.label_17.setObjectName("label_17")
        self.label_18 = QtWidgets.QLabel(self.groupBox_7)
        self.label_18.setGeometry(QtCore.QRect(170, 30, 21, 16))
        self.label_18.setObjectName("label_18")
        self.tabWidget.addTab(self.tab_para, "")
        self.verticalLayout_2.addWidget(self.tabWidget)
        MainWindow.setCentralWidget(self.centralwidget)
        # Menubar and statusbar, then string setup and slot auto-connection.
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set all user-visible strings of the UI via Qt's translate calls.

        NOTE(review): this method appears to be uic-generated — manual edits
        are likely to be overwritten the next time the .ui file is compiled.
        """
        MainWindow.setWindowTitle(QtWidgets.QApplication.translate("MainWindow", "MainWindow"))
        # Indicator on/off checkboxes.
        self.groupBox.setTitle(QtWidgets.QApplication.translate("MainWindow", "指标开关"))
        self.checkBox_ma.setText(QtWidgets.QApplication.translate("MainWindow", "MA"))
        self.checkBox_dmi.setText(QtWidgets.QApplication.translate("MainWindow", "DMI"))
        self.checkBox_macd.setText(QtWidgets.QApplication.translate("MainWindow", "MACD"))
        self.checkBox_kdj.setText(QtWidgets.QApplication.translate("MainWindow", "KDJ"))
        # checkBox_5/6 still carry the designer's placeholder text.
        self.checkBox_5.setText(QtWidgets.QApplication.translate("MainWindow", "CheckBox"))
        self.checkBox_6.setText(QtWidgets.QApplication.translate("MainWindow", "CheckBox"))
        self.pushButton_draw.setText(QtWidgets.QApplication.translate("MainWindow", "绘图"))
        self.pushButton.setText(QtWidgets.QApplication.translate("MainWindow", "设置参数"))
        # Status labels (placeholders, filled at runtime).
        self.label_para.setText(QtWidgets.QApplication.translate("MainWindow", "TextLabel"))
        self.label_point.setText(QtWidgets.QApplication.translate("MainWindow", "TextLabel"))
        self.label_file.setText(QtWidgets.QApplication.translate("MainWindow", "TextLabel"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_plot), QtWidgets.QApplication.translate("MainWindow", "行情"))
        # MACD parameter group.
        self.groupBox_3.setTitle(QtWidgets.QApplication.translate("MainWindow", "MACD参数"))
        self.label.setText(QtWidgets.QApplication.translate("MainWindow", "Short"))
        self.lineEdit_macd_short.setText(QtWidgets.QApplication.translate("MainWindow", "12"))
        self.label_2.setText(QtWidgets.QApplication.translate("MainWindow", "Long"))
        self.lineEdit_macd_long.setText(QtWidgets.QApplication.translate("MainWindow", "26"))
        self.label_3.setText(QtWidgets.QApplication.translate("MainWindow", "M"))
        self.lineEdit_macd_m.setText(QtWidgets.QApplication.translate("MainWindow", "9"))
        # MA parameter group (five periods + algorithm choice).
        self.groupBox_2.setTitle(QtWidgets.QApplication.translate("MainWindow", "MA参数"))
        self.label_5.setText(QtWidgets.QApplication.translate("MainWindow", "N1"))
        self.label_7.setText(QtWidgets.QApplication.translate("MainWindow", "N3"))
        self.lineEdit_ma_n3.setText(QtWidgets.QApplication.translate("MainWindow", "20"))
        self.lineEdit_ma_n2.setText(QtWidgets.QApplication.translate("MainWindow", "10"))
        self.label_6.setText(QtWidgets.QApplication.translate("MainWindow", "N2"))
        self.lineEdit_ma_n1.setText(QtWidgets.QApplication.translate("MainWindow", "5"))
        self.label_8.setText(QtWidgets.QApplication.translate("MainWindow", "N4"))
        self.lineEdit_ma_n4.setText(QtWidgets.QApplication.translate("MainWindow", "30"))
        self.label_9.setText(QtWidgets.QApplication.translate("MainWindow", "N5"))
        self.lineEdit_ma_n5.setText(QtWidgets.QApplication.translate("MainWindow", "50"))
        self.label_10.setText(QtWidgets.QApplication.translate("MainWindow", "算法"))
        self.comboBox_ma.setItemText(0, QtWidgets.QApplication.translate("MainWindow", "MA"))
        self.comboBox_ma.setItemText(1, QtWidgets.QApplication.translate("MainWindow", "EMA"))
        # Backtest-file group.
        self.groupBox_5.setTitle(QtWidgets.QApplication.translate("MainWindow", "回测文件"))
        self.pushButton_opr_file.setText(QtWidgets.QApplication.translate("MainWindow", "PushButton"))
        self.label_opr.setText(QtWidgets.QApplication.translate("MainWindow", "TextLabel"))
        # Common parameters: bar period (seconds), contract, date range.
        self.groupBox_4.setTitle(QtWidgets.QApplication.translate("MainWindow", "公共参数"))
        self.label_4.setText(QtWidgets.QApplication.translate("MainWindow", "周期"))
        self.label_12.setText(QtWidgets.QApplication.translate("MainWindow", "合约"))
        self.comboBox_bar.setItemText(0, QtWidgets.QApplication.translate("MainWindow", "0"))
        self.comboBox_bar.setItemText(1, QtWidgets.QApplication.translate("MainWindow", "60"))
        self.comboBox_bar.setItemText(2, QtWidgets.QApplication.translate("MainWindow", "300"))
        self.comboBox_bar.setItemText(3, QtWidgets.QApplication.translate("MainWindow", "600"))
        self.comboBox_bar.setItemText(4, QtWidgets.QApplication.translate("MainWindow", "900"))
        self.comboBox_bar.setItemText(5, QtWidgets.QApplication.translate("MainWindow", "1800"))
        self.comboBox_bar.setItemText(6, QtWidgets.QApplication.translate("MainWindow", "3600"))
        self.label_13.setText(QtWidgets.QApplication.translate("MainWindow", "开始时间"))
        self.label_14.setText(QtWidgets.QApplication.translate("MainWindow", "结束时间"))
        self.pushButton_set_para.setText(QtWidgets.QApplication.translate("MainWindow", "设置"))
        # KDJ parameter group.
        self.groupBox_6.setTitle(QtWidgets.QApplication.translate("MainWindow", "KDJ参数"))
        self.lineEdit_kdj_n.setText(QtWidgets.QApplication.translate("MainWindow", "9"))
        self.lineEdit_kdj_m1.setText(QtWidgets.QApplication.translate("MainWindow", "3"))
        self.lineEdit_kdj_m2.setText(QtWidgets.QApplication.translate("MainWindow", "3"))
        self.label_11.setText(QtWidgets.QApplication.translate("MainWindow", "N"))
        self.label_15.setText(QtWidgets.QApplication.translate("MainWindow", "M1"))
        self.label_16.setText(QtWidgets.QApplication.translate("MainWindow", "M2"))
        # DMI parameter group.
        self.groupBox_7.setTitle(QtWidgets.QApplication.translate("MainWindow", "DMI参数"))
        self.lineEdit_dmi_n.setText(QtWidgets.QApplication.translate("MainWindow", "14"))
        self.lineEdit_dmi_m.setText(QtWidgets.QApplication.translate("MainWindow", "6"))
        self.label_17.setText(QtWidgets.QApplication.translate("MainWindow", "N"))
        self.label_18.setText(QtWidgets.QApplication.translate("MainWindow", "M"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_para), QtWidgets.QApplication.translate("MainWindow", "参数设置"))
|
{"/ChildGraph.py": ["/Indexer/__init__.py"], "/complexExample.py": ["/complex2.py"], "/decouple_window.py": ["/nullWindow.py"], "/kviewer_app.py": ["/kviewer2.py", "/indexer.py", "/parameter2.py"], "/Indexer/IndexerWidget.py": ["/Indexer/__init__.py"], "/MainFrame.py": ["/KViewer_new.py"], "/KViewer_new.py": ["/Indexer/__init__.py", "/ChildGraph.py", "/DataInterface/DataInterface.py"]}
|
3,204
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/examples/complex_sequences.py
|
from geneblocks import DiffBlocks, CommonBlocks, random_dna_sequence
import geneblocks.sequence_modification_utils as smu
import matplotlib.pyplot as plt
import numpy
numpy.random.seed(1) # ensures the sequences will be the same at each run
# GENERATE 2 "SISTER" SEQUENCES FOR THE EXAMPLE
seq1 = random_dna_sequence(50000)
# Presumably copies the 25000-30000 segment to position 50000 — confirm
# against sequence_modification_utils.copy.
seq1 = smu.copy(seq1, 25000, 30000, 50000)
# Derive seq2 from seq1 through a chain of edits. Order matters: each call
# works on the coordinates of the previous result.
seq2 = seq1
seq2 = smu.insert(seq2, 39000, random_dna_sequence(100))
seq2 = smu.insert(seq2, 38000, random_dna_sequence(100))
seq2 = smu.reverse(seq2, 30000, 35000)
# NOTE(review): 480000 is far beyond the sequence length — looks like a
# typo for 48000; the region is presumably clamped to the sequence end.
# Confirm intended coordinates before changing (test_complex_sequences
# uses the same value).
seq2 = smu.swap(seq2, (30000, 35000), (45000, 480000))
seq2 = smu.delete(seq2, 20000, 2000)
seq2 = smu.insert(seq2, 10000, random_dna_sequence(2000))
seq2 = smu.insert(seq2, 0, 1000*"A")
# FIND COMMON BLOCKS AND DIFFS
common_blocks = CommonBlocks.from_sequences({'seq1': seq1, 'seq2': seq2})
diff_blocks = DiffBlocks.from_sequences(seq1, seq2).merged()
# PLOT EVERYTHING
fig, axes = plt.subplots(3, 1, figsize=(16, 8))
common_blocks.plot_common_blocks(axes=axes[:-1])
diff_blocks.plot(ax=axes[-1], separate_axes=False)
axes[-1].set_xlabel("Changes in seq2 vs. seq1")
fig.savefig("complex_sequences.png", bbox_inches='tight')
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,205
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/tests/test_complex_sequences.py
|
from geneblocks import DiffBlocks, CommonBlocks, random_dna_sequence
import geneblocks.sequence_modification_utils as smu
import matplotlib.pyplot as plt
def test_complex_sequences():
    """Build two 'sister' sequences via seeded edits, then check that the
    merged diff blocks describe and reconstruct them exactly."""
    seq1 = random_dna_sequence(50000, seed=123)
    seq1 = smu.copy(seq1, 25000, 30000, 50000)
    # Each smu call works on the coordinates of the previous result.
    seq2 = seq1
    seq2 = smu.insert(seq2, 39000, random_dna_sequence(100))
    seq2 = smu.insert(seq2, 38000, random_dna_sequence(100))
    seq2 = smu.reverse(seq2, 30000, 35000)
    # NOTE(review): 480000 is beyond the sequence end — likely a typo for
    # 48000. Kept as-is: the assertions below pin this exact behavior.
    seq2 = smu.swap(seq2, (30000, 35000), (45000, 480000))
    seq2 = smu.delete(seq2, 20000, 2000)
    seq2 = smu.insert(seq2, 10000, random_dna_sequence(2000))
    seq2 = smu.insert(seq2, 0, 1000 * "A")
    diff_blocks = DiffBlocks.from_sequences(seq1, seq2).merged()
    b = diff_blocks.blocks
    assert len(b) == 15
    # First block: the 1000-"A" insertion at position 0.
    assert b[0].operation == "insert"
    start, end, _ = b[0].s2_location.to_tuple()
    assert end - start == 1000
    assert b[1].operation == "equal"
    # Third block: the 2000-nucleotide insertion at position 10000.
    assert b[2].operation == "insert"
    start, end, _ = b[2].s2_location.to_tuple()
    assert end - start == 2000
    assert sorted([b[6].operation, b[7].operation]) == ["change", "transpose"]
    assert sorted([b[-1].operation, b[-2].operation]) == ["change", "reverse"]
    # Round-trip: the blocks must reconstruct both sequences exactly.
    s1, s2 = diff_blocks.reconstruct_sequences_from_blocks(diff_blocks.blocks)
    assert s1 == seq1
    assert s2 == seq2
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,206
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/tests/test_base_diff_cases.py
|
from geneblocks.DiffBlocks import DiffBlock, DiffBlocks
from geneblocks.biotools import random_dna_sequence
def diff_string(seq1, seq2, contract_under=0):
    """Return the string form of the diff blocks between two sequences.

    NOTE(review): ``contract_under`` is accepted but never used — it was
    presumably meant to be forwarded to the diff computation; confirm.
    """
    return str(DiffBlocks.from_sequences(seq1, seq2))
# Flank variants used to pad the test sequences: a trivial 10-"A" flank,
# and a long random one (large enough to exercise the blast-accelerated
# common-blocks path in DiffBlocks.from_sequences).
flanks_dict = {
    'short': 10 * "A",
    'long': random_dna_sequence(10000)
}
def flanked(seq, flanks='short'):
    """Return *seq* padded on both sides with the flank chosen by *flanks*."""
    pad = flanks_dict[flanks]
    return "".join((pad, seq, pad))
def test_1():
    # A single substitution between short flanks: one 'replace' block.
    assert diff_string(
        flanked("T", "short"),
        flanked("C", "short")
    ) == 'replace 10-11|10-11'
def test_2():
    # Same single substitution with 10kb flanks: the flanks are reported
    # as 'equal' blocks around the 'replace'.
    assert diff_string(
        flanked("T", "long"),
        flanked("C", "long")
    ) == ', '.join([
        'equal 0-10000|0-10000',
        'replace 10000-10001|10000-10001',
        'equal 10001-20001|10001-20001'
    ])
def test_3():
    # Three isolated substitutions in a short sequence: three 'replace'
    # blocks (no 'equal' blocks reported for a short input).
    assert diff_string(
        "AATAATAAT",
        "AAAAAAAAA"
    ) == 'replace 2-3|2-3, replace 5-6|5-6, replace 8-9|8-9'
def test_4():
    # The same three substitutions between long flanks, now surrounded by
    # 'equal' blocks (coordinates offset by the 10002-long left context).
    assert diff_string(
        flanked("AATAATAAT", 'long'),
        flanked("AAAAAAAAA", 'long')
    ) == ', '.join([
        'equal 0-10002|0-10002',
        'replace 10002-10003|10002-10003',
        'replace 10005-10006|10005-10006',
        'replace 10008-10009|10008-10009',
        'equal 10009-20009|10009-20009'
    ])
def test_5():
    # A deletion plus a compensating insertion in a short sequence.
    assert diff_string(
        "AAAAATTTTGGAAA",
        "AAATTTTGGAAAAA"
    ) == 'delete 3-5|3-3, insert 11-11|9-11'
def test_6():
    # Same indel pattern with long flanks: 'equal' blocks appear and all
    # coordinates are offset by the left flank.
    assert diff_string(
        flanked("AAAAATTTTGGAAA", 'long'),
        flanked("AAATTTTGGAAAAA", 'long')
    ) == ', '.join([
        'equal 0-10003|0-10003',
        'delete 10003-10005|10003-10003',
        'insert 10011-10011|10009-10011',
        'equal 10011-20014|10011-20014'
    ])
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,207
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/Location.py
|
from .biotools import reverse_complement
from Bio.SeqFeature import SeqFeature, FeatureLocation
class Location:
    """A (start, end, strand) span, optionally attached to a sequence.

    Parameters
    ----------
    start, end
      0-based span boundaries, end exclusive (Python slicing convention).
    strand
      1, -1, 0 or None; -1 means the span reads on the reverse strand.
    sequence
      Optional sequence (string or record) the location refers to.
    sequence_id
      Optional identifier, shown as a prefix in the representation.
    """

    def __init__(
        self, start, end, strand=None, sequence=None, sequence_id=None
    ):
        self.start = start
        self.end = end
        self.strand = strand
        self.sequence = sequence
        self.sequence_id = sequence_id

    def extract_sequence(self, sequence=None):
        """Return the subsequence read at the given location.
        If sequence is None, ``self.sequence`` is used.
        """
        source = self.sequence if sequence is None else sequence
        if hasattr(source, "seq"):
            source = str(source.seq)
        subsequence = source[self.start : self.end]
        if self.strand == -1:
            return reverse_complement(subsequence)
        return subsequence

    def __repr__(self):
        """Return e.g. ``seq_id|12-45(+)``."""
        text = "%d-%d" % (self.start, self.end)
        if self.strand is not None:
            text += {1: "(+)", -1: "(-)", 0: ""}[self.strand]
        if self.sequence_id is not None:
            text = self.sequence_id + "|" + text
        return text

    def __len__(self):
        """Size of the location."""
        return abs(self.end - self.start)

    def to_tuple(self):
        """Return the location as a (start, end, strand) tuple."""
        return self.start, self.end, self.strand

    def to_biopython_location(self):
        """Return a Biopython FeatureLocation equivalent to the location."""
        coords = [
            None if value is None else int(value)
            for value in (self.start, self.end, self.strand)
        ]
        return FeatureLocation(*coords)

    def to_biopython_feature(self, feature_type="misc_feature", **qualifiers):
        """Return a Biopython SeqFeature with same location and custom
        qualifiers."""
        return SeqFeature(
            self.to_biopython_location(),
            type=feature_type,
            qualifiers=qualifiers,
        )
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,208
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/DiffBlocks/DiffBlocks.py
|
from copy import deepcopy
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
from ..Location import Location
from ..biotools import sequence_to_record
from ..CommonBlocks import CommonBlocks
from .DiffBlock import DiffBlock
from .DiffRecordTranslator import DiffRecordTranslator
from .diffblocks_tools import (
compute_levenshtein_blocks,
get_optimal_common_blocks,
merge_blocs_by_location,
merge_successive_blocks,
compute_sorted_blocks,
)
class DiffBlocks:
    """Class to generate and represent DiffBlocks.
    Usage:
    >>> DiffBlocks.from_sequences(s1, s2)
    """
    def __init__(self, s1, s2, blocks):
        # s1, s2: the two compared sequences (strings or Biopython records).
        # blocks: list of DiffBlock objects describing the s1 -> s2 edits.
        self.s1 = s1
        self.s2 = s2
        self.blocks = blocks
    @staticmethod
    def from_sequences(s1, s2, blast_over=500, max_complexity=1e8):
        """Create DiffBlocks by comparing two sequences.
        Parameters
        ----------
        s1, s2
          Two sequences, either "ATGC..." strings or Biopython records.
        blast_over
          A blast will be triggered to accelerate homology finding if
          len(s1) + len(s2) > blast_over.
        max_complexity
          If len(s1) * len(s2) is over max_complexity, no analysis is done and
          s1 is just labeled as a "change" of s2 (useful internally during the
          recursions of this method).
        """
        # Note: the sequences will always be upperized before they are
        # compared. however we also need to keep the initial sequences to
        # create the final blocks (possibly with upper/lowercase nucleotides)
        # If the sequences are records, convert to string
        seq_s1 = str(s1.seq) if hasattr(s1, "seq") else str(s1)
        seq_s2 = str(s2.seq) if hasattr(s2, "seq") else str(s2)
        # Simple case to eliminate the trivial case of equality
        if seq_s1.upper() == seq_s2.upper():
            return DiffBlocks(s1, s2, [])
        # If the sequences are too big for straight-on Levenshtein, first
        # find the large sub-blocks that are identical, and the ones that
        # differ.
        if (blast_over is not None) and (len(s1) + len(s2)) > blast_over:
            diffblocks = []
            # Use CommonBlocks to find all big common blocks
            sequences = {"s1": s1, "s2": s2}
            common_blocks = CommonBlocks.from_sequences(
                sequences,
                min_block_size=100,
                include_self_homologies=False,
                block_selection_method="larger_first",
            ).common_blocks
            blocks_in_seqs, remarks = get_optimal_common_blocks(common_blocks)
            # First, each common block is added as an "equal" diffblock
            for b1, b2 in zip(blocks_in_seqs["s1"], blocks_in_seqs["s2"]):
                diffblocks.append(
                    DiffBlock(
                        "equal",
                        s1_location=Location(*b1[:2], sequence=s1),
                        s2_location=Location(*b2[:2], sequence=s2),
                    )
                )
            # for sequence in s1, s2, complete the sequence's list of blocks
            # with a (0, 0, "START") on the left, (L, L, "END") on the right.
            for seq, blocks in blocks_in_seqs.items():
                blocks_in_seqs[seq] = (
                    [(0, 0, "START")]
                    + blocks_in_seqs[seq]
                    + [(len(sequences[seq]), len(sequences[seq]), "END")]
                )
            # Diff each inter-block gap (the region between the end of one
            # common block and the start of the next, in each sequence).
            for i in range(len(blocks_in_seqs["s2"]) - 1):
                _, end1, _ = blocks_in_seqs["s1"][i]
                next_start1, _, _ = blocks_in_seqs["s1"][i + 1]
                _, end2, _ = blocks_in_seqs["s2"][i]
                next_start2, _, _ = blocks_in_seqs["s2"][i + 1]
                if next_start2 < end2:
                    # Consecutive s2 blocks overlap: model the gap as a
                    # plain deletion from s1.
                    # NOTE(review): this list does not appear to be appended
                    # to ``diffblocks`` anywhere — confirm against upstream
                    # whether the deletion is intentionally dropped here.
                    subdiffblocks = [
                        DiffBlock(
                            "delete",
                            s1_location=Location(end1, next_start1, sequence=s1),
                            s2_location=Location(next_start2, next_start2, sequence=s2),
                        )
                    ]
                else:
                    subsequence_1 = s1[end1:next_start1]
                    subsequence_2 = s2[end2:next_start2]
                    # Recurse on the (small) gap, with blast disabled.
                    subdiffblocks = DiffBlocks.from_sequences(
                        subsequence_1,
                        subsequence_2,
                        blast_over=None,
                        max_complexity=max_complexity,
                    )
                    # Shift the sub-blocks' local coordinates back into
                    # full-sequence coordinates.
                    for block in subdiffblocks.blocks:
                        block.s1_location.start += end1
                        block.s1_location.end += end1
                        block.s1_location.sequence = s1
                        block.s2_location.start += end2
                        block.s2_location.end += end2
                        block.s2_location.sequence = s2
                    diffblocks += subdiffblocks.blocks
            # Drop blocks that are zero-length on both sequences.
            diffblocks = [
                b for b in diffblocks if len(b.s1_location) or len(b.s2_location)
            ]
            sorted_blocks = compute_sorted_blocks(diffblocks + remarks)
            return DiffBlocks(s1, s2, sorted_blocks)
        # Small-sequence path: full alignment on the upperized strings.
        s1_std = str(s1.seq if hasattr(s1, "seq") else s1).upper()
        s2_std = str(s2.seq if hasattr(s2, "seq") else s2).upper()
        levenshtein_blocks = compute_levenshtein_blocks(
            s1_std, s2_std, max_complexity=max_complexity
        )
        blocks = [
            DiffBlock(
                operation,
                Location(s1s, s1e, sequence=s1),
                Location(s2s, s2e, sequence=s2),
            )
            for operation, (s1s, s1e), (s2s, s2e) in levenshtein_blocks
        ]
        return DiffBlocks(s1, s2, blocks)
    def merged(
        self, blocks_per_span=(3, 600), change_gap=100, replace_gap=10, reference="s2",
    ):
        """Return a new DiffBlocks where nearby blocks have been merged
        (exact merging semantics are in the diffblocks_tools helpers).
        """
        # "reverse"/"transpose" remark blocks are kept aside and re-added
        # unmerged at the end.
        blocks = [b for b in self.blocks if b.operation not in ["reverse", "transpose"]]
        remarks = [b for b in self.blocks if b.operation in ["reverse", "transpose"]]
        if blocks_per_span is not None:
            max_blocks, span = blocks_per_span
            blocks = merge_blocs_by_location(
                blocks=blocks,
                max_blocks=max_blocks,
                max_span=span,
                reference=reference,
            )
        if change_gap is not None:
            blocks = merge_successive_blocks(
                blocks=blocks,
                change_gap=change_gap,
                replace_gap=replace_gap,
                # NOTE(review): passes the literal "s2" rather than the
                # ``reference`` argument — confirm this is intentional.
                reference="s2",
            )
        blocks = compute_sorted_blocks(blocks + remarks)
        return DiffBlocks(s1=self.s1, s2=self.s2, blocks=blocks)
    def sort_blocks(self):
        """Sort ``self.blocks`` in place."""
        self.blocks = compute_sorted_blocks(self.blocks)
    def diffs_as_features(self, sequence="s2"):
        """Return the diff blocks as Biopython features on ``sequence``."""
        return [block.to_feature(sequence=sequence) for block in self.blocks]
    def plot(
        self, translator_class="default", separate_axes=True, sequence="s2", **plot_kw
    ):
        """Plot the record and its diff blocks on matplotlib axes.

        With ``separate_axes=True`` returns an ``(ax1, ax2)`` pair (record
        on top, diffs below); otherwise returns a single axis with the diff
        features overlaid on the record.
        """
        if translator_class == "default":
            translator_class = DiffRecordTranslator
        translator = translator_class()
        record = deepcopy(self.s2 if sequence == "s2" else self.s1)
        if not hasattr(record, "features"):
            record = sequence_to_record(record)
        diff_features = self.diffs_as_features(sequence=sequence)
        if separate_axes:
            gr_record = translator.translate_record(record)
            record.features = diff_features
            gr_diffrecord = DiffRecordTranslator().translate_record(record)
            width = plot_kw.get("figure_width", 8)
            if "axes" in plot_kw:
                ax1, ax2 = plot_kw["axes"]
                fig = ax1.figure
            else:
                fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(width, 6))
            plot_kw["annotate_inline"] = plot_kw.get("annotate_inline", True)
            # First draw to measure how many annotation levels each plot
            # needs; stats come from the DnaFeaturesViewer plot() call.
            _, stats1 = gr_record.plot(ax=ax1, **plot_kw)
            _, stats2 = gr_diffrecord.plot(ax=ax2, with_ruler=False, **plot_kw)
            max_features_1 = gr_record.feature_level_height * max(
                [0] + [v for v in stats1[0].values()]
            )
            max_level_1 = max(
                [max_features_1] + [v["annotation_y"] for v in stats1[1].values()]
            )
            max_level_2 = max([1] + [v["annotation_y"] for v in stats2[1].values()]) + 2
            max_level_1 = int(np.round(max_level_1))
            max_level_2 = int(np.round(max_level_2))
            # print (stats2)
            n_levels = max_level_1 + max_level_2
            if max_level_1 and max_level_2:
                # Redraw on a fresh figure sized to the measured number of
                # levels, splitting the vertical space with a GridSpec.
                plt.close(fig)
                ##
                easing = 3
                gs = gridspec.GridSpec(n_levels + 2 * easing, 1)
                fig = plt.figure(figsize=(width, 1 + 0.5 * n_levels), facecolor="w")
                ax1 = fig.add_subplot(gs[: max_level_1 + easing])
                ax2 = fig.add_subplot(gs[max_level_1 + easing :])
                _, stats1 = gr_record.plot(ax=ax1, **plot_kw)
                _, stats2 = gr_diffrecord.plot(ax=ax2, with_ruler=False, **plot_kw)
                # fig.set_size_inches((width, 3 + 0.4 * n_levels))
            ax2.set_ylim(bottom=-2)
            ax2.invert_yaxis()
            # Shade each diff region on the top (record) axis.
            for f in gr_diffrecord.features:
                ax1.fill_between(
                    [f.start, f.end],
                    y1=max_features_1 + 1,
                    y2=-1,
                    facecolor=f.color,
                    alpha=0.07,
                    zorder=1000,
                )
            return (ax1, ax2)
        else:
            record.features += diff_features
            gr_record = translator.translate_record(record)
            ax, _ = gr_record.plot(**plot_kw)
            return ax
    @staticmethod
    def reconstruct_sequences_from_blocks(blocks):
        """Rebuild the (s1, s2) strings from a list of blocks (useful as a
        round-trip sanity check, see tests)."""
        s1, s2 = "", ""
        blocks = sorted(blocks, key=lambda b: b.s2_location.to_tuple())
        for block in blocks:
            # "delete" contributes only to s1, "insert" only to s2.
            if block.operation in ("equal", "replace", "change", "delete"):
                s1 = s1 + block.s1_location.extract_sequence()
            if block.operation in ("equal", "replace", "change", "insert"):
                s2 = s2 + block.s2_location.extract_sequence()
        return s1, s2
    def __str__(self):
        return ", ".join([str(b) for b in self.blocks])
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,209
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/CommonBlocks/__init__.py
|
from .CommonBlocks import CommonBlocks
__all__ = ['CommonBlocks']
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,210
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/biotools.py
|
import tempfile
import subprocess
import numpy as np
try:
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.SeqFeature import SeqFeature, FeatureLocation
from Bio import SeqIO
BIOPYTHON_AVAILABLE = True
except ImportError:
BIOPYTHON_AVAILABLE = False
try:
# Biopython <1.78
from Bio.Alphabet import DNAAlphabet
has_dna_alphabet = True
except ImportError:
# Biopython >=1.78
has_dna_alphabet = False
complements_dict = {"A": "T", "T": "A", "C": "G", "G": "C"}
def random_dna_sequence(length, probas=None, seed=None):
    """Generate a random DNA string such as "ATGGCGT..." of the given length.

    Parameters
    ----------
    length
      Number of nucleotides in the returned sequence.
    probas
      Optional nucleotide frequencies, for instance
      ``probas={"A": 0.2, "T": 0.3, "G": 0.3, "C": 0.2}``. When omitted,
      each of A, T, G, C is drawn with probability 0.25.
    seed
      Optional seed for Numpy's random generator. Providing a seed makes
      the result deterministic, enabling reproducibility.
    """
    if seed is not None:
        np.random.seed(seed)
    if probas is None:
        nucleotides = np.random.choice(list("ATCG"), length)
    else:
        letters, frequencies = zip(*probas.items())
        nucleotides = np.random.choice(letters, length, p=frequencies)
    return "".join(nucleotides)
def load_record(filename, linear=True, name="id", upperize=True):
    """Read a Genbank or FASTA file into a single Biopython record.

    Files whose name ends in "gb" or "gbk" are parsed as Genbank, anything
    else as FASTA. The sequence is upper-cased unless ``upperize`` is False,
    and ``linear`` is stored on the record. A ``name`` other than the
    default "id" replaces the record's id (and, truncated, its name).
    """
    file_format = "genbank" if filename.endswith(("gb", "gbk")) else "fasta"
    record = SeqIO.read(filename, file_format)
    if upperize:
        record.seq = record.seq.upper()
    record.linear = linear
    if name != "id":
        record.id = name
        record.name = record.id.replace(" ", "_")[:20]
    return record
def complement(sequence):
    """Return the complement of an "ATGC" string ("ATGC" => "TACG")."""
    return "".join([complements_dict[nucleotide] for nucleotide in sequence])
def reverse_complement(sequence):
    """Return the reverse-complement of an "ATGC" string ("ATGC" => "GCAT")."""
    reversed_sequence = sequence[::-1]
    return complement(reversed_sequence)
def sequence_to_record(sequence, record_id=None, name="unnamed", features=()):
    """Turn an "ATGC" string into a Biopython SeqRecord (molecule type DNA).

    Raises an ImportError when Biopython is not installed.
    """
    if not BIOPYTHON_AVAILABLE:
        raise ImportError("Creating records requires Biopython installed.")
    if has_dna_alphabet:
        # Biopython versions before 1.78 require an explicit alphabet.
        seq = Seq(sequence, alphabet=DNAAlphabet())
    else:
        seq = Seq(sequence)
    record = SeqRecord(seq, name=name, id=record_id, features=list(features))
    record.annotations["molecule_type"] = "DNA"
    return record
def annotate_record(
    seqrecord, location="full", feature_type="feature", margin=0, **qualifiers
):
    """Append a new feature to a Biopython SeqRecord, in place.

    Parameters
    ----------
    seqrecord
      The Biopython seqrecord to be annotated.
    location
      Either (start, end), (start, end, strand), or "full" to annotate the
      whole record minus ``margin`` bases on each side. The strand defaults
      to +1 when not provided.
    feature_type
      The type associated with the feature.
    margin
      Number of extra bases left out on each side when location is "full".
    qualifiers
      Dictionnary that will be the Biopython feature's `qualifiers` attribute.
    """
    if not BIOPYTHON_AVAILABLE:
        raise ImportError("Creating records requires Biopython installed.")
    if location == "full":
        location = (margin, len(seqrecord) - margin)
    if len(location) == 3:
        start, end, strand = location
    else:
        (start, end), strand = location, 1
    new_feature = SeqFeature(
        FeatureLocation(start, end, strand),
        qualifiers=qualifiers,
        type=feature_type,
    )
    seqrecord.features.append(new_feature)
def sequences_differences_array(seq1, seq2):
    """Return an array [0, 0, 1, 0, ...] with 1s for sequence differences.

    seq1, seq2 should both be ATGC strings of the same length.

    Raises
    ------
    ValueError
      If the two sequences have different lengths.
    """
    if len(seq1) != len(seq2):
        raise ValueError(
            "Only use on same-size sequences (%d, %d)" % (len(seq1), len(seq2))
        )
    # Fix: np.fromstring is deprecated and removed in recent Numpy versions.
    # np.frombuffer over the ASCII-encoded strings is the supported
    # equivalent; it compares the sequences byte by byte.
    arr1 = np.frombuffer(seq1.encode("ascii"), dtype="uint8")
    arr2 = np.frombuffer(seq2.encode("ascii"), dtype="uint8")
    return arr1 != arr2
def sequences_differences(seq1, seq2):
    """Count the positions at which the two same-length sequences differ.

    seq1, seq2 should be strings of DNA sequences e.g. "ATGCTGTGC".
    """
    differences = sequences_differences_array(seq1, seq2)
    return differences.sum()
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,211
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/DiffBlocks/DiffBlock.py
|
from ..Location import Location
from ..biotools import sequences_differences
class DiffBlock:
    """Class to represent a segment that differs between sequences.

    Parameters
    ----------
    operation
      One of "insert", "delete", "replace", "change", "reverse",
      "transpose", or "equal".
    s1_location
      The Location(start, end) of the region in sequence s1.
    s2_location
      The Location(start, end) of the region in sequence s2.
    """

    def __init__(self, operation, s1_location, s2_location):
        self.operation = operation
        self.s1_location = s1_location
        self.s2_location = s2_location

    def to_feature(self, sequence="s2"):
        """Return a Biopython feature (located on ``sequence``) with a short
        human-readable label for this block.

        Short differences are spelled out (e.g. "+ATG"), longer ones are
        summarized (e.g. "+ 12 nuc.", "3 mutations").
        """
        s1_length, s2_length = len(self.s1_location), len(self.s2_location)
        max_length = max([s1_length, s2_length])
        if sequence == "s1":
            # A feature on s1 is the mirror image: swap the two locations.
            return DiffBlock(
                self.operation, self.s2_location, self.s1_location
            ).to_feature()
        if self.operation == "insert":
            if max_length <= 7:
                label = "+%s" % self.s2_location.extract_sequence()
            else:
                label = "+ %d nuc." % s2_length
        elif self.operation == "delete":
            if max_length <= 7:
                label = "-%s" % self.s1_location.extract_sequence()
            else:
                label = "- %d nuc." % s1_length
        elif self.operation == "replace":
            if max_length <= 6:
                label = "%s ➤ %s" % (
                    self.s1_location.extract_sequence(),
                    self.s2_location.extract_sequence(),
                )
            else:
                sub_s1 = self.s1_location.extract_sequence()
                sub_s2 = self.s2_location.extract_sequence()
                diffs = sequences_differences(sub_s1, sub_s2)
                label = "%d mutations" % diffs
        elif self.operation == "change":
            if max_length <= 6:
                label = "%s ➤ %s" % (
                    self.s1_location.extract_sequence(),
                    self.s2_location.extract_sequence(),
                )
            else:
                label = "%sn ➤ %sn change" % (s1_length, s2_length)
        elif self.operation == "reverse":
            label = "was reversed at %d-%d" % (
                self.s1_location.start,
                self.s1_location.end,
            )
        elif self.operation == "transpose":
            label = "was at %d-%d" % (self.s1_location.start, self.s1_location.end)
        elif self.operation == "equal":
            label = "Equal"
        else:
            # Fix: the original left ``label`` unassigned here, so an unknown
            # operation raised a confusing UnboundLocalError below.
            raise ValueError("Unknown diff operation: %r" % self.operation)
        return self.s2_location.to_biopython_feature(
            feature_type="diff_" + self.operation, label=label
        )

    def __str__(self):
        """Represent a diffblock, for instance: "insert 10-30|105-105"."""
        return "%s %s|%s" % (self.operation, self.s1_location, self.s2_location)

    def __repr__(self):
        return str(self)
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,212
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/utils.py
|
from .CommonBlocks import CommonBlocks
from .biotools import sequence_to_record
def _turn_sequence_into_record_if_necessary(sequence, record_id="id"):
if hasattr(sequence, "seq"):
return sequence
else:
return sequence_to_record(sequence, record_id=record_id)
def sequences_are_circularly_equal(sequences):
    """Return whether all the sequences represent the same circular sequence.

    This means that the sequences are differently rotated versions of a same
    circular sequence, and for any pair s1, s2 in the sequences, there is an
    index i such that s1 = s2[i:] + s2[:i].

    The ``sequences`` parameter should be a list of "ATGC" strings or
    SeqRecords.
    """
    # Normalize: plain strings become SeqRecords with distinct ids REC_0...
    sequences = [
        _turn_sequence_into_record_if_necessary(seq, record_id="REC_%d" % i)
        for i, seq in enumerate(sequences)
    ]
    if len(sequences) < 2:
        raise ValueError("Provide at least 2 sequences")
    elif len(sequences) > 2:
        # Circular equality is transitive: compare the first pair, then
        # recursively the chain starting at the second sequence.
        first_equal = sequences_are_circularly_equal(sequences[:2])
        return first_equal and sequences_are_circularly_equal(sequences[1:])
    s1, s2 = sequences[:2]
    if s1.id == s2.id:
        # The two records must have distinct ids for the comparison below.
        # NOTE(review): this mutates the caller's record id — confirm the
        # side effect is acceptable.
        s1.id += "_b"
    if len(s1) != len(s2):
        # Rotations of a same circular sequence always have equal lengths.
        return False
    blocks = CommonBlocks.from_sequences(sequences=[s1, s2], min_block_size=2)
    if len(blocks.common_blocks) > 2:
        # Two rotations of one circular sequence share at most two common
        # blocks (the parts before and after the rotation pivot).
        return False
    # Candidate rotation pivots: the boundaries of every common block.
    potential_pivot_indices = [
        index
        for data in blocks.common_blocks.values()
        for (origin, (start, end, _)) in data["locations"]
        for index in [start, end]
    ]
    s1, s2 = str(s1.seq), str(s2.seq)
    for index in potential_pivot_indices:
        # Rotate s1 at the candidate pivot and compare with s2.
        new_s1 = s1[index:] + s1[:index]
        if new_s1 == s2:
            return True
    return False
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,213
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/DiffBlocks/diffblocks_tools.py
|
from copy import deepcopy
import networkx as nx
import Levenshtein
from ..Location import Location
from .DiffBlock import DiffBlock
def compute_levenshtein_blocks(seq1, seq2, max_complexity=1e8):
    """Compute the Levenshtein blocks of insertion, deletion, replacement.

    Returns a list of tuples (operation, (start1, end1), (start2, end2))
    locating each block of consecutive same-kind edits in seq1 and seq2.
    """
    # TODO: better method for dealing with long sequences?
    l1, l2 = len(seq1), len(seq2)
    if l1 * l2 > max_complexity:
        # The edit-distance computation would be too expensive: give up on
        # a detailed diff and return one "change" block covering everything.
        return [("change", (0, l1), (0, l2))]
    def block_format(op, s1, e1, s2, e2):
        # Turn inclusive end indices into (start, end) ranges; only the
        # side(s) actually edited by ``op`` extend one past the end index.
        if op == "delete":
            return (op, (s1, e1 + 1), (s2, e2))
        if op == "insert":
            return (op, (s1, e1), (s2, e2 + 1))
        else:
            return (op, (s1, e1 + 1), (s2, e2 + 1))
    edits = Levenshtein.editops(seq1, seq2)
    if len(edits) == 0:
        return []
    # Group runs of adjacent single-character edits of the same kind into
    # blocks. (bop, s1, s2) is the current block's operation and start
    # position; (e1, e2) tracks its inclusive end in each sequence.
    bop, s1, s2 = edits[0]
    e1, e2 = s1, s2
    blocks = []
    for (op, _e1, _e2) in edits[1:]:
        # An edit extends the current block only when it is positioned
        # directly after it, in the way expected for its operation kind.
        continuity = any(
            [
                all([op == "delete", _e1 == e1 + 1, e2 == _e2]),
                all([op == "insert", _e1 == e1, _e2 == e2 + 1]),
                all([op == "replace", _e1 == e1 + 1, _e2 == e2 + 1]),
            ]
        )
        if op == bop and continuity:
            e1, e2 = _e1, _e2
        else:
            # Close the current block and start a new one at this edit.
            blocks.append(block_format(bop, s1, e1, s2, e2))
            bop, s1, s2 = op, _e1, _e2
            e1, e2 = s1, s2
    blocks.append(block_format(bop, s1, e1, s2, e2))
    return blocks
def merge_subblocks(subblocks):
    """Merge consecutive diff blocks into one block spanning all of their
    s1 and s2 locations ("replace" if both spans have equal length, else
    "change")."""
    s1_span = Location(
        min([block.s1_location.start for block in subblocks]),
        max([block.s1_location.end for block in subblocks]),
        sequence=subblocks[0].s1_location.sequence,
    )
    s2_span = Location(
        min([block.s2_location.start for block in subblocks]),
        max([block.s2_location.end for block in subblocks]),
        sequence=subblocks[0].s2_location.sequence,
    )
    operation = "replace" if len(s1_span) == len(s2_span) else "change"
    return DiffBlock(operation, s1_span, s2_span)
def merge_blocs_by_location(blocks, max_blocks, max_span, reference="s2"):
    """Repeatedly merge any ``max_blocks`` consecutive blocks whose span on
    the reference sequence is under ``max_span``, until no window qualifies.
    """
    merged_something = True
    while merged_something:
        merged_something = False
        for start_index in range(len(blocks) - max_blocks):
            window = blocks[start_index : start_index + max_blocks]
            first, last = window[0], window[-1]
            if reference == "s2":
                span = last.s2_location.end - first.s2_location.start
            else:
                span = last.s1_location.end - first.s1_location.start
            if span < max_span:
                merged = merge_subblocks(window)
                blocks = (
                    blocks[:start_index]
                    + [merged]
                    + blocks[start_index + max_blocks :]
                )
                # Restart the scan on the updated block list.
                merged_something = True
                break
    return blocks
def merge_successive_blocks(blocks, change_gap=10, replace_gap=5, reference="s2"):
    """Merge neighboring diff blocks separated by a small gap.

    A pair is merged when one of the two is a "change" block and the gap on
    the reference sequence is under ``change_gap``, or when both are
    "replace" blocks and the gap is under ``replace_gap``. Pairs involving
    an "equal" block are never merged.
    """
    merged_something = True
    while merged_something:
        merged_something = False
        for index in range(len(blocks) - 1):
            left, right = blocks[index], blocks[index + 1]
            operations = (left.operation, right.operation)
            if "equal" in operations:
                continue
            if reference == "s2":
                gap = right.s2_location.start - left.s2_location.end
            else:
                gap = right.s1_location.start - left.s1_location.end
            should_merge = ("change" in operations and gap < change_gap) or (
                operations == ("replace", "replace") and gap < replace_gap
            )
            if should_merge:
                blocks = (
                    blocks[:index]
                    + [merge_subblocks([left, right])]
                    + blocks[index + 2 :]
                )
                # Restart the scan on the updated block list.
                merged_something = True
                break
    return blocks
def compute_sorted_blocks(blocks, reference="s2"):
    """Return the blocks sorted by their location on the reference sequence."""
    def location_key(block):
        location = block.s2_location if reference == "s2" else block.s1_location
        return location.to_tuple()
    return sorted(blocks, key=location_key)
def get_optimal_common_blocks(common_blocks):
    """Select and clean up common blocks for the s1-vs-s2 diff.

    Takes a dict {block_name: {"locations": [(seq, (start, end, strand))...]}}
    (deep-copied, the input is not mutated) and returns a tuple
    (blocks_in_seqs, remarks) where blocks_in_seqs maps "s1"/"s2" to sorted
    lists of (start, end, block_name), and remarks is a list of DiffBlocks
    of operation "reverse" or "transpose" logging what was removed.
    """
    common_blocks = deepcopy(common_blocks)
    remarks = []
    # Make so that there is never an antisense block in s1 and a + block in s2.
    # If it is so, flip the block in s2. It will become antisense and be later
    # removed
    for block_name, data in common_blocks.items():
        locations = data["locations"]
        s1_strands = [strand for (s, (_, _, strand)) in locations if s == "s1"]
        if 1 not in s1_strands:
            for i, location in enumerate(locations):
                seq, (start, end, strand) = location
                if seq == "s2":
                    locations[i] = (seq, (start, end, -strand))
    # Remove every antisense blocks now. For the ones in s2, log this
    # with a remark.
    for block_name, data in common_blocks.items():
        locations = data["locations"]
        # NOTE(review): ``locations.remove`` inside this enumerate loop
        # mutates the list being iterated, which skips the element after
        # each removal — confirm this is intended before restructuring.
        for i, location in enumerate(locations):
            (seq, (start, end, strand)) = location
            if seq == "s2" and (strand == -1):
                locations.remove(location)
                # locations[0] is assumed to be the s1 occurrence of the block.
                _, (start1, end1, strand1) = locations[0]
                remarks.append(
                    DiffBlock(
                        "reverse",
                        s1_location=Location(start1, end1, strand1),
                        s2_location=Location(start, end),
                    )
                )
    # We start the structure that will be returned in the end
    blocks_in_seqs = {
        seq: sorted(
            [
                (start, end, bname)
                for bname, data in common_blocks.items()
                for (s, (start, end, strand)) in data["locations"]
                if s == seq
            ]
        )
        for seq in ("s1", "s2")
    }
    # Identify blocks appearing only in one of the two sequences
    blocks_in_s1 = set(b[-1] for b in blocks_in_seqs["s1"])
    blocks_in_s2 = set(b[-1] for b in blocks_in_seqs["s2"])
    # Symmetric difference: blocks present in exactly one sequence.
    uniblocks = (blocks_in_s1.union(blocks_in_s2)).difference(
        blocks_in_s1.intersection(blocks_in_s2)
    )
    # Remove blocks appearing only in one of the two sequences
    # as they are useless for sequences comparison
    # this should be very rare but you never know.
    for block_list in blocks_in_seqs.values():
        # NOTE(review): both loops below remove from the list while
        # iterating it (elements after a removal get skipped) — confirm
        # intended before restructuring.
        for b in block_list:
            if b[-1] in uniblocks:
                block_list.remove(b)
        # Drop blocks fully contained in their predecessor (end2 <= end1).
        for b1, b2 in zip(block_list, block_list[1:]):
            start1, end1, __name1 = b1
            start2, end2, __name2 = b2
            if end2 <= end1:
                block_list.remove(b2)
    # If a block appears several time in a sequence (self-homology)
    # give unique names to each occurence: block_1, block_1*, etc.
    blocks_in_seqs_dicts = dict(s1={}, s2={})
    for seq, blocks_list in list(blocks_in_seqs.items()):
        seen_blocks = set()
        for i, (start, end, block_name) in enumerate(blocks_list):
            while block_name in seen_blocks:
                block_name = block_name + "*"
            blocks_list[i] = start, end, block_name
            blocks_in_seqs_dicts[seq][block_name] = dict(rank=i, location=(start, end))
            seen_blocks.add(block_name)
    # Find and retain the largest sequence of blocks which is in the right
    # order in both sequences. We will remove every other block.
    # We do that by looking for the longest path in a graph
    if len(blocks_in_seqs_dicts["s2"]) < 2:
        retained_blocks = list(blocks_in_seqs_dicts["s2"])
    else:
        s1_dict = blocks_in_seqs_dicts["s1"]
        # Edge b1 -> b2 whenever b2 comes after b1 in BOTH sequences; the
        # longest path in this DAG is the longest order-preserved chain.
        graph = nx.DiGraph(
            [
                (b1, b2)
                for b1, data1 in blocks_in_seqs_dicts["s2"].items()
                for b2, data2 in blocks_in_seqs_dicts["s2"].items()
                if (b2 in s1_dict)
                and (b1 in s1_dict)
                and (s1_dict[b2]["rank"] > s1_dict[b1]["rank"])
                and (data2["rank"] > data1["rank"])
            ]
        )
        retained_blocks = nx.dag_longest_path(graph)
    # remove any "misplaced" block that is not in the retained list.
    # log a remark for the ones in s2.
    for seq in ("s1", "s2"):
        blocks_list = blocks_in_seqs[seq]
        for block in list(blocks_list):  # copy cause we will remove elements
            start, end, block_name = block
            if block_name not in retained_blocks:
                blocks_list.remove(block)
                if seq == "s2":
                    # Strip the "*" suffixes added for repeated occurrences
                    # to recover the block's original name in s1.
                    new_block_name = block_name.strip("*")
                    s1_blocks = blocks_in_seqs_dicts["s1"]
                    s1_loc = s1_blocks[new_block_name]["location"]
                    start1, end1 = s1_loc
                    if (
                        len(remarks)
                        and (start == remarks[-1].s2_location.end)
                        and (start1 == remarks[-1].s1_location.end)
                    ):
                        # This block directly follows the previous remark in
                        # both sequences: extend that remark instead of
                        # logging a new one.
                        remarks[-1].s1_location.end = end1
                        remarks[-1].s2_location.end = end
                    else:
                        remarks.append(
                            DiffBlock(
                                "transpose",
                                s1_location=Location(start1, end1),
                                s2_location=Location(start, end),
                            )
                        )
    # Reduce blocks when there is overlap
    blocks_to_reduce = {}
    for seq in ("s1", "s2"):
        blocks_list = blocks_in_seqs[seq]
        for b1, b2 in zip(blocks_list, blocks_list[1:]):
            start1, end1, block_name1 = b1
            start2, end2, __block_name2 = b2
            diff = end1 - start2
            if diff > 0:
                if block_name1 not in blocks_to_reduce:
                    blocks_to_reduce[block_name1] = 0
                # Keep the largest overlap seen for this block in any sequence.
                blocks_to_reduce[block_name1] = max(blocks_to_reduce[block_name1], diff)
    for seq in ("s1", "s2"):
        blocks_list = blocks_in_seqs[seq]
        for i, (start, end, block_name) in enumerate(blocks_list):
            if block_name in blocks_to_reduce:
                diff = blocks_to_reduce[block_name]
                blocks_list[i] = (start, end - diff, block_name)
    return blocks_in_seqs, remarks
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,214
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/CommonBlocks/commonblocks_tools.py
|
"""Algorithmic methods for the selection of common blocks in DiffBlocks
- select_common_blocks
-
- segments_difference
"""
import os
import re
import subprocess
import tempfile
from collections import defaultdict, OrderedDict

import numpy as np

from ..biotools import reverse_complement, sequence_to_record
def format_sequences_as_dicts(sequences):
    """Standardize different sequence collection formats into a single one.

    The ``sequences`` can be either:

    - A list [('sequence_id', 'ATGC...'), ('sequence_2', ...)]
    - A list of Biopython records (all with different IDs)
    - A dict {'sequence_id': "ATGC..."}
    - A dict {'sequence_id': biopython_record}

    The output is a tuple (sequences_dict, records_dict), where

    - sequences_dict is of the form {'sequence_id': 'ATGC...'}
    - records_dict is of the form {'sequence_id': biopython_record}
    """
    if isinstance(sequences, (list, tuple)):
        if hasattr(sequences[0], "seq"):
            # SEQUENCES = LIST OF RECORDS
            records_dict = OrderedDict([(record.id, record) for record in sequences])
            sequences_dict = OrderedDict(
                [(record.id, str(record.seq).upper()) for record in sequences]
            )
        else:
            # SEQUENCES = LIST OF (NAME, SEQUENCE) PAIRS
            sequences_dict = OrderedDict(sequences)
            if isinstance(list(sequences_dict.values())[0], str):
                records_dict = OrderedDict(
                    [
                        (name, sequence_to_record(seq, name=name))
                        for name, seq in sequences_dict.items()
                    ]
                )
            else:
                # The pairs map names to records. Fix: the original returned
                # the raw input list as records_dict here (not a dict), and
                # left record objects (not "ATGC" strings) in sequences_dict.
                records_dict = OrderedDict(sequences)
                sequences_dict = OrderedDict(
                    [
                        (name, str(record.seq).upper())
                        for name, record in records_dict.items()
                    ]
                )
    elif hasattr(list(sequences.values())[0], "seq"):
        # SEQUENCES = DICT {SEQ_ID: RECORD}
        records_dict = OrderedDict(sorted(sequences.items()))
        sequences_dict = OrderedDict(
            [
                (record_id, str(record.seq).upper())
                for record_id, record in sequences.items()
            ]
        )
    else:
        # SEQUENCES = DICT {SEQ_ID: ATGC STRING}
        sequences_dict = OrderedDict(sorted(sequences.items()))
        records_dict = OrderedDict(
            [
                (name, sequence_to_record(seq, name=name))
                for name, seq in sequences.items()
            ]
        )
    return sequences_dict, records_dict
def segments_difference(segment, subtracted):
    """Return the parts of ``segment`` (start, end) not covered by ``subtracted``.

    The result is a sorted list containing either zero, one, or two segments
    of the form (start, end).

    Examples
    --------
    >>> segment=(10, 100), subtracted=(0, 85) => [(85, 100)]
    >>> segment=(10, 100), subtracted=(40, 125) => [(10, 40)]
    >>> segment=(10, 100), subtracted=(30, 55) => [(10, 30), (55, 100)]
    >>> segment=(10, 100), subtracted=(0, 150) => []
    """
    seg_start, seg_end = segment
    sub_start, sub_end = subtracted
    remainders = set()
    if seg_start < sub_start:
        # Part of the segment sticks out on the left of ``subtracted``.
        remainders.add((seg_start, min(sub_start, seg_end)))
    if sub_end < seg_end:
        # Part of the segment sticks out on the right of ``subtracted``.
        remainders.add((max(seg_start, sub_end), seg_end))
    return sorted(remainders)
def find_homologies_between_sequences(
    sequences, min_size=0, max_size=None, include_self_homologies=True
):
    """Return a dict listing the locations of all homologies between sequences.

    The result is a dict of the form below, where the sequence identifiers
    are used as keys.

    >>> {
    >>>    'seq_1': {
    >>>        (start1, end1): [('seq2_5', _start, _end), ('seq_3', )...]
    >>>        (start2, end2): ...
    >>>    }
    >>> }

    Parameters
    ----------
    sequences
      A dict {'sequence_id': 'ATTGTGCAG...'}.
    min_size, max_size
      Minimum and maximum size outside which homologies will be ignored.
    include_self_homologies
      If False, self-homologies will be removed from the list.
    """
    # WRITE THE SEQUENCES TO A TEMPORARY FASTA FILE.
    # Fix: tempfile.mktemp is deprecated and race-prone, and the original
    # never deleted the file. mkstemp creates the file securely and the
    # try/finally guarantees cleanup even if blastn fails.
    fasta_fd, temp_fasta_path = tempfile.mkstemp(suffix=".fa")
    try:
        with os.fdopen(fasta_fd, "w") as f:
            f.write(
                "\n\n".join(
                    ["> %s\n%s" % (name, seq.upper()) for name, seq in sequences.items()]
                )
            )
        # BLAST THE SEQUENCES AGAINST THEMSELVES USING NCBI-BLAST
        proc = subprocess.Popen(
            [
                "blastn",
                "-query",
                temp_fasta_path,
                "-subject",
                temp_fasta_path,
                "-perc_identity",
                "100",
                "-dust",
                "no",
                "-evalue",
                "1000000000000000",
                "-culling_limit",
                "10",
                "-ungapped",
                "-outfmt",
                "6 qseqid qstart qend sseqid sstart send",
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        result, __blast_err = proc.communicate()
    finally:
        os.remove(temp_fasta_path)
    # PARSE THE RESULT FROM BLAST
    parsing = [line.split("\t") for line in result.decode("utf-8").splitlines()]
    homologies = {name: defaultdict(lambda *a: []) for name, seq in sequences.items()}
    # FILTER THE RESULTS (MIN_SIZE, MAX_SIZE, SELF-HOMOLOGIES)
    for query, qstart, qend, subject, sstart, send in parsing:
        is_self_homology = (query == subject) and (qstart != sstart)
        if is_self_homology and (not include_self_homologies):
            continue
        # BLAST coordinates are 1-based and inclusive; convert starts to
        # Python's 0-based convention.
        qstart, qend = int(qstart) - 1, int(qend)
        sstart, send = int(sstart) - 1, int(send)
        if qend - qstart < min_size:
            continue
        if (max_size is not None) and (qend - qstart > max_size):
            continue
        location = (subject, sstart, send)
        homologies[query][(qstart, qend)].append(location)
    return homologies
def count_homologies(matches, min_size):
    """Return a dict {(start, end): number_of_homologies_count}.

    ``matches`` maps segments (start, end) to the list of locations where
    each segment matches; intersections of match segments are counted when
    strictly longer than ``min_size``.
    """
    counts = {}
    if len(matches) == 1:
        (only_segment,) = matches.keys()
        counts[only_segment] = 1
    sorted_segments = sorted(matches)
    for i, first in enumerate(sorted_segments):
        for second in sorted_segments[i + 1 :]:
            start, end = second[0], min(first[1], second[1])
            intersection = (start, end)
            if end < start:
                # Empty intersection: first and second are disjoint, and so
                # is every later segment (they are sorted by start).
                break
            if (end - start > min_size) and (intersection not in counts):
                counts[intersection] = sum(
                    len(matches[(match_start, match_end)])
                    for (match_start, match_end) in sorted_segments
                    if match_start <= start <= end <= match_end
                )
    return counts
def segment_with_most_homologies(homologies_counts, method="most_coverage_first"):
    """Select the "best" segment, that should be selected next as a common
    block.

    Returns a (score, segment) tuple. The segment is (None, None) with a
    score of 0 when there is no candidate. With method
    "most_coverage_first" a segment's score is its length times its
    homologies count, otherwise simply its length.
    """
    candidates = [(0, (None, None))]
    for segment, count in homologies_counts.items():
        multiplier = count if method == "most_coverage_first" else 1
        seg_start, seg_end = segment
        candidates.append((multiplier * (seg_end - seg_start), segment))
    return max(candidates)
def select_common_blocks(
    homologies, sequences, min_size=0, method="most_coverage_first"
):
    """Select a collection of the largest common blocks, iteratively.

    Parameters
    ----------
    homologies
      Dict {seq_id: {(start, end): [locations...]}} as returned by
      find_homologies_between_sequences.
    sequences
      Dict {seq_id: "ATGC..."}.
    min_size
      Segments shorter than this are ignored.
    method
      Scoring method passed to segment_with_most_homologies.

    Returns an OrderedDict {"block_1": {"sequence": ..., "locations": ...}}.
    """
    common_blocks = []
    homologies_counts = {
        seqname: count_homologies(matches=homologies[seqname], min_size=min_size)
        for seqname in sequences
    }
    # ITERATIVELY SELECT A COMMON BLOCK AND REMOVE THAT BLOCK FROM THE
    # homologies IN VARIOUS SEQUENCES, UNTIL THERE IS NO HOMOLOGY
    while True:
        # FIND THE HOMOLOGY WITH THE BEST OVERALL SCORE ACROSS ALL SEQS
        (best_score, (start, end)), seqname = max(
            [
                (
                    segment_with_most_homologies(
                        homologies_counts[seqname], method=method
                    ),
                    seqname,
                )
                for seqname in sequences
            ]
        )
        # IF NO HOMOLOGY WAS FOUND AT ALL, STOP
        if best_score == 0:
            break
        # FIND WHERE THE SELECTED SUBSEQUENCE APPEARS IN OTHER SEQUENCES.
        # AT EACH LOCATION, "EXTRUDE" THE SUBSEQUENCE FROM THE CURRENT
        # LOCATIONS IN homologies_counts
        best_subsequence = sequences[seqname][start:end]
        locations = []
        # NOTE(review): ``seqname``, ``start`` and ``end`` are deliberately
        # reused as loop variables below, shadowing the winner found above
        # (best_subsequence was already extracted, so this is safe).
        for seqname, sequence in sequences.items():
            seq_n_intersections = homologies_counts[seqname]
            # we look for both the subsequence and its reverse complement:
            for strand in [1, -1]:
                # NOTE(review): re.finditer yields non-overlapping matches
                # only, and treats the subsequence as a regex pattern —
                # safe for pure "ATGC" strings; confirm inputs are such.
                if strand == 1:
                    matches = re.finditer(best_subsequence, sequence)
                else:
                    matches = re.finditer(
                        reverse_complement(best_subsequence), sequence
                    )
                for match in matches:
                    # add the location to the list for this subsequence...
                    start, end = match.start(), match.end()
                    locations.append((seqname, (start, end, strand)))
                    # ...then subtract the location from the sequence's
                    # homologies list
                    match_as_segment = tuple(sorted([start, end]))
                    for intersection in list(seq_n_intersections.keys()):
                        score = seq_n_intersections.pop(intersection)
                        for diff in segments_difference(intersection, match_as_segment):
                            diff_start, diff_end = diff
                            if diff_end - diff_start > min_size:
                                seq_n_intersections[diff] = score
        common_blocks.append((best_subsequence, locations))
    # REMOVE SELF-HOMOLOGOUS SEQUENCES
    common_blocks = [
        (seq, locations) for (seq, locations) in common_blocks if len(locations) >= 2
    ]
    # CREATE THE FINAL COMMON_BLOCKS_DICT
    common_blocks_dict = OrderedDict()
    if len(common_blocks) > 0:
        # Zero-pad the block numbers so names sort lexicographically.
        number_size = int(np.log10(len(common_blocks))) + 1
        for i, (sequence, locations) in enumerate(common_blocks):
            block_name = "block_%s" % (str(i + 1).zfill(number_size))
            common_blocks_dict[block_name] = {
                "sequence": sequence,
                "locations": locations,
            }
    return common_blocks_dict
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,215
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/tests/test_utils.py
|
from geneblocks.utils import sequences_are_circularly_equal
from Bio import SeqIO
import os
this_directory = os.path.dirname(os.path.realpath(__file__))
def test_sequences_are_circularly_equal():
    """Check circular-equality detection on rotations and near-misses."""
    b1 = "ATGTGCACACGCACCGTGTGTGCACACACGTGTGCACACACGTGCACACGGTGT"
    b2 = "ACACACATATACGCGTGCGTGCAAAACACATTTTACACGGCACGTGCA"
    b3 = "ACCCACACTTTGTGTCGCGCACACGTGTG"
    # Three rotations of the same circular sequence:
    rotations = [b1 + b2 + b3, b2 + b3 + b1, b3 + b1 + b2]
    # Sequences NOT circularly equivalent to the rotations above:
    with_extra_base = b2 + b3 + b1 + "A"
    with_duplicated_block = b1 + b2 + b2 + b3
    assert sequences_are_circularly_equal(rotations[:2])
    assert sequences_are_circularly_equal(rotations)
    assert not sequences_are_circularly_equal(rotations + [with_extra_base])
    assert not sequences_are_circularly_equal([rotations[0], with_duplicated_block])
def test_long_equivalents():
    """Two 12 kb sequences that differ only by a 10 bp circular shift."""
    fasta_path = os.path.join(this_directory, "sequences", "long_equivalents.fa")
    rec_a, rec_b = SeqIO.parse(fasta_path, "fasta")
    assert sequences_are_circularly_equal([rec_a, rec_b])
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,216
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/DiffBlocks/__init__.py
|
from .DiffBlocks import DiffBlocks, DiffBlock
from .DiffRecordTranslator import DiffRecordTranslator
__all__ = ['DiffBlocks', 'DiffBlock', 'DiffRecordTranslator']
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,217
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/tests/test_scenarios.py
|
import os
import matplotlib
import networkx as nx
from geneblocks import CommonBlocks, DiffBlocks, load_record
from geneblocks.biotools import reverse_complement, random_dna_sequence
from geneblocks.DiffBlocks import DiffBlock
from geneblocks.Location import Location
matplotlib.use("Agg")
def test_CommonBlocks_basics(tmpdir):
    """Common blocks of 5 composite sequences: detection, plot, CSV export."""
    C1, A, B, C2, C3, D, E, F = [random_dna_sequence(100 * L) for L in range(1, 9)]
    sequences = dict(
        a=C1 + A + C2,
        b=B + D + C2 + C3,
        c=E + C1 + C2,
        d=C2 + C1 + F + C3,
        e=C3 + reverse_complement(C2 + C1),
    )
    common_blocks = CommonBlocks.from_sequences(sequences)
    assert len(common_blocks.common_blocks) == 3
    # Plot the blocks and save the figure to the temporary directory.
    axes = common_blocks.plot_common_blocks()
    axes[0].figure.savefig(
        os.path.join(str(tmpdir), "basic_example.png"), bbox_inches="tight"
    )
    # Smoke-test the record extraction methods.
    _ = common_blocks.common_blocks_records()
    _ = common_blocks.unique_blocks_records()
    # Export all common blocks into a CSV spreadsheet.
    common_blocks.common_blocks_to_csv(
        target_file=os.path.join(str(tmpdir), "basic_example.csv")
    )
def test_DiffBlocks_basics(tmpdir):
    """Diff two GenBank records, plot the result, and pin the block list."""
    records_dir = os.path.join("tests", "sequences")
    seq_1 = load_record(os.path.join(records_dir, "sequence1.gb"))
    seq_2 = load_record(os.path.join(records_dir, "sequence2.gb"))
    diff_blocks = DiffBlocks.from_sequences(seq_1, seq_2).merged()
    # Cover the single-axis code path (separate_axes=False).
    diff_blocks.plot(figure_width=8, separate_axes=False)
    ax1, __ax2 = diff_blocks.plot(figure_width=8)
    ax1.figure.savefig(
        os.path.join(str(tmpdir), "diff_blocks.png"), bbox_inches="tight"
    )
    expected = [
        "insert 0-0|0-120",
        "equal 0-1000|120-1120",
        "replace 1000-1004|1120-1124",
        "equal 1004-1503|1124-1623",
        "insert 1503-1503|1623-1723",
        "equal 1503-2304|1723-2524",
        "delete 2304-2404|2524-2524",
        "equal 2404-3404|2524-3524",
    ]
    assert [str(block) for block in diff_blocks.blocks] == expected
def test_features_transfer():
    """Copy features between common blocks, both on copies and in place."""
    seq_folder = os.path.join("tests", "sequences", "features_transfer")
    insert = load_record(os.path.join(seq_folder, "insert.gb"), name="insert")
    plasmid = load_record(
        os.path.join(seq_folder, "plasmid_to_annotate.gb"), name="plasmid"
    )
    blocks = CommonBlocks.from_sequences([insert, plasmid])
    # inplace=False returns annotated copies and leaves the originals intact.
    new_records = blocks.copy_features_between_common_blocks(inplace=False)
    assert len(new_records["plasmid"].features) == 6
    assert len(plasmid.features) == 2
    # inplace=True annotates the original records directly.
    blocks.copy_features_between_common_blocks(inplace=True)
    assert len(plasmid.features) == 6
def test_networkx_dag_longest_path():
    """Pin networkx's dag_longest_path output (Github issue #7).

    networkx >= 2.6 has a different correct output; this test catches any
    future change.
    """
    graph = nx.DiGraph([("block_1", "block_3"), ("block_1", "block_2")])
    longest = nx.dag_longest_path(graph)
    assert longest == ["block_1", "block_3"]
def test_good_management_of_homologies():
    """Regression test for a former obscure bug (see also Github issue #7).

    A sequence with 2 homologies in seq2 corresponding to a single sequence
    in seq1 used to cause an index error, due to the "*" added by the
    algorithm to the end of homologies.
    """
    b1, b2, b3 = [
        random_dna_sequence(4000, seed=seed) for seed in (123, 234, 345)
    ]
    seq1 = b1 + "A" + "T" + b2 + b3
    seq2 = "T" + b1 + "T" + b3 + b2 + b1 + b1
    diff = DiffBlocks.from_sequences(seq1, seq2).merged()
    assert len(diff.blocks) == 9
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,218
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/setup.py
|
"""Setup script for the geneblocks package."""
import ez_setup

ez_setup.use_setuptools()

from setuptools import setup, find_packages

# Load __version__ from the package without importing it (importing would
# require the package's dependencies to be installed already).
# Fixed: the file handles were previously opened without being closed;
# context managers guarantee they are released.
with open("geneblocks/version.py") as f:
    exec(f.read())  # loads __version__

with open("pypi-readme.rst") as f:
    long_description = f.read()

setup(
    name="geneblocks",
    version=__version__,  # noqa: F821 -- defined by the exec() above
    author="Zulko",
    description="Library to compare DNA sequences (diff, common blocks, etc.)",
    long_description=long_description,
    license="MIT",
    url="https://github.com/Edinburgh-Genome-Foundry/geneblocks",
    keywords="DNA sequence blocks diff synthetic-biology bioinformatics",
    packages=find_packages(exclude="docs"),
    install_requires=[
        "numpy",
        "Biopython",
        "dna_features_viewer",
        "networkx",
        "python-Levenshtein",
    ],
)
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,219
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/CommonBlocks/CommonBlocks.py
|
"""Defines central class BlockFinder."""
import itertools
from collections import OrderedDict
from copy import deepcopy
from .CommonBlocksRecordTranslator import CommonBlocksRecordTranslator
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from ..biotools import annotate_record
from .commonblocks_tools import (
format_sequences_as_dicts,
select_common_blocks,
find_homologies_between_sequences,
)
# TODO: Simplify the code by using the new Location class in Location.py
class CommonBlocks:
    """Class to represent a set of common blocks from different sequences.

    Create with ``CommonBlocks.from_sequences``:

    >>> common_blocks = CommonBlocks.from_sequences({'s1': 'ATGC...'})

    Parameters
    ----------
    common_blocks
      Dictionary {block_name: data} where each data dict has (at least) keys
      "sequence" (the block's ATGC string) and "locations" (a list of
      (sequence_name, (start, end, strand)) tuples).
    records
      A dictionary of the Biopython records of the sequences
      {record_id: record}.
    """

    def __init__(self, common_blocks, records):
        """Store the precomputed blocks and records (see ``from_sequences``)."""
        self.common_blocks = common_blocks
        self.records = records

    @staticmethod
    def from_sequences(
        sequences,
        block_selection_method="most_coverage_first",
        include_self_homologies=True,
        min_block_size=80,
        max_block_size=None,
    ):
        """Compute a CommonBlocks instance from a set of sequences.

        ``sequences`` is a {name: "ATGC..."} dict or a list of records (see
        ``format_sequences_as_dicts``). Homologies between the sequences are
        detected first, then a subset of common blocks is selected with the
        given ``block_selection_method``.
        """
        sequences_dict, records_dict = format_sequences_as_dicts(sequences)
        # Detect all homologies (within size bounds) between the sequences.
        homologies_dict = find_homologies_between_sequences(
            sequences_dict,
            min_size=min_block_size,
            max_size=max_block_size,
            include_self_homologies=include_self_homologies,
        )
        # Keep only a selected, non-redundant set of blocks.
        common_blocks = select_common_blocks(
            homologies_dict,
            sequences_dict,
            min_size=min_block_size,
            method=block_selection_method,
        )
        return CommonBlocks(common_blocks=common_blocks, records=records_dict)

    def compute_unique_blocks(self):
        """Return a dictionary listing unique blocks by sequence.

        The unique blocks are the blocks between the selected common blocks.
        The result is of the form {seq: [(start, end), (start2, end2), ...]}
        """
        unique_blocks = OrderedDict()
        for seqname, rec in self.sequences_with_annotated_blocks().items():
            # Sorted (start, end) spans of the common blocks, with zero-length
            # sentinel spans added at both ends of the sequence.
            blocks_locations = (
                [(0, 0)]
                + sorted(
                    [
                        (f.location.start, f.location.end)
                        for f in rec.features
                        if f.qualifiers.get("is_block", False)
                    ]
                )
                + [(len(rec), len(rec))]
            )
            # Gaps between consecutive common blocks are the unique blocks;
            # gaps of length <= 1 are ignored.
            unique_blocks[seqname] = [
                (end1, start2)
                for (_, end1), (start2, _) in zip(
                    blocks_locations, blocks_locations[1:]
                )
                if (start2 - end1) > 1
            ]
        return unique_blocks

    def common_blocks_to_csv(self, target_file=None):
        """Write the common blocks into a CSV file.

        If a target CSV file is provided the result is written to that file.
        Otherwise the result is returned as a string.

        The columns of the CSV file are "block", "size", "locations", and
        sequence. NOTE: the file is semicolon-separated; each location is
        rendered as "seqname(start, end, strand)", space-separated.
        """
        csv_content = "\n".join(
            ["block;size;locations;sequence"]
            + [
                ";".join(
                    [
                        block_name,
                        str(len(data["sequence"])),
                        " ".join(
                            [
                                "%s(%d, %d, %d)" % (cst, start, end, strand)
                                for (cst, (start, end, strand)) in data["locations"]
                            ]
                        ),
                        data["sequence"],
                    ]
                )
                for block_name, data in self.common_blocks.items()
            ]
        )
        if target_file:
            with open(target_file, "w+") as f:
                f.write(csv_content)
        else:
            return csv_content

    def common_blocks_records(self):
        """Return all common blocks as a list of Biopython records.

        Each block is extracted from its first recorded occurrence and is
        reverse-complemented when that occurrence is on the -1 strand.
        """
        if self.records is None:
            # NOTE(review): error message left empty in the original code.
            raise ValueError("")
        records = []
        for block_name, data in self.common_blocks.items():
            cst, (start, end, strand) = data["locations"][0]
            record = self.records[cst][start:end]
            if strand == -1:
                record = record.reverse_complement()
            record.id = record.name = block_name
            records.append(record)
        return records

    def unique_blocks_records(self, target_file=None):
        """Return all unique blocks as a list of Biopython records.

        Record ids are "<seqname>_<i>" with a zero-padded index.
        NOTE(review): the ``target_file`` parameter is currently unused.
        """
        if self.records is None:
            # NOTE(review): error message left empty in the original code.
            raise ValueError("")
        records = []
        for seqname, locations in self.compute_unique_blocks().items():
            for i, (start, end) in enumerate(locations):
                record = self.records[seqname][start:end]
                record.id = "%s_%03d" % (seqname, i)
                records.append(record)
        return records

    def sequences_with_annotated_blocks(self, colors="auto"):
        """Return a dict {record_id: record} of deep-copied Biopython records
        with annotations ("misc_feature") indicating the common blocks.

        Parameter ``colors`` is either a list of colors or "auto" for the
        default.
        """
        records = deepcopy(self.records)
        if colors == "auto":
            # Cycle through 30 colors sampled from the "Paired" colormap.
            colors = itertools.cycle([cm.Paired(0.21 * i % 1.0) for i in range(30)])
        blocks_and_colors = zip(self.common_blocks.items(), colors)
        for (name, data), color in blocks_and_colors:
            for (seqname, location) in data["locations"]:
                annotate_record(
                    records[seqname],
                    location,
                    feature_type="misc_feature",
                    is_block=True,
                    label=name,
                    color=color,
                )
        return records

    def plot_common_blocks(
        self, colors="auto", axes=None, figure_width=10, ax_height=2
    ):
        """Plot the common blocks found on vertically stacked axes.

        The axes on which the plots are drawn are returned at the end.

        Parameters
        ----------
        colors
          Either a list of colors to use for blocks or "auto" for the default.
        axes
          A list of matplotlib axes on which to plot, or None for new axes.
        figure_width
          Width of the final figure in inches.
        ax_height
          Height of each plot.
        """
        translator = CommonBlocksRecordTranslator()
        records = self.sequences_with_annotated_blocks(colors=colors)
        if axes is None:
            # One stacked axis per record. NOTE(review): with a single record,
            # plt.subplots returns a bare Axes rather than an array — confirm
            # callers always compare at least 2 sequences.
            fig, axes = plt.subplots(
                len(self.records),
                1,
                facecolor="white",
                sharex=True,
                figsize=(figure_width, ax_height * len(self.records)),
            )
        else:
            fig = axes[0].figure
        for (ax, (seqname, record)) in zip(axes, records.items()):
            gr_record = translator.translate_record(record)
            # Share one x-scale (the longest record) across all axes; only
            # the bottom axis gets a ruler.
            gr_record.plot(
                ax,
                x_lim=(0, max([len(rec) for rec in self.records.values()])),
                with_ruler=(ax == axes[-1]),
            )
            ax.set_ylim(top=ax.get_ylim()[1])
            ax.set_title(seqname, loc="left", fontdict=dict(weight="bold"))
        # fig.tight_layout()
        return axes

    def copy_features_between_common_blocks(self, inplace=False):
        """Copy annotations between records that share common blocks.

        If ``inplace`` is True the features are added to ``self.records``
        directly; otherwise annotated deep copies are returned. In both cases
        the {record_id: record} dict is returned.
        """

        def extract_subrecord(record, location):
            # Sub-record at (start, end, strand); reverse-complemented when
            # the location is on the -1 strand.
            start, end, strand = location
            record = record[start:end]
            if strand == -1:
                record = record.reverse_complement()
            return record

        def extract_features(record, offset, reverse=False):
            # Deep-copied features of the record, shifted by ``offset`` and
            # tagged with the id of the record they originally came from.
            if reverse:
                record = record.reverse_complement()
            new_features = [deepcopy(f) for f in record.features]
            for f in new_features:
                f.qualifiers["original_record"] = record.id
            for f in new_features:
                f.location += offset
            return new_features

        if inplace:
            records = self.records
        else:
            records = deepcopy(self.records)
        for data in self.common_blocks.values():
            locations = data["locations"]
            subrecords = {
                rec_id: extract_subrecord(records[rec_id], location)
                for rec_id, location in data["locations"]
            }
            # For each ordered pair of block occurrences, copy the features
            # of one occurrence into the record holding the other.
            for l1, l2 in itertools.combinations(locations, 2):
                for ((id1, loc1), (id2, __loc2)) in ((l1, l2), (l2, l1)):
                    start1, __end1, strand1 = loc1
                    # start2, end2, strand2 = loc2
                    records[id1].features += extract_features(
                        subrecords[id2], offset=start1, reverse=(strand1 == -1)
                    )
        return records
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,220
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/examples/features_transfer.py
|
"""Example: transfer annotations from a part record to a plasmid record."""
import os
from geneblocks import CommonBlocks, load_record
from dna_features_viewer import BiopythonTranslator
import matplotlib.pyplot as plt

# LOAD THE TWO RECORDS
sequences_dir = os.path.join("sequences", "features_transfer")
part = load_record(os.path.join(sequences_dir, "part.gb"), name="part")
plasmid = load_record(
    os.path.join(sequences_dir, "plasmid_to_annotate.gb"), name="plasmid"
)

# TRANSFER THE FEATURES
blocks = CommonBlocks.from_sequences([part, plasmid])
new_records = blocks.copy_features_between_common_blocks(inplace=False)
annotated_plasmid = new_records["plasmid"]  # record with all features

# PLOT ALL RECORDS
fig, (ax0, ax1, ax2) = plt.subplots(3, 1, sharex=True, figsize=(7, 5))
ax0.set_title("Record 1: Annotated part", loc="left")
ax1.set_title("Record 2: Partially annotated plasmid", loc="left")
ax2.set_title("Record 2 (after) with all annotations", loc="left")


class SpecialBiopythonTranslator(BiopythonTranslator):
    """Color features red when they come from the part, blue otherwise."""

    def compute_feature_color(self, f):
        source = "".join(f.qualifiers.get("original_record", ""))
        if source == "part":
            return "#ffafaf"
        return "#afafff"


tr = SpecialBiopythonTranslator()
tr.translate_record(part).plot(ax0, with_ruler=False, x_lim=(0, len(plasmid)))
tr.translate_record(plasmid).plot(ax1, with_ruler=False)
tr.translate_record(annotated_plasmid).plot(ax2, with_ruler=False)
fig.tight_layout()
fig.subplots_adjust(hspace=0.6)
fig.savefig("features_transfer.png", dpi=125)
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,221
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/__init__.py
|
""" geneblocks/__init__.py """
# __all__ = []
from .CommonBlocks import CommonBlocks
from .DiffBlocks import DiffBlocks, DiffRecordTranslator
from .biotools import load_record, random_dna_sequence, reverse_complement
from .utils import sequences_are_circularly_equal
from .version import __version__
__all__ = [
"CommonBlocks",
"DiffBlocks",
"DiffRecordTranslator",
"load_record",
"random_dna_sequence",
"reverse_complement",
"sequences_are_circularly_equal"
"__version__",
]
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,222
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/examples/diff_blocks.py
|
"""Example: plot the differences between two annotated GenBank records."""
import os
from geneblocks import DiffBlocks, load_record

record_1 = load_record(os.path.join("sequences", "sequence1.gb"))
record_2 = load_record(os.path.join("sequences", "sequence2.gb"))
blocks = DiffBlocks.from_sequences(record_1, record_2)
ax1, ax2 = blocks.plot(figure_width=8)
ax1.figure.savefig("diff_blocks.png", bbox_inches="tight")
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,223
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/examples/common_blocks.py
|
"""Example: find and plot the common blocks of 5 composite sequences."""
from geneblocks import CommonBlocks
from geneblocks.biotools import reverse_complement, random_dna_sequence

# Eight random "building block" sequences of increasing lengths.
C1, A, B, C2, C3, D, E, F = [random_dna_sequence(100 * L) for L in range(1, 9)]

# Five sequences sharing blocks C1, C2, C3 (sometimes reverse-complemented).
sequences = dict(
    a=C1 + A + C2,
    b=B + D + C2 + C3,
    c=E + C1 + C2,
    d=C2 + C1 + F + C3,
    e=C3 + reverse_complement(C2 + C1),
)

common_blocks = CommonBlocks.from_sequences(sequences)
axes = common_blocks.plot_common_blocks()
axes[0].figure.savefig("common_blocks.png", bbox_inches="tight")
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,224
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/DiffBlocks/DiffRecordTranslator.py
|
from dna_features_viewer import BiopythonTranslator
class DiffRecordTranslator(BiopythonTranslator):
    """Record translator that styles ``diff_*`` features for plotting."""

    ignored_features_types = ("diff_equal",)
    default_box_color = None

    @staticmethod
    def compute_feature_color(f):
        """Map each diff feature type to its display color (white otherwise)."""
        color_by_type = {
            "diff_delete": "#E76F51",   # red
            "diff_insert": "#2A9D8F",   # green
            "diff_replace": "#E9C46A",  # yellow
            "diff_change": "#F4A261",   # orange
            "diff_reverse": "white",
            "diff_transpose": "white",
        }
        return color_by_type.get(f.type, "white")

    @staticmethod
    def compute_feature_box_linewidth(f):
        """Outline only diff features."""
        if f.type.startswith("diff_"):
            return 1
        return 0

    @staticmethod
    def compute_feature_fontdict(f):
        """Use a larger label font for diff features."""
        fontsize = 12 if f.type.startswith("diff_") else 9
        return {"fontsize": fontsize}
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,225
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/sequence_modification_utils.py
|
"""These methods are only useful to build examples and tests for Geneblocks."""
from .biotools import reverse_complement
def change(seq, start, end, change):
    """Return ``seq`` with the slice ``seq[start:end]`` substituted by ``change``."""
    prefix, suffix = seq[:start], seq[end:]
    return prefix + change + suffix
def insert(seq, pos, inserted):
    """Return ``seq`` with ``inserted`` spliced in so that it begins at index ``pos``."""
    head, tail = seq[:pos], seq[pos:]
    return head + inserted + tail
def delete(seq, pos, deletions):
    """Return ``seq`` with ``deletions`` characters removed, starting at ``pos``."""
    kept_head = seq[:pos]
    kept_tail = seq[pos + deletions:]
    return kept_head + kept_tail
def reverse(seq, start, end):
    """Return ``seq`` with the segment seq[start:end] reverse-complemented."""
    segment_rc = reverse_complement(seq[start:end])
    return seq[:start] + segment_rc + seq[end:]
def move(seq, start, end, diff):
    """Return ``seq`` with the segment seq[start:end] shifted by ``diff``
    positions (rightwards when ``diff`` is positive, leftwards when negative).
    """
    segment = seq[start:end]
    if diff > 0:
        displaced = seq[end:end + diff]
        return seq[:start] + displaced + segment + seq[end + diff:]
    displaced = seq[start + diff:start]
    return seq[:start + diff] + segment + displaced + seq[end:]
def swap(seq, pos1, pos2):
    """Return ``seq`` with the (non-overlapping) segments at ``pos1`` and
    ``pos2`` exchanged. Each position is a ``(start, end)`` pair.
    """
    first, second = sorted([pos1, pos2])
    s1, e1 = first
    s2, e2 = second
    head = seq[:s1]
    middle = seq[e1:s2]
    tail = seq[e2:]
    return head + seq[s2:e2] + middle + seq[s1:e1] + tail
def copy(seq, start, end, new_start):
    """Return ``seq`` with a copy of segment seq[start:end] also inserted at
    index ``new_start`` (the original segment is kept in place).
    """
    duplicated = seq[start:end]
    return seq[:new_start] + duplicated + seq[new_start:]
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,226
|
Edinburgh-Genome-Foundry/Geneblocks
|
refs/heads/master
|
/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py
|
from dna_features_viewer import BiopythonTranslator
class CommonBlocksRecordTranslator(BiopythonTranslator):
    """Record translator that emphasizes features flagged ``is_block``."""

    ignored_features_types = ("diff_equal",)
    default_box_color = None

    def compute_feature_color(self, f):
        """Defer to the default palette for block features; white otherwise."""
        if not f.qualifiers.get("is_block", False):
            return "white"
        return BiopythonTranslator.compute_feature_color(self, f)

    @staticmethod
    def compute_feature_box_linewidth(f):
        """Outline only block features."""
        return 1 if f.qualifiers.get("is_block", False) else 0

    @staticmethod
    def compute_feature_fontdict(f):
        """Use a larger label font for block features."""
        fontsize = 12 if f.qualifiers.get("is_block", False) else 9
        return {"fontsize": fontsize}
|
{"/examples/complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_complex_sequences.py": ["/geneblocks/__init__.py", "/geneblocks/sequence_modification_utils.py"], "/tests/test_base_diff_cases.py": ["/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/Location.py": ["/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/DiffBlocks.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/DiffBlock.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py", "/geneblocks/DiffBlocks/diffblocks_tools.py"], "/geneblocks/CommonBlocks/__init__.py": ["/geneblocks/CommonBlocks/CommonBlocks.py"], "/geneblocks/DiffBlocks/DiffBlock.py": ["/geneblocks/Location.py", "/geneblocks/biotools.py"], "/geneblocks/utils.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/biotools.py"], "/geneblocks/DiffBlocks/diffblocks_tools.py": ["/geneblocks/Location.py", "/geneblocks/DiffBlocks/DiffBlock.py"], "/geneblocks/CommonBlocks/commonblocks_tools.py": ["/geneblocks/biotools.py"], "/tests/test_utils.py": ["/geneblocks/utils.py"], "/geneblocks/DiffBlocks/__init__.py": ["/geneblocks/DiffBlocks/DiffBlocks.py", "/geneblocks/DiffBlocks/DiffRecordTranslator.py"], "/tests/test_scenarios.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/Location.py"], "/geneblocks/CommonBlocks/CommonBlocks.py": ["/geneblocks/CommonBlocks/CommonBlocksRecordTranslator.py", "/geneblocks/biotools.py", "/geneblocks/CommonBlocks/commonblocks_tools.py"], "/examples/features_transfer.py": ["/geneblocks/__init__.py"], "/geneblocks/__init__.py": ["/geneblocks/CommonBlocks/__init__.py", "/geneblocks/DiffBlocks/__init__.py", "/geneblocks/biotools.py", "/geneblocks/utils.py"], "/examples/diff_blocks.py": ["/geneblocks/__init__.py"], "/examples/common_blocks.py": ["/geneblocks/__init__.py", "/geneblocks/biotools.py"], 
"/geneblocks/sequence_modification_utils.py": ["/geneblocks/biotools.py"]}
|
3,231
|
ronbeltran/four-pics-one-word
|
refs/heads/master
|
/game/models.py
|
from google.appengine.ext import ndb
class Letters(ndb.Model):
    """Datastore entity keyed by a letter combination, tracking match counts."""

    # Number of dictionary words matching this letter combination.
    match_count = ndb.IntegerProperty(required=True, default=0)

    @classmethod
    def _build_key(cls, letters):
        # The letter string itself serves as the entity key name.
        return ndb.Key(cls, letters)

    @classmethod
    def new(cls, letters, match_count):
        """Build (without persisting) a Letters entity for ``letters``."""
        entity = cls(
            key=cls._build_key(letters),
            match_count=match_count,
        )
        return entity
|
{"/game/views.py": ["/game/__init__.py"], "/api.py": ["/game/__init__.py"]}
|
3,232
|
ronbeltran/four-pics-one-word
|
refs/heads/master
|
/game/views.py
|
import logging
import operator
from flask import render_template, request
from google.appengine.api import memcache
from game import app
from game import utils
from game import models
EXPIRE_TIME = 60 * 60 * 24 # 24 hours
@app.route('/', methods=['GET', 'POST'])
def home():
    """Search words by length and available letters, caching results in memcache."""
    context = {}
    if request.method == "POST":
        length = request.form['length'] or 1
        letters = request.form['letters'] or None
        if letters is None:
            # No letters supplied: re-render the form with the chosen length.
            context.update({
                'length': length,
                'letters': '',
            })
            return render_template('index.html', **context)
        # Normalize: uppercase and strip spaces/newlines/tabs.
        letters = letters.upper().strip().replace(' ', '').replace('\n', '').replace('\t', '')
        # Cache key combines length with the alphabetically sorted letters,
        # so anagram queries share a single cache entry.
        key = '{0}_{1}'.format(str(length), ''.join(sorted(letters)))
        cached_data = memcache.get(key)
        if cached_data is None:
            logging.info('{} not found in memcache'.format(key))
            words = utils.get_words_dict(length, letters)
            # Sort by frequency, most frequent first.
            sorted_words = sorted(words.items(), key=operator.itemgetter(1))
            sorted_words.reverse()
            memcache.add(key, sorted_words, EXPIRE_TIME)
        else:
            logging.info('{} found in memcache'.format(key))
            sorted_words = cached_data
        context.update({
            'length': length,
            'letters': letters,
            'words': sorted_words,
        })
    return render_template('index.html', **context)
|
{"/game/views.py": ["/game/__init__.py"], "/api.py": ["/game/__init__.py"]}
|
3,233
|
ronbeltran/four-pics-one-word
|
refs/heads/master
|
/game/utils.py
|
import pickle
import logging
# Word-frequency dictionary, pickled by static/serialize.py with
# pickle.HIGHEST_PROTOCOL (a binary protocol). Must be opened in binary
# mode: text mode corrupts the stream on Windows and fails on Python 3.
# Using `with` also guarantees the file handle is closed after loading.
with open('./static/google-books-common-words.bin', 'rb') as _words_file:
    WORDS = pickle.load(_words_file)
def is_subset(word, choices):
    """Return True if ``word`` can be spelled from the letters in ``choices``,
    consuming each available letter at most once.
    """
    remaining = list(choices)
    for letter in word:
        try:
            remaining.remove(letter)
        except ValueError:  # letter not available (or already used up)
            return False
    return True
def get_words(length, letters):
    """Return the list of known words with exactly ``length`` letters that can
    be spelled from ``letters``.
    """
    length = int(length)
    matches = []
    # Python 2 codebase: WORDS is a dict, iterated lazily.
    for word in WORDS.iterkeys():
        if len(word) == length and is_subset(word, letters):
            matches.append(word)
    logging.info('Got {0} matches with length of {1} where choices {2}'.format(
        len(matches), length, letters))
    return matches
def get_words_dict(length, letters):
    """Return a dict mapping each matching word (``length`` letters, spellable
    from ``letters``) to its frequency in ``WORDS``.
    """
    length = int(length)
    matches = {}
    # Python 2 codebase: WORDS is a dict, iterated lazily.
    for word in WORDS.iterkeys():
        if len(word) == length and is_subset(word, letters):
            matches[word] = WORDS.get(word)
    logging.info('Got {0} matches with length of {1} where choices {2}'.format(
        len(matches), length, letters))
    return matches
|
{"/game/views.py": ["/game/__init__.py"], "/api.py": ["/game/__init__.py"]}
|
3,234
|
ronbeltran/four-pics-one-word
|
refs/heads/master
|
/api.py
|
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
package = 'Wordsapi'
from game import utils
class Word(messages.Message):
    """ProtoRPC message holding one matched word and its corpus frequency."""
    word = messages.StringField(1)
    frequency = messages.StringField(2)  # stringified int (see WordsApi.get_words)
class Words(messages.Message):
    """ProtoRPC response message wrapping a list of Word results."""
    words = messages.MessageField(Word, 1, repeated=True)
# Request container: `length` and `choices` are parsed from the URL path
# (see the path template in WordsApi.get_words).
WORDS_CRITERIA_RESOURCE = endpoints.ResourceContainer(
    message_types.VoidMessage,
    length=messages.IntegerField(1, variant=messages.Variant.INT32, required=True),  # noqa
    choices=messages.StringField(2, required=True)
)
# OAuth2 client IDs. Only the web client is configured here; the
# Android/iOS slots are placeholders.
WEB_CLIENT_ID = '471311115005-4bd8aqpnmrnro61ntdgstb2bsbvhma90.apps.googleusercontent.com'  # noqa
ANDROID_CLIENT_ID = ''
IOS_CLIENT_ID = ''
ANDROID_AUDIENCE = WEB_CLIENT_ID
@endpoints.api(name='wordsapi', version='v1',
               allowed_client_ids=[WEB_CLIENT_ID, endpoints.API_EXPLORER_CLIENT_ID],  # noqa
               audiences=[ANDROID_AUDIENCE],)
class WordsApi(remote.Service):
    """Cloud Endpoints service exposing the word-search lookup."""

    @endpoints.method(WORDS_CRITERIA_RESOURCE, Words,
                      path='words/{length}/{choices}', http_method='POST',
                      name='words.get')
    def get_words(self, request):
        """Return all words of ``length`` letters spellable from ``choices``,
        each paired with its corpus frequency (choices are upper-cased to
        match the dictionary's casing)."""
        words = utils.get_words(request.length, request.choices.upper())
        return Words(words=[Word(word=w, frequency=str(utils.WORDS.get(w))) for w in words])  # noqa
# Application object served by the App Engine runtime.
app = endpoints.api_server([WordsApi])
|
{"/game/views.py": ["/game/__init__.py"], "/api.py": ["/game/__init__.py"]}
|
3,235
|
ronbeltran/four-pics-one-word
|
refs/heads/master
|
/static/serialize.py
|
#!/usr/bin/env python
import os
import pickle
# Source word-frequency text file shipped alongside this script.
FILEPATH = os.path.abspath('google-books-common-words.txt')
WORD_LENGTHS = []  # NOTE(review): unused in this script — presumably reserved; confirm before removing
def convert_to_dict(filename):
    """Parse the word-frequency text file at ``filename`` into a {word: count}
    dict. Lines starting with '#' are comments and skipped; every other line
    must contain exactly a word and an integer count separated by whitespace.
    """
    data = {}
    with open(filename, 'r') as handle:
        for raw_line in handle:
            if raw_line.startswith('#'):
                continue
            word, count = raw_line.split()
            data[word] = int(count)
    return data
def pickle_data(data, filename):
    """Pickle ``data`` (must be a dict) to ``filename`` with the highest protocol.

    Raises:
        TypeError: if ``data`` is not a dict.
    """
    if not isinstance(data, dict):
        raise TypeError('data should be a dict')
    with open(filename, 'wb') as f:
        pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
    # Python 2 print statement — this script predates a Python 3 port.
    print 'Data saved as: {}'.format(os.path.abspath(filename))
def unpickle_data(filename):
    """Load and return the pickled object stored at ``filename``."""
    with open(filename, 'rb') as handle:
        loaded = pickle.load(handle)
    return loaded
if __name__ == "__main__":
    # Build the {word: count} dict from the bundled text file and pickle it
    # next to this script for the web app to load at startup.
    data = convert_to_dict(FILEPATH)
    pickle_data(data, 'google-books-common-words.bin')
    # _data = unpickle_data('google-books-common-words.bin')
    # print _data
|
{"/game/views.py": ["/game/__init__.py"], "/api.py": ["/game/__init__.py"]}
|
3,236
|
ronbeltran/four-pics-one-word
|
refs/heads/master
|
/game/__init__.py
|
from flask import Flask
app = Flask(__name__)
from game import views # noqa
|
{"/game/views.py": ["/game/__init__.py"], "/api.py": ["/game/__init__.py"]}
|
3,237
|
MTG/pymtg
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
# Packaging configuration; numpy is the only hard runtime dependency.
setup(name='pymtg',
      version='0.2.1',
      description='Python research utils that some of us use at the MTG and eventually everyone will use :)',
      url='https://github.com/MTG/pymtg',
      author='Music Technology Group',
      author_email='mtg-info@upf.edu',
      license='MIT',
      install_requires=['numpy'],
      packages=find_packages())
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,238
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/processing/__init__.py
|
from .worker import WorkParallelizer
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,239
|
MTG/pymtg
|
refs/heads/master
|
/test.py
|
import doctest
import unittest
import pkgutil
import pymtg
# Create a unit test suite and add all pymtg modules' doctests
suite = unittest.TestSuite()
for _, modname, _ in pkgutil.iter_modules(pymtg.__path__, prefix='pymtg.'):
    suite.addTest(doctest.DocTestSuite(modname))
# verbosity=1 prints a dot per test; increase for per-test names.
runner = unittest.TextTestRunner(verbosity=1)
runner.run(suite)
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,240
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/io/__init__.py
|
import os
import errno
import json
import fnmatch
def json_dump(path, data, indent=4, verbose=False):
    """Serialize ``data`` as JSON to the file at ``path``.

    Args:
        path (str): Path to the output file
        data: JSON-serializable data to store
        indent (int): Indentation width for pretty-printing (default=4)
        verbose (bool): Verbosity flag
    """
    with open(path, 'w') as out_file:
        if verbose:
            print('Saving data to {0}'.format(path))
        json.dump(data, out_file, indent=indent)
def json_load(path, verbose=False):
    """Load the JSON document stored at ``path``.

    Args:
        path (str): Path to the file
        verbose (bool): Verbosity flag

    Returns:
        The parsed JSON contents (usually a dict)
    """
    with open(path, 'r') as in_file:
        if verbose:
            print('Loading data from {0}'.format(path))
        return json.load(in_file)
def save_to_file(path, data, verbose=False):
    """Write arbitrary text ``data`` to the file at ``path``.

    Args:
        path (str): Path to the file
        data (str): Content to write
        verbose (bool): Verbosity flag
    """
    with open(path, 'w') as out_file:
        if verbose:
            print('Saving data to {0}'.format(path))
        out_file.write(data)
def mkdir_p(path):
    """Create directory ``path`` like ``mkdir -p``: parents are created as
    needed and an already-existing directory is not an error (any other
    OSError is re-raised).
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Swallow "already exists" only when the existing path is a directory.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def get_filenames_in_dir(dir_name, keyword='*', skip_foldername='', match_case=True, verbose=False):
    """Recursively collect files under ``dir_name`` whose names match ``keyword``.

    Args:
        dir_name (str): The foldername.
        keyword (str): The fnmatch pattern to search (defaults to '*').
        skip_foldername (str): An optional foldername to skip while searching.
        match_case (bool): Flag for case matching.
        verbose (bool): Verbosity flag.

    Returns:
        (tuple): Tuple containing:
            - fullnames (list): List of the fullpaths of the files found
            - folders (list): List of the folders of the files
            - names (list): List of the filenames without the foldername

    Examples:
        >>> get_filenames_in_dir('/path/to/dir/', '*.mp3') #doctest: +SKIP
        (['/path/to/dir/file1.mp3', '/path/to/dir/folder1/file2.mp3'], ['/path/to/dir/', '/path/to/dir/folder1'], ['file1.mp3', 'file2.mp3'])
    """
    names = []
    folders = []
    fullnames = []
    if verbose:
        print(dir_name)
    # Check if the folder exists.
    if not os.path.isdir(dir_name):
        if verbose:
            print("Directory doesn't exist!")
        return [], [], []
    # If dir_name ends with the path separator, strip it so os.walk works.
    dir_name = dir_name[:-1] if dir_name[-1] == os.sep else dir_name
    # Walk all the subdirectories.
    for (path, dirs, files) in os.walk(dir_name):
        for f in files:
            has_key = (fnmatch.fnmatch(f, keyword) if match_case else
                       fnmatch.fnmatch(f.lower(), keyword.lower()))
            if has_key and skip_foldername not in path.split(os.sep)[1:]:
                # `unicode` only exists on Python 2; on Python 3 the names are
                # already text, so NameError falls through to the plain append.
                try:
                    folders.append(unicode(path, 'utf-8'))
                except (TypeError, NameError):  # already unicode / Python 3
                    folders.append(path)
                try:
                    names.append(unicode(f, 'utf-8'))
                except (TypeError, NameError):
                    # BUG FIX: previously appended `path` here, so already-decoded
                    # filenames ended up as folder paths in `names`.
                    names.append(f)
                fullnames.append(os.path.join(path, f))
    if verbose:
        print("> Found " + str(len(names)) + " files.")
    return fullnames, folders, names
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,241
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/__init__.py
|
import pkgutil
# Eagerly import every direct submodule so that `import pymtg` exposes
# pymtg.io, pymtg.signal, etc. without explicit subimports.
for _, modname, _ in pkgutil.iter_modules(__path__, prefix='pymtg.'):
    __import__(modname)
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,242
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/plotting/__init__.py
|
# Fixed palette of distinguishable hex colors for plotting.
COLORS = ['#FF4500', '#FFA500', '#6B8E23', '#32CD32', '#FFD700', '#008B8B', '#00008B', '#B22222', '#1E90FF', '#FF1493',
          '#008000', '#DAA520', '#2F4F4F', '#8B0000', '#FF8C00', '#8B008B', '#A9A9A9', '#B8860B', '#00FFFF', '#6495ED',
          '#FF7F50', '#D2691E', '#7FFF00', '#DEB887', '#8A2BE2', '#0000FF', '#000000']


def color_at_index(index):
    """Return the hexadecimal color at ``index`` from ``COLORS``.

    Args:
        index (int): Index of the color (wraps around past the palette end)

    Returns:
        (str): Hexadecimal color code (starts with #)
    """
    wrapped = index % len(COLORS)
    return COLORS[wrapped]
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,243
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/iterables/__init__.py
|
def chunks(l, n):
    """Yield successive ``n``-sized chunks from sequence ``l`` (the last chunk
    may be shorter when ``len(l)`` is not a multiple of ``n``).
    """
    start = 0
    while start < len(l):
        yield l[start:start + n]
        start += n
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,244
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/signal/__init__.py
|
import numpy as np
def smooth(x, window_len=11, window='hanning', preserve_length=True):
    """Smooth the data using a window with requested size.

    This method is based on the convolution of a scaled window with the signal.
    The signal is prepared by introducing reflected copies of the signal
    (with the window size) in both ends so that transient parts are minimized
    in the beginning and end part of the output signal.
    The code here is an adaptation of the smoothing code from Scipy Cookbook:
    http://scipy-cookbook.readthedocs.io/items/SignalSmooth.html

    Args:
        x (array): The input signal
        window_len (int): The dimension of the smoothing window. Should be an odd integer.
        window (string): The type of window from 'flat', 'hanning', 'hamming', 'bartlett',
            'blackman'. Flat window will produce a moving average smoothing.
        preserve_length (bool): Whether the length of the output signal should be the same
            as the length of the input signal (default=True).

    Returns:
        (array): The smoothed signal

    Raises:
        ValueError: if x is not 1-D, is shorter than window_len, or window is unknown.
    """
    if not isinstance(x, np.ndarray):
        # BUG FIX: was `type(x) != np.array` — np.array is a function, not a
        # type, so the comparison was always True and ndarrays were re-copied.
        x = np.array(x)
    if x.ndim != 1:
        # BUG FIX: `raise (ValueError, msg)` raised a TypeError on Python 3.
        raise ValueError("Smooth only accepts 1 dimension arrays.")
    if x.size < window_len:
        raise ValueError("Input vector needs to be bigger than window size.")
    if window_len < 3:
        return x
    if window not in ('flat', 'hanning', 'hamming', 'bartlett', 'blackman'):
        raise ValueError("Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")
    # Pad with reflected copies at both ends to reduce edge transients.
    s = np.r_[x[window_len - 1:0:-1], x, x[-2:-window_len - 1:-1]]
    if window == 'flat':  # moving average
        w = np.ones(window_len, 'd')
    else:
        # Look the window function up on the numpy module (safer than eval).
        w = getattr(np, window)(window_len)
    y = np.convolve(w / w.sum(), s, mode='valid')
    if preserve_length:
        return y[(window_len // 2 - 1):-(window_len // 2)][:x.size]
    return y
def linear_approximation(x, include_coeffs=False):
    """Compute the first-degree least-squares polynomial fit of ``x``
    (linear approximation), evaluated at every index of ``x``.

    Uses Numpy's ``polyfit`` over the indices 0..len(x)-1; see
    https://docs.scipy.org/doc/numpy/reference/generated/numpy.polyfit.html.

    Args:
        x (array): The input signal
        include_coeffs (bool): Whether to also return the (slope, intercept)
            coefficients along with the approximated signal (default=False).

    Returns:
        (array): The linear approximation of the input signal, or a tuple
        ``(fit, (slope, intercept))`` when ``include_coeffs`` is True.
    """
    slope, intercept = np.polyfit(range(0, len(x)), x, 1)
    fitted = np.array([slope * i + intercept for i in range(0, len(x))])
    if include_coeffs:
        return fitted, (slope, intercept)
    return fitted
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,245
|
MTG/pymtg
|
refs/heads/master
|
/pymtg/time/__init__.py
|
import time
import datetime
def time_stats(done, total, starttime):
    """Report progress timing for a repeated operation.

    Call after each iteration of a loop over ``total`` items, passing how many
    are ``done`` and the ``starttime`` obtained from an initial
    ``time.monotonic()`` call.

    Args:
        done (int): how many items have been processed
        total (int): the total number of items to process
        starttime: the result of an initial call to time.monotonic()

    Returns:
        A tuple of (time elapsed, time remaining) as timedelta strings.
    """
    elapsed_seconds = round(time.monotonic() - starttime)
    fraction_done = done * 1.0 / total
    # Extrapolate total time from the fraction completed so far.
    remaining_seconds = round((elapsed_seconds / fraction_done) - elapsed_seconds)
    elapsed = datetime.timedelta(seconds=elapsed_seconds)
    remaining = datetime.timedelta(seconds=remaining_seconds)
    return str(elapsed), str(remaining)
def datetime_range(start_datetime, end_datetime=None, step_interval=None, n_steps=1, snap_to_date=False,
                   return_pairs=False):
    """Return the datetimes between ``start_datetime`` and ``end_datetime``,
    equally spaced by ``step_interval``.

    Args:
        start_datetime (datetime): Starting time of the range
        end_datetime (datetime): End of the range (included when the range is a
            multiple of the step). Defaults to today.
        step_interval (timedelta,str): Spacing between elements. Either a
            ``datetime.timedelta`` or one of 'day', 'second', 'microsecond',
            'millisecond', 'minute', 'hour', 'week'. Defaults to 1 day.
        n_steps (int): Multiplier applied when ``step_interval`` is given as a
            unit name (default=1; ignored for the 1-day default and for
            explicit timedeltas).
        snap_to_date (bool): Whether to zero out hour/minute/second/microsecond
            on both endpoints (default=False)
        return_pairs (bool): Whether to return consecutive (start, end) pairs
            instead of a flat list (default=False)

    Returns:
        (list): ``datetime.datetime`` objects, or tuples of two of them when
        ``return_pairs=True``.

    Examples:
        >>> datetime_range(datetime.datetime(2017,1,1), datetime.datetime(2017,1,3))
        [datetime.datetime(2017, 1, 1, 0, 0), datetime.datetime(2017, 1, 2, 0, 0), datetime.datetime(2017, 1, 3, 0, 0)]
    """
    # timedelta keyword corresponding to each accepted unit name.
    unit_kwargs = {
        'day': 'days',
        'second': 'seconds',
        'microsecond': 'microseconds',
        'millisecond': 'milliseconds',
        'minute': 'minutes',
        'hour': 'hours',
        'week': 'weeks',
    }
    if end_datetime is None:
        end_datetime = datetime.datetime.today()
    if step_interval is None:
        step_interval = datetime.timedelta(days=1)
    elif not isinstance(step_interval, datetime.timedelta):
        unit = step_interval.lower()
        if unit in unit_kwargs:
            step_interval = datetime.timedelta(**{unit_kwargs[unit]: n_steps})
    if snap_to_date:
        start_datetime = start_datetime.replace(hour=0, minute=0, second=0, microsecond=0)
        end_datetime = end_datetime.replace(hour=0, minute=0, second=0, microsecond=0)
    points = []
    current = start_datetime
    while current <= end_datetime:
        points.append(current)
        current += step_interval
    if return_pairs:
        return list(zip(points[:-1], points[1:]))
    return points
|
{"/test.py": ["/pymtg/__init__.py"]}
|
3,258
|
yjad/Academy
|
refs/heads/master
|
/flaskblog/users/routes.py
|
from flask import render_template, url_for, flash, redirect, request, Blueprint
from flask_login import login_user, current_user, logout_user, login_required
from flaskblog import db, bcrypt
from flaskblog.models import User, Post, Role
from flaskblog.users.forms import (RegistrationForm, LoginForm, UpdateAccountForm,
RequestResetForm, ResetPasswordForm, AddUserForm, EditUserForm, RoleForm)
from flaskblog.users.utils import save_picture, send_reset_email
users = Blueprint('users', __name__)
@users.route("/register", methods=['GET', 'POST'])
def register():
    """Self-service account creation; authenticated users are sent home."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = RegistrationForm()
    if not form.validate_on_submit():
        return render_template('register.html', title='Register', form=form)
    hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
    new_user = User(username=form.username.data, email=form.email.data, password=hashed_password)
    db.session.add(new_user)
    db.session.commit()
    flash('Your account has been created! You are now able to log in', 'success')
    return redirect(url_for('users.login'))
@users.route('/users/adduser', methods=['GET', 'POST'])
def add_user():
    """Admin-style user creation with an explicit role selection.

    Renders the form on GET or invalid POST; on success stores the new user
    and redirects back to this page.
    """
    # if current_user.is_authenticated:
    #     return redirect(url_for('home'))
    # Populate the role selector from the Role table.
    roles = db.session.query(Role).all()
    roles_list = [(i.id, i.name) for i in roles]
    form = AddUserForm()
    form.role_id.choices = roles_list
    if form.validate_on_submit():
        # BUG FIX: decode the hash to utf-8 for consistency with register()
        # and reset_token(); otherwise this column stores raw bytes.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user = User(
            login_name=form.login_name.data,
            username=form.username.data,
            email=form.email.data,
            password=hashed_password,
            role_id=form.role_id.data)
        db.session.add(user)
        db.session.commit()
        flash('Account has been created, you can login!', 'success')
        return redirect(url_for('users.add_user'))
    else:
        return render_template('add_user.html', title='Add User', form=form)
@users.route('/users/user_list', methods=['GET', 'POST'])
@login_required
def user_list():
    """Paginated listing of all users, sorted by login name."""
    page_number = request.args.get('page', default=1, type=int)
    paginated = (User.query
                 .order_by(User.login_name.asc())
                 .paginate(page=page_number, per_page=5))
    return render_template('user_list.html', user_list=paginated)
@users.route("/users/users/<int:user_id>", methods=['GET', 'POST'])
@login_required
def edit_user(user_id):
    """Edit an existing user's name, email and role (404 for unknown ids)."""
    user = User.query.get_or_404(user_id)
    # Populate the role selector from the Role table.
    roles = db.session.query(Role).all()
    roles_list = [(i.id, i.name) for i in roles]
    form = EditUserForm()
    form.role_id.choices = roles_list
    if form.validate_on_submit():
        if user.email != form.email.data:  # email changed. check if new is unique
            if User.query.filter_by(email=form.email.data).first():
                flash('Email alreay exist, select another one!', 'danger')
                return redirect('#')  # reload
        user.username = form.username.data
        user.email = form.email.data
        user.role_id = form.role_id.data
        db.session.commit()
        flash('Your updates have been saved!', 'success')
        return redirect(url_for('users.user_list'))
    elif request.method == 'GET':
        # print("inside GET", form.email.data, form.email.object_data, form.email.raw_data)
        # Pre-fill the form from the stored user on first load.
        form.username.data = user.username
        form.login_name.data = user.login_name
        form.email.data = user.email
        form.role_id.data = user.role_id
    return render_template('edit_user.html', title='Update User', form=form, user=user, legend="Update User")
#@users.route("/")
@users.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate by login name and password; honors a ?next= redirect."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(login_name=form.login_name.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            next_page = request.args.get('next')
            login_user(user, remember=form.remember.data)
            if next_page:
                return redirect(next_page)
            return redirect(url_for('main.home'))
        flash('Login Unsuccessful. Please check username and password', 'danger')
    return render_template('login.html', title='Login', form=form)
@users.route("/logout")
def logout():
    """End the current session and redirect to the home page."""
    logout_user()
    return redirect(url_for('main.home'))
@users.route("/account", methods=['GET', 'POST'])
@login_required
def account():
    """Let the logged-in user update their username, email and avatar."""
    form = UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            # Store the uploaded image and remember its generated filename.
            picture_file = save_picture(form.picture.data)
            current_user.image_file = picture_file
        current_user.username = form.username.data
        current_user.email = form.email.data
        db.session.commit()
        flash('Your account has been updated!', 'success')
        # Redirect after POST so a browser refresh doesn't resubmit the form.
        return redirect(url_for('users.account'))
    elif request.method == 'GET':
        # Pre-fill the form with the current values on first load.
        form.username.data = current_user.username
        form.email.data = current_user.email
    image_file = url_for('static', filename='profile_pics/' + current_user.image_file)
    return render_template('account.html', title='Account',
                           image_file=image_file, form=form)
@users.route("/user/<string:username>")
def user_posts(username):
    """List one author's posts, newest first, five per page."""
    page = request.args.get('page', 1, type=int)
    user = User.query.filter_by(username=username).first_or_404()
    newest_first = Post.query.filter_by(author=user).order_by(Post.date_posted.desc())
    posts = newest_first.paginate(page=page, per_page=5)
    return render_template('user_posts.html', posts=posts, user=user)
@users.route("/reset_password", methods=['GET', 'POST'])
def reset_request():
    """Ask for an account email and send a password-reset link to it."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = RequestResetForm()
    if not form.validate_on_submit():
        # GET, or POST that failed validation: (re)render the request form.
        return render_template('reset_request.html', title='Reset Password', form=form)
    # The form's validate_email already guarantees the account exists.
    user = User.query.filter_by(email=form.email.data).first()
    send_reset_email(user)
    flash('An email has been sent with instructions to reset your password.', 'info')
    return redirect(url_for('users.login'))
@users.route("/reset_password/<token>", methods=['GET', 'POST'])
def reset_token(token):
    """Set a new password for the account identified by a valid reset token."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    user = User.verify_reset_token(token)
    if user is None:
        flash('That is an invalid or expired token', 'warning')
        return redirect(url_for('users.reset_request'))
    form = ResetPasswordForm()
    if form.validate_on_submit():
        # Store only the bcrypt hash, never the plain-text password.
        user.password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        db.session.commit()
        flash('Your password has been updated! You are now able to log in', 'success')
        return redirect(url_for('users.login'))
    return render_template('reset_token.html', title='Reset Password', form=form)
# ----------------------------------------------------------------------
@users.route('/users/add_role', methods=['GET', 'POST'])
# @login_required
def add_role():
    """Create a new Role from the submitted form and return to the role list.

    NOTE(review): @login_required is commented out, so this endpoint is
    currently reachable without authentication — confirm that is intended.
    """
    form = RoleForm()
    if form.validate_on_submit():
        role = Role(
            name=form.name.data,
            description=form.description.data,
            is_admin=form.is_admin.data)  # BooleanField -> True/False (1 or 0)
        db.session.add(role)
        db.session.commit()
        flash('Role Saved!', 'success')  # was an f-string with no placeholders
        return redirect(url_for('users.role_list'))
    # GET, or POST that failed validation: (re)render the entry form.
    return render_template('add_role.html', title='Add Role', form=form, legend="Add Role")
# @users.route("/test", methods=['GET', 'POST'])
# def test():
# form = TestRoleForm()
# return render_template("test.html", form=form)
@users.route('/users/role_list', methods=['GET', 'POST'])
@login_required
def role_list():
    """Render the full list of roles.

    Pagination was disabled (see history: Role.query...paginate(...)); the
    unused `page` query-arg lookup that remained is removed with it.
    """
    roles = Role.query.all()
    return render_template('role_list.html', role_list=roles)
# @users.route("/users/roles/<int:role_id>/update", methods=['GET', 'POST'])
# @login_required
# def edit_role(role_id):
# role = Role.query.get_or_404(role_id)
# # if post.author != current_user:
# # abort(403)
# form = RoleForm()
# if form.validate_on_submit():
# role.name = form.name.data
# role.description = form.description.data
# role.is_admin = form.is_admin.data
# db.session.commit()
# flash('Your updates have been saved!', 'success')
# return redirect(url_for('role_list'))
# elif request.method == 'GET':
# form.name.data = role.name
# form.description.data = role.description
# form.is_admin.data = role.is_admin
# return render_template('add_role.html', title='Update Role', form=form, legend="Update Role")
@users.route("/users/roles/<int:role_id>/delete", methods=['GET', 'POST'])
@login_required
def delete_role(role_id):
    """Delete the role with the given id, then return to the role list.

    NOTE(review): the route also accepts GET, so a plain link (or a browser
    prefetcher) can delete a role without a CSRF-protected form — consider
    restricting this to POST.
    """
    role = Role.query.get_or_404(role_id)
    db.session.delete(role)
    db.session.commit()
    flash('Record deleted!', 'success')
    return redirect(url_for('users.role_list'))
@users.route("/users/roles/<int:role_id>", methods=['GET', 'POST'])
@login_required
def edit_role(role_id):
    """Edit an existing role: pre-fill the form on GET, persist on valid POST."""
    role = Role.query.get_or_404(role_id)
    # if post.author != current_user:
    #     abort(403)
    form = RoleForm()
    if form.validate_on_submit():
        # Copy the submitted values onto the managed entity and save.
        role.name = form.name.data
        role.description = form.description.data
        role.is_admin = form.is_admin.data
        db.session.commit()
        flash('Your updates have been saved!', 'success')
        return redirect(url_for('users.role_list'))
    if request.method == 'GET':
        # Seed the form with the role's current values.
        form.name.data = role.name
        form.description.data = role.description
        form.is_admin.data = role.is_admin
    return render_template('edit_role.html', title='Update Role', form=form, role=role, legend="Update Role")
|
{"/flaskblog/users/routes.py": ["/flaskblog/users/forms.py"]}
|
3,259
|
yjad/Academy
|
refs/heads/master
|
/flaskblog/reports/routes.py
|
from flask import render_template, request, Blueprint, url_for, Flask
from flask_login import login_required
from flaskblog.models import Post
from flaskblog.reports.zoom_reports import attendees_last_2_month, update_meetings, attendees_per_month
import os
reports = Blueprint('reports', __name__)
@reports.route("/reports/attendance_last_2_month")
@login_required
def attendance_last_2_month():
    """Serve the pre-rendered 'attendance over the last two months' graph.

    NOTE(review): the call that would regenerate the image is commented out,
    so a stale (or missing) static file is served — confirm that is intended.
    """
    # Resolve <package root>/static/out by walking up one directory instead of
    # the old dirname(__file__)[:-8] slice, which silently broke if the
    # 'reports' directory was ever renamed or the path had a different shape.
    image_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'static', 'out')
    image_name = 'attendess_2.png'
    filename = os.path.join(image_dir, image_name)
    #attendees_last_2_month(filename)
    return render_template('attendance_graph.html', image=image_name, title="Attendance last 2 Month")
# legend = 'Monthly Data'
# labels = ["January", "February", "March", "April", "May", "June", "July", "August"]
# values = [10, 9, 8, 7, 6, 4, 7, 8]
# return render_template('chart.html', values=values, labels=labels, legend=legend)
# def plot_png():
# fig = create_figure()
# output = io.BytesIO()
# FigureCanvas(fig).print_png(output)
# return Response(output.getvalue(), mimetype='image/png')
#
# def create_figure():
# fig = Figure()
# axis = fig.add_subplot(1, 1, 1)
# xs = range(100)
# ys = [random.randint(1, 50) for x in xs]
# axis.plot(xs, ys)
# return fig
@reports.route("/reports/attendance_per_month")
@login_required
def attendance_per_month():
    """Regenerate and serve the 'attendance per month' graph image."""
    # Resolve <package root>/static/out by walking up one directory instead of
    # slicing 8 characters ('/reports') off the directory string.
    image_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'static', 'out')
    image_name = 'attendess_month.png'
    filename = os.path.join(image_dir, image_name)
    attendees_per_month(filename)
    return render_template('attendance_graph.html', image=image_name, title="Attendance per Month")
@reports.route("/reports/load_meetings_data")
@login_required
def load_meetings_data():
    """Synchronously refresh meeting data from Zoom, then show the home page.

    The original body also called render_template('loading.html') and threw
    the result away — a Flask view cannot show an interim page that way, so
    the dead call is removed. A real progress page needs async/JS handling.
    """
    update_meetings()
    return render_template('home.html')
|
{"/flaskblog/users/routes.py": ["/flaskblog/users/forms.py"]}
|
3,260
|
yjad/Academy
|
refs/heads/master
|
/flaskblog/users/forms.py
|
from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from wtforms import StringField, PasswordField, SubmitField, BooleanField, SelectField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from flask_login import current_user
from flaskblog.models import User, Role
class RegistrationForm(FlaskForm):
    """Self-service sign-up form for new users."""
    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    # EqualTo('password') compares against the password field above by name.
    confirm_password = PasswordField('Confirm Password',
                                     validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')
    def validate_username(self, username):
        """WTForms inline validator: reject usernames already in the DB."""
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError('That username is taken. Please choose a different one.')
    def validate_email(self, email):
        """WTForms inline validator: reject emails already in the DB."""
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That email is taken. Please choose a different one.')
class AddUserForm(FlaskForm):
    """Admin-side form for creating a user account with an assigned role."""
    login_name = StringField('Login Name', validators=[DataRequired(), Length(min=4, max=25)])
    username = StringField('Username', validators=[DataRequired(), Length(max=100)])
    email = StringField('Email Address', validators=[DataRequired(), Email()])
    password = PasswordField('New Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password', message='Passwords must match')])
    # coerce=int: submitted value is a role id; choices are presumably filled
    # in by the view at request time — confirm against the routes.
    role_id = SelectField("Role", coerce=int, validators=[DataRequired()])
    submit = SubmitField('Save')
    def validate_login_name(self, login_name):
        """Inline validator: login names must be unique."""
        user = User.query.filter_by(login_name=login_name.data).first()
        if user:
            raise ValidationError('That login name is taken, please select another one')
    def validate_email(self, email):
        """Inline validator: emails must be unique."""
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That email already exists')
class EditUserForm(FlaskForm):
    """Admin-side form for editing an existing user; login name is read-only."""
    #login_name = StringField('Login Name', validators=[DataRequired(), Length(min=4, max=25)])
    login_name = StringField('Login Name', render_kw={'readonly': True})
    username = StringField('Username', validators=[DataRequired(), Length(max=100)])
    email = StringField('Email Address', validators=[DataRequired(), Email()])
    role_id = SelectField("Role", coerce=int, validators=[DataRequired()])
    submit = SubmitField('Save')
    # NOTE(review): the uniqueness check below is disabled — judging by the
    # commented debug print, it also fired when the email was left unchanged.
    # As written, edits currently skip email-uniqueness validation entirely.
    # def validate_email(self, email):
    #     print (f"from Validate_email data: {email.data}, raw: {email.object_data}, self: {self.email.data} selfobj: {self.email.object_data}")
    #     #if email.data != email.object_data: # field changed
    #     user = User.query.filter_by(email=email.data).first()
    #     if user:
    #         raise ValidationError('That email already exists')
class LoginForm(FlaskForm):
    """Credentials form: login name + password, with a remember-me flag."""
    login_name = StringField('Login Name', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me', validators = [])
    submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
    """Self-service profile form; uniqueness is only enforced on changes."""
    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('Update')
    def validate_username(self, username):
        """Reject a username that belongs to another account (keeping one's own is fine)."""
        if username.data != current_user.username:
            user = User.query.filter_by(username=username.data).first()
            if user:
                raise ValidationError('That username is taken. Please choose a different one.')
    def validate_email(self, email):
        """Reject an email that belongs to another account (keeping one's own is fine)."""
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError('That email is taken. Please choose a different one.')
class RequestResetForm(FlaskForm):
    """Form asking for the account email to send a password-reset link to."""
    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    submit = SubmitField('Request Password Reset')
    def validate_email(self, email):
        """Unlike registration, here the email MUST already exist."""
        user = User.query.filter_by(email=email.data).first()
        if user is None:
            raise ValidationError('There is no account with that email. You must register first.')
class ResetPasswordForm(FlaskForm):
    """New-password form shown after a valid reset token is presented."""
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password',
                                     validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Reset Password')
class RoleForm(FlaskForm):
    """Create/edit form for a Role record."""
    name = StringField('Name', validators=[DataRequired(), Length(min=4, max=20)])
    description = StringField('Description', validators=[DataRequired(), Length(max=200)])
    is_admin = BooleanField('Admin Role?', default= False)
    submit = SubmitField('Save')
    def validate_name(self, name):
        """Role names must be unique.

        NOTE(review): this also fires when a role is edited without renaming
        it (the existing row matches) — verify edit_role is unaffected.
        """
        role = Role.query.filter_by(name=name.data).first()
        if role:
            raise ValidationError('This role already exists, please select another one')
|
{"/flaskblog/users/routes.py": ["/flaskblog/users/forms.py"]}
|
3,261
|
yjad/Academy
|
refs/heads/master
|
/flaskblog/reports/run.py
|
from zoom import load_zoom_meetings, get_zoom_report_daily, load_zoom_telephone_report, get_meeting_details
from zoom_reports import stats_attendees, attendance_sheet, list_unmatched_attendees, update_meetings, \
stats_attendees_graph
if __name__ == "__main__":
#load_zoom_meetings("2020-06-15", "2020-07-14")
#load_zoom_meetings("2020-07-01")
#load_zoom_meetings("2020-07-26")
#attendance_sheet("2020-08-11")
#attendance_sheet("")
#stats_attendees()
#list_unmatched_attendees()
#update_meetings() # update meetings starts after the last loaded meeting
#get_zoom_report_daily(2020,8)
#load_zoom_telephone_report("2020-07-26")
#get_meeting_details("Hce5zSsbRPmL+1l0VKGmVQ==")
stats_attendees_graph()
|
{"/flaskblog/users/routes.py": ["/flaskblog/users/forms.py"]}
|
3,262
|
yjad/Academy
|
refs/heads/master
|
/flaskblog/config.py
|
import os
class Config:
    """Flask application configuration.

    Secrets should come from the environment; the literal fallbacks below keep
    dev setups working but must not ship to production as-is.
    """
    # Prefer the environment; fall back to the historical dev key so existing
    # sessions/tokens keep working when SECRET_KEY is unset.
    SECRET_KEY = os.environ.get('SECRET_KEY', '793c5f9c78d57a71bbc1e21eed2aa6b3')
    #SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI')
    SQLALCHEMY_DATABASE_URI = 'sqlite:///data/site.sqlite'
    MAIL_SERVER = 'smtp.googlemail.com'
    MAIL_PORT = 587          # SMTP submission port (STARTTLS)
    MAIL_USE_TLS = True
    MAIL_USERNAME = os.environ.get('EMAIL_USER')
    MAIL_PASSWORD = os.environ.get('EMAIL_PASS')
    # NOTE(review): machine-specific absolute Windows path — brittle across
    # environments; the reports module computes its own path at runtime.
    IMAGE_DIR = r"E:\Yahia-Home\Python\src\Academy\flaskblog\static\out"
|
{"/flaskblog/users/routes.py": ["/flaskblog/users/forms.py"]}
|
3,263
|
yjad/Academy
|
refs/heads/master
|
/flaskblog/reports/DB.py
|
import sqlite3
from sqlalchemy.sql import text
from flaskblog.reports.config import config
from flaskblog import db
# Physical table names used by the raw-SQL helpers below.
MEETING_TABLE = "meetings"
ATTENDEES_TABLE = "attendees"
# def open_db():
# connection = sqlite3.Connection(config.get("DB_FILE_NAME"))
# cursor = connection.cursor()
# return connection, cursor
def create_tables():
    """Create the meetings and attendees tables if they do not exist yet.

    Only the table names are interpolated (module constants, no user input);
    the column lists are static. The original version put an f-prefix on
    every fragment even though only the first had a placeholder.
    """
    db.engine.execute(
        f'CREATE TABLE IF NOT EXISTS {MEETING_TABLE}'
        '(uuid TEXT NOT NULL PRIMARY KEY,'
        'id INTEGER ,'
        'host_id TEXT,'
        'type INTEGER,'
        'topic TEXT,'
        'user_name TEXT,'
        'user_email TEXT,'
        'start_time TEXT,'
        'end_time TEXT,'
        'duration INTEGER,'
        'total_minutes INTEGER,'
        'participants_count INTEGER)')
    db.engine.execute(
        f'CREATE TABLE IF NOT EXISTS {ATTENDEES_TABLE}'
        '(meeting_uuid TEXT,'
        'id TEXT ,'
        'user_id TEXT,'
        'name TEXT,'
        'user_email TEXT,'
        'join_time TEXT,'
        'leave_time TEXT,'
        'duration INTEGER,'
        'attentiveness_score TEXT)')
    return
# def close_db(cursor):
# cursor.close()
def insert_row(table_name, rec):
    """Insert one record (a dict of column -> value) into table_name.

    Returns 0 on success, -1 on any failure. The -1 path is deliberate
    best-effort behaviour: a duplicate primary key is the expected failure
    mode (see the original comments) and must not abort a bulk load.
    """
    keys = ','.join(rec.keys())
    placeholders = ','.join('?' * len(rec))
    values = tuple(rec.values())
    try:
        # Values travel through '?' placeholders, so they are never
        # interpolated into the SQL text; only trusted table/column names are.
        db.engine.execute(
            'INSERT INTO ' + table_name + ' (' + keys + ') VALUES (' + placeholders + ')',
            values)
        return 0
    except Exception:  # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
        return -1
def insert_row_meeting(rec):
    """Insert one meeting record; returns 0 on success, -1 on failure."""
    return insert_row(MEETING_TABLE, rec)
def insert_row_attendees(rec):
    """Insert one attendee record; returns 0 on success, -1 on failure."""
    return insert_row(ATTENDEES_TABLE, rec)
def exec_query(cmd):
    """Run an arbitrary SQL string and return the resulting row iterable."""
    rows = db.engine.execute(text(cmd))
    #rows = db.engine.fetchall()
    return rows
def get_last_meeting_date():
    """Return the date part (first 10 chars, YYYY-MM-DD) of the most recent
    meeting's start_time, or None when the meetings table is empty.

    The original version executed the query but had its `return r[0][:10]`
    commented out, so it always returned None; this restores the intent
    visible in those comments.
    """
    result = db.engine.execute(
        'SELECT start_time from meetings order by start_time DESC LIMIT 1')
    row = result.fetchone()
    # start_time is stored as ISO-like text; slicing keeps the date portion.
    return row[0][:10] if row else None
def get_col_names(sql):
    """Return the column names produced by `sql` (fetching at most one row).

    SQLAlchemy's engine.execute() returns a ResultProxy, which exposes the
    column names via .keys() — the original accessed .description (a DB-API
    cursor attribute the proxy does not expose) and would raise
    AttributeError. Debug prints removed.
    """
    result = db.engine.execute(sql + " limit 1")
    return list(result.keys())
|
{"/flaskblog/users/routes.py": ["/flaskblog/users/forms.py"]}
|
3,264
|
supasate/FBPCS
|
refs/heads/main
|
/tests/util/test_yaml.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import unittest
from unittest.mock import patch, mock_open
from fbpcs.util.yaml import load, dump
# Fixture filename passed to the mocked open(); never touches the real FS.
TEST_FILENAME = "TEST_FILE"
# Nested structure exercising lists of dicts round-tripping through YAML.
TEST_DICT = {
    "test_dict": [
        {"test_key_1": "test_value_1"},
        {"test_key_1": "test_value_2"},
    ]
}
class TestYaml(unittest.TestCase):
    """Unit tests for fbpcs.util.yaml load/dump with fully mocked file I/O."""
    # JSON is a subset of YAML, so the JSON-encoded dict doubles as YAML input.
    data = json.dumps(TEST_DICT)
    @patch("builtins.open", new_callable=mock_open, read_data=data)
    def test_load(self, mock_file):
        # Sanity-check the mock is wired up before exercising load().
        self.assertEqual(open(TEST_FILENAME).read(), self.data)
        load_data = load(TEST_FILENAME)
        self.assertEqual(load_data, TEST_DICT)
        mock_file.assert_called_with(TEST_FILENAME)
    @patch("builtins.open")
    @patch("yaml.dump")
    def test_dump(self, mock_dump, mock_open):
        # Decorators apply bottom-up: mock_dump <- yaml.dump, mock_open <- open.
        mock_dump.return_value = None
        # The stream dump() writes to is open(...)'s context-manager value.
        stream = mock_open().__enter__.return_value
        self.assertIsNone(dump(TEST_DICT, TEST_FILENAME))
        mock_open.assert_called_with(TEST_FILENAME, "w")
        mock_dump.assert_called_with(TEST_DICT, stream)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,265
|
supasate/FBPCS
|
refs/heads/main
|
/onedocker/tests/test_util.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import subprocess
import unittest
from util import run_cmd
class TestUtil(unittest.TestCase):
    """Tests for onedocker.util.run_cmd (shell execution with timeout)."""
    def test_run_cmd(self):
        # assumes `cat` sees EOF on the runner's stdin and exits 0 within the
        # 1s timeout — TODO confirm on CI where stdin handling may differ.
        self.assertEqual(0, run_cmd("cat", 1))
    def test_run_cmd_with_timeout(self):
        # `vi` blocks waiting for a terminal, so the 1s timeout must fire.
        self.assertRaises(subprocess.TimeoutExpired, run_cmd, "vi", 1)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,266
|
supasate/FBPCS
|
refs/heads/main
|
/tests/error/mapper/test_aws.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from botocore.exceptions import ClientError
from fbpcs.error.mapper.aws import map_aws_error
from fbpcs.error.pcs import PcsError
from fbpcs.error.throttling import ThrottlingError
class TestMapAwsError(unittest.TestCase):
    """map_aws_error should translate botocore ClientErrors into PCS errors."""
    @staticmethod
    def _client_error(code):
        # Build a minimal botocore ClientError carrying the given error code
        # (the two original tests duplicated this structure verbatim).
        return ClientError(
            {
                "Error": {
                    "Code": code,
                    "Message": "test",
                },
            },
            "test",
        )
    def test_pcs_error(self):
        # The generic "Exception" code maps to the base PcsError.
        err = map_aws_error(self._client_error("Exception"))
        self.assertIsInstance(err, PcsError)
    def test_throttling_error(self):
        # "ThrottlingException" maps to the dedicated ThrottlingError subtype.
        err = map_aws_error(self._client_error("ThrottlingException"))
        self.assertIsInstance(err, ThrottlingError)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,267
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/gateway/ec2.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import Any, Dict, List, Optional
import boto3
from fbpcs.decorator.error_handler import error_handler
from fbpcs.entity.vpc_instance import Vpc
from fbpcs.mapper.aws import map_ec2vpc_to_vpcinstance
class EC2Gateway:
    """Thin boto3-backed gateway exposing VPC describe/list operations."""
    def __init__(
        self,
        region: str,
        access_key_id: Optional[str],
        access_key_data: Optional[str],
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self.region = region
        config = config or {}
        # Explicitly supplied credentials override whatever is in config.
        for option, value in (
            ("aws_access_key_id", access_key_id),
            ("aws_secret_access_key", access_key_data),
        ):
            if value is not None:
                config[option] = value
        # pyre-ignore
        self.client = boto3.client("ec2", region_name=self.region, **config)
    @error_handler
    def describe_vpcs(self, vpc_ids: List[str]) -> List[Vpc]:
        """Fetch the given VPC ids and map each to a Vpc entity."""
        raw_vpcs = self.client.describe_vpcs(VpcIds=vpc_ids)["Vpcs"]
        return [map_ec2vpc_to_vpcinstance(raw) for raw in raw_vpcs]
    @error_handler
    def describe_vpc(self, vpc_id: str) -> Vpc:
        """Fetch a single VPC by id."""
        return self.describe_vpcs([vpc_id])[0]
    @error_handler
    def list_vpcs(self) -> List[str]:
        """Return the ids of every VPC visible in this region."""
        response = self.client.describe_vpcs()
        return [entry["VpcId"] for entry in response["Vpcs"]]
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,268
|
supasate/FBPCS
|
refs/heads/main
|
/onedocker/util.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import signal
import subprocess
from typing import Optional
def run_cmd(cmd: str, timeout: Optional[int]) -> int:
    """Run *cmd* in a shell, returning its exit code.

    The child is started in a new session (setsid()), so its pgid equals its
    pid and the entire process group can be killed together if the command
    times out or the user presses Ctrl+C.

    Args:
        cmd: shell command line, executed with ``shell=True``.
        timeout: seconds to wait for completion, or None to wait forever.

    Returns:
        The child's exit code.

    Raises:
        subprocess.TimeoutExpired: if *timeout* elapses first.
        InterruptedError: if SIGINT (Ctrl+C) is received while waiting.
    """

    def _handler(signum, frame):
        # Convert Ctrl+C into an exception so the cleanup below runs.
        raise InterruptedError

    # BUG FIX: the original installed this handler and never removed it,
    # permanently altering the caller's SIGINT behavior.  Save the previous
    # handler and restore it on every exit path.
    previous_handler = signal.signal(signal.SIGINT, _handler)
    try:
        with subprocess.Popen(cmd, shell=True, start_new_session=True) as proc:
            try:
                proc.communicate(timeout=timeout)
            except (subprocess.TimeoutExpired, InterruptedError):
                proc.terminate()
                # Kill every process in the child's group, not just the
                # shell itself (start_new_session made pgid == proc.pid).
                os.killpg(proc.pid, signal.SIGTERM)
                raise
            return proc.wait()
    finally:
        signal.signal(signal.SIGINT, previous_handler)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,269
|
supasate/FBPCS
|
refs/heads/main
|
/tests/service/test_container_aws.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.service.container_aws import (
ContainerInstance,
ContainerInstanceStatus,
AWSContainerService,
)
# Shared fixture constants for the AWSContainerService tests below.
# None of these values refer to real AWS resources.
TEST_INSTANCE_ID_1 = "test-instance-id-1"
TEST_INSTANCE_ID_2 = "test-instance-id-2"
TEST_REGION = "us-west-2"
TEST_KEY_ID = "test-key-id"
TEST_KEY_DATA = "test-key-data"
TEST_CLUSTER = "test-cluster"
TEST_SUBNET = "test-subnet"
TEST_IP_ADDRESS = "127.0.0.1"
# Presumably a "task-definition#container-definition" pair — confirm against
# AWSContainerService.  NOTE(review): "DEFNITION" is a typo for "DEFINITION";
# left unchanged because every test below references this exact name.
TEST_CONTAINER_DEFNITION = "test-task-definition#test-container-definition"
class TestAWSContainerService(unittest.TestCase):
    """Unit tests for AWSContainerService with a mocked ECS gateway."""

    @patch("fbpcs.gateway.ecs.ECSGateway")
    def setUp(self, MockECSGateway):
        self.container_svc = AWSContainerService(
            TEST_REGION, TEST_CLUSTER, TEST_SUBNET, TEST_KEY_ID, TEST_KEY_DATA
        )
        # Swap in the mock so no real AWS calls are made.
        self.container_svc.ecs_gateway = MockECSGateway()

    def _make_started_instances(self):
        # Two STARTED instances shared by the create_instances tests.
        return [
            ContainerInstance(
                TEST_INSTANCE_ID_1,
                TEST_IP_ADDRESS,
                ContainerInstanceStatus.STARTED,
            ),
            ContainerInstance(
                TEST_INSTANCE_ID_2,
                TEST_IP_ADDRESS,
                ContainerInstanceStatus.STARTED,
            ),
        ]

    def test_create_instances(self):
        created_instances = self._make_started_instances()
        self.container_svc.ecs_gateway.run_task = MagicMock(
            side_effect=created_instances
        )
        cmd_list = ["test_cmd", "test_cmd-1"]
        container_instances = self.container_svc.create_instances(
            TEST_CONTAINER_DEFNITION, cmd_list
        )
        self.assertEqual(container_instances, created_instances)
        # run_task must be called once per command.
        self.assertEqual(
            self.container_svc.ecs_gateway.run_task.call_count, len(created_instances)
        )

    def test_create_instances_async(self):
        # BUG FIX: this was an `async def` on a plain unittest.TestCase,
        # which unittest never awaits — the coroutine was created, discarded,
        # and the test silently "passed" without running any assertions.
        # Drive the coroutine to completion explicitly instead.
        import asyncio

        created_instances = self._make_started_instances()
        self.container_svc.ecs_gateway.run_task = MagicMock(
            side_effect=created_instances
        )
        cmd_list = ["test_cmd", "test_cmd-1"]
        container_instances = asyncio.run(
            self.container_svc.create_instances_async(
                TEST_CONTAINER_DEFNITION, cmd_list
            )
        )
        self.assertEqual(container_instances, created_instances)
        self.assertEqual(
            self.container_svc.ecs_gateway.run_task.call_count, len(created_instances)
        )

    def test_create_instance(self):
        created_instance = ContainerInstance(
            TEST_INSTANCE_ID_1,
            TEST_IP_ADDRESS,
            ContainerInstanceStatus.STARTED,
        )
        self.container_svc.ecs_gateway.run_task = MagicMock(
            return_value=created_instance
        )
        container_instance = self.container_svc.create_instance(
            TEST_CONTAINER_DEFNITION, "test-cmd"
        )
        self.assertEqual(container_instance, created_instance)

    def test_get_instance(self):
        container_instance = ContainerInstance(
            TEST_INSTANCE_ID_1,
            TEST_IP_ADDRESS,
            ContainerInstanceStatus.STARTED,
        )
        self.container_svc.ecs_gateway.describe_task = MagicMock(
            return_value=container_instance
        )
        instance = self.container_svc.get_instance(TEST_INSTANCE_ID_1)
        self.assertEqual(instance, container_instance)

    def test_get_instances(self):
        container_instances = self._make_started_instances()
        self.container_svc.ecs_gateway.describe_tasks = MagicMock(
            return_value=container_instances
        )
        instances = self.container_svc.get_instances(
            [TEST_INSTANCE_ID_1, TEST_INSTANCE_ID_2]
        )
        self.assertEqual(instances, container_instances)

    def test_list_tasks(self):
        instance_ids = [TEST_INSTANCE_ID_1, TEST_INSTANCE_ID_2]
        self.container_svc.ecs_gateway.list_tasks = MagicMock(return_value=instance_ids)
        self.assertEqual(instance_ids, self.container_svc.list_tasks())
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,270
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/log_cloudwatch.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import Any, Dict, Optional
from fbpcs.gateway.cloudwatch import CloudWatchGateway
from fbpcs.service.log import LogService
class CloudWatchLogService(LogService):
    """LogService implementation backed by AWS CloudWatch Logs."""

    def __init__(
        self,
        log_group: str,
        region: str = "us-west-1",
        access_key_id: Optional[str] = None,
        access_key_data: Optional[str] = None,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        # Keep the log group first; the gateway owns all AWS interaction.
        self.log_group = log_group
        self.cloudwatch_gateway = CloudWatchGateway(
            region, access_key_id, access_key_data, config
        )

    def fetch(self, log_path: str) -> Dict[str, Any]:
        """Return the raw CloudWatch response for *log_path* in our log group."""
        gateway = self.cloudwatch_gateway
        return gateway.get_log_events(self.log_group, log_path)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,271
|
supasate/FBPCS
|
refs/heads/main
|
/tests/gateway/test_cloudwatch.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.gateway.cloudwatch import CloudWatchGateway
class TestCloudWatchGateway(unittest.TestCase):
    """Unit tests for CloudWatchGateway with a mocked boto3 client."""

    REGION = "us-west-1"
    GROUP_NAME = "test-group-name"
    STREAM_NAME = "test-stream-name"

    @patch("boto3.client")
    def test_get_log_events(self, BotoClient):
        gateway = CloudWatchGateway(self.REGION)
        gateway.client = BotoClient()
        expected_log = {"test-events": [{"test-event-name": "test-event-data"}]}
        gateway.client.get_log_events = MagicMock(return_value=expected_log)

        fetched_log = gateway.get_log_events(self.GROUP_NAME, self.STREAM_NAME)

        # The gateway must delegate to boto3 and return its response unchanged.
        gateway.client.get_log_events.assert_called()
        self.assertEqual(expected_log, fetched_log)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,272
|
supasate/FBPCS
|
refs/heads/main
|
/tests/gateway/test_ec2.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.entity.vpc_instance import Vpc, VpcState
from fbpcs.gateway.ec2 import EC2Gateway
# Fixture constants for the EC2Gateway tests; none are real AWS identifiers.
TEST_VPC_ID = "test-vpc-id"
TEST_ACCESS_KEY_ID = "test-access-key-id"
TEST_ACCESS_KEY_DATA = "test-access-key-data"
TEST_VPC_TAG_KEY = "test-vpc-tag-key"
TEST_VPC_TAG_VALUE = "test-vpc-tag-value"
REGION = "us-west-2"
class TestEC2Gateway(unittest.TestCase):
    """Unit tests for EC2Gateway with a mocked boto3 client."""

    @patch("boto3.client")
    def setUp(self, BotoClient):
        self.gw = EC2Gateway(REGION, TEST_ACCESS_KEY_ID, TEST_ACCESS_KEY_DATA)
        # Swap in the mock so no real AWS calls are made.
        self.gw.client = BotoClient()

    def test_describe_vpcs(self):
        boto_response = {
            "Vpcs": [
                {
                    "State": "UNKNOWN",
                    "VpcId": TEST_VPC_ID,
                    "Tags": [
                        {"Key": TEST_VPC_TAG_KEY, "Value": TEST_VPC_TAG_VALUE},
                    ],
                }
            ]
        }
        self.gw.client.describe_vpcs = MagicMock(return_value=boto_response)

        result = self.gw.describe_vpcs([TEST_VPC_ID])

        # The boto3 tag list must be flattened into a plain key->value dict.
        expected = [
            Vpc(
                TEST_VPC_ID,
                VpcState.UNKNOWN,
                {TEST_VPC_TAG_KEY: TEST_VPC_TAG_VALUE},
            ),
        ]
        self.assertEqual(result, expected)
        self.gw.client.describe_vpcs.assert_called()

    def test_list_vpcs(self):
        boto_response = {
            "Vpcs": [
                {"VpcId": TEST_VPC_ID},
            ]
        }
        self.gw.client.describe_vpcs = MagicMock(return_value=boto_response)

        result = self.gw.list_vpcs()

        self.assertEqual(result, [TEST_VPC_ID])
        self.gw.client.describe_vpcs.assert_called()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,273
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/entity/vpc_instance.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from dataclasses import dataclass, field
from enum import Enum
from typing import Dict
from dataclasses_json import dataclass_json
class VpcState(Enum):
    """VPC lifecycle states tracked by this project."""

    # UNKNOWN is also the Vpc dataclass default below — presumably used when a
    # boto3 state string is not recognized; confirm against the mapper.
    UNKNOWN = "UNKNOWN"
    PENDING = "PENDING"
    AVAILABLE = "AVAILABLE"
@dataclass_json
@dataclass
class Vpc:
    """An AWS VPC with the attributes this project tracks.

    Serializable via dataclasses_json (to_json/from_json).
    """

    # AWS-assigned VPC identifier.
    vpc_id: str
    # Lifecycle state; defaults to UNKNOWN.
    state: VpcState = VpcState.UNKNOWN
    # AWS resource tags as a plain key -> value mapping.
    # Idiom fix: `dict` replaces the needless `lambda: {}` default factory.
    tags: Dict[str, str] = field(default_factory=dict)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,274
|
supasate/FBPCS
|
refs/heads/main
|
/tests/service/test_storage_s3.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import unittest
from unittest.mock import call, MagicMock, patch
from fbpcs.service.storage_s3 import S3StorageService
class TestS3StorageService(unittest.TestCase):
LOCAL_FILE = "/usr/test_file"
LOCAL_FOLDER = "/foo"
S3_FILE = "https://bucket.s3.Region.amazonaws.com/test_file"
S3_FILE_COPY = "https://bucket.s3.Region.amazonaws.com/test_file_copy"
S3_FOLDER = "https://bucket.s3.Region.amazonaws.com/test_folder/"
S3_FOLDER_COPY = "https://bucket.s3.Region.amazonaws.com/test_folder_copy/"
S3_FILE_WITH_SUBFOLDER = (
"https://bucket.s3.Region.amazonaws.com/test_folder/test_file"
)
"""
The layout of LOCAL_DIR:
/foo/
├── bar/
└── baz/
├── a
└── b
"""
LOCAL_DIR = [
("/foo", ("bar",), ("baz",)),
("/foo/baz", (), ("a", "b")),
]
S3_DIR = [
"test_folder/bar/",
"test_folder/baz/",
"test_folder/baz/a",
"test_folder/baz/b",
]
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_local_to_s3(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.s3_gateway.upload_file = MagicMock(return_value=None)
service.copy(self.LOCAL_FILE, self.S3_FILE)
service.s3_gateway.upload_file.assert_called_with(
str(self.LOCAL_FILE), "bucket", "test_file"
)
def test_copy_local_dir_to_s3_recursive_false(self):
service = S3StorageService("us-west-1")
with patch("os.path.isdir", return_value=True):
self.assertRaises(
ValueError, service.copy, self.LOCAL_FOLDER, self.S3_FOLDER, False
)
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_local_dir_to_s3_recursive_true(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.s3_gateway.put_object = MagicMock(return_value=None)
service.s3_gateway.upload_file = MagicMock(return_value=None)
with patch("os.path.isdir", return_value=True):
with patch("os.walk", return_value=self.LOCAL_DIR):
service.copy(self.LOCAL_FOLDER, self.S3_FOLDER, True)
service.s3_gateway.put_object.assert_called_with(
"bucket", "test_folder/bar/", ""
)
service.s3_gateway.upload_file.assert_has_calls(
[
call("/foo/baz/a", "bucket", "test_folder/baz/a"),
call("/foo/baz/b", "bucket", "test_folder/baz/b"),
],
any_order=True,
)
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_s3_to_local(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.s3_gateway.download_file = MagicMock(return_value=None)
service.copy(self.S3_FILE, self.LOCAL_FILE)
service.s3_gateway.download_file.assert_called_with(
"bucket", "test_file", str(self.LOCAL_FILE)
)
def test_copy_s3_dir_to_local_recursive_false(self):
service = S3StorageService("us-west-1")
self.assertRaises(
ValueError, service.copy, self.S3_FOLDER, self.LOCAL_FOLDER, False
)
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_s3_dir_to_local_source_does_not_exist(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.s3_gateway.object_exists = MagicMock(return_value=False)
self.assertRaises(
ValueError, service.copy, self.S3_FOLDER, self.LOCAL_FOLDER, False
)
@patch("os.makedirs")
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_s3_dir_to_local_ok(self, MockS3Gateway, os_makedirs):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.s3_gateway.object_exists = MagicMock(return_value=True)
service.s3_gateway.list_object2 = MagicMock(return_value=self.S3_DIR)
service.s3_gateway.download_file = MagicMock(return_value=None)
service.copy(self.S3_FOLDER, self.LOCAL_FOLDER, True)
os.makedirs.assert_has_calls(
[
call("/foo/bar"),
call("/foo/baz"),
],
any_order=True,
)
service.s3_gateway.download_file.assert_has_calls(
[
call("bucket", "test_folder/baz/a", "/foo/baz/a"),
call("bucket", "test_folder/baz/b", "/foo/baz/b"),
],
any_order=True,
)
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_local_to_local(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
self.assertRaises(ValueError, service.copy, self.LOCAL_FILE, self.LOCAL_FILE)
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_s3_to_s3(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.copy(self.S3_FILE, self.S3_FILE_COPY)
service.s3_gateway.copy.assert_called_with(
"bucket", "test_file", "bucket", "test_file_copy"
)
def test_copy_s3_dir_to_s3_recursive_false(self):
service = S3StorageService("us-west-1")
self.assertRaises(
ValueError, service.copy, self.S3_FOLDER, self.S3_FOLDER_COPY, False
)
def test_copy_s3_dir_to_s3_source_and_dest_are_the_same(self):
service = S3StorageService("us-west-1")
self.assertRaises(
ValueError, service.copy, self.S3_FOLDER, self.S3_FOLDER, True
)
    @patch("fbpcs.gateway.s3.S3Gateway")
    def test_copy_s3_dir_to_s3_source_does_not_exist(self, MockS3Gateway):
        # Copying an S3 folder whose objects do not exist should fail.
        service = S3StorageService("us-west-1")
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.object_exists = MagicMock(return_value=False)
        # NOTE(review): recursive is passed as False, so the ValueError may
        # be raised by the non-recursive folder-copy check rather than the
        # existence check this test is named for -- consider passing True.
        self.assertRaises(
            ValueError, service.copy, self.S3_FOLDER, self.S3_FOLDER_COPY, False
        )
@patch("os.makedirs")
@patch("fbpcs.gateway.s3.S3Gateway")
def test_copy_s3_dir_to_s3_ok(self, MockS3Gateway, os_makedirs):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.s3_gateway.object_exists = MagicMock(return_value=True)
service.s3_gateway.list_object2 = MagicMock(return_value=self.S3_DIR)
service.s3_gateway.put_object = MagicMock(return_value=None)
service.s3_gateway.copy = MagicMock(return_value=None)
service.copy(self.S3_FOLDER, self.S3_FOLDER_COPY, True)
service.s3_gateway.put_object.assert_has_calls(
[
call("bucket", "test_folder_copy/bar/", ""),
call("bucket", "test_folder_copy/baz/", ""),
],
any_order=True,
)
service.s3_gateway.copy.assert_has_calls(
[
call("bucket", "test_folder/baz/a", "bucket", "test_folder_copy/baz/a"),
call("bucket", "test_folder/baz/b", "bucket", "test_folder_copy/baz/b"),
],
any_order=True,
)
@patch("fbpcs.gateway.s3.S3Gateway")
def test_delete_s3(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.delete(self.S3_FILE)
service.s3_gateway.delete_object.assert_called_with("bucket", "test_file")
@patch("fbpcs.gateway.s3.S3Gateway")
def test_file_exists(self, MockS3Gateway):
service = S3StorageService("us-west-1")
service.s3_gateway = MockS3Gateway()
service.file_exists(self.S3_FILE)
service.s3_gateway.object_exists.assert_called_with("bucket", "test_file")
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,275
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/container.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import abc
from typing import List
from fbpcs.entity.container_instance import ContainerInstance
class ContainerService(abc.ABC):
    """Abstract interface for launching and querying container instances.

    Concrete implementations (e.g. an AWS ECS-backed service) supply the
    actual orchestration; callers program against this interface only.
    """

    @abc.abstractmethod
    def create_instance(self, container_definition: str, cmd: str) -> ContainerInstance:
        """Launch one container from ``container_definition`` running ``cmd``."""
        pass

    @abc.abstractmethod
    def create_instances(
        self, container_definition: str, cmds: List[str]
    ) -> List[ContainerInstance]:
        """Launch one container per command in ``cmds``; results align with input order."""
        pass

    @abc.abstractmethod
    async def create_instances_async(
        self, container_definition: str, cmds: List[str]
    ) -> List[ContainerInstance]:
        """Async variant of ``create_instances``."""
        pass

    @abc.abstractmethod
    def get_instance(self, instance_id: str) -> ContainerInstance:
        """Fetch the current state of a single container instance by id."""
        pass

    @abc.abstractmethod
    def get_instances(self, instance_ids: List[str]) -> List[ContainerInstance]:
        """Fetch current state for several instances; results align with input order."""
        pass
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,276
|
supasate/FBPCS
|
refs/heads/main
|
/tests/util/test_typing.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fbpcs.util.typing import checked_cast
# Fixtures: a value of the wrong type and a value of the expected type.
TEST_STR = "test"
TEST_NUM = 123
class TestTyping(unittest.TestCase):
    def test_checked_cast(self):
        """checked_cast raises for a mismatched type and passes matching values through."""
        # NOTE(review): this f-string interpolates the *builtin* ``type``
        # (rendering "<class 'type'>"), presumably mirroring the exact
        # message checked_cast produces -- confirm against
        # fbpcs/util/typing.py. The string is also used as a regex by
        # assertRaisesRegex, so metacharacters in it are not escaped.
        error = f"Value was not of type {type!r}:\n{TEST_STR!r}"
        with self.assertRaisesRegex(ValueError, error):
            checked_cast(int, TEST_STR)
        self.assertEqual(checked_cast(int, TEST_NUM), TEST_NUM)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,277
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/repository/instance_s3.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import pickle
from fbpcs.entity.instance_base import InstanceBase
from fbpcs.service.storage_s3 import S3StorageService
class S3InstanceRepository:
    """Persists ``InstanceBase`` objects as pickled blobs in an S3 "directory".

    Each instance is stored under ``{base_dir}{instance_id}``.

    SECURITY NOTE: the stored payload is a pickle; ``read`` unpickles
    whatever is at that key, which executes arbitrary code if the bucket
    contents can be written by an attacker. Only point this repository at
    buckets written exclusively by trusted code.
    """

    def __init__(self, s3_storage_svc: S3StorageService, base_dir: str) -> None:
        # base_dir is concatenated directly with the instance id, so it is
        # expected to already carry any trailing separator (e.g. "foo/").
        self.s3_storage_svc = s3_storage_svc
        self.base_dir = base_dir

    def create(self, instance: InstanceBase) -> None:
        """Store a new instance; raises RuntimeError if the id already exists."""
        if self._exist(instance.get_instance_id()):
            raise RuntimeError(f"{instance.get_instance_id()} already exists")
        self._write(instance)

    def read(self, instance_id: str) -> InstanceBase:
        """Load and unpickle the instance stored under ``instance_id``.

        Raises:
            RuntimeError: if no object is stored under that id.
        """
        if not self._exist(instance_id):
            raise RuntimeError(f"{instance_id} does not exist")
        filename = f"{self.base_dir}{instance_id}"
        # See the class-level security note about unpickling remote data.
        instance = pickle.loads(self.s3_storage_svc.read(filename).encode())
        return instance

    def update(self, instance: InstanceBase) -> None:
        """Overwrite an existing instance; raises RuntimeError if it does not exist."""
        if not self._exist(instance.get_instance_id()):
            raise RuntimeError(f"{instance.get_instance_id()} does not exist")
        self._write(instance)

    def delete(self, instance_id: str) -> None:
        """Remove the stored instance; raises RuntimeError if it does not exist."""
        if not self._exist(instance_id):
            raise RuntimeError(f"{instance_id} does not exist")
        self.s3_storage_svc.delete(f"{self.base_dir}{instance_id}")

    def _write(self, instance: InstanceBase) -> None:
        # Shared serialization path for create/update. Pickle protocol 0
        # yields ASCII-only bytes that can be safely decoded into a str for
        # the text-based storage API.
        filename = f"{self.base_dir}{instance.get_instance_id()}"
        self.s3_storage_svc.write(filename, pickle.dumps(instance, 0).decode())

    def _exist(self, instance_id: str) -> bool:
        # Existence is delegated to the storage service's object check.
        return self.s3_storage_svc.file_exists(f"{self.base_dir}{instance_id}")
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,278
|
supasate/FBPCS
|
refs/heads/main
|
/tests/util/test_reflect.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fbpcs.util.reflect import get_class
from fbpcs.util.s3path import S3Path
# Dotted module path used to exercise dynamic class loading via get_class.
TEST_CLASS_PATH = "fbpcs.util.s3path.S3Path"
class TestReflect(unittest.TestCase):
    def test_get_class(self):
        """get_class resolves a dotted module path to the class object itself."""
        self.assertEqual(S3Path, get_class(TEST_CLASS_PATH))
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,279
|
supasate/FBPCS
|
refs/heads/main
|
/setup.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from setuptools import setup, find_packages
# Runtime dependencies, pinned to the versions the project is tested with.
install_requires = [
    "boto3==1.11.11",
    "dataclasses-json==0.5.2",
    "pyyaml==5.4.1",
    "tqdm==4.55.1",
]

# Reuse the README verbatim as the PyPI long description.
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="fbpcs",
    version="0.1.0",
    description="Facebook Private Computation Service",
    author="Facebook",
    author_email="researchtool-help@fb.com",
    url="https://github.com/facebookresearch/FBPCS",
    install_requires=install_requires,
    packages=find_packages(),
    long_description_content_type="text/markdown",
    long_description=long_description,
    python_requires=">=3.8",
)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,280
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/util/s3path.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import re
from typing import Tuple
class S3Path:
    """Parses a virtual-host-style S3 URL into region, bucket and key.

    Expected form:
        https://bucket-name.s3.Region.amazonaws.com/key-name
    """

    region: str
    bucket: str
    key: str

    def __init__(self, fileURL: str) -> None:
        self.region, self.bucket, self.key = self._get_region_bucket_key(fileURL)

    def __eq__(self, other: object) -> bool:
        # Defer to the other operand for non-S3Path comparisons instead of
        # raising AttributeError on a missing .region attribute.
        if not isinstance(other, S3Path):
            return NotImplemented
        return (
            self.region == other.region
            and self.bucket == other.bucket
            and self.key == other.key
        )

    def __hash__(self) -> int:
        # Keep S3Path usable as a dict key / set member: defining __eq__
        # alone would have implicitly made instances unhashable.
        return hash((self.region, self.bucket, self.key))

    # virtual host style url
    # https://bucket-name.s3.Region.amazonaws.com/key-name
    def _get_region_bucket_key(self, fileURL: str) -> Tuple[str, str, str]:
        """Split a virtual-host-style S3 URL into (region, bucket, key).

        Raises:
            ValueError: if ``fileURL`` does not match the expected shape.
        """
        # Raw string with escaped dots -- the previous pattern's bare '.'
        # matched any character. 'https?:/' (single slash) is deliberate:
        # the second slash of '://' lands in the bucket group and is
        # stripped below, preserving the original tolerance for both forms.
        match = re.search(
            r"^https?:/([^.]+)\.s3\.([^.]+)\.amazonaws\.com/(.*)$", fileURL
        )
        if not match:
            raise ValueError(f"Could not parse {fileURL} as an S3Path")
        bucket, region, key = (
            match.group(1).strip("/"),
            match.group(2),
            match.group(3).strip("/"),
        )
        return (region, bucket, key)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,281
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/storage.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import abc
from enum import Enum
class PathType(Enum):
    """Classifies a storage path as local-disk or S3."""

    Local = 1
    S3 = 2
class StorageService(abc.ABC):
    """Abstract interface for reading, writing and copying stored files."""

    @abc.abstractmethod
    def read(self, filename: str) -> str:
        """Return the full contents of ``filename`` as a string."""
        pass

    @abc.abstractmethod
    def write(self, filename: str, data: str) -> None:
        """Write ``data`` to ``filename``, replacing any existing content."""
        pass

    @abc.abstractmethod
    def copy(self, source: str, destination: str) -> None:
        """Copy ``source`` to ``destination``."""
        pass

    @abc.abstractmethod
    def file_exists(self, filename: str) -> bool:
        """Return True if ``filename`` exists in the backing store."""
        pass

    @staticmethod
    def path_type(filename: str) -> PathType:
        """Classify a path: any 'https:' URL is treated as S3, everything else as local.

        NOTE(review): this assumes callers only ever pass https URLs that
        point at S3 -- any other https URL would be misclassified.
        """
        if filename.startswith("https:"):
            return PathType.S3
        return PathType.Local

    @abc.abstractmethod
    def get_file_size(self, filename: str) -> int:
        """Return the size of ``filename`` in bytes."""
        pass
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,282
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/decorator/error_handler.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Callable
from botocore.exceptions import ClientError
from fbpcs.error.mapper.aws import map_aws_error
from fbpcs.error.pcs import PcsError
def error_handler(f: Callable) -> Callable:
def wrap(*args, **kwargs):
try:
return f(*args, **kwargs)
except ClientError as err:
raise map_aws_error(err)
except Exception as err:
raise PcsError(err)
return wrap
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,283
|
supasate/FBPCS
|
refs/heads/main
|
/tests/service/test_mpc.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import AsyncMock, MagicMock, patch
from fbpcs.entity.container_instance import ContainerInstance, ContainerInstanceStatus
from fbpcs.entity.mpc_instance import MPCInstance, MPCInstanceStatus, MPCRole
from fbpcs.service.mpc import MPCService
TEST_INSTANCE_ID = "123"
TEST_GAME_NAME = "lift"
TEST_MPC_ROLE = MPCRole.SERVER
TEST_NUM_WORKERS = 1
TEST_SERVER_IPS = ["192.0.2.0", "192.0.2.1"]
TEST_INPUT_ARGS = "test_input_file"
TEST_OUTPUT_ARGS = "test_output_file"
TEST_CONCURRENCY_ARGS = 1
TEST_INPUT_DIRECTORY = "TEST_INPUT_DIRECTORY/"
TEST_OUTPUT_DIRECTORY = "TEST_OUTPUT_DIRECTORY/"
TEST_TASK_DEFINITION = "test_task_definition"
INPUT_DIRECTORY = "input_directory"
OUTPUT_DIRECTORY = "output_directory"
GAME_ARGS = [
{
"input_filenames": TEST_INPUT_ARGS,
"input_directory": TEST_INPUT_DIRECTORY,
"output_filenames": TEST_OUTPUT_ARGS,
"output_directory": TEST_OUTPUT_DIRECTORY,
"concurrency": TEST_CONCURRENCY_ARGS,
}
]
class TestMPCService(unittest.TestCase):
def setUp(self):
cspatcher = patch("fbpcs.service.container_aws.AWSContainerService")
sspatcher = patch("fbpcs.service.storage_s3.S3StorageService")
irpatcher = patch(
"fbpcs.repository.mpc_instance_local.LocalMPCInstanceRepository"
)
gspatcher = patch("fbpcs.service.mpc_game.MPCGameService")
container_svc = cspatcher.start()
storage_svc = sspatcher.start()
instance_repository = irpatcher.start()
mpc_game_svc = gspatcher.start()
for patcher in (cspatcher, sspatcher, irpatcher, gspatcher):
self.addCleanup(patcher.stop)
self.mpc_service = MPCService(
container_svc,
storage_svc,
instance_repository,
"test_task_definition",
mpc_game_svc,
)
@staticmethod
def _get_sample_mpcinstance():
return MPCInstance(
instance_id=TEST_INSTANCE_ID,
game_name=TEST_GAME_NAME,
mpc_role=TEST_MPC_ROLE,
num_workers=TEST_NUM_WORKERS,
server_ips=TEST_SERVER_IPS,
status=MPCInstanceStatus.CREATED,
game_args=GAME_ARGS,
)
@staticmethod
def _get_sample_mpcinstance_with_game_args():
return MPCInstance(
instance_id=TEST_INSTANCE_ID,
game_name=TEST_GAME_NAME,
mpc_role=TEST_MPC_ROLE,
num_workers=TEST_NUM_WORKERS,
status=MPCInstanceStatus.CREATED,
server_ips=TEST_SERVER_IPS,
game_args=GAME_ARGS,
)
@staticmethod
def _get_sample_mpcinstance_client():
return MPCInstance(
instance_id=TEST_INSTANCE_ID,
game_name=TEST_GAME_NAME,
mpc_role=MPCRole.CLIENT,
num_workers=TEST_NUM_WORKERS,
server_ips=TEST_SERVER_IPS,
status=MPCInstanceStatus.CREATED,
game_args=GAME_ARGS,
)
async def test_spin_up_containers_one_docker_inconsistent_arguments(self):
with self.assertRaisesRegex(
ValueError,
"The number of containers is not consistent with the number of game argument dictionary.",
):
await self.mpc_service._spin_up_containers_onedocker(
game_name=TEST_GAME_NAME,
mpc_role=MPCRole.SERVER,
num_containers=TEST_NUM_WORKERS,
game_args=[],
)
with self.assertRaisesRegex(
ValueError,
"The number of containers is not consistent with number of ip addresses.",
):
await self.mpc_service._spin_up_containers_onedocker(
game_name=TEST_GAME_NAME,
mpc_role=MPCRole.CLIENT,
num_containers=TEST_NUM_WORKERS,
ip_addresses=TEST_SERVER_IPS,
)
def test_create_instance_with_game_args(self):
self.mpc_service.create_instance(
instance_id=TEST_INSTANCE_ID,
game_name=TEST_GAME_NAME,
mpc_role=TEST_MPC_ROLE,
num_workers=TEST_NUM_WORKERS,
server_ips=TEST_SERVER_IPS,
game_args=GAME_ARGS,
)
self.mpc_service.instance_repository.create.assert_called()
self.assertEqual(
self._get_sample_mpcinstance_with_game_args(),
self.mpc_service.instance_repository.create.call_args[0][0],
)
def test_create_instance(self):
self.mpc_service.create_instance(
instance_id=TEST_INSTANCE_ID,
game_name=TEST_GAME_NAME,
mpc_role=TEST_MPC_ROLE,
num_workers=TEST_NUM_WORKERS,
server_ips=TEST_SERVER_IPS,
game_args=GAME_ARGS,
)
# check that instance with correct instance_id was created
self.mpc_service.instance_repository.create.assert_called()
self.assertEquals(
self._get_sample_mpcinstance(),
self.mpc_service.instance_repository.create.call_args[0][0],
)
def _read_side_effect_start(self, instance_id: str):
"""mock MPCInstanceRepository.read for test_start"""
if instance_id == TEST_INSTANCE_ID:
return self._get_sample_mpcinstance()
else:
raise RuntimeError(f"{instance_id} does not exist")
def test_start_instance(self):
self.mpc_service.instance_repository.read = MagicMock(
side_effect=self._read_side_effect_start
)
created_instances = [
ContainerInstance(
"arn:aws:ecs:us-west-1:592513842793:task/57850450-7a81-43cc-8c73-2071c52e4a68", # noqa
"10.0.1.130",
ContainerInstanceStatus.STARTED,
)
]
self.mpc_service.container_svc.create_instances_async = AsyncMock(
return_value=created_instances
)
built_one_docker_args = ("private_lift/lift", "test one docker arguments")
self.mpc_service.mpc_game_svc.build_one_docker_args = MagicMock(
return_value=built_one_docker_args
)
# check that update is called with correct status
self.mpc_service.start_instance(TEST_INSTANCE_ID)
self.mpc_service.instance_repository.update.assert_called()
latest_update = self.mpc_service.instance_repository.update.call_args_list[-1]
updated_status = latest_update[0][0].status
self.assertEqual(updated_status, MPCInstanceStatus.STARTED)
def test_start_instance_missing_ips(self):
self.mpc_service.instance_repository.read = MagicMock(
return_value=self._get_sample_mpcinstance_client()
)
# Exception because role is client but server ips are not given
with self.assertRaises(ValueError):
self.mpc_service.start_instance(TEST_INSTANCE_ID)
def _read_side_effect_update(self, instance_id):
"""
mock MPCInstanceRepository.read for test_update,
with instance.containers is not None
"""
if instance_id == TEST_INSTANCE_ID:
mpc_instance = self._get_sample_mpcinstance()
else:
raise RuntimeError(f"{instance_id} does not exist")
mpc_instance.status = MPCInstanceStatus.STARTED
mpc_instance.containers = [
ContainerInstance(
"arn:aws:ecs:us-west-1:592513842793:task/57850450-7a81-43cc-8c73-2071c52e4a68", # noqa
"10.0.1.130",
ContainerInstanceStatus.STARTED,
)
]
return mpc_instance
def test_update_instance(self):
self.mpc_service.instance_repository.read = MagicMock(
side_effect=self._read_side_effect_update
)
container_instances = [
ContainerInstance(
"arn:aws:ecs:us-west-1:592513842793:task/cd34aed2-321f-49d1-8641-c54baff8b77b", # noqa
"10.0.1.130",
ContainerInstanceStatus.STARTED,
)
]
self.mpc_service.container_svc.get_instances = MagicMock(
return_value=container_instances
)
self.mpc_service.update_instance(TEST_INSTANCE_ID)
self.mpc_service.instance_repository.update.assert_called()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,284
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/entity/mpc_instance.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, List, Mapping, Optional
from dataclasses_json import dataclass_json
from fbpcs.entity.container_instance import ContainerInstance
from fbpcs.entity.instance_base import InstanceBase
class MPCRole(Enum):
SERVER = "SERVER"
CLIENT = "CLIENT"
class MPCInstanceStatus(Enum):
UNKNOWN = "UNKNOWN"
CREATED = "CREATED"
STARTED = "STARTED"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
@dataclass_json
@dataclass
class MPCInstance(InstanceBase):
instance_id: str
game_name: str
mpc_role: MPCRole
num_workers: int
server_ips: Optional[List[str]]
containers: List[ContainerInstance]
status: MPCInstanceStatus
game_args: Optional[List[Dict[str, Any]]]
arguments: Mapping[str, Any]
def __init__(
self,
instance_id: str,
game_name: str,
mpc_role: MPCRole,
num_workers: int,
ip_config_file: Optional[str] = None,
server_ips: Optional[List[str]] = None,
containers: Optional[List[ContainerInstance]] = None,
status: MPCInstanceStatus = MPCInstanceStatus.UNKNOWN,
game_args: Optional[List[Dict[str, Any]]] = None,
**arguments # pyre-ignore
) -> None:
self.instance_id = instance_id
self.game_name = game_name
self.mpc_role = mpc_role
self.num_workers = num_workers
self.ip_config_file = ip_config_file
self.server_ips = server_ips
self.containers = containers or []
self.status = status
self.game_args = game_args
self.arguments = arguments
def get_instance_id(self) -> str:
return self.instance_id
def __str__(self) -> str:
# pyre-ignore
return self.to_json()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,285
|
supasate/FBPCS
|
refs/heads/main
|
/tests/service/test_onedocker.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import AsyncMock, patch
from fbpcs.entity.container_instance import ContainerInstance, ContainerInstanceStatus
from fbpcs.service.onedocker import OneDockerService
class TestOneDockerService(unittest.TestCase):
@patch("fbpcs.service.container.ContainerService")
def setUp(self, MockContainerService):
container_svc = MockContainerService()
self.onedocker_svc = OneDockerService(container_svc)
def test_start_container(self):
mocked_container_info = ContainerInstance(
"arn:aws:ecs:region:account_id:task/container_id",
"192.0.2.0",
ContainerInstanceStatus.STARTED,
)
self.onedocker_svc.container_svc.create_instances_async = AsyncMock(
return_value=[mocked_container_info]
)
returned_container_info = self.onedocker_svc.start_container(
"task_def", "project/exe_name", "cmd_args"
)
self.assertEqual(returned_container_info, mocked_container_info)
def test_start_containers(self):
mocked_container_info = [
ContainerInstance(
"arn:aws:ecs:region:account_id:task/container_id_1",
"192.0.2.0",
ContainerInstanceStatus.STARTED,
),
ContainerInstance(
"arn:aws:ecs:region:account_id:task/container_id_2",
"192.0.2.1",
ContainerInstanceStatus.STARTED,
),
]
self.onedocker_svc.container_svc.create_instances_async = AsyncMock(
return_value=mocked_container_info
)
returned_container_info = self.onedocker_svc.start_containers(
"task_def", "project/exe_name", ["--k1=v1", "--k2=v2"]
)
self.assertEqual(returned_container_info, mocked_container_info)
def test_get_cmd(self):
package_name = "project/exe_name"
cmd_args = "--k1=v1 --k2=v2"
timeout = 3600
expected_cmd_without_timeout = "python3.8 -m one_docker_runner --package_name=project/exe_name --cmd='/root/one_docker/package/exe_name --k1=v1 --k2=v2'"
expected_cmd_with_timeout = expected_cmd_without_timeout + " --timeout=3600"
cmd_without_timeout = self.onedocker_svc._get_cmd(package_name, cmd_args)
cmd_with_timeout = self.onedocker_svc._get_cmd(package_name, cmd_args, timeout)
self.assertEqual(expected_cmd_without_timeout, cmd_without_timeout)
self.assertEqual(expected_cmd_with_timeout, cmd_with_timeout)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,286
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/util/typing.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import Type, TypeVar
T = TypeVar("T")
V = TypeVar("V")
# pyre-fixme[34]: `T` isn't present in the function's parameters.
def checked_cast(typ: Type[V], val: V) -> T:
if not isinstance(val, typ):
raise ValueError(f"Value was not of type {type!r}:\n{val!r}")
# pyre-fixme[7]: Expected `T` but got `V`.
return val
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,287
|
supasate/FBPCS
|
refs/heads/main
|
/tests/util/test_s3path.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fbpcs.util.s3path import S3Path
class TestS3Path(unittest.TestCase):
    """Unit tests for S3Path parsing of virtual-hosted-style S3 URLs.

    Each test checks that region, bucket, and key are extracted from a URL of
    the form https://<bucket>.s3.<region>.amazonaws.com/<key>.
    """

    def test_s3path_no_subfolder(self):
        """A URL with a bare key yields that key unchanged."""
        test_s3path = S3Path("https://bucket-name.s3.Region.amazonaws.com/key-name")
        self.assertEqual(test_s3path.region, "Region")
        self.assertEqual(test_s3path.bucket, "bucket-name")
        self.assertEqual(test_s3path.key, "key-name")

    # Fixed typo in method name: "subfoler" -> "subfolder".
    def test_s3path_with_subfolder(self):
        """The key keeps its subfolder prefix ("subfolder/key")."""
        test_s3path = S3Path(
            "https://bucket-name.s3.Region.amazonaws.com/subfolder/key"
        )
        self.assertEqual(test_s3path.region, "Region")
        self.assertEqual(test_s3path.bucket, "bucket-name")
        self.assertEqual(test_s3path.key, "subfolder/key")

    def test_s3path_invalid_fileURL(self):
        """A string that is not an S3 URL raises ValueError."""
        test_url = "an invalid fileURL"
        with self.assertRaises(ValueError):
            S3Path(test_url)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,288
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/repository/mpc_game_repository.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import abc
from fbpcs.entity.mpc_game_config import MPCGameConfig
class MPCGameRepository(abc.ABC):
    """Abstract lookup interface for MPC game configurations."""

    @abc.abstractmethod
    def get_game(self, name: str) -> MPCGameConfig:
        """Return the MPCGameConfig registered under ``name``."""
        ...
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,289
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/log.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import abc
from typing import Any, Dict
class LogService(abc.ABC):
    """Abstract interface for fetching log entries from a logging backend."""

    @abc.abstractmethod
    def fetch(self, log_path: str) -> Dict[str, Any]:
        """Return the log data found at ``log_path``."""
        ...
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,290
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/repository/mpc_instance_local.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import cast
from fbpcs.entity.mpc_instance import MPCInstance
from fbpcs.repository.instance_local import LocalInstanceRepository
from fbpcs.repository.mpc_instance import MPCInstanceRepository
class LocalMPCInstanceRepository(MPCInstanceRepository):
    """MPCInstance CRUD backed by a LocalInstanceRepository on local disk."""

    def __init__(self, base_dir: str) -> None:
        # All instances are persisted under base_dir.
        self.repo = LocalInstanceRepository(base_dir)

    def create(self, instance: MPCInstance) -> None:
        """Persist a new MPC instance."""
        self.repo.create(instance)

    def read(self, instance_id: str) -> MPCInstance:
        """Load the MPC instance stored under ``instance_id``."""
        stored = self.repo.read(instance_id)
        # The generic repository returns the base instance type; narrow it.
        return cast(MPCInstance, stored)

    def update(self, instance: MPCInstance) -> None:
        """Overwrite the stored copy of an existing instance."""
        self.repo.update(instance)

    def delete(self, instance_id: str) -> None:
        """Remove the stored instance with the given id."""
        self.repo.delete(instance_id)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,291
|
supasate/FBPCS
|
refs/heads/main
|
/tests/gateway/test_s3.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.gateway.s3 import S3Gateway
class TestS3Gateway(unittest.TestCase):
    """Unit tests for S3Gateway: each test swaps in a mocked boto3 client and
    verifies that the gateway method delegates to the matching client call.
    """

    TEST_LOCAL_FILE = "test-local-file"
    TEST_BUCKET = "test-bucket"
    TEST_FILE = "test-file"
    TEST_ACCESS_KEY_ID = "test-access-key-id"
    TEST_ACCESS_KEY_DATA = "test-access-key-data"
    REGION = "us-west-1"

    @patch("boto3.client")
    def test_create_bucket(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.create_bucket = MagicMock(return_value=None)
        gw.create_bucket(self.TEST_BUCKET)
        gw.client.create_bucket.assert_called()

    @patch("boto3.client")
    def test_delete_bucket(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.delete_bucket = MagicMock(return_value=None)
        gw.delete_bucket(self.TEST_BUCKET)
        gw.client.delete_bucket.assert_called()

    @patch("boto3.client")
    def test_put_object(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.put_object = MagicMock(return_value=None)
        gw.put_object(
            self.TEST_BUCKET, self.TEST_ACCESS_KEY_ID, self.TEST_ACCESS_KEY_DATA
        )
        gw.client.put_object.assert_called()

    # getsize is mocked because upload_file reads the local file size for
    # progress reporting; no real file exists in the test environment.
    @patch("os.path.getsize", return_value=100)
    @patch("boto3.client")
    def test_upload_file(self, BotoClient, mock_getsize):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.upload_file = MagicMock(return_value=None)
        gw.upload_file(self.TEST_LOCAL_FILE, self.TEST_BUCKET, self.TEST_FILE)
        gw.client.upload_file.assert_called()

    @patch("boto3.client")
    def test_download_file(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        # download_file queries head_object for the object size first.
        gw.client.head_object.return_value = {"ContentLength": 100}
        gw.client.download_file = MagicMock(return_value=None)
        gw.download_file(self.TEST_BUCKET, self.TEST_FILE, self.TEST_LOCAL_FILE)
        gw.client.download_file.assert_called()

    @patch("boto3.client")
    def test_delete_object(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.delete_object = MagicMock(return_value=None)
        gw.delete_object(self.TEST_BUCKET, self.TEST_FILE)
        gw.client.delete_object.assert_called()

    @patch("boto3.client")
    def test_copy(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.copy = MagicMock(return_value=None)
        gw.copy(
            self.TEST_BUCKET, self.TEST_FILE, self.TEST_BUCKET, f"{self.TEST_FILE}_COPY"
        )
        gw.client.copy.assert_called()

    @patch("boto3.client")
    def test_object_exists(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        # head_object succeeding (no exception) means the object exists.
        gw.client.head_object = MagicMock(return_value=None)
        self.assertTrue(gw.object_exists(self.TEST_BUCKET, self.TEST_ACCESS_KEY_ID))
        gw.client.head_object.assert_called()

    @patch("boto3.client")
    def test_object_not_exists(self, BotoClient):
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        # head_object raising is interpreted as "object missing".
        gw.client.head_object = MagicMock(side_effect=Exception)
        self.assertFalse(gw.object_exists(self.TEST_BUCKET, self.TEST_ACCESS_KEY_ID))
        gw.client.head_object.assert_called()

    @patch("boto3.client")
    def test_list_object2(self, BotoClient):
        test_page_content_key1 = "test-page-content-key1"
        test_page_content_key2 = "test-page-content-key2"
        client_return_response = [
            {
                "Contents": [
                    {"Key": test_page_content_key1},
                    {"Key": test_page_content_key2},
                ],
            }
        ]
        gw = S3Gateway(self.REGION)
        gw.client = BotoClient()
        gw.client.get_paginator("list_objects_v2").paginate = MagicMock(
            return_value=client_return_response
        )
        key_list = gw.list_object2(self.TEST_BUCKET, self.TEST_ACCESS_KEY_ID)
        expected_key_list = [
            test_page_content_key1,
            test_page_content_key2,
        ]
        self.assertEqual(key_list, expected_key_list)
        # Fixed paginator name: was "list_object_v2" (missing "s"), which only
        # passed because MagicMock ignores call arguments; now consistent with
        # the "list_objects_v2" paginator set up above.
        gw.client.get_paginator("list_objects_v2").paginate.assert_called()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,292
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/mpc.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import asyncio
import logging
from typing import Any, Dict, List, Optional
from fbpcs.entity.container_instance import ContainerInstance, ContainerInstanceStatus
from fbpcs.entity.mpc_instance import MPCInstance, MPCInstanceStatus, MPCRole
from fbpcs.repository.mpc_instance import MPCInstanceRepository
from fbpcs.service.container import ContainerService
from fbpcs.service.mpc_game import MPCGameService
from fbpcs.service.onedocker import OneDockerService
from fbpcs.service.storage import StorageService
from fbpcs.util.typing import checked_cast
class MPCService:
    """MPCService is responsible for distributing a larger MPC game to multiple
    MPC workers
    """

    def __init__(
        self,
        container_svc: ContainerService,
        storage_svc: StorageService,
        instance_repository: MPCInstanceRepository,
        task_definition: str,
        mpc_game_svc: MPCGameService,
    ) -> None:
        """Constructor of MPCService
        Keyword arguments:
        container_svc -- service to spawn container instances
        storage_svc -- service to read/write input/output files
        instance_repository -- repository to CRUD MPCInstance
        task_definition -- containers task definition
        mpc_game_svc -- service to generate package name and game arguments.
        """
        # Fail fast if any required collaborator is missing; task_definition is
        # intentionally not checked here.
        if (
            container_svc is None
            or storage_svc is None
            or instance_repository is None
            or mpc_game_svc is None
        ):
            raise ValueError(
                f"Dependency is missing. container_svc={container_svc}, mpc_game_svc={mpc_game_svc}, "
                f"storage_svc={storage_svc}, instance_repository={instance_repository}"
            )
        self.container_svc = container_svc
        self.storage_svc = storage_svc
        self.instance_repository = instance_repository
        self.task_definition = task_definition
        self.mpc_game_svc: MPCGameService = mpc_game_svc
        self.logger: logging.Logger = logging.getLogger(__name__)
        # Containers are launched via OneDocker on top of the container service.
        self.onedocker_svc = OneDockerService(self.container_svc)

    """
    The game_args should be consistent with the game_config, which should be
    defined in caller's game repository.
    For example,
    If the game config looks like this:
    game_config = {
    "game": {
    "one_docker_package_name": "package_name",
    "arguments": [
    {"name": "input_filenames", "required": True},
    {"name": "input_directory", "required": True},
    {"name": "output_filenames", "required": True},
    {"name": "output_directory", "required": True},
    {"name": "concurrency", "required": True},
    ],
    },
    The game args should look like this:
    [
    # 1st container
    {
    "input_filenames": input_path_1,
    "input_directory": input_directory,
    "output_filenames": output_path_1,
    "output_directory": output_directory,
    "concurrency": concurrency,
    },
    # 2nd container
    {
    "input_filenames": input_path_2,
    "input_directory": input_directory,
    "output_filenames": output_path_2,
    "output_directory": output_directory,
    "concurrency": concurrency,
    },
    ]
    """

    def create_instance(
        self,
        instance_id: str,
        game_name: str,
        mpc_role: MPCRole,
        num_workers: int,
        server_ips: Optional[List[str]] = None,
        game_args: Optional[List[Dict[str, Any]]] = None,
    ) -> MPCInstance:
        """Create and persist a new MPCInstance in CREATED state.

        One game_args dict is expected per worker (see class-level note above).
        """
        self.logger.info(f"Creating MPC instance: {instance_id}")
        instance = MPCInstance(
            instance_id=instance_id,
            game_name=game_name,
            mpc_role=mpc_role,
            num_workers=num_workers,
            server_ips=server_ips,
            status=MPCInstanceStatus.CREATED,
            game_args=game_args,
        )
        self.instance_repository.create(instance)
        return instance

    def start_instance(
        self,
        instance_id: str,
        output_files: Optional[List[str]] = None,
        server_ips: Optional[List[str]] = None,
        timeout: Optional[int] = None,
    ) -> MPCInstance:
        """Synchronous wrapper around start_instance_async."""
        return asyncio.run(
            self.start_instance_async(instance_id, output_files, server_ips, timeout)
        )

    async def start_instance_async(
        self,
        instance_id: str,
        output_files: Optional[List[str]] = None,
        server_ips: Optional[List[str]] = None,
        timeout: Optional[int] = None,
    ) -> MPCInstance:
        """To run a distributed MPC game
        Keyword arguments:
        instance_id -- unique id to identify the MPC instance
        """
        instance = self.instance_repository.read(instance_id)
        self.logger.info(f"Starting MPC instance: {instance_id}")
        # A CLIENT must know which servers to connect to before starting.
        if instance.mpc_role is MPCRole.CLIENT and not server_ips:
            raise ValueError("Missing server_ips")
        # spin up containers
        self.logger.info("Spinning up container instances")
        game_args = instance.game_args
        instance.containers = await self._spin_up_containers_onedocker(
            instance.game_name,
            instance.mpc_role,
            instance.num_workers,
            game_args,
            server_ips,
            timeout,
        )
        # Warn (don't fail) when fewer containers came up than requested.
        if len(instance.containers) != instance.num_workers:
            self.logger.warning(
                f"Instance {instance_id} has {len(instance.containers)} containers spun up, but expecting {instance.num_workers} containers!"
            )
        # SERVER side publishes its containers' IPs so clients can connect.
        if instance.mpc_role is MPCRole.SERVER:
            ip_addresses = [
                checked_cast(str, instance.ip_address)
                for instance in instance.containers
            ]
            instance.server_ips = ip_addresses
        instance.status = MPCInstanceStatus.STARTED
        self.instance_repository.update(instance)
        return instance

    def get_instance(self, instance_id: str) -> MPCInstance:
        """Read an MPCInstance from the repository without refreshing it."""
        self.logger.info(f"Getting MPC instance: {instance_id}")
        return self.instance_repository.read(instance_id)

    def update_instance(self, instance_id: str) -> MPCInstance:
        """Refresh container states and recompute the instance status.

        Terminal instances (COMPLETED/FAILED) are returned unchanged.
        """
        instance = self.instance_repository.read(instance_id)
        self.logger.info(f"Updating MPC instance: {instance_id}")
        if instance.status in [MPCInstanceStatus.COMPLETED, MPCInstanceStatus.FAILED]:
            return instance
        # skip if no containers registered under instance yet
        if instance.containers:
            instance.containers = self._update_container_instances(instance.containers)
            if len(instance.containers) != instance.num_workers:
                self.logger.warning(
                    f"Instance {instance_id} has {len(instance.containers)} containers after update, but expecting {instance.num_workers} containers!"
                )
            instance.status = self._get_instance_status(instance)
            self.instance_repository.update(instance)
        return instance

    async def _spin_up_containers_onedocker(
        self,
        game_name: str,
        mpc_role: MPCRole,
        num_containers: int,
        game_args: Optional[List[Dict[str, Any]]] = None,
        ip_addresses: Optional[List[str]] = None,
        timeout: Optional[int] = None,
    ) -> List[ContainerInstance]:
        """Build per-container OneDocker commands and launch them.

        game_args / ip_addresses, when given, must have exactly one entry per
        container.
        """
        if game_args is not None and len(game_args) != num_containers:
            raise ValueError(
                "The number of containers is not consistent with the number of game argument dictionary."
            )
        if ip_addresses is not None and len(ip_addresses) != num_containers:
            raise ValueError(
                "The number of containers is not consistent with number of ip addresses."
            )
        cmd_tuple_list = []
        for i in range(num_containers):
            game_arg = game_args[i] if game_args is not None else {}
            server_ip = ip_addresses[i] if ip_addresses is not None else None
            # Each tuple is (package_name, cmd_args) for one container.
            cmd_tuple_list.append(
                self.mpc_game_svc.build_one_docker_args(
                    game_name=game_name,
                    mpc_role=mpc_role,
                    server_ip=server_ip,
                    **game_arg,
                )
            )
        cmd_args_list = [cmd_args for (package_name, cmd_args) in cmd_tuple_list]
        # All containers run the same package; take the name from the first tuple.
        return await self.onedocker_svc.start_containers_async(
            self.task_definition, cmd_tuple_list[0][0], cmd_args_list, timeout
        )

    def _update_container_instances(
        self, containers: List[ContainerInstance]
    ) -> List[ContainerInstance]:
        # Re-fetch current state for every container by id.
        ids = [container.instance_id for container in containers]
        return self.container_svc.get_instances(ids)

    def _get_instance_status(self, instance: MPCInstance) -> MPCInstanceStatus:
        """Aggregate container statuses: any FAILED/UNKNOWN wins immediately;
        any STARTED downgrades from COMPLETED; all complete -> COMPLETED.
        """
        status = MPCInstanceStatus.COMPLETED
        for container in instance.containers:
            if container.status == ContainerInstanceStatus.FAILED:
                return MPCInstanceStatus.FAILED
            if container.status == ContainerInstanceStatus.UNKNOWN:
                return MPCInstanceStatus.UNKNOWN
            if container.status == ContainerInstanceStatus.STARTED:
                status = MPCInstanceStatus.STARTED
        return status
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,293
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/util/reflect.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from importlib import import_module
from typing import Any
# pyre-ignore
def get_class(class_path: str) -> Any:
    """Resolve a dotted ``package.module.Name`` path to the named attribute.

    Everything before the last dot is treated as the module to import; the
    final segment is looked up on that module.

    Raises:
        ImportError: when the module portion cannot be imported.
        AttributeError: when the module lacks the named attribute.
    """
    module_path, _, attr_name = class_path.rpartition(".")
    module = import_module(module_path)
    return getattr(module, attr_name)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,294
|
supasate/FBPCS
|
refs/heads/main
|
/tests/service/test_storage.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fbpcs.service.storage import PathType, StorageService
class TestStorageService(unittest.TestCase):
    """Unit tests for StorageService.path_type path classification."""

    def test_path_type_s3(self):
        # A virtual-hosted-style S3 URL should be classified as S3.
        s3_url = "https://bucket-name.s3.Region.amazonaws.com/key-name"
        self.assertEqual(StorageService.path_type(s3_url), PathType.S3)

    def test_path_type_local(self):
        # A plain filesystem path should be classified as Local.
        self.assertEqual(StorageService.path_type("/usr/file"), PathType.Local)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,295
|
supasate/FBPCS
|
refs/heads/main
|
/onedocker/onedocker_runner.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
CLI for running an executable in one docker
Usage:
onedocker-runner <package_name> --cmd=<cmd> [options]
Options:
-h --help Show this help
--repository_path=<repository_path> The folder repository that the executables are to downloaded from
--exe_path=<exe_path> The folder that the executables are located at
--timeout=<timeout> Set timeout (in sec) to task to avoid endless running
--log_path=<path> Override the default path where logs are saved
--verbose Set logging level to DEBUG
"""
import logging
import os
import subprocess
import sys
from pathlib import Path
from typing import Tuple, Any, Optional
import psutil
import schema
from docopt import docopt
from env import ONEDOCKER_EXE_PATH, ONEDOCKER_REPOSITORY_PATH
from fbpcs.service.storage_s3 import S3StorageService
from fbpcs.util.s3path import S3Path
from util import run_cmd
# the folder on s3 that the executables are to downloaded from
DEFAULT_REPOSITORY_PATH = "https://one-docker-repository.s3.us-west-1.amazonaws.com/"
# the folder in the docker image that is going to host the executables
DEFAULT_EXE_FOLDER = "/root/one_docker/package/"
def run(
    repository_path: str,
    exe_path: str,
    package_name: str,
    cmd: str,
    logger: logging.Logger,
    timeout: int,
) -> None:
    """Fetch (if remote), mark executable, and run the packaged binary.

    Exits the process with the subprocess's return code; never returns
    normally.

    Args:
        repository_path: S3 repository URL, or the literal "local" (any case)
            to skip the download.
        exe_path: local folder containing the executables.
        package_name: "<team>/<exe_name>" package identifier.
        cmd: command line to execute.
        logger: logger for progress/net-usage messages.
        timeout: max seconds to allow the command to run.
    """
    # download executable from s3
    if repository_path.upper() != "LOCAL":
        logger.info("Downloading executables ...")
        _download_executables(repository_path, package_name)
    else:
        logger.info("Local repository, skip download ...")

    # grant execute permission to the downloaded executable file
    _, exe_name = _parse_package_name(package_name)
    # Fix: use an argv list with shell=False so exe_path/exe_name can never be
    # interpreted by a shell (command-injection hardening vs. the previous
    # f-string + shell=True form).
    subprocess.run(["chmod", "+x", f"{exe_path}/{exe_name}"])

    # TODO update this line after proper change in fbcode/measurement/private_measurement/pcs/oss/fbpcs/service/onedocker.py to take
    # out the hard coded exe_path in cmd string
    if repository_path.upper() == "LOCAL":
        cmd = exe_path + cmd

    # run execution cmd, recording network usage around it
    logger.info(f"Running cmd: {cmd} ...")
    net_start: Any = psutil.net_io_counters()
    return_code = run_cmd(cmd, timeout)
    if return_code != 0:
        logger.info(f"Subprocess returned non-zero return code: {return_code}")
    net_end: Any = psutil.net_io_counters()
    logger.info(
        f"Net usage: {net_end.bytes_sent - net_start.bytes_sent} bytes sent, {net_end.bytes_recv - net_start.bytes_recv} bytes received"
    )
    sys.exit(return_code)
def _download_executables(
    repository_path: str,
    package_name: str,
) -> None:
    """Copy the package's executable from the S3 repository into DEFAULT_EXE_FOLDER."""
    region = S3Path(repository_path).region
    _, exe_name = _parse_package_name(package_name)
    local_dest = DEFAULT_EXE_FOLDER + exe_name
    remote_src = repository_path + package_name
    S3StorageService(region).copy(remote_src, local_dest)
def _parse_package_name(package_name: str) -> Tuple[str, str]:
return package_name.split("/")[0], package_name.split("/")[1]
def _read_config(
logger: logging.Logger,
config_name: str,
argument: Optional[str],
env_var: str,
default_val: str,
):
if argument:
logger.info(f"Read {config_name} from program arguments...")
return argument
if os.getenv(env_var):
logger.info(f"Read {config_name} from environment variables...")
return os.getenv(env_var)
logger.info(f"Read {config_name} from default value...")
return default_val
def main():
    """CLI entry point: validate arguments, configure logging, and run the package."""
    validator = schema.Schema(
        {
            "<package_name>": str,
            "--cmd": schema.Or(None, str),
            "--repository_path": schema.Or(None, schema.And(str, len)),
            "--exe_path": schema.Or(None, schema.And(str, len)),
            "--timeout": schema.Or(None, schema.Use(int)),
            "--log_path": schema.Or(None, schema.Use(Path)),
            "--verbose": bool,
            "--help": bool,
        }
    )
    args = validator.validate(docopt(__doc__))

    # Configure logging before _read_config emits its info messages.
    logging.basicConfig(
        filename=args["--log_path"],
        level=logging.DEBUG if args["--verbose"] else logging.INFO,
    )
    logger = logging.getLogger(__name__)

    # timeout could be None if the caller did not provide the value
    timeout = args["--timeout"]
    repository_path = _read_config(
        logger,
        "repository_path",
        args["--repository_path"],
        ONEDOCKER_REPOSITORY_PATH,
        DEFAULT_REPOSITORY_PATH,
    )
    exe_path = _read_config(
        logger,
        "exe_path",
        args["--exe_path"],
        ONEDOCKER_EXE_PATH,
        DEFAULT_EXE_FOLDER,
    )

    logger.info("Starting program....")
    try:
        run(
            repository_path=repository_path,
            exe_path=exe_path,
            package_name=args["<package_name>"],
            cmd=args["--cmd"],
            logger=logger,
            timeout=timeout,
        )
    except subprocess.TimeoutExpired:
        logger.error(f"{timeout} seconds have passed. Now exiting the program....")
        sys.exit(1)
    except InterruptedError:
        logger.error("Receive abort command from user, Now exiting the program....")
        sys.exit(1)
# Script entry point: invoke the CLI only when executed directly.
if __name__ == "__main__":
    main()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,296
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/gateway/s3.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import os
from typing import Any, Dict, List, Optional
import boto3
from fbpcs.decorator.error_handler import error_handler
from tqdm.auto import tqdm
class S3Gateway:
    """Thin wrapper around the boto3 S3 client.

    Every public operation is decorated with @error_handler, which maps raw
    AWS client errors to the project's exception types. Uploads and downloads
    report progress via the nested ProgressPercentage callback.
    """

    def __init__(
        self,
        region: str = "us-west-1",
        access_key_id: Optional[str] = None,
        access_key_data: Optional[str] = None,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Build a boto3 S3 client for `region`, optionally with explicit credentials.

        Explicit access_key_id/access_key_data override any same-named entries
        in `config` before the client is created.
        """
        self.region = region
        config = config or {}
        if access_key_id:
            config["aws_access_key_id"] = access_key_id
        if access_key_data:
            config["aws_secret_access_key"] = access_key_data
        # pyre-ignore
        self.client = boto3.client("s3", region_name=self.region, **config)

    @error_handler
    def create_bucket(self, bucket: str, region: Optional[str] = None) -> None:
        """Create `bucket` in `region` (defaults to the gateway's region).

        NOTE(review): AWS rejects an explicit LocationConstraint of
        "us-east-1" — confirm callers never target that region.
        """
        region = region if region is not None else self.region
        self.client.create_bucket(
            Bucket=bucket, CreateBucketConfiguration={"LocationConstraint": region}
        )

    @error_handler
    def delete_bucket(self, bucket: str) -> None:
        """Delete `bucket` (must already be empty per S3 semantics)."""
        self.client.delete_bucket(Bucket=bucket)

    @error_handler
    def upload_file(self, file_name: str, bucket: str, key: str) -> None:
        """Upload the local file `file_name` to s3://bucket/key with a progress bar."""
        file_size = os.path.getsize(file_name)
        self.client.upload_file(
            file_name,
            bucket,
            key,
            Callback=self.ProgressPercentage(file_name, file_size),
        )

    @error_handler
    def download_file(self, bucket: str, key: str, file_name: str) -> None:
        """Download s3://bucket/key to the local file `file_name` with a progress bar."""
        # Size is fetched up front so the progress bar has a known total.
        file_size = self.get_object_size(bucket, key)
        self.client.download_file(
            bucket,
            key,
            file_name,
            Callback=self.ProgressPercentage(file_name, file_size),
        )

    @error_handler
    def put_object(self, bucket: str, key: str, data: str) -> None:
        """Write the string `data` (UTF-8 encoded) as the object s3://bucket/key."""
        self.client.put_object(Bucket=bucket, Key=key, Body=data.encode())

    @error_handler
    def get_object(self, bucket: str, key: str) -> str:
        """Read the object s3://bucket/key and decode its body as text."""
        res = self.client.get_object(Bucket=bucket, Key=key)
        return res["Body"].read().decode()

    @error_handler
    def get_object_size(self, bucket: str, key: str) -> int:
        """Return the object's size in bytes via a HEAD request (no body transfer)."""
        return self.client.head_object(Bucket=bucket, Key=key)["ContentLength"]

    @error_handler
    def get_object_info(self, bucket: str, key: str) -> Dict[str, Any]:
        """Return the raw GetObject response dict (metadata plus streaming body)."""
        return self.client.get_object(Bucket=bucket, Key=key)

    @error_handler
    def list_object2(self, bucket: str, key: str) -> List[str]:
        """List every object key under the prefix `key` in `bucket`, across all pages.

        Fix: an empty result page omits the "Contents" field entirely, so the
        previous page["Contents"] indexing raised KeyError when the prefix
        matched nothing; .get() with a default handles that case.
        """
        paginator = self.client.get_paginator("list_objects_v2")
        pages = paginator.paginate(Bucket=bucket, Prefix=key)
        key_list = []
        for page in pages:
            for content in page.get("Contents", []):
                key_list.append(content["Key"])
        return key_list

    @error_handler
    def delete_object(self, bucket: str, key: str) -> None:
        """Delete the object s3://bucket/key."""
        self.client.delete_object(Bucket=bucket, Key=key)

    @error_handler
    def object_exists(self, bucket: str, key: str) -> bool:
        """Return True when a HEAD request for s3://bucket/key succeeds.

        Any failure (including access errors, not just 404) reads as
        "does not exist" — deliberate best-effort semantics.
        """
        try:
            # Result intentionally discarded
            _ = self.client.head_object(Bucket=bucket, Key=key)
            return True
        except Exception:
            return False

    @error_handler
    def copy(
        self, source_bucket: str, source_key: str, dest_bucket: str, dest_key: str
    ) -> None:
        """Server-side copy of one object between S3 locations."""
        source = {"Bucket": source_bucket, "Key": source_key}
        self.client.copy(source, dest_bucket, dest_key)

    class ProgressPercentage(object):
        """tqdm-backed transfer-progress callback for boto3 uploads/downloads."""

        def __init__(self, file_name: str, file_size: int) -> None:
            self._progressbar = tqdm(total=file_size, desc=file_name)

        def __call__(self, bytes_amount: int) -> None:
            # boto3 invokes this with the byte count of each completed chunk.
            self._progressbar.update(bytes_amount)

        def __del__(self) -> None:
            self._progressbar.close()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,297
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/entity/container_instance.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from dataclasses_json import dataclass_json
class ContainerInstanceStatus(Enum):
    """Lifecycle status of a container instance.

    String-valued so the status serializes cleanly via dataclasses_json.
    """

    UNKNOWN = "UNKNOWN"  # state could not be determined
    STARTED = "STARTED"  # container is running
    COMPLETED = "COMPLETED"  # container finished without failure
    FAILED = "FAILED"  # container finished in a failed state
@dataclass_json
@dataclass
class ContainerInstance:
    """A single container run, keyed by its instance id.

    dataclass_json supplies to_json/from_json for persistence.
    """

    instance_id: str
    # IP address of the container; None when not (yet) known.
    ip_address: Optional[str] = None
    # Defaults to UNKNOWN until a real status is observed.
    status: ContainerInstanceStatus = ContainerInstanceStatus.UNKNOWN
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,298
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/util/yaml.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from pathlib import Path
from typing import Any, Dict
import yaml
def load(file_path: Path) -> Dict[str, Any]:
    """Read the YAML file at `file_path` and parse it with the safe loader."""
    with open(file_path) as fh:
        return yaml.safe_load(fh)
# pyre-ignore
def dump(data: Any, file_path: Path) -> None:
    """Serialize *data* as YAML into *file_path*, overwriting any existing file.

    data -- any object PyYAML can represent.
    file_path -- destination path; opened in text write mode.
    """
    with open(file_path, "w") as f:
        # The original `return`ed yaml.dump's result; with a stream argument
        # that is always None, so the bare call matches the `-> None` contract.
        yaml.dump(data, f)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,299
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/onedocker.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import asyncio
import logging
from typing import List, Optional
from fbpcs.entity.container_instance import ContainerInstance
from fbpcs.service.container import ContainerService
# Command template for launching a package via the OneDocker runner.
# `{0}` is the package name; note the opening single quote after --cmd= is
# deliberately unbalanced here — _get_cmd appends the executable name and
# arguments and closes the quote.
ONE_DOCKER_CMD_PREFIX = (
    # patternlint-disable-next-line f-string-may-be-missing-leading-f
    "python3.8 -m one_docker_runner --package_name={0} --cmd='/root/one_docker/package/"
)
class OneDockerService:
    """OneDockerService is responsible for executing executable(s) in a Fargate container"""

    def __init__(self, container_svc: ContainerService) -> None:
        """Constructor of OneDockerService
        container_svc -- service to spawn container instances
        TODO: log_svc -- service to read cloudwatch logs
        """
        if container_svc is None:
            raise ValueError(f"Dependency is missing. container_svc={container_svc}, ")
        self.container_svc = container_svc
        self.logger: logging.Logger = logging.getLogger(__name__)

    def start_container(
        self,
        container_definition: str,
        package_name: str,
        cmd_args: str,
        timeout: Optional[int] = None,
    ) -> ContainerInstance:
        """Synchronously start one container running `package_name` with `cmd_args`.

        Thin wrapper over start_containers_async with a single-element command
        list; returns the one resulting container instance.
        """
        # TODO: ContainerInstance mapper
        return asyncio.run(
            self.start_containers_async(
                container_definition, package_name, [cmd_args], timeout
            )
        )[0]

    def start_containers(
        self,
        container_definition: str,
        package_name: str,
        cmd_args_list: List[str],
        timeout: Optional[int] = None,
    ) -> List[ContainerInstance]:
        """Synchronously start one container per entry in `cmd_args_list`."""
        return asyncio.run(
            self.start_containers_async(
                container_definition, package_name, cmd_args_list, timeout
            )
        )

    async def start_containers_async(
        self,
        container_definition: str,
        package_name: str,
        cmd_args_list: List[str],
        timeout: Optional[int] = None,
    ) -> List[ContainerInstance]:
        """Asynchronously spin up one container per element in input command list."""
        cmds = [
            self._get_cmd(package_name, cmd_args, timeout) for cmd_args in cmd_args_list
        ]
        self.logger.info("Spinning up container instances")
        container_ids = await self.container_svc.create_instances_async(
            container_definition, cmds
        )
        return container_ids

    def _get_exe_name(self, package_name: str) -> str:
        """Extract the executable name from a '<repo>/<executable>' package name."""
        return package_name.split("/")[1]

    def _get_cmd(
        self, package_name: str, cmd_args: str, timeout: Optional[int] = None
    ) -> str:
        """Build the full shell command executed by the OneDocker runner.

        --timeout is appended only when `timeout` is not None: an f-string
        renders None as the string "None", and the schema module would treat
        the None in `--timeout=None` as a string and raise an error.
        """
        cmd_timeout = f" --timeout={timeout}" if timeout is not None else ""
        # ONE_DOCKER_CMD_PREFIX consumes only {0}; the previous code passed
        # `timeout` as a second, unused format() argument.
        prefix = ONE_DOCKER_CMD_PREFIX.format(package_name)
        return f"{prefix}{self._get_exe_name(package_name)} {cmd_args}'{cmd_timeout}"
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,300
|
supasate/FBPCS
|
refs/heads/main
|
/tests/decorator/test_error_handler.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from botocore.exceptions import ClientError
from fbpcs.decorator.error_handler import error_handler
from fbpcs.error.pcs import PcsError
from fbpcs.error.throttling import ThrottlingError
class TestErrorHandler(unittest.TestCase):
    """Unit tests for the @error_handler decorator."""

    def test_pcs_error(self):
        # A generic (non-AWS) exception must be mapped to PcsError.
        @error_handler
        def foo():
            raise ValueError("just a test")

        self.assertRaises(PcsError, foo)

    def test_throttling_error(self):
        # A botocore ClientError carrying a ThrottlingException code must be
        # mapped to the more specific ThrottlingError.
        @error_handler
        def foo():
            err = ClientError(
                {
                    "Error": {
                        "Code": "ThrottlingException",
                        "Message": "test",
                    },
                },
                "test",
            )
            raise err

        self.assertRaises(ThrottlingError, foo)

    def test_wrapped_function_args(self):
        # The decorator must forward positional arguments to the wrapped
        # function. (Previously this test declared foo(**kwargs) while the
        # kwargs test declared foo(*args) — the signatures were swapped.)
        @error_handler
        def foo(*args):
            raise ValueError("just a test")

        self.assertRaises(PcsError, foo, "error1", "error2")

    def test_wrapped_function_kwargs(self):
        # The decorator must forward keyword arguments to the wrapped function.
        @error_handler
        def foo(**kwargs):
            raise ValueError("just a test f")

        self.assertRaises(
            PcsError, foo, error_type1="error_msg1", error_type2="error_msg2"
        )
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,301
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/gateway/ecs.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import Any, Dict, List, Optional
import boto3
from fbpcs.decorator.error_handler import error_handler
from fbpcs.entity.cluster_instance import Cluster
from fbpcs.entity.container_instance import ContainerInstance
from fbpcs.mapper.aws import (
map_ecstask_to_containerinstance,
map_esccluster_to_clusterinstance,
)
class ECSGateway:
    """Gateway to Amazon ECS.

    Wraps a boto3 "ecs" client and converts raw AWS responses into fbpcs
    entity objects (ContainerInstance, Cluster) via the aws mappers.
    """

    def __init__(
        self,
        region: str,
        access_key_id: Optional[str],
        access_key_data: Optional[str],
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Create an ECS client for `region`.

        access_key_id / access_key_data -- optional explicit AWS credentials;
            when None, boto3 falls back to its default credential chain.
        config -- extra keyword arguments forwarded to boto3.client().
        """
        self.region = region
        # Copy the caller's dict: credential keys are inserted below, and the
        # previous code mutated the passed-in dictionary in place, surprising
        # callers that reuse their config object.
        config = dict(config) if config else {}
        if access_key_id is not None:
            config["aws_access_key_id"] = access_key_id
        if access_key_data is not None:
            config["aws_secret_access_key"] = access_key_data
        # pyre-ignore
        self.client = boto3.client("ecs", region_name=self.region, **config)

    @error_handler
    def run_task(
        self, task_definition: str, container: str, cmd: str, cluster: str, subnet: str
    ) -> ContainerInstance:
        """Launch one task running `cmd` in `container` on `cluster`."""
        response = self.client.run_task(
            taskDefinition=task_definition,
            cluster=cluster,
            networkConfiguration={
                "awsvpcConfiguration": {
                    "subnets": [subnet],
                    "assignPublicIp": "ENABLED",
                }
            },
            overrides={"containerOverrides": [{"name": container, "command": [cmd]}]},
        )
        return map_ecstask_to_containerinstance(response["tasks"][0])

    @error_handler
    def describe_tasks(self, cluster: str, tasks: List[str]) -> List[ContainerInstance]:
        """Fetch the current state of each task ARN in `tasks`."""
        response = self.client.describe_tasks(cluster=cluster, tasks=tasks)
        return [map_ecstask_to_containerinstance(task) for task in response["tasks"]]

    @error_handler
    def describe_task(self, cluster: str, task: str) -> ContainerInstance:
        """Fetch the current state of a single task."""
        return self.describe_tasks(cluster, [task])[0]

    @error_handler
    def list_tasks(self, cluster: str) -> List[str]:
        """Return the ARNs of the tasks currently in `cluster`."""
        return self.client.list_tasks(cluster=cluster)["taskArns"]

    @error_handler
    def stop_task(self, cluster: str, task_id: str) -> Dict[str, Any]:
        """Stop a running task; returns the raw AWS response."""
        return self.client.stop_task(
            cluster=cluster,
            task=task_id,
        )

    @error_handler
    def describe_clusters(self, clusters: List[str]) -> List[Cluster]:
        """Fetch (including tags) each cluster named in `clusters`."""
        response = self.client.describe_clusters(clusters=clusters, include=["TAGS"])
        return [
            map_esccluster_to_clusterinstance(cluster)
            for cluster in response["clusters"]
        ]

    @error_handler
    def describe_cluster(self, cluster: str) -> Cluster:
        """Fetch a single cluster by name/ARN."""
        return self.describe_clusters([cluster])[0]

    @error_handler
    def list_clusters(self) -> List[str]:
        """Return the ARNs of all clusters in this region."""
        return self.client.list_clusters()["clusterArns"]
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.