index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
3,898
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/Util.py
|
import json
import requests
from bs4 import BeautifulSoup
class Util(object):
    """Shared scraping/HTTP helpers used by the crawler utilities."""

    @staticmethod
    def get_value(element):
        """Return the 'value' attribute of a BeautifulSoup element (or mapping)."""
        return element["value"]

    @staticmethod
    def get_selected(element):
        """Return the value of the selected <option> under *element*.

        Falls back to the first <option> when none is selected; returns ''
        when no option exists or the option's value is empty.
        """
        results = element.select('option[selected="selected"]')
        if results:  # a non-empty list is truthy; the len() check was redundant
            return results[0]['value'] or ''
        option = element.find("option")
        if option:
            return option['value'] or ''
        return ''

    @staticmethod
    def getsoup(response):
        """Decode *response* as UTF-8 and parse it into BeautifulSoup (lxml)."""
        response.encoding = 'utf-8'
        return BeautifulSoup(response.text, features="lxml")

    @staticmethod
    def finda(element):
        """Return the stripped text of the first <a> tag under *element*."""
        return element.find("a").text.strip()

    @staticmethod
    def findspan(element):
        """Return the stripped text of the first <span> tag under *element*."""
        return element.find("span").text.strip()

    @staticmethod
    def isNew(data, bjdomain, adminid):
        """Ask the backend whether order data['factorynumber'] already exists."""
        res = requests.post(bjdomain + "/Api/Climborder/checkexist",
                            data={"orderno": data['factorynumber'], 'adminid': adminid})
        return Util.checkBjRes(res)

    @staticmethod
    def getAccount(bjdomain):
        """Fetch the first account bound to factoryid 10002, or None on any failure."""
        try:
            res = requests.post(bjdomain + "/Api/Climborder/newgetaccount",
                                data={"mobile": "18205169014"})
            if res.status_code == 200 and res.text:
                result = json.loads(res.text)
                if 'ret' not in result or int(result['ret']) != 0 \
                        or 'element' not in result or not result['element']:
                    return None
                # The original used for/else with no break, so the else always
                # ran; a plain fall-through return below is equivalent.
                for factory in result['element']:
                    if 'factoryid' in factory and int(factory['factoryid']) == 10002 \
                            and len(factory['accounts']) > 0:
                        return factory['accounts'][0]
        except Exception as e:
            print("getaccount failed:", e)
        return None

    @staticmethod
    def clearKey(data, datakey, destkey='address'):
        """Strip the data[datakey] prefix (e.g. a province name) from data[destkey]."""
        if datakey in data and data[destkey] and data[destkey].strip().startswith(data[datakey].strip()):
            data[destkey] = data[destkey].replace(data[datakey], '', 1).strip()
        return data

    @staticmethod
    def clearAddress(orderinfo, destkey='address'):
        """Remove leading province/city/county/town names from the address field."""
        if destkey not in orderinfo:
            return orderinfo
        # Same four sequential clearKey passes as before, without repetition.
        for key in ("province", "city", "county", "town"):
            orderinfo = Util.clearKey(orderinfo, key, destkey)
        return orderinfo

    @staticmethod
    def checkBjRes(response):
        """Return True when the backend response is HTTP 200 with ret == 0."""
        if response.status_code == 200 and response.text:
            result = json.loads(response.text)
            return 'ret' in result and int(result['ret']) == 0
        return False

    @staticmethod
    def getTableRow(bsObj, id, func, row_no=None, truncate=True):
        """Apply *func* to the <td> lists of table *id* and join the results.

        @truncate: whether to drop the final character (e.g. a trailing separator)
        """
        table = bsObj.find("table", {"id": id})
        if not table:
            return ""
        alltr = table.find("tbody").find_all("tr")
        result = ""
        if row_no is not None and isinstance(row_no, int):
            # Accept both non-negative and negative (from-the-end) row indexes.
            if (0 <= row_no < len(alltr)) or (row_no < 0 and len(alltr) >= -row_no):
                return func(alltr[row_no].find_all("td")) if alltr[row_no] else ""
        for tr in alltr:
            note_td = tr.find_all("td")
            if note_td and len(note_td) > 2:
                result = result + func(note_td)
        if truncate and result:
            result = result[:-1]
        return result
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,899
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/login.py
|
import datetime
import json
import os
import sys
import time
import wx
import wx.adv
import wx.lib.mixins.inspection
from apscheduler.triggers import interval
from CDKUtil import CDKUtil
from apscheduler.schedulers.background import BackgroundScheduler
# Module-level scheduler instance.
# NOTE(review): appears unused here — MyApp.OnInit builds its own
# BackgroundScheduler; confirm before removing.
scheduler = BackgroundScheduler()
# Shared window title and app version.
AppTitle = "CDK抓单"
VERSION = 0.1
def refresh_order(frame):
    """Scheduled job body: pull Haier orders; log out on failure, else bump counters."""
    print("refresh_order frame={}".format(frame))
    app = wx.GetApp()
    if app.cdkutil.loadHaierOrder():
        app.addCount()
        app.setLast()
    else:
        app.logout(frame)
class MainFrame(wx.Frame):
    """Main window shown after login; refreshes its status labels on a 3 s timer."""

    def __init__(self, userinfo):
        wx.Frame.__init__(self, parent=None, title='CDK抓单中...')
        self.loginTime = wx.GetApp().GetLoginTime()
        self.userinfo = userinfo
        self.makeStatusBar()
        self.initText()
        # Populate the labels immediately instead of waiting for the first tick.
        self.OnTimer(None)
        self.timer = wx.Timer(self)
        self.timer.Start(3000)  # tick every 3 seconds
        self.Bind(wx.EVT_TIMER, self.OnTimer)
        wx.GetApp().startJob(self)

    def initText(self):
        """Build the three status labels (login duration, sync count, last sync)."""
        textSizer = wx.BoxSizer(wx.VERTICAL)
        # BUG FIX: the original used "%s".format(...), which never substitutes
        # (str.format only fills "{}" placeholders), so the label showed a
        # literal "%s" until the first timer tick. Use "{}" instead.
        self.main_txt = wx.StaticText(self, -1, "登录时长 {}".format(MyApp.getCurrentDateTime() - self.loginTime),
                                      style=wx.ALIGN_CENTER)
        self.count_txt = wx.StaticText(self, -1, "同步次数:{}".format(wx.GetApp().getCount()), style=wx.ALIGN_CENTER)
        self.last_txt = wx.StaticText(self, -1, "最近更新时间:{}".format(wx.GetApp().getLast()), style=wx.ALIGN_CENTER)
        textSizer.Add(self.main_txt, 0, wx.EXPAND, 10)
        textSizer.Add(self.count_txt, 0, wx.EXPAND, 10)
        textSizer.Add(self.last_txt, 0, wx.EXPAND, 10)
        self.SetSizer(textSizer)
        textSizer.Fit(self)

    def OnTimer(self, event):
        """Refresh the status-bar clock and the three labels."""
        t = MyApp.getCurrentDateTime()
        sbTime = "当前时间 {}".format(t.strftime("%Y-%m-%d %H:%M:%S"))
        self.myStatusBar.SetStatusText(sbTime, 0)
        self.main_txt.SetLabel("登录时长 {}".format(t - self.loginTime))
        self.count_txt.SetLabel("同步次数:{}".format(wx.GetApp().getCount()))
        self.last_txt.SetLabel("最近更新时间:{}".format(wx.GetApp().getLast()))
        self.Layout()

    def makeStatusBar(self):
        """Create a two-field status bar (clock | branding)."""
        self.myStatusBar = self.CreateStatusBar(1)
        self.myStatusBar.SetFieldsCount(2)
        self.myStatusBar.SetStatusWidths([-8, -4])
        self.myStatusBar.SetStatusText("", 0)
        self.myStatusBar.SetStatusText("bangjia.me.", 1)
class LoginFrame(wx.Frame):
    """Login window: username / password / captcha form.

    On a successful login it hides itself and opens MainFrame; defaults for
    the credential fields come from the saved user file when present.
    """

    def __init__(self):
        wx.Frame.__init__(self, parent=None, title=AppTitle)
        # panel = wx.Panel(self)
        self.main_sizer = wx.BoxSizer(wx.VERTICAL)
        userInfo = wx.GetApp().getUserInfo()
        # Pre-fill the form from the saved user file when available.
        # NOTE(review): the hard-coded fallback credentials below are a
        # security smell — confirm they can be removed or externalized.
        if userInfo and 'username' in userInfo:
            default_name = userInfo['username']
        else:
            default_name = "66004185"
        if userInfo and 'passwd' in userInfo:
            default_pwd = userInfo['passwd']
        else:
            default_pwd = "Dw147259"
        self.txt_username = wx.TextCtrl(self, value=default_name)
        self.add_widgets("账号", self.txt_username)
        self.txt_password = wx.TextCtrl(self, value=default_pwd, style=wx.TE_PASSWORD)
        self.add_widgets("密码", self.txt_password)
        self.txt_code = wx.TextCtrl(self, value="")
        # Captcha image added to the layout; clicking it reloads a fresh captcha.
        # image = wx.Image(os.path.join(wx.GetApp().resource_path(''), "bitmaps",'item_empty.png'),
        #                  wx.BITMAP_TYPE_PNG).Rescale(80, 25).ConvertToBitmap()
        self.img_code = wx.StaticBitmap(self, -1)  # bitmap holder for the captcha image
        self.img_code.Bind(wx.EVT_LEFT_DOWN, self.loadCodeImg)
        self.add_widgets("验证码", self.txt_code).Add(self.img_code, 0, wx.ALL, 5)
        # self.title = wx.TextCtrl(self, value="")
        # self.add_widgets("验证码", self.title)
        btn_sizer = wx.BoxSizer()
        save_btn = wx.Button(self, label="登录")
        save_btn.Bind(wx.EVT_BUTTON, self.on_save)
        exit_btn = wx.Button(self, label="退出")
        exit_btn.Bind(wx.EVT_BUTTON, self.on_exit)
        btn_sizer.Add(save_btn, 0, wx.ALL, 5)
        btn_sizer.Add(exit_btn, 0, wx.ALL, 5)
        # btn_sizer.Add(wx.Button(self, id=wx.ID_CANCEL), 0, wx.ALL, 5)
        self.main_sizer.Add(btn_sizer, 0, wx.CENTER)
        self.SetSizer(self.main_sizer)
        self.loadCodeImg()
        self.Show()
        self.main_window = None
        # NOTE(review): exit_btn is bound twice — EVT_BUTTON to on_exit above
        # and to OnExit here; confirm which handler is intended to win.
        self.Bind(wx.EVT_BUTTON, self.OnExit, exit_btn)
        self.Bind(wx.EVT_CLOSE, self.OnExit)

    def add_widgets(self, label_text, text_ctrl):
        """Add a labelled row (label + control) to the main sizer; return the row sizer."""
        row_sizer = wx.BoxSizer(wx.HORIZONTAL)
        label = wx.StaticText(self, label=label_text, size=(50, -1))
        row_sizer.Add(label, 0, wx.ALL, 5)
        row_sizer.Add(text_ctrl, 1, wx.ALL | wx.EXPAND, 5)
        self.main_sizer.Add(row_sizer, 0, wx.EXPAND)
        return row_sizer

    def loadCodeImg(self, event=None):
        """Fetch a new captcha image via cdkutil and display it rescaled to 80x25."""
        # response = requests.get(url)
        # img = Image.open(BytesIO(response.content))
        img = wx.GetApp().cdkutil.generateCode()
        # image = wx.Image(img.size[0], img.size[1])
        image = wx.Image(img.size[0], img.size[1])
        image.SetData(img.convert("RGB").tobytes())
        self.img_code.SetBitmap(image.Rescale(80, 25).ConvertToBitmap())

    def on_save(self, event):
        """Attempt login with the entered credentials and captcha code."""
        print("登录")
        # Perform the login; on success the session info is persisted locally.
        username = self.txt_username.GetValue()
        passwd = self.txt_password.GetValue()
        code = self.txt_code.GetValue()
        wx.GetApp().cdkutil.username = username
        wx.GetApp().cdkutil.passwd = passwd
        success = wx.GetApp().cdkutil.checkCode(code, username, passwd)
        print("登录 success: {}".format(success))
        # TODO: also write the result to the user file here?
        if success:
            wx.GetApp().SetLoginTime()
            self.main_window = MainFrame(wx.GetApp().getUserInfo())
            self.main_window.SetSize(800, 527)
            self.main_window.Center()
            self.main_window.Show(True)
            self.Hide()
            self.main_window.Bind(wx.EVT_CLOSE, self.on_exit)
        else:
            # Failed login: clear the token and save a logged-out state.
            wx.GetApp().cdkutil.token = ''
            userinfo = {"username": username, "passwd": passwd, "token": '', 'islogin': False, 'orderurl': ''}
            wx.GetApp().setUserInfo(userinfo)

    def on_exit(self, event):
        """Close handler for the exit button / main window; delegates to OnExit."""
        print("exit")
        user = wx.GetApp().getUserInfo()
        # closed_window = event.EventObject
        # if closed_window == self.main_window:
        #     self.main_window = None
        #     self.Show()
        # elif closed_window == self:
        #     print('Carry out your code for when Main window closes')
        #     event.Skip()
        self.OnExit(event)

    def OnClose(self):
        """Show the exit-confirmation dialog and return the wx.MessageBox result."""
        ret = wx.MessageBox("确定要退出吗 ?",
                            AppTitle,
                            wx.YES_NO | wx.ICON_QUESTION |
                            wx.CENTRE | wx.NO_DEFAULT)
        return ret

    def OnExit(self, event):
        """Confirm exit; on 'yes' stop the job, mark the user logged out, destroy windows."""
        print("OnExit")
        print(event)
        intChoice = self.OnClose()
        print(intChoice)
        # wx.MessageBox returns wx.YES, whose value is 2.
        if intChoice == 2:
            # Stop the background refresh job before tearing down the UI.
            wx.GetApp().stopJob()
            closed_window = event.EventObject
            if closed_window == self.main_window:
                self.main_window.Destroy()
                self.main_window = None
            # self.Show()
            # elif closed_window == self:
            #     print('Carry out your code for when Main window closes')
            #     event.Skip()
            userinfo = wx.GetApp().getUserInfo()
            userinfo['islogin'] = False
            wx.GetApp().setUserInfo(userinfo)
            self.Destroy()
class MyApp(wx.App, wx.lib.mixins.inspection.InspectionMixin):
    """Application object: owns the CDK session, saved user file, and refresh job."""

    def OnInit(self, redirect=False, filename=None, useBestVisual=False, clearSigInt=True):
        """Initialize app state and open MainFrame (when a valid token is saved) or LoginFrame."""
        self.SetAppName("CDK抓单")
        self.InitInspection()
        # Directory containing the executable/script; user data lives under it.
        self.installDir = os.path.split(os.path.abspath(sys.argv[0]))[0]
        # self.installDir = self.resource_path('')
        self.locale = wx.Locale(wx.LANGUAGE_CHINESE_SIMPLIFIED)
        self.loginTime = MyApp.getCurrentDateTime()
        path = os.path.join(self.installDir, "file")
        if not os.path.exists(path):
            os.makedirs(path)
        # Persisted user/session state, stored as JSON.
        self.userfile = os.path.join(self.installDir, "file", "user.txt")
        self.apscheduler = BackgroundScheduler()
        self.cdkutil = CDKUtil()
        self.job = None
        self.loginFrame = None
        self.mainFrame = None
        self.count = 1
        self.lasttime = self.loginTime
        print("OnInit sys.argv[0]={}".format(sys.argv[0]))
        print("OnInit installDir={}".format(self.installDir))
        userinfo = self.getUserInfo()
        frame = None
        # Skip the login screen when a plausible token (>5 chars) is saved.
        if userinfo and 'islogin' in userinfo and 'token' in userinfo:
            if userinfo['islogin'] and userinfo['token'] and len(userinfo['token']) > 5:
                self.cdkutil.token = userinfo['token']
                self.cdkutil.username = userinfo['username']
                self.cdkutil.passwd = userinfo['passwd']
                self.cdkutil.orderurl = userinfo['orderurl']
                self.mainFrame = MainFrame(userinfo)
                frame = self.mainFrame
        if not self.mainFrame:
            self.loginFrame = LoginFrame()
            frame = self.loginFrame
        frame.SetSize(800, 527)
        self.SetTopWindow(frame)
        frame.Center()
        frame.Show(True)
        return True

    def getUserInfo(self):
        """Load the saved user-info dict from the user file, or None if absent."""
        if os.path.exists(self.userfile):
            with open(self.userfile, 'r') as f:
                userinfo = json.loads(f.read())
                return userinfo
        return None

    def setUserInfo(self, userinfo):
        """Persist the user-info dict to the user file as JSON."""
        with open(self.userfile, 'w') as f:
            jsObj = json.dumps(userinfo)
            f.write(jsObj)

    @staticmethod
    def getCurrentDateTime():
        """Return the current time truncated to whole seconds.

        NOTE(review): time.gmtime() yields UTC, yet the value is shown as
        '当前时间' — confirm whether local time was intended here.
        """
        return datetime.datetime.strptime(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()), "%Y-%m-%d %H:%M:%S")

    def SetLoginTime(self):
        """Record 'now' as the login time (used for the elapsed-time label)."""
        self.loginTime = MyApp.getCurrentDateTime()
        # self.loginTime = time.localtime(time.time())

    def GetLoginTime(self):
        """Return the recorded login time."""
        return self.loginTime

    def startJob(self, frame):
        """Start the periodic order-refresh job if it is not already running."""
        if not self.apscheduler:
            self.apscheduler = BackgroundScheduler()
        self.apscheduler.start()
        if not self.job:
            # NOTE(review): the job id says 'every_5m' but the interval is
            # 5 * 10 = 50 seconds — confirm which is intended.
            trigger = interval.IntervalTrigger(seconds=5 * 10)
            self.job = self.apscheduler.add_job(lambda: refresh_order(frame), trigger=trigger, id='task_sync_every_5m',
                                                replace_existing=True)
        # self.job = self.apscheduler.add_job(func=refresh_order, trigger='interval', args=[frame],
        #                                     id='task_sync_every_5m', seconds=5 * 60)

    def stopJob(self):
        """Remove the refresh job (the scheduler itself keeps running)."""
        # self.apscheduler.shutdown(wait=False)
        if self.job:
            self.job.remove()
            self.job = None

    def logout(self, frame):
        """Stop refreshing, mark the saved state logged-out, then notify the user."""
        print("logout")
        self.stopJob()
        userinfo = self.getUserInfo()
        userinfo['islogin'] = False
        self.setUserInfo(userinfo)
        # The job runs on a worker thread; UI work must go through CallAfter.
        wx.CallAfter(self.test, frame)

    def test(self, frame):
        """Show the 'session expired' message box and destroy *frame* on OK."""
        print("test frame={}".format(frame))
        ret = wx.MessageBox("账号登录过期,请尝试重新登录",
                            AppTitle,
                            wx.OK | wx.ICON_INFORMATION)
        # ret = dialog.ShowModal()
        print(ret)
        if wx.OK == ret:
            print("ok pressed")
            frame.Destroy()
        # a = MyDialog(self.GetTopWindow(), "Dialog").ShowModal()
        # print(a)

    def addCount(self):
        """Increment the successful-sync counter."""
        self.count = self.count + 1

    def getCount(self):
        """Return the successful-sync counter."""
        return self.count

    def setLast(self):
        """Record 'now' as the last successful sync time."""
        self.lasttime = MyApp.getCurrentDateTime()

    def getLast(self):
        """Return the last successful sync time."""
        return self.lasttime

    def resource_path(self, relative_path):
        """Resolve a resource path, honoring PyInstaller's _MEIPASS when frozen."""
        if hasattr(sys, '_MEIPASS'):
            return os.path.join(sys._MEIPASS, relative_path)
        return os.path.join(os.path.abspath("."), relative_path)
class MyDialog(wx.Dialog):
    """Minimal modal dialog with a single OK button that closes it."""

    def __init__(self, parent, title):
        super(MyDialog, self).__init__(parent, title=title, size=(250, 150))
        container = wx.Panel(self)
        self.btn = wx.Button(container, wx.ID_OK, label="ok", size=(50, 20), pos=(75, 50))
        self.btn.Bind(wx.EVT_BUTTON, self.on_Ok)

    def on_Ok(self, event):
        """Log the click and dismiss the dialog."""
        print("MyDialog ok button clicked!!!")
        self.Close()
if __name__ == '__main__':
    # Entry point: run the wx event loop; stdout is not redirected to a window.
    app = MyApp(redirect=False)
    app.MainLoop()
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,900
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/MideaCookieUtil.py
|
import json
import time
from datetime import date, timedelta
import requests
from BaseUtil import BaseUtil
from cookie_test import fetch_chrome_cookie
class MideaUtil(BaseUtil):
    """Crawler for Midea's c-css after-sales system; pushes orders to bangjia.

    Authenticates by reusing the cookies of a logged-in local Chrome profile.
    """

    def __init__(self, username, passwd, adminid='24', factoryid='4', baseurl='https://cs.midea.com/c-css/',
                 bjdomain='http://yxgtest.bangjia.me'):
        super(MideaUtil, self).__init__(username, passwd, adminid, factoryid, baseurl, bjdomain)
        # JSON API endpoints. The original assigned Accept/Content-Type twice;
        # only the final values ever took effect, so the dead first pair is gone.
        self.headers['Accept'] = "*/*"
        self.headers['Content-Type'] = 'application/json'
        # Reuse the login session from the local Chrome profile.
        self.cookie = fetch_chrome_cookie([{"domain": ".midea.com"}], isExact=False)
        self.cookies = BaseUtil.getCookies(self.cookie)
        self.headers['Cookie'] = self.cookie
        print("init cookie=", self.cookie)

    def loadOrders(self, param=None):
        """Collect orders for every org and POST them to the bangjia backend.

        Returns self.datasuccess on success, self.dataverify on any error.
        """
        try:
            data = {"data": json.dumps(self.loadRolesOrder())}
            print("loadOrders data=", data)
            requests.post(self.bjdomain + "/Api/Climborder/addorder", data=data)
        except Exception as e:
            # Narrowed from a bare except (which also trapped SystemExit /
            # KeyboardInterrupt); keep the best-effort contract but log.
            print("loadOrders failed:", e)
            return self.dataverify
        return self.datasuccess

    def loadRolesOrder(self):
        """Return the de-duplicated order list across every org of the user.

        Returns self.datafail when the userInfo call is malformed or rejected.
        """
        roleurl = self.baseurl + "desktop/userInfo"
        self.headers['Referer'] = self.baseurl + "views/css/desktop/index.jsp"
        response = self.session.post(roleurl, headers=self.headers)
        print("userInfo result=", response.text)
        result = []
        if response.status_code == 200 and response.text:
            roleresult = self.getjson(response)
            if not roleresult or 'status' not in roleresult or not roleresult['status']:
                return self.datafail
            if 'content' not in roleresult or 'orgUsers' not in roleresult['content']:
                return self.datafail
            for org in roleresult['content']['orgUsers']:
                orgId = org['orgEntityVO']['orgCode']
                # Merge per-org results, de-duplicating on the factory order number.
                result = self.merge(result, self.switchOrg(orgId), "factorynumber")
        # BUG FIX: the original fell off the end and returned None, so the
        # caller always serialized null; return the accumulated list instead.
        return result

    def switchOrg(self, orgId):
        """Switch the server-side session to *orgId* and load that org's orders."""
        roleurl = self.baseurl + "switchOrg"
        self.headers['Referer'] = self.baseurl + "views/css/desktop/index.jsp"
        params = {"currentOrg": orgId, "loginToken": self.cookies['loginToken']}
        response = self.session.post(roleurl, headers=self.headers, data=params)
        # Visiting the home page appears needed to finalize the org switch
        # server-side before listing orders — TODO confirm.
        response = self.session.get(self.baseurl + 'views/css/desktopPlugIn/wd_homePage.jsp', headers=self.headers)
        return list(self.loadPageOrder())

    def loadPageOrder(self, page=1, totalcount=100, pageSize=100):
        """Yield parsed orders page by page (orders contacted in the last 3 days)."""
        dataurl = self.baseurl + "wom/serviceorderunit/listdata"
        data = {"page": page, "rows": pageSize, "pageIndex": page - 1, "pageSize": pageSize,
                "formConditions": {"SERVICE_ORDER_STATUS": "", "CONTAIN_EJFWS": "N",
                                   "CONTACT_TIME": (date.today() - timedelta(days=3)).strftime("%Y-%m-%d"),
                                   "CONTACT_TIME_end": (date.today()).strftime("%Y-%m-%d")}}
        response = self.session.post(dataurl, headers=self.headers, data=json.dumps(data))
        self.headers['Referer'] = self.baseurl + "wom/serviceorderunit/list?type=womServiceNotFinshCount"
        response.encoding = 'utf-8'
        print("loadOrders response={}".format(response.text))
        result = json.loads(response.text)
        if result and 'status' in result and result['status']:
            data = result['content']
            totalcount = data['total']
            pagecount = data['pageCount']
            pageSize = data['pageSize']
            # NOTE(review): assumes the server echoes a 1-based page number in
            # 'pageIndex'; if it echoes the 0-based index this recursion could
            # fetch one extra page — confirm against the live API.
            page = data['pageIndex']
            if page >= pagecount:
                yield from self.parseOrders(data)
            else:
                yield from self.parseOrders(data)
                yield from self.loadPageOrder(page + 1, totalcount, pageSize)

    def parseOrders(self, data):
        """Translate raw Midea order rows into the bangjia order schema."""
        for item in data['rows']:
            yield {
                'factorynumber': item['SERVICE_ORDER_NO'], 'ordername': item['SERVICE_SUB_TYPE_NAME'],
                'username': item['SERVICE_CUSTOMER_NAME'], 'mobile': item['SERVICE_CUSTOMER_TEL1'],
                'orderstatus': item['SERVICE_ORDER_STATUS'], 'originname': item['ORDER_ORIGIN'],
                'machinetype': item['PROD_NAME'], 'machinebrand': item['BRAND_NAME'],
                # dict.get replaces the verbose "x if k in item else ''" form.
                'sn': '', 'version': item.get('PRODUCT_MODEL', ''),
                'repairtime': item.get('FINAL_APPOINT_TIME', ''),
                'mastername': item.get('ENGINEER_NAME', ''),
                'note': item.get('PUB_REMARK', ''),
                'companyid': self.factoryid, 'adminid': self.adminid,
                'address': str(item['SERVICE_CUSTOMER_ADDRESS']),
                'ordertime': item['CONTACT_TIME'],
                'description': item['SERVICE_DESC'],
            }
if __name__ == '__main__':
    # util = ConkaUtil('K608475', 'Kuser6646!', adminid='20699', factoryid='1')
    # bangjia:13819807915  Midea account: AW3306009461 Md123456789
    # NOTE(review): live credentials are hard-coded below; move them to config.
    util = MideaUtil('AW3306009461', 'Md123456789!', adminid='24', factoryid='4')
    # util = ConkaUtil('K608069', 'Crm@20200401', adminid='24', factoryid='1')
    print(util.loadOrders())
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,901
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/chrome_cookies_old.py
|
import os
import sqlite3
from collections import defaultdict
# from win32.win32crypt import CryptUnprotectData
'''
实际使用场景请自行修改Cookies/cookies.sqlite位置,下面代码均为默认安装的位置,有些绿色版的文件夹位置以及老版本的渗透版火狐浏览器位置需要自行修改
'''
# # 获取chrome浏览器的cookies
# def getcookiefromchrome():
# cookiepath = os.environ['LOCALAPPDATA'] + r"\Google\Chrome\User Data\Default\Cookies"
# sql = "select host_key,name,encrypted_value from cookies"
# with sqlite3.connect(cookiepath) as conn:
# cu = conn.cursor()
# select_cookie = (cu.execute(sql).fetchall())
# cookie_list = []
# for host_key, name, encrypted_value in select_cookie:
# cookie = CryptUnprotectData(encrypted_value)[1].decode()
# cookies = {host_key: name + ":" + cookie}
# cookie_list.append(cookies)
# d = defaultdict(list)
# for cookie_item in cookie_list:
# for key, value in cookie_item.items():
# d[key].append(value.strip())
# print(dict(d))
#
#
# getcookiefromchrome()
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,902
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/test/http2.py
|
import asyncio
import json
import os
import sys
import httpx
from hyper import HTTPConnection, HTTP20Connection
# conn = HTTPConnection('http2bin.org:443')
# conn.request('GET', '/get')
# resp = conn.get_response()
#
# print(resp.read())
from hyper.tls import init_context
from BaseUtil import BaseUtil
# Experimental HTTP/2 client (via `hyper`) for JD's bill-search endpoint.
# Desktop Chrome user agent so the endpoint serves normal web responses.
agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36"
# Browser-like headers for opn.jd.com.
headers = {'Content-Type': 'application/x-www-form-urlencoded',
           'User-Agent': agent, 'Referer': "https://opn.jd.com/bill/search?billStatus=5",
           'Upgrade-Insecure-Requests': '1', 'Host': "opn.jd.com", 'Origin': "https://opn.jd.com",
           'Accept-Encoding': 'gzip, deflate, br', 'Connection': 'keep-alive',
           'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
           'Accept': 'application/json, text/plain, */*'}
# Bill-search query parameters.
data = {"sort": "billId",
        "order": "desc",
        "billStatuses": "5",
        "isEgBuy": "0",
        "outletsNo": "05928613279",
        "sortKind": "4", "page": "1", "rows": "10", "isAppliance": "1",
        }
# Hand-rolled form encoding: key=value pairs joined by '&'.
result = ""
for item in data:
    result += item + "=" + data[item] + "&"
result = result[:-1]  # drop the trailing '&'
# Resolve the CA bundle shipped next to the script.
realpath = os.path.dirname(os.path.realpath(sys.argv[0]))
print("realpath>>>>", realpath)
cafile = os.path.join(realpath, "resource", 'pem', "certs.pem")
print("cert_loc cafile>>>",cafile)
conn = HTTP20Connection(host='opn.jd.com', port=443, ssl_context=init_context(cafile))
# Reuse the logged-in Chrome session cookies for the .jd.com domain.
cookie = BaseUtil.getCookie([{"domain": ".jd.com"}])
headers['Cookie'] = cookie
# HTTP/2 pseudo-headers required by hyper.
headers[':authority'] = 'opn.jd.com'
headers[':method'] = 'POST'
headers[':path'] = '/bill/query.json'
headers[':scheme'] = 'https'
response = conn.request(method='POST', url='https://opn.jd.com/bill/query.json',
                        body=result,
                        headers=headers)
resp = conn.get_response(response)
print(resp.status)
res = resp.read()
print(res)
print(json.loads(res))
# async def test():
#     async with httpx.AsyncClient(http2=True) as client:
#         r = await client.post('https://opn.jd.com/bill/query.json', data=data, headers=headers)
#         print(r.text)
#
#
# asyncio.run(test())
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,903
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/MIUtil.py
|
import datetime
import json
import re
import time
from urllib import parse
from urllib.parse import urlparse
import requests
# from requests_html import HTMLSession
# from utils.ChromeCookie import fetch_chrome_cookie
from BaseUtil import BaseUtil
from cookie_test import fetch_chrome_cookie
class MIUtil(BaseUtil):
    """Crawler for Xiaomi's xms after-sales system (xms.be.xiaomi.com).

    Reuses the cookies of a logged-in local Chrome profile and pushes the
    parsed orders to the bangjia backend.
    """

    def __init__(self, adminid='68891', factoryid='17', baseurl='https://xms.be.xiaomi.com',
                 bjdomain='http://yxgtest.bangjia.me'):
        super(MIUtil, self).__init__('', '', adminid, factoryid, baseurl, bjdomain)
        parsed_uri = urlparse(baseurl)
        self.host = parsed_uri.netloc
        self.baseurl = baseurl
        self.adminid = adminid
        self.factoryid = factoryid
        self.bjdomain = bjdomain
        self.mainurl = self.baseurl + '/admin/page!main.action'
        self.searchurl = self.baseurl + '/afterservice/afterservice!api.action'
        # Pull only the session cookies the xms endpoints need.
        self.cookie = fetch_chrome_cookie(
            [{"domain": ".xiaomi.com", "fields": ['uLocale', 'cUserId', 'userId', 'xmsbe_slh', "xst"]},
             {"domain": ".be.xiaomi.com", "fields": ["xst"]},
             {"domain": "xms.be.xiaomi.com"},
             {"domain": ".xms.be.xiaomi.com"},
             # {"domain": ".account.xiaomi.com"},
             # {"domain": ".mi.com"}
             ])
        self.cookies = MIUtil.getCookies(self.cookie)
        self.session = requests.Session()
        # self.session = HTMLSession()
        # self.agent = random.choice(agents)
        self.agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) ' \
                     'Chrome/81.0.4044.113 Safari/537.36'
        # Result envelopes returned to the GUI layer.
        self.datasuccess = {'code': 1, 'msg': '抓单成功', 'element': ''}
        self.datafail = {'code': 0, 'msg': '抓单失败,请使用谷歌浏览器登录小米账号后重试'}
        self.dataverify = {'code': 2, 'msg': '登录过期,请重新登录', 'element': ''}
        self.headers = {'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
                        'User-Agent': self.agent,
                        'Upgrade-Insecure-Requests': '1', 'Host': self.host, 'Origin': self.baseurl,
                        'Accept-Encoding': 'gzip, deflate, br', 'Cookie': self.initCookie(self.cookies),
                        'Accept-Language': 'zh-CN,zh;q=0.9', 'Connection': 'keep-alive',
                        'Accept': 'application/json, text/javascript, */*; q=0.01'}

    def initCookie(self, cookies=None):
        """Serialize a cookie dict into a 'k=v; k=v' header string ('' if empty)."""
        if not cookies:
            return ""
        result = ""
        for cookie in cookies:
            result += cookie + "=" + cookies[cookie] + "; "
        return result[:-2]  # drop the trailing '; '

    def loadMain(self):
        """Open the service-list page, scrape the org ids, then load the orders."""
        if 'userId' not in self.cookies:
            # Without a userId cookie the Chrome session is unusable.
            return self.datafail
        # data = "method=srvServicing.getJurisdictionOrg&params=" + self.cookies['userId']
        self.headers['Referer'] = self.mainurl + "?"
        headers = self.headers.copy()
        headers[
            'Accept'] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
        response = self.session.get(self.searchurl + "?router=service_list", headers=headers)
        response.encoding = 'utf-8'
        # Scrape originOrgId out of the page's inline JavaScript.
        result = re.findall(re.compile(r"originOrgId: ['](.*?)[']", re.S), response.text)
        if not result or len(result) == 0:
            return self.datafail
        orgId = result[0]
        # originOrgId = re.findall(r"originOrgId: '(.+?)',", response.text, re.S)[0]
        originOrgId = orgId
        return self.loadOrders({'orgId': orgId, "originOrgId": originOrgId})

    def loadOrders(self, param=None):
        """Search the last-3-days orders for the given org and push them to bangjia."""
        self.headers['Referer'] = self.searchurl
        startTime = (datetime.date.today() + datetime.timedelta(days=-3)).strftime("%Y-%m-%d")
        endTime = (datetime.date.today() + datetime.timedelta(days=+1)).strftime("%Y-%m-%d")
        params = {"key": "", "miliao": "", "curOperator": self.cookies['userId'], "originOrgId": param['originOrgId'],
                  "orgId": param['orgId'], "sId": "", "tel": "", "imei": "", "sn": "", "orderId": "",
                  "createStartTime": startTime, "createEndTime": endTime, "signStartTime": "", "signEndTime": "",
                  "closeStartTime": "", "closeEndTime": "", "returnStartTime": "", "returnEndTime": "",
                  "fullStartTime": startTime, "fullEndTime": endTime, "pageInfo": {"pageNum": 1, "pageSize": 50}}
        data = {'method': 'srvServicing.searchList',
                'params': json.dumps(params)}
        response = self.session.post(self.searchurl, data=parse.urlencode(data), headers=self.headers)
        response.encoding = 'utf-8'
        datas = json.loads(response.text)
        if datas['code'] == 1:
            try:
                data = {"data": json.dumps(list(self.parseOrders(datas)))}
                requests.post(self.bjdomain + "/Api/Climborder/addorder", data=data)
            except Exception as e:
                print(str(e))
                return self.datafail
            return self.datasuccess
        return self.datafail

    def parseOrders(self, datas):
        """Yield bangjia-schema orders for each row, enriched via the detail APIs."""
        total_num = datas['result']['pageInfo']['total']
        for order_key in datas['result']['srvInfos']:
            order_info = {'factorynumber': order_key['sId'], 'ordername': order_key['typeDesc'],
                          'username': order_key['customerName'], 'mobile': order_key['customerTel'],
                          'orderstatus': order_key['statusDesc'],
                          'machinetype': order_key['goodsNames'].replace("小米", ''), 'sn': order_key['sns'],
                          'companyid': self.factoryid, 'machinebrand': '小米', 'originname': '小米系统',
                          'adminid': self.adminid}
            yield from self.getDetail(order_info, order_key)

    # Detail-query endpoint
    def getDetail(self, order, datas):
        """Fill address/ordertime/repairtime/note from the order-detail API, then chain to showMsg."""
        self.headers['Referer'] = self.mainurl
        # BUG FIX: the literal previously contained '¶ms' — an HTML-unescaped
        # '&para' artifact; restore the intended '&params=' request separator.
        post_data = "method=srvServicing.getCommonSrvDetail&params=%7B%22sId%22%3A%22" + datas['sId'] + \
                    "%22%2C%22conditions%22%3A%22BASEINFO%22%7D"
        response = self.session.post(self.searchurl, data=post_data, headers=self.headers)
        response.encoding = 'utf-8'
        json_ret2 = json.loads(response.text)
        if json_ret2['code'] == 1:
            datas['addressDescC'] = json_ret2['result']['baseInformation']['addressDescC']
            order['address'] = json_ret2['result']['baseInformation']['addressDesc']
            # applyTime is epoch milliseconds; format it as "Y-m-d H:M:S".
            timeArray = time.localtime(json_ret2['result']['baseInformation']['applyTime'] / 1000)
            otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
            order['ordertime'] = otherStyleTime
            if json_ret2['result']['baseInformation']['hopeVisitTime']:
                order['repairtime'] = json_ret2['result']['baseInformation']['hopeVisitTime']
            createFrom = json_ret2['result']['baseInformation']['createFrom']
            # Prepaid orders carry the price in the first item's extendContent.
            if createFrom.find("预付费") != -1 and createFrom != '':
                order['note'] = createFrom
                if len(json_ret2['result']['baseInformation']['items']) > 0:
                    priceitem = json.loads(json_ret2['result']['baseInformation']['items'][0]['extendContent'])
                    order['note'] = order['note'] + str(priceitem['price'])
            yield from self.showMsg(order, datas)

    def showMsg(self, order, datas):
        """Reveal the customer's real name/tel/address via the privacy-log API."""
        show_url = self.baseurl + '/common/common!savePrivateLogOperate.action'
        post_data = {"content": json.dumps({"miliao": [], "name": [datas['customerNameC']],
                                            "tel": [datas['customerTelC']],
                                            "email": [], "address": [datas['addressDescC']],
                                            "operateKey": datas['sId']})}
        response = self.session.post(show_url, data=post_data, headers=self.headers)
        response.encoding = 'utf-8'
        json_msg = json.loads(response.text)
        if 'result' in json_msg:
            order['username'] = json_msg['result']['name'][0]
            order['mobile'] = json_msg['result']['tel'][0]
            order['address'] = json_msg['result']['address'][0]
        yield self.getDescription(order, datas)

    # Query the handling result and problem description
    def getDescription(self, order, datas):
        """Fill description and fault notes from the service-detail API; return the order."""
        self.headers['Referer'] = self.searchurl + '?router=service_info_detail&sId=' + datas['sId']
        # BUG FIX: restore '&params=' (was mojibake '¶ms' from HTML unescaping).
        post_data = "method=srvServicing.getServiceVo&params=%7B%22sId%22%3A%22" + datas[
            'sId'] + "%22%2C%22conditions%22%3A%22%22%7D"
        response = self.session.post(self.searchurl, data=post_data, headers=self.headers)
        response.encoding = 'utf-8'
        json_ret3 = json.loads(response.text)
        if json_ret3['code'] == 1:
            data = json_ret3['result']
            if data['customerDesc']:
                order['description'] = data['customerDesc']
            fault = ''
            if len(data['items']) > 0:
                for item in data['items'][0]['itemHasFaults']:
                    fault += item['faultName'] + ";"
                if data['items'][0]['faultDesc']:
                    fault += data['items'][0]['faultDesc'] + ";"
                if data['items'][0]['methods']:
                    fault += "处理方法:" + data['items'][0]['methods'][0]['name']
            if fault:
                order['note'] = fault
            return order
if __name__ == '__main__':
    # Manual smoke test: pull orders for admin 24 / factory 17 against the test domain.
    # util = MIUtil('20845', factoryid='17')
    util = MIUtil('24', factoryid='17', bjdomain='http://yxgtest.bangjia.me')
    print(util.loadMain())
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,904
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/JDUtil.py
|
import json
import os
import re
import sys
import time
import requests
from hyper.tls import init_context
from BaseUtil import BaseUtil
from hyper import HTTPConnection, HTTP20Connection
# JD POP service-bill business types (code -> human-readable label).
businessTypes = {"1": "上门安装", "2": "送货服务", "3": "提货送装", "4": "拆卸包装", "5": "退货服务"}
# JD order status codes (code -> human-readable label).
statusTypes = {"1": "新订单", "2": "自动分配失败", "3": "已分配", "4": "申请改派", "5": "已接收", "6": "已预约", "7": "已派工",
               "8": "上门完成", "12": "确认完成", "13": "取消服务", "14": "确认取消服务", "15": "客户取消"}
class JDUtil(BaseUtil):
def __init__(self, username='', passwd='', adminid='24', factoryid='19', baseurl='http://jdfw.jd.com',
             bjdomain='http://yxgtest.bangjia.me'):
    """Set up endpoint URLs, Chrome-sourced JD cookies and default headers.

    Auth is cookie-based: the operator must already be logged in to jdfw.jd.com
    in Chrome; BaseUtil.getCookie lifts the .jd.com cookies from the browser.
    """
    super(JDUtil, self).__init__(username, passwd, adminid, factoryid, baseurl, bjdomain)
    self.mainurl = self.baseurl + '/admin/page!main.action'
    self.searchurl = self.baseurl + '/receipt/query.json'  # in-house order query endpoint
    self.popurl = "https://opn.jd.com/bill/query.json"  # POP (marketplace) bill endpoint
    self.cookie = BaseUtil.getCookie([{"domain": ".jd.com"}])
    self.cookies = BaseUtil.getCookies(self.cookie)
    self.headers['Cookie'] = self.cookie
    self.headers['Accept'] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng," \
                             "*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
    self.headers['Content-Type'] = 'application/x-www-form-urlencoded'
def loadMain(self):
    """Entry point: fetch the operator's profile, then crawl and upload all orders.

    Returns self.datafail on a non-200 response, otherwise whatever the
    getOrgan -> loadMains chain returns.
    """
    self.headers['Referer'] = self.baseurl + '/receipt/receiptDashboardIndex?homePageDistinguish=notAppointed'
    self.headers['Accept'] = '*/*'
    response = self.session.post(self.baseurl + "/common/inforLinkage/getPerson", headers=self.headers)
    response.encoding = 'utf-8'
    print("loadMain result:{}".format(response.text))
    # print("=============================================")
    if response.status_code == 200:
        return self.getOrgan(json.loads(response.text))
    return self.datafail
def getOrgan(self, datas):
    """Resolve the operator's warehouse record, merge it into the profile,
    and continue into loadMains.

    Returns self.datafail on a non-200 response.
    """
    response = self.session.post(self.baseurl + "/wareset/getImBaseLasWare", headers=self.headers,
                                 data={"lasWareCode": datas['wareHouseNo']})
    response.encoding = 'utf-8'
    # print("getOrgan result:{}".format(response.text))
    # print("=============================================")
    if response.status_code == 200:
        # Merge the first warehouse entry into the profile dict.
        return self.loadMains(dict(datas, **(json.loads(response.text)[0])))
    return self.datafail
def uploadOrders(self, datas):
    """POST the collected orders (JSON-encoded) to the bangjia addorder API.

    Returns self.datasuccess on success, self.datafail when the POST raised.
    """
    try:
        data = {"data": json.dumps(datas)}
        # print("uploadOrders data={}".format(data))
        requests.post(self.bjdomain + "/Api/Climborder/addorder", data=data)
    except Exception as e:
        # Best-effort upload: report failure instead of crashing the crawl.
        print("addorder failed:", e)
        return self.datafail
    return self.datasuccess
def mergeData(self, result, orders):
    """Append *orders* onto *result* unless orders is empty/None or an
    error payload (anything containing the key "code")."""
    if not orders or "code" in orders:
        return result
    result += orders
    return result
def loadMains(self, datas):
    """Crawl all four order feeds and upload them in two batches.

    serviceType 0/1 are in-house install & after-sales orders; 3/4 are POP
    appliance & furniture bills. Short sleeps space out the requests.
    Returns the result of the final uploadOrders call.
    """
    result = []
    result = self.mergeData(result, self.loadPageOrders(datas, 0))
    result = self.mergeData(result, self.loadPageOrders(datas, 1))
    self.uploadOrders(result)
    time.sleep(1)
    result = []
    result = self.mergeData(result, self.loadPageOrders(datas, 3))
    time.sleep(1)
    result = self.mergeData(result, self.loadPageOrders(datas, 4))
    # print("loadMains result={}".format(result))
    # print("=============================================")
    return self.uploadOrders(result)
def ispop(self, serviceType):
    """True for POP bill types: 3 (appliance) and 4 (furniture)."""
    return serviceType in (3, 4)
def loadPopOrder(self, data, serviceType):
    """Query POP (marketplace) service bills from opn.jd.com over HTTP/2.

    hyper is used because opn.jd.com requires HTTP/2; the query dict is
    form-encoded by hand into the request body.

    Args:
        data: dict of form fields for /bill/query.json.
        serviceType: 3 for POP appliance bills, 4 for POP furniture bills.

    Returns:
        list of parsed order dicts, or self.dataverify on a non-200 response
        (usually an expired browser session cookie).
    """
    body = "&".join(key + "=" + data[key] for key in data)
    # CA bundle shipped with the app, resolved relative to the executable.
    realpath = os.path.dirname(os.path.realpath(sys.argv[0]))
    print("realpath>>>>", realpath)
    cafile = os.path.join(realpath, "resource", 'pem', "certs.pem")
    print("cert_loc cafile>>>", cafile)
    conn = HTTP20Connection(host='opn.jd.com', port=443, ssl_context=init_context(cafile))
    headers = self.headers.copy()
    headers['Referer'] = "https://opn.jd.com/bill/search?billStatus=5"
    headers['Host'] = "opn.jd.com"
    headers['Origin'] = "https://opn.jd.com"
    # HTTP/2 pseudo-headers required by hyper.
    headers[':authority'] = 'opn.jd.com'
    headers[':method'] = 'POST'
    headers[':path'] = '/bill/query.json'
    headers[':scheme'] = 'https'
    # BUG FIX: conn.request() returns a stream id (an int), not a response
    # object — the old error branch read "response.url"/"response.text" and
    # raised AttributeError exactly when an error needed reporting.
    stream_id = conn.request(method='POST', url=self.popurl, body=body, headers=headers)
    resp = conn.get_response(stream_id)
    if resp.status != 200:
        print("请求{}失败,返回:{},请使用谷歌浏览器重新登录京东系统".format(self.popurl, resp.status))
        return self.dataverify
    res = resp.read()
    # print(res)
    return list(self.parseOrders(json.loads(res), serviceType))
def loadPageOrders(self, datas, serviceType):
    """Fetch all orders of the given serviceType.

    serviceType: 0 = install/repair work orders, 1 = after-sales orders,
    3 = POP service bills, 4 = POP furniture service bills (0/1 hit
    self.searchurl; 3/4 are delegated to loadPopOrder).
    Returns a list of parsed order dicts, or self.dataverify when the JD
    session appears expired.
    """
    data = {
        "sort": "returnTime" if not self.ispop(serviceType) else "billId", "order": "desc",
        "sortKind": "4", "page": "1", "rows": "500", "reservationStatus": "",  # 3 = not yet booked, empty = all
    }
    if self.ispop(serviceType):
        data['isAppliance'] = '1' if serviceType == 3 else '0'
        data['billStatuses'] = '5'
        data['isEgBuy'] = '0'
        data['outletsNo'] = str(datas['infoLink'])
        return self.loadPopOrder(data, serviceType)
    else:
        data['serviceType'] = str(serviceType)
        data['fastDealNum'] = '5'  # 5 = awaiting booking, 7 = awaiting feedback, 0 = all
        data['esSwitch'] = '1'
        data['subCompanyId'] = str(datas['orgNo'])
        data['wareInfoId'] = str(datas['lasWareRelation'])
        data['outletsId'] = str(datas['infoLink'])
    # Hand-build the query string, then append the long tail of empty filter
    # fields the endpoint expects to be present.
    result = ""
    for item in data:
        result += item + "=" + data[item] + "&"
    result = result + "freeinstall=&startStatus=&endStatus=&timeout=&todayOtherReservationConditionName=&productBrand=&productType1=&productType2=&productType3=&orderId=&bizOrderId=&ordernoGroup=&customerName=&customerPhone=&serviceStreet=&wareId=&productName=&orderStatus=&orderStatusGroup=&createOrderTimeBegin=&createOrderTimeEnd=&reservationDateBegin=&reservationDateEnd=&firstReservationTimeBegin=&firstReservationTimeEnd=&changedReservationDateBegin=&changedReservationDateEnd=&feedbackStatus=&orderOrderStatus=&expectAtHomeDateBegin=&expectAtHomeDateEnd=&atHomeFinishDateBegin=&atHomeFinishDateEnd=&deliveryDateStart=&deliveryDateEnd=&homePageDistinguish=&fastDealNumByColor=&reportLessFlag=&superExperienceStore=&sourceOrderIdGroup=&sellerId=&sellerName=&eclpBusinessNo=&isFast="
    # print("loadPageOrders requesturl=", result)
    # Re-parse the query string back into a dict for requests' form encoding.
    params = {}
    datas = result.split("&")
    for data in datas:
        content = data.split("=")
        if len(content) > 1:
            params[content[0]] = content[1]
    self.headers['X-Requested-With'] = 'XMLHttpRequest'
    self.headers['Accept'] = 'application/json, text/javascript, */*; q=0.01'
    self.headers['Referer'] = self.baseurl + '/receipt/receiptDashboardIndex?homePageDistinguish=notAppointed' \
                                             '&serviceType=' + str(serviceType)
    url = self.searchurl if not self.ispop(serviceType) else self.popurl
    response = self.session.post(url, headers=self.headers, data=params)
    response.encoding = 'utf-8'
    # print(response.url)
    # print(response.text)
    # print(response.headers)
    # A redirect to an error page means the login cookie is stale.
    if response.status_code != 200 or "error" in response.url:
        print("请求{}失败,返回:{},请使用谷歌浏览器重新登录京东系统".format(response.url, response.text))
        return self.dataverify
    return list(self.parseOrders(self.getjson(response), serviceType))
def parseOrders(self, datas, serviceType):
    """Generator: yield one mapped order dict per row of a query.json payload.

    Yields nothing when the payload has no 'total' key (error responses).
    """
    if 'total' not in datas:
        return []
    print("total count:{}".format(datas['total']))
    for row in datas['rows']:
        yield from self.parseOrder(row, serviceType)
def getordername(self, data, serviceType):
    """Return the human-readable service name for an order row ('' when unmapped)."""
    if self.ispop(serviceType):
        # POP rows carry a numeric businessType code.
        if 'businessType' in data and data['businessType']:
            key = str(int(data['businessType']))
            return businessTypes.get(key, '')
    elif 'reservationServiceTypeName' in data:
        # In-house rows already carry the display name.
        name = data['reservationServiceTypeName']
        return name if name else ''
def parseOrder(self, data, serviceType):
    """Map one raw JD order row onto the bangjia order schema and yield it.

    In-house and POP rows use different field names, hence the pervasive
    `data['a'] if 'a' in data else data['b']` fallbacks. Yields exactly one
    order_info dict.
    """
    # reservationServiceTypeName :安装 createOrderTime:1588123851000
    mobile = str(data['customerPhone']) if 'customerPhone' in data else ''
    address = str(data['serviceStreet']) if 'serviceStreet' in data else data['customerAddress']
    # Drop both full-width and ASCII commas so the address survives CSV-ish handling.
    address = address.replace(",", "").replace(",", "") if address else ''
    # Strip full-width parenthesised segments from the brand name.
    brand = re.sub(r'([^()]*)', '', data['productBrandName'])
    createTimeKey = "createOrderTime" if 'createOrderTime' in data else "createTime"
    orderid = "orderno" if not self.ispop(serviceType) else "billNo"
    orderno = "_{}".format(data[orderid]) if orderid in data and data[orderid] else ''
    ps = (" 安维单号:{}" if serviceType != 1 else " 售后单号:{}").format(data[orderid])
    if 'expectAtHomeDate' in data:
        repairtime = data['expectAtHomeDate']
    elif 'reservationInstallTime' in data and data['reservationInstallTime']:
        # Epoch milliseconds -> "YYYY-mm-dd HH:MM:SS" (local time).
        repairtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(data['reservationInstallTime'] / 1000))
    else:
        repairtime = ''
    order_info = {
        'factorynumber': (data['orderId'] if 'orderId' in data else data['orderid']) + orderno,
        'ordername': self.getordername(data, serviceType),
        'username': data['customerName'], 'mobile': mobile, 'originname': '京东系统',
        'orderstatus': data['orderStatusName'] if 'orderStatusName' in data else statusTypes["5"],
        'machinetype': data['productTypeName'] if 'productTypeName' in data else data['productCategoryName'],
        'machinebrand': brand, 'version': data['productName'],
        'sn': data['wareId'] if 'wareId' in data else data['productSku'],
        'companyid': self.factoryid, 'adminid': self.adminid, 'address': address,
        'province': data['serviceProvince'] if 'serviceProvince' in data else data['provinceName'],
        'city': data['serviceCity'] if 'serviceCity' in data else data['cityName'],
        'county': data['serviceCounty'] if 'serviceCounty' in data else data['districtName'],
        'town': data['serviceDistrict'] if 'serviceDistrict' in data else data['streetName'],
        'ordertime': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(data[createTimeKey] / 1000)),
        'repairtime': repairtime,
        'note': str(data['feedbackNote'] if 'feedbackNote' in data else data['saleFrom']) + str(
            data['exceptionFeeApprovalStatusName'] if 'exceptionFeeApprovalStatusName' in data else ''),
        'description': str(data['feedbackResult'] if 'feedbackResult' in data else data['reservationFailReason']) + ps,
        'ordernoSecret': data['ordernoSecret'] if 'ordernoSecret' in data else data['businessNo']
    }
    # Remove the province/city prefix duplicated inside the street address.
    order_info = JDUtil.clearAddress(order_info)
    if not self.ispop(serviceType):
        # In-house rows mask the phone; fetch the real one from the detail page.
        order_info = self.getUserInfo(order_info)
    # print(order_info)
    yield order_info
def parseUserMobile(self, data, url, referer):
    """Scrape the unmasked customer phone from the order-detail iframe page.

    Returns *data* unchanged on a non-200 response; otherwise sets
    data['mobile'] from the hidden customerPhone input and returns it.
    """
    header = self.headers.copy()
    header['Referer'] = referer
    response = self.session.get(url, headers=header)
    # print("parseUserMobile response:{}".format(response.text))
    if response.status_code != 200:
        return data
    bsObj = self.getsoup(response)
    # The phone sits in a hidden input inside the search form's first row.
    tr = bsObj.find("form", {"id": "searchForm"}).find("tbody").find("tr")
    data['mobile'] = tr.find("input", {"name": "customerPhone"})["value"]
    return data
def getUserInfo(self, data):
    """Follow the order's secret detail URL to recover the real phone number.

    Requires data['ordernoSecret']; returns *data* unmodified when it is
    missing, the request fails, or no detail iframe is found.
    """
    if not data or "ordernoSecret" not in data:
        return data
    userurl = self.baseurl + "/receipt/manage?orderno=" + data['ordernoSecret']
    self.headers['Accept'] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng," \
                             "*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
    response = self.session.get(userurl, headers=self.headers)
    # print("getUserInfo response:{}".format(response.text))
    if response.status_code != 200:
        return data
    bsObj = self.getsoup(response)
    # The detail page embeds the real customer data in an inner iframe.
    iframe = bsObj.find("iframe", {"id": "innerframe"})
    if iframe:
        url = self.baseurl + str(iframe['src'])
        # parsed_url = urlparse(url)
        # params = dict(parse.parse_qsl(parsed_url.query))
        return self.parseUserMobile(data, url, userurl)
    return data
if __name__ == '__main__':
    # Manual smoke test against the test bangjia domain.
    util = JDUtil(adminid='24', factoryid='19')
    # util = JDUtil(adminid='69046', factoryid='19')
    print(util.loadMain())
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,905
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/ChromeCookie.py
|
import os
import json
import base64
import win32crypt
from Crypto.Cipher import AES
import sqlite3
'''
[(0, 'creation_utc', 'INTEGER', 1, None, 0), (1, 'host_key', 'TEXT', 1, None, 0), (2, 'name', 'TEXT', 1, None, 0), (3, 'value', '
TEXT', 1, None, 0), (4, 'path', 'TEXT', 1, None, 0), (5, 'expires_utc', 'INTEGER', 1, None, 0), (6, 'is_secure', 'INTEGER', 1, No
ne, 0), (7, 'is_httponly', 'INTEGER', 1, None, 0), (8, 'last_access_utc', 'INTEGER', 1, None, 0), (9, 'has_expires', 'INTEGER', 1
, '1', 0), (10, 'is_persistent', 'INTEGER', 1, '1', 0), (11, 'priority', 'INTEGER', 1, '1', 0), (12, 'encrypted_value', 'BLOB', 0
, "''", 0), (13, 'samesite', 'INTEGER', 1, '-1', 0), (14, 'source_scheme', 'INTEGER', 1, '0', 0)]
'''
sql = """
SELECT
host_key, name, path,encrypted_value as value
FROM
cookies
"""
def get_decrypted_key():
    """Return Chrome's AES cookie key, unwrapped with the Windows DPAPI.

    Reads the base64 key from Local State, strips the 5-byte "DPAPI" prefix,
    and unprotects it for the current Windows user.
    """
    path = r'%LocalAppData%\Google\Chrome\User Data\Local State'
    path = os.path.expandvars(path)
    with open(path, 'r', encoding='utf8') as file:
        encrypted_key = json.loads(file.read())['os_crypt']['encrypted_key']
    encrypted_key = base64.b64decode(encrypted_key)  # Base64 decoding
    encrypted_key = encrypted_key[5:]  # Remove DPAPI prefix
    decrypted_key = win32crypt.CryptUnprotectData(encrypted_key, None, None, None, 0)[1]  # Decrypt key
    # print("decrypt",decrypted_key)
    return decrypted_key
# Read raw (still-encrypted) cookie rows from Chrome's sqlite Cookies DB.
def get_chrome_cookie():
    """Return all rows of (host_key, name, path, encrypted_value)."""
    cookies_path = os.environ['HOMEPATH'] + r'\AppData\Local\Google\Chrome\User Data\Default\Cookies'
    # NOTE(review): joining LOCALAPPDATA + HOMEPATH with an already
    # HOMEPATH-rooted path only works because os.path.join discards earlier
    # components before a rooted one — confirm this resolves on all targets.
    cookies_path = os.path.join(os.environ['LOCALAPPDATA'], os.environ['HOMEPATH'], cookies_path)
    con = sqlite3.connect(cookies_path)
    res = con.execute(sql).fetchall()
    # names = con.execute('PRAGMA table_info([cookies])').fetchall()
    # print(names)
    con.close()
    # print(res)
    return res
def decrypt_chrome_cookie(decrypted_key, data):
    """Decrypt one Chrome "v10" AES-GCM cookie value.

    Args:
        decrypted_key: the DPAPI-unwrapped AES key from Local State.
        data: raw encrypted_value blob from the Cookies sqlite DB.

    Returns:
        bytes: the plaintext cookie value, or b"" for legacy (non-v10) blobs.
    """
    if data[:3] == b'v10':
        # Blob layout: b"v10" | 12-byte nonce | ciphertext | 16-byte GCM tag.
        nonce = data[3:3 + 12]
        ciphertext = data[3 + 12:-16]
        tag = data[-16:]
        cipher = AES.new(decrypted_key, AES.MODE_GCM, nonce=nonce)
        # NOTE: the GCM tag is deliberately not verified (decrypt_and_verify
        # was commented out upstream); a tampered DB yields garbage, not an error.
        # plaintext = cipher.decrypt_and_verify(ciphertext, tag)  # verified variant
        plaintext = cipher.decrypt(ciphertext)
        return plaintext
    else:
        # BUG FIX: return bytes, not str — callers do str(plaintext,
        # encoding="utf-8"), which raises TypeError on a str input.
        # print('old cookie none decrypt')
        return b""
def fetch_chrome_cookies(domain=''):
    """Return decrypted Chrome cookies whose host contains *domain*.

    Returns:
        list of {"host", "name", "path", "value"} dicts.
    """
    rows = get_chrome_cookie()
    # PERF FIX: the AES key is loop-invariant — the old code re-read Local
    # State and re-ran DPAPI for every single cookie. Fetch it once.
    key = get_decrypted_key()
    cookies = []  # renamed from `list`, which shadowed the builtin
    for row in rows:
        if domain in row[0]:
            # row = (host_key, name, path, encrypted_value)
            plaintext = decrypt_chrome_cookie(key, row[3])
            value = str(plaintext, encoding="utf-8")
            # print("host:", row[0], "name:", row[1], "path:", row[2], "value:", value)
            cookies.append({"host": row[0], "name": row[1], "path": row[2], "value": value})
    return cookies
def fetch_chrome_cookie(domain=''):
    """Return a "name=value; name=value" Cookie header string for *domain*.

    BUG FIX: each entry appends "; " (two chars), but the old code sliced
    only one char off ([:-1]), leaving a dangling ';' at the end. The
    sibling implementation in cookie_test.py correctly uses [:-2].
    """
    cookie_list = fetch_chrome_cookies(domain)
    cookieValue = ''
    for item in cookie_list:
        cookieValue += item['name'] + '=' + item['value'] + '; '
    # print("fetch_chrome_cookie:" + cookieValue)
    return cookieValue[:-2]
if __name__ == '__main__':
print(fetch_chrome_cookie('xiaomi.com'))
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,906
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/asdfsd.py
|
# 载入必要的模块
import wx
import os
import pygame
from pygame.locals import *
import itertools
import random
# 创建类
class Example(wx.Frame):
    """Small wx demo window: a login form guarded by a pygame-rendered captcha."""

    def __init__(self, parent, title):
        # Initialise the parent wx.Frame with a fixed 320x220 window.
        super(Example, self).__init__(parent, title=title, size=(320, 220))
        self.InitUI()
        self.Centre()
        self.Show()

    # Render a fresh captcha image and save it to disk as val.png.
    def generate_picture(self):
        # pygame is only used for its font renderer.
        pygame.init()
        # Font face and size for the captcha text.
        font = pygame.font.SysFont('consolas', 64)
        # Build [A-Za-z0-9], shuffle, and take the first four chars as the captcha.
        chr_num_lst = list(itertools.chain([chr(ord('A') + _) for _ in range(26)], \
                                           [chr(ord('a') + _) for _ in range(26)], \
                                           [str(_) for _ in range(10)]))
        random.shuffle(chr_num_lst)
        self.val_text = chr_num_lst[0] + chr_num_lst[1] + chr_num_lst[2] + chr_num_lst[3]
        # Render blue text on a red background.
        ftext = font.render(self.val_text, True, (0, 0, 255), (255, 0, 0))
        # Save next to the current working directory.
        pygame.image.save(ftext, r"%s/val.png" % os.getcwd())

    def InitUI(self):
        # Generate the initial captcha image.
        self.generate_picture()
        # Lay out the form with a GridBagSizer.
        panel = wx.Panel(self)
        sizer = wx.GridBagSizer(10, 20)  # 10px column gap, 20px row gap
        # Row 1: account label + text field spanning columns 2-3.
        text = wx.StaticText(panel, label="账号")
        sizer.Add(text, pos=(0, 0), flag=wx.ALL, border=5)
        self.tc = wx.TextCtrl(panel)
        sizer.Add(self.tc, pos=(0, 1), span=(1, 2), flag=wx.EXPAND | wx.ALL, border=5)
        # Row 2: password label + star-masked text field.
        text1 = wx.StaticText(panel, label="密码")
        sizer.Add(text1, pos=(1, 0), flag=wx.ALL, border=5)
        tc1 = wx.TextCtrl(panel, style=wx.TE_PASSWORD)
        sizer.Add(tc1, pos=(1, 1), span=(1, 2), flag=wx.EXPAND | wx.ALL, border=5)
        # Row 3: captcha label + entry field + captcha image.
        text2 = wx.StaticText(panel, label="验证码")
        sizer.Add(text2, pos=(2, 0), flag=wx.ALL, border=5)
        self.tc2 = wx.TextCtrl(panel)
        sizer.Add(self.tc2, pos=(2, 1), flag=wx.ALL, border=5)
        # Load the captcha image and convert it to a Bitmap for display.
        image = wx.Image(r'%s/val.png' % os.getcwd(),
                         wx.BITMAP_TYPE_PNG).Rescale(80, 25).ConvertToBitmap()
        self.bmp = wx.StaticBitmap(panel, -1, image)
        sizer.Add(self.bmp, pos=(2, 2), flag=wx.ALL, border=5)
        # Row 4: login button, wired to login_process.
        btn = wx.Button(panel, -1, "登录")
        sizer.Add(btn, pos=(3, 1), flag=wx.ALL, border=5)
        self.Bind(wx.EVT_BUTTON, self.login_process, btn)
        # Fit the panel to the sizer.
        panel.SetSizerAndFit(sizer)

    # Button handler: validate the captcha input against the rendered text.
    def login_process(self, event):
        self.input_val = self.tc2.GetValue()  # what the user typed
        # Case-insensitive comparison, then a message box with the result.
        if self.input_val.lower() == self.val_text.lower():
            wx.MessageBox("登录成功!\n欢迎您,%s!" % self.tc.GetValue(), '登录结果', wx.OK | wx.ICON_INFORMATION)
        else:
            wx.MessageBox("登录失败!请重试!", '登录结果', wx.OK | wx.ICON_INFORMATION)
        self.tc2.SetValue("")  # clear the captcha entry
        self.generate_picture()  # produce a fresh captcha
        # Reload the new captcha image as a Bitmap.
        image = wx.Image(r'%s/val.png' % os.getcwd(), wx.BITMAP_TYPE_PNG).Rescale(80, 25).ConvertToBitmap()
        # Swap the displayed bitmap in place.
        self.bmp.SetBitmap(wx.BitmapFromImage(image))
# Program entry point.
def main():
    """Create the wx application, show the demo window, and run the event loop."""
    app = wx.App()
    Example(None, title='图片验证GUI')
    app.MainLoop()


if __name__ == '__main__':
    # IDIOM FIX: guard the call so importing this module no longer launches the GUI.
    main()
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,907
|
chengyan1984/cdk-gui
|
refs/heads/master
|
/cookie_test.py
|
import os
import sys
import sqlite3
import http.cookiejar as cookiejar
import json, base64
import requests
import aesgcm
sql = """
SELECT
host_key, name, path,encrypted_value as value
FROM
cookies
"""
def dpapi_decrypt(encrypted):
    """Decrypt a Windows DPAPI blob for the current user via CryptUnprotectData.

    Raises WinError when the API call fails. The output buffer allocated by
    Windows is released with LocalFree.
    """
    import ctypes
    import ctypes.wintypes

    class DATA_BLOB(ctypes.Structure):
        # Mirrors the Win32 DATA_BLOB struct (length + byte pointer).
        _fields_ = [('cbData', ctypes.wintypes.DWORD),
                    ('pbData', ctypes.POINTER(ctypes.c_char))]

    p = ctypes.create_string_buffer(encrypted, len(encrypted))
    blobin = DATA_BLOB(ctypes.sizeof(p), p)
    blobout = DATA_BLOB()
    retval = ctypes.windll.crypt32.CryptUnprotectData(
        ctypes.byref(blobin), None, None, None, None, 0, ctypes.byref(blobout))
    if not retval:
        raise ctypes.WinError()
    result = ctypes.string_at(blobout.pbData, blobout.cbData)
    # Windows allocated the output buffer; release it explicitly.
    ctypes.windll.kernel32.LocalFree(blobout.pbData)
    return result
def unix_decrypt(encrypted):
    """Decrypt a Linux Chrome cookie blob (AES-128-CBC, PBKDF2-derived key).

    Linux Chrome's default (non-keyring) password is the literal 'peanuts'
    with a single PBKDF2 iteration. Returns None for blobs too short to carry
    data; raises NotImplementedError on non-Linux platforms.
    """
    if not encrypted or len(encrypted) <= 3:
        return None
    print("unix_decrypt encrypted={}".format(encrypted))
    if sys.platform.startswith('linux'):
        password = 'peanuts'.encode('utf8')
        iterations = 1
    else:
        raise NotImplementedError
    from Crypto.Cipher import AES
    from Crypto.Protocol.KDF import PBKDF2
    salt = b'saltysalt'
    iv = b' ' * 16
    length = 16
    key = PBKDF2(password, salt, length, iterations)
    cipher = AES.new(key, AES.MODE_CBC, IV=iv)
    # First 3 bytes are the "v10"/"v11" version prefix.
    decrypted = cipher.decrypt(encrypted[3:])
    print("unix_decrypt decrypted={}".format(decrypted))
    # return decrypted[:-ord(decrypted[-1])]
    # Strip PKCS#7 padding: the last byte gives the pad length.
    return decrypted[:-decrypted[-1]]
def get_key_from_local_state():
    """Return the base64-encoded, DPAPI-wrapped AES key from Chrome's Local State.

    ROBUSTNESS FIX: parse the whole file with json.load instead of
    json.loads(f.readline()) — readline() breaks whenever the JSON is
    pretty-printed across multiple lines.
    """
    state_path = os.path.join(os.environ['LOCALAPPDATA'], r"Google\Chrome\User Data\Local State")
    with open(state_path, encoding='utf-8', mode="r") as f:
        jsn = json.load(f)
    return jsn["os_crypt"]["encrypted_key"]
def aes_decrypt(encrypted_txt):
    """Decrypt a Windows Chrome 80+ "v10" cookie blob with the Local State key."""
    encoded_key = get_key_from_local_state()
    encrypted_key = base64.b64decode(encoded_key.encode())
    encrypted_key = encrypted_key[5:]  # drop the 5-byte "DPAPI" prefix
    key = dpapi_decrypt(encrypted_key)
    nonce = encrypted_txt[3:15]  # 12-byte GCM nonce after the "v10" marker
    cipher = aesgcm.get_cipher(key)
    return aesgcm.decrypt(cipher, encrypted_txt[15:], nonce)
def chrome_decrypt(encrypted_txt):
    """Dispatch cookie decryption by platform and blob format.

    On Windows: DPAPI blobs start with 01 00 00 00; Chrome 80+ blobs start
    with b"v10" (AES-GCM; the trailing 16 bytes are the auth tag and are
    stripped before decoding). Returns None when decryption fails; other
    platforms delegate to unix_decrypt.
    """
    if sys.platform == 'win32':
        try:
            if encrypted_txt[:4] == b'\x01\x00\x00\x00':
                decrypted_txt = dpapi_decrypt(encrypted_txt)
                return decrypted_txt.decode()
            elif encrypted_txt[:3] == b'v10':
                decrypted_txt = aes_decrypt(encrypted_txt)
                return decrypted_txt[:-16].decode()
        except WindowsError:
            return None
    else:
        return unix_decrypt(encrypted_txt)
        # try:
        #
        # except NotImplementedError:
        #     return None
def to_epoch(chrome_ts):
    """Convert a Chrome/WebKit timestamp (microseconds since 1601-01-01 UTC)
    to microseconds since the Unix epoch (1970-01-01 UTC).

    Returns None for falsy input (0 / None), mirroring empty DB values.
    """
    if chrome_ts:
        # BUG FIX: the offset was written as 11644473600 * 000 * 1000, which
        # evaluates to 0 and left timestamps unconverted. The 1601->1970 gap
        # is 11,644,473,600 seconds = 11644473600 * 1000 * 1000 microseconds.
        return chrome_ts - 11644473600 * 1000 * 1000
    else:
        return None
class ChromeCookieJar(cookiejar.FileCookieJar):
    """FileCookieJar that reads (and decrypts) cookies straight from Chrome's sqlite DB."""

    def __init__(self, filename=None, delayload=False, policy=None):
        # Decrypted cookies accumulate here as {"name","value","host","path"} dicts.
        self.cookies = []
        if filename is None:
            # Locate the default Chrome/Chromium profile per platform.
            if sys.platform == 'win32':
                filename = os.path.join(
                    os.environ['USERPROFILE'],
                    r'AppData\Local\Google\Chrome\User Data\default\Cookies')
                '''
                AppData\\Local\\Google\\Chrome\\User Data\\Profile [n]\\Cookies
                '''
            elif sys.platform.startswith('linux'):
                filename = os.path.expanduser(
                    '~/.config/google-chrome/Default/Cookies')
                if not os.path.exists(filename):
                    filename = os.path.expanduser(
                        '~/.config/chromium/Default/Cookies')
                if not os.path.exists(filename):
                    filename = None
        cookiejar.FileCookieJar.__init__(self, filename, delayload, policy)

    def _really_load(self, f, filename, ignore_discard, ignore_expires):
        """Populate self.cookies from the sqlite file (invoked by FileCookieJar.load)."""
        con = sqlite3.connect(filename)
        con.row_factory = sqlite3.Row
        # NOTE(review): these SQL functions are registered but the query in
        # `sql` never references them — decryption actually happens in Python below.
        con.create_function('decrypt', 1, chrome_decrypt)
        con.create_function('to_epoch', 1, to_epoch)
        cur = con.cursor()
        cur.execute(sql)
        for row in cur:
            if row['value'] is not None:
                name = row['name']
                value = chrome_decrypt(row['value'])
                host = row['host_key']
                path = row['path']
                cookie = {"name": name, "value": value, "host": host, "path": path}
                self.cookies.append(cookie)
                # print("host:" + str(host) + " path:" + str(path) + " name:" + str(name) + " value:" + str(value))
        cur.close()
def isDesiredDomain(origin, dest, isExact=True):
    """Match cookie host *origin* against *dest*.

    Equality when isExact is True, substring containment otherwise.
    """
    if isExact:
        return origin == dest
    return dest in origin
def existInDomain(domain, cookie, isExact=True):
    """Decide whether *cookie* matches one domain spec.

    A spec is {"domain": host, "fields": [names...], "filters": [names...]}:
    "fields" whitelists cookie names, "filters" blacklists them.
    """
    if isDesiredDomain(cookie['host'], domain['domain'], isExact):
        if "fields" in domain and domain["fields"] and len(domain['fields']) > 0:
            for field in domain['fields']:
                if field == cookie['name']:
                    return True
        else:
            return True
        # NOTE(review): when "fields" is present but nothing matched, control
        # falls through to the filters check and can still return True —
        # confirm whether a non-whitelisted cookie should be rejected here.
        if "filters" in domain and domain["filters"] and len(domain['filters']) > 0:
            for filter_item in domain['filters']:
                if filter_item == cookie['name']:
                    return False
            return True
        else:
            return True
    return False
def existInArray(domains, cookie, isExact=True):
    """True when *cookie* matches any domain spec, or when no specs are given."""
    if not domains:
        return True
    return any(existInDomain(spec, cookie, isExact) for spec in domains)
def fetch_chrome_cookie(domains=None, isExact=True):
    """Build a "name=value; name=value" Cookie header from the local Chrome store.

    Args:
        domains: optional list of {"domain", "fields", "filters"} specs;
            None or empty means "include all cookies".
        isExact: exact host match when True, substring match when False.

    Returns:
        The joined cookie string, or "" on any failure.
    """
    # BUG FIX: the default used to be a mutable list (domains=[]); use None
    # to avoid the shared-mutable-default pitfall. existInArray treats None
    # and [] identically, so callers are unaffected.
    try:
        jar = ChromeCookieJar()
        jar.load()
        cookieValue = ''
        for item in jar.cookies:
            if existInArray(domains, item, isExact):
                cookieValue += item['name'] + '=' + item['value'] + '; '
        # Drop the trailing "; ".
        return cookieValue[:-2]
    except Exception as e:
        # Best-effort: the cookie DB may be locked or absent — report and return empty.
        print("fetch_chrome_cookie", e)
        return ""
if __name__ == '__main__':
    # Manual end-to-end check: lift .jd.com cookies from Chrome and replay a
    # receipt/query.json search against jdfw.jd.com with them.
    coo = fetch_chrome_cookie([{"domain": ".jd.com"}], False)
    print(coo)
    session = requests.Session()
    cookie = coo
    headers = {'Content-Type': 'application/x-www-form-urlencoded',
               'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36',
               'Host': 'jdfw.jd.com', 'Origin': 'http://jdfw.jd.com', 'Accept-Encoding': 'gzip, deflate',
               'Cookie': cookie, 'Accept-Language': 'zh-CN,zh;q=0.9', 'Connection': 'keep-alive',
               'Accept': 'application/json, text/javascript, */*; q=0.01', "X-Requested-With": "XMLHttpRequest",
               'Referer': 'http://jdfw.jd.com/receipt/receiptDashboardIndex?homePageDistinguish=notAppointed&serviceType=0'}
    data = {
        "esSwitch": "1", "subCompanyId": "10", "wareInfoId": "lw_10_334%%603_2", "outletsId": "0755860394",
        "sortKind": "4", "page": "1", "rows": "20", "sort": "returnTime", "order": "desc", "serviceType": "0",
        "fastDealNum": "5"
    }
    # Build the query string by hand, appending the long tail of empty filter
    # fields the endpoint expects, then parse it back into a dict for requests.
    result = ""
    for item in data:
        result += item + "=" + data[item] + "&"
    result = result + "freeinstall=&startStatus=&endStatus=&timeout=&todayOtherReservationConditionName=&productBrand=&productType1=&productType2=&productType3=&orderId=&bizOrderId=&ordernoGroup=&customerName=&customerPhone=&serviceStreet=&wareId=&productName=&orderStatus=&orderStatusGroup=&createOrderTimeBegin=&createOrderTimeEnd=&reservationDateBegin=&reservationDateEnd=&firstReservationTimeBegin=&firstReservationTimeEnd=&changedReservationDateBegin=&changedReservationDateEnd=&feedbackStatus=&orderOrderStatus=&expectAtHomeDateBegin=&expectAtHomeDateEnd=&atHomeFinishDateBegin=&atHomeFinishDateEnd=&deliveryDateStart=&deliveryDateEnd=&homePageDistinguish=&fastDealNumByColor=&reservationStatus=&reportLessFlag=&superExperienceStore=&sourceOrderIdGroup=&sellerId=&sellerName=&eclpBusinessNo=&isFast="
    print(result)
    params = {}
    datas = result.split("&")
    for data in datas:
        content = data.split("=")
        if len(content) > 1:
            params[content[0]] = content[1]
    response = session.post("http://jdfw.jd.com/receipt/query.json", headers=headers, data=params)
    print(response.text)
|
{"/SuningUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/huadi_zb.py": ["/Util.py"], "/TCSMCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/CDKCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/GreeUtil.py": ["/Util.py"], "/MideaUtil.py": ["/BaseUtil.py"], "/BaseUtil.py": ["/Util.py", "/cookie_test.py"], "/master.py": ["/searchutil.py"], "/login.py": ["/CDKUtil.py"], "/MideaCookieUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/test/http2.py": ["/BaseUtil.py"], "/MIUtil.py": ["/BaseUtil.py", "/cookie_test.py"], "/JDUtil.py": ["/BaseUtil.py"], "/cookie_test.py": ["/aesgcm.py"]}
|
3,910
|
anthony1110/web_scraper
|
refs/heads/master
|
/web_scrapper/mongo_client.py
|
# Shared MongoDB handles for the scraper (hard-coded local instance).
from pymongo import MongoClient

MONGO_CLIENT = MongoClient('localhost', 27017)  # default local mongod
NEWS_DB = MONGO_CLIENT['news']  # database holding scraped news
NEWS_CONTENT_COLLECTION = NEWS_DB.news_content  # collection of article documents
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,911
|
anthony1110/web_scraper
|
refs/heads/master
|
/crawl_bot/crawl_bot/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from news_scrappy.models import NewsContent
from news_scrappy.utils import beautify_text, correct_bbc_article_link_to_full_path
class CrawlBotPipeline(object):
    """Scrapy pipeline that persists each scraped item as a NewsContent document."""

    def process_item(self, item, spider):
        """Normalise the first value of each scraped field and save it to MongoDB."""
        def first_or_empty(key, transform):
            # Scrapy selectors yield lists; take the first entry or fall back to ''.
            values = item.get(key)
            return transform(values[0]) if len(values) > 0 else ''

        news = NewsContent(
            article_headline=first_or_empty('title', beautify_text),
            article_text=first_or_empty('summary', beautify_text),
            article_url=first_or_empty('link', correct_bbc_article_link_to_full_path),
            article_tag=first_or_empty('tag', beautify_text),
        )
        news.save()
        return item
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,912
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from mongoengine import Document, StringField
class NewsContent(Document):
    """MongoEngine document storing one scraped news article."""
    # visible field
    article_text = StringField()  # article summary/body text
    article_headline = StringField()  # headline
    article_url = StringField()  # absolute link to the article
    article_tag = StringField()  # section/tag label
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,913
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/urls.py
|
from django.conf.urls import include, url
from rest_framework import routers
from news_scrappy import views
from news_scrappy.api import api_views
# URL routing for the news_scrappy app: one HTML page plus one JSON API endpoint.
urlpatterns = [
    # Main page
    url(r'^$', views.main_page),
    # API URL
    url(r'^api/query/$', api_views.NewsQuery.as_view(), name='news_query'),
]
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,914
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/utils.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
def beautify_text(text):
    """Flatten newlines to spaces and trim surrounding whitespace.

    Accepts any value; it is coerced with str() first.
    """
    return str(text).replace("\n", " ").strip()
def correct_bbc_article_link_to_full_path(url):
    """Return an absolute URL for a scraped BBC article link.

    Handles site-relative links ("/news/...") and — GENERALIZATION —
    protocol-relative links ("//www.bbc.com/..."), which the previous
    version mangled into "http://www.bbc.com//...". Absolute http(s)
    URLs pass through unchanged.
    """
    url = str(url).strip()
    if url.startswith('//'):
        # Protocol-relative: only a scheme is missing.
        url = 'http:' + url
    elif not url.startswith('http'):
        url = 'http://www.bbc.com' + url
    return url
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,915
|
anthony1110/web_scraper
|
refs/heads/master
|
/crawl_bot/crawl_bot/spiders/news_crawling.py
|
from scrapy.spiders import BaseSpider
class NewsSpider(BaseSpider):
    """Scrapy spider that collects story cards from the BBC front page."""
    name = "news_crawling"
    start_urls = ['http://www.bbc.com/']

    def parse(self, response):
        """Yield one dict per media brick with raw title/summary/link/tag selector lists."""
        for brick in response.css('div.media__content'):
            # CSS selectors for the parts of one story card.
            TITLE_SELECTOR = 'h3 a ::text'
            SUMMARY_SELECTOR = 'p ::text'
            LINK_SELECTOR = '.media__title a ::attr(href)'
            TAG_SELECTOR = '.media__tag ::text'
            yield {
                'title': brick.css(TITLE_SELECTOR).extract(),
                'summary': brick.css(SUMMARY_SELECTOR).extract(),
                'link': brick.css(LINK_SELECTOR).extract(),
                'tag': brick.css(TAG_SELECTOR).extract(),
            }
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,916
|
anthony1110/web_scraper
|
refs/heads/master
|
/web_scrapper/configurations/gunicorn.conf.py
|
# Gunicorn settings for the web_scraper service.
bind = "127.0.0.1:29004"  # local-only bind; presumably fronted by a reverse proxy -- confirm
logfile = "/data/projects/logs/web_scraper.gunicorn.log"
NUM_WORKERS=4  # NOTE(review): not a standard gunicorn setting ('workers' is) -- confirm it is consumed by a launcher script
DJANGODIR='/data/projects/web_scraper'  # NOTE(review): likewise looks like a shell-script variable, not a gunicorn option
timeout = 60
proc_name = "web_scraper"
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,917
|
anthony1110/web_scraper
|
refs/heads/master
|
/web_scrapper/settings_local.py
|
# Local development overrides (do not deploy with DEBUG = True).
PRODUCTION = False
DEBUG = True
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,918
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/management/commands/mongo_api.py
|
import requests
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    """Management command that queries the news_scrappy REST API over HTTP."""
    help = 'Scrapy API in command.'

    def add_arguments(self, parser):
        parser.add_argument('--host', type=str, help="URL that need to be query.")
        parser.add_argument('--query_any', type=str, help="keyword to query any word in for crawling info.")
        parser.add_argument('--query_article_text', type=str, help="keyword to query article text")
        parser.add_argument('--query_article_headline', type=str, help="keyword to query article headline")
        parser.add_argument('--query_article_tag', type=str, help="keyword to query article tag.")

    def handle(self, *args, **options):
        """Build the query URL from the first supplied filter option and
        print the API response, its status code, and the result count."""
        default_url = options['host'] + "/news_scrappy/api/query/"
        if options.get('query_any'):
            url = default_url + "?query_any=" + options['query_any']
        elif options.get('query_article_text'):
            url = default_url + "?query_article_text=" + options['query_article_text']
        elif options.get('query_article_headline'):
            url = default_url + "?query_article_headline=" + options['query_article_headline']
        elif options.get('query_article_tag'):
            url = default_url + "?query_article_tag=" + options['query_article_tag']
        else:
            # Bug fix: `url` used to be unbound (NameError) when no filter
            # option was supplied; fall back to an unfiltered query.
            url = default_url
        response = requests.get(url)
        self.stdout.write(self.style.SUCCESS('Query result = "%s"' % response.text))
        self.stdout.write(self.style.SUCCESS('Query status code = "%s"' % response.status_code))
        self.stdout.write(self.style.SUCCESS('Query number of results"%s"' % len(response.json())))
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,919
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/api/serializers.py
|
from rest_framework_mongoengine.serializers import DocumentSerializer
from news_scrappy.models import NewsContent
class NewsContentSerializer(DocumentSerializer):
    """DRF-mongoengine serializer exposing all NewsContent fields."""
    class Meta:
        model = NewsContent
        fields = ('id', 'article_text', 'article_headline', 'article_url', 'article_tag')
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,920
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/api/api_views.py
|
import datetime
import pprint
import pymongo
from django.contrib.auth.models import User
from django.utils import log
from mongoengine import Q
from rest_framework import status
from rest_framework import viewsets
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.views import APIView
from rest_framework.response import Response
from news_scrappy.api.serializers import NewsContentSerializer
from news_scrappy.models import NewsContent
class NewsQuery(APIView):
    """Search stored NewsContent documents via query-string parameters.

    Exactly one of query_any / query_article_text / query_article_headline /
    query_article_tag is honoured (in that priority order); with no
    parameter, all documents are returned.
    """
    queryset = NewsContent.objects.all()
    serializer_class = NewsContentSerializer

    def get(self, request, format=None):
        """Return serialized NewsContent documents matching the query."""
        get_dict = request.GET.copy()
        query_any_value = get_dict.get('query_any')
        query_article_text_value = get_dict.get('query_article_text')
        query_article_headline_value = get_dict.get('query_article_headline')
        query_article_tag_value = get_dict.get('query_article_tag')
        query = Q()
        if query_any_value:
            # 'any' searches text, headline and tag with a case-insensitive match
            query = (Q(article_text__icontains=query_any_value) |
                     Q(article_headline__icontains=query_any_value) |
                     Q(article_tag__icontains=query_any_value))
        elif query_article_text_value:
            query = Q(article_text__icontains=query_article_text_value)
        elif query_article_headline_value:
            query = Q(article_headline__icontains=query_article_headline_value)
        elif query_article_tag_value:
            query = Q(article_tag__icontains=query_article_tag_value)
        # Bug fix: removed leftover Python 2 `print query` debug statement,
        # which is a SyntaxError under Python 3.
        news_obj = NewsContent.objects.filter(query)
        serializer = NewsContentSerializer(news_obj, many=True)
        # return JsonResponse(serializer.data, safe=False)
        return Response(serializer.data)
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
3,921
|
anthony1110/web_scraper
|
refs/heads/master
|
/news_scrappy/views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import JsonResponse
from django.shortcuts import render, render_to_response
# Create your views here.
from django.template import RequestContext
def main_page(request):
    """Placeholder landing view: responds with an empty JSON object."""
    return JsonResponse({}, safe=False)
|
{"/crawl_bot/crawl_bot/pipelines.py": ["/news_scrappy/models.py", "/news_scrappy/utils.py"], "/news_scrappy/api/serializers.py": ["/news_scrappy/models.py"]}
|
4,011
|
yongzhengqi/EMERITUS
|
refs/heads/master
|
/train.py
|
#!/usr/bin/env python3
import torch
import torch.utils.data as Data
from tensorboardX import SummaryWriter
from time import localtime, strftime
from dataset import DataProvider
from utils import *
from model import Net
if __name__ == '__main__':
    # Skip-gram embedding training loop: one "epoch" below is one mini-batch
    # step, not a full pass over the corpus.
    print("basic settings:\ninput file name: {}\nwindow size: {}\ndimensionality: {}".format(config.input_filename,
                                                                                             config.window,
                                                                                             config.dim))
    # initialize tensorboard
    tb_log_dir = 'logs/' + strftime("%Y-%m-%d-%H:%M:%S", localtime())
    tb_writer = SummaryWriter(tb_log_dir)
    # initialize dataset
    data_provider = DataProvider(config.input_filename)
    data_loader = data_provider.get_training_set(config.dataset_size)
    loader_itr = iter(data_loader)
    # initialize model (wrapped for multi-GPU data parallelism; requires CUDA)
    net = Net(data_provider.get_voc_size(), config.dim)
    net = net.cuda()
    net_multi_gpu = nn.DataParallel(net)
    gpu_num = torch.cuda.device_count()
    # specifying optimizing method
    criterion = nn.MSELoss()
    optimizer = optim.Adam(net_multi_gpu.parameters())
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=config.lr_adj_pat, min_lr=config.lr_min)
    # training
    for cur_epoch in tqdm(range(config.max_epoch), desc='training on {} GPUs...'.format(gpu_num)):
        # cycle the DataLoader endlessly: restart the iterator when exhausted
        try:
            mini_batch = next(loader_itr)
        except StopIteration:
            loader_itr = iter(data_loader)
            mini_batch = next(loader_itr)
        batched_x, batched_y = mini_batch
        batched_x, batched_y = batched_x.cuda(), batched_y.cuda()
        optimizer.zero_grad()
        output = net_multi_gpu(batched_x)
        loss = criterion(output, batched_y)
        loss.backward()
        optimizer.step()
        # periodic tensorboard logging
        if (cur_epoch % config.tb_upd_gap) == 0:
            loss_var = loss.data.cpu().numpy()
            print('training loss: {}'.format(loss_var))
            tb_writer.add_scalar('training loss', loss_var, cur_epoch)
        # periodic durable check-points (one file per check-point)
        if (cur_epoch % config.ckpt_save_gap) == 0:
            print('saving check point...')
            embed_vec = net_multi_gpu.module.fe.weight.detach().cpu().numpy()
            save_model(data_provider.get_voc(), embed_vec, './results/{}-epoch.ckpt'.format(cur_epoch))
        # periodic "latest" snapshot consumed by tester.py
        if (cur_epoch % config.latest_upd_gap) == 0:
            print('updating latest model...')
            embed_vec = net_multi_gpu.module.fe.weight.detach().cpu().numpy()
            save_features(embed_vec, cur_epoch, tb_log_dir, data_provider.word2idx)
            save_model(data_provider.get_voc(), embed_vec, config.latest_ckpt_dir)
        # NOTE(review): this increment is immediately overwritten by the
        # for-loop on the next iteration -- appears to be a no-op; confirm.
        cur_epoch += 1
        scheduler.step(loss)
    # final export of the embedding matrix
    embed_vec = net_multi_gpu.module.fe.weight.detach().cpu().numpy()
    save_model(data_provider.get_voc(), embed_vec, config.output_filename)
|
{"/train.py": ["/dataset.py", "/utils.py", "/model.py"], "/utils.py": ["/common.py"], "/tester.py": ["/common.py"], "/dataset.py": ["/common.py"]}
|
4,012
|
yongzhengqi/EMERITUS
|
refs/heads/master
|
/utils.py
|
import torch.nn as nn
import torch.optim as optim
from tqdm import tqdm
import numpy as np
from common import config
import json
def save_model(words, features, filename):
    """Write word-embedding pairs to *filename*, one word per line.

    Each line is the word followed by its feature values, space-separated
    (with a trailing space before the newline, matching the original format).

    :param words: iterable of vocabulary words
    :param features: iterable of per-word feature vectors, aligned with words
    :param filename: destination path
    """
    # `with` guarantees the file is closed even if a write raises, fixing
    # the resource leak of the bare open()/close() pair.
    with open(filename, 'w', encoding='utf-8') as output_file:
        for word, feature in zip(words, features):
            output_file.write('{} '.format(word))
            for num in feature:
                output_file.write('{} '.format(num))
            output_file.write('\n')
    print('result saved to {}'.format(filename))
def save_features(feats, cur_epoch, tb_log_dir, word2idx):
    """Dump feature vectors for the evaluation word list into the JSON
    check-point (config.valida_ckpt_dir) that tester.py polls.

    Words missing from the vocabulary get a random placeholder vector so
    the evaluation matrix keeps its shape.
    """
    word_list_file_name = 'evaluation/ITC/word_list.txt'
    word_list = []
    for idx, line in enumerate(open(word_list_file_name, 'r')):
        word_list.append(line.strip())
    feat_lst = []
    for idx, word in enumerate(word_list):
        if word in word2idx.keys():
            feat_lst.append(feats[word2idx[word]])
        else:
            print('word \'{}\' not found...'.format(word))
            # random ints in [-1, 1) stand in for out-of-vocabulary words
            feat_lst.append(np.random.randint(low=-1, high=1, size=config.dim))
    # tolist() so every vector is JSON-serializable
    feat_lst = [feat.tolist() for feat in feat_lst]
    check_point = {'feats': feat_lst, 'epoch': cur_epoch, 'log_dir': tb_log_dir}
    json.dump(check_point, open(config.valida_ckpt_dir, 'w'))
|
{"/train.py": ["/dataset.py", "/utils.py", "/model.py"], "/utils.py": ["/common.py"], "/tester.py": ["/common.py"], "/dataset.py": ["/common.py"]}
|
4,013
|
yongzhengqi/EMERITUS
|
refs/heads/master
|
/common.py
|
class Config:
    """All training/evaluation hyper-parameters and paths, accessed via the
    module-level `config` singleton below."""
    # the size of windows of skip-gram
    window = 3
    # the number of dimensions of features
    dim = 300
    # where you saved your corpus
    input_filename = './data/quora_questions_gbk_fixed.txt'
    # where you want to save the representation of your words
    output_filename = './results/output.window={}.dim={}'.format(window, dim)
    # if a word appears less than word_min_cnt times, it will be replaced
    word_min_cnt = 30
    # the max number of sentence used for training
    # set to None if you want to ignore this limit
    dataset_size = None
    # batch size of SGD
    batch_size = 2048
    # parameter in Negative sampling
    # see more at https://arxiv.org/abs/1301.3781
    ng_pow = 0.75
    # parameter in Negative sampling
    # see more at https://arxiv.org/abs/1301.3781
    ng_table_sz = 100000000
    # parameter in Negative sampling
    # see more at https://arxiv.org/abs/1301.3781
    ng_k = 5
    # if to lazy load the training set
    saved_training_set = None # 'data/training_set.json'
    # run how many mini-batches between two updates on tensorboard
    tb_upd_gap = 500
    # run how many mini-batches between updates on saved models
    latest_upd_gap = 5000
    # the gap between check points
    ckpt_save_gap = 5000
    # max mini-batch to train
    max_epoch = 300000
    # where to save check latest models
    latest_ckpt_dir = './results/latest'
    # where to save file for testing on validation set
    valida_ckpt_dir = './results/latest.json'
    # hyper-parameter on optimizing
    # see more at https://pytorch.org/docs/stable/optim.html#torch.optim.lr_scheduler.ReduceLROnPlateau
    # NOTE(review): patience is passed as a float (1e4); the scheduler
    # expects an integer count of epochs -- confirm this is intended.
    lr_adj_pat = 1e4
    # min learning rate
    # see more at https://pytorch.org/docs/stable/optim.html#torch.optim.lr_scheduler.ReduceLROnPlateau
    lr_min = 1e-5
config = Config()
|
{"/train.py": ["/dataset.py", "/utils.py", "/model.py"], "/utils.py": ["/common.py"], "/tester.py": ["/common.py"], "/dataset.py": ["/common.py"]}
|
4,014
|
yongzhengqi/EMERITUS
|
refs/heads/master
|
/tester.py
|
#!/usr/bin/env python3
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
word_num = 3000
import numpy as np
from common import config
from tensorboardX import SummaryWriter
from tqdm import tqdm
import json
import time
def read_word_list(fin):
    """Read one word per line from the open file *fin*.

    Closes *fin* and returns (word_list, word -> line-index map).
    """
    word_list = []
    word2idx = {}
    for position, raw_line in enumerate(fin):
        token = raw_line.strip()
        word_list.append(token)
        word2idx[token] = position
    fin.close()
    return word_list, word2idx
def read_gold_standard(fin, word2idx):
    """Parse 'word1 word2 similarity' lines from the open file *fin*.

    Pairs with an out-of-vocabulary word are dropped. Closes *fin* and
    returns a list of (idx1, idx2, similarity) tuples.
    """
    gold_standard = []
    for raw_line in fin:
        first, second, score = raw_line.strip().split()
        if first in word2idx and second in word2idx:
            gold_standard.append((word2idx[first], word2idx[second], float(score)))
    fin.close()
    return gold_standard
def eval_ITC(gold_standard, matrix):
    """Score the model's similarity matrix against the gold standard using
    Spearman's rank correlation: 1 - 6*sum(d^2) / (n*(n^2-1)).

    :param gold_standard: list of (idx1, idx2, annotated_similarity)
    :param matrix: pairwise similarity matrix indexed by word indices
    :return: correlation in [-1, 1]
    """
    rs = 0
    my_similarity = []
    for wid1, wid2, _ in gold_standard:
        my_similarity.append(matrix[wid1][wid2])
    n = len(my_similarity)
    # maps pair position -> rank of the model's similarity for that pair
    my_similarity_rank = {item[1]: item[0] for item in
                          enumerate(sorted(range(len(my_similarity)), key=lambda k: my_similarity[k]))}
    # gold pairs ordered by their annotated similarity (rank = list position)
    gold_similarity_rank = sorted(enumerate(gold_standard), key=lambda x: x[1][2])
    for rkg in range(len(gold_similarity_rank)):
        pair_id = gold_similarity_rank[rkg][0]
        rkm = my_similarity_rank[pair_id]
        # accumulate squared rank differences
        rs += (rkg - rkm) ** 2
    rs = 1 - 6 * (rs) / n / (n * n - 1)
    return rs
def get_norm(a):
    """Return the Euclidean (L2) norm of array *a*."""
    squared_sum = (a ** 2).sum()
    return squared_sum ** 0.5
def similarity(vec_a, vec_b):
    """Return the cosine similarity between *vec_a* and *vec_b*.

    Fixed a misleading rebinding: the previous version assigned the arrays
    with swapped names (`vec_b, vec_a = np.array(vec_a), np.array(vec_b)`).
    Cosine similarity is symmetric, so results were unaffected, but the
    names now match the values they hold.
    """
    vec_a, vec_b = np.array(vec_a), np.array(vec_b)
    dot = np.dot(vec_a, vec_b)
    cos_dis = dot / get_norm(vec_a) / get_norm(vec_b)
    return cos_dis
def get_matrix(feat_lst):
    """Build the word_num x word_num pairwise cosine-similarity matrix for
    the first word_num feature vectors in *feat_lst*."""
    dis_matrix = []
    for row_idx in tqdm(range(word_num), desc='creating distance matrix'):
        row = [similarity(feat_lst[row_idx], feat_lst[col_idx])
               for col_idx in range(word_num)]
        dis_matrix.append(row)
    return dis_matrix
if __name__ == "__main__":
    # Stand-alone validation loop: polls the trainer's "latest" check-point
    # and scores each new snapshot on the word-similarity gold standard.
    word_list_file_name = 'evaluation/ITC/word_list.txt'
    gold_standard_file_name = 'evaluation/ITC/wordsim_quora'
    word_list, word2idx = read_word_list(open(word_list_file_name))
    gold_standard = read_gold_standard(open(gold_standard_file_name), word2idx)
    last_epoch = -1
    while last_epoch < config.max_epoch - 1:
        data_pack = json.load(open(config.valida_ckpt_dir, 'r'))
        feats = data_pack['feats']
        epoch = data_pack['epoch']
        log_dir = data_pack['log_dir']
        # log into the same tensorboard run directory the trainer uses
        tb_writer = SummaryWriter(log_dir)
        if epoch == last_epoch:
            # trainer has not produced a new check-point yet; wait and retry
            print('latest model is the same, sleep for 30s')
            time.sleep(30)
            continue
        avg_norm = np.array([get_norm(np.array(feat)) for feat in feats]).mean()
        print('features\' average norm: {}'.format(avg_norm))
        tb_writer.add_scalar('average norm', avg_norm, epoch)
        last_epoch = epoch
        print('evaluating epoch {}...'.format(epoch))
        matrix = get_matrix(feats)
        validation_var = eval_ITC(gold_standard, matrix)
        tb_writer.add_scalar('validation score', validation_var, epoch)
        print('evaluation done, score = {}'.format(validation_var))
|
{"/train.py": ["/dataset.py", "/utils.py", "/model.py"], "/utils.py": ["/common.py"], "/tester.py": ["/common.py"], "/dataset.py": ["/common.py"]}
|
4,015
|
yongzhengqi/EMERITUS
|
refs/heads/master
|
/dataset.py
|
from tqdm import tqdm
import torch
import math
import numpy as np
import json
import torch.utils.data as Data
from common import config
class DataProvider:
    """Builds skip-gram + negative-sampling training data from a text corpus.

    The corpus is scanned once to build the vocabulary (words occurring
    fewer than config.word_min_cnt times map to the sentinel index -1),
    then get_training_set() produces a DataLoader of ([anchor, other], label)
    pairs where label 1 marks an in-window context word and label 0 a
    negative sample.
    """
    class NegativeTable:
        """Frequency-biased (cnt^ng_pow) sampling table for negative
        sampling; see https://arxiv.org/abs/1301.3781."""
        def __init__(self, vocab, word2idx):
            # total smoothed mass of all sufficiently frequent words
            prob_sum = 0.0
            for word, cnt in vocab.items():
                if cnt >= config.word_min_cnt:
                    prob_sum += math.pow(cnt, config.ng_pow)
            # each word occupies a table share proportional to cnt^ng_pow
            neg_table = []
            for word, cnt in tqdm(vocab.items(), desc='Initializing Negative Table'):
                if cnt >= config.word_min_cnt:
                    ins = math.pow(cnt, config.ng_pow) / prob_sum * config.ng_table_sz
                    id = word2idx[word]
                    for i in range(int(ins)):
                        neg_table.append(id)
            # pad rounding losses with word id 0 so the table has exactly
            # config.ng_table_sz entries
            while len(neg_table) < config.ng_table_sz:
                neg_table.append(0)
            self.neg_table = neg_table
        def sample(self, x):
            """Draw *x* word ids from the table (frequency-biased)."""
            idxs = np.random.randint(low=0, high=config.ng_table_sz, size=x)
            return [self.neg_table[idx] for idx in idxs]
    def __init__(self, input_file):
        """Scan *input_file* to build the vocabulary, the word2idx map and
        the negative-sampling table."""
        self.input_file = input_file
        text_file = open(input_file, 'r', encoding='utf-8')
        self.input_file_sz = 0
        vocab = {}
        for idx, line in tqdm(enumerate(text_file), desc='Reading corpus'):
            self.input_file_sz += 1
            line_words = line.split()
            for word in line_words:
                if self.is_word(word):
                    if word not in vocab.keys():
                        vocab[word] = 0
                    vocab[word] = vocab[word] + 1
        # keep only words seen at least config.word_min_cnt times
        vocab_lst = []
        for word, word_cnt in vocab.items():
            if word_cnt >= config.word_min_cnt:
                vocab_lst.append(word)
        print('{} words valid'.format(len(vocab_lst)))
        word2idx = {}
        for idx, word in enumerate(vocab_lst):
            word2idx[word] = idx
        # rare words map to the sentinel index -1 (skipped during training)
        for word, word_cnt in vocab.items():
            if word_cnt < config.word_min_cnt:
                word2idx[word] = -1
        self.word2idx = word2idx
        self.vocab = vocab_lst
        self.ntable = self.NegativeTable(vocab, word2idx)
    def get_training_set(self, set_size):
        """Return a shuffled DataLoader of labelled word-id pairs.

        :param set_size: cap on the number of pairs, or None for no cap
        """
        training_set = []
        if config.saved_training_set is not None:
            print('loading saved training set: {}'.format(config.saved_training_set))
            training_set = json.load(open(config.saved_training_set, 'r', encoding='utf-8'))
            print('using saved training set: {}'.format(config.saved_training_set))
        else:
            text_file = open(self.input_file, 'r', encoding='utf-8')
            # NOTE(review): the inner loop variable `idx` shadows the line
            # index, and the size-cap `break` below only exits the inner
            # (per-line) loop -- the file is still scanned to the end.
            for idx, line in tqdm(enumerate(text_file), desc='preparing dataset', total=self.input_file_sz):
                line_words = line.split()
                line_words = [self.word2idx[word] if self.is_word(word) else -1 for word in line_words]
                for idx, word in enumerate(line_words):
                    anchor = line_words[idx]
                    if anchor >= 0:
                        # ng_k negatives per window position, labelled 0
                        negative_samples = self.ntable.sample(config.ng_k * config.window)
                        for negative_sample in negative_samples:
                            training_set.append([[anchor, negative_sample], 0])
                        # in-window context words become positives, labelled 1
                        beg = max(0, idx - config.window)
                        end = min(len(line_words) - 1, idx + config.window) + 1
                        for pos_idx in range(beg, end):
                            positive = line_words[pos_idx]
                            if pos_idx != idx and positive >= 0:
                                training_set.append([[anchor, positive], 1])
                    if set_size is not None and len(training_set) > set_size:
                        break
        # json.dump(training_set, open('./data/training_set.json', 'w', encoding='utf-8'))
        if set_size is not None:
            training_set = training_set[:set_size]
        print('{} pairs ready...'.format(len(training_set)))
        x = torch.LongTensor([pair[0] for pair in training_set])
        y = torch.Tensor([pair[1] for pair in training_set])
        dataset_combined = torch.utils.data.TensorDataset(x, y)
        gpu_num = torch.cuda.device_count()
        dataset_dataloader = Data.DataLoader(
            dataset=dataset_combined,
            batch_size=config.batch_size, # * gpu_num,
            shuffle=True,
            num_workers=1,
        )
        print('DataLoader ready...')
        return dataset_dataloader
    def get_voc(self):
        """Return the in-vocabulary word list (index order)."""
        return self.vocab
    def get_voc_size(self):
        """Return the vocabulary size."""
        return len(self.vocab)
    def is_word(self, _word):
        """True when *_word* consists only of [0-9a-z] characters."""
        for ch in _word:
            if (ch < '0' or ch > '9') and (ch < 'a' or ch > 'z'):
                return False
        return True
|
{"/train.py": ["/dataset.py", "/utils.py", "/model.py"], "/utils.py": ["/common.py"], "/tester.py": ["/common.py"], "/dataset.py": ["/common.py"]}
|
4,016
|
yongzhengqi/EMERITUS
|
refs/heads/master
|
/model.py
|
import torch.nn as nn
import torch
import numpy as np
class Net(nn.Module):
    """Embedding network that scores a pair of word ids.

    Looks up both ids in one shared embedding table, takes the cosine of
    the two vectors and squashes it into (0.5, 1) via (sigmoid + 1) / 2.
    """

    def __init__(self, vocab_sz, dim):
        super(Net, self).__init__()
        self.fe = nn.Embedding(vocab_sz, dim)
        self.sigmoid = nn.Sigmoid()

    def forward(self, xs):
        """Score each row of *xs*, a LongTensor of shape (batch, 2)."""
        left = self.fe(xs[:, 0])
        right = self.fe(xs[:, 1])
        dot = self.batch_dot(left, right).view(xs.size()[0])
        cosine = dot / self.norm(left) / self.norm(right)
        return (self.sigmoid(cosine) + 1) / 2

    def norm(self, a):
        """Row-wise L2 norm of a (batch, dim) tensor."""
        return (a ** 2).sum(dim=1) ** 0.5

    def batch_dot(self, a, b):
        """Per-row dot product of two (batch, dim) tensors."""
        batch_sz = a.size()[0]
        dim = a.size()[1]
        return torch.bmm(a.view(batch_sz, 1, dim), b.view(batch_sz, dim, 1)).view(batch_sz)
|
{"/train.py": ["/dataset.py", "/utils.py", "/model.py"], "/utils.py": ["/common.py"], "/tester.py": ["/common.py"], "/dataset.py": ["/common.py"]}
|
4,018
|
mauser7x63/fastAPI_socnet
|
refs/heads/main
|
/main.py
|
from typing import List
from fastapi import Depends, FastAPI, HTTPException, Security
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from sqlalchemy.orm import Session
import crud, models, schemas
from database import SessionLocal, engine
from auth import Auth
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
security = HTTPBearer()
auth_handler = Auth()
# Dependency
def get_db():
    """FastAPI dependency yielding a DB session, closed after the request."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
@app.get('/', response_model=List[schemas.Post])
def get_all_posts(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """List posts with simple offset/limit pagination."""
    return crud.get_posts(db, skip=skip, limit=limit)
@app.get('/post/{post_id}', response_model=schemas.Post)
def get_post(post_id: int, db: Session=Depends(get_db)):
    """Return a single post by id; 404 when it does not exist.

    Previously a missing post returned None, which failed response-model
    validation (a 500); now report 404 like get_user does.
    """
    db_post = crud.get_post(db, post_id)
    if not db_post:
        raise HTTPException(status_code=404, detail="Post not found.")
    return db_post
@app.post('/post/{post_id}/like', response_model=schemas.Post, dependencies=[Depends(auth_handler.decode_token)])
def like_post(post_id: int, db: Session=Depends(get_db)):
    """Apply a like (+1) to the post; requires a valid access token."""
    updated_post = crud.update_post(db, post_id, like=1)
    return updated_post
@app.post('/post/{post_id}/dislike', response_model=schemas.Post)
def dislike_post(post_id: int, db: Session=Depends(get_db), token: str=Depends(auth_handler.decode_token)):
    """Apply a dislike (-1) to the post; requires a valid access token."""
    # Removed leftover debug print ("there is!").
    return crud.update_post(db, post_id, like=-1)
@app.post('/users/', response_model=schemas.User)
def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):
    """Create a user unless the email is already registered.

    NOTE(review): crud.create_user indexes user['key']/user['password']
    (the dict shape used by /signup), but this endpoint passes a
    UserCreate object -- confirm this path actually works.
    """
    db_user = crud.get_user_by_email(db, email=user.email)
    if db_user:
        raise HTTPException(status_code=400, detail="Email already registered.")
    return crud.create_user(db=db, user=user)
@app.get('/users/', response_model=List[schemas.User])
def get_users(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """List users with offset/limit pagination."""
    return crud.get_users(db, skip=skip, limit=limit)
@app.get('/users/{user_id}', response_model=schemas.User)
def get_user(user_id: int, db: Session = Depends(get_db)):
    """Return one user by id; 404 when absent."""
    db_user = crud.get_user(db, user_id=user_id)
    if not db_user:
        raise HTTPException(status_code=404, detail="User not found.")
    return db_user
@app.post('/newpost/', response_model=schemas.Post)
def create_user_post(content:schemas.PostCreate, db: Session = Depends(get_db), token: str=Depends(auth_handler.decode_token)):
    """Create a post authored by content.user_id; requires a valid access token."""
    return crud.create_user_post(db=db, user_id=content.user_id, content=content.content)
@app.get('/users/{user_id}/posts/', response_model=List[schemas.Post])
def get_user_posts(user_id: int, db: Session = Depends(get_db)):
    """List a user's posts; 404 when the user has none (or doesn't exist)."""
    posts = crud.get_posts_by_user(db, user_id=user_id)
    if not posts:
        raise HTTPException(status_code=404,
                            detail=f'There is no posts by {user_id}')
    return posts
####auth futures#######
@app.post('/signup')
def signup(user_details: schemas.AuthModel, db: Session = Depends(get_db)):
    """Register a new account; the password is stored bcrypt-hashed.

    Returns the created user, or a plain error string (matching the
    original best-effort contract) when the account exists or creation fails.
    """
    if crud.get_user_by_email(db, email=user_details.username) is not None:
        return 'Account already exists'
    try:
        hashed_password = auth_handler.encode_password(user_details.password)
        user = {'key': user_details.username, 'password': hashed_password}
        return crud.create_user(db, user=user)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; kept the best-effort error string for callers.
        error_msg = 'Failed to signup user'
        return error_msg
@app.post('/login')
def login(user_details: schemas.AuthModel, db: Session = Depends(get_db)):
    """Verify credentials and issue access + refresh JWTs.

    Bug fix: HTTPException was *returned* (serialized into a 200 body)
    instead of *raised*, so invalid credentials never produced a 401.
    """
    user = crud.get_user_by_email(db, email=user_details.username)
    if user is None:
        raise HTTPException(status_code=401, detail='Invalid username')
    if not auth_handler.verify_password(user_details.password, user.hashed_password):
        raise HTTPException(status_code=401, detail='Invalid password')
    access_token = auth_handler.encode_token(user.email)
    refresh_token = auth_handler.encode_refresh_token(user.email)
    return {'access_token': access_token, 'refresh_token': refresh_token}
@app.get('/refresh_token')
def refresh_token(credentials: HTTPAuthorizationCredentials = Security(security)):
    """Exchange a refresh token (Bearer header) for a new access token."""
    refresh_token = credentials.credentials
    new_token = auth_handler.refresh_token(refresh_token)
    return {'access_token': new_token}
###################################################################
if __name__ == "__main__":
    import uvicorn
    # NOTE(review): serves on port 9000 while bot.py targets port 8000 --
    # confirm which port is intended.
    uvicorn.run(app, host="0.0.0.0", port=9000)
|
{"/main.py": ["/crud.py", "/models.py", "/schemas.py", "/auth.py"], "/crud.py": ["/models.py", "/schemas.py"]}
|
4,019
|
mauser7x63/fastAPI_socnet
|
refs/heads/main
|
/bot.py
|
import requests
import json
from faker import Faker
from requests.api import post
fake = Faker()
Faker.seed(3)
endpoint = 'http://127.0.0.1:8000/'
headers = {
'accept': 'application/json',
'Content-Type': 'application/json'
}
def loadFromJSNON(fileName):
    """Load and return a structure from a JSON file.

    (Misspelled name kept for backward compatibility with existing callers.)

    :param fileName: path of the JSON file to read
    """
    # `with` closes the handle even on error; json.load parses the stream
    # directly instead of reading the whole file into a string first.
    with open(fileName) as f:
        return json.load(f)
def saveToJSON(fileName, data):
    """Save *data* to *fileName* as pretty-printed JSON with sorted keys.

    :param fileName: destination path
    :param data: JSON-serializable structure
    """
    with open(fileName, 'w') as outfile:
        json.dump(data, outfile, indent=4, sort_keys=True)
def loadBots():
    """Load saved bot credentials from bots.json, create new fake accounts
    until the configured user count is reached, and persist the result.

    NOTE(review): relies on the module-global `rules`, which is only bound
    inside the __main__ block -- calling this from another module raises
    NameError; confirm intended usage.
    """
    bots = loadFromJSNON('fastAPI_socnet/bots.json')
    if rules['number_of_users']>len(bots):
        for user in range(0, rules['number_of_users']-len(bots)):
            # Bot() signs up / logs in against the HTTP API on construction
            bot = Bot(name=fake.first_name_nonbinary(), passwd=fake.pystr())
            print(f'user: {bot.username} with passwd: {bot.password}')
            bots.update({
                bot.username: bot.password
            })
    saveToJSON(fileName='fastAPI_socnet/bots.json', data=bots)
    print(len(bots), ' bots has been saved')
    return bots
def prepearBots(botsDict):
    """Instantiate (and thereby log in) a Bot for every saved credential.

    NOTE(review): each Bot instance is discarded after printing its name --
    confirm whether they were meant to be collected and returned.
    """
    for user in botsDict:
        user = Bot(name=user, passwd=botsDict[user])
        print(user.username)
class Bot():
    """A fake social-network user that signs up, logs in, posts and rates
    posts through the HTTP API rooted at the module-level `endpoint`."""

    def __init__(self, name, passwd):
        self.username = name
        self.password = passwd
        # login() transparently signs the account up first when needed
        self.token = self.login()

    def signUp(self):
        """POST /signup with this bot's credentials.

        Returns the response JSON on 200, otherwise the status code.
        """
        url = endpoint+'signup'
        body = {
            'username': self.username,
            'password': self.password
        }
        res = requests.post(url=url, headers=headers, json=body)
        if res.status_code == 200:
            print(f'request to {url} is ok')
            return res.json()
        else:
            print('something goes wrong, error: ', res.status_code)
            return res.status_code

    def login(self):
        """POST /login and return the access token.

        When no token comes back the account is assumed missing: sign up,
        then retry login once (recursively). Returns the status code on
        HTTP failure.
        """
        url = endpoint+'login'
        body = {
            'username': self.username,
            'password': self.password
        }
        res = requests.post(url=url, headers=headers, json=body)
        if res.status_code == 200:
            print(f'request to {url} is ok')
            token = res.json().get('access_token')
            if not token:
                print('login failed. try to signup')
                self.signUp()
                token = self.login()
            return token
        else:
            print('something goes wrong, error: ', res.status_code)
            return res.status_code

    def createPost(self, user_id, text):
        """POST /newpost/ with this bot's token; JSON on 200, else status code."""
        url = f'{endpoint}newpost/?token={self.token}'
        print(url)
        body = {
            "content": text,
            "user_id": user_id,
            "token": self.token
        }
        res = requests.post(url=url, headers=headers, json=body)
        if res.status_code == 200:
            return res.json()
        else:
            print("failed to post", res.status_code)
            return res.status_code

    def ratePost(self, post_id, like=True):
        """Like (default) or dislike the given post.

        Returns a message dict on success, else the status code.
        """
        # Bug fix: pprint was previously only imported inside the __main__
        # guard, so this method raised NameError when bot.py was imported
        # as a module.
        from pprint import pprint
        if like:
            rate = 'like'
        else:
            rate = 'dislike'
        url = f'{endpoint}post/{post_id}/{rate}?token={self.token}'
        print('request to URL:', url)
        res = requests.post(url=url, headers=headers, data={})
        if res.status_code == 200:
            pprint(res.json())
            return {'message': f'post id={post_id} was liked'}
        else:
            print('something goes wrong, error: ', res.status_code)
            return res.status_code
if __name__=="__main__":
    from pprint import pprint
    print('bot standalone runned')
    # bot behaviour rules (e.g. number_of_users) come from a JSON config;
    # loadBots() reads this module-global
    rules = loadFromJSNON('fastAPI_socnet/bots_config.json')
    pprint(rules)
    users = loadBots()
    prepearBots(users)
|
{"/main.py": ["/crud.py", "/models.py", "/schemas.py", "/auth.py"], "/crud.py": ["/models.py", "/schemas.py"]}
|
4,020
|
mauser7x63/fastAPI_socnet
|
refs/heads/main
|
/crud.py
|
from sqlalchemy.orm import Session
from models import User, Post
import schemas
def get_user(db: Session, user_id: int):
    """Return the User with primary key *user_id*, or None."""
    matching = db.query(User).filter(User.id == user_id)
    return matching.first()
def get_post(db: Session, post_id: int):
    """Return the Post with primary key *post_id*, or None."""
    matching = db.query(Post).filter(Post.id == post_id)
    return matching.first()
def get_user_by_email(db: Session, email: str):
    """Return the User whose email equals *email*, or None."""
    matching = db.query(User).filter(User.email == email)
    return matching.first()
def get_users(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of users (offset *skip*, at most *limit* rows)."""
    page = db.query(User).offset(skip).limit(limit)
    return page.all()
def create_user(db: Session, user: schemas.UserCreate):
    """Persist and return a new User row.

    Accepts either the dict shape used by /signup ({'key': email,
    'password': hashed_password}) or a schemas.UserCreate object as passed
    by POST /users/. Previously the pydantic path crashed on `user['key']`.
    Also removed the leftover debug print.
    """
    if isinstance(user, dict):
        email, password = user['key'], user['password']
    else:
        # NOTE(review): on this path the plain password is stored in
        # hashed_password -- confirm whether /users/ should hash first.
        email, password = user.email, user.password
    db_user = User(email=email, hashed_password=password)
    db.add(db_user)
    db.commit()
    db.refresh(db_user)
    return db_user
def get_posts(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of posts (offset *skip*, at most *limit* rows)."""
    page = db.query(Post).offset(skip).limit(limit)
    return page.all()
def create_user_post(db: Session, user_id: int, content: str):
    """Insert and return a new Post authored by *user_id*."""
    new_post = Post(author_id=user_id, content=content)
    db.add(new_post)
    db.commit()
    db.refresh(new_post)
    return new_post
def get_posts_by_user(db: Session, user_id: int):
    """Return all posts authored by *user_id* (empty list when none).

    Fixed: the query used to be executed twice -- once for a debug print
    and once for the return; the debug output is gone and the query runs once.
    """
    return db.query(Post).filter(Post.author_id == user_id).all()
def update_post(db:Session, post_id:int, like):
    """Adjust a post's like counter by *like* (+1 like, -1 dislike) and
    return the refreshed row.

    NOTE(review): a negative vote decrements `likes` rather than
    incrementing the separate `dislikes` column -- confirm intended.
    Removed the leftover debug prints.
    """
    # .get() returns None for an unknown post_id; the attribute access
    # below then raises AttributeError, matching the original behaviour.
    db_post = db.query(Post).get(post_id)
    db_post.likes += like
    db.commit()
    db.refresh(db_post)
    return db_post
|
{"/main.py": ["/crud.py", "/models.py", "/schemas.py", "/auth.py"], "/crud.py": ["/models.py", "/schemas.py"]}
|
4,021
|
mauser7x63/fastAPI_socnet
|
refs/heads/main
|
/models.py
|
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
#from sqlalchemy.orm import relationship
from database import Base
class User(Base):
    """SQLAlchemy model for an account; `email` doubles as the login name."""
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True, index=True)
    email = Column(String, unique=True, index=True)
    hashed_password = Column(String)  # bcrypt hash (see auth.Auth.encode_password)
    is_active = Column(Boolean, default=True)
class Post(Base):
    """SQLAlchemy model for a user post with simple like/dislike counters."""
    # NOTE(review): table name is capitalized ('Post') unlike 'user' -- confirm.
    __tablename__ = 'Post'
    id = Column(Integer, primary_key=True, index=True)
    content = Column(String)
    likes = Column(Integer, default=0)
    dislikes = Column(Integer, default=0)  # never written by crud.update_post
    author_id = Column(Integer, ForeignKey('user.id'))
|
{"/main.py": ["/crud.py", "/models.py", "/schemas.py", "/auth.py"], "/crud.py": ["/models.py", "/schemas.py"]}
|
4,022
|
mauser7x63/fastAPI_socnet
|
refs/heads/main
|
/schemas.py
|
from typing import List, Optional
from pydantic import BaseModel
class Post(BaseModel):
    """API read model for a post (orm_mode lets it load from SQLAlchemy rows)."""
    id: int
    content: str
    author_id: int
    likes: int = 0
    dislikes: int = 0
    class Config:
        orm_mode = True
class PostCreate(BaseModel):
    """Request payload for POST /newpost/."""
    content: str
    user_id: int
    token: str
class UserBase(BaseModel):
    """Shared user fields for the user schemas below."""
    email: str
class UserCreate(UserBase):
    """User-creation payload: email plus plain-text password."""
    password: str
class User(UserBase):
    """API read model for a user (never exposes the password hash)."""
    id: int
    is_active: bool
    class Config:
        orm_mode = True
class AuthModel(BaseModel):
    """Credentials payload for /signup and /login."""
    username: str
    password: str
|
{"/main.py": ["/crud.py", "/models.py", "/schemas.py", "/auth.py"], "/crud.py": ["/models.py", "/schemas.py"]}
|
4,023
|
mauser7x63/fastAPI_socnet
|
refs/heads/main
|
/auth.py
|
import os
import jwt
from fastapi import HTTPException
from passlib.context import CryptContext
from datetime import datetime, timedelta
class Auth():
    """Password hashing (bcrypt via passlib) and JWT issue/verify helpers."""
    hasher = CryptContext(schemes=['bcrypt'])
    # SECURITY: the signing secret is hard-coded in source; prefer the
    # environment variable below for any real deployment.
    #secret = os.getenv("APP_SECRET_STRING")
    secret = "1b0b1cd761525c45be721743ce1a0cf9b3d053e04f7976ffdc4ff8e2e3279634"

    def encode_password(self, password):
        """Return the bcrypt hash of *password*."""
        return self.hasher.hash(password)

    def verify_password(self, password, encoded_password):
        """Check a plain password against its stored hash."""
        return self.hasher.verify(password, encoded_password)

    def encode_token(self, username):
        """Issue a 30-minute HS256 access token for *username*."""
        payload = {
            'exp' : datetime.utcnow() + timedelta(days=0, minutes=30),
            'iat' : datetime.utcnow(),
            'scope' : 'access_token',
            'sub' : username
        }
        return jwt.encode(
            payload,
            self.secret,
            algorithm = 'HS256'
        )

    def decode_token(self, token):
        """Validate an access token and return its subject (username).

        Raises HTTPException(401) for wrong scope, expiry, or invalid tokens.
        """
        try:
            payload = jwt.decode(token, self.secret, algorithms=['HS256'])
            if (payload['scope'] == 'access_token'):
                return payload['sub']
            raise HTTPException(status_code=401, detail = 'Scope for the token is invalid')
        except jwt.ExpiredSignatureError:
            raise HTTPException(status_code=401, detail = 'Token expired')
        except jwt.InvalidTokenError:
            # Bug fix: an *invalid* (not expired) token used to report
            # 'Token expired'.
            raise HTTPException(status_code=401, detail='Invalid token')

    def encode_refresh_token(self, username):
        """Issue a 30-minute HS256 refresh token for *username*.

        NOTE: the 'refrsh_token' scope string is misspelled, but the check
        in refresh_token() uses the same spelling -- change both together
        if ever corrected (outstanding tokens would be invalidated).
        """
        payload = {
            'exp' : datetime.utcnow() + timedelta(days=0, minutes=30),
            'iat' : datetime.utcnow(),
            'scope' : 'refrsh_token',
            'sub' : username
        }
        # Removed leftover `print(payload)` debug statement, which leaked
        # the token subject and lifetime to stdout.
        return jwt.encode(
            payload,
            self.secret,
            algorithm = 'HS256'
        )

    def refresh_token(self, refresh_token):
        """Exchange a valid refresh token for a fresh access token."""
        try:
            payload = jwt.decode(refresh_token, self.secret, algorithms=['HS256'])
            if (payload['scope'] == 'refrsh_token'):
                username = payload['sub']
                new_token = self.encode_token(username)
                return new_token
            raise HTTPException(status_code=401, detail='Invalid scope for token')
        except jwt.ExpiredSignatureError:
            raise HTTPException(status_code=401, detail='Refresh token expired')
        except jwt.InvalidTokenError:
            raise HTTPException(status_code=401, detail='Invalid refresh token')
|
{"/main.py": ["/crud.py", "/models.py", "/schemas.py", "/auth.py"], "/crud.py": ["/models.py", "/schemas.py"]}
|
4,032
|
murfreesboro/dftints
|
refs/heads/master
|
/generateBasis.py
|
"""
This module is used to generate the DFT basis set value etc.
up to the fourth derivatives
"""
__author__ = "Fenglai Liu"
import sys
import os
import infor
import shell
import basis
import codeprint
import shellsymbol
import derivorder
import derivparser
def generateCode(order):
    """
    print out the code

    Emit the C++ source file dftbasisderiv<order>.cpp, which evaluates
    the <order>-th derivatives of the basis set values for a single
    shell by combining the radial part and the angular part.

    order : derivative order, must be 1..4 (anything else aborts)
    """
    # get the file name
    if order == 1:
        funName = "dftbasisderiv1"
    elif order == 2:
        funName = "dftbasisderiv2"
    elif order == 3:
        funName = "dftbasisderiv3"
    elif order == 4:
        funName = "dftbasisderiv4"
    else:
        print "Improper order in the generateCode of generateBasis.py"
        sys.exit()
    inf = funName + ".cpp"
    # now we open the file
    f = open(inf, "w")
    codeprint.initilizeIndent()
    # the comment part for the file
    f.write("/**\n")
    line = " * This function is used to generate "+str(order)+" derivatives for basis set "
    codeprint.printLine(line,f)
    line = " * The basis set derivatives are evaluated for the given shell which "
    codeprint.printLine(line,f)
    line = " * is characterized by the L(no composite shell!). Generally, by given the "
    codeprint.printLine(line,f)
    line = " * derivative order (for exmaple, X, Y Z or XX, YY or XYY etc.)"
    codeprint.printLine(line,f)
    line = " * for an arbitrary shell we could combine the radial part and "
    codeprint.printLine(line,f)
    line = " * the angular part together so to form the result."
    codeprint.printLine(line,f)
    line = " * The result is arranged as: (nBas, ng, nDerivOrder)"
    codeprint.printLine(line,f)
    line = " * nBas is the number of Cartesian type basis set for shell with L"
    codeprint.printLine(line,f)
    line = " * \\param ng number of grid points "
    codeprint.printLine(line,f)
    line = " * \\param L angular momentum of the shell "
    codeprint.printLine(line,f)
    line = " * \\param nTolCarBas number of Cartesian basis set in the ang array "
    codeprint.printLine(line,f)
    line = " * \\param ang angular part of the basis set values(nTolCarBas,ng) "
    codeprint.printLine(line,f)
    line = " * \\param rad radial part of the basis set values "
    codeprint.printLine(line,f)
    line = " * \\return basis derivatives of basis set values for the given order"
    codeprint.printLine(line,f)
    line = " * \\author Fenglai Liu and Jing Kong "
    codeprint.printLine(line,f)
    f.write(" */\n")
    # including head files
    line = "#include\"libgen.h\""
    codeprint.printLine(line,f)
    line = "#include\"batchbasis.h\""
    codeprint.printLine(line,f)
    line = "using namespace batchbasis;"
    codeprint.printLine(line,f)
    f.write("\n")
    # print out the function name
    line = "void " + "BatchBasis::" + funName + '''(const UInt& ng, const UInt& L, const UInt& nTolCarBas, const Double* ang, const Double* rad, Double* basis) const '''
    codeprint.printLine(line,f)
    # here we enter in real code
    line = "{"
    codeprint.printLine(line,f)
    codeprint.increaseIndentation()
    f.write("\n")
    # set up the nBas
    line = "// now we set up the nBas for the computation"
    codeprint.printLine(line,f)
    line = "UInt nBas = (L+1)*(L+2)/2;"
    codeprint.printLine(line,f)
    f.write("\n")
    # now we create the derivatives order
    # one C++ if/else-if chain over L is emitted for every derivative
    # descriptor (X, XY, XYZ, ...) in sequence
    orderList = derivorder.derivOrderGeneration(order)
    for derivOrder in orderList:
        # comment
        line = "// now we do derivatives for the given basis set to " + derivOrder
        codeprint.printLine(line,f)
        # each descriptor after the first advances the output pointer by
        # one (nBas, ng) slab in the generated code
        indexDerivOrder = orderList.index(derivOrder)
        if indexDerivOrder > 0 :
            line = "basis = basis + " + "ng*nBas; "
            codeprint.printLine(line,f)
        f.write("\n")
        # within the loop, actually we choose doing code from S to I
        maxL = infor.getMaxL()
        for L in range(maxL+1):
            # print out the block
            if L == 0:
                line = "if(L == " + str(L) + ") {"
            else:
                line = "} else if(L == " + str(L) + ") {"
            codeprint.printLine(line,f)
            codeprint.increaseIndentation()
            f.write("\n")
            # now it's the real work module
            line = "for(UInt ip = 0; ip<ng; ip++) {"
            codeprint.printLine(line,f)
            codeprint.increaseIndentation()
            line = "Double* bas = &basis[ip*nBas];"
            codeprint.printLine(line,f)
            line = "const Double* angArray = &ang[ip*nTolCarBas];"
            codeprint.printLine(line,f)
            # for every Cartesian basis function of this shell, expand the
            # derivative recursively and print one assignment statement
            s = shell.shell(L)
            basList = s.getBasis()
            for bas in basList:
                formula = {"0":bas}
                result = { }
                derivparser.getDerivExpression(formula, derivOrder, 0, result)
                ind = basList.index(bas)
                derivparser.printExpression(result,derivOrder,ind,f)
            # block end for ip
            codeprint.decreaseIndentation()
            line = "}"
            codeprint.printLine(line,f)
            codeprint.decreaseIndentation()
            f.write("\n")
        # block end with the L
        line = "}"
        codeprint.printLine(line,f)
        f.write("\n\n")
    # end of function block
    codeprint.decreaseIndentation()
    line = "}"
    codeprint.printLine(line,f)
    f.write("\n\n")
    # end of whole file
    f.close()
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,033
|
murfreesboro/dftints
|
refs/heads/master
|
/shell.py
|
"""
This module is used describe the class related to the "Shell".
Shell actually is a group of basis set functions in the quantum chemistry,
all of these basis set functions share the same L, namely:
L = l+m+n
For example, Shell of L=1 has theree basis set functions, namely
Px 1,0,0
Py 0,1,0
Pz 0,0,1
"""
__author__ = "Fenglai Liu"
import sys
import os
import basis
import shellsymbol
import infor
class shell:

    def __init__(self,L0):
        """
        constructor for the shell class
        L0 is the shell's angular momentum type
        In the initilization, we also generate all of basis set functions
        """
        self.L = L0
        # check the L, it should not be less than zero
        if L0 < 0:
            print "L can not be less than zero in shell class\n"
            sys.exit()

    def __eq__(self,t):
        # two shells are equal iff they carry the same angular momentum
        if self.L == t.L:
            return True
        else:
            return False

    def __ne__(self,t):
        if self.L != t.L:
            return True
        else:
            return False

    def getL(self):
        """
        return the L
        """
        return self.L

    def getBasis(self):
        """
        return the full basis set list

        Builds one basis object per (l,m,n) triple produced by
        generateBasisSetOrders(), preserving that ordering.
        """
        # get the basis set order
        order = self.generateBasisSetOrders()
        # generate the basis set functions for this shell
        # each basis set function is characterized by three numbers
        # NOTE: Python 2 integer division — the order list length is a
        # multiple of 3 by construction
        l = len(order)/3
        basisSets = [ ]
        i = 0
        while i < l:
            basisSet = basis.basis(order[3*i],order[3*i+1],order[3*i+2])
            basisSets.append(basisSet)
            i = i + 1
        return basisSets

    def generateBasisSetOrders(self):
        """
        generating the basis set's ordering

        Returns a flat list [nx0, ny0, nz0, nx1, ny1, nz1, ...] in the
        libint convention: x power decreases first, then y before z.
        """
        orderList = []
        L = self.L
        i = 0
        basisSetOrder = infor.getBasisSetOrder()
        if basisSetOrder == "libint":
            while i <= L:
                nx = L - i
                j = 0
                while j<=i:
                    ny = i-j
                    nz = j
                    orderList.append(nx)
                    orderList.append(ny)
                    orderList.append(nz)
                    j = j + 1
                i = i + 1
        else:
            print "Unrecognized basis set ordering to generate basis sets\n"
            sys.exit()
        return orderList

    def hasBasisSet(self,bas):
        """
        testing that whether we have the basis set in the
        given shell
        """
        # membership is decided purely by total angular momentum
        bL = bas.getL()
        if bL == self.L:
            return True
        else:
            return False

    def getName(self):
        """
        give the name for this shell
        """
        name = shellsymbol.getShellSymbol(self.L)
        return name
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,034
|
murfreesboro/dftints
|
refs/heads/master
|
/main.py
|
"""
main module for dft basis sets
"""
__author__ = "Fenglai Liu"
import sys
import os
import infor
import generateAngBasis
import generateBasis
import derivorder
# setting the basis set order
# setting the basis set order
# optional single CLI argument overrides the default maximum angular
# momentum (6 == up to I shells)
maxLChoice = 6
if len(sys.argv) == 2:
    maxLChoice = int(sys.argv[1])
elif len(sys.argv) > 2:
    print "Wrong argv list! We only support zero/one arguments! Please check it!\n"
    sys.exit()
infor.setBasisSetOrder()
infor.setMaxL(maxLChoice)
# print out the angular part of code
generateAngBasis.generateCode()
# print out the basis set code
# derivative orders 1..4 each produce one dftbasisderivN.cpp file
for i in range(4):
    i = i + 1
    generateBasis.generateCode(i)
# finally, we try to print out the derivatives information
# used in the program
# the DERIV_* constants are numbered 1, 2, 3, ... in exactly the order
# derivOrderGeneration emits the descriptors, so that ordering is part
# of the generated API
count = 1
for i in range(4):
    i = i + 1
    dlist = derivorder.derivOrderGeneration(i)
    for var in dlist:
        v = "DERIV_" + var
        line = "UInt " + v + " = " + str(count) + ";"
        print line
        count = count + 1
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,035
|
murfreesboro/dftints
|
refs/heads/master
|
/derivorder.py
|
"""
This module is used to generate the derivatives sequence for the program
The derivatives sequence is like this, if we want to do 3rd order derivatives;
for example; do derivatives with respect to X, Y and Z then the derivative
var is written into "XYZ". For each order derivatives, we will set up
the sequence and then parse it later
"""
__author__ = "Fenglai Liu"
import sys
import os
def derivOrderGeneration(order):
    """
    generating the derivatives sequence:
    X, Y, Z for derivatives order 1
    XX, XY etc. for derivatives order 2
    XXX, XYY, ZZZ etc. for derivatives order 3

    Returns a list of derivative descriptors. The loop ordering below
    fixes the global DERIV_* numbering emitted by main.py, so it must
    not be changed.

    order : derivative order, 1..4 (anything else aborts)
    """
    axis = ( "X", "Y", "Z" )
    result = [ ]
    if order == 1:
        # fix: return a list (not the axis tuple itself) so every order
        # yields the same return type
        result = list(axis)
    elif order == 2:
        for i in axis:
            for j in axis:
                if axis.index(j) > axis.index(i):
                    continue
                result.append(j + i)
    elif order == 3:
        for i in axis:
            for j in axis:
                for k in axis:
                    if axis.index(j) > axis.index(i):
                        continue
                    if axis.index(k) > axis.index(j):
                        continue
                    result.append(k + j + i)
    elif order == 4:
        for i in axis:
            for j in axis:
                for k in axis:
                    for l in axis:
                        if axis.index(j) > axis.index(i):
                            continue
                        if axis.index(k) > axis.index(j):
                            continue
                        if axis.index(l) > axis.index(k):
                            continue
                        result.append(l + k + j + i)
    else:
        # single-argument parenthesized print works under both
        # Python 2 and Python 3
        print("Improper order in the derivOrderGeneration\n")
        sys.exit()
    # return
    return result
def parseDeriv(var):
    """
    for each given var, which is in format of XX, YY, XYZ etc.
    we need to parse it to figure out that how many X, how many
    Y and how many Z it has

    Returns the tuple (nx, ny, nz). Any character other than X/Y/Z
    aborts the generator.
    """
    # validate first, then let str.count do the tallying
    for ch in var:
        if ch not in ("X", "Y", "Z"):
            # single-argument parenthesized print works under both
            # Python 2 and Python 3
            print("Illegal character got in parseDeriv. Could be only X, Y or Z")
            sys.exit()
    return (var.count("X"), var.count("Y"), var.count("Z"))
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,036
|
murfreesboro/dftints
|
refs/heads/master
|
/codeprint.py
|
"""
This module is used to provide functions to print the real codes
"""
__author__ = "Fenglai Liu"
import sys
import os
# the indentLength indicates current indent length in the code
# in default, each time we increase 3
indentLength = 0
def increaseIndentation():
    """
    increase the indent for 3
    """
    # one indentation level in the emitted C++ code == 3 spaces
    global indentLength
    indentLength = indentLength + 3
def decreaseIndentation():
    """
    decrease the indent for 3
    """
    global indentLength
    indentLength = indentLength - 3
    # unbalanced increase/decrease calls indicate a generator bug -> abort
    if indentLength < 0:
        print "Illegal indentLength in printcode.py\n"
        sys.exit()
def printLine(line, f):
    """Write *line* to *f* at the current module-wide indentation,
    followed by a newline."""
    global indentLength
    # " " * 0 is the empty string, so zero indentation writes nothing
    f.write(" " * indentLength)
    f.write(line)
    f.write("\n")
def initilizeIndent():
    # reset the shared indentation counter before generating a new file
    # (the function name keeps its original spelling — callers depend on it)
    global indentLength
    indentLength = 0
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,037
|
murfreesboro/dftints
|
refs/heads/master
|
/shellsymbol.py
|
"""
This module stores the shell symbols
"""
__author__ = "Fenglai Liu"
import sys
import os
# the shell name list is taken from libint package
SHELL_NAME_LIST = [
    'S', 'P', 'D', 'F', 'G', 'H', 'I', 'K', 'L', 'M', 'N',
    'O', 'Q', 'R', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']


def getShellSymbol(i):
    """Return the spectroscopic symbol for angular momentum *i*.

    Beyond the table (i > 20) a generic name "L<i>" is returned and a
    warning is printed.
    """
    # removed the needless 'global' declaration — the list is only read
    l = len(SHELL_NAME_LIST)
    if i >= l:
        # single-argument parenthesized prints are valid in both
        # Python 2 and Python 3
        print("Why you need to claim such high order shells, L>20?")
        print("however, we still do it, but be careful with your code")
        return "L" + str(i)
    else:
        return SHELL_NAME_LIST[i]
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,038
|
murfreesboro/dftints
|
refs/heads/master
|
/basis.py
|
"""
This module is used describe the class related to the "basis set functions".
Originally, each basis set function is a combination of Gaussian primitive
functions:
psi = sum_{mu}d_{mu}chi_{mu}
psi is the basis set function, and chi_{mu} is the primitive functions.
All of chi are on the same center as psi, and d_{mu} is some fixed
coefficients. All of Gaussian primitive functions share the same angular
momentum with the basis set.
For each Gaussian primitive function, it has the form that:
chi = x^{l}y^{m}z^{n}e^{-alpha*r^{2}}
x^{l}y^{m}z^{n} is its angular momentum part, which is characterized by
three number of l, m, and n. The e^{-alpha*r^{2}} is its radial part,
so l,m,n combined with alpha and its prefactor of d_{mu}, then we know
all of information to get psi.
"""
__author__ = "Fenglai Liu"
import sys
import os
import shellsymbol
class basis:

    def __init__(self,l0,m0,n0):
        """
        Basis class is characterized by three numbers
        They are corresponding to the angular momentum numbers
        """
        self.l = l0
        self.m = m0
        self.n = n0
        # test the angular momentum number
        if l0<0 or m0<0 or n0<0:
            print "Illegal angular momentum number in basis.py. It should not be less than zero\n"
            print l0,m0,n0
            sys.exit()

    def __eq__(self,t):
        """
        testing whether two basis sets are equal with each other
        """
        # equality requires all three components to match
        l0,m0,n0 = self.getlmn()
        l1,m1,n1 = t.getlmn()
        if l0 == l1 and m0 == m1 and n0 == n1:
            return True
        else:
            return False

    def __ne__(self,t):
        """
        testing whether two basis sets are equal with each other
        """
        l0,m0,n0 = self.getlmn()
        l1,m1,n1 = t.getlmn()
        if l0 == l1 and m0 == m1 and n0 == n1:
            return False
        else:
            return True

    def getName(self):
        """
        depending on the l,m,n; we get the name for this basis set

        e.g. (2,0,1) of an F shell becomes "F2xz": shell symbol, then
        each nonzero power with its count (count omitted when it is 1).
        """
        L = self.l + self.m + self.n
        name = shellsymbol.getShellSymbol(L)
        if self.l > 0:
            if self.l == 1:
                name = name + "x"
            else:
                name = name + str(self.l) + "x"
        if self.m > 0:
            if self.m == 1:
                name = name + "y"
            else:
                name = name + str(self.m) + "y"
        if self.n > 0:
            if self.n == 1:
                name = name + "z"
            else:
                name = name + str(self.n) + "z"
        return name

    def getlmn(self):
        """
        l,m,n is given
        """
        return (self.l, self.m, self.n)

    def getComponent(self,axis):
        """
        for the given axis (X, Y or Z) we return the component
        """
        if axis == "X":
            return self.l
        elif axis == "Y":
            return self.m
        elif axis == "Z":
            return self.n
        else:
            print "Wrong axis passed in the getComponent"
            sys.exit()

    def getL(self):
        """
        return the total angular momentum number of L
        """
        L = self.l + self.m + self.n
        return L

    def loweringAng(self,axis):
        """
        for the given axis (X, Y or Z) we determine
        which component to lowering

        Returns the lowered (l,m,n) triple, or (None,None,None) when the
        component is already zero (the derivative term vanishes).
        """
        if axis == "X":
            l1 = self.l - 1
            m1 = self.m
            n1 = self.n
            if l1 < 0:
                return (None,None,None)
            else:
                return (l1,m1,n1)
        elif axis == "Y":
            l1 = self.l
            m1 = self.m - 1
            n1 = self.n
            if m1 < 0:
                return (None,None,None)
            else:
                return (l1,m1,n1)
        elif axis == "Z":
            l1 = self.l
            m1 = self.m
            n1 = self.n - 1
            if n1 < 0:
                return (None,None,None)
            else:
                return (l1,m1,n1)
        else:
            print "Wrong axis passed in the loweringAng"
            sys.exit()

    def raisingAng(self,axis):
        """
        for the given axis (X, Y or Z) we determine
        which component to raising up

        Raising always succeeds, so a plain (l,m,n) triple is returned.
        """
        if axis == "X":
            l1 = self.l + 1
            m1 = self.m
            n1 = self.n
        elif axis == "Y":
            l1 = self.l
            m1 = self.m + 1
            n1 = self.n
        elif axis == "Z":
            l1 = self.l
            m1 = self.m
            n1 = self.n + 1
        else:
            print "Wrong axis passed in the raisingAng"
            sys.exit()
        return (l1,m1,n1)
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,039
|
murfreesboro/dftints
|
refs/heads/master
|
/derivparser.py
|
"""
This module is used to generate the parser for the given basis set with
respect to the given derivatives
"""
__author__ = "Fenglai Liu"
import sys
import os
import shell
import basis
import codeprint
def getDerivExpression(formula, deriv, order, final_result):
    """
    for the given formula, as well as the derivative var(something like
    XXXX, XXYZ etc. generated in derivorder.py). We can get the derivatives
    expression for the current formula. We note that this process continues
    in recursively way until all of the derivatives are processed. If the
    final order is arrived, we will push the result into the final_result

    formula      : dict mapping an encoded coefficient key -> basis object
    deriv        : derivative descriptor string, one axis per character
    order        : index of the character in *deriv* handled at this level
    final_result : output dict, filled at the deepest recursion level

    Key encoding (as consumed by printExpression): terms of the key are
    joined with "_"; a numeric term is a multiplicative coefficient, and
    each "2alpha" term marks one radial-derivative factor.
    """
    result = { }
    axis = deriv[order]
    # counter used to disambiguate colliding keys by padding them with
    # extra "-" or "_" characters (see below)
    nunderscore = 1
    for k, bas in formula.iteritems():
        # to get rid of the "-" sign first
        # NOTE(review): "-" padding added below is stripped here on the
        # next recursion level, while "_" padding persists into the key
        # splitting in printExpression — presumably intentional since
        # empty split fields are ignored there; verify if modifying.
        k = k.replace("-","")
        # get the first term in the derivative expression
        # the first term is "(l,m,n)*chi(l,m,n - delta)"
        (l,m,n) = bas.loweringAng(axis)
        # add a comment: if the loweringAng produce new
        # l,m,n no matter which one is smaller than 0;
        # then l,m,n are all none
        # we only need to check that whether l is none or not
        # so that to know the new basis set exist or not
        if l is not None:
            newBasis = basis.basis(l,m,n)
            com = bas.getComponent(axis)
            newkey1 = k + "_" + str(com)
            if result.has_key(newkey1):
                # key collision: pad with "-" so both terms survive
                for i in range(nunderscore):
                    newkey1 = newkey1 + "-"
                nunderscore = nunderscore + 1
                result[newkey1] = newBasis
            else:
                result[newkey1] = newBasis
        # get the second term
        # the second term is 2alpha*chi(l,m,n + delta)
        (l,m,n) = bas.raisingAng(axis)
        newBasis = basis.basis(l,m,n)
        newkey2 = k + "_" + "2alpha"
        if result.has_key(newkey2):
            # key collision: pad with "_" (harmless to the "_"-split
            # parser, since empty fields are neither digits nor "2alpha")
            for i in range(nunderscore):
                newkey2 = newkey2 + "_"
            nunderscore = nunderscore + 1
            result[newkey2] = newBasis
        else:
            result[newkey2] = newBasis
    # now let's judge whether we need to proceed it
    order = order + 1
    desire_order = len(deriv)
    if order == desire_order:
        for k, bas in result.iteritems():
            final_result[k] = bas
    else:
        getDerivExpression(result, deriv, order, final_result)
def printExpression(expression, derivOrder, basIndex, f):
    """
    now we print out the derivative expression here for the
    given derivative order.

    expression : dict of encoded-key -> basis object produced by
                 getDerivExpression
    derivOrder : derivative descriptor string (e.g. "XYZ")
    basIndex   : index of the output element in the generated bas[] array
    f          : open output file

    Emits one C++ assignment of the form
      bas[i] = rad[...]*(c1*angArray[..]+...) - rad[...]*(...) ...
    where the rad[] offset is the count of "2alpha" factors (the radial
    derivative order) and odd orders carry a minus sign.
    """
    # set up the LHS of the expression
    line = "bas[" + str(basIndex) + "] = "
    # get the length of the derivative order
    l = len(derivOrder)
    # we use count to know whether this is the first term
    count = 0
    # now let's search each order - for every order,
    # we have a rad term
    for order in range(l+1):
        # set up the list for basis and coefficients
        # they are corresponding to the same rad term
        basList = [ ]
        coeList = [ ]
        for k, bas in expression.iteritems():
            # to get rid of the "-" sign first
            k = k.replace("-","")
            klist = k.split("_")
            # determine how many 2alpha we have in the k
            # we only pick up these who math the order
            n2alpha = 0
            for i in klist:
                if i == "2alpha":
                    n2alpha = n2alpha + 1
            if n2alpha != order:
                continue
            # determine the coefficient in the k
            # every numeric field multiplies in; "0" fields (vanished
            # components) are skipped
            coe = 1
            for i in klist:
                if i.isdigit() and i != "0":
                    coe = coe*int(i)
            # push back the basis and coe
            # identical angular terms are merged by summing coefficients
            if bas in basList:
                index = basList.index(bas)
                coeList[index] = coeList[index] + coe
            else:
                basList.append(bas)
                coeList.append(coe)
        if len(basList) > 0:
            # give the offset for the radial array
            # we add the minus sign to this part
            if order == 0:
                rad = "rad[ip]"
            elif order == 1:
                rad = "rad[ip+ng]"
            else:
                rad = "rad[ip+" + str(order) + "*ng]"
            if order % 2 == 1:
                rad = "-" + rad
            elif count > 0: # these term should have "+" sign
                rad = "+" + rad
            # set the basis set, by combining it with coefficients
            # we will get the term corresponding to the rad term
            ang = "*"
            if len(basList) > 1:
                ang = ang + "("
            for bas in basList:
                L = bas.getL()
                # NOTE: Python 2 integer division — always exact since
                # L*(L+1)*(L+2) is divisible by 6
                gOffSet = L*(L+1)*(L+2)/6 # counting all of lower shell index since S
                s = shell.shell(L)
                bList = s.getBasis()
                bind = bList.index(bas)
                index = bind + gOffSet
                cind = basList.index(bas)
                if coeList[cind] != 1:
                    c = str(coeList[cind]) + "*"
                else:
                    c = ""
                ang = ang + c
                ang = ang + "angArray[" + str(index) + "]"
                #ang = ang + c + bas.getName()
                if cind == len(basList) - 1:
                    if ang.find("(") > 0:
                        ang = ang + ")"
                else:
                    ang = ang + "+"
            # now add this order
            line = line + rad + ang
            # finally add counting
            count = count + 1
    line = line + ";"
    codeprint.printLine(line,f)
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,040
|
murfreesboro/dftints
|
refs/heads/master
|
/infor.py
|
"""
get the maximum L for generating the angular part of dft basis
"""
__author__ = "Fenglai Liu"
import sys
import os
# global data
basisSetOrder = " "
maxL = 6 # this is default, we just generate all of codes up to I orbital
def setBasisSetOrder():
    """
    set the basis set order
    in the future we can define other basis set order
    if you want
    just modify the shell.py
    """
    # only the libint ordering is supported at the moment
    global basisSetOrder
    basisSetOrder = "libint"
def setMaxL(choice):
    """
    set the maxL
    in the future we can define other basis set order
    if you want
    just modify the shell.py
    """
    global maxL
    # negative angular momentum is meaningless -> abort the generator
    if choice >= 0:
        maxL = choice
    else:
        print "Illegal choice provided in setMaxL, must be an integer >=0"
        sys.exit()
def getBasisSetOrder():
    # read-only accessor; 'global' is redundant for reads but harmless
    global basisSetOrder
    return basisSetOrder
def getMaxL():
    # read-only accessor; 'global' is redundant for reads but harmless
    global maxL
    return maxL
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,041
|
murfreesboro/dftints
|
refs/heads/master
|
/generateAngBasis.py
|
"""
This module is used to generate the angular part of codes for DFT basis set
module
"""
__author__ = "Fenglai Liu"
import sys
import os
import infor
import shell
import basis
import codeprint
import shellsymbol
def generateCode():
    """
    print out the code for angular part

    Emit angdftbasis.cpp, which evaluates x^l*y^m*z^n for every
    Cartesian basis function up to lmax on each grid point.
    """
    f = open("angdftbasis.cpp", "w")
    maxL = infor.getMaxL() + 4 # we consider the fourth derivatives of basis set
    # the comment part for the file
    f.write("/**\n")
    line = " * This function is used to generating the angular part for the "
    codeprint.printLine(line,f)
    line = " * Cartesian type of basis set functions on a given atom. The "
    codeprint.printLine(line,f)
    line = " * basis set functions are evaluated for the given shell which "
    codeprint.printLine(line,f)
    line = " * is characterized by the lmax value."
    codeprint.printLine(line,f)
    line = " * \\param ng number of grid points "
    codeprint.printLine(line,f)
    line = " * \\param pts grid point coordinates(3*ng) "
    codeprint.printLine(line,f)
    line = " * \\param c basis set center coordinates(3) "
    codeprint.printLine(line,f)
    line = " * \\param lmax maximum L value of all shells on this atom "
    codeprint.printLine(line,f)
    line = " * \\return ang angular part of the basis set values(nCarBas,ng) "
    codeprint.printLine(line,f)
    line = " * \\author Fenglai Liu and Jing Kong "
    codeprint.printLine(line,f)
    f.write(" */\n")
    # including head files
    line = "#include\"libgen.h\""
    codeprint.printLine(line,f)
    line = "#include\"batchbasis.h\""
    codeprint.printLine(line,f)
    line = "using namespace batchbasis;"
    codeprint.printLine(line,f)
    f.write("\n\n")
    # print out the function name
    line = "void BatchBasis::angDFTBasis(const UInt& ng, const UInt& lmax, const Double* pts, const Double* c, Double* ang)"
    codeprint.printLine(line,f)
    # here we enter in real code
    line = "{"
    codeprint.printLine(line,f)
    codeprint.increaseIndentation()
    f.write("\n")
    # now begin the loop over grids
    # for each grid point, we calculate all the
    # possible angular basis sets
    line = "for(UInt i=0; i<ng; i++) {"
    codeprint.printLine(line,f)
    codeprint.increaseIndentation()
    f.write("\n")
    # grid-point coordinates relative to the basis set center
    line = "Double GCX = pts[i*3  ] - c[0]; // X" # x
    codeprint.printLine(line,f)
    line = "Double GCY = pts[i*3+1] - c[1]; // Y" # y
    codeprint.printLine(line,f)
    line = "Double GCZ = pts[i*3+2] - c[2]; // Z" # z
    codeprint.printLine(line,f)
    # set the total basis set number
    f.write("\n")
    line = "// this is to evaluate total number of basis sets, L from 0 to lmax"
    codeprint.printLine(line,f)
    line = "UInt nTolBas = (lmax+1)*(lmax+2)*(lmax+3)/6; "
    codeprint.printLine(line,f)
    # loop over the angular momentums
    f.write("\n")
    line = "for(UInt L=0; L<= lmax; L++) {"
    codeprint.printLine(line,f)
    codeprint.increaseIndentation()
    # loop over all possible angular momentums
    # one generated C++ if/else-if branch per L value
    for L in range(maxL+1):
        symbol = shellsymbol.getShellSymbol(L)
        if L == 0:
            line = "if(L == " + str(L) + ") {"
        else:
            line = "} else if(L == " + str(L) + ") {"
        codeprint.printLine(line,f)
        codeprint.increaseIndentation()
        s = shell.shell(L)
        printCodeForShell(s,f)
        codeprint.decreaseIndentation()
    line = "}" # matching the if
    codeprint.printLine(line,f)
    # end block of l = lmin to lmax
    codeprint.decreaseIndentation()
    line = "}" # matching the for loop on L
    codeprint.printLine(line,f)
    codeprint.decreaseIndentation()
    line = "}" # matching the loop over grids
    codeprint.printLine(line,f)
    # end of function block
    codeprint.decreaseIndentation()
    line = "}" # matching the main body function
    codeprint.printLine(line,f)
    f.write("\n\n")
    # end of whole file
    f.close()
def printCodeForShell(s,f):
    """
    print out the code of shell section

    Writes one generated C++ assignment per Cartesian basis function of
    shell *s*: ang[pos+i*nTolBas] = GCX^l*GCY^m*GCZ^n.
    """
    # consider S shell
    L = s.getL()
    if L == 0:
        # the S function angular part is identically one
        code = "ang[0+i*nTolBas]" + " = ONE;"
        codeprint.printLine(code,f)
        return
    # real work
    basisList = s.getBasis()
    # NOTE: Python 2 integer division — exact, since L*(L+1)*(L+2) is
    # divisible by 6; gives the index of the first function of shell L
    offset = L*(L+1)*(L+2)/6 # calculate the position of this shell
    pos = 0
    for bas in basisList:
        l,m,n = bas.getlmn()
        position = pos + offset
        code = "ang[" + str(position) + "+i*nTolBas" + "]" + " = " # LHS
        # get RHS
        if l > 0:
            codex = getXYZMultiplication("GCX",l)
        else:
            codex = ""
        if m > 0:
            codey = getXYZMultiplication("GCY",m)
        else:
            codey = ""
        if n > 0:
            codez = getXYZMultiplication("GCZ",n)
        else:
            codez = ""
        # real expression
        # glue "*" between the nonzero factors only
        if m > 0 or n > 0:
            if l > 0:
                codex += "*"
        if n > 0:
            if m > 0:
                codey += "*"
        code += codex + codey + codez + ";"
        codeprint.printLine(code,f)
        pos = pos + 1
def getXYZMultiplication(v,order):
    """
    here for each GCX, GCY or GCZ we multiply it up to order
    and return the string

    Returns "v*v*...*v" with *order* copies of *v*; order must be >= 1,
    anything else aborts the generator.
    """
    if order < 1:
        # single-argument parenthesized print works under both
        # Python 2 and Python 3 (message kept verbatim, typo included)
        print("Inproper order in getXYZMultiplication")
        sys.exit()
    # str.join replaces the manual concatenation loop
    return "*".join([v] * order)
|
{"/derivparser.py": ["/shell.py", "/basis.py", "/codeprint.py"]}
|
4,063
|
nio-blocks/queue
|
refs/heads/master
|
/queue_block.py
|
import json
from collections import defaultdict
from datetime import timedelta
from threading import Lock
from nio.block.base import Block
from nio.block.mixins.group_by.group_by import GroupBy
from nio.block.mixins.persistence.persistence import Persistence
from nio.command import command
from nio.command.params.dict import DictParameter
from nio.command.params.string import StringParameter
from nio.modules.scheduler import Job
from nio.properties import IntProperty, BoolProperty, \
Property, TimeDeltaProperty, VersionProperty
from nio.properties.util.evaluator import Evaluator
@command("update_props", DictParameter("props", default=''))
@command("view",
StringParameter("query", default='{{ True }}'),
StringParameter("group", default=''))
@command("remove",
StringParameter("query", default=''),
StringParameter("group", default=''))
@command("emit")
class Queue(Persistence, GroupBy, Block):
""" Queue block.
A NIO block for queueing up signals. As signals pile up,
the Queue block releases a configurable number at a configurable
interval. If incoming signals would overflow the queue, signals
are popped off the front as needed.
If a 'group_by' string is configured, incoming signals are divided
and grouped by the value of that attribute. The configured capacity
applies to *each* such queue, not the block as a whole.
"""
version = VersionProperty("1.0.1")
interval = TimeDeltaProperty(title='Notification Interval',
default={'seconds': 1},
allow_none=True)
capacity = IntProperty(default=100, title='Capacity')
chunk_size = IntProperty(default=1, title='Chunk Size')
reload = BoolProperty(default=False, title='Auto-Reload?')
uniqueness = Property(title='Queue Uniqueness Expression',
allow_none=True,
default="{{ None }}")
update = BoolProperty(title='Update Non-Unique Signals', default=False)
def persisted_values(self):
return ["_queues"]
def __init__(self):
super().__init__()
self._queues = defaultdict(list)
self._queue_locks = defaultdict(Lock)
self._meta_lock = Lock()
self._emit_job = None
def configure(self, context):
super().configure(context)
# Make sure perisisted queue capacity is less than current config
for queue_name, queue_values in self._queues.items():
self._queues[queue_name] = queue_values[:self.capacity()]
# build _groups for groupby mixin
self._groups = set(self._queues.keys())
def start(self):
super().start()
self._start_emit_job()
def stop(self):
if self._emit_job is not None:
self._emit_job.cancel()
super().stop()
def process_signals(self, signals):
self.logger.debug("Processing {} signals".format(len(signals)))
self.for_each_group(self._push_group, signals)
if not self.interval():
self.emit()
def pop(self, grp):
''' Remove the top n signals from the specified queue.
Args:
grp (str): The queue from which to pop.
count (int): The number of signals to pop off.
reload (bool): If True, put popped signals back on queue.
Returns:
top_n (list): 'Count' signals from the front of the queue.
'''
count = self.chunk_size()
reload = self.reload()
# lock the queue we're popping from
self.logger.debug("pop: {} {} {}".format(grp, count, reload))
with self._get_lock(grp):
# check out the front of the queue
top_n = self._queues[grp][0:count]
self.logger.debug(
"Removing %d signals from %s_queue" % (len(top_n), grp))
self._queues[grp][:] = self._queues[grp][len(top_n):]
# If reloading, put signal back on queue.
if reload:
self.logger.debug("Reloading {}_queue".format(grp))
self._queues[grp].extend(top_n)
return top_n
def push(self, signal, grp):
''' Add a signal to the back of the queue.
Args:
signal (Signal): The signal to add.
grp (str): Group to add signal to.
Returns:
None
'''
queue = self._queues[grp]
# check for uniqueness if property is set
try:
unique_val = self.uniqueness(signal)
self.logger.debug(
"Testing uniqueness for signal: {}".format(unique_val))
except Exception as e:
unique_val = None
self.logger.warning(
"Uniqueness expression failed. Using value of None.")
if unique_val is not None:
for idx, sig in enumerate(queue):
try:
sig_val = self.uniqueness(sig)
except Exception as e:
sig_val = None
if sig_val == unique_val:
self.logger.debug(
"Signal {} already in {}_queue".format(sig_val, grp)
)
if self.update():
queue[idx] = signal
return
# pop one off the top of that queue if it's at capacity
if len(queue) == self.capacity():
self.logger.debug(
"Pushing signal and capactity of {}_signal is full: {}".format(
grp, self.capacity()
)
)
queue.pop(0)
self.logger.debug("Appending signal to {}_queue".format(grp))
queue.append(signal)
def _push_group(self, signals, group):
# lock the queue before appending
with self._get_lock(group):
for signal in signals:
self.push(signal, group)
def _get_lock(self, grp):
''' Returns the lock for a particular queue.
Note that we're maintaining a synchronized dictionary of locks
alongside our dict of queues.
'''
with self._meta_lock:
self._queue_locks[grp] = self._queue_locks.get(grp, Lock())
return self._queue_locks[grp]
def _start_emit_job(self):
''' Start job that emits signals from the queue '''
if self.interval() and self.interval().total_seconds() > 0:
# only schedule if the interval is a positive number
self._emit_job = Job(
self.emit,
self.interval(),
True
)
def emit(self):
''' Notify the configured number of signals from the front of the queue.
'''
signals_to_notify = self.for_each_group(self.pop)
if signals_to_notify:
self.logger.debug(
"Notifying {} signals".format(len(signals_to_notify))
)
self.notify_signals(signals_to_notify)
def _inspect_group(self, response, group):
response_group = {'count': 0, 'signals': []}
query = response.get('query', '{{ True }}')
ignored_signals = []
for signal in self._queues.get(group, []):
try:
eval = Evaluator(query).evaluate(signal)
except:
eval = False
if eval:
response_group['signals'].append(
json.loads(json.dumps(
signal.to_dict(),
indent=4, separators=(',', ': '),
default=str))
)
response_group['count'] += 1
response['count'] += 1
else:
ignored_signals.append(signal)
response['groups'][group] = response_group
return response, ignored_signals
def view(self, query, group):
''' Command to view the signals that are in the queue.
If no group parameter is specified, all queues are returned.
'''
self.logger.debug("Command: view")
response = {}
response['query'] = query
response['group'] = group
response['count'] = 0
response['groups'] = {}
if group and group in self._queues:
# if group exists, return only the specified group
self._view_group(group, response)
elif not group:
# if no group is specifed in params return all groups
self.for_each_group(self._view_group,
**{'response': response})
return response
def _view_group(self, group, response):
with self._get_lock(group):
response, _ = self._inspect_group(response, group)
def remove(self, query, group):
''' Remove signals from *group* where *query* is True.
Signals are not notified.
'''
self.logger.debug("Command: remove")
response = {}
response['query'] = query
response['group'] = group
response['count'] = 0
response['groups'] = {}
if group and group in self._queues:
# if group exists, remove from only only the specified group
self._remove_from_group(group, response, query)
elif not group:
# if no group is specifed in params return all groups
self.for_each_group(self._remove_from_group,
**{'response': response, 'query': query})
return response
def _remove_from_group(self, group, response, query):
with self._get_lock(group):
response, signals = self._inspect_group(response, group)
# signals that don't match the query stay in the queue, but if
# there are no signals remaining, delete the entire queue.
if len(signals) > 0:
self._queues[group] = signals
else:
# _queues is a dict with keys that make up the set _groups.
# These must be kept in sync when removing keys in order to
# maintain the true state of the block. If these objects are
# not synced, a "view" or "remove" command for all groups will
# show that groups which have previously been expired are still
# present, due to the for_each_group() call, which uses the
# _groups set to iterate over the groups.
self.logger.debug("Deleting empty queue {}.".format(group))
self._queues.pop(group, None)
self._groups.remove(group)
    def update_props(self, props):
        ''' Command: update the *interval* property.

        The scheduled emit job is canceled and a new repeatable emit job
        is started.

        Args:
            props (dict): expects an 'interval' key holding a dict with any
                of 'days', 'seconds', 'microseconds'.

        Returns:
            dict: the new and previous interval on success, otherwise a
            'message' describing the validation failure.
        '''
        self.logger.debug("Command: update_props")
        response = {}
        if props is None or not isinstance(props, dict):
            response['message'] = \
                "'props' needs to be a dictionary: {}".format(props)
            return response
        # Update *interval*.
        interval = props.get('interval')
        # NOTE(review): a dict whose days/seconds/microseconds are all 0 or
        # missing falls through to the error branch below, so a zero
        # interval cannot be set via this command — confirm that's intended.
        if interval and isinstance(interval, dict) and \
                (interval.get('days') or
                 interval.get('seconds') or interval.get('microseconds')):
            days = interval.get('days', 0)
            seconds = interval.get('seconds', 0)
            microseconds = interval.get('microseconds', 0)
            # timedelta's first three positional parameters are exactly
            # (days, seconds, microseconds).
            interval = timedelta(days, seconds, microseconds)
            response['interval'] = interval
            response['prev_interval'] = self.interval
            # cancel emit job and restart with new interval
            if self._emit_job is not None:
                self._emit_job.cancel()
            self._start_emit_job()
            # NOTE(review): self.interval is assigned only AFTER
            # _start_emit_job(); if that helper reads self.interval, the
            # restarted job may still be using the old value — confirm
            # ordering against _start_emit_job's implementation.
            self.interval = interval
            self.logger.info(
                'Interval has been updated to {}'.format(interval))
        elif interval:
            response['message'] = \
                "'interval' needs to be a timedelta dict: {}".format(interval)
        return response
|
{"/tests/test_queue_block.py": ["/queue_block.py"]}
|
4,064
|
nio-blocks/queue
|
refs/heads/master
|
/tests/test_queue_block.py
|
from collections import defaultdict
from unittest.mock import MagicMock
from nio.testing.block_test_case import NIOBlockTestCase
from nio.signal.base import Signal
from nio.testing.modules.scheduler.scheduler import JumpAheadScheduler
from ..queue_block import Queue
class FlavorSignal(Signal):
    """A minimal test Signal exposing a *flavor* and a *meta* qualifier.

    The attributes feed the block's group_by/uniqueness expressions in the
    tests below.
    """
    def __init__(self, flavor, meta='regular'):
        super().__init__()
        self.flavor, self.meta = flavor, meta
class TestQueue(NIOBlockTestCase):
    """Exercises the Queue block: emit intervals, grouping, capacity,
    uniqueness, the view/remove commands, and persistence loading."""
    def test_emit(self):
        """A queued signal is notified after one interval and the queue drains."""
        signals = [Signal({})]
        blk = Queue()
        config = {
            "interval": {
                "seconds": 1
            },
            "capacity": 4,
            "chunk_size": 1,
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        JumpAheadScheduler.jump_ahead(2)
        # queue should be empty and only the input signal should be notified
        self.assertEqual(len(blk._queues[None]), 0)
        self.assert_num_signals_notified(1, blk)
        blk.stop()
    def test_negative_interval(self):
        """ Don't emit signals on any interval when it is negative """
        signals = [Signal({})]
        blk = Queue()
        config = {
            "interval": {
                "seconds": -1
            },
            "capacity": 4,
            "chunk_size": 1,
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        JumpAheadScheduler.jump_ahead(2)
        # signal should still be in the queue, and no signals notified
        self.assertEqual(len(blk._queues[None]), 1)
        self.assert_num_signals_notified(0, blk)
        blk.stop()
    def test_zero_interval(self):
        """ Emit all queued signals on process_signals """
        blk = Queue()
        config = {
            "capacity": 1,
            "chunk_size": 1,
            "group_by": "{{ $group }}",
            "interval": {
                "seconds": 0
            },
            "reload": True,
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals([
            Signal({"group": "a", "number": 1}),
        ])
        self.assertEqual(len(blk._queues), 1)
        self.assert_num_signals_notified(1, blk)
        self.assert_signal_list_notified([
            Signal({"group": "a", "number": 1}),
        ])
        # a newer signal for the same group replaces the queued one
        blk.process_signals([
            Signal({"group": "a", "number": 2}),
        ])
        self.assertEqual(len(blk._queues), 1)
        self.assert_num_signals_notified(2, blk)
        self.assert_signal_list_notified([
            Signal({"group": "a", "number": 2}),
        ])
        # a new group emits the full contents of every queue
        blk.process_signals([
            Signal({"group": "b", "number": 1}),
        ])
        self.assertEqual(len(blk._queues), 2)
        self.assert_num_signals_notified(4, blk)
        self.assert_signal_list_notified([
            Signal({"group": "a", "number": 2}),
            Signal({"group": "b", "number": 1}),
        ])
        blk.stop()
    def test_group_by(self):
        """Signals are queued under the group computed from group_by."""
        signals = [
            FlavorSignal(None),
            FlavorSignal('apple'),
            FlavorSignal('cherry')
        ]
        blk = Queue()
        config = {
            "interval": {
                "minutes": 1
            },
            "capacity": 100,
            "group_by": '{{$flavor}}'
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues[None]), 1)
        self.assertEqual(len(blk._queues['cherry']), 1)
        self.assertEqual(len(blk._queues['apple']), 1)
        blk.stop()
    def test_full(self):
        """When capacity is reached the oldest signal is displaced."""
        signals = [
            FlavorSignal('cherry'),
            FlavorSignal('umami')
        ]
        blk = Queue()
        config = {
            "interval": {
                "minutes": 1
            },
            "capacity": 1,
            "log_level": "DEBUG"
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues[None]), 1)
        # the newest signal survives
        self.assertEqual(blk._queues[None][0].flavor, 'umami')
        blk.stop()
    def test_reload(self):
        """With reload=True, emitted signals are re-queued after emission."""
        signals = [
            FlavorSignal(flavor='apple'),
            FlavorSignal(flavor='cherry')
        ]
        blk = Queue()
        config = {
            "interval": {
                "seconds": 1
            },
            "capacity": 100,
            "group_by": '{{$flavor}}',
            "reload": True
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues['cherry']), 1)
        self.assertEqual(len(blk._queues['apple']), 1)
        JumpAheadScheduler.jump_ahead(2.5)
        # two intervals elapsed: queues are intact and 4 emissions occurred
        self.assertEqual(len(blk._queues['cherry']), 1)
        self.assertEqual(len(blk._queues['apple']), 1)
        self.assert_num_signals_notified(4, blk)
        blk.stop()
    def test_unique(self):
        """With uniqueness set, a duplicate key keeps the FIRST signal."""
        signals = [
            FlavorSignal(flavor='apple'),
            FlavorSignal(flavor='cherry', meta='regular'),
            FlavorSignal(flavor='cherry', meta='sour')
        ]
        blk = Queue()
        config = {
            "interval": {
                "minutes": 1
            },
            "capacity": 4,
            "uniqueness": "{{$flavor}}"
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues[None]), 2)
        self.assertEqual(blk._queues[None][1].meta, 'regular')
        blk.stop()
    def test_unique_with_default_config(self):
        """Without uniqueness configured, duplicates are all queued."""
        signals = [
            FlavorSignal(flavor='apple'),
            FlavorSignal(flavor='cherry', meta='regular'),
            FlavorSignal(flavor='cherry', meta='sour')
        ]
        blk = Queue()
        self.configure_block(blk, {})
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues[None]), 3)
        self.assertEqual(blk._queues[None][1].meta, 'regular')
        blk.stop()
    def test_unique_with_update(self):
        """With update=True, a duplicate key is replaced by the NEWEST signal."""
        signals = [
            FlavorSignal(flavor='apple'),
            FlavorSignal(flavor='cherry', meta='regular'),
            FlavorSignal(flavor='cherry', meta='sour')
        ]
        blk = Queue()
        config = {
            "interval": {
                "minutes": 1
            },
            "capacity": 4,
            "uniqueness": "{{$flavor}}",
            "update": True
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues[None]), 2)
        self.assertEqual(blk._queues[None][1].meta, 'sour')
        blk.stop()
    def test_all(self):
        """group_by + uniqueness + reload interact without duplicating signals."""
        signals = [
            FlavorSignal(flavor='apple'),
            FlavorSignal(flavor='cherry'),
            FlavorSignal(flavor='cherry'),
            FlavorSignal(flavor='cherry')
        ]
        blk = Queue()
        config = {
            "interval": {
                "seconds": 1
            },
            "capacity": 2,
            "group_by": '{{$flavor}}',
            "reload": True,
            "uniqueness": "{{$flavor}}"
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        self.assertEqual(len(blk._queues['cherry']), 1)
        self.assertEqual(len(blk._queues['apple']), 1)
        JumpAheadScheduler.jump_ahead(2)
        self.assertEqual(len(blk._queues['cherry']), 1)
        self.assertEqual(len(blk._queues['apple']), 1)
        blk.process_signals([FlavorSignal('cherry')])
        self.assertEqual(len(blk._queues['cherry']), 1)
        blk.stop()
    def test_view_command(self):
        """The view command reports matching signals without mutating queues."""
        signals = [
            FlavorSignal(None),
            FlavorSignal('apple'),
            FlavorSignal('cherry')
        ]
        blk = Queue()
        config = {
            "interval": {
                "minutes": 1
            },
            "capacity": 100,
            "group_by": '{{$flavor}}'
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        # view nothing from all groups
        resp = blk.view('', None)
        self.assertEqual(len(resp['groups'][None]['signals']), 0)
        self.assertEqual(resp['groups'][None]['count'], 0)
        self.assertEqual(resp['count'], 0)
        self.assertEqual(resp['query'], '')
        # viewing only None group is not possible because it becomes 'all'
        resp = blk.view('{{ True }}', None)
        self.assertEqual(len(resp['groups'][None]['signals']), 1)
        self.assertEqual(resp['groups'][None]['count'], 1)
        self.assertEqual(resp['count'], 3)
        self.assertEqual(resp['query'], '{{ True }}')
        # view all groups
        resp = blk.view('{{ True }}', '')
        self.assertEqual(resp['count'], 3)
        self.assertEqual(resp['query'], '{{ True }}')
        self.assertEqual(len(blk._queues[None]), 1)
        self.assertEqual(len(blk._queues['cherry']), 1)
        self.assertEqual(len(blk._queues['apple']), 1)
        blk.stop()
    def test_remove_command(self):
        """The remove command drops matching signals and empty queues."""
        signals = [
            FlavorSignal(None),
            FlavorSignal('apple'),
            FlavorSignal('cherry')
        ]
        blk = Queue()
        config = {
            "interval": {
                "minutes": 1
            },
            "capacity": 100,
            "group_by": '{{$flavor}}'
        }
        self.configure_block(blk, config)
        blk.start()
        blk.process_signals(signals)
        # don't remove anything from None
        resp = blk.remove('', None)
        self.assertEqual(len(resp['groups'][None]['signals']), 0)
        self.assertEqual(resp['groups'][None]['count'], 0)
        self.assertEqual(resp['count'], 0)
        self.assertEqual(resp['query'], '')
        self.assertEqual(len(blk._queues[None]), 1)
        self.assertTrue(None in blk._groups)
        # remove 'apple' group
        resp = blk.remove('{{ True }}', 'apple')
        self.assertEqual(len(resp['groups']['apple']['signals']), 1)
        self.assertEqual(resp['groups']['apple']['count'], 1)
        self.assertEqual(resp['count'], 1)
        self.assertEqual(resp['query'], '{{ True }}')
        self.assertFalse('apple' in blk._groups)
        self.assertFalse('apple' in blk._queues)
        # remove everything from all groups
        resp = blk.remove('{{ True }}', '')
        self.assertEqual(resp['count'], 2)
        self.assertEqual(resp['query'], '{{ True }}')
        self.assertEqual(len(blk._queues), 0)
        self.assertEqual(len(blk._groups), 0)
        blk.stop()
    def _check_persisted_values(self, blk, persisted_queues):
        """Assert persisted queues were loaded, truncated to capacity, and
        that each loaded queue name is tracked in the block's group set."""
        blk._load.assert_called_once_with()
        # Make sure queues is a defaultdict
        self.assertEqual(defaultdict, type(blk._queues))
        # Check values of loaded queues
        for queue_name, queue_values in persisted_queues.items():
            self.assertEqual(queue_values[:blk.capacity()],
                             blk._queues[queue_name])
            self.assertTrue(queue_name in blk._groups)
    def test_load_persistence(self):
        """Queues loaded from persistence are adopted on configure."""
        blk = Queue()
        persisted_queues = defaultdict(list, {'a': [1], 'b': [2, 3]})
        def side_effect():
            blk._queues = persisted_queues
        blk._load = MagicMock(side_effect=side_effect)
        self.configure_block(blk, {})
        self._check_persisted_values(blk, persisted_queues)
    def test_load_persistence_when_capacity_config_shrinks(self):
        """Loaded queues are truncated when the configured capacity shrank."""
        blk = Queue()
        persisted_queues = defaultdict(list, {'a': [1], 'b': [2, 3]})
        def side_effect():
            blk._queues = persisted_queues
        blk._load = MagicMock(side_effect=side_effect)
        # Use a smaller capacity than is loaded from persistence
        self.configure_block(blk, {"capacity": 1})
        self._check_persisted_values(blk, persisted_queues)
|
{"/tests/test_queue_block.py": ["/queue_block.py"]}
|
4,111
|
Nanoribbon/Stock
|
refs/heads/master
|
/backup/StockerGui.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'StockerGui.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Auto-generated (pyuic5) widget layout for the Stocker main window.

    Do not hand-edit widget construction here; regenerate from
    StockerGui.ui instead. Only comments were added in review.
    """
    def setupUi(self, MainWindow):
        """Build and lay out all widgets on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(954, 687)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # --- Top-right row: "Limit List" button, max-value input, progress ---
        self.layoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.layoutWidget.setGeometry(QtCore.QRect(310, 10, 416, 33))
        self.layoutWidget.setObjectName("layoutWidget")
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.layoutWidget)
        self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.pushButton_2 = QtWidgets.QPushButton(self.layoutWidget)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout_3.addWidget(self.pushButton_2)
        self.label_3 = QtWidgets.QLabel(self.layoutWidget)
        self.label_3.setObjectName("label_3")
        self.horizontalLayout_3.addWidget(self.label_3)
        self.lineEdit = QtWidgets.QLineEdit(self.layoutWidget)
        self.lineEdit.setMinimumSize(QtCore.QSize(60, 0))
        self.lineEdit.setMaximumSize(QtCore.QSize(60, 16777215))
        self.lineEdit.setAlignment(QtCore.Qt.AlignCenter)
        self.lineEdit.setClearButtonEnabled(True)
        self.lineEdit.setObjectName("lineEdit")
        self.horizontalLayout_3.addWidget(self.lineEdit)
        self.progressBar_1 = QtWidgets.QProgressBar(self.layoutWidget)
        self.progressBar_1.setProperty("value", 0)
        self.progressBar_1.setObjectName("progressBar_1")
        self.horizontalLayout_3.addWidget(self.progressBar_1)
        self.label_2 = QtWidgets.QLabel(self.layoutWidget)
        self.label_2.setMinimumSize(QtCore.QSize(50, 0))
        self.label_2.setMaximumSize(QtCore.QSize(50, 16777215))
        self.label_2.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.label_2.setText("")
        self.label_2.setObjectName("label_2")
        self.horizontalLayout_3.addWidget(self.label_2)
        # --- Second row: "Analyze" button, testdata checkbox, factor input ---
        self.layoutWidget_2 = QtWidgets.QWidget(self.centralwidget)
        self.layoutWidget_2.setGeometry(QtCore.QRect(20, 50, 621, 33))
        self.layoutWidget_2.setObjectName("layoutWidget_2")
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.layoutWidget_2)
        self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.pushButton_3 = QtWidgets.QPushButton(self.layoutWidget_2)
        self.pushButton_3.setObjectName("pushButton_3")
        self.horizontalLayout_4.addWidget(self.pushButton_3)
        self.checkBox = QtWidgets.QCheckBox(self.layoutWidget_2)
        self.checkBox.setObjectName("checkBox")
        self.horizontalLayout_4.addWidget(self.checkBox)
        self.label = QtWidgets.QLabel(self.layoutWidget_2)
        self.label.setObjectName("label")
        self.horizontalLayout_4.addWidget(self.label)
        self.lineEdit_2 = QtWidgets.QLineEdit(self.layoutWidget_2)
        self.lineEdit_2.setMinimumSize(QtCore.QSize(60, 0))
        self.lineEdit_2.setMaximumSize(QtCore.QSize(60, 16777215))
        self.lineEdit_2.setAlignment(QtCore.Qt.AlignCenter)
        self.lineEdit_2.setClearButtonEnabled(True)
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.horizontalLayout_4.addWidget(self.lineEdit_2)
        self.progressBar_2 = QtWidgets.QProgressBar(self.layoutWidget_2)
        self.progressBar_2.setProperty("value", 0)
        self.progressBar_2.setObjectName("progressBar_2")
        self.horizontalLayout_4.addWidget(self.progressBar_2)
        self.label_4 = QtWidgets.QLabel(self.layoutWidget_2)
        self.label_4.setMinimumSize(QtCore.QSize(50, 0))
        self.label_4.setMaximumSize(QtCore.QSize(50, 16777215))
        self.label_4.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.label_4.setText("")
        self.label_4.setObjectName("label_4")
        self.horizontalLayout_4.addWidget(self.label_4)
        # --- Left column: ticker list output ---
        self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
        self.textBrowser.setGeometry(QtCore.QRect(20, 90, 61, 571))
        self.textBrowser.setObjectName("textBrowser")
        # --- Top-left row: "Load NASDAQ Data" button and status label ---
        self.layoutWidget1 = QtWidgets.QWidget(self.centralwidget)
        self.layoutWidget1.setGeometry(QtCore.QRect(20, 10, 271, 32))
        self.layoutWidget1.setObjectName("layoutWidget1")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.layoutWidget1)
        self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.pushButton_1 = QtWidgets.QPushButton(self.layoutWidget1)
        self.pushButton_1.setObjectName("pushButton_1")
        self.horizontalLayout_2.addWidget(self.pushButton_1)
        self.label_1 = QtWidgets.QLabel(self.layoutWidget1)
        self.label_1.setMinimumSize(QtCore.QSize(50, 0))
        self.label_1.setMaximumSize(QtCore.QSize(50, 16777215))
        self.label_1.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.label_1.setText("")
        self.label_1.setObjectName("label_1")
        self.horizontalLayout_2.addWidget(self.label_1)
        # --- Standalone "test" button ---
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(730, 10, 113, 32))
        self.pushButton.setObjectName("pushButton")
        # --- Main plotting area: two placeholder widgets side by side
        #     (replaced with matplotlib canvases by the application) ---
        self.layoutWidget2 = QtWidgets.QWidget(self.centralwidget)
        self.layoutWidget2.setGeometry(QtCore.QRect(91, 91, 851, 571))
        self.layoutWidget2.setObjectName("layoutWidget2")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget2)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label_5 = QtWidgets.QLabel(self.layoutWidget2)
        self.label_5.setMinimumSize(QtCore.QSize(0, 25))
        self.label_5.setMaximumSize(QtCore.QSize(16777215, 25))
        self.label_5.setText("")
        self.label_5.setObjectName("label_5")
        self.verticalLayout.addWidget(self.label_5)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.widget_1 = QtWidgets.QWidget(self.layoutWidget2)
        self.widget_1.setObjectName("widget_1")
        self.horizontalLayout.addWidget(self.widget_1)
        self.widget_2 = QtWidgets.QWidget(self.layoutWidget2)
        self.widget_2.setObjectName("widget_2")
        self.horizontalLayout.addWidget(self.widget_2)
        self.verticalLayout.addLayout(self.horizontalLayout)
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Apply translatable text to every widget."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.pushButton_2.setText(_translate("MainWindow", "Limit List"))
        self.label_3.setText(_translate("MainWindow", "max Value:"))
        self.lineEdit.setText(_translate("MainWindow", "2"))
        self.pushButton_3.setText(_translate("MainWindow", "Analyze"))
        self.checkBox.setText(_translate("MainWindow", "testdata"))
        self.label.setText(_translate("MainWindow", "factor:"))
        self.lineEdit_2.setText(_translate("MainWindow", "2"))
        self.pushButton_1.setText(_translate("MainWindow", "Load NASDAQ Data"))
        self.pushButton.setText(_translate("MainWindow", "test"))
|
{"/backup/Stocker.py": ["/StockerGui.py"]}
|
4,112
|
Nanoribbon/Stock
|
refs/heads/master
|
/backup/Stocker.py
|
"""
@author: Dr. Martin Hell
"""
import glob
import json
import time
import sys
import os
import re
import math
import numpy as np
import pandas as pd
import yfinance as yf
import ftplib
import datetime
import pyqtgraph as pg
import pyqtgraph.opengl as gl
import pyqtgraph.exporters
from plotly.offline import plot as plo
#import plotly.plotly as py
import plotly.graph_objs as go
import plotly.express as px
import matplotlib.pyplot as plt
from plotly.graph_objs import Scatter, Layout
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib import dates, pyplot
from PyQt5 import QtGui,QtCore
from PyQt5.QtCore import QCoreApplication
from PyQt5 import QtCore, uic, QtWidgets, QtWebEngineWidgets
from PyQt5.QtWidgets import QMainWindow, QApplication,QSlider, QWidget, QPushButton, QAction, QLineEdit, QMessageBox, QMenu, QVBoxLayout, QSizePolicy
from PyQt5.QtGui import QIcon
from PyQt5.QtGui import QPixmap, QScreen
from PyQt5.QtWidgets import QFileDialog
from pyqtgraph.Qt import QtCore, QtGui
from pyqtgraph import PlotWidget, plot
from time import time
from datetime import date
from matplotlib.ticker import MaxNLocator
from pathlib import Path
from yahoo_fin.stock_info import get_data
from yahoo_fin import stock_info as si
from pathlib import Path
from StockerGui import Ui_MainWindow
class MainWindow(QtWidgets.QMainWindow):
    """Stocker GUI: downloads the NASDAQ symbol list, screens for penny
    stocks below a price threshold, and plots monthly/intraday charts for
    tickers whose live price exceeds their mean open times a factor."""
    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)
        self.setWindowIcon(QtGui.QIcon('hellicon.png'))
        self.setWindowTitle('Stocker')
        uic.loadUi('StockerGui.ui', self)
        # Wire the buttons to their handlers.
        self.pushButton_1.clicked.connect(self.symbollister)
        self.pushButton_2.clicked.connect(self.pennystocks)
        self.pushButton_3.clicked.connect(self.analysis)
        self.pushButton.clicked.connect(self.test)
        # Embed two matplotlib canvases in place of the placeholder widgets.
        self.fig1 = Figure()
        self.canvas1 = FigureCanvas(self.fig1)
        self.verticalLayout.replaceWidget(self.widget_1, self.canvas1)
        self.fig2 = Figure()
        self.canvas2 = FigureCanvas(self.fig2)
        self.verticalLayout.replaceWidget(self.widget_2, self.canvas2)
        # Reference dates: 30 days back and 2 days back from today.
        self.today = datetime.date.today()
        self.past = self.today + datetime.timedelta(-30)
        self.now = self.today + datetime.timedelta(-2)
    def barcounter(self, bar, symbols, counter):
        """Set the progress bar named *bar* to *counter* out of len(symbols)."""
        pb = getattr(self, bar)
        pb.setMaximum(len(symbols))
        pb.setValue(counter)
    def get_symbol_df(self, ticka):
        """Return *ticka* with its index moved into a parsed 'index' column.

        Fix: ``reset_index`` returns a new frame — the original discarded
        the result, and ``drop=True`` would have dropped the very column
        the next line reads, so this helper always raised KeyError.
        """
        ticka = ticka.reset_index()
        ticka["index"] = pd.to_datetime(ticka["index"])
        return ticka
    def symbollister(self):
        """Download the NASDAQ symbol directory and save it as Tickers.json."""
        url = "ftp://ftp.nasdaqtrader.com/SymbolDirectory/nasdaqlisted.txt"
        df = pd.read_csv(url, sep="|")
        export = {'Symbols': list(df['Symbol'])}
        # Use a context manager so the file handle is always closed.
        with open("Tickers.json", 'w') as f:
            json.dump(export, f)
        self.label_1.setText("done")
    def pennystocks(self):
        """Screen tickers whose mean close over the last month is below the
        GUI threshold (lineEdit) and save survivors to pennystocks.json."""
        pennystocks = []
        with open('Tickers.json') as f:
            # Fix: Tickers.json holds {'Symbols': [...]}; iterating the dict
            # itself only yielded the single key string 'Symbols'.
            symbols = json.load(f)['Symbols']
        threshold = float(self.lineEdit.text())
        counter = 1
        for x in symbols:
            self.barcounter('progressBar_1', symbols, counter)
            ticka = yf.Ticker(x)
            # Fix: yfinance's history() takes start/end; the previous
            # start_date/end_date kwargs were swallowed by **kwargs and the
            # default period was fetched instead of the requested window.
            hist = ticka.history(start=str(self.past), end=str(self.today))
            print(str(x) + " -->> " + str(np.mean(hist["Close"])))
            if np.mean(hist["Close"]) < threshold:
                pennystocks.append(x)
            counter += 1
        with open("pennystocks.json", 'w') as f:
            json.dump(pennystocks, f)
        self.label_2.setText("done")
    def analysis(self):
        """List every screened ticker whose live price is at least
        mean(open) * factor (lineEdit_2), then plot the hot list."""
        self.textBrowser.clear()
        self.hotlist = {}
        self.ticklist = []
        gain = float(self.lineEdit_2.text())
        if self.checkBox.isChecked():
            datafile = 'pennystock_test.json'
        else:
            # Fix: pennystocks() writes "pennystocks.json"; the old name
            # 'pennystock.json' referred to a file that is never produced.
            datafile = 'pennystocks.json'
        with open(datafile) as f:
            symbols = json.load(f)
        counter = 1
        for x in symbols:
            ticka = yf.Ticker(str(x))
            ref = ticka.history(start=str(self.past), end=str(self.now),
                                index_as_date=True)
            ref.reset_index(inplace=True)
            op_val = ref['Open']
            live_data = si.get_live_price(x)
            if np.mean(op_val) * gain <= live_data:
                self.textBrowser.append(str(x))
                self.hotlist[str(x)] = [op_val]
                self.ticklist.append(x)
            self.barcounter('progressBar_2', symbols, counter)
            # Keep the GUI responsive during the long download loop.
            QCoreApplication.processEvents()
            counter += 1
        self.Tot = len(self.ticklist)
        self.Cols = 1
        self.label_4.setText('done')
        # Fix: label_4 is a QLabel; the old "QLineEdit { ... }" selector
        # never matched it, so the green "done" styling was a silent no-op.
        self.label_4.setStyleSheet(
            """QLabel { background-color: green; color: white }""")
        self.past_plotter()
        self.present_plotter()
    def recorder(self):
        """Append the current live price to every ticker in the hot list."""
        for key in self.hotlist:
            self.hotlist[key].append(si.get_live_price(key))
    def test(self):
        """Scratch handler wired to the 'test' button for API experiments."""
        ticka = yf.Ticker("NAKD")
        hist = ticka.history(period="1d", interval="5m", index_as_date=True)
        hist.reset_index(inplace=True)
        print(hist['Datetime'][0])
        t = hist['Datetime'][0]
        t = t.strftime("%H:%M:%S")
        print(t)
    def past_plotter(self):
        """Draw one open/close subplot per hot ticker for the last month."""
        self.fig1.clear()
        k = 1
        for tick in self.ticklist:
            ticka = yf.Ticker(str(tick))
            ref = ticka.history(period="1mo", index_as_date=True)
            op_val = ref['Open']
            cl_val = ref['Close']
            # x counts days back in time, so the newest sample sits at x=0.
            x = np.arange(len(op_val) - 1, -1, -1)
            ax = self.fig1.add_subplot(self.Tot, self.Cols, k)
            ax.set_xlim(len(op_val) - 1, -1)
            # Fix: tick_params' ``which`` accepts major/minor/both; the old
            # which='top' matched nothing, so these settings never applied.
            ax.tick_params(axis='x', which='both', bottom=False,
                           labelbottom=False)
            ax.plot(x, op_val, label='open', c='tab:purple')
            ax.plot(x, cl_val, label='close', c='tab:brown')
            ax.axvline(x=1)
            if k == self.Tot:
                # Only the bottom subplot shows x labels and the legend.
                ax.tick_params(axis='x', which='both', bottom=False,
                               labelbottom=True)
                ax.legend(loc='upper left', shadow=True)
            # Shade gains green and losses red between open and close.
            ax.fill_between(x, op_val, cl_val, where=cl_val >= op_val,
                            facecolor='green', interpolate=True)
            ax.fill_between(x, op_val, cl_val, where=cl_val <= op_val,
                            facecolor='red', interpolate=True)
            ax.set_title(str(tick), loc='center')
            ax.xaxis.set_major_locator(MaxNLocator(integer=True))
            k += 1
        self.fig1.tight_layout()
        self.canvas1.draw()
    def present_plotter(self):
        """Draw one intraday (5-minute close) subplot per hot ticker."""
        self.fig2.clear()
        k = 1
        for tick in self.ticklist:
            ticka = yf.Ticker(str(tick))
            hist = ticka.history(period="1d", interval="5m",
                                 index_as_date=True)
            print(hist)
            hist.reset_index(inplace=True)
            # Plain HH:MM:SS strings; matplotlib treats them categorically.
            hist['Datetime'] = [s.strftime("%H:%M:%S")
                                for s in hist['Datetime']]
            ax2 = self.fig2.add_subplot(self.Tot, self.Cols, k)
            ax2.tick_params(axis='x', which='both', bottom=True,
                            labelbottom=False)
            if k == self.Tot:
                # Only the bottom subplot shows x labels.
                ax2.tick_params(axis='x', which='both', bottom=True,
                                labelbottom=True)
            ax2.plot(hist['Datetime'], hist['Close'])
            ax2.set_title(str(tick), loc='center')
            ax2.xaxis.set_major_locator(plt.MaxNLocator(5))
            k += 1
        self.fig2.tight_layout()
        self.canvas2.draw()
#%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def main():
    """Create the Qt application, apply styling, and run the main window."""
    application = QtWidgets.QApplication(sys.argv)
    application.setStyle('Fusion')
    application.setAttribute(QtCore.Qt.AA_Use96Dpi)
    window = MainWindow()
    window.show()
    # Propagate the Qt event-loop exit code as the process exit status.
    sys.exit(application.exec_())
# Launch the GUI only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
{"/backup/Stocker.py": ["/StockerGui.py"]}
|
4,113
|
Nanoribbon/Stock
|
refs/heads/master
|
/StockerGui.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'StockerGui.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(954, 687)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
self.textBrowser.setGeometry(QtCore.QRect(20, 90, 71, 571))
self.textBrowser.setObjectName("textBrowser")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setGeometry(QtCore.QRect(730, 10, 113, 32))
self.pushButton.setObjectName("pushButton")
self.layoutWidget = QtWidgets.QWidget(self.centralwidget)
self.layoutWidget.setGeometry(QtCore.QRect(101, 91, 841, 571))
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.line_6 = QtWidgets.QFrame(self.layoutWidget)
self.line_6.setFrameShape(QtWidgets.QFrame.VLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.horizontalLayout_9.addWidget(self.line_6)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_9.addItem(spacerItem)
self.radioButton = QtWidgets.QRadioButton(self.layoutWidget)
self.radioButton.setObjectName("radioButton")
self.horizontalLayout_9.addWidget(self.radioButton)
self.radioButton_2 = QtWidgets.QRadioButton(self.layoutWidget)
self.radioButton_2.setChecked(True)
self.radioButton_2.setObjectName("radioButton_2")
self.horizontalLayout_9.addWidget(self.radioButton_2)
self.radioButton_3 = QtWidgets.QRadioButton(self.layoutWidget)
self.radioButton_3.setObjectName("radioButton_3")
self.horizontalLayout_9.addWidget(self.radioButton_3)
self.radioButton_4 = QtWidgets.QRadioButton(self.layoutWidget)
self.radioButton_4.setObjectName("radioButton_4")
self.horizontalLayout_9.addWidget(self.radioButton_4)
self.radioButton_5 = QtWidgets.QRadioButton(self.layoutWidget)
self.radioButton_5.setObjectName("radioButton_5")
self.horizontalLayout_9.addWidget(self.radioButton_5)
self.line_7 = QtWidgets.QFrame(self.layoutWidget)
self.line_7.setFrameShape(QtWidgets.QFrame.VLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.horizontalLayout_9.addWidget(self.line_7)
self.refreshbutton = QtWidgets.QPushButton(self.layoutWidget)
self.refreshbutton.setObjectName("refreshbutton")
self.horizontalLayout_9.addWidget(self.refreshbutton)
self.line_8 = QtWidgets.QFrame(self.layoutWidget)
self.line_8.setFrameShape(QtWidgets.QFrame.VLine)
self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_8.setObjectName("line_8")
self.horizontalLayout_9.addWidget(self.line_8)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.pushButton_4 = QtWidgets.QPushButton(self.layoutWidget)
self.pushButton_4.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("left.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_4.setIcon(icon)
self.pushButton_4.setObjectName("pushButton_4")
self.horizontalLayout_7.addWidget(self.pushButton_4)
self.pushButton_5 = QtWidgets.QPushButton(self.layoutWidget)
self.pushButton_5.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap("right.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_5.setIcon(icon1)
self.pushButton_5.setObjectName("pushButton_5")
self.horizontalLayout_7.addWidget(self.pushButton_5)
self.horizontalLayout_9.addLayout(self.horizontalLayout_7)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_9.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout_9)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.widget_1 = QtWidgets.QWidget(self.layoutWidget)
self.widget_1.setObjectName("widget_1")
self.horizontalLayout.addWidget(self.widget_1)
self.widget_2 = QtWidgets.QWidget(self.layoutWidget)
self.widget_2.setObjectName("widget_2")
self.horizontalLayout.addWidget(self.widget_2)
self.verticalLayout.addLayout(self.horizontalLayout)
self.layoutWidget1 = QtWidgets.QWidget(self.centralwidget)
self.layoutWidget1.setGeometry(QtCore.QRect(20, 50, 921, 37))
self.layoutWidget1.setObjectName("layoutWidget1")
self.horizontalLayout_8 = QtWidgets.QHBoxLayout(self.layoutWidget1)
self.horizontalLayout_8.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.pushButton_3 = QtWidgets.QPushButton(self.layoutWidget1)
self.pushButton_3.setObjectName("pushButton_3")
self.horizontalLayout_4.addWidget(self.pushButton_3)
self.line = QtWidgets.QFrame(self.layoutWidget1)
self.line.setFrameShape(QtWidgets.QFrame.VLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.horizontalLayout_4.addWidget(self.line)
self.checkBox = QtWidgets.QCheckBox(self.layoutWidget1)
self.checkBox.setObjectName("checkBox")
self.horizontalLayout_4.addWidget(self.checkBox)
self.line_2 = QtWidgets.QFrame(self.layoutWidget1)
self.line_2.setFrameShape(QtWidgets.QFrame.VLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.horizontalLayout_4.addWidget(self.line_2)
self.label = QtWidgets.QLabel(self.layoutWidget1)
self.label.setMinimumSize(QtCore.QSize(50, 0))
self.label.setMaximumSize(QtCore.QSize(50, 16777215))
self.label.setObjectName("label")
self.horizontalLayout_4.addWidget(self.label)
self.lineEdit_2 = QtWidgets.QLineEdit(self.layoutWidget1)
self.lineEdit_2.setMinimumSize(QtCore.QSize(60, 0))
self.lineEdit_2.setMaximumSize(QtCore.QSize(60, 16777215))
self.lineEdit_2.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_2.setClearButtonEnabled(True)
self.lineEdit_2.setObjectName("lineEdit_2")
self.horizontalLayout_4.addWidget(self.lineEdit_2)
self.line_3 = QtWidgets.QFrame(self.layoutWidget1)
self.line_3.setFrameShape(QtWidgets.QFrame.VLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.horizontalLayout_4.addWidget(self.line_3)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.label_4 = QtWidgets.QLabel(self.layoutWidget1)
self.label_4.setMinimumSize(QtCore.QSize(40, 0))
self.label_4.setMaximumSize(QtCore.QSize(40, 16777215))
self.label_4.setObjectName("label_4")
self.horizontalLayout_5.addWidget(self.label_4)
self.lineEdit_3 = QtWidgets.QLineEdit(self.layoutWidget1)
self.lineEdit_3.setMinimumSize(QtCore.QSize(40, 0))
self.lineEdit_3.setMaximumSize(QtCore.QSize(40, 16777215))
self.lineEdit_3.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_3.setObjectName("lineEdit_3")
self.horizontalLayout_5.addWidget(self.lineEdit_3)
self.horizontalLayout_4.addLayout(self.horizontalLayout_5)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.label_6 = QtWidgets.QLabel(self.layoutWidget1)
self.label_6.setMinimumSize(QtCore.QSize(30, 0))
self.label_6.setMaximumSize(QtCore.QSize(30, 16777215))
self.label_6.setObjectName("label_6")
self.horizontalLayout_6.addWidget(self.label_6)
self.lineEdit_4 = QtWidgets.QLineEdit(self.layoutWidget1)
self.lineEdit_4.setMinimumSize(QtCore.QSize(40, 0))
self.lineEdit_4.setMaximumSize(QtCore.QSize(40, 16777215))
self.lineEdit_4.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_4.setObjectName("lineEdit_4")
self.horizontalLayout_6.addWidget(self.lineEdit_4)
self.horizontalLayout_4.addLayout(self.horizontalLayout_6)
self.line_4 = QtWidgets.QFrame(self.layoutWidget1)
self.line_4.setFrameShape(QtWidgets.QFrame.VLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.horizontalLayout_4.addWidget(self.line_4)
self.progressBar_2 = QtWidgets.QProgressBar(self.layoutWidget1)
self.progressBar_2.setMinimumSize(QtCore.QSize(200, 0))
self.progressBar_2.setMaximumSize(QtCore.QSize(200, 16777215))
self.progressBar_2.setProperty("value", 0)
self.progressBar_2.setObjectName("progressBar_2")
self.horizontalLayout_4.addWidget(self.progressBar_2)
self.horizontalLayout_8.addLayout(self.horizontalLayout_4)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_8.addItem(spacerItem2)
self.line_5 = QtWidgets.QFrame(self.layoutWidget1)
self.line_5.setFrameShape(QtWidgets.QFrame.VLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.horizontalLayout_8.addWidget(self.line_5)
self.widget = QtWidgets.QWidget(self.centralwidget)
self.widget.setGeometry(QtCore.QRect(20, 10, 639, 35))
self.widget.setObjectName("widget")
self.horizontalLayout_10 = QtWidgets.QHBoxLayout(self.widget)
self.horizontalLayout_10.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.pushButton_1 = QtWidgets.QPushButton(self.widget)
self.pushButton_1.setObjectName("pushButton_1")
self.horizontalLayout_2.addWidget(self.pushButton_1)
self.label_1 = QtWidgets.QLabel(self.widget)
self.label_1.setMinimumSize(QtCore.QSize(50, 0))
self.label_1.setMaximumSize(QtCore.QSize(50, 16777215))
self.label_1.setStyleSheet("background-color: rgb(255, 255, 255);")
self.label_1.setText("")
self.label_1.setObjectName("label_1")
self.horizontalLayout_2.addWidget(self.label_1)
self.horizontalLayout_10.addLayout(self.horizontalLayout_2)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.pushButton_2 = QtWidgets.QPushButton(self.widget)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout_3.addWidget(self.pushButton_2)
self.label_3 = QtWidgets.QLabel(self.widget)
self.label_3.setObjectName("label_3")
self.horizontalLayout_3.addWidget(self.label_3)
self.lineEdit = QtWidgets.QLineEdit(self.widget)
self.lineEdit.setMinimumSize(QtCore.QSize(60, 0))
self.lineEdit.setMaximumSize(QtCore.QSize(60, 16777215))
self.lineEdit.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit.setClearButtonEnabled(True)
self.lineEdit.setObjectName("lineEdit")
self.horizontalLayout_3.addWidget(self.lineEdit)
self.progressBar_1 = QtWidgets.QProgressBar(self.widget)
self.progressBar_1.setProperty("value", 0)
self.progressBar_1.setObjectName("progressBar_1")
self.horizontalLayout_3.addWidget(self.progressBar_1)
self.label_2 = QtWidgets.QLabel(self.widget)
self.label_2.setMinimumSize(QtCore.QSize(50, 0))
self.label_2.setMaximumSize(QtCore.QSize(50, 16777215))
self.label_2.setStyleSheet("background-color: rgb(255, 255, 255);")
self.label_2.setText("")
self.label_2.setObjectName("label_2")
self.horizontalLayout_3.addWidget(self.label_2)
self.horizontalLayout_10.addLayout(self.horizontalLayout_3)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Set the user-visible text of every widget (pyuic-generated).

    Called once from setupUi; all strings go through
    QCoreApplication.translate so Qt's translation machinery can
    localize them.  Do not edit by hand if the .ui file is regenerated.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
    self.pushButton.setText(_translate("MainWindow", "test"))
    # Time-range radio buttons for the chart view.
    self.radioButton.setText(_translate("MainWindow", "1 day"))
    self.radioButton_2.setText(_translate("MainWindow", "5 days"))
    self.radioButton_3.setText(_translate("MainWindow", "1 month"))
    self.radioButton_4.setText(_translate("MainWindow", "1 year"))
    self.radioButton_5.setText(_translate("MainWindow", "max"))
    self.refreshbutton.setText(_translate("MainWindow", "refresh"))
    # Analysis controls (factor / from / to bounds).
    self.pushButton_3.setText(_translate("MainWindow", "Analyze"))
    self.checkBox.setText(_translate("MainWindow", "testdata"))
    self.label.setText(_translate("MainWindow", "factor:"))
    self.lineEdit_2.setText(_translate("MainWindow", "2"))
    self.label_4.setText(_translate("MainWindow", "from:"))
    self.lineEdit_3.setText(_translate("MainWindow", "-30"))
    self.label_6.setText(_translate("MainWindow", "to:"))
    self.lineEdit_4.setText(_translate("MainWindow", "-2"))
    # Data-loading controls.
    self.pushButton_1.setText(_translate("MainWindow", "Load NASDAQ Data"))
    self.pushButton_2.setText(_translate("MainWindow", "Limit List"))
    self.label_3.setText(_translate("MainWindow", "max Value:"))
    self.lineEdit.setText(_translate("MainWindow", "2"))
|
{"/backup/Stocker.py": ["/StockerGui.py"]}
|
4,114
|
Marisol610/catalog-app
|
refs/heads/master
|
/app/__init__.py
|
#!/products/bin/env python3
# #coding-*- utf -*-
"""THIS IS ASSIGNMENT 3 FOR FSDI-111"""
from flask import Flask
from flask_bootstrap import Bootstrap

# Single module-level Flask application (no app factory).
app = Flask(__name__)
Bootstrap(app)  # registers Bootstrap templates/static assets on the app
# NOTE(review): hard-coded secret key is fine for an assignment; load it
# from the environment for anything production-facing.
app.config["SECRET_KEY"] = "MYSUPERSECRETSTRING"
# Imported at the bottom on purpose: routes.py imports `app` from this
# module, so a top-of-file import would be circular.
from app import routes
|
{"/crud.py": ["/app/__init__.py"], "/app/routes.py": ["/app/__init__.py", "/app/forms/product.py"]}
|
4,115
|
Marisol610/catalog-app
|
refs/heads/master
|
/crud.py
|
from app import app
|
{"/crud.py": ["/app/__init__.py"], "/app/routes.py": ["/app/__init__.py", "/app/forms/product.py"]}
|
4,116
|
Marisol610/catalog-app
|
refs/heads/master
|
/app/forms/product.py
|
#!/products/bin/env python3
#-*- coding utf8 -*-
""" This is the app product definition"""
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
class ProductForm(FlaskForm):
    """WTForms form for creating/editing a product; every field is required."""
    name = StringField("Enter the product's name", validators=[DataRequired()])
    price = StringField("Enter the product' price", validators=[DataRequired()])
    description = StringField("Enter the product's description", validators=[DataRequired()])
    category = StringField("Enter the product's category", validators=[DataRequired()])
    quantity = StringField("Enter a quantity", validators=[DataRequired()])
    unique_tag = StringField("Enter the product's unique_tag", validators=[DataRequired()])
    submit = SubmitField("Submit")
class ProductReviewForm(FlaskForm):
    """Form for submitting a product review.

    NOTE(review): unlike ProductForm, this form has no SubmitField —
    presumably the template supplies its own submit button; confirm.
    """
    name = StringField("Enter your name", validators=[DataRequired()])
    product_name = StringField("Enter the product's name", validators=[DataRequired()])
    review = StringField("Enter your review for this product", validators=[DataRequired()])
|
{"/crud.py": ["/app/__init__.py"], "/app/routes.py": ["/app/__init__.py", "/app/forms/product.py"]}
|
4,117
|
Marisol610/catalog-app
|
refs/heads/master
|
/app/routes.py
|
#!/products/bin/env python3
#-*- coding utf8 -*-
""" This is the routes for product application """
from flask import request, render_template
from app import app
from app.database import create, read, update, delete, scan
from datetime import datetime
from flask import request
from app.forms.product import ProductForm
#product CRUD
@app.route("/")
def index():
serv_time = datetime.now().strftime("%F %H:%M:%S")
return {
"ok": True,
"version": "1.0.0",
"server_time": serv_time
}
@app.route("/product_form", methods=["GET", "POST"])
def product_form():
if request.method == "POST":
name = request.form.get("name")
price = request.form.get("price")
description = request.form.get("description")
category = request.form.get("category")
quantity = request.form.get("quantity")
unique_tag = request.form.get("unique_tag")
create(name, price, description, category, quantity, unique_tag)
form = ProductForm()
return render_template("form_example.html", form=form)
@app.route("/prod_review", methods=["GET", "POST"])
def prod_review():
if request.method == "POST":
name = request.form.get("name")
product_name = request.form.get("product_name")
review = request.form.get("review")
create(name, product_name, review)
form = ProductReviewForm()
return render_template("review.html", form=form)
@app.route("/catalog")
def catalog():
return render_template("catalog.html")
@app.route("/products")
def get_all_products():
out = scan()
out["ok"] = True
out["message"] = "Success"
#return out
return render_template("products.html", products=out["body"])
@app.route("/products/<pid>")
def get_one_product(pid):
out = read(int(pid))
out["ok"] = True
out["message"] = "Success"
return out
@app.route("/products", methods = ["POST"])
def create_product():
product_data = request.json
new_id = create(
product_data.get("name"),
product_data.get("price"),
product_data.get("description"),
product_data.get("category"),
product_data.get("quantity"),
product_data.get("unique_tag")
)
return {"ok": True, "message": "Success", "new_id": new_id}
@app.route("/products/<pid>", methods=["GET", "PUT"])
def update_product(pid):
#product_data = request.jason
if request.method =="PUT":
update(pid, request.form)
return {"ok": True, "message": "Updated"}
out = read(int(pid))
update_form = ProductForm()
if out["body"]:
return render_template("single_product.html", product=out["body"][0], form=update_form)
else:
return render_template("404.html"), 404
@app.route("/products/delete/<pid>", methods=["GET"])
def delete_product(pid):
out = update(int(pid), {"active": 0})
return {"ok": out, "message": "Deleted"}
#@app.route("/products/delete/<pid>", methods=["GET"])
#def delete_product(pid):
# id= input("Enter the id for the item you wish to delete")
# for product in products:
# if(str(prod.id) == id):
# delete(pid, request.form)
# return {"ok": True, "message": "Updated"}
@app.route('/agent')
def agent():
user_agent = request.headers.get("User-Agent")
return "<p>Your user agent is %s</p>" % user_agent
@app.route("/myroute")
def my_view_function():
return render_template("index.html")
# user CRUD
@app.route("/user/<name>")
def user(name):
return render_template("user.html", name=name)
@app.route("/user/<name>")
def show_user(name):
return render_template("user.html", name=name)
@app.route("/about")
def about():
return render_template("about.html", first_name="Marisol", last_name="Rodriguez", hobbies="Crochet and Baking")
@app.route("/users")
def get_all_users():
out = scan()
out["ok"] = True
out["message"] = "Success"
return out
@app.route("/users/<uid>")
def get_one_user(uid):
out = read(int(uid))
out["ok"] = True
out["message"] = "Success"
return out
@app.route("/users", methods = ["POST"])
def create_user():
user_data = request.json
new_id = create(
user_data.get("name"),
user_data.get("last name"),
user_data.get("hobbies"),
)
return {"ok": True, "message": "Success", "new_id": new_id}
@app.errorhandler(404)
def page_not_found(e):
return render_template("404.html"), 404
|
{"/crud.py": ["/app/__init__.py"], "/app/routes.py": ["/app/__init__.py", "/app/forms/product.py"]}
|
4,128
|
Spudar-Men/Hang_man
|
refs/heads/master
|
/main.py
|
from string_of_words import list_of_words
from string_of_words import print_playfield
from string_of_words import find
from more_itertools import locate
import random
import replit

# Pick the secret word at random from the word bank.
rand_word = random.choice(list_of_words)
print("Cheat mode on. Word is: " + rand_word)
indexPosList = 0
playfield = []
# One hidden "[_]" cell per letter of the secret word.
for letter in rand_word:
    playfield.append("[_]")
print_playfield(playfield)
print()
player_guess = ""  # holds the player's single-letter input
win = False  # flipped to True once every cell is revealed
player_attempts = 3
print("Len of playfield is " + str(len(playfield)))
player_guess = ""
while player_attempts > 0 and win == False:
    print("Player Attempts: " + str(player_attempts))
    # Re-prompt until exactly one character is entered.
    # (The `or player_guess == ""` clause is redundant: an empty string
    # already fails the length check.)
    while len(player_guess) != 1 or player_guess == "":
        player_guess = input("Please guess one letter from the secret Hangman word: ")
    # Reveal every occurrence of a correctly guessed letter.
    for letter in rand_word:
        if letter == player_guess:
            indexPosList = list(locate(rand_word, lambda a: a == letter))
            for item in indexPosList:
                playfield[item] = letter
    # Wrong guess costs one attempt.
    if player_guess not in rand_word:
        player_attempts -= 1
    replit.clear()
    print_playfield(playfield)
    print()
    player_guess = ""
    if "[_]" not in playfield:
        win = True
else:
    # while/else: runs when the loop condition becomes false.  There is
    # no `break` in the loop, so this always executes after the game ends.
    if win == True:
        print("You Won!")
    else:
        print("You Lost!")
|
{"/main.py": ["/string_of_words.py"]}
|
4,129
|
Spudar-Men/Hang_man
|
refs/heads/master
|
/string_of_words.py
|
# Word bank for the Hangman game: 49 tricky-to-guess words, one per line.
words = """
Awkward
Bagpipes
Banjo
Bungler
Croquet
Crypt
Dwarves
Fervid
Fishhook
Fjord
Gazebo
Gypsy
Haiku
Haphazard
Hyphen
Ivory
Jazzy
Jiffy
Jinx
Jukebox
Kayak
Kiosk
Klutz
Memento
Mystify
Numbskull
Ostracize
Oxygen
Pajama
Phlegm
Pixel
Polka
Quad
Quip
Rhythmic
Rogue
Sphinx
Squawk
Swivel
Toady
Twelfth
Unzip
Waxy
Wildebeest
Yacht
Zealous
Zigzag
Zippy
Zombie
"""
words = words.lower()  # lowercase so player guesses match regardless of case
list_of_words = words.split()  # split the block into one word per element
def print_playfield(playfield):
    """Print the playfield cells on one line, each followed by a space."""
    print(" ".join(playfield), end=" ")
# Locate every occurrence of a character within a string.
def find(s, ch):
    """Return a list of all indices at which *ch* occurs in *s*."""
    positions = []
    for idx, char in enumerate(s):
        if char == ch:
            positions.append(idx)
    return positions
|
{"/main.py": ["/string_of_words.py"]}
|
4,130
|
rickardlofberg/RiksdagenDataDownloader
|
refs/heads/master
|
/RiksdagenDataDownloader/api.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Third-party imports...
import requests
# Local imports
from . import unzip as unzip
from .riksdagen_client import RiksdagenClient
def uri_generator(data_format, collection=''):
    """Yield dataset URIs for *data_format*, optionally limited to *collection*."""
    return RiksdagenClient().get_collection_uri(data_format, collection)
def download_and_yield(data_format, collection=''):
    """Download each dataset zip for *data_format* and yield its documents.

    data_format: dataset format name (e.g. 'xml', 'json').
    collection:  optional collection name; empty means all collections.
    Yields each file inside every downloaded zip, decoded as text.

    Fix: the original passed collection='' to uri_generator, silently
    ignoring the caller's *collection* argument and downloading everything.
    """
    for url in uri_generator(data_format, collection=collection):
        data = requests.get(url)
        for document in unzip.yield_zip_content(data.content):
            yield document
def download_and_save(data_format, path, collection=''):
    """Download every dataset zip for *data_format* and extract under *path*.

    With no *collection* given, all available collections are fetched;
    each one is extracted into its own subfolder of *path*.
    """
    client = RiksdagenClient()
    collections = [collection]
    if not collection:
        collections = client.available_collections()
    # NOTE: the loop variable deliberately rebinds the `collection` parameter.
    for collection in collections:
        for url in uri_generator(data_format, collection):
            data = requests.get(url)
            unzip.save_zip_content(data.content, path, collection)
|
{"/RiksdagenDataDownloader/api.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/tests/test_client_xml_parsing.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/RiksdagenDataDownloader/cli.py": ["/RiksdagenDataDownloader/riksdagen_client.py"]}
|
4,131
|
rickardlofberg/RiksdagenDataDownloader
|
refs/heads/master
|
/tests/test_client_xml_parsing.py
|
# Standard library imports...
from unittest.mock import Mock, patch
# Third-party imports...
from nose.tools import assert_is_not_none, assert_equal, assert_true
# Local imports...
from dataset_metadata import xml_metadata
from RiksdagenDataDownloader.riksdagen_client import RiksdagenClient
class TestClient(object):
    """Tests for RiksdagenClient's XML parsing, with requests.get patched out."""
    @classmethod
    def setup_class(cls):
        # Serve a fake 5-entry catalogue (format 'xml', collection 'ip')
        # instead of hitting the network.
        fake_xml = xml_metadata(5, 'xml', 'ip')
        mock_response = Mock()
        mock_response.return_value.content = fake_xml
        # NOTE(review): side_effect=mock_response makes requests.get(...)
        # return mock_response(...), whose .content is the fake XML.
        # `return_value=` is the more usual spelling — confirm intent.
        cls.mock_get_patcher = patch('RiksdagenDataDownloader.riksdagen_client.requests.get', side_effect=mock_response)
        cls.mock_get = cls.mock_get_patcher.start()
    @classmethod
    def teardown_class(cls):
        # Undo the patch so other test modules see the real requests.get.
        cls.mock_get_patcher.stop()
    def test_documents_not_empty_after_instansiation(self):
        riks_client = RiksdagenClient()
        assert_is_not_none(riks_client.documents)
    def test_expected_format_is_available(self):
        riks_client = RiksdagenClient()
        available_formats = riks_client.available_formats()
        assert_true('xml' in available_formats)
    def test_expected_collection_is_available(self):
        riks_client = RiksdagenClient()
        available_collections = riks_client.available_collections()
        assert_true('ip' in available_collections)
|
{"/RiksdagenDataDownloader/api.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/tests/test_client_xml_parsing.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/RiksdagenDataDownloader/cli.py": ["/RiksdagenDataDownloader/riksdagen_client.py"]}
|
4,132
|
rickardlofberg/RiksdagenDataDownloader
|
refs/heads/master
|
/RiksdagenDataDownloader/cli.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Standard libary imports
import argparse
from .riksdagen_client import RiksdagenClient
from . import api
def main(args=None):
    """Command-line entry point for downloading data.riksdagen.se datasets.

    args: optional argv list (for tests/embedding); None falls back to
    sys.argv[1:] inside argparse.  The original ignored this parameter
    and always parsed sys.argv — fixed below.
    """
    parser = argparse.ArgumentParser(
        description="Retrive the data from data.riksdagen.se")
    parser.add_argument(
        '--available-formats',
        default=False,
        help='Print out the available data formats',
        action='store_true')
    parser.add_argument(
        '--format',
        help='Specify the data format to download')
    parser.add_argument(
        '--available-collections',
        default=False,
        help='Print out the available collections',
        action='store_true')
    parser.add_argument(
        '--collection',
        help='Specify the collection to download. Default: all of them')
    parser.add_argument(
        '--dir',
        help='Directory to store output to')
    # Fix: honor the `args` parameter; parse_args(None) still reads sys.argv.
    args = parser.parse_args(args)
    client = RiksdagenClient()
    if args.available_formats:
        print("Available formats:")
        for available in client.available_formats():
            print(available)
    if args.available_collections:
        print("Available collections:")
        for available in client.available_collections():
            print(available)
    data_format = args.format
    collection = args.collection
    directory = args.dir
    if data_format:
        if args.dir:
            # A target directory means "save to disk" ...
            api.download_and_save(data_format, directory, collection)
        else:
            # ... otherwise stream each document to stdout.
            for document in api.download_and_yield(data_format, collection):
                print(document)
|
{"/RiksdagenDataDownloader/api.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/tests/test_client_xml_parsing.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/RiksdagenDataDownloader/cli.py": ["/RiksdagenDataDownloader/riksdagen_client.py"]}
|
4,133
|
rickardlofberg/RiksdagenDataDownloader
|
refs/heads/master
|
/tests/dataset_metadata.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Standard library imports
import xml.etree.cElementTree as ET
import datetime
import logging
import uuid
# Third party libs
from faker import Faker
from faker.providers import address, date_time
# Silence faker's debug chatter while generating fixtures.
logging.disable(logging.DEBUG)
fake = Faker('en_GB')
fake.add_provider(address)
fake.add_provider(date_time)
def xml_metadata(number_of_datasets=1, set_format=None, set_collection=None):
    """Build a fake <datasetlista> XML catalogue for tests.

    number_of_datasets: how many <dataset> entries to generate.
    set_format / set_collection: pin the format/collection instead of
    picking randomly from the known value lists.
    Returns the serialized XML document as bytes (utf8 declaration).
    """
    root = ET.Element("datasetlista")
    datatypes = ['xml', 'json', 'zip', 'html', 'sql', 'csv', 'text']
    collections = ['anforande', 'bet', 'ds', 'EUN', 'f-lista', 'fpm', 'frsrdg', 'ip', 'kammakt', 'mot', 'Övrigt', 'prop', 'prot', 'Riksdagens diarium', 'rskr', 'samtr', 'Skriftliga frågor', 'sou', 't-lista', 'Utredningar', 'utskottsdokument', 'yttr', 'Ledamotsdata', 'votering']
    for _ in range(number_of_datasets):
        collection = set_collection or fake.random_element(elements=collections)
        # Random update date somewhere after 1993-01-01.
        start_date = datetime.date(year=1993, month=1, day=1)
        date = fake.date_time_between_dates(datetime_start=start_date)
        fake_date = '{:%Y-%m-%d %X}'.format(date)
        # Parliamentary-year spellings, e.g. "2017/18" and "201718".
        yyyy_slash_yy = '{}/{}'.format(date.year, str(date.year+1)[-2:])
        yyyy_yy = '{}{}'.format(date.year, str(date.year+1)[-2:])
        collection_date = '{}-{}'.format(collection, yyyy_slash_yy)
        data_format = set_format or fake.random_element(elements=datatypes)
        file_format = 'zip'
        file_name = '{}-{}.{}.{}'.format(collection, yyyy_yy, data_format, file_format)
        url = '/dataset/anforande/{}'.format(file_name)
        # One <dataset> element mirroring the real catalogue's schema.
        doc = ET.SubElement(root, "dataset")
        ET.SubElement(doc, 'namn').text = '{}'.format(collection)
        ET.SubElement(doc, 'typ').text = '{}'.format(collection)
        ET.SubElement(doc, 'samling').text = '{}'.format(collection_date)
        ET.SubElement(doc, 'rm').text = '{}'.format(yyyy_slash_yy)
        ET.SubElement(doc, 'filnamn').text = '{}'.format(file_name)
        ET.SubElement(doc, 'storlek').text = '{}'.format(fake.random_int(min=1000, max=3000000))
        ET.SubElement(doc, 'format').text = '{}'.format(data_format)
        ET.SubElement(doc, 'filformat').text = '{}'.format(file_format)
        ET.SubElement(doc, 'uppdaterad').text = '{}'.format(fake_date)
        ET.SubElement(doc, 'url').text = '{}'.format(url)
        ET.SubElement(doc, 'description').text = '{}'.format(fake.text())
        ET.SubElement(doc, 'upplysning').text = '{}'.format(fake.text())
    return ET.tostring(root,encoding='utf8', method='xml')
|
{"/RiksdagenDataDownloader/api.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/tests/test_client_xml_parsing.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/RiksdagenDataDownloader/cli.py": ["/RiksdagenDataDownloader/riksdagen_client.py"]}
|
4,134
|
rickardlofberg/RiksdagenDataDownloader
|
refs/heads/master
|
/RiksdagenDataDownloader/riksdagen_client.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Standard library imports
import logging
# Third-party imports...
import xmltodict
import requests
class RiksdagenClient:
    """ A class which acts as an interface to get URIs for the
    available datasets. It does this by downloading and parsing
    an XML catalogue provided by riksdagen. The class is
    initialized with a default catalogue URL.
    Dataset URL last checked: 2018-03-11
    If it doesn't work you can initialize the class with another
    URI.
    """
    def __init__(self, xml_url='http://data.riksdagen.se/dataset/katalog/dataset.xml'):
        self.base_url = 'http://data.riksdagen.se'
        # documents[format][collection] -> list of dataset URL paths
        self.documents = dict()
        # Get the metadata and parse it into self.documents.
        xml_dict = self._get_meta_data(xml_url)
        self._parse_data(xml_dict)
    def __str__(self):
        # Fix: the original body was `pass`, so str(client) raised
        # TypeError ("__str__ returned non-string").
        return f'RiksdagenClient({len(self.documents)} formats)'
    def _get_meta_data(self, xml_url):
        """ Helper method to retrieve the XML with meta data """
        xml_data = requests.get(xml_url)
        if xml_data:
            return xmltodict.parse(xml_data.content, encoding='utf-8')
        logging.critical("Not able to retrive data about the dataset")
    def _parse_data(self, xml_dict):
        """ Helper method to parse the data into nested dictionaries """
        for dataset in xml_dict['datasetlista']['dataset']:
            try:
                doc_format = dataset['format']
                doc_collection = dataset['typ']
                doc_url = dataset['url']
                self.documents[doc_format] = self.documents.get(doc_format, {})
                self.documents[doc_format][doc_collection] = self.documents[doc_format].get(doc_collection, []) + [doc_url]
            except Exception:
                logging.warning(f"Could not parse dataset {dataset}")
    def available_formats(self):
        """ Returns a list of all the available data formats """
        return list(self.documents.keys())
    def available_collections(self):
        """ Returns a list of all the available document types.

        Fix: the original replaced the accumulator on every iteration
        (and excluded previously-seen names), so collections from all
        but the last format were dropped.  Accumulate the ordered union.
        """
        collections = []
        for collection_to_doc in self.documents.values():
            for name in collection_to_doc.keys():
                if name not in collections:
                    collections.append(name)
        return collections
    def get_collection_uri(self, data_format, collection=''):
        """ Yield the full URIs of all available datasets of *data_format*,
        optionally limited to one *collection* (default: all collections).
        Raises KeyError for an unknown format or collection. """
        try:
            collections = self.documents[data_format]
        except KeyError as key:
            logging.exception(f"{data_format} is an invalid format")
            raise key
        if collection:
            try:
                # Fix: prefix base_url like the all-collections branch does;
                # the original yielded bare paths here, which broke callers
                # that pass the result straight to requests.get.
                uris = [self.base_url + uri for uri in collections[collection]]
            except KeyError:
                logging.exception(f"{collection} is not a valid collection")
                # Fix: the original fell through and hit NameError on `uris`.
                raise
        else:
            uris = [self.base_url + uri for paths in collections.values() for uri in paths]
        for uri in uris:
            yield uri
    def get_collection_uri_and_collection(self, data_format, collection=''):
        """ Deprecated duplicate of get_collection_uri (the original body
        was copy-pasted verbatim); kept for backward compatibility and
        delegated to avoid drift. """
        yield from self.get_collection_uri(data_format, collection)
|
{"/RiksdagenDataDownloader/api.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/tests/test_client_xml_parsing.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/RiksdagenDataDownloader/cli.py": ["/RiksdagenDataDownloader/riksdagen_client.py"]}
|
4,135
|
rickardlofberg/RiksdagenDataDownloader
|
refs/heads/master
|
/RiksdagenDataDownloader/unzip.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Standard library imports
import io
import os
import zipfile
def yield_zip_content(request_content):
    """Yield the contents of every file in a zip archive, decoded as UTF-8.

    *request_content* is the raw bytes of a zip file (e.g. response.content
    from requests).
    """
    with zipfile.ZipFile(io.BytesIO(request_content)) as archive:
        for member_name in archive.namelist():
            yield archive.read(member_name).decode('utf-8')
def save_zip_content(request_content, directory='', subfolder=''):
    """Extract a zip archive (raw bytes) into *directory*[/subfolder].

    request_content: bytes of a zip file (e.g. response.content).
    directory:       existing target directory; *subfolder* is created
                     inside it when given.
    Raises FileNotFoundError if *directory* does not exist.  (The
    original raised a bare Exception, which callers could not catch
    precisely; FileNotFoundError is a subclass of Exception, so broad
    handlers still work.)
    """
    # Make sure we have the directory to save to.
    if not os.path.exists(directory):
        raise FileNotFoundError("Selected folder doesn't exists.")
    if subfolder:
        directory = os.path.join(directory, subfolder)
        # exist_ok avoids the check-then-create race of the original.
        os.makedirs(directory, exist_ok=True)
    # Read the bytes into a ZipFile object and extract everything.
    with zipfile.ZipFile(io.BytesIO(request_content)) as zipdata:
        zipdata.extractall(path=directory)
|
{"/RiksdagenDataDownloader/api.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/tests/test_client_xml_parsing.py": ["/RiksdagenDataDownloader/riksdagen_client.py"], "/RiksdagenDataDownloader/cli.py": ["/RiksdagenDataDownloader/riksdagen_client.py"]}
|
4,152
|
academiaeh05-2019/d303-exercicio-oo
|
refs/heads/master
|
/classes.py
|
from random import randint
class Cliente:
    """A bank customer: just a name and a CPF (Brazilian tax id)."""
    def __init__(self, nome, cpf):
        # nome: customer's display name; cpf: identification string
        self.nome = nome
        self.cpf = cpf
class Conta:
    """A simple bank account with deposit/withdraw and a generated number."""

    def __init__(self, cliente):
        self.titular = cliente
        self.numero = self._gerar()
        self._saldo = 0

    def extrato(self):
        """Print the account number and current balance."""
        print(f'Numero: {self.numero}\nSaldo: {self._saldo}')

    def depositar(self, valor):
        """Add *valor* to the balance."""
        self._saldo = self._saldo + valor

    def sacar(self, valor):
        """Withdraw *valor*; return True on success, False if insufficient."""
        if self._saldo >= valor:
            self._saldo -= valor
            return True
        return False

    def consultar_saldo(self):
        """Return the current balance."""
        return self._saldo

    def _gerar(self):
        """Generate and remember a random account number like '1234-5'."""
        self.random_num = f'{randint(1000, 9999)}-{randint(1, 9)}'
        return self.random_num
|
{"/main.py": ["/interface.py"], "/interface.py": ["/classes.py"]}
|
4,153
|
academiaeh05-2019/d303-exercicio-oo
|
refs/heads/master
|
/main.py
|
from interface import CaixaEletronico

# Constructing the ATM prompts for the customer's name and CPF;
# the menu is then shown twice so two operations can be performed.
caixa_eletronico = CaixaEletronico()
caixa_eletronico.exibir_menu()
caixa_eletronico.exibir_menu()
|
{"/main.py": ["/interface.py"], "/interface.py": ["/classes.py"]}
|
4,154
|
academiaeh05-2019/d303-exercicio-oo
|
refs/heads/master
|
/interface.py
|
from classes import Cliente, Conta
class CaixaEletronico():
    """Console ATM: prompts for a customer, then drives a Conta via a menu."""
    def __init__(self):
        # Interactive: prompts happen at construction time.
        nome = input('Digite seu nome: ')
        cpf = input('Digite seu CPF: ')
        cliente = Cliente(nome, cpf)
        self._conta = Conta(cliente)
        print(f'Olá, {self._conta.titular.nome}, sua conta é {self._conta.numero}')
    def exibir_menu(self):
        """Show the menu once and dispatch the chosen action."""
        print(f'1- Consultar saldo\n2- Depositar\n3- Sacar')
        escolha = input('Escolha uma opção: ')
        if escolha == '1':
            self.exibir_saldo()
        elif escolha == '2':
            self.depositar()
        elif escolha == '3':
            self.sacar()
        else:
            print('Opção inválida.')
    def exibir_saldo(self):
        """Print the current balance."""
        valor = str(self._conta.consultar_saldo())
        print(f'Seu saldo é R$ {valor}.')
    def depositar(self):
        """Prompt for an amount, deposit it, and show the new balance."""
        valor = float(input('Digite o valor: '))
        self._conta.depositar(valor)
        print('Depósito efetuado.')
        self.exibir_saldo()
    def sacar(self):
        """Prompt for an amount and withdraw it if the balance allows."""
        valor = float(input('Digite o valor: '))
        if self._conta.sacar(valor):
            print('Saque efetuado.')
            self.exibir_saldo()
        else:
            print('Saldo insuficiente.')
|
{"/main.py": ["/interface.py"], "/interface.py": ["/classes.py"]}
|
4,155
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/extras/memory_limit_test.py
|
import sysv_ipc

# Probe the practical SysV shared-memory size limit: allocate ever-larger
# segments (1 KiB steps) until the OS refuses with MemoryError.
done = False
size = 1024
while not done:
    s = "Trying %d (%dk)..." % (size, size / 1024)
    print(s)
    try:
        mem = sysv_ipc.SharedMemory(None, sysv_ipc.IPC_CREX, size=size)
    except MemoryError:
        # Allocation failed: the previous size was the limit.
        done = True
    else:
        # Clean up the segment before trying a bigger one.
        mem.detach()
        mem.remove()
        size += 1024
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,156
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/demos/message_queues/cleanup.py
|
import sysv_ipc
import utils

# Remove the demo's message queue; the key comes from the shared params file.
params = utils.read_params()
key = params["KEY"]
try:
    mq = sysv_ipc.MessageQueue(key)
except sysv_ipc.ExistentialError:
    # Nothing to clean up — the queue was never created or already removed.
    print('''Message queue with key "{}" doesn't exist.'''.format(key))
else:
    mq.remove()
    print('Message queue with key "{}" removed'.format(key))
print("\nAll clean!")
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,157
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/tests/base.py
|
# Python imports
import unittest
import random
import time
# Project imports
import sysv_ipc
def make_key():
    """Generate a random key suitable for an IPC object."""
    return random.randrange(sysv_ipc.KEY_MIN, sysv_ipc.KEY_MAX + 1)
def sleep_past_granularity() -> None:
    """A utility method that encapsulates a type-specific detail of testing.
    I test all of the time-related variables in the IPC structs (o_time, shm_atime, shm_dtime,
    shm_ctime, msg_ctime, msg_stime, and msg_rtime) to ensure they change when they're supposed
    to (e.g. when a segment is detached, for shm_dtime). For variables that are initialized to 0
    (like o_time), it's easy to verify that they're 0 to start with and then non-zero after the
    change.
    Other variables (like shm_ctime) are trickier to test because they're already non-zero
    immediately after the object is created. My test has to save the value, do something that
    should change it, and then compare the saved value to the current one via assertNotEqual().
    Some (most? all?) systems define those time-related values as integral values (int or long),
    so their granularity is only 1 second. If I don't force at least 1 second to elapse between
    the statement where I save the value and the statement that should change it, they'll almost
    always happen in the same second and the assertNotEqual() even though all code (mine and the
    system) has behaved correctly.
    This method sleeps for 1.1 seconds to avoid the problem described above.
    """
    # 1.1 s: comfortably past the 1-second granularity described above.
    time.sleep(1.1)
class Base(unittest.TestCase):
    """Base class for test cases."""
    def assertWriteToReadOnlyPropertyFails(self, target_object, property_name,
                                           value):
        """test that writing to a readonly property raises an exception"""
        # The attributes tested with this code are implemented differently in C.
        # For instance, Semaphore.value is a 'getseters' with a NULL setter,
        # whereas Semaphore.name is a reference into the Semaphore member
        # definition.
        # Under Python 2.6, writing to sem.value raises AttributeError whereas
        # writing to sem.name raises TypeError. Under Python 3, both raise
        # AttributeError (but with different error messages!).
        # This illustrates that Python is a little unpredictable in this
        # matter. Rather than testing each of the numerous combinations of
        # of Python versions and attribute implementation, I just accept
        # both TypeError and AttributeError here.
        # ref: http://bugs.python.org/issue1687163
        # ref: http://bugs.python.org/msg127173
        with self.assertRaises((TypeError, AttributeError)):
            setattr(target_object, property_name, value)
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,158
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/demos/buffer_protocol/demo.py
|
import sysv_ipc

# Create a shared memory segment and write the (English) alphabet into it.
segment = sysv_ipc.SharedMemory(None, sysv_ipc.IPC_CREX, size=sysv_ipc.PAGE_SIZE)
ASCII_A = 0x61
alphabet = bytes(ASCII_A + offset for offset in range(26))
segment.write(alphabet)

# A bytearray built from the SharedMemory is a *copy* of the segment's bytes.
# bytearray instances have "most of the usual methods of mutable sequences",
# such as replace.
# https://docs.python.org/3/library/functions.html#func-bytearray
mutable_copy = bytearray(segment)
mutable_copy = mutable_copy.replace(b'c', b'x')
assert(mutable_copy[:4] == b'abxd')

# Because the bytearray is a copy, mutating it leaves the segment untouched.
assert(segment.read(4) == b'abcd')

# Put the unmodified alphabet back.
segment.write(alphabet)

# A memoryview built from the SharedMemory, by contrast, is a live window:
# format = 'B', itemsize = 1, shape = (sysv_ipc.PAGE_SIZE, ), ndim = 1,
# strides = (1, ), and read/write.
view = memoryview(segment)

# Slicing the memoryview works...
assert([chr(c) for c in view[3:6]] == ['d', 'e', 'f'])

# ...and so does writing through it.
view[4] = ord('x')
assert([chr(c) for c in view[3:6]] == ['d', 'x', 'f'])

# Writes to the underlying segment show up in the view.
segment.write(b'xxx')
assert([chr(c) for c in view[:6]] == ['x', 'x', 'x', 'd', 'x', 'f'])

segment.detach()
segment.remove()

print('Done!')
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,159
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/tests/test_module.py
|
# Python imports
import unittest
import os
import resource
import warnings
import numbers
import tempfile
# Project imports
import sysv_ipc
from .base import Base
ONE_MILLION = 1000000
class TestModuleConstants(Base):
    """Check that the sysv_ipc module-level constants are defined as expected"""
    def test_constant_values(self):
        """test that constants are what I expect"""
        # IPC_CREX is shorthand for "create exclusively".
        self.assertEqual(sysv_ipc.IPC_CREX, sysv_ipc.IPC_CREAT | sysv_ipc.IPC_EXCL)
        # PAGE_SIZE must agree with what the OS reports at runtime.
        self.assertEqual(sysv_ipc.PAGE_SIZE, resource.getpagesize())
        self.assertIn(sysv_ipc.SEMAPHORE_TIMEOUT_SUPPORTED, (True, False))
        self.assertIsInstance(sysv_ipc.SEMAPHORE_VALUE_MAX, numbers.Integral)
        self.assertGreaterEqual(sysv_ipc.SEMAPHORE_VALUE_MAX, 1)
        self.assertIsInstance(sysv_ipc.VERSION, str)
        self.assertIsInstance(sysv_ipc.IPC_PRIVATE, numbers.Integral)
        self.assertIsInstance(sysv_ipc.KEY_MIN, numbers.Integral)
        self.assertIsInstance(sysv_ipc.KEY_MAX, numbers.Integral)
        self.assertGreater(sysv_ipc.KEY_MAX, sysv_ipc.KEY_MIN)
        self.assertIsInstance(sysv_ipc.SHM_RDONLY, numbers.Integral)
        self.assertIsInstance(sysv_ipc.SHM_RND, numbers.Integral)
        # These constants are only available under Linux as of this writing (Jan 2018).
        for attr_name in ('SHM_HUGETLB', 'SHM_NORESERVE', 'SHM_REMAP'):
            if hasattr(sysv_ipc, attr_name):
                self.assertIsInstance(getattr(sysv_ipc, attr_name), numbers.Integral)
        # The dunder metadata must exist, and VERSION must match __version__.
        self.assertIsInstance(sysv_ipc.__version__, str)
        self.assertEqual(sysv_ipc.VERSION, sysv_ipc.__version__)
        self.assertIsInstance(sysv_ipc.__author__, str)
        self.assertIsInstance(sysv_ipc.__license__, str)
        self.assertIsInstance(sysv_ipc.__copyright__, str)
class TestModuleErrors(Base):
    """Exercise the exceptions defined by the module"""
    def test_errors(self):
        # The package's base Error class must descend from Exception, and
        # every specific exception must descend from that base class.
        self.assertTrue(issubclass(sysv_ipc.Error, Exception))
        for exception_class in (sysv_ipc.InternalError,
                                sysv_ipc.PermissionsError,
                                sysv_ipc.ExistentialError,
                                sysv_ipc.BusyError,
                                sysv_ipc.NotAttachedError):
            self.assertTrue(issubclass(exception_class, sysv_ipc.Error))
class TestModuleFunctions(Base):
    """Exercise the sysv_ipc module-level functions"""
    def test_attach(self):
        """Exercise attach()"""
        # Create memory, write something to it, then detach
        mem = sysv_ipc.SharedMemory(None, sysv_ipc.IPC_CREX)
        mem.write('hello world')
        mem.detach()
        # After detaching, nothing is attached to the segment any more.
        self.assertFalse(mem.attached)
        self.assertEqual(mem.number_attached, 0)
        # Reattach memory via a different SharedMemory instance
        mem2 = sysv_ipc.attach(mem.id)
        self.assertFalse(mem.attached)
        self.assertTrue(mem2.attached)
        # number_attached describes the segment, so both handles report 1.
        self.assertEqual(mem.number_attached, 1)
        self.assertEqual(mem2.number_attached, 1)
        # Data written via the first handle is visible through the second.
        self.assertEqual(mem2.read(len('hello world')), b'hello world')
        mem2.detach()
        mem.remove()
        # Once removed, opening by key must fail.
        self.assertRaises(sysv_ipc.ExistentialError, sysv_ipc.SharedMemory, mem.key)
    def test_attach_kwargs(self):
        """Ensure attach takes kwargs as advertised"""
        mem = sysv_ipc.SharedMemory(None, sysv_ipc.IPC_CREX)
        mem.write('hello world')
        mem.detach()
        mem2 = sysv_ipc.attach(mem.id, flags=0)
        mem2.detach()
        mem.remove()
    def test_ftok(self):
        """Exercise ftok()'s behavior of raising a warning as documented"""
        # Test default value of silence_warning
        with warnings.catch_warnings(record=True) as recorded_warnings:
            warnings.simplefilter("always")
            sysv_ipc.ftok('.', 42)
            self.assertEqual(len(recorded_warnings), 1)
            self.assertTrue(issubclass(recorded_warnings[-1].category, Warning))
        # Test explicit False value of silence_warning
        with warnings.catch_warnings(record=True) as recorded_warnings:
            warnings.simplefilter("always")
            sysv_ipc.ftok('.', 42, silence_warning=False)
            self.assertEqual(len(recorded_warnings), 1)
            self.assertTrue(issubclass(recorded_warnings[-1].category, Warning))
        # Test explicit True value of silence_warning
        with warnings.catch_warnings(record=True) as recorded_warnings:
            warnings.simplefilter("always")
            sysv_ipc.ftok('.', 42, silence_warning=True)
            self.assertEqual(len(recorded_warnings), 0)
    def test_ftok_kwargs(self):
        """Ensure ftok() takes kwargs as advertised"""
        sysv_ipc.ftok('.', 42, silence_warning=True)
    def test_ftok_return_value(self):
        """Ensure ftok() returns an int"""
        self.assertIsInstance(sysv_ipc.ftok('.', 42, silence_warning=True), numbers.Integral)
    def test_ftok_raises_os_error(self):
        """Ensure ftok() failure raises an exception"""
        with tempfile.TemporaryDirectory() as tmp_dir_name:
            # Create a path that should cause ftok() to fail.
            does_not_exist_path = os.path.join(tmp_dir_name, "does_not_exist")
            with self.assertRaises(OSError):
                sysv_ipc.ftok(does_not_exist_path, 42, silence_warning=True)
    def test_remove_semaphore(self):
        """Exercise remove_semaphore()"""
        sem = sysv_ipc.Semaphore(None, sysv_ipc.IPC_CREX)
        sysv_ipc.remove_semaphore(sem.id)
        # Removal is keyed by id; re-opening by key must now fail.
        with self.assertRaises(sysv_ipc.ExistentialError):
            sysv_ipc.Semaphore(sem.key)
    def test_remove_shared_memory(self):
        """Exercise remove_shared_memory()"""
        mem = sysv_ipc.SharedMemory(None, sysv_ipc.IPC_CREX)
        sysv_ipc.remove_shared_memory(mem.id)
        with self.assertRaises(sysv_ipc.ExistentialError):
            sysv_ipc.SharedMemory(mem.key)
    def test_remove_message_queue(self):
        """Exercise remove_message_queue()"""
        mq = sysv_ipc.MessageQueue(None, sysv_ipc.IPC_CREX)
        sysv_ipc.remove_message_queue(mq.id)
        with self.assertRaises(sysv_ipc.ExistentialError):
            sysv_ipc.MessageQueue(mq.key)

if __name__ == '__main__':
    unittest.main()
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,160
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/demos/sem_and_shm/cleanup.py
|
# 3rd party modules
import sysv_ipc
# Modules for this project
import utils

# The demo records its IPC key in params.txt; remove any semaphore and
# shared memory segment that an earlier run may have left behind.
params = utils.read_params()
key = params["KEY"]

for noun, ipc_class in (("semaphore", sysv_ipc.Semaphore),
                        ("shared memory", sysv_ipc.SharedMemory)):
    try:
        ipc_object = ipc_class(key)
    except sysv_ipc.ExistentialError:
        print('''The {} with key "{}" doesn't exist.'''.format(noun, key))
    else:
        ipc_object.remove()
        print('Removed the {} with key "{}".'.format(noun, key))
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,161
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/post_dist.py
|
#!/usr/bin/env python
"""Post-release chores: archive the sdist tarball, write hash files, print an RSS item."""
# Python imports
import time
import hashlib
import shutil
import os

RSS_TIMESTAMP_FORMAT = "%a, %d %b %Y %H:%M:%S GMT"

with open("VERSION") as f:
    VERSION = f.read().strip()

# Make a copy of the tarball for posterity
tarball_name = "sysv_ipc-%s.tar.gz" % VERSION
shutil.copyfile(os.path.join("dist", tarball_name),
                os.path.join("releases", tarball_name))

tarball_name = "releases/sysv_ipc-%s.tar.gz" % VERSION

# Generate hashes of the tarball.
# Fixed: the original opened the tarball and each hash file without closing
# them (relying on garbage collection); context managers close them promptly.
# Also removed the unused md5_name/sha1_name variables.
with open(tarball_name, 'rb') as f:
    tarball_content = f.read()
for hash_function_name in ('md5', 'sha1', 'sha256'):
    hash_function = getattr(hashlib, hash_function_name)
    hash_value = hash_function(tarball_content).hexdigest()
    hash_filename = "releases/sysv_ipc-{}.{}.txt".format(VERSION, hash_function_name)
    with open(hash_filename, "wb") as hash_file:
        hash_file.write(hash_value.encode('ascii'))
    print(hash_function_name + " = " + hash_value)

# Print an RSS item suitable for pasting into rss.xml
timestamp = time.strftime(RSS_TIMESTAMP_FORMAT, time.gmtime())
print("""
<item>
<guid isPermaLink="false">%s</guid>
<title>sysv_ipc %s Released</title>
<pubDate>%s</pubDate>
<link>http://semanchuk.com/philip/sysv_ipc/</link>
<description>Version %s of sysv_ipc has been released.
</description>
</item>
""" % (VERSION, VERSION, timestamp, VERSION))
print("Don't forget this:\ngit tag rel" + VERSION)
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,162
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/demos/sem_and_shm/utils.py
|
import time
import sys
NULL_CHAR = '\0'
def say(s):
    """Print *s* prefixed with this script's name and a high-resolution timestamp."""
    speaker = sys.argv[0]
    if speaker.endswith(".py"):
        speaker = speaker[:-3]
    print("%s@%1.6f: %s" % (speaker, time.time(), s))
def write_to_memory(memory, s):
    """NUL-terminate *s*, encode it, and write it into the shared memory segment."""
    say("writing %s " % s)
    memory.write((s + NULL_CHAR).encode())
def read_from_memory(memory):
    """Read the segment and return its contents up to (not including) the first NUL."""
    decoded = memory.read().decode()
    terminator = decoded.find(NULL_CHAR)
    result = decoded if terminator == -1 else decoded[:terminator]
    say("read %s" % result)
    return result
def read_params():
    """Parse params.txt into a dict of {UPPERCASE_NAME: int value}.

    Blank lines and lines starting with '#' are ignored. The PERMISSIONS
    value is interpreted as octal; all other values as decimal.
    """
    params = {}
    with open("params.txt", "r") as f:
        for raw_line in f:
            raw_line = raw_line.strip()
            if not raw_line or raw_line.startswith('#'):
                # Skip blanks and comments.
                continue
            name, value = raw_line.split('=')
            name = name.upper().strip()
            base = 8 if name == "PERMISSIONS" else 10
            params[name] = int(value, base)
    return params
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,163
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/extras/explore_max_semaphore_value.py
|
import sysv_ipc

'''This is a simple test to see how many times a semaphore can be released.'''

# Create a fresh semaphore and release it repeatedly, printing the value as
# it climbs; the loop ends (or the OS errors out) long before 100000.
semaphore = sysv_ipc.Semaphore(None, sysv_ipc.IPC_CREX)
print('Semaphore key is {}'.format(semaphore.key))
for attempt in range(1, 100000):
    semaphore.release()
    print('{:05}: value is {}'.format(attempt, semaphore.value))
semaphore.remove()
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,164
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/demos/sem_and_shm/conclusion.py
|
# Python modules
import hashlib
# 3rd party modules
import sysv_ipc
# Utils for this demo
import utils

# Mrs. Conclusion: one side of the shared-memory ping-pong demo. Each side
# reads the other's message, checks it is the md5 of its own last message,
# and writes back the md5 of what it just read.
utils.say("Oooo 'ello, I'm Mrs. Conclusion!")
params = utils.read_params()
# Attach to the semaphore and shared memory by the agreed key from params.txt.
semaphore = sysv_ipc.Semaphore(params["KEY"])
memory = sysv_ipc.SharedMemory(params["KEY"])
utils.say("memory attached at %d" % memory.address)
what_i_wrote = ""
s = ""
for i in range(0, params["ITERATIONS"]):
    utils.say("i = %d" % i)
    if not params["LIVE_DANGEROUSLY"]:
        # Wait for Mrs. Premise to free up the semaphore.
        utils.say("acquiring the semaphore...")
        semaphore.acquire()
    s = utils.read_from_memory(memory)
    # Loop (yielding the semaphore each pass) until the peer has replaced
    # what we last wrote.
    while s == what_i_wrote:
        if not params["LIVE_DANGEROUSLY"]:
            # Release the semaphore...
            utils.say("releasing the semaphore")
            semaphore.release()
            # ...and wait for it to become available again.
            utils.say("acquiring for the semaphore...")
            semaphore.acquire()
        s = utils.read_from_memory(memory)
    if what_i_wrote:
        # The message must be the md5 of our previous message; anything
        # else means the shared memory was corrupted.
        what_i_wrote = what_i_wrote.encode()
        try:
            assert(s == hashlib.md5(what_i_wrote).hexdigest())
        except AssertionError:
            raise AssertionError("Shared memory corruption after %d iterations." % i)
    # Reply with the md5 of what we just read.
    s = s.encode()
    what_i_wrote = hashlib.md5(s).hexdigest()
    utils.write_to_memory(memory, what_i_wrote)
    if not params["LIVE_DANGEROUSLY"]:
        utils.say("releasing the semaphore")
        semaphore.release()
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,165
|
osvenskan/sysv_ipc
|
refs/heads/develop
|
/prober.py
|
import os.path
import os
import subprocess
import distutils.sysconfig
# Set these to None for debugging or subprocess.PIPE to silence compiler
# warnings and errors.
STDOUT = subprocess.PIPE
STDERR = subprocess.PIPE
# STDOUT = None
# STDERR = None
# This is the max length that I want a printed line to be.
MAX_LINE_LENGTH = 78
PYTHON_INCLUDE_DIR = os.path.dirname(distutils.sysconfig.get_config_h_filename())
# print(PYTHON_INCLUDE_DIR)
def line_wrap_paragraph(s, width=None):
    # Format s with terminal-friendly line wraps, breaking at spaces.
    #
    # `width` is the maximum line length and defaults to the module-level
    # MAX_LINE_LENGTH (the new parameter is backward-compatible).
    # Returns a list of lines.
    #
    # Bug fix: the original looped forever when a single word was longer
    # than the wrap width (rfind() returned -1, so `beginning` never
    # advanced). Such words are now hard-broken at the width limit.
    if width is None:
        width = MAX_LINE_LENGTH
    lines = []
    beginning = 0
    end = width - 1
    while True:
        if end >= len(s):
            # The remainder fits on one line; we're done.
            lines.append(s[beginning:])
            return lines
        last_space = s[beginning:end].rfind(' ')
        if last_space == -1:
            # No space in this window -- hard-break the over-long word.
            lines.append(s[beginning:end])
            beginning = end
        else:
            lines.append(s[beginning:beginning + last_space])
            beginning += (last_space + 1)
        end = beginning + width - 1
def print_bad_news(value_name, default):
    """Print a bordered warning that `value_name` couldn't be probed and `default` is used."""
    message = ("Setup can't determine %s on your system, so it will default to %s which may not "
               "be correct.") % (value_name, default)
    plea = ("Please report this message and your operating system info to the package "
            "maintainer listed in the README file.")
    body = line_wrap_paragraph(message) + [''] + line_wrap_paragraph(plea)
    border = '*' * MAX_LINE_LENGTH
    print(border + "\n* " + ('\n* '.join(body)) + '\n' + border)
def does_build_succeed(filename):
    """Return True if ./prober/<filename> compiles and links cleanly, False otherwise."""
    cmd = "cc -Wall -I%s -o ./prober/foo ./prober/%s" % \
        (PYTHON_INCLUDE_DIR, filename)
    process = subprocess.Popen(cmd, shell=True, stdout=STDOUT, stderr=STDERR)
    # wait() returns the process' exit code; 0 means compile & link succeeded.
    return process.wait() == 0
def compile_and_run(filename, linker_options=""):
    """Compile ./prober/<filename>, run it, and return its stdout (stripped, decoded).

    Returns None if the compile step fails.
    """
    cmd = "cc -Wall -I%s -o ./prober/foo %s ./prober/%s" % \
        (PYTHON_INCLUDE_DIR, linker_options, filename)
    compiler = subprocess.Popen(cmd, shell=True, stdout=STDOUT, stderr=STDERR)
    if compiler.wait():
        # Non-zero exit code: the compile failed.
        return None
    output = subprocess.Popen(["./prober/foo"],
                              stdout=subprocess.PIPE).communicate()[0]
    return output.strip().decode()
def sniff_semtimedop():
    """Return True if a test program calling semtimedop() builds on this system."""
    return does_build_succeed("semtimedop_test.c")
def sniff_union_semun_defined():
    """Return True if the system headers already define union semun."""
    # AFAICT the semun union is supposed to be declared in one's code.
    # However, a lot of legacy code gets this wrong and some header files
    # define it, e.g.sys/sem.h on OS X where it's #ifdef-ed so that legacy
    # code won't break. On some systems, it appears and disappears based
    # on the #define value of _XOPEN_SOURCE.
    return does_build_succeed("sniff_union_semun_defined.c")
def probe_semvmx():
    """Return SEMVMX, the maximum value of a System V semaphore.

    At present this is hardcoded, and that seems fine on all systems tested.
    https://github.com/osvenskan/sysv_ipc/issues/3
    """
    return 32767
def probe_page_size():
    """Return the system page size via a compiled C probe, falling back to 4096."""
    page_size = compile_and_run("probe_page_size.c")
    if page_size is None:
        # The probe failed to build or run; warn and use a common default.
        page_size = 4096
        print_bad_news("the value of PAGE_SIZE", page_size)
    return page_size
def probe():
    """Probe the build machine and return a dict of C preprocessor symbols.

    Side effect: writes probe_results.h, but only if it doesn't already
    exist, so hand edits survive re-runs of setup.
    """
    d = {"KEY_MAX": "LONG_MAX",
         "KEY_MIN": "LONG_MIN"
         }
    # conditionals contains preprocessor #defines to be written to probe_results.h that might
    # already be defined on some platforms. Any symbol in this list will be surrounded with
    # preprocessor directives #ifndef/#endif in probe_results.h.
    # If a symbol is in this list but isn't written to probe_results.h, no harm done.
    conditionals = ["_SEM_SEMUN_UNDEFINED",
                    # PAGE_SIZE is already #defined elsewhere on FreeBSD.
                    "PAGE_SIZE",
                    ]
    with open("VERSION") as f:
        version = f.read().strip()
    d["SYSV_IPC_VERSION"] = f'"{version}"'
    d["PAGE_SIZE"] = probe_page_size()
    if sniff_semtimedop():
        d["SEMTIMEDOP_EXISTS"] = ""
    d["SEMAPHORE_VALUE_MAX"] = probe_semvmx()
    # Some (all?) Linux platforms #define _SEM_SEMUN_UNDEFINED if it's up
    # to my code to declare this union, so I use that flag as my standard.
    if not sniff_union_semun_defined():
        d["_SEM_SEMUN_UNDEFINED"] = ""
    msg = """/*
This header file was generated when you ran setup. Once created, the setup
process won't overwrite it, so you can adjust the values by hand and
recompile if you need to.
To enable lots of debug output, add this line and re-run setup.py:
#define SYSV_IPC_DEBUG
To recreate this file, just delete it and re-run setup.py.
KEY_MIN, KEY_MAX and SEMAPHORE_VALUE_MAX are stored internally in longs, so
you should never #define them to anything larger than LONG_MAX regardless of
what your operating system is capable of.
*/
"""
    filename = "probe_results.h"
    # Respect an existing file so manual tweaks aren't clobbered.
    if not os.path.exists(filename):
        lines = []
        for key in d:
            if key in conditionals:
                lines.append("#ifndef %s" % key)
            lines.append("#define %s\t\t%s" % (key, d[key]))
            if key in conditionals:
                lines.append("#endif")
        # A trailing '\n' keeps compilers happy...
        with open(filename, "w") as f:
            f.write(msg + '\n'.join(lines) + '\n')
    return d

if __name__ == "__main__":
    s = probe()
    print(s)
|
{"/tests/test_module.py": ["/tests/base.py"]}
|
4,166
|
jonasserry/GDFT-Net
|
refs/heads/master
|
/Core/GDFT_Tester.py
|
from Core import GDFT_Data
from Core import GDFT_Net
import numpy as np
import matplotlib.pyplot as plt
import pickle
from collections import defaultdict
print("Tester Version: 1.02")
def load_tester(path):
    """Unpickle and return a tester previously written by GDFT_Net_Tester.save().

    Note: pickle can execute arbitrary code -- only load files you trust.
    """
    # Fixed: the original named the file handle `input`, shadowing the builtin.
    with open(path, 'rb') as f:
        return pickle.load(f)
class GDFT_Net_Tester():
    """Evaluates a GDFT_Net over a range of SNRs and accumulates per-image error vectors.

    `errors` maps round(SNR, 2) -> list of per-image error vectors, so
    successive test runs aggregate their results.

    Fixes vs. the original: the duplicate, identical second definition of
    get_error_at_index was removed, `== None` / `!= None` comparisons were
    replaced with identity tests, and an unused loop counter was dropped.
    """
    def __init__(self, Tester_Path, Net_Path, dimensions):
        self.Path = Tester_Path                  # where save() pickles this tester
        self.Net_Path = Net_Path                 # where the net to test is stored
        self.Net = None                          # loaded lazily by load_Net()
        self.version = 1.1
        self.dimensions = dimensions             # image dimensions, x first
        self.errors = defaultdict(list)          # round(SNR, 2) -> list of error vectors
        self.standard_dev_delays = None          # RMS of true delays, set by run_RMSE_Testing()
    def load_Net(self):
        """Load the GDFT_Net (and its models) from Net_Path."""
        self.Net = GDFT_Net.load_GDFT_Net(self.Net_Path)
        self.Net.load_models()
    def run_RMSE_Testing(self, numImages=None, SNRs=None, DS=None):
        """Evaluate the net per SNR, recording per-image errors and printing RMSE stats.

        Images come from the data set DS when given (its SNR list is used);
        otherwise numImages images are freshly simulated for each SNR in SNRs.
        """
        corr = []
        if DS is not None:
            SNRs = DS.SNRs
        for SNR in SNRs:
            if DS is None:
                raw_images, _, labels_1D = GDFT_Data.Create_Images(numImages, self.Net.numSteps, self.Net.dimensions, self.Net.t0, self.Net.wavenumberRange, self.Net.numChan, self.Net.numCoherent, self.Net.numIncoherent, SNR, numSteps_simulated=1024*1024, print_flag=False)
            else:
                raw_images, _, labels_1D = DS.get_Data(with_SNR=SNR)
            # Map the net's normalised output back to delay units.
            prediction = self.Net.process_Images(raw_images, verbose=0)[1]*self.Net.numChan*2-self.Net.numChan
            errors = prediction-labels_1D
            rmse = np.sqrt(np.mean(((errors)**2), axis=1))
            self.errors[round(SNR, 2)].extend(errors)
            print("SNR: {0:3.2f} RMSE: {1:3.2f} STD: {2:3.2f}".format(SNR, np.mean(rmse), np.std(rmse)))
            corr.append(np.sqrt(np.mean(((labels_1D)**2))))
        self.standard_dev_delays = np.mean(corr)  # alter this?
    def get_RMSE_Data(self):
        """Return (SNRs, mean RMSE, std of RMSE) over all recorded errors, sorted by SNR."""
        means = []
        SNRs = []
        stds = []
        for SNR in sorted(self.errors.keys()):
            SNRs.append(SNR)
            rmses = np.sqrt(np.mean((np.array(self.errors[SNR])**2), axis=1))
            means.append(np.mean(rmses))
            stds.append(np.std(rmses))
        return (np.array(SNRs), np.array(means), np.array(stds))
    def get_error_at_index(self, i):
        """Return (SNRs, mean |error|, std of |error|) at delay index i, sorted by SNR."""
        means = []
        SNRs = []
        stds = []
        for SNR in sorted(self.errors.keys()):
            SNRs.append(SNR)
            err = np.abs(np.array(self.errors[SNR])[:, i])
            means.append(np.mean(err))
            stds.append(np.std(err))
        return (np.array(SNRs), np.array(means), np.array(stds))
    def get_error_variation_at_SNR(self, SNR):
        """Return (indices, mean |error|, std of |error|) across delay indices for one SNR."""
        means = []
        inds = []
        stds = []
        for i in range(self.dimensions[0]):
            inds.append(i)
            err = np.abs(np.array(self.errors[SNR])[:, i])
            means.append(np.mean(err))
            stds.append(np.std(err))
        return (np.array(inds), np.array(means), np.array(stds))
    def get_max_error(self):
        """Return (SNRs, mean of per-image max |error|, std), sorted by SNR."""
        means = []
        SNRs = []
        stds = []
        for SNR in sorted(self.errors.keys()):
            SNRs.append(SNR)
            maxima = np.max((np.abs(self.errors[SNR])), axis=1)
            means.append(np.mean(maxima))
            stds.append(np.std(maxima))
        return (np.array(SNRs), np.array(means), np.array(stds))
    def plot_this_data(self, SNRs, means, stds, fig_size=(8,8), corr=1, xlabel="SNR", ylabel="RMSE", label=None, title=None, fontsize=12):
        """Errorbar plot of means +/- stds versus SNRs, optionally normalised by corr."""
        plt.figure(figsize=fig_size)
        plt.errorbar(SNRs, means/corr, yerr=stds/corr, capsize=3, elinewidth=0.5, c="black", ecolor="Black", label=label)
        plt.title(title, fontsize=fontsize*1.5)
        plt.xlabel(xlabel, fontsize=fontsize)
        plt.ylabel(ylabel, fontsize=fontsize)
    def save_data_to_file(self, path):
        """Save the accumulated errors dict to a .npy file."""
        np.save(path, np.array(dict(self.errors)), allow_pickle=True)
    def load_data_from_file(self, path):
        """Merge errors previously saved with save_data_to_file() into this tester."""
        P = np.load(path, allow_pickle=True)
        self.errors.update(P.item())
    def save(self, path=None):
        """Pickle this tester to `path` (default: self.Path).

        The net reference is dropped before pickling (as in the original);
        call load_Net() after reloading the tester.
        """
        if not path:
            path = self.Path
        self.Net = None
        with open(path, 'wb') as output:
            pickle.dump(self, output, pickle.HIGHEST_PROTOCOL)
        print("Reload Net")
|
{"/Core/GDFT_Tester.py": ["/Core/__init__.py"], "/Core/GDFT_Data.py": ["/Core/__init__.py"], "/Core/GDFT_Net.py": ["/Core/__init__.py"]}
|
4,167
|
jonasserry/GDFT-Net
|
refs/heads/master
|
/Core/GDFT_Sim.py
|
import scipy.signal
import numpy as np
from numpy.random import normal
def temprl(nsamp, t0, index=-4.0/3.0):
    """Generate a time sequence of samples of atmospheric temporal
    perturbations with a Kolmogorov-Tatarski structure function."""
    # Coherence time expressed as a fraction of the sequence length.
    scaled_t0 = nsamp / float(t0)
    # Overall amplitude normalisation for the power-law spectrum.
    const = np.sqrt(0.011193 / scaled_t0 / 2. / 2.) / scaled_t0 ** index * nsamp
    amplitude = np.arange(nsamp / 2 + 1, dtype=np.float64)
    amplitude[1:] = const * (amplitude[1:] ** index)  # DC term stays 0
    # Independent Gaussian draws for the real and imaginary parts.
    noise = normal(size=(2, int(nsamp / 2 + 1)))
    return np.fft.irfft(amplitude * (noise[0] + 1j * noise[1]))
def RcFilter(samples, tau):
    """Apply a first-order RC low-pass (exponential smoothing) filter along axis 0.

    Implements y[n] = (1 - e) * x[n] + e * y[n-1] with e = exp(-1/tau).
    """
    decay = np.exp(-1.0 / tau)
    return scipy.signal.lfilter([1 - decay], [1, -decay], samples, axis=0)
def chop(samples, blockSize):
    """Chop first dimension of array into a 2-d array of blocks of length blockSize.

    The original dimension does not have to be a multiple of blockSize -- the
    remainder is discarded. Returns a *view* (no copy); raises for arrays
    which cannot be reshaped this way without copying.
    """
    numBlock = samples.shape[0] // blockSize
    usable = numBlock * blockSize
    blocked = samples[:usable].view()
    # Assigning .shape (rather than reshape()) guarantees no silent copy.
    blocked.shape = (numBlock, blockSize) + samples.shape[1:]
    return blocked
def BlockAverage(samples, blockSize):
    """Average non-overlapping blocks of length blockSize along the first axis."""
    return np.sum(chop(samples,blockSize),axis=1)/float(blockSize)
def DispersedFringes(delay, wavenumberRange, numChan):
    """Return unit-amplitude complex fringes exp(i * delay * k) over numChan channels.

    `delay` may be a scalar or an array; the wavenumber axis is appended last.
    """
    wavenumbers = np.linspace(wavenumberRange[0], wavenumberRange[1], numChan)
    fringe_phases = np.multiply.outer(delay, wavenumbers)
    return np.exp(1j * fringe_phases)
def PowerSpectrum1d(v, oversample=2):
    """Hamming-windowed, zero-padded power spectrum along the last axis, DC-centred."""
    windowed = v * np.hamming(v.shape[-1])
    spectrum = np.fft.fft(windowed, axis=-1, n=v.shape[-1] * oversample)
    return np.fft.fftshift(np.abs(spectrum) ** 2, axes=(-1,))
def ComplexNoise(shape, sigma=1.0):
    """Complex Gaussian noise of the given shape with total standard deviation sigma.

    Real and imaginary parts are independent draws, each with std sigma/sqrt(2).
    """
    parts = np.random.normal(size=shape + (2,), scale=sigma / np.sqrt(2))
    return parts[..., 0] + 1j * parts[..., 1]
def GroupDelaySimulation(phase,wavenumberRange,numChan,numCoherent,numIncoherent,SNR):
    """Return the smoothed delay power spectrum for a noisy simulated fringe sequence."""
    # Coherently average the dispersed fringes over numCoherent samples,
    # add complex noise scaled by sqrt(numChan)/SNR, then take the power
    # spectrum and smooth it over numIncoherent frames with an RC filter.
    coherentVisibilities=BlockAverage(DispersedFringes(phase,wavenumberRange,numChan),numCoherent)
    coherentVisibilities+=ComplexNoise(coherentVisibilities.shape,sigma=np.sqrt(numChan)/SNR)
    delaySpectrum=RcFilter(PowerSpectrum1d(coherentVisibilities),numIncoherent)
    return delaySpectrum
def modifiedGDT(phase,wavenumberRange,numChan,numCoherent,numIncoherent,SNR):
    """Returns GDT output with given phase behaviour with and without applied random Noise"""
    # Coherently average the dispersed fringes once, then form two smoothed
    # power spectra from the same visibilities: one with complex noise
    # added, one without. Both are transposed before being returned.
    coherentVisibilities=BlockAverage(DispersedFringes(phase,wavenumberRange,numChan),numCoherent)
    coherentVisibilities_withNoise=ComplexNoise(coherentVisibilities.shape,sigma=np.sqrt(numChan)/SNR) + coherentVisibilities
    withNoise=RcFilter(PowerSpectrum1d(coherentVisibilities_withNoise),numIncoherent)
    withoutNoise=RcFilter(PowerSpectrum1d(coherentVisibilities),numIncoherent)
    return np.transpose(withNoise), np.transpose(withoutNoise)
def von_karman_temporal_samples(nsamp, t0, T0=1e6, two_telescopes=False):
    """
    Return temporal samples of phase perturbations corresponding to Von Karman turbulence
    Parameters
    ----------
    nsamp : int
        Number of time samples to generate - should be much larger than T0
    t0 : float
        Coherence time measured in samples t_0=0.314 r_0/V where V is effective windspeed.
    T0 : float
        Temporal outer scale T_0=L_0/V.
    two_telescopes : boolean
        Simulate phase sequences corresponding to the phase difference between two
        uncorrelated telescopes i.e. twice the variance. If false, simulate the
        perturbations above a single telescope.
    Returns:
    --------
    samples : numpy.ndarray[float]
        Samples of the phase perturbations at intervals of 1/t0
    Notes:
    ------
    A suitable setting for t_0 might be of order 10 samples.
    For r_0=31.4cm (a moderate H-band value) and V=10m/s, then t_0=10ms.
    If L_0=100m then T0=10s, i.e. 1000t_0, or T0=10^4 samples in this example.
    """
    # Single-sided frequency grid for nsamp real-valued samples.
    f = np.fft.rfftfreq(nsamp)
    # Spectrum scale factor: divide by a factor of 2 to account for noise having a variance of 2
    # Divide by a second factor of two to account for a single-sided spectrum
    # Multiply by a factor of 2 if we want to represent the differential between
    # two telescopes.
    # Multiply by delta-f(=f[1]) to account for power in the range f->f+delta-f
    scale = 0.011193 / (2.0 if two_telescopes else 4.0) * f[1]
    spectrum = scale * t0 ** (-5.0 / 3.0) * (f ** 2 + 1 / T0 ** 2) ** (-4.0 / 3.0)
    # Independent Gaussian draws for the real and imaginary Fourier amplitudes.
    noise = normal(size=(2, len(f)))
    # Multiply by nsamp to counteract the normalisation of the inverse fft
    return nsamp * np.fft.irfft(np.sqrt(spectrum) * (noise[0] + 1j * noise[1]))
|
{"/Core/GDFT_Tester.py": ["/Core/__init__.py"], "/Core/GDFT_Data.py": ["/Core/__init__.py"], "/Core/GDFT_Net.py": ["/Core/__init__.py"]}
|
4,168
|
jonasserry/GDFT-Net
|
refs/heads/master
|
/Core/GDFT_Data.py
|
import time
import IPython
import gc
import cv2
import pickle
import numpy as np
import matplotlib.pyplot as plt
import scipy.signal as sig
from Core import GDFT_Sim as Sim
print("Data Version: 1.61")
###---------------- Image Creation --------------------
def DownSample(image, dimensions):
    """
    Downsample/resize `image` to `dimensions` with openCV and min-max normalise to [0, 1].
    Returns an array with a trailing channel axis of size 1.
    """
    resized = cv2.resize(image, dimensions, interpolation=cv2.INTER_AREA)  # Interpolation type?
    normalised = cv2.normalize(resized, None, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX)
    # Append a singleton channel axis.
    return normalised[..., np.newaxis]
def Create_Image_From_Delays(delays, wavenumberRange, numChan, numCoherent, numIncoherent, SNR, dimensions, numSkip, numSteps=None, t0=None):
    """Return raw GDFT image, 2D Mask, and 1D Label from given set of delays using specified params.

    numSteps and t0 are accepted for interface compatibility but are unused here.
    """
    noisy, clean = Sim.modifiedGDT(delays, wavenumberRange, numChan, numCoherent, numIncoherent, SNR)
    # Drop the first numSkip columns before resizing both the noisy image
    # and its noiseless counterpart (the 2D mask).
    image = DownSample(noisy[:, numSkip:], dimensions)
    mask = DownSample(clean[:, numSkip:], dimensions)
    label_1d = Decimate_Delays(delays, dimensions[0])
    return image, mask, label_1d
def Create_Image(numSteps = 1024*128, dimensions =(256,256), t0 = 10, wavenumberRange=(0.8,1.2), numChan = 100, numCoherent=16, numIncoherent=25, SNR=1,numBatches=1,numSkip=20):
    """Return a single (raw GDFT image, 2D mask, 1D label) from freshly simulated delays."""
    # Simulate a long delay sequence and keep the first numSteps samples.
    delays = Sim.von_karman_temporal_samples(1024*1024, t0, T0=1e4, two_telescopes=True)[:numSteps]
    return Create_Image_From_Delays(delays, wavenumberRange, numChan, numCoherent, numIncoherent, SNR, dimensions, numSkip)
def Create_Images(NumImages, numSteps = 1024*128, dimensions =(256,256), t0 = 10, wavenumberRange=(0.8,1.2), numChan = 100, numCoherent=16, numIncoherent=25, SNR=1,numBatches=1,numSkip=20,numSteps_simulated=1024*1024,print_flag=True):
    """Returns specified number of raw GDFT image, 2D Mask, and 1D Label created using provided parameters"""
    Images = np.empty((NumImages,dimensions[1],dimensions[0],1))
    Labels_2D = np.empty((NumImages,dimensions[1],dimensions[0],1))
    Labels_1D = np.empty((NumImages,dimensions[0]))
    start_time = time.time()
    # One long simulated delay sequence is sliced into this many images
    # before a fresh sequence is simulated.
    Images_per_simulated_delays = int(numSteps_simulated/numSteps)
    delays = Sim.von_karman_temporal_samples(numSteps_simulated,t0,T0=1e4, two_telescopes=True)
    image_index = 0
    for i in range(NumImages):
        if image_index == Images_per_simulated_delays:
            # Current delay sequence exhausted; simulate a new one.
            delays = Sim.von_karman_temporal_samples(numSteps_simulated,t0,T0=1e4, two_telescopes=True)
            image_index = 0
        image,label_2D,label_1D = Create_Image_From_Delays(delays[numSteps*image_index:numSteps*image_index+numSteps],wavenumberRange,numChan,numCoherent,numIncoherent,SNR,dimensions,numSkip)
        Labels_2D[i] = label_2D
        Labels_1D[i] = label_1D
        Images[i] = image
        image_index+=1
        if i%10 ==0 and print_flag:
            # Rough ETA: average time per image so far times images left.
            t = (time.time()-start_time) / (i+1) * (NumImages-i)
            print("\rBatches remaining: %i | Images Remaining in Batch: %s | Time left in Batch: %s" %(numBatches, NumImages-i, time.strftime("%H:%M:%S", time.gmtime(t))),end='\r')
    if print_flag:
        total_t = time.time()-start_time
        print("\rFinished Batch | Time taken: %s | Total Time Left: %s" % (time.strftime("%H:%M:%S", time.gmtime(total_t)),time.strftime("%H:%M:%S", time.gmtime(total_t*(numBatches-1)))))
    return (Images,Labels_2D,Labels_1D)
def ConvertForNextNetwork(train_labels):
    """Convert 2D Labels into 1D Labels simply using argmax. NOTE: this is now deprecated"""
    # Inlines Convert_to_1D_Label: per-image argmax down the height axis,
    # flattened and normalised by the image height.
    features = np.empty((train_labels.shape[0], train_labels.shape[2]))
    for idx, label in enumerate(train_labels):
        features[idx] = np.reshape(np.argmax(label, 0), -1) / label.shape[0]
    return features
def Convert_to_1D_Label(label):
    """Convert 2D Label into 1D Label simply using argmax. NOTE: this is now deprecated"""
    peak_rows = np.argmax(label, 0)
    # Flatten and normalise by the image height.
    return np.reshape(peak_rows, -1) / label.shape[0]
def Decimate_Delays(delays, x_dim):
    """Return the delay sequence FIR-decimated to length x_dim and divided by 2*pi."""
    factor = int(len(delays) / x_dim)
    decimated = sig.decimate(delays, factor, axis=0, ftype="fir")
    assert(len(decimated)==x_dim), "Decimated length: {0} | Desired dimension {1}".format(len(decimated),x_dim)
    return decimated / 2 / np.pi
###---------------- Data Set Creation --------------------
def create_Data_Set(id,NumImages,SNRs,t0=16, numSteps = 1024*128, dimensions =(256,256), wavenumberRange=(1.5,2.0), numChan = 100, numCoherent=16, numIncoherent=25,numSkip=0,**kwargs):
    """Returns variable SNR GDFT Data Set with provided SNR distribution and GDFT parameters"""
    assert(len(NumImages)==len(SNRs))
    total = np.sum(NumImages)
    Images = np.empty((total, dimensions[1], dimensions[0], 1))
    Labels_2D = np.empty((total, dimensions[1], dimensions[0], 1))
    Labels_1D = np.empty((total, dimensions[0]))
    offset = 0
    # Fill the preallocated arrays one SNR batch at a time.
    for batch, (count, snr) in enumerate(zip(NumImages, SNRs)):
        images, labels_2D, labels_1D = Create_Images(count, SNR=snr, numSteps=numSteps, dimensions=dimensions, t0=t0, wavenumberRange=wavenumberRange, numChan=numChan, numCoherent=numCoherent, numIncoherent=numIncoherent, numBatches=(len(NumImages)-batch), numSkip=numSkip)
        Images[offset:offset+count] = images
        Labels_2D[offset:offset+count] = labels_2D
        Labels_1D[offset:offset+count] = labels_1D
        offset += count
    return GDFT_Data_Set(id, Images, Labels_2D, Labels_1D, NumImages, SNRs, t0, numChan, dimensions, numSteps, wavenumberRange, numCoherent, numIncoherent, numSkip)
def create_Data_Sets(id,NumImages,SNRs,t0=10, numSteps = 128000, y_dim=64,x_dims=[16,32,64,128,256,512], wavenumberRange=(1.5,2.0), numChan = 32, numCoherent=10, numIncoherent=25,numSkip=0,**kwargs):
    """Returns variable SNR GDFT Data Sets. A single set of GDFT samples is created using the final provided dimension (x_dims[-1]).
    This set is chopped up to create data sets at other provided dimensions. """
    # NOTE(review): x_dims is a mutable default argument; safe here because
    # it is only read, never mutated.
    assert(len(NumImages)==len(SNRs))
    Images = np.empty((np.sum(NumImages),y_dim,x_dims[-1],1))
    Labels_2D = np.empty((np.sum(NumImages),y_dim,x_dims[-1],1))
    Labels_1D = np.empty((np.sum(NumImages),x_dims[-1]))
    n=0
    i=0
    while i<len(NumImages): # Create Images at maximum dimension
        images,labels_2D,labels_1D = Create_Images(NumImages[i],SNR = SNRs[i],numSteps=numSteps, dimensions = (x_dims[-1],y_dim), t0=t0, wavenumberRange = wavenumberRange, numChan = numChan, numCoherent=numCoherent, numIncoherent=numIncoherent,numBatches=(len(NumImages)-i),numSkip=numSkip)
        Images[n:n+NumImages[i]] = images
        Labels_2D[n:n+NumImages[i]] = labels_2D
        Labels_1D[n:n+NumImages[i]] = labels_1D
        n+=NumImages[i]
        i+=1
    Sets = []
    for x in x_dims: # Chop images into smaller dimensions
        images = []
        labels_2d =[]
        labels_1d = []
        j=0
        # Walk the SNR batches; each max-width image yields x_dims[-1]/x
        # side-by-side slices of width x.
        for n in NumImages:
            i=0
            while i < x_dims[-1]/x:
                images.extend(Images[j:j+n,:,x*i:x*i+x,:])
                labels_2d.extend(Labels_2D[j:j+n:,:,x*i:x*i+x,:])
                labels_1d.extend(Labels_1D[j:j+n:,x*i:x*i+x])
                i+=1
            j+=n
        # Image counts and numSteps are rescaled to the chopped width.
        Sets.append(GDFT_Data_Set(id+str(x),images,labels_2d,labels_1d,(np.array(NumImages)*x_dims[-1]/x).astype(int),SNRs,t0,numChan,(x,y_dim),int(numSteps*x/x_dims[-1]),wavenumberRange,numCoherent,numIncoherent,numSkip))
    return Sets
###---------------- GDFT Data Set --------------------
class GDFT_Data_Set():
    """Container for a set of simulated GDFT images and their labels.

    Stores raw images, 2D track-image labels, 1D delay-trace labels and the
    simulation parameters used to generate them, and supports pickled
    persistence via save_As/save (reload with load_Data_Set).
    """
    def __init__(self,id,Images,Labels_2D,Labels_1D,NumImages,SNRs,t0,numChan,dimensions,numSteps,wavenumberRange,numCoherent,numIncoherent,numSkip):
        self.path = None  # full pickle path; set by save_As
        self.id = id
        self.SNRs = SNRs
        self.numSteps = numSteps
        self.t0 = t0
        self.numChan = numChan
        self.dimensions = dimensions
        self.wavenumberRange = wavenumberRange
        self.numCoherent = numCoherent
        self.numIncoherent = numIncoherent
        self.numSkip = numSkip
        self.Images = Images
        self.Labels_1D = Labels_1D
        self.Labels_2D = Labels_2D
        self.Image_Nums = NumImages  # images per SNR, aligned with SNRs
        # Maximum observable delay (in wavelengths) for this channel setup.
        self.dmax=numChan/(2*(wavenumberRange[1]-wavenumberRange[0]))
    def get_Params(self):
        """Return the simulation parameters as a tuple."""
        return(self.numSteps,self.t0,self.numChan,self.wavenumberRange,self.numCoherent,self.numIncoherent,self.numSkip)
    def save_As(self,path):
        """Pickle this data set to path + id + '.pkl' and remember that file."""
        full_path = path+self.id+".pkl"
        with open(full_path, 'wb') as output:
            pickle.dump(self, output, pickle.HIGHEST_PROTOCOL)
        # BUG FIX: the old code stored only the directory prefix in
        # self.path, so a later save() wrote to the wrong file.
        self.path=full_path
        print("Saved as: " + full_path)
    def save(self):
        """Re-pickle to the path recorded by a previous save_As.

        Raises:
            Exception: if save_As was never called.
        """
        if self.path is None:
            raise Exception("No path set. Use save_As")
        with open(self.path, 'wb') as output:
            pickle.dump(self, output, pickle.HIGHEST_PROTOCOL)
        print("Saved as: " + self.path)
    def describe(self):
        """Print a summary of the data set's parameters."""
        print("------------------------ID: %s ----------------------------"%(self.id))
        print("numChan {0}".format(self.numChan))
        # Completed the previous "FINISH THIS" stub with the remaining params.
        print("dimensions {0}".format(self.dimensions))
        print("images per SNR {0}".format(self.Image_Nums))
        print("SNRs {0}".format(self.SNRs))
        print("numSteps {0} | t0 {1}".format(self.numSteps, self.t0))
        print("wavenumberRange {0} | dmax {1}".format(self.wavenumberRange, self.dmax))
    def get_Data(self,with_SNR=None):
        "Returns Unshuffled Images and Labels,"
        if with_SNR is None:
            return(np.array(self.Images),np.array(self.Labels_2D),np.array(self.Labels_1D))
        else:
            i = self.SNRs.index(with_SNR) #SNR Index
            # Samples are stored grouped by SNR, so a cumulative sum of the
            # per-SNR counts gives the [start, end) slice for this SNR.
            end = np.cumsum(self.Image_Nums)[i]
            if i == 0:
                start = 0
            else:
                start = np.cumsum(self.Image_Nums)[i-1]
            return(np.array(self.Images[start:end]),np.array(self.Labels_2D[start:end]),np.array(self.Labels_1D[start:end]))
    def get_Shuffled_Data(self):
        "returns shuffled COPY. Watch out for space"
        # Restoring the RNG state between the three permutations keeps
        # images and both label arrays aligned after shuffling.
        rng_state = np.random.get_state()
        a = np.random.permutation(self.Images)
        np.random.set_state(rng_state)
        b = np.random.permutation(self.Labels_2D)
        np.random.set_state(rng_state)
        c = np.random.permutation(self.Labels_1D)
        return(a,b,c)
    def findSNR(self,i):
        """Find SNR of sample i in variable SNR Data with distribution given by Bats and SNRs"""
        cum_Bats = np.cumsum(self.Image_Nums)
        n=0
        # BUG FIX: test the bound *before* indexing; the old order evaluated
        # cum_Bats[n] after the final increment and could raise IndexError.
        # Out-of-range i now clamps to the last SNR bin.
        while n < len(cum_Bats)-1 and i > cum_Bats[n]:
            n+=1
        return(self.SNRs[n])
    def plot_Image_at_Index(self,i,title="",fs=10,aspect="auto", figsize=(10, 6)):
        """Plots Image and Label at given Index"""
        _, axs = plt.subplots(nrows=3, ncols=1, figsize=figsize,sharex=True)
        axs[0].imshow(self.Images[i][:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect,extent=(0,self.numSteps/self.t0,-self.dmax,self.dmax))
        axs[1].imshow(self.Labels_2D[i][:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect,extent=(0,self.numSteps/self.t0,-self.dmax,self.dmax))
        axs[0].set_ylabel("OPD(Wavelengths)",fontsize=fs)
        axs[1].set_ylabel("OPD(Wavelengths)",fontsize=fs)
        axs[1].set_xlabel("time/$t_0$",fontsize=fs)
        axs[0].set_title("Image (SNR = %s)" % (self.findSNR(i)),fontsize=fs*1.5)
        axs[1].set_title("Label",fontsize=fs*1.5)
        axs[2].plot(np.linspace(0,self.numSteps/self.t0,len(self.Labels_1D[i])),self.Labels_1D[i])
        plt.suptitle(title)
def load_Data_Set(path):
    """Load a pickled GDFT_Data_Set from *path* and return it.

    Renamed the file handle from `input` (which shadowed the builtin).
    """
    with open(path, 'rb') as infile:
        return pickle.load(infile)
|
{"/Core/GDFT_Tester.py": ["/Core/__init__.py"], "/Core/GDFT_Data.py": ["/Core/__init__.py"], "/Core/GDFT_Net.py": ["/Core/__init__.py"]}
|
4,169
|
jonasserry/GDFT-Net
|
refs/heads/master
|
/Core/GDFT_Net.py
|
from Core import GDFT_Data
import numpy as np
import matplotlib.pyplot as plt
import pickle
from collections import defaultdict
# pylint: disable=E1130
print("Net Version: 1.72")
#FIX THESE IMPORTS
import tensorflow as tf
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras import backend
from tensorflow.keras import Input
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.layers import Conv2D,MaxPooling2D,Dropout,concatenate, Flatten, Dense, UpSampling2D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model
def load_GDFT_Net(path):
    """Load a pickled GDFT_Net from *path* and return it.

    Remember to call load_models() on the result — models are stripped
    before pickling.  Renamed the file handle from `input` (shadowed builtin).
    """
    with open(path, 'rb') as infile:
        return pickle.load(infile)
class GDFT_Net():
    """Two-pass GDFT delay-tracking network.

    Pass 1 (M1) is a U-Net mapping a raw GDFT image to a cleaned 2D
    track image; pass 2 (M2) collapses that into a 1D normalised delay
    trace.  The Keras models live on disk at M1_path/M2_path and must be
    loaded explicitly — save_Net drops them before pickling.
    Fixed throughout: `is None` / `is not None` instead of `== None` /
    `!= None` (safer for objects that may overload __eq__).
    """
    def __init__(self,M1_path,M2_path,dimensions,Net_Path=None):
        """M1,M2 should be paths
        Dimensions written as (x,y)
        """
        self.M1_path = M1_path
        self.M2_path = M2_path
        self.M1 = None  # pass-1 Keras model, populated by load_P1_Model()
        self.M2 = None  # pass-2 Keras model, populated by load_P2_Model()
        self.dimensions = dimensions
        self.path = Net_Path
        # Pass-1 training bookkeeping.
        self.P1_val_loss = []
        self.P1_loss = []
        self.P1_epochs_trained = 0
        self.P1_nN = None
        # Pass-2 training bookkeeping.
        self.P2_val_loss = []
        self.P2_loss = []
        self.P2_epochs_trained = 0
        self.P2_nN = None
        self.numSteps = None # This would be better done with a simple params dict that can then be passed to all GDFT_Data functions
        self.t0 = None
        self.numChan = None
        self.wavenumberRange = None
        self.numCoherent = None
        self.numIncoherent = None
        self.numSkip = None
        self.dmax=None
        self.RMSEs = defaultdict(list)
        self.errors = defaultdict(list)
        self.standard_dev_delays = None
        print("Remember: Load Models")
    def describe(self):
        """Print a short human-readable summary of the network state."""
        print("Dimensions: {0}x{1}".format(*self.dimensions))
        print("nN -> P1: {0} | P2: {1}".format(self.P1_nN,self.P2_nN))
        print("Epochs --> P1: {0} | P2: {1}".format(self.P1_epochs_trained,self.P2_epochs_trained))
        # Guard against min() on an empty list for an untrained network.
        if self.P1_val_loss and self.P2_val_loss:
            print("Min Loss --> P1: {0} | P2: {1}".format(min(self.P1_val_loss),min(self.P2_val_loss)))
    def set_training_params(self,numSteps,t0,numChan,wavenumberRange,numCoherent,numIncoherent,numSkip):
        """Record the simulation parameters used to generate training data."""
        self.numSteps = numSteps
        self.t0 = t0
        self.numChan = numChan
        self.wavenumberRange = wavenumberRange
        self.numCoherent = numCoherent
        self.numIncoherent = numIncoherent
        self.numSkip = numSkip
        # Maximum trackable delay (wavelengths) for this channel setup.
        self.dmax=numChan/(2*(wavenumberRange[1]-wavenumberRange[0]))
    def load_P1_Model(self):
        self.M1 = load_model(self.M1_path)
    def load_P2_Model(self):
        self.M2 = load_model(self.M2_path)
    def load_models(self):
        self.load_P1_Model()
        self.load_P2_Model()
    def check_if_loaded(self):
        """Lazily load either model from disk if it is not in memory yet."""
        if self.M1 is None:
            self.load_P1_Model()
        if self.M2 is None:
            self.load_P2_Model()
    def create_P1_Model(self,nN,model = None):
        """Create (or adopt) a fresh pass-1 model and reset its history."""
        self.P1_nN = nN
        self.P1_val_loss = []
        self.P1_loss = []
        self.P1_epochs_trained = 0
        if model is None:
            self.M1 = UNet_P1(input_size=(self.dimensions[1],self.dimensions[0],1),nN=nN)
        else:
            self.M1 = model
    def train_P1(self,DS,epochs=10,batch_size=16,val_split=0.2):
        """Train the pass-1 model on DS, checkpointing the best val_loss."""
        assert self.M1 is not None,"No Model Loaded"
        train_images, train_labels, _ = DS.get_Shuffled_Data()
        checkpoint = ModelCheckpoint(self.M1_path,monitor="val_loss", save_best_only=True,save_weights_only=False,verbose=1)
        callbacks_list = [checkpoint]
        history = self.M1.fit(train_images, train_labels,batch_size=batch_size, epochs=epochs, callbacks=callbacks_list, validation_split=val_split, verbose = 1)
        self.P1_val_loss.extend(history.history['val_loss'])
        self.P1_loss.extend(history.history['loss'])
        self.P1_epochs_trained += epochs
        plt.figure()
        plt.plot(history.history['loss'],label="test_loss")
        plt.plot(history.history['val_loss'],label="val_loss")
        plt.xlabel("epoch")
        plt.ylabel("Loss")
        plt.legend()
    def test_P1(self,SNR,fs=(10,10),aspect="auto"):
        """Create one synthetic image at the given SNR; plot raw/pred/label."""
        #assert self.M1 != None,"No Model Loaded"
        raw_image, label_2d, _ = GDFT_Data.Create_Image(self.numSteps, self.dimensions, self.t0 , self.wavenumberRange, self.numChan, self.numCoherent, self.numIncoherent, SNR,self.numSkip)
        p1_pred = self.M1.predict(np.reshape(raw_image,[1] + list(raw_image.shape)))
        self.M1.evaluate(np.reshape(raw_image,[1] + list(raw_image.shape)),np.reshape(label_2d,[1] + list(label_2d.shape)),verbose=1)
        plt.figure(figsize=fs)
        plt.imshow(raw_image[:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect)
        plt.figure(figsize=fs)
        plt.imshow(p1_pred[0,:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect)
        plt.figure(figsize=fs)
        plt.imshow(label_2d[:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect)
    def create_P2_Model(self,nN,model = None):
        """Create (or adopt) a fresh pass-2 model and reset its history."""
        self.P2_nN = nN
        self.P2_val_loss = []
        self.P2_loss = []
        self.P2_epochs_trained = 0
        if model is None:
            self.M2 = UNet_P2(input_size=(self.dimensions[1],self.dimensions[0],1),nN=nN)
        else:
            self.M2 = model
    def convert_Data_for_P2(self,DS,reload_P1=True):
        """returns shuffled P2 data from given data set"""
        if reload_P1 or not self.M1:
            self.load_P1_Model()
        images,_,Labels_1D = DS.get_Shuffled_Data()
        P2_images = self.M1.predict(images,verbose=1)
        # Map delay labels from [-y/2, y/2] pixels onto [0, 1] to match the
        # sigmoid output of the pass-2 model.
        return(P2_images,(Labels_1D+self.dimensions[1]/2)/self.dimensions[1])
    def train_P2(self,DS,epochs=10,batch_size=16,val_split=0.2,save_path = None):
        """Train the pass-2 model on pass-1 predictions of DS."""
        self.check_if_loaded()
        if not save_path:
            save_path = self.M2_path
        train_images, train_labels = self.convert_Data_for_P2(DS)
        checkpoint = ModelCheckpoint(save_path,monitor="val_loss", save_best_only=True,save_weights_only=False,verbose=1)
        callbacks_list = [checkpoint]
        history = self.M2.fit(train_images, train_labels,batch_size=batch_size, epochs=epochs, callbacks=callbacks_list, validation_split=val_split,verbose = 1)
        self.P2_val_loss.extend(history.history['val_loss'])
        self.P2_loss.extend(history.history['loss'])
        self.P2_epochs_trained += epochs
        plt.figure()
        plt.plot(history.history['loss'],label="test_loss")
        plt.plot(history.history['val_loss'],label="val_loss")
        plt.xlabel("epoch")
        plt.ylabel("Loss")
        plt.legend()
    def process_Images(self,images,verbose=0):
        """Run a batch of images through both passes; returns (P1, P2) outputs."""
        First_Pass_Images = self.M1.predict(images,verbose)
        Second_Pass_Images = self.M2.predict(First_Pass_Images,verbose)
        return(First_Pass_Images,Second_Pass_Images)
    def process_Image(self,image,verbose=0):
        """Run a single image through both passes; returns (P1, P2) outputs."""
        P1_Image = self.M1.predict(np.reshape(image,[1] + list(image.shape)),verbose)
        P2_Image = self.M2.predict(P1_Image,verbose)
        return(P1_Image[0],P2_Image[0])
    def plot_Example(self,raw_image,label_2d,label_1d,SNR=1.0,fs=(10,10),aspect="auto"):
        """Run one sample through the network, print RMSE and plot the result."""
        self.check_if_loaded()
        First_Pass_Image,Second_Pass_Image = self.process_Image(raw_image,verbose=0)
        # Rescale the sigmoid output back to delays in [-dmax, dmax].
        RMSE = np.sqrt(np.mean((((Second_Pass_Image*self.dmax*2-self.dmax)-label_1d)**2)))
        print("Network RMSE: {0:3.1f} Wavelengths".format(RMSE))
        var = np.sqrt(np.mean(((label_1d**2))))
        print("Variation: {0:3.1f} Wavelengths".format(var))
        #Plotting
        _, axs = plt.subplots(nrows=2, ncols=2, figsize=fs,sharey=True)
        axs[0, 0].imshow(raw_image[:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect,extent=(0,self.dimensions[0],(-self.dmax),self.dmax))
        axs[0, 0].set_title(r"GDFT Image ($SNR_0$ = {0:3.2f})".format(SNR),fontsize=14)
        axs[0, 0].set_ylabel("OPD(Wavelengths)",fontsize=14)
        axs[1, 0].imshow(label_2d[:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect,extent=(0,self.dimensions[0],(-self.dmax),self.dmax))
        axs[1, 0].set_title("GDFT Image Correct Delays",fontsize=14)
        axs[1, 0].set_ylabel("OPD(Wavelengths)",fontsize=14)
        axs[1, 0].set_xlabel(r"Time/$t_0$",fontsize=14)
        axs[0, 1].imshow(First_Pass_Image[:,:,0], cmap=plt.get_cmap('gray_r'),origin="lower",aspect=aspect,extent=(0,self.dimensions[0],(-self.dmax),self.dmax))
        axs[0, 1].set_title("First Pass Network Prediction",fontsize=14)
        x = np.linspace(0,self.numSteps/self.t0,len(Second_Pass_Image))
        axs[1, 1].set_title("Results",fontsize=14)
        axs[1, 1].plot(x,Second_Pass_Image*self.dmax*2-self.dmax,label="GDFT-Net",c="black",ls="--")
        axs[1, 1].plot(x,label_1d,label="True Delays",c="black",ls="-")
        axs[1, 1].set_xlabel(r"Time/$t_0$",fontsize=14)
        axs[1, 1].legend(fontsize=12)
        return()
    def plot_random_Example(self,SNR,fs=(10,10),aspect="auto"):
        """Generate one synthetic sample at the given SNR and plot it."""
        raw_image, label_2d, label_1d = GDFT_Data.Create_Image(self.numSteps, self.dimensions, self.t0 , self.wavenumberRange, self.numChan, self.numCoherent, self.numIncoherent, SNR,self.numSkip)
        self.plot_Example(raw_image,label_2d,label_1d,SNR,fs,aspect="auto")
    def save_Net(self,filename=None):
        """Pickle this wrapper.  Models are dropped first — reload after loading."""
        self.M1 = None
        self.M2 = None
        if filename is None:
            filename = self.path
        else:
            self.path=filename
        with open(filename, 'wb') as output:
            pickle.dump(self, output, pickle.HIGHEST_PROTOCOL)
        print("Saved as: " + self.path)
        print("Remember to reload models")
def UNet_P1 (pretrained_weights = None,input_size = (256,256,1),nN=64,drop=0.4):
    """Build the pass-1 U-Net: GDFT image in, same-size sigmoid image out.

    Args:
        pretrained_weights: optional path of weights to load into the model.
        input_size: (height, width, channels) of the input images.
        nN: filter count of the first encoder stage (doubled each stage down).
        drop: dropout rate used around the bottleneck and on skip merges.

    Returns:
        A compiled Keras Model (Adam, binary cross-entropy).
    """
    inputs = Input(input_size)
    # --- Encoder: four conv/conv/pool stages, doubling filters each time. ---
    conv1 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
    conv1 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
    conv2 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
    conv3 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
    conv4 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
    drop4 = Dropout(drop)(conv4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
    # --- Bottleneck. ---
    conv5 = Conv2D(nN*16, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
    conv5 = Conv2D(nN*16, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
    drop5 = Dropout(drop)(conv5)
    # --- Decoder: upsample + concat the matching encoder stage (skip link). ---
    up6 = Conv2D(nN*8, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
    merge6 = concatenate([drop4,up6], axis = 3)
    drop6 = Dropout(drop)(merge6)
    conv6 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop6)
    conv6 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
    up7 = Conv2D(nN*4, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
    merge7 = concatenate([conv3,up7], axis = 3)
    drop7 = Dropout(drop)(merge7)
    conv7 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop7)
    conv7 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
    up8 = Conv2D(nN*2, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
    merge8 = concatenate([conv2,up8], axis = 3)
    drop8 = Dropout(drop)(merge8)
    conv8 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop8)
    conv8 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
    up9 = Conv2D(nN, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
    merge9 = concatenate([conv1,up9], axis = 3)
    drop9 = Dropout(drop)(merge9)
    conv9 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop9)
    conv9 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    conv9 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    # 1x1 sigmoid head: per-pixel probability of belonging to the track.
    conv10 = Conv2D(1, 1, activation = 'sigmoid')(conv9)
    model = Model(inputs = [inputs], outputs = [conv10])
    if(pretrained_weights):
        model.load_weights(pretrained_weights)
    # NOTE(review): Adam(lr=...) is the legacy keyword; newer TF versions use
    # learning_rate= — confirm against the pinned TensorFlow version.
    model.compile(optimizer = Adam(lr=1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])
    #model.summary()
    return model
def UNet_P2 (pretrained_weights = None,input_size = (256,256,1),nN = 64,drop=0.4):
    """Build the pass-2 network: U-Net body plus a dense sigmoid head that
    collapses the image to a 1D delay trace of length input_size[1].

    Args:
        pretrained_weights: optional path of weights to load into the model.
        input_size: (height, width, channels) of the input (pass-1) images.
        nN: filter count of the first encoder stage (doubled each stage down).
        drop: dropout rate; also reused before the final dense layer.

    Returns:
        A compiled Keras Model (adam, mean absolute error).
    """
    inputs = Input(input_size)
    # --- Encoder (identical layout to UNet_P1). ---
    conv1 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
    conv1 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
    conv2 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
    conv2 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
    conv3 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
    conv3 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
    conv4 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
    conv4 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
    drop4 = Dropout(drop)(conv4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
    # --- Bottleneck. ---
    conv5 = Conv2D(nN*16, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
    conv5 = Conv2D(nN*16, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
    drop5 = Dropout(drop)(conv5)
    # --- Decoder with skip connections. ---
    up6 = Conv2D(nN*8, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
    merge6 = concatenate([drop4,up6], axis = 3)
    drop6 = Dropout(drop)(merge6)
    conv6 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop6)
    conv6 = Conv2D(nN*8, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
    up7 = Conv2D(nN*4, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
    merge7 = concatenate([conv3,up7], axis = 3)
    drop7 = Dropout(drop)(merge7)
    conv7 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop7)
    conv7 = Conv2D(nN*4, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
    up8 = Conv2D(nN*2, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
    merge8 = concatenate([conv2,up8], axis = 3)
    drop8 = Dropout(drop)(merge8)
    conv8 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop8)
    conv8 = Conv2D(nN*2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
    up9 = Conv2D(nN, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
    merge9 = concatenate([conv1,up9], axis = 3)
    drop9 = Dropout(drop)(merge9)
    conv9 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(drop9)
    conv9 = Conv2D(nN, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    conv10 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
    conv11 = Conv2D(1, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv10)
    # Head: flatten to a single vector, one sigmoid output per time column
    # (length input_size[1] = image width).
    flatten = Flatten()(conv11)
    drop = Dropout(drop)(flatten)
    dense2 = Dense(input_size[1], activation = "sigmoid")(drop)
    model = Model(inputs = [inputs], outputs = [dense2])
    if(pretrained_weights):
        model.load_weights(pretrained_weights)
    model.compile(optimizer = "adam", loss = "mean_absolute_error", metrics = ["accuracy"])
    return (model)
|
{"/Core/GDFT_Tester.py": ["/Core/__init__.py"], "/Core/GDFT_Data.py": ["/Core/__init__.py"], "/Core/GDFT_Net.py": ["/Core/__init__.py"]}
|
4,170
|
jonasserry/GDFT-Net
|
refs/heads/master
|
/Core/Heuristic.py
|
import scipy.stats as stats
import numpy as np
import time
import IPython
import matplotlib.pyplot as plt
def Heuristic(image,sigma0=10, mem = 5,no_mem = True):
    """Track the brightest delay trace through a GDFT image, column by column.

    Each column's delay estimate is the argmax of the column multiplied by a
    Gaussian window centred on the previous estimate, which keeps the tracker
    from jumping to distant spurious maxima.

    Args:
        image: array of shape (height, width, channels); channel 0 is used.
        sigma0: standard deviation (in pixels) of the Gaussian window.
        mem, no_mem: unused.  The original defined an inner `shift()` helper
            reading them but never called it; the dead helper was removed and
            the parameters are kept only for caller compatibility.

    Returns:
        1D numpy array of length `width`: delays relative to the image centre.
    """
    size = image.shape[0]
    raw_image = image[:, :, 0]
    previous_delay = np.argmax(raw_image[:, 0])  # first column: plain argmax
    correct_delays = [previous_delay]
    for i in range(1, image.shape[1]):
        col = raw_image[:, i]
        window = stats.norm.pdf(
            np.linspace(int(-size/2), int(size/2), size),
            loc=previous_delay - size/2, scale=sigma0)
        previous_delay = np.argmax(col * window)
        correct_delays.append(previous_delay)
    # Shift from row indices to delays centred on zero.
    return np.array(correct_delays) - size/2
def Heuristic_V2(image,sigma0=10, SN_threshold = 1.4,scaling=np.abs):
    """Adaptive-window delay tracker.

    Like Heuristic, but the Gaussian search window widens with the distance
    (in columns) since the last confident estimate, where "confident" means
    the peak-to-mean signal-to-noise of the column exceeded SN_threshold.

    Returns:
        (delays, widths): delays relative to the image centre, and the list
        of window widths used (first entry is sigma0 itself).
    """
    height = image.shape[0]
    length = image.shape[1]
    frame = image[:, :, 0]
    widths = [sigma0]
    # First column: plain argmax, no windowing.
    delay = np.argmax(frame[:, 0])
    delays = [delay]
    last_good = -5
    axis = np.linspace(int(-height/2), int(height/2), height)
    for col_idx in range(1, length):
        column = frame[:, col_idx]
        width = scaling(col_idx - last_good) * sigma0
        #potentially smooth how window moves around?
        window = stats.norm.pdf(axis, loc=delay - height/2, scale=width)
        delay = np.argmax(column * window)
        delays.append(delay)
        # Peak over mean-of-the-rest as a crude per-column SNR.
        snr = column[delay] / np.mean(np.delete(column, delay))
        if snr > SN_threshold:
            last_good = col_idx
        widths.append(width)
    return (np.array(delays) - height/2, widths)
def Hueristic_Images(images,sigma0=10, mem = 5):
    """Run the Heuristic tracker over every image, with live progress output.

    NOTE(review): `display` is not imported here — it is the notebook-injected
    IPython builtin, so this function raises NameError outside
    IPython/Jupyter.  Confirm intended usage.  (Function name typo
    "Hueristic" kept: callers depend on it.)

    Returns:
        numpy array of per-image delay traces (one Heuristic result per image).
    """
    start_time = time.time()
    New_Images = []
    i=0
    out = display(IPython.display.Pretty('Starting'), display_id=True)
    for image in images:
        # Update the live progress readout in place.
        out.update(IPython.display.Pretty("{0:4.1f}% done".format(i/len(images)*100)))
        New_Images.append(Heuristic(image,sigma0,mem))
        i+=1
    print("Finished | Time taken: %s" % (time.strftime("%H:%M:%S", time.gmtime(time.time()-start_time))))
    return np.array(New_Images)
def Hueristic_Images_V2(images,sigma0=10, SN_threshold = 5,scaling = np.abs):
    """Run Heuristic_V2 over every image (delays only; widths discarded).

    NOTE(review): relies on the notebook-injected `display` builtin, like
    Hueristic_Images — raises NameError outside IPython/Jupyter.  Also note
    the default SN_threshold here (5) differs from Heuristic_V2's own
    default (1.4); presumably deliberate — confirm.

    Returns:
        numpy array of per-image delay traces.
    """
    start_time = time.time()
    New_Images = []
    i=0
    out = display(IPython.display.Pretty('Starting'), display_id=True)
    for image in images:
        # Update the live progress readout in place.
        out.update(IPython.display.Pretty("{0:4.1f}% done".format(i/len(images)*100)))
        # [0] keeps only the delay trace, dropping the width history.
        New_Images.append(Heuristic_V2(image,sigma0,SN_threshold,scaling)[0])
        i+=1
    print("Finished | Time taken: %s" % (time.strftime("%H:%M:%S", time.gmtime(time.time()-start_time))))
    return np.array(New_Images)
|
{"/Core/GDFT_Tester.py": ["/Core/__init__.py"], "/Core/GDFT_Data.py": ["/Core/__init__.py"], "/Core/GDFT_Net.py": ["/Core/__init__.py"]}
|
4,171
|
jonasserry/GDFT-Net
|
refs/heads/master
|
/Core/__init__.py
|
__all__ = ["GDFT_Data", "GDFT_Net","GDFT_Sim","Heuristic","GDFT_Tester"]
|
{"/Core/GDFT_Tester.py": ["/Core/__init__.py"], "/Core/GDFT_Data.py": ["/Core/__init__.py"], "/Core/GDFT_Net.py": ["/Core/__init__.py"]}
|
4,173
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/corrections.py
|
"""
Reflectometry data must be corrected as a part of reduction.
These functions facilitate this, including the footprint and
DCD q-variance corrections.
"""
import numpy as np
from scipy.stats import norm
from scipy.interpolate import splrep
def footprint_correction(beam_width, sample_size, theta):
    """
    The factor by which the intensity should be multiplied to account for the
    scattering geometry, where the beam is Gaussian in shape.
    Args:
        beam_width (:py:attr:`float`):
            Width of incident beam, in metres.
        sample_size (:py:attr:`float`):
            Width of sample in the dimension of the beam, in metres.
        theta (:py:attr:`float`):
            Incident angle, in degrees.  Scalars are now accepted as well
            as arrays (the previous list comprehension required an iterable).
    Returns:
        Array of correction factors.
    """
    # Deal with the [trivial] theta=0 case vectorially: 1e-3 degrees stands
    # in for zero so the sin() below cannot divide by zero.
    theta = np.asarray(theta, dtype=float)
    theta = np.where(theta == 0, 1e-3, theta)
    # FWHM -> standard deviation of the Gaussian beam profile.
    beam_sd = beam_width / 2 / np.sqrt(2 * np.log(2))
    # The beam footprint on the sample grows as 1/sin(theta).
    projected_beam_sd = beam_sd / np.sin(np.radians(theta))
    # Fraction of the Gaussian beam intercepted by the sample.
    frac_of_beam_sampled = (
        norm.cdf(sample_size/2, 0, projected_beam_sd) -
        norm.cdf(-sample_size/2, 0, projected_beam_sd)
    )
    return frac_of_beam_sampled
def get_interpolator(
        file_path, parser, q_axis_name="qdcd_", intensity_axis_name="adc2"):
    """
    Get an interpolator object from scipy, this is useful for the DCD
    q-normalisation step.
    Args:
        file_path (:py:attr:`str`): File path to the normalisation file.
        parser (:py:attr:`callable`): Parser function for the normalisation
            file.
        q_axis_name (:py:attr:`str`, optional): Label for the q-value in the
            normalisation file. Defaults to ``'qdcd_'``.
        intensity_axis_name (:py:attr:`str`, optional): Label for the
            intensity in the normalisation file. Defaults to ``'adc2'``.
    Returns:
        :py:attr:`tuple`: Containing:
            - :py:attr:`array_like`: Interpolation knots.
            - :py:attr:`array_like`: B-spline coefficients.
            - :py:attr:`int`: Degree of spline.
    """
    # BUG FIX: sort by the *configured* q column.  The previous hard-coded
    # sort_values(by='qdcd_') raised a KeyError for any non-default
    # q_axis_name, contradicting the documented parameter.  splrep requires
    # strictly increasing x, hence the sort.
    normalisation_data = parser(file_path)[1].sort_values(by=q_axis_name)
    return splrep(
        normalisation_data[q_axis_name],
        normalisation_data[intensity_axis_name])
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,174
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/unit/test_scan.py
|
"""
This module tests the central islatu.scan module's Scan and Scan2D classes.
"""
import pytest
from pytest_lazyfixture import lazy_fixture as lazy
import numpy as np
from scipy.interpolate import interp1d
from islatu.background import roi_subtraction, fit_gaussian_1d
from islatu.cropping import crop_to_region
from islatu.scan import Scan2D
from islatu.region import Region
def test_subsample_q_01(scan2d_from_nxs_01: Scan2D):
    """
    Calling subsample_q with default arguments must be a no-op: no theta
    values should be removed. (remove_data_points itself is covered
    extensively in test_data, so checking the length is enough here.)
    """
    n_before = len(scan2d_from_nxs_01.theta)
    scan2d_from_nxs_01.subsample_q()
    n_after = len(scan2d_from_nxs_01.theta)
    assert n_after == n_before
def test_subsample_q_02(scan2d_from_nxs_01: Scan2D):
    """
    An upper bound alone should be honoured. This dataset spans roughly
    0.025Å to 0.06Å, so 0.04 genuinely truncates it from above.
    """
    upper = 0.04
    assert max(scan2d_from_nxs_01.q_vectors) > upper
    scan2d_from_nxs_01.subsample_q(q_max=upper)
    assert max(scan2d_from_nxs_01.q_vectors) <= upper
def test_subsample_q_03(scan2d_from_nxs_01: Scan2D):
    """
    A lower bound alone should be honoured. This dataset spans roughly
    0.025Å to 0.06Å, so 0.04 genuinely truncates it from below.
    """
    lower = 0.04
    assert min(scan2d_from_nxs_01.q_vectors) < lower
    scan2d_from_nxs_01.subsample_q(q_min=lower)
    assert min(scan2d_from_nxs_01.q_vectors) >= lower
def test_subsample_q_04(scan2d_from_nxs_01: Scan2D):
    """
    Lower and upper bounds applied together must both be honoured.
    """
    low, high = 0.032, 0.051
    scan2d_from_nxs_01.subsample_q(low, high)
    remaining = scan2d_from_nxs_01.q_vectors
    assert min(remaining) >= low
    assert max(remaining) <= high
@pytest.mark.parametrize(
    'scan, transmission',
    [
        (lazy('scan2d_from_nxs_01'), 0.000448426658633058)
    ]
)
def test_transmission_normalisation_intensities(scan: Scan2D, transmission):
    """
    Beam-attenuation correction: every intensity must be divided by the
    transmission value (read manually from the .nxs file with a GUI).
    """
    before = np.copy(scan.intensity)
    scan.transmission_normalisation()
    for raw, corrected in zip(before, scan.intensity):
        assert corrected == raw / transmission
@pytest.mark.parametrize(
    'scan, transmission',
    [
        (lazy('scan2d_from_nxs_01'), 0.000448426658633058)
    ]
)
def test_transmission_normalisation_errors(scan: Scan2D, transmission):
    """
    Beam-attenuation correction: every intensity_e value must be divided by
    the transmission value (read manually from the .nxs file with a GUI).
    """
    before = np.copy(scan.intensity_e)
    scan.transmission_normalisation()
    for raw, corrected in zip(before, scan.intensity_e):
        assert corrected == raw / transmission
def test_qdcd_name_assumes(parsed_dcd_normalisation_01):
    """
    The parsed DCD normalisation dataframe must expose the q column under
    its [in]conveniently trailing-underscored name, plus the intensity.
    """
    dataframe = parsed_dcd_normalisation_01[1]
    for column in ("qdcd_", "adc2"):
        assert column in dataframe
def test_qdcd_normalisation_01(scan2d_from_nxs_01: Scan2D, dcd_norm_01_splev):
    """
    The qdcd normalisation must visibly change every intensity and error
    value — a weak but cheap guard against it silently doing nothing.
    """
    raw_i = np.copy(scan2d_from_nxs_01.intensity)
    raw_e = np.copy(scan2d_from_nxs_01.intensity_e)
    scan2d_from_nxs_01.qdcd_normalisation(dcd_norm_01_splev)
    assert not np.any(raw_i == scan2d_from_nxs_01.intensity)
    assert not np.any(raw_e == scan2d_from_nxs_01.intensity_e)
def test_qdcd_normalisation_02(scan2d_from_nxs_01: Scan2D, dcd_norm_01_splev,
                               parsed_dcd_normalisation_01):
    """
    The splev-based normalisation should agree (to within 1%) with a plain
    cubic scipy.interpolate.interp1d built from the same normalisation data.
    """
    raw_i = np.copy(scan2d_from_nxs_01.intensity)
    raw_e = np.copy(scan2d_from_nxs_01.intensity_e)
    frame = parsed_dcd_normalisation_01[1]
    # Reference: independent cubic interpolation of the normalisation data.
    cubic = interp1d(frame["qdcd_"], frame['adc2'], kind='cubic')
    norm_vals = cubic(scan2d_from_nxs_01.q_vectors)
    expected_i = raw_i / norm_vals
    expected_e = raw_e / norm_vals
    scan2d_from_nxs_01.qdcd_normalisation(dcd_norm_01_splev)
    # The two interpolation schemes differ slightly; demand 1% agreement.
    for want, got in zip(expected_i, scan2d_from_nxs_01.intensity):
        assert want == pytest.approx(got, rel=0.01)
    for want, got in zip(expected_e, scan2d_from_nxs_01.intensity_e):
        assert want == pytest.approx(got, rel=0.01)
def test_footprint_correction_01(scan2d_from_nxs_01: Scan2D):
    """
    With a realistic beam FWHM and a small (1 mm) sample, the footprint
    correction must change every intensity and error value.
    """
    beam_fwhm = 100e-6   # 100 micron beam.
    sample_len = 1e-3    # 1 mm sample.
    raw_i = np.copy(scan2d_from_nxs_01.intensity)
    raw_e = np.copy(scan2d_from_nxs_01.intensity_e)
    scan2d_from_nxs_01.footprint_correction(beam_fwhm, sample_len)
    assert not np.any(raw_i == scan2d_from_nxs_01.intensity)
    assert not np.any(raw_e == scan2d_from_nxs_01.intensity_e)
def test_footprint_correction_02(scan2d_from_nxs_01: Scan2D):
    """
    Sanity-check the Gaussian footprint correction against a crude
    step-function-beam estimate; the two agree to roughly 10%.
    """
    beam_fwhm = 100e-6   # 100 micron beam.
    sample_len = 1e-3    # 1 mm sample.
    raw_i = np.copy(scan2d_from_nxs_01.intensity)
    raw_e = np.copy(scan2d_from_nxs_01.intensity_e)
    # Step-function model: the fraction of the beam footprint intercepted
    # by the sample at each incident angle.
    footprint = beam_fwhm / np.sin(np.radians(scan2d_from_nxs_01.theta))
    fraction = sample_len / footprint
    naive_i = raw_i / fraction
    naive_e = raw_e / fraction
    scan2d_from_nxs_01.footprint_correction(beam_fwhm, sample_len)
    for expected, got in zip(naive_i, scan2d_from_nxs_01.intensity):
        assert expected == pytest.approx(got, 0.1)
    for expected, got in zip(naive_e, scan2d_from_nxs_01.intensity_e):
        assert expected == pytest.approx(got, 0.1)
def test_crop_01(scan2d_from_nxs_01: Scan2D, region_01):
    """
    Cropping must strictly shrink both image dimensions.
    """
    shape_before = scan2d_from_nxs_01.images[0].shape
    scan2d_from_nxs_01.crop(crop_to_region, region=region_01)
    shape_after = scan2d_from_nxs_01.images[0].shape
    assert shape_after[0] < shape_before[0]
    assert shape_after[1] < shape_before[1]
def test_crop_02(scan2d_from_nxs_01: Scan2D, region_01: Region):
    """
    After cropping, the image's pixel count must equal the target region's.
    """
    scan2d_from_nxs_01.crop(crop_to_region, region=region_01)
    shape = scan2d_from_nxs_01.images[0].shape
    assert shape[0] * shape[1] == region_01.num_pixels
def test_crop_03(scan2d_from_nxs_01: Scan2D, region_01: Region):
    """
    The cropped image must have exactly the region's x/y lengths.
    """
    scan2d_from_nxs_01.crop(crop_to_region, region=region_01)
    shape = scan2d_from_nxs_01.images[0].shape
    assert shape[0] == region_01.x_length
    assert shape[1] == region_01.y_length
def test_bkg_sub_01(scan2d_from_nxs_01: Scan2D):
    """
    Make sure that we start out with no background.
    """
    first_image = scan2d_from_nxs_01.images[0]
    assert first_image.bkg == 0
    assert first_image.bkg_e == 0
def test_bkg_sub_02(scan2d_from_nxs_01: Scan2D):
    """
    Make sure that the background subtraction function is doing something.
    """
    scan2d_from_nxs_01.bkg_sub(
        roi_subtraction,
        list_of_regions=scan2d_from_nxs_01.metadata.background_regions)
    first_image = scan2d_from_nxs_01.images[0]
    assert first_image.bkg != 0
    assert first_image.bkg_e != 0
def test_bkg_sub_03(scan2d_from_nxs_01: Scan2D):
    """
    Make sure that the background subtraction decreases our intensity.
    """
    num_points = len(scan2d_from_nxs_01.intensity)
    vals = np.zeros(num_points)
    stdevs = np.zeros(num_points)
    # Recompute the raw per-image sums so we have a pre-subtraction baseline.
    for i, image in enumerate(scan2d_from_nxs_01.images):
        vals[i], stdevs[i] = image.sum()
    # Store the intensity(Q) to the new value.
    scan2d_from_nxs_01.intensity = np.array(vals)
    scan2d_from_nxs_01.intensity_e = np.array(stdevs)
    scan2d_from_nxs_01.bkg_sub(
        roi_subtraction,
        list_of_regions=scan2d_from_nxs_01.metadata.background_regions)
    assert (vals > scan2d_from_nxs_01.intensity).all()
def test_bkg_sub_04(scan2d_from_nxs_01: Scan2D, scan2d_from_nxs_01_copy,
                    custom_bkg_region_01):
    """
    Make sure that using two background regions yields a lower uncertainty
    measurement of the background than using just one background region.
    """
    first_bkg_region = scan2d_from_nxs_01.metadata.background_regions[0]
    one_region = [first_bkg_region]
    two_regions = [first_bkg_region, custom_bkg_region_01]
    scan2d_from_nxs_01.bkg_sub(roi_subtraction, list_of_regions=one_region)
    scan2d_from_nxs_01_copy.bkg_sub(
        roi_subtraction, list_of_regions=two_regions)
    for image_1, image_2 in zip(scan2d_from_nxs_01.images,
                                scan2d_from_nxs_01_copy.images):
        assert image_1.bkg_e > image_2.bkg_e
def test_gauss_bkg_01(scan2d_from_nxs_01: Scan2D):
    """
    Make sure that our Gaussian fit background subtraction function is doing
    something.
    Note that this function is not being tested for sensible results because
    this doesn't generally seem to be a sensible technique to use on I07. As
    more instruments are supported, if this technique becomes useful, its
    tests will need to be extended. For now, only the minimum is being done
    to ensure that it is roughly functional.
    """
    scan2d_from_nxs_01.bkg_sub(fit_gaussian_1d)
    first_image = scan2d_from_nxs_01.images[0]
    assert first_image.bkg != 0
    assert first_image.bkg_e != 0
def test_gauss_bkg_02(scan2d_from_nxs_01: Scan2D):
    """
    Make sure that carrying out this subtraction decreases our intensity.
    Note that this function is not being tested for sensible results because
    this doesn't generally seem to be a sensible technique to use on I07. As
    more instruments are supported, if this technique becomes useful, its
    tests will need to be extended. For now, only the minimum is being done
    to ensure that it is roughly functional.
    """
    # Recompute raw per-image sums as the pre-subtraction baseline.
    raw_sums = np.array(
        [image.sum()[0] for image in scan2d_from_nxs_01.images])
    # Store the intensity(Q) to the new value.
    scan2d_from_nxs_01.intensity = np.copy(raw_sums)
    scan2d_from_nxs_01.bkg_sub(fit_gaussian_1d)
    assert (scan2d_from_nxs_01.intensity < raw_sums).all()
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,175
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/io.py
|
"""
This module contains:
Parsing functions used to extract information from experimental files.
Classes used to help make parsing more modular. These include the NexusBase
class and its children.
"""
# We've gotta access the _value attribute on some NXobjects.
# pylint: disable=protected-access
import json
import os
from typing import List
from abc import abstractmethod
import nexusformat.nexus.tree as nx
from nexusformat.nexus import nxload
import pandas as pd
import numpy as np
import h5py
from .scan import Scan2D
from .image import Image
from .data import Data
from .region import Region
from .debug import debug
from .metadata import Metadata
class NexusBase(Metadata):
    """
    This class contains *mostly* beamline agnostic nexus parsing convenience
    stuff. It's worth noting that this class still makes a series of assumptions
    about how data is laid out in a nexus file that can be broken. Instead of
    striving for some impossible perfection, this class is practical in its
    assumptions of how data is laid out in a .nxs file, and will raise if an
    assumption is violated. All instrument-specific assumptions that one must
    inevitably make to extract truly meaningful information from a nexus file
    are made in children of this class.
    Attrs:
        file_path:
            The local path to the file on the local filesystem.
        nxfile:
            The object produced by loading the file at file_path with nxload.
    """
    def __init__(self, local_path: str):
        super().__init__(local_path)
        # Parse the nexus tree once up front; every property below reads it.
        self.nxfile = nxload(local_path)
    @property
    def src_path(self):
        """
        The name of this nexus file, as it was recorded when the nexus file was
        written.
        """
        return self.nxfile.file_name
    @property
    def detector(self):
        """
        Returns the NXdetector instance stored in this NexusFile.
        Raises:
            ValueError if more than one NXdetector is found.
        """
        # Single-element unpacking enforces the "exactly one detector"
        # assumption: it raises ValueError for zero or multiple NXdetectors.
        det, = self.instrument.NXdetector
        return det
    @property
    def instrument(self):
        """
        Returns the NXinstrument instanced stored in this NexusFile.
        Raises:
            ValueError if more than one NXinstrument is found.
        """
        # Same single-element unpacking idiom as `detector`.
        instrument, = self.entry.NXinstrument
        return instrument
    @property
    def entry(self) -> nx.NXentry:
        """
        Returns this nexusfile's entry.
        Raises:
            ValueError if more than one entry is found.
        """
        entry, = self.nxfile.NXentry
        return entry
    @property
    def default_signal(self) -> np.ndarray:
        """
        The numpy array of intensities pointed to by the signal attribute in the
        nexus file.
        """
        return self.default_nxdata[self.default_signal_name].nxdata
    @property
    def default_axis(self) -> np.ndarray:
        """
        Returns the nxdata associated with the default axis.
        """
        return self.default_nxdata[self.default_axis_name].nxdata
    @property
    def default_signal_name(self):
        """
        Returns the name of the default signal.
        """
        return self.default_nxdata.signal
    @property
    def default_axis_name(self) -> str:
        """
        Returns the name of the default axis.
        """
        # Follows the nexus "default" chain: entry -> default NXdata -> axes.
        return self.entry[self.entry.default].axes
    @property
    def default_nxdata_name(self):
        """
        Returns the name of the default nxdata.
        """
        return self.entry.default
    @property
    def default_nxdata(self) -> nx.NXdata:
        """
        Returns the default NXdata.
        """
        return self.entry[self.default_nxdata_name]
    # A hack to tell pylint that this class is still meant to be abstract.
    # Children must override default_axis_type; calling the abstract parent
    # implementation here keeps pylint's abstract-class detection happy.
    @property
    @abstractmethod
    def default_axis_type(self) -> str:
        return super().default_axis_type()
class I07Nexus(NexusBase):
    """
    This class extends NexusBase with methods useful for scraping information
    from nexus files produced at the I07 beamline at Diamond.
    """
    # Detector identifiers as they appear in the nexus entry. Which one is
    # present depends on when the scan was recorded.
    excalibur_detector_2021 = "excroi"
    excalibur_04_2022 = "exr"

    @property
    def local_data_path(self) -> str:
        """
        The local path to the data (.h5) file. Note that this isn't in the
        NexusBase class because it need not be reasonably expected to point at a
        .h5 file.
        Raises:
            FileNotFoundError if the data file cant be found.
        """
        file = _try_to_find_files(
            [self._src_data_path], [self.local_path])[0]
        return file

    @property
    def detector_name(self) -> str:
        """
        Returns the name of the detector that we're using. Because life sucks,
        this is a function of time.
        Raises:
            NotImplementedError if the detector is not recognised.
        """
        if "excroi" in self.entry:
            return I07Nexus.excalibur_detector_2021
        if "exr" in self.entry:
            return I07Nexus.excalibur_04_2022
        # Couldn't recognise the detector.
        raise NotImplementedError()

    @property
    def default_axis_name(self) -> str:
        """
        Returns the name of the default axis.
        """
        return self.entry[self.entry.default].axes

    @property
    def default_axis_type(self) -> str:
        """
        Returns the type of our default axis, either being 'q', 'th' or 'tth'.
        """
        if self.default_axis_name == 'qdcd':
            return 'q'
        if self.default_axis_name == 'diff1chi':
            return 'th'
        if self.default_axis_name == 'diff1delta':
            return 'tth'
        # It's also possible that self.default_axis_name isn't recorded in some
        # nexus files. Just in case, let's check the length of diff1delta.
        if isinstance(self.instrument["diff1delta"].value.nxdata, np.ndarray):
            return 'tth'
        # NOTE(review): falls through and implicitly returns None for an
        # unrecognised axis; the parser treats that as an unsupported type.

    def _get_ith_region(self, i: int):
        """
        Returns the ith region of interest found in the .nxs file.
        Args:
            i:
                The region of interest number to return. This number should
                match the ROI name as found in the .nxs file (generally not 0
                indexed).
        Returns:
            The ith region of interest found in the .nxs file.
        """
        x_1 = self.detector[self._get_region_bounds_key(i, 'x_1')][0]
        x_2 = self.detector[self._get_region_bounds_key(i, 'Width')][0] + x_1
        y_1 = self.detector[self._get_region_bounds_key(i, 'y_1')][0]
        y_2 = self.detector[self._get_region_bounds_key(i, 'Height')][0] + y_1
        return Region(x_1, x_2, y_1, y_2)

    @property
    def _excalibur_roi_dict(self) -> dict:
        """
        Parses the JSON-ish excalibur ROI string stored in the nexus file
        under "ex_rois/excalibur_ROIs" and returns it as a dictionary.
        This is shared by signal_regions and background_regions, which
        previously duplicated this parsing inline.
        """
        # Make sure our code executes for bytes and strings.
        try:
            json_str = self.instrument[
                "ex_rois/excalibur_ROIs"]._value.decode("utf-8")
        except AttributeError:
            json_str = self.instrument[
                "ex_rois/excalibur_ROIs"]._value
        # This is badly formatted and cant be loaded by the json lib. We
        # need to make a series of modifications.
        # NOTE(review): this strips *every* 'u' character, not only the u''
        # unicode prefixes it targets — it assumes ROI keys/values never
        # contain a 'u'. TODO confirm against a real file before changing.
        json_str = json_str.replace('u', '')
        json_str = json_str.replace("'", '"')
        return json.loads(json_str)

    @property
    def signal_regions(self) -> List[Region]:
        """
        Returns a list of region objects that define the location of the signal.
        Currently there is nothing better to do than assume that this is a list
        of length 1.
        """
        if self.detector_name == I07Nexus.excalibur_detector_2021:
            return [self._get_ith_region(i=1)]
        if self.detector_name == I07Nexus.excalibur_04_2022:
            # The first recorded ROI ("Region_1") is the signal region.
            return [Region.from_dict(self._excalibur_roi_dict['Region_1'])]
        raise NotImplementedError()

    @property
    def background_regions(self) -> List[Region]:
        """
        Returns a list of region objects that define the location of background.
        Currently we just ignore the zeroth region and call the rest of them
        background regions.
        """
        if self.detector_name == I07Nexus.excalibur_detector_2021:
            return [self._get_ith_region(i)
                    for i in range(2, self._number_of_regions+1)]
        if self.detector_name == I07Nexus.excalibur_04_2022:
            # Skip the zeroth (signal) ROI; only the next one is currently
            # treated as background (hence the [1:2] slice).
            bkg_roi_list = list(self._excalibur_roi_dict.values())[1:2]
            return [Region.from_dict(x) for x in bkg_roi_list]
        raise NotImplementedError()

    @property
    def probe_energy(self):
        """
        Returns the energy of the probe particle parsed from this NexusFile.
        """
        return float(self.instrument.dcm1energy.value)

    @property
    def transmission(self):
        """
        Proportional to the fraction of probe particles allowed by an attenuator
        to strike the sample.
        """
        if 'filterset' in self.instrument:
            return float(self.instrument.filterset.transmission)
        elif 'fatt' in self.instrument:
            return np.array(self.instrument.fatt.transmission)
        else:
            debug.log(f"\n No transmission value found in expected location, set transmission to 1 \n")
            return float(1)

    @property
    def detector_distance(self):
        """
        Returns the distance between sample and detector.
        """
        return float(self.instrument.diff1detdist.value)

    @property
    def _src_data_path(self):
        """
        Returns the raw path to the data file. This is useless if you aren't on
        site, but used by islatu to guess where you've stored the data file
        locally.
        """
        # This is far from ideal; there currently seems to be no standard way
        # to refer to point at information stored outside of the nexus file.
        # If you're a human, it's easy enough to find, but with code this is
        # a pretty rubbish task. Here I just grab the first .h5 file I find
        # and run with it.
        found_h5_files = []

        def recurse_over_nxgroups(nx_object, found_h5_files):
            """
            Recursively looks for nxgroups in nx_object that, when cast to a
            string, end in .h5.
            """
            for key in nx_object:
                new_obj = nx_object[key]
                if key == "data":
                    if new_obj.tree[8:-9].endswith(".h5"):
                        found_h5_files.append(new_obj.tree[8:-9])
                if str(new_obj).endswith(".h5"):
                    found_h5_files.append(str(new_obj))
                if str(new_obj).endswith(".h5['/data']"):
                    found_h5_files.append(str(new_obj)[:-9])
                if isinstance(new_obj, nx.NXgroup):
                    recurse_over_nxgroups(new_obj, found_h5_files)

        recurse_over_nxgroups(self.nxfile, found_h5_files)
        return found_h5_files[0]

    @property
    def _region_keys(self) -> List[str]:
        """
        Parses all of the detector's dictionary keys and returns all keys
        relating to regions of interest.
        """
        return [key for key in self.detector.keys() if key.startswith("Region")]

    @property
    def _number_of_regions(self) -> int:
        """
        Returns the number of regions of interest described by this nexus file.
        This *assumes* that the region keys take the form f'region_{an_int}'.
        """
        split_keys = [key.split('_') for key in self._region_keys]
        return max([int(split_key[1]) for split_key in split_keys])

    def _get_region_bounds_key(self, region_no: int, kind: str) -> str:
        """
        Returns the detector key relating to the bounds of the region of
        interest corresponding to region_no.
        Args:
            region_no:
                An integer corresponding the the particular region of interest
                we're interested in generating a key for.
            kind:
                The kind of region bounds keys we're interested in. This can
                take the values:
                    'x_1', 'width', 'y_1', 'height'
                where '1' can be replaced with 'start' and with/without caps on
                first letter of width/height.
        Raises:
            ValueError if 'kind' argument is not one of the above.
        Returns:
            The detector key (a string) for the requested bound.
        """
        # Note that the x, y swapping is a quirk of the nexus standard, and is
        # related to which axis on the detector varies most rapidly in memory.
        if kind in ('x_1', 'x_start'):
            insert = 'X'
        elif kind in ('width', 'Width'):
            insert = 'Width'
        elif kind in ('y_1', 'y_start'):
            insert = 'Y'
        elif kind in ('height', 'Height'):
            insert = 'Height'
        else:
            raise ValueError(
                "Didn't recognise 'kind' argument.")
        return f"Region_{region_no}_{insert}"
def i07_dat_to_dict_dataframe(file_path):
    """
    Parses a .dat file recorded by I07, returning a [now mostly obsolete] tuple
    containing a metadata dictionary and a pandas dataframe of the data.
    Though outdated, this is still a handy way to parse the DCD normalization
    .dat file.
    Args:
        (:py:attr:`str`): The ``.dat`` file to be read.
    Returns:
        :py:attr:`tuple`: Containing:
            - :py:attr:`dict`: The metadata from the ``.dat`` file.
            - :py:class:`pandas.DataFrame`: The data from the ``.dat`` file.
    """
    # Neither the data nor the metadata are being read yet.
    data_reading = False
    metadata_reading = False
    # Create the dictionaries to be populated.
    data_dict = {}
    metadata_dict = {}
    # Create the list to be filled with lists for each line.
    data_lines = []
    titles = []
    # 'with' guarantees the handle is closed even if parsing raises.
    with open(file_path, "r", encoding='utf-8') as f_open:
        for line in f_open:
            # This string indicates the start of the metadata.
            if "<MetaDataAtStart>" in line:
                metadata_reading = True
            # This string indicates the end of the metadata.
            if "</MetaDataAtStart>" in line:
                metadata_reading = False
            # This string indicates the start of the data.
            if " &END" in line:
                data_reading = True
                # Set counter to minus two, such that when it reaches the
                # data it is 0.
                count = -2
            # When the metadata section is being read populate metadata_dict.
            if metadata_reading:
                if "=" in line:
                    metadata_in_line = []
                    for i in line.split("=")[1:]:
                        try:
                            j = float(i)
                        except ValueError:
                            j = i
                        metadata_in_line.append(j)
                    metadata_dict[line.split("=")[0]] = metadata_in_line
            # When the data section is being read, make the list of the zeroth
            # line the titles and everything after is the data_lines list of
            # lists.
            if data_reading:
                count += 1
                if count == 0:
                    titles = line.split()
                if count > 0:
                    data_lines.append(line.split())
    # Transpose data_lines into a dict keyed by the titles. Columns beyond
    # the titles get stringified integer keys "0", "1", ... .
    # (Bug fix: the counter used to be reset inside the loop, so every
    # untitled column collided on the key "0", silently overwriting.)
    unnamed_count = 0
    for j, _ in enumerate(data_lines[0]):
        list_to_add = []
        for i, _ in enumerate(data_lines):
            try:
                list_to_add.append(float(data_lines[i][j]))
            except ValueError:
                list_to_add.append(data_lines[i][j])
        if j >= len(titles):
            data_dict[str(unnamed_count)] = list_to_add
            unnamed_count += 1
        else:
            data_dict[titles[j]] = list_to_add
    return metadata_dict, pd.DataFrame(data_dict)
def load_images_from_h5(h5_file_path, transpose=False):
    """
    Loads images from a .h5 file.
    Args:
        h5_file_path:
            Path to the h5 file from which we're loading images.
        transpose:
            Should we take the transpose of these images? Defaults to False.
    Returns:
        A list of Image objects, one per frame in the file's 'data' dataset.
    """
    # The detector frames live under this dataset name inside the .h5 file.
    internal_data_path = 'data'
    images = []
    debug.log("Loading images from file " + h5_file_path, unimportance=0)
    with h5py.File(h5_file_path, "r") as file_handle:
        # [()] reads the whole dataset into memory as a numpy array.
        dataset = file_handle[internal_data_path][()]
        num_images = dataset.shape[0]
        # Prepare to show a progress bar for image loading.
        debug.log(f"Loading {num_images} images.", unimportance=2)
        for i in range(num_images):
            debug.log("Currently loaded " + str(i+1) + " images.", end="\r")
            images.append(Image(dataset[i], transpose=transpose))
        # This line is necessary to prevent overwriting due to end="\r".
        debug.log("")
        debug.log(f"Loaded all {num_images} images.", unimportance=2)
    return images
def i07_nxs_parser(file_path: str):
    """
    Parses a .nxs file acquired from the I07 beamline at diamond, returning an
    instance of Scan2D. This process involves loading the images contained in
    the .h5 file pointed at by the .nxs file, as well as retrieving the metadata
    from the .nxs file that is relevant for XRR reduction.
    Args:
        file_path:
            Path to the .nxs file.
    Returns:
        An initialized Scan2D object containing all loaded detector frames, as
        well as the relevant metadata from the .nxs file.
    Raises:
        NotImplementedError:
            If the detector or the default axis type is unrecognised.
    """
    # Use the magical parser class that does everything for us.
    i07_nxs = I07Nexus(file_path)
    # Load the images, taking a transpose if necessary (because which axis is
    # x and which is why is determined by fast vs slow detector axes in memory).
    # Note: detector_name itself raises NotImplementedError for unknown
    # detectors, so `images` is always bound past this point.
    if i07_nxs.detector_name in [
            I07Nexus.excalibur_detector_2021,
            I07Nexus.excalibur_04_2022]:
        images = load_images_from_h5(i07_nxs.local_data_path, transpose=True)
    # The dependent variable.
    rough_intensity = i07_nxs.default_signal
    # Poisson (counting) uncertainty estimate for the raw intensities.
    rough_intensity_e = np.sqrt(rough_intensity)
    # The independent variable.
    axis = i07_nxs.default_axis
    # We have to load the Data according to what our independent variable is.
    if i07_nxs.default_axis_type == 'q':
        data = Data(rough_intensity, rough_intensity_e, i07_nxs.probe_energy,
                    q_vectors=axis)
    elif i07_nxs.default_axis_type == 'th':
        data = Data(rough_intensity, rough_intensity_e, i07_nxs.probe_energy,
                    theta=axis)
    elif i07_nxs.default_axis_type == 'tth':
        # A two-theta axis is halved to get theta.
        data = Data(rough_intensity, rough_intensity_e, i07_nxs.probe_energy,
                    theta=axis/2)
    else:
        raise NotImplementedError(
            f"{i07_nxs.default_axis_type} is not a supported axis type.")
    # Returns the Scan2D object
    return Scan2D(data, i07_nxs, images)
def _try_to_find_files(filenames: List[str],
                       additional_search_paths: List[str]):
    """
    Check that data files exist if the file parsed by parser pointed to a
    separate file containing intensity information. If the intensity data
    file could not be found in its original location, check a series of
    probable locations for the data file. If the data file is found in one
    of these locations, update file's entry in self.data.

    Args:
        filenames:
            Original recorded paths of the data files. A single path may be
            passed instead of a list. Note: entries are normalised in place
            (backslashes replaced), so the caller's list is mutated.
        additional_search_paths:
            Extra local directories to search besides the current working
            directory.

    Returns:
        :py:attr:`list` of :py:attr:`str`:
            List of the corrected, actual paths to the files.

    Raises:
        FileNotFoundError: If any data file cannot be located.
    """
    found_files = []
    # If we had only one file, make a list out of it.
    if not hasattr(filenames, "__iter__"):
        filenames = [filenames]
    cwd = os.getcwd()
    start_dirs = [
        cwd,  # maybe file is stored near the current working dir
        # To search additional directories, add them in here manually.
    ]
    start_dirs.extend(additional_search_paths)
    local_start_directories = [x.replace('\\', '/') for x in start_dirs]
    num_start_directories = len(local_start_directories)
    # Extend the search with every ancestor directory of each start dir.
    for i in range(num_start_directories):
        split_srch_path = local_start_directories[i].split('/')
        for j in range(len(split_srch_path)):
            extra_path = '/'.join(split_srch_path[:-(j+1)])
            local_start_directories.append(extra_path)
    for i, _ in enumerate(filenames):
        # Better to be safe... Note: windows is happy with / even though it
        # defaults to \
        filenames[i] = str(filenames[i]).replace('\\', '/')
        # Maybe we can see the file in its original storage location?
        if os.path.isfile(filenames[i]):
            found_files.append(filenames[i])
            continue
        # If not, maybe it's stored locally? If the file was stored at
        # location /a1/a2/.../aN/file originally, for a local directory LD,
        # check locations LD/aj/aj+1/.../aN for all j<N and all LD's of
        # interest. This algorithm is a generalization of Andrew McCluskey's
        # original approach.
        # Now generate a list of all paths that we'd like to check.
        candidate_paths = []
        split_file_path = str(filenames[i]).split('/')
        for j in range(len(split_file_path)):
            local_guess = '/'.join(split_file_path[j:])
            for start_dir in local_start_directories:
                candidate_paths.append(
                    os.path.join(start_dir, local_guess))
        # Iterate over each of the candidate paths to see if any of them
        # contain the data file we're looking for.
        found_file = False
        for candidate_path in candidate_paths:
            if os.path.isfile(candidate_path):
                # File found - add the correct file location to found_files.
                # (Was `found_file = not found_file`, an obfuscated True.)
                found_file = True
                found_files.append(candidate_path)
                debug.log("Data file found at " + candidate_path + ".")
                break
        # If we didn't find the file, tell the user.
        if not found_file:
            raise FileNotFoundError(
                "The data file with the name " + filenames[i] + " could "
                "not be found. The following paths were searched:\n" +
                "\n".join(candidate_paths)
            )
    return found_files
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,176
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/__init__.py
|
# Semantic version components for the islatu package.
MAJOR = 1
MINOR = 0
MICRO = 7
# Assemble the dotted version string, e.g. "1.0.7".
__version__ = ".".join(str(part) for part in (MAJOR, MINOR, MICRO))
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,177
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/debug.py
|
"""
Islatu's simple Debug class.
"""
# Verbosity used by the module-level logger unless overridden.
DEFAULT_LOG_LEVEL = 1


class Debug:
    """
    A simple logger.

    Attrs:
        logging_level:
            Current logging level. Higher means more unimportant messages
            will be shown.
    """

    def __init__(self, logging_level):
        self.logging_level = logging_level

    def log(self, log_string, unimportance: int = 1, **kwargs):
        """
        Prints to stdout if self.logging_level >= unimportance.

        Args:
            log_string:
                The string to be printed.
            unimportance:
                A measure of unimportance assigned to the printing of this
                string. Very unimportant messages require a larger logging
                level to be printed. Defaults to 1.
        """
        # Guard clause: messages too unimportant for the current level are
        # silently dropped.
        if unimportance > self.logging_level:
            return
        print(log_string, **kwargs)


# Shared module-level logger used throughout islatu.
debug = Debug(DEFAULT_LOG_LEVEL)
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,178
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/scan.py
|
"""
This module contains the Scan and Scan2D classes. A Scan is a measurement and so
inherits from MeasurementBase. An instance of Scan contains scan metadata, as
well as a suite of methods useful for data correction, uncertainty calculations
and the like.
A Scan2D is a Scan whose Data object's intensity values are computed from an
image captured by an area detector. Many of Scan's methods are overloaded to
make use of the additional information provided by the area detector, and extra
image manipulation methods are included in Scan2D.
"""
from typing import List
import numpy as np
from scipy.interpolate import splev
from islatu import corrections
from islatu.metadata import Metadata
from islatu.data import Data, MeasurementBase
from islatu.image import Image
class Scan(MeasurementBase):
    """
    A class used to store reflectometry scans taken with a point detector.
    """

    def __init__(self, data: Data, metadata: Metadata) -> None:
        # Initialize the MeasurementBase from Data. This is much simpler than
        # passing a million arguments directly to the scan.
        super().__init__(data.intensity, data.intensity_e, data.energy,
                         metadata, data.theta)

    def subsample_q(self, q_min=0, q_max=float('inf')):
        """
        Delete data points less than q_min and more than q_max.
        Args:
            q_min:
                The minimum q to be included in this scan. Defaults to 0 Å.
            q_max:
                The maximum q to be included in this scan. Defaults to inf Å.
        """
        # A place to store all the indices violating our condition on q.
        illegal_q_indices = np.where(
            (self.q_vectors <= q_min) | (self.q_vectors >= q_max)
        )[0]
        # [0] necessary because np.where returns a tuple of arrays of length 1.
        # This is a quirk of np.where – I don't think it's actually designed to
        # be used like this, and they encourage np.asarray(condition).nonzero()
        # Now remove all data points at these qs.
        self.remove_data_points(illegal_q_indices)

    def transmission_normalisation(self):
        """
        Perform the transmission correction.

        The metadata's transmission may be a scalar (one attenuation value for
        the whole scan) or an array (one value per data point); both cases are
        handled.
        """
        # np.asarray copes with plain floats, sequences and ndarrays alike.
        # (Previously `len(...)` was called directly, which raised TypeError
        # for the scalar transmission returned by some metadata sources.)
        transmission = np.asarray(self.metadata.transmission)
        if transmission.size == 1:
            self.intensity /= float(transmission)
            self.intensity_e /= float(transmission)
        else:
            self.intensity /= transmission
            self.intensity_e /= transmission

    def qdcd_normalisation(self, itp):
        """
        Perform normalisation by DCD variance.
        Args:
            itp (:py:attr:`tuple`): Containing interpolation knots
                (:py:attr:`array_like`), B-spline coefficients
                (:py:attr:`array_like`), and degree of spline (:py:attr:`int`).
        """
        self.intensity /= splev(self.q_vectors, itp)
        self.intensity_e /= splev(self.q_vectors, itp)

    def footprint_correction(self, beam_width, sample_size):
        """
        Class method for :func:`islatu.corrections.footprint_correction`.
        Args:
            beam_width (:py:attr:`float`): Width of incident beam, in metres.
            sample_size (:py:class:`uncertainties.core.Variable`): Width of
                sample in the dimension of the beam, in metres.
            theta (:py:attr:`float`): Incident angle, in degrees.
        """
        frac_of_beam_sampled = corrections.footprint_correction(
            beam_width, sample_size, self.theta)
        self.intensity /= frac_of_beam_sampled
        self.intensity_e /= frac_of_beam_sampled
class Scan2D(Scan):
    """
    Attributes:
        data (:py:attr:`islatu.data.Data`):
            The intensity as a function of Q data for this scan.
        metadata (:py:attr:`islatu.metadata.Metadata`):
            This scan's metadata.
        images (:py:attr:`list` of :py:class:`islatu.image.Image`):
            The detector images in the given scan.
    """
    def __init__(self, data: Data, metadata: Metadata, images: List[Image]) \
            -> None:
        super().__init__(data, metadata)
        # One Image per scan point, index-aligned with self.intensity.
        self.images = images
    def crop(self, crop_function, **kwargs):
        """
        Crop every image in images according to crop_function.
        args:
            crop_function (:py:attr:`callable`):
                Cropping function to be used.
            kwargs (:py:attr:`dict`, optional):
                Keyword arguments for the cropping function. Defaults to
                :py:attr:`None`.
            progress (:py:attr:`bool`, optional):
                Show a progress bar. Requires the :py:mod:`tqdm` package.
                Defaults to :py:attr:`True`.
        """
        (vals, stdevs) = (np.zeros(len(self.intensity)),
                          np.zeros(len(self.intensity)))
        # Crop each image in place, then recompute its summed intensity.
        for i, image in enumerate(self.images):
            image.crop(crop_function, **kwargs)
            vals[i], stdevs[i] = self.images[i].sum()
        # Store the recomputed intensity(Q) and its uncertainty.
        self.intensity = np.array(vals)
        self.intensity_e = np.array(stdevs)
    def bkg_sub(self, bkg_sub_function, **kwargs):
        """
        Perform background substraction for each image in a Scan.
        Args:
            bkg_sub_function (:py:attr:`callable`): Background subtraction
                function to be used.
            kwargs (:py:attr:`dict`, optional): Keyword arguments for
                the background subtraction function. Defaults
                to :py:attr:`None`.
            progress (:py:attr:`bool`, optional): Show a progress bar.
                Requires the :py:mod:`tqdm` package. Defaults
                to :py:attr:`True`.
        Returns:
            The per-image background subtraction information, for
            meta-analyses.
        """
        vals, stdevs = np.zeros(
            len(self.intensity)), np.zeros(len(self.intensity))
        # We keep track of the bkg_sub_infos for meta-analyses.
        bkg_sub_info = [
            image.background_subtraction(bkg_sub_function, **kwargs)
            for image in self.images
        ]
        # Also update the image intensities & errors.
        for i, image in enumerate(self.images):
            vals[i], stdevs[i] = image.sum()
        # Store the intensity(Q) to the new value.
        self.intensity = np.array(vals)
        self.intensity_e = np.array(stdevs)
        # Expose the information relating to the background subtraction.
        return bkg_sub_info
    def remove_data_points(self, indices):
        """
        Convenience method for the removal of specific data points by their
        indices. Also removes the corresponding detector images so that
        images stay index-aligned with the intensity arrays.
        Args:
            indices:
                The indices to be removed.
        """
        super().remove_data_points(indices)
        # Delete images in reverse order if you don't like errors.
        for idx in sorted(indices, reverse=True):
            del self.images[idx]
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,179
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/unit/test_refl_profile.py
|
"""
This module tests the islatu.refl_profile module's Profile class.
"""
import numpy as np
from numpy.testing import assert_allclose
from islatu.refl_profile import Profile
from islatu.cropping import crop_to_region
from islatu.background import roi_subtraction
from islatu.scan import Scan2D
def test_profile_data(profile_01: Profile, scan2d_from_nxs_01: Scan2D):
    """
    Make sure that our profile has exactly the same q_vectors as its scan,
    intensities, intensity_e's, etc.
    """
    scan = scan2d_from_nxs_01
    assert profile_01.energy == scan.energy
    assert (profile_01.intensity == scan.intensity).all()
    assert (profile_01.intensity_e == scan.intensity_e).all()
    assert_allclose(profile_01.q_vectors, scan.q_vectors, 1e-5)
    assert_allclose(profile_01.theta, scan.theta, 1e-5)
def test_profile_crop(profile_01: Profile):
    """
    Make sure that the profile's crop method crops its constituent scans'
    images.
    """
    signal_region = profile_01.scans[0].metadata.signal_regions[0]
    profile_01.crop(crop_to_region, region=signal_region)
def test_profile_bkg_sub(profile_01: Profile, scan2d_from_nxs_01: Scan2D):
    """
    Make sure that bkg_sub from the profile is the same as bkg_sub from the
    scan.
    """
    regions = [scan2d_from_nxs_01.metadata.background_regions[0]]
    profile_01.bkg_sub(roi_subtraction, list_of_regions=regions)
    scan2d_from_nxs_01.bkg_sub(roi_subtraction, list_of_regions=regions)
    assert_allclose(profile_01.intensity_e,
                    scan2d_from_nxs_01.intensity_e, 1e-4)
    assert_allclose(profile_01.intensity, scan2d_from_nxs_01.intensity, 1e-4)
def test_profile_subsample_q_01(profile_01: Profile):
    """
    Make sure subsample_q deletes the appropriate things. Because it just calls
    remove_data_points, which has already been tested extensively in test_data,
    we only need to check a couple of values to make sure the right qs have been
    deleted an we know that all the other attributes will have been handled
    correctly.
    """
    original_len = len(profile_01.scans[0].theta)
    # Defaults shouldn't change anything.
    profile_01.subsample_q("404876")
    # Both the scan and the concatenated profile must keep their lengths.
    assert len(profile_01.scans[0].theta) == original_len
    assert len(profile_01.theta) == original_len
def test_subsample_q_02(profile_01: Profile):
    """
    Make sure that we can set just an upper bound. Note that this dataset goes
    from 0.025Å to 0.06Å
    """
    q_max = 0.04
    # Sanity-check preconditions: the cutoff must actually remove something.
    assert max(profile_01.q_vectors) > q_max
    assert max(profile_01.scans[0].q_vectors) > q_max
    profile_01.subsample_q("404876", q_max=q_max)
    assert max(profile_01.q_vectors) <= q_max
    assert max(profile_01.scans[0].q_vectors) <= q_max
def test_subsample_q_03(profile_01: Profile):
    """
    Make sure that we can set a lower bound. Note that this dataset goes from
    0.025Å to 0.06Å.
    """
    q_min = 0.04
    # Sanity-check preconditions: the cutoff must actually remove something.
    assert min(profile_01.q_vectors) < q_min
    assert min(profile_01.scans[0].q_vectors) < q_min
    profile_01.subsample_q("404876", q_min=q_min)
    assert min(profile_01.q_vectors) >= q_min
    assert min(profile_01.scans[0].q_vectors) >= q_min
def test_subsample_q_04(profile_01: Profile):
    """
    Test that we can set both lower and upper bounds.
    """
    # Bounds chosen strictly inside the dataset's 0.025Å–0.06Å range.
    q_min = 0.032
    q_max = 0.051
    profile_01.subsample_q("404876", q_min, q_max)
    assert min(profile_01.q_vectors) >= q_min
    assert max(profile_01.q_vectors) <= q_max
def test_profile_footprint_correction(profile_01: Profile, scan2d_from_nxs_01):
    """
    Assert that calling the footprint_correction method in an instance of
    Profile is the same thing as calling it in all of its constituent Scans.
    Then, if the Scan footprint correction tests pass, then this must also
    work.
    """
    beam_width = 100e-6
    sample_size = 1e-3
    profile_01.footprint_correction(beam_width, sample_size)
    scan2d_from_nxs_01.footprint_correction(beam_width, sample_size)
    assert_allclose(profile_01.intensity, scan2d_from_nxs_01.intensity)
    # Fixed: the original compared profile_01.intensity_e against itself,
    # which is trivially true; compare against the scan's errors instead.
    assert_allclose(profile_01.intensity_e, scan2d_from_nxs_01.intensity_e)
def test_profile_transmission_normalisation(
        profile_01: Profile, scan2d_from_nxs_01: Scan2D):
    """
    Assert that carrying out a transmission normalisation on an instance of
    Profile is the same thing as doing it on each of its constituent scans.
    """
    profile_01.transmission_normalisation()
    scan2d_from_nxs_01.transmission_normalisation()
    assert_allclose(profile_01.intensity, scan2d_from_nxs_01.intensity)
    # Fixed: the original compared profile_01.intensity_e against itself,
    # which is trivially true; compare against the scan's errors instead.
    assert_allclose(profile_01.intensity_e, scan2d_from_nxs_01.intensity_e)
def test_profile_qdcd_normalisation(
        profile_01: Profile, scan2d_from_nxs_01: Scan2D, dcd_norm_01_splev):
    """
    Assert that carrying out the qdcd correction on an instance of Profile is
    the same thing as doing it on each of its constituent scans.
    """
    profile_01.qdcd_normalisation(dcd_norm_01_splev)
    scan2d_from_nxs_01.qdcd_normalisation(dcd_norm_01_splev)
    assert_allclose(profile_01.intensity, scan2d_from_nxs_01.intensity)
    # Fixed: the original compared profile_01.intensity_e against itself,
    # which is trivially true; compare against the scan's errors instead.
    assert_allclose(profile_01.intensity_e, scan2d_from_nxs_01.intensity_e)
def test_concatenate(profile_01: Profile):
    """
    Explicit simple check that concatenate is working. Note that, if it isn't
    working, many other tests would also raise.
    """
    # Overwrite the only scan's intensity; after concatenation the profile's
    # intensity must reflect the new value.
    profile_01.scans[0].intensity = 0
    profile_01.concatenate()
    assert profile_01.intensity == 0
def test_rebin_01(profile_0102: Profile):
    """
    Make sure that we can rebin the data using default parameters.
    """
    initial_length = len(profile_0102.q_vectors)
    profile_0102.rebin()
    # Rebinning pools points into bins, so the dataset must shrink.
    assert initial_length > len(profile_0102.q_vectors)
def test_rebin_02(profile_0102: Profile):
    """
    Now that we know that rebin is doing something, lets make sure that it is
    doing sane things.
    """
    init = np.copy(profile_0102.intensity)
    profile_0102.rebin()
    new = profile_0102.intensity
    # Fixed an operator-precedence bug: the original
    #     big, small = (a, b) if cond else c, d
    # parsed as `big, small = ((a, b) if cond else c), d`, so `big` could be
    # a tuple. Parenthesising the else-branch yields the intended max/min.
    big, small = (init[3], init[8]) if init[3] > init[8] else (init[8], init[3])
    # The rebinned value should lie between the surrounding original values.
    assert small < new[3] and big > new[3]
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,180
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/region.py
|
"""
This module defines the Region object, whose instances define regions of
interest in images.
"""
class Region:
    """
    Instances of this class define rectangular regions of interest in images.
    """
    def __init__(self, x_start, x_end, y_start, y_end):
        # Normalise the bounds so that start <= end along each axis, then
        # coerce to int so the values can be used directly as array indices.
        x_lo, x_hi = sorted((x_start, x_end))
        y_lo, y_hi = sorted((y_start, y_end))
        self.x_start = int(x_lo)
        self.x_end = int(x_hi)
        self.y_start = int(y_lo)
        self.y_end = int(y_hi)
    @property
    def x_length(self):
        """
        Returns the length of the region in the x-direction.
        """
        return self.x_end - self.x_start
    @property
    def y_length(self):
        """
        Returns the length of the region in the y-direction.
        """
        return self.y_end - self.y_start
    @property
    def num_pixels(self):
        """
        Returns the total number of pixels contained in the region.
        """
        return self.x_length * self.y_length
    @classmethod
    def from_dict(cls, region_dict: dict):
        """
        Instantiates a Region from a dictionary with keys in:
        ['x', 'y', 'width', 'height'].
        This is to help loading dictionarys that are generated by calling
        json.loads on the NXcollections found in I07 nexus files as of
        27/04/2022.
        """
        x_origin = int(region_dict['x'])
        y_origin = int(region_dict['y'])
        return cls(x_origin, x_origin + int(region_dict['width']),
                   y_origin, y_origin + int(region_dict['height']))
    def __eq__(self, other):
        """
        Allows for equality checks to be made between instances of Region.
        """
        if not isinstance(other, Region):
            return False
        return (self.x_start, self.x_end, self.y_start, self.y_end) == \
            (other.x_start, other.x_end, other.y_start, other.y_end)
    def __str__(self):
        return (f"x_start: {self.x_start}, x_end: {self.x_end}, "
                f"y_start: {self.y_start}, y_end: {self.y_end}.")
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,181
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/system/_test_cli.py
|
"""
This module tests the command line interface to islatu.
"""
import os
import subprocess
import numpy as np
def test_process_xrr_01(process_xrr_path):
    """
    Make sure that we raise a FileNotFoundError when there's no file to be
    processed by the process_xrr script.
    """
    proc = subprocess.run(
        [process_xrr_path], capture_output=True, text=True
    )
    # Pick the exception type out of the traceback printed to stderr.
    # NOTE(review): this assumes the exception line is always the 4th line of
    # stderr — brittle against traceback-format changes; confirm if it breaks.
    error_type = proc.stderr.split('\n')[3].split(':')[0].strip()
    assert error_type == "FileNotFoundError"
def test_process_xrr_02(process_xrr_path, path_to_resources, tmp_path,
                        old_dcd_data):
    """
    Make sure that the processing is running, and that it is producing
    acceptable results.
    """
    yaml_path = path_to_resources + os.sep + "dcd.yaml"
    proc = subprocess.run(
        [process_xrr_path, '-d', path_to_resources, '-y', yaml_path,
         '-o', tmp_path],
        capture_output=True, text=True
    )
    # Make sure no errors were thrown during reduction.
    # This will only print if the assertion fails.
    print(proc.stdout)
    print(proc.stderr)
    # NOTE(review): line 204 of stdout is assumed to carry the final status
    # message — tied to the script's exact logging output; verify on change.
    assert proc.stdout.split('\n')[204].strip().startswith(
        "Reduced data stored at "
    )
    # Make sure that the saved data is correct.
    # Columns: q, reflectivity, reflectivity error — compared against a
    # previously validated reduction (old_dcd_data).
    reduced_data = np.loadtxt(os.path.join(tmp_path, os.listdir(tmp_path)[0]))
    assert np.allclose(reduced_data[0], old_dcd_data[0], 1e-3)
    assert np.allclose(reduced_data[1], old_dcd_data[1], 1e-3)
    assert np.allclose(reduced_data[2], old_dcd_data[2], 1e-3)
def test_process_xrr_03(process_xrr_path, path_to_resources, tmp_path,
                        old_dcd_data):
    """
    Make sure that we can subsample q, and that we can select only specific
    scan numbers.
    """
    # NOTE(review): this body is identical to test_process_xrr_02 and passes
    # no subsample-q or scan-selection options, so it does not test what its
    # docstring claims — presumably unfinished; confirm the intended CLI flags.
    yaml_path = path_to_resources + os.sep + "dcd.yaml"
    proc = subprocess.run(
        [process_xrr_path, '-d', path_to_resources, '-y', yaml_path,
         '-o', tmp_path], capture_output=True, text=True)
    # Make sure no errors were thrown during reduction.
    # This will only print if the assertion fails.
    print(proc.stdout)
    print(proc.stderr)
    assert proc.stdout.split('\n')[204].strip().startswith(
        "Reduced data stored at "
    )
    # Make sure that the saved data is correct.
    reduced_data = np.loadtxt(os.path.join(tmp_path, os.listdir(tmp_path)[0]))
    assert np.allclose(reduced_data[0], old_dcd_data[0], 1e-3)
    assert np.allclose(reduced_data[1], old_dcd_data[1], 1e-3)
    assert np.allclose(reduced_data[2], old_dcd_data[2], 1e-3)
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,182
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/background.py
|
"""
Background substraction is a necessary component of reflectometry reduction,
where the background scattering is removed from the reflected intensity.
Herein are some functions to enable that for a two-dimensional detector image,
as well as simple dataclasses in which we can store some information relating to
the background subtraction, and any fitting that we might have carried out.
"""
from dataclasses import dataclass
from typing import Callable, List
import numpy as np
from scipy.stats import norm
from scipy.optimize import curve_fit
from .region import Region
from .image import Image
@dataclass
class FitInfo:
    """
    A simple dataclass in which we can store data relating to the quality of a
    fit.
    """
    # Optimal parameter values returned by the fit (e.g. curve_fit's popt).
    popt: np.ndarray
    # Covariance matrix of the fitted parameters.
    pcov: np.ndarray
    # The model function that was fitted.
    fit_function: Callable
@dataclass
class BkgSubInfo:
    """
    A simple data class in which we can store information relating to a
    background subtraction.
    """
    # Background level per pixel.
    bkg: float
    # Uncertainty on the per-pixel background.
    bkg_e: float
    # The function used to carry out the subtraction (for provenance).
    bkg_sub_function: Callable
    # Populated only by fit-based subtraction methods; None otherwise.
    fit_info: FitInfo = None
def roi_subtraction(image, list_of_regions: List[Region]):
    """
    Carry out background subtraction by taking a series of rectangular regions
    of interested (ROIs) as being fair Poissonian measurements of the
    background.
    Args:
        image:
            The islatu.image.Image object from which we should subtract
            background from.
        list_of_regions:
            A list of instances of the Regions class corresponding to background
            regions.
    """
    # Accept a single Region for convenience by wrapping it in a list.
    if isinstance(list_of_regions, Region):
        list_of_regions = [list_of_regions]
    # Accumulate the total counts and total pixel count over every ROI.
    total_counts = 0
    pixel_count = 0
    for roi in list_of_regions:
        patch = image.array_original[
            int(roi.x_start):int(roi.x_end),
            int(roi.y_start):int(roi.y_end)
        ]
        total_counts += np.sum(patch)
        pixel_count += roi.num_pixels
    # Poisson statistics: the error on the summed counts is its square root,
    # so only one sqrt is needed for all regions combined.
    counts_error = np.sqrt(total_counts)
    if counts_error == 0:
        # Avoid a zero uncertainty when no counts were recorded at all.
        counts_error = 1
    # Convert the totals into per-pixel mean background and its uncertainty.
    return BkgSubInfo(
        total_counts / pixel_count,
        counts_error / pixel_count,
        roi_subtraction,
    )
def univariate_normal(data, mean, sigma, offset, factor):
    """
    Produce a univariate normal distribution.
    Args:
        data (:py:attr:`array_like`): Abscissa data.
        mean (:py:attr:`float`): Mean (horizontal).
        sigma (:py:attr:`float`): Variance (horizontal).
        offset (:py:attr:`float`): Offset from the 0 for the ordinate, this is
            the background level.
        factor (:py:attr:`float`): Multiplicative factor for area of normal
            distribution.
    Returns:
        :py:attr:`array_like`: Ordinate data for univariate normal distribution.
    """
    # Evaluate the normal density at each abscissa point, flatten to 1D, and
    # apply the area scaling and the constant background offset.
    density = norm(loc=mean, scale=sigma).pdf(data)
    return density.flatten() * factor + offset
def fit_gaussian_1d(image: Image, params_0=None, bounds=None, axis=0):
    """
    Fit a one-dimensional Gaussian function with some ordinate offset to an
    image with uncertainty. This is achieved by averaging in a given ``axis``
    before performing the fit. Return the results, and index of the offset.
    Args:
        image:
            The islatu image object to fit.
        params_0 (:py:attr:`list`, optional):
            An initial guess at the parameters. Defaults to values based on the
            image.
        bounds (:py:attr:`list` of :py:attr:`tuple`, optional):
            Bounds for the fitting. Defaults to values based on the image.
        axis (:py:attr:`int`):
            The dimension along which the averaging will be performed.
    Returns:
        :py:attr:`tuple`: Containing:
            - :py:attr:`array_like`: The results (with uncertainties) for each
                of the 6 parameters fit.
            - :py:attr:`int`: The index of the offset.
            - :py:attr:`None`: As it is not possible to describe the reflected
                peak width.
    """
    arr, arr_e = image.array, image.array_e
    ordinate = arr.mean(axis=axis)
    # Now we can generate an array of errors.
    ordinate_e = np.sqrt(np.mean(arr_e**2, axis=axis))
    # The largest recorded value is used both as the default amplitude guess
    # and to build the default bounds. Compute it unconditionally so that the
    # default bounds can still be constructed when the caller supplies
    # params_0 (previously this raised a NameError, as scale0 was only bound
    # inside the `params_0 is None` branch).
    scale0 = arr.max()
    # Setting default values.
    if params_0 is None:
        # Now we generate the initial values for our Gaussian fit.
        # These values are crucial – as this is a high dimensional fitting
        # problem, it is likely that we'll get stuck in a local minimum if these
        # aren't good.
        # Guess that the Gaussian mean is at the most intense mean pixel value.
        mean0 = np.argmax(ordinate)
        # Guess that the standard deviation is a single pixel.
        sdev0 = 1
        # Guess that the background (offset) is the median pixel value.
        offset0 = np.median(ordinate)
        params_0 = [mean0, sdev0, offset0, scale0]
    if bounds is None:
        bounds = ([0, 0, 0, 0],
                  [ordinate.shape[0], ordinate.shape[0], scale0, scale0 * 10])
    # Perform the fitting.
    fit_popt_pcov = curve_fit(
        univariate_normal,
        np.arange(0, ordinate.shape[0], 1), ordinate, bounds=bounds,
        sigma=ordinate_e, p0=params_0, maxfev=2000 * (len(params_0) + 1))
    fit_info = FitInfo(fit_popt_pcov[0], fit_popt_pcov[1], univariate_normal)
    # Determine uncertainty from covarience matrix.
    # Note: the stddev of the fit Gaussian can be accessed via popt[1].
    p_sigma = np.sqrt(np.diag(fit_info.pcov))
    return BkgSubInfo(fit_info.popt[2], p_sigma[2], fit_gaussian_1d, fit_info)
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,183
|
DiamondLightSource/islatu
|
refs/heads/master
|
/setup.py
|
import io
from glob import glob
from os.path import basename, dirname, join, splitext, abspath
from setuptools import find_packages
from setuptools import setup
THIS_DIRECTORY = abspath(dirname(__file__))
# Read the README with an explicit encoding so the build does not depend on
# the platform's default locale.
with io.open(join(THIS_DIRECTORY, 'README.md'), encoding='utf-8') as f:
    LONG_DESCRIPTION = f.read()
REQUIREMENTS = [
    "wheel",
    "numpy",
    "scipy",
    "coverage",
    "pandas",
    "pyyaml",
    "nexusformat",
    "pytest",
    "pytest-lazy-fixture",
    "nbsphinx",
    "jupyter-sphinx",
    "jupyterlab",
    "ipywidgets",
    "pytest-cov",
]
setup(
    name='islatu',
    version='1.0.7',
    license='MIT',
    description='A package for the reduction of reflectometry data.',
    author='Richard Brearton',
    author_email='richardbrearton@gmail.com',
    long_description=LONG_DESCRIPTION,
    # Fixed typo: was `long_decription_content_type`, which setuptools
    # silently ignored, so PyPI would not render the README as markdown.
    long_description_content_type='text/markdown',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Chemistry',
        'Topic :: Scientific/Engineering :: Physics'
    ],
    setup_requires=REQUIREMENTS,
    install_requires=REQUIREMENTS
)
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,184
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/unit/test_data.py
|
"""
Module for testing the Data class, and the MeasurementBase class.
"""
import pytest
from pytest_lazyfixture import lazy_fixture as lazy
import numpy as np
from islatu.data import Data, MeasurementBase
from islatu.io import I07Nexus
from islatu.scan import Scan2D
# Fairly obvious disable for testing: we also need to test protected attrs.
# pylint: disable=protected-access
@pytest.mark.parametrize(
    'data',
    [lazy('generic_data_01'), lazy('generic_data_02')]
)
class TestDataSimple:
    """
    Simple tests for the Data class that don't require any additional fixtures.
    """
    def test_reflectivity_max(self, data: Data):
        """
        Make sure that max(reflectivity) is 1.
        """
        # Reflectivity is intensity normalised by its maximum value.
        assert max(data.reflectivity) == 1
@pytest.mark.parametrize(
    'data, correct_intensity',
    [(lazy('generic_data_01'), np.arange(1100, 300, -45)[:10]),
     (lazy('generic_data_02'), (np.arange(11100012, 0, -12938)[:6]))]
)
def test_intensity_access(data, correct_intensity):
    """
    Make sure we can access data.intensity
    """
    # The expected arrays mirror how the fixtures were constructed.
    assert (data.intensity == correct_intensity).all()
@pytest.mark.parametrize(
    'data, correct_intensity_e',
    [(lazy('generic_data_01'), np.sqrt(np.arange(1100, 300, -45)[:10])),
     (lazy('generic_data_02'), np.sqrt(np.arange(11100012, 0, -12938)[:6]))]
)
def test_intensity_e_access(data, correct_intensity_e):
    """
    Make sure we can access the I_e attribute.
    """
    # Poisson counting statistics: intensity errors are sqrt(intensity).
    assert(data.intensity_e == correct_intensity_e).all()
@pytest.mark.parametrize(
    'data,correct_energy',
    [(lazy('generic_data_01'), 12.5), (lazy('generic_data_02'), 8.04)])
def test_energy_access(data: Data, correct_energy):
    """
    Make sure we can access the data.energy attribute, and that it has the
    correct value.
    """
    assert data.energy == correct_energy
@pytest.mark.parametrize(
    'data, correct__theta',
    [(lazy('generic_data_01'), None), (lazy('generic_data_02'), np.arange(6))]
)
def test__theta_access(data: Data, correct__theta):
    """
    Make sure that we can access the _theta attribute, and that it has the
    correct values.
    """
    # generic_data_01 was built from q-values, so its _theta is None;
    # generic_data_02 was built from theta values directly.
    if correct__theta is not None:
        assert (data._theta == correct__theta).all()
    else:
        assert data._theta is correct__theta
@pytest.mark.parametrize(
    'data, correct__q',
    [
        (lazy('generic_data_01'), np.arange(10)/10),
        (lazy('generic_data_02'), None)
    ]
)
def test__q_access(data: Data, correct__q):
    """
    Make sure that we can access the data._q attribute, and that it has the
    correct value.
    """
    # Mirror image of test__theta_access: only the q-constructed fixture
    # stores _q; the theta-constructed fixture stores None.
    if correct__q is not None:
        assert (data._q == correct__q).all()
    else:
        assert correct__q is data._q
def test_conversion_to_q(generic_data_02: Data):
    """
    Check that we can correctly convert from theta to q. Basically any decent
    programmatic way of checking this would be completely circular: I would
    just re-implement the function I'm trying to test. So, I used a random
    online calculator to check the value against my function.
    """
    # Externally computed reference value for theta -> q at index 1.
    assert generic_data_02.q_vectors[1] == pytest.approx(0.142217, rel=1e-5)
def test_conversion_to_th(generic_data_01: Data):
    """
    Check that we can correctly convert from q to theta. As above, this number
    was calculated using online calculators. Don't hate the tester, hate the
    tests.
    """
    # Online calculator derped for these numbers so rel is small. These things
    # are dumb and throw away significant figures just for kicks.
    assert generic_data_01.theta[1] == pytest.approx(0.4525, rel=1e-3)
@pytest.mark.parametrize(
    'data',
    [lazy('generic_data_01'), lazy('generic_data_02'),
     lazy('scan2d_from_nxs_01')]
)
def test_remove_data_points_01(data: Data):
    """
    First data point removal test.
    """
    # Make a deep copy of data. Worth noting that this copy won't quite be
    # precise if our generic_data was defined using q values, hence the need for
    # pytest.approx later.
    data_copy = Data(np.copy(data.intensity),
                     np.copy(data.intensity_e),
                     data.energy, np.copy(data.theta))
    # If our data is a Scan2D, we need to construct it slightly differently.
    if isinstance(data, Scan2D):
        data_copy = Scan2D(data_copy, data.metadata,
                           list(np.copy(data.images)))
    # Remove index 1: every per-point array should shrink by one, and the
    # element that was at index 2 should now sit at index 1.
    data.remove_data_points([1])
    assert len(data.intensity) + 1 == len(data_copy.intensity)
    assert len(data.intensity_e) + 1 == len(data_copy.intensity_e)
    assert len(data.theta) + 1 == len(data_copy.theta)
    assert len(data.q_vectors) + 1 == len(data_copy.q_vectors)
    assert len(data.reflectivity) + 1 == len(data_copy.reflectivity)
    assert len(data.reflectivity_e) + 1 == len(data_copy.reflectivity_e)
    assert data.intensity[1] == data_copy.intensity[2]
    assert data.intensity_e[1] == data_copy.intensity_e[2]
    assert data.theta[1] == pytest.approx(data_copy.theta[2], rel=1e-3)
    assert data.q_vectors[1] == pytest.approx(data_copy.q_vectors[2], rel=1e-3)
    assert data.reflectivity[1] == data_copy.reflectivity[2]
    assert data.reflectivity_e[1] == data_copy.reflectivity_e[2]
    # Scan2D additionally stores one image per point.
    if isinstance(data, Scan2D):
        assert len(data.images) + 1 == len(data_copy.images)
        assert data.images[1] == data_copy.images[2]
@pytest.mark.parametrize(
    'data',
    [lazy('generic_data_01'), lazy('generic_data_02'),
     lazy('scan2d_from_nxs_01')]
)
def test_remove_data_points_02(data: Data):
    """
    Second data point removal test. Most of these tests are fairly trivial, but
    the point is more to make sure that we're indeed remembering to remove
    a data point from every single array. Sure, it would be great to split
    these into their own tests, but... cba. These could also have been wrapped
    into fancy tests where I calculate with code which indices in the new
    data object correspond to which indices in the original data_copy. But, that
    leaves room for error, which defeats the point of testing.
    """
    # Make a deep copy of data.
    data_copy = Data(np.copy(data.intensity),
                     np.copy(data.intensity_e),
                     data.energy, np.copy(data.theta))
    # If our data is a Scan2D, we need to construct it slightly differently.
    if isinstance(data, Scan2D):
        data_copy = Scan2D(data_copy, data.metadata,
                           list(np.copy(data.images)))
    # Remove indices 1, 2 and 4: surviving original indices are 0, 3, 5, ...
    # so new index 1 maps to old index 3 and new index 2 maps to old index 5.
    data.remove_data_points([1, 2, 4])
    assert len(data.intensity) + 3 == len(data_copy.intensity)
    assert len(data.intensity_e) + 3 == len(data_copy.intensity_e)
    assert len(data.theta) + 3 == len(data_copy.theta)
    assert len(data.q_vectors) + 3 == len(data_copy.q_vectors)
    assert len(data.reflectivity) + 3 == len(data_copy.reflectivity)
    assert len(data.reflectivity_e) + 3 == len(data_copy.reflectivity_e)
    assert data.intensity[1] == data_copy.intensity[3]
    assert data.intensity_e[1] == data_copy.intensity_e[3]
    assert data.theta[1] == pytest.approx(data_copy.theta[3], rel=1e-3)
    assert data.q_vectors[1] == pytest.approx(data_copy.q_vectors[3], rel=1e-3)
    assert data.reflectivity[1] == data_copy.reflectivity[3]
    assert data.reflectivity_e[1] == data_copy.reflectivity_e[3]
    assert data.intensity[2] == data_copy.intensity[5]
    assert data.intensity_e[2] == data_copy.intensity_e[5]
    assert data.theta[2] == pytest.approx(data_copy.theta[5], rel=1e-3)
    assert data.q_vectors[2] == pytest.approx(data_copy.q_vectors[5], rel=1e-3)
    assert data.reflectivity[2] == data_copy.reflectivity[5]
    assert data.reflectivity_e[2] == data_copy.reflectivity_e[5]
    if isinstance(data, Scan2D):
        assert len(data.images) + 3 == len(data_copy.images)
        assert data.images[1] == data_copy.images[3]
        assert data.images[2] == data_copy.images[5]
def test_measurement_base_metadata_type(measurement_base_01):
    """
    Make sure that our measurement base type is indeed I07Nexus. If it is, then
    the following tests just quickly make sure that its values have remained
    intact.
    """
    assert isinstance(measurement_base_01.metadata, I07Nexus)
def test_measurement_base_metadata_path(measurement_base_01,
                                        path_to_i07_nxs_01):
    """
    Make sure that we can access the metadata, and that its local_path is good.
    """
    assert measurement_base_01.metadata.local_path == path_to_i07_nxs_01
def test_measurement_base_metadata_energy(measurement_base_01):
    """
    Check that the metadata has the correct energy, as exposed through the
    I07Nexus class's probe_energy attribute.
    """
    assert measurement_base_01.metadata.probe_energy == 12.5
def test_measurement_base_underlying_data(measurement_base_01: MeasurementBase,
                                          generic_data_01: Data):
    """
    Make sure that the instance of MeasurementBase has the same values of
    q, theta, intensity etc. as the instance of Data from which it was
    constructed.
    """
    # Note that, while there are multiple assertions here, they're really all
    # testing the same thing: pretty trivial attribute access, and equivalence
    # of parent and child for the subset of child that should be the same as
    # parent.
    assert (measurement_base_01._q == generic_data_01._q).all()
    assert measurement_base_01._theta == generic_data_01._theta
    assert (measurement_base_01.q_vectors == generic_data_01.q_vectors).all()
    assert (measurement_base_01.intensity == generic_data_01.intensity).all()
    assert (measurement_base_01.intensity_e ==
            generic_data_01.intensity_e).all()
    assert measurement_base_01.energy == generic_data_01.energy
    assert (measurement_base_01.reflectivity ==
            generic_data_01.reflectivity).all()
    assert (measurement_base_01.reflectivity_e ==
            generic_data_01.reflectivity_e).all()
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,185
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/runner.py
|
"""
This module contains functions whose purpose is simply to use the islatu
library to process data acquired from a specific instrument.
"""
from dataclasses import dataclass
from typing import List
from os import path
import os
from datetime import datetime
from ast import literal_eval as make_tuple
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
from yaml import load, dump
import numpy as np
import islatu
from islatu import background
from islatu import corrections
from islatu import cropping
from islatu import io
from islatu.region import Region
from islatu.io import i07_dat_to_dict_dataframe
from islatu.refl_profile import Profile
from islatu.debug import debug
# This could be done by reflection, but it feels slightly less arcane to use
# this kind of function map. It also gives these scripts a little more
# flexibility.
# Maps the strings that may appear in a processing recipe (.yaml) file to the
# islatu callables that implement them.
function_map = {
    'roi_subtraction': background.roi_subtraction,
    'None': None,
    'i07': io.i07_nxs_parser,
    'crop': cropping.crop_to_region
}
@dataclass
class Creator:
    """
    Simple dataclass to store information relating to the person that created
    this dataset.
    """
    name: str = 'Unknown'
    affiliation: str = 'Unknown'
    # NOTE(review): this default is evaluated once at import time, so every
    # Creator using the default shares the module-load timestamp rather than
    # its own construction time — confirm whether that is intended.
    time: datetime = datetime.now()
@dataclass
class Origin:
    """
    Simple dataclass to store information relating to the experiment.
    """
    contact: str = 'My local contact'
    facility: str = 'Diamond Light Source'
    id: str = None
    title: str = None
    directory_path: str = None
    # NOTE(review): evaluated once at import time, so the default date is the
    # module-load time, not the instantiation time — confirm intent.
    date: str = str(datetime.now())
    year: str = None
@dataclass
class Measurement:
    """
    This dataclass stores measurement-specific metadata.
    """
    scheme: str = 'q-dispersive'
    # NOTE(review): annotated List[str] but the default is a tuple; callers
    # relying on list mutation would fail on the default — verify usage.
    q_range: List[str] = (str(-np.inf), str(np.inf))
    theta_axis_name: str = 'dcdtheta'
    q_axis_name: str = 'qdcd'
    transpose: bool = False
    qz_dimension: int = 1
    qxy_dimension: int = 0
@dataclass
class Experiment:
    """
    This dataclass stores more instrument-specific metadata.
    """
    instrument: str = 'i07'
    probe: str = 'x-ray'
    energy: float = 12.5
    # NOTE(review): a single Measurement() instance is created at class
    # definition time and shared by every Experiment that uses the default;
    # mutating it on one instance affects all — confirm this is acceptable.
    measurement: Measurement = Measurement()
    sample: str = None
class DataSource:
    """
    This class stores information relating both to the experiment, and to the
    data processor.

    Args:
        title: Title recorded on the origin metadata.
        origin: Optional Origin instance; a fresh one is created when omitted.
        experiment: Optional Experiment instance; a fresh one is created when
            omitted.
        links: Optional dictionary of reference links.
    """
    def __init__(self, title, origin=None, experiment=None,
                 links=None):
        # Create fresh Origin/Experiment instances per call. The previous
        # `origin=Origin()` defaults were evaluated once at class definition
        # time, so all DataSources using the defaults shared (and mutated,
        # via the title assignment below) the same objects.
        self.origin = Origin() if origin is None else origin
        self.origin.title = title
        self.experiment = Experiment() if experiment is None else experiment
        self.links = links
@dataclass
class Software:
    """
    This dataclass stores information relating to the software used to carry
    out the any reduction/processing steps (in this case, islatu of course).
    """
    name: str = 'islatu'
    link: str = 'https://islatu.readthedocs.io'
    # Captured from the installed islatu package at import time.
    version: str = islatu.__version__
@dataclass
class DataState:
    """
    This class stores more reduction specific parameters.
    """
    # NOTE(review): these names carry no type annotations, so @dataclass does
    # not treat them as fields — they remain plain class attributes shared by
    # all instances until assigned on an instance. Confirm this is intended.
    background = None
    resolution = None
    dcd = None
    transmission = None
    intensity = None
    rebinned = None
class Reduction:
    """
    This class contains all of the information pertaining to data reduction
    carried out on this reflectometry data.

    Args:
        software: Software provenance record; fresh instance when omitted.
        input_files: List of input file paths; empty list when omitted.
        data_state: Per-reduction DataState; fresh instance when omitted.
        parser: Callable used to parse the raw data files.
        crop_function / crop_kwargs: Cropping strategy and its options.
        bkg_function / bkg_kwargs: Background-subtraction strategy and options.
        dcd_normalisation: Optional DCD normalisation data reference.
        sample_size: Optional sample size used for footprint correction.
        beam_width: Optional beam width used for footprint correction.
    """
    def __init__(self, software=None, input_files=None,
                 data_state=None, parser=io.i07_nxs_parser,
                 crop_function=cropping.crop_to_region, crop_kwargs=None,
                 bkg_function=background.fit_gaussian_1d, bkg_kwargs=None,
                 dcd_normalisation=None, sample_size=None, beam_width=None):
        # Instantiate defaults per call rather than in the signature: default
        # argument values are created once at class definition time, so the
        # previous `software=Software()` / `data_state=DataState()` defaults
        # were single shared instances mutated by every Reduction.
        self.software = Software() if software is None else software
        self.input_files = [] if input_files is None else input_files
        self.data_state = DataState() if data_state is None else data_state
        self.parser = parser
        self.crop_function = crop_function
        self.crop_kwargs = crop_kwargs
        self.bkg_function = bkg_function
        self.bkg_kwargs = bkg_kwargs
        self.dcd_normalisation = dcd_normalisation
        self.sample_size = sample_size
        self.beam_width = beam_width
class Data:
    """
    This class stores information pertaining to the data collected in the
    experiment.
    """
    def __init__(self, columns=None, n_qvectors=50, q_min=None, q_max=None,
                 q_step=None, q_shape='linear'):
        # Default column headers for the reduced output file.
        if columns is None:
            columns = ['Qz / Aa^-1', 'RQz', 'sigma RQz, standard deviation',
                       'sigma Qz / Aa^-1, standard deviation']
        self.column_1 = columns[0]
        self.column_2 = columns[1]
        self.column_3 = columns[2]
        if len(columns) == 4:
            self.column_4 = columns[3]
        # NOTE(review): if columns == 'both', the string would already have
        # been indexed character-by-character above ('b', 'o', 't', 'h'), so
        # this branch looks like dead/legacy code — confirm its intent before
        # relying on it.
        if columns == 'both':
            self.both = True
            self.column_4 = columns[3]
        self.rebin = True
        self.n_qvectors = n_qvectors
        self.q_min = q_min
        self.q_max = q_max
        self.q_step = q_step
        self.q_shape = q_shape
class Foreperson:
    """
    This class brings together all of the above classes and dataclasses into
    one big ball of yaml-able information.

    Args:
        run_numbers: The scan numbers making up the XRR profile.
        yaml_file: Path to the .yaml recipe file to parse.
        directory: Format string for the experiment's data directory; filled
            with (instrument, year, visit id).
        title: A human-readable title for the experiment.

    Raises:
        FileNotFoundError: If the formatted experiment directory is missing.
    """
    def __init__(self, run_numbers, yaml_file, directory, title):
        self.creator = Creator()
        self.data_source = DataSource(title)
        self.reduction = Reduction()
        self.data = Data()
        self.yaml_file = yaml_file
        # Use a context manager so the recipe file is closed even if the
        # yaml load raises.
        with open(yaml_file, 'r', encoding='utf-8') as y_file:
            recipe = load(y_file, Loader=Loader)
        self.setup(recipe)
        directory_path = directory.format(
            self.data_source.experiment.instrument,
            self.data_source.origin.year,
            self.data_source.origin.id)
        if path.isdir(directory_path):
            self.directory_path = directory_path
        else:
            raise FileNotFoundError(
                "The experiment directory <" + directory_path +
                "> cannot be found.")
        # Nexus file paths for every scan in the profile.
        self.reduction.input_files = [
            self.directory_path + 'i07-' + str(r) + '.nxs' for r in run_numbers]
    def setup(self, recipe):
        """
        Populates this object's attributes from a parsed yaml recipe dict.

        Args:
            recipe: The dictionary produced by yaml load of the recipe file.

        Raises:
            ValueError: If mandatory sections ('visit', 'setup') or keys
                within them are missing.
        """
        keys = recipe.keys()
        # Populate information from the visit section
        if 'visit' in keys:
            self.data_source.origin.id = recipe['visit']['visit id']
            if 'date' in recipe['visit'].keys():
                self.data_source.origin.date = datetime.strptime(
                    str(recipe['visit']['date']), '%Y-%m-%d')
                self.data_source.origin.year = self.data_source.origin.date.year
            if 'local contact' in recipe['visit'].keys():
                self.data_source.origin.contact = recipe[
                    'visit']['local contact']
            if 'user' in recipe['visit'].keys():
                self.creator.name = recipe['visit']['user']
            # BUGFIX: the membership test used the key 'affiliation' while the
            # access used 'user affiliation', so a present affiliation either
            # was ignored or raised KeyError. Test the key that is read.
            if 'user affiliation' in recipe['visit'].keys():
                self.creator.affiliation = recipe['visit']['user affiliation']
        else:
            raise ValueError(
                f"No visit given in {self.yaml_file}. " +
                "You must at least give a visit id")
        # Populate information from the instrument section
        if 'instrument' in keys:
            self.data_source.experiment.instrument = recipe['instrument']
            self.reduction.parser = function_map[recipe['instrument']]
        # Populate cropping information
        if 'crop' in keys:
            self.reduction.crop_function = function_map[
                recipe['crop']['method']]
            if 'kwargs' in recipe['crop']:
                self.reduction.crop_kwargs = recipe['crop']['kwargs']
        # Populate background subtraction method
        if 'background' in keys:
            self.reduction.bkg_function = function_map[
                recipe['background']['method']]
            if 'kwargs' in recipe['background']:
                self.reduction.bkg_kwargs = recipe['background']['kwargs']
        # Populate the setup information
        if 'setup' in keys:
            if 'dcd normalisation' in recipe['setup'].keys():
                self.reduction.dcd_normalisation = recipe[
                    'setup']['dcd normalisation']
                self.data_source.links = {
                    'instrument reference': 'doi:10.1107/S0909049512009272'}
            if 'sample size' in recipe['setup'].keys():
                self.reduction.sample_size = make_tuple(recipe[
                    'setup']['sample size'])
                # A tuple was given: keep only its first element; a bare
                # scalar has no len() and is kept as-is.
                try:
                    _ = len(self.reduction.sample_size)
                    self.reduction.sample_size = self.reduction.sample_size[0]
                except TypeError:
                    pass
            else:
                raise ValueError("No sample size given in setup of {}.".format(
                    self.yaml_file))
            if 'beam width' in recipe['setup'].keys():
                self.reduction.beam_width = make_tuple(recipe[
                    'setup']['beam width'])
                try:
                    _ = len(self.reduction.beam_width)
                    self.reduction.beam_width = self.reduction.beam_width[0]
                except TypeError:
                    pass
            else:
                raise ValueError(
                    f"No beam width given in setup of {self.yaml_file}"
                )
            if 'theta axis' in recipe['setup'].keys():
                self.data_source.experiment.measurement.theta_axis_name = (
                    recipe['setup']['theta axis'])
            if 'q axis' in recipe['setup'].keys():
                self.data_source.experiment.measurement.q_axis_name = (
                    recipe['setup']['q axis'])
            if 'transpose' in recipe['setup'].keys():
                self.data_source.experiment.measurement.transpose = (
                    recipe['setup']['transpose'])
                if self.data_source.experiment.measurement.transpose:
                    # Swap which image dimension is qz vs qxy.
                    self.data_source.experiment.measurement.qz_dimension = 0
                    self.data_source.experiment.measurement.qxy_dimension = 1
            if 'pixel max' in recipe['setup'].keys():
                self.data_source.experiment.measurement.pixel_max = recipe[
                    'setup']['pixel max']
            if 'hot pixel max' in recipe['setup'].keys():
                self.data_source.experiment.measurement.hot_pixel_max = recipe[
                    'setup']['hot pixel max']
        else:
            raise ValueError(f"No setup given in {self.yaml_file}.")
        # BUGFIX: the membership test used 'output_columns' (underscore) while
        # the accesses below use 'output columns' (space), so this whole
        # section was dead (or raised KeyError for underscore-keyed recipes).
        if 'output columns' in keys:
            if recipe['output columns'] == 3:
                self.data = Data(
                    columns=[
                        'Qz / Aa^-1', 'RQz', 'sigma RQz, standard deviation'])
            # NOTE(review): '34' appears to be shorthand for "both 3- and
            # 4-column output" (cf. Data(columns='both')) — confirm with the
            # recipe authors before changing.
            if recipe['output columns'] == 34:
                self.data = Data(columns='both')
        if 'rebin' in keys:
            if 'n qvectors' in recipe['rebin'].keys():
                self.data.n_qvectors = recipe['rebin']['n qvectors']
            elif 'min' in recipe['rebin'].keys() and 'max' in recipe[
                    'rebin'].keys() and 'step' in recipe['rebin'].keys():
                self.data.q_step = recipe['rebin']['step']
                if 'shape' in recipe['rebin'].keys():
                    self.data.q_shape = recipe['rebin']['shape']
            else:
                raise ValueError("Please define parameters of " +
                                 f"rebin in {self.yaml_file}.")
        else:
            self.data.rebin = False
def log_processing_stage(processing_stage):
    """
    Log a named processing stage framed by horizontal rules, keeping the
    reduction log readable.
    """
    rule = "-" * 10
    debug.log(rule)
    debug.log(processing_stage, unimportance=0)
    debug.log(rule)
def i07reduce(run_numbers, yaml_file, directory='/dls/{}/data/{}/{}/',
              title='Unknown', filename=None,
              q_subsample_dicts=None):
    """
    The runner that parses the yaml file and performs the data reduction.

    Args:
        run_numbers (:py:attr:`list` of :py:attr:`int`):
            Reflectometry scans that make up the profile.
        yaml_file (:py:attr:`str`):
            File path to yaml config file
        directory (:py:attr:`str`):
            Outline for directory path.
        title (:py:attr:`str`):
            A title for the experiment.
        filename:
            Either a full path to the .dat file that will be produced by this
            function, or a directory. If a directory is given, then the
            filename will be automatically generated and the file will be
            placed in the specified directory.
        q_subsample_dicts:
            A list of dictionaries, which takes the form:
                [{'scan_ID': ID, 'q_min': q_min, 'q_max': q_max},...]
            where type(ID) = str, type(q_min)=float, type(q_max)=float.
    """
    # Make sure the directory is properly formatted.
    if not str(directory).endswith(os.sep):
        directory = directory + os.sep
    the_boss = Foreperson(run_numbers, yaml_file, directory, title)
    # Necessary to distinguish the same data processed by different pipelines.
    yaml_pipeline_name = yaml_file.split(os.sep)[-1][:-5]
    files_to_reduce = the_boss.reduction.input_files
    log_processing_stage("File parsing")
    refl = Profile.fromfilenames(files_to_reduce, the_boss.reduction.parser)
    # Set the energy correctly.
    the_boss.data_source.experiment.energy = refl.energy
    log_processing_stage("Cropping")
    # Currently, only crop_to_region is implemented.
    if the_boss.reduction.crop_function is not cropping.crop_to_region and \
            the_boss.reduction.crop_function is not None:
        raise NotImplementedError(
            "The only implemented cropping function is crop_to_region.")
    # Check to see if we were given an explicit cropping region. If not, use
    # the first (and likely only) signal region.
    if (the_boss.reduction.crop_function is cropping.crop_to_region and
            the_boss.reduction.crop_kwargs is None):
        roi = refl.scans[0].metadata.signal_regions[0]
        the_boss.reduction.crop_kwargs = {'region': roi}
        debug.log(f"Crop ROI '{str(roi)}' generated from the .nxs file.")
    else:
        # An explicit region was given in the recipe as a kwargs dict.
        the_boss.reduction.crop_kwargs = {
            'region': Region(**the_boss.reduction.crop_kwargs)
        }
    refl.crop(the_boss.reduction.crop_function,
              **the_boss.reduction.crop_kwargs)
    log_processing_stage("Subtracting background")
    # Before subtracting background, make sure that, by default, we're at least
    # trying to subtract background from roi_2.
    if the_boss.reduction.bkg_function is background.roi_subtraction:
        # Make sure we have the desired background regions.
        if the_boss.reduction.bkg_kwargs is None:
            the_boss.reduction.bkg_kwargs = {
                'list_of_regions': refl.scans[0].metadata.background_regions}
        else:
            the_boss.reduction.bkg_kwargs = {
                'list_of_regions': Region(**the_boss.reduction.bkg_kwargs)
            }
    else:
        print("COULD NOT SUBTRACT BACKGROUND. SKIPPING...")
    if the_boss.reduction.bkg_function is not None:
        refl.bkg_sub(the_boss.reduction.bkg_function,
                     **the_boss.reduction.bkg_kwargs)
        the_boss.reduction.data_state.background = 'corrected'
    log_processing_stage("Performing data corrections...")
    if the_boss.reduction.dcd_normalisation is not None:
        log_processing_stage("DCD normalisation")
        itp = corrections.get_interpolator(
            the_boss.reduction.dcd_normalisation, i07_dat_to_dict_dataframe)
        refl.qdcd_normalisation(itp)
        the_boss.reduction.data_state.dcd = 'normalised'
    log_processing_stage("Footprint correction.")
    refl.footprint_correction(
        the_boss.reduction.beam_width, the_boss.reduction.sample_size)
    log_processing_stage("Transmission normalisation.")
    refl.transmission_normalisation()
    the_boss.reduction.data_state.transmission = 'normalised'
    refl.concatenate()
    if q_subsample_dicts is not None:
        log_processing_stage(
            "Doctoring data.\nSorry, I mean: Bounding q-vectors.")
        # We'll need to subsample a subset of our scans.
        for q_subsample_dict in q_subsample_dicts:
            refl.subsample_q(**q_subsample_dict)
        debug.log("Limited q-range on specified scans.")
    # Rebin the data, if the user requested this.
    if the_boss.data.rebin:
        log_processing_stage("Rebinning the data.")
        if the_boss.data.q_min is None:
            debug.log("Linearly rebinning data into " +
                      str(the_boss.data.n_qvectors) + " uniformly spaced " +
                      "points in q-space.", unimportance=2)
            refl.rebin(number_of_q_vectors=the_boss.data.n_qvectors)
        else:
            if the_boss.data.q_shape == 'linear':
                debug.log("Rebinning data linearly.", unimportance=2)
                spacing = np.linspace
            elif the_boss.data.q_shape == 'log':
                # NOTE(review): np.logspace interprets its start/stop as
                # exponents, whereas raw q values are passed below — confirm
                # this is the intended behavior for 'log' rebinning.
                debug.log("Rebinning data logarithmically", unimportance=2)
                spacing = np.logspace
            else:
                # BUGFIX: previously an unrecognised shape left `spacing`
                # unbound, crashing below with an opaque UnboundLocalError.
                raise ValueError(
                    f"Unrecognised q_shape '{the_boss.data.q_shape}': "
                    "expected 'linear' or 'log'.")
            debug.log(
                f"Spacing generated from {refl.q_vectors.min()}Å to " +
                f"{refl.q_vectors.max()}Å.", unimportance=2
            )
            refl.rebin(new_q=spacing(refl.q_vectors.min(), refl.q_vectors.max(),
                                     the_boss.data.q_step))
        the_boss.reduction.data_state.rebinned = the_boss.data.q_shape
    the_boss.data_source.experiment.measurement.q_range = [
        str(refl.q_vectors.min()), str(refl.q_vectors.max())]
    the_boss.data.n_qvectors = str(len(refl.reflectivity))
    # Prepare the data array.
    data = np.array([refl.q_vectors, refl.reflectivity, refl.reflectivity_e]).T
    debug.log("XRR reduction completed.", unimportance=2)
    # Work out where to save the file.
    datetime_str = datetime.now().strftime("%Y-%m-%d_%Hh%Mm%Ss")
    dat_filename = 'XRR_{}_'.format(
        run_numbers[0]) + yaml_pipeline_name + datetime_str + ".dat"
    if filename is None:
        # Make sure that the processing directory exists.
        processing_path = path.join(the_boss.directory_path, 'processing')
        if not os.path.exists(processing_path):
            os.makedirs(processing_path)
        # BUGFIX: the filename used to be appended with bare string
        # concatenation, producing paths like ".../processingXRR_...".
        filename = os.path.join(processing_path, dat_filename)
    elif os.path.isdir(filename):
        # It's possible we were given a directory in which to save the created
        # file. In this case, use the filename variable as a directory and add
        # our auto generated filename to it.
        filename = os.path.join(filename, dat_filename)
    # Write the data.
    np.savetxt(
        filename, data, header=f"{dump(vars(the_boss))}\n Q(1/Å) R R_error"
    )
    debug.log("-" * 10)
    # BUGFIX: the closing log message never interpolated the output path.
    debug.log(f"Reduced data stored at {filename}", unimportance=0)
    debug.log("-" * 10)
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,186
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/metadata.py
|
"""
This module contains the Metadata class, returned by parser methods in the
islatu.io module. This class provides a consistent way to refer to metadata
returned by different detectors/instruments, and also contains a dictionary
of all of the metadata as scraped from the parsed file.
"""
from abc import abstractmethod
import numpy as np
class Metadata:
    """
    An ABC for classes that store metadata parsed from data files. This defines
    the properties that must be implemented by parsing classes.

    NOTE(review): this class does not inherit from abc.ABC, so the
    @abstractmethod decorators are not actually enforced at instantiation
    time — confirm whether enforcement is desired before adding it.
    """
    def __init__(self, local_path):
        # Path to the parsed data file on the local filesystem.
        self.local_path = local_path
    @property
    @abstractmethod
    def probe_energy(self):
        """
        The energy of the probe particle used in the experiment.
        This must be overridden.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def default_axis(self) -> np.ndarray:
        """
        Returns a numpy array of data associated with the default axis, where
        "default axis" should be understood in the NeXus sense to mean the
        experiment's dependent variable.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def default_axis_name(self) -> str:
        """
        Returns the name of the default axis, as it was recorded in the data
        file stored at local_path.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def default_axis_type(self) -> str:
        """
        Returns what type of default axis we have. Options are 'q', 'th' or
        'tth'.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def transmission(self):
        """
        Proportional to the fraction of probe particles allowed by an attenuator
        to strike the sample.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def detector_distance(self):
        """
        Returns the distance between sample and detector.
        """
        raise NotImplementedError()
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,187
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/unit/test_io.py
|
"""
This file contains a suite of tests for the islatu.io module.
"""
# The following is necessary to use classes to share parameters using
# mark.parametrize.
# pylint: disable=no-self-use
# The following is necessary because of the dynamic nature of the nexusformat
# package's type generation.
# pylint: disable=no-member
# The following is to stop pylint from complaining about protected member tests.
# pylint: disable=protected-access
import pytest
import numpy as np
import nexusformat.nexus.tree as nx
from pytest_lazyfixture import lazy_fixture as lazy
from islatu.io import I07Nexus
from islatu.region import Region
@pytest.mark.parametrize(
    'nexus_base',
    [lazy('nexus_base_object_01'), lazy('i07_nexus_object_01')])
class TestNexusBaseAttrTypes:
    """
    This class checks that the types of each of the fixtures that inherits from
    NexusBase have attributes whose types are correct, and that can be accessed
    without raising e.g. a ValueError (as would happen if assumptions relating
    to the structure of the nexus file are broken).

    The class-level parametrize runs every method against both fixtures.
    """
    def test_local_path(self, nexus_base):
        """
        Make sure that we can access the local_path.
        """
        assert isinstance(nexus_base.local_path, str)
    def test_nxfile(self, nexus_base):
        """
        Makes sure that our nxfile has the correct type.
        """
        assert isinstance(nexus_base.nxfile, nx.NXroot)
    def test_src_path(self, nexus_base):
        """
        Makes sure that our src_path can be acquired. Also make sure that
        it isn't an empty string.
        """
        assert isinstance(nexus_base.src_path, str)
        assert len(nexus_base.src_path) != 0
    def test_entry(self, nexus_base):
        """
        Makes sure that there is only one entry in the nexus_base. Otherwise, a
        ValueError will be thrown. This also tests that the entry has the
        correct type.
        """
        assert isinstance(nexus_base.entry, nx.NXentry)
    def test_instrument(self, nexus_base):
        """
        Makes sure that we can access the instrument property without throwing,
        and that our instrument has the correct type.
        """
        assert isinstance(nexus_base.instrument, nx.NXinstrument)
    def test_detector(self, nexus_base):
        """
        Makes sure that we can access the detector property of our nexus_base
        without throwing anything, and that it has the correct type.
        """
        assert isinstance(nexus_base.detector, nx.NXdetector)
    def test_default_axis_nxdata(self, nexus_base):
        """
        Makes sure that our default axis is provided as a numpy array.
        """
        assert isinstance(nexus_base.default_axis, np.ndarray)
    def test_default_signal_nxdata(self, nexus_base):
        """
        Make sure that we can access our default signal, and that its type is
        np.ndarray.
        """
        assert isinstance(nexus_base.default_signal, np.ndarray)
@pytest.mark.parametrize(
    'nexus_base, path',
    [
        (lazy('nexus_base_object_01'), lazy('path_to_i07_nxs_01')),
        (lazy('i07_nexus_object_01'), lazy('path_to_i07_nxs_01'))
    ]
)
def test_local_path(nexus_base, path):
    """
    Make sure that the local_paths of our nexus_base objects are
    correct.
    """
    assert nexus_base.local_path == path
@pytest.mark.parametrize(
    'nexus_base, path',
    [
        (lazy('nexus_base_object_01'),
         '/dls/i07/data/2021/si28707-1/i07-404876.nxs'),
        (lazy('i07_nexus_object_01'),
         '/dls/i07/data/2021/si28707-1/i07-404876.nxs')
    ]
)
def test_src_path(nexus_base, path):
    """
    Checks that the parsed nxs path is correct. Worth noting that, when
    extending this test for more .nxs files, it's important to manually
    scrape the src_path by parsing nxfile.tree, unless you already know
    what value this will take (because, e.g., you just downloaded the file).
    """
    assert nexus_base.src_path == path
@pytest.mark.parametrize(
    'i07_nexus, path',
    [
        (lazy('i07_nexus_object_01'),
         '/dls/i07/data/2021/si28707-1/excaliburScan404876_000001.h5')
    ]
)
def test_src_data_path(i07_nexus: I07Nexus, path):
    """
    Make sure we can properly find the path to where the data was originally
    stored, as referenced in the .nxs file. This is used to guess where the
    .h5 file is stored locally.
    """
    assert i07_nexus._src_data_path == path
@pytest.mark.parametrize(
    'i07_nexus, path',
    [
        (lazy('i07_nexus_object_01'), lazy('path_to_i07_h5_01'))
    ]
)
def test_local_data_path(i07_nexus: I07Nexus, path):
    """
    Tests our class' ability to find .h5 files stored locally. This test
    only makes sure that our class can find .h5 files that are stored in the
    same directory as the .nxs file. More directories are searched, but
    these are not tested (a test generating .h5 files throughout the
    directory structure would not be portable, and would merit tests of its
    own).
    """
    assert i07_nexus.local_data_path == path
@pytest.mark.parametrize(
    'i07_nexus, correct_num',
    [(lazy('i07_nexus_object_01'), 3)]
)
def test_number_of_regions(i07_nexus: I07Nexus, correct_num):
    """
    Makes sure that we can correctly determine the number of regions of
    interest in the nexus file.
    """
    assert i07_nexus._number_of_regions == correct_num
@pytest.mark.parametrize(
    'i07_nexus, region_number, kind, result',
    [
        (lazy('i07_nexus_object_01'), 1, 'x_1', 'Region_1_X'),
        (lazy('i07_nexus_object_01'), 1, 'x_start', 'Region_1_X'),
        (lazy('i07_nexus_object_01'), 17, 'Height', 'Region_17_Height'),
        (lazy('i07_nexus_object_01'), 9917, 'y_1', 'Region_9917_Y'),
        (lazy('i07_nexus_object_01'), 6, 'Width', 'Region_6_Width'),
        (lazy('i07_nexus_object_01'), 4, 'y_start', 'Region_4_Y')
    ]
)
def test_region_bounds_keys(i07_nexus: I07Nexus,
                            region_number, kind, result):
    """
    Makes sure that region bounds keys are being generated correctly.
    Note: region numbers need not exist in the file; only the generated
    key string is checked here.
    """
    assert i07_nexus._get_region_bounds_key(region_number, kind) == result
@pytest.mark.parametrize(
    'i07_nexus, regions',
    [
        (lazy('i07_nexus_object_01'), lazy('signal_regions_01'))
    ]
)
def test_signal_regions_len(i07_nexus, regions):
    """
    Make sure our signal regions has the correct length.
    """
    assert len(i07_nexus.signal_regions) == len(regions)
@pytest.mark.parametrize(
    'i07_nexus, regions',
    [
        (lazy('i07_nexus_object_01'), lazy('signal_regions_01'))
    ]
)
def test_signal_regions(i07_nexus: I07Nexus, regions):
    """
    Tests the I07Nexus class' ability to parse signal regions of interest.
    """
    # Note: this should probably always be a for loop with just 1 iteration.
    for i, _ in enumerate(regions):
        assert i07_nexus.signal_regions[i] == regions[i]
@pytest.mark.parametrize(
    'i07_nexus, regions',
    [
        (lazy('i07_nexus_object_01'), lazy('bkg_regions_01'))
    ]
)
def test_bkg_regions_len(i07_nexus: I07Nexus, regions):
    """
    Makes sure that we can extract background regions from an I07 nexus
    file.
    """
    assert len(i07_nexus.background_regions) == len(regions)
@pytest.mark.parametrize(
    'i07_nexus, regions',
    [
        (lazy('i07_nexus_object_01'), lazy('bkg_regions_01'))
    ]
)
def test_bkg_regions(i07_nexus: I07Nexus, regions):
    """
    Makes sure that we can extract background regions from an I07 nexus
    file.
    """
    # Compare each parsed background region against the expected fixture.
    for i, _ in enumerate(regions):
        assert i07_nexus.background_regions[i] == regions[i]
@pytest.mark.parametrize(
    'i07_nexus, transmission',
    [(lazy('i07_nexus_object_01'), 0.000448426658633058)])
def test_transmission(i07_nexus: I07Nexus, transmission):
    """
    Make sure we can correctly parse the transmission coefficient.
    """
    assert i07_nexus.transmission == transmission
@pytest.mark.parametrize(
    'i07_nexus, probe_energy',
    [(lazy('i07_nexus_object_01'), 12.5)]
)
def test_probe_energy(i07_nexus: I07Nexus, probe_energy):
    """
    Make sure we can extract the energy of the probe particle from the .nxs
    file.
    """
    assert i07_nexus.probe_energy == probe_energy
@pytest.mark.parametrize(
    'i07_nexus, detector_distance',
    [(lazy('i07_nexus_object_01'), 1.1155)]
)
def test_detector_distance(i07_nexus: I07Nexus, detector_distance):
    """
    Make sure that we can extract the detector distance from the .nxs file.
    """
    assert i07_nexus.detector_distance == detector_distance
@pytest.mark.parametrize(
    'i07_nexus, description',
    [(lazy('i07_nexus_object_01'), 'q')]
)
def test_default_axis_type(i07_nexus: I07Nexus, description):
    """
    Make sure that we are correctly identifying the kind of axis data
    stored in the nexus file.
    """
    assert i07_nexus.default_axis_type == description
@pytest.mark.parametrize(
    'i, ith_region',
    [
        (1, Region(1208, 1208+50, 206, 206+18)),
        (2, Region(1258, 1258+50, 206, 206+18)),
        (3, Region(1208, 1208+50, 188, 188+18))
    ]
)
def test_ith_region_nxs_01(i07_nexus_object_01: I07Nexus,
                           i, ith_region):
    """
    Make sure that we can extract the ith region from i07_nexus_object_01.
    Note: region indices are 1-based, matching the file's key naming.
    """
    assert i07_nexus_object_01._get_ith_region(i) == ith_region
def test_detector_name(i07_nexus_object_01: I07Nexus):
    """
    Make sure that we can properly extract the name of the detector.
    """
    assert i07_nexus_object_01.detector_name == I07Nexus.excalibur_detector_2021
def test_excalibur_name():
    """
    Make sure that we're spelling the detector name properly!
    """
    assert I07Nexus.excalibur_detector_2021 == "excroi"
    assert I07Nexus.excalibur_04_2022 == "exr"
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,188
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/unit/test_debug.py
|
"""
This module contains a couple of simple tests for Islatu's debugger.
"""
from islatu.debug import debug
def test_debug_default_log_lvl():
    """
    Make sure that the debugger starts out with a logging_lvl of 1.
    """
    assert debug.logging_level == 1
def test_debug_log_lvl_change():
    """
    Make sure that we can change the logging level, if required.
    """
    debug.logging_level = 2
    assert debug.logging_level == 2
    # Restore the default so other tests see the module-level singleton in
    # its original state.
    debug.logging_level = 1
    assert debug.logging_level == 1
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,189
|
DiamondLightSource/islatu
|
refs/heads/master
|
/tests/conftest.py
|
"""
This module contains fixture definitions used when testing the islatu module.
"""
# The following pylint rule is, unfortunately, necessary due to how pytest works
# with fixtures. Consequently, all fixtures are defined in this file so that
# redefined-outer-name only needs to be disabled once.
# pylint: disable=redefined-outer-name
# We need to test protected members too.
# pylint: disable=protected-access
import os
import pytest
import numpy as np
from islatu.io import I07Nexus, i07_nxs_parser, i07_dat_to_dict_dataframe
from islatu.corrections import get_interpolator
from islatu.data import Data, MeasurementBase
from islatu.region import Region
from islatu.refl_profile import Profile
@pytest.fixture
def path_to_resources():
    """
    Returns the path to the resources folder.
    Works whether pytest is run from the repo root or the tests directory.
    """
    if os.path.isdir("resources"):
        return "resources" + os.sep
    if os.path.isdir("tests") and os.path.isdir("src"):
        return "tests" + os.sep + "resources" + os.sep
    raise FileNotFoundError(
        "Couldn't locate the tests/resources directory. Make sure that " +
        "the pytest command is run from within the base islatu directory" +
        ", or from within the tests directory."
    )
@pytest.fixture
def path_to_i07_nxs_01(path_to_resources):
    """
    Returns the path to an i07 nexus file. If it can't be found, raises.
    """
    return os.path.join(path_to_resources, "i07-404876.nxs")
@pytest.fixture
def path_to_i07_nxs_02(path_to_resources):
    """
    Returns the path to a second i07 nexus file. If it cant be found, raises.
    """
    return os.path.join(path_to_resources, "i07-404877.nxs")
@pytest.fixture
def path_to_dcd_normalisation_01(path_to_resources):
    """
    Returns the path to the qdcd normalisation file corresponding to i07_nxs_01.
    """
    return os.path.join(path_to_resources, "404863.dat")
@pytest.fixture
def parsed_dcd_normalisation_01(path_to_dcd_normalisation_01):
    """
    Returns the ([metadata] dict, [data] dataframe) relating to the first
    dcd normalisation file.
    """
    return i07_dat_to_dict_dataframe(path_to_dcd_normalisation_01)
@pytest.fixture
def dcd_norm_01_splev(path_to_dcd_normalisation_01):
    """
    Returns the scipy splev corresponding to the first dcd normalisation file.
    """
    return get_interpolator(path_to_dcd_normalisation_01,
                            i07_dat_to_dict_dataframe)
@pytest.fixture
def path_to_i07_h5_01(path_to_resources):
    """
    Returns the path to an i07 h5 file. If it can't be found, raises.
    """
    return os.path.join(path_to_resources, "excaliburScan404876_000001.h5")
@pytest.fixture
def nexus_base_object_01(path_to_i07_nxs_01):
    """
    Returns the path's corresponding i07 nexus object.
    """
    return I07Nexus(path_to_i07_nxs_01)
@pytest.fixture
def i07_nexus_object_01(path_to_i07_nxs_01):
    """
    Returns the path's corresponding i07 nexus object.
    """
    return I07Nexus(path_to_i07_nxs_01)
@pytest.fixture
def signal_regions_01():
    """
    Returns the list of signal regions recorded in i07_nexus_object_01.
    """
    return [Region(1208, 1208+50, 206, 206+18)]
@pytest.fixture
def bkg_regions_01():
    """
    Returns the list of background regions recorded in i07_nexus_object_01.
    """
    return [Region(1258, 1258+50, 206, 206+18),
            Region(1208, 1208+50, 188, 188+18)]
@pytest.fixture
def custom_bkg_region_01():
    """
    Returns a decent background regions, specifically chosen for scan_01.
    """
    return Region(1340, 1420, 220, 300)
@pytest.fixture
def scan2d_from_nxs_01(path_to_i07_nxs_01):
    """
    Uses the i07_nxs_parser to produce an instance of Scan2D from the given
    path.
    """
    return i07_nxs_parser(path_to_i07_nxs_01)
@pytest.fixture
def scan2d_from_nxs_01_copy(path_to_i07_nxs_01):
    """
    An exact copy of the above Scan2D instance. Useful to have in some tests.
    """
    return i07_nxs_parser(path_to_i07_nxs_01)
@pytest.fixture
def scan_02(path_to_i07_nxs_02):
    """
    Returns another scan at higher q.
    """
    return i07_nxs_parser(path_to_i07_nxs_02)
@pytest.fixture
def generic_data_01():
    """
    Constructs a generic, valid, Data instance.
    """
    # Some meaningless values.
    q_vecs = np.arange(10)/10
    intensities = np.arange(1100, 300, -45)[:10]
    # A realistic value (in keV)
    energy = 12.5
    return Data(intensities, np.sqrt(intensities), energy, q_vectors=q_vecs)
@pytest.fixture
def generic_data_02():
    """
    Constructs another random Data instance, this time initializing with theta
    rather than q.
    """
    # More meaningless values.
    theta = np.arange(6)
    intensities = np.arange(11100012, 0, -12938)[:6]
    # Cu k-alpha
    energy = 8.04
    return Data(intensities, np.sqrt(intensities), energy, theta)
@pytest.fixture
def measurement_base_01(path_to_i07_nxs_01, generic_data_01: Data):
    """
    Constructs a fairly meaningless instance of MeasurementBase to test against.
    This uses generic_data_01 to populate its data, and gets metadata by
    parsing a nxs file.
    """
    i07_nxs_metadata = I07Nexus(path_to_i07_nxs_01)
    return MeasurementBase(generic_data_01.intensity,
                           generic_data_01.intensity_e, generic_data_01.energy,
                           i07_nxs_metadata, q=generic_data_01._q)
@pytest.fixture
def region_01():
    """
    Returns a fairly generic instance of islatu.region's Region class.
    """
    return Region(x_start=1056, x_end=1124, y_start=150, y_end=250)
@pytest.fixture
def profile_01(path_to_i07_nxs_01):
    """
    Returns an instance of the Profile class that containts just scan_01.
    """
    return Profile.fromfilenames([path_to_i07_nxs_01], i07_nxs_parser)
@pytest.fixture
def profile_0102(path_to_i07_nxs_01, path_to_i07_nxs_02):
    """
    Returns an instance of the Profile class that contains scan_01 and scan_02.
    """
    return Profile.fromfilenames([path_to_i07_nxs_01, path_to_i07_nxs_02],
                                 i07_nxs_parser)
@pytest.fixture
def old_dcd_data(path_to_resources):
    """
    Returns a np.ndarray of the data as processed by islatu prior to a
    substantial refactor. This old DCD data was confirmed to be correctly
    reduced by beamline staff.
    """
    return np.loadtxt(os.path.join(
        path_to_resources, "XRR_404875_dcd_template2021-11-01_15h35m02s.dat"))
@pytest.fixture
def process_xrr_path(path_to_resources):
    """
    Uses relative pathfinding to return a valid path to process_xrr.py
    """
    return os.path.join(
        path_to_resources, '../../CLI/process_xrr.py'
    )
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,190
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/data.py
|
"""
This module contains both the Data class and the MeasurementBase class.
In a reflectometry measurement, the experimental data corresponds to the
reflected intensity as a function of scattering vector Q. In a typical
diffractometer, Q is a virtual axis, calculated geometrically from various motor
positions. The Data class takes care of these conversions, exposing q, theta,
intensity, reflectivity, and energy.
The MeasurementBase class defines a simple class that is Data, but that also has
metadata.
"""
import numpy as np
from scipy.constants import physical_constants
class Data:
"""
The base class of all Islatu objects that contain data.
Attributes:
intensity:
A numpy array containing intensities in this dataset.
intensity_e:
A numpy array containing the corresponding errors in intensity.
theta:
A numpy array containing the probe particle's angle of
incidence at each intensity.
q_vectors:
A numpy array containing the magnitude of the probe particle's
scattering vector for each intensity value.
energy:
The energy of the probe particle used to acquire this data. This
is necessary to swap between theta and q.
Args:
intensity:
A numpy array of the intensities in this dataset.
intensity_e:
The errors on the intensities.
energy:
The energy of the probe particle used to acquire this data.
theta:
A numpy array containing the probe particle's angle of
incidence at each intensity. NOTE: only one of theta/q needs to
be provided.
q_vectors:
A numpy array containing the magnitude of the probe particle's
scattering vector for each intensity value. NOTE: only one of
theta/q needs to be provided.
"""
    def __init__(self, intensity, intensity_e, energy, theta=None,
                 q_vectors=None):
        """
        Store the measured intensities, their errors and the probe energy,
        together with exactly one of the two abscissa representations
        (theta or q); the missing one is computed on demand by the
        corresponding property.

        Raises:
            ValueError: If neither theta nor q_vectors is provided.
        """
        self.intensity = intensity
        self.intensity_e = intensity_e
        self.energy = energy
        if (theta is None) and (q_vectors is None):
            raise ValueError(
                "Either theta or q must be provided to create a Data instance"
            )
        # When using properties, it wont matter which of these ends up as None.
        self._theta = theta
        self._q = q_vectors
    @property
    def reflectivity(self) -> np.array:
        """
        Returns the intensity, normalized such that the maximum value of the
        intensity is equal to 1.
        """
        return self.intensity/np.amax(self.intensity)
    @property
    def reflectivity_e(self) -> np.array:
        """
        Returns the errors on the intensity, divided by the maximum value of the
        intensity array.
        """
        return self.intensity_e/np.amax(self.intensity)
    @property
    def q_vectors(self) -> np.array:
        """
        Returns self._q if this instance of Data was generated from q-data.
        Otherwise, converts from self._theta to q.
        """
        if (self._q is None) and (self._theta is not None):
            return self._theta_to_q(self._theta, self.energy)
        else:
            return self._q
    @q_vectors.setter
    def q_vectors(self, value) -> None:
        """
        Sets self._q. Note that self._theta is left untouched, so after
        setting q the stored theta (if any) still takes precedence in the
        theta property.
        """
        self._q = value
    @property
    def theta(self) -> np.array:
        """
        Returns self._theta if this instance of Data was generate from th-data.
        Otherwise, converts from scattered q to theta.
        """
        if (self._theta is None) and (self._q is not None):
            return self._q_to_theta(self._q, self.energy)
        else:
            return self._theta
    @theta.setter
    def theta(self, value) -> None:
        # Overwrites the stored theta; the stored q (if any) is untouched.
        self._theta = value
def _theta_to_q(self, theta, energy) -> np.array:
"""
Calculates the scattering vector Q from diffractometer theta.
Args:
theta (:py:attr:`str`):
Array of theta values to be converted.
energy (:py:attr:`float`):
Energy of the incident probe particle.
"""
planck = physical_constants["Planck constant in eV s"][0] * 1e-3
speed_of_light = physical_constants[
"speed of light in vacuum"][0] * 1e10
q_values = np.sin(np.radians(theta)) / (planck * speed_of_light)
q_values *= energy * 4.0 * np.pi
return q_values
def _q_to_theta(self, q_values, energy) -> np.array:
"""
Calculates the diffractometer theta from scattering vector Q.
Args:
theta (:py:attr:`str`):
Array of theta values to be converted.
energy (:py:attr:`float`):
Energy of the incident probe particle.
"""
planck = physical_constants["Planck constant in eV s"][0] * 1e-3
speed_of_light = physical_constants[
"speed of light in vacuum"][0] * 1e10
theta_values = planck * speed_of_light * \
np.arcsin(q_values / (energy * 4 * np.pi))
theta_values = theta_values*180/np.pi
return theta_values
def remove_data_points(self, indices):
"""
Convenience method for the removal of a specific data point by its
index.
Args:
indices:
The indices to be removed.
"""
if self._q is not None:
self._q = np.delete(self._q, indices)
if self._theta is not None:
self._theta = np.delete(self._theta, indices)
self.intensity = np.delete(self.intensity, indices)
self.intensity_e = np.delete(self.intensity_e, indices)
class MeasurementBase(Data):
    """
    All measurements derive from this class.
    Attrs:
        metadata:
            The metadata relevant to this measurement.
    """
    def __init__(self, intensity, intensity_e, energy, metadata, theta=None,
                 q=None) -> None:
        # Delegate all numerical state to the Data base class, then attach
        # the measurement-specific metadata on top.
        super().__init__(intensity, intensity_e, energy,
                         theta=theta, q_vectors=q)
        self.metadata = metadata
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
4,191
|
DiamondLightSource/islatu
|
refs/heads/master
|
/src/islatu/image.py
|
"""
The two-dimension detector generates images of the reflected intensity.
The purpose of the Image class stored in this module is the investigation
and manipulation of these images.
"""
import numpy as np
class Image:
    """
    This class stores information about the detector images.
    Attributes:
        array (:py:attr:`array_like`):
            The image described as an array.
        array_original (:py:attr:`array_like`):
            The original value of the image array when it was loaded from disk.
        array_e (:py:attr:`array_like`):
            The errors on each pixel of the array.
        bkg (:py:attr:`float`):
            The background that was subtracted from the image.
        bkg_e (:py:attr:`float`):
            The uncertainty on the background.
    Args:
        array (:py:class:`numpy.ndarray`): The detector image as an array.
        transpose (:py:attr:`bool`, optional): Should the data be transposed
            (rotated by 90 degrees)? Defaults to :py:attr:`False`.
    """
    def __init__(self, array: np.ndarray, transpose: bool = False):
        """
        Initialisation of the :py:class:`islatu.image.Image` class, includes
        assigning uncertainties.
        """
        if transpose:
            array = array.T
        self.array = array
        # Keep a pristine copy so pixel errors are always derived from the
        # raw counts, even after cropping or background subtraction.
        self.array_original = np.copy(array)
        self.array_e = self.initial_std_devs
        self.bkg = 0
        self.bkg_e = 0
    @property
    def nominal_values(self):
        """
        Get the nominal values of the image array.
        Returns:
            :py:attr:`array_like`: Nominal values of image.
        """
        return self.array
    @property
    def initial_std_devs(self):
        """
        Get the standard deviation values of the original raw image array.
        Returns:
            :py:attr:`array_like`: Standard deviation values of image.
        """
        # Poisson (counting) error: sqrt of the raw counts. Pixels that
        # recorded zero counts are assigned an error of 1 to avoid
        # zero-weight pixels downstream.
        array_error = np.sqrt(self.array_original)
        array_error[np.where(self.array_original == 0)] = 1
        return array_error
    @property
    def shape(self):
        """
        Array shape
        Returns:
            :py:attr:`tuple` of :py:attr:`int`: The shape of the image.
        """
        return self.array.shape
    def __repr__(self):
        """
        Custom representation.
        Returns:
            :py:attr:`str`: Representation of the image array.
        """
        # Must return a str (not the array itself), otherwise repr() raises.
        return repr(self.array)
    def __str__(self):
        """
        Custom string.
        Returns:
            :py:attr:`str`: String form of the image array.
        """
        # Must return a str (not the array itself), otherwise str() raises.
        return str(self.array)
    def crop(self, crop_function, **kwargs):
        """
        Perform an image crop based on some function.
        Args:
            crop_function (:py:attr:`callable`): The function to crop the data.
            **kwargs (:py:attr:`dict`): The crop function keyword arguments.
        """
        # Crop data and errors identically so they stay aligned.
        self.array = crop_function(self.array, **kwargs)
        self.array_e = crop_function(self.array_e, **kwargs)
    def background_subtraction(self, background_subtraction_function,
                               **kwargs):
        """
        Perform a background subtraction based on some function.
        Args:
            background_subtraction_function (:py:attr:`callable`): The
                function to model the data and therefore remove the
                background.
            **kwargs (:py:attr:`dict`): The background subtraction function
                keyword arguments.
        """
        bkg_sub_info = background_subtraction_function(
            self, **kwargs
        )
        # Store the calculated background, and its error.
        self.bkg, self.bkg_e = bkg_sub_info.bkg, bkg_sub_info.bkg_e
        # Do the subtraction, propagating the background error in quadrature.
        self.array = self.array - self.bkg
        self.array_e = np.sqrt(self.bkg_e**2 + self.array_e**2)
        # Expose information relating to the background subtraction for
        # meta-analyses.
        return bkg_sub_info
    def sum(self):
        """
        Perform a summation on the image's array.
        Returns:
            A tuple taking the form (summed_intensity, summed_intensity_e).
        """
        intensity = np.sum(self.array)
        # Errors add in quadrature for a sum of independent pixels.
        intensity_e = np.sqrt(np.sum(self.array_e**2))
        return intensity, intensity_e
|
{"/src/islatu/io.py": ["/src/islatu/scan.py", "/src/islatu/image.py", "/src/islatu/data.py", "/src/islatu/region.py", "/src/islatu/debug.py", "/src/islatu/metadata.py"], "/src/islatu/background.py": ["/src/islatu/region.py", "/src/islatu/image.py"], "/src/islatu/refl_profile.py": ["/src/islatu/scan.py", "/src/islatu/stitching.py", "/src/islatu/data.py"], "/src/islatu/stitching.py": ["/src/islatu/scan.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.