index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
45,424 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0304/demo_conf.py | # -*- coding:utf-8 -*-
# @Time :2020-03-04 20:38
# @Email :876417305@qq.com
# @Author :yanxia
# @File :demo_conf.PY
from configparser import ConfigParser
# Demo: reading typed values from an INI file with configparser.
# Instantiate the parser class.
cf=ConfigParser()
# Read the conf file: path + encoding.
# Relative path (assumes demo.cfg sits in the working directory — TODO confirm).
cf.read("demo.cfg",encoding="utf-8")
# An absolute path would also work, e.g.:
# cf.read("C:\Users\10977\Desktop\剪切")
# All section names, returned as a list.
secs=cf.sections()
print(secs)
# Option names under one section:
# cf.options("db")
print(cf.options(secs[0]))
# A single option's value under a section.
res=cf.get("db","db_port")# get() always returns a string
print(res,type(res))
# Typed getter: int (cf.getint)
old =cf.getint("db","db_port")
print(old,type(old))
# Typed getter: bool
res1=cf.getboolean("excel","res")
print(res1,type(res1))
# Typed getter: float
res2=cf.getfloat("excel","row")
print(res2,type(res2))
sex=cf.get("person_info","sex")
print(eval(sex)) | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,425 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0228/boy_friend.py | # -*- coding:utf-8 -*-
# @Time :2020-02-28 21:08
# @Email :876417305@qq.com
# @Author :yanxia
# @File :boy_friend.PY
class BoyFriend:
    """Teaching demo: class attributes plus the three method kinds
    (static / class / instance)."""

    # Class attributes, shared by every instance.
    sex='boy'
    height=180

    @staticmethod
    def coding(language='python'):
        # Static method: no access to class or instance state.
        print("会写{}代码,并且写得很好".format(language))

    def cooking(self,*args):
        # Each dish name is followed by a "、" separator; the trailing
        # separator is kept deliberately, matching the original output.
        dishes="".join(item+"、" for item in args)
        print("会做饭,会做{}".format(dishes))

    @classmethod
    def paly_basketball(cls):
        # Class method: reads class state and exercises the other methods.
        print("我男朋友的身高是{}:".format(cls.height))
        print("最喜欢打篮球")
        cls.coding()
        cls().cooking()

    def print_self(self):
        # Instance method: only callable on an instance.
        print("self",self)
        print("男票的身高是{}".format(self.height))
        self.cooking("麻辣烫","小炒肉")
        self.paly_basketball()
        self.coding()
x=BoyFriend()
#BoyFriend.print_self()# instance methods need an instance; calling on the class raises TypeError: print_self() missing 1 required positional argument: 'self'
# BoyFriend.paly_basketball()# classmethod invoked via the class
#x.paly_basketball()# classmethod invoked via an instance
# BoyFriend.coding()# staticmethod invoked via the class
x.coding()# staticmethod invoked via an instance
#x.paly_basketball()
#x.print_self()
#x.cooking("niurou")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,426 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/Sd_youjian.py | # -*- coding:utf-8 -*-
# @Time :2020-03-10 15:49
# @Email :876417305@qq.com
# @Author :yanxia
# @File :email_colose.PY
import ssl
from smtplib import SMTP,SMTP_SSL
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
# Sender account. NOTE(review): credentials are hard-coded in source —
# move them to a config file or environment variables.
emailname="yanxia_626@163.com"
emailpwd="gdd610626wyx"
# Per-provider SMTP endpoints; extend this mapping to support more providers.
email_severs={"163.com":{"smtp":{"host":"smtp.163.com","port":25,"ssl_port":465}},}
# SSL context used for encrypted SMTP connections.
context=ssl.create_default_context()
def parse_mail(mailname):
    """Resolve a mail address to its provider's server config.

    Raises TypeError for an address without '@', NameError for an
    unknown provider; returns the provider entry from email_severs.
    """
    parts=mailname.split("@")
    if len(parts)==1:
        raise TypeError("email format error")
    domain=parts[-1]
    if domain not in email_severs:
        raise NameError("no this emial server")
    return email_severs.get(domain,"")
class HttpEmail(SMTP):
    """Plain-SMTP mail sender that connects and logs in on construction.

    Fixes vs. the original:
    - __init__ never called SMTP.__init__, so no connection existed and
      login() could not succeed; we now resolve the provider via
      parse_mail() and connect (mirroring MyEmailSSL below).
    - send_email() recursively called itself instead of smtplib's
      sendmail(), causing infinite recursion.
    - attachment loop leaked open file handles and passed the misspelled
      'fiename' keyword to add_header instead of 'filename'.
    - mail_msg() silently ignored its subject parameter.
    """
    def __init__(self,mailname,pwd):
        self.mailname=mailname
        server=parse_mail(mailname).get("smtp","")
        super().__init__(server.get("host"),server.get("port"))
        self.login(mailname,pwd)
    def mail_msg(self,msg,type="html",subject=""):
        """Wrap the raw text in a MIMEText body; empty subject falls back to the default."""
        msg=MIMEText(msg,type)
        msg["Subject"]=subject if subject else "自动化的报告"
        return msg
    def send_email(self,to,msg,files=None,type="plain",subject=""):
        """
        Send an email, optionally with attachments.
        :param to: recipient address
        :param msg: raw body text
        :param files: list of attachment file paths
        :param type: MIME subtype of the body ("plain"/"html")
        :param subject: mail subject
        """
        total=MIMEMultipart()
        total["Subject"]=subject
        body=self.mail_msg(msg,type=type,subject=subject)
        total.attach(body)
        if files and isinstance(files,list):
            for filename in files:
                # Close each attachment handle promptly instead of leaking it.
                with open(filename,"rb") as fh:
                    part=MIMEApplication(fh.read())
                part.add_header("Content-Disposition","attachment",filename=filename)
                # Attach the file part to the multipart container.
                total.attach(part)
        return self.sendmail(self.mailname,to,total.as_string())
class MyEmailSSL(SMTP_SSL,HttpEmail):
    """Send mail over SMTP-with-SSL.

    NOTE(review): with bases (SMTP_SSL, HttpEmail) the MRO makes
    super().__init__ resolve to SMTP_SSL.__init__ (connecting on the
    ssl_port) and bypass HttpEmail.__init__ — presumably intentional;
    confirm before restructuring this hierarchy.
    """
    def __init__(self,mail_name,pwd):
        # Resolve the provider's SMTP settings from the mail address,
        # then connect over SSL and authenticate.
        self.mailname=mail_name
        server=parse_mail(mail_name).get("smtp","")
        super().__init__(server.get("host"),server.get("ssl_port"))
        super().login(mail_name,pwd)
if __name__ == '__main__':
    msg="""
    春暖花开的时候,
    希望一切都会好起来
    我想这是最好的结果
    """
    with HttpEmail(emailname,emailpwd) as mail:
        # Fix: the original passed msg["demo.txt"], which indexes a str with a
        # str key and raises TypeError at runtime. The body text goes in `msg`
        # and the attachment path belongs in the `files` list.
        mail.send_email("yanxia_626@163.com",msg,files=["demo.txt"],subject="告诉你一件好事儿")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,427 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/call_web.py | # -*- coding:utf-8 -*-
# @Time :2020-03-11 21:40
# @Email :876417305@qq.com
# @Author :yanxia
# @File :call_web.PY
import requests
# params={"username":"zhuge2019","password":"QAZwsx123"}
# url="https://callapiv2.zhuge.com/home/index/index"
# res=requests.post(url=url,data=params)
# print(res.json())
# print(res.json()['data']['token'])
# params={"authorization":res.json()['data']['token'],"uid":"900","company_id":1,"super_manage":2}
# url="https://callapiv2.zhuge.com/Admin/v1/User/get_user_rule"
# res1=requests.post(url=url,data=params)
# print(res1.json())
session=requests.sessions.session()
# Log in. NOTE(review): credentials are hard-coded — move to config/secrets.
params={"username":"zhuge2019","password":"QAZwsx123"}
res=session.request("post",url="https://callapiv2.zhuge.com/home/index/index",data=params)
print(res.text)
# Fetch the left-hand navigation bar, passing the token from the login response.
params={"authorization":res.json()['data']['token'],"uid":"900","company_id":1,"super_manage":2}
res=session.request("post",url="https://callapiv2.zhuge.com/Admin/v1/User/get_user_rule",data=params)
print(res.json())
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,428 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/testcase/test_bidLoan.py | # -*- coding:utf-8 -*-
# @Time :2020-03-16 14:48
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_bidLoan.PY
import unittest
from ddt import ddt,data
from class_0314.common import do_excel
from class_0314.common import contants
from class_0314.common.http_request import HttpRequest2
from class_0314.common import context
from class_0314.common import do_mysql
from class_0314.common.context import Context
@ddt
class BidloanTeat(unittest.TestCase):
    """Data-driven tests for the bid/loan API; cases come from the Excel sheet.

    NOTE(review): the class name is misspelled ("Teat" -> "Test"); renaming
    would change the public name, so it is only flagged here.
    """
    # Loaded at class-creation time so @data can expand one test per row.
    excel=do_excel.DoExcel(contants.case_file,"bidLoan")
    cases=excel.get_cases()
    @classmethod
    def setUpClass(cls):
        cls.http_request = HttpRequest2()
        cls.mysql = do_mysql.DoMysql() # one shared DB connection for all cases, to avoid wasting resources
    @data(*cases)
    def test_bidloan(self,case):
        """Run one Excel-defined case; after a successful bid, capture the loan id."""
        print("开始执行测试",case.title)
        print(case.url)
        print(case.data)
        # Substitute context placeholders (config values, saved ids) in the payload.
        case.data=context.replace(case.data)
        resp=self.http_request.request(case.method,case.url,case.data)
        print(resp.text)
        try:
            self.assertEqual(str(case.expected),resp.json()['code'])
            self.excel.write_result(case.case_id+1,resp.text,"PASS")
            # After a successful bid, read the newest loan row back from the DB.
            if resp.json()["msg"]=="加标成功":
                sql='select * from future.loan where MemberID=88538 order by id desc limit 1'
                loan_id=self.mysql.fetch_one(sql)['Id']
                print("加标之后的标id",loan_id)
                # Stash the id on the shared Context so later cases can use it.
                setattr(Context,"loan_id",str(loan_id))
        except Exception as e:
            # NOTE(review): broad catch also records DB errors as FAIL, not only
            # assertion failures — presumably intentional; confirm.
            self.excel.write_result(case.case_id+1,resp.text,"FAIL")
            raise e
    @classmethod
    def tearDownClass(cls):
        # Release the shared HTTP session and DB connection.
        cls.http_request.close()
        cls.mysql.close()
#!!!!注意写sql语句的时候,一定要带上表名,不然就会报 pymysql.err.InternalError: (1046, 'No database selected') | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,429 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0313/testcase/test_register.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 16:33
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_register.PY
import unittest
from class_0313.common.http_request import HttpRequest2
from class_0313.common import do_excel
from class_0313.common import contants
from ddt import ddt, data
from class_0313.common import do_mysql
@ddt
class RegisterTest(unittest.TestCase):
    """Data-driven tests for the register API; cases come from the Excel sheet."""
    # Loaded at class-creation time so @data can expand one test per row.
    excel = do_excel.DoExcel(contants.case_file, "register")
    cases = excel.get_cases()
    @classmethod
    def setUpClass(cls):
        cls.http_request = HttpRequest2()
        cls.mysql=do_mysql.DoMysql()# one shared DB connection for all cases, to avoid wasting resources
    @data(*cases)
    def test_register(self, case):
        """Run one register case, generating a fresh mobile number when asked."""
        if case.data.find('register_mobile')>-1:# find() returns -1 when the placeholder is absent
            sql='select min(mobilephone) from future.member'
            min_phone=self.mysql.fetch_one(sql)[0]# fetch_one returns a tuple; take the phone number
            # Derive an unused number from the smallest one on record.
            # NOTE(review): min + 18 could collide with an existing row — confirm.
            max_phone=int(min_phone) + 18
            case.data=case.data.replace('register_mobile',str(max_phone))# str.replace returns a new
            # string (strings are immutable), so the result must be re-assigned to case.data
        resp = self.http_request.request(case.method, case.url, case.data)
        try:
            self.assertEqual(case.expected, resp.text)
            self.excel.write_result(case.case_id + 1, resp.text, "PASS")
        except AssertionError as e:
            self.excel.write_result(case.case_id + 1, resp.text, "FAIL")
            raise e
    @classmethod
    def tearDownClass(cls):
        cls.http_request.close()
        cls.mysql.close()# close the DB connection
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,430 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0312/study_session_request.py | # -*- coding:utf-8 -*-
# @Time :2020-03-12 10:56
# @Email :876417305@qq.com
# @Author :yanxia
# @File :study_session_request.PY
import requests
# Demo: one Session carries the login cookie across both requests.
session=requests.sessions.session()
# Log in first so the session stores the authentication cookie.
# (Fix: the original had a bare URL string literal on its own line here —
# a no-op statement — which has been removed.)
params={"mobilephone":"15810447878","pwd":123456}
resp=session.request('post',url="http://test.lemonban.com/futureloan/mvc/api/member/login",data=params)
# Recharge using the now-authenticated session.
params={"mobilephone":"15810447878","amount":111}
resp=session.request('post',url="http://test.lemonban.com/futureloan/mvc/api/member/recharge",data=params)
print(resp.text)
session.close()
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,431 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0228/class_02.py | # -*- coding:utf-8 -*-
# @Time :2020-02-28 22:47
# @Email :876417305@qq.com
# @Author :yanxia
# @File :class_02.PY
# 手机
# 属性:价格 品牌 颜色 尺寸
# 方法:打电话 发短信 听音乐
class Phone:
    """Teaching demo for a phone: instance attributes set in __init__ plus
    the three method kinds (instance / class / static)."""

    def __init__(self,color,price,brand,size):
        # Attributes bound through self belong to the individual instance.
        self.color=color
        self.price=price
        self.brand=brand
        self.size=size

    @classmethod
    def call(cls,tell_number):
        # Class method: callable on the class itself.
        print("拨号{},开始打电话".format(tell_number))

    def send_message(self,tell_number,content):
        print("给{},发短信:{}".format(tell_number,content))

    def watch_tv(self,*args):
        # Every app name is followed by a "、" separator; the trailing
        # separator is kept deliberately, matching the original output.
        app="".join(name+"、" for name in args)
        print("可以利用这些APP看电视,比如说:{}看电视".format(app))

    def take_shoot(self):
        print("拍照")

    @staticmethod
    def add(a,b):
        # Static method: plain helper with no class/instance access.
        print(a+b)

    def phone_info(self):
        print("颜色{},品牌{},价格{},尺寸{}".format(self.color,self.brand,self.price,self.size))
if __name__ == '__main__':
    # t = Phone()  # with __init__ defined, positional args are required
    # Fix: the original called Phone("red","vivo",500,"5.0"), putting the
    # brand in the price slot and the price in the brand slot —
    # __init__'s parameter order is (color, price, brand, size).
    t=Phone("red",500,"vivo","5.0")
    # t.add(4,5)
    # t.take_shoot()
    # t.watch_tv("爱奇艺","腾讯")
    # t.send_message("18394428623","上课啦")
    # t.phone_info()
Phone.call("178272821") | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,432 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/testcase/test_addproject.py | # -*- coding:utf-8 -*-
# @Time :2020-03-14 0:12
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_addproject.PY
import unittest
from ddt import ddt, data
from class_0314.common import do_excel
from class_0314.common import contants
from class_0314.common.config import config
from class_0314.common import context
from class_0314.common.http_request import HttpRequest2
@ddt
class RechargeTest(unittest.TestCase):
    """Data-driven tests for the add-project API (sheet "add" in the case file).

    Fix vs. the original: the failure branch wrote the literal "FILE" into
    the Excel result column; every sibling suite writes "FAIL", so the typo
    is corrected for consistency. Dead commented-out replacement code was
    removed (context.replace() supersedes it).
    """
    # Loaded at class-creation time so @data can expand one test per row.
    excel = do_excel.DoExcel(contants.case_file, "add")
    cases = excel.get_cases()
    @classmethod
    def setUpClass(cls):
        # One shared HTTP session for all generated cases.
        cls.http_request = HttpRequest2()
    @data(*cases)
    def test_add(self, case):
        """Run one Excel-defined case after placeholder substitution."""
        # Substitute context placeholders (config values, saved ids) in the payload.
        case.data = context.replace(case.data)
        resp = self.http_request.request(case.method, case.url, case.data)
        try:
            self.assertEqual(str(case.expected), resp.json()['code'])
            self.excel.write_result(case.case_id + 1, resp.text, "PASS")
        except AssertionError as e:
            self.excel.write_result(case.case_id + 1, resp.text, "FAIL")
            raise e
    @classmethod
    def tearDownClass(cls):
        cls.http_request.close()  # release the shared session
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,433 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/run.py | # -*- coding:utf-8 -*-
# @Time :2020-03-23 13:16
# @Email :876417305@qq.com
# @Author :yanxia
# @File :run.py.PY
import sys
sys.path.append('./')# project root, so package imports resolve when run from here
print(sys.path)
import unittest
import HTMLTestRunnerNew
from class_0314.common import contants
# Collect every test_*.py module under the configured case directory.
discover=unittest.defaultTestLoader.discover(contants.case_dir,'test_*.py')
with open(contants.report_dir +'/report.html','wb') as file:
runner=HTMLTestRunnerNew.HTMLTestRunner(stream=file,
title="python 接口测试API",
description='测试接口',
tester='yanxia')
runner.run(discover) | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,434 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0312/do_excel.py | # -*- coding:utf-8 -*-
# @Time :2020-03-12 16:16
# @Email :876417305@qq.com
# @Author :yanxia
# @File :do_excel.PY
#完成excel的读和写
import openpyxl
from class_0312 import http_request
class Case:
    """One test case: a plain record whose fields mirror the Excel columns.

    Every field starts as None and is filled in by DoExcel.get_cases().
    """
    def __init__(self):
        # Initialise every column field to None in one pass.
        for field in ("case_id","title","url","data","method","expected","actual","result"):
            setattr(self,field,None)
class DoExcel:
    """Reads test cases from, and writes results back to, one Excel sheet."""
    def __init__(self,file_name,sheet_name):
        # Open the workbook and select the requested sheet up front.
        try:
            self.file_name=file_name
            self.workbook=openpyxl.load_workbook(file_name)
            self.sheet_name=sheet_name
            self.sheet=self.workbook[sheet_name]
        except Exception as e:
            print("请核对excel路径")
            raise e
    def get_cases(self):
        """Return a list of Case objects, one per data row (row 1 is the header)."""
        max_row=self.sheet.max_row # last populated row
        cases=[] # accumulates every test case
        for i in range(2,max_row+1):
            case=Case()# one Case instance per row
            case.case_id=self.sheet.cell(row=i,column=1).value
            case.title = self.sheet.cell(row=i, column=2).value
            case.url = self.sheet.cell(row=i, column=3).value
            case.data= self.sheet.cell(row=i, column=4).value
            case.method= self.sheet.cell(row=i, column=5).value
            case.expected= self.sheet.cell(row=i, column=6).value
            cases.append(case)
        self.workbook.close()
        return cases
    def write_result(self,row,actual,result):# columns are fixed (7/8); the row varies per case
        # Re-select the sheet because the workbook is saved and closed after each write.
        sheet=self.workbook[self.sheet_name]
        sheet.cell(row,7).value=actual
        sheet.cell(row,8).value=result
        self.workbook.save(filename=self.file_name)
        self.workbook.close()
if __name__ == '__main__':
    # Smoke-run: execute every case in the 'login' sheet and record results.
    do_excel=DoExcel('cases.xlsx',sheet_name='login')
    cases=do_excel.get_cases() # each item is a Case instance; inspect it via case.__dict__
    http_request=http_request.HttpRequest()
    for case in cases:
        # print(case.case_id)
        # print(case.method)
        # print(case.data)
        print(case.__dict__)# __dict__ exposes all instance attributes
        resp=http_request.request(case.method,case.url,case.data)
        actual=resp.text
        if case.expected==actual:# expected vs. actual response body
            do_excel.write_result(case.case_id+1,actual,"pass")
        else:
            do_excel.write_result(case.case_id+1,actual,"fail")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,435 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/http_xlsx.py | # -*- coding:utf-8 -*-
# @Time :2020-03-09 11:08
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http_xlsx.PY
from openpyxl import load_workbook
import json
def get_datavalue():
    """Load the login test cases from data.xlsx.

    Returns a list of dicts with url/method/data/expected/case_id keys,
    one per data row (row 1 is the header).
    """
    wb=load_workbook("data.xlsx")
    #sheet=wb["Sheet1"]
    sheet=wb.worksheets[0]# worksheets is a list; take the first sheet
    login_data=[]
    for r in range(2,sheet.max_row+1):
        login_data.append({
            "url":sheet.cell(r,1).value,
            "method":sheet.cell(r,2).value,
            "data":json.loads(sheet.cell(r,3).value),
            "expected":sheet.cell(r,4).value,
            "case_id":sheet.cell(r,7).value,
        })
    wb.close()
    return login_data
def write_result(row,result):
wb = load_workbook("data.xlsx")
# sheet=wb["Sheet1"]
sheet = wb.worksheets[0] # 是一个列表,取第一个
sheet.cell(row,6,result)
wb.save("data.xlsx")
wb.close() | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,436 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0228/lujing.py | # -*- coding:utf-8 -*-
# @Time :2020-03-09 18:29
# @Email :876417305@qq.com
# @Author :yanxia
# @File :lujing.PY
# 文件的路径处理
# Demo: json.dumps serialises a dict payload into a JSON string.
import json
a=dict(memberId=88538,title="借款300万",amount=300000,loanRate=18.0,loanTerm=6,loanDateType=0,repaymemtWay=5,biddingDays=10)
b=json.dumps(a)
print(type(b))
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,437 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0307/http_unittest.py | # -*- coding:utf-8 -*-
# @Time :2020-03-07 21:10
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http_unittest.PY
import unittest
from class_0307.http_request import HttpRequests
from ddt import ddt,data,unpack
@ddt
class TestHttp_Request(unittest.TestCase):
    """Data-driven login tests against the futureloan demo API."""
    # Endpoint under test, shared by every generated case.
    url = "http://test.lemonban.com/futureloan/mvc/api/member/login"
    @data(["get",{"mobilephone": "18688773467", "pwd": "123456"},"登录成功"],
          ["post",{"mobilephone":"","pwd":"123456"},"手机号不能为空"],
          ["get",{"mobilephone":"18688773467","pwd":"126"},"用户名或密码错误"],
          ["post",{"mobilephone":"18688773467","pwd":""},"密码不能为空"])
    @unpack
    def test_login_request(self,method,data,expected):
        """Each row is (http method, payload, expected msg)."""
        res=HttpRequests().http_request(self.url,method,data).json()
        try:
            # NOTE(review): catching AssertionError only to re-raise adds
            # nothing; a bare assertEqual would behave identically.
            self.assertEqual(expected,res["msg"])
        except AssertionError as e:
            raise e
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,438 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0308/learn_ddt.py | # -*- coding:utf-8 -*-
# @Time :2020-03-08 15:24
# @Email :876417305@qq.com
# @Author :yanxia
# @File :learn_ddt.PY
import unittest
from ddt import ddt,data,unpack
@ddt # @ddt decorates a unittest.TestCase subclass
class TestAdd(unittest.TestCase):
    """ddt demo: @data supplies rows, @unpack splits each row into parameters."""
    # Without *, the whole list is one argument -> a single generated test case.
    # With *, the list is splatted: 3 items -> 3 generated test cases.
    # @unpack splits one item into separate parameters, so the test method
    # must accept them individually.
    # @data(*[[0,0,0],[1,1,2],[-2,-1,-1]])
    # @unpack
    # def test_001(self,a,b,expected):
    #     print("a的值是:",a)
    #     print("b的值是:",b)
    #     print("expected的值是:",expected)
    # Dict rows are unpacked by key, so the parameters must be named after the keys:
    # @data(*[{"a":0,"b":0,"expected":0},{"a":1,"b":0,"expected":1},{"a":-10,"b":8,"expected":-2}])
    # @unpack
    # def test_add_zero(self,a,b,expected):
    #     print("a的值是:",a)
    #     print("b的值是:",b)
    #     print("excepted的值是:",expected)
    # Rows of unequal length can be handled with a defaulted parameter (d=None).
    @data(*[[0,0,0,4],[1,1,2]]) # one generated test case per data row
    @unpack
    def test_001(self,a,b,expected,d=None):
        print("a的值是:",a)
        print("b的值是:",b)
        print("expected的值是:",expected)
        print("d的值是:",d)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,439 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0306/test_suite.py | # -*- coding:utf-8 -*-
# @Time :2020-03-07 16:21
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_suite.PY
# Method 1: add parameterized TestAdd cases to a suite one by one.
import unittest
import HTMLTestRunnerNew
from class_0306.learn_unittest import *
# A TestSuite is the container that holds the test cases.
suite=unittest.TestSuite()# create the suite object
suite.addTest(TestAdd(0,0,0,"test_add_zero"))# add one case to the suite
# NOTE(review): TestAdd apparently takes (a, b, expected, method_name) —
# a parameterized TestCase defined in learn_unittest; confirm its signature there.
suite.addTest(TestAdd(1,-1,0,"test_add_zero"))
suite.addTest(TestAdd(99,10,109,"test_add_zero"))
#suite.addTest(TestAdd("test_add_positive_nagative"))
#
# # Run the suite with the plain text runner:
# runner=unittest.TextTestRunner()
# runner.run(suite)
# # Method 2: use a loader to collect cases from a whole module.
# from class_0306 import learn_unittest
# suite=unittest.TestSuite()
# loader=unittest.TestLoader()
# suite.addTest(loader.loadTestsFromModule(learn_unittest))
# Run the suite and write an HTML report.
with open("test.html","wb") as file:
    runner=HTMLTestRunnerNew.HTMLTestRunner(stream=file,
                                            title="自动化报告",
                                            verbosity=2,
                                            description="大家看下这是报告",
                                            tester="艳霞")
    runner.run(suite)
# Method 3: use a loader to collect cases from a test class.
# from class_0306.learn_unittest import *
# suite=unittest.TestSuite()
# loader=unittest.TestLoader()
# suite.addTest(loader.loadTestsFromTestCase(TestAdd))
# runner=unittest.TextTestRunner()
# runner.run(suite)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,440 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310/基础面试题.py | # -*- coding:utf-8 -*-
# @Time :2020-03-10 17:23
# @Email :876417305@qq.com
# @Author :yanxia
# @File :基础面试题.PY
# a=[1,2,3,5,7,"q",3]#去重复数
# def del_repeat(a):
# b=[]
# for i in a:
# if i not in b:
# b.append(i)
# return b
# print(del_repeat (a))
# a=["t","2","6","a"]
# print("".join(a))
# a_str=""
# for i in a:
# a_str+=str(i)
# print(a_str)
# b="asdfghgkjhk"
# list_b=[]
# for i in b:
# list_b.append(i)
# print(list_b)
# #字符串去重,并排序
# a="qwewrtadfqrewqwq"
# s=[]
# for i in a:
# if i not in s:
# s.append(i)
# s.sort()
# print(s)
# a=['3']
# b=a
# c=a[:]
# a.append(10)
# print(a)
# print(b)
# print(c)
class A:
    """Singleton demo: every call to A() yields the same instance.

    Fixes two defects in the original: ``a_intance`` was initialised to a
    list, so the ``is None`` check never passed and A() returned that bare
    list instead of an A instance; and ``super.__new__(cls)`` was missing
    the parentheses on ``super()``.
    """
    a_intance = None  # cached singleton (original attribute spelling kept)

    def __new__(cls):
        # Lazily create the single real instance on first call.
        if cls.a_intance is None:
            cls.a_intance = super().__new__(cls)
        return cls.a_intance

    def __init__(self):
        # Nothing to initialise (runs on every A() call, so keep it idempotent).
        pass
a=A()
# NOTE(review): these three prints all show id(A) -- the id of the *class*
# object, which is trivially identical every time.  To demonstrate the
# singleton they were presumably meant to print id(a) / id(A()); confirm.
print(id(A))
print(id(A))
print(id(A))
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,441 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/common/config.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 21:46
# @Email :876417305@qq.com
# @Author :yanxia
# @File :config.PY
import configparser
from class_0314.common import contants
class ReadConfig:
    """Load the layered configuration for the test project.

    The global file is always read first; its [switch] ``on`` flag then
    decides whether the online or the test environment file is layered on
    top of it.
    """

    def __init__(self):
        self.config = configparser.ConfigParser()
        # Base settings come from the global file.
        self.config.read(contants.global_file)
        # The boolean switch selects which environment file overrides it.
        env_file = (contants.online_file
                    if self.config.getboolean("switch", "on")
                    else contants.test_file)
        self.config.read(env_file)

    def get(self, section, option):
        """Return the raw string stored at [section] option."""
        return self.config.get(section, option)


# Module-level singleton used by the rest of the framework.
config = ReadConfig()
# if __name__ == '__main__':
#     config=ReadConfig()
#     print(config.get("api","pre_url"))
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,442 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/common/do_excel.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 11:58
# @Email :876417305@qq.com
# @Author :yanxia
# @File :do_excel.PY
import openpyxl
from class_0314.common import http_request
from class_0314.common import contants
class Case:
    """Plain value object holding one spreadsheet row of test-case data.

    All fields start as None; DoExcel.get_cases fills in the request fields
    and write_result later records ``actual``/``result``.
    """

    def __init__(self):
        for field in ("case_id", "title", "url", "data", "method",
                      "expected", "actual", "result", "sql"):
            setattr(self, field, None)
class DoExcel:
    """Thin openpyxl wrapper: read Case rows from one sheet, write results back."""

    def __init__(self, file_name, sheet_name):
        self.file_name = file_name
        self.sheet_name = sheet_name
        self.workbook = openpyxl.load_workbook(file_name)
        self.sheet = self.workbook[sheet_name]

    def get_cases(self):
        """Build one Case per data row (row 1 is the header) and return the list."""
        cases = []
        for row in range(2, self.sheet.max_row + 1):
            value = lambda col: self.sheet.cell(row=row, column=col).value
            case = Case()
            case.case_id = value(1)
            case.title = value(2)
            case.url = value(3)
            case.data = value(4)
            case.method = value(5)
            case.expected = value(6)
            case.sql = value(9)  # column 9 carries the optional verification SQL
            cases.append(case)
        self.workbook.close()
        return cases

    def write_result(self, row, actual, result):
        """Write the actual response (col 7) and verdict (col 8) into *row*, then save."""
        sheet = self.workbook[self.sheet_name]
        sheet.cell(row, 7).value = actual
        sheet.cell(row, 8).value = result
        self.workbook.save(filename=self.file_name)
        self.workbook.close()
if __name__ == '__main__':
    # Smoke-run: execute every 'recharge' case against the live service and
    # write PASS/FAIL back into the workbook (requires network + the xlsx).
    do_excel = DoExcel(contants.case_file, sheet_name="recharge")
    cases = do_excel.get_cases()
    # NOTE(review): this rebinds the imported module name `http_request`
    # to an instance -- works here, but shadows the module afterwards.
    http_request = http_request.HttpRequest()
    for case in cases:
        print(case.__dict__)
        resp = http_request.request(case.method, case.url, case.data)
        actual = resp.text
        if case.expected == actual:  # expected vs. actual response text
            do_excel.write_result(case.case_id + 1, actual, "PASS")
        else:
            do_excel.write_result(case.case_id + 1, actual, "FAIL")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,443 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0306/learn_unittest.py | # -*- coding:utf-8 -*-
# @Time :2020-03-06 21:52
# @Email :876417305@qq.com
# @Author :yanxia
# @File :learn_unittest.PY
#单元测试
#类:属性、方法
#属性:类属性、类方法
#方法 类方法 静态方法 对象方法
# 单元测试时做什么呢?对某个功能去做测试,每一个功能都是封装在类里面
# ---类里面有属性和方法,单元测试测试的是方法:创建对象 调用方法 传参---通过传递多组数据来测试不同的情况
# 框架 unittest----pytest 断言:期望结果和实际结果的对比
import unittest
from class_0306.math_method import MathMethod
class TestAdd(unittest.TestCase):
    """Unit tests for MathMethod.add.

    NOTE(review): test_add_zero reads self.a / self.b / self.expected, which
    are never assigned in this class -- they appear intended for external
    (data-driven) injection; confirm before relying on this test.
    """

    def test_add_zero(self):
        print("a的值是:{},b的值是{},expected的值是{}".format(self.a,self.b,self.expected))
        actual = MathMethod().add(self.a, self.b)
        self.assertEqual(self.expected, actual)
        print("test_add_zero")

    def test_add_positive_nagative(self):
        # 1 + (-3) should give -2.
        actual = MathMethod().add(1, -3)
        self.assertEqual(-2, actual)
        print("test_add_positive_nagative")
class TestSub(unittest.TestCase):
    """Unit test for MathMethod.sub."""

    def test_sub(self):
        # 2 - 1 should give 1.
        self.assertEqual(1, MathMethod().sub(2, 1))
        print("执行的减法")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,444 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/common/logger.py | # -*- coding:utf-8 -*-
# @Time :2020-03-17 9:10
# @Email :876417305@qq.com
# @Author :yanxia
# @File :logger.PY
'''日志模块'''
import logging
from class_0314.common import contants
from class_0314.common.config import config
def get_logger(name):
    """Create (or fetch) a logger wired to a console and a file handler.

    Per-handler levels come from the [logger] section of the config file;
    the logger itself stays at DEBUG so the handlers do the filtering.

    Fix: the original unconditionally added two fresh handlers on every
    call, so repeated get_logger() calls duplicated every log line.
    Handlers are now attached only the first time a named logger is built.
    """
    logger = logging.getLogger(name)
    logger.setLevel('DEBUG')
    if not logger.handlers:  # avoid stacking duplicate handlers on reuse
        formatter = logging.Formatter("%(asctime)s-%(name)s-%(levelname)s-日志信息:%(message)s-[%(filename)s:%(lineno)d]")
        # Console output; level driven by config.
        console_handler = logging.StreamHandler()
        console_handler.setLevel(config.get('logger', 'console_level'))
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
        # File output into the shared log directory; level driven by config.
        file_handler = logging.FileHandler(contants.log_dir + '/case.log')
        file_handler.setLevel(config.get('logger', 'file_level'))
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger
# Import-time smoke test: emit one record per level so both handlers can be
# checked (each record only reaches handlers whose level admits it).
logger=get_logger('case')
logger.info('测试开始啦')
logger.error('测试报错啦')
logger.debug('测试数据')
logger.info("测试结束") | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,445 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0313/common/http_request.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 11:58
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http_request.PY
import requests
from class_0313.common.config import config
class HttpRequest:
    """Stateless helper: issues a single HTTP request per call."""

    def request(self, method, url, data=None, json=None, cookies=None):
        """Send *method* to *url* and return the requests.Response.

        ``data`` may arrive as the string form of a dict (straight from the
        Excel sheet) and is converted before sending; ``json``, when given,
        wins over ``data`` for POST bodies.  Returns None for unsupported
        methods (fix: the original fell through to ``return res`` with the
        name unbound, raising UnboundLocalError).
        """
        if type(data) == str:
            # SECURITY: eval on spreadsheet text executes arbitrary code --
            # kept for compatibility; ast.literal_eval would be safer.
            data = eval(data)
        res = None
        if method.lower() == "get":
            res = requests.get(url, params=data, cookies=cookies)
        elif method.lower() == "post":
            if json:
                res = requests.post(url, json=json, cookies=cookies)
            else:
                res = requests.post(url, data=data, cookies=cookies)
        else:
            print("un_support method")
        return res
# 第二种方法,不需要传cookies的写法,利用session这个对象,会自动传递cookies
class HttpRequest2:
    """Session-based helper: the shared session carries cookies across calls,
    so login state is kept automatically."""

    def __init__(self):
        self.session = requests.sessions.session()

    def request(self, method, url, data=None, json=None):
        """Issue one request via the shared session; None for unknown methods."""
        if type(data) == str:
            data = eval(data)
        # Every request targets the environment host taken from the config file.
        url = config.get('api', 'pre_url') + url
        verb = method.lower()
        if verb == "get":
            return self.session.request(method=method, url=url, params=data)
        if verb == "post":
            if json:
                return self.session.request(method=method, url=url, json=json)
            return self.session.request(method=method, url=url, data=data)
        print("un_support method")
        return None

    def close(self):
        """Release the pooled connections held by the session."""
        self.session.close()
if __name__ == '__main__':
    # Ad-hoc smoke test against the real service (network required).
    http_request = HttpRequest2()
    params = {"mobilephone":"18298461920","pwd":123456}
    resp = http_request.request("GET", "/member/login", data=params)
    print(resp.text)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,446 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/common/do_mysql.py | # -*- coding:utf-8 -*-
# @Time :2020-03-14 17:31
# @Email :876417305@qq.com
# @Author :yanxia
# @File :do_mysql.PY
import pymysql
from class_0314.common.config import config
class DoMysql:
    """Small pymysql facade for the verification queries used by the tests."""

    def __init__(self):
        # Connection settings live in the [db] section of the config file.
        self.mysql = pymysql.connect(
            host=config.get("db", "host"),
            user=config.get("db", "user"),
            password=config.get("db", "password"),
            port=int(config.get("db", "port")),
        )
        # Dict cursor: rows come back as {column: value} mappings.
        self.cursor = self.mysql.cursor(pymysql.cursors.DictCursor)

    def fetch_one(self, sql):
        """Run *sql*, commit, and return the first row (or None)."""
        self.cursor.execute(sql)
        self.mysql.commit()
        return self.cursor.fetchone()

    def fetch_all(self, sql):
        """Run *sql* and return every row."""
        self.cursor.execute(sql)
        return self.cursor.fetchall()

    def fetch_many(self, sql):
        """Run *sql* and return the cursor's default batch of rows."""
        self.cursor.execute(sql)
        return self.cursor.fetchmany()

    def close(self):
        """Close the cursor first, then the connection."""
        self.cursor.close()
        self.mysql.close()
if __name__ == '__main__':
    # Ad-hoc query against the real database (credentials from config).
    mysql=DoMysql()
    #result=mysql.fetch_one('select max(mobilephone) from future.member')
    result=mysql.fetch_one('select * from future.loan where MemberID=88538 order by id desc limit 1')
    print(result)
mysql.close() | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,447 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/testcase/test_login.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 15:05
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_login.PY
import unittest
from class_0314.common import do_excel
from class_0314.common import contants
from class_0314.common.http_request import HttpRequest2
from ddt import ddt, data
from class_0314.common import logger
logger=logger.get_logger(__name__)#__name__意思是把我在logger里面设置的case名字给这个用例用
@ddt
class LoginTest(unittest.TestCase):
    """Data-driven login tests: one test per row of the 'login' sheet."""

    # Cases load once at class-definition time and feed the @data decorator.
    excel = do_excel.DoExcel(contants.case_file, "login")
    cases = excel.get_cases()

    @classmethod
    def setUpClass(cls):
        logger.info("准备测试前置")
        cls.http_request = HttpRequest2()  # shared session keeps login cookies

    @data(*cases)
    def test_login(self, case):
        logger.info("测试的title:{0}".format(case.title))
        resp = self.http_request.request(case.method, case.url, case.data)
        try:
            self.assertEqual(case.expected, resp.text)
            self.excel.write_result(case.case_id + 1, resp.text, "PASS")
        except AssertionError as e:
            # Consistency fix: record "FAIL" (original wrote "Fail") so the
            # result column matches the PASS/FAIL convention used elsewhere.
            self.excel.write_result(case.case_id + 1, resp.text, "FAIL")
            logger.error("测试报错了:{0}".format(e))
            raise e
        logger.info('结束测试:{0}'.format(case.title))

    @classmethod
    def tearDownClass(cls):
        logger.info('测试结束后置处理')
        cls.http_request.close()
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,448 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0314/testcase/test_recharge.py | # -*- coding:utf-8 -*-
# @Time :2020-03-13 16:33
# @Email :876417305@qq.com
# @Author :yanxia
# @File :test_recharge.PY
import unittest
from class_0314.common.http_request import HttpRequest2
from class_0314.common import do_excel
from class_0314.common import contants
from ddt import ddt, data
from class_0314.common.do_mysql import DoMysql
@ddt
class RechargeTest(unittest.TestCase):
    """Data-driven recharge tests with a before/after balance check in MySQL."""

    excel = do_excel.DoExcel(contants.case_file, "recharge")  # case source sheet
    cases = excel.get_cases()

    @classmethod
    def setUpClass(cls):
        cls.http_request = HttpRequest2()  # shared session (keeps login cookies)
        cls.mysql = DoMysql()

    @data(*cases)
    def test_recharge(self, case):
        print(case.title)
        # When the case carries SQL, capture the balance before recharging.
        # case.sql holds the string form of a dict (so several statements
        # could be stored); only 'sql1' is used here.
        before = None
        if case.sql is not None:
            sql = eval(case.sql)['sql1']
            member = self.mysql.fetch_one(sql)
            print(member['LeaveAmount'])
            before = member['LeaveAmount']
        resp = self.http_request.request(case.method, case.url, case.data)
        actual_code = resp.json()['code']
        try:
            self.assertEqual(str(case.expected), actual_code)
            self.excel.write_result(case.case_id + 1, resp.text, "PASS")
            # On success, re-query and assert the balance grew by the amount.
            if case.sql is not None:
                sql = eval(case.sql)['sql1']
                member = self.mysql.fetch_one(sql)
                print(member['LeaveAmount'])
                after = member['LeaveAmount']
                recharge_amount = int(eval(case.data)['amount'])
                self.assertEqual(before + recharge_amount, after)
        except AssertionError as e:
            # Bug fix: the original wrote the typo "FILE"; record "FAIL".
            self.excel.write_result(case.case_id + 1, resp.text, "FAIL")
            raise e

    @classmethod
    def tearDownClass(cls):
        cls.http_request.close()  # release the pooled session
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,449 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/study_request.py | # -*- coding:utf-8 -*-
# @Time :2020-03-10 21:52
# @Email :876417305@qq.com
# @Author :yanxia
# @File :study_request.PY
import requests
'''
1、构造请求:请求方式、请求地址、请求参数
2、发起请求
3、返回响应
4、判断响应码,响应体'''
# Register endpoint (GET with query-string parameters). Live network calls.
params={"mobilephone":"15810447833","pwd":123456}
resp=requests.get("http://test.lemonban.com/futureloan/mvc/api/member/register",params=params)
print(resp.text)
# Login endpoint (POST form data); the response carries the session cookies.
params={"mobilephone":"15810447878","pwd":123456}
resp=requests.post("http://test.lemonban.com/futureloan/mvc/api/member/login",data=params)
print(resp.text)
print(resp.cookies)
# Recharge endpoint: must forward the login cookies to stay authenticated.
params={"mobilephone":"15810447878","amount":111}
resp=requests.post("http://test.lemonban.com/futureloan/mvc/api/member/recharge",
                   data=params,cookies=resp.cookies)
print(resp.text)
print(resp.cookies)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,450 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0228/class_01.py | # -*- coding:utf-8 -*-
# @Time :2020-02-28 20:51
# @Email :876417305@qq.com
# @Author :yanxia
# @File :class_01.PY
class BoyFriend:
    """Toy class demonstrating class attributes and instance methods."""

    sex = 'boy'    # class attribute shared by all instances
    height = 180   # class attribute

    def coding(self, language='python'):
        """Print which language he writes (defaults to python)."""
        print("会写{}代码,并且写得很好".format(language))

    def cooking(self, *args):
        """Print the dishes he can cook; each dish name is followed by '、'."""
        dishes = "".join(dish + "、" for dish in args)
        print("会做饭,会做{}".format(dishes))

    def paly_basketball(self):  # NOTE(review): method-name typo kept -- callers use it
        """Return his favourite sport as a string."""
        return "最喜欢打篮球"
# Quick demo: read the class attributes and exercise each method.
t=BoyFriend()  # create one instance
print(t.height)
print(t.sex)
t.cooking('西红柿炒蛋','酸菜鱼')
t.coding()  # uses the default language ('python')
print(t.paly_basketball())
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,451 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0310shizhan/re_study.py | # -*- coding:utf-8 -*-
# @Time :2020-03-16 8:51
# @Email :876417305@qq.com
# @Author :yanxia
# @File :re_study.PY
import re #解析正则表达式 查找 替换
from class_0313.common.config import config
# 正则表达式学习
# Regex walkthrough: replace every #key# placeholder in the test data with
# the value stored under that key in the config file's [case] section.
data = '{"mobilephone":"#normal_user#","pwd":"#normal_pwd#"}'
# Patterns are built from literal characters plus metacharacters.
p = "#(.*?)#"  # non-greedy: capture the shortest text between a pair of '#'
# m=re.search(p,data)#从任意位置找,找到就返回Match object 找不到就返回None
# #print(m.group(0))#返回表达式和租里面的内容match='#normal_user#'>打印是#normal_user#
# print(m.group(1))#只返回指定组的内容
# g=m.group(1)#获取到参数化的key
# v=config.get('case',g)#根据key去配置文件里面的值
# print(v)
# data_new=re.sub(p,v,data,count=1) #查找替换 count代表查找替换的次数
# print(data_new)
#ms=re.findall(p,data)#查找全部 返回列表
#print(m)
#print(ms)
# To match and replace several placeholders, loop until none remain.
while re.search(p,data):
    print(data)
    m = re.search(p, data)
    g = m.group(1)  # the parameter key between the '#' markers
    v = config.get('case', g)  # look the key up in the config file
    print(v)
    data=re.sub(p,v,data,count=1)
print("最后的data",data)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,452 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0228/class_03.py | # -*- coding:utf-8 -*-
# @Time :2020-02-29 11:09
# @Email :876417305@qq.com
# @Author :yanxia
# @File :class_03.PY
# 继承 拓展 重写
# 新加了支付功能
from class_0228.class_02 import Phone
class Phone_1(Phone):
    """Subclass of Phone demonstrating overriding and extension.

    Inherits everything else (init, call, ...) from Phone in class_02.
    """

    def phone_info(self):
        """Override: replaces the parent's phone_info (effective only here)."""
        print("这是一款智能手机")

    @classmethod
    def pay(cls):
        """Extension: new behaviour the parent class does not provide."""
        print("可以支付")
# 子类可以拥有父类里面的所有属性所有方法--就可以直接调用
if __name__ == '__main__':  # runs only when executed directly, not on import
    # NOTE(review): Phone's __init__/call come from class_02 (not shown here);
    # the 4-argument constructor below assumes its signature -- confirm there.
    Phone_1.call("18989013321")
    t=Phone_1("red","vivo",500,"5.0")
    t.call("19989013321")
    t.pay()  # whether arguments are needed depends on the inherited parent init:
    Phone_1.pay()  # if the parent defines __init__, instances must be built with args
    t.phone_info()
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,453 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0306/math_method.py | # -*- coding:utf-8 -*-
# @Time :2020-03-07 15:39
# @Email :876417305@qq.com
# @Author :yanxia
# @File :math_method.PY
class MathMethod:
    """Tiny arithmetic helper used as the system-under-test in the unittest demos."""

    def add(self, a, b):
        """Return the sum of a and b."""
        return a + b

    def sub(self, a, b):
        """Return a minus b."""
        return a - b
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,454 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0228/classhome_work.py | # -*- coding:utf-8 -*-
# @Time :2020-02-28 22:29
# @Email :876417305@qq.com
# @Author :yanxia
# @File :home.PY
class User:
    """Homework demo: class attributes plus two printing methods.

    NOTE(review): 'frist_name' and 'heigh' are misspelled but deliberately
    kept -- describe_user and external callers reference these exact names.
    """

    frist_name = "yanxia"
    last_name = "wang"
    sex = "女"
    heigh = 162

    def describe_user(self):
        """Print a one-line self-introduction built from the class attributes."""
        intro = "我叫{}{},性别{},身高{}".format(
            self.last_name, self.frist_name, self.sex, self.heigh)
        print(intro)

    def greet_user(self, name, information):
        """Print *name* immediately followed by *information*."""
        print("{}{}".format(name, information))
if __name__ == '__main__':
    # Demo run: introduce the default user, then greet.
    t=User()
    t.describe_user()
    t.greet_user("hello","欢迎来到python的世界")
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,455 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0312/http_request.py | # -*- coding:utf-8 -*-
# @Time :2020-03-12 9:59
# @Email :876417305@qq.com
# @Author :yanxia
# @File :http_request.PY
import requests
class HttpRequest:
    """
    Issue a single HTTP request per call and return the response object.
    """

    def request(self, method, url, data=None, json=None, cookies=None):
        '''
        :param method: HTTP method ("get" or "post", case-insensitive)
        :param url: request URL
        :param data: request parameters; may be the string form of a dict
        :param json: JSON body (POST only; takes precedence over data)
        :param cookies: cookies to send with the request
        :return: a requests.Response, or None for unsupported methods
        '''
        if type(data) == str:
            # SECURITY: eval on externally supplied text runs arbitrary code;
            # kept for compatibility -- prefer ast.literal_eval.
            data = eval(data)
        resp = None  # bug fix: the original raised UnboundLocalError on return
        if method.lower() == "get":
            resp = requests.get(url, params=data, cookies=cookies)
        elif method.lower() == "post":
            if json:  # a non-empty json body wins over form data
                resp = requests.post(url, json=json, cookies=cookies)
            else:
                # Bug fix: the original passed params=data, which put the POST
                # payload on the query string instead of the request body (the
                # class_0313 version of this helper correctly uses data=data).
                resp = requests.post(url, data=data, cookies=cookies)
        else:
            print("UN-support method")
        return resp
# 第二种方法,不需要传cookies的写法,利用session这个对象,会自动传递cookies
class HttpRequest2:
    """Session-based helper: cookies persist across calls automatically."""

    def __init__(self):
        self.session = requests.sessions.session()

    def request(self, method, url, data=None, json=None):
        """Send one request through the shared session.

        Returns the requests.Response, or None for unsupported methods
        (bug fix: the original hit UnboundLocalError in that branch).
        """
        resp = None
        if method.lower() == "get":
            # Bug fix: GET parameters belong on the query string, not in the
            # body; the refined class_0313 version of this class uses params=.
            resp = self.session.request(method=method, url=url, params=data)
        elif method.lower() == "post":
            if json:
                resp = self.session.request(method=method, url=url, json=json)
            else:
                resp = self.session.request(method=method, url=url, data=data)
        else:
            print("UN-support method")
        return resp

    def close(self):
        """Close the session -- important to release pooled connections."""
        self.session.close()
if __name__ == '__main__':
    # Ad-hoc demo against the live service (network required).
    http_request=HttpRequest()
    # Call login; keep the returned cookies for the next request.
    params = {"mobilephone":"15810447878","pwd": 123456}
    res=http_request.request("post","http://test.lemonban.com/futureloan/mvc/api/member/login",data=params)
    print(res.json())
    # Call recharge, forwarding the login cookies.
    params = {"mobilephone":"15810447878","amount": 111}
    res = http_request.request("post","http://test.lemonban.com/futureloan/mvc/api/member/recharge",data=params,cookies=res.cookies)
    print(res.json())
    print(res.request.method)
    # http_request2=HttpRequest2()
    # params = {"mobilephone": "15810447878", "pwd": 123456}
    # res=http_request2.request("post","http://test.lemonban.com/futureloan/mvc/api/member/login",data=params)
    # params={"mobilephone":"15810447878","amount": 111}
    # res=http_request2.request("post","http://test.lemonban.com/futureloan/mvc/api/member/recharge",data=params)
    # http_request2.close()
    # print(res.json())
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,456 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0303/learn_requests.py | # -*- coding:utf-8 -*-
# @Time :2020-03-03 21:37
# @Email :876417305@qq.com
# @Author :yanxia
# @File :learn_requests.PY
# 安装 pip install requests
# 作用是什么:发送http请求。常见的有get,post,delete,put等
# 为什么学习它?http协议的接口
# request 客户端---服务端的请求 包含:请求头 请求地址 请求参数 http协议版本
# response 服务端对客户端的一个请求响应,包含:响应头 响应报文 状态码
import requests
url="http://www.lemfix.com/topics/1015"
# requests simulates a client sending a request to the server.
# Send a GET request (live network call).
res=requests.get(url)
#requests.post(url)
print("状态码",res.status_code)
print("响应头",res.headers)
print(res.text) | {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,457 | wangyanxia-626/pythpn_15 | refs/heads/master | /class_0303/learn_openpy_2.py | # -*- coding:utf-8 -*-
# @Time :2020-03-04 17:27
# @Email :876417305@qq.com
# @Author :yanxia
# @File :learn_openpy_2.PY
from openpyxl import load_workbook
# Open the workbook and pick one sheet (file must exist next to this script).
wb=load_workbook("py15.xlsx")
sheet=wb["Sheet1"]
# Sheet dimensions used to drive the nested loops below.
print(sheet.max_row)#max row index
print(sheet.max_column)#max column index
# Walk every cell (1-based indices) and print the non-empty values.
for i in range(1,sheet.max_row+1):
    for j in range(1,sheet.max_column+1):
        if sheet.cell(i,j).value: # only truthy (non-empty) cells are printed
            res=(sheet.cell(i,j).value)
            print(res)
| {"/class_0307/http_unittest2.py": ["/class_0307/http_xlsx.py"], "/class_0313/testcase/test_login.py": ["/class_0313/common/http_request.py"], "/class_0307/http_suit.py": ["/class_0307/http_unittest.py", "/class_0307/__init__.py"], "/class_0313/testcase/test_register.py": ["/class_0313/common/http_request.py"], "/class_0314/testcase/test_addproject.py": ["/class_0314/common/config.py"], "/class_0306/test_suite.py": ["/class_0306/learn_unittest.py"], "/class_0306/learn_unittest.py": ["/class_0306/math_method.py"], "/class_0314/common/logger.py": ["/class_0314/common/config.py"], "/class_0314/common/do_mysql.py": ["/class_0314/common/config.py"], "/class_0314/testcase/test_recharge.py": ["/class_0314/common/do_mysql.py"], "/class_0228/class_03.py": ["/class_0228/class_02.py"]} |
45,478 | gillie1022/mysite | refs/heads/master | /mysite/views.py | from django.shortcuts import render, redirect, get_object_or_404
from .models import Project, Technology
def homepage(request):
    """Render the site landing page."""
    return render(request, 'mysite/index.html')
def resumé(request):
    """Render the static resumé page."""
    return render(request, 'mysite/resumé.html')
def list_projects(request):
    """List all portfolio projects ordered alphabetically by title."""
    projects = Project.objects.all().order_by('title')
    return render(request, 'mysite/list_projects.html', {'projects': projects})
def project_detail(request, project_pk):
    """Show a single project; 404 when the pk does not exist."""
    project = get_object_or_404(Project.objects.all(), pk=project_pk)
    return render(request, 'mysite/project_detail.html', {'project': project})
def projects_by_technology(request, technology):
    """List projects tagged with the named technology; 404 when unknown.

    NOTE(review): `__contains` on the M2M name does a substring match, so
    e.g. "Java" would also match "JavaScript" -- confirm that is intended.
    """
    technology = get_object_or_404(Technology.objects.all(), name=technology)
    projects = Project.objects.filter(technologies__name__contains=technology)
    return render(request, 'mysite/projects_by_technology.html', {'projects': projects, 'technology': technology})
def calculator(request):
    """Static demo page for the calculator project."""
    return render(request, 'mysite/projects/calculator.html')
def dog_adoption(request):
    """Static demo page for the dog adoption project."""
    return render(request, 'mysite/projects/dog_adoption.html')
def the_squarrre(request):
    """Static demo page for The Squarrre project."""
    return render(request, 'mysite/projects/the_squarrre.html')
def form_validation(request):
    """Static demo page for the form validation project."""
    return render(request, 'mysite/projects/form_validation.html')
45,479 | gillie1022/mysite | refs/heads/master | /mysite/migrations/0003_auto_20200828_1517.py | # Generated by Django 3.1 on 2020-08-28 15:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make Project.livesite optional. Do not edit by hand."""

    dependencies = [
        ('mysite', '0002_technology'),
    ]
    operations = [
        migrations.AlterField(
            model_name='project',
            name='livesite',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
    ]
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,480 | gillie1022/mysite | refs/heads/master | /mysite/migrations/0009_project_outside_livesite.py | # Generated by Django 3.1.1 on 2020-09-14 14:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add optional Project.outside_livesite. Do not edit by hand."""

    dependencies = [
        ('mysite', '0008_auto_20200909_1417'),
    ]
    operations = [
        migrations.AddField(
            model_name='project',
            name='outside_livesite',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
    ]
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,481 | gillie1022/mysite | refs/heads/master | /blog/views.py | from django.shortcuts import render, redirect, get_object_or_404
def homepage(request):
    """Render the site landing page."""
    return render(request, "mysite/index.html")
def list_posts(request):
    """List every blog post.

    BUG FIX: the queryset was bound to ``post`` while the template context
    referenced the undefined name ``posts`` (NameError at request time).
    ``Post`` was also never imported in this module.
    """
    # NOTE(review): importing locally from the app's models because this
    # module has no top-level Post import -- confirm blog/models.py defines it.
    from .models import Post
    posts = Post.objects.all()
    return render(request, "mysite/list_posts.html", {"posts": posts})
def post_detail(request, post_pk):
    """Show a single post; 404 when the pk does not exist."""
    from .models import Post
    post = get_object_or_404(Post.objects.all(), pk=post_pk)
    return render(request, "mysite/post_detail.html", {"post": post})
def post_by_category(request, category):
    """List posts whose category name contains *category*, newest first."""
    from .models import Post
    posts = Post.objects.filter(categories__name__contains=category).order_by(
        "-created_on"
    )
    return render(request, "mysite/posts_by_category.html", {"category": category, "posts": posts})
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,482 | gillie1022/mysite | refs/heads/master | /mysite/admin.py | from django.contrib import admin
from .models import Project, Technology
# Expose the portfolio models in the Django admin with default ModelAdmins.
admin.site.register(Project)
admin.site.register(Technology)
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,483 | gillie1022/mysite | refs/heads/master | /mysite/models.py | from django.db import models
class Technology(models.Model):
    """A technology/tool tag that portfolio projects can be filtered by."""
    # display name; matched by projects_by_technology() and shown in templates
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
class Project(models.Model):
    """A portfolio project with links to its code, live demo and tech tags."""
    title = models.CharField(max_length=100)
    description = models.TextField()
    # URL/path of a preview image; optional
    image = models.CharField(max_length=250, null=True, blank=True)
    # GitHub repository URL
    github = models.CharField(max_length=250)
    # internal live-demo URL name/path; optional
    livesite = models.CharField(max_length=250, null=True, blank=True)
    # externally-hosted live site URL; optional
    outside_livesite = models.CharField(max_length=250, null=True, blank=True)
    technologies = models.ManyToManyField('Technology', related_name='projects')
    def __str__(self):
        return self.title
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,484 | gillie1022/mysite | refs/heads/master | /project/urls.py | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf import settings
from django.urls import include, path
from mysite import views as mysite_views
# Route table for the portfolio site. Order matters: the literal
# 'projects/<name>' demo routes below never match because the
# '<technology>' converter above them captures any single path segment --
# NOTE(review): confirm whether that shadowing is intentional.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', mysite_views.homepage, name='homepage'),
    path('resumé/', mysite_views.resumé, name='resumé'),
    path('projects/', mysite_views.list_projects, name='project_list'),
    path('projects/<int:project_pk>/', mysite_views.project_detail, name='project_detail'),
    path('projects/<technology>/', mysite_views.projects_by_technology, name='projects_by_technology'),
    path('projects/calculator', mysite_views.calculator, name='calculator'),
    path('projects/dog_adoption', mysite_views.dog_adoption, name='dog_adoption'),
    path('projects/the_squarrre', mysite_views.the_squarrre, name='the_squarrre'),
    path('projects/form_validation', mysite_views.form_validation, name='form_validation'),
]
# Development-only: mount django-debug-toolbar in front of the app URLs.
if settings.DEBUG:
    import debug_toolbar
    urlpatterns = [
        path('__debug__/', include(debug_toolbar.urls)),
        # For django versions before 2.0:
        # url(r'^__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,485 | gillie1022/mysite | refs/heads/master | /mysite/migrations/0004_auto_20200831_1818.py | # Generated by Django 3.1 on 2020-08-31 18:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: replace Technology.project FK with a
    Project.technologies M2M. Do not edit by hand."""

    dependencies = [
        ('mysite', '0003_auto_20200828_1517'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='technology',
            name='project',
        ),
        migrations.AddField(
            model_name='project',
            name='technologies',
            field=models.ManyToManyField(related_name='projects', to='mysite.Technology'),
        ),
    ]
| {"/mysite/views.py": ["/mysite/models.py"], "/mysite/admin.py": ["/mysite/models.py"]} |
45,486 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/config.py | import logging
import os.path
import ConfigParser
import commands
from ceph_deploy import exc
from ceph_deploy import conf
from ceph_deploy.cliutil import priority
from ceph_deploy import hosts
LOG = logging.getLogger(__name__)
def config_push(args):
    """Push the local cluster conf to every host in ``args.client``.

    Raises exc.GenericError when one or more hosts could not be written.
    On full success, starts any OSDs recorded by verify_conf() in
    /root/osds and removes that marker file.
    """
    conf_data = conf.ceph.load_raw(args)
    errors = 0
    for hostname in args.client:
        LOG.debug('Pushing config to %s', hostname)
        try:
            distro = hosts.get(hostname, username=args.username)
            distro.conn.remote_module.write_conf(
                args.cluster,
                conf_data,
                args.overwrite_conf,
            )
            distro.conn.exit()
        except RuntimeError as e:
            LOG.error(e)
            errors += 1
    if errors:
        raise exc.GenericError('Failed to config %d hosts' % errors)
    # Start OSDs that were synced during the verify step.
    # BUG FIX: the start command and os.unlink() previously ran even when
    # /root/osds did not exist, crashing on `[].strip` / a missing file.
    if os.path.exists('/root/osds'):
        with open('/root/osds', 'r') as f:
            osds = f.readline()
        cmd = '/etc/init.d/ceph -a -c /etc/ceph/ceph.conf start %s' % osds.strip('\n')
        LOG.debug("excute: %s", cmd)
        (ret, msg) = commands.getstatusoutput(cmd)
        os.unlink('/root/osds')
def merge_conf(remote_conf, local_conf):
    """Copy sections that exist only in *remote_conf* into *local_conf*.

    Sections already present locally are left untouched (their options are
    NOT merged); only whole sections missing from the local file are
    copied. The local file is rewritten in place.

    BUG FIX: the output handle was opened without a context manager, so it
    leaked when ConfigParser.write() raised.
    """
    remote_hd = ConfigParser.ConfigParser()
    remote_hd.read(remote_conf)
    local_hd = ConfigParser.ConfigParser()
    local_hd.read(local_conf)
    local_sections = set(local_hd.sections())
    for section in remote_hd.sections():
        if section in local_sections:
            continue
        local_hd.add_section(section)
        for option, value in remote_hd.items(section):
            local_hd.set(section, option, value)
    with open(local_conf, 'w') as data:
        local_hd.write(data)
    return
def verify_conf(conn):
    """Merge OSD sections from the backup conf pulled off a remote host.

    Reads /root/ceph.conf.1 via *conn*, copies any sections missing from
    the local /etc/ceph/ceph.conf into it, and records the copied section
    names in /root/osds so config_push() can start those OSDs later.

    BUG FIX: the marker file was opened with the invalid mode 'wa'
    (rejected by open()); file handles are now closed via context
    managers and the local conf is written once instead of per-section.
    """
    osd_bfile = '/root/ceph.conf.1'
    local_file = '/etc/ceph/ceph.conf'
    if not conn.remote_module.exist_file(osd_bfile):
        return
    osd_bfile_content = conn.remote_module.get_file(osd_bfile)
    # pull remote file to local, named ceph.conf.1
    with open(osd_bfile, 'wb') as f:
        f.write(osd_bfile_content)
    local_file_hd = ConfigParser.ConfigParser()
    local_file_hd.read(local_file)
    local_file_secs = local_file_hd.sections()
    osd_bfile_hd = ConfigParser.ConfigParser()
    osd_bfile_hd.read(osd_bfile)
    diff_secs = [sec for sec in osd_bfile_hd.sections() if sec not in local_file_secs]
    if diff_secs:
        LOG.debug("Start Verify ConfigFile")
        with open('/root/osds', 'w') as f:
            for secs in diff_secs:
                f.write(secs + ' ')
                if not local_file_hd.has_section(secs):
                    local_file_hd.add_section(secs)
                for item in osd_bfile_hd.items(secs):
                    local_file_hd.set(secs, item[0], item[1])
        with open(local_file, 'w') as data:
            local_file_hd.write(data)
    return
def config_pull(args):
    """Pull /etc/ceph/{cluster}.conf from the first host that has one.

    Writes it to {cluster}.conf.tmp locally, merges missing sections, and
    returns on the first success. Raises exc.GenericError when no host
    yielded a config.

    BUG FIXES: errors was incremented in a ``finally`` (counting successes
    too) and the GenericError was raised unconditionally, even after a
    successful pull; the mismatch path used a bare ``raise`` with no
    active exception; the connection was not closed before returning.
    """
    topath = '{cluster}.conf.tmp'.format(cluster=args.cluster)
    frompath = '/etc/ceph/{cluster}.conf'.format(cluster=args.cluster)
    errors = 0
    for hostname in args.client:
        try:
            LOG.debug('Checking %s for %s', hostname, frompath)
            distro = hosts.get(hostname, username=args.username)
            conf_file_contents = distro.conn.remote_module.get_file(frompath)
            if conf_file_contents is not None:
                LOG.debug('Got %s from %s', frompath, hostname)
                if os.path.exists(topath):
                    with open(topath, 'rb') as f:
                        existing = f.read()
                    if existing != conf_file_contents and not args.overwrite_conf:
                        LOG.error('local config file %s exists with different content; use --overwrite-conf to overwrite' % topath)
                        raise RuntimeError('conflicting local config file')
                with open(topath, 'wb') as f:
                    f.write(conf_file_contents)
                merge_conf(topath, frompath)
                distro.conn.exit()
                return
            distro.conn.exit()
            LOG.debug('Empty or missing %s on %s', frompath, hostname)
            errors += 1
        except Exception:
            LOG.error('Unable to pull %s from %s', frompath, hostname)
            errors += 1
    raise exc.GenericError('Failed to fetch config from %d hosts' % errors)
def config(args):
    """Dispatch the ``config`` command to its push/pull implementation."""
    handlers = {
        'push': config_push,
        'pull': config_pull,
    }
    handler = handlers.get(args.subcommand)
    if handler is None:
        LOG.error('subcommand %s not implemented', args.subcommand)
    else:
        handler(args)
@priority(70)
def make(parser):
    """
    Copy ceph.conf to/from remote host(s)
    """
    # Two sub-commands (push/pull); the chosen one lands in args.subcommand
    # and args.func is bound to config() for the main CLI driver to call.
    config_parser = parser.add_subparsers(dest='subcommand')
    config_parser.required = True
    config_push = config_parser.add_parser(
        'push',
        help='push Ceph config file to one or more remote hosts'
    )
    config_push.add_argument(
        'client',
        metavar='HOST',
        nargs='+',
        help='host(s) to push the config file to',
    )
    config_pull = config_parser.add_parser(
        'pull',
        help='pull Ceph config file from one or more remote hosts'
    )
    config_pull.add_argument(
        'client',
        metavar='HOST',
        nargs='+',
        help='host(s) to pull the config file from',
    )
    parser.set_defaults(
        func=config,
    )
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,487 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/lib/vendor/remoto/lib/vendor/execnet/script/xx.py | import rlcompleter2
rlcompleter2.setup()
import register, sys
try:
hostport = sys.argv[1]
except:
hostport = ':8888'
gw = register.ServerGateway(hostport)
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,488 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/tests/parser/test_disk.py | import pytest
from ceph_deploy.cli import get_parser
from ceph_deploy.tests.util import assert_too_few_arguments
# `ceph-deploy disk` sub-commands that require at least one HOST argument
SUBCMDS_WITH_ARGS = ['list', 'prepare', 'activate', 'zap']
class TestParserDisk(object):
    """Unit tests for the argparse wiring of `ceph-deploy disk`.

    Each test builds a fresh parser; tests that expect argparse to bail
    assert on SystemExit plus the captured usage/error text.
    """
    def setup(self):
        # fresh parser before every test method (pytest-style setup)
        self.parser = get_parser()
    def test_disk_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy disk' in out
        assert 'positional arguments:' in out
        assert 'optional arguments:' in out
    @pytest.mark.parametrize('cmd', SUBCMDS_WITH_ARGS)
    def test_disk_valid_subcommands_with_args(self, cmd):
        self.parser.parse_args(['disk'] + ['%s' % cmd] + ['host1'])
    def test_disk_invalid_subcommand(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk bork'.split())
        out, err = capsys.readouterr()
        assert 'invalid choice' in err
    # ---- disk list ------------------------------------------------------
    def test_disk_list_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk list --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy disk list' in out
    def test_disk_list_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk list'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_disk_list_single_host(self):
        args = self.parser.parse_args('disk list host1'.split())
        assert args.disk[0][0] == 'host1'
    def test_disk_list_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('disk list'.split() + hostnames)
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
    # ---- disk prepare ---------------------------------------------------
    def test_disk_prepare_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk prepare --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy disk prepare' in out
    def test_disk_prepare_zap_default_false(self):
        args = self.parser.parse_args('disk prepare host1:sdb'.split())
        assert args.zap_disk is False
    def test_disk_prepare_zap_true(self):
        args = self.parser.parse_args('disk prepare --zap-disk host1:sdb'.split())
        assert args.zap_disk is True
    def test_disk_prepare_fstype_default_xfs(self):
        args = self.parser.parse_args('disk prepare host1:sdb'.split())
        assert args.fs_type == "xfs"
    def test_disk_prepare_fstype_btrfs(self):
        args = self.parser.parse_args('disk prepare --fs-type btrfs host1:sdb'.split())
        assert args.fs_type == "btrfs"
    def test_disk_prepare_fstype_invalid(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk prepare --fs-type bork host1:sdb'.split())
        out, err = capsys.readouterr()
        assert 'invalid choice' in err
    def test_disk_prepare_dmcrypt_default_false(self):
        args = self.parser.parse_args('disk prepare host1:sdb'.split())
        assert args.dmcrypt is False
    def test_disk_prepare_dmcrypt_true(self):
        args = self.parser.parse_args('disk prepare --dmcrypt host1:sdb'.split())
        assert args.dmcrypt is True
    def test_disk_prepare_dmcrypt_key_dir_default(self):
        args = self.parser.parse_args('disk prepare host1:sdb'.split())
        assert args.dmcrypt_key_dir == "/etc/ceph/dmcrypt-keys"
    def test_disk_prepare_dmcrypt_key_dir_custom(self):
        args = self.parser.parse_args('disk prepare --dmcrypt --dmcrypt-key-dir /tmp/keys host1:sdb'.split())
        assert args.dmcrypt_key_dir == "/tmp/keys"
    def test_disk_prepare_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk prepare'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_disk_prepare_single_host(self):
        args = self.parser.parse_args('disk prepare host1:sdb'.split())
        assert args.disk[0][0] == 'host1'
    def test_disk_prepare_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('disk prepare'.split() + [x + ":sdb" for x in hostnames])
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
    # ---- disk activate --------------------------------------------------
    def test_disk_activate_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk activate --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy disk activate' in out
    def test_disk_activate_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk activate'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_disk_activate_single_host(self):
        args = self.parser.parse_args('disk activate host1:sdb1'.split())
        assert args.disk[0][0] == 'host1'
    def test_disk_activate_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('disk activate'.split() + [x + ":sdb1" for x in hostnames])
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
    # ---- disk zap -------------------------------------------------------
    def test_disk_zap_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk zap --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy disk zap' in out
    def test_disk_zap_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('disk zap'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_disk_zap_single_host(self):
        args = self.parser.parse_args('disk zap host1:sdb'.split())
        assert args.disk[0][0] == 'host1'
    def test_disk_zap_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('disk zap'.split() + [x + ":sdb" for x in hostnames])
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,489 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/tests/test_remotes.py | from mock import patch
from ceph_deploy.hosts import remotes
from ceph_deploy.hosts.remotes import platform_information
class FakeExists(object):
    """Stand-in for os.path.exists: truthy only for a known set of paths.

    Mirrors the production behaviour of returning the path itself when it
    "exists" and None otherwise.
    """
    def __init__(self, existing_paths):
        self.existing_paths = existing_paths

    def __call__(self, path):
        # membership test is equivalent to the equality scan it replaces
        if path in self.existing_paths:
            return path
class TestWhich(object):
    """Tests for remotes.which() with os.path.exists patched out,
    so no real filesystem lookups happen."""
    def setup(self):
        # dotted target for mock.patch, re-patched in every test
        self.exists_module = 'ceph_deploy.hosts.remotes.os.path.exists'
    def test_finds_absolute_paths(self):
        exists = FakeExists(['/bin/ls'])
        with patch(self.exists_module, exists):
            path = remotes.which('ls')
            assert path == '/bin/ls'
    def test_does_not_find_executable(self):
        exists = FakeExists(['/bin/foo'])
        with patch(self.exists_module, exists):
            path = remotes.which('ls')
            assert path is None
class TestPlatformInformation(object):
    """ tests various inputs that remotes.platform_information handles

    you can test your OS string by comparing the results with the output from:

        python -c "import platform; print platform.linux_distribution()"
    """
    def setup(self):
        pass
    def test_handles_deb_version_num(self):
        # plain numeric Debian release maps major version -> codename
        def fake_distro(): return ('debian', '8.4', '')
        distro, release, codename = platform_information(fake_distro)
        assert distro == 'debian'
        assert release == '8.4'
        assert codename == 'jessie'
    def test_handles_deb_version_slash(self):
        # "major/minor" release with a non-sid minor keeps the major name
        def fake_distro(): return ('debian', 'wheezy/something', '')
        distro, release, codename = platform_information(fake_distro)
        assert distro == 'debian'
        assert release == 'wheezy/something'
        assert codename == 'wheezy'
    def test_handles_deb_version_slash_sid(self):
        # a sid minor wins over the major name
        def fake_distro(): return ('debian', 'jessie/sid', '')
        distro, release, codename = platform_information(fake_distro)
        assert distro == 'debian'
        assert release == 'jessie/sid'
        assert codename == 'sid'
    def test_handles_no_codename(self):
        # unknown distro without codename passes through unchanged
        def fake_distro(): return ('SlaOS', '99.999', '')
        distro, release, codename = platform_information(fake_distro)
        assert distro == 'SlaOS'
        assert release == '99.999'
        assert codename == ''
    # Normal distro strings
    def test_hanles_centos_64(self):
        def fake_distro(): return ('CentOS', '6.4', 'Final')
        distro, release, codename = platform_information(fake_distro)
        assert distro == 'CentOS'
        assert release == '6.4'
        assert codename == 'Final'
    def test_handles_ubuntu_percise(self):
        def fake_distro(): return ('Ubuntu', '12.04', 'precise')
        distro, release, codename = platform_information(fake_distro)
        assert distro == 'Ubuntu'
        assert release == '12.04'
        assert codename == 'precise'
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,490 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/hosts/remotes.py | try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import socket
import os
import shutil
import tempfile
import platform
import commands
# Paths used by the OSD provisioning helpers in this module.
disk_conf = '/opt/ceph/f2fs/conf/osddisk'
ceph_conf_file = '/etc/ceph/ceph.conf'
# NOTE(review): this parses /etc/ceph/ceph.conf at *import* time -- a module
# side effect; conf_hd is silently empty when the file is missing.
conf_hd = configparser.ConfigParser()
conf_hd.read(ceph_conf_file)
# Base directory under which per-OSD data directories are created.
osd_path = '/Ceph/Data/Osd/'
def platform_information(_linux_distribution=None):
    """ detect platform information from remote host """
    probe = _linux_distribution or platform.linux_distribution
    distro, release, codename = probe()
    lowered = distro.lower()
    if not codename and 'debian' in lowered:  # Debian may report no codename
        known_codenames = {
            '10': 'buster',
            '9': 'stretch',
            '8': 'jessie',
            '7': 'wheezy',
            '6': 'squeeze',
        }
        codename = known_codenames.get(release.split('.')[0], '')
        # Handle newer "jessie/sid" / "wheezy/sid" release strings: a sid
        # minor wins, otherwise fall back to the major name.
        if not codename and '/' in release:
            major, minor = release.split('/')
            codename = minor if minor == 'sid' else major
    if not codename and 'oracle' in lowered:  # Oracle Linux reports none
        codename = 'oracle'
    if not codename and 'virtuozzo linux' in lowered:  # Virtuozzo reports none
        codename = 'virtuozzo'
    return (
        str(distro).rstrip(),
        str(release).rstrip(),
        str(codename).rstrip(),
    )
def machine_type():
    """ detect machine type """
    return platform.machine()
def make_mgr_key(mgr_name, path):
    """Create the mgr data dir, generate its keyring via `ceph auth
    get-or-create`, and start the mgr daemon.

    NOTE(review): command exit codes are ignored (the checks are commented
    out below) -- apparently best-effort by design; confirm.
    """
    # try:
    # 167:167 -- presumably the ceph user/group ids on this distro; verify.
    uid = 167
    gid = 167
    if not os.path.exists(path):
        os.makedirs(path)
    os.chown(path, uid, gid)
    # create mgr key
    auth_add = 'ceph auth get-or-create %s mon \'allow *\' osd \'allow *\' mds \'allow *\' -o %s/%s/keyring' % \
               (mgr_name, "/Ceph/Data/Mgr", mgr_name)
    (ret, out) = commands.getstatusoutput(auth_add)
    # if 0 != ret:
    #     raise RuntimeError('create mgr key failed! --->ret:%s out:%s' % (ret, out))
    # start mgr
    start_cmd = '/etc/init.d/ceph -a -c /etc/ceph/ceph.conf start %s' % mgr_name
    (ret, out) = commands.getstatusoutput(start_cmd)
def write_sources_list(url, codename, filename='ceph.list', mode=0o644):
    """add deb repo to /etc/apt/sources.list.d/"""
    entry = 'deb {url} {codename} main\n'.format(url=url, codename=codename)
    target = os.path.join('/etc/apt/sources.list.d', filename)
    write_file(target, entry.encode('utf-8'), mode)
def write_sources_list_content(content, filename='ceph.list', mode=0o644):
    """add deb repo to /etc/apt/sources.list.d/ from content"""
    target = os.path.join('/etc/apt/sources.list.d', filename)
    if not isinstance(content, str):
        content = content.decode('utf-8')
    write_file(target, content.encode('utf-8'), mode)
def write_yum_repo(content, filename='ceph.repo'):
    """add yum repo file in /etc/yum.repos.d/"""
    target = os.path.join('/etc/yum.repos.d', filename)
    if not isinstance(content, str):
        content = content.decode('utf-8')
    write_file(target, content.encode('utf-8'))
def set_apt_priority(fqdn, path='/etc/apt/preferences.d/ceph.pref'):
    """Pin all packages originating from *fqdn* at apt priority 999."""
    body = "Package: *\nPin: origin {fqdn}\nPin-Priority: 999\n".format(fqdn=fqdn)
    with open(path, 'w') as fout:
        fout.write(body)
def set_repo_priority(sections, path='/etc/yum.repos.d/ceph.repo', priority='1'):
    """Set a `priority=` option on each of *sections* in a yum repo file,
    then rewrite the file with `key=value` (no spaces) assignments."""
    Config = configparser.ConfigParser()
    Config.read(path)
    Config.sections()
    for section in sections:
        try:
            Config.set(section, 'priority', priority)
        except configparser.NoSectionError:
            # Emperor versions of Ceph used all lowercase sections
            # so lets just try again for the section that failed, maybe
            # we are able to find it if it is lower
            Config.set(section.lower(), 'priority', priority)
    with open(path, 'w') as fout:
        Config.write(fout)
    # And now, because ConfigParser is super duper, we need to remove the
    # assignments so this looks like it was before
    def remove_whitespace_from_assignments():
        # rewrite every non-comment "k = v" line as "k=v", preserving the rest
        separator = "="
        lines = open(path).readlines()
        fp = open(path, "w")
        for line in lines:
            line = line.strip()
            if not line.startswith("#") and separator in line:
                assignment = line.split(separator, 1)
                assignment = tuple(map(str.strip, assignment))
                fp.write("%s%s%s\n" % (assignment[0], separator, assignment[1]))
            else:
                fp.write(line + "\n")
    remove_whitespace_from_assignments()
def write_conf(cluster, conf, overwrite):
    """ write cluster configuration to /etc/ceph/{cluster}.conf

    Raises RuntimeError when /etc/ceph does not exist, or when the file
    already exists with different content and *overwrite* is false.

    BUG FIX: the temp file used for the replace was created *before* any
    checks ran, leaking an orphan file in /etc/ceph on every refused
    overwrite and raising a cryptic error when /etc/ceph was missing.
    """
    path = '/etc/ceph/{cluster}.conf'.format(cluster=cluster)
    if not os.path.exists('/etc/ceph'):
        raise RuntimeError('/etc/ceph/ does not exist - could not write config')
    if os.path.exists(path):
        with open(path, 'r') as f:
            old = f.read()
        if old != conf and not overwrite:
            raise RuntimeError(
                'config file %s exists with different content; use --overwrite-conf to overwrite' % path
            )
        # write through a temp file in the same directory so the final
        # rename is atomic on the same filesystem
        tmp_file = tempfile.NamedTemporaryFile('w', dir='/etc/ceph', delete=False)
        tmp_file.write(conf)
        tmp_file.close()
        shutil.move(tmp_file.name, path)
        os.chmod(path, 0o644)
        return
    with open(path, 'w') as f:
        f.write(conf)
    os.chmod(path, 0o644)
def write_keyring(path, key, uid=-1, gid=-1):
    """ create a keyring file

    Writes *key* through a temp file (delete=False so shutil.move can copy
    across filesystems), creating the parent directory when needed.

    BUG FIX: makedir() was called positionally as makedir(dir, uid, gid),
    but its signature is (path, ignored=None, uid=-1, gid=-1) -- uid ended
    up as the *ignored* errno list and gid as the uid.
    """
    tmp_file = tempfile.NamedTemporaryFile('wb', delete=False)
    tmp_file.write(key)
    tmp_file.close()
    keyring_dir = os.path.dirname(path)
    if not path_exists(keyring_dir):
        makedir(keyring_dir, uid=uid, gid=gid)
    shutil.move(tmp_file.name, path)
def create_ceph_path(path, uid=-1, gid=-1):
    """Ensure the ceph data path exists, then chown it to uid/gid."""
    if not os.path.exists(path):
        os.makedirs(path)
    os.chown(path, uid, gid)
def create_mon_path(path, uid=-1, gid=-1):
    """Ensure the monitor data path exists, then chown it to uid/gid."""
    if not os.path.exists(path):
        os.makedirs(path)
    os.chown(path, uid, gid)
def create_meta_path(path, uid=-1, gid=-1):
    """Ensure the metadata path exists, then chown it to uid/gid."""
    if not os.path.exists(path):
        os.makedirs(path)
    os.chown(path, uid, gid)
def create_done_path(done_path, uid=-1, gid=-1):
    """Drop an empty 'done' marker so the mon deployment is not repeated."""
    open(done_path, 'wb').close()
    os.chown(done_path, uid, gid)
def create_init_path(init_path, uid=-1, gid=-1):
    """Create an empty init marker file unless one already exists."""
    if not os.path.exists(init_path):
        open(init_path, 'wb').close()
    os.chown(init_path, uid, gid)
def append_to_file(file_path, contents):
    """Append *contents* to the file at *file_path*, creating it if absent."""
    with open(file_path, 'a') as handle:
        handle.write(contents)
def path_getuid(path):
    """Return the numeric owner uid of *path*."""
    return os.stat(path).st_uid
def path_getgid(path):
    """Return the numeric group gid of *path*."""
    return os.stat(path).st_gid
def readline(path):
    """Return the first line of *path* with its trailing newline stripped."""
    with open(path) as handle:
        return handle.readline().strip('\n')
def path_exists(path):
    """True when *path* exists on this host."""
    return os.path.exists(path)
def get_realpath(path):
    """Resolve symlinks and return the canonical path."""
    return os.path.realpath(path)
def listdir(path):
    """Return the directory entries of *path*."""
    return os.listdir(path)
def makedir(path, ignored=None, uid=-1, gid=-1):
    """Create *path* (with parents) and chown it on success.

    OSErrors whose errno appears in *ignored* are swallowed (and no chown
    happens); any other OSError propagates.
    """
    tolerated = ignored or []
    try:
        os.makedirs(path)
    except OSError as error:
        if error.errno not in tolerated:
            raise
    else:
        os.chown(path, uid, gid)
def unlink(_file):
    """Remove the file at *_file*."""
    os.unlink(_file)
''' define fuc for osd '''
def create_client_admin_keyring(key_path, uid, gid):
key_file = '/Ceph/Meta/Keyring/client.admin.keyring'
if not os.path.exists(key_file):
get_key = 'ceph --cluster=ceph --name=mon. --keyring=%s auth get-or-create client.admin mon \'allow *\' osd \'allow *\' mgr \'allow *\' mds \'allow *\'' % key_path
(ret,out) = commands.getstatusoutput(get_key)
with open(key_file,'w') as f:
f.write(out)
f.write('\n')
def tar_scripts():
cmd = "tar xvzf /etc/ceph/scripts.tar.gz --directory=/etc/ceph/"
(ret, out) = commands.getstatusoutput(cmd)
return
def prepare_osd(hostname, publicip):
cmd = "python2.7 /etc/ceph/scripts/prepare_all.py %s %s %s" % (hostname, publicip, publicip)
(ret,out) = commands.getstatusoutput(cmd)
return [ret, out]
def create_osddisk_path(osddisk_path, scripts_path):
if os.path.exists(osddisk_path):
clear_dir = 'rm -rf /opt/ceph/f2fs/conf/*'
(ret,out) = commands.getstatusoutput(clear_dir)
if not os.path.exists(osddisk_path):
create_dir = 'mkdir -p /opt/ceph/f2fs/conf'
(ret,out) = commands.getstatusoutput(create_dir)
create_file = 'touch %s' % osddisk_path
(ret,out) = commands.getstatusoutput(create_file)
if not os.path.exists(scripts_path):
mk_dir = 'mkdir -p %s' % scripts_path
(ret,out) = commands.getstatusoutput(mk_dir)
def scan_new_disk(scripts_path, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used, ssd_tt_used,hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type):
if strategy == 'parted':
option = "deploy"
scan_disk = 'sh /etc/ceph/scripts/disk_fs_mgmt.sh -M%s -O%s -a%s -b%s -c%s -d%s -e%s -f%s -g%s -t%s' % (
mode, option, nvme_def_used, nvme_tt_nums, ssd_def_used, ssd_tt_used,
hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type)
else:
scan_disk = 'sh %s/disk_fs_mgmt.sh -Odeploy -Mauto -tnone' % scripts_path
(ret, out) = commands.getstatusoutput(scan_disk)
return [ret,out]
def prepare_osd_dir(scripts_path):
mkf2fs_cmd='sh %s/parallel_mkf2fs.sh 1' % scripts_path
(ret, out) = commands.getstatusoutput(mkf2fs_cmd)
return [ret,out]
def get_osd_disk(osddisk_path):
new_disk_index = 1
# get new disk list
# ata-INTEL_SSDSC2BB600G4L_PHWL536400VM600TGN-part2 scsi-3600062b200c8e5b021b78b502a088287
get_avail_disks = 'sed -n \'%s,$p\' %s' % (str(new_disk_index), disk_conf)
(ret, avail_disk_msg) = commands.getstatusoutput(get_avail_disks)
if (0 != ret):
return ret
avail_disk_list = avail_disk_msg.split('\n')
osd_disk = []
for i in range(0, len(avail_disk_list)):
meta_disk = avail_disk_list[i].split(" ")[0]
osd_disk.append(meta_disk)
if (not os.path.exists('/Ceph/Meta/Keyring')):
mk_meta_dir = 'mkdir -p /Ceph/Meta/Keyring'
(ret, msg) = commands.getstatusoutput(mk_meta_dir)
mk_osd_dir = 'mkdir -p /Ceph/Data/Osd/osd-%s' % meta_disk
(ret, msg) = commands.getstatusoutput(mk_osd_dir)
return osd_disk
def get_osd_num():
create_osd_cmd = 'ceph osd create > /tmp/osd_no'
(ret, msg) = commands.getstatusoutput(create_osd_cmd)
if (0 != ret):
return ret
get_osd_no = 'cat /tmp/osd_no'
(ret, osd_no) = commands.getstatusoutput(get_osd_no)
return osd_no
def get_osd_weight(disk):
#ata-INTEL_SSDSC2BB600G4L_PHWL536400VM600TGN-part1
get_disk_name = "ls -l /dev/disk/by-id | grep -w %s | awk '{print $11}'|awk -F'/' '{print $3}'" % disk
(ret, disk_name) = commands.getstatusoutput(get_disk_name)
get_osd_size = 'df | grep -w %s| awk \'{print $2}\'' % disk_name
(ret, osd_size) = commands.getstatusoutput(get_osd_size)
weight = int(osd_size)/1024.00/1024.00/1024.00
return weight
def prepare_osd_one(disk, osd, hostname):
    """Initialise a single OSD: mkfs+journal+key, register its auth
    entity and place it in the CRUSH map.

    Returns the non-zero exit status of the first failing step, or
    None when every step succeeded.
    """
    mkfs_osd = 'ceph-osd -i %s --mkfs --mkjournal --mkkey' % osd
    (ret, msg) = commands.getstatusoutput(mkfs_osd)
    if(0 != ret):
        return ret
    # add auth
    auth_add = 'ceph auth get-or-create osd.%s osd \'allow *\' mgr \'allow profile osd\' mon \'allow rwx\' -o %s/osd.%s.keyring' % \
        (osd, "/Ceph/Meta/Keyring", osd)
    (ret, msg) = commands.getstatusoutput(auth_add)
    if(0 != ret):
        return ret
    # modify crush map
    weight = get_osd_weight(disk)
    # modify cluster crush
    modify_crush = 'ceph osd crush create-or-move %s %s host=%s rack=unknownrack root=default' \
        % (osd, weight, hostname)
    (ret, msg) = commands.getstatusoutput(modify_crush)
    if(0 != ret):
        return ret
def start_osd(osds):
    """Start each osd.<n> in *osds* through the sysvinit ceph script.

    Stops at the first daemon that fails to start and returns its exit
    status; returns None when all daemons started.
    """
    for osd_no in osds:
        cmd = '/etc/init.d/ceph -a -c /etc/ceph/ceph.conf start osd.%s' % osd_no
        status, _ = commands.getstatusoutput(cmd)
        if status != 0:
            return status
    return
def add_osd_with_no(hostname, public_ip, cluster_ip, dev_id, osd_no):
    """Add/update the [osd.N] section of the remote ceph.conf.

    Sets host, addresses (public port = 6900 + N), journal and data
    paths for the OSD backed by disk *dev_id*, then rewrites the
    config file in place.
    """
    osd_sec_name = 'osd.' + str(osd_no)
    if not conf_hd.has_section(osd_sec_name):
        conf_hd.add_section(osd_sec_name)
    osd_port = 6900 + int(osd_no)
    conf_hd.set(osd_sec_name, 'host', hostname)
    conf_hd.set(osd_sec_name, 'public addr', public_ip + ':' + str(osd_port))
    conf_hd.set(osd_sec_name, 'cluster addr', cluster_ip)
    conf_hd.set(osd_sec_name, 'osd journal size', '10000')
    ceph_osd_path = osd_path + 'osd-' + dev_id
    conf_hd.set(osd_sec_name, 'osd journal', ceph_osd_path + '/journal')
    conf_hd.set(osd_sec_name, 'osd data', ceph_osd_path)
    # context manager: the original open/write/close leaked the handle
    # if conf_hd.write() raised
    with open(ceph_conf_file, 'w') as conf_write:
        conf_hd.write(conf_write)
    return
'''end osd fuc define'''
def write_monitor_keyring(keyring, monitor_keyring, uid=-1, gid=-1):
    """Create the monitor keyring file (mode 0600, owned by uid/gid)."""
    write_file(keyring, monitor_keyring, mode=0o600, directory=None,
               uid=uid, gid=gid)
def write_file(path, content, mode=0o644, directory=None, uid=-1, gid=-1):
    """(Re)create *path* with *content* bytes, mode and ownership.

    When *directory* is given, *path* is re-rooted beneath it.  Any
    existing file is removed first so the requested *mode* is applied
    even when it differs from the old one.
    """
    if directory:
        relative = path[1:] if path.startswith("/") else path
        path = os.path.join(directory, relative)
    if os.path.exists(path):
        # Delete file in case we are changing its mode
        os.unlink(path)
    fd = os.open(path, os.O_WRONLY | os.O_CREAT, mode)
    with os.fdopen(fd, 'wb') as handle:
        handle.write(content)
    # uid/gid of -1 leave ownership untouched (POSIX chown semantics)
    os.chown(path, uid, gid)
def touch_file(path):
    """Create (or truncate to empty) the file at *path*."""
    open(path, 'wb').close()
def exist_file(path):
    """Return True if *path* exists on this host, False otherwise."""
    # os.path.exists already yields a bool — the if/else was redundant
    return os.path.exists(path)
def get_file(path):
    """Return the binary contents of *path*, or None when unreadable."""
    try:
        with open(path, 'rb') as stream:
            data = stream.read()
    except IOError:
        return None
    return data
def object_grep(term, file_object):
    """Return True if any line of *file_object* contains *term*."""
    return any(term in line for line in file_object.readlines())
def grep(term, file_path):
    """Silent grep: True when *file_path* exists and contains *term*.

    Mimics the init-system detection in Ceph's init scripts::

        # detect systemd
        # SYSTEMD=0
        grep -qs systemd /proc/1/comm && SYSTEMD=1

    Because we operate in silent mode, a missing file yields False
    instead of an error.
    """
    if not os.path.isfile(file_path):
        return False
    with open(file_path) as handle:
        return object_grep(term, handle)
def shortname():
    """Return this host's short name (portion before the first dot)."""
    fqdn = socket.gethostname()
    return fqdn.split('.', 1)[0]
def which_service():
    """Locate the `service` executable on this host."""
    # XXX This should get deprecated at some point. For now
    # it just bypasses and uses the new helper.
    service_path = which('service')
    return service_path
def which(executable):
    """Return the full path of *executable* searched across the
    standard bin/sbin directories, or None when not found."""
    search_dirs = (
        '/usr/local/bin',
        '/bin',
        '/usr/bin',
        '/usr/local/sbin',
        '/usr/sbin',
        '/sbin',
    )
    for directory in search_dirs:
        candidate = os.path.join(directory, executable)
        if os.path.exists(candidate):
            return candidate
    return None
def make_mon_removed_dir(path, file_name):
    """Archive old monitor data: move *path* into
    /var/lib/ceph/mon-removed/<file_name>.

    The archive directory is created on demand; an already-existing
    directory is fine, any other mkdir error propagates.
    """
    try:
        os.makedirs('/var/lib/ceph/mon-removed')
    except OSError as e:
        # tolerate only "already exists"
        if e.errno != errno.EEXIST:
            raise
    shutil.move(path, os.path.join('/var/lib/ceph/mon-removed/', file_name))
def safe_mkdir(path, uid=-1, gid=-1):
    """Create *path* (single level) if missing and chown it.

    An already-existing directory is left untouched (not re-chowned);
    any other OS error propagates.
    """
    try:
        os.mkdir(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
    else:
        os.chown(path, uid, gid)
def safe_makedirs(path, uid=-1, gid=-1):
    """Create *path* recursively if missing and chown the leaf.

    An already-existing path is left untouched (not re-chowned);
    any other OS error propagates.
    """
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
    else:
        os.chown(path, uid, gid)
def zeroing(dev):
    """Zero the last few blocks of *dev* to clear the GPT backup table.

    NOTE(review): the early ``return True`` below makes the actual
    wipe unreachable — zeroing is effectively a no-op in this fork.
    Presumably intentional (to avoid destroying data); confirm before
    removing the early return.
    """
    # this kills the crab
    #
    # sgdisk will wipe out the main copy of the GPT partition
    # table (sorry), but it doesn't remove the backup copies, and
    # subsequent commands will continue to complain and fail when
    # they see those. zeroing the last few blocks of the device
    # appears to do the trick.
    lba_size = 4096
    size = 33 * lba_size
    return True
    # unreachable while the early return above is present
    with open(dev, 'wb') as f:
        f.seek(-size, os.SEEK_END)
        f.write(size*b'\0')
def enable_yum_priority_obsoletes(path="/etc/yum/pluginconf.d/priorities.conf"):
    """Enable `check_obsoletes` in the Yum priorities plugin config."""
    parser = configparser.ConfigParser()
    parser.read(path)
    parser.set('main', 'check_obsoletes', '1')
    with open(path, 'w') as fout:
        parser.write(fout)
# remoto magic, needed to execute these functions remotely
if __name__ == '__channelexec__':
    # When this module runs inside an execnet channel, each received
    # item is a call-expression string; evaluate it and ship the result
    # back.  eval() on channel input is accepted here because the peer
    # is remoto itself, not untrusted input.
    for item in channel:  # noqa
        channel.send(eval(item))  # noqa
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,491 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/hosts/common.py | import commands
import os
import time
import ConfigParser
from os.path import join
from ceph_deploy.util import paths
from ceph_deploy import conf
from ceph_deploy.lib import remoto
from ceph_deploy.util import constants
from ceph_deploy.util import system
# Locally-maintained copy of the cluster config on the admin node;
# parsed once at import time and mutated by write_osd_data().
ceph_conf_file = '/root/ceph.conf'
conf_hd = ConfigParser.ConfigParser()
conf_hd.read(ceph_conf_file)
# Remote-host locations used during OSD deployment
osddisk_path = '/opt/ceph/f2fs/conf/osddisk'
scripts_path = '/etc/ceph/scripts'
osd_path = '/Ceph/Data/Osd/'
def ceph_version(conn):
    """
    Log the remote ceph-version by calling `ceph --version`
    """
    version_cmd = ['ceph', '--version']
    return remoto.process.run(conn, version_cmd)
def mon_create(distro, args, monitor_keyring, mon_index):
    """Bootstrap and start a brand-new monitor `mon.<mon_index>`.

    Writes ceph.conf to the remote, lays out the /Ceph directory tree,
    writes the monitor keyring, runs ``ceph-mon --mkfs`` (guarded by a
    `done` marker file so re-runs are safe), starts the service and
    finally produces the client.admin keyring.

    All remote filesystem work goes through distro.conn.remote_module.
    """
    logger = distro.conn.logger
    hostname = distro.conn.remote_module.shortname()
    logger.debug('remote hostname: %s' % hostname)
    ceph_dir = '/Ceph'
    mon_dir = '/Ceph/Data/Mon/mon.' + mon_index
    meta_dir = '/Ceph/Meta/Keyring'
    # fixed ceph user/group ids used for ownership of created paths
    uid = 167
    gid = 167
    done_path = join(mon_dir, 'done')
    init_path = join(mon_dir, 'init')
    conf_data = conf.ceph.load_raw(args)
    # write the configuration file
    distro.conn.remote_module.write_conf(
        args.cluster,
        conf_data,
        args.overwrite_conf,
    )
    keyring = paths.mon.keyring(args.cluster, mon_index)
    # if the mon path does not exist, create it
    if not distro.conn.remote_module.path_exists(ceph_dir):
        distro.conn.remote_module.create_ceph_path(ceph_dir, uid, gid)
    distro.conn.remote_module.create_meta_path(meta_dir, uid, gid)
    distro.conn.remote_module.create_mon_path(mon_dir, uid, gid)
    logger.debug('checking for done path: %s' % done_path)
    if not distro.conn.remote_module.path_exists(done_path):
        # no `done` marker: this monitor has not been mkfs'd yet
        logger.debug('done path does not exist: %s' % done_path)
        if not distro.conn.remote_module.path_exists(paths.mon.constants.tmp_path):
            logger.info('creating tmp path: %s' % paths.mon.constants.tmp_path)
            distro.conn.remote_module.makedir(paths.mon.constants.tmp_path)
        logger.info('creating keyring file: %s' % keyring)
        distro.conn.remote_module.write_monitor_keyring(
            keyring,
            monitor_keyring,
            uid, gid,
        )
        # drop privileges to the ceph uid/gid unless running as root ids
        user_args = []
        if uid != 0:
            user_args = user_args + [ '--setuser', str(uid) ]
        if gid != 0:
            user_args = user_args + [ '--setgroup', str(gid) ]
        remoto.process.run(
            distro.conn,
            [
                'ceph-mon',
                '--cluster', args.cluster,
                '--mkfs',
                '-i', mon_index,
                '--keyring', keyring,
            ] + user_args
        )
        # create the done file
        distro.conn.remote_module.create_done_path(done_path, uid, gid)
    # create init path
    distro.conn.remote_module.create_init_path(init_path, uid, gid)
    # start mon service
    start_mon_service(distro, args.cluster, mon_index)
    # give the monitor a moment to come up before talking to it
    time.sleep(2)
    # create client.admin.keyring file
    logger.info('create client.admin keyring file in : %s' % keyring)
    distro.conn.remote_module.create_client_admin_keyring(keyring, uid, gid)
    logger.info('unlinking keyring file %s' % keyring)
    distro.conn.remote_module.unlink(keyring)
def mon_add(distro, args, monitor_keyring, mon_index):
    """Add monitor `mon.<mon_index>` to an *existing* cluster.

    Unlike mon_create this fetches the current monmap from the running
    cluster and feeds it to ``ceph-mon --mkfs`` so the new monitor
    joins the existing quorum.  A `done` marker makes re-runs safe.
    """
    mon_dir = '/Ceph/Data/Mon/mon.' + mon_index
    mk_mon_dir = 'mkdir -p %s' % mon_dir
    # NOTE: runs locally via commands, not through the remote module
    commands.getstatusoutput(mk_mon_dir)
    done_path = join(mon_dir, 'done')
    init_path = join(mon_dir, 'init')
    hostname = distro.conn.remote_module.shortname()
    logger = distro.conn.logger
    # inherit ownership from the existing ceph base path
    uid = distro.conn.remote_module.path_getuid(constants.base_path)
    gid = distro.conn.remote_module.path_getgid(constants.base_path)
    monmap_path = paths.mon.monmap(args.cluster, mon_index)
    conf_data = conf.ceph.load_raw(args)
    # write the configuration file
    distro.conn.remote_module.write_conf(
        args.cluster,
        conf_data,
        args.overwrite_conf,
    )
    # if the mon path does not exist, create it
    distro.conn.remote_module.create_mon_path(mon_dir, uid, gid)
    logger.debug('checking for done path: %s' % done_path)
    if not distro.conn.remote_module.path_exists(done_path):
        logger.debug('done path does not exist: %s' % done_path)
        if not distro.conn.remote_module.path_exists(paths.mon.constants.tmp_path):
            logger.info('creating tmp path: %s' % paths.mon.constants.tmp_path)
            distro.conn.remote_module.makedir(paths.mon.constants.tmp_path)
        keyring = paths.mon.keyring(args.cluster, hostname)
        logger.info('creating keyring file: %s' % keyring)
        distro.conn.remote_module.write_monitor_keyring(
            keyring,
            monitor_keyring,
            uid, gid,
        )
        # get the monmap
        remoto.process.run(
            distro.conn,
            [
                'ceph',
                '--cluster', args.cluster,
                'mon',
                'getmap',
                '-o',
                monmap_path,
            ],
        )
        # now use it to prepare the monitor's data dir
        user_args = []
        if uid != 0:
            user_args = user_args + [ '--setuser', str(uid) ]
        if gid != 0:
            user_args = user_args + [ '--setgroup', str(gid) ]
        remoto.process.run(
            distro.conn,
            [
                'ceph-mon',
                '--cluster', args.cluster,
                '--mkfs',
                '-i', mon_index,
                '--monmap',
                monmap_path,
                '--keyring', keyring,
            ] + user_args
        )
        logger.info('unlinking keyring file %s' % keyring)
        distro.conn.remote_module.unlink(keyring)
        # create the done file
        distro.conn.remote_module.create_done_path(done_path, uid, gid)
    # create init path
    distro.conn.remote_module.create_init_path(init_path, uid, gid)
    # start mon service
    start_mon_service(distro, args.cluster, mon_index)
def map_components(notsplit_packages, components):
    """Map component names onto the packages that actually ship them.

    Any component listed in *notsplit_packages* has not been split out
    of the master 'ceph' package yet, so it maps to 'ceph'; everything
    else maps to itself.  Duplicates collapse via the intermediate set.
    """
    return list({
        'ceph' if component in notsplit_packages else component
        for component in components
    })
def start_mon_service(distro, cluster, hostname):
    """
    start mon service depending on distro init

    Supports sysvinit (service + enable), upstart (initctl emit) and
    systemd (enable ceph.target, enable + start ceph-mon@<hostname>).
    Any other init system is silently ignored.
    """
    if distro.init == 'sysvinit':
        service = distro.conn.remote_module.which_service()
        remoto.process.run(
            distro.conn,
            [
                service,
                'ceph',
                '-c',
                '/etc/ceph/{cluster}.conf'.format(cluster=cluster),
                'start',
                'mon.{hostname}'.format(hostname=hostname)
            ],
            timeout=7,
        )
        # make the service start at boot as well
        system.enable_service(distro.conn)
    elif distro.init == 'upstart':
        remoto.process.run(
            distro.conn,
            [
                'initctl',
                'emit',
                'ceph-mon',
                'cluster={cluster}'.format(cluster=cluster),
                'id={hostname}'.format(hostname=hostname),
            ],
            timeout=7,
        )
    elif distro.init == 'systemd':
        # enable ceph target for this host (in case it isn't already enabled)
        remoto.process.run(
            distro.conn,
            [
                'systemctl',
                'enable',
                'ceph.target'
            ],
            timeout=7,
        )
        # enable and start this mon instance
        remoto.process.run(
            distro.conn,
            [
                'systemctl',
                'enable',
                'ceph-mon@{hostname}'.format(hostname=hostname),
            ],
            timeout=7,
        )
        remoto.process.run(
            distro.conn,
            [
                'systemctl',
                'start',
                'ceph-mon@{hostname}'.format(hostname=hostname),
            ],
            timeout=7,
        )
def write_osd_data(hostname, public_ip, cluster_ip, disk, osd_no):
    """Record an [osd.N] section in the local manager's ceph.conf
    (default /root/ceph.conf) where ceph-deploy runs.

    Sets host, addresses (public port = 6900 + N), journal and data
    paths for the OSD backed by *disk*, then rewrites the file.
    """
    osd_sec_name = 'osd.' + str(osd_no)
    if not conf_hd.has_section(osd_sec_name):
        conf_hd.add_section(osd_sec_name)
    osd_port = 6900 + int(osd_no)
    conf_hd.set(osd_sec_name, 'host', hostname)
    conf_hd.set(osd_sec_name, 'public addr', public_ip + ':' + str(osd_port))
    conf_hd.set(osd_sec_name, 'cluster addr', cluster_ip)
    conf_hd.set(osd_sec_name, 'osd journal size', '10000')
    ceph_osd_path = osd_path + 'osd-' + disk
    conf_hd.set(osd_sec_name, 'osd journal', ceph_osd_path + '/journal')
    conf_hd.set(osd_sec_name, 'osd data', ceph_osd_path)
    # context manager: the original open/write/close leaked the handle
    # if conf_hd.write() raised
    with open(ceph_conf_file, 'w') as conf_write:
        conf_hd.write(conf_write)
    return
# hostname, pIP, cIP, disk, journal, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used,ssd_tt_used, hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type
def osd_create_all(distro, args, hostname, publicip, clusterip, disk, journal, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used,ssd_tt_used, hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type):
    """Prepare and start every available OSD disk on one remote host.

    Pipeline: unpack helper scripts, scan/partition disks per the
    chosen strategy, format+mount them, then for each discovered disk
    allocate an OSD id, record it in both local and remote ceph.conf,
    mkfs/register it, and finally start all successfully prepared OSDs.
    Per-disk failures are logged and skipped rather than aborting.
    """
    logger = distro.conn.logger
    logger.info('start prepare osd on HOST : %s , pIP : %s , cIP : %s' % (hostname, publicip, clusterip))
    distro.conn.remote_module.create_osddisk_path(osddisk_path, scripts_path)
    # tar zxvf /etc/ceph/scripts.tar.gz
    distro.conn.remote_module.tar_scripts()
    # scan host disks
    # parted disks
    # write journal data disk to /opt/ceph/f2fs/conf/osddisk
    distro.conn.remote_module.scan_new_disk(scripts_path, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used, ssd_tt_used,hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type)
    # wait for disk ready
    time.sleep(5)
    # mkf2fs and mount to point
    distro.conn.remote_module.prepare_osd_dir(scripts_path)
    disk_list = distro.conn.remote_module.get_osd_disk(osddisk_path)
    if not disk_list:
        # NOTE: only logged; execution continues with the empty list
        logger.info('No disk ready on HOST : %s' % hostname)
    logger.info('The num of disk ready for osd is : %s' % len(disk_list))
    logger.debug('DISK : %s' % disk_list)
    osds = []
    for disk in disk_list:
        if disk:
            osd_num = distro.conn.remote_module.get_osd_num()
            logger.info('OSD.%s -->DISK: %s' % (osd_num, disk))
            logger.debug('start write local ceph.conf')
            write_osd_data(hostname, publicip, clusterip, disk, osd_num)
            logger.debug('start write /etc/ceph/ceph.conf on %s' % hostname)
            distro.conn.remote_module.add_osd_with_no(hostname, publicip, clusterip, disk, osd_num)
            try:
                logger.debug('start prepare osd')
                distro.conn.remote_module.prepare_osd_one(disk, osd_num, hostname)
                osds.append(osd_num)
            except Exception as e:
                # best-effort: skip this disk, keep preparing the rest
                logger.info('prepare osd.%s failed' % osd_num)
    if osds:
        try:
            logger.debug('start run osd')
            distro.conn.remote_module.start_osd(osds)
        except Exception as e:
            logger.info('start osd failed %s' % e)
def mgr_create(distro, mgr_name, path, hostname):
    """Create the mgr key for *mgr_name* on the remote host."""
    remote_logger = distro.conn.logger
    remote_logger.info('start prepare mgr on HOST:%s ' % hostname)
    distro.conn.remote_module.make_mgr_key(mgr_name, path)
45,492 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/__init__.py |
__version__ = '1.5.39'
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,493 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/tests/parser/test_osd.py | import pytest
from ceph_deploy.cli import get_parser
from ceph_deploy.tests.util import assert_too_few_arguments
SUBCMDS_WITH_ARGS = ['list', 'create', 'prepare', 'activate']
class TestParserOSD(object):
    """Argument-parser tests for the `ceph-deploy osd` subcommands.

    Each test feeds a command line to the real parser built by
    get_parser() and checks either the parsed namespace or the
    usage/error text captured through pytest's capsys fixture.
    """
    def setup(self):
        # fresh parser per test
        self.parser = get_parser()
    def test_osd_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy osd' in out
        assert 'positional arguments:' in out
        assert 'optional arguments:' in out
    @pytest.mark.parametrize('cmd', SUBCMDS_WITH_ARGS)
    def test_osd_valid_subcommands_with_args(self, cmd):
        # every listed subcommand must accept a host argument
        self.parser.parse_args(['osd'] + ['%s' % cmd] + ['host1'])
    def test_osd_invalid_subcommand(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd bork'.split())
        out, err = capsys.readouterr()
        assert 'invalid choice' in err
    def test_osd_list_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd list --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy osd list' in out
    def test_osd_list_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd list'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_osd_list_single_host(self):
        args = self.parser.parse_args('osd list host1'.split())
        assert args.disk[0][0] == 'host1'
    def test_osd_list_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('osd list'.split() + hostnames)
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
    def test_osd_create_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd create --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy osd create' in out
    def test_osd_create_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd create'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_osd_create_single_host(self):
        args = self.parser.parse_args('osd create host1:sdb'.split())
        assert args.disk[0][0] == 'host1'
    def test_osd_create_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('osd create'.split() + [x + ":sdb" for x in hostnames])
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
    def test_osd_create_zap_default_false(self):
        args = self.parser.parse_args('osd create host1:sdb'.split())
        assert args.zap_disk is False
    def test_osd_create_zap_true(self):
        args = self.parser.parse_args('osd create --zap-disk host1:sdb'.split())
        assert args.zap_disk is True
    def test_osd_create_fstype_default_xfs(self):
        args = self.parser.parse_args('osd create host1:sdb'.split())
        assert args.fs_type == "xfs"
    def test_osd_create_fstype_btrfs(self):
        args = self.parser.parse_args('osd create --fs-type btrfs host1:sdb'.split())
        assert args.fs_type == "btrfs"
    def test_osd_create_fstype_invalid(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd create --fs-type bork host1:sdb'.split())
        out, err = capsys.readouterr()
        assert 'invalid choice' in err
    def test_osd_create_dmcrypt_default_false(self):
        args = self.parser.parse_args('osd create host1:sdb'.split())
        assert args.dmcrypt is False
    def test_osd_create_dmcrypt_true(self):
        args = self.parser.parse_args('osd create --dmcrypt host1:sdb'.split())
        assert args.dmcrypt is True
    def test_osd_create_dmcrypt_key_dir_default(self):
        args = self.parser.parse_args('osd create host1:sdb'.split())
        assert args.dmcrypt_key_dir == "/etc/ceph/dmcrypt-keys"
    def test_osd_create_dmcrypt_key_dir_custom(self):
        args = self.parser.parse_args('osd create --dmcrypt --dmcrypt-key-dir /tmp/keys host1:sdb'.split())
        assert args.dmcrypt_key_dir == "/tmp/keys"
    def test_osd_prepare_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd prepare --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy osd prepare' in out
    def test_osd_prepare_zap_default_false(self):
        args = self.parser.parse_args('osd prepare host1:sdb'.split())
        assert args.zap_disk is False
    def test_osd_prepare_zap_true(self):
        args = self.parser.parse_args('osd prepare --zap-disk host1:sdb'.split())
        assert args.zap_disk is True
    def test_osd_prepare_fstype_default_xfs(self):
        args = self.parser.parse_args('osd prepare host1:sdb'.split())
        assert args.fs_type == "xfs"
    def test_osd_prepare_fstype_invalid(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd prepare --fs-type bork host1:sdb'.split())
        out, err = capsys.readouterr()
        assert 'invalid choice' in err
    def test_osd_prepare_dmcrypt_default_false(self):
        args = self.parser.parse_args('osd prepare host1:sdb'.split())
        assert args.dmcrypt is False
    def test_osd_prepare_dmcrypt_true(self):
        args = self.parser.parse_args('osd prepare --dmcrypt host1:sdb'.split())
        assert args.dmcrypt is True
    def test_osd_prepare_dmcrypt_key_dir_default(self):
        args = self.parser.parse_args('osd prepare host1:sdb'.split())
        assert args.dmcrypt_key_dir == "/etc/ceph/dmcrypt-keys"
    def test_osd_prepare_dmcrypt_key_dir_custom(self):
        args = self.parser.parse_args('osd prepare --dmcrypt --dmcrypt-key-dir /tmp/keys host1:sdb'.split())
        assert args.dmcrypt_key_dir == "/tmp/keys"
    def test_osd_prepare_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd prepare'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_osd_prepare_single_host(self):
        args = self.parser.parse_args('osd prepare host1:sdb'.split())
        assert args.disk[0][0] == 'host1'
    def test_osd_prepare_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('osd prepare'.split() + [x + ":sdb" for x in hostnames])
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
    def test_osd_activate_help(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd activate --help'.split())
        out, err = capsys.readouterr()
        assert 'usage: ceph-deploy osd activate' in out
    def test_osd_activate_host_required(self, capsys):
        with pytest.raises(SystemExit):
            self.parser.parse_args('osd activate'.split())
        out, err = capsys.readouterr()
        assert_too_few_arguments(err)
    def test_osd_activate_single_host(self):
        args = self.parser.parse_args('osd activate host1:sdb1'.split())
        assert args.disk[0][0] == 'host1'
    def test_osd_activate_multi_host(self):
        hostnames = ['host1', 'host2', 'host3']
        args = self.parser.parse_args('osd activate'.split() + [x + ":sdb1" for x in hostnames])
        # args.disk is a list of tuples, and tuple[0] is the hostname
        hosts = [x[0] for x in args.disk]
        assert hosts == hostnames
45,494 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/lib/vendor/remoto/lib/vendor/execnet/deprecated.py | """
some deprecated calls
(c) 2008-2009, Holger Krekel and others
"""
import execnet
def PopenGateway(python=None):
    """ instantiate a gateway to a subprocess
    started with the given 'python' executable.
    """
    APIWARN("1.0.0b4", "use makegateway('popen')")
    popen_spec = execnet.XSpec("popen")
    popen_spec.python = python
    return execnet.default_group.makegateway(popen_spec)
def SocketGateway(host, port):
    """ This Gateway provides interaction with a remote process
    by connecting to a specified socket. On the remote
    side you need to manually start a small script
    (py/execnet/script/socketserver.py) that accepts
    SocketGateway connections or use the experimental
    new_remote() method on existing gateways.
    """
    APIWARN("1.0.0b4", "use makegateway('socket=host:port')")
    socket_spec = execnet.XSpec("socket=%s:%s" % (host, port))
    return execnet.default_group.makegateway(socket_spec)
def SshGateway(sshaddress, remotepython=None, ssh_config=None):
    """ instantiate a remote ssh process with the
    given 'sshaddress' and remotepython version.
    you may specify an ssh_config file.
    """
    APIWARN("1.0.0b4", "use makegateway('ssh=host')")
    ssh_spec = execnet.XSpec("ssh=%s" % sshaddress)
    ssh_spec.python = remotepython
    ssh_spec.ssh_config = ssh_config
    return execnet.default_group.makegateway(ssh_spec)
def APIWARN(version, msg, stacklevel=3):
    """Emit a DeprecationWarning tagged with the version it applies since."""
    import warnings
    deprecation = DeprecationWarning("(since version %s) %s" % (version, msg))
    warnings.warn(deprecation, stacklevel=stacklevel)
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,495 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/calamari.py | import errno
import logging
import os
from ceph_deploy import hosts, exc
from ceph_deploy.lib import remoto
LOG = logging.getLogger(__name__)
def distro_is_supported(distro_name):
    """
    An enforcer of supported distros that can differ from what ceph-deploy
    supports.

    Returns True when *distro_name* is one of the Calamari-supported
    distros, False otherwise.
    """
    supported = ('centos', 'redhat', 'ubuntu', 'debian')
    # membership already yields the bool — the if/return True/return False
    # dance was redundant
    return distro_name in supported
def connect(args):
    """Configure each host in args.hosts as a Calamari minion.

    Per host: verify the distro is supported, write the salt minion
    config pointing at args.master, install salt-minion and diamond,
    and (on redhat/centos) enable and start the salt-minion service.
    """
    for hostname in args.hosts:
        distro = hosts.get(hostname, username=args.username)
        if not distro_is_supported(distro.normalized_name):
            raise exc.UnsupportedPlatform(
                distro.distro_name,
                distro.codename,
                distro.release
            )
        LOG.info(
            'Distro info: %s %s %s',
            distro.name,
            distro.release,
            distro.codename
        )
        LOG.info('assuming that a repository with Calamari packages is already configured.')
        LOG.info('Refer to the docs for examples (http://ceph.com/ceph-deploy/docs/conf.html)')
        rlogger = logging.getLogger(hostname)
        # Emplace minion config prior to installation so that it is present
        # when the minion first starts.
        minion_config_dir = os.path.join('/etc/salt/', 'minion.d')
        minion_config_file = os.path.join(minion_config_dir, 'calamari.conf')
        rlogger.debug('creating config dir: %s' % minion_config_dir)
        distro.conn.remote_module.makedir(minion_config_dir, [errno.EEXIST])
        rlogger.debug(
            'creating the calamari salt config: %s' % minion_config_file
        )
        distro.conn.remote_module.write_file(
            minion_config_file,
            ('master: %s\n' % args.master).encode('utf-8')
        )
        distro.packager.install('salt-minion')
        distro.packager.install('diamond')
        # redhat/centos need to get the service started
        if distro.normalized_name in ['redhat', 'centos']:
            remoto.process.run(
                distro.conn,
                ['chkconfig', 'salt-minion', 'on']
            )
            remoto.process.run(
                distro.conn,
                ['service', 'salt-minion', 'start']
            )
        distro.conn.exit()
def calamari(args):
    """Dispatch the parsed `calamari` subcommand."""
    if args.subcommand != 'connect':
        return
    connect(args)
def make(parser):
    """
    Install and configure Calamari nodes. Assumes that a repository with
    Calamari packages is already configured. Refer to the docs for examples
    (http://ceph.com/ceph-deploy/docs/conf.html)
    """
    # NOTE: the docstring above doubles as the subcommand description,
    # so it is kept verbatim.
    subcommands = parser.add_subparsers(dest='subcommand')
    subcommands.required = True
    connect_parser = subcommands.add_parser(
        'connect',
        help='Configure host(s) to connect to Calamari master'
    )
    connect_parser.add_argument(
        '--master',
        nargs='?',
        metavar='MASTER SERVER',
        help="The domain for the Calamari master server"
    )
    connect_parser.add_argument(
        'hosts',
        nargs='+',
    )
    parser.set_defaults(
        func=calamari,
    )
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,496 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/hosts/centos/mon/__init__.py | from ceph_deploy.hosts.common import mon_add as add # noqa
from ceph_deploy.hosts.common import mon_create as create # noqa
from ceph_deploy.hosts.common import osd_create_all as create_all # noqa
from ceph_deploy.hosts.common import mgr_create as create_mgr # noqa
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,497 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/lib/vendor/remoto/lib/vendor/execnet/__init__.py | """
execnet: pure python lib for connecting to local and remote Python Interpreters.
(c) 2012, Holger Krekel and others
"""
__version__ = '1.2.0'
from . import apipkg
# Lazily expose the public execnet API: apipkg maps each attribute to a
# '<relative module>:<name>' target and imports it on first access, so
# `import execnet` stays cheap.
apipkg.initpkg(__name__, {
    'PopenGateway': '.deprecated:PopenGateway',
    'SocketGateway': '.deprecated:SocketGateway',
    'SshGateway': '.deprecated:SshGateway',
    'makegateway': '.multi:makegateway',
    'set_execmodel': '.multi:set_execmodel',
    'HostNotFound': '.gateway_bootstrap:HostNotFound',
    'RemoteError': '.gateway_base:RemoteError',
    'TimeoutError': '.gateway_base:TimeoutError',
    'XSpec': '.xspec:XSpec',
    'Group': '.multi:Group',
    'MultiChannel': '.multi:MultiChannel',
    'RSync': '.rsync:RSync',
    'default_group': '.multi:default_group',
    'dumps': '.gateway_base:dumps',
    'loads': '.gateway_base:loads',
    'load': '.gateway_base:load',
    'dump': '.gateway_base:dump',
    'DataFormatError': '.gateway_base:DataFormatError',
})
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,498 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/lib/vendor/remoto/lib/vendor/execnet/script/quitserver.py | """
send a "quit" signal to a remote server
"""
import sys
import socket
hostport = sys.argv[1]
host, port = hostport.split(':')
hostport = (host, int(port))
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(hostport)
sock.sendall('"raise KeyboardInterrupt"\n')
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,499 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/tests/parser/test_calamari.py | import pytest
from ceph_deploy.cli import get_parser
from ceph_deploy.tests.util import assert_too_few_arguments
class TestParserCalamari(object):
def setup(self):
self.parser = get_parser()
def test_calamari_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('calamari --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy calamari' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_calamari_connect_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('calamari connect --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy calamari connect' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_calamari_connect_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('calamari connect'.split())
out, err = capsys.readouterr()
assert_too_few_arguments(err)
def test_calamari_connect_one_host(self):
args = self.parser.parse_args('calamari connect host1'.split())
assert args.hosts == ['host1']
def test_calamari_connect_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args('calamari connect'.split() + hostnames)
assert args.hosts == hostnames
def test_calamari_connect_master_default_is_none(self):
args = self.parser.parse_args('calamari connect host1'.split())
assert args.master is None
def test_calamari_connect_master_custom(self):
args = self.parser.parse_args('calamari connect --master master.ceph.com host1'.split())
assert args.master == "master.ceph.com"
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,500 | sheepcat/ceph-deploy | refs/heads/master | /ceph_deploy/osd.py | import argparse
import json
import logging
import os
import re
import sys
import time
from textwrap import dedent
from ceph_deploy import conf, exc, hosts, mon
from ceph_deploy.util import constants, system, packages, net
from ceph_deploy.cliutil import priority
from ceph_deploy.lib import remoto
LOG = logging.getLogger(__name__)
def get_bootstrap_osd_key(cluster):
    """
    Read and return the bootstrap-osd keyring for `cluster` from the
    current working directory.

    Raises RuntimeError when the keyring file does not exist (i.e.
    `gatherkeys` has not been run yet).
    """
    keyring_path = '{cluster}.bootstrap-osd.keyring'.format(cluster=cluster)
    try:
        with open(keyring_path, 'rb') as keyring:
            return keyring.read()
    except IOError:
        raise RuntimeError('bootstrap-osd keyring not found; run \'gatherkeys\'')
def get_admin_key():
    """
    Read and return the contents of the client.admin keyring from
    /root/client.admin.keyring.

    Raises RuntimeError when the keyring file does not exist (i.e.
    `gatherkeys` has not been run yet).
    """
    keyring_path = '/root/client.admin.keyring'
    try:
        with open(keyring_path, 'rb') as keyring:
            return keyring.read()
    except IOError:
        raise RuntimeError('client.admin.keyring not found; run \'gatherkeys\'')
def create_osd_keyring(conn, cluster, key):
    """
    Ensure the bootstrap-osd keyring exists on the remote OSD node,
    writing `key` to it when absent. Runs on the osd node via `conn`.
    """
    keyring_path = '/var/lib/ceph/bootstrap-osd/{cluster}.keyring'.format(
        cluster=cluster,
    )
    if conn.remote_module.path_exists(keyring_path):
        return
    conn.logger.warning('osd keyring does not exist yet, creating one')
    conn.remote_module.write_keyring(keyring_path, key)
def osd_tree(conn, cluster):
    """
    Return the parsed output of ``ceph osd tree --format=json`` for
    `cluster` as a dict.

    ``--format=json`` emits booleans as the strings 'true'/'false', so any
    top-level string booleans are converted to real booleans before the
    dictionary is returned.  Issue #8108

    Returns an empty dict when the command output is not valid JSON.
    """
    ceph_executable = system.executable_path(conn, 'ceph')
    command = [
        ceph_executable,
        '--cluster={cluster}'.format(cluster=cluster),
        'osd',
        'tree',
        '--format=json',
    ]
    out, err, code = remoto.process.check(
        conn,
        command,
    )
    try:
        # `out` is a list of bytes lines; join and decode before parsing
        loaded_json = json.loads(b''.join(out).decode('utf-8'))
        # convert boolean strings to actual booleans because
        # --format=json fails to do this properly
        for k, v in loaded_json.items():
            if v == 'true':
                loaded_json[k] = True
            elif v == 'false':
                loaded_json[k] = False
        return loaded_json
    except ValueError:
        return {}
def osd_status_check(conn, cluster):
    """
    Check the status of an OSD. Make sure all are up and in.

    Runs ``ceph osd stat --format=json`` and returns the parsed dict.
    What good output would look like::

        {
            "epoch": 8,
            "num_osds": 1,
            "num_up_osds": 1,
            "num_in_osds": "1",
            "full": "false",
            "nearfull": "false"
        }

    Note how the booleans are actually strings, so we need to take that into
    account and fix it before returning the dictionary. Issue #8108

    Returns an empty dict on a remote disconnect or unparsable output.
    """
    ceph_executable = system.executable_path(conn, 'ceph')
    command = [
        ceph_executable,
        '--cluster={cluster}'.format(cluster=cluster),
        'osd',
        'stat',
        '--format=json',
    ]
    try:
        out, err, code = remoto.process.check(
            conn,
            command,
        )
    except TypeError:
        # XXX This is a bug in remoto. If the other end disconnects with a timeout
        # it will return a None, and here we are expecting a 3 item tuple, not a None
        # so it will break with a TypeError. Once remoto fixes this, we no longer need
        # this try/except.
        return {}
    try:
        loaded_json = json.loads(b''.join(out).decode('utf-8'))
        # convert boolean strings to actual booleans because
        # --format=json fails to do this properly
        for k, v in loaded_json.items():
            if v == 'true':
                loaded_json[k] = True
            elif v == 'false':
                loaded_json[k] = False
        return loaded_json
    except ValueError:
        return {}
def catch_osd_errors(conn, logger, args):
    """
    Look for possible issues when checking the status of an OSD and
    report them back to the user: OSDs that are down or out, and full
    or near-full conditions.
    """
    logger.info('checking OSD status...')
    status = osd_status_check(conn, args.cluster)
    total_osds = int(status.get('num_osds', 0))
    up_osds = int(status.get('num_up_osds', 0))
    in_osds = int(status.get('num_in_osds', 0))

    down_count = total_osds - up_osds
    if down_count > 0:
        logger.warning('there %s %d OSD%s down' % (
            'is' if down_count == 1 else 'are',
            down_count,
            '' if down_count == 1 else 's')
        )

    out_count = total_osds - in_osds
    if out_count > 0:
        logger.warning('there %s %d OSD%s out' % (
            'is' if out_count == 1 else 'are',
            out_count,
            '' if out_count == 1 else 's')
        )

    if status.get('full', False):
        logger.warning('OSDs are full!')
    if status.get('nearfull', False):
        logger.warning('OSDs are near full!')
def prepare_disk(
        conn,
        cluster,
        disk,
        journal,
        activate_prepared_disk,
        init,
        zap,
        fs_type,
        dmcrypt,
        dmcrypt_dir,
        storetype,
        block_wal,
        block_db):
    """
    Run on osd node, prepares a data disk for use.

    Builds a ``ceph-disk -v prepare`` command line from the given options
    and runs it remotely; optionally enables the ceph service so that
    udev-triggered activation brings the OSD up.

    :param conn: remote connection to the OSD node
    :param cluster: cluster name, passed as --cluster
    :param disk: data device/path to prepare
    :param journal: optional journal device, appended after the data disk
    :param activate_prepared_disk: when True, enable the init service so
        udev activation wakes the OSD up (used by the `create` subcommand)
    :param init: remote init system name ('systemd' or 'sysvinit')
    :param zap: pass --zap-disk to destroy the existing partition table
    :param fs_type: filesystem passed to --fs-type
    :param dmcrypt: pass --dmcrypt to encrypt the device
    :param dmcrypt_dir: key dir for --dmcrypt-key-dir (only with dmcrypt)
    :param storetype: 'bluestore', 'filestore', or None (ceph-disk default)
    :param block_wal: bluestore --block.wal device (bluestore only)
    :param block_db: bluestore --block.db device (bluestore only)
    """
    ceph_disk_executable = system.executable_path(conn, 'ceph-disk')
    args = [
        ceph_disk_executable,
        '-v',
        'prepare',
        ]
    if zap:
        args.append('--zap-disk')
    if dmcrypt:
        args.append('--dmcrypt')
        if dmcrypt_dir is not None:
            args.append('--dmcrypt-key-dir')
            args.append(dmcrypt_dir)
    if storetype == 'bluestore':
        # block.wal/block.db are only meaningful for bluestore
        if block_wal:
            args.append('--block.wal')
            args.append(block_wal)
        if block_db:
            args.append('--block.db')
            args.append(block_db)
    if storetype:
        args.append('--' + storetype)
    args.extend([
        '--cluster',
        cluster,
        '--fs-type',
        fs_type,
        '--',
        disk,
    ])
    if journal is not None:
        args.append(journal)
    remoto.process.run(
        conn,
        args
    )
    if activate_prepared_disk:
        # we don't simply run activate here because we don't know
        # which partition ceph-disk prepare created as the data
        # volume. instead, we rely on udev to do the activation and
        # just give it a kick to ensure it wakes up. we also enable
        # ceph.target, the other key piece of activate.
        if init == 'systemd':
            system.enable_service(conn, "ceph.target")
        elif init == 'sysvinit':
            system.enable_service(conn, "ceph")
def exceeds_max_osds(args, reasonable=20):
    """
    Count OSDs per host and return a dict mapping hostname -> OSD count
    for only those hosts that would receive more than `reasonable` OSDs.

    Creating many OSDs on a single host can exceed the max allowed PIDs
    in a system, so callers use this to warn the user.

    The check is done against the ``args.disk`` object that should look like::

        [
            ('cephnode-01', '/dev/sdb', '/dev/sda5'),
            ('cephnode-01', '/dev/sdc', '/dev/sda6'),
            ...
        ]
    """
    # local renamed from `hosts`, which shadowed the module-level `hosts`
    # import; counting is one O(n) pass instead of list.count() per host
    per_host_count = {}
    for item in args.disk:
        host = item[0]
        per_host_count[host] = per_host_count.get(host, 0) + 1
    return dict(
        (host, count) for host, count in per_host_count.items()
        if count > reasonable
    )
def prepare(args, cfg, activate_prepared_disk):
    """
    Prepare OSD data disks on the remote hosts listed in ``args.disk``.

    For the special filestore-on-f2fs path, each disk tuple carries
    extended per-host layout fields and preparation is delegated to the
    distro's ``mon.create_all``, followed by a ceph.conf sync to every
    host. Otherwise, each host:disk[:journal] tuple is bootstrapped
    (conf + bootstrap-osd keyring) and handed to ``ceph-disk prepare``.

    :param args: parsed command-line namespace (uses ``args.disk`` et al.)
    :param cfg: the loaded cluster configuration
    :param activate_prepared_disk: when True, enable the init service so
        udev activation brings the OSD up (the ``create`` subcommand)
    :raises exc.GenericError: when one or more hosts failed
    """
    LOG.debug(
        'Preparing cluster %s disks %s',
        args.cluster,
        ' '.join(':'.join(x or '' for x in t) for t in args.disk),
    )
    if args.filestore:
        if args.fs_type == 'f2fs':
            LOG.debug('start run prepare_all_disk')
            admin_keyring_path = '/Ceph/Meta/Keyring/client.admin.keyring'
            admin_key = get_admin_key()
            errors = 0
            # hostname, pIP, cIP, disk, journal, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used,ssd_tt_used, hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type
            for hostname, pIP, cIP, disk, journal, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used, ssd_tt_used, hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type in args.disk:
                try:
                    distro = hosts.get(
                        hostname,
                        username=args.username,
                        callbacks=[packages.ceph_is_installed]
                    )
                    LOG.info('distro info: %s %s %s', distro.name, distro.release, distro.codename)
                    rlogger = logging.getLogger(hostname)
                    # get client.admin.keyring from manager node
                    if not distro.conn.remote_module.path_exists(admin_keyring_path):
                        LOG.debug('client.admin.keyring not exist yet, creating one')
                        distro.conn.remote_module.write_keyring(admin_keyring_path, admin_key)
                    # ensure remote hostname is good to go
                    #hostname_is_compatible(distro.conn, rlogger, hostname)
                    localIP = net.get_nonlocal_ip(hostname)
                    LOG.debug('get host ip : %s', localIP)
                    LOG.debug('Create:add mon to ceph.conf')
                    distro.mon.create_all(distro, args, hostname, pIP, cIP, disk, journal, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used, ssd_tt_used, hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type)
                    distro.conn.exit()
                except RuntimeError as e:
                    LOG.error(e)
                    errors += 1
            # start sync all osd ceph.conf
            conf_data = conf.ceph.load_raw(args)
            errnos = 0
            for hostname, pIP, cIP, disk, journal, strategy, mode, nvme_def_used, nvme_tt_nums, ssd_def_used, ssd_tt_used, hdd_def_used, hdd_tt_nums, cache_nums, cache_dis_type in args.disk:
                LOG.debug('sync configfile for host %s ...', hostname)
                try:
                    distro = hosts.get(hostname, username=args.username)
                    distro.conn.remote_module.write_conf(
                        args.cluster,
                        conf_data,
                        args.overwrite_conf,
                    )
                    distro.conn.exit()
                except RuntimeError as e:
                    LOG.error(e)
                    errnos += 1
            if errnos:
                # BUG FIX: this previously interpolated `errors` (the
                # monitor-creation failure count) instead of `errnos`,
                # the sync-failure count actually being tested
                raise exc.GenericError('Failed to sync configfile %d monitors' % errnos)
            return
    hosts_in_danger = exceeds_max_osds(args)
    if hosts_in_danger:
        LOG.warning('if ``kernel.pid_max`` is not increased to a high enough value')
        LOG.warning('the following hosts will encounter issues:')
        for host, count in hosts_in_danger.items():
            LOG.warning('Host: %8s, OSDs: %s' % (host, count))
    key = get_bootstrap_osd_key(cluster=args.cluster)
    bootstrapped = set()
    errors = 0
    for hostname, disk, journal in args.disk:
        localIP = net.get_nonlocal_ip(hostname)
        LOG.debug('hostname:%s ; ip:%s' % (hostname, localIP))
        try:
            if disk is None:
                raise exc.NeedDiskError(hostname)
            distro = hosts.get(
                hostname,
                username=args.username,
                callbacks=[packages.ceph_is_installed]
            )
            LOG.info(
                'Distro info: %s %s %s',
                distro.name,
                distro.release,
                distro.codename
            )
            if hostname not in bootstrapped:
                # first OSD on this host: push ceph.conf and the
                # bootstrap-osd keyring before preparing any disk
                bootstrapped.add(hostname)
                LOG.debug('Deploying osd to %s', hostname)
                conf_data = conf.ceph.load_raw(args)
                distro.conn.remote_module.write_conf(
                    args.cluster,
                    conf_data,
                    args.overwrite_conf
                )
                create_osd_keyring(distro.conn, args.cluster, key)
            LOG.debug('Preparing host %s disk %s journal %s activate %s',
                      hostname, disk, journal, activate_prepared_disk)
            storetype = None
            if args.bluestore:
                storetype = 'bluestore'
            if args.filestore:
                storetype = 'filestore'
            prepare_disk(
                distro.conn,
                cluster=args.cluster,
                disk=disk,
                journal=journal,
                activate_prepared_disk=activate_prepared_disk,
                init=distro.init,
                zap=args.zap_disk,
                fs_type=args.fs_type,
                dmcrypt=args.dmcrypt,
                dmcrypt_dir=args.dmcrypt_key_dir,
                storetype=storetype,
                block_wal=args.block_wal,
                block_db=args.block_db
            )
            # give the OSD a few seconds to start
            time.sleep(5)
            catch_osd_errors(distro.conn, distro.conn.logger, args)
            LOG.debug('Host %s is now ready for osd use.', hostname)
            distro.conn.exit()
        except RuntimeError as e:
            LOG.error(e)
            errors += 1
    if errors:
        raise exc.GenericError('Failed to create %d OSDs' % errors)
def activate(args, cfg):
    """
    Activate previously prepared OSD disks on each HOST:DISK[:JOURNAL]
    tuple in ``args.disk`` via ``ceph-disk activate``, then enable the
    appropriate init service so the OSDs come back after a reboot.
    """
    LOG.debug(
        'Activating cluster %s disks %s',
        args.cluster,
        # join elements of t with ':', t's with ' '
        # allow None in elements of t; print as empty
        ' '.join(':'.join((s or '') for s in t) for t in args.disk),
    )
    for hostname, disk, journal in args.disk:
        distro = hosts.get(
            hostname,
            username=args.username,
            callbacks=[packages.ceph_is_installed]
        )
        LOG.info(
            'Distro info: %s %s %s',
            distro.name,
            distro.release,
            distro.codename
        )
        LOG.debug('activating host %s disk %s', hostname, disk)
        LOG.debug('will use init type: %s', distro.init)
        ceph_disk_executable = system.executable_path(distro.conn, 'ceph-disk')
        remoto.process.run(
            distro.conn,
            [
                ceph_disk_executable,
                '-v',
                'activate',
                '--mark-init',
                distro.init,
                '--mount',
                disk,
            ],
        )
        # give the OSD a few seconds to start
        time.sleep(5)
        catch_osd_errors(distro.conn, distro.conn.logger, args)
        # make sure the OSD service persists across reboots
        if distro.init == 'systemd':
            system.enable_service(distro.conn, "ceph.target")
        elif distro.init == 'sysvinit':
            system.enable_service(distro.conn, "ceph")
        distro.conn.exit()
def disk_zap(args):
    """
    Destroy the partition table and content of every HOST:DISK pair in
    ``args.disk``: the device is zeroed first, then ``ceph-disk zap``
    is run remotely.

    :raises RuntimeError: when a tuple is missing the hostname or disk
    """
    for hostname, disk, journal in args.disk:
        if not disk or not hostname:
            raise RuntimeError('zap command needs both HOSTNAME and DISK but got "%s %s"' % (hostname, disk))
        LOG.debug('zapping %s on %s', disk, hostname)
        distro = hosts.get(
            hostname,
            username=args.username,
            callbacks=[packages.ceph_is_installed]
        )
        LOG.info(
            'Distro info: %s %s %s',
            distro.name,
            distro.release,
            distro.codename
        )
        # wipe the start of the device before ceph-disk zap runs
        distro.conn.remote_module.zeroing(disk)
        ceph_disk_executable = system.executable_path(distro.conn, 'ceph-disk')
        remoto.process.run(
            distro.conn,
            [
                ceph_disk_executable,
                'zap',
                disk,
            ],
        )
        distro.conn.exit()
def disk_list(args, cfg):
    """
    Run ``ceph-disk list`` on every host in ``args.disk``; output is
    streamed through the remote connection's logging.
    """
    for hostname, disk, journal in args.disk:
        distro = hosts.get(
            hostname,
            username=args.username,
            callbacks=[packages.ceph_is_installed]
        )
        LOG.info(
            'Distro info: %s %s %s',
            distro.name,
            distro.release,
            distro.codename
        )
        LOG.debug('Listing disks on {hostname}...'.format(hostname=hostname))
        ceph_disk_executable = system.executable_path(distro.conn, 'ceph-disk')
        remoto.process.run(
            distro.conn,
            [
                ceph_disk_executable,
                'list',
            ],
        )
        distro.conn.exit()
def osd_list(args, cfg):
    """
    Print metadata for every OSD found on the hosts in ``args.disk``.

    The osd tree is fetched once from the first monitor; each host's OSD
    directories under ``constants.osd_path`` are then cross-referenced
    with that host's ``ceph-disk list`` output and the matching tree node,
    and the result is logged via ``print_osd``.
    """
    monitors = mon.get_mon_initial_members(args, error_on_empty=True, _cfg=cfg)
    # get the osd tree from a monitor host
    mon_host = monitors[0]
    distro = hosts.get(
        mon_host,
        username=args.username,
        callbacks=[packages.ceph_is_installed]
    )
    tree = osd_tree(distro.conn, args.cluster)
    distro.conn.exit()
    interesting_files = ['active', 'magic', 'whoami', 'journal_uuid']
    for hostname, disk, journal in args.disk:
        distro = hosts.get(hostname, username=args.username)
        remote_module = distro.conn.remote_module
        osds = distro.conn.remote_module.listdir(constants.osd_path)
        ceph_disk_executable = system.executable_path(distro.conn, 'ceph-disk')
        # NOTE(review): the lines in `output` are assumed to be str here
        # (get_osd_mount_point splits them with a str regex) — verify
        # bytes vs str handling of remoto output on Python 3
        output, err, exit_code = remoto.process.check(
            distro.conn,
            [
                ceph_disk_executable,
                'list',
            ]
        )
        for _osd in osds:
            osd_path = os.path.join(constants.osd_path, _osd)
            journal_path = os.path.join(osd_path, 'journal')
            _id = int(_osd.split('-')[-1])  # split on dash, get the id
            osd_name = 'osd.%s' % _id
            metadata = {}
            json_blob = {}
            # piggy back from ceph-disk and get the mount point
            device = get_osd_mount_point(output, osd_name)
            if device:
                metadata['device'] = device
            # read interesting metadata from files
            for f in interesting_files:
                osd_f_path = os.path.join(osd_path, f)
                if remote_module.path_exists(osd_f_path):
                    metadata[f] = remote_module.readline(osd_f_path)
            # do we have a journal path?
            if remote_module.path_exists(journal_path):
                metadata['journal path'] = remote_module.get_realpath(journal_path)
            # is this OSD in osd tree?
            for blob in tree['nodes']:
                if blob.get('id') == _id:  # matches our OSD
                    json_blob = blob
            print_osd(
                distro.conn.logger,
                hostname,
                osd_path,
                json_blob,
                metadata,
            )
        distro.conn.exit()
def get_osd_mount_point(output, osd_name):
    """
    piggy back from `ceph-disk list` output and get the mount point
    by matching the line where the partition mentions the OSD name

    For example, if the name of the osd is `osd.1` and the output from
    `ceph-disk list` looks like this::

        /dev/sda :
         /dev/sda1 other, ext2, mounted on /boot
         /dev/sda2 other
         /dev/sda5 other, LVM2_member
        /dev/sdb :
         /dev/sdb1 ceph data, active, cluster ceph, osd.1, journal /dev/sdb2
         /dev/sdb2 ceph journal, for /dev/sdb1
        /dev/sr0 other, unknown
        /dev/sr1 other, unknown

    Then `/dev/sdb1` would be the right mount point. We piggy back like this
    because ceph-disk does *a lot* to properly calculate those values and we
    don't want to re-implement all the helpers for this.

    Returns None when no line mentions `osd_name`.

    :param output: A list of lines from stdout
    :param osd_name: The actual osd name, like `osd.1`
    """
    for line in output:
        line_parts = re.split(r'[,\s]+', line)
        # BUG FIX: blank or one-field lines previously raised IndexError
        # when indexing line_parts[1] before checking for a match
        if len(line_parts) < 2:
            continue
        if osd_name in line_parts:
            # a matching line's second field is the device (the first
            # field is '' because the line starts with whitespace)
            return line_parts[1]
def print_osd(logger, hostname, osd_path, json_blob, metadata, journal=None):
    """
    Log a formatted, human-readable summary of a single OSD: its path,
    osd-tree attributes, optional journal, and collected metadata.
    """
    separator = '-' * 40
    logger.info(separator)
    logger.info('%s' % osd_path.split('/')[-1])
    logger.info(separator)
    rows = (
        ('Path', osd_path),
        ('ID', json_blob.get('id')),
        ('Name', json_blob.get('name')),
        ('Status', json_blob.get('status')),
        ('Reweight', json_blob.get('reweight')),
    )
    for label, value in rows:
        logger.info('%-14s %s' % (label, value))
    if journal:
        logger.info('Journal: %s' % journal)
    for key, value in metadata.items():
        logger.info("%-13s %s" % (key.capitalize(), value))
    logger.info(separator)
def osd(args):
    """
    Entry point for the ``osd`` subcommand: load the cluster conf and
    dispatch to the requested action.
    """
    cfg = conf.ceph.load(args)
    actions = {
        'list': lambda: osd_list(args, cfg),
        'prepare': lambda: prepare(args, cfg, activate_prepared_disk=False),
        'create': lambda: prepare(args, cfg, activate_prepared_disk=True),
        'activate': lambda: activate(args, cfg),
    }
    action = actions.get(args.subcommand)
    if action is None:
        LOG.error('subcommand %s not implemented', args.subcommand)
        sys.exit(1)
    action()
def disk(args):
    """
    Entry point for the ``disk`` subcommand: load the cluster conf and
    dispatch to the requested action.
    """
    cfg = conf.ceph.load(args)
    actions = {
        'list': lambda: disk_list(args, cfg),
        'prepare': lambda: prepare(args, cfg, activate_prepared_disk=False),
        'activate': lambda: activate(args, cfg),
        'zap': lambda: disk_zap(args),
    }
    action = actions.get(args.subcommand)
    if action is None:
        LOG.error('subcommand %s not implemented', args.subcommand)
        sys.exit(1)
    action()
def colon_separated(s):
    """
    argparse type: parse a colon-separated HOST[:...] spec into a fixed
    15-tuple of fields (missing fields are None).

    For 4 or more parts the fields are filled in declaration order
    (host, pIP, cIP, disk, journal, ...); the 1/2/3-part forms keep the
    legacy HOST[:DISK[:JOURNAL]] meaning. Bare device names like "sdb"
    are expanded to "/dev/sdb".
    """
    field_names = ('host', 'pIP', 'cIP', 'disk', 'journal', 'strategy',
                   'mode', 'nvme_def_used', 'nvme_tt_nums', 'ssd_def_used',
                   'ssd_tt_used', 'hdd_def_used', 'hdd_tt_nums',
                   'cache_nums', 'cache_dis_type')
    parts = s.split(':')
    count = len(parts) - 1
    if count > 14:
        raise argparse.ArgumentTypeError('must be in form HOST:DISK[:JOURNAL]:STRATEGY:MODE:NVME_DEF_USED:NVME_TT_NUMS:SSD_DEF_USED:SSD_TT_NUMS:HDD_DEF_USED:HDD_TT_NUMS:CACHE_NUMS:CACHE_DIS_TYPE')
    values = dict((name, None) for name in field_names)
    if count == 1:
        # legacy short form: HOST:DISK
        names = ('host', 'disk')
    elif count == 2:
        # legacy short form: HOST:DISK:JOURNAL
        names = ('host', 'disk', 'journal')
    else:
        # 0 parts -> just host; 3+ parts fill fields in declaration order
        names = field_names[:count + 1]
    for name, value in zip(names, parts):
        values[name] = value
    if values['disk']:
        # allow just "sdb" to mean /dev/sdb
        values['disk'] = os.path.join('/dev', values['disk'])
    if values['journal']:
        values['journal'] = os.path.join('/dev', values['journal'])
    return tuple(values[name] for name in field_names)
@priority(50)
def make(parser):
    """
    Prepare a data disk on remote host.

    Populates `parser` with the ``osd`` subcommands (list, create,
    prepare, activate) and their arguments, and sets ``osd`` as the
    dispatch function.
    """
    # NOTE(review): locals such as `osd_list` below shadow the
    # module-level functions of the same name; harmless inside this
    # function but rename candidates
    sub_command_help = dedent("""
    Manage OSDs by preparing a data disk on remote host.
    For paths, first prepare and then activate:
    ceph-deploy osd prepare {osd-node-name}:/path/to/osd
    ceph-deploy osd activate {osd-node-name}:/path/to/osd
    For disks or journals the `create` command will do prepare and activate
    for you.
    """
    )
    parser.formatter_class = argparse.RawDescriptionHelpFormatter
    parser.description = sub_command_help
    osd_parser = parser.add_subparsers(dest='subcommand')
    osd_parser.required = True
    osd_list = osd_parser.add_parser(
        'list',
        help='List OSD info from remote host(s)'
    )
    osd_list.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK[:JOURNAL]',
        type=colon_separated,
        help='remote host to list OSDs from'
    )
    # `create` = prepare + activate in one step
    osd_create = osd_parser.add_parser(
        'create',
        help='Create new Ceph OSD daemon by preparing and activating disk'
    )
    osd_create.add_argument(
        '--zap-disk',
        action='store_true',
        help='destroy existing partition table and content for DISK',
    )
    osd_create.add_argument(
        '--fs-type',
        metavar='FS_TYPE',
        choices=['xfs',
                 'btrfs',
                 'f2fs'
                 ],
        default='xfs',
        help='filesystem to use to format DISK (xfs, btrfs, f2fs)',
    )
    osd_create.add_argument(
        '--dmcrypt',
        action='store_true',
        help='use dm-crypt on DISK',
    )
    osd_create.add_argument(
        '--dmcrypt-key-dir',
        metavar='KEYDIR',
        default='/etc/ceph/dmcrypt-keys',
        help='directory where dm-crypt keys are stored',
    )
    osd_create.add_argument(
        '--filestore',
        action='store_true', default=None,
        help='filestore objectstore',
    )
    osd_create.add_argument(
        '--bluestore',
        action='store_true', default=None,
        help='bluestore objectstore',
    )
    osd_create.add_argument(
        '--block-db',
        default=None,
        help='bluestore block.db path'
    )
    osd_create.add_argument(
        '--block-wal',
        default=None,
        help='bluestore block.wal path'
    )
    osd_create.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK[:JOURNAL]',
        type=colon_separated,
        help='host and disk to prepare',
    )
    osd_prepare = osd_parser.add_parser(
        'prepare',
        help='Prepare a disk for use as Ceph OSD by formatting/partitioning disk'
    )
    osd_prepare.add_argument(
        '--filestore',
        action='store_true', default=None,
        help='filestore objectstore',
    )
    osd_prepare.add_argument(
        '--zap-disk',
        action='store_true',
        help='destroy existing partition table and content for DISK',
    )
    osd_prepare.add_argument(
        '--fs-type',
        metavar='FS_TYPE',
        choices=['xfs',
                 'btrfs',
                 'f2fs'
                 ],
        default='xfs',
        help='filesystem to use to format DISK (xfs, btrfs, f2fs)',
    )
    osd_prepare.add_argument(
        '--dmcrypt',
        action='store_true',
        help='use dm-crypt on DISK',
    )
    osd_prepare.add_argument(
        '--dmcrypt-key-dir',
        metavar='KEYDIR',
        default='/etc/ceph/dmcrypt-keys',
        help='directory where dm-crypt keys are stored',
    )
    osd_prepare.add_argument(
        '--bluestore',
        action='store_true', default=None,
        help='bluestore objectstore',
    )
    osd_prepare.add_argument(
        '--block-db',
        default=None,
        help='bluestore block.db path'
    )
    osd_prepare.add_argument(
        '--block-wal',
        default=None,
        help='bluestore block.wal path'
    )
    # prepare accepts the extended f2fs layout fields (see colon_separated)
    osd_prepare.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK[:JOURNAL]:STRATEGY:MODE:NVME_DEF_USED:NVME_TT_NUMS:SSD_DEF_USED:SSD_TT_NUMS:HDD_DEF_USED:HDD_TT_NUMS:CACHE_NUMS:CACHE_DIS_TYPE',
        type=colon_separated,
        help='host and disk to prepare, f2fs need ip argument',
    )
    osd_activate = osd_parser.add_parser(
        'activate',
        help='Start (activate) Ceph OSD from disk that was previously prepared'
    )
    osd_activate.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK[:JOURNAL]',
        type=colon_separated,
        help='host and disk to activate',
    )
    parser.set_defaults(
        func=osd,
    )
@priority(50)
def make_disk(parser):
    """
    Manage disks on a remote host.

    Populates `parser` with the ``disk`` subcommands (zap, list,
    prepare, activate) and their arguments, and sets ``disk`` as the
    dispatch function.
    """
    # NOTE(review): locals such as `disk_zap`/`disk_list` below shadow
    # the module-level functions of the same name; harmless inside this
    # function but rename candidates
    disk_parser = parser.add_subparsers(dest='subcommand')
    disk_parser.required = True
    disk_zap = disk_parser.add_parser(
        'zap',
        help='destroy existing partition table and content for DISK',
    )
    disk_zap.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK',
        type=colon_separated,
        help='host and disk'
    )
    disk_list = disk_parser.add_parser(
        'list',
        help='List disk info from remote host(s)'
    )
    disk_list.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK',
        type=colon_separated,
        help='remote host to list OSDs from'
    )
    disk_prepare = disk_parser.add_parser(
        'prepare',
        help='Prepare a disk for use as Ceph OSD by formatting/partitioning disk'
    )
    disk_prepare.add_argument(
        '--zap-disk',
        action='store_true',
        help='destroy existing partition table and content for DISK',
    )
    disk_prepare.add_argument(
        '--fs-type',
        metavar='FS_TYPE',
        choices=['xfs',
                 'btrfs',
                 'f2fs'
                 ],
        default='xfs',
        help='filesystem to use to format DISK (xfs, btrfs, f2fs)',
    )
    disk_prepare.add_argument(
        '--dmcrypt',
        action='store_true',
        help='use dm-crypt on DISK',
    )
    disk_prepare.add_argument(
        '--dmcrypt-key-dir',
        metavar='KEYDIR',
        default='/etc/ceph/dmcrypt-keys',
        help='directory where dm-crypt keys are stored',
    )
    disk_prepare.add_argument(
        '--bluestore',
        action='store_true', default=None,
        help='bluestore objectstore',
    )
    disk_prepare.add_argument(
        '--filestore',
        action='store_true', default=None,
        help='filestore objectstore',
    )
    disk_prepare.add_argument(
        '--block-db',
        default=None,
        help='bluestore block.db path'
    )
    disk_prepare.add_argument(
        '--block-wal',
        default=None,
        help='bluestore block.wal path'
    )
    disk_prepare.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK',
        type=colon_separated,
        help='host and disk to prepare',
    )
    disk_activate = disk_parser.add_parser(
        'activate',
        help='Start (activate) Ceph OSD from disk that was previously prepared'
    )
    disk_activate.add_argument(
        'disk',
        nargs='+',
        metavar='HOST:DISK',
        type=colon_separated,
        help='host and disk to activate',
    )
    parser.set_defaults(
        func=disk,
    )
| {"/ceph_deploy/config.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/tests/test_remotes.py": ["/ceph_deploy/hosts/remotes.py"], "/ceph_deploy/hosts/common.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/calamari.py": ["/ceph_deploy/__init__.py"], "/ceph_deploy/hosts/centos/mon/__init__.py": ["/ceph_deploy/hosts/common.py"], "/ceph_deploy/osd.py": ["/ceph_deploy/__init__.py"]} |
45,501 | femtotrader/alpha_vantage | refs/heads/develop | /alpha_vantage/timeseries.py | from .alphavantage import AlphaVantage as av
class TimeSeries(av):
    """This class implements all the api calls to time series
    """
    @av._output_format
    @av._call_api_on_func
    def get_intraday(self, symbol, interval='15min', outputsize='compact'):
        """ Return intraday time series in two json objects as data and
        meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
            interval: time interval between two consecutive values,
                supported values are '1min', '5min', '15min', '30min', '60min'
                (default '15min')
            outputsize: The size of the call, supported values are
                'compact' and 'full'; the first returns the last 100 points
                in the data series, and 'full' returns the full-length
                intraday times series, commonly above 1MB (default 'compact')
        """
        _FUNCTION_KEY = "TIME_SERIES_INTRADAY"
        # the decorators perform the request; this method only names the AV
        # function and the JSON keys that hold the data and the metadata
        return _FUNCTION_KEY, "Time Series ({})".format(interval), 'Meta Data'
    @av._output_format
    @av._call_api_on_func
    def get_daily(self, symbol, outputsize='compact'):
        """ Return daily time series in two json objects as data and
        meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
            outputsize: The size of the call, supported values are
                'compact' and 'full'; the first returns the last 100 points
                in the data series, and 'full' returns the full-length
                daily times series, commonly above 1MB (default 'compact')
        """
        _FUNCTION_KEY = "TIME_SERIES_DAILY"
        return _FUNCTION_KEY, 'Time Series (Daily)', 'Meta Data'
    @av._output_format
    @av._call_api_on_func
    def get_daily_adjusted(self, symbol, outputsize='compact'):
        """ Return daily adjusted (date, daily open, daily high, daily low,
        daily close, daily split/dividend-adjusted close, daily volume)
        time series in two json objects as data and
        meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
            outputsize: The size of the call, supported values are
                'compact' and 'full'; the first returns the last 100 points
                in the data series, and 'full' returns the full-length
                daily times series, commonly above 1MB (default 'compact')
        """
        _FUNCTION_KEY = "TIME_SERIES_DAILY_ADJUSTED"
        return _FUNCTION_KEY, 'Time Series (Daily)', 'Meta Data'
    @av._output_format
    @av._call_api_on_func
    def get_weekly(self, symbol):
        """ Return weekly time series in two json objects as data and
        meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
        """
        _FUNCTION_KEY = "TIME_SERIES_WEEKLY"
        return _FUNCTION_KEY, 'Weekly Time Series', 'Meta Data'
    @av._output_format
    @av._call_api_on_func
    def get_monthly(self, symbol):
        """ Return monthly time series in two json objects as data and
        meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
        """
        _FUNCTION_KEY = "TIME_SERIES_MONTHLY"
        return _FUNCTION_KEY, 'Monthly Time Series', 'Meta Data'
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,502 | femtotrader/alpha_vantage | refs/heads/develop | /alpha_vantage/techindicators.py | from .alphavantage import AlphaVantage as av
class TechIndicators(av):
"""This class implements all the technical indicator api calls
"""
    @av._output_format
    @av._call_api_on_func
    def get_sma(self, symbol, interval='60min', time_period=20, series_type='close'):
        """ Return simple moving average time series in two json objects as data and
        meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
            interval: time interval between two consecutive values,
                supported values are '1min', '5min', '15min', '30min', '60min',
                'daily', 'weekly', 'monthly' (default '60min')
            time_period: How many data points to average (default 20)
            series_type: The desired price type in the time series. Four types
                are supported: 'close', 'open', 'high', 'low' (default 'close')
        """
        _FUNCTION_KEY = "SMA"
        # the decorators perform the request; this method only names the AV
        # function and the JSON keys that hold the data and the metadata
        return _FUNCTION_KEY, 'Technical Analysis: SMA', 'Meta Data'
    @av._output_format
    @av._call_api_on_func
    def get_ema(self, symbol, interval='60min', time_period=20, series_type='close'):
        """ Return exponential moving average time series in two json objects
        as data and meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
            interval: time interval between two consecutive values,
                supported values are '1min', '5min', '15min', '30min', '60min',
                'daily', 'weekly', 'monthly' (default '60min')
            time_period: How many data points to average (default 20)
            series_type: The desired price type in the time series. Four types
                are supported: 'close', 'open', 'high', 'low' (default 'close')
        """
        _FUNCTION_KEY = "EMA"
        return _FUNCTION_KEY, 'Technical Analysis: EMA', 'Meta Data'
    @av._output_format
    @av._call_api_on_func
    def get_wma(self, symbol, interval='60min', time_period=20, series_type='close'):
        """ Return weighted moving average time series in two json objects
        as data and meta_data. It raises ValueError when problems arise

        Keyword Arguments:
            symbol: the symbol for the equity we want to get its data
            interval: time interval between two consecutive values,
                supported values are '1min', '5min', '15min', '30min', '60min',
                'daily', 'weekly', 'monthly' (default '60min')
            time_period: How many data points to average (default 20)
            series_type: The desired price type in the time series. Four types
                are supported: 'close', 'open', 'high', 'low' (default 'close')
        """
        _FUNCTION_KEY = "WMA"
        return _FUNCTION_KEY, 'Technical Analysis: WMA', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_dema(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the double exponential moving average (DEMA) time series
    in two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "DEMA", 'Technical Analysis: DEMA', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_tema(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the triple exponential moving average (TEMA) time series
    in two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "TEMA", 'Technical Analysis: TEMA', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_trima(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the triangular moving average (TRIMA) time series in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "TRIMA", 'Technical Analysis: TRIMA', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_kama(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the Kaufman adaptive moving average (KAMA) time series in
    two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "KAMA", 'Technical Analysis: KAMA', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_mama(self, symbol, interval='60min', time_period=20, series_type='close',
             fastlimit=None, slowlimit=None):
    """ Return the MESA adaptive moving average (MAMA) time series in
    two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        fastlimit:  positive floats for the fast limit are accepted
            (default=None)
        slowlimit:  positive floats for the slow limit are accepted
            (default=None)
    """
    return "MAMA", 'Technical Analysis: MAMA', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_t3(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the triple exponential moving average (T3) time series in
    two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "T3", 'Technical Analysis: T3', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_macd(self, symbol, interval='60min', series_type='close',
             fastperiod=None, slowperiod=None, signalperiod=None):
    """ Return the moving average convergence/divergence (MACD) time
    series in two json objects as data and meta_data. It raises
    ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        fastperiod:  positive integers are accepted (default=None)
        slowperiod:  positive integers are accepted (default=None)
        signalperiod:  positive integers are accepted (default=None)
    """
    _FUNCTION_KEY = "MACD"
    return _FUNCTION_KEY, 'Technical Analysis: MACD', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_macdext(self, symbol, interval='60min', series_type='close',
                fastperiod=None, slowperiod=None, signalperiod=None, fastmatype=None,
                slowmatype=None, signalmatype=None):
    """ Return the moving average convergence/divergence (MACDEXT) time
    series, with controllable moving average types, in two json objects
    as data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        fastperiod:  positive integers are accepted (default=None)
        slowperiod:  positive integers are accepted (default=None)
        signalperiod:  positive integers are accepted (default=None)
        fastmatype:  moving average type for the faster moving average.
            By default, fastmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.
        slowmatype:  moving average type for the slower moving average.
            By default, slowmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.
        signalmatype:  moving average type for the signal moving average.
            By default, signalmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    return "MACDEXT", 'Technical Analysis: MACDEXT', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_stoch(self, symbol, interval='60min', fastkperiod=None,
              slowkperiod=None, slowdperiod=None, slowkmatype=None, slowdmatype=None):
    """ Return the stochastic oscillator (STOCH) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        fastkperiod:  the time period of the fastk moving average.
            Positive integers are accepted (default=None)
        slowkperiod:  the time period of the slowk moving average.
            Positive integers are accepted (default=None)
        slowdperiod:  the time period of the slowd moving average.
            Positive integers are accepted (default=None)
        slowkmatype:  moving average type for the slowk moving average.
            By default, slowkmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.
        slowdmatype:  moving average type for the slowd moving average.
            By default, slowdmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    _FUNCTION_KEY = "STOCH"
    return _FUNCTION_KEY, 'Technical Analysis: STOCH', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_stochf(self, symbol, interval='60min', fastkperiod=None,
               fastdperiod=None, fastdmatype=None):
    """ Return the stochastic fast oscillator (STOCHF) values in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        fastkperiod:  the time period of the fastk moving average.
            Positive integers are accepted (default=None)
        fastdperiod:  the time period of the fastd moving average.
            Positive integers are accepted (default=None)
        fastdmatype:  moving average type for the fastd moving average.
            By default, fastdmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    _FUNCTION_KEY = "STOCHF"
    return _FUNCTION_KEY, 'Technical Analysis: STOCHF', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_rsi(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the relative strength index (RSI) time series in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "RSI", 'Technical Analysis: RSI', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_stochrsi(self, symbol, interval='60min', time_period=20,
                 series_type='close', fastkperiod=None, fastdperiod=None, fastdmatype=None):
    """ Return the stochastic relative strength index (STOCHRSI) values
    in two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        fastkperiod:  the time period of the fastk moving average.
            Positive integers are accepted (default=None)
        fastdperiod:  the time period of the fastd moving average.
            Positive integers are accepted (default=None)
        fastdmatype:  moving average type for the fastd moving average.
            By default, fastdmatype=0. Integers 0 - 8 are accepted
            (check down the mappings) or the string containing the math
            type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    _FUNCTION_KEY = "STOCHRSI"
    return _FUNCTION_KEY, 'Technical Analysis: STOCHRSI', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_willr(self, symbol, interval='60min', time_period=20):
    """ Return the Williams' %R (WILLR) values in two json objects as
    data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "WILLR", 'Technical Analysis: WILLR', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_adx(self, symbol, interval='60min', time_period=20):
    """ Return the average directional movement index (ADX) values in
    two json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "ADX", 'Technical Analysis: ADX', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_adxr(self, symbol, interval='60min', time_period=20):
    """ Return the average directional movement index rating (ADXR)
    values in two json objects as data and meta_data. It raises
    ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "ADXR", 'Technical Analysis: ADXR', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_apo(self, symbol, interval='60min', series_type='close',
            fastperiod=None, slowperiod=None, matype=None):
    """ Return the absolute price oscillator (APO) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        fastperiod:  positive integers are accepted (default=None)
        slowperiod:  positive integers are accepted (default=None)
        matype:  moving average type. By default, matype=0.
            Integers 0 - 8 are accepted (check down the mappings) or the
            string containing the math type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    _FUNCTION_KEY = "APO"
    return _FUNCTION_KEY, 'Technical Analysis: APO', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ppo(self, symbol, interval='60min', series_type='close',
            fastperiod=None, slowperiod=None, matype=None):
    """ Return the percentage price oscillator (PPO) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        fastperiod:  positive integers are accepted (default=None)
        slowperiod:  positive integers are accepted (default=None)
        matype:  moving average type. By default, matype=0.
            Integers 0 - 8 are accepted (check down the mappings) or the
            string containing the math type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    _FUNCTION_KEY = "PPO"
    return _FUNCTION_KEY, 'Technical Analysis: PPO', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_mom(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the momentum (MOM) values in two json objects as data
    and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "MOM", 'Technical Analysis: MOM', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_bop(self, symbol, interval='60min', time_period=20):
    """ Return the balance of power (BOP) values in two json objects as
    data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "BOP", 'Technical Analysis: BOP', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_cci(self, symbol, interval='60min', time_period=20):
    """ Return the commodity channel index (CCI) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "CCI", 'Technical Analysis: CCI', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_cmo(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the Chande momentum oscillator (CMO) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "CMO", 'Technical Analysis: CMO', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_roc(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the rate of change (ROC) values in two json objects as
    data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "ROC", 'Technical Analysis: ROC', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_rocr(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the rate of change ratio (ROCR) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "ROCR", 'Technical Analysis: ROCR', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_aroon(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the aroon (AROON) values in two json objects as data
    and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "AROON", 'Technical Analysis: AROON', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_aroonosc(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the aroon oscillator (AROONOSC) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "AROONOSC", 'Technical Analysis: AROONOSC', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_mfi(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the money flow index (MFI) values in two json objects as
    data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "MFI", 'Technical Analysis: MFI', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_trix(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the 1-day rate of change of a triple smooth exponential
    moving average (TRIX) in two json objects as data and meta_data.
    It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    _FUNCTION_KEY = "TRIX"
    return _FUNCTION_KEY, 'Technical Analysis: TRIX', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ultsoc(self, symbol, interval='60min', timeperiod1=None,
               timeperiod2=None, timeperiod3=None):
    """ Return the ultimate oscillator (ULTOSC) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        timeperiod1:  the first time period indicator. Positive integers
            are accepted. By default, timeperiod1=7
        timeperiod2:  the second time period indicator. Positive
            integers are accepted. By default, timeperiod2=14
        timeperiod3:  the third time period indicator. Positive integers
            are accepted. By default, timeperiod3=28
    """
    # NOTE: the method keeps the historical name get_ultsoc for backward
    # compatibility, but the underlying API function is ULTOSC.
    _FUNCTION_KEY = "ULTOSC"
    return _FUNCTION_KEY, 'Technical Analysis: ULTOSC', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_dx(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the directional movement index (DX) values in two json
    objects as data and meta_data. It raises ValueError when problems
    arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "DX", 'Technical Analysis: DX', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_minus_di(self, symbol, interval='60min', time_period=20):
    """ Return the minus directional indicator (MINUS_DI) values in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "MINUS_DI", 'Technical Analysis: MINUS_DI', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_plus_di(self, symbol, interval='60min', time_period=20):
    """ Return the plus directional indicator (PLUS_DI) values in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "PLUS_DI", 'Technical Analysis: PLUS_DI', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_minus_dm(self, symbol, interval='60min', time_period=20):
    """ Return the minus directional movement (MINUS_DM) values in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "MINUS_DM", 'Technical Analysis: MINUS_DM', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_plus_dm(self, symbol, interval='60min', time_period=20):
    """ Return the plus directional movement (PLUS_DM) values in two
    json objects as data and meta_data. It raises ValueError when
    problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "PLUS_DM", 'Technical Analysis: PLUS_DM', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_bbands(self, symbol, interval='60min', time_period=20, series_type='close',
               nbdevup=None, nbdevdn=None, matype=None):
    """ Return the Bollinger bands (BBANDS) values in two json objects
    as data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
        nbdevup:  the standard deviation multiplier of the upper band.
            Positive integers are accepted (default=2)
        nbdevdn:  the standard deviation multiplier of the lower band.
            Positive integers are accepted (default=2)
        matype:  moving average type. By default, matype=0.
            Integers 0 - 8 are accepted (check down the mappings) or the
            string containing the math type can also be used.

            * 0 = Simple Moving Average (SMA),
            * 1 = Exponential Moving Average (EMA),
            * 2 = Weighted Moving Average (WMA),
            * 3 = Double Exponential Moving Average (DEMA),
            * 4 = Triple Exponential Moving Average (TEMA),
            * 5 = Triangular Moving Average (TRIMA),
            * 6 = T3 Moving Average,
            * 7 = Kaufman Adaptive Moving Average (KAMA),
            * 8 = MESA Adaptive Moving Average (MAMA)
    """
    _FUNCTION_KEY = "BBANDS"
    return _FUNCTION_KEY, 'Technical Analysis: BBANDS', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_midpoint(self, symbol, interval='60min', time_period=20, series_type='close'):
    """ Return the midpoint (MIDPOINT) values in two json objects as
    data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
        series_type:  the desired price type in the time series. Four
            types are supported: 'close', 'open', 'high', 'low'
            (default 'close')
    """
    return "MIDPOINT", 'Technical Analysis: MIDPOINT', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_midprice(self, symbol, interval='60min', time_period=20):
    """ Return the midprice (MIDPRICE) values in two json objects as
    data and meta_data. It raises ValueError when problems arise
    Keyword Arguments:
        symbol:  the symbol for the equity we want to get its data
        interval:  time interval between two consecutive values,
            supported values are '1min', '5min', '15min', '30min',
            '60min', 'daily', 'weekly', 'monthly' (default '60min')
        time_period:  how many data points to average (default 20)
    """
    return "MIDPRICE", 'Technical Analysis: MIDPRICE', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_sar(self, symbol, interval='60min', acceleration=None, maximum=None):
""" Return the midprice values in two json objects as
data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
acceleration: The acceleration factor. Positive floats are accepted (
default 0.01)
maximum: The acceleration factor maximum value. Positive floats
are accepted (default 0.20 )
"""
_FUNCTION_KEY = "SAR"
return _FUNCTION_KEY, 'Technical Analysis: SAR', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_trange(self, symbol, interval='60min'):
""" Return the true range values in two json
objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
"""
_FUNCTION_KEY = "TRANGE"
return _FUNCTION_KEY, 'Technical Analysis: TRANGE', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_atr(self, symbol, interval='60min', time_period=20):
""" Return the average true range values in two json objects as
data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
time_period: How many data points to average (default 20)
"""
_FUNCTION_KEY = "ATR"
return _FUNCTION_KEY, 'Technical Analysis: ATR', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_natr(self, symbol, interval='60min', time_period=20):
""" Return the normalized average true range values in two json objects
as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
time_period: How many data points to average (default 20)
"""
_FUNCTION_KEY = "NATR"
return _FUNCTION_KEY, 'Technical Analysis: NATR', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ad(self, symbol, interval='60min'):
""" Return the Chaikin A/D line values in two json
objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
"""
_FUNCTION_KEY = "AD"
return _FUNCTION_KEY, 'Technical Analysis: Chaikin A/D', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_adosc(self, symbol, interval='60min', fastperiod=None,
slowperiod=None):
""" Return the Chaikin A/D oscillator values in two
json objects as data and meta_data. It raises ValueError when problems
arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min'
fastperiod: Positive integers are accepted (default=None)
slowperiod: Positive integers are accepted (default=None)
"""
_FUNCTION_KEY = "ADOSC"
return _FUNCTION_KEY, 'Technical Analysis: ADOSC', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_obv(self, symbol, interval='60min'):
""" Return the on balance volume values in two json
objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
"""
_FUNCTION_KEY = "OBV"
return _FUNCTION_KEY, 'Technical Analysis: OBV', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ht_trendline(self, symbol, interval='60min', series_type='close'):
""" Return the Hilbert transform, instantaneous trendline values in two
json objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
series_type: The desired price type in the time series. Four types
are supported: 'close', 'open', 'high', 'low' (default 'close')
"""
_FUNCTION_KEY = "HT_TRENDLINE"
return _FUNCTION_KEY, 'Technical Analysis: HT_TRENDLINE', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ht_sine(self, symbol, interval='60min', series_type='close'):
""" Return the Hilbert transform, sine wave values in two
json objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
series_type: The desired price type in the time series. Four types
are supported: 'close', 'open', 'high', 'low' (default 'close')
"""
_FUNCTION_KEY = "HT_SINE"
return _FUNCTION_KEY, 'Technical Analysis: HT_SINE', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ht_trendmode(self, symbol, interval='60min', series_type='close'):
""" Return the Hilbert transform, trend vs cycle mode in two
json objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
series_type: The desired price type in the time series. Four types
are supported: 'close', 'open', 'high', 'low' (default 'close')
"""
_FUNCTION_KEY = "HT_TRENDMODE"
return _FUNCTION_KEY, 'Technical Analysis: HT_TRENDMODE', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ht_dcperiod(self, symbol, interval='60min', series_type='close'):
""" Return the Hilbert transform, dominant cycle period in two
json objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
series_type: The desired price type in the time series. Four types
are supported: 'close', 'open', 'high', 'low' (default 'close')
"""
_FUNCTION_KEY = "HT_DCPERIOD"
return _FUNCTION_KEY, 'Technical Analysis: HT_DCPERIOD', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ht_dcphase(self, symbol, interval='60min', series_type='close'):
""" Return the Hilbert transform, dominant cycle phase in two
json objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
series_type: The desired price type in the time series. Four types
are supported: 'close', 'open', 'high', 'low' (default 'close')
"""
_FUNCTION_KEY = "HT_DCPHASE"
return _FUNCTION_KEY, 'Technical Analysis: HT_DCPHASE', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_ht_phasor(self, symbol, interval='60min', series_type='close'):
""" Return the Hilbert transform, phasor components in two
json objects as data and meta_data. It raises ValueError when problems arise
Keyword Arguments:
symbol: the symbol for the equity we want to get its data
interval: time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min', 'daily',
'weekly', 'monthly' (default '60min')
series_type: The desired price type in the time series. Four types
are supported: 'close', 'open', 'high', 'low' (default 'close')
"""
_FUNCTION_KEY = "HT_PHASOR"
return _FUNCTION_KEY, 'Technical Analysis: HT_PHASOR', 'Meta Data'
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,503 | femtotrader/alpha_vantage | refs/heads/develop | /test_alpha_vantage/testalphavantage.py | #!/usr/bin/env python
from ..alpha_vantage.alphavantage import AlphaVantage
from ..alpha_vantage.timeseries import TimeSeries
from ..alpha_vantage.techindicators import TechIndicators
from ..alpha_vantage.sectorperformance import SectorPerformances
from nose.tools import assert_true, assert_false
from simplejson import loads, dumps
from pandas import DataFrame as df
import unittest
import timeit
import os
import time
class TestAlphaVantage(unittest.TestCase):
"""
Test data request different implementations
"""
_API_KEY_TEST = os.environ['API_KEY']
_API_EQ_NAME_TEST = 'MSFT'
    def _assert_result_is_format(self, func, output_format='json', **args):
        """Call *func* and check that its (data, meta_data) result has the
        types expected for the given output format.

        Keyword arguments:
        func -- the function to assert its format
        output_format -- the format of the call ('json' or 'pandas')
        **args -- the parameters for the call
        """
        # Time the call only for diagnostic output below.
        stime = timeit.default_timer()
        data, meta_data = func(**args)
        elapsed = timeit.default_timer() - stime
        # TODO: workaround to not call the api that often when testing
        time.sleep(0.3)
        print('Function: {} - Format: {} - Took: {}'.format(func.__name__,
                                                            output_format, elapsed))
        if output_format == 'json':
            self.assertIsInstance(data, dict, 'Result Data must be a dictionary')
            # meta_data is optional; only check its type when present
            if meta_data is not None:
                self.assertIsInstance(meta_data, dict, 'Result Meta Data must be a \
dictionary')
        elif output_format == 'pandas':
            self.assertIsInstance(data, df, 'Result Data must be a pandas data frame')
            # meta_data stays a plain dict even in pandas mode
            if meta_data is not None:
                self.assertIsInstance(meta_data, dict, 'Result Meta Data must be a \
dictionary')
def test_key_none(self):
"""Raise an error when a key has not been given
"""
try:
AlphaVantage()
self.fail(msg='A None api key must raise an error')
except ValueError:
self.assertTrue(True)
def test_exchange_supported(self):
""" Check that the function returns false when asked for an unsupported
exchange.
"""
av = AlphaVantage(key=TestAlphaVantage._API_KEY_TEST)
assert_true(av.is_exchange_supported('ETR') is not None)
assert_true(av.is_exchange_supported('Nonsense') is None)
def test_get_intraday_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ts.get_intraday,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ts.get_intraday, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_daily_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ts.get_daily,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ts.get_daily, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_daily_adjusted_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ts.get_daily_adjusted,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ts.get_daily_adjusted, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_weekly_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ts.get_weekly,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ts.get_weekly, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_monthly_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ts.get_monthly,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ts = TimeSeries(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ts.get_monthly, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_sma_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_sma,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_sma, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ema_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ema,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ema, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_wma_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_wma,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_wma, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_dema_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_dema,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_dema, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_dema_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_tema,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_tema, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_trima_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_trima,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_trima, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_kama_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_kama,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_kama, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_mama_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_mama,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_mama, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_t3_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_t3,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_t3, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_macd_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_macd,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_macd, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_macdext_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_macdext,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_macdext, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_stoch_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_stoch,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_stoch, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_stochf_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_stochf,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_stochf, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_rsi_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_rsi,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_rsi, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_stochrsi_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_stochrsi,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_stochrsi, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_willr_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_willr,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_willr, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_adx_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_adx,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_adx, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_adxr_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_adxr,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_adxr, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_apo_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_apo,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_apo, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ppo_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ppo,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ppo, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_mom_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_mom,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_mom, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_bop_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_bop,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_bop, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_cci_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_cci,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_cci, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_cmo_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_cmo,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_cmo, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_roc_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_roc,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_roc, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_rocr_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_rocr,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_rocr, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_aroon_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_aroon,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_aroon, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_aroonosc_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_aroonosc,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_aroonosc, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_mfi_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_mfi,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_mfi, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_trix_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_trix,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_trix, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ultsoc_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ultsoc,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ultsoc, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_dx_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_dx,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_dx, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_minus_di_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_minus_di,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_minus_di, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_plus_di_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_plus_di,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_plus_di, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_minus_dm_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_minus_dm,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_minus_dm, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_plus_dm_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_plus_dm,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_plus_dm, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_bbands_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_bbands,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_bbands, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_midpoint_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_midpoint,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_midpoint, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_midprice_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_midprice,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_midprice, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_sar_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_sar,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_sar, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_trange_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_trange,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_trange, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_atr_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_atr,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_atr, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_natr_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_natr,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_natr, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ad_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ad,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ad, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_adosc_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_adosc,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_adosc, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_obv_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_obv,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_obv, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ht_trendline_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ht_trendline,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ht_trendline, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ht_sine_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ht_sine,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ht_sine, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ht_trendmode_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ht_trendmode,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ht_trendmode, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ht_dcperiod_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ht_dcperiod,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ht_dcperiod, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ht_dcphase_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ht_dcphase,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ht_dcphase, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_ht_phasor_is_format(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(ti.get_ht_phasor,
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
# Test panda as output
ti = TechIndicators(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(ti.get_ht_phasor, output_format='pandas',
symbol=TestAlphaVantage._API_EQ_NAME_TEST)
def test_get_sector(self):
"""Result must be a dictionary containning the json data
"""
# Test dictionary as output
sp = SectorPerformances(key=TestAlphaVantage._API_KEY_TEST)
self._assert_result_is_format(sp.get_sector)
# Test panda as output
sp = SectorPerformances(key=TestAlphaVantage._API_KEY_TEST, output_format='pandas')
self._assert_result_is_format(sp.get_sector, output_format='pandas')
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,504 | femtotrader/alpha_vantage | refs/heads/develop | /alpha_vantage/sectorperformance.py | #!/usr/bin/env python
from .alphavantage import AlphaVantage as av
from functools import wraps
import pandas
import re
class SectorPerformances(av):
    """Implements the sector performance api calls.

    Results are returned as a dict or a pandas DataFrame depending on the
    instance-wide ``output_format`` (or a per-call override).
    """
    def percentage_to_float(self, val):
        """Convert a percentage string of the form 'f.f%' into f.f/100.

        Keyword Arguments:
            val: The string to convert, e.g. '12.3%'
        """
        return float(val.strip('%'))/100

    def _output_format_sector(func, override=None):
        """ Decorator in charge of giving the output its right format, either
        json or pandas (replacing the % for usable floats, range 0-1.0)

        Keyword Arguments:
            func: The function to be decorated
            override: Override the internal format of the call, default None
        Returns:
            A decorator for the format sector api call
        """
        @wraps(func)
        def _format_wrapper(self, *args, **kwargs):
            json_response, data_key, meta_data_key = func(self, *args, **kwargs)
            if isinstance(data_key, list):
                # Replace every 'f.f%' string with its float in [0, 1]
                data = {key: {k: self.percentage_to_float(v)
                        for k, v in json_response[key].items()} for key in data_key}
            else:
                data = json_response[data_key]
            # TODO: Fix orientation in a better way
            meta_data = json_response[meta_data_key]
            # Allow the output parameter to be overridden per call
            if override is None:
                output_format = self.output_format.lower()
            elif override.lower() in ('json', 'pandas'):
                # Bug fix: `'json' or 'pandas' in override.lower()` was
                # always truthy, so any override string was accepted.
                output_format = override.lower()
            else:
                raise ValueError('Format: {} is not supported'.format(override))
            # Choose output format
            if output_format == 'json':
                return data, meta_data
            elif output_format == 'pandas':
                data_pandas = pandas.DataFrame.from_dict(data,
                                                         orient='columns')
                # Strip the leading rank prefix for nicer column names
                col_names = [re.sub(r'\d+.', '', name).strip(' ')
                             for name in list(data_pandas)]
                data_pandas.columns = col_names
                return data_pandas, meta_data
            else:
                raise ValueError('Format: {} is not supported'.format(
                    self.output_format))
        return _format_wrapper

    @_output_format_sector
    @av._call_api_on_func
    def get_sector(self):
        """This API returns the realtime and historical sector performances
        calculated from S&P500 incumbents.

        Returns:
            A pandas or a dictionary with the results from the api call
        """
        _FUNCTION_KEY = "SECTOR"
        # The keys for the json output
        _DATA_KEYS = ["Rank A: Real-Time Performance",
                      "Rank B: 1 Day Performance",
                      "Rank C: 5 Day Performance",
                      "Rank D: 1 Month Performance",
                      "Rank E: 3 Month Performance",
                      "Rank F: Year-to-Date (YTD) Performance",
                      "Rank G: 1 Year Performance",
                      "Rank H: 3 Year Performance",
                      "Rank I: 5 Year Performance",
                      "Rank J: 10 Year Performance"]
        return _FUNCTION_KEY, _DATA_KEYS, 'Meta Data'
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,505 | femtotrader/alpha_vantage | refs/heads/develop | /alpha_vantage/alphavantage.py | try:
# Python 3 import
from urllib.request import urlopen
except ImportError:
# Python 2.* import
from urllib2 import urlopen
from simplejson import loads
from functools import wraps
import inspect
import pandas
import re
class AlphaVantage:
    """ Base class where the decorators and base function for the other
    classes of this python wrapper will inherit from.
    """
    _ALPHA_VANTAGE_API_URL = "http://www.alphavantage.co/query?"
    # Moving-average types accepted by the api; the list index is the
    # integer code the api expects (0 = SMA ... 8 = MAMA).
    _ALPHA_VANTAGE_MATH_MAP = ['SMA', 'EMA', 'WMA', 'DEMA', 'TEMA', 'TRIMA', 'T3',
                               'KAMA', 'MAMA']
    _EXCHANGE_SUPPORTED = { 'ASX': 'Australian Securities Exchange',
                            'BOM': 'Bombay Stock Exchange',
                            'BIT': 'Borsa Italiana Milan Stock Exchange',
                            'TSE': 'Canadian/Toronto Securities Exchange',
                            'FRA': 'Deutsche Boerse Frankfurt Stock Exchange',
                            'ETR': 'Deutsche Boerse Frankfurt Stock Exchange',
                            'AMS': 'Euronext Amsterdam',
                            'EBR': 'Euronext Brussels',
                            'ELI': 'Euronext Lisbon',
                            'EPA': 'Euronext Paris',
                            'LON': 'London Stock Exchange',
                            'MCX': 'Moscow Exchange',
                            'NASDAQ': 'NASDAQ Exchange',
                            'CPH': 'NASDAQ OMX Copenhagen',
                            'HEL': 'NASDAQ OMX Helsinki',
                            'ICE': 'NASDAQ OMX Iceland',
                            'STO': 'NASDAQ OMX Stockholm',
                            'NSE': 'National Stock Exchange of India',
                            'NYSE': 'New York Stock Exchange',
                            'SGX': 'Singapore Exchange',
                            'SHA': 'Shanghai Stock Exchange',
                            'SHE': 'Shenzhen Stock Exchange',
                            'TPE': 'Taiwan Stock Exchange',
                            'TYO': 'Tokyo Stock Exchange'}

    def __init__(self, key=None, retries=5, output_format='json', treat_info_as_error=True):
        """ Initialize the class

        Keyword Arguments:
            key: Alpha Vantage api key (mandatory)
            retries: Maximum amount of retries in case of faulty connection or
                server not able to answer the call.
            output_format: Either 'json' or 'pandas'
            treat_info_as_error: Treat an "Information" payload from the api
                as an error and raise, instead of returning it.

        Raises:
            ValueError: no api key was given.
        """
        if key is None:
            raise ValueError('Get a free key from the alphavantage website: https://www.alphavantage.co/support/#api-key')
        self.key = key
        self.retries = retries
        self.output_format = output_format
        self.treat_info_as_error = treat_info_as_error

    def _retry(func):
        """ Decorator for retrying api calls (in case of errors from the api
        side in bringing the data). Re-raises the last ValueError once all
        retries are exhausted.

        Keyword Arguments:
            func: The function to be retried
        """
        @wraps(func)
        def _retry_wrapper(self, *args, **kwargs):
            error_message = ""
            for retry in range(self.retries + 1):
                try:
                    return func(self, *args, **kwargs)
                except ValueError as err:
                    error_message = str(err)
            raise ValueError(str(error_message))
        return _retry_wrapper

    @classmethod
    def _call_api_on_func(cls, func):
        """ Decorator for forming the api call with the arguments of the
        function, it works by taking the arguments given to the function
        and building the url to call the api on it

        Keyword Arguments:
            func: The function to be decorated
        """
        # Argument handling. inspect.getargspec was removed in Python 3.11;
        # getfullargspec is its drop-in replacement (same .args/.defaults).
        getspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        argspec = getspec(func)
        try:
            # Assume most of the cases have a mix between positional and
            # named args
            positional_count = len(argspec.args) - len(argspec.defaults)
            defaults = dict(zip(argspec.args[positional_count:], argspec.defaults))
        except TypeError:
            if argspec.args:
                # No defaults
                positional_count = len(argspec.args)
                defaults = {}
            elif argspec.defaults:
                # Only defaults
                positional_count = 0
                defaults = argspec.defaults
        # Actual decorating
        @wraps(func)
        def _call_wrapper(self, *args, **kwargs):
            used_kwargs = kwargs.copy()
            # Get the used positional arguments given to the function
            # (NOTE(review): `args` excludes the bound `self`, so this zip can
            # skip one positional value; the loop below compensates by
            # indexing `args` directly and only falling back to used_kwargs.)
            used_kwargs.update(zip(argspec.args[positional_count:],
                                   args[positional_count:]))
            # Update the dictionary to include the default parameters from the
            # function
            used_kwargs.update({k: used_kwargs.get(k, d)
                                for k, d in defaults.items()})
            # Form the base url, the original function called must return
            # the function name defined in the alpha vantage api and the data
            # key for it and for its meta data.
            function_name, data_key, meta_data_key = func(self, *args, **kwargs)
            url = "{}function={}".format(AlphaVantage._ALPHA_VANTAGE_API_URL,
                                         function_name)
            for idx, arg_name in enumerate(argspec.args[1:]):
                try:
                    arg_value = args[idx]
                except IndexError:
                    arg_value = used_kwargs[arg_name]
                if 'matype' in arg_name and arg_value:
                    # If the argument name has matype, we gotta map the string
                    # or the integer
                    arg_value = self.map_to_matype(arg_value)
                if arg_value:
                    # Discard argument in the url formation if it was set to
                    # None (in other words, this will call the api with its
                    # internal defined parameter)
                    url = '{}&{}={}'.format(url, arg_name, arg_value)
            url = '{}&apikey={}'.format(url, self.key)
            return self._handle_api_call(url), data_key, meta_data_key
        return _call_wrapper

    @classmethod
    def _output_format(cls, func, override=None):
        """ Decorator in charge of giving the output its right format, either
        json or pandas

        Keyword Arguments:
            func: The function to be decorated
            override: Override the internal format of the call, default None
        """
        @wraps(func)
        def _format_wrapper(self, *args, **kwargs):
            json_response, data_key, meta_data_key = func(self, *args, **kwargs)
            data = json_response[data_key]
            if meta_data_key is not None:
                meta_data = json_response[meta_data_key]
            else:
                meta_data = None
            # Allow the output parameter to be overridden per call
            if override is None:
                output_format = self.output_format.lower()
            elif override.lower() in ('json', 'pandas'):
                # Bug fix: `'json' or 'pandas' in override.lower()` was
                # always truthy, so any override string was accepted.
                output_format = override.lower()
            else:
                raise ValueError('Format: {} is not supported'.format(override))
            # Choose output format
            if output_format == 'json':
                return data, meta_data
            elif output_format == 'pandas':
                data_pandas = pandas.DataFrame.from_dict(data,
                                                         orient='index', dtype=float)
                # Rename columns to have a nicer name (strip '1. ' prefixes)
                col_names = [re.sub(r'\d+.', '', name).strip(' ')
                             for name in list(data_pandas)]
                data_pandas.columns = col_names
                return data_pandas, meta_data
            else:
                raise ValueError('Format: {} is not supported'.format(
                    self.output_format))
        return _format_wrapper

    def map_to_matype(self, matype):
        """ Convert to the alpha vantage math type integer. It returns an
        integer correspondant to the type of math to apply to a function. It
        raises ValueError for any integer outside the supported range, or for
        an unknown math-type name.

        Keyword Arguments:
            matype: The math type of the alpha vantage api. It accepts
            integers or a string representing the math type.

                * 0 = Simple Moving Average (SMA),
                * 1 = Exponential Moving Average (EMA),
                * 2 = Weighted Moving Average (WMA),
                * 3 = Double Exponential Moving Average (DEMA),
                * 4 = Triple Exponential Moving Average (TEMA),
                * 5 = Triangular Moving Average (TRIMA),
                * 6 = T3 Moving Average,
                * 7 = Kaufman Adaptive Moving Average (KAMA),
                * 8 = MESA Adaptive Moving Average (MAMA)
        """
        try:
            value = int(matype)
        except (TypeError, ValueError):
            # Not integer-like: treat it as a math-type name; list.index
            # raises ValueError for unknown names.
            return AlphaVantage._ALPHA_VANTAGE_MATH_MAP.index(matype)
        # Bug fix: the old check `abs(value) > len(map)` accepted the
        # out-of-range code 9 as well as any negative integer.
        if not 0 <= value < len(AlphaVantage._ALPHA_VANTAGE_MATH_MAP):
            raise ValueError("The value {} is not supported".format(value))
        return value

    @_retry
    def _handle_api_call(self, url):
        """ Handle the return call from the api and return the parsed json
        response. It raises a ValueError on problems.

        Keyword Arguments:
            url: The url of the service
        """
        response = urlopen(url)
        url_response = response.read()
        json_response = loads(url_response)
        if not json_response:
            raise ValueError('Error getting data from the api, no return was given.')
        elif "Error Message" in json_response:
            raise ValueError(json_response["Error Message"])
        elif "Information" in json_response and self.treat_info_as_error:
            raise ValueError(json_response["Information"])
        return json_response

    def is_exchange_supported(self, exchange_name):
        """
        Get if a specific global exchange type is supported by this library

        Keyword Arguments:
            exchange_name: The exchange type to check for
        Returns:
            The description of the given key or None
        """
        try:
            return AlphaVantage._EXCHANGE_SUPPORTED[exchange_name]
        except KeyError:
            return None
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,506 | femtotrader/alpha_vantage | refs/heads/develop | /helpers/pipy_rst_convert.py | #!/usr/bin/env python
import pypandoc
import codecs
from os import path
if __name__ == '__main__':
    # Generate the PyPI-friendly README.rst from README.md using pandoc.
    repo_root = path.abspath(path.dirname(path.dirname(__file__)))
    source_md = path.join(repo_root, 'README.md')
    target_rst = path.join(repo_root, 'README.rst')
    rst_text = pypandoc.convert_file(source_md, 'rst')
    with codecs.open(target_rst, 'w+', encoding='utf8') as f:
        f.write(rst_text)
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,507 | femtotrader/alpha_vantage | refs/heads/develop | /setup.py | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))

# Prefer the pandoc-generated README.rst for the PyPI long description,
# falling back to a one-liner when it has not been built.
try:
    with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
        long_description = f.read()
except IOError:
    long_description = 'Python module to get stock data from the Alpha Vantage Api'

_CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'Topic :: Office/Business :: Financial :: Investment',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
]

setup(
    name='alpha_vantage',
    version='1.2.1',
    author='Romel J. Torres',
    author_email='romel.torres@gmail.com',
    license='MIT',
    description='Python module to get stock data from the Alpha Vantage Api',
    long_description=long_description,
    classifiers=_CLASSIFIERS,
    url='https://github.com/RomelTorres/alpha_vantage',
    install_requires=[
        'simplejson',
        'pandas',
        'nose'
    ],
    keywords=['stocks', 'market', 'finance', 'alpha_vantage', 'quotes',
              'shares'],
    packages=find_packages(exclude=['helpers', 'test_alpha_vantage', 'images']),
    package_data={
        'alpha_vantage': [],
    }
)
| {"/alpha_vantage/timeseries.py": ["/alpha_vantage/alphavantage.py"], "/alpha_vantage/techindicators.py": ["/alpha_vantage/alphavantage.py"], "/test_alpha_vantage/testalphavantage.py": ["/alpha_vantage/alphavantage.py", "/alpha_vantage/timeseries.py", "/alpha_vantage/techindicators.py", "/alpha_vantage/sectorperformance.py"], "/alpha_vantage/sectorperformance.py": ["/alpha_vantage/alphavantage.py"]} |
45,509 | cppmx/flask-api-quickstart | refs/heads/master | /app/auth/service.py | import datetime
import bcrypt
import jwt
from usernames import is_safe_username
from validate_email import validate_email
from app import app, db
from app.auth.exceptions import (
InvalidCredentialsError,
InvalidTokenError,
TokenExpiredError,
)
from app.exceptions import InvalidFieldError
from app.models import BlacklistToken, User
from app.utils import now
# Byte encoding used when bridging str <-> bytes for bcrypt and jwt.
ENCODING = "utf-8"
def create_user(username, email, password):
    """Validate sign-up data and persist a new User with a hashed password.

    Raises:
        InvalidFieldError: any field is malformed or already taken.
    """
    if not is_safe_username(username):
        raise InvalidFieldError(
            "username", "Username contains forbidden characters or is a reserved word."
        )
    if len(username) < 5:
        raise InvalidFieldError(
            "username", "Username has to be at least 5 characters long."
        )
    if len(password) < 8:
        raise InvalidFieldError(
            "password", "Password has to be at least 8 characters long."
        )
    if not validate_email(email):
        raise InvalidFieldError("email")
    # Uniqueness checks against existing rows.
    if User.query.filter_by(email=email).first() is not None:
        raise InvalidFieldError("email", "Email address is already used.")
    if User.query.filter_by(username=username).first() is not None:
        raise InvalidFieldError("username", "Username is already used.")
    db.session.add(User(username, email, hash_password(password), now()))
    db.session.commit()
def login_user(username, password):
    """Verify the credentials and return a fresh auth token.

    Raises:
        InvalidCredentialsError: unknown user or wrong password.
    """
    user = User.query.filter_by(username=username).first()
    if user is None or not check_password(password, user.password):
        raise InvalidCredentialsError()
    return encode_auth_token(user.id)
def hash_password(password):
    """Hash *password* with a fresh bcrypt salt, returning a utf-8 string."""
    digest = bcrypt.hashpw(password.encode(ENCODING), bcrypt.gensalt())
    return digest.decode(ENCODING)
def check_password(password, hashed_password):
    """Return True when *password* matches the stored bcrypt hash."""
    candidate = password.encode(ENCODING)
    return bcrypt.checkpw(candidate, hashed_password.encode(ENCODING))
def encode_auth_token(user_id):
    """Create a signed JWT carrying *user_id* with the configured lifetime."""
    lifetime = datetime.timedelta(
        days=app.config.get("AUTH_TOKEN_EXPIRATION_DAYS"),
        seconds=app.config.get("AUTH_TOKEN_EXPIRATION_SECONDS"),
    )
    claims = {"exp": now() + lifetime, "iat": now(), "sub": user_id}
    encoded = jwt.encode(claims, app.config["SECRET_KEY"], algorithm="HS256")
    return encoded.decode(ENCODING)
def decode_auth_token(token):
    """Decode *token* back to its payload, verifying signature and expiry.

    Raises:
        TokenExpiredError: the token's `exp` claim is in the past.
        InvalidTokenError: the token is malformed or its signature is invalid.
    """
    try:
        # Bug fix: PyJWT expects `algorithms` to be a *list* of allowed
        # algorithm names; the bare string "HS256" only worked by accident.
        payload = jwt.decode(token, app.config["SECRET_KEY"], algorithms=["HS256"])
        return payload
    except jwt.ExpiredSignatureError as ex:
        raise TokenExpiredError() from ex
    except jwt.InvalidTokenError as ex:
        raise InvalidTokenError() from ex
def blacklist_token(token):
    """Persist *token* so it can no longer be used to authenticate."""
    db.session.add(BlacklistToken(token, now()))
    db.session.commit()
def is_token_blacklisted(token):
    """Return True when *token* has been blacklisted."""
    return BlacklistToken.query.filter_by(token=token).first() is not None
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,510 | cppmx/flask-api-quickstart | refs/heads/master | /app/models.py | from app import db
class User(db.Model):
    """Registered account row; passwords are stored as bcrypt hashes
    (see app.auth.service.hash_password)."""
    id = db.Column(db.Integer, primary_key=True)
    # Public handle; length/charset validated in app.auth.service.create_user.
    username = db.Column(db.String(80), unique=True)
    # 254 chars covers the maximum length of a valid email address.
    email = db.Column(db.String(254), unique=True, nullable=False)
    # bcrypt hash of the password, never the plain text.
    password = db.Column(db.String(256), unique=False, nullable=False)
    # Timezone-aware creation timestamp (callers pass app.utils.now(), UTC).
    created_on = db.Column(db.DateTime(timezone=True), nullable=False)
    def __init__(self, username, email, password, created_on):
        """Store the already-hashed password and metadata as given."""
        self.username = username
        self.email = email
        self.password = password
        self.created_on = created_on
    def __repr__(self):
        return f"User(id={self.id},username={self.username})"
class BlacklistToken(db.Model):
    """Auth token that has been invalidated (e.g. on logout); membership in
    this table is checked on every authenticated request."""
    id = db.Column(db.Integer, primary_key=True)
    # The raw JWT string that must no longer be accepted.
    token = db.Column(db.String(256))
    # Timezone-aware timestamp of when the token was revoked.
    blacklisted_on = db.Column(db.DateTime(timezone=True), nullable=False)
    def __init__(self, token, blacklisted_on):
        self.token = token
        self.blacklisted_on = blacklisted_on
    def __repr__(self):
        return f"BlacklistToken(id={self.id},token={self.token})"
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,511 | cppmx/flask-api-quickstart | refs/heads/master | /app/auth/helpers.py | from functools import wraps
from flask import request
from app.auth.exceptions import InvalidTokenError, TokenExpiredError
from app.auth.service import decode_auth_token, is_token_blacklisted
from app.models import User
def get_token_from_header():
    """Extract the bearer token from the request's Authorization header.

    Returns None when the header is absent or has no second space-separated
    part (e.g. a bare "Bearer" with no token).
    """
    if "Authorization" not in request.headers:
        return None
    parts = request.headers["Authorization"].split(" ")
    return parts[1] if len(parts) > 1 else None
def auth_required(f):
    """Decorator that rejects requests without a valid, live auth token."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        token = get_token_from_header()
        if not token:
            raise InvalidTokenError()
        # Revoked tokens are treated the same as expired ones.
        if is_token_blacklisted(token):
            raise TokenExpiredError()
        payload = decode_auth_token(token)
        # The token must refer to an existing user.
        if User.query.filter_by(id=payload["sub"]).first() is None:
            raise InvalidTokenError()
        return f(*args, **kwargs)
    return decorated_function
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,512 | cppmx/flask-api-quickstart | refs/heads/master | /tests/test_app.py | import json
from tests.helpers import (
assert_error,
assert_error_invalid_token,
assert_success_200,
get_valid_token,
login_user,
signup_user,
)
def test_ping(client):
    """The health endpoint reports a running service."""
    response = client.get("/ping")
    payload = json.loads(response.data)
    assert_success_200(response)
    assert payload["status"] == "running"
def test_access_protected_endpoint_with_valid_token(client):
    """A request carrying a freshly issued token reaches the resource."""
    token = get_valid_token(client)
    headers = {"Authorization": "Bearer " + token}
    response = client.get(
        "/protected", headers=headers, content_type="application/json")
    assert_success_200(response)
    payload = json.loads(response.data)
    assert payload["message"] == "Protected message"
def test_access_protected_endpoint_without_token(client):
    """Omitting the Authorization header is rejected as an invalid token."""
    response = client.get("/protected", content_type="application/json")
    assert_error_invalid_token(response)
def test_access_protected_endpoint_without_valid_token(client):
    """A garbage token is rejected as invalid."""
    bogus = "djkafkldhsfhl"
    response = client.get(
        "/protected",
        headers={"Authorization": "Bearer " + bogus},
        content_type="application/json",
    )
    assert_error_invalid_token(response)
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,513 | cppmx/flask-api-quickstart | refs/heads/master | /app/utils.py | import datetime
import pytz
def now():
    """Return the current time as a timezone-aware UTC datetime."""
    naive_utc = datetime.datetime.utcnow()
    return naive_utc.replace(tzinfo=pytz.utc)
def as_utc_iso(date):
    """Format *date* as an ISO-8601 string normalised to UTC."""
    utc_dt = date.astimezone(datetime.timezone.utc)
    return utc_dt.isoformat()
45,514 | cppmx/flask-api-quickstart | refs/heads/master | /app/exceptions.py | from flask import jsonify
class AppError(Exception):
    """Base class for all application errors.

    Carries an HTTP status code, a machine-readable error code and a
    human-readable message, and can render itself as a JSON API response.
    """
    status_code = 500
    error_code = "INTERNAL_ERROR"
    message = "Request cannot be processed at the moment."
    def __init__(self, status_code=None, error_code=None, message=None):
        """Override any of the class-level defaults that were given."""
        super().__init__()
        if message is not None:
            self.message = message
        if status_code is not None:
            self.status_code = status_code
        if error_code is not None:
            self.error_code = error_code
    def to_api_response(self):
        """Render this error as a flask JSON response with its status code."""
        body = {"errorCode": self.error_code, "errorMessage": self.message}
        response = jsonify(body)
        response.status_code = self.status_code
        return response
class InvalidFieldError(AppError):
    """Raised when a request field fails validation (HTTP 422)."""

    def __init__(self, field_name, message=""):
        # Fix: the original template emitted a doubled apostrophe after the
        # field name ("Invalid 'x''. ..."); quote the field name once.
        AppError.__init__(
            self,
            status_code=422,
            error_code="INVALID_FIELD",
            message=f"Invalid '{field_name}'. {message}",
        )
class BadRequestError(AppError):
    """Raised for malformed requests (HTTP 400)."""

    def __init__(self, message="Malformed request."):
        super().__init__(status_code=400, error_code="BAD_REQUEST", message=message)
class NotFoundError(AppError):
    """Raised when a requested resource does not exist (HTTP 404)."""

    def __init__(self, message="Requested resource not found."):
        super().__init__(status_code=404, error_code="NOT_FOUND", message=message)
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,515 | cppmx/flask-api-quickstart | refs/heads/master | /app/auth/exceptions.py | from app.exceptions import AppError
class InvalidCredentialsError(AppError):
    """Raised when a login attempt supplies a wrong username/password pair."""

    def __init__(self):
        super().__init__(
            status_code=401,
            error_code="INVALID_CREDENTIALS",
            message="Invalid username or password.",
        )
class InvalidTokenError(AppError):
    """Raised when the auth token is missing or cannot be decoded."""

    def __init__(self):
        super().__init__(
            status_code=401,
            error_code="INVALID_TOKEN",
            message="Token is invalid or missing.",
        )
class TokenExpiredError(AppError):
    """Raised when the auth token is valid but past its expiry."""

    def __init__(self):
        super().__init__(
            status_code=401,
            error_code="TOKEN_EXPIRED",
            message="Authentication token has expired.",
        )
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,516 | cppmx/flask-api-quickstart | refs/heads/master | /tests/conftest.py | import pytest
from app import app, db
def create_app():
    """Return Flask's app object with test configuration.

    NOTE(review): mutates the shared module-level ``app`` instead of
    building a fresh instance, so config changes persist across tests.
    """
    app.config.from_object("app.config.TestingConfig")
    return app
def set_up():
    """Create database tables according to the app models"""
    db.create_all()
    db.session.commit()
def tear_down():
    """Remove all tables from the database"""
    db.session.remove()
    db.drop_all()
@pytest.fixture
def client():
    """Create Flask's test client to interact with the application.

    Each test gets a fresh schema: tables are created before the test body
    runs and dropped afterwards, regardless of the test outcome.
    """
    client = create_app().test_client()
    set_up()
    yield client
    tear_down()
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,517 | cppmx/flask-api-quickstart | refs/heads/master | /app/__init__.py | import os
from flask import Flask, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
# Single module-level application configured from the environment
# (defaults to the development profile).
app = Flask(__name__)
app_settings = os.getenv("APP_SETTINGS", "app.config.DevelopmentConfig")
app.config.from_object(app_settings)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# NOTE(review): these imports are deliberately placed after app/db creation;
# the imported modules import this module back, so hoisting them to the top
# would create a circular-import failure.
from app.auth.views import auth_api
app.register_blueprint(auth_api, url_prefix="/api/v1/auth")
from app.auth.helpers import auth_required
from app.models import User, BlacklistToken
@app.route("/ping")
def index():
    # Liveness probe.
    return jsonify({"status": "running"})
@app.route("/protected")
@auth_required
def protected():
    # Demo endpoint that requires a valid auth token.
    return jsonify({"message": "Protected message"})
from app.exceptions import AppError, NotFoundError
@app.errorhandler(404)
def custom404(error):
    # Render 404s in the same JSON error envelope as application errors.
    return NotFoundError().to_api_response()
@app.errorhandler(Exception)
def handle_exception(exception):
    # Last-resort handler: hide internals behind a generic 500 envelope.
    return AppError().to_api_response()
@app.errorhandler(AppError)
def handle_application_error(exception):
    # Domain errors know how to serialize themselves.
    return exception.to_api_response()
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,518 | cppmx/flask-api-quickstart | refs/heads/master | /tests/helpers.py | import json
def signup_user(client, username, email, password):
    """POST a signup request, sending only the fields that are truthy."""
    payload = {
        key: value
        for key, value in (
            ("username", username),
            ("email", email),
            ("password", password),
        )
        if value
    }
    return client.post(
        "/api/v1/auth/signup", content_type="application/json", data=json.dumps(payload)
    )
def login_user(client, username, password):
    """POST a login request, sending only the fields that are truthy."""
    payload = {
        key: value
        for key, value in (("username", username), ("password", password))
        if value
    }
    return client.post(
        "/api/v1/auth/login", content_type="application/json", data=json.dumps(payload)
    )
def get_valid_token(client):
    """Register a known user, log in as them, and return the issued token."""
    credentials = {"username": "usrname1", "password": "Password1"}
    signup_user(client=client, email="usrname1@example.com", **credentials)
    login_response = login_user(client=client, **credentials)
    return json.loads(login_response.data)["token"]
def assert_success_200(response):
    """Assert that *response* is an HTTP 200 JSON response."""
    assert (response.status_code, response.content_type) == (200, "application/json")
def assert_error(response, error_code):
    """Assert that *response* is a JSON response with the given HTTP status."""
    assert (response.status_code, response.content_type) == (
        error_code,
        "application/json",
    )
def assert_error_invalid_token(response):
    """Assert a 401 response whose error code is INVALID_TOKEN."""
    assert_error(response, 401)
    assert json.loads(response.data)["errorCode"] == "INVALID_TOKEN"
def assert_error_token_expired(response):
    """Assert a 401 response whose error code is TOKEN_EXPIRED."""
    assert_error(response, 401)
    assert json.loads(response.data)["errorCode"] == "TOKEN_EXPIRED"
def assert_error_missing_field(response, field):
    """Assert a 400 BAD_REQUEST response mentioning the missing *field*."""
    assert_error(response, 400)
    payload = json.loads(response.data)
    assert payload["errorCode"] == "BAD_REQUEST"
    assert field in payload["errorMessage"]
def assert_error_invalid_field(response, field):
    """Assert a 422 INVALID_FIELD response mentioning the offending *field*."""
    assert_error(response, 422)
    payload = json.loads(response.data)
    assert payload["errorCode"] == "INVALID_FIELD"
    assert field in payload["errorMessage"]
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,519 | cppmx/flask-api-quickstart | refs/heads/master | /tests/test_auth.py | import json
from tests.helpers import (
assert_error,
assert_error_invalid_field,
assert_error_missing_field,
assert_error_token_expired,
assert_success_200,
get_valid_token,
login_user,
signup_user,
)
def test_signup_success(client):
    """Happy path: a valid signup returns HTTP 200 with success=True."""
    response = signup_user(
        client=client,
        username="username1",
        email="mail@example.com",
        password="Password1",
    )
    assert_success_200(response)
    data = json.loads(response.data)
    assert data["success"] == True
def test_signup_missing_username(client):
    """Omitting the username yields a BAD_REQUEST naming the field."""
    response = signup_user(
        client=client, username=None, email="mail@example.com", password="Password1"
    )
    assert_error_missing_field(response, "username")
def test_signup_missing_email(client):
    """Omitting the email yields a BAD_REQUEST naming the field."""
    response = signup_user(
        client=client, username="username1", email=None, password="Password1"
    )
    assert_error_missing_field(response, "email")
def test_signup_missing_password(client):
    """Omitting the password yields a BAD_REQUEST naming the field."""
    response = signup_user(
        client=client, username="username1", email="mail@example.com", password=None
    )
    assert_error_missing_field(response, "password")
def test_signup_invalid_username_too_short(client):
    """Too-short usernames are rejected as INVALID_FIELD."""
    response = signup_user(
        client=client, username="adam", email="mail@example.com", password="Password1"
    )
    assert_error_invalid_field(response, "username")
def test_signup_invalid_username_forbidden_chars(client):
    """Usernames with disallowed characters are rejected as INVALID_FIELD."""
    response = signup_user(
        client=client,
        username=" -- -- -- ",
        email="mail@example.com",
        password="Password1",
    )
    assert_error_invalid_field(response, "username")
def test_signup_invalid_email(client):
    """A malformed email address is rejected as INVALID_FIELD."""
    response = signup_user(
        client=client,
        username="username1",
        email="mailexample.com",
        password="Password1",
    )
    assert_error_invalid_field(response, "email")
def test_signup_invalid_password_too_short(client):
    """Too-short passwords are rejected as INVALID_FIELD."""
    response = signup_user(
        client=client, username="username1", email="mail@example.com", password="1234"
    )
    assert_error_invalid_field(response, "password")
def test_signup_username_already_used(client):
    """A second signup reusing a username is rejected with a clear message."""
    username = "username1"
    password = "Password1"
    signup_user(
        client=client, username=username, email="email@mail.com", password=password
    )
    response = signup_user(
        client=client, username=username, email="email2@mail.com", password=password
    )
    assert_error(response, 422)
    data = json.loads(response.data)
    assert data["errorCode"] == "INVALID_FIELD"
    assert "Username is already used" in data["errorMessage"]
def test_signup_email_already_used(client):
    """A second signup reusing an email is rejected with a clear message."""
    email = "email@mail.com"
    password = "Password1"
    signup_user(client=client, username="username1", email=email, password=password)
    response = signup_user(
        client=client, username="username2", email=email, password=password
    )
    assert_error(response, 422)
    data = json.loads(response.data)
    assert data["errorCode"] == "INVALID_FIELD"
    assert "Email address is already used" in data["errorMessage"]
def test_login_success(client):
    """A registered user can log in and receives a non-empty token."""
    username = "username1"
    email = "user1@example.com"
    password = "Password1"
    signup_user(client=client, username=username, email=email, password=password)
    response = login_user(client=client, username=username, password=password)
    assert_success_200(response)
    data = json.loads(response.data)
    assert data["token"]
def test_login_bad_credentials(client):
    """A wrong password yields 401 INVALID_CREDENTIALS and no token."""
    username = "username1"
    email = "user1@example.com"
    password = "Password1"
    signup_user(client=client, username=username, email=email, password=password)
    response = login_user(client=client, username=username, password="Password2")
    assert_error(response, 401)
    data = json.loads(response.data)
    assert data["errorCode"] == "INVALID_CREDENTIALS"
    assert not "token" in data
def test_logout(client):
    """Logout blacklists the token so it no longer opens protected routes."""
    token = get_valid_token(client)
    response = client.post(
        "/api/v1/auth/logout",
        headers=dict(Authorization="Bearer " + token),
        content_type="application/json",
    )
    assert_success_200(response)
    response_protected = client.get(
        "/protected",
        headers=dict(Authorization="Bearer " + token),
        content_type="application/json",
    )
    assert_error_token_expired(response_protected)
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,520 | cppmx/flask-api-quickstart | refs/heads/master | /app/auth/views.py | from flask import Blueprint, jsonify, request
from app.auth.helpers import auth_required, get_token_from_header
from app.auth.service import (
blacklist_token,
create_user,
is_token_blacklisted,
login_user,
)
from app.exceptions import BadRequestError
# Blueprint grouping all auth endpoints; mounted under /api/v1/auth in app/__init__.py.
auth_api = Blueprint("auth_api", __name__)
@auth_api.route("/signup", methods=["POST"])
def signup():
    """Register a new user from a JSON body with username/email/password."""
    body = request.json
    if not body:
        raise BadRequestError()
    # Validate required fields in the same order the checks originally ran.
    for field in ("username", "password", "email"):
        if field not in body:
            raise BadRequestError(f"'{field}' field is missing.")
    create_user(body["username"], body["email"], body["password"])
    return jsonify({"success": True})
@auth_api.route("/login", methods=["POST"])
def login():
    """Authenticate a user and return a fresh auth token."""
    body = request.json
    if not body:
        raise BadRequestError()
    for field in ("username", "password"):
        if field not in body:
            raise BadRequestError(f"'{field}' field is missing.")
    return jsonify({"token": login_user(body["username"], body["password"])})
@auth_api.route("/logout", methods=["POST"])
@auth_required
def logout():
    """Invalidate the caller's auth token by blacklisting it."""
    blacklist_token(get_token_from_header())
    return jsonify({"success": True})
| {"/app/auth/service.py": ["/app/__init__.py", "/app/auth/exceptions.py", "/app/exceptions.py", "/app/models.py", "/app/utils.py"], "/app/models.py": ["/app/__init__.py"], "/app/auth/helpers.py": ["/app/auth/exceptions.py", "/app/auth/service.py", "/app/models.py"], "/tests/test_app.py": ["/tests/helpers.py"], "/app/auth/exceptions.py": ["/app/exceptions.py"], "/tests/conftest.py": ["/app/__init__.py"], "/app/__init__.py": ["/app/auth/views.py", "/app/auth/helpers.py", "/app/models.py", "/app/exceptions.py"], "/tests/test_auth.py": ["/tests/helpers.py"], "/app/auth/views.py": ["/app/auth/helpers.py", "/app/auth/service.py", "/app/exceptions.py"]} |
45,522 | 20MESC/HTN_blockPlanning | refs/heads/master | /tableTopBlocks_operators.py | """
tableTopBlocks domain definition for Pyhop 1.1.
Author:Rosario Scalise, April 30, 2016
"""
import pyhop
"""Each Pyhop planning operator is a Python function. The 1st argument is
the current state, and the others are the planning operator's usual arguments.
This is analogous to how methods are defined for Python classes (where
the first argument is always the name of the class instance). For example,
the function pickUp(state, loc) implements the planning operator for the task
('pickUp', loc).
The blocks-world operators state variables:
- locContents[loc] = what is contained at loc given by (x,y)
- locOccupied[loc] = Boolean describing whether there is something occupying loc (x,y)
- locRobot = loc in (x,y) at which the robot is currently located
- holding = name of the block being held, or False if the hand is empty.
"""
def pickUp(state, loc):
    """Primitive operator: pick up the block at *loc*.

    Preconditions: the hand is empty and *loc* is occupied.
    NOTE(review): the original comment also mentions "robot at loc", but the
    code never checks state.locRobot -- the methods always issue a moveRobot
    first, so the check is omitted here as well; confirm before relying on it.
    Returns the mutated state, or False when a precondition fails.
    Fix: removed a leftover debug print of the held block.
    """
    if state.holding or not state.locOccupied[loc]:
        return False
    state.locOccupied[loc] = False
    state.holding = state.locContents[loc]
    # Delete the entry so locContents only ever lists occupied cells.
    del state.locContents[loc]
    return state
def putDown(state, loc):
    """Primitive operator: put the held block down at *loc*.

    Returns the mutated state, or False when nothing is held.
    Fixes: (1) the empty-hand sentinel is now False, matching the module's
    documented contract ("holding = ... or False if the hand is empty") and
    the initial state, instead of the empty string; (2) locOccupied[loc] is
    set True, mirroring pickUp, so occupancy stays consistent for the
    status() checks in the methods module.
    """
    if not state.holding:
        return False
    state.locContents[loc] = state.holding
    state.locOccupied[loc] = True
    state.holding = False
    return state
def moveRobot(state, locTo):
    # Primitive operator: relocate the robot to locTo.  There is no
    # reachability precondition, so this always succeeds.
    state.locRobot = locTo
    return state
#def unstack(state,b,c):
# if state.pos[b] == c and c != 'table' and state.clear[b] == True and state.holding == False:
# state.pos[b] = 'hand'
# state.clear[b] = False
# state.holding = b
# state.clear[c] = True
# return state
# else: return False
#
#def stack(state,b,c):
# if state.pos[b] == 'hand' and state.clear[c] == True:
# state.pos[b] = c
# state.clear[b] = True
# state.holding = False
# state.clear[c] = False
# return state
# else: return False
#
"""
Below, 'declare_operators(pickup, unstack, putdown, stack)' tells Pyhop
what the operators are. Note that the operator names are *not* quoted.
"""
# Register the primitive operators with pyhop (function objects, not strings).
pyhop.declare_operators(moveRobot,pickUp,putDown)
| {"/tableTopBlocks.py": ["/tableTopBlocks_operators.py", "/tableTopBlocks_methods.py"]} |
45,523 | 20MESC/HTN_blockPlanning | refs/heads/master | /tableTopBlocks_methods.py | """
tableTopBlocks methods for Pyhop 1.1.
Author: Rosario Scalise, April 30, 2016
"""
import pyhop
from pyhop import Goal
from bidict import bidict
from tableTopBlocks_utils import get_line
import math
"""
Here are some helper functions that are used in the methods' preconditions.
"""
# CHECKS TO SEE IF BLOCK IS WHERE IT IS SUPPOSED TO BE
def is_done(b1, state, goal):
    """Return True when block *b1* already sits at its goal location.

    Blocks that do not appear in the goal at all count as done.
    """
    has_goal = b1 in goal.locContents.values()
    return not (has_goal and goal.locContents.inv[b1] != state.locContents.inv[b1])
# GIVES BLOCKS STATUSES
def status(b1, state, goal):
    """Classify block *b1* as 'done', 'move-to-table', or 'waiting'.

    'move-to-table' means its goal cell is currently free, so it can be
    moved straight there; 'waiting' means the destination is occupied.
    """
    if is_done(b1, state, goal):
        return 'done'
    if not state.locOccupied[goal.locContents.inv[b1]]:
        return 'move-to-table'
    return 'waiting'
# OLD STATUS WHICH ALLOWS FOR BLOCK STACKING LOGIC
#def status(b1,state,goal):
# if is_done(b1,state,goal):
# return 'done'
# elif not state.clear[b1]:
# return 'inaccessible'
# elif not (b1 in goal.pos) or goal.pos[b1] == 'table':
# return 'move-to-table'
# elif is_done(goal.pos[b1],state,goal) and state.clear[goal.pos[b1]]:
# return 'move-to-block'
# else:
# return 'waiting'
def all_blocks(state):
    # All block names currently placed on the table (a view over locContents).
    return state.locContents.values()
"""
In each Pyhop planning method, the first argument is the current state (this is analogous to Python methods, in which the first argument is the class instance). The rest of the arguments must match the arguments of the task that the method is for. For example, ('pickup', b1) has a method get_m(state,b1), as shown below.
"""
### methods for "move_blocks"
# Currently being handled by checking occupied space -> Finding the blocks in each occupied space -> Checking status of block -> Check if its part of goal -> If it needs to be moved, move it
# TODO: There might be a more concise way to address this.
def moveBlocks_m(state, goal):
    """Method for the 'moveBlocks' task.

    Find the first block that can be moved straight to its goal cell,
    schedule a moveOne for it followed by a recursive moveBlocks; once no
    block needs moving any more, decompose to the empty plan.
    """
    # locContents only keys currently-occupied cells (entries are deleted on
    # pickup), so this visits exactly the blocks still on the table.
    for loc, block in state.locContents.items():
        if status(block, state, goal) == 'move-to-table':
            return [('moveOne', loc, goal.locContents.inv[block]),
                    ('moveBlocks', goal)]
    # Nothing left to move: the task is complete.
    return []
# OLD METHODS WHICH ALLOWS FOR BLOCK STACKING
#for loc in all_blocks(state):
# s = status(b1,state,goal)
# if s == 'move-to-table':
# return [('moveOne',b1,'table'),('moveBlocks',goal)]
# elif s == 'move-to-block':
# return [('moveOne',b1,goal.pos[b1]), ('moveBlocks',goal)]
# else:
# continue
##
## if we get here, no blocks can be moved to their final locations
#b1 = pyhop.find_if(lambda x: status(x,state,goal) == 'waiting', all_blocks(state))
#if b1 != None:
# return [('moveOne',b1,'table'), ('moveBlocks',goal)]
##
## if we get here, there are no blocks that need moving
#return []
"""
declare_methods must be called once for each taskname. Below, 'declare_methods('get',get_m)' tells Pyhop that 'get' has one method, get_m. Notice that 'get' is a quoted string, and get_m is the actual function.
"""
# Register the single decomposition method for the 'moveBlocks' task.
pyhop.declare_methods('moveBlocks',moveBlocks_m)
### methods for "move_one"
def moveOne_m(state, locI, locF):
    """Method for 'moveOne': plan to carry the block at locI over to locF.

    Side effect (kept from the original; see its TODO about relocating this
    effect): the moved block is retired from state.blocksAvail at
    decomposition time.
    """
    block = state.locContents[locI]
    state.blocksAvail.remove(block)
    return [('moveRobot', locI), ('pickUp', locI),
            ('moveRobot', locF), ('putDown', locF)]
# Register the single decomposition method for the 'moveOne' task.
pyhop.declare_methods('moveOne',moveOne_m)
##### BELOW ARE METHODS FOR FORMING SHAPE COMPONENTS AND SHAPES
def createLine_m(state,pI,pF):
    """
    Generate subtasks to create a line starting at loc given by pI and ending at loc given by pF.
    Decomposes into a single moveBlocks task whose goal places the first
    len(line) available blocks on the rasterized line cells.
    """
    #TODO: CHECK BOUNDS
    # uses Bresenham's Line Algorithm to compute discrete points for line
    linePointsList = get_line(pI,pF)
    #TODO: Alternate Method
    # Currently filters available blocks by first n (n is number of points on linePointsList)
    blocksList = state.blocksAvail[0:len(linePointsList)]
    gL = Goal('goalLine')
    gL.locContents = bidict(zip(linePointsList,blocksList))
    # NOTE(review): locContents.keys() lists only currently-OCCUPIED cells
    # (pickUp deletes entries), not the full grid -- confirm this goal's
    # locOccupied covers every cell status() will query.
    gL.locOccupied = {loc:False for loc in state.locContents.keys()}
    gL.locOccupied.update({loc:True for loc in gL.locContents.keys()})
    return [('moveBlocks',gL)]
# Register the single decomposition method for the 'createLine' task.
pyhop.declare_methods('createLine',createLine_m)
def createRect_m(state, center, sideLen1, sideLen2):
    """Method for 'createRect': decompose into four createLine edge tasks.

    The rectangle is centred on *center* with the given side lengths,
    measured in grid cells inclusive of the corner blocks.
    """
    cx, cy = center
    # Side lengths as step counts between corners, hence the -1.
    dx = sideLen1 - 1
    dy = sideLen2 - 1
    left = int(cx - math.floor(dx / 2.0))
    right = int(cx + math.ceil(dx / 2.0))
    bottom = int(cy - math.floor(dy / 2.0))
    top = int(cy + math.ceil(dy / 2.0))
    v1, v2, v3, v4 = (left, bottom), (left, top), (right, top), (right, bottom)
    # TODO (kept from original): bounds checking against the table grid.
    return [('createLine', v1, v2), ('createLine', v2, v3),
            ('createLine', v3, v4), ('createLine', v4, v1)]
# Register the single decomposition method for the 'createRect' task.
pyhop.declare_methods('createRect',createRect_m)
### methods for "get"
def get_m(state, b1):
    """Method for 'get': pick b1 up from the table or unstack it.

    Fails (returns False) when b1 is not clear.
    NOTE(review): relies on legacy state.clear/state.pos fields from the
    classic blocks world; the table-top state defined in this project does
    not populate them.
    """
    if not state.clear[b1]:
        return False
    source = state.pos[b1]
    return [('pickup', b1)] if source == 'table' else [('unstack', b1, source)]
# Register the single decomposition method for the 'get' task.
pyhop.declare_methods('get',get_m)
### methods for "put"
def put_m(state, b1, b2):
    """Method for 'put': put b1 down on the table or stack it onto b2.

    Fails (returns False) unless the hand is currently holding b1.
    """
    if state.holding != b1:
        return False
    return [('putdown', b1)] if b2 == 'table' else [('stack', b1, b2)]
# Register the single decomposition method for the 'put' task.
pyhop.declare_methods('put',put_m)
| {"/tableTopBlocks.py": ["/tableTopBlocks_operators.py", "/tableTopBlocks_methods.py"]} |
45,524 | 20MESC/HTN_blockPlanning | refs/heads/master | /tableTopBlocks.py | """
Table Top Blocks-world.
Author: Rosario Scalise, April 30, 2016
"""
from __future__ import print_function
from pyhop import *
from bidict import bidict
"""
List all operators and methods
Check here to see that everything is loaded correctly
"""
# Importing the operators module registers them with pyhop as a side effect.
import tableTopBlocks_operators
print('')
print_operators()
# Likewise, importing the methods module registers the decomposition methods.
import tableTopBlocks_methods
print('')
print_methods()
"""
Define Init State, Goal State, and run planner
Init:
(1,3) : b1
(2,3) : b2
(3,3) : b3
(4,3) : b4
(5,3) : b5
(1,5) : b6
(2,5) : b7
(3,5) : b8
(4,5) : b9
(5,5) : b10
(2,2) : robot
Goal:
createRect((3,3),3,4)
consists of:
(2,2) : b
(2,3) : b
(2,4) : b
(2,5) : b
(3,5) : b
(4,2) : b
(4,3) : b
(4,4) : b
(4,5) : b
(3,2) : b
"""
print("- Define state1:")
# Create Initial blocks state
s1 = State('state1')
# Locations will be in standard graph axes and ordered pairs (x,y)
s1.locContents = bidict({(1,3):'b1',(2,3):'b2',(3,3):'b3',(4,3):'b4',(5,3):'b5',(1,5):'b6',(2,5):'b7',(3,5):'b8',(4,5):'b9',(5,5):'b10'},) # e.g. (1,3) holds b1, (2,3) holds b2
# TODO: Definitely come up with alternate solution to available blocks list - probably block status
s1.blocksAvail = s1.locContents.values()
# Could maybe at some point replace this by just checking if the key exists in loc?
# Occupancy map over the full 5x5 grid, then flipped True for occupied cells.
s1.locOccupied = {(x,y):False for x in range(1,6) for y in range(1,6)}
s1.locOccupied.update({loc:True for loc in s1.locContents.keys()}) # make sure these reflect the occupied locs
s1.locRobot = (2,2)
s1.holding = False
print_state(s1)
print('')
print("- Define goal1:")
# Explicit goal kept for reference; createRect builds its own goals instead.
#g1 = Goal('goal1')
#g1.locContents = bidict({(1,1):'b1',(1,2):'b2',(1,3):'b3'})
#g1.locOccupied = {loc:False for loc in s1.locContents.keys()} #locContents.keys() gives all locs
#g1.locOccupied.update({loc:True for loc in g1.locContents.keys()})
#g1.locRobot = (2,2)
#print_goal(g1)
#print('')
# Plan a 3x4 rectangle of blocks centred at (3,3).
result = pyhop(s1,[('createRect',(3,3),3,4)], verbose=1)
# NOTE(review): leftover interactive debugging hook; drops into a debugger
# after planning -- remove before any non-interactive use.
import ipdb
ipdb.set_trace()
| {"/tableTopBlocks.py": ["/tableTopBlocks_operators.py", "/tableTopBlocks_methods.py"]} |
45,544 | joncastillo/AivenCloudTest | refs/heads/main | /PostgresService.py | import psycopg2
class PostgresService:
    """Thin wrapper around a psycopg2 connection to the Aiven Postgres service.

    Connection parameters are read from the AivenService's Postgres config.
    """

    # Shared DDL; previously duplicated verbatim in two methods.
    _CREATE_LOGS_TABLE = (
        "CREATE TABLE IF NOT EXISTS logs "
        "("
        " id SERIAL PRIMARY KEY,"
        " datetime VARCHAR,"
        " url VARCHAR,"
        " status_code VARCHAR,"
        " json_filtered_text VARCHAR"
        ");"
    )

    def __init__(self, aiven_service):
        # Aiven service descriptor holding host/port/credentials.
        self.m_aiven_service = aiven_service
        # Live psycopg2 connection; None while disconnected.
        self.m_conn = None

    def connect(self):
        """Open the database connection.  Returns 0 on success, 1 on failure."""
        cfg = self.m_aiven_service.m_service_config_postgres
        try:
            self.m_conn = psycopg2.connect(
                f"dbname={cfg.m_databaseName} user={cfg.m_username} "
                f"password={cfg.m_password} host={cfg.m_hostname} port={cfg.m_port}"
            )
        except Exception as e:
            print("I am unable to connect to the database")
            print(str(e))
            return 1
        return 0

    def disconnect(self):
        """Close the connection and drop the handle."""
        self.m_conn.close()
        self.m_conn = None

    def create_table_if_not_exist(self):
        """Create the logs table when missing.  Returns 0."""
        with self.m_conn.cursor() as cursor:
            cursor.execute(self._CREATE_LOGS_TABLE)
        self.m_conn.commit()
        return 0

    def store_to_database(self, datetime, url, status_code, json_filtered_text):
        """Insert one monitoring record into the logs table.  Returns 0.

        Fix: values are now passed as query parameters instead of being
        f-string-interpolated into the SQL, which both broke on embedded
        quotes in the logged text and allowed SQL injection.
        NOTE(review): the first parameter shadows the ``datetime`` module
        name; kept for interface compatibility.
        """
        with self.m_conn.cursor() as cursor:
            cursor.execute(self._CREATE_LOGS_TABLE)
            cursor.execute(
                "INSERT INTO logs(datetime, url, status_code, json_filtered_text) "
                "VALUES (%s, %s, %s, %s)",
                (datetime, url, status_code, json_filtered_text),
            )
        self.m_conn.commit()
        print(f"inserting: {datetime} {url} {status_code} {json_filtered_text}")
        return 0
| {"/KafkaService.py": ["/AivenService.py"], "/AivenCloudTest.py": ["/AivenService.py", "/ServiceConfigPostGres.py", "/KafkaService.py", "/MonitoringTool.py", "/PostgresService.py"]} |
45,545 | joncastillo/AivenCloudTest | refs/heads/main | /AivenService.py | import aiven.client.client as ac
class AivenService:
    """
    Class for Initializing Aiven's Services
    Attributes
    ----------
    m_aiven_client : AivenClient
        class provided by the Aiven Client Python library. Used for configuring Aiven services.
    m_aiven_project_name : str
        The Aiven project name. For demo accounts, this is predetermined by Aiven during sign-up. Seen at the top left corner of https://console.aiven.io.
    m_aiven_email : str
        credentials used for logging into the Aiven service.
    m_aiven_password : KafkaService
        credentials used for logging into the Aiven service.
    m_service_config_kafka : ServiceConfigKafka
        user defined configurations for initialising Aiven's Kafka service.
    m_service_config_postgres : ServiceConfigPostGres
        user defined configurations for initialising Aiven's Postgres service.
    m_token : str
        login token
    m_aiven_handle_kafka : dict
        metadata returned by Aiven when creating the Kafka service. Used for changing Kafka configurations.
    m_aiven_handle_postgres : dict
        metadata returned by Aiven when creating the Postgres service. Used for changing Postgres configurations.
    m_kafka_producer :
        Kafka producer handle; initialised to None here.
    m_kafka_consumer :
        Kafka consumer handle; initialised to None here.
    """
    def __init__(self, aiven_project_name, aiven_email, aiven_password, service_config_kafka, service_config_postgres):
        self.m_aiven_client = ac.AivenClient("https://api.aiven.io")
        self.m_aiven_project_name = aiven_project_name
        self.m_aiven_email = aiven_email
        self.m_aiven_password = aiven_password
        # Service hostnames depend on the project name, so derive them now.
        self.m_service_config_kafka = service_config_kafka
        self.m_service_config_kafka.set_hostname(self.m_aiven_project_name)
        self.m_service_config_postgres = service_config_postgres
        self.m_service_config_postgres.set_hostname(self.m_aiven_project_name)
        self.m_token = None
        self.m_aiven_handle_kafka = None
        self.m_aiven_handle_postgres = None
        self.m_kafka_producer = None
        self.m_kafka_consumer = None
    def login(self):
        """
        login routine to Aiven's services.
        Stores the bearer token and attaches it to the API client.
        """
        self.m_token = self.m_aiven_client.authenticate_user(self.m_aiven_email,self.m_aiven_password)["token"]
        self.m_aiven_client.set_auth_token(self.m_token)
    def kafka_init(self):
        """
        Used for creating a Kafka service in Aiven.
        Reuses an existing service when found (enabling kafka_connect if it
        is off); otherwise creates a new one.  Also writes the TLS
        certificate files needed by producers/consumers.
        """
        try:
            self.m_aiven_handle_kafka = self.m_aiven_client.get_service(self.m_aiven_project_name, self.m_service_config_kafka.m_serviceName)
            if not self.m_aiven_handle_kafka["user_config"]["kafka_connect"]:
                self.m_aiven_handle_kafka = self.m_aiven_client.update_service(self.m_aiven_project_name, self.m_service_config_kafka.m_serviceName, user_config={"kafka_connect": True})
        except ac.Error:
            # get_service raised: the service does not exist yet, create it.
            self.m_aiven_handle_kafka = self.m_aiven_client.create_service(self.m_aiven_project_name, self.m_service_config_kafka.m_serviceName, "kafka", "business-4", "google-australia-southeast1", {"kafka_connect": True})
        self.create_kafka_cert_files()
        return self.m_aiven_handle_kafka
    def create_kafka_cert_files(self):
        """
        Download and save keys from the server. Used in KafkaService when creating a producer or a consumer.
        Writes ca.pem, service.cert and service.key into the working directory.
        """
        ca = self.m_aiven_client.get_project_ca(self.m_aiven_project_name)
        with open("ca.pem", "w") as cafile:
            cafile.write(ca["certificate"])
        with open("service.cert", "w") as certfile:
            certfile.write(self.m_aiven_handle_kafka["connection_info"]["kafka_access_cert"])
        with open("service.key", "w") as keyfile:
            keyfile.write(self.m_aiven_handle_kafka["connection_info"]["kafka_access_key"])
    def postgres_init(self):
        """
        Used for creating a Postgres service in Aiven.
        Reuses an existing service when found; otherwise creates a new one.
        """
        try:
            self.m_aiven_handle_postgres = self.m_aiven_client.get_service(self.m_aiven_project_name, self.m_service_config_postgres.m_serviceName)
        except ac.Error:
            # get_service raised: the service does not exist yet, create it.
            self.m_aiven_handle_postgres = self.m_aiven_client.create_service(self.m_aiven_project_name, self.m_service_config_postgres.m_serviceName, "pg", "hobbyist", "google-australia-southeast1")
        return self.m_aiven_handle_postgres
| {"/KafkaService.py": ["/AivenService.py"], "/AivenCloudTest.py": ["/AivenService.py", "/ServiceConfigPostGres.py", "/KafkaService.py", "/MonitoringTool.py", "/PostgresService.py"]} |
45,546 | joncastillo/AivenCloudTest | refs/heads/main | /ServiceConfigPostGres.py | class ServiceConfigPostGres:
"""
Class that defines configuration for Aiven's Postgres service
"""
def __init__(self, service_name, username, password, port, database_name):
self.m_serviceName = service_name
self.m_username = username
self.m_password = password
self.m_port = port
self.m_databaseName = database_name
self.m_hostname = None
self.connection = None
def set_hostname(self, aiven_project_name):
"""
The service's hostname is a combination of the service name and the project name.
"""
self.m_hostname = self.m_serviceName + '-' + aiven_project_name + '.aivencloud.com'
| {"/KafkaService.py": ["/AivenService.py"], "/AivenCloudTest.py": ["/AivenService.py", "/ServiceConfigPostGres.py", "/KafkaService.py", "/MonitoringTool.py", "/PostgresService.py"]} |
45,547 | joncastillo/AivenCloudTest | refs/heads/main | /KafkaService.py | import json
from kafka import KafkaProducer
from kafka import KafkaConsumer
from AivenService import AivenService
class KafkaService:
    """
    Helper methods for sending/receiving messages through Aiven's Kafka.

    Attributes
    ----------
    m_aiven_service : AivenService
        Contains configurations for various Aiven services.
    m_postgres_service : PostgresService
        Helper functions for database manipulation.
    m_kafka_producer : KafkaProducer
        The Kafka producer; None until kafka_create_producer() is called.
    m_kafka_consumer : KafkaConsumer
        The Kafka consumer; None until kafka_create_consumer() is called.
    """
    def __init__(self, aiven_service, postgres_service):
        self.m_aiven_service = aiven_service
        self.m_postgres_service = postgres_service
        self.m_kafka_producer = None
        self.m_kafka_consumer = None

    def kafka_create_topic(self, topic):
        """
        Create the Kafka topic if it does not exist yet.

        Returns 0 on success (or if the topic already exists), 1 on error.
        """
        if not isinstance(self.m_aiven_service, AivenService):
            print("aiven service is not available")
            return 1
        # The cached service handle lists existing topics; skip creation on a match.
        for topic_in_kafka in self.m_aiven_service.m_aiven_handle_kafka["topics"]:
            if topic == topic_in_kafka['topic_name']:
                print("kafka topic already exists.")
                return 0
        self.m_aiven_service.m_aiven_client.create_service_topic(
            project=self.m_aiven_service.m_aiven_project_name,
            service=self.m_aiven_service.m_service_config_kafka.m_serviceName,
            topic=topic,
            partitions=1,
            replication=2,
            min_insync_replicas=1,
            retention_bytes=-1,
            retention_hours=24,
            cleanup_policy="delete"
        )
        return 0

    def kafka_create_producer(self):
        """
        Create the Kafka producer.

        Returns 0 on success, 1 if the Aiven service is unavailable.
        """
        if self.m_aiven_service is None:
            print("aiven service is not available")
            return 1
        # The SSL files are expected on disk; AivenService downloads them beforehand.
        self.m_kafka_producer = KafkaProducer(
            bootstrap_servers=self.m_aiven_service.m_service_config_kafka.m_hostname + ":" + str(self.m_aiven_service.m_service_config_kafka.m_port),
            security_protocol="SSL",
            ssl_cafile="ca.pem",
            ssl_certfile="service.cert",
            ssl_keyfile="service.key"
        )
        return 0

    def kafka_send(self, topic, message):
        """
        Helper function for sending a message through the kafka producer.

        Parameters
        ----------
        topic : str
            The Kafka topic.
        message : str
            The message to be sent through Kafka for consumption by the Kafka Consumer.

        Returns 0 on success, 1 if the producer has not been created yet.
        """
        if self.m_kafka_producer is None:
            print("kafka producer not available")
            return 1
        else:
            # Kafka transports bytes; encode the text payload as UTF-8.
            self.m_kafka_producer.send(topic, message.encode("utf-8"))
            return 0

    def kafka_create_consumer(self, topic):
        """
        Create the Kafka consumer and prepare the Postgres sink table.
        """
        self.m_kafka_consumer = KafkaConsumer(
            topic,
            auto_offset_reset="earliest",
            bootstrap_servers=self.m_aiven_service.m_service_config_kafka.m_hostname + ":" + str(self.m_aiven_service.m_service_config_kafka.m_port),
            client_id="demo-client-1",
            group_id="demo-group",
            security_protocol="SSL",
            ssl_cafile="ca.pem",
            ssl_certfile="service.cert",
            ssl_keyfile="service.key"
        )
        # Ensure the destination table exists before any message is consumed.
        self.m_postgres_service.connect()
        self.m_postgres_service.create_table_if_not_exist()
        self.m_postgres_service.disconnect()
        return 0

    def kafka_run_consumer(self):
        """
        Loop through the messaging queue and persist each message to Postgres.

        Blocks indefinitely; intended to be run in its own thread.
        """
        for msg in self.m_kafka_consumer:
            m = msg.value.decode("utf-8")
            json_message = json.loads(m)
            # Local variable holds the timestamp string produced by the monitor.
            datetime = json_message["datetime"]
            url = json_message["url"]
            status_code = json_message["status_code"]
            filtered_text = json.dumps(json_message["filtered_text"])
            # One connect/disconnect cycle per message keeps the demo simple
            # (NOTE(review): costly for high message rates).
            self.m_postgres_service.connect()
            self.m_postgres_service.store_to_database(datetime,url,status_code,filtered_text)
            self.m_postgres_service.disconnect()
            # Commit the offset only after the message has been stored.
            self.m_kafka_consumer.commit()
        return 0
| {"/KafkaService.py": ["/AivenService.py"], "/AivenCloudTest.py": ["/AivenService.py", "/ServiceConfigPostGres.py", "/KafkaService.py", "/MonitoringTool.py", "/PostgresService.py"]} |
45,548 | joncastillo/AivenCloudTest | refs/heads/main | /MonitoringTool.py | import json
import re
import requests
import time
from datetime import datetime
class MonitoringTool(object):
    """
    This monitoring tool polls a website for its status and snippets of its contents.

    Attributes
    ----------
    m_url : str
        the url of the website to be polled, e.g. https://google.com
    m_regex : str
        the regex applied to the page contents for filtering, like grep
    m_delay : float
        delay between polls in seconds
    m_kafka_service : KafkaService
        Kafka service helper for producer/consumer methods.
    m_kafka_topic : str
        Kafka topic, the kafka channel where messages are transmitted.
    """
    def __init__(self, url, regex, delay, kafka_service, kafka_topic):
        self.m_url = url
        self.m_regex = regex
        self.m_delay = delay
        self.m_kafka_service = kafka_service
        self.m_kafka_topic = kafka_topic

    def __execute_regex(self, string):
        """
        Execute the configured regular expression on a specified string.

        Parameters
        ----------
        string : str
            The string to be processed.

        Returns
        -------
        list
            One dict per match with keys "location" (start offset) and
            "text" (matched substring).
        """
        output = []
        pattern = re.compile(self.m_regex)
        for match in pattern.finditer(string):
            output.append({"location": match.start(), "text": match.group()})
        return output

    def execute(self):
        """
        Continuously poll the url for status_code and contents and publish the
        result to Kafka. This is meant to be run asynchronously.
        """
        while True:
            try:
                # Bug fix: requests.get without a timeout can block forever on a
                # stalled server, freezing the monitoring thread permanently.
                r = requests.get(self.m_url, allow_redirects=True, timeout=30)
            except requests.RequestException as error:
                # Robustness fix: a transient network error previously killed
                # the thread with an unhandled exception; log and retry instead.
                print("monitoring request failed: {}".format(error))
                time.sleep(self.m_delay)
                continue
            status_code = r.status_code
            content = str(r.content)
            regex_out = self.__execute_regex(content)
            now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
            output = {"datetime": now, "url": self.m_url, "status_code": status_code, "filtered_text": regex_out}
            output_string = json.dumps(output)
            self.m_kafka_service.kafka_send(self.m_kafka_topic, output_string)
            time.sleep(self.m_delay)
| {"/KafkaService.py": ["/AivenService.py"], "/AivenCloudTest.py": ["/AivenService.py", "/ServiceConfigPostGres.py", "/KafkaService.py", "/MonitoringTool.py", "/PostgresService.py"]} |
45,549 | joncastillo/AivenCloudTest | refs/heads/main | /AivenCloudTest.py | import argparse
import threading
from AivenService import AivenService
from ServiceConfigKafka import ServiceConfigKafka
from ServiceConfigPostGres import ServiceConfigPostGres
from KafkaService import KafkaService
from MonitoringTool import MonitoringTool
from PostgresService import PostgresService
def main():
    """
    Main function for testing Aiven's Kafka and Postgres Databases
    Usage: python AivenCloudTest.py --url https://www.cnn.com --regex "news.+\." --delay 30
    This samples https://www.cnn.com every 30 seconds for snippets starting with news and ends with a period.
    Requires (via pip install):
        aiven-client 2.10.0
        kafka-python 2.0.2
        psycopg2 2.8.6
        requests 2.25.1
    References:
        https://github.com/aiven/aiven-examples
        https://help.aiven.io/en/articles/489572-getting-started-with-aiven-for-apache-kafka
        https://kafka.apache.org/intro
    """
    parser = argparse.ArgumentParser(description="Test Aiven-Kafka Producer+Consumer and Aiven-Postgres.")
    parser.add_argument('--url', dest='url', type=str, help='the url to monitor', required=True)
    parser.add_argument('--regex', dest='regex', type=str, help='a regex to filter out contents of the webpage',
                        required=True)
    parser.add_argument('--delay', dest='delay', type=int, help='the delay between scans in seconds', required=True)
    args = parser.parse_args()
    # Topic name for Kafka
    topic = "topic_monitor_message"
    # Kafka configuration, passwords are of no value outside my Aiven projects
    # NOTE(review): credentials are hard-coded in source; move to environment
    # variables or a config file for anything beyond a throwaway demo.
    config_kafka = ServiceConfigKafka(
        "aiven-assignment-kafka",
        "jonathan-9d93",
        "password0!",
        17044,
        "accesskey.key",
        "access.crt",
        "ca.crt")
    # Posgres configuration, passwords are of no value outside my Aiven projects
    config_postgres = ServiceConfigPostGres(
        "aiven-assignment-postgres",
        "avnadmin",
        "rv64oul8g05h4ebm",
        17042,
        "defaultdb")
    # Aiven.io service credentials and configuration, passwords are of no value outside my Aiven projects
    aiven_service = AivenService(
        "jonathan-9d93",
        "jonathan.chioco.castillo@gmail.com",
        "aiven81611",
        config_kafka,
        config_postgres)
    # login
    aiven_service.login()
    # create Aiven kafka service if missing
    aiven_service.kafka_init()
    # create Aiven postgres service if missing
    aiven_service.postgres_init()
    # Initialise internal Postgres helper class
    postgres_service = PostgresService(aiven_service)
    # Initialise internal Kafka helper class
    kafka_service = KafkaService(aiven_service, postgres_service)
    # Create Kafka Topic if missing
    kafka_service.kafka_create_topic(topic)
    # Create Kafka Producer
    kafka_service.kafka_create_producer()
    # Create Kafka Consumer (Supposed to be in another machine)
    kafka_service.kafka_create_consumer(topic)
    # Launch Kafka Consumer Thread
    thread_kafka_consumer = threading.Thread(target=kafka_service.kafka_run_consumer, name="kafka consumer", args=())
    thread_kafka_consumer.start()
    # Launch Monitoring Tool (Kafka Producer)
    monitoringTool = MonitoringTool(args.url, args.regex, args.delay, kafka_service, topic)
    thread_kafka_monitoringTool = threading.Thread(target=monitoringTool.execute, name="monitoring tool", args=())
    thread_kafka_monitoringTool.start()
    # ensure that the main thread doesn't exit
    thread_kafka_consumer.join()
    thread_kafka_monitoringTool.join()


if __name__ == "__main__":
    main()
| {"/KafkaService.py": ["/AivenService.py"], "/AivenCloudTest.py": ["/AivenService.py", "/ServiceConfigPostGres.py", "/KafkaService.py", "/MonitoringTool.py", "/PostgresService.py"]} |
45,561 | DiMaksimov/DB2_Test | refs/heads/master | /users/migrations/0002_auto_20190403_1827.py | # Generated by Django 2.2 on 2019-04-03 15:27
from django.db import migrations, models
# Auto-generated Django schema migration: makes DB2User.birthday nullable.
class Migration(migrations.Migration):

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        # Re-declare the field with null=True so rows may exist without a birthday.
        migrations.AlterField(
            model_name='db2user',
            name='birthday',
            field=models.DateField(null=True),
        ),
    ]
| {"/users/forms.py": ["/users/models.py"], "/users/admin.py": ["/users/forms.py", "/users/models.py"]} |
45,562 | DiMaksimov/DB2_Test | refs/heads/master | /users/models.py | from django.contrib.auth.models import AbstractUser
from django.db import models
class DB2User(AbstractUser):
    """Custom user model extending Django's AbstractUser with profile fields."""

    # Optional date of birth; nullable so accounts can be created without it.
    birthday = models.DateField(null=True)
    # Free-text location fields with an explicit 'Unknown' default.
    country = models.CharField(max_length=50, default='Unknown')
    city = models.CharField(max_length=50, default='Unknown')

    def __str__(self):
        # Use the email address as the human-readable representation.
        return self.email
| {"/users/forms.py": ["/users/models.py"], "/users/admin.py": ["/users/forms.py", "/users/models.py"]} |
45,563 | DiMaksimov/DB2_Test | refs/heads/master | /users/forms.py | from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import DB2User
class DB2UserCreationForm(UserCreationForm):
    """
    Sign-up form for the custom DB2User model.

    Bug fix: the inner ``Meta`` must inherit from ``UserCreationForm.Meta``,
    not from the form class itself as the original code did; subclassing the
    whole form only worked by accident because ``model`` and ``fields`` are
    both overridden here (see Django's custom-user documentation).
    """
    class Meta(UserCreationForm.Meta):
        model = DB2User
        fields = ('username', 'email', 'birthday', 'country', 'city', )

    # Render the birthday with select widgets instead of a plain text input.
    birthday = forms.DateField(widget=forms.SelectDateWidget)
class DB2UserChangeForm(UserChangeForm):
    """Edit form for the custom DB2User model (used in the admin)."""

    class Meta:
        model = DB2User
        fields = ('username', 'email', 'birthday', 'country', 'city', )

    # Render the birthday with select widgets instead of a plain text input.
    birthday = forms.DateField(widget=forms.SelectDateWidget)
| {"/users/forms.py": ["/users/models.py"], "/users/admin.py": ["/users/forms.py", "/users/models.py"]} |
45,564 | DiMaksimov/DB2_Test | refs/heads/master | /users/admin.py | from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin
from .forms import DB2UserCreationForm, DB2UserChangeForm
from .models import DB2User
class CustomUserAdmin(UserAdmin):
    """Django admin integration for DB2User using the custom forms."""

    add_form = DB2UserCreationForm  # form shown on the "add user" page
    form = DB2UserChangeForm        # form shown on the "change user" page
    model = DB2User
    list_display = ['email', 'username', ]  # columns in the admin list view


# Register the custom user model with its admin configuration.
admin.site.register(DB2User, CustomUserAdmin)
| {"/users/forms.py": ["/users/models.py"], "/users/admin.py": ["/users/forms.py", "/users/models.py"]} |
45,566 | schallerdavid/moldbprep | refs/heads/master | /moldbprep/io.py | import multiprocessing
import os
import pandas as pd
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit import RDLogger
from rdkit.Chem.Descriptors import ExactMolWt
import re
import sys
import time
def count_sdf_mols(file_path):
    """
    Count the molecules stored in an sdf file.

    Molecules in sdf format end with a '$$$$' record separator, so counting
    separator lines equals counting molecules.

    Parameters
    ----------
    file_path : str
        Full path to sdf file.

    Returns
    -------
    int
        Number of molecules in the file.
    """
    print('Counting molecules in {}...'.format(file_path))
    # backslashreplace keeps the count robust against undecodable bytes.
    with open(file_path, 'r', errors='backslashreplace') as sdf_file:
        return sum(1 for record_line in sdf_file if '$$$$' in record_line)
def sdf_properties(file_path):
    """
    Return the property names stored with the first molecule of an sdf file.

    Property blocks in sdf format start with a line like '> <NAME>'. Reading
    stops at the first record terminator ('$$$$'), i.e. all molecules of the
    file are assumed to carry the same properties as the first one.

    Parameters
    ----------
    file_path : str
        Full path to sdf file.

    Returns
    -------
    properties : list
        Property names found in the first record of the sdf file.
    """
    properties = []
    with open(file_path, 'r') as sdf_file:
        for line in sdf_file:
            # Bug fix: the original tested the identical "'> <' in line"
            # condition twice ("a or a"); the duplicate was dead code.
            if '> <' in line:
                # '<.*>|$' always matches, so .group() is safe; [1:-1] strips '<' and '>'.
                properties.append(re.search('<.*>|$', line).group()[1:-1])
            elif '$$$$' in line:
                break
    return properties
def database_prompt(file_path):
    """
    This function prompts the user to enter the vendor name and to identify the sdf field storing the molecule
    identifier in an sdf file.

    Parameters
    ----------
    file_path : str
        Full path to sdf file.

    Returns
    -------
    vendor : str
        Name of vendor.
    identifier_field : str
        Name of sdf field storing the molecule identifier; 'None' if the user
        indicates that no identifier field is available.
    """
    vendor = ''
    id_column = 0
    # 'None' is offered as choice 1 so the user can opt out of an identifier field.
    properties = ['None'] + sdf_properties(file_path)
    # Re-prompt until a non-empty vendor name is entered.
    while len(vendor) < 1:
        vendor = input('Provide a vendor name for sdf file located at {}.\n>>> '.format(file_path))
    # Re-prompt until a valid 1-based menu number is entered.
    # NOTE(review): int() raises ValueError on non-numeric input and aborts the
    # prompt loop -- confirm whether that interactive crash is acceptable.
    while id_column not in range(1, len(properties) + 1):
        id_column = int(input('Enter the number for the sdf field storing the molecule identifier.\n' + '\n'.join(
            '{} - '.format(counter + 1) + property for counter, property in enumerate(properties)) + '\n>>> '))
    identifier_field = properties[int(id_column) - 1]
    return vendor, identifier_field
def time_to_text(seconds):
    """
    Convert a time in seconds into a short human-readable string.

    Parameters
    ----------
    seconds : float
        Time in seconds.

    Returns
    -------
    str
        Time in s, min, h, d, weeks or 'years' depending on input.
    """
    # Idiom fix: guard clauses from smallest to largest unit replace the
    # original five-level nested if/else; boundary behavior is unchanged
    # (every original '>' threshold becomes an inclusive '<=' on this side).
    if seconds <= 60:
        return '{} s'.format(int(seconds))
    if seconds <= 3600:
        return '{} min'.format(round(seconds / 60, 1))
    if seconds <= 86400:
        return '{} h'.format(round(seconds / 3600, 1))
    if seconds <= 1209600:
        return '{} d'.format(round(seconds / 86400, 1))
    if seconds <= 62899252:
        return '{} weeks'.format(round(seconds / 1209600, 1))
    return 'years'
def update_progress(progress, progress_info, eta):
    """
    Render a one-line progress bar on the terminal.

    Parameters
    ----------
    progress: float
        Progress of process described by number between 0 and 1.
    progress_info: str
        Info text that should be placed before the progress bar.
    eta: float
        Estimated time needed for finishing the process; ignored once the
        progress reaches 1.
    """
    bar_length = 10
    filled = int(bar_length * progress)
    if progress == 1.0:
        status = ' Done\n'
    else:
        status = ' ETA {:8}'.format(time_to_text(eta))
    bar = '=' * filled + ' ' * (bar_length - filled)
    sys.stdout.write('\r{}: [{}] {:>5.1f}%{}'.format(progress_info, bar, progress * 100,
                                                     status))
    sys.stdout.flush()
    return
def sdf_text(mol, properties):
    """
    This function converts an RDKit molecule into an sdf representation as text.

    (The original docstring documented a non-existent 'mol_name' parameter;
    the molecule title comes from the mol block itself.)

    Parameters
    ----------
    mol : rdkit.Chem.rdchem.Mol
        An RDKit molecule.
    properties: dict
        Dictionary of sdf properties with property name as key and property value as value;
        each entry is written as a '> <name>' data block.

    Returns
    -------
    sdf_text: str
        Molecule as text in sdf format, terminated by the '$$$$' record separator.
    """
    sdf_text = Chem.MolToMolBlock(mol)
    sdf_text += '\n'.join(['> <{}>\n{}\n'.format(key, value) for key, value in properties.items()])
    sdf_text += '\n$$$$\n'
    return sdf_text
def sdf_text_worker(merged_results, vendors, num_mols, start_time, mol_counter, fragment_counter, drug_like_counter,
                    big_counter, parent_fragment_collector, parent_drug_like_collector, parent_big_collector,
                    failures, addhs, embed, verbose):
    """
    Worker process: convert a chunk of result rows to sdf text and sort the
    molecules by exact molecular weight into fragment (< 300), drug-like
    (300-700) and big (700-1200) buckets; molecules >= 1200 Da are dropped.

    Parameters
    ----------
    merged_results : pandas.DataFrame
        Chunk of processed molecules with smiles and one ID column per vendor.
    vendors : list
        Vendor names (column names holding the IDs).
    num_mols : int
        Total number of molecules (for progress reporting).
    start_time : float
        Start time of the writing phase (for ETA computation).
    mol_counter, fragment_counter, drug_like_counter, big_counter : multiprocessing.Value
        Shared counters, incremented under their locks.
    parent_fragment_collector, parent_drug_like_collector, parent_big_collector : multiprocessing.manager.list
        Shared collectors receiving this worker's sdf text blocks.
    failures : multiprocessing.manager.list
        Collector for error descriptions.
    addhs : bool
        Add explicit hydrogens before writing.
    embed : bool
        Generate a 3D conformation before writing.
    verbose : bool
        If RDKit warnings should be displayed.
    """
    if not verbose:
        RDLogger.DisableLog('rdApp.*')
    # Collect locally first; the shared manager lists are extended once at the end.
    fragment_collector, drug_like_collector, big_collector = [], [], []
    for index, row in merged_results.iterrows():
        try:
            mol = Chem.MolFromSmiles(row['smiles'])
            if addhs:
                mol = Chem.AddHs(mol)
            if embed:
                AllChem.EmbedMolecule(mol)
            properties = {vendor: row[vendor] for vendor in vendors}
            # Molecule name: comma-joined vendor IDs, truncated to 20 characters.
            mol_name = ','.join([identifier for identifier in properties.values() if len(identifier) > 0])
            if len(mol_name) > 20:
                mol_name = mol_name[:17] + '...'
            mol.SetProp('_Name', mol_name)
            properties['smiles'] = row['smiles']
            molecular_weight = ExactMolWt(mol)
        except:
            # NOTE(review): bare except -- any RDKit failure is recorded and the
            # molecule skipped via the sentinel weight below (>= 1200 cutoff).
            failures.append(' '.join(['write_error', row['smiles']]))
            molecular_weight = 10000
        if molecular_weight < 1200:
            if molecular_weight < 300:
                with fragment_counter.get_lock():
                    fragment_counter.value += 1
                fragment_collector.append(sdf_text(mol, properties))
            elif 300 <= molecular_weight < 700:
                with drug_like_counter.get_lock():
                    drug_like_counter.value += 1
                drug_like_collector.append(sdf_text(mol, properties))
            else:
                with big_counter.get_lock():
                    big_counter.value += 1
                big_collector.append(sdf_text(mol, properties))
        with mol_counter.get_lock():
            mol_counter.value += 1
        update_progress(mol_counter.value / num_mols, 'Progress of writing',
                        ((time.time() - start_time) / mol_counter.value) * (num_mols - mol_counter.value))
    parent_fragment_collector.extend(fragment_collector)
    parent_drug_like_collector.extend(drug_like_collector)
    parent_big_collector.extend(big_collector)
    return
def write_sdf(merged_results, mols_per_file, output_path, vendors, failures, num_processes, addhs, embed, verbose):
    """
    This function writes molecules to sd-files with vendor IDs as properties.

    Molecules are sorted by the workers into three weight categories
    (fragment/drug-like/big); each category rolls over to a new numbered file
    after mols_per_file molecules have been written.

    Parameters
    ----------
    merged_results: pandas.DataFrame
        Processed molecules with smiles and vendor IDs.
    mols_per_file: int
        Number of molecules writter per file.
    output_path: str
        Directory for writing sd-files.
    vendors: list
        Vendor names; one ID property is written per vendor.
    failures: multiprocessing.manager.list
        Collector for error descriptions from the workers.
    num_processes: int
        Number of parallel worker processes.
    addhs: bool
        Add explicit hydrogens before writing.
    embed: bool
        Generate a 3D conformation before writing.
    verbose: bool
        If RDKit warnings should be displayed.
    """
    if not os.path.isdir(output_path):
        os.mkdir(output_path)
    manager = multiprocessing.Manager()
    mol_counter = multiprocessing.Value('i', 0)
    num_mols = merged_results.shape[0]
    # Per category: open file handle, rollover counter, shared molecule counter
    # and shared collector for sdf text blocks produced by the workers.
    fragment = open(os.path.join(output_path, 'fragment.sdf'), 'w')
    fragment_file_counter = 0
    fragment_counter = multiprocessing.Value('i', 0)
    fragment_collector = manager.list()
    drug_like = open(os.path.join(output_path, 'drug-like.sdf'), 'w')
    drug_like_file_counter = 0
    drug_like_counter = multiprocessing.Value('i', 0)
    drug_like_collector = manager.list()
    big = open(os.path.join(output_path, 'big.sdf'), 'w')
    big_file_counter = 0
    big_counter = multiprocessing.Value('i', 0)
    big_collector = manager.list()
    mols_per_job = 1000
    mols_in_memory = 10000
    # Split the dataframe into (start, end) row ranges of mols_per_job molecules.
    jobs = []
    for mol_start in range(0, num_mols, mols_per_job):
        if mol_start + mols_per_job <= num_mols:
            jobs.append((mol_start, mol_start + mols_per_job))
        else:
            jobs.append((mol_start, num_mols))
    # Group the jobs into chunks of num_processes, run one chunk at a time.
    job_chunks = []
    for job_start in range(0, len(jobs), num_processes):
        if job_start + num_processes <= len(jobs):
            job_chunks.append((job_start, job_start + num_processes))
        else:
            job_chunks.append((job_start, len(jobs)))
    start_time = time.time()
    for job_start, job_end in job_chunks:
        processes = [multiprocessing.Process(target=sdf_text_worker,
                                             args=(merged_results[jobs[job_id][0]: jobs[job_id][1]], vendors, num_mols,
                                                   start_time, mol_counter, fragment_counter, drug_like_counter,
                                                   big_counter, fragment_collector, drug_like_collector, big_collector,
                                                   failures, addhs, embed, verbose))
                     for job_id in range(job_start, job_end)]
        for process in processes:
            process.start()
        for process in processes:
            process.join()
        # Rollover: when a category exceeds mols_per_file, write the first
        # mols_per_file entries and keep the remainder. The slice
        # [0:mols_per_file - counter] is a NEGATIVE end index, i.e. "all but
        # the last (counter - mols_per_file) items" -- this equals the first
        # mols_per_file items only while counter == len(collector).
        # NOTE(review): the mols_in_memory flush below writes the collector
        # without resetting the counter, which breaks that invariant -- verify
        # the interaction for runs where both thresholds trigger.
        if fragment_counter.value > mols_per_file:
            fragment.write(''.join(fragment_collector[0:mols_per_file - fragment_counter.value]))
            fragment_collector = manager.list(fragment_collector[mols_per_file - fragment_counter.value:])
            fragment_counter.value = len(fragment_collector)
            fragment.close()
            fragment_file_counter += 1
            fragment = open(os.path.join(output_path, 'fragment_{}.sdf'.format(fragment_file_counter)), 'w')
        # Flush to disk early to bound memory held by the manager list.
        if len(fragment_collector) >= mols_in_memory:
            fragment.write(''.join(fragment_collector))
            fragment_collector = manager.list()
        if drug_like_counter.value > mols_per_file:
            drug_like.write(''.join(drug_like_collector[0:mols_per_file - drug_like_counter.value]))
            drug_like_collector = manager.list(drug_like_collector[mols_per_file - drug_like_counter.value:])
            drug_like_counter.value = len(drug_like_collector)
            drug_like.close()
            drug_like_file_counter += 1
            drug_like = open(os.path.join(output_path, 'drug-like_{}.sdf'.format(drug_like_file_counter)), 'w')
        if len(drug_like_collector) >= mols_in_memory:
            drug_like.write(''.join(drug_like_collector))
            drug_like_collector = manager.list()
        if big_counter.value > mols_per_file:
            big.write(''.join(big_collector[0:mols_per_file - big_counter.value]))
            big_collector = manager.list(big_collector[mols_per_file - big_counter.value:])
            big_counter.value = len(big_collector)
            big.close()
            big_file_counter += 1
            big = open(os.path.join(output_path, 'big_{}.sdf'.format(big_file_counter)), 'w')
        if len(big_collector) >= mols_in_memory:
            big.write(''.join(big_collector))
            big_collector = manager.list()
    # Drain whatever is left after the last chunk.
    if len(fragment_collector) > 0:
        fragment.write(''.join(fragment_collector))
    if len(drug_like_collector) > 0:
        drug_like.write(''.join(drug_like_collector))
    if len(big_collector) > 0:
        big.write(''.join(big_collector))
    fragment.close()
    drug_like.close()
    big.close()
    return
def write_statistics(num_mols, merged_results, vendors, output_path, failure_count):
    """
    Write statistics about merged databases.

    Parameters
    ----------
    num_mols : int
        Number of input molecules.
    merged_results : pandas.DataFrame
        Dataframe containing the merged results.
    vendors : list
        List of vendors.
    output_path : str
        Path to output directory.
    failure_count : int
        Number of failures during standardization and writing.
    """
    # Boolean mask per vendor: True where the molecule carries an ID for that vendor.
    vendor_matches = {vendor: merged_results[vendor] != '' for vendor in vendors}
    with open(os.path.join(output_path, 'database.statistics'), 'w') as file:
        file.write('Input: {} molecules\n\n'.format(num_mols))
        file.write('Vendor\tTotal\tUnique\n')
        for vendor in vendors:
            total = vendor_matches[vendor].sum()
            if len(vendors) > 1:
                # Unique = rows where this vendor has an ID and no other vendor does.
                unique = (vendor_matches[vendor] > pd.concat([vendor_matches[x] for x in vendors if x != vendor], axis=1
                                                             ).max(axis=1)).sum()
            else:
                unique = total
            file.write('\t'.join([vendor, str(total), str(unique)]) + '\n')
        file.write('\nCategory\tTotal\n')
        directory_contents = os.listdir(output_path)
        for file_name in ['fragment', 'drug-like', 'big']:
            mol_count = 0
            for directory_content in directory_contents:
                if file_name in directory_content:
                    mol_count_file = count_sdf_mols(os.path.join(output_path, directory_content))
                    mol_count += mol_count_file
                    if mol_count_file == 0:
                        # Bug fix: delete the empty file that was actually counted.
                        # The original always removed '<category>.sdf', even when a
                        # numbered chunk (e.g. 'fragment_2.sdf') was the empty one,
                        # and could attempt the same deletion twice.
                        os.remove(os.path.join(output_path, directory_content))
            file.write('{}\t{}\n'.format(file_name, mol_count))
        file.write('\nfailures\t{}'.format(failure_count))
    return
| {"/moldbprep/tests/test_standardize.py": ["/moldbprep/standardize.py"], "/moldbprep.py": ["/moldbprep/io.py", "/moldbprep/standardize.py"], "/moldbprep/standardize.py": ["/moldbprep/io.py"], "/moldbprep/tests/test_io.py": ["/moldbprep/io.py"]} |
45,567 | schallerdavid/moldbprep | refs/heads/master | /moldbprep/tests/test_standardize.py | from moldbprep.standardize import protonate_mol, enumerate_stereo_isomers, merge_ids
import pandas as pd
import pytest
from rdkit import Chem
# Regression cases for protonate_mol: each pair is (input SMILES, expected
# SMILES at physiological-style protonation). Covers amines, amidines,
# guanidines, carboxylic/phosphonic/sulfonic acids, acyl sulfonamides,
# tetrazoles and several already-correct molecules that must pass unchanged.
@pytest.mark.parametrize("input_smiles, output_smiles", [
    ("CNC1CCN(CCCN)CC1", "C[NH2+]C1CC[NH+](CCC[NH3+])CC1"),
    ("CC[O-]", "CCO"),
    ("CNC(N)=N", "CNC(N)=[NH2+]"),
    ("CCC(N)=N", "CCC(N)=[NH2+]"),
    ("CCC(O)=O", "CCC([O-])=O"),
    ("CCP(O)(O)=O", "CCP([O-])([O-])=O"),
    ("CS(=O)(=O)NC=O", "CS(=O)(=O)[N-]C=O"),
    ("CC1=NNN=N1", "CC1=N[N-]N=N1"),
    ("CC(=O)C=CO", "CC(=O)C=C[O-]"),
    ("c1(C2CCCCC2)[nH]nnn1", "c1(C2CCCCC2)nnn[n-]1"),
    ("c1(C2CCCCC2)n[nH]nn1", "c1(C2CCCCC2)nn[n-]n1"),
    ("c1(C2CCCCC2)nnn[n-]1", "c1(C2CCCCC2)nnn[n-]1"),
    ("c1(C2CCCCC2)nn[n-]n1", "c1(C2CCCCC2)nn[n-]n1"),
    ("O=C1C(O)=C(O)C([C@H](O)CO)O1", "O=C1C(O)=C([O-])C([C@H](O)CO)O1"),
    ("O=C1C(O)=C([O-])C([C@H](O)CO)O1", "O=C1C(O)=C([O-])C([C@H](O)CO)O1"),
    ("O=C(O)C1CCCCC1", "O=C([O-])C1CCCCC1"),
    ("S(=O)(=O)(NC(=O)NCCCC)c1ccc(C)cc1", "S(=O)(=O)([N-]C(=O)NCCCC)c1ccc(C)cc1"),
    ("S(=O)(=O)(Nc1noc(C)c1)c1ccc(N)cc1", "S(=O)(=O)(Nc1noc(C)c1)c1ccc(N)cc1"),
    ("P(=O)(O)(O)[C@@H]1[C@H](C)O1", "P(=O)([O-])([O-])[C@@H]1[C@H](C)O1"),
    ("S(=O)(=O)(O)c1ccc(C)cc1", "S(=O)(=O)([O-])c1ccc(C)cc1"),
    ("CNC(=O)Oc1ccc2N(C)[C@H]3N(C)CC[C@@]3(C)c2c1", "CNC(=O)Oc1ccc2N(C)[C@H]3[NH+](C)CC[C@@]3(C)c2c1"),
    ("S(=O)(=O)(NC(=N)N)c1ccc(N)cc1", "S(=O)(=O)(NC(=N)N)c1ccc(N)cc1"),
    ("S(CCN/C(=N/C#N)/NC)Cc1c(C)nc[nH]1", "S(CCN/C(=N/C#N)/NC)Cc1c(C)nc[nH]1"),
    ("Clc1cc2nccc(N[C@H](CCCN(CC)CC)C)c2cc1", "Clc1cc2nccc(N[C@H](CCC[NH+](CC)CC)C)c2cc1"),
    ("O=C(O)[C@@H](N)Cc1nc[nH]c1", "O=C([O-])[C@@H]([NH3+])Cc1nc[nH]c1"),
    ("FC1(F)C(N)CCCC1", "FC1(F)C(N)CCCC1"),
    ("[C@H](CN1C[C@@H](C)O[C@@H](C)C1)(Cc1ccc(C(CC)(C)C)cc1)C",
     "[C@H](CN1C[C@@H](C)O[C@@H](C)C1)(Cc1ccc(C(CC)(C)C)cc1)C"),
    ("O[C@@H](CNC)c1cc(O)c(O)cc1", "O[C@@H](C[NH2+]C)c1cc(O)c(O)cc1"),
    ("O(C)c1c(OC)cc(Cc2c(N)nc(N)nc2)cc1OC", "O(C)c1c(OC)cc(Cc2c(N)nc(N)nc2)cc1OC"),
    ("O=C(NN)c1ccncc1", "O=C(NN)c1ccncc1")
])
def test_protonate_mol(input_smiles, output_smiles):
    # Compare via canonical SMILES so atom ordering differences do not matter.
    assert Chem.MolToSmiles(protonate_mol(Chem.MolFromSmiles(input_smiles))) == \
           Chem.MolToSmiles(Chem.MolFromSmiles(output_smiles))
def test_enumerate_stereo_isomers():
    # FC(Cl)Br has a single stereocenter -> exactly the two enantiomers.
    assert sorted([Chem.MolToSmiles(mol) for mol in enumerate_stereo_isomers(Chem.MolFromSmiles('FC(Cl)Br'), 8)]) == \
           sorted(['F[C@H](Cl)Br', 'F[C@@H](Cl)Br'])
    # A molecule with more possible isomers is capped at the requested maximum of 4.
    assert len(enumerate_stereo_isomers(Chem.MolFromSmiles('BrC=CC1OC(C2)(F)C2(Cl)C1'), 4)) == 4
def test_merge_ids():
    # Rows with the same smiles must collapse into one row with comma-joined
    # vendor IDs; output is expected sorted by smiles.
    assert merge_ids(pd.DataFrame([['C', '1', ''],
                                   ['C', '2', ''],
                                   ['A', '3', ''],
                                   ['C', '', '1'],
                                   ['B', '', '2']], columns=['smiles', 'DB1', 'DB2']), ['DB1', 'DB2']).equals(
        pd.DataFrame([['A', '3', ''],
                      ['B', '', '2'],
                      ['C', '1,2', '1']], columns=['smiles', 'DB1', 'DB2']))
| {"/moldbprep/tests/test_standardize.py": ["/moldbprep/standardize.py"], "/moldbprep.py": ["/moldbprep/io.py", "/moldbprep/standardize.py"], "/moldbprep/standardize.py": ["/moldbprep/io.py"], "/moldbprep/tests/test_io.py": ["/moldbprep/io.py"]} |
45,568 | schallerdavid/moldbprep | refs/heads/master | /moldbprep.py | """
moldbprep.py
Prepare, standardize and merge molecule databases for virtual screening.
Handles the primary functions
"""
import argparse
import math
from moldbprep.io import count_sdf_mols, database_prompt, write_sdf, write_statistics, time_to_text
from moldbprep.standardize import standardize_mols, merge_ids
import multiprocessing
import os
import pandas as pd
import time
# ASCII-art banner shown at startup and in the argparse help text.
logo = '\n'.join([" _ _ _ v. alpha ",
                  " _ __ ___ ___ | | __| | |__ _ __ _ __ ___ _ __ ",
                  " | '_ ` _ \ / _ \| |/ _` | '_ \| '_ \| '__/ _ \ '_ \ ",
                  " | | | | | | (_) | | (_| | |_) | |_) | | | __/ |_) |",
                  " |_| |_| |_|\___/|_|\__,_|_.__/| .__/|_| \___| .__/ ",
                  " |_| |_| ",
                  " Prepare, standardize and merge molecule databases ",
                  " for virtual screening. "])
def standardize_processes(sdf_file_dict, mols_per_job):
    """
    Split the molecules of every input sdf file into standardization jobs.

    Parameters
    ----------
    sdf_file_dict : dict
        Maps sdf file path to [number of molecules, vendor, identifier field].
    mols_per_job : int
        Maximal number of molecules handled by a single job.

    Returns
    -------
    list
        Job descriptions as dicts with keys sdf_path, mol_start, mol_end
        (inclusive), vendor and identifier_field.
    """
    job_list = []
    for sdf_path, file_info in sdf_file_dict.items():
        num_mols = file_info[0]
        vendor = file_info[1]
        identifier_field = file_info[2]
        for mol_start in range(0, num_mols, mols_per_job):
            # Inclusive end index, clipped to the last molecule of the file.
            mol_end = min(mol_start + mols_per_job, num_mols) - 1
            job_list.append({'sdf_path': sdf_path, 'mol_start': mol_start, 'mol_end': mol_end,
                             'vendor': vendor, 'identifier_field': identifier_field})
    return job_list
if __name__ == "__main__":
    # Command-line interface; see the module docstring for usage examples.
    parser = argparse.ArgumentParser(prog='moldbprep', description=logo, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-i', dest='input_paths',
                        help='directory containing sdf files, or paths to input sdf files seperated by comma',
                        required=True)
    parser.add_argument('-o', dest='output_path', help='path to output folder', default='.')
    parser.add_argument('-p', dest='num_processes', help='number of parallel processes', default=1)
    parser.add_argument('-m', dest='mols_per_file', help='number of molecules per file', default=1000000)
    parser.add_argument('-s', dest='max_stereo_isomers',
                        help='maximal number of stereo isomers to generate per molecule, default=0 --> no enumeration',
                        default=0)
    parser.add_argument('-t', dest='tautomer', action='store_true', help='standardize tautomers')
    parser.add_argument('-e', dest='embed', action='store_true', help='generate 3D conformation')
    parser.add_argument('-a', dest='addhs', action='store_true', help='add hydrogens')
    parser.add_argument('-v', dest='verbose', action='store_true', help='show RDKit warnings')
    # Idiom fix: parse the command line once. The original re-invoked
    # parser.parse_args() for every single option lookup (~12 times).
    args = parser.parse_args()
    # Accept either a directory of sdf files or an explicit comma-separated list.
    if os.path.isdir(args.input_paths):
        input_directory = os.path.abspath(args.input_paths)
        input_paths = [os.path.join(input_directory, path) for path in os.listdir(input_directory)
                       if path[-4:] == '.sdf']
    else:
        input_paths = [os.path.abspath(path) for path in args.input_paths.split(',')]
    output_path = os.path.abspath(args.output_path)
    num_processes = int(args.num_processes)
    mols_per_file = int(args.mols_per_file)
    max_stereo_isomers = int(args.max_stereo_isomers)
    tautomer = args.tautomer
    embed = args.embed
    addhs = args.addhs
    verbose = args.verbose
    print(logo)
    # Ask the user for vendor name and identifier field of every input file.
    sdf_file_dict = {file_path: [count_sdf_mols(file_path), *database_prompt(file_path)] for file_path in input_paths}
    vendors = list(set([value[1] for value in sdf_file_dict.values()]))
    num_mols = sum([value[0] for value in sdf_file_dict.values()])
    max_mols_per_db = max([value[0] for value in sdf_file_dict.values()])
    print('Standardizing {} molecules from {} vendors...'.format(num_mols, len(vendors)))
    start_time = time.time()
    # Shared state for the standardization worker processes.
    manager = multiprocessing.Manager()
    results = manager.list()
    failures = manager.list()
    jobs = manager.list()
    for job in standardize_processes(sdf_file_dict, math.ceil(max_mols_per_db / num_processes)):
        jobs.append(job)
    mol_counter = multiprocessing.Value('i', 0)
    processes = [multiprocessing.Process(target=standardize_mols, args=(jobs, mol_counter, num_mols, results,
                                                                        start_time, vendors, max_stereo_isomers,
                                                                        failures, tautomer, verbose))
                 for process_id in range(num_processes)]
    for process in processes:
        process.start()
    for process in processes:
        process.join()
    print('Processing results...')
    # Merge identical molecules coming from different vendors into single rows.
    results = pd.DataFrame(list(results), columns=['smiles'] + vendors)
    results = merge_ids(results, vendors)
    print('Writing {} molecules...'.format(results.shape[0]))
    write_sdf(results, mols_per_file, output_path, vendors, failures, num_processes, addhs, embed, verbose)
    failures = list(failures)
    if len(failures) > 0:
        with open(os.path.join(output_path, 'moldbprep.failures'), 'w') as file:
            file.write('\n'.join(failures))
    write_statistics(num_mols, results, vendors, output_path, len(failures))
    print('Finished after {}.'.format(time_to_text(time.time() - start_time)))
| {"/moldbprep/tests/test_standardize.py": ["/moldbprep/standardize.py"], "/moldbprep.py": ["/moldbprep/io.py", "/moldbprep/standardize.py"], "/moldbprep/standardize.py": ["/moldbprep/io.py"], "/moldbprep/tests/test_io.py": ["/moldbprep/io.py"]} |
45,569 | schallerdavid/moldbprep | refs/heads/master | /moldbprep/standardize.py | from moldbprep.io import update_progress
import pandas as pd
from molvs.standardize import Standardizer, LargestFragmentChooser, Uncharger
from molvs.tautomer import TautomerCanonicalizer
from rdkit import Chem
from rdkit import RDLogger
from rdkit.Chem.AllChem import ReactionFromSmarts
from rdkit.Chem.Descriptors import ExactMolWt
from rdkit.Chem.EnumerateStereoisomers import EnumerateStereoisomers, StereoEnumerationOptions
import time
def protonate_mol(mol):
    """
    This function protonates molecules based on substructure patterns.

    The molecule is first neutralized, then each SMARTS reaction below is
    applied repeatedly until it no longer matches, converting functional
    groups to their expected charge state (acids deprotonated, basic
    amines/amidines/guanidines protonated, tetrazoles deprotonated).

    Parameters
    ----------
    mol : rdkit.Chem.rdchem.Mol
        An RDKit molecule.

    Returns
    -------
    mol : rdkit.Chem.rdchem.Mol
        A protonated RDKit molecule.
    """
    # Map of description -> SMARTS reaction; the left side encodes the group to
    # transform plus exclusion patterns, the right side the charged product.
    transformations = {
        'acids C=C-OH': '[H][O;$(OC=CC(=O))!$([-1])!$(OC[-1]):1][C:2]>>[C:2][O-;$(OC=CC(=O)):1]',
        'acids NH': '[H][NH1;$(N(C=O)S(=O)(=O)[#6])!$([-1])!$(N*[-1]):1]([C:3])[S:2]>>[C:3][N-;$(N(C(=O))S(=O)(=O)):1][S:2]',
        'acids OH': '[H][O;$(O[C,S,P]=O)!$([-1])!$(O[C,S][-1]):1][A;C,S,P:2]>>[O-;$(O[C,S,P]=O):1][A;C,S,P:2]',
        'aliphatic amines': '[#1,#6:4][N;!$([+1])!$(N=*)!$(N[+1])!$(N*([+1,#8,#15,#16,F,Cl]))!$(N*#*)!$(N**[+1])!$(N[#7,#8,#15,#16,c])!$(NC=[#7,#8,#16])!$(N#*)!$(N@*@*@O)!$(NC(F)F)!$(NCC(F)F)!$(NC=C[C,S,N]~[NH0,OH0]):1]([#1,#6:2])[#1,#6:3]>>[H][N+:1]([#1,#6:4])([#1,#6:2])[#1,#6:3]',
        'amidines': '[N:3][C:2]=[N;!$([+1])!$(N[#7,#8])!$(N=CN[#7,#8])!$(N=C([N,O])N)!$(NS(=O)(=O))!$(N=CNS(=O)(=O))!$(N(c)=CS)!$(N=C(S)Nc)!$(N=CN=*)!$(NC#N)!$(N=CNC#N)!$(N=CNC=[#8,#16])!$(N(C=*)=CNC=*)!$([N](=[CX3])[CX3]):1][#1,#6:4]>>[H][N+:1]([#1,#6:4])=[C:2][N:3]',
        'guanidines': '[N:4][C:2]([N:3])=[N;!$([+1])!$(NC[+1])!$(N=CN=*)!$(N(C(=O))=CNC(=O))!$(N=C(NC=O)NC=O)!$(N=CN*(~O)~O)!$(NC(=O)[CX3])!$(NC#N):1][#1,#6:5]>>[H][N+:1]([#1,#6:5])=[C:2]([N:4])[N:3]',
        'tetrazole1': '[c:1]1[n:2][n:3]([H])[n:4][n:5]1>>[c:1]1[n:2][n-:3][n:4][n:5]1',
        'tetrazole2': '[c:1]1[n:2][n:3][n:4][n:5]1([H])>>[c:1]1[n:2][n:3][n:4][n-:5]1'
    }
    # Start from a neutral molecule with explicit hydrogens so the SMARTS
    # patterns (which reference [H]) can match.
    mol = Uncharger().uncharge(mol)
    mol = Chem.AddHs(mol)
    for smarts in transformations.values():
        # Seed with a dummy entry so the while loop runs at least once.
        products = [0]
        rxn = ReactionFromSmarts(smarts)
        while len(products) > 0:
            products = rxn.RunReactants((mol,))
            if len(products) > 0:
                # Round-trip through SMILES to sanitize the product, then
                # re-add hydrogens for the next matching round.
                mol = Chem.MolFromSmiles(Chem.MolToSmiles(products[0][0]))
                mol = Chem.AddHs(mol)
    mol = Chem.RemoveHs(mol)
    return mol
def enumerate_stereo_isomers(mol, max_stereo_isomers):
    """
    Enumerate stereo isomers of a molecule.

    Parameters
    ----------
    mol : rdkit.Chem.rdchem.Mol
        An RDKit molecule.
    max_stereo_isomers : int
        Maximal number of stereo isomers to generate.

    Returns
    -------
    tuple
        Enumerated RDKit molecules (only embeddable, unique isomers).
    """
    opts = StereoEnumerationOptions(tryEmbedding=True, unique=True, maxIsomers=max_stereo_isomers)
    return tuple(EnumerateStereoisomers(mol, options=opts))
def standardize_mols(jobs, mol_counter, num_mols, results, start_time, vendors, max_stereo_isomers, failures,
                     tautomer, verbose):
    """
    This function passes molecules to the standardization functions.

    Parameters
    ----------
    jobs: multiprocessing.manager.list
        A list containing job information as dictionaries.
    mol_counter: multiprocessing.manager.value
        A counter keeping track of processed molecules.
    num_mols: int
        Total number of molecules to be processed.
    results: multiprocessing.manager.list
        A list containing lists describing the processed molecules; extended in place.
    start_time: float
        Starting time of molecule processing.
    vendors: list
        List of vendors.
    max_stereo_isomers: int
        Maximal number of stereo isomers to generate per molecule.
    failures: multiprocessing.manager.list
        A list collecting human-readable descriptions of molecules that failed standardization.
    tautomer: bool
        If the canonical tautomer should be generated.
    verbose : bool
        If RDKit warning should be displayed.
    """
    if not verbose:
        RDLogger.DisableLog('rdApp.*')
    job = 'initiate'
    processed_mols = []
    while job is not None:
        try:
            # jobs.pop(0) raises IndexError when the shared queue is drained,
            # which terminates the loop via the outer except below
            job = jobs.pop(0)
            vendor_position = vendors.index(job['vendor'])
            supplier = Chem.SDMolSupplier(job['sdf_path'])
            for mol_id in range(job['mol_start'], job['mol_end'] + 1):
                mol = supplier[mol_id]
                if job['identifier_field'] == 'None':
                    identifier = 'unknown'
                else:
                    try:
                        # mol is None for unparsable SDF records; fall back to 'unknown'
                        identifier = mol.GetProp(job['identifier_field'])
                    except AttributeError:
                        identifier = 'unknown'
                try:
                    # generate smiles for error catching
                    smiles = 'unknown'
                    smiles = Chem.MolToSmiles(mol)
                    # default standardization from molvs
                    mol = Standardizer().standardize(mol)
                    # choose largest fragment
                    mol = LargestFragmentChooser().choose(mol)
                    # canonicalize tautomer
                    if tautomer:
                        mol = TautomerCanonicalizer().canonicalize(mol)
                    # protonate mol
                    mol = protonate_mol(mol)
                    # molecular weight will not change anymore
                    if ExactMolWt(mol) < 1200:
                        # enumerate stereo isomers and append mols
                        if max_stereo_isomers > 0:
                            for mol in enumerate_stereo_isomers(mol, max_stereo_isomers):
                                mol_as_list = [Chem.MolToSmiles(mol)] + [''] * len(vendors)
                                mol_as_list[1 + vendor_position] = identifier
                                processed_mols.append(mol_as_list)
                        else:
                            mol_as_list = [Chem.MolToSmiles(mol)] + [''] * len(vendors)
                            mol_as_list[1 + vendor_position] = identifier
                            processed_mols.append(mol_as_list)
                except Exception:
                    # record the failure but keep processing the remaining molecules;
                    # narrowed from a bare except so KeyboardInterrupt/SystemExit propagate
                    failures.append(' '.join(['standardize_error', smiles, job['vendor'], identifier]))
                with mol_counter.get_lock():
                    mol_counter.value += 1
                    update_progress(mol_counter.value / num_mols, 'Progress of standardization',
                                    ((time.time() - start_time) / mol_counter.value) * (num_mols - mol_counter.value))
        except IndexError:
            job = None
    results += processed_mols
    return
def merge_ids(results, vendors):
    """
    This function merges identifiers from vendors for the same molecule.

    Parameters
    ----------
    results: pandas.DataFrame
        A dataframe with columns smiles, vendor1, ..., vendorx.
    vendors: list
        A list containing vendor names matching the ones in results.

    Returns
    -------
    merged_results: pandas.DataFrame
        A dataframe with columns smiles, vendor1, ..., vendorx. Duplicates of smiles are removed by merging vendor
        identifiers with a comma.
    """
    per_vendor_frames = []
    print('Merging molecules and identifiers for each vendor...')
    for vendor in vendors:
        print('Merging {} database...'.format(vendor))
        # keep only rows that actually carry an identifier for this vendor
        with_id = results[results[vendor] != '']
        ids_joined = with_id.groupby(['smiles'])[vendor].apply(','.join).to_frame().reset_index()
        # pad with empty columns for all other vendors so the frames stack cleanly
        remaining = [name for name in vendors if name != vendor]
        padding = pd.DataFrame([[''] * len(remaining)] * ids_joined.shape[0], columns=remaining)
        per_vendor_frames.append(pd.concat([ids_joined, padding], axis=1))
    stacked = pd.concat(per_vendor_frames).reset_index(drop=True)
    merged_columns = []
    print('Merging molecules and identifiers into main database...')
    for vendor in vendors:
        print('Merging {} database...'.format(vendor))
        # collapse duplicate smiles; strip stray commas left by the empty paddings
        collapsed = stacked.groupby(['smiles'])[vendor].apply(','.join).str.strip(',')
        if merged_columns:
            merged_columns.append(collapsed.reset_index(drop=True))
        else:
            # keep the smiles column once, from the first vendor
            merged_columns.append(collapsed.reset_index())
    merged_results = pd.concat(merged_columns, axis=1)
    return merged_results
| {"/moldbprep/tests/test_standardize.py": ["/moldbprep/standardize.py"], "/moldbprep.py": ["/moldbprep/io.py", "/moldbprep/standardize.py"], "/moldbprep/standardize.py": ["/moldbprep/io.py"], "/moldbprep/tests/test_io.py": ["/moldbprep/io.py"]} |
45,570 | schallerdavid/moldbprep | refs/heads/master | /moldbprep/tests/test_io.py | from moldbprep.io import count_sdf_mols, sdf_properties, time_to_text, sdf_text
import pytest
import os
from rdkit import Chem
def test_count_sdf_mols():
    # the bundled db1.sdf fixture contains exactly three molecules
    sdf_path = os.path.join(os.getcwd(), "moldbprep", "data", "db1.sdf")
    assert count_sdf_mols(sdf_path) == 3
def test_sdf_properties():
    # db1.sdf records carry exactly the ID and vendor data fields
    sdf_path = os.path.join(os.getcwd(), "moldbprep", "data", "db1.sdf")
    assert sdf_properties(sdf_path) == ['ID', 'vendor']
@pytest.mark.parametrize("seconds, expected", [
    (5, '5 s'),
    (61, '1.0 min'),
    (3601, '1.0 h'),
    (86401, '1.0 d'),
    (1209601, '1.0 weeks'),
    (62899253, 'years'),
])
def test_time_to_text(seconds, expected):
    # each case is (duration in seconds, expected human-readable label)
    assert time_to_text(seconds) == expected
def test_sdf_text():
    # sdf_text must render propane as a V2000 mol block followed by one SDF
    # data field per database key (an empty identifier yields an empty value
    # line) and the record separator '$$$$'. The expected string is compared
    # byte-for-byte, so the literal below must not be reformatted.
    assert sdf_text(Chem.MolFromSmiles('CCC'), {'db1': '1', 'db2': ''}) == \
           '\n' \
           '     RDKit          2D\n' \
           '\n' \
           '  3  2  0  0  0  0  0  0  0  0999 V2000\n' \
           '    0.0000    0.0000    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0\n' \
           '    1.2990    0.7500    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0\n' \
           '    2.5981   -0.0000    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0\n' \
           '  1  2  1  0\n' \
           '  2  3  1  0\n' \
           'M  END\n' \
           '>  <db1>\n' \
           '1\n' \
           '\n' \
           '>  <db2>\n' \
           '\n' \
           '\n' \
           '$$$$'
| {"/moldbprep/tests/test_standardize.py": ["/moldbprep/standardize.py"], "/moldbprep.py": ["/moldbprep/io.py", "/moldbprep/standardize.py"], "/moldbprep/standardize.py": ["/moldbprep/io.py"], "/moldbprep/tests/test_io.py": ["/moldbprep/io.py"]} |
45,571 | torresdaniel11/innova6_backend | refs/heads/master | /gti/migrations/0006_auto_20180330_0409.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-03-30 04:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: add optional ``Category`` foreign keys to
    ``QuestionArticles`` and ``Questions`` (no cascade on delete)."""

    dependencies = [
        ('gti', '0005_auto_20180330_0406'),
    ]

    operations = [
        migrations.AddField(
            model_name='questionarticles',
            name='question_article_category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='gti.Category'),
        ),
        migrations.AddField(
            model_name='questions',
            name='question_category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='gti.Category'),
        ),
    ]
| {"/gti/views.py": ["/gti/models.py"], "/gti/admin.py": ["/gti/models.py"], "/gti/serializers.py": ["/gti/models.py"]} |
45,572 | torresdaniel11/innova6_backend | refs/heads/master | /gti/models.py | from __future__ import unicode_literals
import binascii
import os
from django.db import models
from django.template.defaultfilters import slugify
class Articles(models.Model):
    """A published article with an auto-generated, read-only URL slug."""
    # NOTE(review): 'tittle' is a typo baked into the schema; renaming it
    # would require a migration and serializer/admin updates.
    article_tittle = models.CharField(max_length=200)
    article_content = models.TextField()
    article_slug = models.SlugField(editable=False)
    article_create_date = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    article_update_date = models.DateTimeField(auto_now=True, blank=True, null=True)
    def __unicode__(self):
        return self.article_tittle
    def save(self, *args, **kwargs):
        # Slug is derived from the title only on first save (no primary key
        # yet); later title edits do not refresh it — presumably to keep
        # URLs stable. TODO confirm that is intentional.
        if not self.id:
            self.article_slug = slugify(self.article_tittle)
        super(Articles, self).save(*args, **kwargs)
class ConversationLevels(models.Model):
    """A stage of a chatbot conversation, with an associated display color."""
    conversation_level_name = models.CharField(max_length=200)
    conversation_color = models.CharField(max_length=200)
    def __unicode__(self):
        return self.conversation_level_name
    def save(self, *args, **kwargs):
        # No extra behavior; delegates straight to the default save.
        super(ConversationLevels, self).save(*args, **kwargs)
class Category(models.Model):
    """A category used to classify questions and question articles."""
    category_name = models.CharField(max_length=200)
    def __unicode__(self):
        return self.category_name
    def save(self, *args, **kwargs):
        # No extra behavior; delegates straight to the default save.
        super(Category, self).save(*args, **kwargs)
class Conversations(models.Model):
    """A chatbot conversation session, identified by a random token and
    progressing through ``ConversationLevels``."""
    # 60-hex-char token generated once on first save (see save below)
    conversation_token = models.CharField(max_length=200, editable=False)
    conversation_name = models.CharField(max_length=200, blank=True)
    conversation_email = models.CharField(max_length=200, blank=True)
    conversation_platform = models.CharField(max_length=200, blank=True)
    conversation_faculty = models.CharField(max_length=200, blank=True)
    conversation_create_date = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    conversation_update_date = models.DateTimeField(auto_now=True, blank=True, null=True)
    conversation_conversation_level = models.ForeignKey(ConversationLevels, editable=False, null=True, blank=True,
                                                        on_delete=models.DO_NOTHING)
    def __unicode__(self):
        return self.conversation_token
    def generate_key(self):
        # 30 random bytes hex-encoded -> 60-character token
        return binascii.hexlify(os.urandom(30)).decode()
    def save(self, *args, **kwargs):
        # Assign the token only on first save; it never changes afterwards.
        if not self.id:
            self.conversation_token = self.generate_key()
        super(Conversations, self).save(*args, **kwargs)
class Questions(models.Model):
    """A question the bot may ask at a given conversation level."""
    question_name = models.CharField(max_length=200)
    question_description = models.TextField()
    question_keywords = models.TextField()
    question_conversation_level = models.ForeignKey(ConversationLevels, null=True, blank=True,
                                                    on_delete=models.DO_NOTHING)
    question_category = models.ForeignKey(Category, null=True, blank=True, on_delete=models.DO_NOTHING)
    # when True, the answer is copied onto the Conversation field named by
    # question_field_update (see ConversationView in views.py)
    question_update = models.BooleanField(default=False)
    question_replace = models.BooleanField(default=False)
    # name of the Conversations field to update, e.g. 'conversation_email'
    question_field_update = models.TextField(null=True, blank=True)
    def __unicode__(self):
        return self.question_name
    def save(self, *args, **kwargs):
        # No extra behavior; delegates straight to the default save.
        super(Questions, self).save(*args, **kwargs)
class QuestionArticles(models.Model):
    """An article attached to a question, searchable by keywords."""
    question_article_name = models.CharField(max_length=200)
    question_article_description = models.TextField()
    question_article_keywords = models.TextField()
    question_article_question = models.ForeignKey(Questions, null=True, blank=True,
                                                  on_delete=models.DO_NOTHING)
    question_article_category = models.ForeignKey(Category, null=True, blank=True, on_delete=models.DO_NOTHING)
    def __unicode__(self):
        return self.question_article_name
    def save(self, *args, **kwargs):
        # No extra behavior; delegates straight to the default save.
        super(QuestionArticles, self).save(*args, **kwargs)
class QuestionRecords(models.Model):
    """A user's recorded answer to one question within a conversation."""
    question_record_response = models.TextField()
    question_record_conversation = models.ForeignKey(Conversations,
                                                     on_delete=models.CASCADE)
    question_record_question = models.ForeignKey(Questions,
                                                 on_delete=models.DO_NOTHING)
    # denormalized copy of the conversation token, used for token-based
    # lookups in ConversationView.retrieve_response_suggested_questions_post
    question_record_token = models.CharField(max_length=200, editable=False, blank=True, null=True)
    question_record_create_date = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    def save(self, *args, **kwargs):
        # No extra behavior; delegates straight to the default save.
        super(QuestionRecords, self).save(*args, **kwargs)
| {"/gti/views.py": ["/gti/models.py"], "/gti/admin.py": ["/gti/models.py"], "/gti/serializers.py": ["/gti/models.py"]} |
45,573 | torresdaniel11/innova6_backend | refs/heads/master | /gti/urls.py | from django.conf.urls import url, include
from rest_framework import routers
from gti import views
# Bind the ConversationView custom actions to explicit HTTP methods.
suggested_questions = views.ConversationView.as_view({
    'get': 'suggested_questions_get',
    'post': 'save_response_suggested_questions_post'
})
suggested_questions_records = views.ConversationView.as_view({
    'get': 'retrieve_response_suggested_questions_post',
})
# Standard CRUD routes generated by the DRF router.
router = routers.DefaultRouter()
router.register(r'articles', views.ArticleView)
router.register(r'conversations', views.ConversationView)
router.register(r'questions', views.QuestionView)
router.register(r'categories', views.CategoryView)
router.register(r'question_articles', views.QuestionArticlesView)
router.register(r'conversation_levels', views.ConversationLevelsView)
urlpatterns = [
    url(r'^', include(router.urls)),
    url(r'^conversations/(?P<conversation_token>[^/.]+)/questions/$', suggested_questions, name='suggested-questions'),
    # NOTE(review): the next two patterns share the same route name, which
    # makes reverse('save-response-suggested_questions_post') ambiguous —
    # confirm whether a distinct name was intended for the records route.
    url(r'^question_records/(?P<conversation_token>[^/.]+)/questions/$', suggested_questions,
        name='save-response-suggested_questions_post'),
    url(r'^question_records/(?P<conversation_token>[^/.]+)/$', suggested_questions_records,
        name='save-response-suggested_questions_post')
]
| {"/gti/views.py": ["/gti/models.py"], "/gti/admin.py": ["/gti/models.py"], "/gti/serializers.py": ["/gti/models.py"]} |
45,574 | torresdaniel11/innova6_backend | refs/heads/master | /gti/views.py | import random
from rest_framework import status
from rest_framework import viewsets, permissions
from rest_framework.response import Response
from rest_framework.decorators import detail_route
from serializers import ArticlesSerializers
from serializers import ConversationsSerializers
from serializers import QuestionsSerializers
from serializers import QuestionArticlesSerializers
from serializers import CategorySerializers
from serializers import QuestionRecordsSerializers
from serializers import ConversationLevelsSerializer
from .models import QuestionRecords
from rest_framework.parsers import JSONParser
from gti import models
class ArticleView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for Articles."""
    queryset = models.Articles.objects.all()
    serializer_class = ArticlesSerializers
class ConversationView(viewsets.ModelViewSet):
    """CRUD endpoints for Conversations plus custom actions that drive the
    question/answer flow, looked up by conversation token."""
    queryset = models.Conversations.objects.all()
    serializer_class = ConversationsSerializers
    lookup_field = 'conversation_token'

    @detail_route(methods=['get'])
    def suggested_questions_get(self, request, *args, **kwargs):
        """Return one random question matching the conversation's current
        level, or an empty 200 response when no question is available."""
        conversation = self.get_object()
        questions = models.Questions.objects.filter(
            question_conversation_level=conversation.conversation_conversation_level)
        if questions.count():
            serializer = QuestionsSerializers(questions, many=True)
            # pick a suggested question at random (renamed from 'max',
            # which shadowed the builtin)
            index = random.randint(0, questions.count() - 1)
            return Response(serializer.data[index])
        else:
            return Response(status=status.HTTP_200_OK)

    @detail_route(methods=['post'])
    def save_response_suggested_questions_post(self, request, *args, **kwargs):
        """Store the user's answer as a QuestionRecords row, advance the
        conversation to the next level, and — when the question requests it —
        copy the answer onto the matching Conversations field."""
        conversation = self.get_object()
        data = JSONParser().parse(request)
        serializer = QuestionRecordsSerializers(data=data)
        if serializer.is_valid():
            question = models.Questions.objects.get(
                id=serializer.initial_data.get('question_record_question', None).get('id'))
            # advance to the next conversation level, if one exists
            conversation_conversation_level = list(models.ConversationLevels.objects.filter(
                id=conversation.conversation_conversation_level.id + 1))
            if conversation_conversation_level:
                conversation.conversation_conversation_level = conversation_conversation_level[0]
            if question.question_update:
                # copy the answer onto the conversation field named by the question
                if question.question_field_update == 'conversation_name':
                    conversation.conversation_name = serializer.data['question_record_response']
                elif question.question_field_update == 'conversation_email':
                    conversation.conversation_email = serializer.data['question_record_response']
                elif question.question_field_update == 'conversation_platform':
                    conversation.conversation_platform = serializer.data['question_record_response']
                elif question.question_field_update == 'conversation_faculty':
                    # BUG FIX: previously tested question.conversation_platform,
                    # an attribute Questions does not have (AttributeError)
                    conversation.conversation_faculty = serializer.data['question_record_response']
            conversation.save()
            qr = QuestionRecords(question_record_response=serializer.data['question_record_response'],
                                 question_record_conversation=conversation,
                                 question_record_question=question,
                                 question_record_token=conversation.conversation_token)
            qr.save()
            conversationResponse = models.Conversations.objects.get(
                conversation_token=conversation.conversation_token)
            return Response(ConversationsSerializers(conversationResponse).data)
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

    @detail_route(methods=['get'])
    def retrieve_response_suggested_questions_post(self, request, *args, **kwargs):
        """Return all recorded answers belonging to this conversation token."""
        conversation = self.get_object()
        questions = models.QuestionRecords.objects.filter(
            question_record_token=conversation.conversation_token)
        serializer = QuestionRecordsSerializers(questions, many=True)
        return Response(serializer.data)
class QuestionView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for Questions."""
    queryset = models.Questions.objects.all()
    serializer_class = QuestionsSerializers
class QuestionArticlesView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for QuestionArticles."""
    queryset = models.QuestionArticles.objects.all()
    serializer_class = QuestionArticlesSerializers
class ConversationLevelsView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for ConversationLevels."""
    queryset = models.ConversationLevels.objects.all()
    serializer_class = ConversationLevelsSerializer
class CategoryView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for Category."""
    queryset = models.Category.objects.all()
    serializer_class = CategorySerializers
class QuestionRecordsView(viewsets.ModelViewSet):
    """Standard CRUD endpoints for QuestionRecords.
    NOTE(review): not registered in urls.py — confirm whether it should be."""
    queryset = models.QuestionRecords.objects.all()
    serializer_class = QuestionRecordsSerializers
| {"/gti/views.py": ["/gti/models.py"], "/gti/admin.py": ["/gti/models.py"], "/gti/serializers.py": ["/gti/models.py"]} |
45,575 | torresdaniel11/innova6_backend | refs/heads/master | /gti/admin.py | from django.contrib import admin
from .models import Articles
from .models import ConversationLevels
from .models import Conversations
from .models import Questions
from .models import QuestionArticles
from .models import QuestionRecords
from .models import Category
# Register your models here.
class ArticlesAdmin(admin.ModelAdmin):
    """Admin list view for Articles with creation/update timestamps."""
    list_display = ('article_tittle', 'article_create_date', 'article_update_date')
admin.site.register(Articles, ArticlesAdmin)
class ConversationLevelsAdmin(admin.ModelAdmin):
    """Admin list view for ConversationLevels."""
    list_display = ('conversation_level_name', 'conversation_color')
admin.site.register(ConversationLevels, ConversationLevelsAdmin)
class ConversationsAdmin(admin.ModelAdmin):
    """Admin list view for Conversations, showing the session token,
    contact details and current level."""
    list_display = (
        'conversation_token', 'conversation_name', 'conversation_email', 'conversation_platform',
        'conversation_faculty',
        'conversation_create_date', 'conversation_update_date', 'conversation_conversation_level')
admin.site.register(Conversations, ConversationsAdmin)
class QuestionsAdmin(admin.ModelAdmin):
    """Admin list view for Questions."""
    list_display = (
        'question_name', 'question_description', 'question_keywords', 'question_conversation_level', 'question_update',
        'question_field_update')
admin.site.register(Questions, QuestionsAdmin)
class QuestionArticlesAdmin(admin.ModelAdmin):
    """Admin list view for QuestionArticles."""
    list_display = (
        'question_article_name', 'question_article_description', 'question_article_keywords',
        'question_article_question')
admin.site.register(QuestionArticles, QuestionArticlesAdmin)
class QuestionRecordsAdmin(admin.ModelAdmin):
    """Admin list view for recorded answers."""
    list_display = (
        'question_record_response', 'question_record_conversation', 'question_record_question', 'question_record_token',
        'question_record_create_date')
admin.site.register(QuestionRecords, QuestionRecordsAdmin)
class CategoryAdmin(admin.ModelAdmin):
    """Admin list view for Category."""
    list_display = (
        'id', 'category_name')
admin.site.register(Category, CategoryAdmin)
| {"/gti/views.py": ["/gti/models.py"], "/gti/admin.py": ["/gti/models.py"], "/gti/serializers.py": ["/gti/models.py"]} |
45,576 | torresdaniel11/innova6_backend | refs/heads/master | /gti/migrations/0007_auto_20180330_0622.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-03-30 06:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add the ``question_field_update`` and
    ``question_update`` columns to ``Questions``."""

    dependencies = [
        ('gti', '0006_auto_20180330_0409'),
    ]

    operations = [
        migrations.AddField(
            model_name='questions',
            name='question_field_update',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='questions',
            name='question_update',
            field=models.BooleanField(default=False),
        ),
    ]
| {"/gti/views.py": ["/gti/models.py"], "/gti/admin.py": ["/gti/models.py"], "/gti/serializers.py": ["/gti/models.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.