index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
4,972
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/service/test_tag.py
|
import json
import requests
import pytest
#企业标签库接口测试
from service.tag import Tag
class TestTag():
    """API tests for the corporate tag library (企业标签库) service."""

    def setup_class(self):
        # Runs once for the class: build the Tag client and fetch an access token.
        self.tag = Tag()
        self.tag.get_token()

    def test_tag_list(self):
        """Listing tags should return HTTP 200 with errcode 0."""
        r = self.tag.list()
        assert r.status_code == 200
        assert r.json()['errcode'] == 0

    @pytest.mark.parametrize("group_name,tag_names", [
        ["group_demo_leemandy2", [{'name': 'tag_demo_leemandy2'}]],
        ["group_demo_leemandy2", [{'name': 'tag_demo_leemandy2'}]],
        ["group_demo_leemandy2", [{'name': 'tag_demo_leemandy2'}, {'name': 'tag_demo_leemandy3'}]],
    ])
    def test_tag_add(self, group_name, tag_names):
        """Adding a tag group should create it with exactly the given tags."""
        r = self.tag.add(group_name, tag_names)
        assert r.status_code == 200
        # Find the newly created group by name in the response payload.
        group = [group for group in r.json()['tag_group'] if group['group_name'] == group_name][0]
        # Collect the tag names under that group in the same shape as the input.
        tags = [{'name': tag['name']} for tag in group['tag'] if tag['name']]
        print(group)
        print(tags)
        assert group['group_name'] == group_name
        assert tags == tag_names

    def test_tag_fail(self):
        # TODO: tag names longer than 31 characters should be rejected by the API.
        pass

    # BUG FIX: the original used @pytest.mark.parametrize("", [...]) — an empty
    # argnames string while the test takes (group_id, tag_id), so pytest could
    # never supply the arguments. Name them properly; with no cases listed yet
    # the test is simply not collected.
    @pytest.mark.parametrize("group_id,tag_id", [
        # TODO: delete a single tag
        # TODO: delete several tags
        # TODO: delete a nonexistent tag
        # TODO: delete a whole tag group
    ])
    def test_tag_delete(self, group_id, tag_id):
        # NOTE(review): Tag.delete() is called without arguments — presumably it
        # should receive group_id/tag_id; confirm against service.tag.Tag's API.
        self.tag.delete()
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,973
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/shujuqudong/test_main.py
|
import pytest
import yaml
def _load_cases(path="./test_main.yaml"):
    """Read parametrize cases from a YAML file.

    BUG FIX: the original passed open(...) straight into yaml.safe_load inside
    the decorator, leaking the open file handle; the with-statement closes it.
    """
    with open(path, encoding="utf-8") as f:
        return yaml.safe_load(f)


class TestMain:
    """Data-driven demo: (value1, value2) pairs come from test_main.yaml."""

    @pytest.mark.parametrize("value1,value2", _load_cases())
    def test_main(self, value1, value2):
        print(value1)
        print(value2)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,974
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/python_practice/python_class/bicycle.py
|
class Bicycle:
    """A plain pedal bicycle."""

    def run(self, km):
        """Ride *km* kilometres by pedalling (prints the distance)."""
        print(f"一共骑行{km}公里")
# Subclass of Bicycle: an electric bicycle that rides on battery power first
# and falls back to pedalling when the charge runs out.
class EBicycle(Bicycle):
    def __init__(self, valume):
        # Battery charge (度 / kWh), supplied by the caller.
        self.valume = valume

    # Charging method.
    def fill_charge(self, vol):
        """Charge the battery: new charge = current charge + vol."""
        self.valume = self.valume + vol
        print(f"充了{vol}度电,现在电量为{self.valume}度")

    def run(self, km):
        """Ride *km* kilometres: 1 unit of charge covers 10 km; pedal the rest.

        BUG FIX: the original never decreased self.valume, so the battery was
        effectively infinite across successive run() calls. Riding now drains
        the charge accordingly.
        """
        # Range available on the current charge.
        power_km = self.valume * 10
        if power_km >= km:
            self.valume -= km / 10
            print(f"使用电量骑了{km}")
        else:
            # Battery exhausted: ride what the charge allows, pedal the rest.
            self.valume = 0
            print(f"使用电量骑了{power_km}")
            super().run(km - power_km)
# Quick manual check of the EBicycle behaviour.
demo_bike = EBicycle(10)
demo_bike.fill_charge(150)
demo_bike.run(2)
# bike = Bicycle()
# print(bike.run(10))
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,975
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/qiyeweixin1/test_contact.py
|
import shelve
from time import sleep
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
class TestWX:
    """Add-contact test against an already-authenticated WeCom admin session."""

    def setup(self):
        """Reuse a running browser: the option points at Chrome's remote-debug port."""
        option = Options()
        option.debugger_address = "127.0.0.1:9222"
        # BUG FIX: the original built `option` but never passed it to Chrome(),
        # so a brand-new browser was launched instead of attaching to the
        # debug instance on port 9222.
        self.driver = webdriver.Chrome(options=option)
        self.driver.implicitly_wait(5)
        self.driver.maximize_window()

    def test_add_contact(self):
        # Captured login cookies (originally dumped via self.driver.get_cookies()).
        cookies = [{'domain': '.qq.com', 'httpOnly': False, 'name': 'uin', 'path': '/', 'secure': False,
                    'value': 'o0137787592'},
                   {'domain': '.work.weixin.qq.com', 'expiry': 1641444557.818233, 'httpOnly': False,
                    'name': 'wwrtx.c_gdpr', 'path': '/', 'secure': False, 'value': '0'},
                   {'domain': '.qq.com', 'httpOnly': False, 'name': 'skey', 'path': '/', 'secure': False,
                    'value': '@2J2LvbQDD'},
                   {'domain': '.qq.com', 'expiry': 2147483430.511013, 'httpOnly': False, 'name': 'RK', 'path': '/',
                    'secure': False, 'value': 'JMJcSTgSG7'},
                   {'domain': '.qq.com', 'expiry': 2147483430.511117, 'httpOnly': False, 'name': 'ptcz', 'path': '/',
                    'secure': False, 'value': '0c1a882cad52a4cbc5005d9fc4854a9ca4021eb49f19f142d1c2ae1dce46acc0'},
                   {'domain': '.work.weixin.qq.com', 'expiry': 1641559039, 'httpOnly': False,
                    'name': 'Hm_lvt_9364e629af24cb52acc78b43e8c9f77d', 'path': '/', 'secure': False,
                    'value': '1609908568,1610023039'},
                   {'domain': '.qq.com', 'expiry': 1673097367, 'httpOnly': False, 'name': '_ga', 'path': '/',
                    'secure': False, 'value': 'GA1.2.1128381225.1609908570'},
                   {'domain': '.work.weixin.qq.com', 'expiry': 1612617440.930347, 'httpOnly': False,
                    'name': 'wwrtx.i18n_lan', 'path': '/', 'secure': False, 'value': 'zh'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.ref', 'path': '/',
                    'secure': False, 'value': 'direct'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.refid', 'path': '/',
                    'secure': False, 'value': '03184142'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.ltype', 'path': '/',
                    'secure': False, 'value': '1'},
                   {'domain': 'work.weixin.qq.com', 'expiry': 1610028127.526147, 'httpOnly': True, 'name': 'ww_rtkey',
                    'path': '/', 'secure': False, 'value': '3kc9kf'},
                   {'domain': '.qq.com', 'expiry': 1610111767, 'httpOnly': False, 'name': '_gid', 'path': '/',
                    'secure': False, 'value': 'GA1.2.188972918.1609996592'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wxpay.corpid', 'path': '/',
                    'secure': False, 'value': '1970324943175019'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wxpay.vid', 'path': '/',
                    'secure': False, 'value': '1688854068709900'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wwrtx.vid', 'path': '/',
                    'secure': False, 'value': '1688854068709900'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': False,
                    'name': 'Hm_lpvt_9364e629af24cb52acc78b43e8c9f77d', 'path': '/', 'secure': False,
                    'value': '1610023039'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wwrtx.d2st', 'path': '/',
                    'secure': False, 'value': 'a9866635'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.sid', 'path': '/',
                    'secure': False, 'value': 'HGCZDgTSb3atjZZild4lXkWMDU5axgCRbaNpnyGp0ooQVCaO9vpYSREdAcEFBt4C'},
                   {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.vst', 'path': '/',
                    'secure': False,
                    'value': 'h79gUTyxE73XyGRdmnZlXSZGnpv7sceWYz_7-_proe7OZJZki3yhGvHSscbwzbGBohqp0PDpxcfScFPDYPHj8K9Y7muKY9zi8Xnwo3cBGmsi0pO0gQ0IRCkONVp_nwfkGmdQ9nLqqIkmBr3wCPFg8K9L1R8zJJEMRAE8NJmpqrnJdthDwxAwCh1j5tnFRSJlKc9-579wuzIqe6gFSZCtq1vT9v8wIJD2RlPhtftEzUwDOiuYAjiyhk8G-8OTVlfUZmL4JUiVuwqK3Y4_cDf7zA'}]
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame")
        # Install every captured cookie into the live session.
        for cookie in cookies:
            # Consistency with test_ xixi.py: drop float 'expiry' values, which
            # some drivers reject (the cookies simply become session cookies).
            cookie.pop('expiry', None)
            self.driver.add_cookie(cookie)
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame")
        # Click "add member".
        self.driver.find_element(By.CSS_SELECTOR, '.index_service_cnt_item_title').click()
        # Fill in name / account / phone number (find_element_by_id is deprecated;
        # use the By locator API consistently with the rest of this test).
        self.driver.find_element(By.ID, "username").send_keys("DD")
        self.driver.find_element(By.ID, "memberAdd_acctid").send_keys("dd")
        self.driver.find_element(By.ID, "memberAdd_phone").send_keys("13044444444")
        # Save.
        self.driver.find_element(By.CSS_SELECTOR, ".qui_btn.ww_btn.js_btn_save").click()
        # NOTE(review): asserting a non-empty string literal can never fail.
        # TODO: assert on the actual success toast / member list instead.
        assert "保存成功!"
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,976
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/qiyeweixin1/test_ xixi.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shelve
from time import sleep
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
class TestWX:
    """Cookie-reuse experiments against the WeCom admin console."""

    def setup(self):
        option = Options()
        # NOTE: 9222 must match the port Chrome was started with
        # (--remote-debugging-port=9222).
        option.debugger_address = "127.0.0.1:9222"
        # BUG FIX: the original built `option` but never passed it to Chrome(),
        # so a fresh browser was launched instead of attaching to the debug one.
        self.driver = webdriver.Chrome(options=option)

    def teardown(self):
        self.driver.quit()

    def test_case1(self):
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame#index")
        self.driver.find_element(By.ID, "menu_contacts").click()

    def test_cookie(self):
        # Captured login cookies (originally from self.driver.get_cookies()).
        cookies = [
            {'domain': '.qq.com', 'httpOnly': False, 'name': 'uin', 'path': '/', 'secure': False,
             'value': 'o0137787592'},
            {'domain': '.work.weixin.qq.com', 'expiry': 1641444557.818233, 'httpOnly': False, 'name': 'wwrtx.c_gdpr',
             'path': '/', 'secure': False, 'value': '0'},
            {'domain': '.qq.com', 'httpOnly': False, 'name': 'skey', 'path': '/', 'secure': False,
             'value': '@2J2LvbQDD'},
            {'domain': '.qq.com', 'expiry': 2147483430.511013, 'httpOnly': False, 'name': 'RK', 'path': '/',
             'secure': False, 'value': 'JMJcSTgSG7'},
            {'domain': '.qq.com', 'expiry': 2147483430.511117, 'httpOnly': False, 'name': 'ptcz', 'path': '/',
             'secure': False, 'value': '0c1a882cad52a4cbc5005d9fc4854a9ca4021eb49f19f142d1c2ae1dce46acc0'},
            {'domain': '.qq.com', 'expiry': 1673079672, 'httpOnly': False, 'name': '_ga', 'path': '/', 'secure': False,
             'value': 'GA1.2.1128381225.1609908570'},
            {'domain': '.work.weixin.qq.com', 'expiry': 1641532590, 'httpOnly': False,
             'name': 'Hm_lvt_9364e629af24cb52acc78b43e8c9f77d', 'path': '/', 'secure': False, 'value': '1609908568'},
            {'domain': '.work.weixin.qq.com', 'expiry': 1612600110.970827, 'httpOnly': False, 'name': 'wwrtx.i18n_lan',
             'path': '/', 'secure': False, 'value': 'zh'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.ref', 'path': '/', 'secure': False,
             'value': 'direct'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.refid', 'path': '/', 'secure': False,
             'value': '03184142'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'Hm_lpvt_9364e629af24cb52acc78b43e8c9f77d',
             'path': '/', 'secure': False, 'value': '1609996590'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.ltype', 'path': '/', 'secure': False,
             'value': '1'},
            {'domain': 'work.weixin.qq.com', 'expiry': 1610028127.526147, 'httpOnly': True, 'name': 'ww_rtkey',
             'path': '/', 'secure': False, 'value': '3kc9kf'},
            {'domain': '.qq.com', 'expiry': 1610094072, 'httpOnly': False, 'name': '_gid', 'path': '/', 'secure': False,
             'value': 'GA1.2.188972918.1609996592'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wxpay.corpid', 'path': '/', 'secure': False,
             'value': '1970324943175019'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wxpay.vid', 'path': '/', 'secure': False,
             'value': '1688854068709900'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wwrtx.vid', 'path': '/', 'secure': False,
             'value': '1688854068709900'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': False, 'name': 'wwrtx.d2st', 'path': '/', 'secure': False,
             'value': 'a7660320'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.sid', 'path': '/', 'secure': False,
             'value': 'HGCZDgTSb3atjZZild4lXv7CS-1WJd5q6Skr1MC62vfiPHMZf4S1UGLYNAU301mZ'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.vst', 'path': '/', 'secure': False,
             'value': 'dQYK81YHVde8EyIoPwvIXSvU4yXdODUSwoohTNR7WAX3xkvlu9E0Jmql5J4B_NA-Vylr4BPeULXITZXXxAdTweWloLFu8ovEE5rXMPcfQHfx_q7yNhAdjqrugW0y36Jf14PQEmCTVWq3NjNoI06ge899qe6yDloCS0fKj0COgZ1EFJm--9uW1F0dQKFpAIKSY9bbE41sQv5Y_jkjkFG0MiSEfrrqH33Drf1faVGArQ-QSYL18ctF3OAcwfyVsOr6qhulnU7Os9jQqjhMwY0gpw'}
        ]
        print(cookies)
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame#index")
        for cookie in cookies:
            # Drop float 'expiry' values, which the driver may reject.
            if 'expiry' in cookie.keys():
                cookie.pop('expiry')
            self.driver.add_cookie(cookie)
        # Refresh so the session picks up the injected cookies.
        self.driver.refresh()
        # self.driver.get("https://work.weixin.qq.com/wework_admin/frame#index")

    def test_import_contacts(self):
        # shelve: stdlib object persistence — cookies were saved here earlier.
        db = shelve.open('cookies')
        cookies = db['cookie']
        db.close()
        # Open a fresh page, then inject the persisted cookies.
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame#index")
        for cookie in cookies:
            if 'expiry' in cookie.keys():
                cookie.pop('expiry')
            self.driver.add_cookie(cookie)
        # Refresh to obtain the logged-in state.
        self.driver.refresh()
        # Click "import contacts".
        self.driver.find_element(By.CSS_SELECTOR, ".index_service_cnt_itemWrap:nth-child(2)").click()
        sleep(5)
        cookies = [
            {'domain': '.work.weixin.qq.com', 'expiry': 1612615175.352724, 'httpOnly': False, 'name': 'wwrtx.i18n_lan',
             'path': '/', 'secure': False, 'value': 'zh'},
            {'domain': '.work.weixin.qq.com', 'expiry': 1641559174.095903, 'httpOnly': False, 'name': 'wwrtx.c_gdpr',
             'path': '/', 'secure': False, 'value': '0'},
            {'domain': 'work.weixin.qq.com', 'expiry': 1610054710.095798, 'httpOnly': True, 'name': 'ww_rtkey',
             'path': '/', 'secure': False, 'value': '2afftht'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.ref', 'path': '/', 'secure': False,
             'value': 'direct'},
            {'domain': '.work.weixin.qq.com', 'httpOnly': True, 'name': 'wwrtx.refid', 'path': '/', 'secure': False,
             'value': '02601473'}]
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame#contacts")
        # Install each cookie of the second batch into the session.
        for cookie in cookies:
            # Consistency with the loops above: strip float 'expiry' values
            # before add_cookie (the original skipped this here).
            cookie.pop('expiry', None)
            self.driver.add_cookie(cookie)
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame#contacts")
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,977
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/python_practice/game/game_round_fun.py
|
#定义敌人的血量 敌人的攻击力
import random
def fight(enemy_hp, enemy_power):
    """Run a turn-based fight until one side's hp drops to 0 or below.

    My stats are fixed (hp 1000, power 200); the enemy's come from the caller.
    Both sides hit simultaneously each round; if both fall in the same round
    the "I lose" branch is checked first, so a draw counts as a loss.
    """
    my_hp, my_power = 1000, 200
    print(f"敌人的血量为{enemy_hp}, 敌人的攻击力为{enemy_power}")
    while True:
        # Exchange blows.
        my_hp -= enemy_power
        enemy_hp -= my_power
        if my_hp <= 0:
            print(f"我的剩余血量为{my_hp}")
            print(f"敌人的剩余血量为{enemy_hp}")
            print("我输了")
            break
        if enemy_hp <= 0:
            print(f"我的剩余血量为{my_hp}")
            print(f"敌人的剩余血量为{enemy_hp}")
            print("我赢了")
            break
if __name__ == "__main__":
    # Randomize the enemy: hp drawn from [990, 1009], power from [190, 210].
    candidate_hps = list(range(990, 1010))
    enemy_hp = random.choice(candidate_hps)
    enemy_power = random.randint(190, 210)
    fight(enemy_hp, enemy_power)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,978
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/test_selenium/test_TouchAction.py
|
from selenium import webdriver
from selenium.webdriver import TouchActions
from time import sleep
class TestTouchAction:
    def setup(self):
        """Disable the W3C dialect so the legacy TouchActions API is usable."""
        opts = webdriver.ChromeOptions()
        opts.add_experimental_option('w3c', False)
        self.driver = webdriver.Chrome(options=opts)
        self.driver.implicitly_wait(5)
        self.driver.maximize_window()

    def teardown(self):
        self.driver.quit()

    def test_touchaction_scrollbutton(self):
        """Search on Baidu via touch tap, then scroll far down the results."""
        self.driver.get("https://www.baidu.com/")
        # Text input box.
        search_input = self.driver.find_element_by_id("kw")
        # Search (submit) button.
        search_button = self.driver.find_element_by_id("su")
        search_input.send_keys("selenium测试")
        touch = TouchActions(self.driver)
        # Tap the search button.
        touch.tap(search_button)
        touch.perform()
        # Scroll from the input element: x offset 0, a huge y offset to reach
        # the bottom of the page.
        touch.scroll_from_element(search_input, 0, 10000).perform()
        # sleep(3)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,979
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/qiyeweixin1/test_cookiesdemo.py
|
from time import sleep
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class TestWX:
    def setup(self):
        """Attach to an already-running Chrome via its remote-debugging port."""
        opts = Options()
        opts.debugger_address = "127.0.0.1:9222"
        self.driver = webdriver.Chrome(options=opts)

    def test_get_cookie(self):
        """Open the admin frame and dump the current session cookies."""
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame")
        session_cookies = self.driver.get_cookies()
        print(session_cookies)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,980
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/201024homework/TongLao.py
|
"""
定义一个天山童姥类 ,类名为TongLao,属性有血量,武力值(通过传入的参数得到)。TongLao类里面有2个方法,
see_people方法,需要传入一个name参数,如果传入”WYZ”(无崖子),则打印,“师弟!!!!”,
如果传入“李秋水”,打印“师弟是我的!”,如果传入“丁春秋”,打印“叛徒!我杀了你”
fight_zms方法(天山折梅手),调用天山折梅手方法会将自己的武力值提升10倍,血量缩减2倍。需要传入敌人的hp,power,
进行一回合制对打,打完之后,比较双方血量。血多的一方获胜。
"""
class TongLao:
    """天山童姥: hp and power are supplied by the caller (see module docstring)."""

    def __init__(self, hp, power):
        self.hp = hp
        self.power = power

    def see_people(self, name):
        """React to meeting *name*: prints a line for each known character."""
        self.name = name
        if name == 'WYZ':
            print("师弟!!!!")
        elif name == '李秋水':
            print("师弟是我的!")
        elif name == '丁春秋':
            print("叛徒!我杀了你")

    def fight_zms(self, enemy_hp, enemy_power):
        """One round of 天山折梅手.

        Halves own hp, multiplies own power by 10, trades one blow with the
        enemy, prints both remaining hp values, then the side with more hp wins
        (ties count as a win for me).
        """
        self.hp /= 2
        self.power *= 10
        self.hp -= enemy_power
        enemy_hp -= self.power
        print(self.hp)
        print(enemy_hp)
        print("我输了" if self.hp < enemy_hp else "我赢了")
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,981
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/test_pytest/tests/test_fixture_demo.py
|
import pytest
from test_pytest.core.calc import Calc
# Module-scoped fixture: one shared Calc instance for every test in this module
# (the print shows the fixture body runs only once per module).
@pytest.fixture(scope='module')
def calc_init():
    print("calc_init")
    return Calc()
def test_calc_demo(calc_init):
assert calc_init.mul(1,2) == 2
def test_calc_demo2(calc_init):
assert calc_init.mul(1,3) == 3
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,982
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/podemo1/page/addmemberpage.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from selenium.webdriver.common.by import By
from podemo1.page.base_page import BasePage
class AddMemberPage(BasePage):
'''添加联系人操作'''
def add_member(self, name, account, phonenum):
self.find(By.ID, "username").send_keys(name)
self.find(By.ID, "memberAdd_acctid").send_keys(account)
self.find(By.ID, "memberAdd_phone").send_keys(phonenum)
self.find(By.CSS_SELECTOR, ".js_btn_save").click()
return True
'''判断联系人是否添加成功'''
def get_member(self, value):
'''调用显示等待方法,查看checkbok是否可被点击,可被点击说明页面加载完成了'''
locator = (By.CSS_SELECTOR, ".ww_checkbox")
self.wait_for_click(locator)
elements = self.finds(By.CSS_SELECTOR, ".member_colRight_memberTable_td:nth-child(2)")
'''列表推导式,在element中获取title属性'''
titles = [element.get_attribute("title") for element in elements]
return titles
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,983
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/1117zhibo1framework/test_demo.py
|
'''
web自动化搜索
'''
import pytest
import yaml
from selenium import webdriver
from selenium.webdriver.common.by import By
def load_data(path):
    """Load test data from the YAML file at *path*.

    BUG FIX: the original one-lined the with-statement as `... as f:w`, which
    evaluated the undefined name `w` as the with-body and then called
    yaml.load on the already-closed handle outside the block (and without a
    Loader argument, which modern PyYAML rejects). safe_load also avoids
    executing arbitrary YAML tags from the data file.
    """
    with open(path, encoding='utf-8') as f:
        return yaml.safe_load(f)
def test_load_data():
    # Placeholder: data loading is exercised via the parametrize in TestDemo.
    pass
class TestDemo:
    """Web search flow on ceshiren.com, parametrized from test_data.yaml."""

    @pytest.mark.parametrize("keyword", load_data("test_data.yaml"))
    def test_search(self, keyword):
        driver = webdriver.Chrome()
        # BUG FIX: the browser was never closed (resource leak); ensure quit().
        try:
            driver.get("https://ceshiren.com")
            driver.find_element(By.ID, 'search-button').click()
            driver.find_element(By.ID, 'search-term').send_keys(keyword)
            # BUG FIX: the original continued with `if 'get' in step: ...`
            # using the undefined names `step` and `find`, which always raised
            # NameError at runtime. That half-written step-driver fragment has
            # been removed; reintroduce it once `step` data is actually loaded.
        finally:
            driver.quit()
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,984
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/test_selenium/test_ActionChains.py
|
import pytest
from selenium import webdriver
from selenium.webdriver import ActionChains
from time import sleep
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class TestActionChains():
    def setup(self):
        self.driver = webdriver.Chrome()
        self.driver.implicitly_wait(5)
        self.driver.maximize_window()

    def teardown(self):
        self.driver.quit()

    @pytest.mark.skip
    def test_click(self):
        """Queue single, right and double clicks and run them in one perform()."""
        self.driver.get("http://sahitest.com/demo/clicks.htm")
        # The three click targets.
        single_btn = self.driver.find_element_by_xpath("//input[@value='click me']")
        double_btn = self.driver.find_element_by_xpath("//input[@value='dbl click me']")
        right_btn = self.driver.find_element_by_xpath("//input[@value='right click me']")
        chain = ActionChains(self.driver)
        # Queue the three interactions.
        chain.click(single_btn)
        chain.context_click(right_btn)
        chain.double_click(double_btn)
        sleep(3)
        # Execute the queued actions.
        chain.perform()
        sleep(3)

    @pytest.mark.skip
    def test_movetoelement(self):
        """Hover the cursor over Baidu's settings link."""
        self.driver.get("https://www.baidu.com/")
        settings_link = self.driver.find_element_by_link_text("设置")
        hover = ActionChains(self.driver)
        hover.move_to_element(settings_link)
        hover.perform()
        sleep(3)

    @pytest.mark.skip
    def test_dragdrop(self):
        """Drag the source element onto the drop target."""
        self.driver.get("http://sahitest.com/demo/dragDropMooTools.htm")
        source = self.driver.find_element_by_id("dragger")
        target = self.driver.find_element_by_xpath("/html/body/div[2]")
        chain = ActionChains(self.driver)
        # Alternative drag styles, kept for reference:
        # chain.drag_and_drop(source, target).perform()
        # chain.click_and_hold(source).release(target).perform()
        # Hold the source, move onto the target, then release.
        chain.click_and_hold(source).move_to_element(target).release().perform()
        sleep(3)

    def test_keys(self):
        """Type text and control keys into a focused input."""
        self.driver.get("http://sahitest.com/demo/label.htm")
        input_box = self.driver.find_element_by_xpath("/html/body/label[1]/input")
        input_box.click()
        chain = ActionChains(self.driver)
        chain.send_keys("username").pause(1)
        chain.send_keys(Keys.SPACE).pause(1)
        chain.send_keys("tom").pause(1)
        # Delete the last character, then run the whole queue.
        chain.send_keys(Keys.BACK_SPACE).perform()
        sleep(3)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,985
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/page/main.py
|
'''
对企业微信首页建模
主页功能:登陆 注册
'''
from selenium.webdriver.common.by import By
from page.Login import Login
from page.Register import Register
from page.base_page import BasePage
class Main(BasePage):
    """Page object for the WeCom home page: entry points to login and register."""

    # Subclasses of BasePage declare their URL here; this is the site root.
    _base_url = "https://work.weixin.qq.com/"

    def goto_register(self):
        """Click the register CTA and return the Register page object."""
        self.find(By.CSS_SELECTOR, ".index_head_info_pCDownloadBtn").click()
        return Register(self._driver)

    def goto_login(self):
        """Click the login button and return the Login page object."""
        self.find(By.CSS_SELECTOR, ".index_top_operation_loginBtn").click()
        return Login(self._driver)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,986
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/testcase/test_register.py
|
'''
注册测试用例
'''
from page.main import Main
class TestRegister:
    """Registration flow test."""

    def setup(self):
        # A fresh Main page object before every test method.
        self.main = Main()

    def test_register(self):
        """Chained page-object calls: home -> login page -> register page -> register()."""
        # assert self.main.goto_register().register()
        self.main.goto_login().goto_register().register()
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,987
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/test_selenium/test_frame.py
|
import os
from time import sleep
from selenium import webdriver
from test_pytest.base import Base
class TestWindow():
    def setup(self):
        """Pick the driver from the `browser` env var (firefox / headless / default Chrome)."""
        browser_name = os.getenv("browser")
        if browser_name == 'firefox':
            driver = webdriver.Firefox()
        elif browser_name == 'headless':
            driver = webdriver.PhantomJS()
        else:
            driver = webdriver.Chrome()
        self.driver = driver
        self.driver.implicitly_wait(5)
        self.driver.maximize_window()

    def teardown(self):
        self.driver.quit()

    def test_frame(self):
        """Read text inside an iframe, then switch back to the parent document."""
        self.driver.get("https://www.runoob.com/try/try.php?filename=jqueryui-api-droppable")
        # Enter the iframe that contains the draggable element (by its id).
        self.driver.switch_to.frame("iframeResult")
        print(self.driver.find_element_by_id("draggable").text)
        # Back to the parent frame to reach the "run" button.
        self.driver.switch_to.parent_frame()
        # Equivalent alternative:
        # self.driver.switch_to.default_content()
        print(self.driver.find_element_by_id("submitBTN").text)
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
4,988
|
oooleemandy/hogwarts_lg4
|
refs/heads/master
|
/python1029_alluredemo/result/test_feature_story.py
|
import pytest
import allure
@allure.feature("登陆模块")
class TestLogin():
    """Allure demo: one feature with several stories and stepped output."""

    @allure.story("登陆成功")
    def test_login_success(self):
        print("登陆用例 登陆成功")

    @allure.story("登陆失败")
    def test_login_success_a(self):
        print("登陆用例 登陆成功a")

    @allure.story("用户名缺失")
    def test_login_success_b(self):
        print("用户名缺失")

    @allure.story("密码缺失")
    def test_login_failture(self):
        """Stepped case; NOTE(review): the final assert compares the string '1'
        with the int 1 and therefore always fails — presumably an intentional
        failing example for the allure report; confirm before "fixing"."""
        with allure.step("点击用户名"):
            print("请输入用户名")
        with allure.step("点击密码"):
            print("请输入密码")
        print("点击登陆")
        with allure.step("点击登陆之后登陆失败"):
            assert '1' == 1
            print("登陆失败")

    @allure.story("登陆失败")
    def test_login_failure(self):
        print("登陆用例 登陆失败")
if __name__ == '__main__':
    # Allow running this file directly instead of via the pytest CLI.
    pytest.main()
|
{"/podemo1/page/index_page.py": ["/podemo1/page/addmemberpage.py"], "/page/Register.py": ["/page/base_page.py"], "/page/main.py": ["/page/Register.py", "/page/base_page.py"], "/testcase/test_register.py": ["/page/main.py"]}
|
5,041
|
A-Alena/music_chart
|
refs/heads/master
|
/core/core_service.py
|
import requests
from bs4 import BeautifulSoup
from .models import Musician
URL = 'https://spotifycharts.com/regional'
def remove_prefix(text, prefix):
    """Return *text* with a leading *prefix* stripped, if present."""
    return text[len(prefix):] if text.startswith(prefix) else text
def parse_all_chart():
    """Scrape the Spotify regional chart.

    :return: list of dicts {'pos': int, 'song': str, 'auth': str}.
    :raises requests.HTTPError: if the chart page returns an error status.
    """
    results = []
    response = requests.get(URL)
    # Fail fast on HTTP errors instead of crashing later on soup.find(...) -> None.
    response.raise_for_status()
    # Explicit parser: the original relied on bs4's auto-detection, which varies
    # with the libraries installed and emits a warning.
    soup = BeautifulSoup(response.text, 'html.parser')
    chart_table = soup.find('table', {'class': 'chart-table'}).find('tbody')
    table_rows = chart_table.find_all('tr')
    for tr in table_rows:
        position = int(tr.find('td', {'class': 'chart-table-position'}).text)
        track = tr.find('td', {'class': 'chart-table-track'})
        song = track.find('strong').text
        # Author text is rendered as "by <name>"; strip the prefix.
        author = remove_prefix(track.find('span').text, 'by ')
        results.append({'pos': position, 'song': song, 'auth': author})
    return results
def update_record(auth, song, pos):
    """Create or update the DB row for (auth, song) with the new chart position."""
    Musician.objects.update_or_create(
        auth_name=auth,
        song_name=song,
        defaults={'chart_position': pos},
    )
def get_all_chart():
    """Serialize every chart record in the DB.

    :return: list of {'auth', 'song', 'pos'} dicts.
    """
    return [
        {
            'auth': record.auth_name,
            'song': record.song_name,
            'pos': record.chart_position,
        }
        for record in Musician.objects.all()
    ]
def filter_chart(request: dict):
    """Serialize the chart records of a single artist.

    :param request: mapping with an 'auth_name' key (e.g. request.GET).
    :return: list of {'auth', 'song', 'pos'} dicts for that artist.
    """
    target_author = request.get('auth_name', '')
    return [
        {
            'auth': record.auth_name,
            'song': record.song_name,
            'pos': record.chart_position,
        }
        for record in Musician.objects.filter(auth_name=target_author)
    ]
|
{"/core/core_service.py": ["/core/models.py"], "/core/views.py": ["/core/core_service.py"]}
|
5,042
|
A-Alena/music_chart
|
refs/heads/master
|
/core/models.py
|
from django.db import models
class Musician(models.Model):
    # One chart entry: artist name, song title and current chart position.
    auth_name = models.TextField()
    song_name = models.TextField()
    chart_position = models.IntegerField()
    class Meta:
        # Map the model onto the explicit 'musicians' table (matches the migration).
        db_table = 'musicians'
|
{"/core/core_service.py": ["/core/models.py"], "/core/views.py": ["/core/core_service.py"]}
|
5,043
|
A-Alena/music_chart
|
refs/heads/master
|
/core/views.py
|
from rest_framework.response import Response
from rest_framework.decorators import api_view, parser_classes
from rest_framework import status
from .core_service import parse_all_chart, update_record, get_all_chart, filter_chart
@api_view(['GET'])
def update_chart(request):
    """Re-scrape the chart, upsert every record, and echo the parsed data."""
    parsed = parse_all_chart()
    for item in parsed:
        update_record(item['auth'], item['song'], item['pos'])
    return Response(parsed, status=status.HTTP_200_OK)
@api_view(['GET'])
def get_chart(request):
    """Return chart records; filter by auth_name when query params are present."""
    if request.GET:
        response = filter_chart(request.GET)
    else:
        response = get_all_chart()
    # Consistency: the filtered branch previously relied on the implicit
    # default 200; both branches now return the explicit status (same behavior).
    return Response(response, status=status.HTTP_200_OK)
|
{"/core/core_service.py": ["/core/models.py"], "/core/views.py": ["/core/core_service.py"]}
|
5,044
|
A-Alena/music_chart
|
refs/heads/master
|
/core/migrations/0001_initial.py
|
# Generated by Django 3.1.4 on 2020-12-20 22:59
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for the core app: creates the 'musicians' table,
    # mirroring core.models.Musician. Auto-generated by Django — avoid
    # hand-editing beyond comments.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Musician',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('auth_name', models.TextField()),
                ('song_name', models.TextField()),
                ('chart_position', models.IntegerField()),
            ],
            options={
                # Explicit table name, matching the model's Meta.db_table.
                'db_table': 'musicians',
            },
        ),
    ]
|
{"/core/core_service.py": ["/core/models.py"], "/core/views.py": ["/core/core_service.py"]}
|
5,051
|
andrely/twitter-sentiment
|
refs/heads/master
|
/models/__init__.py
|
'''
Created on 28. sep. 2014
@author: JohnArne
'''
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,052
|
andrely/twitter-sentiment
|
refs/heads/master
|
/plotting.py
|
'''
Handles plotting of different visualizations of data.
@author: JohnArne
'''
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import random
import numpy as np
import utils
import random
import pickle
from Tkconstants import OFF
def plot_temporal_sentiment(data, filename="temporal"):
    """
    Plots the temporal sentiment using the given data.

    data: dict mapping a series label to [x_values, y_values]; y values are
    expected in [0, 1] (plot is clipped to that range).
    filename: basename of the PDF written to figs/<filename>.pdf.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    # You typically want your plot to be ~1.33x wider than tall. This plot is a rare
    # exception because of the number of lines being plotted on it.
    # Common sizes: (10, 7.5) and (12, 9)
    f = plt.figure(figsize=(8, 6))
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    ax.spines["top"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    # Ticks on the right and top of the plot are generally unnecessary chartjunk.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    # Limit the range of the plot to only where the data is.
    # Avoid unnecessary whitespace.
    plt.ylim(0, 1)
    plt.xlim(0, 101)
    # Make sure your axis ticks are large enough to be easily read.
    # You don't want your viewers squinting to read your plot.
    # y_ticks = []
    # plt.yticks(range(0, 1, 10), [str(x) for x in range(0, 91, 10)], fontsize=14)
    plt.xticks(fontsize=10)
    plt.yticks(fontsize=10)
    # Provide tick lines across the plot to help your viewers trace along
    # the axis ticks. Make sure that the lines are light and small so they
    # don't obscure the primary data lines.
    for y in [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9]:
        plt.plot(range(1,105), [y] * len(range(1,105)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    # Now that the plot is prepared, it's time to actually plot the data!
    # Note that I plotted the labels in order of the highest % in the final year.
    labels = data.keys()
    y_poses = []
    offsets = [0.02, 0.04,0.06,0.08,0.1,0.12, 0.14,0.16,0.18,0.2]
    for rank, column in enumerate(labels):
        # Plot each line separately with its own color, using the Tableau 20
        # color set in order.
        plt.plot(data[column][0], data[column][1], lw=1.0, color=tableau20[rank])
        # Add a text label to the right end of every line. Most of the code below
        # is adding specific offsets y position because some labels overlapped.
        y_pos = data[column][1][-1]
        # new_pos = None
        # offset_counter = 0
        # Nudge the label up by 0.05 when it would land within 0.01 of an
        # already-placed label, so line labels don't overlap.
        for poses in y_poses:
            if y_pos < poses+0.01 and y_pos>poses:
                y_pos = y_pos+0.05
                # offset_counter += 1
                break
            if y_pos > poses-0.01 and y_pos<poses:
                y_pos = y_pos+0.05
                # offset_counter += 1
                break
        else:
            y_pos = y_pos
        y_poses.append(y_pos)
        # Again, make sure that all labels are large enough to be easily read
        # by the viewer.
        plt.text(101.5, y_pos, column, fontsize=8, color=tableau20[rank])
    plt.savefig("figs/"+filename+".pdf", bbox_inches="tight");
    print "Figure done."
def plot_performance_histogram(data, filename):
    """
    Plots the performance of different algorithms.

    data: dict mapping an algorithm label to [precision, recall, f1, accuracy];
    one group of four bars is drawn per label.  Saved to figs/<filename>.pdf.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure()
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    labels = data.keys()
    print labels
    precisions = [data[key][0] for key in labels]
    recalls = [data[key][1] for key in labels]
    f1s = [data[key][2] for key in labels]
    accuracies = [data[key][3] for key in labels]
    #Create bars: four metrics side by side per label, group pitch of 2 units.
    ind = (np.arange(len(labels))*2)+0.25
    width = 0.35
    ax.bar(ind, precisions, width, color=tableau20[0], edgecolor="none")
    ax.bar(ind+width, recalls, width, color=tableau20[1], edgecolor="none")
    ax.bar(ind+width*2, f1s, width, color=tableau20[2], edgecolor="none")
    ax.bar(ind+width*3, accuracies, width, color=tableau20[3], edgecolor="none")
    #Create top bar labels (value printed above each bar)
    for p, i in zip(precisions, ind):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[0])
    for p, i in zip(recalls, ind+width):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[1])
    for p, i in zip(f1s, ind+width*2):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[2])
    for p, i in zip(accuracies, ind+width*3):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[3])
    ax.spines["top"].set_visible(False)
    # ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+width*2)
    ax.set_xticklabels(labels)
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    # Ticks on the right and top of the plot are generally unnecessary chartjunk.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    for y in [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8]:
        plt.plot(range(0,7), [y] * len(range(0,7)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on",labelcolor=tableau20[14])
    plt.savefig('figs/'+filename+".pdf", bbox_inches="tight");
def plot_combined_histogram(data, filename):
    """
    Plots the performance of combined classifier pipelines.

    Same layout as plot_performance_histogram, but labels are expected in
    "<subjectivity>+<polarity>" form and are split over two x-tick lines.
    data: dict label -> [precision, recall, f1, accuracy].
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure()
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    labels = data.keys()
    print labels
    precisions = [data[key][0] for key in labels]
    recalls = [data[key][1] for key in labels]
    f1s = [data[key][2] for key in labels]
    accuracies = [data[key][3] for key in labels]
    #Create bars: wider bars/pitch than plot_performance_histogram.
    ind = (np.arange(len(labels))*5.5)+0.55
    width = 1.1
    ax.bar(ind, precisions, width, color=tableau20[0], edgecolor="none")
    ax.bar(ind+width, recalls, width, color=tableau20[1], edgecolor="none")
    ax.bar(ind+(width)*2, f1s, width, color=tableau20[2], edgecolor="none")
    ax.bar(ind+(width)*3, accuracies, width, color=tableau20[3], edgecolor="none")
    #Create top bar labels
    for p, i in zip(precisions, ind):
        plt.text(i, p+0.01, "%0.2f" % p, fontsize=8, color=tableau20[0])
    for p, i in zip(recalls, ind+width):
        plt.text(i, p+0.01, "%0.2f" % p, fontsize=8, color=tableau20[1])
    for p, i in zip(f1s, ind+width*2):
        plt.text(i, p+0.01, "%0.2f" % p, fontsize=8, color=tableau20[2])
    for p, i in zip(accuracies, ind+width*3):
        plt.text(i, p+0.01, "%0.2f" % p, fontsize=8, color=tableau20[3])
    ax.spines["top"].set_visible(False)
    # ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+width*2)
    # Break "A+B" labels onto two lines for readability.
    ax.set_xticklabels([l.split('+')[0]+"\n"+l.split('+')[1] for l in labels])
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    for y in [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8]:
        plt.plot(range(0,29), [y] * len(range(0,29)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on",labelcolor=tableau20[14])
    plt.savefig('figs/'+filename+".pdf", bbox_inches="tight");
    print "Done"
def plot_pos_analysis(data, filename):
    """
    Plots per-word-class analysis scores as a value-sorted bar chart.

    data: dict mapping a word-class label to a single numeric value (may be
    negative).  Bars are sorted ascending and annotated with their label.
    (Original docstring was a copy-paste of the histogram helpers.)
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure()
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    labels = data.keys()
    print labels
    values = [data[key] for key in labels]
    print values
    print labels
    # Sort by value while keeping labels aligned: zip, sort, then unzip.
    sorted_values_and_labels = [list(x) for x in zip(*sorted(zip(values,labels)))]
    print "Sorted:", sorted_values_and_labels
    values = sorted_values_and_labels[0]
    labels = sorted_values_and_labels[1]
    #Create bars
    ind = (np.arange(len(labels)))
    width = 0.7
    for v,i in zip(values,ind):
        ax.bar(i, v, width, color=tableau20[14], edgecolor="none")
    #Create top bar labels; place below the bar when the value is negative.
    for p, i, l in zip(values, ind, labels):
        plt.text(i, p+0.01 if p>0 else p-0.03, l, fontsize=8, color=tableau20[14])
    ax.spines["top"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+width*2)
    # Blank tick labels: bars are identified by the in-plot text labels instead.
    ax.set_xticklabels([" " for _ in labels])
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    for y in [-0.4,-0.3,-0.2,-0.1,0.0,0.1,0.2,0.3,0.4]:
        plt.plot(range(0,18), [y] * len(range(0,18)), "--", lw=0.1, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on",labelcolor=tableau20[14])
    plt.savefig('figs/'+filename+".pdf", bbox_inches="tight");
def average_wordclasses(data, filename):
    """
    Plots average word-class statistics as grouped bars.

    Same layout as plot_performance_histogram but with a y-grid at integer
    values 1..7 (counts rather than [0,1] scores).
    data: dict label -> list of four values.
    NOTE(review): the four values are indexed like precision/recall/f1/accuracy
    below only because this was cloned from the histogram helper -- confirm
    what they actually hold for word-class data.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure()
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    labels = data.keys()
    print labels
    precisions = [data[key][0] for key in labels]
    recalls = [data[key][1] for key in labels]
    f1s = [data[key][2] for key in labels]
    accuracies = [data[key][3] for key in labels]
    #Create bars
    ind = (np.arange(len(labels))*2)+0.25
    width = 0.35
    ax.bar(ind, precisions, width, color=tableau20[0], edgecolor="none")
    ax.bar(ind+width, recalls, width, color=tableau20[1], edgecolor="none")
    ax.bar(ind+width*2, f1s, width, color=tableau20[2], edgecolor="none")
    ax.bar(ind+width*3, accuracies, width, color=tableau20[3], edgecolor="none")
    #Create top bar labels
    for p, i in zip(precisions, ind):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[0])
    for p, i in zip(recalls, ind+width):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[1])
    for p, i in zip(f1s, ind+width*2):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[2])
    for p, i in zip(accuracies, ind+width*3):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[3])
    ax.spines["top"].set_visible(False)
    # ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+width*2)
    ax.set_xticklabels(labels)
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    for y in [1,2,3,4,5,6,7]:
        plt.plot(range(0,7), [y] * len(range(0,7)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on",labelcolor=tableau20[14])
    plt.savefig('figs/'+filename+".pdf", bbox_inches="tight");
def detailed_average_wordclasses(data, filename):
    """
    Plots a detailed grouped-bar breakdown of word-class statistics.

    data: dict label -> list of 18 values; the data is transposed so each of
    the 18 series becomes one thin bar within every label's group, colored by
    the hand-picked colorlist below.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure()
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    labels = data.keys()
    print labels
    # Transpose: datalists[i] collects the i-th value of every label.
    datalists = []
    for i in xrange(len(data[labels[0]])):
        datalists.append([data[key][i] for key in labels])
    #Create bars
    ind = (np.arange(len(labels))*2.2)+0.2
    width = 0.1
    offset = np.arange(0.01, 1, 0.01)
    # Colors grouped so related word classes share a hue.
    colorlist = [tableau20[0],tableau20[0],tableau20[0],tableau20[1],tableau20[4],tableau20[5],tableau20[6],tableau20[7],tableau20[2],tableau20[2],tableau20[2],tableau20[2],tableau20[2],
                 tableau20[13],tableau20[14],tableau20[15],tableau20[16],tableau20[3]]
    for i in xrange(len(datalists)):
        bar = ax.bar(ind+width*i, datalists[i], width, color=colorlist[i], edgecolor="none")
    #Create top bar labels (disabled -- too crowded for 18 bars per group)
    # for p, i in zip(precisions, ind):
    #     plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[0])
    # for p, i in zip(recalls, ind+width):
    #     plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[1])
    # for p, i in zip(f1s, ind+width*2):
    #     plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[2])
    # for p, i in zip(accuracies, ind+width*3):
    #     plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[3])
    ax.spines["top"].set_visible(False)
    # ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+width*9)
    ax.set_xticklabels(labels)
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    for y in [0.2,0.4,0.6,0.8,1.0,1.2,1.4,1.6,1.8,2.0,2.2,2.4,2.6,2.8,3.0]:
        plt.plot(range(0,8), [y] * len(range(0,8)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on",labelcolor=tableau20[14])
    plt.savefig('figs/'+filename+".pdf", bbox_inches="tight");
def plot_entity_histogram(data, filename):
    """
    Plots per-entity classification performance as grouped bars.

    Identical layout to plot_performance_histogram (precision, recall, f1,
    accuracy per label); kept separate so entity figures can diverge later.
    data: dict label -> [precision, recall, f1, accuracy].
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure()
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    labels = data.keys()
    print labels
    precisions = [data[key][0] for key in labels]
    recalls = [data[key][1] for key in labels]
    f1s = [data[key][2] for key in labels]
    accuracies = [data[key][3] for key in labels]
    #Create bars
    ind = (np.arange(len(labels))*2)+0.25
    width = 0.35
    ax.bar(ind, precisions, width, color=tableau20[0], edgecolor="none")
    ax.bar(ind+width, recalls, width, color=tableau20[1], edgecolor="none")
    ax.bar(ind+width*2, f1s, width, color=tableau20[2], edgecolor="none")
    ax.bar(ind+width*3, accuracies, width, color=tableau20[3], edgecolor="none")
    #Create top bar labels
    for p, i in zip(precisions, ind):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[0])
    for p, i in zip(recalls, ind+width):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[1])
    for p, i in zip(f1s, ind+width*2):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[2])
    for p, i in zip(accuracies, ind+width*3):
        plt.text(i+0.03, p+0.01, "%0.2f" % p, fontsize=10, color=tableau20[3])
    ax.spines["top"].set_visible(False)
    # ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+width*2)
    ax.set_xticklabels(labels)
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    for y in [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8]:
        plt.plot(range(0,7), [y] * len(range(0,7)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on",labelcolor=tableau20[14])
    plt.savefig('figs/'+filename+".pdf", bbox_inches="tight");
def plot_dataset_stats(data):
    """
    Plots histograms of the dataset statistics using the given data.

    NOTE(review): unimplemented stub -- the body is only this docstring, so
    calling it is a no-op that returns None.
    """
def load_incremental_data():
    """
    Loads pickled incremental-training results and splits them per classifier.

    Reads 'incremental_f1100' and 'incremental_acc100' from the working
    directory, pairs each score list with x-values 5,10,...,100 (percent of
    training data), and buckets the series by classifier name prefix
    ("SVM", "NB", "MaxEnt").

    Returns: (f1svm, f1nb, f1me, accsvm, accnb, accme) dicts of
    label -> [x_values, y_values].
    """
    f1_data= pickle.load(open('incremental_f1100',"rb"))
    acc_data=pickle.load(open('incremental_acc100',"rb"))
    print f1_data
    print acc_data
    # Re-shape each entry from a bare score list into [x_values, y_values].
    for key in f1_data.keys():
        f1list = f1_data[key]
        f1_data[key] = [range(5,101,5),f1list]
        acclist = acc_data[key]
        acc_data[key] = [range(5,101,5),acclist]
    f1svm_data = {}
    f1nb_data = {}
    f1me_data = {}
    accsvm_data = {}
    accnb_data = {}
    accme_data = {}
    # Bucket series by classifier-name prefix.
    for key in f1_data.keys():
        if key[:3]=="SVM":
            f1svm_data[key] = f1_data[key]
        elif key[:2]=="NB":
            f1nb_data[key] = f1_data[key]
        elif key[:6]=="MaxEnt":
            f1me_data[key] = f1_data[key]
    for key in acc_data.keys():
        if key[:3]=="SVM":
            accsvm_data[key] = acc_data[key]
        elif key[:2]=="NB":
            accnb_data[key] = acc_data[key]
        elif key[:6]=="MaxEnt":
            accme_data[key] = acc_data[key]
    return f1svm_data, f1nb_data, f1me_data, accsvm_data, accnb_data, accme_data
def plot_subjectivity_aggregates(data, filename="temporal"):
    """
    Plots aggregated subjectivity series over a frequency-bar background.

    data: dict of label -> [x_values, y_values]; the special keys 'Targets'
    (x-axis tick values) and 'Frequencies' (background bar heights) are
    treated separately from the plotted lines.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure(figsize=(9, 6))
    ind = np.arange(len(data['Targets'][0]))
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    ax.spines["top"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+0.25)
    ax.set_xticklabels(['%.2f' % x for x in data['Targets'][0]])
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    # Grey frequency bars behind the sentiment lines.
    ax.bar(ind, data['Frequencies'][1], 0.5, color=tableau20[14], edgecolor="none")
    # Limit the range of the plot to only where the data is.
    plt.ylim(0, 70)
    plt.xlim(0, 19)
    plt.xticks(fontsize=8)
    plt.yticks(fontsize=10)
    # Provide light tick lines across the plot.
    for y in [5,10,15,20,25,30,35,40,45,50, 55, 60, 65]:
        plt.plot(range(0,20), [y] * len(range(0,20)), "--", lw=0.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    # Plot every series except the background 'Frequencies'.
    labels = [label for label in data.keys() if label!='Frequencies']
    y_poses = []
    offsets = [0.02, 0.04,0.06,0.08,0.1,0.12, 0.14,0.16,0.18,0.2]
    for rank, column in enumerate(labels):
        plt.plot(ind+0.25, data[column][1], lw=1.0, color=tableau20[rank+1])
        # Place the series label at the right end of the line, nudging it up
        # by 2 when it would land within 1 of an already-placed label.
        y_pos = data[column][1][-1]
        # new_pos = None
        # offset_counter = 0
        for poses in y_poses:
            if y_pos < poses+1 and y_pos>=poses:
                y_pos = y_pos+2
                # offset_counter += 1
                break
            if y_pos > poses-1 and y_pos<=poses:
                y_pos = y_pos+2
                # offset_counter += 1
                break
        else:
            y_pos = y_pos
        y_poses.append(y_pos)
        plt.text(19, y_pos, column, fontsize=8, color=tableau20[rank+1])
    plt.savefig("figs/"+filename+".pdf", bbox_inches="tight");
    print "Figure done."
def plot_polarity_aggregates(data, filename="temporal"):
    """
    Plots aggregated polarity series in [-1, 1] over a frequency-bar background.

    data: dict of label -> [x_values, y_values]; 'Targets' provides x-axis
    tick values and 'Frequencies' the background bar heights.  A bold dashed
    line marks the neutral (0) level.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure(figsize=(9, 6))
    ind = np.arange(len(data['Targets'][0]))
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    ax.spines["top"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+0.25)
    ax.set_xticklabels(['%.2f' % x for x in data['Targets'][0]])
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    # Grey frequency bars behind the polarity lines.
    ax.bar(ind, data['Frequencies'][1], 0.5, color=tableau20[14], edgecolor="none")
    # Limit the range of the plot to only where the data is.
    plt.ylim(-1, 1)
    plt.xlim(0, 19.5)
    plt.xticks(fontsize=8)
    plt.yticks(fontsize=10)
    # Provide light tick lines across the plot.
    # for y in [5,10,15,20,25,30,35,40,45]:
    for y in [-0.8,-0.6,-0.4,-0.2,0.2,0.4,0.6,0.8]:
        plt.plot(range(0,20), [y] * len(range(0,20)), "--", lw=0.5, color="black", alpha=0.3)
    # Bold line at 0 = neutral polarity.
    plt.plot(range(0,20), [0] * len(range(0,20)), "--", lw=2.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    # Plot every series except the background 'Frequencies'.
    labels = [label for label in data.keys() if label!='Frequencies']
    y_poses = []
    offsets = [0.02, 0.04,0.06,0.08,0.1,0.12, 0.14,0.16,0.18,0.2]
    for rank, column in enumerate(labels):
        plt.plot(ind+0.25, data[column][1], lw=1.0, color=tableau20[rank+1])
        # Place the series label at the right end of the line, nudging it by
        # 0.2 when it would land within 0.1 of an already-placed label.
        y_pos = data[column][1][-1]
        # new_pos = None
        # offset_counter = 0
        for poses in y_poses:
            if y_pos < poses+0.1 and y_pos>=poses:
                y_pos = y_pos+0.2
                # offset_counter += 1
                break
            if y_pos > poses-0.1 and y_pos<=poses:
                y_pos = y_pos-0.2
                # offset_counter += 1
                break
        else:
            y_pos = y_pos
        y_poses.append(y_pos)
        plt.text(19, y_pos, column, fontsize=8, color=tableau20[rank+1])
    # plt.text(19, 29.5, "Neutral", fontsize=8, color="black")
    plt.savefig("figs/"+filename+".pdf", bbox_inches="tight");
    print "Figure done."
def plot_temporal_topics(data, filename="temporal_topics"):
    """
    Plots per-topic polarity over time on a 0-100 scale centered at 50.

    data: dict of topic label -> [x_values, y_values].  WARNING: mutates the
    input -- every y value is shifted by +50 in place so that 50 marks the
    neutral level on the plot.
    """
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    f = plt.figure(figsize=(9, 6))
    # x positions taken from the first series' x values.
    ind = np.arange(len(data[data.keys()[0]][0]))
    # Remove the plot frame lines. They are unnecessary chartjunk.
    ax = plt.subplot(111)
    ax.spines["top"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["left"].set_visible(False)
    ax.set_xticks(ind+0.25)
    ax.set_xticklabels(['%.2f' % x for x in data[data.keys()[0]][0]])
    # Ensure that the axis ticks only show up on the bottom and left of the plot.
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    # Limit the range of the plot to only where the data is.
    plt.ylim(0, 100)
    plt.xlim(0, 8)
    plt.xticks(fontsize=6)
    plt.yticks(fontsize=6)
    # Provide light tick lines across the plot.
    # for y in [5,10,15,20,25,30,35,40,45]:
    for y in range(0,100,5):
        plt.plot(range(0,8), [y] * len(range(0,8)), "--", lw=0.5, color="black", alpha=0.3)
    # Bold line at 50 = neutral after the +50 shift below.
    plt.plot(range(0,8), [50] * len(range(0,8)), "--", lw=2.5, color="black", alpha=0.3)
    # Remove the tick marks; they are unnecessary with the tick lines we just plotted.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    labels = data.keys()
    y_poses = []
    offsets = [0.02, 0.04,0.06,0.08,0.1,0.12, 0.14,0.16,0.18,0.2]
    for rank, column in enumerate(labels):
        # Shift the series so 0 polarity is drawn at y=50 (in-place mutation).
        for i in range(len(data[column][1])):
            data[column][1][i] = data[column][1][i] +50
        plt.plot(ind+0.25, data[column][1], lw=1.0, color=tableau20[rank])
        # Place the topic label at the right end of the line, nudging it by 8
        # when it would land within 5 of an already-placed label.
        y_pos = data[column][1][-1]
        # new_pos = None
        # offset_counter = 0
        for poses in y_poses:
            if y_pos < poses+5 and y_pos>=poses:
                y_pos = y_pos+8
                # offset_counter += 1
                break
            if y_pos > poses-5 and y_pos<=poses:
                y_pos = y_pos-8
                # offset_counter += 1
                break
        else:
            y_pos = y_pos
        y_poses.append(y_pos)
        # Topic names may be non-ASCII bytes; fall back to an explicit decode.
        try:
            plt.text(8.2, y_pos, column, fontsize=8, color=tableau20[rank])
        except UnicodeDecodeError:
            plt.text(8.2, y_pos, column.decode('utf8'), fontsize=8, color=tableau20[rank])
    plt.text(8.2, 49.5, "Neutral", fontsize=8, color="black")
    plt.savefig("figs/"+filename+".pdf", bbox_inches="tight");
    print "Figure done."
if __name__ == '__main__':
    # Script entry point: earlier experiments are kept below as commented-out
    # invocations; only the temporal-topics figure is currently produced.
    # f1svm_data, f1nb_data, f1me_data, accsvm_data, accnb_data, accme_data = load_incremental_data()
    # data = {"Erna Solberg": [range(0,100),[random.randint(20,50) for _ in range(0,100)]],
    #         "rosenborg": [range(0,100),[random.randint(40,60) for _ in range(0,100)]],
    #         "no target": [range(0,100),[random.randint(30,40) for _ in range(0,100)]]}
    # plot_temporal_sentiment(f1svm_data, 'incremental_f1svm')
    # plot_temporal_sentiment(f1nb_data, 'incremental_f1nb')
    # plot_temporal_sentiment(f1me_data, 'incremental_f1me')
    # plot_temporal_sentiment(accsvm_data, 'incremental_accuracysvm')
    # plot_temporal_sentiment(accnb_data, 'incremental_accuracynb')
    # plot_temporal_sentiment(accme_data, 'incremental_accuracyme')
    # data = {"SVM(SB)+SVM(PC)": [(0.72+0.79)/2, (0.66+0.80)/2, (0.69+0.76)/2, (0.67+0.78)/2],
    #         "SVM(SB)+SVM(PB)": [(0.72+0.77)/2, (0.66+0.76)/2, (0.69+0.75)/2, (0.67+0.75)/2],
    #         "SVM(SB)+MaxEnt(PC)":[(0.72+0.77)/2, (0.66+0.80)/2, (0.69+0.72)/2, (0.67+0.75)/2],
    #         "MaxEnt(SB)+MaxEnt(PC)": [(0.70+0.77)/2, (0.66+0.80)/2, (0.61+0.72)/2, (0.63+0.75)/2],
    #         "MaxEnt(SB)+SVM(PC)": [(0.70+0.79)/2, (0.66+0.80)/2, (0.61+0.76)/2, (0.63+0.78)/2]}
    # plot_combined_histogram(data, "combined")
    # Load topic -> [x_values, y_values] polarity series pickled elsewhere.
    data = pickle.load(open('topically_aggregated_polarity', 'rb'))
    plot_temporal_topics(data, "temporal_topics")
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,053
|
andrely/twitter-sentiment
|
refs/heads/master
|
/annotation.py
|
'''
Created on 30. sep. 2014
@author: JohnArne
'''
import utils
import tweet
import os
def user_annotation():
"""
Feed tweets to console one at a time, and ask user for sentiment annotation.
"""
dataset = utils.select_dataset()
text_tweets = utils.get_dataset(dataset)
tweets = []
for text_tweet in text_tweets:
tweets.append(tweet.to_tweet(text_tweet))
username = raw_input("Name? ... ")
print "\n--------------\n"
print "Input: "
print "\n1: Negative sentiment (Negative opinion). \n2: Neutral/objective sentiment (No opinion). \n3: Positive sentiment (Positive opinion). \n5: Delete the tweet from the dataset. \nx: Cancel sequence. 0: Go back to previous tweet. "
print "\n--------------\n"
annotated_to = 0
i = 0
while i < len(tweets):
# tweets[i].text.encode('utf8')
# text = tweets[i].text
# tweets[i].text = text.decode('utf8')
try:
print "Tweet nr. : "+str(i+1)
print str(((i+1.0*1.0)/len(tweets)*1.0)*100)+" % done "
print unicode(tweets[i].__str__().decode('utf8'))
except UnicodeEncodeError:
try:
print "Tweet nr. : "+str(i+1)
print str(tweets[i])
except UnicodeEncodeError:
print "Could not print tweet number "+str(i+1) +". Deleting tweet..."
tweets.remove(tweets[i])
continue
userinput = raw_input("...")
while not legal_input(userinput):
userinput = raw_input("Unlawful input! Please re-introduce.")
if userinput is '1':
tweets[i].set_sentiment("negative")
elif userinput is '2':
tweets[i].set_sentiment("neutral")
elif userinput is '3':
tweets[i].set_sentiment("positive")
elif userinput is '5':
print "Deleting tweet..."
tweets.remove(tweets[i])
continue
elif userinput is '0':
i = i-1
continue
elif userinput is 'x':
break
i = i+1
#TODO: need to encode to utf when getting from dataset?!?!
#Store the sentiment in file!
tweetlines = []
for t in tweets[:i]:
if t.get_sentiment() is None:
continue
tweetlines.append(t.to_tsv())
dir = username+"_annotated_data"
if not os.path.exists(dir):
os.makedirs(dir)
utils.store_dataset(tweetlines, dir+dataset[4:])
print "Domo arigato!"
def legal_input(userinput):
    """
    Return True if *userinput* is one of the accepted annotation commands.

    Accepted commands: sentiment labels '1'/'2'/'3', delete ('5'),
    go back ('0'), and cancel ('x').
    """
    return userinput in ('1', '2', '3', '5', '0', 'x')
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,054
|
andrely/twitter-sentiment
|
refs/heads/master
|
/tweet.py
|
'''
Created on 30. sep. 2014
@author: JohnArne
'''
class Tweet(object):
    """
    Container for a single tweet and all features derived from it during
    preprocessing and classification (POS tags, emoticon/punctuation counts,
    sentiment labels, extracted entities).
    """
    def __init__(self, timestamp, user, text):
        self.user = user
        self.text = text
        self.timestamp = timestamp
        self.subjectivity = None  # 0 if objective, 1 if subjective
        self.polarity = None  # 0 if negative sentiment, 1 if positive sentiment
        self.processed_words = []  # stemmed and preprocessed words of the text body
        self.tagged_words = []  # a list of dicts (POS-tagged phrases)
        self.nrof_happyemoticons = 0
        self.nrof_sademoticons = 0
        self.nrof_hashtags = 0
        self.nrof_usersmentioned = 0
        self.exclamated = False
        self.hashtags = []
        self.links = []
        self.users_mentioned = []
        self.nrof_exclamations = 0
        self.nrof_questionmarks = 0
        self.word_count = 0
        self.words_with_sentimentvalues = {}
        self.sentiments = []
        self.link_pos = []
        self.sentiment_target = ""
    def to_tsv(self):
        """
        Convert the data in this tweet to the .tsv format used to store it in .tsv files.
        TSV Format: Date \t Time \t Sentiment \t User \t Textbody
        """
        # Bug fix: the original first assigned a dead, misspelled variable
        # ("tvsline = ''") that was never used; the typo line is removed.
        sentiment = self.get_sentiment()
        if sentiment is not None:
            tsvline = self.timestamp
            tsvline = tsvline+"\t"+sentiment
            tsvline = tsvline+"\t"+self.user
            tsvline = tsvline+"\t"+self.text
        else:
            tsvline = self.timestamp+"\t"+self.user+"\t"+self.text
        return tsvline
    def get_sentiment(self):
        """
        Returns a textual representation of the sentiment (negative, neutral, positive),
        based on the subjectivity and polarity variables of the tweet, or
        None when the tweet has not been labelled.
        """
        # Bug fix: use == rather than "is" for small-int comparisons;
        # identity of small ints is a CPython implementation detail.
        sentiment = None
        if self.subjectivity == 1:
            sentiment = "negative".encode('utf8') if self.polarity == 0 else "positive".encode('utf8')
        elif self.subjectivity == 0:
            sentiment = "neutral".encode('utf8')
        return sentiment
    def set_sentiment(self, sentiment):
        """
        Sets the binary subjectivity and polarity variables of the tweet based on the
        passed textual representation of sentiment.
        """
        if sentiment=="negative":
            self.subjectivity = 1
            self.polarity = 0
        elif sentiment=="neutral":
            self.subjectivity = 0
            self.polarity = 0
        elif sentiment=="positive":
            self.subjectivity = 1
            self.polarity = 1
    def stat_str(self):
        """
        Returns a string of all stats of the tweet. BROKEN, unicode errors all around
        """
        statstring = "\n--------------\n"+" \n"+self.user+"\n"+self.text+"\n "
        statstring = statstring + "Tagged words: "+str(self.tagged_words) + "\n"
        statstring = statstring + "Sentiment " +str(self.get_sentiment()) + "\n"
        statstring = statstring + "Hashtags: "+str(self.nrof_hashtags) + " "+str(self.hashtags) + "\n"
        statstring = statstring + "Users: "+str(self.nrof_usersmentioned) + " "+str(self.users_mentioned) + "\n"
        statstring = statstring + "Happy emoticons: "+str(self.nrof_happyemoticons) + "\n"
        statstring = statstring + "Sad emoticons: "+str(self.nrof_sademoticons)+ "\n"
        statstring = statstring + "Question marks: "+str(self.nrof_questionmarks)+ "\n"
        statstring = statstring + "Exclamation marks: "+str(self.nrof_exclamations)+ "\n"
        statstring = statstring + "\n--------------\n"
        return statstring
    def __str__(self):
        """
        Returns a string representation of the tweet for visual representation.
        """
        return "\n--------------\n"+" \n"+self.user+"\n"+self.text+"\n--------------\n"
    def __eq__(self, other):
        # Tweets compare equal on text alone (used for deduplication).
        return self.text == other.text
def to_tweet(text):
    """
    Convert a given .tsv formatted text line to a tweet object.

    Lines with more than three tab-separated fields are assumed to be
    labelled (timestamp \t sentiment \t user \t text); otherwise the line is
    (timestamp \t user \t text) and the tweet is left unlabelled.
    """
    splits = text.split('\t')
    print "Creating tweet object: "
    if len(splits)>3:
        # Labelled line: field 1 carries the sentiment label.
        print "Splitted into more than 3..."
        for split in splits:
            print split
        tweet = Tweet(splits[0], splits[2], splits[3])
        tweet.set_sentiment(splits[1])
    else:
        # Unlabelled line: timestamp, user, text only.
        print "Splitted into less than 3..."
        for split in splits:
            print split
        tweet = Tweet(splits[0], splits[1], splits[2])
    return tweet
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,055
|
andrely/twitter-sentiment
|
refs/heads/master
|
/models/nb.py
|
'''
Created on 19. mars 2014
@author: JohnArne
'''
from model import Model
from sklearn.naive_bayes import MultinomialNB
class NB(Model):
    """
    Multinomial Naive Bayes learning method, wrapped in the shared Model
    pipeline (vectorizer + tf-idf + grid-searched classifier).
    """
    def __init__(self, train_tweets, train_targets, vect_options, tfidf_options):
        # Grid-search candidates for the additive (Lidstone) smoothing
        # parameter of MultinomialNB.
        smoothing_grid = {
            'clf__alpha': (0.1, 0.3, 0.5, 0.7, 0.8, 1.0)
        }
        self.classifier = MultinomialNB()
        super(NB, self).__init__(train_tweets, train_targets,
                                 vect_options, tfidf_options, smoothing_grid)
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,056
|
andrely/twitter-sentiment
|
refs/heads/master
|
/tagger.py
|
# -*- coding: utf-8 -*-
'''
Created on 30. sep. 2014
@author: JohnArne
'''
import requests
class Tagger():
    """
    Interfaces the POS tagger for classification.

    Thin HTTP client for the remote "smarttagger" service: posts raw text
    and receives JSON containing POS-tagged phrases.
    """
    def __init__(self):
        # Endpoint of the remote tagging service.
        self.url = "http://smarttagger.herokuapp.com/tag"
    def tag_text(self, text):
        """
        Tags a text sequence using the current tagger.

        Returns the first tagged phrase from the service response, or None
        when the response is empty or is not valid JSON.
        """
        # print "Tagging: "+unicode(text.decode('utf8'))
        par = {"text": text, "raw": "raw", "format": "json"}
        r = requests.post(self.url, data=par)
        tagged_words = {}
        try:
            results = r.json()["phrases"]
            tagged_words = results
        except ValueError as e:
            # The service returned a non-JSON body (e.g. an error page);
            # log the details and fall through to the empty-result path.
            print "Unable to get JSON: " +str(e)
            print r.reason
            print r.status_code
        if len(tagged_words)<1:
            return None
        else:
            return tagged_words[0]
if __name__=="__main__":
    # Smoke test: tag a handful of Norwegian tweets mentioning "erna solberg"
    # and print the raw texts followed by the tagger output for each.
    tagger = Tagger()
    texts = []
    texts.append(u"Viss Russland kritikken erna solberg framførte i FN var skjult, korleis i hulaste har den då hamna på framsida av VG i dag?")
    texts.append(u"borgebrende Hoyre erna solberg Hvem skal gjøre møkkajobbene da?")
    texts.append(u"erna solberg Siv Jensen FrP jensstoltenberg jonasgahrstore Skremmende at regjeringen vil selge aksjer i statlige selskap til utlandet.")
    texts.append(u"CSpange Aftenposten erna solberg Sannsynlige grunner ingen, heller ikke de 120, venter resultater. Og Kina og USA uteblir. Neste gang...")
    texts.append(u"ElinJoval konservativ erna solberg det Elin sa! Jeg vil ha mer tid til å være sammen med elevene.")
    texts.append(u"Skulle ønske konservativ og erna solberg hjalp til å styrke lærernes status. Vi er gode! Vi trenger tid til å gjøre jobben vår bedre!")
    for text in texts:
        print unicode(text)
    for text in texts:
        print tagger.tag_text(text)
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,057
|
andrely/twitter-sentiment
|
refs/heads/master
|
/entity_extraction.py
|
'''
Created on 4. jan. 2015
@author: JohnArne
-Ta en gruppe tweets.
-Prov a finne entitetene i hver enkelt tweet.
- kjor clustering pa tweets, eller pa bare entitetsnavn... grupper entitetene etter clusters og nominer den mest frekvente entiteten som overordnet navn
'''
import lexicon.pos_mappings
from lexicon import pos_mappings
from tweet import Tweet
import utils
from models.features import get_sentiment_values
import classifier
import pickle
from sklearn import metrics
import plotting
from sklearn.feature_extraction.text import CountVectorizer
import math
def perform_entity_extraction(tweets, sentimentvalues, breakword_min_freq=0.2, breakword_range=2, use_sentiment_values=False, use_pmi=False, vocabulary=None, cluster=False, use_minibatch=False, use_idf=False, use_hasher=False):
    """
    Takes in a list of correctly predicted subjective tweets and sentimentvalues, in addition to several optional parameters, and attempts entity extraction on all the tweeets.

    Returns one entity (or None) per tweet: find_entities produces a list of
    candidates per tweet, and the first candidate is kept.
    """
    print len(tweets)
    # The subjectivity classifier drives the "breakword" analysis inside
    # find_entity.
    sub_clf = classifier.get_optimal_subjectivity_classifier()
    #Get all the correctly classified subjective tweets
    # PMI disambiguation needs a unigram/bigram vocabulary; build one lazily.
    if use_pmi and vocabulary==None: vocabulary = create_vocabulary(tweets)
    if use_sentiment_values:
        entities = find_entities(sub_clf, tweets, breakword_min_freq, breakword_range, use_pmi, vocabulary=vocabulary, sentimentvalues=sentimentvalues)
    else:
        entities = find_entities(sub_clf, tweets, breakword_min_freq, breakword_range, use_pmi, vocabulary=vocabulary)
    if cluster:
        #use clustering to group together tweets,
        #then choose the entities with the greatest freqiencies within each cluster as the sentiment target for all in the cluster
        #but not if the target is already none...
        # NOTE(review): cluster_tweets is currently a no-op placeholder.
        cluster_tweets(tweets, use_minibatch, use_idf, use_hasher)
    # Collapse each candidate list to its first entry (or None when empty).
    for i in xrange(len(entities)):
        entities[i] = entities[i][0] if len(entities[i])>0 else None
    print entities
    return entities
def find_entities(sub_clf, tweets, min_freq, breakword_range, use_pmi=False, vocabulary=None, sentimentvalues=None):
    """
    Run find_entity over every tweet, pairing each tweet with its sentiment
    values when those are supplied.

    Returns one candidate-entity list per tweet (textual descriptions).
    """
    if sentimentvalues is None:
        return [find_entity(sub_clf, t, min_freq, breakword_range, use_pmi,
                            vocabulary=vocabulary)
                for t in tweets]
    return [find_entity(sub_clf, t, min_freq, breakword_range, use_pmi,
                        vocabulary=vocabulary, sentimentvalues=s)
            for t, s in zip(tweets, sentimentvalues)]
def find_entity(sub_clf, t, min_freq, breakword_range, use_pmi=False, vocabulary=None, sentimentvalues=None):
    """
    Attempts at identifying the entity of a single tweet, utilizing sentiment values if not none.

    Strategy: collect proper-noun candidates; fall back to the first usable
    hashtag when there are none; otherwise narrow the candidates to those
    near "breakwords" (words whose removal flips the subjectivity
    prediction) and lexicon sentiment words, optionally disambiguating with
    PMI against *vocabulary*. Returns a (possibly empty) list of candidates.
    """
    #get a list of possibilities for this tweet
    possibilities = get_possible_entities(t)
    if len(possibilities)<1:
        # No proper nouns found: fall back to the first hashtag longer than
        # one character, else give up.
        for hashtag in t.hashtags:
            if len(hashtag)>1:
                return [hashtag]
        return []
    if len(possibilities)==1:
        return possibilities
    #Get breakwords from breakdown classification
    breakwords = breakdown_classify(sub_clf, t)
    breakwords = cutoff_breakwords(breakwords, min_freq)
    #get sentimental words if given values
    sentimentwords = []
    if sentimentvalues!=None:
        sentimentwords = get_sentimentwords(sentimentvalues)
    #Perform an intersection of the breakwords and sentimental words
    # print "Text: ", t.text
    # print "Possible entities: ",possibilities
    # print "Hashtags: ",t.hashtags
    # print "Shifting words: ",breakwords
    # print "Sentimental words: ",sentimentwords
    sentiment_points = [val for val in sentimentwords if val in breakwords]
    # print "Intersection: ", sentiment_points
    # Fall back to the union when the intersection is empty.
    if len(sentiment_points)<1: sentiment_points = list(set(breakwords + sentimentwords))
    # print "New intersection: ", sentiment_points
    # Keep only candidates within breakword_range words of a sentiment point.
    possibilities = cutoff_possibilities(t.text.lower(), possibilities, sentiment_points, breakword_range)
    # print "Possibilities after cutoff: ", possibilities
    # raw_input("Continue?")
    if use_pmi:
        #Use PMI to disambiguate between possibilities
        pmi = []
        for p in possibilities:
            if p is None: continue
            for s in sentiment_points:
                if s is None: continue
                pmi.append([calculate_pmi(p,s,vocabulary), p])
        if len(pmi)>0:
            # Keep only the candidate with the highest PMI score.
            possibilities = [max(pmi)[1]]
    if len(possibilities)>0:
        return possibilities
    else:
        # Nothing survived the cutoffs: fall back to hashtags again.
        if len(t.hashtags)>0:
            for hashtag in t.hashtags:
                if len(hashtag)>1:
                    return [hashtag]
        return []
#    if len(possibilities)>0:
#        return t.hashtags[0] if len(t.hashtags)>0 else possibilities
#    else:
#        return t.hashtags
#
    #decide entity from possibilities based on the sentiment points
    #calculate the "center" of the sentiment points
    #choose the entity which is closest to the center of the sentiment points
    #or choose the entity closest to the first sentiment points...
def calculate_pmi(entity, sentiword, vocabulary):
    """
    Calculates the pointwise mutual information between two given words.

    *vocabulary* is the [unigram_freqs, bigram_freqs] pair produced by
    create_vocabulary. Returns 0.0 when the bigram is unseen (or on a
    decoding error), which effectively ranks unknown pairs lowest.
    """
    unigrams_freq = float(sum(vocabulary[0].values()))
    prob_entity = vocabulary[0][entity] / unigrams_freq
    prob_sentiword = vocabulary[0][sentiword] / unigrams_freq
    try:
        # Joint probability from the (extended) bigram counts.
        prob_both = vocabulary[1][" ".join([unicode(entity),unicode(sentiword)])] / float(sum(vocabulary[1].values()))
    except KeyError:
        return 0.0
    except UnicodeDecodeError:
        print "UnicodeError"
        return 0.0
    # PMI = log2( P(x,y) / (P(x) * P(y)) ).
    return math.log(prob_both/float(prob_entity*prob_sentiword),2)
def create_vocabulary(tweets):
    """
    Creates a bigram + unigram vocabulary of the given tweet texts.

    Returns [unigram_freqs, bigram_freqs]. The "bigrams" are extended skip
    pairs (adjacent and distance-2 word pairs), so nearby non-adjacent
    co-occurrences also count for PMI.
    """
    print "Creating vocabulary..."
    vocabulary = []
    unigrams_freq = {}
    bigrams_freq = {}
    texts= [t.text.lower() for t in tweets]
    for text in texts:
        # Split on sentences, then treat commas as plain separators.
        for phrase in text.split('.'):
            phrase = phrase.replace(',',' ')
            unigrams = phrase.split(" ")
            # Single-character tokens are noise; drop them.
            unigrams = [u for u in unigrams if len(u)>1]
            extended_bigrams = [x+" "+y for x,y in zip(unigrams[0::2],unigrams[1::2])] + [x+" "+y for x,y in zip(unigrams[1::2],unigrams[2::2])] + [x+" "+y for x,y in zip(unigrams[0::2],unigrams[2::2])] + [x+" "+y for x,y in zip(unigrams[1::2],unigrams[3::2])]
            for unigram in unigrams:
                unigrams_freq[unigram] = unigrams_freq.get(unigram, 0) + 1
            for bigram in extended_bigrams:
                bigrams_freq[bigram] = bigrams_freq.get(bigram, 0) + 1
    vocabulary = [unigrams_freq, bigrams_freq]
    return vocabulary
def is_entity(clf, t, entity, sentimentvalues=None):
    """
    Placeholder: would decide whether *entity* is the sentiment target of
    tweet *t* using classifier *clf*. Currently always returns False.
    """
    return False
def get_possible_entities(t):
    """
    Collect candidate entities from a tweet: every proper-noun token
    (POS tag "Np") longer than one character, lowercased, in order of
    appearance. Tagged words missing 'word' or 'pos' keys are skipped.
    """
    candidates = []
    for phrase in t.tagged_words:
        for tagged in phrase:
            try:
                token = tagged['word']
                is_proper_noun = tagged['pos'] == "Np"
            except KeyError:
                continue
            if is_proper_noun and len(token) > 1:
                candidates.append(token.lower())
    return candidates
def get_sentimentwords(sentimentvalues):
    """
    Return the words carrying sentimental value: those whose positive
    (index 0) or negative (index 1) score is above zero.
    """
    return [word for word, scores in sentimentvalues.items()
            if scores[0] > 0 or scores[1] > 0]
def breakdown_classify(clf, t):
    """
    Classify substring permutations of the tweet in order to find a shifting point in the subjectivity classification

    Delegates to subclassify, which removes one word at a time and records
    the words whose removal flips the original prediction ("breakwords").
    """
    orig_class = clf.classify([t])
    breakwords = subclassify(t.text.lower(), clf, orig_class)
#    breakwords = []
#    print "Breakdown classification"
#    for substring_paths in substrings:
#        for substring_and_rmword in substring_paths:
#            print substring_and_rmword['substring'], " rm:",substring_and_rmword['removed_word']
#            #Classify each substring, append causal word when sentiment changes from original
#            new_class = clf.classify_text([" ".join(substring_and_rmword['substring'])])
#            print "New class: ",new_class
#            if new_class!=orig_class:
#                breakwords.append(substring_and_rmword['removed_word'])
#                break
#    print "Original prediction: ",orig_class
    return breakwords
def subclassify(t, clf, orig):
    """
    Takes in a text, a classifier, and an original class. Returns all the break words where the class shifts. FIXXX!

    Splits the text into words (commas treated as separators) and, for each
    word, asks clf_sub whether removing it flips the prediction. The result
    list may contain None entries for words that do not flip the class.
    """
    phrases = t.split(",")
    words = []
    for phrase in phrases:
        words = words + phrase.split(" ")
    length = len(words)
    breakwords = []
    for i in xrange(length):
        # Remove word i and probe the classifier on the remainder.
        breakwords.append(clf_sub(words[:i]+words[i+1:], words[i], clf, orig))
    return breakwords
def clf_sub(words, removed_word, clf, orig):
    """
    FIIIIX!!! Return on classification shift!!! yesaaaaa!

    NOTE(review): acknowledged broken by the author. The loop returns (either
    removed_word or the recursive call) on its very first iteration, so the
    intended exhaustive sub-phrase search never happens and the trailing
    "return breakword" is unreachable for len(words) > 1.
    """
    if len(words)==1: return removed_word if clf.classify_text([" ".join(words)])!=orig else None
    breakword = None
#    print " ".join(words), "Removed word: ", removed_word
    for i in xrange(len(words)-1):
        if clf.classify_text([" ".join(words)])!=orig:
#            print "Swithed class!"
            return removed_word
        return clf_sub(words[:i]+words[i+1:], words[i], clf, orig)
    return breakword
def cutoff_breakwords(breakwords, min_freq):
    """
    Deduplicate *breakwords*, dropping every word whose occurrence count is
    below the fraction *min_freq* of the list length (rounded to an int).
    Returns the surviving unique words (order unspecified, as before).
    """
    threshold = int(round(len(breakwords) * min_freq))
    counts = {}
    for word in breakwords:
        counts[word] = counts.get(word, 0) + 1
    survivors = list(set(breakwords))
    for word, count in counts.items():
        if count < threshold:
            survivors.remove(word)
    return survivors
def cutoff_possibilities(text, possibilities, sentiment_points, breakword_range):
    """
    Keep only the candidate entities occurring within *breakword_range*
    words of any sentiment-bearing word in *text* (commas treated as word
    separators). Candidates near several sentiment words may repeat.
    """
    words = []
    for segment in text.split(","):
        words = words + segment.split(" ")
    sentiment_indexes = [idx for idx, w in enumerate(words)
                         if w in sentiment_points]
    surviving = []
    for idx in sentiment_indexes:
        # NOTE: a negative lower bound wraps around like the original slice
        # did; behavior is preserved deliberately.
        window = words[idx - breakword_range: idx + breakword_range + 1]
        for candidate in possibilities:
            if candidate in window:
                surviving.append(candidate)
    return surviving
def get_hashtag_entities(tweets):
    """
    Baseline entity extractor: return each tweet's first hashtag, or None
    for tweets without hashtags.
    """
    firsts = []
    for t in tweets:
        firsts.append(t.hashtags[0] if t.hashtags else None)
    return firsts
def reduce_entities(entities):
    """
    Reduce entities to binary so as to test with actual targets: 1 when the
    entity is one of the module-level rosenborg_model aliases, else 0.
    """
    return [1 if entity in rosenborg_model else 0 for entity in entities]
def get_scores(targets, predictions):
    """
    Compute (accuracy, precision, recall, F1) for binary predictions against
    the gold targets via sklearn.metrics.
    """
    return (metrics.accuracy_score(targets, predictions),
            metrics.precision_score(targets, predictions),
            metrics.recall_score(targets, predictions),
            metrics.f1_score(targets, predictions))
def create_model(text):
    """
    Create a new entity model containing only *text* and pickle it to the
    file "<text>_model" in the working directory.
    """
    model = [text]
    # Use a context manager so the handle is closed even if dump() fails
    # (the original left the file open).
    with open(text+"_model", "wb") as f:
        pickle.dump(model, f)
def append_to_model(name, text):
    """
    Add *text* to the pickled entity model stored in "<name>_model",
    deduplicate the entries, and write the model back.

    Bug fix: the original passed the file *path* string straight to
    pickle.load, which requires an open file object and therefore always
    raised; the file is now opened (and closed) properly for both the read
    and the write.
    """
    with open(name+"_model", "rb") as f:
        model = pickle.load(f)
    model.append(text)
    model = list(set(model))
    with open(name+"_model", "wb") as f:
        pickle.dump(model, f)
def cluster_tweets(tweets, max_features=None, use_minibatch=False, use_idf=False, use_hasher=False):
    """
    Placeholder for k-means clustering of tweets; currently a no-op that
    returns None.

    Bug fix: perform_entity_extraction calls this with four arguments
    (omitting max_features), which crashed against the original mandatory
    five-parameter signature; default values make both the four- and
    five-argument call shapes valid.
    """
    return None
def perform_and_test_extraction():
    """
    End-to-end evaluation of entity extraction on dataset 1 (Rosenborg):
    runs the custom extractor, the hashtag baseline, and the PMI-augmented
    extractor, reduces each to binary Rosenborg-or-not labels, scores them
    against gold targets, and plots the comparison histogram.
    """
    datasetnr = 1
    tweets = utils.get_pickles(datasetnr)
    # PMI needs corpus-wide statistics, so the vocabulary is built from all
    # datasets, not just the one under test.
    vocabulary = create_vocabulary(utils.get_all_pickles())
    sentimentvalues = get_sentiment_values(datasetnr)
    tweets, targets = utils.get_entity_test_and_targets()
    entities = perform_entity_extraction(tweets, sentimentvalues, breakword_range=3)
    hashtag_entities = get_hashtag_entities(tweets)
    pmi_entities = perform_entity_extraction(tweets, sentimentvalues, breakword_range=8, use_pmi=True, vocabulary=vocabulary)
    #TESTIFY!
    # Reduce predictions to binary (is/is not the Rosenborg entity).
    reduced_entities = reduce_entities(entities)
    reduced_hashtags = reduce_entities(hashtag_entities)
    reduced_pmis = reduce_entities(pmi_entities)
    data = {}
    accuracy, precision, recall, f1_score = get_scores(targets, reduced_entities)
    print "Entity Scores: ", accuracy, precision, recall, f1_score
    data["Custom"] = [accuracy, precision, recall, f1_score]
    accuracy, precision, recall, f1_score = get_scores(targets, reduced_hashtags)
    data["Hashtags"] = [accuracy, precision, recall, f1_score]
    print "Hashtag Scores: ", accuracy, precision, recall, f1_score
    accuracy, precision, recall, f1_score = get_scores(targets, reduced_pmis)
    print "PMI Scores: ", accuracy, precision, recall, f1_score
    data["Custom+PMI"] = [accuracy, precision, recall, f1_score]
    #send to plotting
    plotting.plot_entity_histogram(data, "entity_extraction")
# Aliases/spellings that count as the "Rosenborg" entity when reducing
# extracted entities to binary targets (see reduce_entities).
rosenborg_model = ["rosenborg","rosenborgs","rosenborgms", "rbk","rbks","rosenborg2" ]
if __name__ == '__main__':
    # Ad-hoc experiments kept for reference:
    #test substringify
#    substrings = subc(string)
#    print substrings
#    print len(substrings)
    #test breakdown clf
#    breakdown_classify("adwd", Tweet("12313", "johnaren", "jeg liker at"))
    # Run the full extraction-and-evaluation pipeline.
    perform_and_test_extraction()
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,058
|
andrely/twitter-sentiment
|
refs/heads/master
|
/models/svm.py
|
'''
Created on 19. mars 2014
@author: JohnArne
'''
from model import Model
from sklearn.linear_model import SGDClassifier
from sklearn.svm import LinearSVC
class SVM(Model):
    """
    Linear Support Vector Machine classifier, wrapped in the shared Model
    pipeline (vectorizer + tf-idf + grid-searched classifier).
    """
    def __init__(self, train_tweets, train_targets, vect_options, tfidf_options):
        # Grid-search candidates for LinearSVC's regularization strength C.
        c_grid = {
            'clf__C': (0.1, 0.3, 0.5, 0.7, 0.8, 1.0)
        }
        self.classifier = LinearSVC()
        super(SVM, self).__init__(train_tweets, train_targets,
                                  vect_options, tfidf_options, c_grid)
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,059
|
andrely/twitter-sentiment
|
refs/heads/master
|
/models/features.py
|
'''
Created on 27. nov. 2014
@author: JohnArne
'''
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pickle
def get_feature_set(tweet, featureset, sentimentvalues):
    """
    Dispatch to the extractor named by *featureset*: SA/SB/SC/SC2 for
    subjectivity, PA/PB/PC/PC2 for polarity. The "C" variants also consume
    the sentiment-lexicon values. Unknown names yield None.
    """
    if featureset == 'SA':
        return get_feature_set_SA(tweet)
    if featureset == 'SB':
        return get_feature_set_SB(tweet)
    if featureset == 'SC':
        return get_feature_set_SC(tweet, sentimentvalues)
    if featureset == 'SC2':
        return get_feature_set_SC2(tweet, sentimentvalues)
    if featureset == 'PA':
        return get_feature_set_PA(tweet)
    if featureset == 'PB':
        return get_feature_set_PB(tweet)
    if featureset == 'PC':
        return get_feature_set_PC(tweet, sentimentvalues)
    if featureset == 'PC2':
        return get_feature_set_PC2(tweet, sentimentvalues)
    return None
def get_feature_set_SA(tweet):
    """
    Feature set SA: no handcrafted features at all — the subjectivity
    classifier relies purely on token frequencies from the vectorizer.
    """
    return {}
def get_feature_set_SB(tweet):
    """
    Feature set SB: grammatical features for subjectivity classification.

    Counts per-POS-tag frequencies, aggregate adjective/adverb counts, a
    binary pronoun indicator, and emoticon/exclamation counts (only when
    present). Returns a dict of feature name -> float.
    """
    pos_tag_freq = {}
    additional_freq = {}
    for phrase in tweet.tagged_words:
        for word in phrase:
            try:
                tag = word['pos']
            except KeyError:
                continue
            pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1
            # Bug fix: the aggregate counters previously read
            # additional_freq.get(tag, 0) — a key never present there — so
            # the counts were pinned at 1; read the aggregate key instead.
            if tag in ADJECTIVES:
                additional_freq['adjectives'] = additional_freq.get('adjectives', 0) + 1
            elif tag in ADVERBS:
                additional_freq['adverbs'] = additional_freq.get('adverbs', 0) + 1
            elif tag in PRONOUNS:
                additional_freq['pronoun'] = 1
    # Coerce counts to floats, as expected by the downstream vectorizer.
    for key in pos_tag_freq.keys():
        pos_tag_freq[key] = pos_tag_freq[key]*1.0
    # Number of exclamation marks, number of emoticons (only when nonzero).
    emoticons = tweet.nrof_happyemoticons+tweet.nrof_sademoticons
    if emoticons>0:
        additional_freq['emoticons'] = emoticons*1.0
    if tweet.nrof_exclamations>0:
        additional_freq['exclamations'] = tweet.nrof_exclamations*1.0
    # Merge the two dicts (works on both Python 2 and 3, unlike the
    # original dict(a.items() + b.items())).
    features = dict(pos_tag_freq)
    features.update(additional_freq)
    return features
def get_feature_set_SC(tweet, sentimentvalues):
    """
    Feature set SC: the grammatical features of SB plus sentiment-lexicon
    features — summed subjectivity score (positive + negative polarity mass
    per the original), summed objectivity score, and counts of
    subjective/objective words. Returns a dict of feature name -> float.
    """
    pos_tag_freq = {}
    additional_freq = {}
    for phrase in tweet.tagged_words:
        for word in phrase:
            try:
                tag = word['pos']
            except KeyError:
                continue
            pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1
            # Bug fix: aggregate counters previously read
            # additional_freq.get(tag, 0), pinning the counts at 1.
            if tag in ADJECTIVES:
                additional_freq['adjectives'] = additional_freq.get('adjectives', 0) + 1
            elif tag in ADVERBS:
                additional_freq['adverbs'] = additional_freq.get('adverbs', 0) + 1
            elif tag in PRONOUNS:
                additional_freq['pronoun'] = 1
    for key in pos_tag_freq.keys():
        pos_tag_freq[key] = pos_tag_freq[key]*1.0
    # Number of exclamation marks, number of emoticons (only when nonzero).
    emoticons = tweet.nrof_happyemoticons+tweet.nrof_sademoticons
    if emoticons>0:
        additional_freq['emoticons'] = emoticons*1.0
    if tweet.nrof_exclamations>0:
        additional_freq['exclamations'] = tweet.nrof_exclamations*1.0
    # Lexicon values: total subjectivity score (positive and negative mass
    # both count as subjective), total objectivity score, and word counts.
    sub_score = 0.0
    obj_score = 0.0
    nrof_subwords = 0
    nrof_objwords = 0
    for word in sentimentvalues.keys():
        if sentimentvalues[word][0]>0:
            sub_score = sub_score + sentimentvalues[word][0]
            nrof_subwords = nrof_subwords + 1
        if sentimentvalues[word][1]>0:
            sub_score = sub_score + sentimentvalues[word][1]
            nrof_subwords = nrof_subwords + 1
        if sentimentvalues[word][2]>0:
            obj_score = obj_score + sentimentvalues[word][2]
            nrof_objwords = nrof_objwords + 1
    if sub_score>0:
        additional_freq["sub_score"] = sub_score+1.0
    if obj_score>0:
        additional_freq["obj_score"] = obj_score+1.0
    if nrof_subwords>0:
        additional_freq["subjective_words"] = nrof_subwords*1.0
    if nrof_objwords>0:
        additional_freq["objective_words"] = nrof_objwords*1.0
    # Merge the two dicts (Python 2/3 compatible, unlike the original
    # dict(a.items() + b.items())).
    features = dict(pos_tag_freq)
    features.update(additional_freq)
    return features
def get_feature_set_SC2(tweet, sentimentvalues):
    """
    Feature set SC2: like SC but with *sentimentvalues* given as a
    pre-aggregated (positive, negative, objective) triple for the whole
    tweet rather than a per-word dict. Returns a dict of name -> float.
    """
    pos_tag_freq = {}
    additional_freq = {}
    for phrase in tweet.tagged_words:
        for word in phrase:
            try:
                tag = word['pos']
            except KeyError:
                continue
            pos_tag_freq[tag] = pos_tag_freq.get(tag, 0) + 1
            # Bug fix: aggregate counters previously read
            # additional_freq.get(tag, 0), pinning the counts at 1.
            if tag in ADJECTIVES:
                additional_freq['adjectives'] = additional_freq.get('adjectives', 0) + 1
            elif tag in ADVERBS:
                additional_freq['adverbs'] = additional_freq.get('adverbs', 0) + 1
            elif tag in PRONOUNS:
                additional_freq['pronoun'] = 1
    for key in pos_tag_freq.keys():
        pos_tag_freq[key] = pos_tag_freq[key]*1.0
    # Number of exclamation marks, number of emoticons (only when nonzero).
    emoticons = tweet.nrof_happyemoticons+tweet.nrof_sademoticons
    if emoticons>0:
        additional_freq['emoticons'] = emoticons*1.0
    if tweet.nrof_exclamations>0:
        additional_freq['exclamations'] = tweet.nrof_exclamations*1.0
    # Lexicon values: subjectivity = positive + negative mass; objectivity
    # is the third component.
    sub_score = sentimentvalues[0]+sentimentvalues[1]
    obj_score = sentimentvalues[2]
    if sub_score>0:
        additional_freq["sub_score"] = sub_score+1.0
    if obj_score>0:
        additional_freq["obj_score"] = obj_score+1.0
    # Merge the two dicts (Python 2/3 compatible, unlike the original
    # dict(a.items() + b.items())).
    features = dict(pos_tag_freq)
    features.update(additional_freq)
    return features
def get_feature_set_PA(tweet):
    """
    Feature set PA: no handcrafted features — the polarity classifier uses
    word tokens only.
    """
    return {}
def get_feature_set_PB(tweet):
    """
    Feature set PB: simple surface features for polarity — log of the text
    length, plus sad/happy emoticon counts when they are nonzero.
    """
    features = {
        'text_length': np.log(len(tweet.text))
    }
    emoticon_counts = (('sademoticons', tweet.nrof_sademoticons),
                      ('happyemoticons', tweet.nrof_happyemoticons))
    for name, count in emoticon_counts:
        if count > 0:
            features[name] = count
    return features
def get_feature_set_PC(tweet, sentimentvalues):
    """
    Feature set PC: PB's surface features plus POS-tag frequencies,
    aggregate adjective/adverb/pronoun features, and per-word lexicon
    polarity features (total positive/negative score and word counts).
    Returns a dict of feature name -> numeric value.
    """
    features= {
        'text_length': np.log(len(tweet.text))
    }
    if tweet.nrof_sademoticons>0:
        features['sademoticons'] = tweet.nrof_sademoticons
    if tweet.nrof_happyemoticons>0:
        features['happyemoticons'] = tweet.nrof_happyemoticons
    for phrase in tweet.tagged_words:
        for word in phrase:
            try:
                tag = word['pos']
            except KeyError:
                continue
            features[tag] = features.get(tag, 0) + 1
            # Bug fix: the aggregate counters previously read
            # features.get(tag, 0) and so double-counted the bare tag count
            # instead of accumulating the aggregate key.
            if tag in ADJECTIVES:
                features['adjectives'] = features.get('adjectives', 0) + 1
            elif tag in ADVERBS:
                features['adverbs'] = features.get('adverbs', 0) + 1
            elif tag in PRONOUNS:
                features['pronoun'] = 1
    # Coerce everything accumulated so far to float (as the original did).
    for key in features.keys():
        features[key] = features[key]*1.0
    # Lexicon features: total polarity scores and positive/negative word
    # counts (index 0 = positive mass, index 1 = negative mass).
    pos_score = 0
    neg_score = 0
    nrof_pos_words = 0
    nrof_neg_words = 0
    for word in sentimentvalues.keys():
        if sentimentvalues[word][0]>0:
            nrof_pos_words = nrof_pos_words + 1
            pos_score = pos_score + sentimentvalues[word][0]
        if sentimentvalues[word][1]>0:
            nrof_neg_words = nrof_neg_words + 1
            neg_score = neg_score + sentimentvalues[word][1]
    if neg_score>0:
        features['neg_score'] = neg_score+1.0
    if pos_score>0:
        features['pos_score'] = pos_score+1.0
    if nrof_pos_words>0:
        features['positive_words'] = nrof_pos_words*1.0
    if nrof_neg_words>0:
        features['negative_words'] = nrof_neg_words*1.0
    return features
def get_feature_set_PC2(tweet, sentimentvalues):
    """
    Return feature set PC2 for a tweet.

    Like PC, but sentimentvalues is already an aggregated
    (positive_score, negative_score) pair instead of a per-word dict.

    Bug fix: the aggregated 'adjectives'/'adverbs' counters previously read
    back the per-tag count (features.get(tag, 0)) instead of their own
    running total, so they were overwritten instead of accumulated.
    """
    features = {
        'text_length': np.log(len(tweet.text))
    }
    if tweet.nrof_sademoticons > 0:
        features['sademoticons'] = tweet.nrof_sademoticons
    if tweet.nrof_happyemoticons > 0:
        features['happyemoticons'] = tweet.nrof_happyemoticons
    # POS-tag frequency features; words missing a 'pos' key are skipped.
    for phrase in tweet.tagged_words:
        for word in phrase:
            try:
                tag = word['pos']
            except KeyError:
                continue
            features[tag] = features.get(tag, 0) + 1
            if tag in ADJECTIVES:
                features['adjectives'] = features.get('adjectives', 0) + 1
            elif tag in ADVERBS:
                features['adverbs'] = features.get('adverbs', 0) + 1
            elif tag in PRONOUNS:
                features['pronoun'] = 1
    # Coerce all counts to float for the downstream vectorizer.
    for key in features.keys():
        features[key] = features[key] * 1.0
    # Lexical features: pre-aggregated polarity scores.
    pos_score = sentimentvalues[0]
    neg_score = sentimentvalues[1]
    if pos_score > 0:
        features['pos_score'] = pos_score + 1.0
    if neg_score > 0:
        features['neg_score'] = neg_score + 1.0
    return features
def get_sentiment_values(setnr):
    """
    Load the pickled sentiment values for one dataset, or for all of them.

    setnr: 0=RandomSet, 1=RosenborgSet, 2=ErnaSet, 3=all three
    concatenated; None prompts the user interactively.

    Fixes: compared setnr with `is` (identity) instead of `==`; removed an
    unreachable trailing return; files are now closed via `with`.
    """
    if setnr is None:
        setnr = int(raw_input("Get which pickle set? 0: RandomSet 1: RoseborgSet 2: ErnaSet 3: All three ..."))
    if setnr == 3:
        # Fetch all sets and append them together.
        tweets = []
        for pickleset in sentiment_pickles:
            with open(pickleset, 'rb') as f:
                tweets = tweets + pickle.load(f)
        return tweets
    with open(sentiment_pickles[setnr], 'rb') as f:
        return pickle.load(f)
def get_google_sentiment_values(setnr):
    """
    Load the pickled Google sentiment values for one dataset, or for all.

    setnr: 0=RandomSet, 1=RosenborgSet, 2=ErnaSet, 3=all three
    concatenated; None prompts the user interactively.

    Fixes: compared setnr with `is` (identity) instead of `==`; removed an
    unreachable trailing return; files are now closed via `with`.
    """
    if setnr is None:
        setnr = int(raw_input("Get which pickle set? 0: RandomSet 1: RoseborgSet 2: ErnaSet 3: All three ..."))
    if setnr == 3:
        # Fetch all sets and append them together.
        tweets = []
        for pickleset in google_sentiment_pickles:
            with open(pickleset, 'rb') as f:
                tweets = tweets + pickle.load(f)
        return tweets
    with open(google_sentiment_pickles[setnr], 'rb') as f:
        return pickle.load(f)
# Pickle files with precomputed per-word sentiment values, one per dataset.
sentiment_pickles = ['models/sentimentvalues_random_dataset',
                 'models/sentimentvalues_rosenborg_dataset',
                 'models/sentimentvalues_erna_dataset']
# Pickle files with sentiment values from the Google-based pipeline.
google_sentiment_pickles = ['models/google_sentimentvalues_random_dataset',
                 'models/google_sentimentvalues_rosenborg_dataset',
                 'models/google_sentimentvalues_erna_dataset']
# POS tags grouped by word class, as used by the feature-set functions
# above. Presumably from a Norwegian tagset -- verify against the tagger.
ADJECTIVES = ['ADJ','ADJC','ADJS']
ADVERBS = ['ADV','ADVm','ADVneg','ADVplc','ADVtemp']
PRONOUNS = ['PN','PNabs','PNana','PNdem','PNposs','PNrefl','PNrel']
NOUNS = ['CN','N','Nbare','Ncomm','NDV','NFEM','NMASC','NNEUT','NNO','Np','Nrel','Nspat']
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,060
|
andrely/twitter-sentiment
|
refs/heads/master
|
/retriever_tweepy.py
|
import tweepy
import utils
# NOTE(review): hard-coded Twitter API credentials committed to source
# control -- these should be rotated and loaded from config/environment.
OAUTH_API_KEY = "JvgeRvICbMtWYcmhTug3w"
OAUTH_API_SECRET = "CzIwJm5yUi6hTHeLjrYMHZIMoszkNCD1MqgHFfO5qI"
ACCESS_TOKEN = "462254796-mLqIDTfa1e0ODYfksV1CiEunCIT5MuJ3avvp2kt9"
ACCESS_SECRET = "EsRjaoF8ZAkQSNEk8s72Kf3aEStFV3k4epBLMsefDZtKd"
class TweetRetriever(object):
    """
    Handler for retrieving tweets using the twitter API through Tweepy.

    The constructor's query string may end with two extra space-separated
    tokens, interpreted as since/until date bounds for the search.
    """
    # Class-level default; overwritten per instance in __init__.
    query = ""
    def __init__(self, query):
        # Authenticate against the Twitter API with the module credentials.
        auth = tweepy.OAuthHandler(OAUTH_API_KEY, OAUTH_API_SECRET)
        auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
        self.api = tweepy.API(auth)
        print "Connection to Twitter API is up."
        arguments = query.split(' ')
        if len(arguments)>2:
            # The last two tokens are treated as since/until dates.
            self.since=arguments[len(arguments)-2]
            self.until=arguments[len(arguments)-1]
            self.query = " ".join(arguments[:len(arguments)-2])
        else:
            self.since=None
            self.until=None
            self.query = query
    def retrieve_for_dataset(self):
        """
        Return a sample of tweets and add to current dataset text file.

        Fetches up to 500 Norwegian-language tweets matching self.query
        (bounded by since/until when given), converts them to TSV lines,
        and appends them to a dataset chosen interactively via utils.
        """
        if self.since == None and self.until==None:
            c = tweepy.Cursor(self.api.search, q=self.query, lang="no")
        else:
            c = tweepy.Cursor(self.api.search, q=self.query, since=self.since,until=self.until,lang="no")
        results = []
        print self.query
        print self.since
        print self.until
        for tweet in c.items(500):
            results.append(tweet)
        results_list = utils.get_resultsets_text(results)
        dataset = utils.select_complete_dataset()
        utils.append_to_dataset(results_list, dataset)
        print "Fetched "+str(len(results_list)) +" tweets"
    def retrieve_as_tweets(self):
        """
        Fetch a sample of tweets and return them as tweets objects.

        NOTE(review): unimplemented stub; always returns an empty list.
        """
        tweets = []
        return tweets
    def retrieve_stream(self):
        """
        Fetch tweets from the twitter stream.

        NOTE(review): unimplemented stub; always returns an empty list.
        """
        tweets =[]
        return tweets
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,061
|
andrely/twitter-sentiment
|
refs/heads/master
|
/kmeans.py
|
'''
Created on 10. jan. 2015
@author: JohnArne
'''
class Kmeans(object):
def __init__(self):
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,062
|
andrely/twitter-sentiment
|
refs/heads/master
|
/analyzer.py
|
'''
Created on 7. nov. 2014
@author: JohnArne
'''
from lexicon import pos_mappings
from operator import itemgetter
class Analyzer:
    """
    Computes descriptive statistics over a tagged tweet dataset:
    POS-tag frequencies (overall and per sentiment class), word/user/
    link/emoticon totals, and sentiment-class counts.
    """
    def __init__(self, dataset, tweets):
        # dataset: identifier used for labelling the stored stats output.
        # tweets: preprocessed tweet objects (with tagged_words, links,
        # users_mentioned, emoticon counts, get_sentiment()).
        self.dataset = dataset
        self.tweets = tweets
    def analyze(self):
        """
        Performs an analysis of the given dataset.

        Accumulates counters into a Stats object, prints per-tag
        frequencies and averages, writes the stats as a LaTeX table via
        Stats.store_tex(), and returns (sorted per-tag averages,
        [avg adjectives, avg adverbs, avg nouns, avg verbs]).
        """
        print "Analyzing... "
        stats = Stats(self.dataset)
        stats.nrof_tweets = len(self.tweets)
        users = []
        pos_freqs = {}
        #'ADJ','ADJC','ADJS','ADV', 'PNrefl',
        # 'PN','NFEM','NMASC','DET','CONJS','N','P','INTRJC','V','Np','PRtinf','CONJ','NNEUT'
        for tweet in self.tweets:
            stats.nrof_words = stats.nrof_words + tweet.word_count
            users.append(tweet.user)
            # The three branches below are identical except for which
            # per-class counters they bump (negative/neutral/positive).
            # NOTE(review): the positive branch uses the misspelled
            # *_in_postive attribute names defined on Stats.
            if tweet.get_sentiment()=="negative":
                stats.nrof_negativetweets = stats.nrof_negativetweets + 1
                for phrase in tweet.tagged_words:
                    for word in phrase:
                        if "pos" not in word.keys(): continue
                        pos_freqs[word["pos"]] = pos_freqs.get(word["pos"],0) +1
                        if word["pos"] in pos_mappings.ADJECTIVES:
                            stats.nrof_adjectives = stats.nrof_adjectives + 1
                            stats.nrof_adjectives_in_negative = stats.nrof_adjectives_in_negative + 1
                        if word["pos"] in pos_mappings.NOUNS:
                            stats.nrof_nouns =stats.nrof_nouns +1
                            stats.nrof_nouns_in_negative = stats.nrof_nouns_in_negative+1
                        if word["pos"] in pos_mappings.ADVERBS:
                            stats.nrof_adverbs = stats.nrof_adverbs+1
                            stats.nrof_adverbs_in_negative = stats.nrof_adverbs_in_negative+1
                        if word["pos"] in pos_mappings.VERBS:
                            stats.nrof_verbs = stats.nrof_verbs+1
            elif tweet.get_sentiment()=="neutral":
                stats.nrof_neutraltweets = stats.nrof_neutraltweets + 1
                for phrase in tweet.tagged_words:
                    for word in phrase:
                        if "pos" not in word.keys(): continue
                        pos_freqs[word["pos"]] = pos_freqs.get(word["pos"],0) +1
                        if word["pos"] in pos_mappings.ADJECTIVES:
                            stats.nrof_adjectives = stats.nrof_adjectives + 1
                            stats.nrof_adjectives_in_neutral = stats.nrof_adjectives_in_neutral + 1
                        if word["pos"] in pos_mappings.NOUNS:
                            stats.nrof_nouns =stats.nrof_nouns +1
                            stats.nrof_nouns_in_neutral = stats.nrof_nouns_in_neutral+1
                        if word["pos"] in pos_mappings.ADVERBS:
                            stats.nrof_adverbs = stats.nrof_adverbs+1
                            stats.nrof_adverbs_in_neutral = stats.nrof_adverbs_in_neutral+1
                        if word["pos"] in pos_mappings.VERBS:
                            stats.nrof_verbs = stats.nrof_verbs+1
            elif tweet.get_sentiment()=="positive":
                stats.nrof_positivetweets = stats.nrof_positivetweets + 1
                for phrase in tweet.tagged_words:
                    for word in phrase:
                        if "pos" not in word.keys(): continue
                        pos_freqs[word["pos"]] = pos_freqs.get(word["pos"],0) +1
                        if word["pos"] in pos_mappings.ADJECTIVES:
                            stats.nrof_adjectives = stats.nrof_adjectives + 1
                            stats.nrof_adjectives_in_postive = stats.nrof_adjectives_in_postive + 1
                        if word["pos"] in pos_mappings.NOUNS:
                            stats.nrof_nouns =stats.nrof_nouns +1
                            stats.nrof_nouns_in_postive = stats.nrof_nouns_in_postive+1
                        if word["pos"] in pos_mappings.ADVERBS:
                            stats.nrof_adverbs =stats.nrof_adverbs +1
                            stats.nrof_adverbs_in_postive = stats.nrof_adverbs_in_postive+1
                        if word["pos"] in pos_mappings.VERBS:
                            stats.nrof_verbs = stats.nrof_verbs+1
            stats.nrof_links = stats.nrof_links + len(tweet.links)
            stats.nrof_users_mentioned = stats.nrof_users_mentioned + len(tweet.users_mentioned)
            stats.nrof_emoticons = stats.nrof_emoticons + tweet.nrof_happyemoticons + tweet.nrof_sademoticons
        avg_list = []
        pos_list = []
        # Drop possessive pronoun and common noun tags from the report.
        if 'PNposs' in pos_freqs.keys(): pos_freqs.pop('PNposs')
        if 'Ncomm' in pos_freqs.keys(): pos_freqs.pop('Ncomm')
        print "POStag averages "
        for key in pos_freqs.keys():
            print key, " ", pos_freqs[key]
            pos_list.append(key)
            avg_list.append(pos_freqs[key]*1.0/stats.nrof_tweets)
        # Sort both lists together alphabetically by tag name.
        sortedlists = [list(x) for x in zip(*sorted(zip(pos_list,avg_list), key=itemgetter(0)))]
        avg_list = sortedlists[1]
        pos_list = sortedlists[0]
        for p,a in zip(pos_list, avg_list):
            print p, " ",a
        stats.nrof_users = len(set(users))
        stats.compute()
        stats.store_tex()
        #Return list to go to plottings
        return avg_list, [stats.avg_adjectives, stats.avg_adverbs, stats.avg_nouns, stats.avg_verbs]
def pos_tag_analyze(tweets, postfix=""):
    """
    Perform a comparison of POS tags between different sentiment classes
    in the dataset.

    Returns (subjectivity_data, polarity_data): per-tag scores in [-1, 1]
    contrasting subjective vs objective tweets and negative vs positive
    tweets. The `postfix` parameter is accepted but unused here.

    NOTE(review): raises ZeroDivisionError for a tag that never occurs in
    either class of a pair (denominator is the summed class counts).
    """
    data = {} #dict to contain all the pos tags and their given values
    #instantiate dict
    # Reflexive/possessive pronouns collapse into PN, common nouns into N.
    for t in tweets:
        for phrase in t.tagged_words:
            for word in phrase:
                try:
                    tag = word['pos']
                    if tag=="PNrefl": tag = "PN"
                    if tag=="PNposs": tag = "PN"
                    if tag=="Ncomm": tag = "N"
                    data[tag] = [0 for _ in xrange(4)]
                except KeyError:
                    continue
    #Count the pos tag frequencies for the different tweet classes
    #A dict of lists, containing frequencies for [subjective,objective,positive,negative]
    for t in tweets:
        for phrase in t.tagged_words:
            for word in phrase:
                try:
                    tag = word['pos']
                    if tag=="PNrefl": tag = "PN"
                    if tag=="PNposs": tag = "PN"
                    if tag=="Ncomm": tag = "N"
                    if t.subjectivity==1:
                        #subjective
                        data[tag][0] = data[tag][0] + 1
                        if t.polarity==1:
                            #positive
                            data[tag][2] = data[tag][2] + 1
                        else:
                            #negative
                            data[tag][3] = data[tag][3] + 1
                    else:
                        #objective
                        data[tag][1] = data[tag][1] + 1
                except KeyError:
                    continue
    #Calculate
    subjectivity_data ={}
    polarity_data = {}
    for key in data.keys():
        print key, " ",data[key]
        # Normalized difference: (subjective - objective) / total, and
        # (negative - positive) / total.
        subjectivity_data[key] = (data[key][0] - data[key][1]*1.0) / (data[key][0] + data[key][1]*1.0)
        polarity_data[key] = (data[key][3] - data[key][2]*1.0) / (data[key][3] + data[key][2]*1.0)
        # NOTE(review): ad-hoc manual adjustment of the ADJC score --
        # looks like a presentation fudge; confirm whether it is intended.
        if key=="ADJC" and polarity_data[key]>0.6: polarity_data[key]=polarity_data[key]-0.3
    for key in data.keys():
        print key, " ",subjectivity_data[key]
        print key, " ",polarity_data[key]
    return subjectivity_data, polarity_data
def sentiment_class_analysis(self, dataset2, tweets2, dataset3, tweets3):
    """
    Compare all three datasets with each other, with respect to their
    sentiment annotations.

    NOTE(review): unimplemented stub that returns None. It takes `self`
    but is defined at module level -- presumably intended as an Analyzer
    method; confirm before use.
    """
class Stats:
    """
    Contains and formats the statistics behind a dataset analysis.

    The nrof_* counters are filled in externally (see Analyzer.analyze);
    compute() then derives averages and percentages, and store_tex()
    writes a LaTeX table of the headline numbers to stats_tex/<dataset>.

    Bug fix: the "Links" row in store_tex previously printed
    nrof_users_mentioned instead of nrof_links.
    """
    def __init__(self, dataset):
        # dataset: name used for the output file and the table caption.
        self.dataset = dataset
        # Raw counters, incremented by the caller.
        self.nrof_tweets = 0
        self.nrof_words = 0
        self.nrof_users = 0
        self.nrof_adjectives = 0
        self.nrof_nouns = 0
        self.nrof_verbs = 0
        self.nrof_adverbs = 0
        self.nrof_links = 0
        self.nrof_users_mentioned = 0
        self.nrof_emoticons = 0
        self.nrof_negativetweets = 0
        self.nrof_neutraltweets = 0
        self.nrof_positivetweets = 0
        # Per-sentiment-class word-class counters. NOTE: the "postive"
        # misspellings are kept -- callers assign these names directly.
        self.nrof_adjectives_in_negative = 0
        self.nrof_adjectives_in_neutral = 0
        self.nrof_adjectives_in_postive = 0
        self.nrof_nouns_in_negative = 0
        self.nrof_nouns_in_neutral = 0
        self.nrof_nouns_in_postive = 0
        self.nrof_adverbs_in_negative = 0
        self.nrof_adverbs_in_neutral = 0
        self.nrof_adverbs_in_postive = 0
        # Derived values, filled in by compute().
        self.avg_words = 0
        self.avg_adjectives = 0
        self.avg_nouns = 0
        self.avg_verbs = 0
        self.avg_adverbs = 0
        self.tweetsperuser = 0
        self.prc_negativetweets = 0.0
        self.prc_neutraltweets = 0.0
        self.prc_positivetweets = 0.0
        # Average number of word-class occurrences per tweet class.
        self.avg_adjectives_in_negative = 0.0
        self.avg_adjectives_in_neutral = 0.0
        self.avg_adjectives_in_positive = 0.0
        self.avg_nouns_in_negative = 0.0
        self.avg_nouns_in_neutral = 0.0
        self.avg_nouns_in_positive = 0.0
        self.avg_adverbs_in_negative = 0.0
        self.avg_adverbs_in_neutral = 0.0
        self.avg_adverbs_in_positive = 0.0
    def compute(self):
        """
        Prompts the computation of statistics not explicitly given:
        per-tweet averages, tweets-per-user, and class percentages.
        """
        self.avg_words = self.division_else_zero(self.nrof_words, self.nrof_tweets)
        self.avg_adjectives = self.division_else_zero(self.nrof_adjectives, self.nrof_tweets)
        self.avg_nouns = self.division_else_zero(self.nrof_nouns, self.nrof_tweets)
        self.avg_verbs = self.division_else_zero(self.nrof_verbs, self.nrof_tweets)
        self.avg_adverbs = self.division_else_zero(self.nrof_adverbs, self.nrof_tweets)
        self.tweetsperuser = self.division_else_zero(self.nrof_tweets, self.nrof_users)
        self.prc_negativetweets = self.division_else_zero(self.nrof_negativetweets, self.nrof_tweets) * 100
        self.prc_neutraltweets = self.division_else_zero(self.nrof_neutraltweets, self.nrof_tweets) * 100
        self.prc_positivetweets = self.division_else_zero(self.nrof_positivetweets, self.nrof_tweets) * 100
        self.avg_adjectives_in_negative = self.division_else_zero(self.nrof_adjectives_in_negative, self.nrof_negativetweets)
        self.avg_adjectives_in_neutral = self.division_else_zero(self.nrof_adjectives_in_neutral, self.nrof_neutraltweets)
        self.avg_adjectives_in_positive = self.division_else_zero(self.nrof_adjectives_in_postive, self.nrof_positivetweets)
        self.avg_nouns_in_negative = self.division_else_zero(self.nrof_nouns_in_negative, self.nrof_negativetweets)
        self.avg_nouns_in_neutral = self.division_else_zero(self.nrof_nouns_in_neutral, self.nrof_neutraltweets)
        self.avg_nouns_in_positive = self.division_else_zero(self.nrof_nouns_in_postive, self.nrof_positivetweets)
        self.avg_adverbs_in_negative = self.division_else_zero(self.nrof_adverbs_in_negative, self.nrof_negativetweets)
        self.avg_adverbs_in_neutral = self.division_else_zero(self.nrof_adverbs_in_neutral, self.nrof_neutraltweets)
        self.avg_adverbs_in_positive = self.division_else_zero(self.nrof_adverbs_in_postive, self.nrof_positivetweets)
    def store_tex(self):
        """
        Stores the statistics of the given dataset as a .tex friendly
        text file under stats_tex/<dataset>.
        """
        printstring = "\\begin{table} \n \\begin{center} \n \\caption{Table of statistics for "+self.dataset+"}"
        printstring = printstring + "\n \\begin{tabular}{|l|r|}"
        printstring = printstring+ "\n Number of tweets & "+str(self.nrof_tweets) + "\\\\"
        printstring = printstring+ "\n Words & "+str(self.nrof_words) + "\\\\"
        printstring = printstring+ "\n Users & "+str(self.nrof_users) + "\\\\"
        printstring = printstring+ "\n \\hline"
        printstring = printstring+ "\n Users mentioned & "+str(self.nrof_users_mentioned) + "\\\\"
        # Fixed: this row previously repeated nrof_users_mentioned.
        printstring = printstring+ "\n Links & "+str(self.nrof_links) + "\\\\"
        printstring = printstring+ "\n Emoticons & = "+str(self.nrof_emoticons) + "\\\\"
        printstring = printstring+ "\n \\hline"
        printstring = printstring+ "\n Tweets per user & "+str(self.tweetsperuser) + "\\\\"
        printstring = printstring+ "\n Words per tweet & "+str(self.avg_words) + "\\\\"
        printstring = printstring+ "\n \\hline"
        printstring = printstring+ "\n Negative tweets & "+str(self.nrof_negativetweets)+"("+str(self.prc_negativetweets)+"\\%)" + "\\\\"
        printstring = printstring+ "\n Neutral tweets & "+str(self.nrof_neutraltweets)+ "("+str(self.prc_neutraltweets)+"\\%)" + "\\\\"
        printstring = printstring+ "\n Positive tweets & "+str(self.nrof_positivetweets)+ "(" +str(self.prc_positivetweets)+"\\%)" + "\\\\"
        printstring = printstring+ "\n \\end{tabular} \n \\end{center} \n \\end{table} \n"
        # 'with' guarantees the handle is closed (it previously leaked).
        with open("stats_tex/"+str(self.dataset), "w") as outfile:
            outfile.write(printstring)
    def division_else_zero(self, variable1, variable2):
        """
        Divides the first variable by the second as floats, returning 0.0
        when the denominator is zero.
        """
        if variable2!=0:
            return (variable1*1.0 / variable2*1.0)
        else:
            return 0.0
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,063
|
andrely/twitter-sentiment
|
refs/heads/master
|
/twitter/retriever.py
|
'''
Created on 12. feb. 2014
@author: JohnArne
'''
from __future__ import unicode_literals
import requests
import json
from datetime import date, timedelta
from urlparse import parse_qs
#from requests_oauthlib import OAuth
# Twitter OAuth1 endpoint URLs.
REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token"
AUTHORIZE_URL = "https://api.twitter.com/oauth/authorize?oauth_token="
ACCESS_TOKEN_URL = "https://api.twitter.com/oauth/access_token"
# NOTE(review): hard-coded Twitter credentials committed to source
# control -- rotate them and load from config/environment instead.
CONSUMER_KEY = "bERRpxqRNywXn2goGyDLA"
CONSUMER_SECRET = "EesTZzoqKNXerlntfkmXNqnW5BKBvRjJIeoBtqOe2c"
OAUTH_TOKEN = "14317755-wlQ7wAY2S5oGnHpVnpTuPEjhbZ73OBPUrDWCWyiC5"
OAUTH_TOKEN_SECRET = "2mVNpK0PC45sKOK290oDBlYaDtzBMkeZR2qhnOGynQ"
def setup_oauth(config):
    """Authorize your app via identifier.

    Runs the three-legged OAuth1 flow: fetch a request token, direct the
    user to the authorize URL, read the verifier from stdin, and exchange
    it for an access token. Returns (token, secret).

    NOTE(review): `OAuth1` is never imported in this module (the
    requests_oauthlib import above is commented out), so calling this
    raises NameError as-is.
    """
    # Request token
    oauth = OAuth1(config['CONSUMER_KEY'], client_secret=config['CONSUMER_SECRET'])
    r = requests.post(url=REQUEST_TOKEN_URL, auth=oauth)
    credentials = parse_qs(r.content)
    resource_owner_key = credentials.get('oauth_token')[0]
    resource_owner_secret = credentials.get('oauth_token_secret')[0]
    # Authorize
    authorize_url = AUTHORIZE_URL + resource_owner_key
    print 'Please go here and authorize: ' + authorize_url
    verifier = raw_input('Please input the verifier: ')
    oauth = OAuth1(config['CONSUMER_KEY'],
                   client_secret=config['CONSUMER_SECRET'],
                   resource_owner_key=resource_owner_key,
                   resource_owner_secret=resource_owner_secret,
                   verifier=verifier)
    # Finally, Obtain the Access Token
    r = requests.post(url=ACCESS_TOKEN_URL, auth=oauth)
    credentials = parse_qs(r.content)
    token = credentials.get('oauth_token')[0]
    secret = credentials.get('oauth_token_secret')[0]
    return token, secret
def get_oauth(config):
    """Build an OAuth1 auth object from the stored consumer and access
    credentials in *config*."""
    return OAuth1(
        config['CONSUMER_KEY'],
        client_secret=config['CONSUMER_SECRET'],
        resource_owner_key=config['OAUTH_TOKEN'],
        resource_owner_secret=config['OAUTH_TOKEN_SECRET'])
class Tweet:
    """Lightweight wrapper around a raw tweet JSON payload."""

    # Top-level payload keys copied verbatim onto the instance.
    FIELDS = ('id', 'text', 'lang')

    def __init__(self, data):
        self.data = data
        for name in self.FIELDS:
            setattr(self, name, data[name])
        self.user = data['user']['screen_name']
        self.sentiment = None
        self.filtered_text = None

    def __unicode__(self):
        prefix = (u"<%s> " % self.sentiment).ljust(11) if self.sentiment else u""
        return prefix + u"@%s: %s" % (self.user, self.text)
class Twitter:
    """Thin client for the Twitter REST API v1.1 search endpoint."""

    RESOURCE_URL_TEMPLATE = "https://api.twitter.com/1.1/%s.json"

    def __init__(self, config):
        self.oauth = get_oauth(config)

    def api_resource(self, resource):
        """Expand a resource name into its full API URL."""
        return Twitter.RESOURCE_URL_TEMPLATE % resource

    def api_request(self, resource, payload):
        """GET the given resource with *payload* as query parameters and
        return the decoded JSON response."""
        response = requests.get(url=self.api_resource(resource),
                                auth=self.oauth, params=payload)
        return response.json()

    def search(self, term, result_type='popular', count=10):
        """Search English-language tweets for *term*; returns the raw
        status dicts."""
        payload = {
            'q': term,
            'result_type': result_type,
            'count': count,
            'lang': 'en',
        }
        return self.api_request("search/tweets", payload)["statuses"]
class NotEnoughTweetsError(ValueError):
    # Presumably raised when a search yields fewer tweets than required;
    # no raise site is visible in this module -- confirm against callers.
    pass
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,064
|
andrely/twitter-sentiment
|
refs/heads/master
|
/twitter/retrieve_curl.py
|
'''
Created on 10. mars 2014
@author: JohnArne
'''
#Retrieves tweets from website using curl calls, and stores it locally.
import urllib
import urllib2
import json
#import pycurl
import requests
#def retrieve_tweets_curl():
# url_string = 'http://vm-6123.idi.ntnu.no:9200/_all/_search?pretty'
# query_string = '{"from":0, "size":100, "query": {"match_all": {}}, "filter": {"bool": {"must": [ {"match_all": {}}, {"terms": {"_type": ["\"article\""] }}, {"fquery": {"query": {"field": {"type": {"query": "\"tweet\""}}}}}] }}, "sort": [ {"published": {"order": "\"desc\"" }} ] }'
# query = '{"match_all": {}}'
# filter = '{"bool": {"must": [ {"match_all": {}}, {"terms": {"_type": ["\"article\""] }}, {"fquery": {"query": {"field": {"type": {"query": "\"tweet\""}}}}}] }}'
# sort = '[ {"published": {"order": "\"desc\"" }} ]'
# print pycurl.version_info()
#
# return tweets
"""
Retrieve tweets using web request.
"""
def retrieve_tweets():
    """
    Retrieve tweets from the local Elasticsearch instance via a plain
    urllib2 POST and return the raw response body as a string.

    NOTE(review): query_string/query/filter/sort are built but unused
    (and `filter` shadows the builtin); only from/size are actually sent.
    """
    url_string = 'http://vm-6123.idi.ntnu.no:9200/_all/_search?pretty'
    query_string = '{"from":0, "size":100, "query": {"match_all": {}}, "filter": {"bool": {"must": [ {"match_all": {}}, {"terms": {"_type": ["\"article\""] }}, {"fquery": {"query": {"field": {"type": {"query": "\"tweet\""}}}}}] }}, "sort": [ {"published": {"order": "\"desc\"" }} ] }'
    query = '{"match_all": {}}'
    filter = '{"bool": {"must": [ {"match_all": {}}, {"terms": {"_type": ["\"article\""] }}, {"fquery": {"query": {"field": {"type": {"query": "\"tweet\""}}}}}] }}'
    sort = '[ {"published": {"order": "\"desc\"" }} ]'
    params = {'from': 0,
              'size': 10
              }
#              'query': {"match_all": {}},
#              'filter': {"bool": {"must": [ {"match_all": {}}, {"terms": {"_type": ["article"] }}, {"fquery": {"query": {"field": {"type": {"query": "tweet"}}}}}] }},
#              'sort': [ {"published": {"order": "\"desc\"" }} ]
#              }
    # Supplying data makes urllib2 issue a POST request.
    data = urllib.urlencode(params)
    request = urllib2.Request(url_string, data)
    print "Request" + str(request.get_data())
    response = urllib2.urlopen(request)
    tweets = response.read()
    return tweets
def retrieve_tweets_by_requests():
    """
    NOTE(review): broken stub -- `tweets` is never assigned, so calling
    this raises NameError; url_string/query_string are built but unused.
    The requests-based retrieval was apparently never implemented.
    """
    url_string = 'http://vm-6123.idi.ntnu.no:9200/_all/_search?pretty'
    query_string = '{"from":0, "size":100, "query": {"match_all": {}}, "filter": {"bool": {"must": [ {"match_all": {}}, {"terms": {"_type": ["\"article\""] }}, {"fquery": {"query": {"field": {"type": {"query": "\"tweet\""}}}}}] }}, "sort": [ {"published": {"order": "\"desc\"" }} ] }'
    return tweets
#Store the tweets in a tsv with only necessary information
def store_tweets():
    # NOTE(review): unimplemented stub; assigns None and returns nothing.
    file = None
if __name__ == '__main__':
    # Ad-hoc manual smoke test: fetch the raw response and dump it.
    tweets = retrieve_tweets()
    print "twat" + tweets
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,065
|
andrely/twitter-sentiment
|
refs/heads/master
|
/models/model.py
|
'''
Created on 15. mai 2014
@author: JohnArne
'''
import logging
import features
import utils
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.feature_extraction.text import TfidfVectorizer, TfidfTransformer
from sklearn.cross_validation import train_test_split, StratifiedKFold
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.grid_search import GridSearchCV
from sklearn import metrics
import numpy as np
import scipy.sparse as sp
from sklearn.feature_extraction.dict_vectorizer import DictVectorizer
import codecs
class Model(object):
    """
    Class for abstracting the different classification models.

    Subclasses are expected to set self.classifier (an sklearn estimator)
    and extend the grid-search parameter space via extra_params. Training
    combines TF-IDF-weighted text counts with TF-IDF-weighted dict
    features (unless only_text_features is set) and grid-searches the
    classifier with stratified 10-fold cross-validation.
    """
    def __init__(self, train_tweets, train_targets, vect_options, tfidf_options, extra_params):
        # Base grid of shared hyperparameters; the commented entries are
        # kept for reference. Model subclasses extend it via extra_params.
        self.grid_params = {
#                   'vect__ngram_range': [(1,1),(1,2),(2,2)],
#                   'tfidf__use_idf': (True,False),
#                   'tfidf__smooth_idf': (True, False),
#                   'tfidf__sublinear_tf': (True, False),
                   }
        self.grid_params = dict(self.grid_params.items()+extra_params.items())
        self.vect_options = vect_options
        self.tfidf_options = tfidf_options
        self.feature_set = {}
        self.train_tweets = train_tweets
        self.train_targets = train_targets
        # When True only the text vectors are used (feature sets SA/PA).
        self.only_text_features = False
    def train_on_feature_set(self, cross_validate=True, use_tfidf=True):
        """
        Performs training with the given model using the given feature set.

        Vectorizes tweet text (counts + TF-IDF), optionally appends the
        TF-IDF-weighted dict feature set, grid-searches the classifier,
        stores the best estimator/params/score on self, persists them via
        utils.store_model, and returns the fitted GridSearchCV object.
        """
        #Establish document text feature vectors
        print "Vectorizing"
#        self.tokenizer = CountVectorizer().build_tokenizer()
        self.vect = CountVectorizer(**self.vect_options)
        self.tfidf_transformer = TfidfTransformer(**self.tfidf_options)
        self.dict_transformer = TfidfTransformer(**self.tfidf_options)
#        train_counts_tf = tfidf_transformer.fit_transform(train_counts)
        count_vector = self.vect.fit_transform([t.text for t in self.train_tweets])
        tfidf_count = self.tfidf_transformer.fit_transform(count_vector)
        if self.only_text_features:
            combined_vector = tfidf_count
        else:
            # Vectorize the dict feature set and dump both raw and
            # TF-IDF-weighted versions to disk for inspection.
            self.dict_vectorizer = DictVectorizer()
            dict_vector = self.dict_vectorizer.fit_transform(self.feature_set)
            f=codecs.open("feature_set.txt", "w", "utf8")
            for d in dict_vector:
                f.write(d.__str__())
            f.close()
            tfidf_dict = self.dict_transformer.fit_transform(dict_vector)
            f=codecs.open("feature_set_tdidf.txt", "w", "utf8")
            for d in tfidf_dict:
                f.write(d.__str__())
            f.close()
            combined_vector = sp.hstack([tfidf_count, tfidf_dict])
#        combined_features = FeatureUnion()
        #Crossvalidation
        cross_validation = StratifiedKFold(self.train_targets, n_folds=10)
        #Build a Pipeline with TFidfVectorizer and classifier
        pipeline_classifier = Pipeline([
#                                       ('vect', self.vect),
#                                       ('tfidf', self.tfidf_transformer),
                                        ('clf', self.classifier)
                                        ])
        #Perform grid search
        print "Performing grid search with classifier of instance ",str(self.classifier.__class__.__name__)
        self.grid = GridSearchCV(pipeline_classifier, self.grid_params, cv=cross_validation, refit=True, n_jobs=-1,verbose=1)
        self.grid.fit(combined_vector, self.train_targets)
        self.best_estimator = self.grid.best_estimator_
        self.best_parameters = self.grid.best_params_
        self.best_score = self.grid.best_score_
        print "Results for ",self.classifier.__class__.__name__
        print "Best params: ", self.best_parameters
        print "Best score: ", self.best_score
        print "Storing estimator... "
        utils.store_model(self.classifier.__class__.__name__, self.best_parameters, self.best_score)
        return self.grid
    def grid_search_on_text_features(self, cross_validate=True, file_postfix=""):
        """
        Performs a grid search using text features on the given dataset.
        Stores the parameters for the optimal classifier.

        Unlike train_on_feature_set, this replaces self.grid_params with a
        text-only search space and uses TfidfVectorizer directly in the
        pipeline.
        """
        self.grid_params = {
                   'vect__ngram_range': [(1,1),(1,2),(2,2),(1,3),(2,3),(3,3),(1,4)],
                   'vect__use_idf': (True,False),
                   'vect__smooth_idf': (True, False),
                   'vect__sublinear_tf': (True, False),
                   'vect__max_df': (0.5,),
                   }
        self.vect = TfidfVectorizer()
        cross_validation = StratifiedKFold(self.train_targets, n_folds=10)
        #Build a Pipeline with TFidfVectorizer and classifier
        pipeline_classifier = Pipeline([
                                        ('vect', self.vect),
                                        ('clf', self.classifier)]
                                       )
        #Perform grid search
        print "Performing grid search with classifier of instance ",str(self.classifier.__class__.__name__)
        self.grid = GridSearchCV(pipeline_classifier, self.grid_params, cv=cross_validation, refit=True, n_jobs=-1,verbose=1)
        self.grid.fit([t.text for t in self.train_tweets], self.train_targets)
        self.best_estimator = self.grid.best_estimator_
        self.best_parameters = self.grid.best_params_
        self.best_score = self.grid.best_score_
        print "Results for ",self.classifier.__class__.__name__
        print "Best params: ", self.best_parameters
        print "Best score: ", self.best_score
        print "Storing estimator... "
        utils.store_model(self.classifier.__class__.__name__, self.best_parameters, self.best_score, file_postfix=file_postfix)
        return self.grid
    def classify(self, tweets, sentimentvalues=None):
        """
        Performs the classification process on list of tweets.

        Transforms the tweets with the fitted vectorizers (text, plus dict
        features unless only_text_features) and predicts with the best
        estimator found during training.
        """
        if sentimentvalues!=None:
            self.test_words_and_values = sentimentvalues
        count_vector = self.vect.transform([t.text for t in tweets])
        tfidf_count = self.tfidf_transformer.transform(count_vector)
        if self.only_text_features:
            combined_vector = tfidf_count
        else:
            dict_vector = self.dict_vectorizer.transform([features.get_feature_set(t, self.featureset, v) for t,v in zip(tweets, self.test_words_and_values)])
            tfidf_dict = self.dict_transformer.transform(dict_vector)
            combined_vector = sp.hstack([tfidf_count, tfidf_dict])
        predictions = self.best_estimator.predict(combined_vector)
        return predictions
    def classify_text(self, texts):
        """
        Performs classification with only text features.

        texts: plain strings (not tweet objects).
        """
        count_vector = self.vect.transform([t for t in texts])
        text_vector = self.tfidf_transformer.transform(count_vector)
        predictions = self.best_estimator.predict(text_vector)
        return predictions
    def test_and_return_results(self, test_tweets, test_targets, sentimentvalues):
        """
        Tests the classifier on a given test set, and returns the
        accuracy, precision, recall, and f1 score (computed on the
        binary-reduced targets).
        """
        self.test_words_and_values = sentimentvalues
        predictions = self.classify(test_tweets)
        binary_predictions = utils.reduce_targets(predictions)
        binary_test_targets = utils.reduce_targets(test_targets)
        accuracy = metrics.accuracy_score(binary_test_targets, binary_predictions)
        precision = metrics.precision_score(binary_test_targets, binary_predictions)
        recall = metrics.recall_score(binary_test_targets, binary_predictions)
        f1_score = metrics.f1_score(binary_test_targets, binary_predictions)
        print "Scores: ", accuracy, precision, recall, f1_score
        return accuracy, precision, recall, f1_score
    def get_correctly_classified_tweets(self, tweets_and_sentiment):
        """
        Classifies the given set of tweets and returns the ones that were
        correctly classified, along with their sentiment values.

        tweets_and_sentiment: iterable of (tweet, sentimentvalues) pairs.
        Correctness is judged against subjectivity targets derived via
        utils.make_subjectivity_targets.
        """
        tweets, sentimentvalues = zip(*tweets_and_sentiment)
        if sentimentvalues!=None:
            self.test_words_and_values = sentimentvalues
        count_vector = self.vect.transform([t.text for t in tweets])
        tfidf_count = self.tfidf_transformer.transform(count_vector)
        if self.only_text_features:
            combined_vector = tfidf_count
        else:
            dict_vector = self.dict_vectorizer.transform([features.get_feature_set(t, self.featureset, v) for t,v in zip(tweets, self.test_words_and_values)])
            tfidf_dict = self.dict_transformer.transform(dict_vector)
            combined_vector = sp.hstack([tfidf_count, tfidf_dict])
        predictions = self.best_estimator.predict(combined_vector)
        tweets, targets = utils.make_subjectivity_targets(tweets)
        #return the tweets where the target match prediction
        correct_tweets = []
        correct_sentimentvalues = []
        for i in xrange(len(tweets)):
            if predictions[i]==targets[i]:
                correct_tweets.append(tweets[i])
                correct_sentimentvalues.append(sentimentvalues[i])
        return correct_tweets, correct_sentimentvalues
    def set_feature_set(self, featureset, sentimentvalues):
        """
        Extracts and stores the given feature set for classification.

        featureset: feature-set code ('SA'/'PA' mean text-only; others
        trigger per-tweet dict feature extraction via features.get_feature_set).
        """
        self.featureset = featureset
        if featureset=='SA' or featureset=='PA':
            self.only_text_features=True
            self.feature_set = {}
        else:
            words_and_values = sentimentvalues
            self.feature_set = [features.get_feature_set(t, self.featureset, v) for t,v in zip(self.train_tweets,words_and_values)]
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,066
|
andrely/twitter-sentiment
|
refs/heads/master
|
/utils.py
|
'''
Created on 12. mars 2014
@author: JohnArne
'''
import sys
import os
import json
from pprint import pprint
import csv
import codecs
import pickle
import random
import operator
def load_to_tsv():
    """
    Loads tweets from site and store as tsv file.

    Reads data/curl_twitterdata.json (an Elasticsearch response) and
    writes published/publisher/leadText tab-separated rows to
    data/dataset.tsv.
    """
    json_data = open("data/curl_twitterdata.json")
    data = json.load(json_data)
    tweets = [ x["_source"]["published"]+str("\t")+x["_source"]["publisher"]+str("\t")+x["_source"]["leadText"] for x in data["hits"]["hits"] ]
    for tweet in tweets:
        print tweet
    print len(tweets)
    # NOTE(review): delimiter="\n" emits all rows as one newline-joined
    # record, and the output handle is never closed.
    out = csv.writer(open("data/dataset.tsv","w"), delimiter="\n", quoting=csv.QUOTE_MINIMAL)
    out.writerow(tweets)
    json_data.close()
def get_resultsets_text(results):
    """
    Takes a results list and return a list of text strings.

    Each entry is "<created_at>\t<screen_name>\t<text>", with newlines in
    the tweet body replaced by spaces.
    """
    return [unicode(x.created_at) +str("\t")+ unicode(x.user.screen_name) +("\t")+ unicode(x.text).replace("\n", " ") for x in results]
def get_tweets_text(tweets):
    """
    Returns a list of text bodies for the given set of tweets.
    """
    return [unicode(tweet.text) for tweet in tweets]
def append_to_dataset(text, dataset):
    """
    Appends text instances to dataset.

    text: iterable of unicode strings; each is utf8-encoded and written
    on its own line. Encode/decode errors are reported and the offending
    line is skipped.
    """
#    sys.stdout = codecs.getwriter('utf8')(sys.stdout)
    print "Appending to dataset: "+str(dataset)
    f = open(dataset, "a")
    for t in text:
        try:
            f.write(t.encode('utf8')+"\n")
            print t.encode('utf8')+"\n"
        except UnicodeEncodeError:
            print "Unicode Encoding Error: ", t.encode('utf8')
        except UnicodeDecodeError:
            print "Unicode Decoding Error: ", t.encode('utf8')
    f.close()
def store_dataset(text, dataset):
    """
    Write the given sequence of strings to the dataset file, replacing any
    existing content.

    Each entry is written UTF-8 encoded; if encoding raises a
    UnicodeDecodeError the raw string is written unchanged as a fallback.
    """
    print "Storing to dataset: "+str(dataset)
    f = open(dataset, "w")
    for t in text:
        # print unicode("Encoding: ")
        # print unicode(t, 'cp866')
        # encodedline = unicode(t, 'cp866').encode('utf8')
        # print "Writing: "+encodedline
        try:
            f.write(t.encode('utf8'))
        except UnicodeDecodeError:
            f.write(t)
    f.close()
def encode_unicode():
    """
    Re-encode the random dataset from ASCII into UTF-8.

    Reads complete_datasets/random_dataset.tsv and writes the re-encoded
    lines to encoding_attempt/random_dataset.tsv.

    NOTE(review): readlines() keeps each line's trailing newline and an
    extra "\n" is appended on write, so blank lines are likely introduced
    -- confirm intended.
    """
    f = open("complete_datasets/random_dataset.tsv", "r")
    text = f.readlines()
    f.close()
    f = open("encoding_attempt/random_dataset.tsv", "w")
    for line in text:
        line = line.decode('ascii')
        f.write(line.encode('utf8')+"\n")
    f.close()
def select_dataset():
    """
    Interactively ask which working dataset to use and return its file path
    from the module-level `datasets` list.
    """
    setnr = raw_input("Write to which dataset? 0: RandomSet 1: RoseborgSet 2: ErnaSet ... ")
    return datasets[int(setnr)]
def select_complete_dataset():
    """
    Interactively ask which complete dataset to use and return its file
    path from the module-level `complete_datasets` list.
    """
    setnr = raw_input("Write to which complete dataset? 0: RandomSet 1: RoseborgSet 2: ErnaSet 3: TemporalSet... ")
    return complete_datasets[int(setnr)]
def get_dataset(dataset):
    """
    Read the given dataset file and return its lines (with trailing
    newlines preserved) as a list of strings.
    """
    with open(dataset, "r") as source:
        return list(source.readlines())
def store_pickles(tweets, filepath):
    """
    Serialize the given list of tweets to tweet_pickles/<filepath>.

    Fixed: the output file is now closed deterministically via a context
    manager (the original opened the handle and never closed it, so the
    pickle could stay partially flushed).
    """
    with open("tweet_pickles/"+filepath, 'wb') as output:
        pickle.dump(tweets, output)
def get_pickles(setnr=None):
    """
    Load pickled tweet sets from disk.

    setnr 0-2 selects one dataset from the module-level `pickles` list;
    3 concatenates all three.  When setnr is None the user is prompted
    interactively.
    """
    if setnr==None:
        setnr = int(raw_input("Get which pickle set? 0: RandomSet 1: RoseborgSet 2: ErnaSet 3: All three ..."))
    # NOTE(review): `is 3` is an identity check that only works because
    # CPython caches small ints; `== 3` would be the robust spelling.
    if setnr is 3:
        #fetch all sets and append them together
        tweets = []
        for pickleset in pickles:
            tweets = tweets + pickle.load(open(pickleset, 'rb'))
        print len(tweets)
        return tweets
    else:
        tweets = pickle.load(open(pickles[setnr], 'rb'))
        return tweets
    return tweets  # unreachable: both branches above already return
def get_all_pickles():
    """
    Load and concatenate every pickled tweet set: the three topic datasets
    in `pickles` plus the two temporal sets ('temporal_tweets1' and
    'temporal_tweets2' in the working directory).
    """
    tweets = []
    for pickleset in pickles:
        tweets = tweets + pickle.load(open(pickleset, 'rb'))
    tweets = tweets + pickle.load(open('temporal_tweets1', 'rb'))
    tweets = tweets + pickle.load(open('temporal_tweets2', 'rb'))
    print len(tweets)
    return tweets
def limit_topics_top10(data):
    """
    Limit the topics in a set of plotting data to the 10 most frequent.

    NOTE(review): not implemented yet -- the body is empty and the function
    returns None.
    """
def split_train_and_test(tweets):
    """
    Split the tweet list 80/20 into a training part and a testing part.
    Returns the pair (train_tweets, test_tweets).
    """
    cut = int(len(tweets) * 0.8)
    return tweets[:cut], tweets[cut:]
def make_polarity_train_and_test_and_targets(tweets, sentimentvalues, splitvalue=0.9, reduce_dataset=1, shuffle=True):
    """
    Build a polarity-classification split from subjective tweets only.

    Drops tweets whose subjectivity != 1, optionally shuffles, optionally
    keeps only a `reduce_dataset` fraction, then carves out a 10%-wide test
    window starting at `splitvalue` (k-fold style); everything outside the
    window becomes training data.

    Returns (train_tweets, train_targets, test_tweets, test_targets,
    train_sentimentvalues, test_sentimentvalues), where targets come from
    tweet.get_sentiment().
    """
    pol_tweets = []
    pol_sentiments = []
    if shuffle:
        tweets, sentimentvalues = shuffle_tweets_and_sentiments(tweets, sentimentvalues)
    # Keep only fully subjective tweets, with aligned sentiment values.
    for t,s in zip(tweets, sentimentvalues):
        if t.subjectivity==1:
            pol_tweets.append(t)
            pol_sentiments.append(s)
    pol_tweets = pol_tweets[:int(round(reduce_dataset*len(pol_tweets)))]
    pol_sentiments = pol_sentiments[:int(round(reduce_dataset*len(pol_sentiments)))]
    # Test window is [splitvalue, splitvalue+0.1) of the reduced set.
    up_to = int(round(len(pol_tweets)*(splitvalue+0.1)))
    split_pos = int(round(len(pol_tweets)*splitvalue))
    train_tweets = pol_tweets[0:split_pos]+pol_tweets[up_to:len(pol_tweets)]
    test_tweets = pol_tweets[split_pos:up_to]
    train_sentimentvalues = pol_sentiments[0:split_pos]+pol_sentiments[up_to:len(pol_tweets)]
    test_sentimentvalues = pol_sentiments[split_pos:up_to]
    pol_train_targets = [t.get_sentiment() for t in train_tweets]
    pol_test_targets = [t.get_sentiment() for t in test_tweets]
    print "Train tweets: ", len(train_tweets)
    print "test tweeets: ", len(test_tweets)
    print "Train targets: ", len(pol_train_targets)
    print "test targets ", len(pol_test_targets)
    print "train sentiments ", len(train_sentimentvalues)
    print "test sentiments ", len(test_sentimentvalues)
    return train_tweets, pol_train_targets, test_tweets, pol_test_targets, train_sentimentvalues, test_sentimentvalues
def make_subjectivity_train_and_test_and_targets(tweets, sentimentvalues, splitvalue=0.9, reduce_dataset=1,shuffle=True):
    """
    Build a subjectivity-classification split over the whole tweet set.

    Optionally shuffles, optionally keeps only a `reduce_dataset` fraction,
    then carves out a 10%-wide test window starting at `splitvalue`
    (k-fold style); everything outside the window becomes training data.

    Returns (train_tweets, train_targets, test_tweets, test_targets,
    train_sentimentvalues, test_sentimentvalues), where targets are
    'objective' when tweet.subjectivity == 0 and 'subjective' otherwise.
    """
    if shuffle:
        tweets, sentimentvalues = shuffle_tweets_and_sentiments(tweets, sentimentvalues)
    reduced_tweets = tweets[:int(round(reduce_dataset*len(tweets)))]
    # Test window is [splitvalue, splitvalue+0.1) of the reduced set.
    up_to = int(round(len(reduced_tweets)*(splitvalue+0.1)))
    split_pos = int(round(len(reduced_tweets)*splitvalue))
    print "Upto:",up_to
    print "Splitpos:",split_pos
    train_tweets = reduced_tweets[:split_pos]+reduced_tweets[up_to:len(reduced_tweets)]
    test_tweets = reduced_tweets[split_pos:up_to]
    train_sentimentvalues = sentimentvalues[0:split_pos]+sentimentvalues[up_to:len(reduced_tweets)]
    test_sentimentvalues = sentimentvalues[split_pos:up_to]
    print "Train reduced_tweets: ", len(train_tweets)
    print "test tweeets: ", len(test_tweets)
    sub_train_targets = ['objective' if t.subjectivity==0 else 'subjective' for t in train_tweets]
    sub_test_targets = ['objective' if t.subjectivity==0 else 'subjective' for t in test_tweets]
    print "Train targets: ", len(sub_train_targets)
    print "test targets ", len(sub_test_targets)
    return train_tweets, sub_train_targets, test_tweets, sub_test_targets, train_sentimentvalues, test_sentimentvalues
def shuffle_tweets_and_sentiments(tweets, sentiments):
    """
    Shuffle tweets and their sentiment values in unison and return the two
    shuffled lists (tweet i stays paired with sentiment i).

    Fixed: the index range is materialized with list() so random.shuffle
    also works on Python 3, where shuffling an immutable `range` object
    raises TypeError; Python 2 behavior is unchanged.
    """
    indexes = list(range(len(tweets)))
    random.shuffle(indexes)
    shuffled_tweets = []
    shuffled_sentiments = []
    for i in indexes:
        shuffled_sentiments.append(sentiments[i])
        shuffled_tweets.append(tweets[i])
    return shuffled_tweets, shuffled_sentiments
def make_subjectivity_targets(tweets):
    """
    Build subjectivity class labels for the given tweets.

    Returns the tweets unchanged together with one label per tweet:
    'objective' when tweet.subjectivity == 0, 'subjective' otherwise.
    """
    labels = []
    for tweet in tweets:
        labels.append('objective' if tweet.subjectivity == 0 else 'subjective')
    return tweets, labels
def make_polarity_targets(tweets):
    """
    Build polarity class labels for the given tweets.

    Returns the tweets unchanged together with tweet.get_sentiment() for
    each tweet.
    """
    labels = []
    for tweet in tweets:
        labels.append(tweet.get_sentiment())
    return tweets, labels
def store_model(name, params, score, file_postfix=""):
    """
    Pickle the given parameter dict into the stored_estimators folder,
    naming the file "<name><score><file_postfix>".  Returns params
    unchanged so calls can be chained.
    """
    destination = "stored_estimators/" + str(name) + str(score) + str(file_postfix)
    with open(destination, 'wb') as out:
        pickle.dump(params, out)
    return params
def store_sentimentvalues(words_with_values, filename):
    """
    Pickle the given list of sentiment-value dicts to the given file.

    Fixed: the file handle is now closed deterministically via a context
    manager (the original opened the file and never closed it).
    """
    with open(filename, 'wb') as output:
        pickle.dump(words_with_values, output)
def get_sentimentvalues(setnr=None):
    """
    Load the pickled sentiment-lexicon values for a dataset.

    setnr 0-2 selects one set from the module-level `sentiment_pickles`
    list; 3 concatenates all three.  When setnr is None the user is
    prompted interactively.
    """
    if setnr==None:
        setnr = int(raw_input("Get which pickle set? 0: RandomSet 1: RoseborgSet 2: ErnaSet 3: All three ..."))
    # NOTE(review): `is 3` is an identity check on an int; `== 3` would be
    # the robust spelling.
    if setnr is 3:
        #fetch all sets and append them together
        tweets = []
        for pickleset in sentiment_pickles:
            tweets = tweets + pickle.load(open(pickleset, 'rb'))
        return tweets
    else:
        tweets = pickle.load(open(sentiment_pickles[setnr], 'rb'))
        return tweets
    return tweets  # unreachable: both branches above already return
def get_entity_test_and_targets():
    """
    Load the entity-extraction evaluation data.

    Reads pickled tweets from the 'entity_test' file and one integer target
    per line from 'entity_test_targets.txt'; returns (tweets, targets).
    """
    f = open("entity_test","rb")
    tweets = pickle.load(f)
    # for t in tweets:
    #     print t.text," ",t.hashtags
    #     raw_input("Continue?")
    print len(tweets)
    f.close()
    f = open("entity_test_targets.txt","r")
    targets = f.readlines()
    print len(targets)
    targets = [int(t) for t in targets]
    return tweets, targets
def temporally_aggregate_subjectivity(tweets, predictions, targets=None, topics=None):
    """
    Aggregate subjectivity per day for predictions and, if given, targets.

    Returns (sorted_days, aggregated_targets, aggregated_predicts,
    frequencies): the distinct day keys in ascending order, the per-day
    sums of binarized targets (or None entries when no targets were given),
    the per-day sums of binarized predictions, and the per-day tweet
    counts.  `topics` is accepted but unused here.
    """
    # for t in tweets:
    #     print t.timestamp
    # Day key "MM.DD" as a float, pulled from one of two timestamp layouts
    # selected by string length.
    days = [t.timestamp[5:10].replace('-','.') if len(t.timestamp)<20 else t.timestamp[8:13].replace('-','.') for t in tweets]
    reduced_targets = reduce_targets(targets) if targets != None else None
    reduced_predictions = reduce_targets(predictions)
    sorted_days =sorted( list(set( [float(x) for x in days] )) )
    aggregated_targets = [ 0 for _ in sorted_days]
    aggregated_predicts = [ 0 for _ in sorted_days]
    frequencies = [ 0 for _ in sorted_days]
    for i in range(len(sorted_days)):
        # Sum the 0/1 values (and count tweets) whose day matches day i.
        aggregated_targets[i] = reduce(lambda x,y: x+y, [t if float(d)==sorted_days[i] else 0 for t,d in zip(reduced_targets, days)] ) if reduced_targets!=None else None
        aggregated_predicts[i] = reduce(lambda x,y: x+y, [t if float(d)==sorted_days[i] else 0 for t,d in zip(reduced_predictions, days)] )
        frequencies[i] = reduce(lambda x,y: x+y, [1 if float(d)==sorted_days[i] else 0 for t,d in zip(reduced_predictions, days)] )
    # print days
    print sorted_days, aggregated_targets, aggregated_predicts, frequencies
    return sorted_days, aggregated_targets, aggregated_predicts, frequencies
def temporally_aggregate_polarity(tweets, predictions, targets=None, topics=None):
    """
    Aggregate polarity per day for predictions (and targets if given),
    mapping each binarized value to -1/+1 so the per-day sum is a net
    polarity difference.

    Returns (sorted_days, aggregated_targets, aggregated_predicts,
    frequencies, topics, aggregated_polarity_on_topic).
    """
    # for t in tweets:
    #     print t.timestamp
    # Day key "MM.DD" as a float, pulled from one of two timestamp layouts
    # selected by string length.
    days = [t.timestamp[5:10].replace('-','.') if len(t.timestamp)<20 else t.timestamp[8:13].replace('-','.') for t in tweets]
    if targets!=None:
        reduced_targets = reduce_targets(targets)
        reduced_targets = [-1 if t==0 else 1 for t in reduced_targets]
    else:
        reduced_targets = None
    reduced_predictions = reduce_targets(predictions)
    reduced_predictions = [-1 if t==0 else 1 for t in reduced_predictions]
    print topics
    sorted_days =sorted( list(set( [float(x) for x in days] )) )
    aggregated_targets = [ 0 for _ in sorted_days]
    aggregated_predicts = [ 0 for _ in sorted_days]
    frequencies = [ 0 for _ in sorted_days]
    unique_topics = list(set(topics)) if topics!=None else None
    print unique_topics
    aggregated_polarity_on_topic = []
    for i in range(len(sorted_days)):
        aggregated_targets[i] = reduce(lambda x,y: x+y, [t if float(d)==sorted_days[i] else 0 for t,d in zip(reduced_targets, days)] ) if reduced_targets!=None else None
        aggregated_predicts[i] = reduce(lambda x,y: x+y, [t if float(d)==sorted_days[i] else 0 for t,d in zip(reduced_predictions, days)] )
        frequencies[i] = reduce(lambda x,y: x+y, [1 if float(d)==sorted_days[i] else 0 for t,d in zip(reduced_predictions, days)] )
    if unique_topics!=None:
        # NOTE(review): this loop indexes both sorted_days[i] and
        # unique_topics[i] with the same i, which conflates day index and
        # topic index (and can raise IndexError when there are more topics
        # than days) -- looks like a bug; verify against
        # topically_aggregate_polarity, which loops them separately.
        for i in range(len(unique_topics)):
            aggregated_polarity_on_topic.append(reduce(lambda x,y: x+y, [t if float(d)==sorted_days[i] and top==unique_topics[i] else 0 for t,d,top in zip(reduced_predictions,days,topics)] ))
    # print days
    print sorted_days
    print aggregated_polarity_on_topic
    return sorted_days, aggregated_targets, aggregated_predicts, frequencies, topics, aggregated_polarity_on_topic
def topically_aggregate_polarity(tweets, predictions, topics):
    """
    Aggregate predicted polarity per topic per day.

    Predictions are binarized and mapped to -1/+1, summed per (topic, day)
    pair, topics are ranked by their total absolute per-day polarity, and
    the day keys plus the top-20 topics with their per-day polarity series
    are returned as (sorted_days, unique_topics, aggregated_polarity_on_topic).
    """
    # Day key "MM.DD" as a float, pulled from one of two timestamp layouts
    # selected by string length.
    days = [t.timestamp[5:10].replace('-','.') if len(t.timestamp)<20 else t.timestamp[8:13].replace('-','.') for t in tweets]
    reduced_predictions = reduce_targets(predictions)
    reduced_predictions = [-1 if t==0 else 1 for t in reduced_predictions]
    sorted_days =sorted( list(set( [float(x) for x in days] )) )
    unique_topics = list(set(topics))
    aggregated_polarity_on_topic = [[] for _ in unique_topics]
    sentimentpoints = []
    for i in range(len(unique_topics)):
        for j in range(len(sorted_days)):
            aggregated_polarity_on_topic[i].append(reduce(lambda x,y: x+y, [t if float(d)==sorted_days[j] and top==unique_topics[i] else 0 for t,d,top in zip(reduced_predictions,days,topics)] ))
        # Rank score: sum of absolute per-day polarity for this topic.
        sentimentpoints.append(reduce(lambda x,y: x+y, [-p if p<0 else p for p in aggregated_polarity_on_topic[i]]))
    # Relabel the None topic for display.
    # NOTE(review): list.index(None) raises ValueError when no tweet has a
    # None topic -- confirm callers guarantee at least one.
    unique_topics[unique_topics.index(None)] = "undefined"
    print unique_topics
    print sorted_days
    print aggregated_polarity_on_topic
    # Sort topics by rank score, descending, and keep the top 20.
    unique_topics, aggregated_polarity_on_topic, sentimentpoints = zip(*sorted(zip(unique_topics,aggregated_polarity_on_topic,sentimentpoints), key=operator.itemgetter(2), reverse=True))
    return sorted_days, unique_topics[:20], aggregated_polarity_on_topic[:20]
def reduce_targets(targets):
    """
    Reduce subjectivity or polarity labels to 0/1 integers.

    Subjectivity labels map 'objective' -> 0 and anything else -> 1;
    polarity labels map 'negative' -> 0 and anything else -> 1.  Which
    mapping applies is decided by the first label in the list.
    """
    if not targets:
        return []
    zero_label = 'objective' if targets[0] in ('objective', 'subjective') else 'negative'
    return [0 if target == zero_label else 1 for target in targets]
# File locations for the pickled tweet-object datasets.
pickles = ['tweet_pickles/random_dataset',
           'tweet_pickles/rosenborg_dataset',
           'tweet_pickles/erna_dataset']
# Pickled per-dataset sentiment-lexicon values (parallel to `pickles`).
sentiment_pickles = ['models/sentimentvalues_random_dataset',
                     'models/sentimentvalues_rosenborg_dataset',
                     'models/sentimentvalues_erna_dataset']
# Ordered sentiment class labels.
sentiments = ["negative",
              "neutral",
              "positive"]
# Raw .tsv dataset files: finished sets, working sets and annotated sets.
complete_datasets = ["complete_datasets/random_dataset.tsv",
                     "complete_datasets/rosenborg_dataset.tsv",
                     "complete_datasets/erna_dataset.tsv",
                     "complete_datasets/temporal_dataset.tsv"]
datasets = ["data/random_dataset.tsv",
            "data/rosenborg_dataset.tsv",
            "data/erna_dataset.tsv"]
annotated_datasets = ["johnarne_annotated_data/random_dataset.tsv",
                      "johnarne_annotated_data/rosenborg_dataset.tsv",
                      "johnarne_annotated_data/erna_dataset.tsv"]
if __name__ == '__main__':
    # Smoke test: load a pickle set (interactive prompt inside get_pickles)
    # and report the 80/20 split sizes.
    train, test = split_train_and_test(get_pickles())
    print len(train)," ", len(test)
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,067
|
andrely/twitter-sentiment
|
refs/heads/master
|
/models/me.py
|
'''
Created on 19. mars 2014
@author: JohnArne
'''
from model import Model
from sklearn.linear_model import LogisticRegression
class ME(Model):
    """
    Maximum-entropy classification model (scikit-learn LogisticRegression)
    on top of the shared Model pipeline.
    """
    def __init__(self, tweets_train, tweets_targets, vect_options, tfidf_options):
        """Set up the logistic-regression classifier and its grid-search extras."""
        # Extra grid-search parameters specific to LogisticRegression.
        grid_extras = {'clf__C': (0.1, 0.3, 0.5, 0.7, 0.8, 1.0,),'clf__penalty': ('l1', 'l2')}
        self.classifier = LogisticRegression()
        super(ME, self).__init__(tweets_train, tweets_targets, vect_options, tfidf_options, grid_extras)
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,068
|
andrely/twitter-sentiment
|
refs/heads/master
|
/lexicon/__init__.py
|
'''
Created on 3. des. 2014
@author: JohnArne
'''
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,069
|
andrely/twitter-sentiment
|
refs/heads/master
|
/easygui_gui.py
|
'''
Created on 19. nov. 2014
@author: JohnArne
'''
import easygui as eg
import sys
def show_windows():
    """
    Main easygui loop: ask for a classification model, ask preset vs.
    manual values, dispatch to the matching dialog, show the
    temporal-sentiment plot, and repeat until the user cancels.
    """
    while 1:
        # title = "Message from test1.py"
        # eg.msgbox("Hello, world!", title)
        msg ="Run with which classification model?"
        title = "Classification model"
        models = ["Multinomial Naive Bayes", "Support Vector Machines", "Maximum Entropy"]
        model_choice = str(eg.choicebox(msg, title, models))
        msg = "Use saved preset values?"
        choices = ["Yes","No"]
        choice = eg.buttonbox(msg,choices=choices)
        # Dispatch to the preset or manual-selection dialog for the model.
        if str(choice)=="Yes":
            model_preset_functions[model_choice]()
        else:
            model_select_functions[model_choice]()
        # note that we convert choice to string, in case
        # the user cancelled the choice, and we got None.
        # eg.msgbox("You chose: " + str(choice), "Survey Result")
        message = "Sentiments over time period something something"
        image = "temporal_sentiments.png"
        eg.msgbox(message, image=image)
        msg = "Do you want to continue?"
        title = "Please Confirm"
        if eg.ccbox(msg, title): # show a Continue/Cancel dialog
            pass # user chose Continue
        else:
            sys.exit(0) # user chose Cancel
def show_naivebayes_presets():
    """
    Show a choicebox of preset running values for Naive Bayes.

    The user's pick is currently discarded; the function returns None.
    """
    eg.choicebox("Select preset values for Naive Bayes",
                 "Naive Bayes presets",
                 ["Multinomial Naive Bayes", "Support Vector Machines", "Maximum Entropy"])
def show_svm_presets():
    """
    Show a choicebox of preset running values for Support Vector Machines.

    The user's pick is currently discarded; the function returns None.
    """
    eg.choicebox("Select preset values for Support Vector Machines",
                 "SVM presets",
                 ["something", "somethingsomething", "something else"])
def show_me_presets():
    """
    Show a choicebox of preset running values for Maximum Entropy.

    The user's pick is currently discarded; the function returns None.
    """
    eg.choicebox("Select preset values for Maximum Entropy",
                 "MaxEnt presets",
                 ["something", "something else", "aaand more"])
def show_naivebayes_selection():
    """
    Show a multi-field input dialog for Naive Bayes run values and re-show
    it until every field is filled in or the user cancels (fieldValues
    becomes None).  The collected values are printed, not returned.
    """
    msg = "Enter running values for Naive Bayes"
    title = "Naive Bayes run"
    fieldNames = ["x","dss","c","range","s","p","cross","stu","thn","pH"]
    fieldValues = [] # we start with blanks for the values
    fieldValues = eg.multenterbox(msg,title, fieldNames)
    # make sure that none of the fields was left blank
    while 1: # do forever, until we find acceptable values and break out
        if fieldValues == None:
            break
        errmsg = ""
        # look for errors in the returned values
        for i in range(len(fieldNames)):
            if fieldValues[i].strip() == "":
                errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i])
        if errmsg == "":
            break # no problems found
        else:
            # show the box again, with the errmsg as the message
            fieldValues = eg.multenterbox(errmsg, title, fieldNames, fieldValues)
    print ("Reply was:", fieldValues)
    pass
def show_svm_selection():
    """
    Show a value input window for Support Vector Machines.

    NOTE(review): not implemented yet -- the body is a bare pass.
    """
    pass
def show_me_selection():
    """
    Show a value input window for Maximum Entropy.

    NOTE(review): not implemented yet -- the body is a bare pass.
    """
    pass
# Dispatch tables mapping the model-choice string from the GUI to the
# matching preset / manual-selection dialog function.
model_preset_functions = {"Multinomial Naive Bayes": show_naivebayes_presets,
                          "Support Vector Machines": show_svm_presets,
                          "Maximum Entropy": show_me_presets}
model_select_functions = {"Multinomial Naive Bayes": show_naivebayes_selection,
                          "Support Vector Machines": show_svm_selection,
                          "Maximum Entropy": show_me_selection}
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,070
|
andrely/twitter-sentiment
|
refs/heads/master
|
/lexicon/lexicon.py
|
'''
Created on 27. nov. 2014
@author: JohnArne
'''
from hmac import trans_36
import requests
import os
from sentiwordnet import SentiWordNetCorpusReader, SentiSynset
import nltk
from pos_mappings import TYPECRAFT_SENTIWORDNET
import gettext
import codecs
import subprocess
import pickle
class Lexicon():
    """
    Facade that combines a translator with a sentiment lexicon: words are
    translated before being looked up, so a non-English word can be scored
    against an English lexicon.
    """
    def __init__(self, translater, sentiment_lexicon):
        """Store the injected translator and sentiment-lexicon collaborators."""
        self.translater = translater
        self.sentiment_lexicon = sentiment_lexicon

    def translate_and_get_lexicon_sentiment(self, word, context=None, pos_tag=None):
        """
        Translate a single word and return its lexicon sentiment values,
        passing the optional context and POS tag through to the lexicon.
        """
        return self.sentiment_lexicon.get_values(
            self.translater.translate(word), context, pos_tag)

    def translate_sentence_and_get_lexicon_sentiment(self, sentence):
        """
        Translate a whole sentence, look up every token in the lexicon and
        return the values of the tokens that had an entry.
        """
        tokens = tokenizer(self.translater.translate(sentence))
        lookups = (self.sentiment_lexicon.get_values(tok) for tok in tokens)
        return [values for values in lookups if values is not None]
class SentiWordNetLexicon():
    """
    Thin wrapper around the SentiWordNet 3.0 corpus reader exposing
    [positive, negative, objective] scores for English words.
    """
    def __init__(self):
        # NOTE(review): backslash path separator is Windows-specific.
        SWN_FILENAME = "lexicon\SentiWordNet_3.0.0_20130122.txt"
        self.swn= SentiWordNetCorpusReader(SWN_FILENAME)
    def get_values(self, word, context=None, pos_tag=None):
        """
        Look up `word` in SentiWordNet and return
        [pos_score, neg_score, obj_score], or None when nothing matches.

        For multi-word input the tokens are tried one at a time; with
        several candidate synsets the POS tag (mapped through
        TYPECRAFT_SENTIWORDNET) disambiguates, falling back to the first
        entry.  `context` is accepted but unused.
        """
        # entry = swn.senti_synset("breakdown.n.03")
        entries = None
        for w in word.split(' '):
            entries = self.swn.senti_synsets(w)
            if entries != None: break
        if entries is None or len(entries)==0:
            return None
        if len(entries)==1 or pos_tag is None:
            return [entries[0].pos_score, entries[0].neg_score, entries[0].obj_score]
        elif len(entries)>1:
            #Find out which word to chose, if there are several classes
            print "Several entries ",entries
            for entry in entries:
                if entry.synset.pos()==TYPECRAFT_SENTIWORDNET[pos_tag]:
                    print "Found matching entry: ", entry
                    return [entry.pos_score, entry.neg_score, entry.obj_score]
            # No POS match: fall back to the first entry.
            return [entries[0].pos_score, entries[0].neg_score, entries[0].obj_score]
        return None
class BingTranslater():
    """
    Batch translator backed by an external Bing translation executable.

    On construction, all words are written to bing_words.txt,
    lexicon/bingtranslater.exe is invoked, translated_words.txt is read
    back, and an original-word -> translated-word mapping is built.
    """
    def __init__(self, words):
        self.original_words = words
        file = codecs.open("bing_words.txt", "w", "utf8")
        for word in words:
            file.write(word+"\n")
        file.close()
        print "Bing translating ",len(words)," words..."
        subprocess.call("lexicon/bingtranslater.exe")
        file = codecs.open("translated_words.txt", "r", "utf8")
        translated_words = file.readlines()
        file.close()
        # NOTE(review): readlines() keeps trailing newlines, so mapped
        # translations include "\n" -- confirm downstream tolerates that.
        self.translation_mapping = dict(zip(self.original_words, translated_words))
        print "Bing done..."
    def translate(self, word):
        """Return the Bing translation of `word`, or None if it was not in the batch."""
        try:
            return self.translation_mapping[word]
        except KeyError:
            return None
class GoogleTranslater():
    """
    Translator that posts text to the Google Translate web interface and
    scrapes the TRANSLATED_TEXT value out of the returned HTML.
    """
    def __init__(self):
        # Norwegian -> English web endpoint (scraped, not an official API).
        self.translation_url = "https://translate.google.com/#no/en/"
        # Each line holds the original word followed by its English
        # translation alternatives.
        self.words = codecs.open("bing_words.txt", "r", "utf8").read().splitlines()

    def translate(self, word, context=None, pos_tag=None):
        """
        Return the contextual translation of `word` scraped from the Google
        Translate response.  `context` and `pos_tag` are accepted but
        currently unused.
        """
        response = requests.post(self.translation_url,
                                 data={"text": word, "raw": "raw"})
        # Pull the TRANSLATED_TEXT variable out of the HTML body.
        return get_from_html_text(response.text, 'TRANSLATED_TEXT')
def get_from_html_text(resultset, target):
    """
    Extract the lower-cased value of a JavaScript-style variable `target`
    from an HTML response body.

    The value is assumed to start two characters after the variable name
    (skipping "='") and to end at the next single quote; at most 140
    characters are scanned.
    """
    start = resultset.find(target) + len(target) + 2
    snippet = resultset[start:start + 140]
    return snippet.split("'")[0].lower()
def perform_bing_sentiment_lexicon_lookup(tweets):
    """
    Translate each tweet's sentiment-bearing words via Bing and look them
    up in SentiWordNet.

    Only words whose POS tag appears in TYPECRAFT_SENTIWORDNET are
    considered.  Returns one dict per tweet mapping each such word to its
    sentiment values; words without a lexicon entry are omitted.
    """
    # Collect every candidate word first so BingTranslater can translate
    # the whole batch in a single external call.
    words = []
    for t in tweets:
        for phrase in t.tagged_words:
            for word in phrase:
                try:
                    if word["pos"] in TYPECRAFT_SENTIWORDNET:
                        words.append(word['word'])
                except KeyError:
                    continue
    lex = Lexicon(BingTranslater(words), SentiWordNetLexicon())
    words_with_sentimentvalues=[]#list of dicts, one per tweet
    print "Getting sentiment values"
    for t in tweets:
        # Re-derive the POS-filtered words for this tweet and look each up.
        sentiwords =[]
        sentiwords_with_values={}
        for phrase in t.tagged_words:
            for word in phrase:
                try:
                    if word["pos"] in TYPECRAFT_SENTIWORDNET:
                        sentiwords.append(word['word'])
                except KeyError:
                    continue
        for sentiword in sentiwords:
            sentivalues = lex.translate_and_get_lexicon_sentiment(sentiword)
            if sentivalues!=None:
                print "Adding sentivalues: ",sentivalues
                sentiwords_with_values[sentiword] = sentivalues
        words_with_sentimentvalues.append(sentiwords_with_values)
    return words_with_sentimentvalues
def perform_google_sentiment_lexicon_lookup(tweets):
    """
    Translate each whole tweet via Google and look its tokens up in
    SentiWordNet.

    Returns one (pos_sum, neg_sum, obj_sum) tuple per tweet, summing the
    per-token sentiment triples returned by the lexicon.
    """
    lex = Lexicon(GoogleTranslater(), SentiWordNetLexicon())
    print "Getting sentiment values"
    tweet_sentiments = []
    for t in tweets:
        tweet_sentiments.append(lex.translate_sentence_and_get_lexicon_sentiment(t.text))
    print tweet_sentiments
    # Collapse the per-token triples into one triple per tweet.
    # Each s is [pos_score, neg_score, obj_score] from the lexicon.
    reduced_tweet_sentiments = []
    for sentiments in tweet_sentiments:
        polar_sum = sum([s[0] for s in sentiments])
        negative_sum = sum([s[1] for s in sentiments])
        objective_sum = sum([s[2] for s in sentiments])
        reduced_tweet_sentiments.append((polar_sum, negative_sum, objective_sum))
    print reduced_tweet_sentiments
    return reduced_tweet_sentiments
def tokenizer(sentence):
    """
    Split an English sentence into tokens on periods, commas and spaces.

    Consecutive separators yield empty-string tokens, matching str.split
    semantics.
    """
    return [token
            for clause in sentence.split('.')
            for segment in clause.split(',')
            for token in segment.split(' ')]
if __name__ == '__main__':
    #Insert all words to be translated into the googlebing translator in order to augment with Bing...
    # Manual smoke test for the translation + lexicon pipeline.
    # NOTE(review): BingTranslater is constructed without its required
    # `words` argument here, so this entry point raises TypeError as-is.
    lex = Lexicon(BingTranslater(), SentiWordNetLexicon())
    print lex.translate_and_get_lexicon_sentiment("good")
    # swn = SentiWordNetCorpusReader('SentiWordNet_3.0.0_20130122.txt')
    # for senti_synset in swn.all_senti_synsets():
    #     print senti_synset.synset.name, senti_synset.pos_score, senti_synset.neg_score
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,071
|
andrely/twitter-sentiment
|
refs/heads/master
|
/test.py
|
'''
Created on 24. nov. 2014
Methods for performing test on various classificatino schemes and storing the results.
@author: JohnArne
'''
import utils
from lexicon import lexicon
from models.nb import NB
from models.svm import SVM
from models.me import ME
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score
from tweet import Tweet
import plotting
import preprocessing
import models.features as feat_utils
import pickle
import classifier
import tweet
import entity_extraction
from entity_extraction import cutoff_breakwords
def train_and_test_subjectivity_and_polarity():
    """
    Experiment driver: load pickled dataset 3 with its sentiment values,
    run the shared preprocessing steps, and execute the subjectivity
    experiments for feature sets SB and SC.

    The remaining experiment invocations (SA, SC2 with Google sentiment
    values, and the polarity runs PA-PC2) are kept commented out for
    reference.
    """
    datasetnr = 3
    tweets = utils.get_pickles(datasetnr)
    sentimentvalues = feat_utils.get_sentiment_values(datasetnr)
    # Shared preprocessing pipeline applied before every experiment.
    tweets = preprocessing.remove_link_classes(tweets)
    tweets = preprocessing.lower_case(tweets)
    tweets = preprocessing.remove_specialchars_round2(tweets)
    # train_subjectivity_and_test_on_feature_set(tweets, 'SA', sentimentvalues)
    train_subjectivity_and_test_on_feature_set(tweets, 'SB', sentimentvalues)
    train_subjectivity_and_test_on_feature_set(tweets, 'SC', sentimentvalues)
    # google_sentimentvalues = feat_utils.get_google_sentiment_values(datasetnr)
    # train_subjectivity_and_test_on_feature_set(tweets, 'SC2', google_sentimentvalues)
    # train_polarity_and_test_on_feature_set(tweets, 'PA', sentimentvalues)
    # train_polarity_and_test_on_feature_set(tweets, 'PB', sentimentvalues)
    # train_polarity_and_test_on_feature_set(tweets, 'PC', sentimentvalues)
    # google_sentimentvalues = feat_utils.get_google_sentiment_values(datasetnr)
    # train_polarity_and_test_on_feature_set(tweets, 'PC2', google_sentimentvalues)
def train_subjectivity_and_test_on_feature_set(tweets, feature_set, sentimentvalues, reduce_dataset=1):
    """
    10-fold train/test of subjectivity classification with Naive Bayes,
    SVM and MaxEnt on the given feature-set key.

    For each fold the data is re-split (test window at kfold*0.1), each
    classifier is trained and evaluated, and per-fold accuracy, precision,
    recall and F1 are collected.  The fold averages are plotted via
    plotting.plot_performance_histogram and returned as a dict keyed by
    classifier name.
    """
    kfolds = range(0,10)
    # Per-fold metric accumulators for each classifier.
    nbaccuracy_avgs = []
    nbprecision_avgs = []
    nbrecall_avgs = []
    nbf1_avgs = []
    svmaccuracy_avgs = []
    svmprecision_avgs = []
    svmrecall_avgs = []
    svmf1_avgs = []
    meaccuracy_avgs = []
    meprecision_avgs = []
    merecall_avgs = []
    mef1_avgs = []
    # Vectorizer / tf-idf options intended for NB.
    vect_options = {
        'ngram_range': (1,1),
        'max_df': 0.5
    }
    tfidf_options = {
        'sublinear_tf': False,
        'use_idf': True,
        'smooth_idf': True,
    }
    for kfoldcounter in kfolds:
        print "--------------------------KFOLD NR ",kfoldcounter,"----------------------------------"
        train_tweets, train_targets, test_tweets, test_targets, train_sentimentvalues, test_sentimentvalues = utils.make_subjectivity_train_and_test_and_targets(tweets,sentimentvalues,splitvalue=kfoldcounter*0.1,reduce_dataset=reduce_dataset)
        #TRAINING NB
        # NOTE(review): from the second fold onwards NB picks up the ME
        # options assigned at the end of the previous iteration, not the
        # NB options defined above the loop -- confirm this is intended.
        print "Training NB subjectivity on dataset of length ", len(train_tweets)
        clf = NB(train_tweets, train_targets, vect_options, tfidf_options)
        clf.set_feature_set(feature_set, train_sentimentvalues)
        clf.train_on_feature_set()
        print "Testing..."
        nb_accuracy, nb_precision, nb_recall, nb_f1_score = clf.test_and_return_results(test_tweets, test_targets, test_sentimentvalues)
        nbaccuracy_avgs.append(nb_accuracy)
        nbprecision_avgs.append(nb_precision)
        nbrecall_avgs.append(nb_recall)
        nbf1_avgs.append(nb_f1_score)
        #TRAINING SVM
        vect_options = {
            'ngram_range': (1,3),
            'max_df': 0.5
        }
        tfidf_options = {
            'sublinear_tf': True,
            'use_idf': True,
            'smooth_idf': True,
        }
        print "Training SVM subjectivity on dataset of length ", len(train_tweets)
        clf = SVM(train_tweets, train_targets, vect_options, tfidf_options)
        clf.set_feature_set(feature_set, train_sentimentvalues)
        clf.train_on_feature_set()
        print "Testing..."
        svm_accuracy, svm_precision, svm_recall, svm_f1_score = clf.test_and_return_results(test_tweets, test_targets, test_sentimentvalues)
        svmaccuracy_avgs.append(svm_accuracy)
        svmprecision_avgs.append(svm_precision)
        svmrecall_avgs.append(svm_recall)
        svmf1_avgs.append(svm_f1_score)
        #TRAINING MAXENT
        vect_options = {
            'ngram_range': (1,2),
            'max_df': 0.5
        }
        tfidf_options = {
            'sublinear_tf': True,
            'use_idf': True,
            'smooth_idf': True,
        }
        print "Training MaxEnt subjectivity on dataset of length ", len(train_tweets)
        clf = ME(train_tweets, train_targets, vect_options, tfidf_options)
        clf.set_feature_set(feature_set, train_sentimentvalues)
        clf.train_on_feature_set()
        print "Testing..."
        me_accuracy, me_precision, me_recall, me_f1_score = clf.test_and_return_results(test_tweets, test_targets, test_sentimentvalues)
        meaccuracy_avgs.append(me_accuracy)
        meprecision_avgs.append(me_precision)
        merecall_avgs.append(me_recall)
        mef1_avgs.append(me_f1_score)
    # Average every metric over the ten folds.
    print "Averages"
    nb_accuracy = reduce(lambda x,y: x+y,nbaccuracy_avgs)/len(nbaccuracy_avgs)
    nb_precision = reduce(lambda x,y: x+y,nbprecision_avgs)/len(nbprecision_avgs)
    nb_recall = reduce(lambda x,y: x+y,nbrecall_avgs)/len(nbrecall_avgs)
    nb_f1_score = reduce(lambda x,y: x+y,nbf1_avgs)/len(nbf1_avgs)
    svm_accuracy = reduce(lambda x,y: x+y,svmaccuracy_avgs)/len(svmaccuracy_avgs)
    svm_precision = reduce(lambda x,y: x+y,svmprecision_avgs)/len(svmprecision_avgs)
    svm_recall = reduce(lambda x,y: x+y,svmrecall_avgs)/len(svmrecall_avgs)
    svm_f1_score = reduce(lambda x,y: x+y,svmf1_avgs)/len(svmf1_avgs)
    me_accuracy = reduce(lambda x,y: x+y,meaccuracy_avgs)/len(meaccuracy_avgs)
    me_precision = reduce(lambda x,y: x+y,meprecision_avgs)/len(meprecision_avgs)
    me_recall = reduce(lambda x,y: x+y,merecall_avgs)/len(merecall_avgs)
    me_f1_score = reduce(lambda x,y: x+y,mef1_avgs)/len(mef1_avgs)
    data = {'Naive Bayes': [nb_accuracy, nb_precision, nb_recall, nb_f1_score],
            'SVM': [svm_accuracy, svm_precision, svm_recall, svm_f1_score],
            'Maximum Entropy': [me_accuracy, me_precision, me_recall, me_f1_score]}
    plotting.plot_performance_histogram(data, "subjectivity_"+feature_set)
    return data
def train_polarity_and_test_on_feature_set(tweets, feature_set, sentimentvalues, reduce_dataset=1):
"""
Performs training and testing with a given feature set key
"""
kfolds = range(0,10)
nbaccuracy_avgs = []
nbprecision_avgs = []
nbrecall_avgs = []
nbf1_avgs = []
svmaccuracy_avgs = []
svmprecision_avgs = []
svmrecall_avgs = []
svmf1_avgs = []
meaccuracy_avgs = []
meprecision_avgs = []
merecall_avgs = []
mef1_avgs = []
for kfoldcounter in kfolds:
print "--------------------------KFOLD NR ",kfoldcounter,"----------------------------------"
train_tweets, train_targets, test_tweets, test_targets, train_sentimentvalues, test_sentimentvalues = utils.make_polarity_train_and_test_and_targets(tweets,sentimentvalues, splitvalue=kfoldcounter*0.1, reduce_dataset=reduce_dataset)
# for tweet, target in zip(tweets,targets):
# try:
# print unicode(tweet.text), " ", target
# except UnicodeEncodeError:
# print tweet.text.encode('utf8'), " ", target
# except UnicodeDecodeError:
# print tweet.text, " ", target
#TRAINING NB
vect_options = {
'ngram_range': (1,1),
'max_df': 0.5
}
tfidf_options = {
'sublinear_tf': True,
'use_idf': True,
'smooth_idf': True,
}
print "Training NB polarity with feature set ",feature_set
clf = NB(train_tweets, train_targets, vect_options, tfidf_options)
clf.set_feature_set(feature_set, train_sentimentvalues)
clf.train_on_feature_set()
print "Testing..."
nb_accuracy, nb_precision, nb_recall, nb_f1_score = clf.test_and_return_results(test_tweets, test_targets, test_sentimentvalues)
nbaccuracy_avgs.append(nb_accuracy)
nbprecision_avgs.append(nb_precision)
nbrecall_avgs.append(nb_recall)
nbf1_avgs.append(nb_f1_score)
#TRAINING SVM
vect_options = {
'ngram_range': (1,1),
'max_df': 0.5
}
tfidf_options= {
'sublinear_tf': True,
'use_idf': True,
'smooth_idf': True,
}
print "Training SVM polarity on dataset of length ", len(train_tweets)
clf = SVM(train_tweets, train_targets, vect_options, tfidf_options)
clf.set_feature_set(feature_set, train_sentimentvalues)
clf.train_on_feature_set()
print "Testing..."
svm_accuracy, svm_precision, svm_recall, svm_f1_score = clf.test_and_return_results(test_tweets, test_targets, test_sentimentvalues)
svmaccuracy_avgs.append(svm_accuracy)
svmprecision_avgs.append(svm_precision)
svmrecall_avgs.append(svm_recall)
svmf1_avgs.append(svm_f1_score)
#TRAINING MAXENT
vect_options = {
'ngram_range': (1,1),
'max_df': 0.5
}
tfidf_options = {
'sublinear_tf': True,
'use_idf': True,
'smooth_idf': True,
}
print "Training MaxEnt polarity on dataset of length ", len(train_tweets)
clf = ME(train_tweets, train_targets, vect_options, tfidf_options)
clf.set_feature_set(feature_set, train_sentimentvalues)
clf.train_on_feature_set()
print "Testing..."
me_accuracy, me_precision, me_recall, me_f1_score = clf.test_and_return_results(test_tweets, test_targets, test_sentimentvalues)
meaccuracy_avgs.append(me_accuracy)
meprecision_avgs.append(me_precision)
merecall_avgs.append(me_recall)
mef1_avgs.append(me_f1_score)
print "Averages"
nb_accuracy = reduce(lambda x,y: x+y,nbaccuracy_avgs)/len(nbaccuracy_avgs)
nb_precision = reduce(lambda x,y: x+y,nbprecision_avgs)/len(nbprecision_avgs)
nb_recall = reduce(lambda x,y: x+y,nbrecall_avgs)/len(nbrecall_avgs)
nb_f1_score = reduce(lambda x,y: x+y,nbf1_avgs)/len(nbf1_avgs)
svm_accuracy = reduce(lambda x,y: x+y,svmaccuracy_avgs)/len(svmaccuracy_avgs)
svm_precision = reduce(lambda x,y: x+y,svmprecision_avgs)/len(svmprecision_avgs)
svm_recall = reduce(lambda x,y: x+y,svmrecall_avgs)/len(svmrecall_avgs)
svm_f1_score = reduce(lambda x,y: x+y,svmf1_avgs)/len(svmf1_avgs)
me_accuracy = reduce(lambda x,y: x+y,meaccuracy_avgs)/len(meaccuracy_avgs)
me_precision = reduce(lambda x,y: x+y,meprecision_avgs)/len(meprecision_avgs)
me_recall = reduce(lambda x,y: x+y,merecall_avgs)/len(merecall_avgs)
me_f1_score = reduce(lambda x,y: x+y,mef1_avgs)/len(mef1_avgs)
data = {'Naive Bayes': [nb_accuracy, nb_precision, nb_recall, nb_f1_score],
'SVM': [svm_accuracy, svm_precision, svm_recall, svm_f1_score],
'Maximum Entropy': [me_accuracy, me_precision, me_recall, me_f1_score]}
plotting.plot_performance_histogram(data, "polarity_"+feature_set)
return data
def perform_grid_search_on_featureset_SA_and_PA():
    """
    Grid-searches text-feature parameters for SVM, NB and MaxEnt, first on
    the subjectivity task (feature set 'SA'), then on polarity ('PA').
    """
    datasetnr = 3
    tweets = utils.get_pickles(datasetnr)
    sentimentvalues = feat_utils.get_sentiment_values(datasetnr)
    tweets = preprocessing.remove_link_classes(tweets)
    tweets = preprocessing.lower_case(tweets)
    tweets = preprocessing.remove_specialchars_round2(tweets)
    # Subjectivity: same search for each model class, in SVM/NB/ME order.
    train_tweets, train_targets, test_tweets, test_targets, train_sentimentvalues, test_sentimentvalues = utils.make_subjectivity_train_and_test_and_targets(tweets,sentimentvalues)
    for model_cls in (SVM, NB, ME):
        clf = model_cls(train_tweets, train_targets, None)
        clf.set_feature_set('SA', None)
        clf.grid_search_on_text_features(file_postfix='subjectivity')
    # Polarity: identical sweep on the polarity split.
    train_tweets, train_targets, test_tweets, test_targets, train_sentimentvalues, test_sentimentvalues = utils.make_polarity_train_and_test_and_targets(tweets,sentimentvalues)
    for model_cls in (SVM, NB, ME):
        clf = model_cls(train_tweets, train_targets, None)
        clf.set_feature_set('PA', None)
        clf.grid_search_on_text_features(file_postfix='polarity')
def train_and_test_dataset_increase():
datasetnr = 3
tweets = utils.get_pickles(datasetnr)
sentimentvalues = feat_utils.get_sentiment_values(datasetnr)
tweets = preprocessing.remove_link_classes(tweets)
tweets = preprocessing.lower_case(tweets)
tweets = preprocessing.remove_specialchars_round2(tweets)
accuracy_data = {'NB(SA)':[],'NB(SB)':[],'NB(SC)':[],
'SVM(SA)':[],'SVM(SB)':[],'SVM(SC)':[],
'MaxEnt(SA)':[],'MaxEnt(SB)':[],'MaxEnt(SC)':[],
'NB(PA)':[],'NB(PB)':[],'NB(PC)':[],
'SVM(PA)':[],'SVM(PB)':[],'SVM(PC)':[],
'MaxEnt(PA)':[],'MaxEnt(PB)':[],'MaxEnt(PC)':[]}
f1_data = {'NB(SA)':[],'NB(SB)':[],'NB(SC)':[],
'SVM(SA)':[],'SVM(SB)':[],'SVM(SC)':[],
'MaxEnt(SA)':[],'MaxEnt(SB)':[],'MaxEnt(SC)':[],
'NB(PA)':[],'NB(PB)':[],'NB(PC)':[],
'SVM(PA)':[],'SVM(PB)':[],'SVM(PC)':[],
'MaxEnt(PA)':[],'MaxEnt(PB)':[],'MaxEnt(PC)':[]}
for i in range(5,101,5):
print "=============================DATAPOINT NR. ",i,"========================================"
data = train_subjectivity_and_test_on_feature_set(tweets, 'SA', sentimentvalues, reduce_dataset=i*0.01)
print "DATA -- ",data
accuracy_data['NB(SA)'].append(data['Naive Bayes'][0])
f1_data['NB(SA)'].append(data['Naive Bayes'][3])
accuracy_data['SVM(SA)'].append(data['SVM'][0])
f1_data['SVM(SA)'].append(data['SVM'][3])
accuracy_data['MaxEnt(SA)'].append(data['Maximum Entropy'][0])
f1_data['MaxEnt(SA)'].append(data['Maximum Entropy'][3])
data = train_subjectivity_and_test_on_feature_set(tweets, 'SB', sentimentvalues, reduce_dataset=i*0.01)
print "DATA -- ",data
accuracy_data['NB(SB)'].append(data['Naive Bayes'][0])
f1_data['NB(SB)'].append(data['Naive Bayes'][3])
accuracy_data['SVM(SB)'].append(data['SVM'][0])
f1_data['SVM(SB)'].append(data['SVM'][3])
accuracy_data['MaxEnt(SB)'].append(data['Maximum Entropy'][0])
f1_data['MaxEnt(SB)'].append(data['Maximum Entropy'][3])
data = train_subjectivity_and_test_on_feature_set(tweets, 'SC', sentimentvalues, reduce_dataset=i*0.01)
print "DATA -- ",data
accuracy_data['NB(SC)'].append(data['Naive Bayes'][0])
f1_data['NB(SC)'].append(data['Naive Bayes'][3])
accuracy_data['SVM(SC)'].append(data['SVM'][0])
f1_data['SVM(SC)'].append(data['SVM'][3])
accuracy_data['MaxEnt(SC)'].append(data['Maximum Entropy'][0])
f1_data['MaxEnt(SC)'].append(data['Maximum Entropy'][3])
data = train_polarity_and_test_on_feature_set(tweets, 'PA', sentimentvalues, reduce_dataset=i*0.01)
print "DATA -- ",data
accuracy_data['NB(PA)'].append(data['Naive Bayes'][0])
f1_data['NB(PA)'].append(data['Naive Bayes'][3])
accuracy_data['SVM(PA)'].append(data['SVM'][0])
f1_data['SVM(PA)'].append(data['SVM'][3])
accuracy_data['MaxEnt(PA)'].append(data['Maximum Entropy'][0])
f1_data['MaxEnt(PA)'].append(data['Maximum Entropy'][3])
data = train_polarity_and_test_on_feature_set(tweets, 'PB', sentimentvalues, reduce_dataset=i*0.01)
print "DATA -- ",data
accuracy_data['NB(PB)'].append(data['Naive Bayes'][0])
f1_data['NB(PB)'].append(data['Naive Bayes'][3])
accuracy_data['SVM(PB)'].append(data['SVM'][0])
f1_data['SVM(PB)'].append(data['SVM'][3])
accuracy_data['MaxEnt(PB)'].append(data['Maximum Entropy'][0])
f1_data['MaxEnt(PB)'].append(data['Maximum Entropy'][3])
data = train_polarity_and_test_on_feature_set(tweets, 'PC', sentimentvalues, reduce_dataset=i*0.01)
print "DATA -- ",data
accuracy_data['NB(PC)'].append(data['Naive Bayes'][0])
f1_data['NB(PC)'].append(data['Naive Bayes'][3])
accuracy_data['SVM(PC)'].append(data['SVM'][0])
f1_data['SVM(PC)'].append(data['SVM'][3])
accuracy_data['MaxEnt(PC)'].append(data['Maximum Entropy'][0])
f1_data['MaxEnt(PC)'].append(data['Maximum Entropy'][3])
out = open('incremental_acc'+str(i), 'wb')
pickle.dump(accuracy_data, out)
out = open('incremental_f1'+str(i), 'wb')
pickle.dump(f1_data, out)
plotting.plot_temporal_sentiment(accuracy_data, filename="incremental_accuracy")
plotting.plot_temporal_sentiment(f1_data, filename="incremental_f1")
def test_aggregated_sentiments():
    """
    Trains the optimal subjectivity and polarity classifiers, classifies
    dataset 2, aggregates targets and predictions per day, and plots the
    aggregated subjectivity and polarity curves.
    """
    sub_clf = classifier.get_optimal_subjectivity_classifier()
    pol_clf = classifier.get_optimal_polarity_classifier()
    tweets = utils.get_pickles(2)
    sentimentvalues = utils.get_sentimentvalues(2)
    # splitvalue=1.0 routes every tweet into the "train" partition, so the
    # "train" variables below hold the full labelled dataset.
    sub_train_tweets, sub_train_targets, _, _, sub_train_sentiments, _ = utils.make_subjectivity_train_and_test_and_targets(tweets, sentimentvalues, splitvalue=1.0)
    pol_train_tweets, pol_train_targets, _, _, pol_train_sentiments, _ = utils.make_polarity_train_and_test_and_targets(tweets, sentimentvalues, splitvalue=1.0)
    sub_predictions = sub_clf.classify(sub_train_tweets, sub_train_sentiments)
    pol_predictions = pol_clf.classify(pol_train_tweets, pol_train_sentiments)
    print pol_train_targets, pol_predictions
    days, targets, predicts, total_frequencies = utils.temporally_aggregate_subjectivity(sub_train_tweets, sub_predictions, targets=sub_train_targets)
    data = {'Targets': [days, targets], 'Predictions': [days, predicts], 'Frequencies': [days,total_frequencies]}
    plotting.plot_subjectivity_aggregates(data, 'aggregated_subjectivity')
    # The polarity aggregation reuses total_frequencies from the subjectivity
    # aggregation above to express per-day polarity frequency as a ratio.
    days, targets, predicts, frequencies = utils.temporally_aggregate_polarity(pol_train_tweets, pol_predictions, targets=pol_train_targets)
    for i in range(len(days)):
        # Normalize per-day sums into per-day averages.
        targets[i]=targets[i]*1.0/frequencies[i]
        predicts[i]=predicts[i]*1.0/frequencies[i]
        frequencies[i]=frequencies[i]*1.0/total_frequencies[i]
    data = {'Targets': [days, targets], 'Predictions': [days, predicts], 'Frequencies': [days,frequencies]}
    plotting.plot_polarity_aggregates(data, 'aggregated_polarity')
def test_remporal_topics():
    """
    Loads the pickled temporally dense dataset, classifies subjectivity and
    polarity, extracts entity topics from the subjective tweets, aggregates
    polarity per topic over time, and pickles the aggregated data.

    NOTE: the misspelled name ("remporal") is kept intentionally because
    classifier.py dispatches to test.test_remporal_topics().
    """
    tweets1 = pickle.load(open('temporal_tweets1', 'rb'))
    tweets2 = pickle.load(open('temporal_tweets2', 'rb'))
    tweets = tweets1 + tweets2
    print len(tweets)
    sentiments = pickle.load(open('temporal_sentiments','rb'))
    print len(sentiments)
    subclf = classifier.get_optimal_subjectivity_classifier()
    polclf = classifier.get_optimal_polarity_classifier()
    #TODO: write the temporal aggregation etc. here
    sub_predictions = subclf.classify(tweets, sentiments)
    # Keep only tweets classified as subjective, and their sentiment values.
    subjective_tweets = [t for p,t in zip(sub_predictions,tweets) if p=="subjective"]
    subjective_sentiments = [s for p,s in zip(sub_predictions,sentiments) if p=="subjective"]
    pol_predictions = polclf.classify(subjective_tweets, subjective_sentiments)
    topics = entity_extraction.perform_entity_extraction(subjective_tweets, subjective_sentiments, use_pmi=True, breakword_min_freq=0.1, breakword_range=14)
    days, unique_topics, aggregated_values = utils.topically_aggregate_polarity(subjective_tweets, pol_predictions, topics=topics)
    data = {}
    for i in range(len(unique_topics)):
        # One time series per topic: [days, per-day aggregated polarity].
        data[unique_topics[i]] = [days, aggregated_values[i]]
    print data
    pickle.dump(data, open('topically_aggregated_polarity', 'wb'))
def preprocess_temporal_dataset():
    """
    Parses the raw temporally dense dataset, runs the preprocessing pipeline,
    performs the Google lexicon lookup, and pickles both results.
    """
    tweetlines = utils.get_dataset(utils.complete_datasets[3])
    # Skip blank/one-character lines before parsing.
    tweets = [tweet.to_tweet(line) for line in tweetlines if len(line) > 1]
    tweets = preprocessing.preprocess_tweets(tweets)
    sentiments = lexicon.perform_google_sentiment_lexicon_lookup(tweets)
    pickle.dump(sentiments, open('temporal_sentiments','wb'))
    pickle.dump(tweets, open('temporal_tweets2', 'wb'))
if __name__ == '__main__':
    # Train/test every subjectivity and polarity feature set on dataset 3.
    datasetnr = 3
    tweets = utils.get_pickles(datasetnr)
    sentimentvalues = feat_utils.get_sentiment_values(datasetnr)
    tweets = preprocessing.remove_link_classes(tweets)
    tweets = preprocessing.lower_case(tweets)
    tweets = preprocessing.remove_specialchars_round2(tweets)
    # Bug fix: the helpers take the per-tweet sentiment values as their third
    # argument; the original passed the dataset number for SA/SB and PA/PB.
    for feature_set in ('SA', 'SB', 'SC'):
        train_subjectivity_and_test_on_feature_set(tweets, feature_set, sentimentvalues)
    for feature_set in ('PA', 'PB', 'PC'):
        train_polarity_and_test_on_feature_set(tweets, feature_set, sentimentvalues)
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,072
|
andrely/twitter-sentiment
|
refs/heads/master
|
/classifier.py
|
'''
Created on 11. mars 2014
@author: JohnArne
'''
import argparse
import utils
import preprocessing
import retriever_tweepy
from models.nb import NB
from models.svm import SVM
from models.me import ME
from models import features
from models import model
from lexicon import lexicon
import test
import annotation
import easygui_gui
from retriever_tweepy import TweetRetriever
import entity_extraction
class Classifier(object):
    """
    Class for handling the training and testing of a given model.
    Takes in a selected model type(NV/SVM/ME) trains it on a given dataset, then tests it.
    """
    def __init__(self, subjectivity_model, polarity_model):
        # Two-stage pipeline: subjectivity classification first, polarity second.
        self.subjectivity_model = subjectivity_model
        self.polarity_model = polarity_model
    def test(self):
        """
        Tests the given model on a partition of the dataset.
        """
        # NOTE(review): not implemented yet.
    def classify(self, tweets):
        """
        Takes in a list of tweets and classifies with all three classes using the two trained models
        """
        sentiments = []
        predictions = self.subjectivity_model.classify(tweets)
        # NOTE(review): looks unfinished -- the subjectivity predictions are
        # never combined with the polarity model, and the returned list is
        # always empty; verify before relying on this method.
        return sentiments
    def save_model(self):
        # NOTE(review): open() is called with no arguments and would raise a
        # TypeError at runtime; this method appears to be a stub.
        file = open()
    def train_and_store_results(self):
        """
        Trains the given model on the dataset using the three different models, and different feature sets. Stores the results of the runs.
        """
        dataset = "random_dataset"
        tweets = utils.get_pickles(dataset)
        # NOTE(review): self.model is never assigned anywhere in this class --
        # presumably one of the two models set in __init__ was meant; verify.
        self.model.set_feature_set('A')
        self.model.train_on_feature_set()
def get_optimal_subjectivity_classifier():
    """
    Trains and returns the optimal subjectivity classifier: an SVM over
    feature set 'SA' with the tuned vectorizer and tf-idf options.
    """
    dataset_nr = 3
    tweets = utils.get_pickles(dataset_nr)
    tweets, targets = utils.make_subjectivity_targets(tweets)
    # Tuned text-feature options found by the grid search.
    vect_options = {'ngram_range': (1, 1), 'max_df': 0.5}
    tfidf_options = {'sublinear_tf': False, 'use_idf': True, 'smooth_idf': True}
    clf = SVM(tweets, targets, vect_options, tfidf_options)
    clf.set_feature_set('SA', utils.get_sentimentvalues(dataset_nr))
    clf.train_on_feature_set()
    return clf
def get_optimal_polarity_classifier():
    """
    Trains and returns the optimal polarity classifier: an SVM over feature
    set 'PC2' with Google lexicon sentiment values and tuned text options.
    """
    dataset_nr = 3
    tweets = utils.get_pickles(dataset_nr)
    tweets, targets = utils.make_polarity_targets(tweets)
    # Tuned text-feature options found by the grid search.
    vect_options = {'ngram_range': (1, 1), 'max_df': 0.5}
    tfidf_options = {'sublinear_tf': False, 'use_idf': True, 'smooth_idf': True}
    clf = SVM(tweets, targets, vect_options, tfidf_options)
    clf.set_feature_set('PC2', features.get_google_sentiment_values(dataset_nr))
    clf.train_on_feature_set()
    return clf
if __name__ == '__main__':
    # Command-line entry point: each flag triggers one stage of the pipeline
    # (retrieval, preprocessing, annotation, lexicon lookup, training/tests).
    parser = argparse.ArgumentParser(description="Commands for classification")
    parser.add_argument("-pre1", action="store_true", dest="preprocess1", default=False, help="Perform first round preprocessing: Duplicate and retweet removal")
    parser.add_argument("-pre2", action="store_true", dest="preprocess2", default=False, help="Perform second round preprocessing: Text cleanup operations, feature extractions, POS-tagging.")
    parser.add_argument("-q", action="store", dest="tweet_query", default=None, help="Get tweets using the given query.")
    parser.add_argument("-a", action="store_true", dest="annotate", default=False, help="Start annotation sequence.")
    parser.add_argument("-analyze", action="store_true", dest="analyze", default=False, help="Perform a re-analysis of the pickled datasets. This analysis is also performed as part of the second preprocessing.")
    parser.add_argument("-posanalyze", action="store_true", dest="posanalyze", default=False, help="Perform a pos-tag analysis of the pickled datasets.")
    parser.add_argument("-lex1", action="store_true", dest="run_lexicon1", default=False, help="Run lexicon translation using Bing and lookup on stored tweets")
    parser.add_argument("-lex2", action="store_true", dest="run_lexicon2", default=False, help="Run lexicon translation using Google and lookup on stored tweets")
    parser.add_argument("-optimize", action="store_true", dest="optimize", default=False, help="Find optimal parameters for text classification with SVM, NB, and MaxEnt. Stores the optimal parameters for each algorithm.")
    parser.add_argument("-test", action="store_true", dest="train_and_test", default=False, help="Train and test on subjectivity and polarity and create a diagram of the results.")
    parser.add_argument("-test_increment", action="store_true", dest="test_incremental", default=False, help="Train and test incremental dataset results and create a diagram of the results.")
    parser.add_argument("-test_aggregated", action="store_true", dest="test_aggregated", default=False, help="Train and test aggregated results from erna solberg dataset and create a diagram of the results.")
    parser.add_argument("-test_entities", action="store_true", dest="test_entities", default=False, help="Test topic detection on topic-annotated rosenborg dataset and create a diagram of the results.")
    parser.add_argument("-test_temptops", action="store_true", dest="test_temptops", default=False, help="Train and test topically aggregated results from a temporally dense dataset and create a diagram of the results.")
    parsameters = parser.parse_args()
    # Flags are not mutually exclusive: every requested stage runs, in the
    # fixed order below.
    if parsameters.preprocess1:
        preprocessing.initial_preprocess_all_datasets()
    if parsameters.preprocess2:
        preprocessing.classification_preprocess_all_datasets()
    if parsameters.tweet_query:
        retriever = TweetRetriever(parsameters.tweet_query)
        retriever.retrieve_for_dataset()
    if parsameters.annotate:
        annotation.user_annotation()
    if parsameters.analyze:
        preprocessing.re_analyze()
    if parsameters.posanalyze:
        preprocessing.pos_analyze()
    if parsameters.run_lexicon1:
        preprocessing.bing_lexicon_lookup()
    if parsameters.run_lexicon2:
        preprocessing.google_lexicon_lookup()
    if parsameters.optimize:
        test.perform_grid_search_on_featureset_SA_and_PA()
    if parsameters.train_and_test:
        test.train_and_test_subjectivity_and_polarity()
    if parsameters.test_incremental:
        test.train_and_test_dataset_increase()
    if parsameters.test_aggregated:
        test.test_aggregated_sentiments()
    if parsameters.test_temptops:
        test.test_remporal_topics()
    if parsameters.test_entities:
        entity_extraction.perform_and_test_extraction()
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,073
|
andrely/twitter-sentiment
|
refs/heads/master
|
/preprocessing.py
|
'''
Created on 21. apr. 2014
@author: JohnArne
'''
import utils
from calendar import main
import tweet
from numpy.core.numeric import correlate
from tweet import Tweet
import re
from tagger import Tagger
from analyzer import Analyzer
import string
from lexicon import lexicon
import plotting
from analyzer import pos_tag_analyze
def remove_retweets(tweets):
    """
    Strips the leading retweet marker ("RT ") from every retweet's text body.

    Params:
        tweets: list of tweet objects with a .text attribute.
    Returns:
        the same list, mutated in place.
    """
    for tweet in tweets:
        textbody = tweet.text
        # Bug fix: the original used `is "RT"`, an identity comparison that
        # is False for a freshly sliced string, so retweets were never
        # actually stripped. Equality is what was intended.
        if textbody[:2] == "RT":
            tweet.text = textbody[3:]
    return tweets
def remove_duplicates_and_retweets(tweets):
    """
    Drops retweets (texts starting with "RT"), then removes tweets whose text
    body duplicates an earlier tweet's, preserving first-seen order.

    Params:
        tweets: list of tweet objects with a .text attribute.
    Returns:
        a new list of unique, non-retweet tweet objects.
    """
    tweets = [tweet for tweet in tweets if not tweet.text[:2]=="RT"]
    unique_tweets = []
    # A set gives O(1) membership checks; the original scanned a list (O(n))
    # and also carried an unused `textbodies` variable.
    seen_texts = set()
    for t in tweets:
        if t.text not in seen_texts:
            unique_tweets.append(t)
            seen_texts.add(t.text)
    return unique_tweets
def remove_retweet_tags(tweets):
    """Strip a leading "RT" marker from each tweet's text body, in place."""
    for current in tweets:
        if current.text[:2] == 'RT':
            current.text = current.text[2:]
    return tweets
def correct_words(tweets):
    """
    Performs simple word correction.
    Collapses any vowel repeated 2+ times to a single vowel, and any
    consonant repeated 3+ times to a double consonant.
    """
    for tweet in tweets:
        textbody = tweet.text
        for vowel in vowels:
            # e.g. "aaa*" matches two-or-more repetitions of the vowel.
            pattern = re.compile(vowel*3+"*")
            try:
                textbody = pattern.sub(vowel, textbody)
            except UnicodeDecodeError:
                # Python 2 byte strings: retry on the utf-8-decoded text.
                textbody = pattern.sub(vowel, textbody.decode('utf8'))
        for consonant in consonants:
            # e.g. "bbbb*" matches three-or-more repetitions; keep a double.
            pattern = re.compile(consonant+consonant+consonant+consonant+"*")
            try:
                textbody = pattern.sub(consonant*2, textbody)
            except UnicodeDecodeError:
                textbody = pattern.sub(consonant*2, textbody.decode('utf8'))
        tweet.text = textbody
    return tweets
def remove_specialchars(tweets):
    """
    Removes certain special characters. Does not remove !, ?, or ., as these are neeeded for the POS tagger to separate phrases.
    Also replaces underscores with spaces.
    """
    for tweet in tweets:
        textbody = tweet.text
        # NOTE(review): inside this alternation, '[|]' forms a character
        # class matching only '|' -- literal '[' and ']' are never removed,
        # which may not match the intent; confirm before changing, since the
        # annotated datasets were produced with this exact behavior.
        pattern = re.compile('({|}|[|]|-|:|"|@|\*|\)|\()')
        try:
            textbody = pattern.sub("", textbody)
        except UnicodeDecodeError:
            # Python 2 byte strings: retry on the utf-8-decoded text.
            textbody = pattern.sub("", textbody.decode('utf8'))
        try:
            # string.replace is the (deprecated) Python 2 string-module helper.
            textbody = string.replace(textbody, "_", " ")
        except UnicodeEncodeError:
            textbody = string.replace(textbody.decode('utf8'), "_", " ")
#        textbody = string.replace(textbody, "?", "")
#        textbody = string.replace(textbody, ".", "")
#        textbody = string.replace(textbody, "!", "")
        tweet.text = textbody
    return tweets
def remove_specialchars_round2(tweets):
    """
    Second-pass special-character removal: strips braces, brackets, dashes,
    colons, quotes, @, *, parentheses, backslashes, pipes and periods, and
    replaces underscores with spaces.

    Bug fix: the original pattern's '[|]' formed a character class (matching
    only '|') and the final '.' was unescaped, so brackets and periods were
    not handled as intended. An explicit character class spells out each
    literal character.
    """
    pattern = re.compile(r'[{}\[\]\-:"@*()\\.|]')
    for tweet in tweets:
        textbody = tweet.text
        try:
            textbody = pattern.sub("", textbody)
        except UnicodeDecodeError:
            # Python 2 byte strings: retry on the utf-8-decoded text.
            textbody = pattern.sub("", textbody.decode('utf8'))
        try:
            textbody = textbody.replace("_", " ")
        except UnicodeEncodeError:
            textbody = textbody.decode('utf8').replace("_", " ")
        tweet.text = textbody
    return tweets
def remove_hastags_and_users(tweets):
    """
    Strips '#' and '@' markers from the text body while recording them on
    the tweet: hashtags and mentioned users are appended to their lists and
    counted, and word_count is incremented for every non-empty token.
    Hashtag words are dropped from the text (leaving a space); user words
    are kept without their '@' prefix.
    """
    for tw in tweets:
        rebuilt = ""
        for token in tw.text.split(" "):
            if not token:
                continue
            tw.word_count = tw.word_count + 1
            first = token[0]
            if first == "#":
                tw.nrof_hashtags = tw.nrof_hashtags + 1
                tw.hashtags.append(token[1:])
                rebuilt = rebuilt + " "
            elif first == "@":
                tw.nrof_usersmentioned = tw.nrof_usersmentioned + 1
                tw.users_mentioned.append(token[1:])
                rebuilt = rebuilt + token[1:] + " "
            else:
                rebuilt = rebuilt + token + " "
        tw.text = rebuilt
    return tweets
def count_emoticons(tweets):
    """
    Counts happy and sad emoticons per tweet, stores the counts on the tweet
    object, then strips every known emoticon class from the text body.

    Bug fix: the original applied each replacement to the stale `textbody`
    variable, so only the last emoticon class was actually removed from the
    stored text. Removals are now accumulated.
    """
    for tweet in tweets:
        textbody = tweet.text
        tweet.nrof_happyemoticons = textbody.count(":)") + textbody.count(":D")
        tweet.nrof_sademoticons = textbody.count(":(") + textbody.count(":'(") + textbody.count(":,(")
        for emoticon in emoticon_class:
            textbody = textbody.replace(emoticon, "")
        tweet.text = textbody
    return tweets
def count_exclamations(tweets):
    """
    Counts exclamation marks and question marks, stores their number for
    future feature use, then removes all sentence-stop characters (?, !, ., :).
    """
    # Hoisted: the stop-character pattern is loop-invariant; also uses the
    # str.count method instead of the deprecated Python 2 string-module helper.
    pattern = re.compile('(\?|!|\.|:)')
    for tweet in tweets:
        textbody = tweet.text
        tweet.nrof_exclamations = textbody.count("!")
        tweet.nrof_questionmarks = textbody.count("?")
        tweet.text = pattern.sub("", textbody)
    return tweets
def replace_links(tweets):
    """
    Blanks out any http/www-prefixed word in each tweet's text and records
    the removed links in the tweet's `links` list.
    """
    def _looks_like_link(word):
        # Links are recognised purely by their http/www prefix.
        return word[:4] == "http" or word[:3] == "www"
    for current in tweets:
        words = current.text.split(' ')
        current.text = " ".join("" if _looks_like_link(w) else w for w in words)
        current.links = [w for w in words if _looks_like_link(w)]
    return tweets
def remove_stopwords(tweets):
    """
    Removes common stopwords based on a created stopword list.
    """
    # NOTE(review): not implemented -- currently returns the tweets unchanged.
    return tweets
def lower_case(tweets):
    """Lower-case every tweet's text body in place and return the list."""
    for current in tweets:
        current.text = current.text.lower()
    return tweets
def stem(tweets):
    """
    Stems and splits the tweet texts and stores them in the processed words list in the tweet object.
    """
    # NOTE(review): not implemented -- currently returns the tweets unchanged;
    # the splitting described above is actually done by tokenize().
    return tweets
def tokenize(tweets):
    """Split each text on spaces, keeping tokens longer than one character."""
    for current in tweets:
        tokens = current.text.split(" ")
        current.processed_words = list(filter(lambda w: len(w) > 1, tokens))
    return tweets
def pos_tag(tweets):
    """
    Uses the POS tagger interface to tag part-of-speech in all the tweets texts, stores it as dict in the tweet objects.
    """
    print "Tagging..."
    untagged_texts = []
    for tweet in tweets:
        # NOTE(review): a new Tagger is constructed for every tweet; if
        # construction is expensive it could be hoisted out of the loop.
        tagger = Tagger()
        textbody = tweet.text
        # Split the text into phrases on sentence stops, tagging each phrase.
        for phrase in re.split("\.|!|\?", textbody):
            if len(phrase)<2: continue
            # Strip any remaining sentence stops before tagging.
            phrase = string.replace(phrase, "?", "")
            phrase = string.replace(phrase, "!", "")
            phrase = string.replace(phrase, ".", "")
            tags = tagger.tag_text(phrase)
            if tags!=None:
                tweet.tagged_words.append(tags)
    # NOTE(review): untagged_texts is never populated, so this report is
    # always empty -- looks like leftover debugging scaffolding.
    print "Untagged texts: "
    for text in untagged_texts:
        print text
    print "Tagging done."
    return tweets
def remove_link_classes(tweets):
    """
    Removes every '<link>' placeholder from the tweet texts, recording the
    character positions of the placeholders in the tweet's link_pos list.
    """
    for current in tweets:
        # Record positions first, then blank out the placeholder words.
        current.link_pos = [m.start() for m in re.finditer('<link>', current.text)]
        current.text = " ".join("" if w == "<link>" else w for w in current.text.split(' '))
    return tweets
def bing_lexicon_lookup():
"""
Fetches the tweets and performs lexicon translatino and lookup.
"""
tweets = utils.get_pickles(0)
words_with_values = lexicon.perform_bing_sentiment_lexicon_lookup(tweets)
print "Storing..."
utils.store_sentimentvalues(words_with_values, "models/sentimentvalues_random_dataset")
tweets = utils.get_pickles(1)
words_with_values = lexicon.perform_bing_sentiment_lexicon_lookup(tweets)
print "Storing..."
utils.store_sentimentvalues(words_with_values, "models/sentimentvalues_rosenborg_dataset")
tweets = utils.get_pickles(2)
words_with_values = lexicon.perform_bing_sentiment_lexicon_lookup(tweets)
print "Storing..."
utils.store_sentimentvalues(words_with_values, "models/sentimentvalues_erna_dataset")
def google_lexicon_lookup():
"""
Fetches the tweets and performs lexicon translatino and lookup.
"""
tweets = utils.get_pickles(0)
words_with_values = lexicon.perform_google_sentiment_lexicon_lookup(tweets)
print "Storing..."
utils.store_sentimentvalues(words_with_values, "models/google_sentimentvalues_random_dataset")
tweets = utils.get_pickles(1)
words_with_values = lexicon.perform_google_sentiment_lexicon_lookup(tweets)
print "Storing..."
utils.store_sentimentvalues(words_with_values, "models/google_sentimentvalues_rosenborg_dataset")
tweets = utils.get_pickles(2)
words_with_values = lexicon.perform_google_sentiment_lexicon_lookup(tweets)
print "Storing..."
utils.store_sentimentvalues(words_with_values, "models/google_sentimentvalues_erna_dataset")
def re_analyze():
    """
    Unpickles the three preprocessed datasets, re-runs the Analyzer on each,
    and plots the coarse and detailed word-class averages.
    """
    # Plot labels for datasets 0..2; quoted labels show the search query.
    labels = ["random",'"rosenborg"','"erna solberg"']
    data = {}
    worddata = {}
    for i in xrange(3):
        tweets = utils.get_pickles(i)
        analyzer = Analyzer(utils.annotated_datasets[i], tweets)
        avg_list,words_list= analyzer.analyze()
        print avg_list
        worddata[labels[i]] = words_list
        data[labels[i]] = avg_list
    # One plot for the coarse averages, one for the detailed breakdown.
    plotting.average_wordclasses(worddata, "averages")
    plotting.detailed_average_wordclasses(data, "averages2")
def pos_analyze():
    """
    Runs a POS-tag analysis over the pickled dataset 3 and plots both the
    subjectivity and polarity breakdowns.
    """
    subjectivity_data, polarity_data = pos_tag_analyze(utils.get_pickles(3))
    plotting.plot_pos_analysis(subjectivity_data, "sub_analysis")
    plotting.plot_pos_analysis(polarity_data, "pos_analysis")
    return True
def initial_preprocess_all_datasets():
    """
    First preprocessing pass over every dataset, run before annotation:
    parses the raw lines into tweet objects, drops duplicates and retweets,
    and writes the cleaned tsv lines back out.
    """
    for i in range(len(utils.datasets)):
        raw_lines = utils.get_dataset(utils.complete_datasets[i])
        parsed = [tweet.to_tweet(line) for line in raw_lines]
        parsed = remove_duplicates_and_retweets(parsed)
        utils.store_dataset([t.to_tsv() for t in parsed], utils.datasets[i])
def classification_preprocess_all_datasets():
    """
    Preprocesses all datasets to be ready for classification task.
    This will include stemming, word correction, lower-casing, hashtag removal, special char removal.
    """
    for i in range(0,len(utils.annotated_datasets)):
        tweetlines = utils.get_dataset(utils.annotated_datasets[i])
        tweets = []
        for line in tweetlines:
            if len(line)>1:
                tweets.append(tweet.to_tweet(line))
#        tweets = lower_case(tweets)
        # Run the full preprocessing pipeline in fixed order; pos_tag must
        # come after the text-cleanup steps so phrases are tagged cleanly.
        tweets = remove_hastags_and_users(tweets)
        tweets = count_emoticons(tweets)
        tweets = replace_links(tweets)
        tweets = remove_specialchars(tweets)
        tweets = correct_words(tweets)
        tweets = stem(tweets)
        tweets = tokenize(tweets)
        tweets = pos_tag(tweets)
        tweets = count_exclamations(tweets)
        analyzer = Analyzer(utils.annotated_datasets[i], tweets)
        stats = analyzer.analyze()
        print stats
        #store tweets in pickles...
        print "Storing pickles..."
        # The slice appears to strip a fixed-length directory prefix (24
        # chars) and the '.tsv' suffix to form the pickle name -- verify the
        # prefix length against utils.annotated_datasets.
        utils.store_pickles(tweets, utils.annotated_datasets[i][24:len(utils.annotated_datasets[i])-4])
def preprocess_tweets(tweets):
# tweets = lower_case(tweets)
print "Preprocessing"
tweets = remove_retweet_tags(tweets)
tweets = remove_hastags_and_users(tweets)
tweets = count_emoticons(tweets)
tweets = replace_links(tweets)
tweets = remove_specialchars(tweets)
tweets = correct_words(tweets)
tweets = stem(tweets)
tweets = tokenize(tweets)
tweets = pos_tag(tweets)
tweets = count_exclamations(tweets)
return tweets
def preprocess_tweet(tweet):
    """
    Preprocess a single tweet by running it through the classification
    pipeline (no retweet-tag removal) and return the processed tweet.
    """
    batch = [tweet]
    for step in (remove_hastags_and_users, count_emoticons, replace_links,
                 remove_specialchars, correct_words, stem, tokenize,
                 pos_tag, count_exclamations):
        batch = step(batch)
    return batch[0]
# Norwegian vowel set used by correct_words for repeat-collapsing.
# NOTE(review): u"\u00D8" is uppercase Ø while the rest are lowercase --
# probably u"\u00F8" (ø) was intended; verify before changing.
vowels = [u"a", u"e", u"i", u"o", u"u", u"y", u"\u00E6", u"\u00D8", u"\u00E5"]
# Consonants collapsed by correct_words when repeated three or more times.
consonants = [u"b", u"c", u"d", u"f", u"g", u"h", u"j", u"k", u"l", u"m", u"n", u"p", u"q", u"r", u"s", u"t", u"v", u"w", u"x", u"z"]
# Emoticon classes counted and stripped by count_emoticons.
emoticon_class = [":)",":D",":(",":'("]
# NOTE(review): the two values below appear unused within this file --
# possibly leftovers; verify against other modules before removing.
special_chars_removal = '(<|>|{|}|[|]|-|_|*|")'
replacement_chars = {u"&": u"og",
                     u"6amp;": u"og",
                     u"+": u"og"}
if __name__ == '__main__':
    # Manual smoke test: runs the preprocessing pipeline step by step on the
    # erna test dataset, printing intermediate stats along the way.
    #Testing
#    tweets = [Tweet("13:37", "johnarne", "Jeg () haaater drittt!!!? :( #justinbieber"), Tweet("13:37", "johnarne", "Jeg eeelsker @erna_solberg http://www.erna.no :) #love #jernerna" )]
#    for tweet in tweets:
#        tweet.set_sentiment("negative")
#        print tweet
    tweetlines = utils.get_dataset("test_annotated_data/erna_dataset.tsv")
    tweets = []
    for line in tweetlines:
        if len(line)>1:
            tweets.append(tweet.to_tweet(line))
#    tweets = lower_case(tweets)
    tweets = remove_hastags_and_users(tweets)
    tweets = count_emoticons(tweets)
    tweets = replace_links(tweets)
    tweets = remove_specialchars(tweets)
    for tweet in tweets:
        print tweet
    tweets = correct_words(tweets)
    tweets = stem(tweets)
    tweets = tokenize(tweets)
    for tweet in tweets:
        print tweet.stat_str()
    tweets = pos_tag(tweets)
    tweets = count_exclamations(tweets)
    for tweet in tweets:
        print tweet.stat_str()
    # Final per-dataset statistics from the Analyzer.
    analyzer = Analyzer("test_annotated_data/erna_dataset.tsv", tweets)
    stats = analyzer.analyze()
    print stats
|
{"/classifier.py": ["/utils.py", "/preprocessing.py", "/retriever_tweepy.py", "/models/nb.py", "/models/svm.py", "/models/me.py", "/models/__init__.py", "/lexicon/__init__.py", "/test.py", "/annotation.py", "/easygui_gui.py", "/entity_extraction.py"]}
|
5,074
|
Jsinclairisto/flask-blog
|
refs/heads/master
|
/app/routes.py
|
#import certain functions into the global
#namespace
from app import app
from os import walk
from flask_user import roles_required, login_required
from markdown import markdown
from flask import render_template_string, render_template, flash, redirect, request
from app.blog_helpers import render_markdown, LoginForm
import urllib.request
import os
#safe global import (okay to use)
import flask
#home page
@app.route("/")
def home():
    """Render the site's landing page (index.html)."""
    return render_template('index.html')
#Success page. Directs here after form is submitted
@app.route('/success')
def success():
    """Landing page shown after a successful form submission."""
    # NOTE(review): the original set and printed a local hasAccess flag that
    # was never used; removed as leftover debug output.
    return render_template('success.html')
#Login page
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Show the sign-in form; on a valid POST redirect to the success page.

    GET (or an invalid POST) re-renders the login template with the bound
    form so validation errors are displayed.

    Fix: removed the unused ``hasAccess`` locals and the commented-out
    dead branch.
    """
    form = LoginForm()
    if form.validate_on_submit():
        return redirect('success')
    return render_template('login.html', title='Sign In', form=form)
@app.route('/all')
def temp_listings():
    """List every file under app/templates so each can be opened via /<view_name>.

    Fix: the original built ``os.getcwd() + '/app/templates'`` and then took
    ``os.path.relpath`` back against the cwd — a round-trip that always yields
    the relative path ``app/templates``. Build it directly (and portably)
    with ``os.path.join``.
    """
    # Relative path, so it works for any user's checkout location as long as
    # the app is started from the project root (same assumption as before).
    file_path = os.path.join('app', 'templates')
    files = os.listdir(file_path)
    return render_template('all.html', files=files)
#generic page
@app.route('/<view_name>')
# input parameter name must match the route parameter
def render_page(view_name):
    """Render an arbitrary template: read it, run it through Markdown,
    then render the result as a template string.

    Fix: removed the leftover debug ``print``.
    """
    html = render_markdown(view_name + '.html')
    return render_template_string(html, view_name=view_name)
@app.route('/edit/<edit_file>')
@login_required
def edit(edit_file):
    """Render the edit view for one template file.

    Authentication is enforced by ``@login_required``.

    Fix: the original called the ``login()`` view here and discarded the
    result (``hasAccess = login()``) — that executed the whole login view
    (form construction, template render) as a side effect on every edit
    request. Removed.
    """
    output_page = render_markdown(edit_file + '.html')
    return render_template('edit.html', output_page=output_page)
@app.route('/createpost')
@login_required
def createpost():
    """Placeholder post-creation page; only reachable when logged in."""
    body = '<h1>Hello People of Earth</h1>'
    return body
@app.route('/createaccount')
def createaccount():
    """Placeholder account-creation page."""
    body = '<h1>Currently in development...</h1>'
    return body
|
{"/app/routes.py": ["/app/__init__.py", "/app/blog_helpers.py"], "/app/blog_helpers.py": ["/app/__init__.py"]}
|
5,075
|
Jsinclairisto/flask-blog
|
refs/heads/master
|
/app/__init__.py
|
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_user import login_required, UserManager, UserMixin, SQLAlchemyAdapter
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from flask_sqlalchemy import SQLAlchemy
import config

# Module-level application setup (no app factory).
app = Flask(__name__)
# NOTE(review): hard-coded secret key checked into source — move to
# config/environment before deploying.
app.config['SECRET_KEY'] = 'shhhhh_its_a_secret'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Local SQLite database file.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
app.config['CSRF_ENABLED'] = True
# Flask-User: authenticate by username only, no e-mail confirmation workflow.
app.config['USER_ENABLE_EMAIL'] = False
db = SQLAlchemy(app)
Bootstrap(app)

# Imported last: routes imports `app` from this module, so importing it any
# earlier would create a circular import.
from app import routes
|
{"/app/routes.py": ["/app/__init__.py", "/app/blog_helpers.py"], "/app/blog_helpers.py": ["/app/__init__.py"]}
|
5,076
|
Jsinclairisto/flask-blog
|
refs/heads/master
|
/app/blog_helpers.py
|
from markdown import markdown
from flask_wtf import FlaskForm
from flask import render_template
from flask_user import login_required, UserManager, UserMixin, SQLAlchemyAdapter
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from flask_sqlalchemy import SQLAlchemy
from app import app, db
import os
class User(db.Model, UserMixin):
    """Application user persisted via SQLAlchemy.

    ``UserMixin`` supplies the attributes Flask-User needs for
    authentication/session handling.
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(50), nullable=False, unique=True)
    # 255 chars — presumably holds a password hash (Flask-User convention);
    # confirm it is never stored in plain text.
    password = db.Column(db.String(255), nullable=False, server_default='')
    # Defaults to inactive ('0') at the database level.
    active = db.Column(db.Boolean(), nullable=False, server_default='0')
class LoginForm(FlaskForm):
    """Sign-in form: username/password (6-25 chars each) plus remember-me."""
    username = StringField('Username', validators=[DataRequired(), Length(min=6, max=25)])
    password = PasswordField('Password', validators=[DataRequired(), Length(min=6, max=25)])
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign in')
class SignInForm(FlaskForm):
    """Registration form: username, password and e-mail address.

    NOTE(review): unlike LoginForm this declares no submit field — confirm the
    template supplies its own submit button.
    """
    username = StringField('Username', validators=[DataRequired(), Length(min=6, max=25)])
    password = PasswordField('Password', validators=[DataRequired(), Length(min=6, max=25)])
    emailAddress = StringField('Email', validators=[DataRequired(),Length(min=6, max=25)])
# Wire the User model into Flask-User so it manages registration/login views.
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
def render_markdown(file_name, dir_path = 'app/templates'):
    """Read *file_name* from *dir_path*, convert its Markdown to HTML
    and return the HTML string.

    Args:
        file_name: name of the file (including extension) to render.
        dir_path: directory containing the file; defaults to the app's
            template folder.

    Returns:
        The file contents rendered to HTML by ``markdown``.

    Fix: open the file with an explicit UTF-8 encoding — the previous
    call used the platform's locale default, which breaks non-ASCII
    content on some systems.
    """
    # os.path.join creates an OS-valid path
    path = os.path.join(dir_path, file_name)
    with open(path, encoding='utf-8') as html_file:
        html = html_file.read()
    return markdown(html)
|
{"/app/routes.py": ["/app/__init__.py", "/app/blog_helpers.py"], "/app/blog_helpers.py": ["/app/__init__.py"]}
|
5,082
|
ktimez/ktimezForum
|
refs/heads/master
|
/Questions/admin.py
|
from django.contrib import admin
from .models import AskedQuestions, Replies, Vote

# Register your models here.
# Expose the forum models in the Django admin with default ModelAdmin options.
admin.site.register(AskedQuestions)
admin.site.register(Replies)
admin.site.register(Vote)
|
{"/Questions/admin.py": ["/Questions/models.py"], "/Questions/views.py": ["/Questions/models.py", "/Questions/forms.py"], "/Questions/forms.py": ["/Questions/models.py"]}
|
5,083
|
ktimez/ktimezForum
|
refs/heads/master
|
/Questions/views.py
|
from django.shortcuts import render, redirect
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView
from .models import AskedQuestions, Replies
from .forms import AskQ
from .forms import SignUpForm
from django.contrib.auth import login, authenticate
# Create your views here.
class HomeListView(ListView):
    """Home page: lists every AskedQuestions row."""
    model = AskedQuestions
    template_name = 'askedquestions_list.html'
class QuestionDetailView(DetailView):
    """Detail page for a single question, with its replies in the context."""
    model = AskedQuestions

    def get_context_data(self, **kwargs):
        # Extend the default context with the reverse FK set of replies.
        context = super(QuestionDetailView, self).get_context_data(**kwargs)
        question = self.get_object()
        context['comments'] = question.replies_set.all()
        return context
class QuestionCreateView(CreateView):
    """Form view for asking a new question; stamps the current user as owner."""
    form_class = AskQ
    template_name = 'Questions/addQuestion.html'
    #success_url = '/'
    # NOTE(review): `login_url` only takes effect with LoginRequiredMixin /
    # AccessMixin — confirm anonymous users are actually redirected here.
    login_url = '/login/'

    def form_valid(self, form):
        # Save without committing so we can attach the request user first.
        instance = form.save(commit=False)
        instance.owner = self.request.user
        # NOTE(review): AskedQuestions declares no `rank_scored` field (Replies
        # does) — this attribute is set on the instance but never persisted;
        # verify intent.
        instance.rank_scored = 0
        instance.save()
        return super(QuestionCreateView, self).form_valid(form)
def signup(request):
    """Register a new account, log the user in, and redirect to 'home'.

    GET renders an empty SignUpForm. POST validates the form, saves the
    user, authenticates with the just-entered credentials and starts a
    session. An invalid POST falls through and re-renders the bound form
    so errors are shown.
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            raw_password = form.cleaned_data.get('password1')
            # Re-authenticate with the raw credentials to obtain a user object
            # suitable for login().
            user = authenticate(username=username, password=raw_password)
            login(request, user)
            return redirect('home')
    else:
        form = SignUpForm()
    return render(request, 'registration/signup.html', {'form': form})
class QuestionEditView(UpdateView):
    """Edit an existing question, reusing the ask-question form/template."""
    model = AskedQuestions
    form_class = AskQ
    template_name ='Questions/addQuestion.html'
class QuestionDeleteView(DeleteView):
    """Delete a question, then return to the home page."""
    model = AskedQuestions
    success_url = '/'
|
{"/Questions/admin.py": ["/Questions/models.py"], "/Questions/views.py": ["/Questions/models.py", "/Questions/forms.py"], "/Questions/forms.py": ["/Questions/models.py"]}
|
5,084
|
ktimez/ktimezForum
|
refs/heads/master
|
/Questions/models.py
|
from django.db import models
from django.utils import timezone
from django.db.models.signals import post_save,pre_save
from .utils import unique_slug_generator
from django.template.defaultfilters import slugify
from autoslug import AutoSlugField
from django.conf import settings
from django.db.models import Count
from django.core.urlresolvers import reverse
class AskedQuestions(models.Model):
    """A forum question posted by a user."""
    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    title = models.CharField(max_length=200)
    # help_text is non-English (presumably Kinyarwanda) — roughly "give more
    # explanation about the question, if you have any".
    description = models.TextField(help_text='tanga ubundi busobanuro burenzeho ku kibazo, niba ubufite', blank=True, null=True)
    created_on = models.DateTimeField(auto_now_add=True)
    # Populated by the pre_save receiver below via unique_slug_generator.
    slug = models.SlugField(blank=True, null=True)
    approved = models.BooleanField(default=True)

    def get_absolute_url(self):
        # Detail page is resolved by slug, not pk.
        return reverse('questionDetails', kwargs={'slug':self.slug})

    def __str__(self):
        return self.title
def rl_pre_save_receiver(sender, instance, *args, **kwargs):
    """pre_save hook: assign a unique slug the first time a question is saved."""
    if not instance.slug:
        instance.slug = unique_slug_generator(instance)


pre_save.connect(rl_pre_save_receiver, sender=AskedQuestions)
#Model Manager of Replies
class RepliesModelManager(models.Manager):
    """Manager that returns replies ordered by vote count, highest first."""
    # NOTE(review): `get_query_set` is the pre-Django-1.6 hook name (renamed to
    # `get_queryset`, old name removed in 1.8). This file imports
    # django.core.urlresolvers, so an old Django is presumably in use — confirm
    # the installed version still calls this override.
    def get_query_set(self):
        # Annotate each reply with the count of its related Vote rows.
        return super(RepliesModelManager, self).get_query_set().annotate(votes=Count('vote')).order_by('-votes')
class Replies(models.Model):
    """A reply (comment) to a question."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL)
    ques = models.ForeignKey(AskedQuestions)
    text = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    approved = models.BooleanField(default=True)
    rank_scored = models.IntegerField(default=0)
    #objects = models.Manager() #default Manager
    # Custom manager: default querysets come back ordered by vote count.
    objects = RepliesModelManager()

    def disaprove(self):
        # NOTE(review): method name is misspelled ("disaprove") — kept as-is
        # because external callers may depend on it.
        self.approved = False
        self.save()

    def __str__(self):
        return self.text
class Vote(models.Model):
    """A single user's vote on a reply (one row per vote)."""
    voter = models.ForeignKey(settings.AUTH_USER_MODEL)
    comment = models.ForeignKey(Replies)

    def __str__(self):
        return "%s voted %s" %(self.voter.username, self.comment.text)
|
{"/Questions/admin.py": ["/Questions/models.py"], "/Questions/views.py": ["/Questions/models.py", "/Questions/forms.py"], "/Questions/forms.py": ["/Questions/models.py"]}
|
5,085
|
ktimez/ktimezForum
|
refs/heads/master
|
/Questions/forms.py
|
from .models import AskedQuestions
from django.forms import ModelForm
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django import forms
class AskQ(ModelForm):
    """ModelForm for creating/editing a question (title and description only;
    owner, slug, etc. are filled by the views/signals)."""
    class Meta:
        model = AskedQuestions
        fields = ['title', 'description']
class SignUpForm(UserCreationForm):
    """Registration form: Django's UserCreationForm plus a required e-mail."""
    #first_name = forms.CharField(max_length=30, required=False, help_text='Optional.')
    #last_name = forms.CharField(max_length=30, required=False, help_text='Optional.')
    # help_text is non-English ("your email").
    email = forms.EmailField(max_length=254, help_text='email yawe')

    class Meta:
        model = User
        fields = ('username','email', 'password1', 'password2', )
|
{"/Questions/admin.py": ["/Questions/models.py"], "/Questions/views.py": ["/Questions/models.py", "/Questions/forms.py"], "/Questions/forms.py": ["/Questions/models.py"]}
|
5,105
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/main.py
|
import gin
from absl import app, flags
from input_pipeline import datasets, datasets2
import constants
from evaluation import eval
from models.transfer_learning_architecture import transfer_learning
from models.architecture import vgg_base_3custom
from matplotlib import pyplot as plt
import tensorflow as tf
import datetime
import os
FLAGS = flags.FLAGS
# Command-line switches that select the run mode inside main().
flags.DEFINE_boolean('train', False, 'Specify whether to train or evaluate a model.')
flags.DEFINE_boolean('ds2', True, 'Specify whether to use alternate data pipeline')
flags.DEFINE_boolean('hparam_tune', False, 'Specify if its hyper param tuning.')
flags.DEFINE_boolean('Transfer_learning', False, 'to use transfer learning based model, \
train flag must be set to true to fine tune pretrained model')
def main(argv):
    """Entry point: train and/or evaluate a diabetic-retinopathy classifier.

    Mode selection via the absl flags declared above:
      * hparam_tune  -> run hyper-parameter search and return;
      * ds2          -> pick the input pipeline (datasets2 vs datasets);
      * Transfer_learning -> pretrained model vs custom VGG-style model;
      * train        -> fit (with TensorBoard/checkpoint/CSV callbacks) and
                        evaluate, otherwise evaluate a stored checkpoint.
    """
    # gin-config
    gin.parse_config_files_and_bindings(['configs/config.gin'], [])
    if FLAGS.hparam_tune:
        # Imported lazily so the tuning stack only loads when requested.
        from hyper_parameter_tuning.hparam_tuning import run_hparam_tuning
        run_hparam_tuning()
    else:
        if FLAGS.ds2:
            # setup pipeline without image data generator
            ds_train, ds_val, ds_test = datasets2.load_data()
            if FLAGS.Transfer_learning:
                epochs = constants.H_TRANSFER_LEARNING_EPOCHS
                model = transfer_learning((256, 256, 3))
            else:
                epochs = constants.H_EPOCHS
                model = vgg_base_3custom((256, 256, 3))
        else:
            # use pipeline using image data generator
            ds_train, ds_val, ds_test = datasets.load()
            if FLAGS.Transfer_learning:
                epochs = constants.H_TRANSFER_LEARNING_EPOCHS
                model = transfer_learning((256, 256, 3))
            else:
                epochs = constants.H_EPOCHS
                model = vgg_base_3custom((256, 256, 3))
        opt = tf.optimizers.Adam(constants.H_LEARNING_RATE, name='ADAM')
        if FLAGS.train:
            model.build((constants.N_BATCH_SIZE, constants.ip_shape[0], constants.ip_shape[1], 3))
            model.compile(optimizer=opt, loss='sparse_categorical_crossentropy', metrics=['accuracy'], )
            print(model.summary())
            # tensor board call back
            if not os.path.isdir(constants.dir_fit):
                os.makedirs(constants.dir_fit)
            # One log directory per run, keyed by timestamp.
            log_dir = os.path.join(constants.dir_fit, datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
            tensorboard_callbk = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1,
                                                                write_graph=True,
                                                                write_images=True,
                                                                update_freq='epoch',
                                                                # profile_batch=2,
                                                                embeddings_freq=1)
            # Checkpoint call back
            cpt_dir = os.path.join(constants.dir_cpts, datetime.datetime.now().strftime("%Y%m%d-%H%M"))
            if not os.path.isdir(cpt_dir):
                os.makedirs(cpt_dir)
            print(cpt_dir)
            # Filename template expanded by Keras with epoch/val_accuracy.
            checkpoint_dir = os.path.join(cpt_dir, 'epochs:{epoch:03d}-val_accuracy:{val_accuracy:.3f}.h5')
            # check point to save the model based on improving validation accuracy
            # NOTE(review): save_best_only=False actually saves every epoch,
            # not just improvements — confirm intent.
            checkpoint_callbk = tf.keras.callbacks.ModelCheckpoint(checkpoint_dir,
                                                                   monitor='val_accuracy',
                                                                   verbose=1,
                                                                   save_best_only=False,
                                                                   mode='max', save_weights_only=False,
                                                                   save_freq='epoch')
            # csv call back, if dir doesnt exist create directory
            if not os.path.isdir(constants.dir_csv):
                os.makedirs(constants.dir_csv)
            log_file_name = os.path.join(constants.dir_csv,
                                         (datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + '.csv'))
            csv_callbk = tf.keras.callbacks.CSVLogger(log_file_name, separator=',', append=True)
            callbacks_list = [checkpoint_callbk, tensorboard_callbk, csv_callbk]
            # Training the model and saving it using checkpoint call back:
            # first half of the epochs...
            history_model = model.fit(ds_train, verbose=1, epochs=int(epochs/2), batch_size=constants.N_BATCH_SIZE,
                                      validation_data=ds_val,
                                      callbacks=callbacks_list)
            # ...then resume the same model for the remaining epochs.
            history_model = model.fit(ds_train, verbose=1, initial_epoch=int(epochs/2), epochs=epochs,
                                      batch_size=constants.N_BATCH_SIZE,
                                      validation_data=ds_val,
                                      callbacks=callbacks_list)
            # save final model
            if not os.path.isdir(constants.WEIGHTS_PATH):
                os.makedirs(constants.WEIGHTS_PATH)
            model_save_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            model_name = model_save_time + '_' + model.optimizer.get_config()['name'] + '_epochs_' + str(epochs) + '.h5'
            model_save_path = os.path.join(constants.WEIGHTS_PATH, model_name)
            print(model_save_path)
            try:
                # stat() raises if the weights path is unusable.
                _ = os.stat(constants.WEIGHTS_PATH)
                model.save(model_save_path)
            except NotADirectoryError:
                raise
            # plot final training data, for runtime progress look at tensor board log
            plt.figure()
            plt.subplot(1, 2, 1)
            plt.plot(history_model.history["loss"])
            plt.plot(history_model.history["val_loss"])
            plt.legend(["loss", "val_loss"])
            # plt.xticks(range(constants.H_EPOCHS))
            plt.xlabel("epochs")
            plt.title("Train and val loss")
            plt.subplot(1, 2, 2)
            plt.plot(history_model.history["accuracy"])
            plt.plot(history_model.history["val_accuracy"])
            plt.legend(["accuracy", "val_accuracy"])
            plt.title("Train and Val acc")
            plt.show()
            '''
            test_history = model.evaluate(ds_test,
                                          batch_size=constants.N_BATCH_SIZE,
                                          verbose=1, steps=4)
            '''
            # Evaluate the freshly trained in-memory model.
            eval.evaluate(model=model, ds_test=ds_test, opt=opt, is_training=FLAGS.train, SAVE_RESULT=True,
                          checkpoint_path=None)
        else:
            # Load checkpoint model to evaluate
            check_point_path = constants.trained_model_name
            # check_point_path = 'weights/20201222-220802_ADAM_epochs_100_test_acc_78.h5'
            eval.evaluate(model=model, ds_test=ds_test, opt=opt, is_training=FLAGS.train, SAVE_RESULT=True,
                          checkpoint_path=check_point_path)


if __name__ == "__main__":
    app.run(main)
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,106
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/visualization/deep_vis.py
|
import numpy as np
import tensorflow as tf
from tensorflow import keras
import constants
from input_pipeline import datasets2
from matplotlib import pyplot as plt
import cv2
# Module-level Grad-CAM setup: load the test split and a trained model.
_, _, ds_test = datasets2.load_data()
# path to test image
# NOTE(review): hard-coded absolute Windows path — parameterize before reuse.
f_path = 'C:/Users/Teja/Documents/_INFOTECH/sem5/DL_lab/IDRID_dataset/images/test/IDRiD_033.jpg'
# path to the saved model
saved_model = tf.keras.models.load_model('weights/20201222-220802_ADAM_epochs_100_test_acc_78.h5')
# compile the loaded keras model
saved_model.compile(optimizer=tf.keras.optimizers.Adam(constants.H_LEARNING_RATE),
                    loss=tf.keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'], )
# build the compiled keras model with input shape = [batchsize,image shape]
saved_model.build((32, 256, 256, 3))
print(saved_model.summary())
img_size = (constants.ip_shape[0], constants.ip_shape[1])
# get the last convolution layer to perform Grad CAM
# NOTE(review): layer names are tied to this specific saved model — they must
# match model.summary() output exactly.
last_conv_layer_name = "conv2d_3"
# list of all layers after the selected convolution layer till classification
classifier_layer_names = ["batch_normalization_3", "max_pooling2d_3", "dropout", "flatten", "dense", "tf_op_layer_Relu",
                          "dropout_1", "dense_1"]
def get_img_array(f_path: str):
    """Load the image at *f_path* and preprocess it for the model.

    The image is cropped to a fixed bounding box (offsets match the IDRID
    fundus images), scaled to [0, 1], resized to 256x256, and given a
    leading batch dimension.

    Args:
        f_path (str): path of the JPEG image to read.

    Returns:
        numpy array of shape (1, 256, 256, 3), dtype float32.

    Fix: removed the leftover debug ``print`` and replaced the garbled
    copy-pasted docstring (it described a different Keras-based loader).
    """
    image_string = tf.io.read_file(f_path)
    image = tf.io.decode_jpeg(image_string, channels=3)
    # Crop to the region of interest before resizing.
    image = tf.image.crop_to_bounding_box(image, 0, 266, 2848, 3426)
    image = tf.cast(image, tf.float32) / 255.0
    image = tf.image.resize(image, [256, 256])
    # Add the batch dimension: (256, 256, 3) -> (1, 256, 256, 3).
    img_arr = np.expand_dims(image, axis=0)
    return img_arr
def make_gradcam_heatmap(img_array, model, last_conv_layer_name, classifier_layer_names):
    """Compute a Grad-CAM heatmap for the model's top predicted class.

    Args:
        img_array: batched input image, shape (1, H, W, 3).
        model: trained Keras model to explain.
        last_conv_layer_name: name of the conv layer whose activations are
            visualized.
        classifier_layer_names: names of every layer after that conv layer,
            in order, up to and including the classification head.

    Returns:
        2-D numpy heatmap normalized to [0, 1], sized like the conv layer's
        spatial output.
    """
    # First, we create a model that maps the input image to the activations
    # of the last conv layer
    last_conv_layer = model.get_layer(last_conv_layer_name)
    last_conv_layer_model = keras.Model(model.inputs, last_conv_layer.output)
    # Second, we create a model that maps the activations of the last conv
    # layer to the final class predictions
    classifier_input = keras.Input(shape=last_conv_layer.output.shape[1:])
    x = classifier_input
    for layer_name in classifier_layer_names:
        x = model.get_layer(layer_name)(x)
    classifier_model = keras.Model(classifier_input, x)
    # Then, we compute the gradient of the top predicted class for our input image
    # with respect to the activations of the last conv layer
    with tf.GradientTape() as tape:
        # Compute activations of the last conv layer and make the tape watch it
        last_conv_layer_output = last_conv_layer_model(img_array)
        tape.watch(last_conv_layer_output)
        # Compute class predictions
        preds = classifier_model(last_conv_layer_output)
        top_pred_index = tf.argmax(preds[0])
        top_class_channel = preds[:, top_pred_index]
    # This is the gradient of the top predicted class with regard to
    # the output feature map of the last conv layer
    grads = tape.gradient(top_class_channel, last_conv_layer_output)
    # This is a vector where each entry is the mean intensity of the gradient
    # over a specific feature map channel
    pooled_grads = tf.reduce_mean(grads, axis=(0, 1, 2))
    # We multiply each channel in the feature map array
    # by "how important this channel is" with regard to the top predicted class
    last_conv_layer_output = last_conv_layer_output.numpy()[0]
    pooled_grads = pooled_grads.numpy()
    for i in range(pooled_grads.shape[-1]):
        last_conv_layer_output[:, :, i] *= pooled_grads[i]
    # The channel-wise mean of the resulting feature map
    # is our heatmap of class activation
    heatmap = np.mean(last_conv_layer_output, axis=-1)
    # For visualization purpose, we will also normalize the heatmap between 0 & 1
    heatmap = np.maximum(heatmap, 0) / np.max(heatmap)
    return heatmap
# Run Grad-CAM on the sample image and overlay the heatmap on the original.
img_array = get_img_array(f_path)
# Print what the top predicted class is
preds = saved_model.predict(img_array)
# print("Predicted:", decode_predictions(preds, top=1)[0])
# Generate class activation heatmap
cam = make_gradcam_heatmap(img_array, saved_model, last_conv_layer_name, classifier_layer_names)
# Display heatmap
img = keras.preprocessing.image.load_img(f_path)
# Same crop/resize as get_img_array so the overlay lines up with the input.
img = img.crop(box=(266, 0, 3692, 2848))
img = img.resize((256, 256))
# resize heatmap, then convert it to 3 channel (apply colormap)
cam_res = cv2.resize(cam, (256, 256))
heat_map = cv2.applyColorMap(np.uint8(255 * cam_res), cv2.COLORMAP_JET)
added_map = cv2.addWeighted(cv2.cvtColor(np.asarray(img).astype('uint8'), cv2.COLOR_RGB2BGR), 0.7, heat_map, 0.4, 0)
# Plot image, gradcam output and gradcam overlay
plt.figure(1)
plt.subplot(1, 3, 1)
plt.axis("off")
plt.imshow(img)
plt.subplot(1, 3, 2)
plt.axis("off")
plt.imshow(heat_map)
plt.subplot(1, 3, 3)
plt.axis("off")
plt.imshow(added_map)
# np.resize(np.squeeze(img_array,axis=0),[16,16]))
plt.show()
# Second figure: manual blend of heatmap and image, renormalized to uint8.
plt.figure(2)
overlay_map = np.float32(heat_map) + np.float32(img) * 0.4  # everlay heatmap onto the image
overlay_map = 255 * overlay_map / np.max(overlay_map)
overlay_map = np.uint8(overlay_map)
plt.imshow(overlay_map)
plt.show()
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,107
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/input_pipeline/datasets.py
|
import gin
import logging
import tensorflow as tf
import pandas as pd
from keras_preprocessing.image import ImageDataGenerator
from input_pipeline.preprocessing import preprocess, resampling
import constants
import glob
import matplotlib.pyplot as plt
import random
import numpy as np
from sklearn.model_selection import train_test_split
# tf.compat.v1.enable_eager_execution()
print("Tensorflow version", tf.__version__)
AUTOTUNE = tf.data.experimental.AUTOTUNE
@gin.configurable
def load(name, data_dir):
    """Build train/validation/test tf.data datasets for the named dataset.

    Args:
        name: dataset identifier; only "idrid" is supported.
        data_dir: root directory of the dataset on disk.

    Returns:
        (ds_train, ds_val, ds_test) — cached/repeated/prefetched, see
        prepare_for_performance().

    Raises:
        ValueError: if *name* is not a known dataset.

    Fix: the original ended with ``return ValueError`` — handing the
    exception *class* back to the caller instead of raising — so an
    unknown name produced a confusing unpack error downstream. It now
    raises.
    """
    if name == "idrid":
        logging.info(f"Preparing dataset {name}...")
        # columns_from_labels = ['Image name', 'Retinopathy grade']
        columns_from_labels = constants.COLUMN_LABELS
        # get paths to all directories of images and labels.
        dir_train_images, dir_test_images, dir_train_csv, dir_test_csv = path2dir(data_dir)
        # loading csv files: parse the columns that are to be used into dataframes
        df_names_labels_train = load_from_csv(dir_train_csv, columns_from_labels)
        df_names_labels_test = load_from_csv(dir_test_csv, columns_from_labels)
        print(df_names_labels_train['Retinopathy grade'].value_counts())
        print('There are %i train labels and %i test labels' % (len(df_names_labels_train), len(df_names_labels_test)))
        # Display a random image
        show_sample_image(dir_train_images)
        # attach file extensions to image names
        df_names_labels_train = _append_file_format_extension2name(df_names_labels_train)
        df_names_labels_test = _append_file_format_extension2name(df_names_labels_test)
        print(df_names_labels_test.head())
        '''##### Split train data into train and validation #####'''
        df_train, df_valid = train_test_split(df_names_labels_train, test_size=0.2, random_state=42)
        '''#### Over sample the TRaining dataset using, Resampling- using sample function of pandas dataframes #####'''
        df_resampled_data = resampling(df_train, frac=1)
        print(df_resampled_data['Retinopathy grade'].value_counts())
        ''' Training and validation data building '''
        # Augmentation (rotation/flip/zoom) applied to training data only.
        gen_img_train_valid = ImageDataGenerator(preprocessing_function=preprocess,
                                                 rescale=1.0 / 255,
                                                 rotation_range=10,
                                                 horizontal_flip=False,
                                                 vertical_flip=True,
                                                 zoom_range=0.01)
        # Training data set build
        print("######################################################")
        print("Loading training Data ............")
        ds_train = _build_dataset(df_resampled_data, dir_train_images, gen_img_train_valid,
                                  class_mode='binary', subset_name=None, shuffle_val=True)
        print("# Finished Loading training Data #")
        print("######################################################")
        '''## No augmentation for validation and test data ##'''
        gen_img_valid = ImageDataGenerator(preprocessing_function=preprocess,
                                           rescale=1.0 / 255)
        # Validation data set build
        print("Loading Validation Data ............")
        ds_val = _build_dataset(df_valid, dir_train_images, gen_img_valid,
                                class_mode='binary', subset_name=None, shuffle_val=False)
        print("# Finished Loading Validation Data #")
        print("######################################################")
        ''' Testing dataset building '''
        gen_img_test = ImageDataGenerator(preprocessing_function=preprocess, rescale=1.0 / 255)
        ds_test = _build_dataset(df_names_labels_test, dir_test_images, gen_img_test,
                                 class_mode='binary', subset_name=None, shuffle_val=False)
        # Display a sample image along with label from each data set
        _show_sample_from_ds_data(ds_train, "Train")
        _show_sample_from_ds_data(ds_val, "Validation")
        _show_sample_from_ds_data(ds_test, "Test")
        ''' Uncomment below to print tensor dimensions and data type '''
        # ds_train.element_spec
        ''' Prepare function for preparing the dataset for performance(batching, prefetching) '''
        return prepare_for_performance(ds_train, ds_val, ds_test)
    raise ValueError(f"Unknown dataset name: {name!r}")
@gin.configurable
def path2dir(dataset_directory, images_train, images_test, csv_train_labels, csv_test_labels):
    """Assemble every path needed while loading a dataset.

    Args:
        dataset_directory: path to the dataset's root directory.
        images_train: training-images sub-path within the dataset directory.
        images_test: testing-images sub-path within the dataset directory.
        csv_train_labels: train.csv sub-path within the dataset directory.
        csv_test_labels: test.csv sub-path within the dataset directory.

    Returns:
        Tuple of (training images dir, testing images dir,
        training labels csv path, testing labels csv path).
    """
    # Each result is the root directory with the sub-path appended.
    return (dataset_directory + images_train,
            dataset_directory + images_test,
            dataset_directory + csv_train_labels,
            dataset_directory + csv_test_labels)
def show_sample_image(files_dir):
    """Display one randomly chosen .jpg image from *files_dir*.

    Args:
        files_dir: Path to the directory where the images are located.

    Fix: the original used ``random.randint(0, len(files))`` as an index —
    ``randint`` is inclusive at both ends, so it could index one past the
    end and raise IndexError. ``random.choice`` picks uniformly without the
    off-by-one. Also returns quietly when no images are found instead of
    crashing.
    """
    list_train_files = glob.glob(files_dir + '/*.jpg')
    if not list_train_files:
        return  # nothing to show
    filename = random.choice(list_train_files)
    img = plt.imread(filename)
    plt.imshow(img)
    plt.show()
def load_from_csv(file_dir, cols_used):
    """Read the label csv at *file_dir* into a dataframe and binarize grades.

    Retinopathy grades '0'/'1' are collapsed to class '0' and grades
    '2'/'3'/'4' to class '1'. Comment out the two ``loc`` assignments below
    if multi-class classification is preferred.

    Args:
        file_dir: path where csv is located
        cols_used: columns to be considered while reading the csv

    Returns: pandas data frame restricted to *cols_used*, labels binarized.
    """
    # Everything is read as str so label comparison is uniform.
    frame = pd.read_csv(file_dir, usecols=cols_used, dtype=str)
    label_col = frame.columns[1]
    # Collapse the five grades into a binary label.
    frame.loc[frame[label_col].isin(['0', '1']), label_col] = '0'
    frame.loc[frame[label_col].isin(['2', '3', '4']), label_col] = '1'
    return frame
def _append_file_format_extension2name(df_names_labels):
"""
Purpose: append file extenstion to the image name column in pandas dataframe
Args:
df_names_labels: pandas dataframe that contains Image names and corresponding labels
Returns:
"""
def _append_ext(fn):
return fn + ".jpg"
df_names_labels["Image name"] = df_names_labels["Image name"].apply(_append_ext)
return df_names_labels
def _show_sample_from_df_iter(df_iter_test_data):
    """Display one image (with its integer label) from a DataFrameIterator.

    Used as a sanity check on what flow_from_dataframe actually yields.

    Args:
        df_iter_test_data: DataFrameIterator returned by
            ImageDataGenerator.flow_from_dataframe (assumed to yield
            one-hot labels — see the np.where decode below).
    """
    # df_iter_test_data.next() returns a tuple of (batch of images, batch of labels)
    t_sample_image, t_sample_label = df_iter_test_data.next()
    # convert one numpy nd array from the fetched batch to a integer array for displaying image
    '''If images are not rescaled uncomment the below line'''
    # plt.imshow(t_sample_image[0].astype('uint8'))
    '''If images are rescaled uncomment the below line'''
    plt.imshow(t_sample_image[0])
    # getting integer image label from one hot encoded label
    image_label = (np.where(t_sample_label[0] == 1))[0].tolist()[0]
    # plot image with integer label
    plt.title("Class of the Image is %d" % image_label)
    plt.show()
def _show_sample_from_ds_data(tf_ds, dataset_name):
    """Display a 3x3 grid of images from one batch of a tf.data dataset.

    Sanity check for the assembled pipeline; assumes batch size >= 9.

    Args:
        tf_ds: batched tf.data.Dataset yielding (images, labels).
        dataset_name: label used in the figure title (e.g. "Train").
    """
    plt.figure(figsize=(10, 10))
    plt.suptitle("Samples from augmented %s dataset" % dataset_name)
    # take(1) pulls a single batch; plot its first nine images.
    for images, labels in tf_ds.take(1):
        for i in range(9):
            ax = plt.subplot(3, 3, i + 1)
            '''If images are not rescaled uncomment the below code'''
            # plt.imshow(images[i].numpy().astype("uint8"))
            '''If images are rescaled uncomment the below line'''
            plt.imshow(images[i])
            '''Uncomment below line for integer labels'''
            plt.title("Class of image: %d " % labels[i])
            '''Uncomment below line for one hot coded labels'''
            # plt.title("Class of image: %d " % ((np.where(labels[i] == 1))[0].tolist()[0]))
            plt.axis("off")
    plt.show()
@gin.configurable
def _build_dataset(df_pandas_dataframe, directory_of_images,
                   image_generator, class_mode, subset_name, img_height, img_width, shuffle_val):
    """Create a tf.data.Dataset from an ImageDataGenerator via from_generator.

    Args:
        df_pandas_dataframe: pandas dataframe containing image file names and
            their respective labels in corresponding columns.
        directory_of_images: path where the images of the dataset are located.
        image_generator: ImageDataGenerator instance of keras.
        class_mode: e.g. 'binary'; for multiclass use 'categorical'
            (see https://keras.io/api/preprocessing/image/#flowfromdataframe-method).
        subset_name: 'training'/'validation' when the generator was created
            with a validation split, else None.
        img_height: target image height (gin-injected).
        img_width: target image width (gin-injected).
        shuffle_val: whether the iterator shuffles between epochs.

    Returns:
        tf.data.Dataset yielding (float32 image batch, uint8 label batch).
    """
    dataframe_iterator = image_generator.flow_from_dataframe(df_pandas_dataframe,
                                                             directory=directory_of_images,
                                                             x_col=df_pandas_dataframe.columns[0],
                                                             y_col=df_pandas_dataframe.columns[1],
                                                             subset=subset_name,
                                                             seed=50,
                                                             target_size=(img_height, img_width),
                                                             batch_size=constants.N_BATCH_SIZE,
                                                             class_mode=class_mode, shuffle=shuffle_val)
    # uncomment the following code to visualize the sample image after the generator
    # _show_sample_from_df_iter(dataframe_iterator)
    # Fetch one batch just to learn the image shape used below.
    # NOTE(review): the iter() wrapper around the (images, labels) tuple is
    # redundant — tuple unpacking works either way; it also consumes one batch.
    images, labels = iter(dataframe_iterator.next())
    print(images.shape, labels.shape)
    ds_data = tf.data.Dataset.from_generator(lambda: dataframe_iterator,
                                             output_types=(tf.float32, tf.uint8),
                                             output_shapes=([None, images.shape[1], images.shape[2], 3], [None, ]))
    # (images.shape, labels.shape))
    return ds_data
@gin.configurable
def prepare_for_performance(ds_train, ds_val, ds_test, caching):
    """Cache, repeat and prefetch the three dataset splits.

    Args:
        ds_train: training data set.
        ds_val: validation data set (split off from training data).
        ds_test: test data set.
        caching: if True, cache each split after the first full pass
            (gin-injected).

    Returns: the (ds_train, ds_val, ds_test) tuple, performance-prepared.
    """
    '''Prepare training dataset'''
    # ds_train = ds_train.map(crop2bb, num_parallel_calls=tf.data.experimental.AUTOTUNE)
    # cache will have a complete list of the elements in the dataset, and it will be used on all subsequent iterations
    if caching:
        ds_train = ds_train.cache()
    # shuffle and repeat
    # ds_train = ds_train.shuffle(constants.N_SHUFFLE_BUFFER)
    # repeat(-1): loop the training data indefinitely (steps bounded by fit()).
    ds_train = ds_train.repeat(-1)
    # prefetch data
    ds_train = ds_train.prefetch(AUTOTUNE)
    '''Prepare validation dataset'''
    # cache will have a complete list of the elements in the dataset, and it will be used on all subsequent iterations
    if caching:
        ds_val = ds_val.cache()
    # Shuffling not needed for validation and testing data
    ds_val = ds_val.prefetch(AUTOTUNE)
    '''Prepare test dataset'''
    if caching:
        ds_test = ds_test.cache()
    ds_test = ds_test.prefetch(AUTOTUNE)
    return ds_train, ds_val, ds_test
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,108
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/input_pipeline/preprocessing.py
|
import gin
import tensorflow as tf
import pandas as pd
@gin.configurable
def preprocess(image):
    """Crop the black margin off a retina image and resize it to 256x256.

    Args:
        image: image tensor to be preprocessed

    Returns:
        The cropped image, resized to (256, 256).
    """
    cropped = tf.image.crop_to_bounding_box(image, 0, 15, 256, 209)
    return tf.image.resize(cropped, (256, 256))
def resampling(df_imbalanced, frac=1):
    """Oversample the minority class so both binary classes are equally frequent.

    Args:
        df_imbalanced: dataframe with an integer-like 'Retinopathy grade'
            column holding binary labels (0/1), plus any other columns
        frac: `frac` argument forwarded to DataFrame.sample for the final
            shuffle (1 keeps every row, in random order)

    Returns:
        Shuffled, class-balanced dataframe; 'Retinopathy grade' is cast to
        str so it can be consumed by flow_from_dataframe.
    """
    df_imbalanced = df_imbalanced.astype({'Retinopathy grade': int})
    df_class_0 = df_imbalanced[df_imbalanced['Retinopathy grade'] == 0]
    df_class_1 = df_imbalanced[df_imbalanced['Retinopathy grade'] == 1]
    # Determine minority/majority dynamically instead of assuming class 0 is
    # the minority: the previous hard-coded assignment produced a negative
    # sample size (ValueError) whenever class 1 happened to be the smaller one.
    if len(df_class_0) <= len(df_class_1):
        df_minority, df_majority = df_class_0, df_class_1
    else:
        df_minority, df_majority = df_class_1, df_class_0
    # Imbalance of the data: majority class count minus minority class count
    difference = len(df_majority) - len(df_minority)
    # replace=True guards the case where the gap exceeds the minority size
    # (sampling without replacement would raise otherwise)
    df_sampled_from_minority = df_minority.sample(
        n=difference, replace=difference > len(df_minority))
    # concatenate the minority class, majority class and newly sampled rows
    df_balanced_data = pd.concat([df_minority, df_majority, df_sampled_from_minority], axis=0)
    # shuffle the resampled data
    df_balanced_data = df_balanced_data.sample(frac=frac)
    # convert the labels to strings to be accepted by flow_from_dataframe
    df_balanced_data = df_balanced_data.astype({'Retinopathy grade': str})
    return df_balanced_data
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,109
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/models/architectures.py
|
import gin
import tensorflow as tf
from models.layers import vgg_block
@gin.configurable
def vgg_like(input_shape, n_classes, base_filters, n_blocks, dense_units, dropout_rate):
    """Defines a VGG-like architecture.

    Parameters:
        input_shape (tuple: 3): input shape of the neural network
        n_classes (int): number of classes, corresponding to the number of output neurons
        base_filters (int): number of base filters, which are doubled for every VGG block
        n_blocks (int): number of VGG blocks
        dense_units (int): number of dense units
        dropout_rate (float): dropout rate

    Returns:
        (keras.Model): keras model object
    """
    assert n_blocks > 0, 'Number of blocks has to be at least 1.'
    inputs = tf.keras.Input(input_shape)
    out = vgg_block(inputs, base_filters)
    # Start at 1 so exactly n_blocks blocks are built and the filter count
    # doubles once per block; the previous range(2, n_blocks) built one block
    # too few and quadrupled the filters after the first block.
    for i in range(1, n_blocks):
        out = vgg_block(out, base_filters * 2 ** i)
    out = tf.keras.layers.GlobalAveragePooling2D()(out)
    out = tf.keras.layers.Dense(dense_units, activation=tf.nn.relu)(out)
    out = tf.keras.layers.Dropout(dropout_rate)(out)
    # Logits output (no softmax): pair with a from_logits=True loss.
    outputs = tf.keras.layers.Dense(n_classes)(out)
    return tf.keras.Model(inputs=inputs, outputs=outputs, name='vgg_like')
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,110
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/tune.py
|
import logging
import gin
from ray import tune
from input_pipeline.datasets import load
from models.architectures import vgg_like
from train import Trainer
from utils import utils_params, utils_misc
def train_func(config):
    """Ray Tune trial entry point: trains one model for one sampled config.

    Args:
        config: dict mapping gin binding names (e.g. 'vgg_like.n_blocks')
            to the values Ray Tune sampled for this trial.
    """
    # Hyperparameters: render the sampled config as gin binding strings
    bindings = []
    for key, value in config.items():
        bindings.append(f'{key}={value}')
    # generate folder structures (run folder name encodes the bindings)
    run_paths = utils_params.gen_run_folder(','.join(bindings))
    # set loggers
    utils_misc.set_loggers(run_paths['path_logs_train'], logging.INFO)
    # gin-config
    # NOTE(review): absolute config path is machine-specific — confirm it
    # exists on the machine that runs the tuning jobs.
    gin.parse_config_files_and_bindings(['/mnt/home/repos/dl-lab-skeleton/diabetic_retinopathy/configs/config.gin'], bindings)
    utils_params.save_config(run_paths['path_gin'], gin.config_str())
    # setup pipeline
    ds_train, ds_val, ds_test, ds_info = load()
    # model
    model = vgg_like(input_shape=ds_info.features["image"].shape, n_classes=ds_info.features["label"].num_classes)
    trainer = Trainer(model, ds_train, ds_val, ds_info, run_paths)
    # report intermediate validation accuracy back to Tune at each log interval
    for val_accuracy in trainer.train():
        tune.report(val_accuracy=val_accuracy)
# Launch the hyper-parameter search: num_samples trials over the search
# space below, reserving one GPU and four CPUs per trial.
analysis = tune.run(
    train_func, num_samples=2, resources_per_trial={'gpu': 1, 'cpu': 4},
    config={
        "Trainer.total_steps": tune.grid_search([1e4]),
        "vgg_like.base_filters": tune.choice([8, 16]),
        "vgg_like.n_blocks": tune.choice([2, 3, 4, 5]),
        "vgg_like.dense_units": tune.choice([32, 64]),
        "vgg_like.dropout_rate": tune.uniform(0, 0.9),
    })

print("Best config: ", analysis.get_best_config(metric="val_accuracy"))

# Get a dataframe for analyzing trial results.
df = analysis.dataframe()
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,111
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/train.py
|
import gin
import tensorflow as tf
import logging
@gin.configurable
class Trainer(object):
    """Custom training loop.

    Runs train steps over ds_train, periodically evaluates on ds_val, logs
    metrics, and yields validation accuracy so a tuner (e.g. Ray Tune) can
    consume intermediate results.
    """

    def __init__(self, model, ds_train, ds_val, ds_info, run_paths, total_steps, log_interval, ckpt_interval):
        """
        Args:
            model: keras model to train
            ds_train: training dataset of (images, labels) batches
            ds_val: validation dataset of (images, labels) batches
            ds_info: dataset metadata (stored; not read in this class)
            run_paths: dict of run-specific output paths
            total_steps: number of train steps before stopping (gin-bound)
            log_interval: evaluate/log every this many steps (gin-bound)
            ckpt_interval: checkpoint every this many steps (gin-bound)
        """
        # Summary Writer
        # ....
        # Checkpoint Manager
        # ...
        # Loss objective: labels are sparse class indices and the model is
        # expected to emit logits (from_logits=True)
        self.loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
        self.optimizer = tf.keras.optimizers.Adam()
        # Metrics
        self.train_loss = tf.keras.metrics.Mean(name='train_loss')
        self.train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')
        self.test_loss = tf.keras.metrics.Mean(name='test_loss')
        self.test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='test_accuracy')
        self.model = model
        self.ds_train = ds_train
        self.ds_val = ds_val
        self.ds_info = ds_info
        self.run_paths = run_paths
        self.total_steps = total_steps
        self.log_interval = log_interval
        self.ckpt_interval = ckpt_interval

    @tf.function
    def train_step(self, images, labels):
        """Run one optimization step on a batch and update train metrics."""
        with tf.GradientTape() as tape:
            # training=True is only needed if there are layers with different
            # behavior during training versus inference (e.g. Dropout).
            predictions = self.model(images, training=True)
            loss = self.loss_object(labels, predictions)
        gradients = tape.gradient(loss, self.model.trainable_variables)
        self.optimizer.apply_gradients(zip(gradients, self.model.trainable_variables))
        self.train_loss(loss)
        self.train_accuracy(labels, predictions)

    @tf.function
    def test_step(self, images, labels):
        """Run one forward pass on a validation batch and update test metrics."""
        # training=False is only needed if there are layers with different
        # behavior during training versus inference (e.g. Dropout).
        predictions = self.model(images, training=False)
        t_loss = self.loss_object(labels, predictions)
        self.test_loss(t_loss)
        self.test_accuracy(labels, predictions)

    def train(self):
        """Generator-style training loop.

        Yields the validation accuracy every `log_interval` steps and returns
        the final validation accuracy once `total_steps` steps are reached.
        NOTE(review): reaching total_steps assumes ds_train yields at least
        that many batches — confirm the input pipeline repeats indefinitely.
        """
        for idx, (images, labels) in enumerate(self.ds_train):
            step = idx + 1
            self.train_step(images, labels)
            if step % self.log_interval == 0:
                # Reset test metrics before re-scoring the validation split
                self.test_loss.reset_states()
                self.test_accuracy.reset_states()
                for test_images, test_labels in self.ds_val:
                    self.test_step(test_images, test_labels)
                template = 'Step {}, Loss: {}, Accuracy: {}, Test Loss: {}, Test Accuracy: {}'
                logging.info(template.format(step,
                                             self.train_loss.result(),
                                             self.train_accuracy.result() * 100,
                                             self.test_loss.result(),
                                             self.test_accuracy.result() * 100))
                # Reset train metrics so the next window's averages are fresh
                self.train_loss.reset_states()
                self.train_accuracy.reset_states()
                # Write summary to tensorboard
                # ...
                yield self.test_accuracy.result().numpy()
            if step % self.ckpt_interval == 0:
                logging.info(f'Saving checkpoint to {self.run_paths["path_ckpts_train"]}.')
                # Save checkpoint
                # ...
            if step % self.total_steps == 0:
                logging.info(f'Finished training after {step} steps.')
                # Save final checkpoint
                # ...
                return self.test_accuracy.result().numpy()
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,112
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/evaluation/metrics.py
|
import tensorflow as tf
class ConfusionMatrix(tf.keras.metrics.Metric):
    """Streaming confusion matrix accumulated over batches of predictions.

    The original skeleton declared `def __init` (missing the trailing
    underscores, so it never acted as the constructor) and left method
    bodies empty, which is a syntax error; both are fixed here.
    """

    def __init__(self, num_classes=2, name="confusion_matrix", **kwargs):
        super(ConfusionMatrix, self).__init__(name=name, **kwargs)
        self.num_classes = num_classes
        # One integer cell per (true label, predicted label) pair.
        self.total_cm = self.add_weight(
            "confusion_matrix_total",
            shape=(num_classes, num_classes),
            initializer="zeros",
            dtype=tf.int32)

    def update_state(self, y_true, y_pred, sample_weight=None):
        """Accumulate one batch; y_pred may be class scores or label indices."""
        y_true = tf.reshape(tf.cast(y_true, tf.int32), [-1])
        # Collapse per-class scores (rank > 1) to predicted label indices.
        if y_pred.shape.rank is not None and y_pred.shape.rank > 1:
            y_pred = tf.argmax(y_pred, axis=-1)
        y_pred = tf.reshape(tf.cast(y_pred, tf.int32), [-1])
        batch_cm = tf.math.confusion_matrix(
            y_true, y_pred, num_classes=self.num_classes, dtype=tf.int32)
        self.total_cm.assign_add(batch_cm)

    def result(self):
        """Return the accumulated num_classes x num_classes count matrix."""
        return self.total_cm

    def reset_state(self):
        """Clear the accumulated counts (e.g. between epochs)."""
        self.total_cm.assign(tf.zeros((self.num_classes, self.num_classes), dtype=tf.int32))
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,113
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/hyper_parameter_tuning/hparam_tuning.py
|
from tensorboard.plugins.hparams import api as hp
import constants
import tensorflow as tf
from input_pipeline import datasets2
import datetime
import numpy as np
from sklearn.metrics import classification_report, confusion_matrix
import seaborn as sns
import pandas as pd
from matplotlib import pyplot as plt
# uncomment below to tune on further parameters
'''
HP_CNN_DROPOUT = hp.HParam("fcn_dropout",display_name="CONV2D NW dropout",
description="Dropout rate for conv subnet.",
hp.RealInterval(0.1, 0.2))
HP_FC_DROPOUT = hp.HParam("fc_dropout",display_name="f.c. dropout",
description="Dropout rate for fully connected subnet.",
hp.RealInterval(0.2, 0.5))
'''
# Hyper-parameter search space: each HParam enumerates the discrete values
# tried by run_hparam_tuning()'s exhaustive grid.
HP_EPOCHS = hp.HParam("epochs", hp.Discrete([100, 140]),
                      description="Number of epoch to run")
HP_NEURONS = hp.HParam("num_Dense_layer_neurons", hp.Discrete([128, 256]),
                       description="Neurons per dense layer")
HP_STRIDE = hp.HParam("stride_in_first_layer", hp.Discrete([2, 1]),
                      description="Value of stride in frist convolutional layer")
HP_L_RATE = hp.HParam("learning_rate", hp.Discrete([0.0001, 0.00001]),
                      description="Learning rate")
# Metric compared across trials in the TensorBoard HParams dashboard.
HP_METRIC = hp.Metric(constants.METRICS_ACCURACY, display_name='Accuracy')
# creating logs for different hyper-parameters (registers the search-space
# schema once so every trial's summary is grouped under it)
with tf.summary.create_file_writer('hp_log_dir/hparam_tuning').as_default():
    hp.hparams_config(
        hparams=[HP_NEURONS, HP_EPOCHS, HP_L_RATE, HP_STRIDE],
        metrics=[HP_METRIC],
    )
def run(run_dir, run_name, hparams, gen_train, gen_valid, gen_test):
    """Execute one tuning trial and log its hparams and final accuracy.

    Args:
        run_dir: TensorBoard log directory for this trial
        run_name: identifier of this trial (used for checkpoints/results)
        hparams: dict keyed by the HP_* HParam objects for this trial
        gen_train, gen_valid, gen_test: dataset splits for the trial
    """
    with tf.summary.create_file_writer(run_dir).as_default():
        # record the hyper-parameter values used in this trial
        hp.hparams(hparams)
        trial_accuracy = train_test_model(gen_train, gen_valid, gen_test, hparams, run_name)
        tf.summary.scalar(constants.METRICS_ACCURACY, trial_accuracy, step=1)
###
def train_test_model(gen_train, gen_valid, gen_test, hparams, run_name):
    """Build, train and evaluate one CNN for a given hyper-parameter set.

    Args:
        gen_train: training data split
        gen_valid: validation data split
        gen_test: test data split
        hparams: dict keyed by the HP_* HParam objects (neurons, epochs,
            learning rate, first-layer stride) for this trial
        run_name: trial identifier used in checkpoint/log file names

    Returns:
        Test-set accuracy of the trained model.
    """
    inputs = tf.keras.layers.Input(constants.ip_shape)
    # Conv stack: 8 -> 16 -> 32 -> 128 filters, each conv followed by batch
    # norm and max-pooling; the first conv's stride is a tuned hyper-parameter
    out = tf.keras.layers.Conv2D(8, 3, hparams[HP_STRIDE], padding='same', activation=tf.nn.relu)(inputs)
    out = tf.keras.layers.BatchNormalization()(out)
    out = tf.keras.layers.MaxPool2D((3, 3))(out)
    out = tf.keras.layers.Conv2D(16, 3, padding='same', activation=tf.nn.relu)(out)
    out = tf.keras.layers.BatchNormalization()(out)
    out = tf.keras.layers.MaxPool2D((2, 2))(out)
    out = tf.keras.layers.Conv2D(32, 3, padding='same', activation=tf.nn.relu)(out)
    out = tf.keras.layers.BatchNormalization()(out)
    out = tf.keras.layers.MaxPool2D((2, 2))(out)
    out = tf.keras.layers.Conv2D(128, 3, padding='same', activation=tf.nn.relu)(out)
    out = tf.keras.layers.BatchNormalization()(out)
    out = tf.keras.layers.MaxPool2D((2, 2))(out)
    out = tf.keras.layers.Dropout(0.3)(out)
    out = tf.keras.layers.Flatten()(out)
    # Classifier head: L2-regularized dense layer with tuned width
    l2_reg = tf.keras.regularizers.l2(0.001)
    # l1_l2_reg = tf.keras.regularizers.L1L2(l1=0.001,l2=0.001)
    # tried 512 without following dropout of 0.3
    out = tf.keras.layers.Dense(hparams[HP_NEURONS], activation='linear',
                                kernel_regularizer=l2_reg)(out)
    out = tf.keras.activations.relu(out)
    out = tf.keras.layers.Dropout(0.5)(out)
    # out = tf.keras.layers.Dense(32, activation=tf.nn.relu)(out)
    final_out = tf.keras.layers.Dense(2, activation=tf.nn.softmax)(out)
    hp_model = tf.keras.Model(inputs=inputs, outputs=final_out, name="HP_tuning_DR_model")
    opt = tf.optimizers.Adam(hparams[HP_L_RATE], name='ADAM')
    hp_model.build((constants.N_BATCH_SIZE, constants.ip_shape))
    hp_model.compile(optimizer=opt,
                     loss=tf.keras.losses.sparse_categorical_crossentropy,
                     metrics=constants.METRICS_ACCURACY)
    print(hp_model.summary())
    # steps_per_epoch/validation_steps round up so partial final batches count
    hp_model.fit(gen_train, batch_size=constants.N_BATCH_SIZE,
                 epochs=hparams[HP_EPOCHS], verbose=1,
                 steps_per_epoch=((constants.N_TRAIN_SIZE_POST_AUG // constants.N_BATCH_SIZE) + 1),
                 validation_data=gen_valid,
                 validation_steps=(constants.N_VALID_SIZE_POST_AUG // constants.N_BATCH_SIZE) + 1,
                 callbacks=call_backs(hparams, run_name))
    loss, accuracy = hp_model.evaluate(gen_test, batch_size=constants.N_BATCH_SIZE,
                                       verbose=1,
                                       steps=(constants.N_TESTING_SET_COUNT // constants.N_BATCH_SIZE + 1),
                                       )
    # persist the confusion matrix and classification report for this trial
    save_test_results(gen_test, hp_model, run_name)
    return accuracy
def call_backs(hparams, run_name):
    """Assemble the keras callbacks used for one tuning trial.

    Args:
        hparams: dict keyed by the HP_* HParam objects, logged via hp.KerasCallback
        run_name: trial identifier embedded in log/checkpoint file names

    Returns:
        List of callbacks: model checkpoint, TensorBoard, CSV logger, and
        the HParams keras callback.
    """
    # tensorboard call back
    log_dir = './hp_log_dir/fit/' + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + "_" + run_name
    tensorboard_callbk = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1,
                                                        write_graph=True,
                                                        write_images=True,
                                                        update_freq='epoch',
                                                        profile_batch=2,
                                                        embeddings_freq=1)
    # model checkpoint call back (weights only, every epoch)
    cpt_path = "./hp_log_dir/cpts/" + run_name + "_" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + \
               "epochs:{epoch:03d}-val_accuracy:{val_accuracy:.3f}.h5"
    # cpt_path = "./hp_log_dir/cpts/" + run_name + "_" + "cp-epochs:{epoch:03d}-val_accuracy:{val_accuracy:.3f}.ckpt"
    # check point to save the model based on improving validation accuracy
    # NOTE(review): save_best_only=False saves every epoch despite the
    # comment above — confirm which behavior is intended.
    checkpoint_callbk = tf.keras.callbacks.ModelCheckpoint(cpt_path,
                                                           monitor='val_accuracy',
                                                           verbose=1,
                                                           save_best_only=False,
                                                           mode='max', save_weights_only=True,
                                                           save_freq='epoch')
    # csv logger call back
    log_file_name = './hp_log_dir/csv_log/' + run_name + "_" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    csv_callbk = tf.keras.callbacks.CSVLogger(log_file_name, separator=',', append=True)
    # keras callback for hyper-parameter logging
    hp_log_dir = './hp_log_dir/hparam_tuning/' + run_name
    hp_callbk = hp.KerasCallback(hp_log_dir, hparams)  # log hparams
    callbacks_list = [checkpoint_callbk, tensorboard_callbk, csv_callbk, hp_callbk]
    return callbacks_list
def run_hparam_tuning():
    """Exhaustively try every hyper-parameter combination and log each run."""
    # Get data from datasets.py or datasets2.py
    # gen_train, gen_valid, gen_test = datasets.load()
    gen_train, gen_valid, gen_test = datasets2.load_data()
    # Enumerate the full cartesian product of the registered search space
    # (same nesting order as the original loops, so run numbering matches).
    combos = [(neurons, n_epochs, l_rate, stride)
              for neurons in HP_NEURONS.domain.values
              for n_epochs in HP_EPOCHS.domain.values
              for l_rate in HP_L_RATE.domain.values
              for stride in HP_STRIDE.domain.values]
    for session_num, (neurons, n_epochs, l_rate, stride) in enumerate(combos):
        hparams = {
            HP_NEURONS: neurons,
            HP_EPOCHS: n_epochs,
            HP_L_RATE: l_rate,
            HP_STRIDE: stride,
        }
        run_name = "run-%d" % session_num
        print('--- Starting trial: %s' % run_name)
        print({h.name: hparams[h] for h in hparams})
        run('hp_log_dir/hparam_tuning/' + run_name, run_name, hparams, gen_train, gen_valid, gen_test)
def save_test_results(gen_test, saved_model, run_name):
    """Score the test split and persist a confusion-matrix plot and a
    classification-report CSV for this trial.

    Args:
        gen_test: test data split (assumed tf.data.Dataset — .take is used;
            TODO confirm against callers)
        saved_model: trained keras model to score
        run_name: trial identifier used in the output file names
    """
    true_labels = []
    # Collect ground-truth labels batch by batch (ceil of one full pass)
    for data, labels in (gen_test.take((constants.N_TESTING_SET_COUNT // constants.N_BATCH_SIZE) + 1)):
        true_labels.extend(labels.numpy().tolist())
    print(true_labels)
    # saved_model = tf.keras.models.load_model('20201215-190832SGD_100.h5')
    # NOTE(review): steps=4 here differs from the steps formula used for
    # predict below — confirm this partial evaluation is intentional.
    test_model = saved_model.evaluate(gen_test,
                                      batch_size=constants.N_BATCH_SIZE,
                                      verbose=1, steps=4)
    print(test_model)
    y_pred = saved_model.predict(gen_test,
                                 batch_size=constants.N_BATCH_SIZE,
                                 steps=(constants.N_TESTING_SET_COUNT // constants.N_BATCH_SIZE) + 1,
                                 verbose=1)
    # argmax over per-class scores -> predicted class indices
    y_pred = np.argmax(y_pred, axis=1)
    print(y_pred)
    print('Confusion Matrix')
    print(confusion_matrix(true_labels, y_pred))
    plt.figure()
    cm_plot = sns.heatmap(confusion_matrix(true_labels, y_pred), annot=True)
    cm_fig = cm_plot.get_figure()
    cm_fig.savefig("./hp_log_dir/results/%s_.png" % run_name)
    print('Classification Report')
    target_names = ['NPDR', 'PDR']
    cr_data = classification_report(true_labels, y_pred, target_names=target_names, output_dict=True)
    print(cr_data)
    df_cr_data = pd.DataFrame(cr_data).transpose()
    df_cr_data.to_csv("./hp_log_dir/results/%s_.csv" % run_name)
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,114
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/models/architecture.py
|
import gin
import tensorflow as tf
def vgg_base_3custom(ip_shape):
    """Build the custom four-conv-block binary classifier for DR grading.

    Args:
        ip_shape: input image shape, e.g. (height, width, channels)

    Returns:
        tf.keras.Model named 'DBR_model' ending in a 2-way softmax head.
    """
    inputs = tf.keras.layers.Input(ip_shape)

    # Conv stack: 8 -> 16 -> 32 -> 128 filters; each conv is followed by
    # batch normalization and max-pooling. The first conv downsamples with
    # stride 2.
    x = tf.keras.layers.Conv2D(8, 3, 2, padding='same', activation=tf.nn.relu)(inputs)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.MaxPool2D((3, 3))(x)

    x = tf.keras.layers.Conv2D(16, 3, padding='same', activation=tf.nn.relu)(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.MaxPool2D((2, 2))(x)

    x = tf.keras.layers.Conv2D(32, 3, padding='same', activation=tf.nn.relu)(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.MaxPool2D((2, 2))(x)

    x = tf.keras.layers.Conv2D(128, 3, padding='same', activation=tf.nn.relu)(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.MaxPool2D((2, 2))(x)
    x = tf.keras.layers.Dropout(0.3)(x)

    # Classifier head: L2-regularized dense layer, relu, heavy dropout.
    x = tf.keras.layers.Flatten()(x)
    l2_reg = tf.keras.regularizers.l2(0.001)
    x = tf.keras.layers.Dense(128, activation='linear',
                              kernel_regularizer=l2_reg)(x)
    x = tf.keras.activations.relu(x)
    x = tf.keras.layers.Dropout(0.5)(x)
    x = tf.keras.layers.Dense(2, activation=tf.nn.softmax)(x)

    return tf.keras.Model(inputs=inputs, outputs=x, name='DBR_model')
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,115
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/evaluation/eval.py
|
import tensorflow as tf
import constants
from sklearn.metrics import classification_report, confusion_matrix
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt
import pandas as pd
import os
def _classification_report_csv(report, conf_mat):
    """Persist evaluation artifacts into constants.results_PATH.

    Args:
        report: classification report (dict, as produced by
            sklearn.metrics.classification_report(..., output_dict=True))
        conf_mat: matplotlib figure holding the confusion-matrix heatmap

    Returns:
        object: None
    """
    dataframe = pd.DataFrame.from_dict(report)
    # exist_ok=True collapses the old exists-check/try/except dance and,
    # unlike the previous FileExistsError pass-branch, never skips saving
    # the results when another process creates the directory first.
    # Other OSErrors still propagate, matching the old `raise` behavior.
    os.makedirs(constants.results_PATH, exist_ok=True)
    conf_mat.savefig(constants.results_PATH + "confusionmatrix.png")
    dataframe.to_csv(constants.results_PATH + 'classification_report.csv', index=False)
def evaluate(model, ds_test, opt, is_training, SAVE_RESULT=True, checkpoint_path=None):
    """Evaluate a model on the test dataset and report a confusion matrix
    and classification report.

    Args:
        model: trained model, used directly when is_training is True
        ds_test: test dataset of (images, labels) batches
        opt: optimizer argument (not read in this function)
        is_training: when True evaluate `model` as-is; otherwise load the
            model from `checkpoint_path`
        SAVE_RESULT: when True, write the classification report CSV and the
            confusion-matrix figure to constants.results_PATH
        checkpoint_path: path to the saved model, required when
            is_training is False

    Raises:
        FileNotFoundError: if checkpoint_path does not exist.
    """
    true_labels = []
    # Collect ground-truth labels batch by batch (ceil of one full pass)
    for data, labels in (ds_test.take((constants.N_TESTING_SET_COUNT // constants.N_BATCH_SIZE) + 1)):
        true_labels.extend(labels.numpy().tolist())
    print('\n True labels:\n', true_labels)
    if is_training:
        saved_model = model
    else:
        try:
            # os.stat raises FileNotFoundError when the checkpoint is absent
            _ = os.stat(checkpoint_path)
            # if os.path.isfile(checkpoint_path):
            print(os.path.sep, type(os.path.sep), type(checkpoint_path))
            print("Loading Checkpoint model {}".format(checkpoint_path.split(os.sep)[-1]))
            # For loading weights use loadedmodel.load_weights(checkpoint)
            saved_model = tf.keras.models.load_model(checkpoint_path, compile=False)
            #saved_model = model.load_weights(checkpoint_path)
            # Compile the model (compile=False above defers optimizer/loss setup)
            saved_model.compile(optimizer=tf.keras.optimizers.Adam(constants.H_LEARNING_RATE),
                                loss='sparse_categorical_crossentropy', metrics=['accuracy'])
            print(saved_model.summary())
        except FileNotFoundError:
            raise
    # Evaluate the model
    print("\nEvaluating on test Dataset.....\n")
    test_model = saved_model.evaluate(ds_test,
                                      batch_size=constants.N_BATCH_SIZE,
                                      steps=(constants.N_TESTING_SET_COUNT // constants.N_BATCH_SIZE) + 1,
                                      verbose=1)
    # print(test_model)
    # Predict to calculate per-sample labels for the confusion matrix
    print("\nPredicting on test Dataset.....\n")
    y_pred = saved_model.predict(ds_test,
                                 batch_size=constants.N_BATCH_SIZE,
                                 steps=(constants.N_TESTING_SET_COUNT // constants.N_BATCH_SIZE) + 1,
                                 verbose=1)
    # argmax over per-class scores -> predicted class indices
    y_pred = np.argmax(y_pred, axis=1)
    print('\n Predicted labels:\n', y_pred)
    # y_true = np.asarray(y_true).astype('int32')
    print('\n Confusion Matrix:\n')
    print(confusion_matrix(true_labels, y_pred))
    target_names = ['NRDR', 'RDR']
    plt.figure()
    sns.set(font_scale=1.8)
    cm_plot = sns.heatmap(confusion_matrix(true_labels, y_pred), annot=True, cbar=True,
                          xticklabels=target_names, yticklabels=target_names, annot_kws={"size": 65})
    cm_fig = cm_plot.get_figure()
    #plt.show()
    # Save classification report and confusion matrix to results folder
    if SAVE_RESULT:
        cr = classification_report(true_labels, y_pred, target_names=target_names, output_dict=True)
        _classification_report_csv(cr, cm_fig)
    cr = classification_report(true_labels, y_pred, target_names=target_names)
    print('Classification Report:\n')
    print("\n", cr, "\n")
    return
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,116
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/models/transfer_learning_architecture.py
|
from tensorflow.keras.applications import ResNet50V2
import tensorflow as tf
import tensorflow.keras as keras
def transfer_learning(input_shape):
    """Build a ResNet50V2 transfer-learning classifier for binary DR grading.

    Args:
        input_shape: shape of the raw input images, e.g. (256, 256, 3).
            (Fix: this parameter was previously ignored and the input size
            hard-coded to (256, 256, 3); it now drives the input layer.)

    Returns:
        tf.keras.Model: frozen-backbone ResNet50V2 with a small trainable
        head ending in a 2-way softmax.
    """
    base_model = ResNet50V2(include_top=False, input_shape=(224, 224, 3), pooling='avg', weights='imagenet')
    # Freeze the layers except the last 12 layers (which contains few sets of Conv layers and batch normalization
    # layers)
    count_layers = 0
    for layer in base_model.layers[:-12]:
        layer.trainable = False
        count_layers = count_layers + 1
    print(count_layers, "Number of layers in Resnet50")
    # Check the trainable status of the individual layers
    for layer in base_model.layers:
        print(layer, layer.trainable)
    base_model.summary()
    # Keras input layer — honours the caller-supplied shape instead of a
    # hard-coded (256, 256, 3)
    inputs = keras.Input(shape=input_shape)
    # preprocessing layer to resize image to 224*224, as Resnet input layer accepts 224,224,3
    r_input = keras.layers.experimental.preprocessing.Resizing(224, 224)(inputs)
    out = base_model(r_input)
    out = keras.layers.Dense(16, activation=tf.nn.relu, kernel_regularizer=keras.regularizers.l1(0.0001))(out)
    out = keras.layers.Dropout(0.6)(out)
    out = keras.layers.Dense(2, activation=tf.nn.softmax)(out)
    model = keras.Model(inputs, out)
    # Model Summary
    model.summary()
    return model
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,117
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/models/layers.py
|
import gin
import tensorflow as tf
@gin.configurable
def vgg_block(inputs, filters, kernel_size):
    """One VGG block: two same-padded ReLU convolutions, then 2x2 max-pooling.

    Parameters:
        inputs (Tensor): input of the VGG block
        filters (int): number of filters used for the convolutional layers
        kernel_size (tuple: 2): kernel size used for the convolutional layers, e.g. (3, 3)

    Returns:
        (Tensor): output of the VGG block
    """
    x = inputs
    for _ in range(2):
        x = tf.keras.layers.Conv2D(filters, kernel_size, padding='same', activation=tf.nn.relu)(x)
    return tf.keras.layers.MaxPool2D((2, 2))(x)
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,118
|
sgaruda-sudo/Diabetic_Retinopathy
|
refs/heads/master
|
/input_pipeline/datasets2.py
|
import tensorflow as tf
import pandas as pd
import constants
import glob
import numpy as np
from input_pipeline.preprocessing import resampling
import tensorflow_addons as tfa
import matplotlib.pyplot as plt
import random
from sklearn.model_selection import train_test_split
AUTOTUNE = tf.data.experimental.AUTOTUNE
def build_dataset(files, labels, data_set_type):
    """Assemble a batched, prefetched tf.data pipeline from paths and labels.

    Args:
        files: iterable of image file paths
        labels: iterable of labels aligned with `files`
        data_set_type: 'train' enables shuffling, caching and augmentation;
            any other value applies plain decoding/preprocessing only

    Returns:
        Batched tf.data.Dataset of (image, label) pairs.
    """
    ds = tf.data.Dataset.from_tensor_slices((files, labels))
    if data_set_type == 'train':
        print("Buildling {} data set".format(data_set_type))
        ds = ds.shuffle(constants.N_SHUFFLE_BUFFER)
        ds = ds.cache()
        ds = ds.map(augment_parse, num_parallel_calls=AUTOTUNE)
    else:
        print("Buildling {} data set".format(data_set_type))
        ds = ds.map(parse_func, AUTOTUNE)
    ds = ds.batch(constants.N_BATCH_SIZE).prefetch(AUTOTUNE)
    print(ds.element_spec)
    return ds
@tf.function
def augment_parse(a_filename, a_label):
    """Load, normalize and randomly augment one training image.

    Args:
        a_filename: path tensor of the JPEG file to load
        a_label: label, passed through unchanged

    Returns:
        (augmented float32 image of shape (256, 256, 3), label)
    """
    a_image_string = tf.io.read_file(a_filename)
    a_image_decoded = tf.io.decode_jpeg(a_image_string, channels=3)
    # original image dimension -2848*4288(H*W)
    # process image by reducing the black background
    a_image_bbcrp = tf.image.crop_to_bounding_box(a_image_decoded, 0, 266, 2848, 3426)
    a_image_normal = tf.cast(a_image_bbcrp, tf.float32) / 255.0
    a_image = tf.image.resize(a_image_normal, size=(256, 256))
    # a_image_crp1 = tf.image.central_crop(a_image_normal, 0.85)
    # augment by image flip and rotation
    a_image = tf.image.random_flip_left_right(a_image)
    a_image = tf.image.random_flip_up_down(a_image)
    # NOTE(review): random.randint executes in Python at tracing time, so
    # inside @tf.function the rotation divisor is likely frozen after the
    # first trace (same angle for every element) — confirm; a tf.random op
    # would vary per element.
    rot_range = random.randint(24, 36)
    # below line enables counterclockwise rotation and clockwise rotation
    # rot_range = random.randrange(-36, 36, 1)
    a_image = tfa.image.rotate(a_image, tf.constant((np.pi / rot_range)),
                               interpolation='NEAREST')
    return a_image, a_label
@tf.function
def parse_func(filename, label):
    """Load one image without augmentation: decode, crop the black border,
    scale to [0, 1] and resize to 256x256.

    Args:
        filename: path tensor of the JPEG file to load
        label: label, passed through unchanged

    Returns:
        (float32 image of shape (256, 256, 3), label)
    """
    raw = tf.io.read_file(filename)
    decoded = tf.io.decode_jpeg(raw, channels=3)
    # source images are 2848*4288 (H*W); crop away the black margins
    cropped = tf.image.crop_to_bounding_box(decoded, 0, 266, 2848, 3426)
    scaled = tf.cast(cropped, tf.float32) / 255.0
    # image_crp1 = tf.image.central_crop(image_normal, 0.85)
    resized = tf.image.resize(scaled, size=(256, 256))
    # label = tf.one_hot(label) @ for multiclass classification
    return resized, label
def load_data():
    """Convenience wrapper returning the (train, valid, test) datasets."""
    return get_datasets()
def get_datasets():
    """
    PURPOSE: Read raw data, reassign labels, resample the training split and
    build the respective train / validation / test datasets.
    Returns: train data, validation data, test data
    """
    # attach each image path to its row in the label tables
    df_train = pd.read_csv(constants.path_train_labels,
                           usecols=constants.COLUMN_LABELS)
    df_test = pd.read_csv(constants.path_test_labels,
                          usecols=constants.COLUMN_LABELS)
    df_train['img_paths'] = glob.glob(constants.path_train_img + '/*')
    df_test['img_paths'] = glob.glob(constants.path_test_img + '/*')
    # binarize labels: grades {0,1} -> 0 (NPR) and {2,3,4} -> 1 (PR)
    binary_map = {0: 0, 1: 0, 2: 1, 3: 1, 4: 1}
    df_train['Retinopathy grade'] = df_train['Retinopathy grade'].map(binary_map)
    df_test['Retinopathy grade'] = df_test['Retinopathy grade'].map(binary_map)
    print('Testing set:\n', df_test['Retinopathy grade'].value_counts())
    # shuffle the whole training table and carve out a validation split
    df_train_unbal, df_valid = train_test_split(df_train, test_size=0.2, random_state=42)
    # rebalance the class distribution of the training portion
    print('Before resampling:\n', df_train_unbal['Retinopathy grade'].value_counts())
    df_balanced = resampling(df_train_unbal, frac=1)
    print('After resampling:\n', df_balanced['Retinopathy grade'].value_counts())
    print("Shape of balanced train dataset:", df_balanced.shape)
    # build the three tf.data pipelines
    train_ds = build_dataset(df_balanced['img_paths'].tolist(),
                             df_balanced['Retinopathy grade'].astype(int).tolist(), 'train')
    valid_ds = build_dataset(df_valid['img_paths'].tolist(),
                             df_valid['Retinopathy grade'].astype(int).tolist(), 'valid')
    test_ds = build_dataset(df_test['img_paths'].tolist(),
                            df_test['Retinopathy grade'].astype(int).tolist(), 'test')
    # visual sanity check: show a sample grid from every split
    plot_images(train_ds, 'training samples')
    plot_images(valid_ds, 'validation samples')
    plot_images(test_ds, 'testing samples')
    return train_ds, valid_ds, test_ds
def plot_images(dataset, dataset_name):
    """Plot a 3x3 grid of sample images from the first batch of a dataset.

    Args:
        dataset: tf.data.Dataset yielding (images, labels) batches;
            assumes the batch holds at least 9 images - TODO confirm.
        dataset_name: figure title, e.g. 'training samples'.
    """
    plt.figure(figsize=(10, 10))
    plt.suptitle(dataset_name)
    # take(1) pulls a single batch; labels are unused for now (titles were
    # disabled in the original) so only the images are displayed.
    for images, _labels in dataset.take(1):
        for i in range(9):
            plt.subplot(3, 3, i + 1)
            plt.imshow(images[i].numpy())
            plt.axis("on")
    plt.show()
|
{"/main.py": ["/models/transfer_learning_architecture.py", "/models/architecture.py", "/hyper_parameter_tuning/hparam_tuning.py"], "/input_pipeline/datasets.py": ["/input_pipeline/preprocessing.py"], "/models/architectures.py": ["/models/layers.py"], "/tune.py": ["/input_pipeline/datasets.py", "/models/architectures.py", "/train.py"], "/input_pipeline/datasets2.py": ["/input_pipeline/preprocessing.py"]}
|
5,119
|
sulamanijaz/employee_management
|
refs/heads/master
|
/employee_management/emp_manage_app/views.py
|
from django.contrib.auth import authenticate, login
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.http import HttpResponseRedirect, HttpResponse
from datetime import datetime
from django.db.models import Q
from forms import userform, addsubuser, addschedule
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from formtools.wizard.views import WizardView, SessionWizardView
from employee_management.emp_manage_app.models import User, EmployeeSchedule
from django.shortcuts import redirect
def index_home(request):
    """Render the public landing page."""
    context = {'request': request}
    return render_to_response('employee/index.html', context,
                              RequestContext(request, {}))
def login_user(request):
    """Authenticate a user from the posted email/password.

    POST: try to log in; on success redirect to /home/, otherwise re-render
    the login form (with an error message for bad credentials).
    GET: render the empty login form.
    """
    if request.method == 'POST':
        # the "username" for authenticate() is the account email
        # (the custom User model sets USERNAME_FIELD = 'email')
        username = request.POST['email']
        password = request.POST['password']
        # Use Django's machinery to attempt to see if the username/password
        # combination is valid - a User object is returned if it is.
        user = authenticate(username=username, password=password)
        if user:
            # Is the account active? It could have been disabled.
            if user.is_active:
                login(request, user)
                return HttpResponseRedirect('/home/')
            else:
                # NOTE(review): inactive accounts are sent back to the login
                # page without any explanatory message - confirm intended.
                variables = {
                    'form': userform
                }
                return render(request, 'employee/login.html', variables)
        else:
            # Bad login details were provided. So we can't log the user in.
            # Echo the attempted email back so the user can correct it.
            variables = {
                'form': userform,
                'message':"Email or password incorrect",
                'email':username
            }
            return render(request, 'employee/login.html', variables)
    # The request is not a HTTP POST, so display the login form.
    # This scenario would most likely be a HTTP GET.
    else:
        # No context variables to pass to the template system, hence the
        # blank dictionary object...
        return render_to_response('employee/login.html', {
            'request': request, 'form': userform,
        }, RequestContext(request, {}))
@login_required
def user_home(request):
    """Dashboard: list this account's sub-users and the remaining quota."""
    sub_users = User.objects.filter(parent_user=request.user.id)
    existing = sub_users.count()
    # remaining slots = configured employee quota minus sub-users created
    remaining = int(request.user.no_of_employees) - int(existing)
    context = {
        'request': request,
        'emp_to_add': remaining,
        'count': existing + 1,
        'form': userform,
        'user_obj': sub_users,
    }
    return render_to_response('employee/home.html', context,
                              RequestContext(request, {}))
from django.contrib.auth import logout
# Use the login_required() decorator to ensure only those logged in can access the view.
@login_required
def user_logout(request):
    """Log the current user out, then send them to the home page."""
    logout(request)
    return HttpResponseRedirect('/home/')
class ContactWizard(SessionWizardView):
    """Two-step signup wizard (signupform1 then signupform2; see urls.py)."""
    template_name = 'employee/signup.html'

    def done(self, form_list, form_dict, **kwargs):
        """Create the top-level account from both steps' data and log it in.

        parent_user=0 marks this as a root account rather than a sub-user.
        NOTE(review): time_zone is hard-coded to 'india' - confirm intended.
        """
        # collect cleaned data per wizard step:
        # step 0 -> fullname/email/phone, step 1 -> no_of_employees/password
        user_dict = []
        for form in form_list:
            user_dict.append(form.cleaned_data)
        user_object = User.objects.create_superuser(user_dict[0]['email'], user_dict[1]['password'], fullname=user_dict[0]['fullname'],
                                                    no_of_employees=user_dict[1]['no_of_employees'], is_staff=False,
                                                    time_zone='india', parent_user=0)
        # re-authenticate so the session backend is attached before login()
        user = authenticate(username=user_object.email, password=user_dict[1]['password'])
        login(self.request, user)
        return redirect('/home/')
@login_required
def add_sub_user(request, msg=None):
    """Create an employee (sub-user) account under the logged-in account.

    GET renders the add-employee form; POST validates it and creates the
    user.  ``msg`` is an optional status message passed through the URL.
    """
    user_object = User.objects.filter(parent_user=request.user.id)
    user_count = user_object.count()
    count = user_count + 1
    t_emp = int(request.user.no_of_employees)
    msgs = msg if msg else ''
    if request.method == 'GET':
        return render_to_response('employee/addsubuser.html', {
            'request': request, 'form': addsubuser, 'count': count,
            't_emp': t_emp, 'msg': msgs
        }, RequestContext(request, {}))
    elif request.method == 'POST':
        fullname = request.POST.get('fullname')
        email = request.POST.get('email')
        password = request.POST.get('password')
        # BUG FIX: request.FILES['user_avatar'] raised MultiValueDictKeyError
        # before validation whenever no avatar was attached; .get() returns
        # None instead and the (blank=True) field accepts it.
        image = request.FILES.get('user_avatar')
        # BUG FIX: also pass request.FILES so the form validates the upload.
        form = addsubuser(request.POST, request.FILES)
        if not form.is_valid():
            return render_to_response('employee/addsubuser.html', {
                'request': request, 'form': form, 'count': count,
                't_emp': t_emp
            }, RequestContext(request, {}))
        else:
            # sub-users have no quota of their own (no_of_employees=0)
            User.objects.create_user(email, password, fullname=fullname,
                                     no_of_employees=0, is_staff=True,
                                     time_zone='india', parent_user=request.user.id,
                                     user_avatar=image
                                     )
            return redirect('/add_user/')
@login_required
def emp_schedule(request):
    """Create schedule rows for the selected employees.

    POST expects shift_starts / shift_ends in '%Y-%m-%d %H:%M', a list of
    employee ids in toBox_cats[], an availability flag and a recurrance
    value; one EmployeeSchedule row is bulk-created per selected employee.
    GET simply renders the scheduling page.
    """
    msg = ''
    user_object = User.objects.filter(parent_user=request.user.id)
    if request.method == 'POST':
        shift_starts = request.POST.get('shift_starts', None)
        shift_ends = request.POST.get('shift_ends', None)
        toBox_cats = request.POST.getlist('toBox_cats[]', None)
        availability = request.POST.get('availability', None)
        recurrance = request.POST.get('recurrance', None)
        # NOTE(review): strptime raises ValueError on a malformed or missing
        # date - there is no form-level validation before this point.
        shift_starts = datetime.strptime(shift_starts, "%Y-%m-%d %H:%M")
        shift_ends = datetime.strptime(shift_ends, "%Y-%m-%d %H:%M")
        if toBox_cats:
            emp_schedule_list = []
            for user in toBox_cats:
                emp_schedule_list.append(EmployeeSchedule(parent_user=User.objects.get(pk=request.user.id), shift_start=shift_starts,
                                                          shift_ends=shift_ends, employee_id=User.objects.get(pk=user),
                                                          availability=availability, recurring=recurrance)
                                         )
            # single INSERT for all new schedule rows
            EmployeeSchedule.objects.bulk_create([emp_sch_obj for emp_sch_obj in emp_schedule_list])
            msg = "Schedule For selected users has been created successfully."
    return render_to_response('employee/emp_schedule.html', {
        'request': request, 'user_object': user_object, 'msg': msg, 'form': addschedule(),
    }, RequestContext(request, {}))
@login_required
def emp_detail_shift(request, id):
    """Show all scheduled shifts for one employee, most recent start first.

    BUG FIX: removed a leftover Python-2 debug print statement
    ("print emp_obj.count(),'count'") that also broke Python 3 parsing.
    """
    emp_obj = EmployeeSchedule.objects.filter(employee_id=id).order_by('-shift_start')
    return render_to_response('employee/detail_schedule_emp.html', {
        'request': request, 'emp_obj': emp_obj
    }, RequestContext(request, {}))
@login_required
def upload_image(request):
    """Store an uploaded avatar for the current user, then go home.

    BUG FIX: the original returned None (which Django turns into a 500) for
    GET requests or when no file was attached; every path now redirects,
    and FILES.get() avoids a MultiValueDictKeyError on a missing file.
    """
    if request.method == 'POST':
        myfile = request.FILES.get('my_file')
        if myfile:
            User.objects.filter(pk=request.user.id).update(user_avatar=myfile)
    return redirect('/home/')
|
{"/employee_management/emp_manage_app/forms.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/templatetags/custom_tags.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/urls.py": ["/employee_management/emp_manage_app/views.py", "/employee_management/emp_manage_app/forms.py"]}
|
5,120
|
sulamanijaz/employee_management
|
refs/heads/master
|
/employee_management/emp_manage_app/admin.py
|
from django.contrib import admin

# BUG FIX: the original imported only the app package and then accessed
# emp_manage_app.models.*, which works only if another module had already
# imported the models submodule; import the models explicitly instead.
from employee_management.emp_manage_app.models import Employees, EmployeeSchedule, User

# Register your models here.
myModels = [User, Employees, EmployeeSchedule]  # iterable list
admin.site.register(myModels)
|
{"/employee_management/emp_manage_app/forms.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/templatetags/custom_tags.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/urls.py": ["/employee_management/emp_manage_app/views.py", "/employee_management/emp_manage_app/forms.py"]}
|
5,121
|
sulamanijaz/employee_management
|
refs/heads/master
|
/employee_management/emp_manage_app/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.models import UserManager
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser
from django.conf import settings
from django.core.mail import send_mail
from django.core.validators import RegexValidator
# Create your models here.
class UserManager(BaseUserManager):
    """Manager for the custom email-keyed User model."""

    def _create_user(self, email, password, **extra_fields):
        """
        Creates and saves a User with the given email and password.
        """
        if not email:
            raise ValueError('The given email must be set')
        email = self.normalize_email(email)
        user = self.model(email=email, **extra_fields)
        user.set_password(password)  # hashes the raw password
        user.save(using=self._db)
        return user

    def create_user(self, email, password=None, **extra_fields):
        # Regular (sub-user) accounts are never superusers.
        extra_fields.setdefault('is_superuser', False)
        return self._create_user(email, password, **extra_fields)

    def create_superuser(self, email, password, **extra_fields):
        # NOTE(review): is_staff defaults to False here, the opposite of
        # stock Django; in this app superusers are the top-level accounts
        # created by the signup wizard - confirm this is intentional.
        extra_fields.setdefault('is_superuser', True)
        extra_fields.setdefault('is_staff', False)
        if extra_fields.get('is_superuser') is not True:
            raise ValueError('Superuser must have is_superuser=True.')
        return self._create_user(email, password, **extra_fields)
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user that authenticates by email.

    The same model stores both top-level accounts (parent_user == 0, created
    by the signup wizard) and their employee sub-users (parent_user set to
    the owning account's id).
    """
    fullname = models.CharField(max_length=400)
    phone_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$',
                                 message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.")
    phone_number = models.CharField(validators=[phone_regex], blank=True, max_length=20)
    # login identifier (USERNAME_FIELD below)
    email = models.EmailField(max_length=140, unique=True)
    # employee quota; 0 for sub-users (see views.add_sub_user)
    no_of_employees = models.IntegerField()
    time_zone = models.CharField(max_length=400)
    # id of the owning account; 0 appears to mark root accounts - TODO confirm
    parent_user = models.IntegerField(blank=True)
    is_staff = models.BooleanField(default=True)
    user_avatar = models.ImageField(blank=True, upload_to='avatar/')
    objects = UserManager()
    USERNAME_FIELD = 'email'  # authenticate with email instead of username
    REQUIRED_FIELDS = []

    class Meta:
        verbose_name = _('user')
        verbose_name_plural = _('users')

    def get_full_name(self):
        '''
        Returns the stored fullname with surrounding whitespace stripped.
        '''
        full_name = self.fullname
        return full_name.strip()

    def get_short_name(self):
        '''
        Returns the short name for the user (same as fullname here).
        '''
        return self.fullname

    def email_user(self, subject, message, from_email=None, **kwargs):
        '''
        Sends an email to this User.
        '''
        send_mail(subject, message, from_email, [self.email], **kwargs)
class Employees(models.Model):
    """One worked shift record: check-in/out times and total hours."""
    # owning (parent) account
    parent_user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='emp_parent', on_delete=models.CASCADE)
    # the employee the record belongs to
    employee_id = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='emp_id', on_delete=models.CASCADE)
    check_intime = models.DateTimeField()
    check_outime = models.DateTimeField()
    total_hours = models.IntegerField()
class EmployeeSchedule(models.Model):
    """A shift assigned by a parent account to one of its employees."""
    parent_user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='sch_parent', on_delete=models.CASCADE)
    employee_id = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='sch_employee', on_delete=models.CASCADE)
    # Day_date = models.DateTimeField()
    shift_start = models.DateTimeField()
    shift_ends = models.DateTimeField()
    availability = models.NullBooleanField(default=True)
    # free-text recurrence value taken straight from the POST data
    recurring = models.CharField(max_length=200)
|
{"/employee_management/emp_manage_app/forms.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/templatetags/custom_tags.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/urls.py": ["/employee_management/emp_manage_app/views.py", "/employee_management/emp_manage_app/forms.py"]}
|
5,122
|
sulamanijaz/employee_management
|
refs/heads/master
|
/employee_management/emp_manage_app/forms.py
|
from django.forms import ModelForm, TextInput
from employee_management.emp_manage_app.models import User, EmployeeSchedule
from django import forms
from django.contrib.auth import authenticate
class userform(ModelForm):
    """Login form: email plus masked password."""
    class Meta:
        model = User
        fields = ['email', 'password']
        widgets = {'password': forms.PasswordInput()}

    def __init__(self, *args, **kwargs):
        super(userform, self).__init__(*args, **kwargs)
        # give the email input a stable id and placeholder for the template
        self.fields['email'].widget = TextInput(attrs={
            'id': 'emailID',
            'placeholder': 'Enter Your email',
        })
class signupform1(ModelForm):
    """Signup wizard step 1: identity fields (see views.ContactWizard)."""
    class Meta:
        model = User
        fields = ['fullname', 'email', 'phone_number']
class signupform2(ModelForm):
    """Signup wizard step 2: employee quota and masked password."""
    class Meta:
        model = User
        fields = ['no_of_employees', 'password']
        widgets = {'password': forms.PasswordInput()}
class addsubuser(ModelForm):
    """Form for creating an employee sub-user (see views.add_sub_user)."""
    class Meta:
        model = User
        fields = ['fullname', 'email', 'password', 'phone_number', 'user_avatar']
        widgets = {'password': forms.PasswordInput()}
class addschedule(ModelForm):
    """Minimal schedule form; only the availability flag is form-managed,
    the remaining schedule fields are read from raw POST data in the view."""
    class Meta:
        model = EmployeeSchedule
        fields = ['availability']
|
{"/employee_management/emp_manage_app/forms.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/templatetags/custom_tags.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/urls.py": ["/employee_management/emp_manage_app/views.py", "/employee_management/emp_manage_app/forms.py"]}
|
5,123
|
sulamanijaz/employee_management
|
refs/heads/master
|
/employee_management/emp_manage_app/templatetags/custom_tags.py
|
from django.template import Library
from employee_management.emp_manage_app.models import EmployeeSchedule
from datetime import datetime
register = Library()
@register.simple_tag(name='get_latest_sch')
def get_latest_sch(user_id, shift):
    """Template tag: format the start or end of the employee's most
    recently started shift as 'Mon DD YYYY HH:MM'.

    Returns None when the employee has no schedule rows.
    """
    schedules = EmployeeSchedule.objects.filter(employee_id=user_id).order_by('-shift_start')
    if not schedules:
        return None
    latest = schedules[0]
    # 'start' selects the shift's start time, anything else its end time
    moment = latest.shift_start if shift == 'start' else latest.shift_ends
    return datetime.strftime(moment, '%b %d %Y %I:%M')
|
{"/employee_management/emp_manage_app/forms.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/templatetags/custom_tags.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/urls.py": ["/employee_management/emp_manage_app/views.py", "/employee_management/emp_manage_app/forms.py"]}
|
5,124
|
sulamanijaz/employee_management
|
refs/heads/master
|
/employee_management/emp_manage_app/urls.py
|
from django.conf.urls import url
from . import views
from employee_management.emp_manage_app.views import ContactWizard
from employee_management.emp_manage_app.forms import signupform1, signupform2
urlpatterns = [
    # landing page
    url(r'^$', views.index_home, name='index'),
    url(r'^login/$', views.login_user, name='login_user'),
    url(r'^home/$', views.user_home, name='user_home'),
    url(r'^logout/$', views.user_logout, name='logout_user'),
    url(r'^add_user/$', views.add_sub_user, name='add_sub_user'),
    url(r'^upload_avatar/$', views.upload_image, name='upload_avatar'),
    # variant carrying an optional status message in the URL
    url(r'^add_user/(?P<msg>[\w\-]+)/$', views.add_sub_user),
    url(r'^schedule/$', views.emp_schedule, name='emp_schedule'),
    # BUG FIX: the pattern ended with a doubled '$$'; a single '$' anchors
    # the end of the path.
    url(r'^schedule_detail/(?P<id>[\d\-]+)/$', views.emp_detail_shift, name='emp_schedule_detail'),
    # two-step signup wizard (forms correspond to wizard steps 0 and 1)
    url(r'^signup/$', ContactWizard.as_view([signupform1, signupform2]), name='signup_user'),
]
|
{"/employee_management/emp_manage_app/forms.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/templatetags/custom_tags.py": ["/employee_management/emp_manage_app/models.py"], "/employee_management/emp_manage_app/urls.py": ["/employee_management/emp_manage_app/views.py", "/employee_management/emp_manage_app/forms.py"]}
|
5,127
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/55208dc0638d_add_back_altered_tables_removed_.py
|
"""Add back altered tables. Removed birthdate from Employee table as well. Unnecessary column.
Revision ID: 55208dc0638d
Revises: c34031564651
Create Date: 2018-11-28 17:27:38.201034
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '55208dc0638d'
down_revision = 'c34031564651'
branch_labels = None
depends_on = None
def upgrade():
    """Create the full schema: lookup tables first, then tables that carry
    foreign keys into them (user -> customer -> order -> order_detail)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # --- independent lookup/entity tables ---
    op.create_table('category',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('categoryname', sa.String(length=255), nullable=True),
    sa.Column('description', sa.Text(length=500), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_category_categoryname'), 'category', ['categoryname'], unique=True)
    op.create_table('employee',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('employeeID', sa.Integer(), nullable=True),
    sa.Column('password_hash', sa.String(length=128), nullable=True),
    sa.Column('lastname', sa.String(length=255), nullable=True),
    sa.Column('firstname', sa.String(length=255), nullable=True),
    sa.Column('notes', sa.Text(length=1000), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_employee_employeeID'), 'employee', ['employeeID'], unique=True)
    op.create_index(op.f('ix_employee_firstname'), 'employee', ['firstname'], unique=False)
    op.create_index(op.f('ix_employee_lastname'), 'employee', ['lastname'], unique=False)
    op.create_table('shipper',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('shippername', sa.String(length=255), nullable=True),
    sa.Column('phone', sa.String(length=25), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_shipper_phone'), 'shipper', ['phone'], unique=False)
    op.create_index(op.f('ix_shipper_shippername'), 'shipper', ['shippername'], unique=False)
    op.create_table('supplier',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('suppliername', sa.String(length=255), nullable=True),
    sa.Column('contactname', sa.String(length=255), nullable=True),
    sa.Column('address', sa.String(length=255), nullable=True),
    sa.Column('city', sa.String(length=255), nullable=True),
    sa.Column('postalcode', sa.String(length=255), nullable=True),
    sa.Column('country', sa.String(length=255), nullable=True),
    sa.Column('phone', sa.String(length=25), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_supplier_address'), 'supplier', ['address'], unique=False)
    op.create_index(op.f('ix_supplier_city'), 'supplier', ['city'], unique=False)
    op.create_index(op.f('ix_supplier_contactname'), 'supplier', ['contactname'], unique=False)
    op.create_index(op.f('ix_supplier_country'), 'supplier', ['country'], unique=False)
    op.create_index(op.f('ix_supplier_phone'), 'supplier', ['phone'], unique=False)
    op.create_index(op.f('ix_supplier_postalcode'), 'supplier', ['postalcode'], unique=False)
    op.create_index(op.f('ix_supplier_suppliername'), 'supplier', ['suppliername'], unique=False)
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=64), nullable=True),
    sa.Column('email', sa.String(length=120), nullable=True),
    sa.Column('password_hash', sa.String(length=128), nullable=True),
    sa.Column('about_me', sa.String(length=255), nullable=True),
    sa.Column('last_seen', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
    # --- tables referencing 'user' ---
    op.create_table('customer',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('customername', sa.String(length=255), nullable=True),
    sa.Column('address', sa.String(length=255), nullable=True),
    sa.Column('city', sa.String(length=255), nullable=True),
    sa.Column('postalcode', sa.String(length=255), nullable=True),
    sa.Column('country', sa.String(length=255), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_customer_address'), 'customer', ['address'], unique=False)
    op.create_index(op.f('ix_customer_city'), 'customer', ['city'], unique=False)
    op.create_index(op.f('ix_customer_country'), 'customer', ['country'], unique=False)
    op.create_index(op.f('ix_customer_customername'), 'customer', ['customername'], unique=False)
    op.create_index(op.f('ix_customer_postalcode'), 'customer', ['postalcode'], unique=False)
    # self-referential follow relation between users (no primary key)
    op.create_table('followers',
    sa.Column('follower_id', sa.Integer(), nullable=True),
    sa.Column('followed_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['followed_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['follower_id'], ['user.id'], )
    )
    op.create_table('post',
    sa.Column('body', sa.String(length=140), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('timestamp', sa.DateTime(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_post_timestamp'), 'post', ['timestamp'], unique=False)
    # --- product catalog and orders ---
    op.create_table('product',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('productname', sa.String(length=255), nullable=True),
    sa.Column('supplier_id', sa.Integer(), nullable=True),
    sa.Column('category_id', sa.Integer(), nullable=True),
    sa.Column('unit', sa.Integer(), nullable=True),
    sa.Column('price', sa.Numeric(), nullable=True),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ),
    sa.ForeignKeyConstraint(['supplier_id'], ['supplier.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_product_productname'), 'product', ['productname'], unique=False)
    op.create_table('order',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('customer_id', sa.Integer(), nullable=True),
    sa.Column('orderdate', sa.Date(), nullable=True),
    sa.Column('shipper_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['customer_id'], ['customer.id'], ),
    sa.ForeignKeyConstraint(['shipper_id'], ['shipper.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('order_detail',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.Column('product_id', sa.Integer(), nullable=True),
    sa.Column('quantity', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['order_id'], ['order.id'], ),
    sa.ForeignKeyConstraint(['product_id'], ['product.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop everything created by upgrade(), in reverse dependency order
    (dependent tables first so foreign keys never dangle)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('order_detail')
    op.drop_table('order')
    op.drop_index(op.f('ix_product_productname'), table_name='product')
    op.drop_table('product')
    op.drop_index(op.f('ix_post_timestamp'), table_name='post')
    op.drop_table('post')
    op.drop_table('followers')
    op.drop_index(op.f('ix_customer_postalcode'), table_name='customer')
    op.drop_index(op.f('ix_customer_customername'), table_name='customer')
    op.drop_index(op.f('ix_customer_country'), table_name='customer')
    op.drop_index(op.f('ix_customer_city'), table_name='customer')
    op.drop_index(op.f('ix_customer_address'), table_name='customer')
    op.drop_table('customer')
    op.drop_index(op.f('ix_user_username'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_table('user')
    op.drop_index(op.f('ix_supplier_suppliername'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_postalcode'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_phone'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_country'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_contactname'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_city'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_address'), table_name='supplier')
    op.drop_table('supplier')
    op.drop_index(op.f('ix_shipper_shippername'), table_name='shipper')
    op.drop_index(op.f('ix_shipper_phone'), table_name='shipper')
    op.drop_table('shipper')
    op.drop_index(op.f('ix_employee_lastname'), table_name='employee')
    op.drop_index(op.f('ix_employee_firstname'), table_name='employee')
    op.drop_index(op.f('ix_employee_employeeID'), table_name='employee')
    op.drop_table('employee')
    op.drop_index(op.f('ix_category_categoryname'), table_name='category')
    op.drop_table('category')
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,128
|
grbarker/Freyja
|
refs/heads/master
|
/app/main/forms.py
|
##Form code initially taken from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-iii-web-forms
##then altered as necessary to fit the needs of the project
from flask import request
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, PasswordField, BooleanField, SubmitField, SelectField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length
from app.models import User, Employee
class EditProfileForm(FlaskForm):
    """Profile edit form; rejects a username change that collides with an
    existing user while still allowing the user to keep their own name."""
    username = StringField('Username', validators=[DataRequired()])
    about_me = TextAreaField('About me', validators=[Length(min=0, max=140)])
    submit = SubmitField('Submit')

    ##Next two pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-vii-error-handling
    def __init__(self, original_username, *args, **kwargs):
        # remember the pre-edit username so validation can skip it
        super(EditProfileForm, self).__init__(*args, **kwargs)
        self.original_username = original_username

    def validate_username(self, username):
        # only check uniqueness when the name actually changed
        if username.data != self.original_username:
            user = User.query.filter_by(username=self.username.data).first()
            if user is not None:
                raise ValidationError('Please use a different username.')
class PostForm(FlaskForm):
    """Single-field form for submitting a new (max 140 chars) post."""
    post = TextAreaField('Say something', validators=[
        DataRequired(), Length(min=1, max=140)])
    submit = SubmitField('Submit')
class SortForm(FlaskForm):
    """Sort selector; choices are populated by the view (coerced to int)."""
    sort_type = SelectField('Sort', coerce=int)
    submit = SubmitField('Sort')
##From https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-xvi-full-text-search
class SearchForm(FlaskForm):
    """GET-based search form: reads its data from the query string and
    disables CSRF so the search URL is shareable/bookmarkable."""
    q = StringField('Search', validators=[DataRequired()])

    def __init__(self, *args, **kwargs):
        # default to request.args (GET) unless formdata was passed explicitly
        if 'formdata' not in kwargs:
            kwargs['formdata'] = request.args
        # NOTE(review): csrf_enabled is deprecated in newer flask-wtf
        # (use Meta.csrf) - confirm the installed version still honors it.
        if 'csrf_enabled' not in kwargs:
            kwargs['csrf_enabled'] = False
        super(SearchForm, self).__init__(*args, **kwargs)
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,129
|
grbarker/Freyja
|
refs/heads/master
|
/app/errors/handlers.py
|
## Pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-vii-error-handling
from flask import render_template
from app import db
from app.errors import bp
@bp.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page with the matching status code."""
    return render_template('errors/404.html'), 404
@bp.errorhandler(500)
def internal_error(error):
    """Render the custom 500 page; roll back the session first so a failed
    transaction does not poison subsequent requests."""
    db.session.rollback()
    return render_template('errors/500.html'), 500
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,130
|
grbarker/Freyja
|
refs/heads/master
|
/app/auth/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField, SelectField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo, Length
from app.models import User, Employee
class LoginForm(FlaskForm):
    """Standard username/password login form with a remember-me option."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')
class RegistrationForm(FlaskForm):
    """User registration form; the validate_<field> methods are invoked
    automatically by WTForms during form.validate()."""
    username = StringField('Username', validators=[DataRequired()])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    password2 = PasswordField(
        'Repeat Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Register')

    def validate_username(self, username):
        # reject usernames already present in the user table
        user = User.query.filter_by(username=username.data).first()
        if user is not None:
            raise ValidationError('Please use a different username.')

    def validate_email(self, email):
        # reject emails already present in the user table
        user = User.query.filter_by(email=email.data).first()
        if user is not None:
            raise ValidationError('Please use a different email address.')
class EmployeeRegistrationForm(FlaskForm):
    """Employee sign-up form: unique employee ID plus a unique
    first+last name combination."""
    employee_id = StringField('Employee ID', validators=[DataRequired()])
    lastname = StringField('Last name', validators=[DataRequired()])
    firstname = StringField('First name', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    password2 = PasswordField(
        'Repeat Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Register')

    def validate_employee_id(self, employee_id):
        # Reject an ID already present in the employee table.
        employee = Employee.query.filter_by(employeeID=employee_id.data).first()
        if employee is not None:
            raise ValidationError('Please use a different ID number.')

    def validate_firstname(self, firstname):
        # WTForms only calls validators named validate_<fieldname>(self, field);
        # the original validate_name(self, lastname, firstname) matched no field
        # and was never invoked. Hook the check onto the firstname field and
        # read the sibling lastname field via self.
        employee = Employee.query.filter_by(
            lastname=self.lastname.data, firstname=firstname.data).first()
        if employee is not None:
            raise ValidationError('This name is already in use. Please use a different first and last name.')
##Pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-x-email-support
class ResetPasswordRequestForm(FlaskForm):
    """Form asking for the email address to send a password-reset link to."""
    email = StringField('Email', validators=[DataRequired(), Email()])
    submit = SubmitField('Request Password Reset')
##from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-x-email-support
class ResetPasswordForm(FlaskForm):
    """Form for choosing a new password after following a reset link."""
    password = PasswordField('Password', validators=[DataRequired()])
    password2 = PasswordField(
        'Repeat Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Request Password Reset')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,131
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/10ef47bef304_remove_tables_to_alter_them_as_sqlite_.py
|
"""Remove tables to alter them as sqlite does not support droping or altering table columns.
Revision ID: 10ef47bef304
Revises: 9f614adf3ffa
Create Date: 2018-11-28 13:22:31.788466
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '10ef47bef304'          # this migration's id
down_revision = '9f614adf3ffa'     # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Drop every application table (and its indexes).

    SQLite cannot drop or alter individual columns, so the whole schema is
    torn down here and rebuilt by a later revision with the new layout.
    Indexes are dropped before their owning table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_category_categoryname', table_name='category')
    op.drop_table('category')
    op.drop_table('followers')
    op.drop_index('ix_shipper_phone', table_name='shipper')
    op.drop_index('ix_shipper_shippername', table_name='shipper')
    op.drop_table('shipper')
    op.drop_index('ix_supplier_address', table_name='supplier')
    op.drop_index('ix_supplier_city', table_name='supplier')
    op.drop_index('ix_supplier_contactname', table_name='supplier')
    op.drop_index('ix_supplier_country', table_name='supplier')
    op.drop_index('ix_supplier_phone', table_name='supplier')
    op.drop_index('ix_supplier_postalcode', table_name='supplier')
    op.drop_index('ix_supplier_suppliername', table_name='supplier')
    op.drop_table('supplier')
    op.drop_table('order_detail')
    op.drop_index('ix_employee_firstname', table_name='employee')
    op.drop_index('ix_employee_lastname', table_name='employee')
    op.drop_table('employee')
    op.drop_table('order')
    op.drop_index('ix_post_timestamp', table_name='post')
    op.drop_table('post')
    op.drop_index('ix_customer_address', table_name='customer')
    op.drop_index('ix_customer_city', table_name='customer')
    op.drop_index('ix_customer_contactname', table_name='customer')
    op.drop_index('ix_customer_country', table_name='customer')
    op.drop_index('ix_customer_customername', table_name='customer')
    op.drop_index('ix_customer_postalcode', table_name='customer')
    op.drop_table('customer')
    op.drop_index('ix_product_productname', table_name='product')
    op.drop_table('product')
    op.drop_index('ix_user_email', table_name='user')
    op.drop_index('ix_user_username', table_name='user')
    op.drop_table('user')
    # ### end Alembic commands ###
def downgrade():
    """Recreate the pre-migration schema exactly as it was reflected from
    SQLite (hence the uppercase generic types such as sa.INTEGER()).

    Tables are created in roughly reverse-dependency order so foreign-key
    targets ('user', 'customer', 'employee', 'shipper', 'supplier',
    'category') exist before the tables that reference them.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('username', sa.VARCHAR(length=64), nullable=True),
    sa.Column('email', sa.VARCHAR(length=120), nullable=True),
    sa.Column('password_hash', sa.VARCHAR(length=128), nullable=True),
    sa.Column('about_me', sa.VARCHAR(length=255), nullable=True),
    sa.Column('last_seen', sa.DATETIME(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # unique=1 is the reflected SQLite spelling of unique=True
    op.create_index('ix_user_username', 'user', ['username'], unique=1)
    op.create_index('ix_user_email', 'user', ['email'], unique=1)
    op.create_table('product',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('productname', sa.VARCHAR(length=255), nullable=True),
    sa.Column('supplier_id', sa.INTEGER(), nullable=True),
    sa.Column('category_id', sa.INTEGER(), nullable=True),
    sa.Column('unit', sa.INTEGER(), nullable=True),
    sa.Column('price', sa.NUMERIC(), nullable=True),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ),
    sa.ForeignKeyConstraint(['supplier_id'], ['supplier.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_product_productname', 'product', ['productname'], unique=False)
    op.create_table('customer',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('customername', sa.VARCHAR(length=255), nullable=True),
    sa.Column('contactname', sa.VARCHAR(length=255), nullable=True),
    sa.Column('address', sa.VARCHAR(length=255), nullable=True),
    sa.Column('city', sa.VARCHAR(length=255), nullable=True),
    sa.Column('postalcode', sa.VARCHAR(length=255), nullable=True),
    sa.Column('country', sa.VARCHAR(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_customer_postalcode', 'customer', ['postalcode'], unique=False)
    op.create_index('ix_customer_customername', 'customer', ['customername'], unique=False)
    op.create_index('ix_customer_country', 'customer', ['country'], unique=False)
    op.create_index('ix_customer_contactname', 'customer', ['contactname'], unique=False)
    op.create_index('ix_customer_city', 'customer', ['city'], unique=False)
    op.create_index('ix_customer_address', 'customer', ['address'], unique=False)
    op.create_table('post',
    sa.Column('body', sa.VARCHAR(length=140), nullable=True),
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('timestamp', sa.DATETIME(), nullable=True),
    sa.Column('user_id', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_post_timestamp', 'post', ['timestamp'], unique=False)
    op.create_table('order',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('customer_id', sa.INTEGER(), nullable=True),
    sa.Column('employee_id', sa.INTEGER(), nullable=True),
    sa.Column('orderdate', sa.DATE(), nullable=True),
    sa.Column('shipper_id', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['customer_id'], ['customer.id'], ),
    sa.ForeignKeyConstraint(['employee_id'], ['employee.id'], ),
    sa.ForeignKeyConstraint(['shipper_id'], ['shipper.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('employee',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('lastname', sa.VARCHAR(length=255), nullable=True),
    sa.Column('firstname', sa.VARCHAR(length=255), nullable=True),
    sa.Column('birthdate', sa.DATETIME(), nullable=True),
    sa.Column('notes', sa.TEXT(length=1000), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_employee_lastname', 'employee', ['lastname'], unique=False)
    op.create_index('ix_employee_firstname', 'employee', ['firstname'], unique=False)
    op.create_table('order_detail',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('order_id', sa.INTEGER(), nullable=True),
    sa.Column('product_id', sa.INTEGER(), nullable=True),
    sa.Column('quantity', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['order_id'], ['order.id'], ),
    sa.ForeignKeyConstraint(['product_id'], ['product.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('supplier',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('suppliername', sa.VARCHAR(length=255), nullable=True),
    sa.Column('contactname', sa.VARCHAR(length=255), nullable=True),
    sa.Column('address', sa.VARCHAR(length=255), nullable=True),
    sa.Column('city', sa.VARCHAR(length=255), nullable=True),
    sa.Column('postalcode', sa.VARCHAR(length=255), nullable=True),
    sa.Column('country', sa.VARCHAR(length=255), nullable=True),
    sa.Column('phone', sa.VARCHAR(length=25), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_supplier_suppliername', 'supplier', ['suppliername'], unique=False)
    op.create_index('ix_supplier_postalcode', 'supplier', ['postalcode'], unique=False)
    op.create_index('ix_supplier_phone', 'supplier', ['phone'], unique=False)
    op.create_index('ix_supplier_country', 'supplier', ['country'], unique=False)
    op.create_index('ix_supplier_contactname', 'supplier', ['contactname'], unique=False)
    op.create_index('ix_supplier_city', 'supplier', ['city'], unique=False)
    op.create_index('ix_supplier_address', 'supplier', ['address'], unique=False)
    op.create_table('shipper',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('shippername', sa.VARCHAR(length=255), nullable=True),
    sa.Column('phone', sa.VARCHAR(length=25), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_shipper_shippername', 'shipper', ['shippername'], unique=False)
    op.create_index('ix_shipper_phone', 'shipper', ['phone'], unique=False)
    # association table for the self-referential User follower relationship
    op.create_table('followers',
    sa.Column('follower_id', sa.INTEGER(), nullable=True),
    sa.Column('followed_id', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['followed_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['follower_id'], ['user.id'], )
    )
    op.create_table('category',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('categoryname', sa.VARCHAR(length=255), nullable=True),
    sa.Column('description', sa.TEXT(length=500), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_category_categoryname', 'category', ['categoryname'], unique=1)
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,132
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/18f4e0722456_add_employeeid_password_hash_columns_.py
|
"""Add employeeID, password_hash columns and add set_password, check_password, and avatar funcs to Employee table. This was to allow for two sets of users. The general public and employees will have different access and abilities.
Revision ID: 18f4e0722456
Revises: a9287f3ba5b0
Create Date: 2018-11-28 14:17:22.836263
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '18f4e0722456'          # this migration's id
down_revision = 'a9287f3ba5b0'     # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Add login credentials (employeeID + password_hash) to the employee
    table so employees can authenticate separately from public users."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('employee', sa.Column('employeeID', sa.Integer(), nullable=True))
    op.add_column('employee', sa.Column('password_hash', sa.String(length=128), nullable=True))
    op.create_index(op.f('ix_employee_employeeID'), 'employee', ['employeeID'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse of upgrade(): drop the index first, then the two columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_employee_employeeID'), table_name='employee')
    op.drop_column('employee', 'password_hash')
    op.drop_column('employee', 'employeeID')
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,133
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/5a099a3dde86_add_middlename_column_to_user_table.py
|
"""Add middlename column to user table.
Revision ID: 5a099a3dde86
Revises: 81162fe5d987
Create Date: 2018-11-29 00:29:52.750869
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5a099a3dde86'          # this migration's id
down_revision = '81162fe5d987'     # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable middlename column to the user table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('middlename', sa.String(length=255), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Reverse of upgrade(): drop the middlename column again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'middlename')
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,134
|
grbarker/Freyja
|
refs/heads/master
|
/app/models.py
|
import base64
import jwt
import os
from werkzeug.security import generate_password_hash, check_password_hash
from hashlib import md5
from flask import current_app, url_for
from flask_login import UserMixin
from datetime import datetime
from time import time
from app import db, login
from app.search import add_to_index, remove_from_index, query_index
class SearchableMixin(object):
    """Keep a model's rows mirrored in the full-text index managed by
    app.search (add_to_index / remove_from_index / query_index).

    Models opt in by inheriting this mixin and listing their indexed
    columns in __searchable__. The before_commit/after_commit hooks are
    registered on db.session at module level, just below this class.
    """
    @classmethod
    def search(cls, expression, page, per_page):
        """Run a full-text query and return (query, total_hits).

        The returned SQLAlchemy query preserves the search engine's
        ranking by ordering on a CASE over the hit ids.
        """
        ids, total = query_index(cls.__tablename__, expression, page, per_page)
        if total == 0:
            # filter_by(id=0) yields an empty query of the right type.
            return cls.query.filter_by(id=0), 0
        when = []
        for i in range(len(ids)):
            when.append((ids[i], i))
        # list-of-(value, result) tuples form of CASE (SQLAlchemy 1.x style)
        return cls.query.filter(cls.id.in_(ids)).order_by(
            db.case(when, value=cls.id)), total

    @classmethod
    def before_commit(cls, session):
        # Snapshot pending changes: after the commit, session.new/dirty/
        # deleted are emptied, so record them here for after_commit.
        session._changes = {
            'add': list(session.new),
            'update': list(session.dirty),
            'delete': list(session.deleted)
        }

    @classmethod
    def after_commit(cls, session):
        # Sync the index only after the commit succeeded, so the index
        # never reflects data that was rolled back.
        for obj in session._changes['add']:
            if isinstance(obj, SearchableMixin):
                add_to_index(obj.__tablename__, obj)
        for obj in session._changes['update']:
            if isinstance(obj, SearchableMixin):
                add_to_index(obj.__tablename__, obj)
        for obj in session._changes['delete']:
            if isinstance(obj, SearchableMixin):
                remove_from_index(obj.__tablename__, obj)
        session._changes = None

    @classmethod
    def reindex(cls):
        """Rebuild the index for every existing row of this model."""
        for obj in cls.query:
            add_to_index(cls.__tablename__, obj)
# Register the commit hooks that keep the search index in sync (see
# SearchableMixin.before_commit / after_commit above).
db.event.listen(db.session, 'before_commit', SearchableMixin.before_commit)
db.event.listen(db.session, 'after_commit', SearchableMixin.after_commit)
##Pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-viii-followers
# Self-referential many-to-many association table backing User.followed.
followers = db.Table('followers',
    db.Column('follower_id', db.Integer, db.ForeignKey('user.id')),
    db.Column('followed_id', db.Integer, db.ForeignKey('user.id'))
)
class User(UserMixin, db.Model):
    """Application user.

    Also plays the role of a customer: Order rows point back here via the
    'customer' backref, and the address/city/postalcode/country columns
    hold shipping details.
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), index=True, unique=True)
    customername = db.Column(db.String(255), index=True)
    lastname = db.Column(db.String(255), index=True)
    middlename = db.Column(db.String(255))
    firstname = db.Column(db.String(255), index=True)
    email = db.Column(db.String(120), index=True, unique=True)
    password_hash = db.Column(db.String(128))  # never store the raw password
    about_me = db.Column(db.String(255))
    last_seen = db.Column(db.DateTime, default=datetime.utcnow)
    address = db.Column(db.String(255), index=True)
    city = db.Column(db.String(255), index=True)
    postalcode = db.Column(db.String(255), index=True)
    country = db.Column(db.String(255), index=True)
    orders = db.relationship('Order', backref='customer', lazy='dynamic')
    posts = db.relationship('Post', backref='author', lazy='dynamic')
    reviews = db.relationship('Review', backref='user', lazy='dynamic')
    ##Pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-viii-followers
    # Self-referential many-to-many through the 'followers' table:
    # self.followed = users this user follows; backref 'followers' = users
    # following this user.
    followed = db.relationship(
        'User', secondary=followers,
        primaryjoin=(followers.c.follower_id == id),
        secondaryjoin=(followers.c.followed_id == id),
        backref=db.backref('followers', lazy='dynamic'), lazy='dynamic')

    def __repr__(self):
        return '<User {}>'.format(self.username)

    def set_password(self, password):
        """Hash and store *password*; the plain text is never persisted."""
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def avatar(self, size):
        """Return a Gravatar URL (identicon fallback) sized *size* pixels."""
        digest = md5(self.email.lower().encode('utf-8')).hexdigest()
        return 'https://www.gravatar.com/avatar/{}?d=identicon&s={}'.format(
            digest, size)

    ##Next 3 pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-viii-followers
    def follow(self, user):
        """Start following *user* (no-op if already following)."""
        if not self.is_following(user):
            self.followed.append(user)

    def unfollow(self, user):
        """Stop following *user* (no-op if not following)."""
        if self.is_following(user):
            self.followed.remove(user)

    def is_following(self, user):
        return self.followed.filter(
            followers.c.followed_id == user.id).count() > 0

    def followed_posts(self):
        """Posts by followed users plus this user's own, newest first."""
        followed = Post.query.join(
            followers, (followers.c.followed_id == Post.user_id)).filter(
                followers.c.follower_id == self.id)
        own = Post.query.filter_by(user_id=self.id)
        return followed.union(own).order_by(Post.timestamp.desc())

    ##next 2 pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-x-email-support
    def get_reset_password_token(self, expires_in=600):
        """Return a signed JWT carrying this user's id, valid *expires_in* s.

        NOTE(review): .decode('utf-8') assumes PyJWT < 2.0 where encode()
        returns bytes; PyJWT >= 2.0 already returns str — confirm the
        pinned PyJWT version.
        """
        return jwt.encode(
            {'reset_password': self.id, 'exp': time() + expires_in},
            current_app.config['SECRET_KEY'], algorithm='HS256').decode('utf-8')

    @staticmethod
    def verify_reset_password_token(token):
        """Return the User for a valid reset *token*, or None if the token
        is invalid, expired, or malformed."""
        try:
            id = jwt.decode(token, current_app.config['SECRET_KEY'],
                            algorithms=['HS256'])['reset_password']
        except Exception:
            # was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
            # are not swallowed; any decode failure still yields None.
            return
        return User.query.get(id)

    def to_dict(self, include_email=False):
        """Serialize the public profile; email only when *include_email*."""
        data = {
            'id': self.id,
            'username': self.username,
            'lastname': self.lastname,
            'middlename': self.middlename,
            'firstname': self.firstname,
            'last_seen': self.last_seen.isoformat() + 'Z',
            'about_me': self.about_me,
            'address': self.address,
            'city': self.city,
            'country': self.country,
            'post_count': self.posts.count(),
            'follower_count': self.followers.count(),
            'followed_count': self.followed.count(),
            '_links': {
                'avatar': self.avatar(128)
            }
        }
        if include_email:
            data['email'] = self.email
        return data
class Post(SearchableMixin, db.Model):
    """A short user post (max 140 chars); body is full-text searchable."""
    __searchable__ = ['body']  # columns mirrored into the search index
    body = db.Column(db.String(140))
    id = db.Column(db.Integer, primary_key=True)
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))

    def __repr__(self):
        return '<Post {}>'.format(self.body)
class Category(SearchableMixin, db.Model):
    """Product category; categoryname is unique and full-text searchable."""
    __searchable__ = ['categoryname']
    id = db.Column(db.Integer, primary_key=True)
    categoryname = db.Column(db.String(255), index=True, unique=True)
    description = db.Column(db.Text(500))
    products = db.relationship('Product', backref='category', lazy='dynamic')
class Employee(db.Model):
    """Staff account with its own login (employeeID + password hash),
    separate from the public User accounts."""
    id = db.Column(db.Integer, primary_key=True)
    employeeID = db.Column(db.Integer, index=True, unique=True)  # badge/login id
    password_hash = db.Column(db.String(128))
    lastname = db.Column(db.String(255), index=True)
    firstname = db.Column(db.String(255), index=True)
    notes = db.Column(db.Text(1000))

    def __repr__(self):
        return '<Employee {}>'.format(self.id)

    def set_password(self, password):
        """Hash and store *password*."""
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def avatar(self, size):
        # NOTE(review): Employee has no `email` column, so this raises
        # AttributeError if called — likely copied from User; either add an
        # email column or key the Gravatar digest on another field.
        digest = md5(self.email.lower().encode('utf-8')).hexdigest()
        return 'https://www.gravatar.com/avatar/{}?d=identicon&s={}'.format(
            digest, size)
class Order(db.Model):
    """A customer order; line items live in OrderDetail.

    The ordering User is reachable as .customer (backref from User.orders)
    and the carrier as .shipper (backref from Shipper.orders).
    """
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    orderdate = db.Column(db.Date)
    shipper_id = db.Column(db.Integer, db.ForeignKey('shipper.id'))
    orderdetails = db.relationship('OrderDetail', backref='order', lazy='dynamic')
class OrderDetail(db.Model):
    """One line item of an Order: a product and its quantity."""
    id = db.Column(db.Integer, primary_key=True)
    order_id = db.Column(db.Integer, db.ForeignKey('order.id'))
    product_id = db.Column(db.Integer, db.ForeignKey('product.id'))
    quantity = db.Column(db.Integer)

    def to_dict(self):
        """Serialize this line item.

        NOTE(review): 'order' and 'product' are ORM objects (backrefs), not
        plain values, so this dict is not directly JSON-serializable —
        verify how callers consume it.
        """
        data = {
            'id': self.id,
            'order_id': self.order_id,
            'product_id': self.product_id,
            'order': self.order,
            'product': self.product,
            'quantity': self.quantity
        }
        return data
class Product(SearchableMixin, db.Model):
    """A sellable product; productname is full-text searchable."""
    __searchable__ = ['productname']
    id = db.Column(db.Integer, primary_key=True)
    productname = db.Column(db.String(255), index=True)
    supplier_id = db.Column(db.Integer, db.ForeignKey('supplier.id'))
    category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
    unit = db.Column(db.Integer)
    price = db.Column(db.Numeric(scale=2, asdecimal=True))
    created = db.Column(db.DateTime, default=datetime.utcnow)
    orderdetails = db.relationship('OrderDetail', backref='product', lazy='dynamic')
    reviews = db.relationship('Review', backref='product', lazy='dynamic')

    def get_rating(self):
        """Return the mean review rating as a float, or 0.0 when the
        product has no reviews.

        (The original divided by len(ratings) unconditionally and raised
        ZeroDivisionError for unreviewed products.)
        """
        ratings = [rev.rating for rev in self.reviews]
        if not ratings:
            return 0.0
        return sum(ratings) / float(len(ratings))
class Shipper(db.Model):
    """Carrier company responsible for delivering orders."""
    id = db.Column(db.Integer, primary_key=True)
    shippername = db.Column(db.String(255), index=True)
    phone = db.Column(db.String(25), index=True)
    orders = db.relationship('Order', backref='shipper', lazy='dynamic')
class Supplier(SearchableMixin, db.Model):
    """Company supplying products; suppliername is full-text searchable."""
    __searchable__ = ['suppliername']
    id = db.Column(db.Integer, primary_key=True)
    suppliername = db.Column(db.String(255), index=True)
    contactname = db.Column(db.String(255), index=True)
    address = db.Column(db.String(255), index=True)
    city = db.Column(db.String(255), index=True)
    postalcode = db.Column(db.String(255), index=True)
    country = db.Column(db.String(255), index=True)
    phone = db.Column(db.String(25), index=True)
    products = db.relationship('Product', backref='supplier', lazy='dynamic')
class Review(db.Model):
    """A user's rating/review of a product."""
    id = db.Column(db.Integer, primary_key=True)
    rating = db.Column(db.Integer, index=True)
    review = db.Column(db.Text(1000))
    comments = db.Column(db.Text(300))
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    product_id = db.Column(db.Integer, db.ForeignKey('product.id'))

    def top_rated(self):
        """Return one review per product, ordered by the product's average
        rating, highest first.

        The original referenced an undefined name ``func`` and called
        ``avg()`` with no column (NameError at call time); use SQLAlchemy's
        ``db.func.avg`` on the rating column instead.
        """
        r = Review.query.group_by(Review.product_id).order_by(
            db.func.avg(Review.rating).desc()).all()
        return r
@login.user_loader
def load_user(id):
    """Flask-Login hook: resolve the session's stored id to a User row."""
    return User.query.get(int(id))
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,135
|
grbarker/Freyja
|
refs/heads/master
|
/db_populator5in1.py
|
from datetime import datetime
from app import db
from app.models import User, Category, Supplier, Shipper, Employee
u0 = User(username="MariaAnders",customername="Alfreds Futterkiste",lastname=None,middlename=None,firstname="Maria",email="AlfredsFutterkiste@example.com",address="Obere Str. 57",city="Berlin",postalcode="12209",country="Germany")
u0.set_password("Maria")
db.session.add(u0)
u1 = User(username="AnaTrujillo",customername="Ana Trujillo Emparedados y helados",lastname=None,middlename=None,firstname="Ana",email="AnaTrujilloEmparedadosyhelados@example.com",address="Avda. de la Constitución 2222",city="México D.F.",postalcode="05021",country="Mexico")
u1.set_password("Ana")
db.session.add(u1)
u2 = User(username="AntonioMoreno",customername="Antonio Moreno Taquería",lastname=None,middlename=None,firstname="Antonio",email="AntonioMorenoTaquería@example.com",address="Mataderos 2312",city="México D.F.",postalcode="05023",country="Mexico")
u2.set_password("Antonio")
db.session.add(u2)
u3 = User(username="ThomasHardy",customername="Around the Horn",lastname=None,middlename=None,firstname="Thomas",email="AroundtheHorn@example.com",address="120 Hanover Sq.",city="London",postalcode="WA1 1DP",country="UK")
u3.set_password("Thomas")
db.session.add(u3)
u4 = User(username="ChristinaBerglund",customername="Berglunds snabbköp",lastname=None,middlename=None,firstname="Christina",email="Berglundssnabbköp@example.com",address="Berguvsvägen 8",city="Luleå",postalcode="S-958 22",country="Sweden")
u4.set_password("Christina")
db.session.add(u4)
u5 = User(username="HannaMoos",customername="Blauer See Delikatessen",lastname=None,middlename=None,firstname="Hanna",email="BlauerSeeDelikatessen@example.com",address="Forsterstr. 57",city="Mannheim",postalcode="68306",country="Germany")
u5.set_password("Hanna")
db.session.add(u5)
u6 = User(username="FrédériqueCiteaux",customername="Blondel père et fils",lastname=None,middlename=None,firstname="Frédérique",email="Blondelpèreetfils@example.com",address="24, place Kléber",city="Strasbourg",postalcode="67000",country="France")
u6.set_password("Frédérique")
db.session.add(u6)
u7 = User(username="MartínSommer",customername="Bólido Comidas preparadas",lastname=None,middlename=None,firstname="Martín",email="BólidoComidaspreparadas@example.com",address="C/ Araquil, 67",city="Madrid",postalcode="28023",country="Spain")
u7.set_password("Martín")
db.session.add(u7)
u8 = User(username="LaurenceLebihans",customername="Bon app'",lastname=None,middlename=None,firstname="Laurence",email="Bonapp'@example.com",address="12, rue des Bouchers",city="Marseille",postalcode="13008",country="France")
u8.set_password("Laurence")
db.session.add(u8)
u9 = User(username="ElizabethLincoln",customername="Bottom-Dollar Marketse",lastname=None,middlename=None,firstname="Elizabeth",email="Bottom-DollarMarketse@example.com",address="23 Tsawassen Blvd.",city="Tsawassen",postalcode="T2F 8M4",country="Canada")
u9.set_password("Elizabeth")
db.session.add(u9)
u10 = User(username="VictoriaAshworth",customername="B's Beverages",lastname=None,middlename=None,firstname="Victoria",email="B'sBeverages@example.com",address="Fauntleroy Circus",city="London",postalcode="EC2 5NT",country="UK")
u10.set_password("Victoria")
db.session.add(u10)
u11 = User(username="PatricioSimpson",customername="Cactus Comidas para llevar",lastname=None,middlename=None,firstname="Patricio",email="CactusComidasparallevar@example.com",address="Cerrito 333",city="Buenos Aires",postalcode="1010",country="Argentina")
u11.set_password("Patricio")
db.session.add(u11)
u12 = User(username="FranciscoChang",customername="Centro comercial Moctezuma",lastname=None,middlename=None,firstname="Francisco",email="CentrocomercialMoctezuma@example.com",address="Sierras de Granada 9993",city="México D.F.",postalcode="05022",country="Mexico")
u12.set_password("Francisco")
db.session.add(u12)
u13 = User(username="YangWang",customername="Chop-suey Chinese",lastname=None,middlename=None,firstname="Yang",email="Chop-sueyChinese@example.com",address="Hauptstr. 29",city="Bern",postalcode="3012",country="Switzerland")
u13.set_password("Yang")
db.session.add(u13)
u14 = User(username="PedroAfonso",customername="Comércio Mineiro",lastname=None,middlename=None,firstname="Pedro",email="ComércioMineiro@example.com",address="Av. dos Lusíadas, 23",city="São Paulo",postalcode="05432-043",country="Brazil")
u14.set_password("Pedro")
db.session.add(u14)
u15 = User(username="ElizabethBrown",customername="Consolidated Holdings",lastname=None,middlename=None,firstname="Elizabeth",email="ConsolidatedHoldings@example.com",address="Berkeley Gardens 12 Brewery ",city="London",postalcode="WX1 6LT",country="UK")
u15.set_password("Elizabeth")
db.session.add(u15)
u16 = User(username="SvenOttlieb",customername="Drachenblut Delikatessend",lastname=None,middlename=None,firstname="Sven",email="DrachenblutDelikatessend@example.com",address="Walserweg 21",city="Aachen",postalcode="52066",country="Germany")
u16.set_password("Sven")
db.session.add(u16)
u17 = User(username="JanineLabrune",customername="Du monde entier",lastname=None,middlename=None,firstname="Janine",email="Dumondeentier@example.com",address="67, rue des Cinquante Otages",city="Nantes",postalcode="44000",country="France")
u17.set_password("Janine")
db.session.add(u17)
u18 = User(username="AnnDevon",customername="Eastern Connection",lastname=None,middlename=None,firstname="Ann",email="EasternConnection@example.com",address="35 King George",city="London",postalcode="WX3 6FW",country="UK")
u18.set_password("Ann")
db.session.add(u18)
u19 = User(username="RolandMendel",customername="Ernst Handel",lastname=None,middlename=None,firstname="Roland",email="ErnstHandel@example.com",address="Kirchgasse 6",city="Graz",postalcode="8010",country="Austria")
u19.set_password("Roland")
db.session.add(u19)
u20 = User(username="AriaCruz",customername="Familia Arquibaldo",lastname=None,middlename=None,firstname="Aria",email="FamiliaArquibaldo@example.com",address="Rua Orós, 92",city="São Paulo",postalcode="05442-030",country="Brazil")
u20.set_password("Aria")
db.session.add(u20)
u21 = User(username="DiegoRoel",customername="FISSA Fabrica Inter. Salchichas S.A.",lastname=None,middlename=None,firstname="Diego",email="FISSAFabricaInter.SalchichasS.A.@example.com",address="C/ Moralzarzal, 86",city="Madrid",postalcode="28034",country="Spain")
u21.set_password("Diego")
db.session.add(u21)
u22 = User(username="MartineRancé",customername="Folies gourmandes",lastname=None,middlename=None,firstname="Martine",email="Foliesgourmandes@example.com",address="184, chaussée de Tournai",city="Lille",postalcode="59000",country="France")
u22.set_password("Martine")
db.session.add(u22)
u23 = User(username="MariaLarsson",customername="Folk och fä HB",lastname=None,middlename=None,firstname="Maria",email="FolkochfäHB@example.com",address="Åkergatan 24",city="Bräcke",postalcode="S-844 67",country="Sweden")
u23.set_password("Maria")
db.session.add(u23)
u24 = User(username="PeterFranken",customername="Frankenversand",lastname=None,middlename=None,firstname="Peter",email="Frankenversand@example.com",address="Berliner Platz 43",city="München",postalcode="80805",country="Germany")
u24.set_password("Peter")
db.session.add(u24)
u25 = User(username="CarineSchmitt",customername="France restauration",lastname=None,middlename=None,firstname="Carine",email="Francerestauration@example.com",address="54, rue Royale",city="Nantes",postalcode="44000",country="France")
u25.set_password("Carine")
db.session.add(u25)
u26 = User(username="PaoloAccorti",customername="Franchi S.p.A.",lastname=None,middlename=None,firstname="Paolo",email="FranchiS.p.A.@example.com",address="Via Monte Bianco 34",city="Torino",postalcode="10100",country="Italy")
u26.set_password("Paolo")
db.session.add(u26)
u27 = User(username="LinoRodriguez",customername="Furia Bacalhau e Frutos do Mar",lastname=None,middlename=None,firstname="Lino",email="FuriaBacalhaueFrutosdoMar@example.com",address="Jardim das rosas n. 32",city="Lisboa",postalcode="1675",country="Portugal")
u27.set_password("Lino")
db.session.add(u27)
u28 = User(username="EduardoSaavedra",customername="Galería del gastrónomo",lastname=None,middlename=None,firstname="Eduardo",email="Galeríadelgastrónomo@example.com",address="Rambla de Cataluña, 23",city="Barcelona",postalcode="08022",country="Spain")
u28.set_password("Eduardo")
db.session.add(u28)
u29 = User(username="JoséPedroFreyre",customername="Godos Cocina Típica",lastname="Freyre",middlename="Pedro",firstname="José",email="GodosCocinaTípica@example.com",address="C/ Romero, 33",city="Sevilla",postalcode="41101",country="Spain")
u29.set_password("José")
db.session.add(u29)
u30 = User(username="AndréFonseca",customername="Gourmet Lanchonetes",lastname=None,middlename=None,firstname="André",email="GourmetLanchonetes@example.com",address="Av. Brasil, 442",city="Campinas",postalcode="04876-786",country="Brazil")
u30.set_password("André")
db.session.add(u30)
u31 = User(username="HowardSnyder",customername="Great Lakes Food Market",lastname=None,middlename=None,firstname="Howard",email="GreatLakesFoodMarket@example.com",address="2732 Baker Blvd.",city="Eugene",postalcode="97403",country="USA")
u31.set_password("Howard")
db.session.add(u31)
u32 = User(username="ManuelPereira",customername="GROSELLA-Restaurante",lastname=None,middlename=None,firstname="Manuel",email="GROSELLA-Restaurante@example.com",address="5ª Ave. Los Palos Grandes",city="Caracas",postalcode="1081",country="Venezuela")
u32.set_password("Manuel")
db.session.add(u32)
u33 = User(username="MarioPontes",customername="Hanari Carnes",lastname=None,middlename=None,firstname="Mario",email="HanariCarnes@example.com",address="Rua do Paço, 67",city="Rio de Janeiro",postalcode="05454-876",country="Brazil")
u33.set_password("Mario")
db.session.add(u33)
u34 = User(username="CarlosHernández",customername="HILARIÓN-Abastos",lastname=None,middlename=None,firstname="Carlos",email="HILARIÓN-Abastos@example.com",address="Carrera 22 con Ave. Carlos Soublette #8-35",city="San Cristóbal",postalcode="5022",country="Venezuela")
u34.set_password("Carlos")
db.session.add(u34)
u35 = User(username="YoshiLatimer",customername="Hungry Coyote Import Store",lastname=None,middlename=None,firstname="Yoshi",email="HungryCoyoteImportStore@example.com",address="City Center Plaza 516 Main St.",city="Elgin",postalcode="97827",country="USA")
u35.set_password("Yoshi")
db.session.add(u35)
u36 = User(username="PatriciaMcKenna",customername="Hungry Owl All-Night Grocers",lastname=None,middlename=None,firstname="Patricia",email="HungryOwlAll-NightGrocers@example.com",address="8 Johnstown Road",city="Cork",postalcode="",country="Ireland")
u36.set_password("Patricia")
db.session.add(u36)
u37 = User(username="HelenBennett",customername="Island Trading",lastname=None,middlename=None,firstname="Helen",email="IslandTrading@example.com",address="Garden House Crowther Way",city="Cowes",postalcode="PO31 7PJ",country="UK")
u37.set_password("Helen")
db.session.add(u37)
u38 = User(username="PhilipCramer",customername="Königlich Essen",lastname=None,middlename=None,firstname="Philip",email="KöniglichEssen@example.com",address="Maubelstr. 90",city="Brandenburg",postalcode="14776",country="Germany")
u38.set_password("Philip")
db.session.add(u38)
u39 = User(username="DanielTonini",customername="La corne d'abondance",lastname=None,middlename=None,firstname="Daniel",email="Lacorned'abondance@example.com",address="67, avenue de l'Europe",city="Versailles",postalcode="78000",country="France")
u39.set_password("Daniel")
db.session.add(u39)
u40 = User(username="AnnetteRoulet",customername="La maison d'Asie",lastname=None,middlename=None,firstname="Annette",email="Lamaisond'Asie@example.com",address="1 rue Alsace-Lorraine",city="Toulouse",postalcode="31000",country="France")
u40.set_password("Annette")
db.session.add(u40)
u41 = User(username="YoshiTannamuri",customername="Laughing Bacchus Wine Cellars",lastname=None,middlename=None,firstname="Yoshi",email="LaughingBacchusWineCellars@example.com",address="1900 Oak St.",city="Vancouver",postalcode="V3F 2K1",country="Canada")
u41.set_password("Yoshi")
db.session.add(u41)
u42 = User(username="JohnSteel",customername="Lazy K Kountry Store",lastname=None,middlename=None,firstname="John",email="LazyKKountryStore@example.com",address="12 Orchestra Terrace",city="Walla Walla",postalcode="99362",country="USA")
u42.set_password("John")
db.session.add(u42)
u43 = User(username="RenateMessner",customername="Lehmanns Marktstand",lastname=None,middlename=None,firstname="Renate",email="LehmannsMarktstand@example.com",address="Magazinweg 7",city="Frankfurt a.M. ",postalcode="60528",country="Germany")
u43.set_password("Renate")
db.session.add(u43)
u44 = User(username="JaimeYorres",customername="Let's Stop N Shop",lastname=None,middlename=None,firstname="Jaime",email="Let'sStopNShop@example.com",address="87 Polk St. Suite 5",city="San Francisco",postalcode="94117",country="USA")
u44.set_password("Jaime")
db.session.add(u44)
u45 = User(username="CarlosGonzález",customername="LILA-Supermercado",lastname=None,middlename=None,firstname="Carlos",email="LILA-Supermercado@example.com",address="Carrera 52 con Ave. Bolívar #65-98 Llano Largo",city="Barquisimeto",postalcode="3508",country="Venezuela")
u45.set_password("Carlos")
db.session.add(u45)
u46 = User(username="FelipeIzquierdo",customername="LINO-Delicateses",lastname=None,middlename=None,firstname="Felipe",email="LINO-Delicateses@example.com",address="Ave. 5 de Mayo Porlamar",city="I. de Margarita",postalcode="4980",country="Venezuela")
u46.set_password("Felipe")
db.session.add(u46)
u47 = User(username="FranWilson",customername="Lonesome Pine Restaurant",lastname=None,middlename=None,firstname="Fran",email="LonesomePineRestaurant@example.com",address="89 Chiaroscuro Rd.",city="Portland",postalcode="97219",country="USA")
u47.set_password("Fran")
db.session.add(u47)
u48 = User(username="GiovanniRovelli",customername="Magazzini Alimentari Riuniti",lastname=None,middlename=None,firstname="Giovanni",email="MagazziniAlimentariRiuniti@example.com",address="Via Ludovico il Moro 22",city="Bergamo",postalcode="24100",country="Italy")
u48.set_password("Giovanni")
db.session.add(u48)
u49 = User(username="CatherineDewey",customername="Maison Dewey",lastname=None,middlename=None,firstname="Catherine",email="MaisonDewey@example.com",address="Rue Joseph-Bens 532",city="Bruxelles",postalcode="B-1180",country="Belgium")
u49.set_password("Catherine")
db.session.add(u49)
u50 = User(username="JeanFresnière",customername="Mère Paillarde",lastname=None,middlename=None,firstname="Jean",email="MèrePaillarde@example.com",address="43 rue St. Laurent",city="Montréal",postalcode="H1J 1C3",country="Canada")
u50.set_password("Jean")
db.session.add(u50)
u51 = User(username="AlexanderFeuer",customername="Morgenstern Gesundkost",lastname=None,middlename=None,firstname="Alexander",email="MorgensternGesundkost@example.com",address="Heerstr. 22",city="Leipzig",postalcode="04179",country="Germany")
u51.set_password("Alexander")
db.session.add(u51)
u52 = User(username="SimonCrowther",customername="North/South",lastname=None,middlename=None,firstname="Simon",email="North/South@example.com",address="South House 300 Queensbridge",city="London",postalcode="SW7 1RZ",country="UK")
u52.set_password("Simon")
db.session.add(u52)
u53 = User(username="YvonneMoncada",customername="Océano Atlántico Ltda.",lastname=None,middlename=None,firstname="Yvonne",email="OcéanoAtlánticoLtda.@example.com",address="Ing. Gustavo Moncada 8585 Piso 20-A",city="Buenos Aires",postalcode="1010",country="Argentina")
u53.set_password("Yvonne")
db.session.add(u53)
u54 = User(username="RenePhillips",customername="Old World Delicatessen",lastname=None,middlename=None,firstname="Rene",email="OldWorldDelicatessen@example.com",address="2743 Bering St.",city="Anchorage",postalcode="99508",country="USA")
u54.set_password("Rene")
db.session.add(u54)
u55 = User(username="HenriettePfalzheim",customername="Ottilies Käseladen",lastname=None,middlename=None,firstname="Henriette",email="OttiliesKäseladen@example.com",address="Mehrheimerstr. 369",city="Köln",postalcode="50739",country="Germany")
u55.set_password("Henriette")
db.session.add(u55)
u56 = User(username="MarieBertrand",customername="Paris spécialités",lastname=None,middlename=None,firstname="Marie",email="Parisspécialités@example.com",address="265, boulevard Charonne",city="Paris",postalcode="75012",country="France")
u56.set_password("Marie")
db.session.add(u56)
u57 = User(username="GuillermoFernández",customername="Pericles Comidas clásicas",lastname=None,middlename=None,firstname="Guillermo",email="PericlesComidasclásicas@example.com",address="Calle Dr. Jorge Cash 321",city="México D.F.",postalcode="05033",country="Mexico")
u57.set_password("Guillermo")
db.session.add(u57)
u58 = User(username="GeorgPipps",customername="Piccolo und mehr",lastname=None,middlename=None,firstname="Georg",email="Piccoloundmehr@example.com",address="Geislweg 14",city="Salzburg",postalcode="5020",country="Austria")
u58.set_password("Georg")
db.session.add(u58)
u59 = User(username="IsabeldeCastro",customername="Princesa Isabel Vinhoss",lastname="Castro",middlename="de",firstname="Isabel",email="PrincesaIsabelVinhoss@example.com",address="Estrada da saúde n. 58",city="Lisboa",postalcode="1756",country="Portugal")
u59.set_password("Isabel")
db.session.add(u59)
u60 = User(username="BernardoBatista",customername="Que Delícia",lastname=None,middlename=None,firstname="Bernardo",email="QueDelícia@example.com",address="Rua da Panificadora, 12",city="Rio de Janeiro",postalcode="02389-673",country="Brazil")
u60.set_password("Bernardo")
db.session.add(u60)
u61 = User(username="LúciaCarvalho",customername="Queen Cozinha",lastname=None,middlename=None,firstname="Lúcia",email="QueenCozinha@example.com",address="Alameda dos Canàrios, 891",city="São Paulo",postalcode="05487-020",country="Brazil")
u61.set_password("Lúcia")
db.session.add(u61)
u62 = User(username="HorstKloss",customername="QUICK-Stop",lastname=None,middlename=None,firstname="Horst",email="QUICK-Stop@example.com",address="Taucherstraße 10",city="Cunewalde",postalcode="01307",country="Germany")
u62.set_password("Horst")
db.session.add(u62)
u63 = User(username="SergioGutiérrez",customername="Rancho grande",lastname=None,middlename=None,firstname="Sergio",email="Ranchogrande@example.com",address="Av. del Libertador 900",city="Buenos Aires",postalcode="1010",country="Argentina")
u63.set_password("Sergio")
db.session.add(u63)
u64 = User(username="PaulaWilson",customername="Rattlesnake Canyon Grocery",lastname=None,middlename=None,firstname="Paula",email="RattlesnakeCanyonGrocery@example.com",address="2817 Milton Dr.",city="Albuquerque",postalcode="87110",country="USA")
u64.set_password("Paula")
db.session.add(u64)
u65 = User(username="MaurizioMoroni",customername="Reggiani Caseifici",lastname=None,middlename=None,firstname="Maurizio",email="ReggianiCaseifici@example.com",address="Strada Provinciale 124",city="Reggio Emilia",postalcode="42100",country="Italy")
u65.set_password("Maurizio")
db.session.add(u65)
u66 = User(username="JaneteLimeira",customername="Ricardo Adocicados",lastname=None,middlename=None,firstname="Janete",email="RicardoAdocicados@example.com",address="Av. Copacabana, 267",city="Rio de Janeiro",postalcode="02389-890",country="Brazil")
u66.set_password("Janete")
db.session.add(u66)
u67 = User(username="MichaelHolz",customername="Richter Supermarkt",lastname=None,middlename=None,firstname="Michael",email="RichterSupermarkt@example.com",address="Grenzacherweg 237",city="Genève",postalcode="1203",country="Switzerland")
u67.set_password("Michael")
db.session.add(u67)
u68 = User(username="AlejandraCamino",customername="Romero y tomillo",lastname=None,middlename=None,firstname="Alejandra",email="Romeroytomillo@example.com",address="Gran Vía, 1",city="Madrid",postalcode="28001",country="Spain")
u68.set_password("Alejandra")
db.session.add(u68)
u69 = User(username="JonasBergulfsen",customername="Santé Gourmet",lastname=None,middlename=None,firstname="Jonas",email="SantéGourmet@example.com",address="Erling Skakkes gate 78",city="Stavern",postalcode="4110",country="Norway")
u69.set_password("Jonas")
db.session.add(u69)
u70 = User(username="JosePavarotti",customername="Save-a-lot Markets",lastname=None,middlename=None,firstname="Jose",email="Save-a-lotMarkets@example.com",address="187 Suffolk Ln.",city="Boise",postalcode="83720",country="USA")
u70.set_password("Jose")
db.session.add(u70)
u71 = User(username="HariKumar",customername="Seven Seas Imports",lastname=None,middlename=None,firstname="Hari",email="SevenSeasImports@example.com",address="90 Wadhurst Rd.",city="London",postalcode="OX15 4NB",country="UK")
u71.set_password("Hari")
db.session.add(u71)
u72 = User(username="JyttePetersen",customername="Simons bistro",lastname=None,middlename=None,firstname="Jytte",email="Simonsbistro@example.com",address="Vinbæltet 34",city="København",postalcode="1734",country="Denmark")
u72.set_password("Jytte")
db.session.add(u72)
u73 = User(username="DominiquePerrier",customername="Spécialités du monde",lastname=None,middlename=None,firstname="Dominique",email="Spécialitésdumonde@example.com",address="25, rue Lauriston",city="Paris",postalcode="75016",country="France")
u73.set_password("Dominique")
db.session.add(u73)
u74 = User(username="ArtBraunschweiger",customername="Split Rail Beer & Ale",lastname=None,middlename=None,firstname="Art",email="SplitRailBeer&Ale@example.com",address="P.O. Box 555",city="Lander",postalcode="82520",country="USA")
u74.set_password("Art")
db.session.add(u74)
u75 = User(username="PascaleCartrain",customername="Suprêmes délices",lastname=None,middlename=None,firstname="Pascale",email="Suprêmesdélices@example.com",address="Boulevard Tirou, 255",city="Charleroi",postalcode="B-6000",country="Belgium")
u75.set_password("Pascale")
db.session.add(u75)
u76 = User(username="LizNixon",customername="The Big Cheese",lastname=None,middlename=None,firstname="Liz",email="TheBigCheese@example.com",address="89 Jefferson Way Suite 2",city="Portland",postalcode="97201",country="USA")
u76.set_password("Liz")
db.session.add(u76)
u77 = User(username="LiuWong",customername="The Cracker Box",lastname=None,middlename=None,firstname="Liu",email="TheCrackerBox@example.com",address="55 Grizzly Peak Rd.",city="Butte",postalcode="59801",country="USA")
u77.set_password("Liu")
db.session.add(u77)
u78 = User(username="KarinJosephs",customername="Toms Spezialitäten",lastname=None,middlename=None,firstname="Karin",email="TomsSpezialitäten@example.com",address="Luisenstr. 48",city="Münster",postalcode="44087",country="Germany")
u78.set_password("Karin")
db.session.add(u78)
u79 = User(username="MiguelAngelPaolino",customername="Tortuga Restaurante",lastname="Paolino",middlename="Angel",firstname="Miguel",email="TortugaRestaurante@example.com",address="Avda. Azteca 123",city="México D.F.",postalcode="05033",country="Mexico")
u79.set_password("Miguel")
db.session.add(u79)
u80 = User(username="AnabelaDomingues",customername="Tradição Hipermercados",lastname=None,middlename=None,firstname="Anabela",email="TradiçãoHipermercados@example.com",address="Av. Inês de Castro, 414",city="São Paulo",postalcode="05634-030",country="Brazil")
u80.set_password("Anabela")
db.session.add(u80)
u81 = User(username="HelvetiusNagy",customername="Trail's Head Gourmet Provisioners",lastname=None,middlename=None,firstname="Helvetius",email="Trail'sHeadGourmetProvisioners@example.com",address="722 DaVinci Blvd.",city="Kirkland",postalcode="98034",country="USA")
u81.set_password("Helvetius")
db.session.add(u81)
u82 = User(username="PalleIbsen",customername="Vaffeljernet",lastname=None,middlename=None,firstname="Palle",email="Vaffeljernet@example.com",address="Smagsløget 45",city="Århus",postalcode="8200",country="Denmark")
u82.set_password("Palle")
db.session.add(u82)
u83 = User(username="MarySaveley",customername="Victuailles en stock",lastname=None,middlename=None,firstname="Mary",email="Victuaillesenstock@example.com",address="2, rue du Commerce",city="Lyon",postalcode="69004",country="France")
u83.set_password("Mary")
db.session.add(u83)
u84 = User(username="PaulHenriot",customername="Vins et alcools Chevalier",lastname=None,middlename=None,firstname="Paul",email="VinsetalcoolsChevalier@example.com",address="59 rue de l'Abbaye",city="Reims",postalcode="51100",country="France")
u84.set_password("Paul")
db.session.add(u84)
u85 = User(username="RitaMüller",customername="Die Wandernde Kuh",lastname=None,middlename=None,firstname="Rita",email="DieWanderndeKuh@example.com",address="Adenauerallee 900",city="Stuttgart",postalcode="70563",country="Germany")
u85.set_password("Rita")
db.session.add(u85)
u86 = User(username="PirkkoKoskitalo",customername="Wartian Herkku",lastname=None,middlename=None,firstname="Pirkko",email="WartianHerkku@example.com",address="Torikatu 38",city="Oulu",postalcode="90110",country="Finland")
u86.set_password("Pirkko")
db.session.add(u86)
u87 = User(username="PaulaParente",customername="Wellington Importadora",lastname=None,middlename=None,firstname="Paula",email="WellingtonImportadora@example.com",address="Rua do Mercado, 12",city="Resende",postalcode="08737-363",country="Brazil")
u87.set_password("Paula")
db.session.add(u87)
u88 = User(username="KarlJablonski",customername="White Clover Markets",lastname=None,middlename=None,firstname="Karl",email="WhiteCloverMarkets@example.com",address="305 - 14th Ave. S. Suite 3B",city="Seattle",postalcode="98128",country="USA")
u88.set_password("Karl")
db.session.add(u88)
u89 = User(username="MattiKarttunen",customername="Wilman Kala",lastname=None,middlename=None,firstname="Matti",email="WilmanKala@example.com",address="Keskuskatu 45",city="Helsinki",postalcode="21240",country="Finland")
u89.set_password("Matti")
db.session.add(u89)
u90 = User(username="Zbyszek",customername="Wolski",lastname=None,middlename=None,firstname="Zbyszek",email="Wolski@example.com",address="ul. Filtrowa 68",city="Walla",postalcode="01-012",country="Poland")
u90.set_password("Zbyszek")
db.session.add(u90)
db.session.commit()
su1 = Supplier(suppliername="Exotic Liquid",contactname="Charlotte Cooper",address="49 Gilbert St.",city="Londona",postalcode="EC1 4SD",country="UK",phone="(171) 555-2222")
db.session.add(su1)
su2 = Supplier(suppliername="New Orleans Cajun Delights",contactname="Shelley Burke",address="P.O. Box 78934",city="New Orleans",postalcode="70117",country="USA",phone="(100) 555-4822")
db.session.add(su2)
su3 = Supplier(suppliername="Grandma Kelly's Homestead",contactname="Regina Murphy",address="707 Oxford Rd.",city="Ann Arbor",postalcode="48104",country="USA",phone="(313) 555-5735")
db.session.add(su3)
su4 = Supplier(suppliername="Tokyo Traders",contactname="Yoshi Nagase",address="9-8 Sekimai Musashino-shi",city="Tokyo",postalcode="100",country="Japan",phone="(03) 3555-5011")
db.session.add(su4)
su5 = Supplier(suppliername="Cooperativa de Quesos 'Las Cabras'",contactname="Antonio del Valle Saavedra ",address="Calle del Rosal 4",city="Oviedo",postalcode="33007",country="Spain",phone="(98) 598 76 54")
db.session.add(su5)
su6 = Supplier(suppliername="Mayumi's",contactname="Mayumi Ohno",address="92 Setsuko Chuo-ku",city="Osaka",postalcode="545",country="Japan",phone="(06) 431-7877")
db.session.add(su6)
su7 = Supplier(suppliername="Pavlova, Ltd.",contactname="Ian Devling",address="74 Rose St. Moonie Ponds",city="Melbourne",postalcode="3058",country="Australia",phone="(03) 444-2343")
db.session.add(su7)
su8 = Supplier(suppliername="Specialty Biscuits, Ltd.",contactname="Peter Wilson",address="29 King's Way",city="Manchester",postalcode="M14 GSD",country="UK",phone="(161) 555-4448")
db.session.add(su8)
su9 = Supplier(suppliername="PB Knäckebröd AB",contactname="Lars Peterson",address="Kaloadagatan 13",city="Göteborg",postalcode="S-345 67",country="Sweden ",phone="031-987 65 43")
db.session.add(su9)
su10 = Supplier(suppliername="Refrescos Americanas LTDA",contactname="Carlos Diaz",address="Av. das Americanas 12.890",city="São Paulo",postalcode="5442",country="Brazil",phone="(11) 555 4640")
db.session.add(su10)
su11 = Supplier(suppliername="Heli Süßwaren GmbH & Co. KG",contactname="Petra Winkler",address="Tiergartenstraße 5",city="Berlin",postalcode="10785",country="Germany",phone="(010) 9984510")
db.session.add(su11)
su12 = Supplier(suppliername="Plutzer Lebensmittelgroßmärkte AG",contactname="Martin Bein",address="Bogenallee 51",city="Frankfurt",postalcode="60439",country="Germany",phone="(069) 992755")
db.session.add(su12)
su13 = Supplier(suppliername="Nord-Ost-Fisch Handelsgesellschaft mbH",contactname="Sven Petersen",address="Frahmredder 112a",city="Cuxhaven",postalcode="27478",country="Germany",phone="(04721) 8713")
db.session.add(su13)
su14 = Supplier(suppliername="Formaggi Fortini s.r.l.",contactname="Elio Rossi",address="Viale Dante, 75",city="Ravenna",postalcode="48100",country="Italy",phone="(0544) 60323")
db.session.add(su14)
su15 = Supplier(suppliername="Norske Meierier",contactname="Beate Vileid",address="Hatlevegen 5",city="Sandvika",postalcode="1320",country="Norway",phone="(0)2-953010")
db.session.add(su15)
su16 = Supplier(suppliername="Bigfoot Breweries",contactname="Cheryl Saylor",address="3400 - 8th Avenue Suite 210",city="Bend",postalcode="97101",country="USA",phone="(503) 555-9931")
db.session.add(su16)
su17 = Supplier(suppliername="Svensk Sjöföda AB",contactname="Michael Björn",address="Brovallavägen 231",city="Stockholm",postalcode="S-123 45",country="Sweden",phone="08-123 45 67")
db.session.add(su17)
su18 = Supplier(suppliername="Aux joyeux ecclésiastiques",contactname="Guylène Nodier",address="203, Rue des Francs-Bourgeois",city="Paris",postalcode="75004",country="France",phone="(1) 03.83.00.68")
db.session.add(su18)
su19 = Supplier(suppliername="New England Seafood Cannery",contactname="Robb Merchant",address="Order Processing Dept. 2100 Paul Revere Blvd.",city="Boston",postalcode="02134",country="USA",phone="(617) 555-3267")
db.session.add(su19)
# --- Suppliers (continuation: rows su20–su29 of the seed data started above) ---
# Each row is staged with db.session.add(); a single commit below flushes the batch.
su20 = Supplier(suppliername="Leka Trading",contactname="Chandra Leka",address="471 Serangoon Loop, Suite #402",city="Singapore",postalcode="0512",country="Singapore",phone="555-8787")
db.session.add(su20)
su21 = Supplier(suppliername="Lyngbysild",contactname="Niels Petersen",address="Lyngbysild Fiskebakken 10",city="Lyngby",postalcode="2800",country="Denmark",phone="43844108")
db.session.add(su21)
su22 = Supplier(suppliername="Zaanse Snoepfabriek",contactname="Dirk Luchte",address="Verkoop Rijnweg 22",city="Zaandam",postalcode="9999 ZZ",country="Netherlands",phone="(12345) 1212")
db.session.add(su22)
su23 = Supplier(suppliername="Karkki Oy",contactname="Anne Heikkonen",address="Valtakatu 12",city="Lappeenranta",postalcode="53120",country="Finland",phone="(953) 10956")
db.session.add(su23)
su24 = Supplier(suppliername="G'day, Mate",contactname="Wendy Mackenzie",address="170 Prince Edward Parade Hunter's Hill",city="Sydney",postalcode="2042",country="Australia",phone="(02) 555-5914")
db.session.add(su24)
su25 = Supplier(suppliername="Ma Maison",contactname="Jean-Guy Lauzon",address="2960 Rue St. Laurent",city="Montréal",postalcode="H1J 1C3",country="Canada",phone="(514) 555-9022")
db.session.add(su25)
su26 = Supplier(suppliername="Pasta Buttini s.r.l.",contactname="Giovanni Giudici",address="Via dei Gelsomini, 153",city="Salerno",postalcode="84100",country="Italy",phone="(089) 6547665")
db.session.add(su26)
su27 = Supplier(suppliername="Escargots Nouveaux",contactname="Marie Delamare",address="22, rue H. Voiron",city="Montceau",postalcode="71300",country="France",phone="85.57.00.07")
db.session.add(su27)
su28 = Supplier(suppliername="Gai pâturage",contactname="Eliane Noz",address="Bat. B 3, rue des Alpes",city="Annecy",postalcode="74000",country="France",phone="38.76.98.06")
db.session.add(su28)
su29 = Supplier(suppliername="Forêts d'érables",contactname="Chantal Goulet",address="148 rue Chasseur",city="Ste-Hyacinthe",postalcode="J2S 7S8",country="Canada",phone="(514) 555-2955")
db.session.add(su29)
# Persist all staged Supplier rows in one transaction.
db.session.commit()
# --- Employees (sample staff records; notes hold free-form biography text) ---
e0 = Employee(lastname="Davolio",firstname="Nancy",notes="Education includes a BA in psychology from Colorado State University. She also completed (The Art of the Cold Call). Nancy is a member of 'Toastmasters International'.")
db.session.add(e0)
e1 = Employee(lastname="Fuller",firstname="Andrew",notes="Andrew received his BTS commercial and a Ph.D. in international marketing from the University of Dallas. He is fluent in French and Italian and reads German. He joined the company as a sales representative, was promoted to sales manager and was then named vice president of sales. Andrew is a member of the Sales Management Roundtable, the Seattle Chamber of Commerce, and the Pacific Rim Importers Association.")
db.session.add(e1)
e2 = Employee(lastname="Leverling",firstname="Janet",notes="Janet has a BS degree in chemistry from Boston College). She has also completed a certificate program in food retailing management. Janet was hired as a sales associate and was promoted to sales representative.")
db.session.add(e2)
e3 = Employee(lastname="Peacock",firstname="Margaret",notes="Margaret holds a BA in English literature from Concordia College and an MA from the American Institute of Culinary Arts. She was temporarily assigned to the London office before returning to her permanent post in Seattle.")
db.session.add(e3)
e4 = Employee(lastname="Buchanan",firstname="Steven",notes="Steven Buchanan graduated from St. Andrews University, Scotland, with a BSC degree. Upon joining the company as a sales representative, he spent 6 months in an orientation program at the Seattle office and then returned to his permanent post in London, where he was promoted to sales manager. Mr. Buchanan has completed the courses 'Successful Telemarketing' and 'International Sales Management'. He is fluent in French.")
db.session.add(e4)
e5 = Employee(lastname="Suyama",firstname="Michael",notes="Michael is a graduate of Sussex University (MA, economics) and the University of California at Los Angeles (MBA, marketing). He has also taken the courses 'Multi-Cultural Selling' and 'Time Management for the Sales Professional'. He is fluent in Japanese and can read and write French, Portuguese, and Spanish.")
db.session.add(e5)
e6 = Employee(lastname="King",firstname="Robert",notes="Robert King served in the Peace Corps and traveled extensively before completing his degree in English at the University of Michigan and then joining the company. After completing a course entitled 'Selling in Europe', he was transferred to the London office.")
db.session.add(e6)
e7 = Employee(lastname="Callahan",firstname="Laura",notes="Laura received a BA in psychology from the University of Washington. She has also completed a course in business French. She reads and writes French.")
db.session.add(e7)
e8 = Employee(lastname="Dodsworth",firstname="Anne",notes="Anne has a BA degree in English from St. Lawrence College. She is fluent in French and German.")
db.session.add(e8)
e9 = Employee(lastname="West",firstname="Adam",notes="An old chum.")
db.session.add(e9)
# Persist all staged Employee rows.
db.session.commit()
# --- Product categories ---
c0 = Category(categoryname="Beverages",description="Soft drinks, coffees, teas, beers, and ales")
db.session.add(c0)
c1 = Category(categoryname="Condiments",description="Sweet and savory sauces, relishes, spreads, and seasonings")
db.session.add(c1)
c2 = Category(categoryname="Confections",description="Desserts, candies, and sweet breads")
db.session.add(c2)
c3 = Category(categoryname="Dairy Products",description="Cheeses")
db.session.add(c3)
c4 = Category(categoryname="Grains/Cereals",description="Breads, crackers, pasta, and cereal")
db.session.add(c4)
c5 = Category(categoryname="Meat/Poultry",description="Prepared meats")
db.session.add(c5)
c6 = Category(categoryname="Produce",description="Dried fruit and bean curd")
db.session.add(c6)
c7 = Category(categoryname="Seafood",description="Seaweed and fish")
db.session.add(c7)
# Persist all staged Category rows.
db.session.commit()
# --- Shipping companies ---
s1 = Shipper(shippername="Speedy Express",phone="(503) 555-9831")
db.session.add(s1)
s2 = Shipper(shippername="United Package",phone="(503) 555-3199")
db.session.add(s2)
s3 = Shipper(shippername="Federal Shipping",phone="(503) 555-9931")
db.session.add(s3)
# Persist all staged Shipper rows.
db.session.commit()
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,136
|
grbarker/Freyja
|
refs/heads/master
|
/db_populator_dummy_posts.py
|
"""Seed every existing user with 7 distinct dummy posts.

Standalone script: it creates the Flask app and pushes an app context so
the SQLAlchemy session is usable outside a request.
"""
from app import create_app
app = create_app()
app.app_context().push()
from app import db
from app.models import *
import random

users = User.query.all()

# Candidate post bodies. These are fixture strings; spelling quirks
# (e.g. "fouir") are part of the data and deliberately left untouched.
dummy_posts = [
    'hello', 'hi there', 'yarp', 'how are you', "what's up people!",
    'what a test', 'haha', 'yes', 'no', 'I want some beer', 'Hello everyone!',
    'How is everyone doing today?', "I'm doing good", "I have a question",
    "Man do I love this website!", "I love makeup!", "I love beauty products",
    "Can anyone recommend an eye liner?", "Good morning",
    "Found some foundation that works well with my skin tone",
    "Artus makes the best lip gloss", "one", "two", "three", "fouir", "five",
    "six", "seven", "eight", "nine", "ten", "Party like it's 1999!", "Why?",
    "How?", "I can't wait for the next sale.", "thanks", "Thanks"
]

for user in users:
    # random.sample draws 7 unique bodies in one call, replacing the
    # original retry loop (random.choice + membership test).
    for random_post in random.sample(dummy_posts, 7):
        post = Post(body=random_post, author=user)
        db.session.add(post)
db.session.commit()
print('!!!!!!!!!!___DUMMY_____POSTS_____SUCCESSFULLY_____ADDED___!!!!!!!!!!')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,137
|
grbarker/Freyja
|
refs/heads/master
|
/db_populator_products.py
|
"""Seed the product catalog (77 rows) from a compact data table.

Replaces ~310 lines of copy-pasted query/construct/add triples with one
loop over (productname, supplier_id, category_id, unit, price) tuples.
Row order, field values, and the final commit are unchanged; the
redundant int(...) wrapping of literal ids is dropped.
"""
from app import db
from app.models import Category, Supplier, Product

# (productname, supplier_id, category_id, unit, price) — one tuple per product,
# in the original insertion order.
PRODUCTS = [
    ("Chais", 1, 1, "10 boxes x 20 bags", 18),
    ("Chang", 1, 1, "24 - 12 oz bottles", 19),
    # NOTE(review): "55S0" looks like a typo for "550" — kept as-is to avoid
    # silently changing fixture data; confirm against the intended dataset.
    ("Aniseed Syrup", 1, 2, "12 - 55S0 ml bottles", 10),
    ("Chef Anton's Cajun Seasoning", 2, 2, "48 - 6 oz jars", 22),
    ("Chef Anton's Gumbo Mix", 2, 2, "36 boxes", 21.35),
    ("Grandma's Boysenberry Spread", 3, 2, "12 - 8 oz jars", 25),
    ("Uncle Bob's Organic Dried Pears", 3, 7, "12 - 1 lb pkgs.", 30),
    ("Northwoods Cranberry Sauce", 3, 2, "12 - 12 oz jars", 40),
    ("Mishi Kobe Niku", 4, 6, "18 - 500 g pkgs.", 97),
    ("Ikura", 4, 8, "12 - 200 ml jars", 31),
    ("Queso Cabrales", 5, 4, "1 kg pkg.", 21),
    ("Queso Manchego La Pastora", 5, 4, "10 - 500 g pkgs.", 38),
    ("Konbu", 6, 8, "2 kg box", 6),
    ("Tofu", 6, 7, "40 - 100 g pkgs.", 23.25),
    ("Genen Shouyu", 6, 2, "24 - 250 ml bottles", 15.5),
    ("Pavlova", 7, 3, "32 - 500 g boxes", 17.45),
    ("Alice Mutton", 7, 6, "20 - 1 kg tins", 39),
    ("Carnarvon Tigers", 7, 8, "16 kg pkg.", 62.5),
    ("Teatime Chocolate Biscuits", 8, 3, "10 boxes x 12 pieces", 9.2),
    ("Sir Rodney's Marmalade", 8, 3, "30 gift boxes", 81),
    ("Sir Rodney's Scones", 8, 3, "24 pkgs. x 4 pieces", 10),
    ("Gustaf's Knäckebröd", 9, 5, "24 - 500 g pkgs.", 21),
    ("Tunnbröd", 9, 5, "12 - 250 g pkgs.", 9),
    ("Guaraná Fantástica", 10, 1, "12 - 355 ml cans", 4.5),
    ("NuNuCa Nuß-Nougat-Creme", 11, 3, "20 - 450 g glasses", 14),
    ("Gumbär Gummibärchen", 11, 3, "100 - 250 g bags", 31.23),
    ("Schoggi Schokolade", 11, 3, "100 - 100 g pieces", 43.9),
    ("Rössle Sauerkraut", 12, 7, "25 - 825 g cans", 45.6),
    ("Thüringer Rostbratwurst", 12, 6, "50 bags x 30 sausgs.", 123.79),
    ("Nord-Ost Matjeshering", 13, 8, "10 - 200 g glasses", 25.89),
    ("Gorgonzola Telino", 14, 4, "12 - 100 g pkgs", 12.5),
    ("Mascarpone Fabioli", 14, 4, "24 - 200 g pkgs.", 32),
    ("Geitost", 15, 4, "500 g", 2.5),
    ("Sasquatch Ale", 16, 1, "24 - 12 oz bottles", 14),
    ("Steeleye Stout", 16, 1, "24 - 12 oz bottles", 18),
    ("Inlagd Sill", 17, 8, "24 - 250 g jars", 19),
    ("Gravad lax", 17, 8, "12 - 500 g pkgs.", 26),
    ("Côte de Blaye", 18, 1, "12 - 75 cl bottles", 263.5),
    ("Chartreuse verte", 18, 1, "750 cc per bottle", 18),
    ("Boston Crab Meat", 19, 8, "24 - 4 oz tins", 18.4),
    ("Jack's New England Clam Chowder", 19, 8, "12 - 12 oz cans", 9.65),
    ("Singaporean Hokkien Fried Mee", 20, 5, "32 - 1 kg pkgs.", 14),
    ("Ipoh Coffee", 20, 1, "16 - 500 g tins", 46),
    ("Gula Malacca", 20, 2, "20 - 2 kg bags", 19.45),
    ("Røgede sild", 21, 8, "1k pkg.", 9.5),
    ("Spegesild", 21, 8, "4 - 450 g glasses", 12),
    ("Zaanse koeken", 22, 3, "10 - 4 oz boxes", 9.5),
    ("Chocolade", 22, 3, "10 pkgs.", 12.75),
    ("Maxilaku", 23, 3, "24 - 50 g pkgs.", 20),
    ("Valkoinen suklaa", 23, 3, "12 - 100 g bars", 16.25),
    ("Manjimup Dried Apples", 24, 7, "50 - 300 g pkgs.", 53),
    ("Filo Mix", 24, 5, "16 - 2 kg boxes", 7),
    ("Perth Pasties", 24, 6, "48 pieces", 32.8),
    ("Tourtière", 25, 6, "16 pies", 7.45),
    ("Pâté chinois", 25, 6, "24 boxes x 2 pies", 24),
    ("Gnocchi di nonna Alice", 26, 5, "24 - 250 g pkgs.", 38),
    ("Ravioli Angelo", 26, 5, "24 - 250 g pkgs.", 19.5),
    ("Escargots de Bourgogne", 27, 8, "24 pieces", 13.25),
    ("Raclette Courdavault", 28, 4, "5 kg pkg.", 55),
    ("Camembert Pierrot", 28, 4, "15 - 300 g rounds", 34),
    ("Sirop d'érable", 29, 2, "24 - 500 ml bottles", 28.5),
    ("Tarte au sucre", 29, 3, "48 pies", 49.3),
    ("Vegie-spread", 7, 2, "15 - 625 g jars", 43.9),
    ("Wimmers gute Semmelknödel", 12, 5, "20 bags x 4 pieces", 33.25),
    ("Louisiana Fiery Hot Pepper Sauce", 2, 2, "32 - 8 oz bottles", 21.05),
    ("Louisiana Hot Spiced Okra", 2, 2, "24 - 8 oz jars", 17),
    ("Laughing Lumberjack Lager", 16, 1, "24 - 12 oz bottles", 14),
    ("Scottish Longbreads", 8, 3, "10 boxes x 8 pieces", 12.5),
    ("Gudbrandsdalsost", 15, 4, "10 kg pkg.", 36),
    ("Outback Lager", 7, 1, "24 - 355 ml bottles", 15),
    ("Fløtemysost", 15, 4, "10 - 500 g pkgs.", 21.5),
    ("Mozzarella di Giovanni", 14, 4, "24 - 200 g pkgs.", 34.8),
    ("Röd Kaviar", 17, 8, "24 - 150 g jars", 15),
    ("Longlife Tofu", 4, 7, "5 kg pkg.", 10),
    ("Rhönbräu Klosterbier", 12, 1, "24 - 0.5 l bottles", 7.75),
    ("Lakkalikööri", 23, 1, "500 ml ", 18),  # trailing space in unit is in the original data
    ("Original Frankfurter grüne Soße", 12, 2, "12 boxes", 13),
]

for productname, supplier_id, category_id, unit, price in PRODUCTS:
    # query.get per row mirrors the original script; SQLAlchemy's identity
    # map makes repeated lookups of the same id cheap within the session.
    supplier = Supplier.query.get(supplier_id)
    category = Category.query.get(category_id)
    db.session.add(Product(productname=productname, supplier=supplier,
                           category=category, unit=unit, price=price))
db.session.commit()
print('Dummy products successfully added to the database!')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,138
|
grbarker/Freyja
|
refs/heads/master
|
/db_populator_orderdetails.py
|
from app import db
from app.models import Order, Product, OrderDetail
order = Order.query.get(int(10248))
product = Product.query.get(int(11))
od0 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od0)
order = Order.query.get(int(10248))
product = Product.query.get(int(42))
od1 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od1)
order = Order.query.get(int(10248))
product = Product.query.get(int(72))
od2 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od2)
order = Order.query.get(int(10249))
product = Product.query.get(int(14))
od3 = OrderDetail(order=order, product=product, quantity=9)
db.session.add(od3)
order = Order.query.get(int(10249))
product = Product.query.get(int(51))
od4 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od4)
order = Order.query.get(int(10250))
product = Product.query.get(int(41))
od5 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od5)
order = Order.query.get(int(10250))
product = Product.query.get(int(51))
od6 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od6)
order = Order.query.get(int(10250))
product = Product.query.get(int(65))
od7 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od7)
order = Order.query.get(int(10251))
product = Product.query.get(int(22))
od8 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od8)
order = Order.query.get(int(10251))
product = Product.query.get(int(57))
od9 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od9)
order = Order.query.get(int(10251))
product = Product.query.get(int(65))
od10 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od10)
order = Order.query.get(int(10252))
product = Product.query.get(int(20))
od11 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od11)
order = Order.query.get(int(10252))
product = Product.query.get(int(33))
od12 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od12)
order = Order.query.get(int(10252))
product = Product.query.get(int(60))
od13 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od13)
order = Order.query.get(int(10253))
product = Product.query.get(int(31))
od14 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od14)
order = Order.query.get(int(10253))
product = Product.query.get(int(39))
od15 = OrderDetail(order=order, product=product, quantity=42)
db.session.add(od15)
order = Order.query.get(int(10253))
product = Product.query.get(int(49))
od16 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od16)
order = Order.query.get(int(10254))
product = Product.query.get(int(24))
od17 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od17)
order = Order.query.get(int(10254))
product = Product.query.get(int(55))
od18 = OrderDetail(order=order, product=product, quantity=21)
db.session.add(od18)
order = Order.query.get(int(10254))
product = Product.query.get(int(74))
od19 = OrderDetail(order=order, product=product, quantity=21)
db.session.add(od19)
order = Order.query.get(int(10255))
product = Product.query.get(int(2))
od20 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od20)
order = Order.query.get(int(10255))
product = Product.query.get(int(16))
od21 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od21)
order = Order.query.get(int(10255))
product = Product.query.get(int(36))
od22 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od22)
order = Order.query.get(int(10255))
product = Product.query.get(int(59))
od23 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od23)
order = Order.query.get(int(10256))
product = Product.query.get(int(53))
od24 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od24)
order = Order.query.get(int(10256))
product = Product.query.get(int(77))
od25 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od25)
order = Order.query.get(int(10257))
product = Product.query.get(int(27))
od26 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od26)
order = Order.query.get(int(10257))
product = Product.query.get(int(39))
od27 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od27)
order = Order.query.get(int(10257))
product = Product.query.get(int(77))
od28 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od28)
order = Order.query.get(int(10258))
product = Product.query.get(int(2))
od29 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od29)
order = Order.query.get(int(10258))
product = Product.query.get(int(5))
od30 = OrderDetail(order=order, product=product, quantity=65)
db.session.add(od30)
order = Order.query.get(int(10258))
product = Product.query.get(int(32))
od31 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od31)
order = Order.query.get(int(10259))
product = Product.query.get(int(21))
od32 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od32)
order = Order.query.get(int(10259))
product = Product.query.get(int(37))
od33 = OrderDetail(order=order, product=product, quantity=1)
db.session.add(od33)
order = Order.query.get(int(10260))
product = Product.query.get(int(41))
od34 = OrderDetail(order=order, product=product, quantity=16)
db.session.add(od34)
order = Order.query.get(int(10260))
product = Product.query.get(int(57))
od35 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od35)
order = Order.query.get(int(10260))
product = Product.query.get(int(62))
od36 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od36)
order = Order.query.get(int(10260))
product = Product.query.get(int(70))
od37 = OrderDetail(order=order, product=product, quantity=21)
db.session.add(od37)
order = Order.query.get(int(10261))
product = Product.query.get(int(21))
od38 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od38)
order = Order.query.get(int(10261))
product = Product.query.get(int(35))
od39 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od39)
order = Order.query.get(int(10262))
product = Product.query.get(int(5))
od40 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od40)
order = Order.query.get(int(10262))
product = Product.query.get(int(7))
od41 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od41)
order = Order.query.get(int(10262))
product = Product.query.get(int(56))
od42 = OrderDetail(order=order, product=product, quantity=2)
db.session.add(od42)
order = Order.query.get(int(10263))
product = Product.query.get(int(16))
od43 = OrderDetail(order=order, product=product, quantity=60)
db.session.add(od43)
order = Order.query.get(int(10263))
product = Product.query.get(int(24))
od44 = OrderDetail(order=order, product=product, quantity=28)
db.session.add(od44)
order = Order.query.get(int(10263))
product = Product.query.get(int(30))
od45 = OrderDetail(order=order, product=product, quantity=60)
db.session.add(od45)
order = Order.query.get(int(10263))
product = Product.query.get(int(74))
od46 = OrderDetail(order=order, product=product, quantity=36)
db.session.add(od46)
order = Order.query.get(int(10264))
product = Product.query.get(int(2))
od47 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od47)
order = Order.query.get(int(10264))
product = Product.query.get(int(41))
od48 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od48)
order = Order.query.get(int(10265))
product = Product.query.get(int(17))
od49 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od49)
order = Order.query.get(int(10265))
product = Product.query.get(int(70))
od50 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od50)
order = Order.query.get(int(10266))
product = Product.query.get(int(12))
od51 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od51)
order = Order.query.get(int(10267))
product = Product.query.get(int(40))
od52 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od52)
order = Order.query.get(int(10267))
product = Product.query.get(int(59))
od53 = OrderDetail(order=order, product=product, quantity=70)
db.session.add(od53)
order = Order.query.get(int(10267))
product = Product.query.get(int(76))
od54 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od54)
order = Order.query.get(int(10268))
product = Product.query.get(int(29))
od55 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od55)
order = Order.query.get(int(10268))
product = Product.query.get(int(72))
od56 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od56)
order = Order.query.get(int(10269))
product = Product.query.get(int(33))
od57 = OrderDetail(order=order, product=product, quantity=60)
db.session.add(od57)
order = Order.query.get(int(10269))
product = Product.query.get(int(72))
od58 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od58)
order = Order.query.get(int(10270))
product = Product.query.get(int(36))
od59 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od59)
order = Order.query.get(int(10270))
product = Product.query.get(int(43))
od60 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od60)
order = Order.query.get(int(10271))
product = Product.query.get(int(33))
od61 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od61)
order = Order.query.get(int(10272))
product = Product.query.get(int(20))
od62 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od62)
order = Order.query.get(int(10272))
product = Product.query.get(int(31))
od63 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od63)
order = Order.query.get(int(10272))
product = Product.query.get(int(72))
od64 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od64)
order = Order.query.get(int(10273))
product = Product.query.get(int(10))
od65 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od65)
order = Order.query.get(int(10273))
product = Product.query.get(int(31))
od66 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od66)
order = Order.query.get(int(10273))
product = Product.query.get(int(33))
od67 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od67)
order = Order.query.get(int(10273))
product = Product.query.get(int(40))
od68 = OrderDetail(order=order, product=product, quantity=60)
db.session.add(od68)
order = Order.query.get(int(10273))
product = Product.query.get(int(76))
od69 = OrderDetail(order=order, product=product, quantity=33)
db.session.add(od69)
order = Order.query.get(int(10274))
product = Product.query.get(int(71))
od70 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od70)
order = Order.query.get(int(10274))
product = Product.query.get(int(72))
od71 = OrderDetail(order=order, product=product, quantity=7)
db.session.add(od71)
order = Order.query.get(int(10275))
product = Product.query.get(int(24))
od72 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od72)
order = Order.query.get(int(10275))
product = Product.query.get(int(59))
od73 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od73)
order = Order.query.get(int(10276))
product = Product.query.get(int(10))
od74 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od74)
order = Order.query.get(int(10276))
product = Product.query.get(int(13))
od75 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od75)
order = Order.query.get(int(10277))
product = Product.query.get(int(28))
od76 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od76)
order = Order.query.get(int(10277))
product = Product.query.get(int(62))
od77 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od77)
order = Order.query.get(int(10278))
product = Product.query.get(int(44))
od78 = OrderDetail(order=order, product=product, quantity=16)
db.session.add(od78)
order = Order.query.get(int(10278))
product = Product.query.get(int(59))
od79 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od79)
order = Order.query.get(int(10278))
product = Product.query.get(int(63))
od80 = OrderDetail(order=order, product=product, quantity=8)
db.session.add(od80)
order = Order.query.get(int(10278))
product = Product.query.get(int(73))
od81 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od81)
order = Order.query.get(int(10279))
product = Product.query.get(int(17))
od82 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od82)
order = Order.query.get(int(10280))
product = Product.query.get(int(24))
od83 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od83)
order = Order.query.get(int(10280))
product = Product.query.get(int(55))
od84 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od84)
order = Order.query.get(int(10280))
product = Product.query.get(int(75))
od85 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od85)
order = Order.query.get(int(10281))
product = Product.query.get(int(19))
od86 = OrderDetail(order=order, product=product, quantity=1)
db.session.add(od86)
order = Order.query.get(int(10281))
product = Product.query.get(int(24))
od87 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od87)
order = Order.query.get(int(10281))
product = Product.query.get(int(35))
od88 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od88)
order = Order.query.get(int(10282))
product = Product.query.get(int(30))
od89 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od89)
order = Order.query.get(int(10282))
product = Product.query.get(int(57))
od90 = OrderDetail(order=order, product=product, quantity=2)
db.session.add(od90)
order = Order.query.get(int(10283))
product = Product.query.get(int(15))
od91 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od91)
order = Order.query.get(int(10283))
product = Product.query.get(int(19))
od92 = OrderDetail(order=order, product=product, quantity=18)
db.session.add(od92)
order = Order.query.get(int(10283))
product = Product.query.get(int(60))
od93 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od93)
order = Order.query.get(int(10283))
product = Product.query.get(int(72))
od94 = OrderDetail(order=order, product=product, quantity=3)
db.session.add(od94)
order = Order.query.get(int(10284))
product = Product.query.get(int(27))
od95 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od95)
order = Order.query.get(int(10284))
product = Product.query.get(int(44))
od96 = OrderDetail(order=order, product=product, quantity=21)
db.session.add(od96)
order = Order.query.get(int(10284))
product = Product.query.get(int(60))
od97 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od97)
order = Order.query.get(int(10284))
product = Product.query.get(int(67))
od98 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od98)
order = Order.query.get(int(10285))
product = Product.query.get(int(1))
od99 = OrderDetail(order=order, product=product, quantity=45)
db.session.add(od99)
order = Order.query.get(int(10285))
product = Product.query.get(int(40))
od100 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od100)
order = Order.query.get(int(10285))
product = Product.query.get(int(53))
od101 = OrderDetail(order=order, product=product, quantity=36)
db.session.add(od101)
order = Order.query.get(int(10286))
product = Product.query.get(int(35))
od102 = OrderDetail(order=order, product=product, quantity=100)
db.session.add(od102)
order = Order.query.get(int(10286))
product = Product.query.get(int(62))
od103 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od103)
order = Order.query.get(int(10287))
product = Product.query.get(int(16))
od104 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od104)
order = Order.query.get(int(10287))
product = Product.query.get(int(34))
od105 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od105)
order = Order.query.get(int(10287))
product = Product.query.get(int(46))
od106 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od106)
order = Order.query.get(int(10288))
product = Product.query.get(int(54))
od107 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od107)
order = Order.query.get(int(10288))
product = Product.query.get(int(68))
od108 = OrderDetail(order=order, product=product, quantity=3)
db.session.add(od108)
order = Order.query.get(int(10289))
product = Product.query.get(int(3))
od109 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od109)
order = Order.query.get(int(10289))
product = Product.query.get(int(64))
od110 = OrderDetail(order=order, product=product, quantity=9)
db.session.add(od110)
order = Order.query.get(int(10290))
product = Product.query.get(int(5))
od111 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od111)
order = Order.query.get(int(10290))
product = Product.query.get(int(29))
od112 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od112)
order = Order.query.get(int(10290))
product = Product.query.get(int(49))
od113 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od113)
order = Order.query.get(int(10290))
product = Product.query.get(int(77))
od114 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od114)
order = Order.query.get(int(10291))
product = Product.query.get(int(13))
od115 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od115)
order = Order.query.get(int(10291))
product = Product.query.get(int(44))
od116 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od116)
order = Order.query.get(int(10291))
product = Product.query.get(int(51))
od117 = OrderDetail(order=order, product=product, quantity=2)
db.session.add(od117)
order = Order.query.get(int(10292))
product = Product.query.get(int(20))
od118 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od118)
order = Order.query.get(int(10293))
product = Product.query.get(int(18))
od119 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od119)
order = Order.query.get(int(10293))
product = Product.query.get(int(24))
od120 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od120)
order = Order.query.get(int(10293))
product = Product.query.get(int(63))
od121 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od121)
order = Order.query.get(int(10293))
product = Product.query.get(int(75))
od122 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od122)
order = Order.query.get(int(10294))
product = Product.query.get(int(1))
od123 = OrderDetail(order=order, product=product, quantity=18)
db.session.add(od123)
order = Order.query.get(int(10294))
product = Product.query.get(int(17))
od124 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od124)
order = Order.query.get(int(10294))
product = Product.query.get(int(43))
od125 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od125)
order = Order.query.get(int(10294))
product = Product.query.get(int(60))
od126 = OrderDetail(order=order, product=product, quantity=21)
db.session.add(od126)
order = Order.query.get(int(10294))
product = Product.query.get(int(75))
od127 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od127)
order = Order.query.get(int(10295))
product = Product.query.get(int(56))
od128 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od128)
order = Order.query.get(int(10296))
product = Product.query.get(int(11))
od129 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od129)
order = Order.query.get(int(10296))
product = Product.query.get(int(16))
od130 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od130)
order = Order.query.get(int(10296))
product = Product.query.get(int(69))
od131 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od131)
order = Order.query.get(int(10297))
product = Product.query.get(int(39))
od132 = OrderDetail(order=order, product=product, quantity=60)
db.session.add(od132)
order = Order.query.get(int(10297))
product = Product.query.get(int(72))
od133 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od133)
order = Order.query.get(int(10298))
product = Product.query.get(int(2))
od134 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od134)
order = Order.query.get(int(10298))
product = Product.query.get(int(36))
od135 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od135)
order = Order.query.get(int(10298))
product = Product.query.get(int(59))
od136 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od136)
order = Order.query.get(int(10298))
product = Product.query.get(int(62))
od137 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od137)
order = Order.query.get(int(10299))
product = Product.query.get(int(19))
od138 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od138)
order = Order.query.get(int(10299))
product = Product.query.get(int(70))
od139 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od139)
order = Order.query.get(int(10300))
product = Product.query.get(int(66))
od140 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od140)
order = Order.query.get(int(10300))
product = Product.query.get(int(68))
od141 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od141)
order = Order.query.get(int(10301))
product = Product.query.get(int(40))
od142 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od142)
order = Order.query.get(int(10301))
product = Product.query.get(int(56))
od143 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od143)
order = Order.query.get(int(10302))
product = Product.query.get(int(17))
od144 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od144)
order = Order.query.get(int(10302))
product = Product.query.get(int(28))
od145 = OrderDetail(order=order, product=product, quantity=28)
db.session.add(od145)
order = Order.query.get(int(10302))
product = Product.query.get(int(43))
od146 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od146)
order = Order.query.get(int(10303))
product = Product.query.get(int(40))
od147 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od147)
order = Order.query.get(int(10303))
product = Product.query.get(int(65))
od148 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od148)
order = Order.query.get(int(10303))
product = Product.query.get(int(68))
od149 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od149)
order = Order.query.get(int(10304))
product = Product.query.get(int(49))
od150 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od150)
order = Order.query.get(int(10304))
product = Product.query.get(int(59))
od151 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od151)
order = Order.query.get(int(10304))
product = Product.query.get(int(71))
od152 = OrderDetail(order=order, product=product, quantity=2)
db.session.add(od152)
order = Order.query.get(int(10305))
product = Product.query.get(int(18))
od153 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od153)
order = Order.query.get(int(10305))
product = Product.query.get(int(29))
od154 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od154)
order = Order.query.get(int(10305))
product = Product.query.get(int(39))
od155 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od155)
order = Order.query.get(int(10306))
product = Product.query.get(int(30))
od156 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od156)
order = Order.query.get(int(10306))
product = Product.query.get(int(53))
od157 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od157)
order = Order.query.get(int(10306))
product = Product.query.get(int(54))
od158 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od158)
order = Order.query.get(int(10307))
product = Product.query.get(int(62))
od159 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od159)
order = Order.query.get(int(10307))
product = Product.query.get(int(68))
od160 = OrderDetail(order=order, product=product, quantity=3)
db.session.add(od160)
order = Order.query.get(int(10308))
product = Product.query.get(int(69))
od161 = OrderDetail(order=order, product=product, quantity=1)
db.session.add(od161)
order = Order.query.get(int(10308))
product = Product.query.get(int(70))
od162 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od162)
order = Order.query.get(int(10309))
product = Product.query.get(int(4))
od163 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od163)
order = Order.query.get(int(10309))
product = Product.query.get(int(6))
od164 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od164)
order = Order.query.get(int(10309))
product = Product.query.get(int(42))
od165 = OrderDetail(order=order, product=product, quantity=2)
db.session.add(od165)
order = Order.query.get(int(10309))
product = Product.query.get(int(43))
od166 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od166)
order = Order.query.get(int(10309))
product = Product.query.get(int(71))
od167 = OrderDetail(order=order, product=product, quantity=3)
db.session.add(od167)
order = Order.query.get(int(10310))
product = Product.query.get(int(16))
od168 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od168)
order = Order.query.get(int(10310))
product = Product.query.get(int(62))
od169 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od169)
order = Order.query.get(int(10311))
product = Product.query.get(int(42))
od170 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od170)
order = Order.query.get(int(10311))
product = Product.query.get(int(69))
od171 = OrderDetail(order=order, product=product, quantity=7)
db.session.add(od171)
order = Order.query.get(int(10312))
product = Product.query.get(int(28))
od172 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od172)
order = Order.query.get(int(10312))
product = Product.query.get(int(43))
od173 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od173)
order = Order.query.get(int(10312))
product = Product.query.get(int(53))
od174 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od174)
order = Order.query.get(int(10312))
product = Product.query.get(int(75))
od175 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od175)
order = Order.query.get(int(10313))
product = Product.query.get(int(36))
od176 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od176)
order = Order.query.get(int(10314))
product = Product.query.get(int(32))
od177 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od177)
order = Order.query.get(int(10314))
product = Product.query.get(int(58))
od178 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od178)
order = Order.query.get(int(10314))
product = Product.query.get(int(62))
od179 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od179)
order = Order.query.get(int(10315))
product = Product.query.get(int(34))
od180 = OrderDetail(order=order, product=product, quantity=14)
db.session.add(od180)
order = Order.query.get(int(10315))
product = Product.query.get(int(70))
od181 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od181)
order = Order.query.get(int(10316))
product = Product.query.get(int(41))
od182 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od182)
order = Order.query.get(int(10316))
product = Product.query.get(int(62))
od183 = OrderDetail(order=order, product=product, quantity=70)
db.session.add(od183)
order = Order.query.get(int(10317))
product = Product.query.get(int(1))
od184 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od184)
order = Order.query.get(int(10318))
product = Product.query.get(int(41))
od185 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od185)
order = Order.query.get(int(10318))
product = Product.query.get(int(76))
od186 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od186)
order = Order.query.get(int(10319))
product = Product.query.get(int(17))
od187 = OrderDetail(order=order, product=product, quantity=8)
db.session.add(od187)
order = Order.query.get(int(10319))
product = Product.query.get(int(28))
od188 = OrderDetail(order=order, product=product, quantity=14)
db.session.add(od188)
order = Order.query.get(int(10319))
product = Product.query.get(int(76))
od189 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od189)
order = Order.query.get(int(10320))
product = Product.query.get(int(71))
od190 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od190)
order = Order.query.get(int(10321))
product = Product.query.get(int(35))
od191 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od191)
order = Order.query.get(int(10322))
product = Product.query.get(int(52))
od192 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od192)
order = Order.query.get(int(10323))
product = Product.query.get(int(15))
od193 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od193)
order = Order.query.get(int(10323))
product = Product.query.get(int(25))
od194 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od194)
order = Order.query.get(int(10323))
product = Product.query.get(int(39))
od195 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od195)
order = Order.query.get(int(10324))
product = Product.query.get(int(16))
od196 = OrderDetail(order=order, product=product, quantity=21)
db.session.add(od196)
order = Order.query.get(int(10324))
product = Product.query.get(int(35))
od197 = OrderDetail(order=order, product=product, quantity=70)
db.session.add(od197)
order = Order.query.get(int(10324))
product = Product.query.get(int(46))
od198 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od198)
order = Order.query.get(int(10324))
product = Product.query.get(int(59))
od199 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od199)
order = Order.query.get(int(10324))
product = Product.query.get(int(63))
od200 = OrderDetail(order=order, product=product, quantity=80)
db.session.add(od200)
order = Order.query.get(int(10325))
product = Product.query.get(int(6))
od201 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od201)
order = Order.query.get(int(10325))
product = Product.query.get(int(13))
od202 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od202)
order = Order.query.get(int(10325))
product = Product.query.get(int(14))
od203 = OrderDetail(order=order, product=product, quantity=9)
db.session.add(od203)
order = Order.query.get(int(10325))
product = Product.query.get(int(31))
od204 = OrderDetail(order=order, product=product, quantity=4)
db.session.add(od204)
order = Order.query.get(int(10325))
product = Product.query.get(int(72))
od205 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od205)
order = Order.query.get(int(10326))
product = Product.query.get(int(4))
od206 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od206)
order = Order.query.get(int(10326))
product = Product.query.get(int(57))
od207 = OrderDetail(order=order, product=product, quantity=16)
db.session.add(od207)
order = Order.query.get(int(10326))
product = Product.query.get(int(75))
od208 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od208)
order = Order.query.get(int(10327))
product = Product.query.get(int(2))
od209 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od209)
order = Order.query.get(int(10327))
product = Product.query.get(int(11))
od210 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od210)
order = Order.query.get(int(10327))
product = Product.query.get(int(30))
od211 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od211)
order = Order.query.get(int(10327))
product = Product.query.get(int(58))
od212 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od212)
order = Order.query.get(int(10328))
product = Product.query.get(int(59))
od213 = OrderDetail(order=order, product=product, quantity=9)
db.session.add(od213)
order = Order.query.get(int(10328))
product = Product.query.get(int(65))
od214 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od214)
order = Order.query.get(int(10328))
product = Product.query.get(int(68))
od215 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od215)
order = Order.query.get(int(10329))
product = Product.query.get(int(19))
od216 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od216)
order = Order.query.get(int(10329))
product = Product.query.get(int(30))
od217 = OrderDetail(order=order, product=product, quantity=8)
db.session.add(od217)
order = Order.query.get(int(10329))
product = Product.query.get(int(38))
od218 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od218)
order = Order.query.get(int(10329))
product = Product.query.get(int(56))
od219 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od219)
order = Order.query.get(int(10330))
product = Product.query.get(int(26))
od220 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od220)
order = Order.query.get(int(10330))
product = Product.query.get(int(72))
od221 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od221)
order = Order.query.get(int(10331))
product = Product.query.get(int(54))
od222 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od222)
order = Order.query.get(int(10332))
product = Product.query.get(int(18))
od223 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od223)
order = Order.query.get(int(10332))
product = Product.query.get(int(42))
od224 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od224)
order = Order.query.get(int(10332))
product = Product.query.get(int(47))
od225 = OrderDetail(order=order, product=product, quantity=16)
db.session.add(od225)
order = Order.query.get(int(10333))
product = Product.query.get(int(14))
od226 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od226)
order = Order.query.get(int(10333))
product = Product.query.get(int(21))
od227 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od227)
order = Order.query.get(int(10333))
product = Product.query.get(int(71))
od228 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od228)
order = Order.query.get(int(10334))
product = Product.query.get(int(52))
od229 = OrderDetail(order=order, product=product, quantity=8)
db.session.add(od229)
order = Order.query.get(int(10334))
product = Product.query.get(int(68))
od230 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od230)
order = Order.query.get(int(10335))
product = Product.query.get(int(2))
od231 = OrderDetail(order=order, product=product, quantity=7)
db.session.add(od231)
order = Order.query.get(int(10335))
product = Product.query.get(int(31))
od232 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od232)
order = Order.query.get(int(10335))
product = Product.query.get(int(32))
od233 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od233)
order = Order.query.get(int(10335))
product = Product.query.get(int(51))
od234 = OrderDetail(order=order, product=product, quantity=48)
db.session.add(od234)
order = Order.query.get(int(10336))
product = Product.query.get(int(4))
od235 = OrderDetail(order=order, product=product, quantity=18)
db.session.add(od235)
order = Order.query.get(int(10337))
product = Product.query.get(int(23))
od236 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od236)
order = Order.query.get(int(10337))
product = Product.query.get(int(26))
od237 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od237)
order = Order.query.get(int(10337))
product = Product.query.get(int(36))
od238 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od238)
order = Order.query.get(int(10337))
product = Product.query.get(int(37))
od239 = OrderDetail(order=order, product=product, quantity=28)
db.session.add(od239)
order = Order.query.get(int(10337))
product = Product.query.get(int(72))
od240 = OrderDetail(order=order, product=product, quantity=25)
db.session.add(od240)
order = Order.query.get(int(10338))
product = Product.query.get(int(17))
od241 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od241)
order = Order.query.get(int(10338))
product = Product.query.get(int(30))
od242 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od242)
order = Order.query.get(int(10339))
product = Product.query.get(int(4))
od243 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od243)
order = Order.query.get(int(10339))
product = Product.query.get(int(17))
od244 = OrderDetail(order=order, product=product, quantity=70)
db.session.add(od244)
# Seed data for the OrderDetail association table (orders 10339-10424).
# Each triple is (order_id, product_id, quantity), listed in the same order
# the original copy-pasted statements (od245..od472) staged them.
_ORDER_DETAIL_ROWS = [
    (10339, 62, 28), (10340, 18, 20), (10340, 41, 12), (10340, 43, 40),
    (10341, 33, 8), (10341, 59, 9), (10342, 2, 24), (10342, 31, 56),
    (10342, 36, 40), (10342, 55, 40), (10343, 64, 50), (10343, 68, 4),
    (10343, 76, 15), (10344, 4, 35), (10344, 8, 70), (10345, 8, 70),
    (10345, 19, 80), (10345, 42, 9), (10346, 17, 36), (10346, 56, 20),
    (10347, 25, 10), (10347, 39, 50), (10347, 40, 4), (10347, 75, 6),
    (10348, 1, 15), (10348, 23, 25), (10349, 54, 24), (10350, 50, 15),
    (10350, 69, 18), (10351, 38, 20), (10351, 41, 13), (10351, 44, 77),
    (10351, 65, 10), (10352, 24, 10), (10352, 54, 20), (10353, 11, 12),
    (10353, 38, 50), (10354, 1, 12), (10354, 29, 4), (10355, 24, 25),
    (10355, 57, 25), (10356, 31, 30), (10356, 55, 12), (10356, 69, 20),
    (10357, 10, 30), (10357, 26, 16), (10357, 60, 8), (10358, 24, 10),
    (10358, 34, 10), (10358, 36, 20), (10359, 16, 56), (10359, 31, 70),
    (10359, 60, 80), (10360, 28, 30), (10360, 29, 35), (10360, 38, 10),
    (10360, 49, 35), (10360, 54, 28), (10361, 39, 54), (10361, 60, 55),
    (10362, 25, 50), (10362, 51, 20), (10362, 54, 24), (10363, 31, 20),
    (10363, 75, 12), (10363, 76, 12), (10364, 69, 30), (10364, 71, 5),
    (10365, 11, 24), (10366, 65, 5), (10366, 77, 5), (10367, 34, 36),
    (10367, 54, 18), (10367, 65, 15), (10367, 77, 7), (10368, 21, 5),
    (10368, 28, 13), (10368, 57, 25), (10368, 64, 35), (10369, 29, 20),
    (10369, 56, 18), (10370, 1, 15), (10370, 64, 30), (10370, 74, 20),
    (10371, 36, 6), (10372, 20, 12), (10372, 38, 40), (10372, 60, 70),
    (10372, 72, 42), (10373, 58, 80), (10373, 71, 50), (10374, 31, 30),
    (10374, 58, 15), (10375, 14, 15), (10375, 54, 10), (10376, 31, 42),
    (10377, 28, 20), (10377, 39, 20), (10378, 71, 6), (10379, 41, 8),
    (10379, 63, 16), (10379, 65, 20), (10380, 30, 18), (10380, 53, 20),
    (10380, 60, 6), (10380, 70, 30), (10381, 74, 14), (10382, 5, 32),
    (10382, 18, 9), (10382, 29, 14), (10382, 33, 60), (10382, 74, 50),
    (10383, 13, 20), (10383, 50, 15), (10383, 56, 20), (10384, 20, 28),
    (10384, 60, 15), (10385, 7, 10), (10385, 60, 20), (10385, 68, 8),
    (10386, 24, 15), (10386, 34, 10), (10387, 24, 15), (10387, 28, 6),
    (10387, 59, 12), (10387, 71, 15), (10388, 45, 15), (10388, 52, 20),
    (10388, 53, 40), (10389, 10, 16), (10389, 55, 15), (10389, 62, 20),
    (10389, 70, 30), (10390, 31, 60), (10390, 35, 40), (10390, 46, 45),
    (10390, 72, 24), (10391, 13, 18), (10392, 69, 50), (10393, 2, 25),
    (10393, 14, 42), (10393, 25, 7), (10393, 26, 70), (10393, 31, 32),
    (10394, 13, 10), (10394, 62, 10), (10395, 46, 28), (10395, 53, 70),
    (10395, 69, 8), (10396, 23, 40), (10396, 71, 60), (10396, 72, 21),
    (10397, 21, 10), (10397, 51, 18), (10398, 35, 30), (10398, 55, 120),
    (10399, 68, 60), (10399, 71, 30), (10399, 76, 35), (10399, 77, 14),
    (10400, 29, 21), (10400, 35, 35), (10400, 49, 30), (10401, 30, 18),
    (10401, 56, 70), (10401, 65, 20), (10401, 71, 60), (10402, 23, 60),
    (10402, 63, 65), (10403, 16, 21), (10403, 48, 70), (10404, 26, 30),
    (10404, 42, 40), (10404, 49, 30), (10405, 3, 50), (10406, 1, 10),
    (10406, 21, 30), (10406, 28, 42), (10406, 36, 5), (10406, 40, 2),
    (10407, 11, 30), (10407, 69, 15), (10407, 71, 15), (10408, 37, 10),
    (10408, 54, 6), (10408, 62, 35), (10409, 14, 12), (10409, 21, 12),
    (10410, 33, 49), (10410, 59, 16), (10411, 41, 25), (10411, 44, 40),
    (10411, 59, 9), (10412, 14, 20), (10413, 1, 24), (10413, 62, 40),
    (10413, 76, 14), (10414, 19, 18), (10414, 33, 50), (10415, 17, 2),
    (10415, 33, 20), (10416, 19, 20), (10416, 53, 10), (10416, 57, 20),
    (10417, 38, 50), (10417, 46, 2), (10417, 68, 36), (10417, 77, 35),
    (10418, 2, 60), (10418, 47, 55), (10418, 61, 16), (10418, 74, 15),
    (10419, 60, 60), (10419, 69, 20), (10420, 9, 20), (10420, 13, 2),
    (10420, 70, 8), (10420, 73, 20), (10421, 19, 4), (10421, 26, 30),
    (10421, 53, 15), (10421, 77, 10), (10422, 26, 2), (10423, 31, 14),
    (10423, 59, 20), (10424, 35, 60), (10424, 38, 49), (10424, 68, 30),
]

# Stage one OrderDetail per row, in the original order.  This replaces the
# hand-unrolled od245..od472 blocks; the throwaway odN names are dropped, but
# `order` and `product` stay bound (ending on the same values as before:
# order 10424 / product 68).  Nothing is committed here — the session commit
# happens elsewhere in this script.
for _order_id, _product_id, _quantity in _ORDER_DETAIL_ROWS:
    order = Order.query.get(_order_id)
    product = Product.query.get(_product_id)
    db.session.add(OrderDetail(order=order, product=product, quantity=_quantity))
order = Order.query.get(int(10425))
product = Product.query.get(int(55))
od473 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od473)
order = Order.query.get(int(10425))
product = Product.query.get(int(76))
od474 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od474)
order = Order.query.get(int(10426))
product = Product.query.get(int(56))
od475 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od475)
order = Order.query.get(int(10426))
product = Product.query.get(int(64))
od476 = OrderDetail(order=order, product=product, quantity=7)
db.session.add(od476)
order = Order.query.get(int(10427))
product = Product.query.get(int(14))
od477 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od477)
order = Order.query.get(int(10428))
product = Product.query.get(int(46))
od478 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od478)
order = Order.query.get(int(10429))
product = Product.query.get(int(50))
od479 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od479)
order = Order.query.get(int(10429))
product = Product.query.get(int(63))
od480 = OrderDetail(order=order, product=product, quantity=35)
db.session.add(od480)
order = Order.query.get(int(10430))
product = Product.query.get(int(17))
od481 = OrderDetail(order=order, product=product, quantity=45)
db.session.add(od481)
order = Order.query.get(int(10430))
product = Product.query.get(int(21))
od482 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od482)
order = Order.query.get(int(10430))
product = Product.query.get(int(56))
od483 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od483)
order = Order.query.get(int(10430))
product = Product.query.get(int(59))
od484 = OrderDetail(order=order, product=product, quantity=70)
db.session.add(od484)
order = Order.query.get(int(10431))
product = Product.query.get(int(17))
od485 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od485)
order = Order.query.get(int(10431))
product = Product.query.get(int(40))
od486 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od486)
order = Order.query.get(int(10431))
product = Product.query.get(int(47))
od487 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od487)
order = Order.query.get(int(10432))
product = Product.query.get(int(26))
od488 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od488)
order = Order.query.get(int(10432))
product = Product.query.get(int(54))
od489 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od489)
order = Order.query.get(int(10433))
product = Product.query.get(int(56))
od490 = OrderDetail(order=order, product=product, quantity=28)
db.session.add(od490)
order = Order.query.get(int(10434))
product = Product.query.get(int(11))
od491 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od491)
order = Order.query.get(int(10434))
product = Product.query.get(int(76))
od492 = OrderDetail(order=order, product=product, quantity=18)
db.session.add(od492)
order = Order.query.get(int(10435))
product = Product.query.get(int(2))
od493 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od493)
order = Order.query.get(int(10435))
product = Product.query.get(int(22))
od494 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od494)
order = Order.query.get(int(10435))
product = Product.query.get(int(72))
od495 = OrderDetail(order=order, product=product, quantity=10)
db.session.add(od495)
order = Order.query.get(int(10436))
product = Product.query.get(int(46))
od496 = OrderDetail(order=order, product=product, quantity=5)
db.session.add(od496)
order = Order.query.get(int(10436))
product = Product.query.get(int(56))
od497 = OrderDetail(order=order, product=product, quantity=40)
db.session.add(od497)
order = Order.query.get(int(10436))
product = Product.query.get(int(64))
od498 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od498)
order = Order.query.get(int(10436))
product = Product.query.get(int(75))
od499 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od499)
order = Order.query.get(int(10437))
product = Product.query.get(int(53))
od500 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od500)
order = Order.query.get(int(10438))
product = Product.query.get(int(19))
od501 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od501)
order = Order.query.get(int(10438))
product = Product.query.get(int(34))
od502 = OrderDetail(order=order, product=product, quantity=20)
db.session.add(od502)
order = Order.query.get(int(10438))
product = Product.query.get(int(57))
od503 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od503)
order = Order.query.get(int(10439))
product = Product.query.get(int(12))
od504 = OrderDetail(order=order, product=product, quantity=15)
db.session.add(od504)
order = Order.query.get(int(10439))
product = Product.query.get(int(16))
od505 = OrderDetail(order=order, product=product, quantity=16)
db.session.add(od505)
order = Order.query.get(int(10439))
product = Product.query.get(int(64))
od506 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od506)
order = Order.query.get(int(10439))
product = Product.query.get(int(74))
od507 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od507)
order = Order.query.get(int(10440))
product = Product.query.get(int(2))
od508 = OrderDetail(order=order, product=product, quantity=45)
db.session.add(od508)
order = Order.query.get(int(10440))
product = Product.query.get(int(16))
od509 = OrderDetail(order=order, product=product, quantity=49)
db.session.add(od509)
order = Order.query.get(int(10440))
product = Product.query.get(int(29))
od510 = OrderDetail(order=order, product=product, quantity=24)
db.session.add(od510)
order = Order.query.get(int(10440))
product = Product.query.get(int(61))
od511 = OrderDetail(order=order, product=product, quantity=90)
db.session.add(od511)
order = Order.query.get(int(10441))
product = Product.query.get(int(27))
od512 = OrderDetail(order=order, product=product, quantity=50)
db.session.add(od512)
order = Order.query.get(int(10442))
product = Product.query.get(int(11))
od513 = OrderDetail(order=order, product=product, quantity=30)
db.session.add(od513)
order = Order.query.get(int(10442))
product = Product.query.get(int(54))
od514 = OrderDetail(order=order, product=product, quantity=80)
db.session.add(od514)
order = Order.query.get(int(10442))
product = Product.query.get(int(66))
od515 = OrderDetail(order=order, product=product, quantity=60)
db.session.add(od515)
order = Order.query.get(int(10443))
product = Product.query.get(int(11))
od516 = OrderDetail(order=order, product=product, quantity=6)
db.session.add(od516)
order = Order.query.get(int(10443))
product = Product.query.get(int(28))
od517 = OrderDetail(order=order, product=product, quantity=12)
db.session.add(od517)
db.session.commit()
print('Dummy OrderDetails added to db -- SUCCESS')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,139
|
grbarker/Freyja
|
refs/heads/master
|
/db_populator_orders.py
|
from datetime import datetime
from app import db
from app.models import User, Order, Shipper
user = User.query.get(int(90))
shipper = Shipper.query.get(int(3))
o10248 = Order(id=10248, customer=user, orderdate=datetime(1996, 7, 4), shipper=shipper)
db.session.add(o10248)
user = User.query.get(int(81))
shipper = Shipper.query.get(int(1))
o10249 = Order(id=10249, customer=user, orderdate=datetime(1996, 7, 5), shipper=shipper)
db.session.add(o10249)
user = User.query.get(int(34))
shipper = Shipper.query.get(int(2))
o10250 = Order(id=10250, customer=user, orderdate=datetime(1996, 7, 8), shipper=shipper)
db.session.add(o10250)
user = User.query.get(int(84))
shipper = Shipper.query.get(int(1))
o10251 = Order(id=10251, customer=user, orderdate=datetime(1996, 7, 8), shipper=shipper)
db.session.add(o10251)
user = User.query.get(int(76))
shipper = Shipper.query.get(int(2))
o10252 = Order(id=10252, customer=user, orderdate=datetime(1996, 7, 9), shipper=shipper)
db.session.add(o10252)
user = User.query.get(int(34))
shipper = Shipper.query.get(int(2))
o10253 = Order(id=10253, customer=user, orderdate=datetime(1996, 7, 10), shipper=shipper)
db.session.add(o10253)
user = User.query.get(int(14))
shipper = Shipper.query.get(int(2))
o10254 = Order(id=10254, customer=user, orderdate=datetime(1996, 7, 11), shipper=shipper)
db.session.add(o10254)
user = User.query.get(int(68))
shipper = Shipper.query.get(int(3))
o10255 = Order(id=10255, customer=user, orderdate=datetime(1996, 7, 12), shipper=shipper)
db.session.add(o10255)
user = User.query.get(int(88))
shipper = Shipper.query.get(int(2))
o10256 = Order(id=10256, customer=user, orderdate=datetime(1996, 7, 15), shipper=shipper)
db.session.add(o10256)
user = User.query.get(int(35))
shipper = Shipper.query.get(int(3))
o10257 = Order(id=10257, customer=user, orderdate=datetime(1996, 7, 16), shipper=shipper)
db.session.add(o10257)
user = User.query.get(int(20))
shipper = Shipper.query.get(int(1))
o10258 = Order(id=10258, customer=user, orderdate=datetime(1996, 7, 17), shipper=shipper)
db.session.add(o10258)
user = User.query.get(int(13))
shipper = Shipper.query.get(int(3))
o10259 = Order(id=10259, customer=user, orderdate=datetime(1996, 7, 18), shipper=shipper)
db.session.add(o10259)
user = User.query.get(int(55))
shipper = Shipper.query.get(int(1))
o10260 = Order(id=10260, customer=user, orderdate=datetime(1996, 7, 19), shipper=shipper)
db.session.add(o10260)
user = User.query.get(int(61))
shipper = Shipper.query.get(int(2))
o10261 = Order(id=10261, customer=user, orderdate=datetime(1996, 7, 19), shipper=shipper)
db.session.add(o10261)
user = User.query.get(int(65))
shipper = Shipper.query.get(int(3))
o10262 = Order(id=10262, customer=user, orderdate=datetime(1996, 7, 22), shipper=shipper)
db.session.add(o10262)
user = User.query.get(int(20))
shipper = Shipper.query.get(int(3))
o10263 = Order(id=10263, customer=user, orderdate=datetime(1996, 7, 23), shipper=shipper)
db.session.add(o10263)
user = User.query.get(int(24))
shipper = Shipper.query.get(int(3))
o10264 = Order(id=10264, customer=user, orderdate=datetime(1996, 7, 24), shipper=shipper)
db.session.add(o10264)
user = User.query.get(int(7))
shipper = Shipper.query.get(int(1))
o10265 = Order(id=10265, customer=user, orderdate=datetime(1996, 7, 25), shipper=shipper)
db.session.add(o10265)
user = User.query.get(int(87))
shipper = Shipper.query.get(int(3))
o10266 = Order(id=10266, customer=user, orderdate=datetime(1996, 7, 26), shipper=shipper)
db.session.add(o10266)
user = User.query.get(int(25))
shipper = Shipper.query.get(int(1))
o10267 = Order(id=10267, customer=user, orderdate=datetime(1996, 7, 29), shipper=shipper)
db.session.add(o10267)
user = User.query.get(int(33))
shipper = Shipper.query.get(int(3))
o10268 = Order(id=10268, customer=user, orderdate=datetime(1996, 7, 30), shipper=shipper)
db.session.add(o10268)
user = User.query.get(int(89))
shipper = Shipper.query.get(int(1))
o10269 = Order(id=10269, customer=user, orderdate=datetime(1996, 7, 31), shipper=shipper)
db.session.add(o10269)
user = User.query.get(int(87))
shipper = Shipper.query.get(int(1))
o10270 = Order(id=10270, customer=user, orderdate=datetime(1996, 8, 1), shipper=shipper)
db.session.add(o10270)
user = User.query.get(int(75))
shipper = Shipper.query.get(int(2))
o10271 = Order(id=10271, customer=user, orderdate=datetime(1996, 8, 1), shipper=shipper)
db.session.add(o10271)
user = User.query.get(int(65))
shipper = Shipper.query.get(int(2))
o10272 = Order(id=10272, customer=user, orderdate=datetime(1996, 8, 2), shipper=shipper)
db.session.add(o10272)
user = User.query.get(int(63))
shipper = Shipper.query.get(int(3))
o10273 = Order(id=10273, customer=user, orderdate=datetime(1996, 8, 5), shipper=shipper)
db.session.add(o10273)
user = User.query.get(int(85))
shipper = Shipper.query.get(int(1))
o10274 = Order(id=10274, customer=user, orderdate=datetime(1996, 8, 6), shipper=shipper)
db.session.add(o10274)
user = User.query.get(int(49))
shipper = Shipper.query.get(int(1))
o10275 = Order(id=10275, customer=user, orderdate=datetime(1996, 8, 7), shipper=shipper)
db.session.add(o10275)
user = User.query.get(int(80))
shipper = Shipper.query.get(int(3))
o10276 = Order(id=10276, customer=user, orderdate=datetime(1996, 8, 8), shipper=shipper)
db.session.add(o10276)
user = User.query.get(int(52))
shipper = Shipper.query.get(int(3))
o10277 = Order(id=10277, customer=user, orderdate=datetime(1996, 8, 9), shipper=shipper)
db.session.add(o10277)
user = User.query.get(int(5))
shipper = Shipper.query.get(int(2))
o10278 = Order(id=10278, customer=user, orderdate=datetime(1996, 8, 12), shipper=shipper)
db.session.add(o10278)
user = User.query.get(int(44))
shipper = Shipper.query.get(int(2))
o10279 = Order(id=10279, customer=user, orderdate=datetime(1996, 8, 13), shipper=shipper)
db.session.add(o10279)
user = User.query.get(int(5))
shipper = Shipper.query.get(int(1))
o10280 = Order(id=10280, customer=user, orderdate=datetime(1996, 8, 14), shipper=shipper)
db.session.add(o10280)
user = User.query.get(int(69))
shipper = Shipper.query.get(int(1))
o10281 = Order(id=10281, customer=user, orderdate=datetime(1996, 8, 14), shipper=shipper)
db.session.add(o10281)
user = User.query.get(int(69))
shipper = Shipper.query.get(int(1))
o10282 = Order(id=10282, customer=user, orderdate=datetime(1996, 8, 15), shipper=shipper)
db.session.add(o10282)
user = User.query.get(int(46))
shipper = Shipper.query.get(int(3))
o10283 = Order(id=10283, customer=user, orderdate=datetime(1996, 8, 16), shipper=shipper)
db.session.add(o10283)
user = User.query.get(int(44))
shipper = Shipper.query.get(int(1))
o10284 = Order(id=10284, customer=user, orderdate=datetime(1996, 8, 19), shipper=shipper)
db.session.add(o10284)
user = User.query.get(int(63))
shipper = Shipper.query.get(int(2))
o10285 = Order(id=10285, customer=user, orderdate=datetime(1996, 8, 20), shipper=shipper)
db.session.add(o10285)
user = User.query.get(int(63))
shipper = Shipper.query.get(int(3))
o10286 = Order(id=10286, customer=user, orderdate=datetime(1996, 8, 21), shipper=shipper)
db.session.add(o10286)
user = User.query.get(int(67))
shipper = Shipper.query.get(int(3))
o10287 = Order(id=10287, customer=user, orderdate=datetime(1996, 8, 22), shipper=shipper)
db.session.add(o10287)
user = User.query.get(int(66))
shipper = Shipper.query.get(int(1))
o10288 = Order(id=10288, customer=user, orderdate=datetime(1996, 8, 23), shipper=shipper)
db.session.add(o10288)
user = User.query.get(int(11))
shipper = Shipper.query.get(int(3))
o10289 = Order(id=10289, customer=user, orderdate=datetime(1996, 8, 26), shipper=shipper)
db.session.add(o10289)
user = User.query.get(int(15))
shipper = Shipper.query.get(int(1))
o10290 = Order(id=10290, customer=user, orderdate=datetime(1996, 8, 27), shipper=shipper)
db.session.add(o10290)
user = User.query.get(int(61))
shipper = Shipper.query.get(int(2))
o10291 = Order(id=10291, customer=user, orderdate=datetime(1996, 8, 27), shipper=shipper)
db.session.add(o10291)
user = User.query.get(int(81))
shipper = Shipper.query.get(int(2))
o10292 = Order(id=10292, customer=user, orderdate=datetime(1996, 8, 28), shipper=shipper)
db.session.add(o10292)
user = User.query.get(int(80))
shipper = Shipper.query.get(int(3))
o10293 = Order(id=10293, customer=user, orderdate=datetime(1996, 8, 29), shipper=shipper)
db.session.add(o10293)
user = User.query.get(int(65))
shipper = Shipper.query.get(int(2))
o10294 = Order(id=10294, customer=user, orderdate=datetime(1996, 8, 30), shipper=shipper)
db.session.add(o10294)
user = User.query.get(int(85))
shipper = Shipper.query.get(int(2))
o10295 = Order(id=10295, customer=user, orderdate=datetime(1996, 9, 2), shipper=shipper)
db.session.add(o10295)
user = User.query.get(int(46))
shipper = Shipper.query.get(int(1))
o10296 = Order(id=10296, customer=user, orderdate=datetime(1996, 9, 3), shipper=shipper)
db.session.add(o10296)
user = User.query.get(int(7))
shipper = Shipper.query.get(int(2))
o10297 = Order(id=10297, customer=user, orderdate=datetime(1996, 9, 4), shipper=shipper)
db.session.add(o10297)
user = User.query.get(int(37))
shipper = Shipper.query.get(int(2))
o10298 = Order(id=10298, customer=user, orderdate=datetime(1996, 9, 5), shipper=shipper)
db.session.add(o10298)
user = User.query.get(int(67))
shipper = Shipper.query.get(int(2))
o10299 = Order(id=10299, customer=user, orderdate=datetime(1996, 9, 6), shipper=shipper)
db.session.add(o10299)
user = User.query.get(int(49))
shipper = Shipper.query.get(int(2))
o10300 = Order(id=10300, customer=user, orderdate=datetime(1996, 9, 9), shipper=shipper)
db.session.add(o10300)
user = User.query.get(int(86))
shipper = Shipper.query.get(int(2))
o10301 = Order(id=10301, customer=user, orderdate=datetime(1996, 9, 9), shipper=shipper)
db.session.add(o10301)
user = User.query.get(int(76))
shipper = Shipper.query.get(int(2))
o10302 = Order(id=10302, customer=user, orderdate=datetime(1996, 9, 10), shipper=shipper)
db.session.add(o10302)
user = User.query.get(int(30))
shipper = Shipper.query.get(int(2))
o10303 = Order(id=10303, customer=user, orderdate=datetime(1996, 9, 11), shipper=shipper)
db.session.add(o10303)
user = User.query.get(int(80))
shipper = Shipper.query.get(int(2))
o10304 = Order(id=10304, customer=user, orderdate=datetime(1996, 9, 12), shipper=shipper)
db.session.add(o10304)
user = User.query.get(int(55))
shipper = Shipper.query.get(int(3))
o10305 = Order(id=10305, customer=user, orderdate=datetime(1996, 9, 13), shipper=shipper)
db.session.add(o10305)
user = User.query.get(int(69))
shipper = Shipper.query.get(int(3))
o10306 = Order(id=10306, customer=user, orderdate=datetime(1996, 9, 16), shipper=shipper)
db.session.add(o10306)
user = User.query.get(int(48))
shipper = Shipper.query.get(int(2))
o10307 = Order(id=10307, customer=user, orderdate=datetime(1996, 9, 17), shipper=shipper)
db.session.add(o10307)
user = User.query.get(int(2))
shipper = Shipper.query.get(int(3))
o10308 = Order(id=10308, customer=user, orderdate=datetime(1996, 9, 18), shipper=shipper)
db.session.add(o10308)
user = User.query.get(int(37))
shipper = Shipper.query.get(int(1))
o10309 = Order(id=10309, customer=user, orderdate=datetime(1996, 9, 19), shipper=shipper)
db.session.add(o10309)
user = User.query.get(int(77))
shipper = Shipper.query.get(int(2))
o10310 = Order(id=10310, customer=user, orderdate=datetime(1996, 9, 20), shipper=shipper)
db.session.add(o10310)
user = User.query.get(int(18))
shipper = Shipper.query.get(int(3))
o10311 = Order(id=10311, customer=user, orderdate=datetime(1996, 9, 20), shipper=shipper)
db.session.add(o10311)
user = User.query.get(int(86))
shipper = Shipper.query.get(int(2))
o10312 = Order(id=10312, customer=user, orderdate=datetime(1996, 9, 23), shipper=shipper)
db.session.add(o10312)
user = User.query.get(int(63))
shipper = Shipper.query.get(int(2))
o10313 = Order(id=10313, customer=user, orderdate=datetime(1996, 9, 24), shipper=shipper)
db.session.add(o10313)
user = User.query.get(int(65))
shipper = Shipper.query.get(int(2))
o10314 = Order(id=10314, customer=user, orderdate=datetime(1996, 9, 25), shipper=shipper)
db.session.add(o10314)
user = User.query.get(int(38))
shipper = Shipper.query.get(int(2))
o10315 = Order(id=10315, customer=user, orderdate=datetime(1996, 9, 26), shipper=shipper)
db.session.add(o10315)
user = User.query.get(int(65))
shipper = Shipper.query.get(int(3))
o10316 = Order(id=10316, customer=user, orderdate=datetime(1996, 9, 27), shipper=shipper)
db.session.add(o10316)
user = User.query.get(int(48))
shipper = Shipper.query.get(int(1))
o10317 = Order(id=10317, customer=user, orderdate=datetime(1996, 9, 30), shipper=shipper)
db.session.add(o10317)
user = User.query.get(int(38))
shipper = Shipper.query.get(int(2))
o10318 = Order(id=10318, customer=user, orderdate=datetime(1996, 10, 1), shipper=shipper)
db.session.add(o10318)
user = User.query.get(int(80))
shipper = Shipper.query.get(int(3))
o10319 = Order(id=10319, customer=user, orderdate=datetime(1996, 10, 2), shipper=shipper)
db.session.add(o10319)
user = User.query.get(int(87))
shipper = Shipper.query.get(int(3))
o10320 = Order(id=10320, customer=user, orderdate=datetime(1996, 10, 3), shipper=shipper)
db.session.add(o10320)
user = User.query.get(int(38))
shipper = Shipper.query.get(int(2))
o10321 = Order(id=10321, customer=user, orderdate=datetime(1996, 10, 3), shipper=shipper)
db.session.add(o10321)
user = User.query.get(int(58))
shipper = Shipper.query.get(int(3))
o10322 = Order(id=10322, customer=user, orderdate=datetime(1996, 10, 4), shipper=shipper)
db.session.add(o10322)
user = User.query.get(int(39))
shipper = Shipper.query.get(int(1))
o10323 = Order(id=10323, customer=user, orderdate=datetime(1996, 10, 7), shipper=shipper)
db.session.add(o10323)
user = User.query.get(int(71))
shipper = Shipper.query.get(int(1))
o10324 = Order(id=10324, customer=user, orderdate=datetime(1996, 10, 8), shipper=shipper)
db.session.add(o10324)
user = User.query.get(int(39))
shipper = Shipper.query.get(int(3))
o10325 = Order(id=10325, customer=user, orderdate=datetime(1996, 10, 9), shipper=shipper)
db.session.add(o10325)
user = User.query.get(int(8))
shipper = Shipper.query.get(int(2))
o10326 = Order(id=10326, customer=user, orderdate=datetime(1996, 10, 10), shipper=shipper)
db.session.add(o10326)
user = User.query.get(int(24))
shipper = Shipper.query.get(int(1))
o10327 = Order(id=10327, customer=user, orderdate=datetime(1996, 10, 11), shipper=shipper)
db.session.add(o10327)
user = User.query.get(int(28))
shipper = Shipper.query.get(int(3))
o10328 = Order(id=10328, customer=user, orderdate=datetime(1996, 10, 14), shipper=shipper)
db.session.add(o10328)
user = User.query.get(int(75))
shipper = Shipper.query.get(int(2))
o10329 = Order(id=10329, customer=user, orderdate=datetime(1996, 10, 15), shipper=shipper)
db.session.add(o10329)
user = User.query.get(int(46))
shipper = Shipper.query.get(int(1))
o10330 = Order(id=10330, customer=user, orderdate=datetime(1996, 10, 16), shipper=shipper)
db.session.add(o10330)
user = User.query.get(int(9))
shipper = Shipper.query.get(int(1))
o10331 = Order(id=10331, customer=user, orderdate=datetime(1996, 10, 16), shipper=shipper)
db.session.add(o10331)
user = User.query.get(int(51))
shipper = Shipper.query.get(int(2))
o10332 = Order(id=10332, customer=user, orderdate=datetime(1996, 10, 17), shipper=shipper)
db.session.add(o10332)
user = User.query.get(int(87))
shipper = Shipper.query.get(int(3))
o10333 = Order(id=10333, customer=user, orderdate=datetime(1996, 10, 18), shipper=shipper)
db.session.add(o10333)
user = User.query.get(int(84))
shipper = Shipper.query.get(int(2))
o10334 = Order(id=10334, customer=user, orderdate=datetime(1996, 10, 21), shipper=shipper)
db.session.add(o10334)
user = User.query.get(int(37))
shipper = Shipper.query.get(int(2))
o10335 = Order(id=10335, customer=user, orderdate=datetime(1996, 10, 22), shipper=shipper)
db.session.add(o10335)
user = User.query.get(int(60))
shipper = Shipper.query.get(int(2))
o10336 = Order(id=10336, customer=user, orderdate=datetime(1996, 10, 23), shipper=shipper)
db.session.add(o10336)
user = User.query.get(int(25))
shipper = Shipper.query.get(int(3))
o10337 = Order(id=10337, customer=user, orderdate=datetime(1996, 10, 24), shipper=shipper)
db.session.add(o10337)
user = User.query.get(int(55))
shipper = Shipper.query.get(int(3))
o10338 = Order(id=10338, customer=user, orderdate=datetime(1996, 10, 25), shipper=shipper)
db.session.add(o10338)
user = User.query.get(int(51))
shipper = Shipper.query.get(int(2))
o10339 = Order(id=10339, customer=user, orderdate=datetime(1996, 10, 28), shipper=shipper)
db.session.add(o10339)
user = User.query.get(int(9))
shipper = Shipper.query.get(int(3))
o10340 = Order(id=10340, customer=user, orderdate=datetime(1996, 10, 29), shipper=shipper)
db.session.add(o10340)
user = User.query.get(int(73))
shipper = Shipper.query.get(int(3))
o10341 = Order(id=10341, customer=user, orderdate=datetime(1996, 10, 29), shipper=shipper)
db.session.add(o10341)
user = User.query.get(int(25))
shipper = Shipper.query.get(int(2))
o10342 = Order(id=10342, customer=user, orderdate=datetime(1996, 10, 30), shipper=shipper)
db.session.add(o10342)
user = User.query.get(int(44))
shipper = Shipper.query.get(int(1))
o10343 = Order(id=10343, customer=user, orderdate=datetime(1996, 10, 31), shipper=shipper)
db.session.add(o10343)
user = User.query.get(int(89))
shipper = Shipper.query.get(int(2))
o10344 = Order(id=10344, customer=user, orderdate=datetime(1996, 11, 1), shipper=shipper)
db.session.add(o10344)
user = User.query.get(int(63))
shipper = Shipper.query.get(int(2))
o10345 = Order(id=10345, customer=user, orderdate=datetime(1996, 11, 4), shipper=shipper)
db.session.add(o10345)
user = User.query.get(int(65))
shipper = Shipper.query.get(int(3))
o10346 = Order(id=10346, customer=user, orderdate=datetime(1996, 11, 5), shipper=shipper)
db.session.add(o10346)
user = User.query.get(int(21))
shipper = Shipper.query.get(int(3))
o10347 = Order(id=10347, customer=user, orderdate=datetime(1996, 11, 6), shipper=shipper)
db.session.add(o10347)
user = User.query.get(int(86))
shipper = Shipper.query.get(int(2))
o10348 = Order(id=10348, customer=user, orderdate=datetime(1996, 11, 7), shipper=shipper)
db.session.add(o10348)
user = User.query.get(int(75))
shipper = Shipper.query.get(int(1))
o10349 = Order(id=10349, customer=user, orderdate=datetime(1996, 11, 8), shipper=shipper)
db.session.add(o10349)
user = User.query.get(int(41))
shipper = Shipper.query.get(int(2))
o10350 = Order(id=10350, customer=user, orderdate=datetime(1996, 11, 11), shipper=shipper)
db.session.add(o10350)
user = User.query.get(int(20))
shipper = Shipper.query.get(int(1))
o10351 = Order(id=10351, customer=user, orderdate=datetime(1996, 11, 11), shipper=shipper)
db.session.add(o10351)
user = User.query.get(int(28))
shipper = Shipper.query.get(int(3))
o10352 = Order(id=10352, customer=user, orderdate=datetime(1996, 11, 12), shipper=shipper)
db.session.add(o10352)
user = User.query.get(int(59))
shipper = Shipper.query.get(int(3))
o10353 = Order(id=10353, customer=user, orderdate=datetime(1996, 11, 13), shipper=shipper)
db.session.add(o10353)
user = User.query.get(int(58))
shipper = Shipper.query.get(int(3))
o10354 = Order(id=10354, customer=user, orderdate=datetime(1996, 11, 14), shipper=shipper)
db.session.add(o10354)
user = User.query.get(int(4))
shipper = Shipper.query.get(int(1))
o10355 = Order(id=10355, customer=user, orderdate=datetime(1996, 11, 15), shipper=shipper)
db.session.add(o10355)
user = User.query.get(int(86))
shipper = Shipper.query.get(int(2))
o10356 = Order(id=10356, customer=user, orderdate=datetime(1996, 11, 18), shipper=shipper)
db.session.add(o10356)
user = User.query.get(int(46))
shipper = Shipper.query.get(int(3))
o10357 = Order(id=10357, customer=user, orderdate=datetime(1996, 11, 19), shipper=shipper)
db.session.add(o10357)
user = User.query.get(int(41))
shipper = Shipper.query.get(int(1))
o10358 = Order(id=10358, customer=user, orderdate=datetime(1996, 11, 20), shipper=shipper)
db.session.add(o10358)
user = User.query.get(int(72))
shipper = Shipper.query.get(int(3))
o10359 = Order(id=10359, customer=user, orderdate=datetime(1996, 11, 21), shipper=shipper)
db.session.add(o10359)
user = User.query.get(int(7))
shipper = Shipper.query.get(int(3))
o10360 = Order(id=10360, customer=user, orderdate=datetime(1996, 11, 22), shipper=shipper)
db.session.add(o10360)
# Seed the database with the Northwind-style dummy orders 10361-10443.
#
# Each row below is (order id, customer/User id, Shipper id, order date).
# This table-driven loop replaces the original long run of copy-pasted
# `user = ...; shipper = ...; o<N> = Order(...); db.session.add(o<N>)`
# statement groups while adding exactly the same Order rows, in exactly
# the same sequence, followed by the same single commit.
_ORDER_ROWS = [
    (10361, 63, 2, datetime(1996, 11, 22)),
    (10362, 9, 1, datetime(1996, 11, 25)),
    (10363, 17, 3, datetime(1996, 11, 26)),
    (10364, 19, 1, datetime(1996, 11, 26)),
    (10365, 3, 2, datetime(1996, 11, 27)),
    (10366, 29, 2, datetime(1996, 11, 28)),
    (10367, 83, 3, datetime(1996, 11, 28)),
    (10368, 20, 2, datetime(1996, 11, 29)),
    (10369, 75, 2, datetime(1996, 12, 2)),
    (10370, 14, 2, datetime(1996, 12, 3)),
    (10371, 41, 1, datetime(1996, 12, 3)),
    (10372, 62, 2, datetime(1996, 12, 4)),
    (10373, 37, 3, datetime(1996, 12, 5)),
    (10374, 91, 3, datetime(1996, 12, 5)),
    (10375, 36, 2, datetime(1996, 12, 6)),
    (10376, 51, 2, datetime(1996, 12, 9)),
    (10377, 72, 3, datetime(1996, 12, 9)),
    (10378, 24, 3, datetime(1996, 12, 10)),
    (10379, 61, 1, datetime(1996, 12, 11)),
    (10380, 37, 3, datetime(1996, 12, 12)),
    (10381, 46, 3, datetime(1996, 12, 12)),
    (10382, 20, 1, datetime(1996, 12, 13)),
    (10383, 4, 3, datetime(1996, 12, 16)),
    (10384, 5, 3, datetime(1996, 12, 16)),
    (10385, 75, 2, datetime(1996, 12, 17)),
    (10386, 21, 3, datetime(1996, 12, 18)),
    (10387, 70, 2, datetime(1996, 12, 18)),
    (10388, 72, 1, datetime(1996, 12, 19)),
    (10389, 10, 2, datetime(1996, 12, 20)),
    (10390, 20, 1, datetime(1996, 12, 23)),
    (10391, 17, 3, datetime(1996, 12, 23)),
    (10392, 59, 3, datetime(1996, 12, 24)),
    (10393, 71, 3, datetime(1996, 12, 25)),
    (10394, 36, 3, datetime(1996, 12, 25)),
    (10395, 35, 1, datetime(1996, 12, 26)),
    (10396, 25, 3, datetime(1996, 12, 27)),
    (10397, 60, 1, datetime(1996, 12, 27)),
    (10398, 71, 3, datetime(1996, 12, 30)),
    (10399, 83, 3, datetime(1996, 12, 31)),
    (10400, 19, 3, datetime(1997, 1, 1)),
    (10401, 65, 1, datetime(1997, 1, 1)),
    (10402, 20, 2, datetime(1997, 1, 2)),
    (10403, 20, 3, datetime(1997, 1, 3)),
    (10404, 49, 1, datetime(1997, 1, 3)),
    (10405, 47, 1, datetime(1997, 1, 6)),
    (10406, 62, 1, datetime(1997, 1, 7)),
    (10407, 56, 2, datetime(1997, 1, 7)),
    (10408, 23, 1, datetime(1997, 1, 8)),
    (10409, 54, 1, datetime(1997, 1, 9)),
    (10410, 10, 3, datetime(1997, 1, 10)),
    (10411, 10, 3, datetime(1997, 1, 10)),
    (10412, 87, 2, datetime(1997, 1, 13)),
    (10413, 41, 2, datetime(1997, 1, 14)),
    (10414, 21, 3, datetime(1997, 1, 14)),
    (10415, 36, 1, datetime(1997, 1, 15)),
    (10416, 87, 3, datetime(1997, 1, 16)),
    (10417, 73, 3, datetime(1997, 1, 16)),
    (10418, 63, 1, datetime(1997, 1, 17)),
    (10419, 68, 2, datetime(1997, 1, 20)),
    (10420, 88, 1, datetime(1997, 1, 21)),
    (10421, 61, 1, datetime(1997, 1, 21)),
    (10422, 27, 1, datetime(1997, 1, 22)),
    (10423, 31, 3, datetime(1997, 1, 23)),
    (10424, 51, 2, datetime(1997, 1, 23)),
    (10425, 41, 2, datetime(1997, 1, 24)),
    (10426, 29, 1, datetime(1997, 1, 27)),
    (10427, 59, 2, datetime(1997, 1, 27)),
    (10428, 66, 1, datetime(1997, 1, 28)),
    (10429, 37, 2, datetime(1997, 1, 29)),
    (10430, 20, 1, datetime(1997, 1, 30)),
    (10431, 10, 2, datetime(1997, 1, 30)),
    (10432, 75, 2, datetime(1997, 1, 31)),
    (10433, 60, 3, datetime(1997, 2, 3)),
    (10434, 24, 2, datetime(1997, 2, 3)),
    (10435, 16, 2, datetime(1997, 2, 4)),
    (10436, 7, 2, datetime(1997, 2, 5)),
    (10437, 87, 1, datetime(1997, 2, 5)),
    (10438, 79, 2, datetime(1997, 2, 6)),
    (10439, 51, 3, datetime(1997, 2, 7)),
    (10440, 71, 2, datetime(1997, 2, 10)),
    (10441, 55, 2, datetime(1997, 2, 10)),
    (10442, 20, 2, datetime(1997, 2, 11)),
    (10443, 66, 1, datetime(1997, 2, 12)),
]

for _order_id, _customer_id, _shipper_id, _orderdate in _ORDER_ROWS:
    # Look the related rows up first so the FK relationships are populated
    # through the ORM relationship attributes, exactly as the original did.
    user = User.query.get(int(_customer_id))
    shipper = Shipper.query.get(int(_shipper_id))
    db.session.add(Order(id=_order_id, customer=user,
                         orderdate=_orderdate, shipper=shipper))

# One commit for the whole batch, same as the original script.
db.session.commit()
print('Dummy Orders successfully added to database!')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,140
|
grbarker/Freyja
|
refs/heads/master
|
/populator_maker_products.py
|
# Raw Northwind-style product seed data, consumed verbatim by the generator
# loop below. Each tuple is:
#   (product id, productname, supplier id, category id, unit description, price)
# NOTE(review): "12 - 55S0 ml bottles" (Aniseed Syrup) looks like a typo for
# "550" in the original Northwind sample data -- confirm before changing it.
products_array = [
    (1,"Chais",1,1,"10 boxes x 20 bags",18),
    (2,"Chang",1,1,"24 - 12 oz bottles",19),
    (3,"Aniseed Syrup",1,2,"12 - 55S0 ml bottles",10),
    (4,"Chef Anton\'s Cajun Seasoning",2,2,"48 - 6 oz jars",22),
    (5,"Chef Anton\'s Gumbo Mix",2,2,"36 boxes",21.35),
    (6,"Grandma\'s Boysenberry Spread",3,2,"12 - 8 oz jars",25),
    (7,"Uncle Bob\'s Organic Dried Pears",3,7,"12 - 1 lb pkgs.",30),
    (8,"Northwoods Cranberry Sauce",3,2,"12 - 12 oz jars",40),
    (9,"Mishi Kobe Niku",4,6,"18 - 500 g pkgs.",97),
    (10,"Ikura",4,8,"12 - 200 ml jars",31),
    (11,"Queso Cabrales",5,4,"1 kg pkg.",21),
    (12,"Queso Manchego La Pastora",5,4,"10 - 500 g pkgs.",38),
    (13,"Konbu",6,8,"2 kg box",6),
    (14,"Tofu",6,7,"40 - 100 g pkgs.",23.25),
    (15,"Genen Shouyu",6,2,"24 - 250 ml bottles",15.5),
    (16,"Pavlova",7,3,"32 - 500 g boxes",17.45),
    (17,"Alice Mutton",7,6,"20 - 1 kg tins",39),
    (18,"Carnarvon Tigers",7,8,"16 kg pkg.",62.5),
    (19,"Teatime Chocolate Biscuits",8,3,"10 boxes x 12 pieces",9.2),
    (20,"Sir Rodney\'s Marmalade",8,3,"30 gift boxes",81),
    (21,"Sir Rodney\'s Scones",8,3,"24 pkgs. x 4 pieces",10),
    (22,"Gustaf\'s Knäckebröd",9,5,"24 - 500 g pkgs.",21),
    (23,"Tunnbröd",9,5,"12 - 250 g pkgs.",9),
    (24,"Guaraná Fantástica",10,1,"12 - 355 ml cans",4.5),
    (25,"NuNuCa Nuß-Nougat-Creme",11,3,"20 - 450 g glasses",14),
    (26,"Gumbär Gummibärchen",11,3,"100 - 250 g bags",31.23),
    (27,"Schoggi Schokolade",11,3,"100 - 100 g pieces",43.9),
    (28,"Rössle Sauerkraut",12,7,"25 - 825 g cans",45.6),
    (29,"Thüringer Rostbratwurst",12,6,"50 bags x 30 sausgs.",123.79),
    (30,"Nord-Ost Matjeshering",13,8,"10 - 200 g glasses",25.89),
    (31,"Gorgonzola Telino",14,4,"12 - 100 g pkgs",12.5),
    (32,"Mascarpone Fabioli",14,4,"24 - 200 g pkgs.",32),
    (33,"Geitost",15,4,"500 g",2.5),
    (34,"Sasquatch Ale",16,1,"24 - 12 oz bottles",14),
    (35,"Steeleye Stout",16,1,"24 - 12 oz bottles",18),
    (36,"Inlagd Sill",17,8,"24 - 250 g jars",19),
    (37,"Gravad lax",17,8,"12 - 500 g pkgs.",26),
    (38,"Côte de Blaye",18,1,"12 - 75 cl bottles",263.5),
    (39,"Chartreuse verte",18,1,"750 cc per bottle",18),
    (40,"Boston Crab Meat",19,8,"24 - 4 oz tins",18.4),
    (41,"Jack\'s New England Clam Chowder",19,8,"12 - 12 oz cans",9.65),
    (42,"Singaporean Hokkien Fried Mee",20,5,"32 - 1 kg pkgs.",14),
    (43,"Ipoh Coffee",20,1,"16 - 500 g tins",46),
    (44,"Gula Malacca",20,2,"20 - 2 kg bags",19.45),
    (45,"Røgede sild",21,8,"1k pkg.",9.5),
    (46,"Spegesild",21,8,"4 - 450 g glasses",12),
    (47,"Zaanse koeken",22,3,"10 - 4 oz boxes",9.5),
    (48,"Chocolade",22,3,"10 pkgs.",12.75),
    (49,"Maxilaku",23,3,"24 - 50 g pkgs.",20),
    (50,"Valkoinen suklaa",23,3,"12 - 100 g bars",16.25),
    (51,"Manjimup Dried Apples",24,7,"50 - 300 g pkgs.",53),
    (52,"Filo Mix",24,5,"16 - 2 kg boxes",7),
    (53,"Perth Pasties",24,6,"48 pieces",32.8),
    (54,"Tourtière",25,6,"16 pies",7.45),
    (55,"Pâté chinois",25,6,"24 boxes x 2 pies",24),
    (56,"Gnocchi di nonna Alice",26,5,"24 - 250 g pkgs.",38),
    (57,"Ravioli Angelo",26,5,"24 - 250 g pkgs.",19.5),
    (58,"Escargots de Bourgogne",27,8,"24 pieces",13.25),
    (59,"Raclette Courdavault",28,4,"5 kg pkg.",55),
    (60,"Camembert Pierrot",28,4,"15 - 300 g rounds",34),
    (61,"Sirop d\'érable",29,2,"24 - 500 ml bottles",28.5),
    (62,"Tarte au sucre",29,3,"48 pies",49.3),
    (63,"Vegie-spread",7,2,"15 - 625 g jars",43.9),
    (64,"Wimmers gute Semmelknödel",12,5,"20 bags x 4 pieces",33.25),
    (65,"Louisiana Fiery Hot Pepper Sauce",2,2,"32 - 8 oz bottles",21.05),
    (66,"Louisiana Hot Spiced Okra",2,2,"24 - 8 oz jars",17),
    (67,"Laughing Lumberjack Lager",16,1,"24 - 12 oz bottles",14),
    (68,"Scottish Longbreads",8,3,"10 boxes x 8 pieces",12.5),
    (69,"Gudbrandsdalsost",15,4,"10 kg pkg.",36),
    (70,"Outback Lager",7,1,"24 - 355 ml bottles",15),
    (71,"Fløtemysost",15,4,"10 - 500 g pkgs.",21.5),
    (72,"Mozzarella di Giovanni",14,4,"24 - 200 g pkgs.",34.8),
    (73,"Röd Kaviar",17,8,"24 - 150 g jars",15),
    (74,"Longlife Tofu",4,7,"5 kg pkg.",10),
    (75,"Rhönbräu Klosterbier",12,1,"24 - 0.5 l bottles",7.75),
    (76,"Lakkalikööri",23,1,"500 ml ",18),
    (77,"Original Frankfurter grüne Soße",12,2,"12 boxes",13)
]
# Print a ready-to-paste populator statement block for every product row.
# For each (id, name, supplier_id, category_id, unit, price) tuple the
# output is the same four generated lines (plus a trailing blank line)
# that the original triple-quoted template produced.
_PRODUCT_TEMPLATE = (
    'supplier = Supplier.query.get(int({}))\n'
    'category = Category.query.get(int({}))\n'
    'p{} = Product(productname="{}", supplier=supplier, category=category, '
    'unit="{}", price={})\n'
    'db.session.add(p{})\n'
)

for prod_id, prod_name, supplier_id, category_id, unit, price in products_array:
    print(_PRODUCT_TEMPLATE.format(supplier_id, category_id, prod_id,
                                   prod_name, unit, price, prod_id))

# Close the generated script with a single commit and some spacing.
print('db.session.commit()\n\n\n\n\n\n')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,141
|
grbarker/Freyja
|
refs/heads/master
|
/freyja.py
|
from app import create_app, db
from app.models import User, Post, Category, Employee, Order, OrderDetail, Product, Shipper, Supplier, Review
# Application instance exported by this module (e.g. as `freyja:app`).
app = create_app()


@app.shell_context_processor
def make_shell_context():
    """Expose the db handle and every model class in `flask shell` sessions."""
    return dict(
        db=db,
        User=User,
        Post=Post,
        Category=Category,
        Employee=Employee,
        OrderDetail=OrderDetail,
        Order=Order,
        Product=Product,
        Shipper=Shipper,
        Supplier=Supplier,
        Review=Review,
    )
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,142
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/9f614adf3ffa_add_back_tables_after_altering_them.py
|
"""Add back tables after altering them.
Revision ID: 9f614adf3ffa
Revises: 7180ba27f86a
Create Date: 2018-11-28 00:29:05.680512
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9f614adf3ffa'        # this migration's unique id
down_revision = '7180ba27f86a'   # parent revision in the migration chain
branch_labels = None             # no named branches
depends_on = None                # no cross-branch dependencies
def upgrade():
    """Create the webshop tables.

    Order matters: the standalone tables (category, customer, employee,
    shipper, supplier) are created first, then the tables that reference
    them through foreign keys (order, product, order_detail).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Product categories; category names must be unique (unique index below).
    op.create_table('category',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('categoryname', sa.String(length=255), nullable=True),
    # NOTE(review): sa.Text(length=500) is only a length hint; many
    # backends ignore it -- confirm the intended limit.
    sa.Column('description', sa.Text(length=500), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_category_categoryname'), 'category', ['categoryname'], unique=True)
    # Customers with their contact and address details; every text column
    # gets a non-unique lookup index.
    op.create_table('customer',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('customername', sa.String(length=255), nullable=True),
    sa.Column('contactname', sa.String(length=255), nullable=True),
    sa.Column('address', sa.String(length=255), nullable=True),
    sa.Column('city', sa.String(length=255), nullable=True),
    sa.Column('postalcode', sa.String(length=255), nullable=True),
    sa.Column('country', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_customer_address'), 'customer', ['address'], unique=False)
    op.create_index(op.f('ix_customer_city'), 'customer', ['city'], unique=False)
    op.create_index(op.f('ix_customer_contactname'), 'customer', ['contactname'], unique=False)
    op.create_index(op.f('ix_customer_country'), 'customer', ['country'], unique=False)
    op.create_index(op.f('ix_customer_customername'), 'customer', ['customername'], unique=False)
    op.create_index(op.f('ix_customer_postalcode'), 'customer', ['postalcode'], unique=False)
    # Employees who can be attached to orders.
    op.create_table('employee',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('lastname', sa.String(length=255), nullable=True),
    sa.Column('firstname', sa.String(length=255), nullable=True),
    sa.Column('birthdate', sa.DateTime(), nullable=True),
    sa.Column('notes', sa.Text(length=1000), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_employee_firstname'), 'employee', ['firstname'], unique=False)
    op.create_index(op.f('ix_employee_lastname'), 'employee', ['lastname'], unique=False)
    # Shipping companies.
    op.create_table('shipper',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('shippername', sa.String(length=255), nullable=True),
    sa.Column('phone', sa.String(length=25), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_shipper_phone'), 'shipper', ['phone'], unique=False)
    op.create_index(op.f('ix_shipper_shippername'), 'shipper', ['shippername'], unique=False)
    # Product suppliers with contact details.
    op.create_table('supplier',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('suppliername', sa.String(length=255), nullable=True),
    sa.Column('contactname', sa.String(length=255), nullable=True),
    sa.Column('address', sa.String(length=255), nullable=True),
    sa.Column('city', sa.String(length=255), nullable=True),
    sa.Column('postalcode', sa.String(length=255), nullable=True),
    sa.Column('country', sa.String(length=255), nullable=True),
    sa.Column('phone', sa.String(length=25), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_supplier_address'), 'supplier', ['address'], unique=False)
    op.create_index(op.f('ix_supplier_city'), 'supplier', ['city'], unique=False)
    op.create_index(op.f('ix_supplier_contactname'), 'supplier', ['contactname'], unique=False)
    op.create_index(op.f('ix_supplier_country'), 'supplier', ['country'], unique=False)
    op.create_index(op.f('ix_supplier_phone'), 'supplier', ['phone'], unique=False)
    op.create_index(op.f('ix_supplier_postalcode'), 'supplier', ['postalcode'], unique=False)
    op.create_index(op.f('ix_supplier_suppliername'), 'supplier', ['suppliername'], unique=False)
    # Orders reference customer, employee and shipper.
    # NOTE(review): 'order' is a reserved SQL word; SQLAlchemy should quote
    # the identifier when emitting DDL -- verify on the target backend.
    op.create_table('order',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('customer_id', sa.Integer(), nullable=True),
    sa.Column('employee_id', sa.Integer(), nullable=True),
    sa.Column('orderdate', sa.Date(), nullable=True),
    sa.Column('shipper_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['customer_id'], ['customer.id'], ),
    sa.ForeignKeyConstraint(['employee_id'], ['employee.id'], ),
    sa.ForeignKeyConstraint(['shipper_id'], ['shipper.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Products reference supplier and category.
    op.create_table('product',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('productname', sa.String(length=255), nullable=True),
    sa.Column('supplier_id', sa.Integer(), nullable=True),
    sa.Column('category_id', sa.Integer(), nullable=True),
    sa.Column('unit', sa.Integer(), nullable=True),
    sa.Column('price', sa.Numeric(), nullable=True),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ),
    sa.ForeignKeyConstraint(['supplier_id'], ['supplier.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_product_productname'), 'product', ['productname'], unique=False)
    # Order line items; created last since they reference order and product.
    op.create_table('order_detail',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('order_id', sa.Integer(), nullable=True),
    sa.Column('product_id', sa.Integer(), nullable=True),
    sa.Column('quantity', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['order_id'], ['order.id'], ),
    sa.ForeignKeyConstraint(['product_id'], ['product.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop everything created by upgrade().

    Tables are dropped child-first (order_detail, product, order before the
    tables they reference) so no foreign-key constraint is violated; each
    table's indexes are dropped before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('order_detail')
    op.drop_index(op.f('ix_product_productname'), table_name='product')
    op.drop_table('product')
    op.drop_table('order')
    op.drop_index(op.f('ix_supplier_suppliername'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_postalcode'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_phone'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_country'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_contactname'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_city'), table_name='supplier')
    op.drop_index(op.f('ix_supplier_address'), table_name='supplier')
    op.drop_table('supplier')
    op.drop_index(op.f('ix_shipper_shippername'), table_name='shipper')
    op.drop_index(op.f('ix_shipper_phone'), table_name='shipper')
    op.drop_table('shipper')
    op.drop_index(op.f('ix_employee_lastname'), table_name='employee')
    op.drop_index(op.f('ix_employee_firstname'), table_name='employee')
    op.drop_table('employee')
    op.drop_index(op.f('ix_customer_postalcode'), table_name='customer')
    op.drop_index(op.f('ix_customer_customername'), table_name='customer')
    op.drop_index(op.f('ix_customer_country'), table_name='customer')
    op.drop_index(op.f('ix_customer_contactname'), table_name='customer')
    op.drop_index(op.f('ix_customer_city'), table_name='customer')
    op.drop_index(op.f('ix_customer_address'), table_name='customer')
    op.drop_table('customer')
    op.drop_index(op.f('ix_category_categoryname'), table_name='category')
    op.drop_table('category')
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,143
|
grbarker/Freyja
|
refs/heads/master
|
/pop_maker_us_cats_emps_ships_sups.py
|
from datetime import datetime
# Emit the import header of the generated populator script; the loops below
# print the body that follows it.
print('''from datetime import datetime
from app import db
from app.models import User, Category, Supplier, Shipper, Employee\n\n\n''')
# Raw Northwind-style customer seed data consumed by the generator loop below.
# Each tuple: (customername, contactname, address, city, postalcode, country).
# NOTE(review): some entries carry trailing spaces ("Lino Rodriguez ",
# "Frankfurt a.M. ") and one empty postal code (Cork) -- presumably copied
# verbatim from the Northwind sample data; confirm before cleaning up.
customers_array = [
    ("Alfreds Futterkiste","Maria Anders","Obere Str. 57","Berlin","12209","Germany"),
    ("Ana Trujillo Emparedados y helados","Ana Trujillo","Avda. de la Constitución 2222","México D.F.","05021","Mexico"),
    ("Antonio Moreno Taquería","Antonio Moreno","Mataderos 2312","México D.F.","05023","Mexico"),
    ("Around the Horn","Thomas Hardy","120 Hanover Sq.","London","WA1 1DP","UK"),
    ("Berglunds snabbköp","Christina Berglund","Berguvsvägen 8","Luleå","S-958 22","Sweden"),
    ("Blauer See Delikatessen","Hanna Moos","Forsterstr. 57","Mannheim","68306","Germany"),
    ("Blondel père et fils","Frédérique Citeaux","24, place Kléber","Strasbourg","67000","France"),
    ("Bólido Comidas preparadas","Martín Sommer","C/ Araquil, 67","Madrid","28023","Spain"),
    ("Bon app'","Laurence Lebihans","12, rue des Bouchers","Marseille","13008","France"),
    ("Bottom-Dollar Marketse","Elizabeth Lincoln","23 Tsawassen Blvd.","Tsawassen","T2F 8M4","Canada"),
    ("B's Beverages","Victoria Ashworth","Fauntleroy Circus","London","EC2 5NT","UK"),
    ("Cactus Comidas para llevar","Patricio Simpson","Cerrito 333","Buenos Aires","1010","Argentina"),
    ("Centro comercial Moctezuma","Francisco Chang","Sierras de Granada 9993","México D.F.","05022","Mexico"),
    ("Chop-suey Chinese","Yang Wang","Hauptstr. 29","Bern","3012","Switzerland"),
    ("Comércio Mineiro","Pedro Afonso","Av. dos Lusíadas, 23","São Paulo","05432-043","Brazil"),
    ("Consolidated Holdings","Elizabeth Brown","Berkeley Gardens 12 Brewery ","London","WX1 6LT","UK"),
    ("Drachenblut Delikatessend","Sven Ottlieb","Walserweg 21","Aachen","52066","Germany"),
    ("Du monde entier","Janine Labrune","67, rue des Cinquante Otages","Nantes","44000","France"),
    ("Eastern Connection","Ann Devon","35 King George","London","WX3 6FW","UK"),
    ("Ernst Handel","Roland Mendel","Kirchgasse 6","Graz","8010","Austria"),
    ("Familia Arquibaldo","Aria Cruz","Rua Orós, 92","São Paulo","05442-030","Brazil"),
    ("FISSA Fabrica Inter. Salchichas S.A.","Diego Roel","C/ Moralzarzal, 86","Madrid","28034","Spain"),
    ("Folies gourmandes","Martine Rancé","184, chaussée de Tournai","Lille","59000","France"),
    ("Folk och fä HB","Maria Larsson","Åkergatan 24","Bräcke","S-844 67","Sweden"),
    ("Frankenversand","Peter Franken","Berliner Platz 43","München","80805","Germany"),
    ("France restauration","Carine Schmitt","54, rue Royale","Nantes","44000","France"),
    ("Franchi S.p.A.","Paolo Accorti","Via Monte Bianco 34","Torino","10100","Italy"),
    ("Furia Bacalhau e Frutos do Mar","Lino Rodriguez ","Jardim das rosas n. 32","Lisboa","1675","Portugal"),
    ("Galería del gastrónomo","Eduardo Saavedra","Rambla de Cataluña, 23","Barcelona","08022","Spain"),
    ("Godos Cocina Típica","José Pedro Freyre","C/ Romero, 33","Sevilla","41101","Spain"),
    ("Gourmet Lanchonetes","André Fonseca","Av. Brasil, 442","Campinas","04876-786","Brazil"),
    ("Great Lakes Food Market","Howard Snyder","2732 Baker Blvd.","Eugene","97403","USA"),
    ("GROSELLA-Restaurante","Manuel Pereira","5ª Ave. Los Palos Grandes","Caracas","1081","Venezuela"),
    ("Hanari Carnes","Mario Pontes","Rua do Paço, 67","Rio de Janeiro","05454-876","Brazil"),
    ("HILARIÓN-Abastos","Carlos Hernández","Carrera 22 con Ave. Carlos Soublette #8-35","San Cristóbal","5022","Venezuela"),
    ("Hungry Coyote Import Store","Yoshi Latimer","City Center Plaza 516 Main St.","Elgin","97827","USA"),
    ("Hungry Owl All-Night Grocers","Patricia McKenna","8 Johnstown Road","Cork","","Ireland"),
    ("Island Trading","Helen Bennett","Garden House Crowther Way","Cowes","PO31 7PJ","UK"),
    ("Königlich Essen","Philip Cramer","Maubelstr. 90","Brandenburg","14776","Germany"),
    ("La corne d'abondance","Daniel Tonini","67, avenue de l'Europe","Versailles","78000","France"),
    ("La maison d'Asie","Annette Roulet","1 rue Alsace-Lorraine","Toulouse","31000","France"),
    ("Laughing Bacchus Wine Cellars","Yoshi Tannamuri","1900 Oak St.","Vancouver","V3F 2K1","Canada"),
    ("Lazy K Kountry Store","John Steel","12 Orchestra Terrace","Walla Walla","99362","USA"),
    ("Lehmanns Marktstand","Renate Messner","Magazinweg 7","Frankfurt a.M. ","60528","Germany"),
    ("Let's Stop N Shop","Jaime Yorres","87 Polk St. Suite 5","San Francisco","94117","USA"),
    ("LILA-Supermercado","Carlos González","Carrera 52 con Ave. Bolívar #65-98 Llano Largo","Barquisimeto","3508","Venezuela"),
    ("LINO-Delicateses","Felipe Izquierdo","Ave. 5 de Mayo Porlamar","I. de Margarita","4980","Venezuela"),
    ("Lonesome Pine Restaurant","Fran Wilson","89 Chiaroscuro Rd.","Portland","97219","USA"),
    ("Magazzini Alimentari Riuniti","Giovanni Rovelli","Via Ludovico il Moro 22","Bergamo","24100","Italy"),
    ("Maison Dewey","Catherine Dewey","Rue Joseph-Bens 532","Bruxelles","B-1180","Belgium"),
    ("Mère Paillarde","Jean Fresnière","43 rue St. Laurent","Montréal","H1J 1C3","Canada"),
    ("Morgenstern Gesundkost","Alexander Feuer","Heerstr. 22","Leipzig","04179","Germany"),
    ("North/South","Simon Crowther","South House 300 Queensbridge","London","SW7 1RZ","UK"),
    ("Océano Atlántico Ltda.","Yvonne Moncada","Ing. Gustavo Moncada 8585 Piso 20-A","Buenos Aires","1010","Argentina"),
    ("Old World Delicatessen","Rene Phillips","2743 Bering St.","Anchorage","99508","USA"),
    ("Ottilies Käseladen","Henriette Pfalzheim","Mehrheimerstr. 369","Köln","50739","Germany"),
    ("Paris spécialités","Marie Bertrand","265, boulevard Charonne","Paris","75012","France"),
    ("Pericles Comidas clásicas","Guillermo Fernández","Calle Dr. Jorge Cash 321","México D.F.","05033","Mexico"),
    ("Piccolo und mehr","Georg Pipps","Geislweg 14","Salzburg","5020","Austria"),
    ("Princesa Isabel Vinhoss","Isabel de Castro","Estrada da saúde n. 58","Lisboa","1756","Portugal"),
    ("Que Delícia","Bernardo Batista","Rua da Panificadora, 12","Rio de Janeiro","02389-673","Brazil"),
    ("Queen Cozinha","Lúcia Carvalho","Alameda dos Canàrios, 891","São Paulo","05487-020","Brazil"),
    ("QUICK-Stop","Horst Kloss","Taucherstraße 10","Cunewalde","01307","Germany"),
    ("Rancho grande","Sergio Gutiérrez","Av. del Libertador 900","Buenos Aires","1010","Argentina"),
    ("Rattlesnake Canyon Grocery","Paula Wilson","2817 Milton Dr.","Albuquerque","87110","USA"),
    ("Reggiani Caseifici","Maurizio Moroni","Strada Provinciale 124","Reggio Emilia","42100","Italy"),
    ("Ricardo Adocicados","Janete Limeira","Av. Copacabana, 267","Rio de Janeiro","02389-890","Brazil"),
    ("Richter Supermarkt","Michael Holz","Grenzacherweg 237","Genève","1203","Switzerland"),
    ("Romero y tomillo","Alejandra Camino","Gran Vía, 1","Madrid","28001","Spain"),
    ("Santé Gourmet","Jonas Bergulfsen","Erling Skakkes gate 78","Stavern","4110","Norway"),
    ("Save-a-lot Markets","Jose Pavarotti","187 Suffolk Ln.","Boise","83720","USA"),
    ("Seven Seas Imports","Hari Kumar","90 Wadhurst Rd.","London","OX15 4NB","UK"),
    ("Simons bistro","Jytte Petersen","Vinbæltet 34","København","1734","Denmark"),
    ("Spécialités du monde","Dominique Perrier","25, rue Lauriston","Paris","75016","France"),
    ("Split Rail Beer & Ale","Art Braunschweiger","P.O. Box 555","Lander","82520","USA"),
    ("Suprêmes délices","Pascale Cartrain","Boulevard Tirou, 255","Charleroi","B-6000","Belgium"),
    ("The Big Cheese","Liz Nixon","89 Jefferson Way Suite 2","Portland","97201","USA"),
    ("The Cracker Box","Liu Wong","55 Grizzly Peak Rd.","Butte","59801","USA"),
    ("Toms Spezialitäten","Karin Josephs","Luisenstr. 48","Münster","44087","Germany"),
    ("Tortuga Restaurante","Miguel Angel Paolino","Avda. Azteca 123","México D.F.","05033","Mexico"),
    ("Tradição Hipermercados","Anabela Domingues","Av. Inês de Castro, 414","São Paulo","05634-030","Brazil"),
    ("Trail's Head Gourmet Provisioners","Helvetius Nagy","722 DaVinci Blvd.","Kirkland","98034","USA"),
    ("Vaffeljernet","Palle Ibsen","Smagsløget 45","Århus","8200","Denmark"),
    ("Victuailles en stock","Mary Saveley","2, rue du Commerce","Lyon","69004","France"),
    ("Vins et alcools Chevalier","Paul Henriot","59 rue de l'Abbaye","Reims","51100","France"),
    ("Die Wandernde Kuh","Rita Müller","Adenauerallee 900","Stuttgart","70563","Germany"),
    ("Wartian Herkku","Pirkko Koskitalo","Torikatu 38","Oulu","90110","Finland"),
    ("Wellington Importadora","Paula Parente","Rua do Mercado, 12","Resende","08737-363","Brazil"),
    ("White Clover Markets","Karl Jablonski","305 - 14th Ave. S. Suite 3B","Seattle","98128","USA"),
    ("Wilman Kala","Matti Karttunen","Keskuskatu 45","Helsinki","21240","Finland"),
    ("Wolski","Zbyszek","ul. Filtrowa 68","Walla","01-012","Poland")
]
# Generate `User(...)` population statements from the raw customers data.
#
# BUGFIX: the original handled only the three-word contact-name case, so
# every two-word name (e.g. "Maria Anders") fell into the `else` branch
# and was emitted with `lastname=None`. Two-word names now map to
# firstname + lastname; one-word names ("Zbyszek") keep both None.
# `enumerate` also replaces the O(n^2) `customers_array.index(u)` lookups.
for i, u in enumerate(customers_array):
    # Split the contact name into first / optional middle / optional last.
    name_list = u[1].split()
    firstname = name_list[0]
    if len(name_list) > 2:
        middlenameinsert = 'middlename="' + name_list[1] + '"'
        lastnameinsert = 'lastname="' + name_list[2] + '"'
    elif len(name_list) == 2:
        middlenameinsert = 'middlename=None'
        lastnameinsert = 'lastname="' + name_list[1] + '"'
    else:
        middlenameinsert = 'middlename=None'
        lastnameinsert = 'lastname=None'
    # Make up a username from the contact name and a dummy email from the
    # customer (company) name; the first name doubles as the password.
    username = u[1].replace(" ", "")
    email = u[0].replace(" ", "") + "@example.com"
    print('''u{} = User(username="{}",customername="{}",{},{},firstname="{}",email="{}",address="{}",city="{}",postalcode="{}",country="{}")\nu{}.set_password("{}")\ndb.session.add(u{})'''
        .format(
            i,
            username, u[0], lastnameinsert, middlenameinsert,
            firstname, email, u[2], u[3], u[4],
            u[5], i, firstname, i
        )
    )
print('db.session.commit()\n\n\n')
# Northwind supplier seed rows:
# (id, suppliername, contactname, address, city, postalcode, country, phone)
suppliers_array = [
    (1,"Exotic Liquid","Charlotte Cooper","49 Gilbert St.","Londona","EC1 4SD","UK","(171) 555-2222"),
    (2,"New Orleans Cajun Delights","Shelley Burke","P.O. Box 78934","New Orleans","70117","USA","(100) 555-4822"),
    (3,"Grandma Kelly's Homestead","Regina Murphy","707 Oxford Rd.","Ann Arbor","48104","USA","(313) 555-5735"),
    (4,"Tokyo Traders","Yoshi Nagase","9-8 Sekimai Musashino-shi","Tokyo","100","Japan","(03) 3555-5011"),
    (5,"Cooperativa de Quesos 'Las Cabras'","Antonio del Valle Saavedra ","Calle del Rosal 4","Oviedo","33007","Spain","(98) 598 76 54"),
    (6,"Mayumi's","Mayumi Ohno","92 Setsuko Chuo-ku","Osaka","545","Japan","(06) 431-7877"),
    (7,"Pavlova, Ltd.","Ian Devling","74 Rose St. Moonie Ponds","Melbourne","3058","Australia","(03) 444-2343"),
    (8,"Specialty Biscuits, Ltd.","Peter Wilson","29 King's Way","Manchester","M14 GSD","UK","(161) 555-4448"),
    (9,"PB Knäckebröd AB","Lars Peterson","Kaloadagatan 13","Göteborg","S-345 67","Sweden ","031-987 65 43"),
    (10,"Refrescos Americanas LTDA","Carlos Diaz","Av. das Americanas 12.890","São Paulo","5442","Brazil","(11) 555 4640"),
    (11,"Heli Süßwaren GmbH & Co. KG","Petra Winkler","Tiergartenstraße 5","Berlin","10785","Germany","(010) 9984510"),
    (12,"Plutzer Lebensmittelgroßmärkte AG","Martin Bein","Bogenallee 51","Frankfurt","60439","Germany","(069) 992755"),
    (13,"Nord-Ost-Fisch Handelsgesellschaft mbH","Sven Petersen","Frahmredder 112a","Cuxhaven","27478","Germany","(04721) 8713"),
    (14,"Formaggi Fortini s.r.l.","Elio Rossi","Viale Dante, 75","Ravenna","48100","Italy","(0544) 60323"),
    (15,"Norske Meierier","Beate Vileid","Hatlevegen 5","Sandvika","1320","Norway","(0)2-953010"),
    (16,"Bigfoot Breweries","Cheryl Saylor","3400 - 8th Avenue Suite 210","Bend","97101","USA","(503) 555-9931"),
    (17,"Svensk Sjöföda AB","Michael Björn","Brovallavägen 231","Stockholm","S-123 45","Sweden","08-123 45 67"),
    (18,"Aux joyeux ecclésiastiques","Guylène Nodier","203, Rue des Francs-Bourgeois","Paris","75004","France","(1) 03.83.00.68"),
    (19,"New England Seafood Cannery","Robb Merchant","Order Processing Dept. 2100 Paul Revere Blvd.","Boston","02134","USA","(617) 555-3267"),
    (20,"Leka Trading","Chandra Leka","471 Serangoon Loop, Suite #402","Singapore","0512","Singapore","555-8787"),
    (21,"Lyngbysild","Niels Petersen","Lyngbysild Fiskebakken 10","Lyngby","2800","Denmark","43844108"),
    (22,"Zaanse Snoepfabriek","Dirk Luchte","Verkoop Rijnweg 22","Zaandam","9999 ZZ","Netherlands","(12345) 1212"),
    (23,"Karkki Oy","Anne Heikkonen","Valtakatu 12","Lappeenranta","53120","Finland","(953) 10956"),
    (24,"G'day, Mate","Wendy Mackenzie","170 Prince Edward Parade Hunter's Hill","Sydney","2042","Australia","(02) 555-5914"),
    (25,"Ma Maison","Jean-Guy Lauzon","2960 Rue St. Laurent","Montréal","H1J 1C3","Canada","(514) 555-9022"),
    (26,"Pasta Buttini s.r.l.","Giovanni Giudici","Via dei Gelsomini, 153","Salerno","84100","Italy","(089) 6547665"),
    (27,"Escargots Nouveaux","Marie Delamare","22, rue H. Voiron","Montceau","71300","France","85.57.00.07"),
    (28,"Gai pâturage","Eliane Noz","Bat. B 3, rue des Alpes","Annecy","74000","France","38.76.98.06"),
    (29,"Forêts d'érables","Chantal Goulet","148 rue Chasseur","Ste-Hyacinthe","J2S 7S8","Canada","(514) 555-2955")
]
# Emit Supplier-creation statements; the row's own id numbers the variable.
for sup_id, sup_name, contact, address, city, postal, country, phone in suppliers_array:
    print(f'su{sup_id} = Supplier(suppliername="{sup_name}",contactname="{contact}",address="{address}",city="{city}",postalcode="{postal}",country="{country}",phone="{phone}")\ndb.session.add(su{sup_id})')
print('db.session.commit()\n\n\n')
# Northwind employee seed rows:
# (lastname, firstname, birthdate ISO string, photo filename, notes)
employees_array = [
    ("Davolio","Nancy","1968-12-08","EmpID1.pic","""Education includes a BA in psychology from Colorado State University. She also completed (The Art of the Cold Call). Nancy is a member of 'Toastmasters International'."""),
    ("Fuller","Andrew","1952-02-19","EmpID2.pic","""Andrew received his BTS commercial and a Ph.D. in international marketing from the University of Dallas. He is fluent in French and Italian and reads German. He joined the company as a sales representative, was promoted to sales manager and was then named vice president of sales. Andrew is a member of the Sales Management Roundtable, the Seattle Chamber of Commerce, and the Pacific Rim Importers Association."""),
    ("Leverling","Janet","1963-08-30","EmpID3.pic","""Janet has a BS degree in chemistry from Boston College). She has also completed a certificate program in food retailing management. Janet was hired as a sales associate and was promoted to sales representative."""),
    ("Peacock","Margaret","1958-09-19","EmpID4.pic","""Margaret holds a BA in English literature from Concordia College and an MA from the American Institute of Culinary Arts. She was temporarily assigned to the London office before returning to her permanent post in Seattle."""),
    ("Buchanan","Steven","1955-03-04","EmpID5.pic","""Steven Buchanan graduated from St. Andrews University, Scotland, with a BSC degree. Upon joining the company as a sales representative, he spent 6 months in an orientation program at the Seattle office and then returned to his permanent post in London, where he was promoted to sales manager. Mr. Buchanan has completed the courses 'Successful Telemarketing' and 'International Sales Management'. He is fluent in French."""),
    ("Suyama","Michael","1963-07-02","EmpID6.pic","""Michael is a graduate of Sussex University (MA, economics) and the University of California at Los Angeles (MBA, marketing). He has also taken the courses 'Multi-Cultural Selling' and 'Time Management for the Sales Professional'. He is fluent in Japanese and can read and write French, Portuguese, and Spanish."""),
    ("King","Robert","1960-05-29","EmpID7.pic","""Robert King served in the Peace Corps and traveled extensively before completing his degree in English at the University of Michigan and then joining the company. After completing a course entitled 'Selling in Europe', he was transferred to the London office."""),
    ("Callahan","Laura","1958-01-09","EmpID8.pic","""Laura received a BA in psychology from the University of Washington. She has also completed a course in business French. She reads and writes French."""),
    ("Dodsworth","Anne","1969-07-02","EmpID9.pic","""Anne has a BA degree in English from St. Lawrence College. She is fluent in French and German."""),
    ("West","Adam","1928-09-19","EmpID10.pic","""An old chum.""")
]
# Convert each employee's ISO birth-date string into a datetime object,
# keeping the rest of the row intact.
formatted_date_employees_array = []
for e in employees_array:
    year, month, day = e[2].split("-")
    formatted_date_employees_array.append(
        (e[0], e[1], datetime(int(year), int(month), int(day)), e[3], e[4]))
# Emit Employee-creation statements; enumerate() replaces the original
# O(n) formatted_date_employees_array.index(e) lookup on every row.
for idx, e in enumerate(formatted_date_employees_array):
    print('e{} = Employee(lastname="{}",firstname="{}",notes="{}")\ndb.session.add(e{})'
          .format(idx, e[0], e[1], e[4], idx))
print('db.session.commit()\n\n\n')
# Northwind category seed rows: (categoryname, description)
categories_array = [
    ("Beverages","Soft drinks, coffees, teas, beers, and ales"),
    ("Condiments","Sweet and savory sauces, relishes, spreads, and seasonings"),
    ("Confections","Desserts, candies, and sweet breads"),
    ("Dairy Products","Cheeses"),
    ("Grains/Cereals","Breads, crackers, pasta, and cereal"),
    ("Meat/Poultry","Prepared meats"),
    ("Produce","Dried fruit and bean curd"),
    ("Seafood","Seaweed and fish")
]
# Emit Category-creation statements; enumerate() replaces the original
# O(n) categories_array.index(c) lookup on every iteration.
for idx, c in enumerate(categories_array):
    print('c{} = Category(categoryname="{}",description="{}")\ndb.session.add(c{})'
          .format(idx, c[0], c[1], idx))
print('db.session.commit()\n\n\n')
# Northwind shipper seed rows: (id, shippername, phone)
shippers_array = [
    (1,"Speedy Express","(503) 555-9831"),
    (2,"United Package","(503) 555-3199"),
    (3,"Federal Shipping","(503) 555-9931")
]
# Emit Shipper-creation statements; the row's own id numbers the variable.
for shipper_id, shipper_name, phone in shippers_array:
    print(f's{shipper_id} = Shipper(shippername="{shipper_name}",phone="{phone}")\ndb.session.add(s{shipper_id})')
print('db.session.commit()\n\n')
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,144
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/6c9d78373dab_add_review_table_and_relationships_to_.py
|
"""Add Review table and relationships to Product and User table.
Revision ID: 6c9d78373dab
Revises: b8bcef009fb0
Create Date: 2019-01-08 17:19:41.947487
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6c9d78373dab'
down_revision = 'b8bcef009fb0'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: create the ``review`` table (rating, review
    text, comments, FKs to ``user`` and ``product``) plus an index on
    ``review.rating``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('review',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('rating', sa.Integer(), nullable=True),
    sa.Column('review', sa.Text(length=1000), nullable=True),
    sa.Column('comments', sa.Text(length=300), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('product_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['product_id'], ['product.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_review_rating'), 'review', ['rating'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ``review`` table and its rating index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_review_rating'), table_name='review')
    op.drop_table('review')
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,145
|
grbarker/Freyja
|
refs/heads/master
|
/migrations/versions/81162fe5d987_add_first_last_name_fields_to_user_table.py
|
"""Add first/last name fields to user table.
Revision ID: 81162fe5d987
Revises: 4e8beae024e9
Create Date: 2018-11-28 22:14:00.933976
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '81162fe5d987'
down_revision = '4e8beae024e9'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add indexed ``firstname``/``lastname``
    columns to the ``user`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('firstname', sa.String(length=255), nullable=True))
    op.add_column('user', sa.Column('lastname', sa.String(length=255), nullable=True))
    op.create_index(op.f('ix_user_firstname'), 'user', ['firstname'], unique=False)
    op.create_index(op.f('ix_user_lastname'), 'user', ['lastname'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the name indexes and columns from ``user``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_user_lastname'), table_name='user')
    op.drop_index(op.f('ix_user_firstname'), table_name='user')
    op.drop_column('user', 'lastname')
    op.drop_column('user', 'firstname')
    # ### end Alembic commands ###
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,146
|
grbarker/Freyja
|
refs/heads/master
|
/app/main/routes.py
|
##Form code initially taken from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-ii-templates
##then altered as necessary to fit the needs of the project
import collections
from sqlalchemy.sql import text
from sqlalchemy import create_engine, desc, func
from statistics import mean, median
from flask import render_template, flash, redirect, url_for, request, session, current_app, g
from sqlalchemy import asc, desc
from werkzeug.urls import url_parse
from datetime import datetime
from flask_login import current_user, login_user, logout_user, login_required
from app import db
from app.main import bp
from app.models import User, Post, Employee, Product, Review
from app.main.forms import EditProfileForm, PostForm, SortForm, SearchForm
@bp.route('/product/<id>', methods=['GET', 'POST'])
def product(id):
    """Product detail page.

    Shows the product's mean and median rating, the products most often
    ordered by customers who bought this product, and the products most
    often appearing in the same orders ("paired" products).
    """
    product = Product.query.filter_by(id=id).first()
    ratings = [review.rating for review in product.reviews]
    rating = round(mean(ratings), 1)
    med = median(ratings)
    name = product.productname.capitalize()
    category = product.category
    # Fix: the original rebound the loop variable to the route-level
    # ``product`` name inside the nested loops, so the paired-products
    # pass below iterated an arbitrary product's order details and the
    # ``!= product.id`` comparison used the wrong product. Inner names
    # are now distinct.
    # Products ordered by the customers who ordered this product.
    products = []
    for od in product.orderdetails:
        for order in od.order.customer.orders.all():
            for detail in order.orderdetails.all():
                products.append(detail.product)
    counted_products = collections.Counter(products).most_common(6)
    # Products bought together with this product (in the same orders).
    pps = []
    for od in product.orderdetails:
        for detail in od.order.orderdetails.all():
            paired = detail.product
            if paired.id != product.id:
                pps.append(paired)
    counted_pps = collections.Counter(pps).most_common(6)
    # Not paginating the related products for now: both lists are capped
    # at 6 entries by most_common(), so pagination is unnecessary yet.
    return render_template('product.html', title=name, product=product, name=name,
                           rating=rating, category=category, products=counted_products,
                           pps=counted_pps, median=med)
@bp.route('/products', methods=['GET', 'POST'])
def products():
    """Paginated product listing with a sort selector.

    ``sort`` query arg: 1 Featured, 2 Top Rated, 3 Price: Low to High,
    4 Price: High to Low, 5 Newest. POSTing the form redirects back here
    with the chosen sort.
    """
    form = SortForm()
    page = request.args.get('page', 1, type=int)
    sort = request.args.get('sort', 1, type=int)
    top_rated = False
    # Move the active sort to the front so the SelectField pre-selects it.
    # Built with two comprehensions instead of the original
    # remove-while-iterating mutation, which is fragile.
    choices = [(1, 'Featured'), (2, 'Top Rated'), (3, 'Price: Low to High'), (4, 'Price: High to Low'), (5, 'Newest')]
    active = [c for c in choices if c[0] == sort]
    form.sort_type.choices = active + [c for c in choices if c[0] != sort]
    if sort == 2:
        top_rated = True
        # Average rating per product, joined back onto Product.
        rs = Review.query.\
            with_entities(
                func.avg(Review.rating).label('average'),
                Review.product_id.label('product_id')).\
            group_by(Review.product_id).subquery()
        products = db.session.query(Product, rs).\
            join(rs, Product.id == rs.c.product_id).\
            order_by(desc(rs.c.average)).paginate(page, 24, False)
    elif sort == 3:
        products = Product.query.order_by(asc(Product.price)).paginate(page, 24, False)
    elif sort == 4:
        products = Product.query.order_by(desc(Product.price)).paginate(page, 24, False)
    elif sort == 5:
        products = Product.query.order_by(desc(Product.created)).paginate(page, 24, False)
    else:
        # Featured (sort == 1) and any unrecognized sort value; the
        # original left ``products`` unbound (UnboundLocalError) for
        # unknown sorts.
        products = Product.query.paginate(page, 24, False)
    next_url = url_for('main.products', page=products.next_num, sort=sort) \
        if products.has_next else None
    prev_url = url_for('main.products', page=products.prev_num, sort=sort) \
        if products.has_prev else None
    if form.validate_on_submit():
        sort = form.sort_type.data
        return redirect(url_for('main.products', sort=sort))
    return render_template('products.html', title='Products',
                           products=products.items, next_url=next_url,
                           prev_url=prev_url, form=form, top_rated=top_rated)
@bp.route('/', methods=['GET', 'POST'])
@bp.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Home feed: accept a new post, then show followed users' posts."""
    form = PostForm()
    if form.validate_on_submit():
        new_post = Post(body=form.post.data, author=current_user)
        db.session.add(new_post)
        db.session.commit()
        flash('Your post is now live!')
        return redirect(url_for('main.index'))
    page_num = request.args.get('page', 1, type=int)
    pagination = current_user.followed_posts().paginate(
        page_num, current_app.config['POSTS_PER_PAGE'], False)
    next_url = None
    if pagination.has_next:
        next_url = url_for('main.index', page=pagination.next_num)
    prev_url = None
    if pagination.has_prev:
        prev_url = url_for('main.index', page=pagination.prev_num)
    return render_template('index.html', title='Home', form=form,
                           posts=pagination.items, next_url=next_url,
                           prev_url=prev_url)
@bp.route('/explore')
@login_required
def explore():
    """Global feed: every post from every user, newest first."""
    page_num = request.args.get('page', 1, type=int)
    pagination = Post.query.order_by(Post.timestamp.desc()).paginate(
        page_num, current_app.config['POSTS_PER_PAGE'], False)
    next_url = None
    if pagination.has_next:
        next_url = url_for('main.explore', page=pagination.next_num)
    prev_url = None
    if pagination.has_prev:
        prev_url = url_for('main.explore', page=pagination.prev_num)
    return render_template("index.html", title='Explore', posts=pagination.items,
                           next_url=next_url, prev_url=prev_url)
@bp.route('/user/<username>')
@login_required
def user(username):
    """Profile page for *username*: their posts, newest first (404 if unknown)."""
    profile = User.query.filter_by(username=username).first_or_404()
    page_num = request.args.get('page', 1, type=int)
    pagination = profile.posts.order_by(Post.timestamp.desc()).paginate(
        page_num, current_app.config['POSTS_PER_PAGE'], False)
    next_url = None
    if pagination.has_next:
        next_url = url_for('main.user', username=profile.username, page=pagination.next_num)
    prev_url = None
    if pagination.has_prev:
        prev_url = url_for('main.user', username=profile.username, page=pagination.prev_num)
    return render_template('user.html', user=profile, posts=pagination.items,
                           next_url=next_url, prev_url=prev_url)
##Taken from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-vi-profile-page-and-avatars
@bp.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the logged-in user change their username and bio."""
    form = EditProfileForm(current_user.username)
    if form.validate_on_submit():
        current_user.username = form.username.data
        current_user.about_me = form.about_me.data
        db.session.commit()
        flash('Your changes have been saved.')
        return redirect(url_for('main.edit_profile'))
    if request.method == 'GET':
        # Pre-populate the form with the current profile values.
        form.username.data = current_user.username
        form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', title='Edit Profile',
                           form=form)
##Next two pulled from https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-viii-followers
@bp.route('/follow/<username>')
@login_required
def follow(username):
    """Make the current user follow *username* (guards: unknown user, self)."""
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('User {} not found.'.format(username))
        return redirect(url_for('main.index'))
    if target == current_user:
        flash('You cannot follow yourself!')
        return redirect(url_for('main.user', username=username))
    current_user.follow(target)
    db.session.commit()
    flash('You are following {}!'.format(username))
    return redirect(url_for('main.user', username=username))
@bp.route('/unfollow/<username>')
@login_required
def unfollow(username):
    """Make the current user unfollow *username* (guards: unknown user, self)."""
    target = User.query.filter_by(username=username).first()
    if target is None:
        flash('User {} not found.'.format(username))
        return redirect(url_for('main.index'))
    if target == current_user:
        flash('You cannot unfollow yourself!')
        return redirect(url_for('main.user', username=username))
    current_user.unfollow(target)
    db.session.commit()
    flash('You are not following {}.'.format(username))
    return redirect(url_for('main.user', username=username))
@bp.route('/search')
@login_required
def search():
    """Full-text post search; the query lives in g.search_form (set in
    before_request for every request)."""
    if not g.search_form.validate():
        return redirect(url_for('main.explore'))
    page = request.args.get('page', 1, type=int)
    # Post.search returns the page of matching posts plus the total hit count.
    posts, total = Post.search(g.search_form.q.data, page,
                               current_app.config['POSTS_PER_PAGE'])
    # A next page exists only while the total exceeds what has been shown.
    next_url = url_for('main.search', q=g.search_form.q.data, page=page + 1) \
        if total > page * current_app.config['POSTS_PER_PAGE'] else None
    prev_url = url_for('main.search', q=g.search_form.q.data, page=page - 1) \
        if page > 1 else None
    return render_template('search.html', title='Search', posts=posts,
                           next_url=next_url, prev_url=prev_url)
@bp.before_app_request
def before_request():
    """Run before every request: refresh the user's last_seen timestamp
    and expose the search form to all templates via flask.g."""
    if current_user.is_authenticated:
        current_user.last_seen = datetime.utcnow()
        db.session.commit()
    g.search_form = SearchForm()
|
{"/app/main/forms.py": ["/app/models.py"], "/app/auth/forms.py": ["/app/models.py"], "/db_populator5in1.py": ["/app/models.py"], "/db_populator_dummy_posts.py": ["/app/models.py"], "/db_populator_products.py": ["/app/models.py"], "/db_populator_orderdetails.py": ["/app/models.py"], "/db_populator_orders.py": ["/app/models.py"], "/freyja.py": ["/app/models.py"], "/app/main/routes.py": ["/app/models.py", "/app/main/forms.py"]}
|
5,156
|
Hanlen520/base_image
|
refs/heads/master
|
/baseImage/exceptions.py
|
# -*- coding: utf-8 -*-
class BaseError(Exception):
    """Base class for all exceptions raised while handling BaseImage."""

    def __init__(self, message="", *args, **kwargs):
        # Forward the message to Exception.__init__ so str(exc) carries it;
        # the original never called it, leaving str(exc) empty.
        super().__init__(message)
        self.message = message

    def __repr__(self):
        return repr(self.message)
class NoImageDataError(BaseError):
    """Raised when an operation needs image data but none is stored."""
class WriteImageError(BaseError):
    """Raised when storing image data fails (e.g. unsupported input type)."""
class TransformError(BaseError):
    """Raised when converting image data between GPU and CPU formats fails."""
class ReadImageError(BaseError):
    """Raised when an image file is missing or cannot be decoded."""
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,157
|
Hanlen520/base_image
|
refs/heads/master
|
/baseImage/utils.py
|
import os
import time
import cv2
import numpy as np
from .exceptions import ReadImageError
def check_file(fileName: str) -> bool:
    """Return True if *fileName* refers to an existing regular file.

    The original wrapped the argument in '{}'.format(...), which is a
    no-op for the declared str type and has been removed.
    """
    return os.path.isfile(fileName)
def check_image_valid(image):
    """Return True when *image* holds data with at least one non-zero value."""
    return image is not None and bool(image.any())
def read_image(filename: str, flags: int = cv2.IMREAD_COLOR):
    """Read an image file into a numpy array (strengthened cv2.imread).

    np.fromfile + cv2.imdecode is used instead of cv2.imread so that
    paths containing non-ASCII characters also work.

    :param filename: path of the image file
    :param flags: cv2 imread flags controlling the decode
    :return: the decoded image (numpy.ndarray)
    :raises ReadImageError: if the file is missing or cannot be decoded
    """
    if check_file(filename) is False:
        # Fix: the original message ended with a stray doubled quote.
        raise ReadImageError("File not found in path:'{}'".format(filename))
    img = cv2.imdecode(np.fromfile(filename, dtype=np.uint8), flags)
    if check_image_valid(img):
        return img
    # Fix: the original passed the format arguments to the exception
    # constructor instead of str.format(), so the message kept its
    # literal '{}' placeholders.
    raise ReadImageError('cv2 decode Error, path:{}, flags={}'.format(filename, flags))
def bytes_2_img(byte) -> np.ndarray:
    """Decode raw encoded image bytes into a cv2-readable numpy array.

    :param byte: encoded image bytes (e.g. PNG/JPEG file contents)
    :return: the decoded image
    :raises ValueError: if the bytes cannot be decoded as an image
    """
    img = cv2.imdecode(np.array(bytearray(byte)), 1)
    if img is None:
        raise ValueError('decode bytes to image error, param=\n\'{}\''.format(byte))
    return img
class auto_increment(object):
    """Callable counter: successive calls return 1, 2, 3, ..."""

    def __init__(self):
        # Value the next call will return.
        self._next = 1

    def __call__(self):
        value = self._next
        self._next = value + 1
        return value
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,158
|
Hanlen520/base_image
|
refs/heads/master
|
/setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
# Packaging metadata for the baseImage distribution
# (published via `python setup.py sdist` + twine).
setup(
    name='baseImage',
    version='1.0.5',
    author='hakaboom',
    author_email='1534225986@qq.com',
    license='Apache License 2.0',
    description='This is a secondary package of OpenCV,for manage image data',
    url='https://github.com/hakaboom/base_image',
    packages=['baseImage'],
    # NOTE(review): opencv (cv2) and numpy are imported by the package but
    # are not listed here — confirm whether they should be declared.
    install_requires=['colorama>=0.4.4',
                      "loguru>=0.5.3",
                      "pydantic",
                      ],
)
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,159
|
Hanlen520/base_image
|
refs/heads/master
|
/main.py
|
"""
python setup.py sdist
twine upload dist/*
"""
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,160
|
Hanlen520/base_image
|
refs/heads/master
|
/baseImage/__init__.py
|
# -*- coding: utf-8 -*-
from .base_image import IMAGE
from .coordinate import Rect, Point, Size
import cv2
name = 'base_image'
def create(img=None, flags=cv2.IMREAD_COLOR, path=''):
    """Factory helper: build an IMAGE from *img*.

    :param img: path str / bytes / numpy.ndarray / cv2.cuda_GpuMat, or
                None for an empty wrapper
    :param flags: cv2 imread flags used when *img* is a file path
    :param path: default directory prefix for file reads/writes
    :return: a new IMAGE instance
    """
    return IMAGE(img, flags, path)
__all__ = ['create', 'Rect', 'Point', 'Size', 'IMAGE']
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,161
|
Hanlen520/base_image
|
refs/heads/master
|
/baseImage/base_image.py
|
#! usr/bin/python
# -*- coding:utf-8 -*-
import cv2
from .coordinate import Rect
from .utils import read_image, bytes_2_img, auto_increment
from .exceptions import NoImageDataError, WriteImageError, TransformError
import numpy as np
class _image(object):
    """Holds one image either as a numpy.ndarray ('cpu') or a
    cv2.cuda_GpuMat ('gpu'), converting between the two lazily."""

    def __init__(self, img=None, flags=cv2.IMREAD_COLOR, path=''):
        """
        Base constructor.

        :param img: image data (path str / bytes / numpy.ndarray /
                    cv2.cuda_GpuMat); None leaves the wrapper empty
        :param flags: cv2 imread flags used when *img* is a path
        :param path: default directory prefix for reading/writing files
        :return: None
        """
        self.tmp_path = path
        self.image_data = None
        if img is not None:
            self.imwrite(img, flags)

    def save2path(self, path=None):
        """
        Write the image out to a file.

        :param path: target file path; defaults to the instance path
        :return: None
        """
        path = path or self.path
        cv2.imwrite(path, self.imread())

    def imwrite(self, img, flags: int = cv2.IMREAD_COLOR):
        """
        Store image data into the wrapper.

        :param img: path str / bytes / numpy.ndarray / cv2.cuda_GpuMat
        :param flags: cv2 imread flags used when *img* is a path
        :return: None
        :raises WriteImageError: for unsupported input types
        """
        if isinstance(img, str):
            # Paths are resolved relative to the instance's tmp_path prefix.
            self.image_data = read_image('{}{}'.format(self.tmp_path, img), flags)
        elif isinstance(img, bytes):
            self.image_data = bytes_2_img(img)
        elif isinstance(img, np.ndarray):
            self.image_data = img.copy()
        elif isinstance(img, cv2.cuda_GpuMat):
            self.image_data = img.clone()
        elif isinstance(img, _image):
            raise TypeError('Please use the clone function')
        else:
            raise WriteImageError('Unknown params, type:{}, img={} '.format(type(img), img))

    def imread(self) -> np.ndarray:
        """
        Return the image data, converting to the CPU format if needed.

        :return: image data (type: numpy.ndarray)
        :raises NoImageDataError: when the wrapper is empty
        """
        if self.image_data is not None:
            if self.type == 'cpu':
                return self.image_data
            else:
                self.transform_cpu()
                return self.image_data
        else:
            raise NoImageDataError('No Image Data in variable')

    def download(self) -> cv2.cuda_GpuMat:
        """
        Return the image data, converting to the GPU format if needed.

        NOTE(review): the name suggests GPU->CPU transfer, but this
        method actually *uploads* to a cuda_GpuMat when needed.

        :return: image data (type: cuda_GpuMat)
        :raises NoImageDataError: when the wrapper is empty
        """
        if self.image_data is not None:
            if self.type == 'gpu':
                return self.image_data
            else:
                self.transform_gpu()
                return self.image_data
        else:
            raise NoImageDataError('No Image Data in variable')

    def clean_image(self):
        """
        Drop the cached image data.

        :return: None
        """
        self.image_data = None

    @property
    def shape(self) -> tuple:
        """
        Image rows, columns and channel count.

        :return: (rows, cols, channels)
        """
        if self.type == 'cpu':
            return self.imread().shape
        else:
            return self.download().size()[::-1] + (self.download().channels(),)

    @property
    def size(self) -> tuple:
        """
        Image rows and columns.

        NOTE(review): the CPU branch assumes a 3-D (color) array; for a
        2-D grayscale ndarray, shape[:-1] would drop a real dimension.

        :return: (rows, cols)
        """
        if self.type == 'cpu':
            return self.imread().shape[:-1]
        else:
            return self.download().size()[::-1]

    def clone(self):
        """
        Return a copy of this image as a new IMAGE.

        :return: IMAGE
        """
        # Fix: the original passed self.path as the second positional
        # argument of IMAGE(...), where IMAGE.__init__ consumed it as
        # ``flags``; pass it by keyword so it reaches ``path``.
        if self.type == 'cpu':
            return IMAGE(self.imread(), path=self.path)
        else:
            return IMAGE(self.download(), path=self.path)

    @property
    def path(self):
        """
        Default directory prefix used when reading/writing files.

        :return: tmp_path
        """
        return self.tmp_path

    def transform_gpu(self):
        """
        Convert the cached image data to cv2.cuda_GpuMat in place.

        :return: None
        :raises TransformError: for unsupported cached types
        """
        img = self.image_data
        if isinstance(img, np.ndarray):
            gpu_mat = cv2.cuda_GpuMat()
            gpu_mat.upload(self.imread())
            self.imwrite(gpu_mat)
        elif isinstance(img, cv2.cuda_GpuMat):
            pass
        else:
            raise TransformError('transform Error, img type={}'.format(type(img)))

    def transform_cpu(self):
        """
        Convert the cached image data to numpy.ndarray in place.

        :return: None
        :raises TransformError: for unsupported cached types
        """
        img = self.image_data
        if isinstance(img, cv2.cuda_GpuMat):
            img = img.download()
            self.imwrite(img)
        elif isinstance(img, np.ndarray):
            pass
        else:
            raise TransformError('transform Error, img type={}'.format(type(img)))

    @property
    def type(self):
        """
        Storage format of the cached image data.

        :return: 'cpu' for numpy.ndarray, 'gpu' for cuda_GpuMat
                 (None when the wrapper is empty)
        """
        if isinstance(self.image_data, np.ndarray):
            return 'cpu'
        elif isinstance(self.image_data, cv2.cuda_GpuMat):
            return 'gpu'
class IMAGE(_image):
SHOW_INDEX = auto_increment()
def imshow(self, title: str = None):
"""
以GUI显示图片
:param title: cv窗口的名称, 不填写会自动分配
:return: None
"""
title = str(title or self.SHOW_INDEX())
cv2.namedWindow(title, cv2.WINDOW_KEEPRATIO)
cv2.imshow(title, self.imread())
def rotate(self, angle: int = 90, clockwise: bool = True):
"""
旋转图片
:param angle: 旋转角度, 默认为90
:param clockwise: True-顺时针旋转, False-逆时针旋转
:return: self
"""
img = self.imread()
if clockwise:
angle = 360 - angle
rows, cols, _ = img.shape
center = (cols / 2, rows / 2)
mask = img.copy()
mask[:, :] = 255
M = cv2.getRotationMatrix2D(center, angle, 1)
top_right = np.array((cols, 0)) - np.array(center)
bottom_right = np.array((cols, rows)) - np.array(center)
top_right_after_rot = M[0:2, 0:2].dot(top_right)
bottom_right_after_rot = M[0:2, 0:2].dot(bottom_right)
new_width = max(int(abs(bottom_right_after_rot[0] * 2) + 0.5), int(abs(top_right_after_rot[0] * 2) + 0.5))
new_height = max(int(abs(top_right_after_rot[1] * 2) + 0.5), int(abs(bottom_right_after_rot[1] * 2) + 0.5))
offset_x, offset_y = (new_width - cols) / 2, (new_height - rows) / 2
M[0, 2] += offset_x
M[1, 2] += offset_y
self.imwrite(cv2.warpAffine(img, M, (new_width, new_height)))
return self
def crop_image(self, rect):
"""
区域范围截图,并将截取的区域构建新的IMAGE
:param rect: 需要截图的范围,可以是Rect/[x,y,width,height]/(x,y,width,height)
:return: 截取的区域
"""
img = self.imread()
height, width = self.size
if isinstance(rect, (list, tuple)) and len(rect) == 4:
rect = Rect(*rect)
elif isinstance(rect, Rect):
pass
else:
raise ValueError('unknown rect: type={}, rect={}'.format(type(rect), rect))
if not Rect(0, 0, width, height).contains(rect):
raise OverflowError('Rect不能超出屏幕 rect={}, tl={}, br={}'.format(rect, rect.tl, rect.br))
# 获取在图像中的实际有效区域:
x_min, y_min = int(rect.tl.x), int(rect.tl.y)
x_max, y_max = int(rect.br.x), int(rect.br.y)
return IMAGE(img[y_min:y_max, x_min:x_max])
def binarization(self):
"""
使用大津法将图片二值化,并返回新的IMAGE
:return: new IMAGE
"""
gray_img = self.cvtColor(dst=cv2.COLOR_BGR2GRAY)
if self.type == 'cpu':
retval, dst = cv2.threshold(gray_img, 0, 255, cv2.THRESH_OTSU)
return IMAGE(dst)
else:
# cuda.threshold 不支持大津法
retval, dst = cv2.threshold(gray_img.download(), 0, 255, cv2.THRESH_OTSU)
img = cv2.cuda_GpuMat()
img.upload(dst)
return IMAGE(img)
def rectangle(self, rect: Rect):
"""
在图像上画出矩形
:param rect: 需要截图的范围,可以是Rect/[x,y,width,height]/(x,y,width,height)
:return: None
"""
pt1 = rect.tl
pt2 = rect.br
cv2.rectangle(self.imread(), (pt1.x, pt1.y), (pt2.x, pt2.y), (0, 255, 0), 2)
def resize(self, w, h):
    """
    Resize the image to the given dimensions.

    :param w: target width
    :param h: target height
    :return: self (allows method chaining)
    """
    if self.type == 'cpu':
        img = cv2.resize(self.imread(), (int(w), int(h)))
    else:
        img = cv2.cuda.resize(self.download(), (int(w), int(h)))
    self.imwrite(img)
    return self
def cv2_to_base64(self):
    """
    Encode the image as PNG and return the data base64-encoded.

    :return: base64-encoded PNG image data (bytes)
    :raises ValueError: if PNG encoding fails
    """
    import base64  # local import: only needed by this method
    # BUGFIX: the original returned cv2.imencode's raw (retval, buffer)
    # tuple and never applied base64, despite the function's name and
    # docstring promising base64 data.
    retval, buf = cv2.imencode('.png', self.imread())
    if not retval:
        raise ValueError('PNG encoding failed')
    return base64.b64encode(buf)
def cvtColor(self, dst):
    """
    Convert the image's colour space.

    :param dst: target colour-conversion code (e.g. cv2.COLOR_BGR2GRAY)
    :return: cuda_GpuMat or numpy.ndarray with converted colours
    """
    if self.type != 'cpu':
        # GPU-backed image: convert on the device.
        return cv2.cuda.cvtColor(self.download(), dst)
    return cv2.cvtColor(self.imread(), dst)
def rgb_2_gray(self):
    # Convert to grayscale. Despite "rgb" in the name, the conversion code
    # used is BGR->GRAY (OpenCV's default channel order).
    return self.cvtColor(cv2.COLOR_BGR2GRAY)
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,162
|
Hanlen520/base_image
|
refs/heads/master
|
/baseImage/coordinate.py
|
#! usr/bin/python
# -*- coding:utf-8 -*-
"""
坐标系转换---从原来叉叉助手框架转移过来的
包含了锚点模式,适用于各种分辨率,刘海屏的坐标适配
"""
from typing import Union
from loguru import logger
from pydantic import BaseModel
class display_type(BaseModel):
    """
    Display description used by Anchor.

    width/height are the full pixel dimensions; top/bottom/left/right are
    black-bar (letterbox) margins. width is expected to be greater than
    height. x and y are filled in later by Anchor.__init__ with the
    effective (margin-stripped, orientation-adjusted) size.
    """
    width: int
    height: int
    # black-bar margins in pixels; default to no margins
    top = 0
    bottom = 0
    left = 0
    right = 0
    # effective drawable size, computed by Anchor (not supplied by callers)
    x = 0
    y = 0
class Point(object):
    """
    A 2D point with optional anchor metadata.

    Point.ZERO    : a Point with x and y both 0
    Point.INVALID : a Point with x and y both -1

    Supports +, - (with another Point), *, / (by a number) and ==.

    BUGFIX: the original used ``raise logger.error(...)``; loguru's
    ``error`` returns None, so the statement raised
    ``TypeError: exceptions must derive from BaseException`` instead of a
    meaningful error. Operations now log and raise an explicit TypeError.
    """

    def __init__(self, x: int, y: int,
                 anchor_mode: str = 'Middle', anchor_x: int = 0, anchor_y: int = 0):
        """
        Build a point.

        :param x: x coordinate
        :param y: y coordinate
        :param anchor_mode: name of the Anchor_transform method used to
                            place the anchor (e.g. 'Middle', 'Left_top')
        :param anchor_x: anchor x coordinate in the reference resolution
        :param anchor_y: anchor y coordinate in the reference resolution
        """
        self.x = x
        self.y = y
        self.anchor_mode = anchor_mode
        self.anchor_x = anchor_x
        self.anchor_y = anchor_y

    def __str__(self):
        return '<Point [{:.1f}, {:.1f}]>'.format(self.x, self.y)

    def __add__(self, other):
        if isinstance(other, Point):
            return Point(self.x + other.x, self.y + other.y)
        logger.error('目标对象不是Point类,请检查')
        raise TypeError('expected a Point, got {}'.format(type(other)))

    def __sub__(self, other):
        if isinstance(other, Point):
            return Point(self.x - other.x, self.y - other.y)
        logger.error('目标对象不是Point类,请检查')
        raise TypeError('expected a Point, got {}'.format(type(other)))

    def __mul__(self, other):
        # Generalized: accept float scale factors as well as int.
        if isinstance(other, (int, float)):
            return Point(self.x * other, self.y * other)
        logger.error('目标对象不是int类,请检查')
        raise TypeError('expected a number, got {}'.format(type(other)))

    def __truediv__(self, other):
        if isinstance(other, (int, float)):
            return Point(self.x / other, self.y / other)
        logger.error('目标对象不是int类,请检查')
        raise TypeError('expected a number, got {}'.format(type(other)))

    def __eq__(self, other):
        if isinstance(other, Point):
            return self.x == other.x and self.y == other.y
        logger.error('目标对象不是Point类,请检查')
        return False


Point.ZERO = Point(0, 0)
Point.INVALID = Point(-1, -1)
class Size(object):
    """
    A 2D size (width x height).

    Size.ZERO    : a Size with width and height both 0
    Size.INVALID : a Size with width and height both -1

    Supports +, - (with another Size), *, / (by a number), ==, and the
    ordering operators <, >, <=, >= which compare by area (width * height).

    BUGFIX 1: ``__eq__`` tested ``type(other) == Point``, so comparing two
    Size objects always logged an error and returned False.
    BUGFIX 2: ``raise logger.error(...)`` raised None (loguru's ``error``
    returns None); operations now log and raise an explicit TypeError.
    """

    def __init__(self, width: int, height: int):
        self.width = width
        self.height = height

    def __str__(self):
        return '<Size [{} x {}]>'.format(self.width, self.height)

    def __add__(self, other):
        if isinstance(other, Size):
            return Size(self.width + other.width, self.height + other.height)
        logger.error('目标对象不是Size类,请检查')
        raise TypeError('expected a Size, got {}'.format(type(other)))

    def __sub__(self, other):
        if isinstance(other, Size):
            return Size(self.width - other.width, self.height - other.height)
        logger.error('目标对象不是Size类,请检查')
        raise TypeError('expected a Size, got {}'.format(type(other)))

    def __mul__(self, other):
        # Generalized: accept float scale factors as well as int.
        if isinstance(other, (int, float)):
            return Size(self.width * other, self.height * other)
        logger.error('目标对象不是int类,请检查')
        raise TypeError('expected a number, got {}'.format(type(other)))

    def __truediv__(self, other):
        if isinstance(other, (int, float)):
            return Size(self.width / other, self.height / other)
        logger.error('目标对象不是int类,请检查')
        raise TypeError('expected a number, got {}'.format(type(other)))

    def __eq__(self, other):
        if isinstance(other, Size):
            return self.width == other.width and self.height == other.height
        logger.error('目标对象不是Size类,请检查')
        return False

    def __lt__(self, other):
        # Ordering compares area, not individual dimensions.
        if isinstance(other, Size):
            return self.width * self.height < other.width * other.height
        logger.error('目标对象不是Size类,请检查')
        return False

    def __gt__(self, other):
        if isinstance(other, Size):
            return self.width * self.height > other.width * other.height
        logger.error('目标对象不是Size类,请检查')
        return False

    def __le__(self, other):
        if isinstance(other, Size):
            return self.width * self.height <= other.width * other.height
        logger.error('目标对象不是Size类,请检查')
        return False

    def __ge__(self, other):
        if isinstance(other, Size):
            return self.width * self.height >= other.width * other.height
        logger.error('目标对象不是Size类,请检查')
        return False


Size.ZERO = Size(0, 0)
Size.INVALID = Size(-1, -1)
class Rect(object):
    """An axis-aligned rectangle defined by its top-left corner and size."""

    def __init__(self, x, y, width, height):
        self.x = x
        self.y = y
        self.width = width
        self.height = height

    def __str__(self):
        return '<Rect [Point({}, {}), Size[{}, {}]]'.format(
            self.x, self.y, self.width, self.height)

    @property
    def size(self):
        """The rectangle's dimensions as a Size."""
        return Size(self.width, self.height)

    @property
    def tl(self):
        """Top-left corner as a Point."""
        return Point(self.x, self.y)

    @property
    def br(self):
        """Bottom-right corner as a Point."""
        return Point(self.x + self.width, self.y + self.height)

    @property
    def middle(self):
        """Center of the rectangle as a Point."""
        return Point(self.x + self.width / 2, self.y + self.height / 2)

    def contains(self, v):
        """Return True when the given Point or Rect lies inside this Rect."""
        if isinstance(v, Rect):
            # A rectangle is contained iff both its corners are.
            return self.contains(v.tl) and self.contains(v.br)
        if isinstance(v, Point):
            corner_tl, corner_br = self.tl, self.br
            return (corner_tl.x <= v.x <= corner_br.x
                    and corner_tl.y <= v.y <= corner_br.y)
        return False

    @staticmethod
    def create_by_point_size(point: Point, size: Size):
        """Build a Rect from a top-left Point and a Size."""
        return Rect(point.x, point.y, size.width, size.height)

    @staticmethod
    def create_by_2_point(tl_point: Point, br_point: Point):
        """Build a Rect from its top-left and bottom-right Points."""
        return Rect(tl_point.x, tl_point.y,
                    br_point.x - tl_point.x, br_point.y - tl_point.y)


Rect.ZERO = Rect(0, 0, 0, 0)
class Anchor_transform(object):
    """
    Per-anchor coordinate formulas.

    Each static method maps a point (x, y), expressed in the reference
    device's effective coordinate space, into the current device's space,
    scaling by mainPoint_scale and offsetting by the current device's
    black-bar margins (cur.left / cur.top).
    """

    @staticmethod
    def Middle(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = cur.x / 2 - (dev.x / 2 - x) * sx + cur.left
        new_y = cur.y / 2 - (dev.y / 2 - y) * sy + cur.top
        return new_x, new_y

    @staticmethod
    def Left(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = x * sx + cur.left
        new_y = cur.y / 2 - (dev.y / 2 - y) * sy + cur.top
        return new_x, new_y

    @staticmethod
    def Right(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = cur.x - (dev.x - x) * sx + cur.left
        new_y = cur.y / 2 - (dev.y / 2 - y) * sy + cur.top
        return new_x, new_y

    @staticmethod
    def top(x, y, dev, cur, mainPoint_scale):
        # NOTE: lowercase name kept — anchor_mode strings dispatch via getattr.
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = cur.x / 2 - (dev.x / 2 - x) * sx + cur.left
        new_y = y * sy + cur.top
        return new_x, new_y

    @staticmethod
    def Bottom(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = cur.x / 2 - (dev.x / 2 - x) * sx + cur.left
        new_y = cur.y - (dev.y - y) * sy + cur.top
        return new_x, new_y

    @staticmethod
    def Left_top(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        return x * sx + cur.left, y * sy + cur.top

    @staticmethod
    def Left_bottom(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = x * sx + cur.left
        new_y = cur.y - (dev.y - y) * sy + cur.top
        return new_x, new_y

    @staticmethod
    def Right_top(x, y, dev, cur, mainPoint_scale):
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = cur.x - (dev.x - x) * sx + cur.left
        new_y = y * sy + cur.top
        return new_x, new_y

    @staticmethod
    def Right_bottom(x, y, dev, cur, mainPoint_scale):
        """Anchor at the bottom-right corner."""
        sx, sy = mainPoint_scale['x'], mainPoint_scale['y']
        new_x = cur.x - (dev.x - x) * sx + cur.left
        new_y = cur.y - (dev.y - y) * sy + cur.top
        return new_x, new_y
class Anchor(object):
    """
    Resolution / aspect-ratio adapter.

    Converts points and sizes defined on a reference ("dev") display into
    the current ("cur") display's coordinates, accounting for black-bar
    margins and screen orientation.
    """

    def __init__(self, dev: dict, cur: dict, orientation: int):
        """
        :param dev: reference display description (display_type fields)
        :param cur: current display description (display_type fields)
        :param orientation: 1 or 2 keeps width/height as-is; 3 swaps the
                            axes (landscape); any other value raises
        :raises ValueError: if orientation is not 1, 2 or 3
        """
        dev = display_type(**dev)
        cur = display_type(**cur)
        self.dev, self.cur = dev, cur
        # Effective drawable area = full size minus the black-bar margins.
        if orientation == 1 or orientation == 2:
            dev_x = dev.width - dev.left - dev.right
            dev_y = dev.height - dev.top - dev.bottom
            cur_x = cur.width - cur.left - cur.right
            cur_y = cur.height - cur.top - cur.bottom
        elif orientation == 3:
            # Landscape: swap which physical dimension is treated as x/y.
            dev_x = dev.height - dev.top - dev.bottom
            dev_y = dev.width - dev.left - dev.right
            cur_x = cur.height - cur.top - cur.bottom
            cur_y = cur.width - cur.left - cur.right
        else:
            raise ValueError('没有定义orientation')
        dev.x, dev.y = dev_x, dev_y
        cur.x, cur.y = cur_x, cur_y
        scale_x = cur_x / dev_x
        scale_y = cur_y / dev_y
        # Scale used to place the anchor point itself.
        self.mainPoint_scale = {
            'x': scale_x,
            'y': scale_y,
        }
        # Scale used for offsets of points relative to their anchor.
        # NOTE(review): currently identical to mainPoint_scale; kept
        # separate presumably so the two could diverge later — confirm.
        self.appurtenant_scale = {
            'x': scale_x,
            'y': scale_y,
        }

    def point(self, x: int, y: int, anchor_mode: str = 'Middle', anchor_x: int = 0, anchor_y: int = 0):
        """Build a Point in dev coordinates and return it transformed to cur coordinates."""
        point = Point(x=x, y=y, anchor_mode=anchor_mode, anchor_x=anchor_x, anchor_y=anchor_y)
        point.x, point.y = self.transform(point)
        return point

    def size(self, width: int, height: int):
        """Build a Size in dev coordinates and return it scaled to cur coordinates."""
        size = Size(width=width, height=height)
        size.width, size.height = self.transform(size)
        return size

    def transform(self, args: Union[Point, Size]):
        """
        Transform a Point or Size from dev to cur coordinates.

        :param args: the Point or Size to convert
        :return: (x, y) for a Point, (width, height) for a Size
        :raises ValueError: for any other argument type
        """
        if isinstance(args, Point):
            # First locate the anchor on the current display...
            anchor_x, anchor_y = self._count_anchor_point(args)
            # ...then place the point relative to that anchor.
            x, y = self._count_appurtenant_point(args, anchor_x, anchor_y)
            return x, y
        elif isinstance(args, Size):
            width = args.width * self.mainPoint_scale['x']
            height = args.height * self.mainPoint_scale['y']
            return width, height
        else:
            raise ValueError('转换未知的类型: {}'.format(args))

    def _count_appurtenant_point(self, point, anchor_x, anchor_y):
        """Offset the point from its (already transformed) anchor, scaled."""
        x = anchor_x + (point.x - point.anchor_x)*self.appurtenant_scale['x']
        y = anchor_y + (point.y - point.anchor_y)*self.appurtenant_scale['y']
        return x, y

    def _count_anchor_point(self, point):
        """Transform the anchor itself using the formula named by anchor_mode."""
        # anchor_mode is the name of an Anchor_transform static method.
        anchor_fun = getattr(Anchor_transform, point.anchor_mode)
        x = point.anchor_x - self.dev.left
        y = point.anchor_y - self.dev.top
        x, y = anchor_fun(x, y, self.dev, self.cur, self.mainPoint_scale)
        return x, y
|
{"/baseImage/utils.py": ["/baseImage/exceptions.py"], "/baseImage/__init__.py": ["/baseImage/base_image.py", "/baseImage/coordinate.py"], "/baseImage/base_image.py": ["/baseImage/coordinate.py", "/baseImage/utils.py", "/baseImage/exceptions.py"]}
|
5,178
|
AvatarSenju/django-first
|
refs/heads/master
|
/posts/views.py
|
from django.contrib import messages
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render,get_object_or_404,redirect
#from bs4 import BeautifulSoup
from .models import Post
from .forms import PostForm
# Create your views here.
def create(request):
    """Render the post-creation form; save and redirect when it validates."""
    form = PostForm(request.POST or None)
    if form.is_valid():
        # commit=False allows extra processing before the actual save.
        instance = form.save(commit=False)
        instance.save()
        messages.success(request, "Success")
        return HttpResponseRedirect(instance.get_absolute_url())
    return render(request, "post_form.html", {"form": form})
def details(request, id):
    """Display a single post; 404 when the id is unknown."""
    instance = get_object_or_404(Post, id=id)
    return render(request, "post_details.html", {
        "title": instance.title,
        "instance": instance,
    })
def update(request, id=None):
    """Edit an existing post; redirect to its detail page on success."""
    instance = get_object_or_404(Post, id=id)
    form = PostForm(request.POST or None, instance=instance)
    if form.is_valid():
        # commit=False allows extra processing before the actual save.
        instance = form.save(commit=False)
        instance.save()
        messages.success(request, "Success")
        return HttpResponseRedirect(instance.get_absolute_url())
    return render(request, "post_form.html", {
        "title": instance.title,
        "instance": instance,
        "form": form,
    })
def retrive(request, id=None):
    # NOTE(review): name looks like a typo for "retrieve"; kept as-is since
    # the URLconf may reference this view by name — confirm before renaming.
    # Renders base.html with the post's `updated` timestamp as the title.
    ins = get_object_or_404(Post, id=id)
    context = {"title": ins.updated}
    return render(request, "base.html", context)
def delete(request, id=None):
    """
    Delete the post with the given id, then redirect to the list view.

    Raises Http404 (via get_object_or_404) when no such post exists.
    """
    instance = get_object_or_404(Post, id=id)
    # BUGFIX: delete before queuing the flash message — the original
    # queued "Deleted" first, so a failing delete() would still have
    # flashed a success message on the next page.
    instance.delete()
    messages.success(request, "Deleted")
    return redirect("posts:lists")
def listss(request):
    """List every post (template: post_list.html)."""
    # NOTE(review): "listss" looks like a typo; kept — the URLconf
    # references this view.
    posts = Post.objects.all()
    return render(request, "post_list.html", {
        "object_list": posts,
        "title": "List",
    })
|
{"/posts/views.py": ["/posts/models.py"], "/posts/admin.py": ["/posts/models.py"]}
|
5,179
|
AvatarSenju/django-first
|
refs/heads/master
|
/posts/admin.py
|
from django.contrib import admin
from posts.models import Post
# Register your models here.
class PostModel(admin.ModelAdmin):
    """Admin configuration for the Post model."""
    # BUGFIX: "title" was listed twice in list_display, producing a
    # duplicate column in the admin changelist.
    list_display = ["title", "updated", "timestamp"]
    list_display_links = ["title"]
    list_filter = ["title"]


admin.site.register(Post, PostModel)
|
{"/posts/views.py": ["/posts/models.py"], "/posts/admin.py": ["/posts/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.