index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
61,815 | elees1219/LineBot | refs/heads/master | /app.py | # -*- coding: utf-8 -*-
# import custom module
from bot import webpage_auto_gen, game_objects
from bot.system import line_api_proc, string_can_be_int, system_data
import msg_handler
import errno, os, sys, tempfile
import traceback
import validators
import time
from collections import defaultdict
from urlparse import urlparse
from cgi import escape
from datetime import datetime, timedelta
from error import error
from flask import Flask, abort, request, url_for
from math import *
# import for Oxford Dictionary
import httplib
import requests
import json
# Database import
from db import kw_dict_mgr, kwdict_col, group_ban, gb_col, message_tracker, msg_track_col, msg_event_type
# tool import
from tool import mff, random_gen, txt_calc
# games import
import game
# import from LINE Messaging API
from linebot import (
LineBotApi, WebhookHandler, exceptions
)
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage,
SourceUser, SourceGroup, SourceRoom,
TemplateSendMessage, ConfirmTemplate, MessageTemplateAction,
ButtonsTemplate, URITemplateAction, PostbackTemplateAction,
CarouselTemplate, CarouselColumn, PostbackEvent,
StickerMessage, StickerSendMessage, LocationMessage, LocationSendMessage,
ImageMessage, VideoMessage, AudioMessage,
UnfollowEvent, FollowEvent, JoinEvent, LeaveEvent, BeaconEvent
)
# Databases initialization
kwd = kw_dict_mgr("postgres", os.environ["DATABASE_URL"])
gb = group_ban("postgres", os.environ["DATABASE_URL"])
msg_track = message_tracker("postgres", os.environ["DATABASE_URL"])
# Main initialization
app = Flask(__name__)
sys_data = system_data()
game_data = game_objects()
# Line Bot Environment initialization
MAIN_UID_OLD = 'Ud5a2b5bb5eca86342d3ed75d1d606e2c'
MAIN_UID = 'U089d534654e2c5774624e8d8c813000e'
main_silent = False
# SHA224 digests acting as passphrases for the three permission tiers
# (a user "authenticates" by sending the matching digest as a message)
administrator = os.getenv('ADMIN', None)
group_admin = os.getenv('G_ADMIN', None)
group_mod = os.getenv('G_MOD', None)
if administrator is None:
    print('The SHA224 of ADMIN not defined. Program will be terminated.')
    sys.exit(1)
if group_admin is None:
    print('The SHA224 of G_ADMIN not defined. Program will be terminated.')
    sys.exit(1)
if group_mod is None:
    print('The SHA224 of G_MOD not defined. Program will be terminated.')
    sys.exit(1)
# LINE Messaging API credentials (both required to start)
channel_secret = os.getenv('LINE_CHANNEL_SECRET', None)
channel_access_token = os.getenv('LINE_CHANNEL_ACCESS_TOKEN', None)
if channel_secret is None:
    print('Specify LINE_CHANNEL_SECRET as environment variable.')
    sys.exit(1)
if channel_access_token is None:
    print('Specify LINE_CHANNEL_ACCESS_TOKEN as environment variable.')
    sys.exit(1)
api = LineBotApi(channel_access_token)
handler = WebhookHandler(channel_secret)
line_api = line_api_proc(api)
# Oxford Dictionary Environment initialization
oxford_dict_obj = msg_handler.oxford_dict('en')
# File path
static_tmp_path = os.path.join(os.path.dirname(__file__), 'static', 'tmp')
# Webpage auto generator
webpage_generator = webpage_auto_gen.webpage()
# Message handler initialization
# Permission digests are passed lowest-to-highest: [group_mod, group_admin, administrator]
command_executor = msg_handler.text_msg(line_api, kwd, gb, msg_track, oxford_dict_obj, [group_mod, group_admin, administrator], sys_data, game_data, webpage_generator)
game_executor = msg_handler.game_msg(game_data, line_api)
# function for create tmp dir for download content
def make_static_tmp_dir():
    """Ensure the static/tmp download directory exists.

    A pre-existing directory is not an error; any other OSError
    (e.g. permission denied) is propagated to the caller.
    """
    try:
        os.makedirs(static_tmp_path)
    except OSError as exc:
        # Re-raise unless this is exactly the "already exists as a dir" case.
        if not (exc.errno == errno.EEXIST and os.path.isdir(static_tmp_path)):
            raise
# TODO: make error become object (time, detail, url, error type)
@app.route("/callback", methods=['POST'])
def callback():
    """LINE webhook endpoint: verify the signature and dispatch the events.

    Returns 'OK' (200) when handled; aborts with 400 when the
    X-Line-Signature header does not match the request body.
    """
    # get X-Line-Signature header value
    signature = request.headers['X-Line-Signature']
    # get request body as text
    body = request.get_data(as_text=True)
    app.logger.info("Request body: " + body)
    # handle webhook body
    try:
        handler.handle(body, signature)
    except exceptions.InvalidSignatureError:
        # Fix: `abort` was never imported, so this line raised NameError
        # (turning a bad signature into a 500). It is now imported from flask.
        abort(400)
    return 'OK'
@app.route("/error", methods=['GET'])
def get_error_list():
    """Render an index page linking every recorded error report."""
    sys_data.view_webpage()
    links = defaultdict(str)
    for ts in webpage_generator.error_timestamp_list():
        shown = datetime.fromtimestamp(float(ts)).strftime('%Y-%m-%d %H:%M:%S')
        # url_for yields a leading '/', which url_root already supplies
        links[shown] = request.url_root + url_for('get_error_message', timestamp=ts)[1:]
    return webpage_auto_gen.webpage.html_render_error_list(sys_data.boot_up, links)
@app.route("/error/<timestamp>", methods=['GET'])
def get_error_message(timestamp):
    """Render the stored error report identified by *timestamp*."""
    sys_data.view_webpage()
    return webpage_auto_gen.webpage.html_render(
        webpage_generator.get_content(webpage_auto_gen.content_type.Error, timestamp),
        u'錯誤訊息')
@app.route("/query/<timestamp>", methods=['GET'])
def full_query(timestamp):
    """Render the full query result stored under *timestamp*."""
    sys_data.view_webpage()
    return webpage_auto_gen.webpage.html_render(
        webpage_generator.get_content(webpage_auto_gen.content_type.Query, timestamp),
        u'查詢結果')
@app.route("/info/<timestamp>", methods=['GET'])
def full_info(timestamp):
    """Render the detailed-info page stored under *timestamp*."""
    sys_data.view_webpage()
    return webpage_auto_gen.webpage.html_render(
        webpage_generator.get_content(webpage_auto_gen.content_type.Info, timestamp),
        u'詳細資料')
@app.route("/full/<timestamp>", methods=['GET'])
def full_content(timestamp):
    """Render the full-text page stored under *timestamp*."""
    sys_data.view_webpage()
    return webpage_auto_gen.webpage.html_render(
        webpage_generator.get_content(webpage_auto_gen.content_type.Text, timestamp),
        u'完整資訊')
@app.route("/ranking/<type>", methods=['GET'])
def full_ranking(type):
    """Render a full ranking page: by creator ('user'), usage ('used')
    or most recent calls ('called'); anything else yields a no-content page.

    (The parameter is named `type` to match the route variable; it shadows
    the builtin only inside this view.)
    """
    sys_data.view_webpage()
    # Lazy builders so only the requested ranking hits the database.
    builders = {
        'user': lambda: kw_dict_mgr.list_user_created_ranking(line_api, kwd.user_created_rank(50)),
        'used': lambda: kw_dict_mgr.list_keyword_ranking(kwd.order_by_usedrank(10000)),
        'called': lambda: kw_dict_mgr.list_keyword_recently_called(kwd.recently_called(10000)),
    }
    build = builders.get(type)
    content = build() if build is not None else error.webpage.no_content()
    return webpage_auto_gen.webpage.html_render(content, u'完整排名')
@handler.add(MessageEvent, message=TextMessage)
def handle_text_message(event):
    """Main text-message dispatcher.

    Routes an incoming text message through, in order: admin toggles
    (intercept / calculator-debug / silence), the newline-separated
    command systems ('JC', 'HELP', 'G'), an active rock-paper-scissors
    game, the keyword auto-reply system, and finally the text calculator.
    Exceptions are recorded as web error pages and their URL is replied.
    (Indentation in this body was reconstructed during review.)
    """
    global game_data
    global command_executor
    global game_executor
    token = event.reply_token
    text = event.message.text
    src = event.source
    splitter = '\n'
    msg_track.log_message_activity(line_api_proc.source_channel_id(src), msg_event_type.recv_txt)
    # SHA224 trigger: toggle console interception of all incoming messages
    if text == '561563ed706e6f696abbe050ad79cf334b9262da6f83bc1dcf7328f2':
        sys_data.intercept = not sys_data.intercept
        api.reply_message(token, TextSendMessage(text='Bot {}.'.format('start to intercept messages' if sys_data.intercept else 'stop intercepting messages')))
        return
    elif sys_data.intercept:
        intercept_text(event)
    if text == '43afdb2391686dd9710c3879a86c04b5c0a2fc3391ffd2c6a05e4ce0': # SHA of CALCULATOR DEBUG
        sys_data.calc_debug = not sys_data.calc_debug
        print 'Calculator debugging {}.'.format('enabled' if sys_data.calc_debug else 'disabled')
    # The administrator digest toggles global silence; while silent, everything else is skipped
    if text == administrator:
        sys_data.silence = not sys_data.silence
        api.reply_message(token, TextSendMessage(text='Bot set to {}.'.format('Silent' if sys_data.silence else 'Active')))
        return
    elif sys_data.silence:
        return
    try:
        if text == 'ERRORERRORERRORERROR':
            raise Exception('THIS ERROR IS CREATED FOR TESTING PURPOSE.')
        elif splitter in text:
            head, cmd, oth = msg_handler.text_msg.split(text, splitter, 3)
            if head == 'JC':
                params = command_executor.split_verify(cmd, splitter, oth)
                # split_verify returns an error string (unicode) on malformed input
                if isinstance(params, unicode):
                    api_reply(token, TextSendMessage(text=params), src)
                    return
                # SQL Command
                if cmd == 'S':
                    text = command_executor.S(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # ADD keyword & ADD top keyword
                elif cmd == 'A' or cmd == 'M':
                    if sys_data.sys_cmd_dict[cmd].non_user_permission_required:
                        text = command_executor.M(src, params)
                    else:
                        text = command_executor.A(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # DELETE keyword & DELETE top keyword
                elif cmd == 'D' or cmd == 'R':
                    if sys_data.sys_cmd_dict[cmd].non_user_permission_required:
                        text = command_executor.R(src, params)
                    else:
                        text = command_executor.D(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # QUERY keyword
                elif cmd == 'Q':
                    text = command_executor.Q(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # INFO of keyword
                elif cmd == 'I':
                    text = command_executor.I(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # RANKING
                elif cmd == 'K':
                    text = command_executor.K(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # SPECIAL record
                elif cmd == 'P':
                    text = command_executor.P(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # GROUP ban basic (info)
                elif cmd == 'G':
                    text = command_executor.G(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # GROUP ban advance
                elif cmd == 'GA':
                    texts = command_executor.GA(src, params)
                    api_reply(token, [TextSendMessage(text=text) for text in texts], src)
                # get CHAT id
                elif cmd == 'H':
                    output = command_executor.H(src, params)
                    api_reply(token, [TextSendMessage(text=output[0]), TextSendMessage(text=output[1])], src)
                # SHA224 generator
                elif cmd == 'SHA':
                    text = command_executor.SHA(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # Look up vocabulary in OXFORD Dictionary
                elif cmd == 'O':
                    text = command_executor.O(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # RANDOM draw
                elif cmd == 'RD':
                    text = command_executor.RD(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # last STICKER message
                elif cmd == 'STK':
                    text = command_executor.STK(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                # TRANSLATE text to URL form
                elif cmd == 'T':
                    text = command_executor.T(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                else:
                    # Unknown JC command: roll the usage counter back.
                    # NOTE(review): presumably the counter was bumped in split_verify — confirm.
                    sys_data.sys_cmd_dict[cmd].count -= 1
            elif head == 'HELP':
                data = msg_handler.text_msg.split(text, splitter, 2)
                sys_data.helper_cmd_dict['MFF'].count += 1
                # TODO: restruct helper
                # TODO: Helper modulize
                if data[1].upper().startswith('MFF'):
                    # Help request: send usage codes, a template and a sample
                    api_reply(token, [TextSendMessage(text=mff.mff_dmg_calc.help_code()),
                                      TextSendMessage(text=u'下則訊息是訊息範本,您可以直接將其複製,更改其內容,然後使用。或是遵照以下格式輸入資料。\n\n{代碼(參見上方)} {參數}(%)\n\n例如:\nMFF\nSKC 100%\n魔力 1090%\n魔力 10.9\n\n欲察看更多範例,請前往 https://sites.google.com/view/jellybot/mff傷害計算'),
                                      TextSendMessage(text=mff.mff_dmg_calc.help_sample())], src)
                else:
                    # Parse the job sheet and tabulate all eight damage scenarios
                    job = mff.mff_dmg_calc.text_job_parser(data[1])
                    dmg_calc_dict = [[u'破防前非爆擊(弱點屬性)', mff.mff_dmg_calc.dmg_weak(job)],
                                     [u'破防前爆擊(弱點屬性)', mff.mff_dmg_calc.dmg_crt_weak(job)],
                                     [u'已破防非爆擊(弱點屬性)', mff.mff_dmg_calc.dmg_break_weak(job)],
                                     [u'已破防爆擊(弱點屬性)', mff.mff_dmg_calc.dmg_break_crt_weak(job)],
                                     [u'破防前非爆擊(非弱點屬性)', mff.mff_dmg_calc.dmg(job)],
                                     [u'破防前爆擊(非弱點屬性)', mff.mff_dmg_calc.dmg_crt(job)],
                                     [u'已破防非爆擊(非弱點屬性)', mff.mff_dmg_calc.dmg_break(job)],
                                     [u'已破防爆擊(非弱點屬性)', mff.mff_dmg_calc.dmg_break_crt(job)]]
                    text = u'傷害表:'
                    for title, value in dmg_calc_dict:
                        text += u'\n\n'
                        text += u'{}\n首發: {:.0f}\n連發: {:.0f}\n累積傷害(依次): {}'.format(title,
                                                                                value['first'],
                                                                                value['continual'],
                                                                                u', '.join('{:.0f}'.format(x) for x in value['list_of_sum']))
                    api_reply(token, TextSendMessage(text=text), src)
            elif head == 'G':
                if cmd not in sys_data.game_cmd_dict:
                    text = error.main.invalid_thing(u'遊戲', cmd)
                    api_reply(token, TextSendMessage(text=text), src)
                    return
                max_prm = sys_data.game_cmd_dict[cmd].split_max
                min_prm = sys_data.game_cmd_dict[cmd].split_min
                params = msg_handler.text_msg.split(oth, splitter, max_prm)
                # count(None) excludes unfilled trailing slots from the arity check
                if min_prm > len(params) - params.count(None):
                    text = error.main.lack_of_thing(u'參數')
                    api_reply(token, TextSendMessage(text=text), src)
                    return
                # Shift params by one so indices match the executors' convention
                params.insert(0, None)
                # GAME - Rock-Paper-Scissor
                if cmd == 'RPS':
                    text = game_executor.RPS(src, params)
                    api_reply(token, TextSendMessage(text=text), src)
                else:
                    sys_data.game_cmd_dict[cmd].count -= 1
        # An active RPS game in this channel may consume any plain text as a move
        rps_obj = game_data.get_rps(line_api_proc.source_channel_id(src))
        if rps_obj is not None:
            rps_text = minigame_rps_capturing(rps_obj, False, text, line_api_proc.source_user_id(src))
            if rps_text is not None:
                api_reply(token, TextSendMessage(text=rps_text), src)
                return
        replied = auto_reply_system(token, text, False, src)
        if not replied:
            # Legacy commands used space separators; tell those users about the change.
            # NOTE(review): (' ' or ' ') always evaluates to the FIRST literal, so only
            # one of the two separators is actually tested — confirm intended set.
            if (text.startswith('JC ') or text.startswith('HELP ') or text.startswith('G ')) and ((' ' or ' ') in text):
                msg = u'小水母指令分隔字元已從【雙空格】修改為【換行】。'
                msg += u'\n\n如欲輸入指令,請以換行分隔指令,例如:\nJC\nA\n你!\n我?'
                msg += u'\n\n如果參數中要包含換行的話,請輸入【\\n】。\n另外,JC RD的文字抽籤中,原先以換行分隔,現在則以單空格分隔。'
                text = error.main.miscellaneous(msg)
                api_reply(token, TextSendMessage(text=text), src)
                return
            else:
                # Last resort: try to evaluate the message as an arithmetic expression
                calc_result = txt_calc.text_calculator.calc(text, sys_data.calc_debug)
                if calc_result is not None:
                    sys_data.helper_cmd_dict['CALC'].count += 1
                    text = u'算式: {}\n\n計算結果: {}'.format('\n{}'.format(text) if '\n' in text else text, calc_result)
                    api_reply(token, TextSendMessage(text=text), src)
                    return
    except exceptions.LineBotApiError as ex:
        # LINE API rejected the reply: record the details and reply the error-page URL
        text = u'開機時間: {}\n\n'.format(sys_data.boot_up)
        text += u'LINE API錯誤,狀態碼: {}\n\n'.format(ex.status_code)
        for err in ex.error.details:
            text += u'錯誤內容: {}\n錯誤訊息: {}\n'.format(err.property, err.message.decode("utf-8"))
        error_msg = webpage_generator.rec_error(text, line_api_proc.source_channel_id(src))
        api_reply(token, TextSendMessage(text=error_msg), src)
    except Exception as exc:
        # Any other failure: record the exception info as a web error page
        text = u'開機時間: {}\n\n'.format(sys_data.boot_up)
        exc_type, exc_obj, exc_tb = sys.exc_info()
        text += u'錯誤種類: {}\n\n第{}行 - {}'.format(exc_type, exc_tb.tb_lineno, exc.message.decode("utf-8"))
        error_msg = webpage_generator.rec_error(text, line_api_proc.source_channel_id(src))
        api_reply(token, TextSendMessage(text=error_msg), src)
        # NOTE(review): indentation reconstructed — this return is taken as ending
        # the exception path, which keeps the template demos below reachable on
        # the normal path; confirm against the upstream repository.
        return
    # Template-message demos triggered by literal keywords
    if text == 'confirm':
        confirm_template = ConfirmTemplate(text='Do it?', actions=[
            MessageTemplateAction(label='Yes', text='Yes!'),
            MessageTemplateAction(label='No', text='No!'),
        ])
        template_message = TemplateSendMessage(
            alt_text='Confirm alt text', template=confirm_template)
        api_reply(event.reply_token, template_message, src)
    elif text == 'carousel':
        carousel_template = CarouselTemplate(columns=[
            CarouselColumn(text='hoge1', title='fuga1', actions=[
                URITemplateAction(
                    label='Go to line.me', uri='https://line.me'),
                PostbackTemplateAction(label='ping', data='ping')
            ]),
            CarouselColumn(text='hoge2', title='fuga2', actions=[
                PostbackTemplateAction(
                    label='ping with text', data='ping',
                    text='ping'),
                MessageTemplateAction(label='Translate Rice', text='米')
            ]),
        ])
        template_message = TemplateSendMessage(
            alt_text='Buttons alt text', template=carousel_template)
        api_reply(event.reply_token, template_message, src)
@handler.add(MessageEvent, message=StickerMessage)
def handle_sticker_message(event):
    """Handle an incoming sticker: RPS move capture, sticker info (1:1 chats),
    or sticker-keyword auto-reply (groups/rooms).

    (Indentation in this body was reconstructed during review.)
    """
    package_id = event.message.package_id
    sticker_id = event.message.sticker_id
    rep = event.reply_token
    src = event.source
    cid = line_api_proc.source_channel_id(src)
    # TODO: Modulize handle received sticker message
    # Remember the last sticker seen in this channel
    # (presumably consumed by the JC STK command — confirm)
    sys_data.set_last_sticker(cid, sticker_id)
    global game_data
    rps_obj = game_data.get_rps(cid)
    msg_track.log_message_activity(cid, msg_event_type.recv_stk)
    # An active RPS game may consume the sticker as a move
    if rps_obj is not None:
        text = minigame_rps_capturing(rps_obj, True, sticker_id, line_api_proc.source_user_id(src))
        if text is not None:
            api_reply(rep, TextSendMessage(text=text), src)
            return
    if isinstance(event.source, SourceUser):
        # 1:1 chat: report the sticker's IDs, file paths and related keyword pairs
        results = kwd.search_sticker_keyword(sticker_id)
        if results is not None:
            kwdata = u'相關回覆組ID: {}。\n'.format(u', '.join([unicode(result[int(kwdict_col.id)]) for result in results]))
        else:
            kwdata = u'無相關回覆組ID。\n'
        api_reply(
            rep,
            [TextSendMessage(text=kwdata + u'貼圖圖包ID: {}\n貼圖圖片ID: {}'.format(package_id, sticker_id)),
             TextSendMessage(text=u'圖片路徑(Android):\nemulated\\0\\Android\\data\\jp.naver.line.android\\stickers\\{}\\{}'.format(package_id, sticker_id)),
             TextSendMessage(text=u'圖片路徑(Windows):\nC:\\Users\\USER_NAME\\AppData\\Local\\LINE\\Data\\Sticker\\{}\\{}'.format(package_id, sticker_id)),
             TextSendMessage(text=u'圖片路徑(網路):\n{}'.format(kw_dict_mgr.sticker_png_url(sticker_id)))],
            src
        )
    else:
        # Group/room: treat the sticker ID as a keyword for the auto-reply system
        auto_reply_system(rep, sticker_id, True, src)
# Incomplete
@handler.add(PostbackEvent)
def handle_postback(event):
    # Disabled: returns immediately, so the ping/pong demo below never runs.
    return
    if event.postback.data == 'ping':
        api_reply(
            event.reply_token, TextSendMessage(text='pong'), event.source)
# Incomplete
@handler.add(MessageEvent, message=LocationMessage)
def handle_location_message(event):
    # Log the activity, then bail out; the echo-location reply below is disabled.
    # NOTE(review): logged as recv_txt even though this is a location message — confirm intended.
    msg_track.log_message_activity(line_api_proc.source_channel_id(event.source), msg_event_type.recv_txt)
    return
    api_reply(
        event.reply_token,
        LocationSendMessage(
            title=event.message.title, address=event.message.address,
            latitude=event.message.latitude, longitude=event.message.longitude
        ),
        event.source
    )
# Incomplete
@handler.add(MessageEvent, message=(ImageMessage, VideoMessage, AudioMessage))
def handle_content_message(event):
    # Log media activity, then bail out; the save-to-static demo below is disabled.
    # NOTE(review): logged as recv_txt even for media messages — confirm intended.
    msg_track.log_message_activity(line_api_proc.source_channel_id(event.source), msg_event_type.recv_txt)
    return
    # Unreachable demo: download the media into static/tmp and reply its public URL.
    if isinstance(event.message, ImageMessage):
        ext = 'jpg'
    elif isinstance(event.message, VideoMessage):
        ext = 'mp4'
    elif isinstance(event.message, AudioMessage):
        ext = 'm4a'
    else:
        return
    message_content = api.get_message_content(event.message.id)
    with tempfile.NamedTemporaryFile(dir=static_tmp_path, prefix=ext + '-', delete=False) as tf:
        for chunk in message_content.iter_content():
            tf.write(chunk)
        tempfile_path = tf.name
    dist_path = tempfile_path + '.' + ext
    dist_name = os.path.basename(dist_path)
    os.rename(tempfile_path, dist_path)
    api_reply(
        event.reply_token, [
            TextSendMessage(text='Save content.'),
            TextSendMessage(text=request.host_url + os.path.join('static', 'tmp', dist_name))
        ], event.source)
@handler.add(FollowEvent)
def handle_follow(event):
    # Greet a new friend with the bot's introduction template.
    api_reply(event.reply_token, introduction_template(), event.source)
# Incomplete
@handler.add(UnfollowEvent)
def handle_unfollow():
    # Incomplete: returns immediately; the logging line below never runs.
    return
    app.logger.info("Got Unfollow event")
@handler.add(JoinEvent)
def handle_join(event):
    """When the bot joins a group/room, register it in the group-ban and
    message-tracking databases (or report the existing registration).

    Fixes applied during review:
    - `command_exec` was an undefined name (the module global is
      `command_executor`), so the already-registered branch raised NameError.
    - `TextMessage` (a receive model) was used where a `TextSendMessage`
      is required for replying (api_reply also type-checks on it for logging).
    - `cid` was passed as api_reply's third argument, which expects the
      event source object (it calls line_api_proc.source_channel_id on it).
    """
    src = event.source
    reply_token = event.reply_token
    cid = line_api_proc.source_channel_id(src)
    global command_executor
    if not isinstance(event.source, SourceUser):
        group_data = gb.get_group_by_id(cid)
        if group_data is None:
            # First time in this channel: create ban + tracking records
            added = gb.new_data(cid, MAIN_UID, administrator)
            msg_track.new_data(cid)
            api_reply(reply_token,
                      [TextSendMessage(text=u'群組資料註冊{}。'.format(u'成功' if added else u'失敗')),
                       introduction_template()],
                      src)
        else:
            # Already registered: show the stored group info
            api_reply(reply_token,
                      [TextSendMessage(text=u'群組資料已存在。'),
                       TextSendMessage(text=command_executor.G(src, [None, None, None])),
                       introduction_template()],
                      src)
def introduction_template():
    """Build the self-introduction buttons template sent on follow/join."""
    intro_actions = [
        URITemplateAction(label=u'點此開啟使用說明', uri='https://sites.google.com/view/jellybot'),
        URITemplateAction(label=u'點此導向開發者LINE帳號', uri='http://line.me/ti/p/~raenonx'),
        MessageTemplateAction(label=u'點此查看群組資料', text='JC\nG')
    ]
    intro_buttons = ButtonsTemplate(title=u'機器人簡介', text='歡迎使用小水母!',
                                    actions=intro_actions)
    return TemplateSendMessage(alt_text=u'機器人簡介', template=intro_buttons)
def api_reply(reply_token, msgs, src):
    """Reply via the LINE API, honouring global silence and logging activity.

    reply_token -- LINE reply token of the event being answered
    msgs        -- one send-message object or a list/tuple of them
    src         -- the event source (used for per-channel activity logging)
    (Indentation in this body was reconstructed during review.)
    """
    if not sys_data.silence:
        if not isinstance(msgs, (list, tuple)):
            msgs = [msgs]
        for msg in msgs:
            if isinstance(msg, TemplateSendMessage):
                msg_track.log_message_activity(line_api_proc.source_channel_id(src), msg_event_type.send_stk)
            elif isinstance(msg, TextSendMessage):
                msg_track.log_message_activity(line_api_proc.source_channel_id(src), msg_event_type.send_txt)
                # Over-length text: publish the content to the web pages and
                # reply with its URL instead (skips the remaining messages).
                if len(msg.text) > 2000:
                    api.reply_message(reply_token,
                                      TextSendMessage(text=error.main.text_length_too_long(webpage_generator.rec_text(msgs))))
                    return
        api.reply_message(reply_token, msgs)
    else:
        # Silent mode: dump to console instead of calling the LINE API
        print '=================================================================='
        print 'Bot set to silence. Expected message to reply will display below: '
        print msgs
        print '=================================================================='
def intercept_text(event):
    """Dump an intercepted text message (channel, sender, content) to the console."""
    user_id = line_api_proc.source_user_id(event.source)
    user_profile = line_api.profile(user_id)
    print '==========================================='
    print 'From Channel ID \'{}\''.format(line_api_proc.source_channel_id(event.source))
    print 'From User ID \'{}\' ({})'.format(user_id, user_profile.display_name.encode('utf-8') if user_profile is not None else 'unknown')
    print 'Message \'{}\''.format(event.message.text.encode('utf-8'))
    print '=================================================================='
def auto_reply_system(token, keyword, is_sticker_kw, src):
    """Reply with the stored keyword response, if any.

    token         -- reply token of the triggering event
    keyword       -- the text (or sticker ID) to look up
    is_sticker_kw -- True when keyword is a sticker ID
    src           -- event source (for logging / channel silence check)
    Returns True when a reply was sent, False otherwise.
    (Indentation in this body was reconstructed during review.)
    """
    cid = line_api_proc.source_channel_id(src)
    # Channels can be individually muted via the group-ban table
    if gb.is_group_set_to_silence(cid):
        return False
    res = kwd.get_reply(keyword, is_sticker_kw)
    if res is not None:
        msg_track.log_message_activity(line_api_proc.source_channel_id(src), msg_event_type.recv_stk_repl if is_sticker_kw else msg_event_type.recv_txt_repl)
        result = res[0]
        reply = result[int(kwdict_col.reply)].decode('utf-8')
        if result[int(kwdict_col.is_pic_reply)]:
            # Picture reply: wrap the image URL in a buttons template
            # crediting the creator, with a link to the original image.
            line_profile = line_api.profile(result[int(kwdict_col.creator)])
            api_reply(token, TemplateSendMessage(
                alt_text=u'圖片/貼圖回覆.\n關鍵字ID: {}'.format(result[int(kwdict_col.id)]),
                template=ButtonsTemplate(text=u'由{}製作。\n回覆組ID: {}'.format(
                    error.main.line_account_data_not_found() if line_profile is None else line_profile.display_name,
                    result[int(kwdict_col.id)]),
                    thumbnail_image_url=reply,
                    actions=[
                        URITemplateAction(label=u'原始圖片', uri=reply)
                    ])), src)
            return True
        else:
            api_reply(token,
                      TextSendMessage(text=reply),
                      src)
            return True
    return False
def minigame_rps_capturing(rps_obj, is_sticker, content, uid):
    """Try to interpret *content* as a rock-paper-scissors move for player *uid*.

    Returns the status/result text to reply with, or None when the message
    is not a move for an active game (caller then continues normal handling).
    (Indentation in this body was reconstructed during review; the placement
    of the trailing status checks under the else-branch is an inference from
    the stripped source — confirm against the upstream repository.)
    """
    if rps_obj is not None and line_api_proc.is_valid_user_id(uid) and rps_obj.has_player(uid):
        if rps_obj.enabled:
            battle_item = rps_obj.find_battle_item(is_sticker, content)
            if battle_item is not None:
                result = rps_obj.play(battle_item, uid)
                if result is not None:
                    return result
                else:
                    sys_data.game_cmd_dict['RPS'].count += 1
                    if rps_obj.is_waiting_next:
                        return u'等待下一個玩家出拳中...'
                    if rps_obj.result_generated:
                        return rps_obj.result_text()
if __name__ == "__main__":
    # create tmp dir for download content
    make_static_tmp_dir()
    # PORT arrives from the environment as a string; Werkzeug's run_simple
    # expects an integer port, so convert explicitly.
    app.run(port=int(os.environ['PORT']), host='0.0.0.0')
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,816 | elees1219/LineBot | refs/heads/master | /game/__init__.py | from .rps import (
rps, battle_item, battle_result
) | {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,817 | elees1219/LineBot | refs/heads/master | /tool/random_gen.py | # coding: utf-8
import random
class random_drawer(object):
    """Stateless drawing helpers built on the stdlib `random` module."""

    @staticmethod
    def draw_number(start, end):
        """Return a uniform random integer in [start, end] (bounds coerced to int)."""
        return random.randint(int(start), int(end))

    @staticmethod
    def draw_text(text_list):
        """Return a uniformly chosen element of *text_list*.

        Fix: the original shuffled text_list in place before choosing,
        needlessly mutating the caller's list — random.choice alone is
        already a uniform draw.
        """
        return random.choice(text_list)

    @staticmethod
    def draw_probability(probability):
        """Return True with the given probability (expected in [0.0, 1.0])."""
        return random.random() <= probability
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,818 | elees1219/LineBot | refs/heads/master | /db/__init__.py | from .kwdict import (
kw_dict_mgr, kwdict_col
)
from .groupban import (
group_ban, gb_col
)
from .msg_track import (
message_tracker, msg_track_col, msg_event_type
) | {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,819 | elees1219/LineBot | refs/heads/master | /bot/__init__.py | from .system import (
permission_verifier, permission, line_api_proc, system_data
)
from .webpage_auto_gen import (
webpage, content_type
)
from .game_object_holder import (
game_objects
) | {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,826 | wwj718/begin_django | refs/heads/master | /begin_django/settings.py | #coding:utf-8
"""
Django settings for the begin_django project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Django settings for bes project.
import os
ROOT_URLCONF = 'begin_django.urls'
WSGI_APPLICATION = 'begin_django.wsgi.application'
#for automatic deployment
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('wwj', '1162025955@qq.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'dev.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '' # Set to empty string for default. Not used with sqlite3.
}
}
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-cn'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
#USE_TZ = True
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
#edit by wwj
PROJECT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
MEDIA_ROOT = os.path.join(PROJECT_PATH,'media/').replace('\\','/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
#end wwj
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
static_dir = os.path.join(root_dir,'static')
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
static_dir,
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '2-s!ark#))jx1inssbr61-*-sxndosl6dkr=stq6523d$)kf!f'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
#edit by wwj for Django Grappelli
#python manage.py collectstatic 部署时用
#加了这个后前台找不到静态文件
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.static',# nice ! 加入这个后才能找到静态文件
'django.contrib.auth.context_processors.auth',
"django.core.context_processors.request",
#edit by wwj 为了在后台使用导航条 在所有admin模板中使用app_list
'apps.news.context_processors.app_list',
#end wwj
)
#end wwj
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
#分页插件
'pagination.middleware.PaginationMiddleware',
#'django.middleware.transaction.TransactionMiddleware'
#'django_cas.middleware.CASMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
tpl_dir = os.path.join(root_dir,'tpl')
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
tpl_dir
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
#edit by wwj
"apps.home",
"apps.news",
"apps.message",
'django_admin_bootstrapped',
#'grappelli',
#'filebrowser',
'django.contrib.admin',
##第三方插件
#百度富文本编辑器
'DjangoUeditor',
'pagination',
'sorl.thumbnail',
#end wwj
# Uncomment the next line to enable the admin:
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
#edit by wwj
# Toolbar presets and upload paths for the DjangoUeditor rich-text widget.
# Preset names here are referenced by UEditorField(toolbars="...") in models.
UEDITOR_SETTINGS = {
    'toolbars':{"testa":[['fullscreen', 'source', '|', 'undo', 'redo', '|','bold', 'italic', 'underline']],
        "testb":[[ 'source', '|','bold', 'italic', 'underline']],
        # Full-featured toolbar used by the News model's content_html field.
        "mytoolbars":[['fullscreen', 'source', '|', 'undo', 'redo', '|',
            'bold', 'italic', 'underline', 'fontborder', 'strikethrough', 'superscript', 'subscript', 'removeformat', 'formatmatch', 'autotypeset', 'blockquote', 'pasteplain', '|', 'forecolor', 'backcolor', 'insertorderedlist', 'insertunorderedlist', 'selectall', 'cleardoc', '|',
            'rowspacingtop', 'rowspacingbottom', 'lineheight', '|',
            'customstyle', 'paragraph', 'fontfamily', 'fontsize', '|',
            'directionalityltr', 'directionalityrtl', 'indent', '|',
            'justifyleft', 'justifycenter', 'justifyright', 'justifyjustify', '|', 'touppercase', 'tolowercase', '|',
            'link', 'unlink', 'anchor', '|', 'imagenone', 'imageleft', 'imageright', 'imagecenter', '|',
            'insertimage', 'emotion', 'scrawl', 'insertvideo', 'music', 'attachment', 'map', 'gmap', 'insertframe','insertcode', 'webapp', 'pagebreak', 'template', 'background', '|',
            'horizontal', 'date', 'time', 'spechars', 'snapscreen', 'wordimage', '|',
            'inserttable', 'deletetable', 'insertparagraphbeforetable', 'insertrow', 'deleterow', 'insertcol', 'deletecol', 'mergecells', 'mergeright', 'mergedown', 'splittocells', 'splittorows', 'splittocols', 'charts', '|',
            'print', 'preview', 'searchreplace', 'help', 'drafts']],
    },
    'images_upload':{
        'max_size':0,   # presumably 0 = unlimited upload size — TODO confirm against DjangoUeditor docs
        'path':"asd"
    },
    'scrawl_upload':{
        'path':'scrawlabc'
    }
}
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,827 | wwj718/begin_django | refs/heads/master | /apps/message/views.py | #coding:utf-8
# Create your views here.
from .models import Message
from django.http import HttpResponse
from django.shortcuts import render
#from django.views.generic import list_detail
from django.shortcuts import get_object_or_404
#django.views.generic.list.ListView
def hello(request):
    """Return a static plain-text greeting (smoke-test view for the message app)."""
    greeting = HttpResponse("Hello message")
    return greeting
def message_message_list(request, template_name='message_list.html'):
    """Render the guestbook listing page with every Message object."""
    context = {'message_list': Message.objects.all()}
    return render(request, template_name, context)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,828 | wwj718/begin_django | refs/heads/master | /apps/news/admin.py | #coding:utf-8
from django.contrib import admin
#from django.contrib.markup.templatetags.markup import restructuredtext
from .models import News,NewsCategory
#from django_admin_bootstrapped.widgets import GenericContentTypeSelect
class NewsAdmin(admin.ModelAdmin):
    """Admin options for News: title search, newest-first ordering."""
    search_fields = ('title',)
    fields = ('order', 'category', 'title', 'content_html', 'content_pic')
    list_display = ('order', 'category', 'title', 'view_times', 'create_time')
    ordering = ('-create_time',)
class NewsCategoryAdmin(admin.ModelAdmin):
    """Admin options for news categories: searchable by name."""
    search_fields = ('name',)
    list_display = ('name', 'create_time')


admin.site.register(News, NewsAdmin)
admin.site.register(NewsCategory, NewsCategoryAdmin)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,829 | wwj718/begin_django | refs/heads/master | /apps/news/context_processors.py | #coding:utf-8
'''
@edit by wwj
#为了在后台使用导航条,在所有admin模板中使用apps
'''
from django.utils.text import capfirst
from django.db.models import get_models
from django.utils.safestring import mark_safe
from django.contrib.admin import ModelAdmin
#from django.contrib.admin.validation import BaseValidator
# App labels excluded from the admin navigation bar built by app_list().
IGNORE_MODELS = (
    'sites',
    'sessions',
    'admin',
    'contenttypes',
    'common',
)
def app_list(request):
    """Context processor exposing an ``apps`` navigation structure.

    For every installed model the current user may administer, builds a
    change-list link grouped per app; the admin templates use the result
    to render a navigation bar.
    """
    user = request.user
    grouped = {}
    admin_class = ModelAdmin
    for model in get_models():
        model_admin = admin_class(model, None)
        label = model._meta.app_label
        if label in IGNORE_MODELS:
            continue
        if not user.has_module_perms(label):
            continue
        perms = model_admin.get_model_perms(request)
        # Include the model only if the user holds at least one permission.
        if True not in perms.values():
            continue
        entry = {
            'name': capfirst(model._meta.verbose_name_plural),
            'admin_url': mark_safe('%s/%s/' % (label, model.__name__.lower())),
        }
        if label in grouped:
            grouped[label]['models'].append(entry)
        else:
            grouped[label] = {
                'name': label.title(),
                'app_url': label + '/',
                'has_module_perms': True,
                'models': [entry],
            }
    # Sort apps and their models alphabetically by display name.
    apps = sorted(grouped.values(), key=lambda a: a['name'])
    for app in apps:
        app['models'].sort(key=lambda m: m['name'])
    return {'apps': apps}
61,830 | wwj718/begin_django | refs/heads/master | /apps/message/urls.py | #coding=utf-8
from django.conf.urls import patterns, include, url
from .views import hello,message_message_list
from django.shortcuts import get_object_or_404
# Routes for the message app:
#   ""     -> hello (plain-text smoke test)
#   "list" -> guestbook listing page
urlpatterns = patterns('',
    url(r'^$', hello),
    url(r'^list$', message_message_list, name='message_message_list'),
)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,831 | wwj718/begin_django | refs/heads/master | /profiles/urls.py | #coding=utf-8
from django.conf.urls import patterns, include, url
from .views import get_code
from userena import views as userena_views
from .forms import SignupFormExtra
# Registration routes built on django-userena, with the custom signup form.
urlpatterns = patterns('',
    url(r'^$', userena_views.signup, {'signup_form': SignupFormExtra}),
    url(r'^get_code/$', get_code),
    # NOTE(review): "user_exist" also maps to get_code — looks like a
    # copy-paste slip; confirm whether a separate user_exist view was intended.
    url(r'^user_exist/$', get_code),
)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,832 | wwj718/begin_django | refs/heads/master | /apps/message/models.py | #coding:utf-8
from django.db import models
#https://github.com/zhangfisher/DjangoUeditor/tree/master/TestApp
#看testapp学习使用
class Message(models.Model):
    """Guestbook message left by a site visitor, with an optional staff reply."""
    #id = models.AutoField(primary_key=True)  # implicit auto PK; do not change
    # Visitor's display name (optional); verbose names are user-facing
    # Chinese labels and must stay as-is.
    name = models.CharField(blank=True, max_length=20, verbose_name=u'姓名')
    email = models.EmailField()
    # Message body.
    content = models.CharField(max_length=100, verbose_name=u'内容')
    # Staff reply, filled in later via the admin (optional).
    response = models.CharField(blank=True, max_length=100, verbose_name=u'回复')
    create_time = models.DateTimeField(u'创建时间', auto_now_add=True, editable=True)

    def __unicode__(self):
        # Python 2 display string (admin change lists, shell).
        return self.name

    class Meta:
        # Newest messages first.
        ordering = ['-create_time']
        # Same label for singular and plural forms.
        verbose_name_plural = verbose_name = u"留言"
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,833 | wwj718/begin_django | refs/heads/master | /apps/news/models.py | #coding:utf-8
from django.db import models
#https://github.com/zhangfisher/DjangoUeditor/tree/master/TestApp
#看testapp学习使用
from django.db.models import permalink
from DjangoUeditor.models import UEditorField
from BeautifulSoup import BeautifulSoup
#新闻属性
ATTRIBUTE=(
(0, u'标红'),
(1, u'加粗'),
(2, u'首页'),
(3, u'置顶'),
(4, u'推荐'),
(5, u'头条'),
)
##url:https://github.com/the5fire/django_selfblog
class NewsCategory(models.Model):
    """News section/category; News references it via a foreign key."""
    name = models.CharField(max_length=40, verbose_name=u'栏目名称')
    create_time = models.DateTimeField(u'创建时间', auto_now_add=True)
    update_time = models.DateTimeField(u'更新时间', auto_now=True)

    def __unicode__(self):
        # Python 2 display string.
        return '%s' % (self.name)

    class Meta:
        # Newest categories first.
        ordering = ['-create_time']
        verbose_name_plural = verbose_name = u"新闻栏目"
class News(models.Model):
    """News article with a UEditor rich-text body.

    On save, the first <img> of the body is cached in ``content_pic`` so
    listing pages can show a thumbnail without re-parsing the HTML.
    """
    #id = models.AutoField(primary_key=True)  # implicit auto PK; do not change
    order = models.IntegerField(default=0, verbose_name=u'新闻顺序')
    category = models.ForeignKey(NewsCategory, verbose_name=u'新闻栏目')
    title = models.CharField(max_length=100, verbose_name=u'新闻标题')
    summary = models.TextField(blank=True, verbose_name=u'摘要')
    # Rich-text body; "mytoolbars" preset is defined in UEDITOR_SETTINGS.
    content_html = UEditorField('新闻内容',height=200,width=500,default='test',imagePath='content_img',imageManagerPath="bb",toolbars="mytoolbars",options={"elementPathEnabled":True},filePath='bb',blank=True)
    # Cached first <img> tag of content_html (set automatically in save()).
    content_pic = models.CharField(blank=True, max_length=200, verbose_name=u'展示图片')
    view_times = models.IntegerField(default=1, verbose_name=u'点击量')
    keyword = models.CharField(max_length=100, null=True, blank=True, verbose_name=u'新闻关键字', help_text=u'用英文逗号分割')
    attribute = models.CharField(max_length=100, null=True, blank=True, verbose_name=u'新闻属性')
    status = models.IntegerField(default=0, choices=ATTRIBUTE, verbose_name=u'状态')
    pic_url = models.URLField(blank=True, verbose_name=u'首页图片地址')
    attachments = models.FileField(blank=True, upload_to='news_attachments', verbose_name=u'附件')
    create_time = models.DateTimeField(u'创建时间', auto_now_add=True, editable=True)
    update_time = models.DateTimeField(u'更新时间', auto_now=True)

    def save(self, *args, **kwargs):
        """Cache the body's first <img> tag, then persist the model.

        BUG FIXES:
        - accept and forward ``*args``/``**kwargs`` so Django can pass
          ``force_insert``/``using`` etc. through to ``Model.save``;
        - ``soup.first("img")`` returns None when the body has no image,
          and ``str(None)`` previously stored the literal string "None"
          (which is truthy, so the fallback branch never ran).
        """
        soup = BeautifulSoup(self.content_html)
        first_img = soup.first("img")
        self.content_pic = str(first_img) if first_img is not None else ''
        super(News, self).save(*args, **kwargs)

    def __unicode__(self):
        return self.title

    class Meta:
        # Newest articles first.
        ordering = ['-create_time']
        verbose_name_plural = verbose_name = u"新闻"

    @permalink
    def get_absolute_url(self):
        # Reverses the pattern name defined in apps/news/urls.py.
        return ('news_news_details', None, {'id': self.id})
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,834 | wwj718/begin_django | refs/heads/master | /apps/news/urls.py | #coding=utf-8
from django.conf.urls import patterns, include, url
from .views import hello,news_news_details,news_news_list
from django.shortcuts import get_object_or_404
# A news id is one or more digits, captured as keyword argument "id".
NEWS_URL = r'(?P<id>\d+)'
# Routes for the news app:
#   ""       -> hello (plain-text smoke test)
#   "<id>"   -> article detail page
#   "list"   -> article listing page
urlpatterns = patterns('',
    url(r'^$', hello),
    url(r'^%s$' % NEWS_URL, news_news_details, name='news_news_details'),
    url(r'^list$', news_news_list, name='news_news_list'),
)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,835 | wwj718/begin_django | refs/heads/master | /profiles/__init__.py | #to extend django-userena
#don't hack it just extend | {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,836 | wwj718/begin_django | refs/heads/master | /apps/home/models.py | #coding=utf-8
from django.db import models
from sorl.thumbnail import ImageField
# Create your models here.
class HomePic(models.Model):
    """A picture shown on the home page, sorted by its ``order`` field."""
    name = models.CharField(max_length=40, verbose_name=u'图片名称')
    order = models.IntegerField(default=0, verbose_name=u'图片顺序')
    # sorl-thumbnail ImageField; uploads land under MEDIA_ROOT/home_img.
    pic = ImageField(blank=True, upload_to='home_img', verbose_name=u'图片')
    create_time = models.DateTimeField(u'创建时间', auto_now_add=True)
    update_time = models.DateTimeField(u'更新时间', auto_now=True)

    def __unicode__(self):
        return self.name

    class Meta:
        # Newest pictures first.
        ordering = ['-create_time']
        verbose_name_plural = verbose_name = u"首页图片"
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,837 | wwj718/begin_django | refs/heads/master | /apps/home/admin.py | #coding:utf-8
from django.contrib import admin
#from django.contrib.markup.templatetags.markup import restructuredtext
from .models import HomePic
from sorl.thumbnail.admin import AdminImageMixin
from sorl.thumbnail import get_thumbnail
class HomePicAdmin(AdminImageMixin, admin.ModelAdmin):
    """Admin for home-page pictures with an inline thumbnail column.

    BUG FIX: ``AdminImageMixin`` must precede ``admin.ModelAdmin`` in the
    base-class list; with the mixin listed last, ``ModelAdmin``'s methods
    win the MRO and the sorl-thumbnail admin widget is never applied.
    """
    search_fields = ('name',)
    fields = ('order', 'name', 'pic')
    list_display = ('image_thumbnail', 'order', 'name', 'pic', 'create_time')
    ordering = ('create_time', )

    def image_thumbnail(self, obj):
        """Render an 80x80 <img> tag for the changelist column."""
        im = get_thumbnail(obj.pic, '80x80', quality=99)
        return u"<img src='/media/%s' />" % im
    image_thumbnail.allow_tags = True


admin.site.register(HomePic, HomePicAdmin)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,838 | wwj718/begin_django | refs/heads/master | /apps/news/views.py | #coding:utf-8
# Create your views here.
from .models import News
from django.http import HttpResponse
from django.shortcuts import render
#from django.views.generic import list_detail
from django.shortcuts import get_object_or_404
#django.views.generic.list.ListView
def hello(request):
    """Return a static plain-text greeting (smoke-test view for the news app)."""
    reply = HttpResponse("Hello news")
    return reply
def news_news_details(request, id, template_name='news_details.html'):
    """Render the detail page for one News article; 404 if the id is unknown."""
    article = get_object_or_404(News, id=int(id))
    return render(request, template_name, {'news': article})
def news_news_list(request, template_name='news_list.html'):
    """Render the listing page with every News article."""
    context = {'news_list': News.objects.all()}
    return render(request, template_name, context)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,839 | wwj718/begin_django | refs/heads/master | /apps/home/views.py | #coding:utf-8
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
# Create your views here.
def hello(request):
    """Return a static plain-text greeting (smoke-test view for the home app)."""
    return HttpResponse("hello home")
def index(request):
    """Render the site home page with a RequestContext (context processors apply)."""
    return render_to_response('index.html', {}, RequestContext(request))
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,840 | wwj718/begin_django | refs/heads/master | /apps/home/urls.py | #coding=utf-8
from django.conf.urls import patterns, include, url
from .views import hello,index
#1.6有变?
#from django.views.generic.simple import direct_to_template
from django.views.generic import TemplateView
# Routes for the home app.
urlpatterns = patterns('',
    # BUG FIX: this pattern was r'', which Django matches against *every*
    # URL (patterns are searched, not anchored), so the /index/ and /about/
    # routes below were unreachable. Anchor it to the app root only.
    url(r'^$', hello),
    url(r'^index/$', index),
    # Static "about" page rendered straight from the template.
    (r'^about/$', TemplateView.as_view(template_name="about.html")),
)
61,841 | wwj718/begin_django | refs/heads/master | /begin_django/urls.py | #coding:utf-8
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
#edit by wwj
from django.contrib import admin
admin.autodiscover()
# import xadmin
# xadmin.autodiscover()
#注册插件
# from xadmin.plugins import xversion
# xversion.register_models()
#end wwj
# Project-level URL routing: static about page, one include per app,
# the admin, and the UEditor upload/config endpoints.
urlpatterns = patterns('',
    # Static page served straight from the template.
    url(r'^about/$', TemplateView.as_view(template_name="about.html")),
    #edit by wwj
    #url(r'^grappelli/', include('grappelli.urls')),
    url(r'^home/', include("apps.home.urls")),
    url(r'^news/', include("apps.news.urls")),
    url(r'^message/', include("apps.message.urls")),
    url(r'^admin/', include(admin.site.urls)),
    #(r'^grappelli/', include('grappelli.urls')),
    #(r'^filebrowser/',include('filebrowser.urls')),
    #url(r'^xadmin/', include(xadmin.site.urls)),
    url(r'^ueditor/',include('DjangoUeditor.urls' )),  # UEditor upload/config endpoints
    #end wwj
)
# Serve collected static files (development helper).
urlpatterns += staticfiles_urlpatterns()
# Serve user-uploaded media via Django (development only; use the web
# server for this in production).
from django.conf.urls.static import static
from django.conf import settings
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
#urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
#end wwj
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,842 | wwj718/begin_django | refs/heads/master | /profiles/models.py | #coding:utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
from userena.models import UserenaBaseProfile
from userena.utils import user_model_label
import datetime
# Extends django-userena's base profile with gender, mobile and role.
class Profile(UserenaBaseProfile):
    """ Default profile """
    GENDER_CHOICES = (
        (1, _('Male')),
        (2, _('Female')),
    )
    # One profile per user; userena looks it up via related_name='profile'.
    user = models.OneToOneField(user_model_label,
                                unique=True,
                                verbose_name=_('user'),
                                related_name='profile')
    gender = models.PositiveSmallIntegerField(_('gender'),
                                              choices=GENDER_CHOICES,
                                              blank=True,
                                              null=True)
    # Required and unique; presumably paired with the SMS code field in
    # SignupFormExtra — verify against the signup flow.
    mobile = models.CharField(max_length=20, unique=True)
    # NOTE(review): the original comment said "not editable / hidden,
    # read-only", but the field is declared editable — confirm it is
    # excluded from the admin/forms as intended.
    role = models.CharField(blank=True, max_length=20)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,843 | wwj718/begin_django | refs/heads/master | /apps/message/admin.py | #coding:utf-8
from django.contrib import admin
#from django.contrib.markup.templatetags.markup import restructuredtext
from .models import Message
#from django_admin_bootstrapped.widgets import GenericContentTypeSelect
class MessageAdmin(admin.ModelAdmin):
    """Changelist columns for guestbook messages."""
    list_display = ('name', 'email', 'content', 'create_time')


admin.site.register(Message, MessageAdmin)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,844 | wwj718/begin_django | refs/heads/master | /profiles/forms.py | #coding=utf-8
from django import forms
from django.utils.translation import ugettext_lazy as _
from userena.forms import SignupForm
from captcha.fields import CaptchaField
from django.shortcuts import get_object_or_404
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
class SignupFormExtra(SignupForm):
    """Userena signup form extended with mobile number, SMS code and captcha."""
    mobile = forms.CharField(label=_(u'手机号'), max_length=30, required=True)
    codesms = forms.CharField(label=_(u'手机验证码'), max_length=30, required=True)
    captcha = CaptchaField(label=_(u'验证码'))
    # NOTE(review): the crispy FormHelper is a class attribute shared by all
    # form instances; conventional crispy usage builds it in __init__ —
    # confirm the sharing is intended.
    helper = FormHelper()
    helper.form_id = 'register_form'
    helper.form_action = ''
    # Field ids match the page's JavaScript hooks (e.g. #GetCode for SMS).
    helper.layout = Layout(
        Field('username', id='UserName'),
        Field('password1', id='LoginPass'),
        Field('password2', id='LoginPass2'),
        Field('mobile', id='Mobile'),
        PrependedText('codesms', '<a href="#" id="GetCode" disabled="disabled">点击获取验证码</a>',css_class='span1',id='SMS'),
        Field('email', id='Email'),
        Field('captcha'),
        FormActions(
            Submit('save_changes', u'注册', css_class="span2 btn-primary"),
        )
    )

    def clean(self):
        # Cross-field validation hook; currently defers entirely to the parent.
        cleaned_data = super(SignupFormExtra, self).clean()
        return cleaned_data

    def save(self):
        """
        Override the save method to save the first and last name to the user
        field.
        """
        # Original save method returns the user.
        user = super(SignupFormExtra, self).save()
        return user
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,845 | wwj718/begin_django | refs/heads/master | /apps/news/adminx.py | #coding:utf-8
import xadmin
from xadmin import views
from models import News,NewsSubject,NewsCommittee,NewsCategory
# from xadmin.layout import Main, TabHolder, Tab, Fieldset, Row, Col, AppendedText, Side
# from xadmin.plugins.inline import Inline
# from xadmin.plugins.batch import BatchChangeAction
from django import forms
#在django xadmin中使用 Ueditor
#http://blog.csdn.net/u012762088/article/details/14497105
# xadmin dashboard layout for the admin index page.
class MainDashboard(object):
    # A list of widget rows; sample chart/list widget configs from the
    # xadmin demo app were removed (see xadmin documentation for formats).
    widgets = [
        [
            # Quick-start buttons linking to each model's changelist plus
            # one external link.
            {"type": "qbutton", "title": "Quick Start", "btns": [{'model': News}, {'model':NewsSubject}, {'model':NewsCommittee}, {'model':NewsCategory}, {'title': "Google", 'url': "http://www.google.com"}]},
        ]
    ]

xadmin.site.register(views.website.IndexView, MainDashboard)
class BaseSetting(object):
    """Per-view xadmin options: enable theme switching (bootswatch themes)."""
    enable_themes = True
    use_bootswatch = True
xadmin.site.register(views.BaseAdminView, BaseSetting)


class GlobalSetting(object):
    """Site-wide xadmin options: models covered by the global search box."""
    global_search_models = [News, NewsSubject, NewsCommittee, NewsCategory]
    # global_models_icon = {News: 'laptop', ...}  # optional per-model icons


xadmin.site.register(views.CommAdminView, GlobalSetting)
# class MaintainInline(object):
# model = MaintainLog
# extra = 1
# style = 'accordion'
# class NewsForm(forms.ModelForm):
# class Meta:
# model = News
# attribute = forms.MultipleChoiceField(label=u'新闻属性', choices=ATTRIBUTE, widget=forms.CheckboxSelectMultiple())
#文章属性
# Display attributes an article may carry: (value, label) choice pairs.
ATTRIBUTE = (
    (0, u'标红'),
    (1, u'加粗'),
    (2, u'首页'),
    (3, u'置顶'),
    (4, u'推荐'),
    (5, u'头条'),
)
class NewsForm(forms.ModelForm):
    """ModelForm for News with checkboxes for the multi-valued attribute.

    BUG FIX: ``attribute`` was declared inside ``class Meta``, where Django
    silently ignores it — form fields must be attributes of the form class
    itself to override the model's default widget.
    """
    attribute = forms.MultipleChoiceField(label=u'新闻属性', choices=ATTRIBUTE, widget=forms.CheckboxSelectMultiple())

    class Meta:
        model = News
class NewsAdmin(object):
    """xadmin options for News, including rich-text normalisation on save."""
    form = NewsForm
    search_fields = ('title',)
    fields = ('category', 'title', 'subtitle', 'summary', 'content_html', 'keyword',
        'attribute', 'pic_url', 'attachments', 'subject', 'committee')
    list_display = ('category', 'title', 'view_times', 'create_time')
    # NOTE(review): orders by 'pub_time', which is not among the News fields
    # visible in this dump — confirm against this branch's model.
    ordering = ('-pub_time', )
    save_on_top = True

    def save_model(self, request, obj, form, change):
        """Default the summary from the body and convert legacy markup."""
        #obj.author = request.user
        if not obj.summary:
            # No summary supplied: reuse the full rich-text body.
            obj.summary = obj.content_html
        else:
            # Normalise CRLF to <br/> and [cc lang='x']...[/cc] code blocks
            # to <pre> tags.
            obj.content_html = obj.content_html.replace('\r\n', '<br/>')
            import re
            obj.content_html = re.sub(r"\[cc lang='\w+?'\]", '<pre>', obj.content_html)
            obj.content_html = obj.content_html.replace('[/cc]', '</pre>')
        obj.save()
class NewsCategoryAdmin(object):
    """Changelist setup for news categories."""
    search_fields = ('name',)
    list_display = ('name', 'parent', 'desc', 'create_time', 'status')


class NewsSubjectAdmin(object):
    """Changelist setup for news subjects."""
    search_fields = ('name',)
    list_display = ('name', 'create_time', 'update_time')


class NewsCommitteeAdmin(object):
    """Changelist setup for news committees."""
    search_fields = ('name',)
    list_display = ('name', 'create_time', 'update_time')
# Register the news models with their xadmin option classes.
xadmin.site.register(News, NewsAdmin)
xadmin.site.register(NewsSubject, NewsSubjectAdmin)
xadmin.site.register(NewsCommittee, NewsCommitteeAdmin)
xadmin.site.register(NewsCategory, NewsCategoryAdmin)
| {"/apps/message/views.py": ["/apps/message/models.py"], "/apps/news/admin.py": ["/apps/news/models.py"], "/apps/message/urls.py": ["/apps/message/views.py"], "/profiles/urls.py": ["/profiles/forms.py"], "/apps/news/urls.py": ["/apps/news/views.py"], "/apps/home/admin.py": ["/apps/home/models.py"], "/apps/news/views.py": ["/apps/news/models.py"], "/apps/home/urls.py": ["/apps/home/views.py"], "/apps/message/admin.py": ["/apps/message/models.py"]} |
61,849 | jcoupon/maskUtils | refs/heads/master | /python/hsc/maskUtils/drawRandoms.py | #!/usr/bin/env python
#
# Jean Coupon (jean.coupon@unige.ch)
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
import numpy
import errno
import os
import healpy
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.afw.image as afwImage
from lsst.afw.geom import Box2D
import lsst.afw.table as afwTable
import lsst.meas.base as measBase
import lsst.afw.geom as afwGeom
import lsst.afw.detection as afwDetection
from lsst.afw.geom import Span, SpanSet, Stencil
from lsst.afw.detection import Footprint
from lsst.pipe.tasks.coaddBase import CoaddBaseTask
from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
__all__ = ["DrawRandomsTask"]
class DrawRandomsConfig(CoaddBaseTask.ConfigClass):
    """Configuration for DrawRandomsTask.

    ``N`` supersedes ``Nden`` when set; ``seed == -1`` means derive the RNG
    seed from the tract/patch id so all filters of a patch share randoms.
    """
    N = pexConfig.Field("Number of random points per patch (supersedes Nden)", int, -1)
    Nden = pexConfig.Field("Random number density per sq arcmin", float, 100)
    clobber = pexConfig.Field("To overwrite existing file [default: True]", bool, True)
    dirOutName = pexConfig.Field("Name of output directory (will write output files as dirOutName/FILTER/TRACT/PATCH/ran-FILTER-TRACT-PATCH.fits)", str, "")
    fileOutName = pexConfig.Field("Name of output file (supersedes dirOutName)", str, "")
    test = pexConfig.Field("To write a test table", bool, False)
    seed = pexConfig.Field("Seed for random generator (default: based on patch id)", int, -1)
    setPrimaryFlags = pexConfig.ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary source in tract/patch")
    depthMapFileName = pexConfig.Field("Name of healpix file that records full depth", str, "")

    def setDefaults(self):
        # NOTE(review): calls pexConfig.Config.setDefaults directly rather
        # than the immediate parent's — confirm skipping intermediate
        # defaults is intentional.
        pexConfig.Config.setDefaults(self)
class DrawRandomsTask(CoaddBaseTask):
    """Draw a catalog of random points over a coadd patch.

    For each random point the task records patch-wide sky statistics, the
    local pixel variance, pixel-flag measurements (via
    SingleFrameMeasurementTask) and, optionally, a full-depth/full-color
    flag read from a healpix map.
    """

    _DefaultName = "drawRandoms"
    ConfigClass = DrawRandomsConfig

    class depthMap(object):
        """Namespace holding the healpix depth-map info.

        NOTE(review): attributes (map, nest, header, nside) are assigned on
        the class object itself in ``__init__`` below, so they are shared by
        all task instances in a process -- confirm this is intended before
        creating more than one task.
        """
        def __init__(self):
            pass

    def __init__(self, schema=None, *args, **kwargs):
        CoaddBaseTask.__init__(self, *args, **kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        # load the optional healpix map recording the full-depth/full-color
        # area; self.depthMap.map stays None when no map file is configured
        self.depthMap.map = None
        if self.config.depthMapFileName != "":
            self.depthMap.nest = True
            self.depthMap.map, h = healpy.fitsfunc.read_map(
                self.config.depthMapFileName, nest=self.depthMap.nest, h=True)
            self.depthMap.header = dict(h)
            self.depthMap.nside = self.depthMap.header['NSIDE']

    def makeIdFactory(self, dataRef):
        """Return an IdFactory for setting the detection identifiers.

        The actual parameters used in the IdFactory are provided by
        the butler (through the provided data reference).
        """
        # see afw's testSourceTable.py for this pattern;
        # 'long' was changed to int for python3 compatibility in expId
        datasetName = "MergedCoaddId"
        expBits = dataRef.get(self.config.coaddName + datasetName + "_bits")
        expId = int(dataRef.get(self.config.coaddName + datasetName))
        return afwTable.IdFactory.makeSource(expId, 64 - expBits)

    def run(self, dataRef, selectDataList=None):
        """Draw randoms for a given patch and write them to a FITS catalog.

        Parameters
        ----------
        dataRef : butler data reference for the patch to process.
        selectDataList : unused; kept for CoaddBaseTask API compatibility.
            (Default changed from the mutable ``[]`` to ``None``; the value
            is never read, so callers are unaffected.)
        """
        # first test if the forced-src file exists;
        # do not process if the patch doesn't exist
        try:
            dataRef.get(self.config.coaddName + "Coadd_forced_src")
        except Exception:
            # was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
            # are not swallowed
            self.log.info("No forced_src file found for %s. Skipping..." % (dataRef.dataId))
            return

        self.log.info("Processing %s" % (dataRef.dataId))

        # create a seed that depends on patch id
        # so it is consistent among filters
        if self.config.seed == -1:
            p = [int(d) for d in dataRef.dataId["patch"].split(",")]
            numpy.random.seed(seed=dataRef.dataId["tract"]*10000 + p[0]*10 + p[1])
        else:
            numpy.random.seed(seed=self.config.seed)

        # compute sky mean and sky std_dev for this patch
        # in 2" diameter apertures (~12 pixels x 0.17"/pixel),
        # using the sky objects from the measurement catalog.
        # (Removed a dead "if True:" toggle whose unreachable else branch
        # set sky_mean = sky_std = 0.0.)
        sources = dataRef.get(self.config.coaddName + "Coadd_meas")
        sky_apertures = sources['base_CircularApertureFlux_12_0_flux'][sources['merge_peak_sky']]
        select = numpy.isfinite(sky_apertures)
        sky_mean = numpy.mean(sky_apertures[select])
        sky_std = numpy.std(sky_apertures[select])
        # NOTE: to get 5-sigma limiting magnitudes:
        # -2.5*numpy.log10(5.0*sky_std/coadd.getCalib().getFluxMag0()[0])

        # get coadd, coadd info and coadd psf object
        coadd = dataRef.get(self.config.coaddName + "Coadd_calexp")
        psf = coadd.getPsf()  # retained for a possible PSF-shape measurement
        var = coadd.getMaskedImage().getVariance().getArray()
        skyInfo = self.getSkyInfo(dataRef)

        # wcs and reference point (wrt tract)
        wcs = coadd.getWcs()
        xy0 = coadd.getXY0()
        # patch dimension in pixels
        dim = coadd.getDimensions()

        # define measurement algorithms; mostly copied from
        # meas_base's testInputCount.py
        measureSourcesConfig = measBase.SingleFrameMeasurementConfig()
        measureSourcesConfig.plugins.names = ['base_PixelFlags', 'base_PeakCentroid', 'base_InputCount', 'base_SdssShape']
        measureSourcesConfig.slots.centroid = "base_PeakCentroid"
        measureSourcesConfig.slots.psfFlux = None
        measureSourcesConfig.slots.apFlux = None
        measureSourcesConfig.slots.modelFlux = None
        measureSourcesConfig.slots.instFlux = None
        measureSourcesConfig.slots.calibFlux = None
        measureSourcesConfig.slots.shape = None

        # it seems it is still necessary to manually add the
        # bright-star mask flag by hand
        measureSourcesConfig.plugins['base_PixelFlags'].masksFpCenter.append("BRIGHT_OBJECT")
        measureSourcesConfig.plugins['base_PixelFlags'].masksFpAnywhere.append("BRIGHT_OBJECT")
        measureSourcesConfig.validate()

        # additional output columns:
        # random number to adjust sky density
        adjust_density = self.schema.addField("adjust_density", type=float, doc="Random number between [0:1] to adjust sky density", units='')
        # sky mean and std dev for the entire patch
        sky_mean_key = self.schema.addField("sky_mean", type=float, doc="Mean of sky value in 2\" diamter apertures", units='count')
        sky_std_key = self.schema.addField("sky_std", type=float, doc="Standard deviation of sky value in 2\" diamter apertures", units='count')
        # pixel variance at random point position
        pix_variance = self.schema.addField("pix_variance", type=float, doc="Pixel variance at random point position", units="flx^2")
        # healpix map value (only if a healpix map is given)
        if self.depthMap.map is not None:
            depth_key = self.schema.addField("isFullDepthColor", type="Flag", doc="True if full depth and full colors at point position", units='')

        # measurement task and output catalog
        task = measBase.SingleFrameMeasurementTask(self.schema, config=measureSourcesConfig)
        table = afwTable.SourceTable.make(self.schema, self.makeIdFactory(dataRef))
        catalog = afwTable.SourceCatalog(table)

        if self.config.N == -1:
            # to output a constant random number density,
            # first compute the patch area in square degrees
            pixel_area = coadd.getWcs().getPixelScale().asDegrees()**2
            area = pixel_area * dim[0] * dim[1]
            N = self.iround(area*self.config.Nden*60.0*60.0)
        else:
            # fixed number of random points
            N = self.config.N

        self.log.info("Drawing %d random points" % (N))

        for i in range(N):
            # draw one random point within the patch
            x = numpy.random.random()*(dim[0]-1)
            y = numpy.random.random()*(dim[1]-1)
            # sky coordinates (x/y are relative to the patch origin xy0)
            radec = wcs.pixelToSky(afwGeom.Point2D(x + xy0.getX(), y + xy0.getY()))
            xy = wcs.skyToPixel(radec)

            # new record in table
            record = catalog.addNew()
            record.setCoord(radec)

            # the object gets a single-pixel (radius 0) footprint
            radius = 0
            spanset = SpanSet.fromShape(radius, stencil=Stencil.CIRCLE, offset=afwGeom.Point2I(xy))
            foot = Footprint(spanset)
            foot.addPeak(xy[0], xy[1], 0.0)
            record.setFootprint(foot)

            # draw a number between 0 and 1 to adjust sky density downstream
            record.set(adjust_density, numpy.random.random())
            # add patch-wide sky properties
            record.set(sky_mean_key, sky_mean)
            record.set(sky_std_key, sky_std)
            # add local (pixel) variance
            record.set(pix_variance, float(var[self.iround(y), self.iround(x)]))
            # required for setPrimaryFlags
            record.set(catalog.getCentroidKey(), afwGeom.Point2D(xy))

            # add healpix map value
            if self.depthMap.map is not None:
                mapIndex = healpy.pixelfunc.ang2pix(self.depthMap.nside, numpy.pi/2.0 - radec[1].asRadians(), radec[0].asRadians(), nest=self.depthMap.nest)
                record.setFlag(depth_key, self.depthMap.map[mapIndex])

        # run measurements and set primary flags
        task.run(catalog, coadd)
        self.setPrimaryFlags.run(catalog, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo, includeDeblend=False)

        # resolve the output file name: explicit fileOutName wins, then
        # dirOutName, then a path derived from the forced_src location
        if self.config.fileOutName == "":
            if self.config.dirOutName == "":
                fileOutName = dataRef.get(self.config.coaddName + "Coadd_forced_src_filename")[0].replace('forced_src', 'ran')
                self.log.info("WARNING: the output file will be written in {0:s}.".format(fileOutName))
            else:
                fileOutName = "{0}/{1}/{2}/{3}/ran-{1}-{2}-{3}.fits".format(self.config.dirOutName, dataRef.dataId["filter"], dataRef.dataId["tract"], dataRef.dataId["patch"])
        else:
            fileOutName = self.config.fileOutName
        self.mkdir_p(os.path.dirname(fileOutName))
        catalog.writeFits(fileOutName)
        return

    # Overload these if your task inherits from CmdLineTask
    def _getConfigName(self):
        return None

    def _getEupsVersionsName(self):
        return None

    def _getMetadataName(self):
        return None

    def mkdir_p(self, path):
        """Create ``path`` (and parents), ignoring it if it already exists."""
        try:
            os.makedirs(path)
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise

    def iround(self, x):
        """iround(number) -> integer

        Round a number to the nearest integer (half away from zero).
        From https://www.daniweb.com/software-development/python/threads/299459/round-to-nearest-integer-
        """
        return int(round(x) - .5) + (x > 0)
| {"/python/hsc/maskUtils/__init__.py": ["/python/hsc/maskUtils/drawRandoms.py", "/python/hsc/maskUtils/createMultiRanCat.py"]} |
61,850 | jcoupon/maskUtils | refs/heads/master | /python/hsc/maskUtils/createMultiRanCat.py | #!/usr/bin/env python
#
# Jean Coupon (jean.coupon@unige.ch)
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/) and the HSC software team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
import numpy as np
import errno
import os
from argparse import ArgumentError
import lsst.pex.config as pexConfig
from lsst.pipe.tasks.coaddBase import CoaddBaseTask
import lsst.afw.table as afwTable
__all__ = ["CreateMultiRanCatTask"]
class CreateMultiRanCatConfig(CoaddBaseTask.ConfigClass):
    """Configuration for CreateMultiRanCatTask.

    Selects the filters to merge, the reference filter, the SFD dust map
    files, and where the merged catalog is written.
    """

    filters = pexConfig.Field(
        doc="Name of filters to combine [default HSC-G^HSC-R^HSC-I^HSC-Z^HSC-Y]",
        dtype=str, default="HSC-G^HSC-R^HSC-I^HSC-Z^HSC-Y")
    fileOutName = pexConfig.Field(
        doc="Name of output file",
        dtype=str, default="")
    dirOutName = pexConfig.Field(
        doc="Name of output directory (will write output files as dirOutName/FILTER/TRACT/PATCH/MultiRanCat-FILTER-TRACT-PATCH.fits)",
        dtype=str, default="")
    dustSgpFileName = pexConfig.Field(
        doc="Name of output file",
        dtype=str, default="/Users/coupon/data/SchlegelDust/SFD_dust_4096_sgp.fits")
    dustNgpFileName = pexConfig.Field(
        doc="Name of output file",
        dtype=str, default="/Users/coupon/data/SchlegelDust/SFD_dust_4096_ngp.fits")
    hasDepthInfo = pexConfig.Field(
        doc="If reference filter catalog has depth info",
        dtype=bool, default=False)
    refFilterName = pexConfig.Field(
        doc="Name of reference filter (default: HSC-I)",
        dtype=str, default="HSC-I")

    def setDefaults(self):
        pexConfig.Config.setDefaults(self)
class CreateMultiRanCatTask(CoaddBaseTask):
    """A Task to merge catalogs.

    Combines the per-filter random catalogs (written by DrawRandomsTask)
    for one patch into a single merged catalog, adding photometry-quality
    flags and Milky Way dust extinction E(B-V).
    """
    _DefaultName = 'CreateMultiRanCat'
    ConfigClass = CreateMultiRanCatConfig

    class dustMap(object):
        """Dust map info.

        Holds the SFD dust maps and their WCS for both galactic hemispheres.
        NOTE(review): attributes are assigned on the class object in
        __init__, so they are shared across task instances -- confirm this
        is intended.
        """
        def __init__(self):
            pass

    def __init__(self, schema=None, *args, **kwargs):
        CoaddBaseTask.__init__(self, *args, **kwargs)
        # ---------------------------------------------------------- #
        # for Galactic extinction until https://hsc-jira.astro.princeton.edu/jira/browse/HSC-1350 is fixed
        # (astropy imported locally so the task can be constructed even when
        # astropy is only needed for this feature)
        # ---------------------------------------------------------- #
        from astropy.io import ascii,fits
        import astropy.wcs as wcs
        sFile = fits.open(self.config.dustSgpFileName)
        nFile = fits.open(self.config.dustNgpFileName)
        self.dustMap.sMap = sFile[0].data
        self.dustMap.nMap = nFile[0].data
        self.dustMap.sWcs = wcs.WCS(sFile[0].header)
        self.dustMap.nWcs = wcs.WCS(nFile[0].header)
        # ---------------------------------------------------------- #
        #
        # ---------------------------------------------------------- #

    def iround(self, x):
        """iround(number) -> integer

        Round a number to the nearest integer.
        From https://www.daniweb.com/software-development/python/threads/299459/round-to-nearest-integer-
        """
        return int(round(x) - .5) + (x > 0)

    def readCatalog(self, dataRef, filterName):
        """Read input catalog.

        We read the input dataset provided by the 'inputDataset'
        class variable; the file path is built from the butler root and the
        data id (same layout DrawRandomsTask writes to).
        """
        dataRef.dataId["filter"] = filterName
        dirIntName = dataRef.getButler().mapper.root+"/"+self.config.coaddName+"Coadd-results"
        fileInName = "{0}/{1}/{2}/{3}/ran-{1}-{2}-{3}.fits".format(dirIntName,dataRef.dataId["filter"],dataRef.dataId["tract"],dataRef.dataId["patch"])
        catalog = afwTable.SourceCatalog.readFits(fileInName, 0)
        #dataRef.get("deepCoadd_forced_src", immediate=True)
        self.log.info("Read %d sources for filter %s: %s" % (len(catalog), filterName, dataRef.dataId))
        return filterName, catalog

    def readCoadd(self, dataRef, filterName):
        """Read the input coadd exposure for ``filterName``."""
        dataRef.dataId["filter"] = filterName
        coadd = dataRef.get("deepCoadd_calexp")
        self.log.info("Read coadd for filter %s: %s" % (filterName, dataRef.dataId))
        return filterName, coadd

    def getDustCorrection(self, dustMap, ra, dec):
        """Return the SFD E(B-V) value at (ra, dec) in degrees.

        Picks the northern or southern galactic-hemisphere map based on the
        galactic latitude of the coordinate.
        """
        from astropy import units as u
        from astropy.coordinates import SkyCoord
        import astropy.wcs as wcs
        coord = SkyCoord(ra=ra*u.degree, dec=dec*u.degree, frame='fk5')
        if coord.galactic.b.degree > 0.0:
            x, y = wcs.utils.skycoord_to_pixel(coord, dustMap.nWcs, origin=0)
            return float(dustMap.nMap[self.iround(y), self.iround(x)])
        else:
            x, y = wcs.utils.skycoord_to_pixel(coord, dustMap.sWcs, origin=0)
            return float(dustMap.sMap[self.iround(y), self.iround(x)])

    def run(self, dataRef, selectDataList=[]):
        """Merge the per-filter random catalogs for one patch and write the result."""
        self.log.info("Processing %s" % (dataRef.dataId))
        filters = self.config.filters.split("^")
        catalogs = dict(self.readCatalog(dataRef, f) for f in filters)
        coadds = dict(self.readCoadd(dataRef, f) for f in filters)
        # use the first filter's WCS for the pixel scale (arcsec/pixel)
        wcs = coadds[filters[0]].getWcs()
        pixel_scale = wcs.pixelScale().asDegrees()*3600.0
        ref = catalogs[self.config.refFilterName]
        # create new table table
        mergedSchema = afwTable.Schema()
        fields=[]
        # define table fields
        fields.append(mergedSchema.addField("id", type="L", doc="Unique id"))
        fields.append(mergedSchema.addField("ra", type="F", doc="ra [deg]"))
        fields.append(mergedSchema.addField("dec", type="F", doc="dec [deg]"))
        fields.append(mergedSchema.addField("countInputs", type="I", doc="Number of input single exposures for the reference filter"))
        fields.append(mergedSchema.addField("PSFDetRadius", type="F", doc="Determinant radius for the PSF at the object position = sigma if gaussian [arcsec]"))
        fields.append(mergedSchema.addField("EB_V", type="F", doc="Milky Way dust E(B-V) [mag]"))
        fields.append(mergedSchema.addField("isDuplicated", type="I", doc="1 if outside the inner tract or patch"))
        fields.append(mergedSchema.addField("isEdge", type="I", doc="1 if offImage or in region masked EDGE or NO_DATA"))
        fields.append(mergedSchema.addField("hasBadPhotometry", type="I", doc="1 if interpolated, saturated, suspect, has CR at center or near bright object"))
        fields.append(mergedSchema.addField("isClean", type="I", doc="1 if none of other flags is set"))
        if self.config.hasDepthInfo:
            fields.append(mergedSchema.addField("isFullDepthColor", type="I", doc="1 if point located in full depth and color area"))
        # create table object
        merged = afwTable.BaseCatalog(mergedSchema)
        N = len(ref)
        for i in range(N):
            # create new record
            record = merged.addNew()
            coord = ref[i].get('coord')
            # record if any of the filters is flagged as bad photometry;
            # the loop breaks early on the first bad filter, so after the
            # loop hasBadPhotometry holds that filter's (True) value
            for f in filters:
                hasBadPhotometry = (catalogs[f][i].get('flags.pixel.interpolated.center')) \
                    | (catalogs[f][i].get('flags.pixel.saturated.center')) \
                    | (catalogs[f][i].get('flags.pixel.suspect.center')) \
                    | (catalogs[f][i].get('flags.pixel.cr.center')) \
                    | (catalogs[f][i].get('flags.pixel.bad')) \
                    | (catalogs[f][i].get('flags.pixel.bright.object.center'))
                if hasBadPhotometry:
                    break
            isDuplicated = not ref[i].get('detect.is-primary')
            isEdge = (ref[i].get('flags.pixel.offimage')) | (ref[i].get('flags.pixel.edge'))
            isClean = (not hasBadPhotometry) & (not isDuplicated) & (not isEdge)
            # record common info from reference filter
            record.set(mergedSchema['id'].asKey(), ref[i].get('id'))
            record.set(mergedSchema['ra'].asKey(), coord.toFk5().getRa().asDegrees())
            record.set(mergedSchema['dec'].asKey(), coord.toFk5().getDec().asDegrees())
            record.set(mergedSchema['countInputs'].asKey(), ref[i].get('countInputs'))
            record.set(mergedSchema['PSFDetRadius'].asKey(), ref[i].get("shape.sdss.psf").getDeterminantRadius()*pixel_scale)
            record.set(mergedSchema['isDuplicated'].asKey(), int(isDuplicated))
            record.set(mergedSchema['isEdge'].asKey(), int(isEdge))
            record.set(mergedSchema['hasBadPhotometry'].asKey(), int(hasBadPhotometry))
            record.set(mergedSchema['isClean'].asKey(), int(isClean))
            if self.config.hasDepthInfo:
                record.set(mergedSchema['isFullDepthColor'].asKey(), int(ref[i].get('isFullDepthColor')))
            # dust correction
            EB_V = self.getDustCorrection(self.dustMap, record.get(mergedSchema['ra'].asKey()), record.get(mergedSchema['dec'].asKey()))
            record.set(mergedSchema['EB_V'].asKey(), EB_V)
        # write catalog: explicit fileOutName wins, otherwise build a path
        # under dirOutName (or the butler root if that is empty too)
        if self.config.fileOutName == "":
            # get output dir
            # TO DO: create new PAF
            # see /Users/coupon/local/source/hscPipe/install/DarwinX86/solvetansip/6.5.1p_hsc/python/hsc/meas/tansip/utils.py
            # and /Users/coupon/local/source/hscPipe/install/DarwinX86/pex_policy/HSC-4.0.0/tests/Policy_1.py
            if self.config.dirOutName == "" :
                dirOutName = dataRef.getButler().mapper.root+"/"+self.config.coaddName+"Coadd-results"
                self.log.info("WARNING: the output file will be written in {0:s}.".format(dirOutName))
            else:
                dirOutName = self.config.dirOutName
            fileOutName = "{0}/{1}/{2}/{3}/multiRanCat-{2}-{3}.fits".format(dirOutName,"merged",dataRef.dataId["tract"],dataRef.dataId["patch"])
        else:
            fileOutName = self.config.fileOutName
        self.log.info("Writing {0:s}".format(fileOutName))
        self.mkdir_p(os.path.dirname(fileOutName))
        merged.writeFits(fileOutName)
        return

    # Don't forget to overload these
    def _getConfigName(self):
        return None

    def _getEupsVersionsName(self):
        return None

    def _getMetadataName(self):
        return None

    def mkdir_p(self, path):
        # create path (and parents); ignore if it already exists
        try:
            os.makedirs(path)
        except OSError as exc: # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise
if __name__ == '__main__':
    # allow running this task directly as a command-line script
    CreateMultiRanCatTask.parseAndRun()
| {"/python/hsc/maskUtils/__init__.py": ["/python/hsc/maskUtils/drawRandoms.py", "/python/hsc/maskUtils/createMultiRanCat.py"]} |
61,851 | jcoupon/maskUtils | refs/heads/master | /python/hsc/maskUtils/__init__.py | from .drawRandoms import DrawRandomsTask
from .createMultiRanCat import CreateMultiRanCatTask
| {"/python/hsc/maskUtils/__init__.py": ["/python/hsc/maskUtils/drawRandoms.py", "/python/hsc/maskUtils/createMultiRanCat.py"]} |
61,862 | saltycrane/django-test-utils | refs/heads/master | /test_project/test_app/tests/testmaker_tests.py | """
This file is to test testmaker. It will run over the polls app and with the crawler and with test maker outputting things. Hopefully this will provide a sane way to test testmaker.
"""
from django.test.testcases import TestCase
from test_utils.testmaker import Testmaker
from django.conf import settings
import os
class TestMakerTests(TestCase):
    """
    Tests to test basic testmaker functionality.

    Each test drives the Django test client through the polls URLs while
    Testmaker's middleware records requests into local log files
    ('test_file' and 'serialize_file'), then inspects the logged output.
    """
    urls = "test_project.polls.urls"
    fixtures = ['polls_testmaker.json']

    def setUp(self):
        # start a Testmaker session logging to local files and make sure
        # its middleware is installed for the test client's requests
        self.tm = Testmaker()
        self.tm.setup_logging('test_file', 'serialize_file')
        Testmaker.enabled = True
        self.tm.insert_middleware()

    def tearDown(self):
        #Teardown logging somehow?
        # remove the log files created by setup_logging in setUp
        os.remove('test_file')
        os.remove('serialize_file')

    def test_basic_testmaker(self):
        """A request through the middleware logs the rendered poll context."""
        self.client.get('/')
        logs = open('test_file')
        output = logs.read()
        self.assertTrue(output.find('[<Poll: What\'s up?>, <Poll: Test poll>]') != -1)

    def test_twill_processor(self):
        """With the twill processor selected, responses are logged as twill 'code' checks."""
        settings.TESTMAKER_PROCESSOR = 'twill'
        self.client.get('/')
        self.client.get('/1/')
        logs = open('test_file')
        output = logs.read()
        self.assertTrue(output.find('code 200') != -1)

    def test_not_inserting_multiple_times(self):
        """
        Test that the middleware will only be inserted once.
        """
        self.tm.insert_middleware()
        self.tm.insert_middleware()
        middleware = settings.MIDDLEWARE_CLASSES
        #A set of the middleware should be the same, meaning the item isn't in twice.
        self.assertEqual(sorted(list(middleware)), sorted(list(set(middleware))))
61,863 | saltycrane/django-test-utils | refs/heads/master | /test_utils/utils/twill_runner.py | """
This code is originally by miracle2k:
http://bitbucket.org/miracle2k/djutils/src/97f92c32c621/djutils/test/twill.py
Integrates the twill web browsing scripting language with Django.
Provides two main functions, ``setup()`` and ``teardown``, that hook
(and unhook) a certain host name to the WSGI interface of your Django
app, making it possible to test your site using twill without actually
going through TCP/IP.
It also changes the twill browsing behaviour, so that relative urls
per default point to the intercept (e.g. your Django app), so long
as you don't browse away from that host. Further, you are allowed to
specify the target url as arguments to Django's ``reverse()``.
Usage:
from test_utils.utils import twill_runner as twill
twill.setup()
try:
twill.go('/') # --> Django WSGI
twill.code(200)
twill.go('http://google.com')
twill.go('/services') # --> http://google.com/services
twill.go('/list', default=True) # --> back to Django WSGI
twill.go('proj.app.views.func',
args=[1,2,3])
finally:
twill.teardown()
For more information about twill, see:
http://twill.idyll.org/
"""
# allows us to import global twill as opposed to this module
from __future__ import absolute_import
# TODO: import all names with a _-prefix to keep the namespace clean with the twill stuff?
import urlparse
import cookielib
import twill
import twill.commands
import twill.browser
from django.conf import settings
from django.core.servers.basehttp import AdminMediaHandler
from django.core.handlers.wsgi import WSGIHandler
from django.core.urlresolvers import reverse
from django.http import HttpRequest
from django.utils.datastructures import SortedDict
from django.contrib import auth
from django.core import signals
from django.db import close_connection
# make available through this module
from twill.commands import *
# re-export our own names plus everything twill.commands provides
__all__ = ('INSTALLED', 'setup', 'teardown', 'reverse',) + tuple(twill.commands.__all__)

# default host/port used for the WSGI intercept when none are given
DEFAULT_HOST = '127.0.0.1'
DEFAULT_PORT = 9090

INSTALLED = SortedDict() # keep track of the installed hooks, in insertion order
class DjangoWsgiFix(object):
    """WSGI wrapper that keeps Django from closing the DB connection.

    Django tears down the database connection at the end of every request,
    which would break the use of transactions in tests.  This wrapper
    disconnects the responsible ``request_finished`` handler for the
    duration of each request it serves, then reconnects it.

    This must happen for every single request rather than once at install
    time, because Django's own test client does the same thing and would
    re-register the handler if used alongside us.
    """
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        signals.request_finished.disconnect(close_connection)
        try:
            response = self.app(environ, start_response)
        finally:
            signals.request_finished.connect(close_connection)
        return response
def setup(host=None, port=None, allow_xhtml=True, propagate=True):
    """Install the WSGI hook for ``host`` and ``port``.

    The default values will be used if host or port are not specified.

    ``allow_xhtml`` enables a workaround for the "not viewer HTML"
    error when browsing sites that are determined to be XHTML, e.g.
    featuring xhtml-ish mimetypes.

    Unless ``propagate`` specifies otherwise, the
    ``DEBUG_PROPAGATE_EXCEPTIONS`` setting will be enabled for better
    debugging: when using twill, we don't really want to see 500 error
    pages, but rather directly the exceptions that occured on the view
    side.

    Multiple calls to this function will only result in one handler
    for each host/port combination being installed.

    Returns the (reset) browser on first install, False if a hook for
    this host/port pair was already installed.
    """
    host = host or DEFAULT_HOST
    port = port or DEFAULT_PORT
    key = (host, port)
    if not key in INSTALLED:
        # install wsgi handler (wrapped so DB connections survive requests)
        app = DjangoWsgiFix(AdminMediaHandler(WSGIHandler()))
        twill.add_wsgi_intercept(host, port, lambda: app)
        # start browser fresh
        browser = get_browser()
        browser.diverged = False
        # enable xhtml mode if requested
        _enable_xhtml(browser, allow_xhtml)
        # init debug propagate setting, and remember old value so
        # teardown() can restore it (None means "do not restore")
        if propagate:
            old_propgate_setting = settings.DEBUG_PROPAGATE_EXCEPTIONS
            settings.DEBUG_PROPAGATE_EXCEPTIONS = True
        else:
            old_propgate_setting = None
        INSTALLED[key] = (app, old_propgate_setting)
        return browser
    return False
def teardown(host=None, port=None):
    """Remove an installed WSGI hook for ``host`` and ``port``.

    If no host or port is passed, the default values will be assumed.
    If no hook is installed for the defaults, and both the host and
    port are missing, the last hook installed will be removed.

    Restores ``settings.DEBUG_PROPAGATE_EXCEPTIONS`` when ``setup()``
    changed it, and always forwards the removal request to twill.

    Returns True if a hook was removed (per our own records), else False.
    """
    both_missing = not host and not port
    host = host or DEFAULT_HOST
    port = port or DEFAULT_PORT
    key = (host, port)

    # pick the hook to remove: the exact key if we know it, otherwise
    # (when the caller gave neither host nor port) the last one installed
    key_to_delete = None
    if key in INSTALLED:
        key_to_delete = key
    elif both_missing and len(INSTALLED) > 0:
        host, port = key_to_delete = INSTALLED.keys()[-1]

    if key_to_delete:
        _, old_propagate = INSTALLED[key_to_delete]
        del INSTALLED[key_to_delete]
        result = True
        if old_propagate is not None:
            settings.DEBUG_PROPAGATE_EXCEPTIONS = old_propagate
    else:
        result = False

    # note that our return value is just a guess according to our
    # own records, we pass the request on to twill in any case
    twill.remove_wsgi_intercept(host, port)
    return result
def _enable_xhtml(browser, enable):
    """Twill (darcs from 19-09-2008) does not work with documents
    identifying themselves as XHTML.

    This is a workaround: flip the ``_allow_xhtml`` switch on both of the
    underlying response factories so xhtml-ish mimetypes are still parsed.
    """
    # reaches into twill/mechanize private attributes -- there is no public
    # API for this, so the exact chain is version-sensitive
    factory = browser._browser._factory
    factory.basic_factory._response_type_finder._allow_xhtml = \
        factory.soup_factory._response_type_finder._allow_xhtml = \
        enable
class _EasyTwillBrowser(twill.browser.TwillBrowser):
    """Custom version of twill's browser class that defaults relative
    URLs to the last installed hook, if available.

    It also supports reverse resolving, and some additional commands
    (``login``/``logout`` against the Django session framework).
    """
    def __init__(self, *args, **kwargs):
        # ``diverged`` tracks whether the user has browsed away from the
        # intercepted host; ``_testing_`` makes go() return the resolved
        # URL instead of fetching it (used by tests)
        self.diverged = False
        self._testing_ = False
        super(_EasyTwillBrowser, self).__init__(*args, **kwargs)

    def go(self, url, args=None, kwargs=None, default=None):
        # passing reverse() args/kwargs implies default=True, so combining
        # them with default=False is contradictory
        assert not ((args or kwargs) and default==False)

        if args is not None or kwargs is not None:
            url = reverse(url, args=args, kwargs=kwargs)
            default = True # default is implied

        if INSTALLED:
            netloc = '%s:%s' % INSTALLED.keys()[-1]
            urlbits = urlparse.urlsplit(url)
            if not urlbits[0]:
                # relative url: point it at the intercept unless we have
                # diverged to another host
                if default:
                    # force "undiverge"
                    self.diverged = False
                if not self.diverged:
                    url = urlparse.urlunsplit(('http', netloc)+urlbits[2:])
            else:
                # absolute url: we are leaving the intercepted host
                self.diverged = True

        if self._testing_: # hack that makes it simple for us to test this
            return url

        return super(_EasyTwillBrowser, self).go(url)

    def login(self, **credentials):
        """Log the user with the given credentials into your Django
        site.

        To further simplify things, rather than giving the credentials,
        you may pass a ``user`` parameter with the ``User`` instance you
        want to login. Note that in this case the user will not be
        further validated, i.e. it is possible to login an inactive user
        this way.

        This works regardless of the url currently browsed, but does
        require the WSGI intercept to be setup.

        Returns ``True`` if login was possible; ``False`` if the
        provided credentials are incorrect, or the user is inactive,
        or if the sessions framework is not available.

        Based on ``django.test.client.Client.logout``.

        Note: A ``twill.reload()`` will not refresh the cookies sent
        with the request, so your login will not have any effect there.
        This is different for ``logout``, since it actually invalidates
        the session server-side, thus making the current key invalid.
        """
        if not 'django.contrib.sessions' in settings.INSTALLED_APPS:
            return False
        host, port = INSTALLED.keys()[-1]

        # determine the user we want to login
        user = credentials.pop('user', None)
        if user:
            # Login expects the user object to reference it's backend.
            # Since we're not going through ``authenticate``, we'll
            # have to do this ourselves.
            # NOTE(review): the double assignment below is redundant;
            # a single ``user.backend = ...`` would suffice
            backend = auth.get_backends()[0]
            user.backend = user.backend = "%s.%s" % (
                backend.__module__, backend.__class__.__name__)
        else:
            user = auth.authenticate(**credentials)
            if not user or not user.is_active:
                return False

        # create a fake request to use with ``auth.login``
        request = HttpRequest()
        request.session = __import__(settings.SESSION_ENGINE, {}, {}, ['']).SessionStore()
        auth.login(request, user)
        request.session.save()

        # set the cookie to represent the session
        self.cj.set_cookie(cookielib.Cookie(
            version=None,
            name=settings.SESSION_COOKIE_NAME,
            value=request.session.session_key,
            port=str(port), # must be a string
            port_specified = False,
            domain=host, #settings.SESSION_COOKIE_DOMAIN,
            domain_specified=True,
            domain_initial_dot=False,
            path='/',
            path_specified=True,
            secure=settings.SESSION_COOKIE_SECURE or None,
            expires=None,
            discard=None,
            comment=None,
            comment_url=None,
            rest=None
        ))
        return True

    def logout(self):
        """Log the current user out of your Django site.

        This works regardless of the url currently browsed, but does
        require the WSGI intercept to be setup.

        Based on ``django.test.client.Client.logout``.
        """
        host, port = INSTALLED.keys()[-1]
        # find this host/port's session cookie, invalidate the session
        # server-side, and drop the cookie from the jar
        for cookie in self.cj:
            if cookie.name == settings.SESSION_COOKIE_NAME \
                    and cookie.domain==host \
                    and (not cookie.port or str(cookie.port)==str(port)):
                session = __import__(settings.SESSION_ENGINE, {}, {}, ['']).SessionStore()
                session.delete(session_key=cookie.value)
                self.cj.clear(cookie.domain, cookie.path, cookie.name)
                return True
        return False
def go(*args, **kwargs):
    """Navigate the module-level browser and return the resulting URL.

    Replaces the default ``go`` so the extra arguments of our custom
    browser (``args``, ``kwargs``, ``default``) are available.
    """
    active = get_browser()
    active.go(*args, **kwargs)
    return active.get_url()
def login(*args, **kwargs):
    """Module-level shortcut for the active browser's ``login``."""
    browser = get_browser()
    return browser.login(*args, **kwargs)
def logout(*args, **kwargs):
    """Module-level shortcut for the active browser's ``logout``."""
    browser = get_browser()
    return browser.logout(*args, **kwargs)
def reset_browser(*args, **kwargs):
    """Reset twill's global browser, then swap in our custom subclass.

    Replaces the default ``reset_browser`` to guarantee that
    ``_EasyTwillBrowser`` is always the active browser class.
    """
    outcome = twill.commands.reset_browser(*args, **kwargs)
    twill.commands.browser = _EasyTwillBrowser()
    return outcome
# Monkey-patch our custom browser into twill; this will be global, but
# will only have an actual effect when intercepts are installed through
# our module (via ``setup``).
# Unfortunately, twill pretty much forces us to use the same global
# state it does itself, lest us reimplement everything from
# ``twill.commands``. It's a bit of a shame, we could provide dedicated
# browser instances for each call to ``setup()``.
# Executed at import time so the custom browser is active immediately.
reset_browser()
def url(should_be=None):
    """Like the default ``url()``, but can be called without arguments,
    in which case it returns the current url.
    """
    if should_be is None:
        return get_browser().get_url()
    return twill.commands.url(should_be)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,864 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/serializers/pickle_serializer.py | import base
import cPickle as pickle
from test_utils.testmaker.serializers import REQUEST_UNIQUE_STRING, RESPONSE_UNIQUE_STRING
class Serializer(base.Serializer):
    """Serializer that writes requests/responses as pickled dicts,
    separated by the module-level unique break strings."""

    def __init__(self, name='pickle'):
        super(Serializer, self).__init__(name)

    def save_request(self, request):
        """Write the processed request, pickled, to the serialization stream."""
        payload = self.process_request(request)
        self.ser.info(pickle.dumps(payload))
        self.ser.info(REQUEST_UNIQUE_STRING)

    def save_response(self, request, response):
        """Write the processed response data, skipping unpicklable objects."""
        payload = self.process_response(request.path, response)
        try:
            self.ser.info(pickle.dumps(payload))
            self.ser.info(RESPONSE_UNIQUE_STRING)
        except (TypeError, pickle.PicklingError):
            # wsgi.error file objects cannot be pickled; drop the entry.
            pass
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,865 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/processors/django_processor.py | import base
# Django-flavoured TestCase snippets rendered by the Processor below;
# the {{...}} placeholders are filled in via Django's template engine.
TEST_TEMPLATE = \
""" def test_{{path}}_{{time}}(self):
r = self.client.{{method}}({{request_str}})"""
STATUS_TEMPLATE = \
""" self.assertEqual(r.status_code, {{status_code}})"""
CONTEXT_TEMPLATE = \
''' self.assertEqual(unicode(r.context["{{key}}"]), u"""{{value}}""")'''
class Processor(base.Processer):
    """Processes the serialized data. Generally to create some sort of test cases"""

    def __init__(self, name='django'):
        super(Processor, self).__init__(name)

    def _get_template(self, templatename):
        """Map a logical template name onto its Django test snippet."""
        templates = {
            'test': TEST_TEMPLATE,
            'status': STATUS_TEMPLATE,
            'context': CONTEXT_TEMPLATE,
        }
        return templates[templatename]
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,866 | saltycrane/django-test-utils | refs/heads/master | /test_project/test_app/tests/crawler_tests.py | """
This file is to test testmaker. It will run over the polls app and with the crawler and with test maker outputting things. Hopefully this will provide a sane way to test testmaker.
"""
from django.test.testcases import TestCase
from test_utils.crawler.base import Crawler
import logging
import os
class CrawlerTests(TestCase):
    """
    Tests to test the Crawler API
    """
    urls = "test_project.polls.urls"
    fixtures = ['polls_testmaker.json']

    def setUp(self):
        # Route crawler logging into a scratch file the tests can inspect.
        self.log = logging.getLogger('crawler')
        [self.log.removeHandler(h) for h in self.log.handlers]
        self.log.setLevel(logging.DEBUG)
        handler = logging.FileHandler('crawler_log', 'a')
        handler.setFormatter(logging.Formatter('%(message)s'))
        self.log.addHandler(handler)

    def tearDown(self):
        os.remove('crawler_log')

    def _read_log(self):
        """Return the crawler log contents, closing the handle.

        The original tests opened 'crawler_log' and never closed the
        file object — a resource leak fixed by this helper.
        """
        logs = open('crawler_log')
        try:
            return logs.read()
        finally:
            logs.close()

    def test_basic_crawling(self):
        c = Crawler('/')
        c.run()
        self.assertEqual(c.crawled, {'/': True, u'/1': True, u'/2': True})

    def test_relative_crawling(self):
        c = Crawler('/1')
        c.run()
        self.assertEqual(c.crawled, {u'/1': True})

    def test_url_plugin(self):
        conf_urls = {'this_wont_be_crawled': True}
        c = Crawler('/', conf_urls=conf_urls)
        c.run()
        output = self._read_log()
        self.assertTrue(output.find('These patterns were not matched during the crawl: this_wont_be_crawled') != -1)

    def test_time_plugin(self):
        # This isn't testing much, but I can't know how long the time will take
        c = Crawler('/')
        c.run()
        output = self._read_log()
        self.assertTrue(output.find('Time taken:') != -1)

    def test_memory_plugin(self):
        from test_utils.crawler.plugins.memory_plugin import Memory
        Memory.active = True
        c = Crawler('/')
        c.run()
        output = self._read_log()
        self.assertTrue(output.find('Memory consumed:') != -1)

    # Guppy makes the tests take a lot longer, uncomment this if you want to
    # test it.
    """
    def test_guppy_plugin(self):
        #This isn't testing much, but I can't know how long the time will take
        from test_utils.crawler.plugins.guppy_plugin import ACTIVE, Heap
        if ACTIVE:
            Heap.active = True
            c = Crawler('/')
            c.run()
            logs = open('crawler_log')
            output = logs.read()
            import ipdb; ipdb.set_trace()
            self.assertTrue(output.find('heap') != -1)
        else:
            print "Skipping memory test, as guppy isn't installed"
    """
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,867 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/__init__.py | import logging
import os
from os import path
from django.core import serializers as django_serializers
from test_utils.management.commands.relational_dumpdata import _relational_dumpdata
from django.template import Context, Template
from django.conf import settings
# Header written at the top of every freshly generated test file; the
# fixtures line is toggled on the ``create_fixtures`` context flag.
TESTMAKER_TEMPLATE = """\
#coding: utf-8
from django.test import TestCase
from django.test import Client
from django import template
from django.db.models import get_model
class Testmaker(TestCase):
{% if create_fixtures %}
fixtures = ["{{ fixture_file }}"]
{% else %}
#fixtures = ["{{ app_name }}_testmaker"]
{% endif %}
"""
class Testmaker(object):
    """Coordinates a testmaker run: computes output locations, wires up
    the log-backed test/serializer writers, inserts the recording
    middleware, and (optionally) dumps fixtures afterwards."""

    # Flipped on by prepare(); checked by the recording middleware.
    enabled = False
    # Have global log and serializer objects so that we never log things twice.
    log = None
    serializer = None

    def __init__(self, app=None, verbosity=0, create_fixtures=False, fixture_format='xml', addrport='', **kwargs):
        # app: Django app module to record for (None -> /tmp/testmaker).
        self.app = app
        self.verbosity = verbosity
        self.create_fixtures = create_fixtures
        self.fixture_format = fixture_format
        self.addrport = addrport
        self.kwargs = kwargs
        # Assume we're writing new tests until proven otherwise.
        self.new_tests = True

    def prepare(self, insert_middleware=False):
        """Set up paths, logging and the test file, then enable recording."""
        self.set_paths()
        if not hasattr(self, 'has_run_logging'):
            self.setup_logging()
        self.prepare_test_file()
        if insert_middleware:
            self.insert_middleware()
        Testmaker.enabled = True

    def set_paths(self):
        """Derive output dirs/filenames for tests, fixtures and serialized
        data from the target app (or a /tmp fallback), creating
        directories on demand."""
        if self.app:
            self.app_name = self.app.__name__.split('.')[-2]
            self.base_dir = path.dirname(self.app.__file__)
        else:
            self.app_name = 'tmp'
            # TODO: Need to make this platform independent.
            self.base_dir = '/tmp/testmaker/'
            if not path.exists(self.base_dir):
                os.mkdir(self.base_dir)
        # Figure out where to store data
        self.fixtures_dir = path.join(self.base_dir, 'fixtures')
        self.fixture_file = path.join(self.fixtures_dir, '%s_testmaker.%s' % (self.app_name, self.fixture_format))
        if self.create_fixtures:
            if not path.exists(self.fixtures_dir):
                os.mkdir(self.fixtures_dir)
        # Setup test and serializer files
        self.tests_dir = path.join(self.base_dir, 'tests')
        self.test_file = path.join(self.tests_dir, '%s_testmaker.py' % (self.app_name))
        # TODO: Make this have the correct file extension based on serializer used
        self.serialize_file = path.join(self.tests_dir, '%s_testdata.serialized' % (self.app_name))
        if not path.exists(self.tests_dir):
            os.mkdir(self.tests_dir)
        if path.exists(self.test_file):
            # Already have tests there; append rather than rewrite header.
            self.new_tests = False
        if self.verbosity > 0:
            print "Handling app '%s'" % self.app_name
            print "Logging tests to %s" % self.test_file
            if self.create_fixtures:
                print "Logging fixtures to %s" % self.fixture_file

    def setup_logging(self, test_file=None, serialize_file=None):
        """Create the append-mode file loggers 'testprocessor' and
        'testserializer', stored class-wide on Testmaker.log/serializer."""
        # supress other logging
        logging.basicConfig(level=logging.CRITICAL,
            filename=path.devnull)
        # Override default if its passed in
        if not test_file:
            test_file = self.test_file
        else:
            self.test_file = test_file
        log = logging.getLogger('testprocessor')
        [log.removeHandler(h) for h in log.handlers]
        log.setLevel(logging.INFO)
        handler = logging.FileHandler(test_file, 'a')
        handler.setFormatter(logging.Formatter('%(message)s'))
        log.addHandler(handler)
        Testmaker.log = log
        # Override default if its passed in
        if not serialize_file:
            serialize_file = self.serialize_file
        else:
            self.serialize_file = serialize_file
        log_s = logging.getLogger('testserializer')
        [log_s.removeHandler(h) for h in log_s.handlers]
        log_s.setLevel(logging.INFO)
        handler_s = logging.FileHandler(self.serialize_file, 'a')
        handler_s.setFormatter(logging.Formatter('%(message)s'))
        log_s.addHandler(handler_s)
        Testmaker.serializer = log_s
        self.has_run_logging = True

    def prepare_test_file(self):
        """Render the TESTMAKER_TEMPLATE header when starting a fresh file."""
        if self.new_tests:
            t = Template(TESTMAKER_TEMPLATE)
            c = Context({
                'create_fixtures': self.create_fixtures,
                'app_name': self.app_name,
                'fixture_file': self.fixture_file,
            })
            self.log.info(t.render(c))
        else:
            if self.verbosity > 0:
                print "Appending to current log file"

    def insert_middleware(self):
        """Append the request-recording middleware to settings (idempotent)."""
        if self.verbosity > 0:
            print "Inserting TestMaker logging server..."
        if 'test_utils.testmaker.middleware.testmaker.TestMakerMiddleware' not in settings.MIDDLEWARE_CLASSES:
            settings.MIDDLEWARE_CLASSES += ('test_utils.testmaker.middleware.testmaker.TestMakerMiddleware',)

    def make_fixtures(self):
        """Serialize the app's (relationally complete) data to the fixture
        file; serialization failures are reported, not raised."""
        if self.verbosity > 0:
            print "Creating fixture at " + self.fixture_file
        objects, collected = _relational_dumpdata(self.app, set())
        serial_file = open(self.fixture_file, 'a')
        try:
            django_serializers.serialize(self.fixture_format, objects, stream=serial_file, indent=4)
        except Exception, e:
            if self.verbosity > 0:
                print ("Unable to serialize database: %s" % e)

    @classmethod
    def logfile(klass):
        """Path of the file the shared 'testprocessor' logger writes to."""
        return klass.log.handlers[0].baseFilename
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,868 | saltycrane/django-test-utils | refs/heads/master | /test_utils/test_runners/profile.py | import cProfile
import pstats
from django.test.simple import run_tests as django_test_runner
def run_tests(test_labels, verbosity=1, interactive=True,
extra_tests=[], nodatabase=False):
"""
Test runner which displays basic profile data.
Needs some improvement, mostly here for Continuous Integration purposes.
"""
print "Using profiling test runner"
cProfile.runctx("django_test_runner(test_labels, verbosity, interactive, extra_tests)", globals(), locals(), filename="django_tests.profile")
stats = pstats.Stats('django_tests.profile')
stats.strip_dirs().sort_stats('time').print_stats(30)
return 0
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,869 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/processors/base.py | import logging
import re
import time
from django.template.defaultfilters import slugify as base_slugify
from django.template import Template, Context
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from test_utils.templatetags import TemplateParser
# Placeholder templates; concrete processors supply real snippets via
# their _get_template() override.
TEST_TEMPLATE = 'Override in Subclass'
STATUS_TEMPLATE = 'Override in Subclass'
CONTEXT_TEMPLATE = 'Override in Subclass'
# Context keys to omit from generated assertions (currently none).
#DISCARD_CONTEXT_KEYS = ('LANGUAGES',)
DISCARD_CONTEXT_KEYS = []
def safe_dict(source):
    """Return a copy of *source* with every value wrapped in
    ``mark_safe`` so the template engine will not autoescape it.

    Fixes: the parameter was previously named ``dict``, shadowing the
    builtin (positional callers are unaffected by the rename), and used
    the Python 2-only ``iteritems`` — ``items()`` works on both.
    """
    new_dic = {}
    for key, val in source.items():
        new_dic[key] = mark_safe(val)
    return new_dic
def slugify(toslug):
    """
    Turn dashs into underscores to sanitize for filenames
    """
    return base_slugify(toslug).replace("-", "_")
class Processer(object):
    """Processes the serialized data. Generally to create some sort of test cases"""

    def __init__(self, name):
        # name: identifies the concrete processor (e.g. 'django').
        self.name = name
        # Shared 'testprocessor' logger — output goes to the test file.
        self.log = logging.getLogger('testprocessor')
        #self.log = logging.getLogger('testprocessor-%s' % self.name)
        self.data = {}

    def shall_we_proceed(self, request):
        """Skip media and test_utils' own URLs — no tests for those."""
        if 'media' in request.path or 'test_utils' in request.path:
            return False
        return True

    def process_request(self, request):
        # Subclass hook: turn a request into serializable data.
        raise NotImplementedError

    def save_request(self, request):
        """ Actually write the request out to a file """
        if self.shall_we_proceed(request):
            self._log_request(request)

    def process_response(self, request, response):
        # Subclass hook: turn a response into serializable data.
        raise NotImplementedError

    def save_response(self, request, response):
        """Write status (and, when present and not 404, context)
        assertions for the response."""
        if self.shall_we_proceed(request):
            self._log_status(response)
            if response.context and response.status_code != 404:
                self._log_context(response.context)
            # This is where template tag outputting would go
            # Turned off until it gets betterer
            """
            parser = TemplateParser(response.template[0], context)
            parser.parse()
            parser.create_tests()
            """

    def _get_template(self, templatename):
        """Should be implemented in subclass"""
        raise NotImplementedError

    def _log_request(self, request):
        """Render and log a test-method snippet replaying this request."""
        method = request.method.lower()
        # Build the "'path', {param: value, ...}" argument source string.
        request_str = "'%s', {" % request.path
        for dikt in request.REQUEST.dicts:
            for arg in dikt:
                request_str += "'%s': '%s', " % (arg, request.REQUEST[arg])
        request_str += "}"
        template = Template(self._get_template('test'))
        context = {
            'path': slugify(request.path),
            'time': slugify(time.time()),
            'method': method,
            'request_str': request_str,
        }
        context = Context(safe_dict(context))
        self.log.info(template.render(context))

    def _log_status(self, response):
        """Render and log a status-code assertion (records the redirect
        target for 301/302 responses)."""
        template = Template(self._get_template('status'))
        context = {
            'status_code': response.status_code,
        }
        if response.status_code in [301, 302]:
            context['location'] = response['Location']
        context = Context(safe_dict(context))
        self.log.info(template.render(context))

    def _get_context_keys(self, context):
        """Get the keys from the contexts(list) """
        keys = []
        for d in context.dicts:
            if isinstance(d, Context):
                keys += self._get_context_keys(d)
            # NOTE(review): nested Contexts also contribute via d.keys()
            # here — confirm that is intended rather than an elided else.
            keys += d.keys()
        return keys

    def _log_context(self, context):
        """Render and log one assertion per (deduplicated) context key,
        skipping values that embed memory addresses."""
        template = Template(self._get_template('context'))
        keys = []
        if isinstance(context, list):
            for c in context:
                keys += self._get_context_keys(c)
        else:
            keys += self._get_context_keys(context)
        keys = set(keys)
        # Skip some keys
        for discardkey in DISCARD_CONTEXT_KEYS:
            keys.discard(discardkey)
        for key in keys:
            val = force_unicode(context[key])
            con = {
                'key': key,
                'value': val,
            }
            con = Context(safe_dict(con))
            try:
                # Avoid memory addy's which will change.
                if not re.search("0x\w+", val):
                    self.log.info(template.render(con))
            except UnicodeDecodeError, e:
                pass
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,870 | saltycrane/django-test-utils | refs/heads/master | /test_utils/views.py | from django.http import HttpResponse
import logging
from test_utils.testmaker.processors.base import slugify
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
    """Point Testmaker's test/serializer output at files derived from
    *filename* (falling back to the ``filename`` request parameter)."""
    if not filename:
        filename = request.REQUEST['filename']
    slug = slugify(filename)
    tm = Testmaker()
    tm.setup_logging(
        test_file='/tmp/testmaker/tests/%s_tests_custom.py' % slug,
        serialize_file='/tmp/testmaker/tests/%s_serial_custm.py' % slug,
    )
    #tm.app_name = 'tmp'
    #tm.prepare_test_file()
    return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
    """Return the contents of Testmaker's current test log as text/plain.

    Fixes: the log file handle was never closed, the local variable
    shadowed the ``file`` builtin, and a stray module-level
    ``HttpResponse()`` call (constructed and discarded) has been removed.
    """
    log_path = Testmaker.logfile()
    log = open(log_path)
    try:
        contents = log.read()
    finally:
        log.close()
    return HttpResponse(contents, content_type='text/plain')
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,871 | saltycrane/django-test-utils | refs/heads/master | /test_project/test_app/tests/__init__.py | from assertions_tests import *
from templatetags_tests import *
from testmaker_tests import *
from crawler_tests import *
import twill_tests
# Expose the twill doctest module to the doctest collector.
__test__ = {
    'TWILL': twill_tests,
}
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,872 | saltycrane/django-test-utils | refs/heads/master | /test_utils/management/commands/crawlurls.py | from collections import defaultdict
from optparse import make_option
import logging
import sys
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.contrib.admindocs.views import extract_views_from_urlpatterns
from test_utils.crawler.base import Crawler
class LogStatsHandler(logging.Handler):
    """Logging handler that counts emitted records per log level.

    ``stats`` maps ``record.levelno`` -> number of records seen.
    """

    def __init__(self, *args, **kwargs):
        logging.Handler.__init__(self, *args, **kwargs)
        # Per-instance counter. Previously this was a *class* attribute,
        # silently sharing counts between every handler instance.
        self.stats = defaultdict(int)

    def emit(self, record):
        self.stats[record.levelno] += 1
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-p', '--pdb', action='store_true', dest='pdb', default=False,
help='Pass -p to drop into pdb on an error'),
make_option('-d', '--depth', action='store', dest='depth', default=3,
help='Specify the depth to crawl.'),
make_option('-s', '--safe', action='store_true', dest='html', default=False,
help='Pass -s to check for html fragments in your pages.'),
make_option('-r', '--response', action='store_true', dest='response', default=False,
help='Pass -r to store the response objects.'),
make_option('-t', '--time', action='store_true', dest='time', default=False,
help='Pass -t to time your requests.'),
make_option('--enable-plugin', action='append', dest='plugins', default=[],
help='Enable the specified plugin'),
make_option("-o", '--output-dir', action='store', dest='output_dir', default=None,
help='If specified, store plugin output in the provided directory'),
make_option('--no-parent', action='store_true', dest="no_parent", default=False,
help='Do not crawl URLs which do not start with your base URL'),
make_option('-a', "--auth", action='store', dest='auth', default=None,
help='Authenticate (login:user,password:secret) before crawl')
)
help = "Displays all of the url matching routes for the project."
args = "[relative start url]"
def handle(self, *args, **options):
verbosity = int(options.get('verbosity', 1))
depth = int(options.get('depth', 3))
auth = _parse_auth(options.get('auth'))
if verbosity > 1:
log_level = logging.DEBUG
elif verbosity:
log_level = logging.INFO
else:
log_level = logging.WARN
crawl_logger = logging.getLogger('crawler')
crawl_logger.setLevel(logging.DEBUG)
crawl_logger.propagate = 0
log_stats = LogStatsHandler()
crawl_logger.addHandler(log_stats)
console = logging.StreamHandler()
console.setLevel(log_level)
console.setFormatter(logging.Formatter("%(name)s [%(levelname)s] %(module)s: %(message)s"))
crawl_logger.addHandler(console)
if len(args) > 1:
raise CommandError('Only one start url is currently supported.')
else:
start_url = args[0] if args else '/'
if settings.ADMIN_FOR:
settings_modules = [__import__(m, {}, {}, ['']) for m in settings.ADMIN_FOR]
else:
settings_modules = [settings]
conf_urls = {}
# Build the list URLs to test from urlpatterns:
for settings_mod in settings_modules:
try:
urlconf = __import__(settings_mod.ROOT_URLCONF, {}, {}, [''])
except Exception, e:
logging.exception("Error occurred while trying to load %s: %s", settings_mod.ROOT_URLCONF, str(e))
continue
view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
for (func, regex) in view_functions:
#Get function name and add it to the hash of URLConf urls
func_name = hasattr(func, '__name__') and func.__name__ or repr(func)
conf_urls[regex] = ['func.__module__', func_name]
c = Crawler(start_url,
conf_urls=conf_urls,
verbosity=verbosity,
output_dir=options.get("output_dir"),
ascend=not options.get("no_parent"),
auth=auth,
)
# Load plugins:
for p in options['plugins']:
# This nested try is somewhat unsightly but allows easy Pythonic
# usage ("--enable-plugin=tidy") instead of Java-esque
# "--enable-plugin=test_utils.crawler.plugins.tidy"
try:
try:
plugin_module = __import__(p)
except ImportError:
if not "." in p:
plugin_module = __import__(
"test_utils.crawler.plugins.%s" % p,
fromlist=["test_utils.crawler.plugins"]
)
else:
raise
c.plugins.append(plugin_module.PLUGIN())
except (ImportError, AttributeError), e:
crawl_logger.critical("Unable to load plugin %s: %s", p, e)
sys.exit(3)
c.run(max_depth=depth)
# We'll exit with a non-zero status if we had any errors
max_log_level = max(log_stats.stats.keys())
if max_log_level >= logging.ERROR:
sys.exit(2)
elif max_log_level >= logging.WARNING:
sys.exit(1)
else:
sys.exit(0)
def _parse_auth(auth):
"""
Parse auth string and return dict.
>>> _parse_auth('login:user,password:secret')
{'login': 'user', 'password': 'secret'}
>>> _parse_auth('name:user, token:top:secret')
{'name': 'user', 'token': 'top:secret'}
"""
if not auth:
return None
items = auth.split(',')
return dict(i.strip().split(':', 1) for i in items)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,873 | saltycrane/django-test-utils | refs/heads/master | /setup.py | from setuptools import setup, find_packages
# Standard setuptools metadata; the suite runs through the test_project
# runner and BeautifulSoup is the only hard install requirement.
setup(
    name = "django-test-utils",
    version = "0.3",
    packages = find_packages(),
    author = "Eric Holscher",
    author_email = "eric@ericholscher.com",
    description = "A package to help testing in Django",
    url = "http://github.com/ericholscher/django-test-utils/tree/master",
    download_url='http://www.github.com/ericholscher/django-test-utils/tarball/0.3.0',
    test_suite = "test_project.run_tests.run_tests",
    include_package_data = True,
    install_requires=[
        'BeautifulSoup',
    ]
)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,874 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/serializers/__init__.py |
"""
Interfaces for serializing Django requests.
To add your own serializers, use the TEST_SERIALIZATION_MODULES setting::
TEST_SERIALIZATION_MODULES = {
'pickle': 'test_utils.testmaker.serializers.pickle_serializer',
'json': 'test_utils.testmaker.json_serializer',
}
"""
from django.conf import settings
from django.utils import importlib
# Built-in serializers shipped with testmaker, keyed by format name.
TEST_SERIALIZERS = {
    'pickle': 'test_utils.testmaker.serializers.pickle_serializer',
    'json': 'test_utils.testmaker.serializers.json_serializer',
}
# Sentinel lines written between serialized requests/responses so the
# stream can be split apart again when deserializing.
REQUEST_UNIQUE_STRING = '---REQUEST_BREAK---'
RESPONSE_UNIQUE_STRING = '---RESPONSE_BREAK---'
# Lazily populated registry: format name -> serializer module.
_test_serializers = {}
def register_serializer(format, serializer_module, serializers=None):
""""Register a new serializer.
``serializer_module`` should be the fully qualified module name
for the serializer.
If ``serializers`` is provided, the registration will be added
to the provided dictionary.
If ``serializers`` is not provided, the registration will be made
directly into the global register of serializers. Adding serializers
directly is not a thread-safe operation.
"""
module = importlib.import_module(serializer_module)
if serializers is None:
_test_serializers[format] = module
else:
serializers[format] = module
def unregister_serializer(format):
    "Unregister a given serializer. This is not a thread-safe operation."
    _test_serializers.pop(format)
def get_serializer(format):
    """Return the ``Serializer`` class registered for *format*,
    populating the registry on first use."""
    if not _test_serializers:
        _load_test_serializers()
    module = _test_serializers[format]
    return module.Serializer
def get_serializer_formats():
    """Return the registered format names, populating the registry on
    first use."""
    if not _test_serializers:
        _load_test_serializers()
    registry = _test_serializers
    return registry.keys()
def get_deserializer(format):
    """Return the ``Deserializer`` class registered for *format*,
    populating the registry on first use."""
    if not _test_serializers:
        _load_test_serializers()
    module = _test_serializers[format]
    return module.Deserializer
def _load_test_serializers():
    """
    Register built-in and settings-defined serializers. This is done lazily so
    that user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    global _test_serializers
    registry = {}
    for fmt, module_path in TEST_SERIALIZERS.items():
        register_serializer(fmt, module_path, registry)
    if hasattr(settings, "TEST_SERIALIZATION_MODULES"):
        overrides = settings.TEST_SERIALIZATION_MODULES
        for fmt in overrides:
            register_serializer(fmt, overrides[fmt], registry)
    _test_serializers = registry
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,875 | saltycrane/django-test-utils | refs/heads/master | /test_project/test_app/tests/assertions_tests.py | from unittest import TestCase
from test_utils.assertions import DiffTestCaseMixin
class TestAssertions(TestCase, DiffTestCaseMixin):
    """
    Tests to test assertions in test utils.
    """
    # NOTE(review): these tests only inspect the failure *message* when
    # failIfDiff raises; if it does not raise, the test passes without
    # asserting anything. They also rely on the Python 2-only
    # ``e.message`` attribute and ``except X, e`` syntax.
    def test_assert_no_diff_dict(self):
        dict1 = {'I love': 'you'}
        dict2 = {'I love': 'moo'}
        try:
            self.failIfDiff(dict1, dict2)
        except AssertionError, e:
            # Expected unified-diff text for a one-key value change.
            self.failIfDiff(e.message, """\n--- First \n\n+++ Second \n\n@@ -1,1 +1,1 @@\n\n-'I love':'you'\n+'I love':'moo'\n""")

    def test_assert_no_diff_list(self):
        list1 = ['I love', 'you']
        list2 = ['I love', 'to moo']
        try:
            self.failIfDiff(list1, list2)
        except AssertionError, e:
            # Expected unified-diff text for a one-element change.
            self.failIfDiff(e.message, """\n--- First \n\n+++ Second \n\n@@ -1,2 +1,2 @@\n\n 'I love'\n-'you'\n+'to moo'\n""")
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,876 | saltycrane/django-test-utils | refs/heads/master | /test_utils/management/commands/testmaker.py | from optparse import make_option
import logging, os
from os import path
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.core.management import call_command
from django.db import models
from test_utils.testmaker import Testmaker
class Command(BaseCommand):
    """Run the development server with Testmaker request logging enabled.

    Wraps Django's ``runserver``; while the server runs, Testmaker's
    middleware records browsing activity as tests (and, with ``-f``,
    fixtures are written when the server exits).
    """
    option_list = BaseCommand.option_list + (
        make_option('-a', '--app', action='store', dest='application',
            default=None, help='The name of the application (in the current \
directory) to output data to. (defaults to current directory)'),
        make_option('-l', '--logdir', action='store', dest='logdirectory',
            default=os.getcwd(), help='Directory to send tests and fixtures to. \
(defaults to current directory)'),
        make_option('-x', '--loud', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        # Help text fixed: passing -f *enables* fixture creation
        # (action='store_true' over a False default).
        make_option('-f', '--fixture', action='store_true', dest='fixture', default=False,
            help='Pass -f to create a fixture for the recorded data.'),
        make_option('--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.'),
    )
    help = 'Runs the test server with the testmaker output enabled'
    args = '[server:port]'

    def handle(self, addrport='', *args, **options):
        app = options.get("application")
        verbosity = int(options.get('verbosity', 1))
        create_fixtures = options.get('fixture', False)
        logdir = options.get('logdirectory')
        # Fallback now matches the option's declared default ('json');
        # it previously fell back to 'xml' inconsistently.
        fixture_format = options.get('format', 'json')
        if app:
            app = models.get_app(app)
        if not app:
            # Don't serialize the whole DB :)
            create_fixtures = False
        testmaker = Testmaker(app, verbosity, create_fixtures, fixture_format, addrport)
        testmaker.prepare(insert_middleware=True)
        try:
            call_command('runserver', addrport=addrport, use_reloader=False)
        except SystemExit:
            # runserver exits via SystemExit (e.g. Ctrl-C); dump fixtures then.
            if create_fixtures:
                testmaker.make_fixtures()
            else:
                raise
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,877 | saltycrane/django-test-utils | refs/heads/master | /test_project/test_app/tests/twill_tests.py | __doc__ = """
### test setup() and teardown() logic
>>> from test_utils.utils.twill_runner import *
>>> from django.conf import settings
>>> setup()
<..._EasyTwillBrowser object at ...>
>>> setup() # no duplicate registrations
False
>>> len(INSTALLED)
1
>>> teardown()
True
>>> len(INSTALLED)
0
>>> setup(host='myhost', port=40)
<..._EasyTwillBrowser object at ...>
>>> setup(host='myhost', port=10)
<..._EasyTwillBrowser object at ...>
>>> teardown(port=10) # exact match OR no arguments to pop last required
False
>>> teardown() # this will remove the last
True
>>> len(INSTALLED) # one handler is still registered
1
>>> teardown(host='myhost', port=40) # remove it by exact match
True
>>> len(INSTALLED)
0
>>> settings.DEBUG_PROPAGATE_EXCEPTIONS = False
>>> setup(propagate=True)
<..._EasyTwillBrowser object at ...>
>>> settings.DEBUG_PROPAGATE_EXCEPTIONS
True
>>> teardown()
True
>>> settings.DEBUG_PROPAGATE_EXCEPTIONS
False
>>> len(INSTALLED)
0
### test relative url handling ###
# Note that for simplicities sake we only
# check whether our custom code appended a
# host name; the twill browser base class
# never gets to see the urls, and we don't
# know what it makes of it.
# put browser into testing mode
>>> browser = get_browser()
>>> browser._testing_ = True
>>> setup(host='a', port=1)
<..._EasyTwillBrowser object at ...>
>>> browser.go('/')
'http://a:1/'
>>> browser.go('/index')
'http://a:1/index'
>>> browser.go('http://google.de')
'http://google.de'
>>> browser.go('/services')
'/services'
>>> browser.go('')
''
>>> browser.go('?foo=bar')
'?foo=bar'
>>> browser.go('/index', default=True)
'http://a:1/index'
# TODO: since we don't work with real urls, we don't get anything back. Improve.
>>> url()
>>> teardown()
True
>>> len(INSTALLED)
0
# leave testing mode again
>>> browser._testing_ = False
# TODO: test the login/logout methods
"""
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,878 | saltycrane/django-test-utils | refs/heads/master | /test_project/test_app/tests/templatetags_tests.py | import os
from django.test.testcases import TestCase
from django.template import Context, Template
from django.contrib.auth.models import User
from test_utils.templatetags import TemplateParser
from test_utils.testmaker import Testmaker
from django.contrib.auth.models import User
class Parsing(TestCase):
    """
    Tests to test the parsing API
    """
    def setUp(self):
        # Fresh Testmaker whose test/serialize output goes to local files.
        self.tm = Testmaker()
        self.tm.setup_logging('test_file', 'serialize_file')

    def tearDown(self):
        # Teardown logging somehow?
        os.remove('test_file')
        os.remove('serialize_file')

    def test_basic_parsing(self):
        user = User.objects.create_user('john', 'lennon@thebeatles.com', 'johnpassword')
        user.save()
        c = Context({'object': user})
        t = TemplateParser('{% load comments %}{% get_comment_list for object as as_var %}{{ as_var }}', c)
        t.parse()
        # parse() separates {% load %} statements from non-builtin tag calls.
        self.assertEquals(t.template_calls[0], '{% get_comment_list for object as as_var %}')
        self.assertEquals(t.loaded_classes[0], '{% load comments %}')
        t.create_tests()
        # create_tests() writes generated assertions through Testmaker.log,
        # which setup_logging pointed at 'test_file' above.
        logs = open('test_file')
        output = logs.read()
        self.assertTrue(output.find("{'object': get_model('auth', 'user')") != -1)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,879 | saltycrane/django-test-utils | refs/heads/master | /test_utils/templatetags/__init__.py | import re
import os
from django.conf import settings
from django.template.loaders.filesystem import load_template_source
from django import template
from test_utils.testmaker import Testmaker
# Built-in Django template tags (plus 'else'/'for' loop keywords) that the
# parser ignores -- only custom tags are interesting to generate tests for.
DEFAULT_TAGS = ['autoescape' , 'block' , 'comment' , 'cycle' , 'debug' ,
    'extends' , 'filter' , 'firstof' , 'if' , 'else', 'for', #No for so we can do loops
    'ifchanged' , 'ifequal' , 'ifnotequal' , 'include' , 'load' , 'now' ,
    'regroup' , 'spaceless' , 'ssi' , 'templatetag' , 'url' , 'widthratio' ,
    'with' ]

# Matches one template-tag invocation, e.g. "{% load comments %}".
# Group 1 is the full "{% ... %}" text; group 2 is the inner command text.
tag_re = re.compile('({% (.*?) %})')
### Template Tag Maker stuff
class TemplateParser(object):
def __init__(self, template, context=None):
"""
Set the initial value of the template to be parsed
Allows for the template passed to be a string of a template name
or a string that represents a template.
"""
self.template = template
self.context = context
#Contains the strings of all loaded classes
self.loaded_classes = []
self.template_calls = []
self.tests = []
#Accept both template names and template strings
try:
self.template_string, self.filepath = load_template_source(template.name)
except:
self.template_string = template
self.filepath = None
def parse(self):
"""
Parse the template tag calls out of the template.
This is ugly because of having more than 1 tag on a line.
Thus we have to loop over the file, splitting on the regex, then
looping over the split, matching for our regex again.
Improvements welcome!
End result::
self.loaded_classes contains the load commands of classes loaded
self.template_calls contains the template calls
"""
for line in self.template_string.split('\n'):
split_line = tag_re.split(line)
if len(split_line) > 1:
for matched in split_line:
mat = tag_re.search(matched)
if mat:
full_command = mat.group(0)
cmd = mat.group(2).split()[0].strip() #get_comment_form etc
if cmd == 'load':
self.loaded_classes.append(full_command)
else:
if cmd not in DEFAULT_TAGS and cmd not in 'end'.join(DEFAULT_TAGS):
self.template_calls.append(full_command)
def create_tests(self):
"""
This yields a rendered template string to assert Equals against with
the outputted template.
"""
for tag_string in self.template_calls:
out_context = {}
context_name = ""
#Try and find anything in the string that's in the context
context_name = ''
bits = tag_string.split()
for bit_num, bit in enumerate(bits):
try:
out_context[bit] = template.Variable(bit).resolve(self.context)
except:
pass
if bit == 'as':
context_name = bits[bit_num+1]
if context_name:
con_string = "{{ %s }}" % context_name
else:
con_string = ""
template_string = "%s%s%s" % (''.join(self.loaded_classes), tag_string, con_string)
try:
template_obj = template.Template(template_string)
rendered_string = template_obj.render(template.Context(out_context))
except Exception, e:
print "EXCEPTION: %s" % e.message
rendered_string = ''
#self.tests.append(rendered_string)
self.output_ttag(template_string, rendered_string, out_context)
def output_ttag(self, template_str, output_str, context):
Testmaker.log.info(" tmpl = template.Template(u'%s')" % template_str)
context_str = "{"
for con in context:
try:
tmpl_obj = context[con]
#TODO: This will blow up on anything but a model.
#Would be cool to have a per-type serialization, prior art is
#in django's serializers and piston.
context_str += "'%s': get_model('%s', '%s').objects.get(pk=%s)," % (con, tmpl_obj._meta.app_label, tmpl_obj._meta.module_name, tmpl_obj.pk )
except:
#sometimes there be integers here
pass
context_str += "}"
#if output_str:
# Testmaker.log.info(" tmpl = template.Template(u'%s')" % template_str)
Testmaker.log.info(" context = template.Context(%s)" % context_str)
Testmaker.log.info(" self.assertEqual(tmpl.render(context), u'%s')\n" % output_str)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,880 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/processors/twill_processor.py | import base
# Twill-script snippets used by the Processor below:
# 'go' visits a path, 'code' asserts the HTTP status.
TEST_TEMPLATE = """go {{ path }}"""
STATUS_TEMPLATE = """code {{ status_code }}"""
#CONTEXT_TEMPLATE = '''find {{value}}'''
# Context checking is currently disabled (empty template).
CONTEXT_TEMPLATE = ''
class Processor(base.Processer):
    """Turn serialized request/response data into twill-script test cases."""

    # Template name -> twill snippet emitted for that part of a test.
    _TEMPLATES = {
        'test': TEST_TEMPLATE,
        'status': STATUS_TEMPLATE,
        'context': CONTEXT_TEMPLATE,
    }

    def __init__(self, name='twill'):
        # Register under the 'twill' processor name by default.
        super(Processor, self).__init__(name)

    def _get_template(self, templatename):
        # Raises KeyError for unknown names, exactly like a dict lookup.
        return self._TEMPLATES[templatename]
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,881 | saltycrane/django-test-utils | refs/heads/master | /test_utils/crawler/plugins/base.py | from test_utils.crawler import signals as test_signals
class Plugin(object):
    """
    Base class for Crawler plugins.

    Subclass it and define any of the hook methods named in ``_HOOKS``; the
    constructor wires each one that exists to the matching crawler signal.
    Define a print_report function if your plugin outputs at the end of the run.
    """
    global_data = {}

    # Hook methods a subclass may implement; each name matches a signal
    # in test_signals.
    _HOOKS = ('pre_request', 'post_request', 'start_run', 'finish_run',
              'urls_parsed')

    def __init__(self):
        # This should be refactored to call each of the subclasses.
        # Having them use the signal function signature is hacky..
        for hook in self._HOOKS:
            if hasattr(self, hook):
                getattr(test_signals, hook).connect(getattr(self, hook))
        # Per-class scratch space, shared through global_data.
        self.data = self.global_data[self.__class__.__name__] = {}
        # This will be updated when a run starts if the user wants output to
        # be saved:
        self.output_dir = None

    def set_output_dir(self, output_dir):
        """
        Extension point for subclasses to open files, create directories, etc.
        """
        self.output_dir = output_dir
61,882 | saltycrane/django-test-utils | refs/heads/master | /test_utils/crawler/base.py | from HTMLParser import HTMLParseError
import logging
import os
import urlparse
from django.conf import settings
from django.db import transaction
from django.views.debug import cleanse_setting
from django.test.client import Client
from django.test.utils import setup_test_environment, teardown_test_environment
from test_utils.crawler import signals as test_signals
from test_utils.crawler.plugins.base import Plugin
LOG = logging.getLogger('crawler')
# Prefer lxml for link extraction (much faster); fall back to the stdlib
# HTMLParser if lxml isn't installed.  Both variants define link_extractor.
try:
    import lxml.html

    def link_extractor(html):
        # Yield every link-like attribute value found in the document.
        try:
            tree = lxml.html.document_fromstring(html)
        except lxml.etree.ParseError, e:
            # Normalize lxml's error into the HTMLParseError the crawler expects.
            raise HTMLParseError(str(e), e.position)
        for element, attribute, link, pos in tree.iterlinks():
            yield link
except ImportError:
    LOG.info("Processing documents with HTMLParser; install lxml for greater performance")
    from HTMLParser import HTMLParser

    def link_extractor(html):
        # Collect href/src attribute values from all start tags.
        # The class (and its 'links' set) is redefined fresh on every call.
        class LinkExtractor(HTMLParser):
            links = set()
            def handle_starttag(self, tag, attrs):
                self.links.update(
                    v for k, v in attrs if k == "href" or k =="src"
                )
        parser = LinkExtractor()
        parser.feed(html)
        parser.close()
        return parser.links
class Crawler(object):
"""
This is a class that represents a URL crawler in python
"""
def __init__(self, base_url, conf_urls={}, verbosity=1, output_dir=None, ascend=True, **kwargs):
self.base_url = base_url
self.conf_urls = conf_urls
self.verbosity = verbosity
self.ascend = ascend
auth = kwargs.get('auth')
if output_dir:
assert os.path.isdir(output_dir)
self.output_dir = os.path.realpath(output_dir)
LOG.info("Output will be saved to %s" % self.output_dir)
else:
self.output_dir = None
#These two are what keep track of what to crawl and what has been.
self.not_crawled = [(0, 'START',self.base_url)]
self.crawled = {}
self.c = Client(REMOTE_ADDR='127.0.0.1')
if auth:
printable_auth = ', '.join(
'%s: %s' % (key, cleanse_setting(key.upper(), value))
for key, value in auth.items())
LOG.info('Log in with %s' % printable_auth)
self.c.login(**auth)
self.plugins = []
for plug in Plugin.__subclasses__():
active = getattr(plug, 'active', True)
if active:
#TODO: Check if plugin supports writing CSV (or to a file in general?)
self.plugins.append(plug())
def _parse_urls(self, url, resp):
parsed = urlparse.urlparse(url)
if resp['Content-Type'] == "text/html; charset=utf-8":
html = resp.content.decode("utf-8")
else:
html = resp.content
returned_urls = []
for link in link_extractor(html):
parsed_href = urlparse.urlparse(link)
if not parsed_href.path:
continue
if parsed_href.scheme and not parsed_href.netloc.startswith("testserver"):
LOG.debug("Skipping external link: %s", link)
continue
if parsed_href.path.startswith('/'):
returned_urls.append(link)
else:
# We'll use urlparse's urljoin since that handles things like <a href="../foo">
returned_urls.append(urlparse.urljoin(url, link))
return returned_urls
def get_url(self, from_url, to_url):
"""
Takes a url, and returns it with a list of links
This uses the Django test client.
"""
parsed = urlparse.urlparse(to_url)
request_dict = dict(urlparse.parse_qsl(parsed.query))
url_path = parsed.path
#url_path now contains the path, request_dict contains get params
LOG.debug("%s: link to %s with parameters %s", from_url, to_url, request_dict)
test_signals.pre_request.send(self, url=to_url, request_dict=request_dict)
resp = self.c.get(url_path, request_dict, follow=False)
test_signals.post_request.send(self, url=to_url, response=resp)
if resp.status_code in (301, 302):
location = resp["Location"]
if location.startswith("http://testserver"):
LOG.debug("%s: following redirect to %s", to_url, location)
# Mmm, recursion TODO: add a max redirects limit?
return self.get_url(from_url, location)
else:
LOG.info("%s: not following off-site redirect to %s", to_url, location)
return (resp, ())
elif 400 <= resp.status_code < 600:
# We'll avoid logging a warning for HTTP statuses which aren't in the
# official error ranges:
LOG.warning("%s links to %s, which returned HTTP status %d", from_url, url_path, resp.status_code)
return (resp, ())
if resp['Content-Type'].startswith("text/html"):
returned_urls = self._parse_urls(to_url, resp)
test_signals.urls_parsed.send(self, fro=to_url, returned_urls=returned_urls)
else:
returned_urls = list()
return (resp, returned_urls)
def run(self, max_depth=3):
for p in self.plugins:
p.set_output_dir(self.output_dir)
old_DEBUG = settings.DEBUG
settings.DEBUG = False
setup_test_environment()
test_signals.start_run.send(self)
# To avoid tainting our memory usage stats with startup overhead we'll
# do one extra request for the first page now:
self.c.get(*self.not_crawled[0][-1])
while self.not_crawled:
#Take top off not_crawled and evaluate it
current_depth, from_url, to_url = self.not_crawled.pop(0)
if current_depth > max_depth:
continue
transaction.enter_transaction_management()
try:
resp, returned_urls = self.get_url(from_url, to_url)
except HTMLParseError, e:
LOG.error("%s: unable to parse invalid HTML: %s", to_url, e)
except Exception, e:
LOG.exception("%s had unhandled exception: %s", to_url, e)
continue
finally:
transaction.rollback()
self.crawled[to_url] = True
#Find its links that haven't been crawled
for base_url in returned_urls:
if not self.ascend and not base_url.startswith(self.base_url):
LOG.debug("Skipping %s - outside scope of %s", base_url, self.base_url)
continue
if base_url not in [to for dep,fro,to in self.not_crawled] and not self.crawled.has_key(base_url):
self.not_crawled.append((current_depth+1, to_url, base_url))
test_signals.finish_run.send(self)
teardown_test_environment()
settings.DEBUG = old_DEBUG
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,883 | saltycrane/django-test-utils | refs/heads/master | /test_utils/management/commands/makefixture.py | """
"Make fixture" command.
Highly useful for making test fixtures. Use it to pick only few items
from your data to serialize, restricted by primary keys. By default
command also serializes foreign keys and m2m relations. You can turn
off related items serialization with --skip-related option.
How to use:
python manage.py makefixture
will display what models are installed
python manage.py makefixture User[:3]
or
python manage.py makefixture auth.User[:3]
or
python manage.py makefixture django.contrib.auth.User[:3]
will serialize users with ids 1 and 2, with assigned groups, permissions
and content types.
python manage.py makefixture YourModel[3] YourModel[6:10]
will serialize YourModel with key 3 and keys 6 to 9 inclusively.
Of course, you can serialize whole tables, and also different tables at
once, and use options of dumpdata:
python manage.py makefixture --format=xml --indent=4 YourModel[3] AnotherModel auth.User[:5] auth.Group
"""
# From http://www.djangosnippets.org/snippets/918/
#save into anyapp/management/commands/makefixture.py
#or back into django/core/management/commands/makefixture.py
#v0.1 -- current version
#known issues:
#no support for generic relations
#no support for one-to-one relations
from optparse import make_option
from django.core import serializers
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.core.management.base import LabelCommand
from django.db.models.fields.related import ForeignKey
from django.db.models.fields.related import ManyToManyField
from django.db.models.loading import get_models
DEBUG = False
def model_name(m):
    """Return *m*'s dotted name with the trailing ``.models`` module dropped.

    E.g. a model defined in ``django.contrib.auth.models`` named ``User``
    yields ``django.contrib.auth.User``.
    """
    parts = m.__module__.split('.')
    parts.pop()  # remove .models (or the sole component)
    parts.append(m._meta.object_name)
    return '.'.join(parts)
class Command(LabelCommand):
help = 'Output the contents of the database as a fixture of the given format.'
args = 'modelname[pk] or modelname[id1:id2] repeated one or more times'
option_list = BaseCommand.option_list + (
make_option('--skip-related', default=True, action='store_false', dest='propagate',
help='Specifies if we shall not add related objects.'),
make_option('--reverse', default=[], action='append', dest='reverse',
help="Reverse relations to follow (e.g. 'Job.task_set')."),
make_option('--format', default='json', dest='format',
help='Specifies the output serialization format for fixtures.'),
make_option('--indent', default=None, dest='indent', type='int',
help='Specifies the indent level to use when pretty-printing output'),
)
def handle_reverse(self, **options):
follow_reverse = options.get('reverse', [])
to_reverse = {}
for arg in follow_reverse:
try:
model_name, related_set_name = arg.rsplit(".", 1)
except:
raise CommandError("Bad fieldname on '--reverse %s'" % arg)
model = self.get_model_from_name(model_name)
try:
getattr(model, related_set_name)
except AttributeError:
raise CommandError("Field '%s' does not exist on model '%s'" % (
related_set_name, model_name))
to_reverse.setdefault(model, []).append(related_set_name)
return to_reverse
def handle_models(self, models, **options):
format = options.get('format','json')
indent = options.get('indent',None)
show_traceback = options.get('traceback', False)
propagate = options.get('propagate', True)
follow_reverse = self.handle_reverse(**options)
# Check that the serialization format exists; this is a shortcut to
# avoid collating all the objects and _then_ failing.
if format not in serializers.get_public_serializer_formats():
raise CommandError("Unknown serialization format: %s" % format)
try:
serializers.get_serializer(format)
except KeyError:
raise CommandError("Unknown serialization format: %s" % format)
objects = []
for model, slice in models:
if isinstance(slice, basestring):
objects.extend(model._default_manager.filter(pk__exact=slice))
elif not slice or type(slice) is list:
items = model._default_manager.all()
if slice and slice[0]:
items = items.filter(pk__gte=slice[0])
if slice and slice[1]:
items = items.filter(pk__lt=slice[1])
items = items.order_by(model._meta.pk.attname)
objects.extend(items)
else:
raise CommandError("Wrong slice: %s" % slice)
all = objects
if propagate:
collected = set([(x.__class__, x.pk) for x in all])
while objects:
related = []
for x in objects:
if DEBUG:
print "Adding %s[%s]" % (model_name(x), x.pk)
# follow forward relation fields
for f in x.__class__._meta.fields + x.__class__._meta.many_to_many:
if isinstance(f, ForeignKey):
new = getattr(x, f.name) # instantiate object
if new and not (new.__class__, new.pk) in collected:
collected.add((new.__class__, new.pk))
related.append(new)
if isinstance(f, ManyToManyField):
for new in getattr(x, f.name).all():
if new and not (new.__class__, new.pk) in collected:
collected.add((new.__class__, new.pk))
related.append(new)
# follow reverse relations as requested
for reverse_field in follow_reverse.get(x.__class__, []):
mgr = getattr(x, reverse_field)
for new in mgr.all():
if new and not (new.__class__, new.pk) in collected:
collected.add((new.__class__, new.pk))
related.append(new)
objects = related
all.extend(objects)
try:
return serializers.serialize(format, all, indent=indent)
except Exception, e:
if show_traceback:
raise
raise CommandError("Unable to serialize database: %s" % e)
def get_models(self):
return [(m, model_name(m)) for m in get_models()]
def get_model_from_name(self, search):
"""Given a name of a model, return the model object associated with it
The name can be either fully specified or uniquely matching the
end of the model name. e.g.
django.contrib.auth.User
or
auth.User
raises CommandError if model can't be found or uniquely determined
"""
models = [model for model, name in self.get_models()
if name.endswith('.'+name) or name == search]
if not models:
raise CommandError("Unknown model: %s" % search)
if len(models)>1:
raise CommandError("Ambiguous model name: %s" % search)
return models[0]
def handle_label(self, labels, **options):
parsed = []
for label in labels:
search, pks = label, ''
if '[' in label:
search, pks = label.split('[', 1)
slice = ''
if ':' in pks:
slice = pks.rstrip(']').split(':', 1)
elif pks:
slice = pks.rstrip(']')
model = self.get_model_from_name(search)
parsed.append((model, slice))
return self.handle_models(parsed, **options)
def list_models(self):
names = [name for _model, name in self.get_models()]
raise CommandError('Neither model name nor slice given. Installed model names: \n%s' % ",\n".join(names))
def handle(self, *labels, **options):
if not labels:
self.list_models()
output = []
label_output = self.handle_label(labels, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,884 | saltycrane/django-test-utils | refs/heads/master | /test_utils/test_runners/keep_database.py | from django.test.simple import *
import os
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
"""
worsk exactly as per normal test
but only creates the test_db if it doesn't yet exist
and does not destroy it when done
tables are flushed and fixtures loaded between tests as per usual
but if your schema has not changed then this saves significant amounts of time
and speeds up the test cycle
Run the unit tests for all the test labels in the provided list.
Labels must be of the form:
- app.TestClass.test_method
Run a single specific test method
- app.TestClass
Run all the test methods in a given class
- app
Search for doctests and unittests in the named application.
When looking for tests, the test runner will look in the models and
tests modules for the application.
A list of 'extra' tests may also be provided; these tests
will be added to the test suite.
Returns the number of tests that failed.
"""
setup_test_environment()
settings.DEBUG = False
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
if '.' in label:
suite.addTest(build_test(label))
else:
app = get_app(label)
suite.addTest(build_suite(app))
else:
for app in get_apps():
suite.addTest(build_suite(app))
for test in extra_tests:
suite.addTest(test)
suite = reorder_suite(suite, (TestCase,))
###Everything up to here is from django.test.simple
from django.db.backends import creation
from django.db import connections, DatabaseError
old_name = {}
for alias in connections:
connection = connections[alias]
old_name[alias] = settings.DATABASES[alias]['NAME']
if settings.DATABASES[alias]['TEST_NAME']:
settings.DATABASES[alias]['NAME'] = settings.DATABASES[alias]['TEST_NAME']
else:
settings.DATABASES[alias]['NAME'] = creation.TEST_DATABASE_PREFIX + settings.DATABASES[alias]['NAME']
connection.settings_dict["DATABASE_NAME"] = settings.DATABASES[alias]['NAME']
# does test db exist already ?
try:
if settings.DATABASES[alias]['ENGINE'] == 'sqlite3':
if not os.path.exists(settings.DATABASES[alias]['NAME']):
raise DatabaseError
connection.cursor()
except Exception:
print 'database %s does not exist. creating...' % alias
# db does not exist
# juggling ! create_test_db switches the DATABASE_NAME to the TEST_DATABASE_NAME
settings.DATABASES[alias]['NAME'] = old_name[alias]
connection.settings_dict["DATABASE_NAME"] = old_name[alias]
connection.creation.create_test_db(verbosity, autoclobber=True)
else:
connection.close()
settings.DATABASES[alias]['SUPPORTS_TRANSACTIONS'] = connection.creation._rollback_works()
result = unittest.TextTestRunner(verbosity=verbosity).run(suite)
for alias in settings.DATABASES:
#Since we don't call destory_test_db, we need to set the db name back.
settings.DATABASES[alias]['NAME'] = old_name[alias]
connection.settings_dict["DATABASE_NAME"] = old_name[alias]
teardown_test_environment()
return len(result.failures) + len(result.errors)
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,885 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/serializers/base.py | import logging
import time
class Serializer(object):
    """Base class for pluggable testmaker serializers.

    Subclasses record live requests/responses; this base class knows how
    to reduce either one to a plain dict but leaves persistence
    (``save_request``/``save_response``) abstract.
    """

    name = "base"

    def __init__(self, name):
        """Remember the serializer's name and grab the shared logger."""
        self.name = name
        self.ser = logging.getLogger('testserializer')
        self.data = {}

    def process_request(self, request):
        """Reduce *request* to a plain dict of its interesting fields."""
        captured = {'name': self.name, 'time': time.time()}
        captured['path'] = request.path
        captured['GET'] = request.GET
        captured['POST'] = request.POST
        captured['REQUEST'] = request.REQUEST
        captured['method'] = request.method
        return captured

    def save_request(self, request):
        """Persist a request capture; must be provided by subclasses."""
        raise NotImplementedError

    def process_response(self, path, response):
        """Reduce *response* to a plain dict of its interesting fields."""
        captured = {'name': self.name, 'time': time.time()}
        captured['path'] = path
        captured['context'] = response.context
        captured['content'] = response.content
        captured['status_code'] = response.status_code
        captured['cookies'] = response.cookies
        captured['headers'] = response._headers
        return captured

    def save_response(self, request, response):
        """Persist a response capture; must be provided by subclasses."""
        raise NotImplementedError
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,886 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/processors/__init__.py |
"""
Interfaces for processing Django tests.
To add your own processors, use the TEST_PROCESSOR_MODULES setting::
TEST_PROCESSOR_MODULES = {
'django': 'test_utils.testmaker.processors.django',
'twill': 'test_utils.testmaker.processors.twill',
}
"""
from django.conf import settings
from django.utils import importlib
# Built-in processors
# Maps a short format name to the dotted module path of its implementation.
TEST_PROCESSORS = {
    'django': 'test_utils.testmaker.processors.django_processor',
    'twill': 'test_utils.testmaker.processors.twill_processor',
}
# Lazily-populated registry of *loaded* processor modules, keyed by format
# (filled on first use by _load_test_processors).
_test_processors = {}
def register_processor(format, processor_module, processors=None):
    """Register a new processor.

    ``processor_module`` is the fully qualified module name of the
    processor implementation to import.

    When ``processors`` is supplied, the loaded module is recorded in
    that dictionary; otherwise it goes straight into the global
    registry, which is not a thread-safe operation.
    """
    loaded = importlib.import_module(processor_module)
    if processors is not None:
        processors[format] = loaded
    else:
        _test_processors[format] = loaded
def unregister_processor(format):
    """Drop *format* from the global registry; raises KeyError if absent.

    This is not a thread-safe operation.
    """
    _test_processors.pop(format)
def get_processor(format):
    """Return the Processor class registered under *format*, loading the
    registry on first use."""
    if not _test_processors:
        _load_test_processors()
    module = _test_processors[format]
    return module.Processor
def get_processor_formats():
    """Return the format names currently registered, loading the
    registry on first use."""
    if not _test_processors:
        _load_test_processors()
    return _test_processors.keys()
def _load_test_processors():
    """
    Register built-in and settings-defined processors. This is done lazily
    so that user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    global _test_processors
    loaded = {}
    for fmt, module_path in TEST_PROCESSORS.items():
        register_processor(fmt, module_path, loaded)
    # Settings may add or override processors via TEST_PROCESSOR_MODULES.
    extra = getattr(settings, "TEST_PROCESSOR_MODULES", {})
    for fmt in extra:
        register_processor(fmt, extra[fmt], loaded)
    _test_processors = loaded
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,887 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/replay.py | import sys
import re
import cPickle as pickle
from test_utils.testmaker import Testmaker
from test_utils.testmaker.processors.django_processor import Processor
class MockRequest(dict):
    """A dict whose keys are also readable as attributes, mimicking the
    attribute access a real request object offers."""

    def __getattr__(self, attr):
        # Missing keys surface as KeyError, matching plain dict access.
        return self.__getitem__(attr)
class Replay(object):
def __init__(self, file_name, replay_file='replay_file'):
self.file_name = file_name
self.stream = open(self.file_name).readlines()
self.tm = Testmaker()
self.tm.setup_logging(replay_file, '/dev/null')
self.processor = Processor('replay_processor')
self.serial_obj = pickle
def process(self):
self.log = []
buffer = []
req_re = re.compile('---REQUEST_BREAK---')
res_re = re.compile('---RESPONSE_BREAK---')
for line in self.stream:
if req_re.search(line):
#process request
to_pickle = ''.join(buffer)
request = MockRequest(self.serial_obj.loads(to_pickle))
self.processor.save_request(request)
print request['path'], request['time']
buffer = []
elif res_re.search(line):
#process response
to_pickle = ''.join(buffer)
response = MockRequest(self.serial_obj.loads(to_pickle))
self.log.append(request, response)
self.processer.save_response(request, response)
print response['status_code'], response['time']
buffer = []
else:
buffer.append(line)
if __name__ == '__main__':
    # Expect exactly one argument: the serialized log file to replay.
    if len(sys.argv) != 2:
        raise Exception('Need file name')
    replay = Replay(sys.argv[1])
    replay.process()
61,888 | saltycrane/django-test-utils | refs/heads/master | /test_utils/testmaker/middleware/testmaker.py | from django.conf import settings
from django.test import Client
from django.test.utils import setup_test_environment
from django.template import Template, Context
from test_utils.testmaker import processors
from test_utils.testmaker import serializers
from test_utils.testmaker import Testmaker
#Remove at your own peril.
#Thar be sharks in these waters.
debug = getattr(settings, 'DEBUG', False)
# NOTE: the triple-quoted block below is an unassigned string literal --
# a deliberately disabled warning banner. Left as-is.
"""
if not debug:
print "THIS CODE IS NOT MEANT FOR USE IN PRODUCTION"
else:
print "Loaded Testmaker Middleware"
"""
# Start a Testmaker session once per process so the middleware has a log
# file to record into.
if not Testmaker.enabled:
    testmaker = Testmaker(verbosity=0)
    testmaker.prepare()
# When enabled, the banner template below (with the current log file) is
# prepended to every response by TestMakerMiddleware.process_response.
SHOW_TESTMAKER_HEADER = getattr(settings, 'SHOW_TESTMAKER_HEADER', False)
RESPONSE_TEMPLATE = Template("""
<div class="wrapper" style="background-color: red; padding: 5px; color: #fff; width: 100%;">
Testmaker: Logging to: {{ file }}
<form action="/test_utils/set_logging/">
<input type="text" name="filename">
<input type="submit" value="New Test">
</form>
<a href="/test_utils/show_log/">Show Log</a>
</div>
""")
class TestMakerMiddleware(object):
    """Middleware that records live traffic for testmaker.

    Every non-test-client request is handed to a serializer (raw
    capture) and a processor (test generation). GET requests are
    additionally re-played through the Django test client so the
    response can be recorded as well.
    """

    def __init__(self):
        """Instantiate the configured serializer and processor.

        Both are pluggable via the TESTMAKER_SERIALIZER and
        TESTMAKER_PROCESSOR settings.
        """
        serializer_name = getattr(settings, 'TESTMAKER_SERIALIZER', 'pickle')
        processor_name = getattr(settings, 'TESTMAKER_PROCESSOR', 'django')
        self.serializer = serializers.get_serializer(serializer_name)()
        self.processor = processors.get_processor(processor_name)()

    def process_request(self, request):
        """Record the incoming request and, for GETs, its response."""
        # request.REQUEST catches both POST and GET parameters; the
        # marker parameter identifies our own replayed requests.
        if 'test_client_true' not in request.REQUEST:
            request.logfile = Testmaker.logfile()
            self.serializer.save_request(request)
            self.processor.save_request(request)
            # Only idempotent requests are safe to re-run.
            if request.method.lower() == "get":
                setup_test_environment()
                client = Client(REMOTE_ADDR='127.0.0.1')
                params = request.GET.copy()
                params['test_client_true'] = 'yes' #avoid recursion
                response = client.get(request.path, params)
                self.serializer.save_response(request, response)
                self.processor.save_response(request, response)
        return None

    def process_response(self, request, response):
        """Optionally prepend the testmaker banner to the response body."""
        if SHOW_TESTMAKER_HEADER \
           and 'test_client_true' not in request.REQUEST:
            context = Context({'file': Testmaker.logfile()})
            banner = RESPONSE_TEMPLATE.render(context)
            response.content = str(banner) + str(response.content)
        return response
| {"/test_project/test_app/tests/testmaker_tests.py": ["/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/serializers/pickle_serializer.py": ["/test_utils/testmaker/serializers/__init__.py"], "/test_project/test_app/tests/crawler_tests.py": ["/test_utils/crawler/base.py"], "/test_utils/views.py": ["/test_utils/testmaker/processors/base.py", "/test_utils/testmaker/__init__.py"], "/test_utils/management/commands/testmaker.py": ["/test_utils/testmaker/__init__.py"], "/test_project/test_app/tests/templatetags_tests.py": ["/test_utils/templatetags/__init__.py", "/test_utils/testmaker/__init__.py"], "/test_utils/testmaker/middleware/testmaker.py": ["/test_utils/testmaker/__init__.py"]} |
61,899 | cuulee/biketour | refs/heads/master | /twitter/urls.py | from django.conf.urls import url
from . import views
# URL routes for the twitter app: a single GeoJSON timeline endpoint.
urlpatterns = [
    url(r'^timeline$', views.timeline, name='timeline'),
]
| {"/gps/views.py": ["/gps/models.py"]} |
61,900 | cuulee/biketour | refs/heads/master | /biketour/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic import RedirectView
from gps import views as gps_views
# Project-level routing: the map page at the root, plus each app mounted
# under its own prefix.
urlpatterns = patterns('',
    url(r'^$', gps_views.map, name='home'),
    url(r'^gps/', include('gps.urls')),
    url(r'^twitter/', include('twitter.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
| {"/gps/views.py": ["/gps/models.py"]} |
61,901 | cuulee/biketour | refs/heads/master | /gps/migrations/0003_auto_20150530_0822.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: relax Point's telemetry columns to
    # accept NULL, since the logging client may omit any of them.

    dependencies = [
        ('gps', '0002_auto_20150529_2135'),
    ]

    operations = [
        migrations.AlterField(
            model_name='point',
            name='accuracy',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='point',
            name='battery',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='point',
            name='direction',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='point',
            name='google_altitude',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='point',
            name='native_altitude',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='point',
            name='provider',
            field=models.CharField(null=True, max_length=50),
        ),
        migrations.AlterField(
            model_name='point',
            name='satellites',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='point',
            name='speed',
            field=models.FloatField(null=True),
        ),
    ]
| {"/gps/views.py": ["/gps/models.py"]} |
61,902 | cuulee/biketour | refs/heads/master | /gps/models.py | from django.db import models
class Point(models.Model):
    """A single GPS fix logged from the tracker."""

    # When the fix was recorded; indexed for time-ordered queries.
    time = models.DateTimeField(db_index=True)
    # WGS84 coordinates (required).
    lat = models.FloatField()
    lon = models.FloatField()
    # Optional telemetry -- the logging client may omit any of these
    # (see migration 0003, which relaxed them to NULL).
    accuracy = models.FloatField(null=True)
    speed = models.FloatField(null=True)
    battery = models.FloatField(null=True)
    satellites = models.FloatField(null=True)
    direction = models.FloatField(null=True)
    provider = models.CharField(max_length=50, null=True)
    # Altitude reported by the device vs. looked up from Google's
    # elevation API (0 when the lookup failed).
    native_altitude = models.FloatField(null=True)
    google_altitude = models.FloatField(null=True)
| {"/gps/views.py": ["/gps/models.py"]} |
61,903 | cuulee/biketour | refs/heads/master | /gps/views.py | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.http import JsonResponse
from django.views.decorators.cache import cache_page
import datetime
import re
import googlemaps
import traceback
import dateutil.parser
import gpxpy
import gpxpy.gpx
from biketour.settings import GOOGLE_MAPS_API_KEY
from .models import Point
from .forms import UploadFileForm
def log(request):
    """Record a single GPS fix from the tracker's GET parameters.

    Also looks up the Google elevation for the fix; if that fails for
    any reason the altitude falls back to 0 so logging never breaks.
    Always returns HTTP 200.
    """
    point = Point()
    point.time = dateutil.parser.parse(request.GET['time'])
    point.lat = float(request.GET['lat'])
    point.lon = float(request.GET['lon'])
    point.speed = float(request.GET['speed'])
    point.native_altitude = float(request.GET['altitude'])
    point.accuracy = float(request.GET['accuracy'])
    point.battery = float(request.GET['battery'])
    point.satellites = int(request.GET['satellites'])
    point.direction = float(request.GET['direction'])
    point.provider = request.GET['provider']
    try:
        gmaps = googlemaps.Client(key=GOOGLE_MAPS_API_KEY)
        # BUG FIX: `lat`/`lon` were undefined names here, so the lookup
        # always raised NameError and google_altitude was silently set
        # to 0. Use the values parsed above. (Also dropped the unused
        # `resolution` assignment.)
        result = gmaps.elevation((point.lat, point.lon))[0]
        point.google_altitude = result['elevation']
    except Exception:
        point.google_altitude = 0
    point.save()
    return HttpResponse(status=200)
def extract_point(point):
    """Serialize a Point model instance as a GeoJSON Feature with a
    bicycle marker."""
    properties = {
        'time': point.time,
        'accuracy': point.accuracy,
        'speed': point.speed,
        'battery': point.battery,
        'provider': point.provider,
        'altitude': point.native_altitude,
        'marker-symbol': 'bicycle',
        'marker-color': '#2c3e50',
        'marker-size': 'large',
    }
    # GeoJSON uses [longitude, latitude] ordering.
    geometry = {
        'type': 'Point',
        'coordinates': [point.lon, point.lat],
    }
    return {'type': 'Feature', 'properties': properties, 'geometry': geometry}
def upload_gpx(request):
    """Accept a GPX file upload and import its track points.

    GET renders the upload form; a valid POST parses the file, stores
    every track point, and redirects home.
    """
    def save_point(parsed_point):
        # Points without a timestamp get "now" so they still sort.
        point = Point()
        point.time = parsed_point.time
        if not point.time:
            point.time = datetime.datetime.now()
        point.native_altitude = parsed_point.elevation
        point.lat = parsed_point.latitude
        point.lon = parsed_point.longitude
        point.save()

    def import_file(gpx_file):
        gpx = gpxpy.parse(gpx_file.read().decode("utf-8"))
        for track in gpx.tracks:
            for segment in track.segments:
                for trkpt in segment.points:
                    save_point(trkpt)

    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            import_file(request.FILES['file'])
            return HttpResponseRedirect('/')
    else:
        form = UploadFileForm()
    return render(request, 'gps/upload.html', {'form': form})
@cache_page(30)
def track(request):
    """Return the full track as a GeoJSON LineString, cached for 30s."""
    points = Point.objects.all().order_by('time').values('lon', 'lat')
    # GeoJSON uses [longitude, latitude] ordering.
    coordinates = [[entry['lon'], entry['lat']] for entry in points]
    line = {
        'type': 'Feature',
        'geometry': {
            'type': 'LineString',
            'coordinates': coordinates,
        },
        'properties': {
            'stroke': '#2c3e50',
            'stroke-width': 4
        }
    }
    return JsonResponse({'type': 'FeatureCollection', 'features': [line]},
                        safe=False)
def map(request):
    """Render the main map page (track data is fetched client-side from
    the JSON endpoints)."""
    # NOTE: shadows the builtin `map`; referenced by name in gps/urls.py,
    # so renaming would be an interface change.
    return render(request, 'gps/map.html')
def current_position(request):
    """Return the most recently logged point as a one-feature GeoJSON
    FeatureCollection. Raises IndexError if no points exist."""
    latest = Point.objects.all().order_by('-time')[0]
    payload = {
        'type': 'FeatureCollection',
        'features': [extract_point(latest)],
    }
    return JsonResponse(payload, safe=False)
| {"/gps/views.py": ["/gps/models.py"]} |
61,904 | cuulee/biketour | refs/heads/master | /twitter/views.py | import traceback
from django.shortcuts import render
from django.http import HttpResponse
from django.http import JsonResponse
from django.views.decorators.cache import cache_page
import tweepy
from biketour.settings import (TWITTER_ACCESS_KEY, TWITTER_ACCESS_SECRET,
TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET,
TWITTER_USER_TIMELINE)
# Maki marker icon names that may be selected via a tweet hashtag
# (see the Mapbox Maki icon set).
MAKI_ICONS = [
    'circle-stroked', 'circle', 'square-stroked', 'square', 'triangle-stroked',
    'triangle', 'star-stroked', 'star', 'cross', 'marker-stroked', 'marker',
    'religious-jewish', 'religious-christian', 'religious-muslim', 'cemetery',
    'rocket', 'airport', 'heliport', 'rail', 'rail-metro', 'rail-light',
    'bus', 'fuel', 'parking', 'parking-garage', 'airfield', 'roadblock',
    'ferry', 'harbor', 'bicycle', 'park', 'park2', 'museum', 'lodging',
    'monument', 'zoo', 'garden', 'campsite', 'theatre', 'art-gallery', 'pitch',
    'soccer', 'america-football', 'tennis', 'basketball', 'baseball', 'golf',
    'swimming', 'cricket', 'skiing', 'school', 'college', 'library', 'post',
    'fire-station', 'town-hall', 'police', 'prison', 'embassy', 'beer',
    'restaurant', 'cafe', 'shop', 'fast-food', 'bar', 'bank', 'grocery',
    'cinema', 'pharmacy', 'hospital', 'danger', 'industrial', 'warehouse',
    'commercial', 'building', 'place-of-worship', 'alcohol-shop', 'logging',
    'oil-well', 'slaughterhouse', 'dam', 'water', 'wetland', 'disability',
    'telephone', 'emergency-telephone', 'toilets', 'waste-basket', 'music',
    'land-use', 'city', 'town', 'village', 'farm', 'bakery', 'dog-park',
    'lighthouse', 'clothing-store', 'polling-place', 'playground', 'entrance',
    # BUG FIX: a missing comma after 'aerialway' implicitly concatenated
    # it with 'rail-above' into the bogus entry 'aerialwayrail-above',
    # making both real icon names unusable.
    'heart', 'london-underground', 'minefield', 'rail-underground', 'aerialway',
    'rail-above', 'camera', 'laundry', 'car', 'suitcase', 'hairdresser',
    'chemist', 'mobilephone', 'scooter', 'gift', 'ice-cream', 'dentist',
]
@cache_page(60)
def timeline(request):
    """Return geotagged tweets from the configured timeline as GeoJSON.

    Scans up to 200 recent tweets and keeps only those carrying
    coordinates; per-tweet failures are logged and skipped. The result
    is cached for 60 seconds.
    """
    auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    auth.set_access_token(TWITTER_ACCESS_KEY, TWITTER_ACCESS_SECRET)
    api = tweepy.API(auth)
    # FIX: removed an unused `api.user_timeline(...)` call here -- its
    # result was never read and it cost an extra rate-limited API request.
    points = []
    cursor = tweepy.Cursor(api.user_timeline, id=TWITTER_USER_TIMELINE) \
        .items(200)
    for tweet in cursor:
        try:
            if tweet.geo:
                point = extract_point(tweet)
                points.append(point)
        except Exception:
            print(traceback.format_exc())
    return JsonResponse({
        'type': 'FeatureCollection',
        'features': points
    }, safe=False)
def extract_point(tweet):
    """Convert a geotagged tweet into a GeoJSON Feature.

    The marker icon is 'camera' for photo tweets, or any hashtag that
    names a Maki icon (hashtags win), defaulting to 'post'. Campsite
    and restaurant/cafe markers get distinctive colors.
    """
    lat, lon = tweet.geo['coordinates']
    color = '#63b6e5'
    icon = 'post'
    photo_url = ''
    # Photo tweets carry the image URL along and get a camera marker.
    if 'media' in tweet.entities:
        for media_item in tweet.entities['media']:
            if media_item['type'] == 'photo':
                photo_url = media_item['media_url']
                icon = 'camera'
    # A hashtag naming a Maki icon overrides the marker symbol.
    if 'hashtags' in tweet.entities:
        for hash_tag in tweet.entities['hashtags']:
            icon_key = hash_tag['text'].lower()
            if icon_key in MAKI_ICONS:
                icon = icon_key
    if icon == 'campsite':
        color = '#fa946e'
    elif icon in ('restaurant', 'cafe'):
        color = '#c091e6'
    properties = {
        'time': tweet.created_at,
        'photo': photo_url,
        'text': tweet.text,
        'marker-symbol': icon,
        'marker-color': color,
        'marker-size': 'large',
    }
    return {
        'type': 'Feature',
        'geometry': {
            'type': 'Point',
            # GeoJSON uses [longitude, latitude] ordering.
            'coordinates': [lon, lat],
        },
        'properties': properties,
    }
| {"/gps/views.py": ["/gps/models.py"]} |
61,905 | cuulee/biketour | refs/heads/master | /gps/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated initial migration: create the Point table with all
    # telemetry columns required (later relaxed to NULL in 0003).

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Point',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('time', models.DateTimeField()),
                ('lat', models.FloatField()),
                ('lon', models.FloatField()),
                ('accuracy', models.FloatField()),
                ('speed', models.FloatField()),
                ('battery', models.FloatField()),
                ('satellites', models.FloatField()),
                ('direction', models.FloatField()),
                ('provider', models.CharField(max_length=50)),
                ('native_altitude', models.FloatField()),
                ('google_altitude', models.FloatField()),
            ],
        ),
    ]
| {"/gps/views.py": ["/gps/models.py"]} |
61,906 | cuulee/biketour | refs/heads/master | /gps/urls.py | from django.conf.urls import url
from . import views
# URL routes for the gps app: the tracker logging endpoint, GeoJSON track
# feeds, the map page, and GPX upload.
urlpatterns = [
    url(r'^log$', views.log, name='log'),
    url(r'^track$', views.track, name='track'),
    url(r'^track/current$', views.current_position, name='current_position'),
    url(r'^map$', views.map, name='map'),
    url(r'^upload$', views.upload_gpx, name='upload'),
]
| {"/gps/views.py": ["/gps/models.py"]} |
61,907 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/addresses.py | from six import string_types
import re
import pandas as pd
# use sparingly, these take a lot of time to evaluate
REPLACEMENTS = [(r'^ONE ', '1 '),
                (r'^TWO ', '2 '),
                (' -', '-'),
                (r' TERM$', ' TERMINAL'),
                (r'^#', ''),
                (r'\bB{1,2}LK{1,2}\s?', '')]

# Street-suffix tokens recognized when parsing addresses.
VALID_SUFFIXES = ['AVE', 'AVEN', 'BLOCK', 'BLVD', 'BOULEVARD', 'CIR', 'COURT',
                  'CREST', 'CREEK', 'CRK', 'DR', 'DRI', 'DRIVE', 'HBR', 'HTS',
                  'LANE', 'LOOP', 'PARKWAY', 'PKWAY', 'PKWY', 'PL', 'PLACE',
                  'PLAZA', 'PLZ', 'R', 'ROAD', 'SR', 'ST', 'STREET',
                  'TERR', 'TERRACE', 'VISTA', 'VW', 'WAY', 'WY']


def replace(addresses, replacements=REPLACEMENTS, inplace=True):
    """Replace text in addresses

    Parameters
    ----------
    addresses : pandas.Series
    replacements : tuple or list of tuples
        Replacements provided as (pattern to replace, replacement). Multiple
        replacements can be provided as a list of tuples.
    inplace : bool

    Returns
    -------
    If inplace is False, returns the address series with the replacements
    made; otherwise the series is modified in place and None is returned.
    """
    # A single (pattern, replacement) pair is wrapped into a list so the
    # loop below can treat both calling conventions uniformly. (The check
    # is structural -- "first element is not itself a pair" -- rather than
    # the previous six.string_types isinstance test; equivalent for both
    # supported input shapes.)
    if not isinstance(replacements[0], (list, tuple)):
        replacements = [replacements, ]
    if inplace:
        addr = addresses
    else:
        addr = addresses.copy()
    # BUG FIX: this loop previously iterated the module-level REPLACEMENTS
    # constant, silently ignoring the `replacements` argument.
    for pattern, replacement in replacements:
        addr.replace(pattern, replacement, regex=True, inplace=True)
    if not inplace:
        return addr
def parse_123_main_street(addresses):
    """Parse the common address format, e.g. 123 MAIN STREET

    Parameter
    ---------
    addresses : pandas.Series

    Returns
    -------
    A DataFrame with street_number and street_name columns, containing
    only the rows that matched the pattern.
    """
    pattern = re.compile(r'^(?P<street_number>\d+\-?\d?) (?P<street_name>[\w\d\s]+)')
    parsed = addresses.str.extract(pattern, expand=True)
    return parsed.dropna()
def parse_P123_main_street(addresses):
    """Parse addresses that contain a prefix, e.g., P123-1 PARK STREET

    Parameter
    ---------
    addresses : pandas.Series

    Returns
    -------
    A DataFrame with prefix, street_number and street_name columns,
    containing only the rows that matched and whose prefix is plausible.
    """
    pattern = re.compile(r'^(?P<prefix>[A-Z]+)\-?(?P<street_number>\d+[\-\W]?\d?) '
                         '(?P<street_name>[\w\d\s]+)')
    parsed = addresses.str.extract(pattern, expand=True)
    parsed.dropna(inplace=True)
    # Keep only rows where some word of the street name shares the
    # prefix's first letter; otherwise the "prefix" is considered
    # spurious and the row is dropped.
    spurious = [
        index for index, row in parsed.iterrows()
        if not any(word.startswith(row.prefix[0])
                   for word in row.street_name.split(' '))
    ]
    parsed.drop(spurious, inplace=True)
    return parsed
def parse_addresses(addresses):
    """Parse addresses into street name and number according to several rules.

    Parameter
    ---------
    addresses : pandas.Series

    Returns
    -------
    A DataFrame indexed like *addresses* with street_name and
    street_number columns; rows no parser matched remain None.
    """
    # Many addresses are in parking lots, which carry no street number,
    # so only the remaining rows are treated as potential street
    # addresses.
    lot_mask = addresses.str.contains('^[A-Z]LOT.*LOT$')
    street_addresses = addresses.loc[~lot_mask]
    parsed = pd.DataFrame({'street_name': None, 'street_number': None},
                          index=addresses.index)
    # Apply each parser in turn; later matches overwrite earlier ones.
    for parser in (parse_123_main_street, parse_P123_main_street):
        parsed.update(parser(street_addresses))
    return parsed
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,908 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/config.py | import os
import json
from pathlib import Path
import appdirs
import lovelyrita
# Per-user configuration lives in the platform-appropriate config dir,
# e.g. ~/.config/lovelyrita/options.json on Linux.
CONFIG_DIRECTORY = Path(appdirs.user_config_dir()) / 'lovelyrita'
CONFIG_DIRECTORY.mkdir(exist_ok=True)
CONFIG_PATH = CONFIG_DIRECTORY / 'options.json'
if CONFIG_PATH.exists():
    with open(CONFIG_PATH, 'r') as f:
        config = json.load(f)
else:
    config = {}
# Every setting falls back to a default when absent from the file.
GOOGLE_API_KEY = config.get('GOOGLE_API_KEY', None)
GOOGLE_API_URL = config.get('GOOGLE_API_URL', None)
POSTGIS_HOST = config.get('POSTGIS_HOST', 'localhost')
POSTGIS_PORT = config.get('POSTGIS_PORT', '5432')
POSTGIS_DATABASE = config.get('POSTGIS_DATABASE', 'postgres')
POSTGIS_USERNAME = config.get('POSTGIS_USERNAME', 'postgres')
POSTGIS_PASSWORD = config.get('POSTGIS_PASSWORD', '')
VALID_COLUMN_NAMES = config.get('VALID_COLUMN_NAMES', [])
# Datetime formats tried when parsing citation timestamps.
DATETIME_FORMATS = ['%m/%d/%y %H:%M:%S', '%m/%d/%y %H:%M', '%Y-%m-%d %H:%M']
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,909 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/geocode.py | import re
import requests
import datetime
from urllib.parse import urlencode
import numpy as np
import psycopg2
import pandas as pd
from lovelyrita import config
# Module-level aliases for the user-config values (see lovelyrita.config),
# used as defaults by the geocoder classes below.
GOOGLE_API_URL = config.GOOGLE_API_URL
GOOGLE_API_KEY = config.GOOGLE_API_KEY
POSTGIS_HOST = config.POSTGIS_HOST
POSTGIS_PORT = config.POSTGIS_PORT
POSTGIS_USERNAME = config.POSTGIS_USERNAME
POSTGIS_PASSWORD = config.POSTGIS_PASSWORD
POSTGIS_DATABASE = config.POSTGIS_DATABASE
class Geocoder(object):
    """Geocode addresses through the Google Maps API with memoization.

    Resolved addresses are cached in the ``geocodes`` DataFrame (indexed
    by address) so repeated lookups never hit the API twice.
    """

    def __init__(self, geocodes=None, api_url=GOOGLE_API_URL, api_key=GOOGLE_API_KEY):
        """
        Parameters
        ----------
        geocodes : pandas.DataFrame, optional
            Previously-resolved addresses used to seed the cache.
        api_url : str
            Base URL of the geocoding API endpoint.
        api_key : str
            API key sent with every request.
        """
        if geocodes is None:
            geocodes = pd.DataFrame(columns=('lat', 'lng', 'place_id', 'timestamp'))
            geocodes.index.name = 'address'
        self.geocodes = geocodes
        # BUG FIX: the constructor previously ignored its api_url/api_key
        # arguments and always assigned the module-level defaults.
        self.api_url = api_url
        self.api_key = api_key

    def geocode(self, address):
        """
        Pull data from Google Maps API

        Parameters
        ----------
        address : str

        Returns
        -------
        (lat, lng, place_id) for the first geocoding result.

        Raises
        ------
        Exception
            On a 404 response or a non-OK API status.
        """
        # Serve from the cache when this address was already resolved.
        try:
            g = self.geocodes.loc[address]
            return g['lat'], g['lng'], g['place_id']
        except KeyError:
            pass
        query = {'address': address,
                 'key': self.api_key}
        url = self.api_url + 'json?' + urlencode(query)
        response = requests.get(url)
        if response.status_code == 404:
            raise Exception("404 error for {}".format(url))
        content = response.json()
        if content['status'] != 'OK':
            raise Exception("Status not OK for {}".format(url))
        place_id = content['results'][0]['place_id']
        lat = content['results'][0]['geometry']['location']['lat']
        lng = content['results'][0]['geometry']['location']['lng']
        timestamp = str(datetime.datetime.now())
        new_geocode = pd.Series({'place_id': place_id,
                                 'lat': lat, 'lng': lng,
                                 'timestamp': timestamp},
                                name=address)
        # NOTE(review): DataFrame.append was removed in pandas 2.0; this
        # assumes an older pandas -- confirm before upgrading.
        self.geocodes = self.geocodes.append(new_geocode)
        return lat, lng, place_id
class PostGISGeocoder(object):
    """Geocoder backed by a PostGIS/TIGER ``geocode()`` database function."""

    def __init__(self, host=POSTGIS_HOST, port=POSTGIS_PORT, database=POSTGIS_DATABASE,
                 user=POSTGIS_USERNAME, password=POSTGIS_PASSWORD):
        """A PostGIS geocoder

        Opens a psycopg2 connection and keeps a cursor for the lifetime
        of the instance.
        """
        connection = psycopg2.connect(host=host, database=database,
                                      user=user, port=port, password=password)
        self.connection = connection
        self.cursor = connection.cursor()

    def geocode(self, address):
        """Get the latitude and longitude of an address

        Parameters
        ----------
        address : str

        Returns
        -------
        A dictionary containing keys rating, longitude, latitude,
        street_number, street_name, and street_suffix (rating is a numeric
        value indicating how uncertain the geocoding is). All values are
        None when the address could not be geocoded.
        """
        # Strip characters that the geocode() call cannot handle.
        patt = "[" + re.escape(r"()\'+!*") + "]"
        address = re.sub(patt, '', address)
        columns = ['rating', 'longitude', 'latitude',
                   'street_number', 'street_name', 'street_suffix']
        query = ("""SELECT g.rating, ST_X(g.geomout) As lon, ST_Y(g.geomout) As lat, """
                 """(addy).address As stno, (addy).streetname As street, """
                 """(addy).streettypeabbrev As styp, """
                 """(addy).location As city, """
                 """(addy).stateabbrev As st,(addy).zip """
                 """FROM geocode(%s, 1) As g;""")
        self.cursor.execute(query, (address, ))
        response = self.cursor.fetchone()
        if response:
            # zip stops at the shorter sequence, so the trailing
            # city/state/zip columns of the query are deliberately ignored.
            result = {k: v for k, v in zip(columns, response)}
        else:
            # BUG FIX: this branch used to return a bare list of Nones,
            # while the success path (and the docstring) promise a dict.
            result = {k: None for k in columns}
        return result
def geocode_citations(citations, geocoder=None):
    """Geocode a DataFrame of citations

    Parameters:
    -----------
    citations : pandas.DataFrame
        Must contain street, city, and state columns.
    geocoder : object, optional
        Anything with a ``geocode(address)`` method; defaults to a new
        PostGISGeocoder.

    Returns:
    --------
    A DataFrame of per-row geocoder results, indexed like *citations*.
    """
    if geocoder is None:
        geocoder = PostGISGeocoder()
    # Show a progress bar when the optional dependency is available.
    try:
        from progressbar import progressbar
        rows = progressbar(citations.iterrows(), max_value=len(citations))
    except ImportError:
        rows = citations.iterrows()
    indices, results = [], []
    for index, row in rows:
        full_address = ', '.join(row[['street', 'city', 'state']])
        indices.append(index)
        results.append(geocoder.geocode(full_address))
    return pd.DataFrame(results, index=indices)
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,910 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/__main__.py | from __future__ import print_function
import argparse
from lovelyrita.clean import clean
from lovelyrita.data import read_data, summarize, write_shapefile, to_geodataframe
def parse_arguments():
    """Build and evaluate the lovelyrita command-line interface.

    Subcommands: clean, summarize, convert.
    """
    # Commands are called with `lovelyrita <subcommand> <args>`
    parser = argparse.ArgumentParser(prog="lovelyrita")
    subcommand = parser.add_subparsers(title="subcommand", dest="subcommand")

    # Locals renamed *_parser so they no longer shadow the imported
    # clean/summarize helpers.
    clean_parser = subcommand.add_parser('clean', help="""Clean raw data file""")
    clean_parser.set_defaults(command_name='clean')
    clean_parser.add_argument('in_path', help="""File path to the raw data""")
    clean_parser.add_argument('out_path', help="""Output path""")

    summarize_parser = subcommand.add_parser('summarize', help=("""Generate a column summarize from
raw data file"""))
    summarize_parser.set_defaults(command_name='summarize')
    summarize_parser.add_argument('in_path', help="""Location of the raw data.""")

    convert_parser = subcommand.add_parser('convert', help=("""Convert between two file types"""))
    convert_parser.set_defaults(command_name='convert')
    convert_parser.add_argument('in_path', help="""Location of the raw data.""")
    convert_parser.add_argument('out_path', help="""Where to store the output data""")
    convert_parser.add_argument('--clean', action='store_true',
                                help="""Clean the input data before conversion""")

    return parser.parse_args()
def main(args=None):
    """Entry point: parse the command line and dispatch to a subcommand.

    Note: the `args` parameter is accepted for signature compatibility
    but the command line is always re-parsed, matching prior behavior.
    """
    args = parse_arguments()
    if args.subcommand == 'clean':
        print('... Loading data from {}'.format(args.in_path))
        citations = read_data(args.in_path)
        print('... Cleaning data')
        citations = clean(citations)
        print('... Writing output to {}'.format(args.out_path))
        citations.to_csv(args.out_path)
    elif args.subcommand == 'summarize':
        print('... Loading data from {}'.format(args.in_path))
        citations = read_data(args.in_path)
        print(summarize(citations))
    elif args.subcommand == 'convert':
        from lovelyrita.data import column_map
        # The raw exports label coordinates with bracketed headers.
        column_map['[latitude]'] = 'latitude'
        column_map['[longitude]'] = 'longitude'
        print('... Loading data from {}'.format(args.in_path))
        citations = read_data(args.in_path, column_map, clean=args.clean)
        out_path = args.out_path
        if not out_path.endswith('.shp'):
            raise NotImplementedError('Output file type not supported.')
        print('... Converting to GeoDataFrame')
        citations = to_geodataframe(citations)
        print('... Writing output to {}'.format(out_path))
        write_shapefile(citations, out_path)


if __name__ == "__main__":
    main()
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,911 | openoakland/lovely-rita | refs/heads/master | /setup.py | """A setuptools based setup module.
Based on the setup.py template at https://github.com/pypa/sampleproject
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
from codecs import open
import os.path as op

# Resolve paths relative to this setup.py so builds work from any CWD.
here = op.abspath(op.dirname(__file__))

# Get the long description from the README file
with open(op.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

# Arguments marked as "Required" below must be included for upload to PyPI.
# Fields marked as "Optional" may be commented out.
setup(
    name='lovelyrita',  # Required
    version='0.1',  # Required
    description='Understanding parking enforcement data',  # Required
    long_description=long_description,  # Optional
    url='https://github.com/openoakland/lovely-rita',  # Optional
    author='OpenOakland',  # Optional
    author_email='',  # Optional
    # Classifiers help users find your project by categorizing it.
    #
    # For a list of valid classifiers, see
    # https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[  # Optional
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        # 'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    # keywords='sample setuptools development',  # Optional
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    # install_requires=[''],  # Optional
    # package_data={},
    # For example, the following would provide a command called `sample` which
    # executes the function `main` from this package when invoked:
    entry_points={  # Optional
        'console_scripts': [
            'lovelyrita = lovelyrita.__main__:main',
        ],
    },
)
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,912 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/clean.py | import time
from datetime import datetime
import numpy as np
import pandas as pd
from lovelyrita.addresses import replace
from lovelyrita.config import DATETIME_FORMATS
def impute_missing_times(datetimes, inplace=True):
    """Fill in missing times by linearly interpolating between the nearest
    valid times on either side of each run of nulls.

    Runs of missing values at the very start or end of the series (which
    lack a valid neighbor on one side) are left untouched.

    Parameters
    ----------
    datetimes : pandas.Series
        Series of datetime-like values, possibly containing nulls.
    inplace : bool
        If True (default), fill the input series in place.

    Returns
    -------
    The filled series when ``inplace`` is False; otherwise None.
    """
    if not inplace:
        datetimes = datetimes.copy()
    n_rows = len(datetimes)
    is_null = datetimes.isnull().values
    # Walk over consecutive runs of nulls. For each run bounded by valid
    # values on both sides, interpolate linearly in epoch seconds.
    #
    # Fixes two defects in the original implementation:
    # * a null run touching the first or last row caused the start/end run
    #   lists to pair up incorrectly, interpolating across the wrong spans
    # * interior values were taken from the interpolation starting at the
    #   left endpoint itself (off-by-one), duplicating the start value
    i = 0
    while i < n_rows:
        if not is_null[i]:
            i += 1
            continue
        run_start = i
        while i < n_rows and is_null[i]:
            i += 1
        run_end = i  # one past the last null in this run
        if run_start == 0 or run_end == n_rows:
            # No valid neighbor on one side; nothing to interpolate from.
            continue
        start_datetime = datetimes.iloc[run_start - 1]
        end_datetime = datetimes.iloc[run_end]
        start_seconds = time.mktime(start_datetime.timetuple())
        end_seconds = time.mktime(end_datetime.timetuple())
        # number of nulls in the run plus the two valid endpoints
        n = (run_end - run_start) + 2
        interpolated_seconds = np.linspace(start_seconds, end_seconds, n)
        for offset, j in enumerate(range(run_start, run_end)):
            # offset + 1 skips the left endpoint of the interpolation
            datetimes.iloc[j] = datetime.fromtimestamp(
                interpolated_seconds[offset + 1])
    if not inplace:
        return datetimes
def find_dollar_columns(dataframe, nrows=100):
    """Return the names of columns whose values look like dollar amounts.

    A column qualifies when every one of its first `nrows` values -- after
    nulls are replaced with '$' and everything is cast to str -- starts
    with a '$' sign.
    """
    dollar_columns = []
    for name in dataframe:
        head = dataframe[name].fillna('$').astype('str').iloc[:nrows]
        if all(value.startswith('$') for value in head):
            dollar_columns.append(name)
    return dollar_columns
def convert_dollar_to_float(dollars, inplace=True):
    """Turn a series of dollar strings (e.g. '$434.44') into float32 values.

    Nulls are treated as '$0', i.e. 0.0.

    Parameters
    ----------
    dollars : pandas.Series of str
    inplace : bool
        If True, the fillna / '$'-stripping steps mutate the input series.
        NOTE: the dtype conversion itself can never happen in place (a
        dtype change always allocates a new array), so in-place callers
        must use the returned series to see floats.

    Returns
    -------
    The converted float32 series. Fix: previously the result was returned
    only when inplace=False, which silently discarded the float conversion
    for in-place callers.
    """
    if not inplace:
        dollars = dollars.copy()
    dollars.fillna('$0', inplace=True)
    dollars.replace(r'\$', '', regex=True, inplace=True)
    dollars = dollars.astype('float32', copy=False)
    return dollars
def infer_datetime_format(dt, datetime_formats=DATETIME_FORMATS):
    """Infer which of the candidate datetime formats parses a Series.

    Only the first element of the series is probed.

    Parameters
    ----------
    dt : pandas.Series
    datetime_formats : iterable of str
        Candidate strptime-style format strings, tried in order.

    Returns
    -------
    The first format string that successfully parses ``dt.iloc[0]``.

    Raises
    ------
    Exception
        If no candidate format matches.
    """
    sample = dt.iloc[0]
    for candidate in datetime_formats:
        try:
            pd.to_datetime(sample, format=candidate)
        except ValueError:
            continue
        return candidate
    raise Exception('No datetime format detected for {}'.format(sample))
def get_datetime(dataframe):
    """Build a datetime Series from a DataFrame's date and time columns.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Must have `ticket_issue_date` and `ticket_issue_time` columns.

    Returns
    -------
    A Series of datetime values.
    """
    combined = (dataframe['ticket_issue_date'] + ' ' +
                dataframe['ticket_issue_time'])
    fmt = infer_datetime_format(combined)
    return pd.to_datetime(combined, format=fmt)
def drop_null(dataframe, inplace=True):
    """Drop rows whose ticket_number is null.

    Parameters
    ----------
    dataframe : pandas.DataFrame
    inplace : bool

    Returns
    -------
    If `inplace` is False, the input dataframe with the null citations
    removed; otherwise None (the input is modified in place).
    """
    if not inplace:
        dataframe = dataframe.copy()
    missing = dataframe.ticket_number.isnull()
    dataframe.drop(index=dataframe.index[missing], inplace=True)
    if not inplace:
        return dataframe
def clean_voided(dataframe, add_indicator=True):
    """Strip VOID markers from street names and optionally flag voided rows.

    A citation counts as voided when its street value starts with 'VOID',
    'ZVOID', 'VOIDZ' or 'ZVOIDZ'; the marker is removed from the string.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Must have a `street` column of strings.
    add_indicator : bool
        If True, add a boolean `voided` column recording which rows carried
        a VOID marker. Fix: the original ignored this flag and always added
        the column.
    """
    void_indices = dataframe.street.str.contains(r'^Z?VOIDZ?')
    # regex=True is explicit: pandas 2.0 changed str.replace's default to
    # literal matching, which would leave the markers in place.
    dataframe['street'] = dataframe.street.str.replace(r'^Z?VOIDZ?', '',
                                                       regex=True)
    if add_indicator:
        dataframe['voided'] = void_indices
def clean(dataframe):
    """Apply the full cleaning pipeline to a dataframe of raw citation data.

    Steps: drop null tickets, strip VOID markers, normalize street names,
    build and interpolate issue datetimes, convert dollar columns to float.

    Parameters
    ----------
    dataframe : pandas.DataFrame

    Returns
    -------
    The cleaned DataFrame (the input is also modified in place).
    """
    drop_null(dataframe)
    clean_voided(dataframe)
    replace(dataframe.street)
    datetimes = get_datetime(dataframe)
    impute_missing_times(datetimes)
    dataframe['ticket_issue_datetime'] = datetimes
    dataframe.drop(['ticket_issue_time', 'ticket_issue_date'], axis=1,
                   inplace=True)
    for column in find_dollar_columns(dataframe):
        # Fix: assign the converted series back. A dtype change can never
        # happen truly in place, so the previous inplace=True call left the
        # column holding (stripped) strings rather than floats.
        dataframe[column] = convert_dollar_to_float(dataframe[column],
                                                    inplace=False)
    return dataframe
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,913 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/utils.py | from __future__ import print_function
import numpy as np
import pandas as pd
def get_column_report(df):
    """Generate a per-column summary of the data in a DataFrame.

    For each column: name, dtype, number of unique values, a sample
    non-null value (NaN if the column is all-null), the count of NaNs and
    the percentage of NaNs. Rows are sorted by NaN count.
    """
    rows = []
    n_total = df.shape[0]
    for name in df.columns:
        uniques = df[name].unique()
        sample = np.nan
        for candidate in uniques:
            if candidate is not np.nan:
                sample = candidate
                break
        null_count = df[name].isnull().sum()
        rows.append([name, df[name].dtype, len(uniques), sample,
                     null_count, 100. * null_count / n_total])
    header = ["Column Name", "Data Type", "Unique Count",
              "Sample Value", "NaNs", "% NaN"]
    report = pd.DataFrame(rows, columns=header).round(2)
    report.sort_values(by="NaNs", inplace=True)
    return report
def get_uniques(df):
"""Return the unique values for each column
"""
for column in df.columns:
print(column, df[column].unique())
def get_addresses(df):
    """Return the distinct lowercase, whitespace-normalized addresses.

    Addresses are formed by joining the Street, City and State columns.
    Order of the returned list is not guaranteed (set-backed).
    """
    addresses = set()
    combined = df["Street"] + " " + df["City"] + " " + df["State"]
    # Fix: .items() replaces .iteritems(), which was removed in pandas 2.0.
    for _, item in combined.items():
        addresses.add(" ".join(item.lower().split()))
    return list(addresses)
def output_addresses(df, file_out):
    """Write the distinct addresses found in `df` to `file_out`, one per
    line, and return the list that was written.
    """
    addresses = get_addresses(df)
    with open(file_out, 'w') as handle:
        handle.writelines(address + '\n' for address in addresses)
    return addresses
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,914 | openoakland/lovely-rita | refs/heads/master | /lovelyrita/data.py | from __future__ import print_function
import numpy as np
import pandas as pd
from shapely.geometry import Point
import geopandas
from lovelyrita.clean import clean as clean_data
from lovelyrita.config import VALID_COLUMN_NAMES as valid_column_names
def read_data(paths, usecols=None, delimiter=',', clean=False):
    """Load (and optionally clean) citation data from one or more files.

    Parameters
    ----------
    paths : str or list of str
        File path(s) of the data to be loaded.
    usecols : list of str, optional
        Columns to load; when omitted they are derived from the first
        file via get_column_names().
    delimiter : str
    clean : bool
        When True, run each file's rows through the cleaning pipeline.

    Returns
    -------
    A single DataFrame concatenating all files, reindexed from 0.
    """
    if not isinstance(paths, (tuple, list)):
        paths = [paths, ]
    frames = []
    for path in paths:
        if usecols is None:
            usecols = get_column_names(path)
        frame = pd.read_csv(path, usecols=usecols, delimiter=delimiter)
        frame['street'] = frame['street'].str.strip(' ')
        if clean:
            frame = clean_data(frame)
        frames.append(frame)
    return pd.concat(frames).reset_index(drop=True)
def get_column_names(path, valid_column_names=valid_column_names):
    """Return the columns present in the file, filtered to the valid set.

    Parameters
    ----------
    path : str
        CSV file to inspect (only its header row is read).
    valid_column_names : list of str or None
        When given, restrict the result to names in this list.

    Returns
    -------
    list of str
    """
    header = pd.read_csv(path, nrows=1)
    if valid_column_names is None:
        return [name for name in header]
    return [name for name in header if name in valid_column_names]
def to_geodataframe(dataframe, copy=False, drop_null_geometry=True,
                    projection='epsg:4326'):
    """Convert a pandas DataFrame with latitude/longitude columns into a
    geopandas GeoDataFrame.

    Rows whose latitude is 0 get a null geometry; those rows are dropped
    by default. Datetime columns are rendered to strings because the
    shapefile toolchain cannot store datetimes.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Must contain latitude and longitude fields.
    copy : bool
        Work on a copy of the input instead of mutating it.
    drop_null_geometry : bool
    projection : str

    Returns
    -------
    A GeoDataFrame of the given DataFrame with a `geometry` column.
    """
    frame = dataframe.copy() if copy else dataframe
    frame.latitude = frame.latitude.astype('float32')
    frame.longitude = frame.longitude.astype('float32')
    # latitude == 0 marks a missing fix; map it to a null geometry
    frame['geometry'] = [
        Point(lon, lat) if lat != 0 else None
        for lat, lon in zip(frame.latitude, frame.longitude)
    ]
    frame.drop(['latitude', 'longitude'], axis=1, inplace=True)
    if drop_null_geometry:
        frame = frame.loc[~frame.geometry.isnull()]
    # geopandas cannot handle datetime formats, so convert to string
    for column in frame.select_dtypes(include=['datetime']):
        frame[column] = frame[column].dt.strftime('%m/%d/%y %H:%M:%S')
    return geopandas.GeoDataFrame(frame, geometry='geometry',
                                  crs={'init': projection})
def write_shapefile(geodataframe, path):
    """Write a geodataframe to a shapefile.

    Parameters
    ----------
    geodataframe : geopandas.GeoDataFrame
    path : str
        Destination path; written with the ESRI Shapefile driver.
    """
    geodataframe.to_file(path, driver='ESRI Shapefile')
def get_sample_value(series):
    """Return the first non-null unique value from a series.

    Parameters
    ----------
    series : pandas.Series

    Returns
    -------
    A sample value from the series, or None if all values are null.
    """
    for value in series.unique():
        # Fix: pd.isnull() rather than `value is not np.nan` -- the identity
        # comparison misses NaNs in float-dtype columns (np.float64 NaN
        # objects are not the np.nan singleton) and misses None/NaT entirely.
        if not pd.isnull(value):
            return value
def summarize(dataframe):
    """Generate a summary of the data in a dataframe.

    Parameters
    ----------
    dataframe : pandas.DataFrame

    Returns
    -------
    A DataFrame with one row per input column -- data type, number of
    unique values, a sample value, and the count and percentage of null
    values -- sorted by null count.
    """
    header = ["Column Name", "Data Type", "Unique Count", "Sample Value",
              "null", "% null"]
    rows = []
    total = dataframe.shape[0]
    for name in dataframe.columns:
        series = dataframe[name]
        n_null = series.isnull().sum()
        rows.append([name, series.dtype, len(series.unique()),
                     get_sample_value(series), n_null,
                     100. * n_null / total])
    summary = pd.DataFrame(rows, columns=header).round(2)
    summary.sort_values(by="null", inplace=True)
    return summary
| {"/lovelyrita/__main__.py": ["/lovelyrita/clean.py", "/lovelyrita/data.py"], "/lovelyrita/clean.py": ["/lovelyrita/addresses.py", "/lovelyrita/config.py"], "/lovelyrita/data.py": ["/lovelyrita/clean.py", "/lovelyrita/config.py"]} |
61,926 | utkuozsan/picon | refs/heads/master | /agent/setup.py | #! /usr/bin/env python3
import setuptools
import os
# Discover every package in this source tree so they all get installed.
pkgs = setuptools.find_packages()

setuptools.setup(
    name = "PiCon Registration Agent",
    version = "0.0.1",
    author = "Team Kickass",
    author_email = "ryan@u13.net",
    description = "PiCon registration agent for the PiCon console registry",
    license = "BSD",
    keywords = "RaspberryPi Terminal Server Console",
    url = "http://nanog.org",
    packages=pkgs,
    # Runtime dependencies installed alongside the agent.
    install_requires = ['daemonize','pyroute2','ipaddress','netifaces'],
    long_description="See PiCon README",
)
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,927 | utkuozsan/picon | refs/heads/master | /server/tests/initialize.py | from unittest import TestCase
from ..db import DB
import os
# Scratch database file used by the test; removed before and after each run.
test_file = r'./blank.db'
# Remembers the DB's configured dbfile so shutdown() can restore it.
original_file = None
class TestInitialize(TestCase):
    """Exercise DB.initialize() against a brand-new database file."""

    def test_initialize(self):
        """A freshly initialized database should contain zero devices."""
        db = DB()
        global original_file
        self.addCleanup(self.shutdown, db)
        self.delete_test_file()
        original_file = db.dbfile
        db.dbfile = test_file
        db.initialize()
        devices = db.get_device_details()
        # Fix: assertEqual -- assertEquals is a deprecated alias that was
        # removed in Python 3.12.
        self.assertEqual(len(devices), 0)

    def delete_test_file(self):
        """Remove the scratch database file if it exists."""
        try:
            os.unlink(test_file)
        except FileNotFoundError:
            pass

    def shutdown(self, db):
        """Restore the original dbfile, reconnect, and clean up."""
        db.dbfile = original_file
        db.initialize()
        db.close()
        self.delete_test_file()
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,928 | utkuozsan/picon | refs/heads/master | /agent/piconagent/piconagent.py | import traceback
import piconagent.sshchannelthread as sshchannelthread
import piconagent.utils as utils
from time import sleep
import logging
import math
import json,requests
class PiConAgent():
    """Agent that registers this device with a PiCon server and maintains a
    reverse SSH tunnel to it.

    Attributes:
        endpoint: base URL of the server API (e.g. 'http://host/api/')
        headers: HTTP headers sent with registration POSTs
        holdtime: seconds the server waits before declaring this device dead
        interval: seconds between registration attempts
        tunnelserver/tunnelport: tunnel endpoint handed back by the server;
            None until a registration response includes tunnel details
    """

    def __init__(self, endpoint='http://localhost/api/', headers=None,
                 holdtime=300, interval=60):
        # requests is too noisy for INFO
        logging.basicConfig(level=logging.INFO)
        logging.getLogger('requests').setLevel(logging.WARN)
        self.endpoint = endpoint
        # Fix: None sentinel instead of a mutable dict default argument.
        if headers is None:
            headers = {'content-type': 'application/json'}
        self.headers = headers
        self.holdtime = holdtime
        self.interval = interval
        self.sshChannelThread = None
        # Fix: initialize tunnel details here. run() reads these attributes,
        # but the original only set them inside register(), so a response
        # without tunnel data caused an AttributeError.
        self.tunnelserver = None
        self.tunnelport = None

    def register(self):
        """POST this device's details to the server's register endpoint.

        Collects hostname, serial number, interfaces and serial ports, and
        records any tunnel server/port the server hands back.

        Returns:
            True on a successful registration, False otherwise.
        """
        body = {}
        body['hostname'] = utils.getHostname()
        body['sn'] = utils.getSerial()
        try:
            body['interfaces'] = utils.getInterfaces()
        except Exception as e:
            logging.error('Skipping this registration attempt because: ' + str(e))
            logging.error("%d failed attempts in a row will result in the server declaring us dead (holdtime: %d, registration interval: %d)" % (math.ceil(self.holdtime/self.interval), self.holdtime, self.interval))
            return False
        body['ports'] = utils.getPorts()
        body['holdtime'] = self.holdtime
        jsonbody = json.dumps(body, sort_keys=True, indent=2)
        try:
            r = requests.post(self.endpoint + 'register', data=jsonbody,
                              headers=self.headers, timeout=2)
        except Exception as e:
            logging.error('PiCon registration attempt failed: ' + str(e))
            return False
        else:
            logging.info('Successfully registered with endpoint ' + self.endpoint + 'register')
            logging.debug('Sent JSON in POST body:' + "\n" + jsonbody)
            logging.debug('Received JSON in POST response:' + "\n" + r.text)
            rjson = r.json()
            if rjson is not None and 'tunnel' in rjson and 'server' in rjson['tunnel']:
                self.tunnelserver = rjson['tunnel']['server']
            if rjson is not None and 'tunnel' in rjson and 'port' in rjson['tunnel']:
                self.tunnelport = rjson['tunnel']['port']
            return True

    def run(self):
        """Registration loop: re-register every `interval` seconds and keep
        the SSH tunnel thread alive."""
        while True:
            regStatus = self.register()
            if regStatus and (not self.sshChannelThread or not self.sshChannelThread.is_alive()) and self.tunnelport and self.tunnelserver:
                if self.sshChannelThread is None and self.tunnelport and self.tunnelserver:
                    self.connectSSH()
                elif regStatus:
                    logging.error("SSH tunnel connection closed unexpectedly, restarting connection to %s:%d" % (self.tunnelserver, self.tunnelport))
                    self.connectSSH(restart=True)
            sleep(self.interval)

    def connectSSH(self, restart=False):
        """Start (or restart) the background SSH tunnel thread."""
        if restart:
            # Fix: corrected the "Retarting" typo in this log message.
            logging.info("Restarting SSH tunnel connection to %s:%d" % (self.tunnelserver, self.tunnelport))
        else:
            logging.info("Starting SSH tunnel connection to %s:%d" % (self.tunnelserver, self.tunnelport))
        self.sshChannelThread = sshchannelthread.sshChannelThread(tunnelserver=self.tunnelserver, tunnelport=self.tunnelport)
        self.sshChannelThread.start()
def main():
    """Create an agent against the hard-coded endpoint and register once."""
    # create an agent and register
    a = PiConAgent('http://199.187.221.170:5000/api/')
    # Fix: the original then referenced `sys` (never imported) and
    # `PiConAgent.jsonbody` (no such attribute), crashing even after a
    # successful registration. Just report the outcome instead.
    ok = a.register()
    logging.info('Registration %s', 'succeeded' if ok else 'failed')
# Allow running this module directly for a one-shot registration.
if __name__ == "__main__":
    main()
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,929 | utkuozsan/picon | refs/heads/master | /agent/piconagent/sshchannelthread.py | import asyncio, asyncssh, sys, threading
import logging
class sshChannelThread(threading.Thread):
    """Background thread that opens a reverse SSH port-forward to the PiCon
    server: remote `tunnelport` on the server is forwarded to local port 22.
    """

    def __init__(self, loop=None, tunnelserver='localhost', tunnelport=2222):
        # Fix: the original default was `loop=asyncio.new_event_loop()`,
        # which is evaluated once at class-definition time, so every
        # instance silently shared a single event loop.
        if loop is None:
            loop = asyncio.new_event_loop()
        self.loop = loop
        super(sshChannelThread, self).__init__()
        self.tunnelserver = tunnelserver
        self.tunnelport = tunnelport

    def run(self):
        """Thread entry point: drive the SSH connection on our own loop."""
        try:
            asyncio.set_event_loop(self.loop)
            self.loop.run_until_complete(self.run_client())
        except (OSError, asyncssh.Error) as exc:
            logging.critical('Failed to open SSH port forwarding channel: ' + str(exc))

    async def run_client(self):
        # Fix: async/await replaces @asyncio.coroutine / `yield from`,
        # which was removed in Python 3.11.
        async with asyncssh.connect(self.tunnelserver) as conn:
            logging.info("SSH tunnel connection to %s localhost:%d is now open" % (self.tunnelserver, self.tunnelport))
            listener = await conn.forward_remote_port("", self.tunnelport, 'localhost', 22)
            await listener.wait_closed()
            await conn.wait_closed()
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,930 | utkuozsan/picon | refs/heads/master | /agent/agent-bootstrap.py | #! /usr/bin/env python3
from piconagent.piconagent import PiConAgent
import argparse
from daemonize import Daemonize
import logging
from urllib.parse import urlparse
# Command-line interface for the PiCon agent daemon.
parser = argparse.ArgumentParser(description = 'Run a PiCon agent')
parser.add_argument('endpoint', type=str, help='Base URL of the PiCon server API, e.g. http://picon.example.com/api/')
parser.add_argument('--daemonize', action='store_true', help='Run in background [default: False]')
parser.add_argument('--holdtime', type=int, help='Hold time: seconds for the server to wait before declaring this device unavailable [default: 300]', default=300)
parser.add_argument('--interval', type=int, help='Interval: seconds between registrations [default: 60]', default=60)
parser.add_argument('--pidfile', type=str, help='PID File: PID file location, used only if daemonizing [default: /tmp/picon-agent.pid]', default='/tmp/picon-agent.pid')
parser.add_argument('--logfile', type=str, help='Log File: log file location, [default: None]', default=None)
parser.add_argument('-d', dest='debug', action='store_true', help='Debug: Maximum verbosity (overrides -v)')
parser.add_argument('-v', dest='verbose', action='count', help='Verbose Level: Repeat up to 3 times')
args = parser.parse_args()

# Append '/' to endpoint URL if not already present
if args.endpoint[-1] != '/':
    args.endpoint += '/'
def loggingLevelFromVerboseCount(vcount):
    """Map a counted -v flag to a logging level.

    None -> ERROR, 1 -> WARNING, 2 -> INFO, 3 or more -> DEBUG.
    Any other value (e.g. 0) falls back to ERROR after logging a critical
    message.

    Parameters:
        vcount: int or None -- value of the counted -v flag (argparse
            leaves it None when the flag is absent).

    Returns:
        A logging.* level constant (int).
    """
    if vcount is None:
        return logging.ERROR
    elif vcount == 1:
        return logging.WARNING
    elif vcount == 2:
        return logging.INFO
    elif vcount >= 3:
        return logging.DEBUG
    else:
        # Fix: the original called an undefined `logger` and concatenated
        # str + int, both of which raised before the fallback could return.
        logging.critical('Undefined Verbosity Level: %s', vcount)
        return logging.ERROR
def main():
    """Configure logging from the CLI flags and run the agent loop."""
    logLevel = loggingLevelFromVerboseCount(args.verbose)
    if args.debug:
        logLevel = logging.DEBUG
    logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', level=logLevel, filename=args.logfile)
    logging.info('Starting PiCon Agent...')
    a = PiConAgent(args.endpoint, holdtime=args.holdtime, interval=args.interval)
    # Fix: the endpoint placeholder was '%ss', which appended a stray 's'
    # to the logged URL ('%ss' is kept for holdtime, where the trailing
    # 's' means seconds).
    logging.info("Using endpoint %s, holdtime %ss, reporting interval %s" % (args.endpoint, a.holdtime, a.interval))
    a.run()
if args.daemonize:
    daemon = Daemonize(app='picon-agent', pid=args.pidfile, action=main)
    daemon.start()
    # Fix: the original formatted two placeholders with a single argument
    # ("Daemonized, PID (%s) can be found at %s" % args.pidfile), raising
    # TypeError. NOTE(review): Daemonize.start() forks -- confirm whether
    # this log line is ever reached in the parent process.
    logging.info("Daemonized, PID file can be found at %s" % args.pidfile)
else:
    main()
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,931 | utkuozsan/picon | refs/heads/master | /server/db.py | import os
import sqlite3
import datetime
import ipaddress
import sys
# First TCP port considered when assigning reverse-SSH listener ports.
LISTENER_PORT_BASE = 10000
# SQLite database file used by the server.
DBFILE = r'./server.db'
# Address agents should SSH to for their reverse tunnel.
TUNNEL_SERVER = '2620:0:ce0:101:a00:27ff:feb0:faef'
class DB:
    """
    Database connectivity object for Picon. Uses SQLite3 for storage.

    Attributes:
        dbfile: Location of the database file
        listener_port_base: first port considered for tunnel assignment
    """

    def __init__(self):
        self._conn = None
        self.dbfile = DBFILE
        self.listener_port_base = LISTENER_PORT_BASE
        self.initialize()

    def initialize(self):
        """
        Initialize the database connection. If the database file does not
        exist, create it and initialize the schema.
        :return: None
        """
        dbfile_exists = os.path.isfile(self.dbfile)
        self._conn = sqlite3.connect(self.dbfile)
        if dbfile_exists:
            if not self.is_schema_installed():
                raise Exception("server.db does not have schema configured")
        else:
            self.create_schema()

    def is_schema_installed(self):
        """
        Check whether any tables exist in the database file.
        :return: True if tables are found, False if not
        """
        c = self._conn.cursor()
        c.execute("""
            select name from sqlite_master where type='table';
        """)
        return len(c.fetchall()) > 0

    def create_schema(self):
        """
        Creates database schema in a blank database.
        :return: None
        """
        c = self._conn.cursor()
        c.execute("""
            create table devices (
                dev_id integer primary key,
                hostname text,
                sn text,
                first_seen datetime,
                last_updated datetime,
                holdtime int,
                tunnelport int UNIQUE);
        """)
        c.execute("create table serialports (dev_id integer, port_name text);")
        c.execute("""
            create table interfaces (
                dev_id integer,
                int_name text,
                state integer,
                addr text,
                ip_version integer);
        """)
        self._conn.commit()

    def update_device(self, dev_data):
        """
        Takes registration data transmitted by the Picon device and stores it
        in the database. Existing devices (matched by serial number) are
        updated; new ones are inserted.

        :param dev_data: Data dictionary from the device, converted from JSON
                         format
        :return: dict with "status" and "tunnel" (server/port) keys
        """
        dev_id = self.get_devid_by_sn(dev_data['sn'])
        c = self._conn.cursor()
        now = datetime.datetime.utcnow()
        if dev_id is not None:
            c.execute("""
                update devices set
                    hostname=?
                    , sn=?
                    , last_updated=?
                    , holdtime=?
                where dev_id=?;
            """, [dev_data['hostname'], dev_data['sn'], now,
                  dev_data['holdtime'], dev_id])
            self._conn.commit()
        else:
            c.execute("""
                insert into devices (
                    hostname
                    , sn
                    , first_seen
                    , last_updated
                    , holdtime
                )
                values (?, ?, ?, ?, ?);
            """, [dev_data['hostname'], dev_data['sn'], now, now,
                  dev_data['holdtime']])
            self._conn.commit()
            dev_id = self.get_devid_by_sn(dev_data['sn'])
        self.update_interfaces(dev_id, dev_data['interfaces'])
        self.update_serialports(dev_id, dev_data['ports'])
        tunnel = self.assign_tunnelport(dev_id)
        resp = {
            "status": "ok",
            "tunnel": tunnel
        }
        return resp

    def get_interface_details(self, dev_id):
        """
        Get interface details for a particular device.
        :param dev_id: dev_id of the device
        :return: dict() containing items keyed by interface name
        """
        c = self._conn.cursor()
        c.execute("""select int_name, state, addr, ip_version from
                     interfaces where dev_id=?""", [dev_id])
        results = c.fetchall()
        if_list = dict()
        for r in results:
            if r[0] not in if_list:
                if_list[r[0]] = {
                    'addrs': [],
                    'state': r[1]
                }
            if_list[r[0]]['addrs'].append(r[2])
        return if_list

    def get_serialport_details(self, dev_id):
        """
        Get available serial port details for a particular device.
        :param dev_id: dev_id of the device
        :return: list of serial port device names ("ttyUSB0, ttyUSB1")
        """
        c = self._conn.cursor()
        c.execute("select port_name from serialports where dev_id=?", [dev_id])
        results = c.fetchall()
        return [r[0] for r in results]

    def get_device_details(self, dev_id=None):
        """
        Get all available details for a particular device, or from all devices
        if a device id is not provided.
        :param dev_id: ID of a particular device, or None to return all devices
        :return: List of dicts() describing all devices
        """
        devlist = list()
        c = self._conn.cursor()
        if dev_id is None:
            c.execute('select dev_id, hostname, sn, first_seen, last_updated, '
                      'holdtime from devices;')
        else:
            c.execute('select dev_id, hostname, sn, first_seen, last_updated, '
                      'holdtime from devices where dev_id=?', [dev_id])
        results = c.fetchall()
        for r in results:
            dev_dict = dict()
            dev_id = r[0]
            dev_dict['dev_id'] = r[0]
            dev_dict['hostname'] = r[1]
            dev_dict['sn'] = r[2]
            dev_dict['first_seen'] = r[3]
            dev_dict['last_updated'] = r[4]
            dev_dict['holdtime'] = r[5]
            dev_dict['interfaces'] = self.get_interface_details(dev_id)
            dev_dict['ports'] = self.get_serialport_details(dev_id)
            devlist.append(dev_dict)
        return devlist

    def get_devid_by_sn(self, sn):
        """
        Obtain a device id given a device serial number.
        :param sn: string containing the serial number
        :return: device ID if found, None otherwise
        """
        c = self._conn.cursor()
        c.execute("""
            select
                dev_id
            from devices
            where sn=?;
        """, [sn])
        results = c.fetchall()
        if len(results) > 0:
            return results[0][0]
        return None

    def update_interfaces(self, dev_id, iflist):
        """
        Replaces the stored interfaces for a particular device.
        :param dev_id: Device ID of the unit
        :param iflist: Data dict() describing the interfaces
        :return: None
        """
        self.delete_interfaces_by_devid(dev_id)
        ifstates = [(dev_id, ifname, iflist[ifname]['state'])
                    for ifname in iflist]
        # flatten to one row per (interface, address) pair
        insert_list = list()
        for i in ifstates:
            insert_list.extend([(i[0], i[1], i[2], addr,
                                 self.ip_version(addr))
                                for addr in iflist[i[1]]['addrs']])
        c = self._conn.cursor()
        c.executemany("""
            insert into interfaces (
                dev_id
                , int_name
                , state
                , addr
                , ip_version
            )
            values (?, ?, ?, ?, ?);
        """, insert_list)
        self._conn.commit()

    def delete_device_by_devid(self, dev_id):
        """
        Deletes a device from all tables.
        :param dev_id: Device id to delete
        :return: None
        """
        self.delete_interfaces_by_devid(dev_id)
        self.delete_serialports_by_devid(dev_id)
        self.delete_listener_port_by_devid(dev_id)
        c = self._conn.cursor()
        c.execute("delete from devices where dev_id=?;", [dev_id])
        self._conn.commit()

    def delete_interfaces_by_devid(self, dev_id):
        """
        Deletes a device's interfaces from interface table.
        :param dev_id: Device id to delete
        :return: None
        """
        c = self._conn.cursor()
        c.execute("""
            delete from interfaces where dev_id=?;
        """, [dev_id])
        self._conn.commit()

    def update_serialports(self, dev_id, portlist):
        """
        Replaces the stored serial ports for a device.
        :param dev_id: Device id of the owning device
        :param portlist: List of str()'s describing the port names
        :return: None
        """
        self.delete_serialports_by_devid(dev_id)
        c = self._conn.cursor()
        insert_list = [(dev_id, p) for p in portlist]
        c.executemany("""
            insert into serialports (
                dev_id
                , port_name
            )
            values (?, ?);""", insert_list)
        self._conn.commit()

    def delete_serialports_by_devid(self, dev_id):
        """
        Deletes a device's serial ports from serialports table.
        :param dev_id: Device id to delete
        :return: None
        """
        c = self._conn.cursor()
        c.execute("""
            delete from serialports where dev_id=?;
        """, [dev_id])
        self._conn.commit()

    @staticmethod
    def ip_version(addr):
        """
        Given an IPvX address, solves for X
        :param addr: IP address as str()
        :return: 4 if IPv4, 6 if IPv6, None if not a valid address
        """
        try:
            i = ipaddress.ip_address(addr)
        except ValueError:
            # Fix: ip_address() raises a plain ValueError for unparseable
            # strings; the original caught only AddressValueError, letting
            # the exception escape for garbage input.
            return None
        if type(i) is ipaddress.IPv4Address:
            return 4
        if type(i) is ipaddress.IPv6Address:
            return 6
        return None

    def assign_tunnelport(self, dev_id):
        """
        Assigns a TCP listening port to a device for a reverse SSH tunnel.
        If the device already holds a port, that port is reused.

        Args:
            dev_id: device requesting a port
        Returns: dict with "server" and "port" keys describing the tunnel
        """
        current_port = self.get_tunnelport_by_devid(dev_id)
        if current_port is not None:
            return {
                "server": TUNNEL_SERVER,
                "port": current_port
            }
        c = self._conn.cursor()
        # lock the database while we look for a free port
        c.execute("BEGIN EXCLUSIVE TRANSACTION;")
        # NOTE(review): the CTE scans 200 candidate ports starting at 2220
        # and ignores LISTENER_PORT_BASE -- confirm which base is intended.
        c.execute("""
            UPDATE devices set tunnelport=(
            WITH RECURSIVE
                cnt(tunnelport) AS (
                    SELECT 2220
                    UNION ALL
                    SELECT tunnelport+1 FROM cnt
                    LIMIT 200
                ) SELECT tunnelport
                FROM cnt
                WHERE tunnelport
                NOT IN (SELECT
                            tunnelport
                        FROM devices
                        WHERE tunnelport IS NOT NULL )
                LIMIT 1 )
            WHERE dev_id=?;""", [dev_id])
        self._conn.commit()
        # Fix: report the port the UPDATE actually assigned, under the same
        # keys ("server"/"port") as the early-return path above. The
        # original returned a locally computed, never-assigned port number
        # under different keys ("tunnelserver"/"tunnelport") that the agent
        # does not read.
        return {
            "server": TUNNEL_SERVER,
            "port": self.get_tunnelport_by_devid(dev_id)
        }

    def get_tunnelport_by_devid(self, dev_id):
        """
        Fetch the tunnel port currently assigned to a device.
        :param dev_id: Device id to look up
        :return: port number, or None if no port is assigned
        """
        c = self._conn.cursor()
        c.execute("select tunnelport from devices where dev_id=?;", [dev_id])
        results = c.fetchall()
        if len(results) > 0:
            return results[0][0]
        return None

    def get_last_tunnelport_from_db(self, c=None):
        """
        Fetch the highest tunnel port currently assigned to any device.
        :param c: optional cursor to reuse (e.g. inside a transaction)
        :return: highest assigned port, or None if none are assigned
        """
        if c is None:
            c = self._conn.cursor()
        c.execute("select tunnelport from devices order by tunnelport desc limit 1;")
        results = c.fetchall()
        if len(results) > 0:
            return results[0][0]
        else:
            return None

    def delete_listener_port_by_devid(self, dev_id):
        """
        Removes a device's SSH tunnel port from devices table.
        :param dev_id: Device id to delete
        :return: None
        """
        c = self._conn.cursor()
        c.execute("""
            update devices set
                tunnelport=NULL
            WHERE dev_id=?;""", [dev_id])
        self._conn.commit()

    def close(self):
        """
        Close database file
        :return: None
        """
        self._conn.close()
61,932 | utkuozsan/picon | refs/heads/master | /agent/piconagent/utils.py | import requests,sys,os
import netifaces
import ipaddress
import socket
import json
import glob
from pyroute2 import IPRoute
import logging
def getPorts():
    """Scan /dev for serial devices; return their names without '/dev/'."""
    patterns = ('/dev/ttyS*', '/dev/ttyUSB*', '/dev/ttyACM*')
    found = []
    for pattern in patterns:
        found.extend(glob.glob(pattern))
    return [name.replace('/dev/', '') for name in found]
def getInterfaceState(ifname):
    """Return True if the named interface is operationally UP, else False.

    Raises:
        Exception: if querying the interface over netlink fails.
    """
    try:
        ip = IPRoute()
        state = ip.get_links(ip.link_lookup(ifname=ifname))[0].get_attr('IFLA_OPERSTATE')
        ip.close()
    except Exception as e:
        raise Exception("getInterfaceState: Collecting interface status for %s failed: %s" % (ifname, str(e)))
    return state == "UP"
def getInterfaces():
    """Collect per-interface state and usable addresses.

    Returns a dict keyed by interface name, each value holding 'state'
    (bool, operationally UP or not) and 'addrs' (list of address strings).
    Link-layer, loopback and link-local addresses are excluded.
    """
    result = {}
    for ifname in netifaces.interfaces():
        info = {'addrs': []}
        families = netifaces.ifaddresses(ifname)
        info['state'] = getInterfaceState(ifname)
        for family, entries in families.items():
            if family == netifaces.AF_LINK:
                continue
            for entry in entries:
                # netifaces may append '%<iface>' to link-local addresses,
                # which ipaddress does not understand, so keep only the
                # address portion
                parsed = ipaddress.ip_address(entry['addr'].split('%')[0])
                # ipaddress greatly simplifies implementing this logic
                if parsed.is_loopback or parsed.is_link_local:
                    continue
                info['addrs'].append(str(parsed))
        result[ifname] = info
    return result
def getHostname():
    """Return this machine's hostname as reported by the socket library."""
    return socket.gethostname()
def getSerial():
    """Read the Raspberry Pi CPU serial number from /proc/cpuinfo.

    Returns:
        The 16-character serial, "0000000000000000" if /proc/cpuinfo has no
        Serial line (e.g. non-Pi hardware), or "ERROR000000000" if the file
        could not be read.
    """
    # Extract serial from cpuinfo file
    cpuserial = "0000000000000000"
    try:
        # Fix: `with` guarantees the handle is closed even if reading fails
        # (the original leaked it on error), and the bare `except` is
        # narrowed to I/O failures.
        with open('/proc/cpuinfo', 'r') as f:
            for line in f:
                if line[0:6] == 'Serial':
                    cpuserial = line[10:26]
    except OSError:
        cpuserial = "ERROR000000000"
    return cpuserial
def main():
    """Assemble host facts (hostname, serial, interfaces, ports) and dump them as JSON to stderr."""
    # buffer to assemble the POST body
    body = {}
    body['hostname'] = getHostname()
    body['sn'] = getSerial()
    body['interfaces'] = getInterfaces()
    body['ports'] = getPorts()
    jsonbody = json.dumps(body,sort_keys=True,indent=2)
    # Written to stderr so stdout stays free for piping.
    sys.stderr.write(jsonbody)

if __name__ == "__main__":
    main()
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,933 | utkuozsan/picon | refs/heads/master | /server/server.py | from datetime import datetime
from flask import Flask, request, jsonify, g, render_template
from werkzeug.exceptions import BadRequest, Unauthorized
import collections
import sqlite3
from db import DB
app = Flask(__name__)
def get_db():
    """Return the request-scoped DB handle, creating and caching it on first use."""
    if getattr(g, '_database', None) is None:
        g._database = DB()
    return g._database
@app.route("/")
def hello():
    # Static landing page; dynamic content lives in the device views below.
    return render_template("index.html")
@app.route("/devices")
def devices():
    """Render the device list, annotating each row with liveness status."""
    db = get_db()
    devices = db.get_device_details()
    for device in devices:
        last_updated = datetime.strptime(device['last_updated'],
                                         "%Y-%m-%d %H:%M:%S.%f")
        # total_seconds() is required here: timedelta.seconds wraps at one
        # day, so a device unseen for 24h+ silently flipped back to "alive".
        elapsed = int((datetime.utcnow() - last_updated).total_seconds())
        device['seen_ago'] = "Status expires in {}s".format(
            device['holdtime'] - elapsed)
        if elapsed > device['holdtime']:
            device['status'] = "dead"
        else:
            device['status'] = "alive"
    return render_template("devices.html", devices=devices)
@app.route('/device/<int:dev_id>')
def device(dev_id):
    """Render the detail page for a single device."""
    db = get_db()
    device = db.get_device_details(dev_id)
    # Removed the leftover debug print(device) from the request path.
    # NOTE(review): device[0] raises IndexError for unknown ids — presumably
    # this should 404 instead; confirm desired behavior.
    return render_template('device.html', device=device[0])
@app.route('/api/register', methods=['POST'])
def register():
    """Accept a device registration/heartbeat JSON body and upsert it."""
    db = get_db()
    data = request.get_json()
    if data is None:
        # get_json() yields None for a missing or non-JSON body; fail loudly
        # (BadRequest is already imported at the top of this module) instead
        # of passing None into the DB layer. Debug print removed.
        raise BadRequest("request body must be JSON")
    response = db.update_device(data)
    return jsonify(response)
@app.route("/api/devices")
def api_devices():
    """JSON dump of all known devices."""
    return jsonify(get_db().get_device_details())
@app.teardown_appcontext
def close_connection(exception):
    """Close the request-scoped DB handle, if one was ever opened.

    The original called get_db() here, which *created* a brand-new DB
    connection on every request that never touched the database, only to
    close it immediately. Only close what already exists on `g`.
    """
    db = getattr(g, '_database', None)
    if db is not None:
        db.close()
if __name__ == "__main__":
    # Development entry point only; debug=True must not be used in production.
    app.run(host="::", debug=True)
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,934 | utkuozsan/picon | refs/heads/master | /server/tests/update_device.py | from unittest import TestCase
from .. import DB
import json
class TestUpdate_device(TestCase):
    """Round-trip test: insert a device from agent-style JSON, query it, delete it."""
    def test_update_device(self):
        db = DB()
        # Fixture mirrors the body POSTed by the agent: hostname, serial,
        # per-interface addresses/state and a serial-port list.
        json_str = """
{
"hostname": "test-hostname",
"sn": "testsn123",
"interfaces": {
"lo": {
"state": true,
"addrs": [ "127.0.0.1" ]
},
"eth0": {
"state": true,
"addrs": [ "192.0.2.1", "10.10.1.1", "2000:1:2:3::1" ]
},
"eth1": {
"state": false,
"addrs": [ "172.16.1.1" ]
}
},
"ports": [
"ttyUSB0",
"ttyUSB1"
]
}
"""
        dev_data = json.loads(json_str)
        db.update_device(dev_data)
        dev_id = db.get_devid_by_sn('testsn123')
        # NOTE(review): dev_detail is never asserted on — presumably a smoke
        # call; and deletion happens *before* the assertion, so a failing
        # assert still leaves the DB clean. Confirm this ordering is intended.
        dev_detail = db.get_device_details()
        db.delete_device_by_devid(dev_id)
        self.assertIsNotNone(dev_id)
| {"/server/tests/initialize.py": ["/server/db.py"]} |
61,958 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/accounts/forms.py | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
from django.core.exceptions import ValidationError
#create a custom form and use forms.Form since we dont have to save to database
class CustomUserCreationForm(forms.Form):
    """Registration form (plain forms.Form — nothing is saved until save()).

    Validates username/email uniqueness and password confirmation, then
    creates the User through the auth manager so the password is hashed.
    """
    username = forms.CharField(label='Enter Username', min_length=4,
        max_length=150, widget=forms.TextInput(attrs={'class': 'form-control'}))
    email = forms.EmailField(label='Enter email', widget=forms.EmailInput(attrs={'class': 'form-control'}))
    # Declared exactly once. The original declared password1/password2 twice;
    # the later duplicates silently overrode these definitions.
    password1 = forms.CharField(label='Enter password', widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    password2 = forms.CharField(label='Confirm password', widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    # Removed the lowercase `class meta`: a plain forms.Form ignores
    # Meta.widgets entirely, so it was dead code — widgets are set on the
    # fields above.

    # ensures the username doesn't already exist
    def clean_username(self):
        """Normalize to lowercase and reject usernames already taken."""
        username = self.cleaned_data['username'].lower()
        # exists() avoids counting every matching row just for a boolean.
        if User.objects.filter(username=username).exists():
            raise ValidationError("Username already exists")
        return username

    # ensures email isn't already used
    def clean_email(self):
        """Normalize to lowercase and reject emails already registered."""
        email = self.cleaned_data['email'].lower()
        if User.objects.filter(email=email).exists():
            raise ValidationError("Email already exists")
        return email

    # makes sure that the passwords match
    def clean_password2(self):
        """Ensure both password entries match (skipped if either is missing)."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        if password1 and password2 and password1 != password2:
            raise ValidationError("Password don't match")
        return password2

    # saves the data to be part of the User model
    def save(self, commit=True):
        """Create and return the User; `commit` kept for API compatibility."""
        user = User.objects.create_user(
            self.cleaned_data['username'],
            self.cleaned_data['email'],
            self.cleaned_data['password1']
        )
        return user
class ProfileForm(forms.ModelForm):
    """Edits the event fields of a user's Profile (the user FK is set elsewhere)."""
    class Meta:
        model = Profile
        fields = ['name_of_event', 'date_of_event']
        # Bootstrap styling on both inputs.
        widgets = {
            'name_of_event': forms.TextInput(attrs={'class': 'form-control'}),
            'date_of_event': forms.DateTimeInput(attrs={'class': 'form-control'}),
        }
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,959 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/couples/models.py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Couple(models.Model):
    """One row per registered couple, linked 1:1 to the auth User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # person1_first_name = models.CharField(max_length=50)
    # person1_last_name = models.CharField(max_length=50)
    person2_first_name = models.CharField(max_length=50)
    person2_last_name = models.CharField(max_length=50)
    date_of_event = models.DateTimeField()
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,960 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/accounts/urls.py | from django.urls import path
from .views import update_profile
urlpatterns = [
path('register/', update_profile, name='register'),
]
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,961 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/couples/apps.py | from django.apps import AppConfig
class CouplesConfig(AppConfig):
    # Django app-registry entry for the `couples` app.
    name = 'couples'
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,962 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/couples/urls.py | from django.urls import path
from . import views
from django.contrib.auth import views as auth_views
app_name = 'couples'
urlpatterns = [
path('', views.landing_page, name='home'),
]
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,963 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/accounts/views.py | from django.shortcuts import render
from django.views import generic
from django.contrib.auth.forms import UserCreationForm
from django.urls import reverse_lazy
from .forms import ProfileForm, CustomUserCreationForm
def update_profile(request):
    """Combined registration + profile-edit view.

    On POST, validates both forms and saves the new User (whose Profile the
    post_save signal auto-creates) plus the profile fields; on success,
    redirects to '/'. On GET (or invalid POST) renders the forms.
    """
    # Local import keeps the fix self-contained: `redirect` was never imported
    # at module level, so every successful POST raised NameError.
    from django.shortcuts import redirect

    if request.method == 'POST':
        # The original also fetched Profile.objects.get(...) here — `Profile`
        # was an undefined name (NameError) and the result was unused.
        user_form = CustomUserCreationForm(request.POST)
        profile_form = ProfileForm(request.POST, instance=request.user.profile)
        if user_form.is_valid() and profile_form.is_valid():
            user_form.save()
            profile_form.save()
            return redirect('/')
    else:
        user_form = CustomUserCreationForm()
        # Unbound form for GET; the original bound the (empty) request.POST,
        # making the page render as "submitted with errors".
        profile_form = ProfileForm(instance=request.user.profile)
    return render(request, 'registration/registration.html', {
        'user_form': user_form,
        'profile_form': profile_form
    })
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,964 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/accounts/models.py | from django.db import models
from django.contrib.auth.models import User, AbstractBaseUser
from django.db.models.signals import post_save
from django.dispatch import receiver
# from appName.models import table_name
# Create your models here.
class Profile(models.Model):
    """Per-user profile; rows are auto-created by a post_save signal on User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Fields are nullable because the row is created empty by the signal.
    name_of_event = models.CharField(max_length=50, null=True)
    date_of_event = models.DateTimeField(null=True)
    person2_first_name = models.CharField(max_length=50, null=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    # Auto-create a blank Profile the first time a User row is saved.
    if created:
        Profile.objects.create(user=instance)
# post_save.connect(create_user_profile, sender=User)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    # Re-save the related Profile whenever its User is saved.
    instance.profile.save()
| {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,965 | zackwalker/Python-Projects | refs/heads/master | /wedding/weddingidea/couples/views.py | from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic import CreateView, DetailView, UpdateView, ListView, DeleteView
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.contrib.auth import authenticate, login, logout
def landing_page(request):
    # Public home page for the couples app.
    return render(request, 'couples/landPage.html')
# login Page
# logout function
# todo/ needed venders remaining list
# vender list - small cards of each vender and a pic/address
# create search tool to find venders | {"/wedding/weddingidea/accounts/forms.py": ["/wedding/weddingidea/accounts/models.py"], "/wedding/weddingidea/accounts/urls.py": ["/wedding/weddingidea/accounts/views.py"], "/wedding/weddingidea/accounts/views.py": ["/wedding/weddingidea/accounts/forms.py"]} |
61,968 | USBdOngle/Image-2-GIF-Mosaic | refs/heads/master | /gifindexer.py | from PIL import Image
from sklearn.cluster import KMeans
import pickle
import random
import math
from tqdm import tqdm
ADD_TUPLES = lambda l1, l2: [ a + b for a, b in zip(l1, l2) ]
class GIFIndexer:
    """Index GIFs by their average RGB colour for nearest-colour lookup.

    Average colours are bucketed with KMeans; getBestGIF() finds the query
    colour's cluster and returns a random GIF whose average colour is
    closest within that cluster.
    """

    def __init__(self, maxColors, loadFile=""):
        """Create an empty index with `maxColors` clusters, or load one from `loadFile`."""
        if loadFile != "" and isinstance(loadFile, str):
            self.loadIndex(loadFile)
        else:
            self.__index = dict()
            self.__kmeans = KMeans(n_clusters=maxColors)
            self.__clusters = [ [] for _ in range(maxColors) ]

    def __frameAvgPx(self, frame):
        """Return the average (R, G, B) of one frame as a list of floats."""
        avgRGB = [0, 0, 0]
        for row in range(frame.height):
            for col in range(frame.width):
                # PIL's getpixel takes (x, y) == (col, row); the original
                # passed (row, col), which mis-sampled — or raised
                # IndexError on — non-square frames.
                avgRGB = ADD_TUPLES(avgRGB, frame.getpixel((col, row)))
        return [ avg / (frame.height * frame.width) for avg in avgRGB ]

    def __gifAvgPx(self, gif=()):
        """Average the per-frame averages over a whole GIF; returns a list of ints."""
        # Default changed from a shared mutable [] to an immutable ().
        gifAvg = [0, 0, 0]
        for frame in gif:
            gifAvg = ADD_TUPLES(gifAvg, self.__frameAvgPx(frame))
        return [ int(avg / len(gif)) for avg in gifAvg ]

    # return tuple in T with minimal Euclidean distance to t1
    def __minRGBDistance(self, t1=(0, 0, 0), T=()):
        """Return the element of T closest to t1 (first one wins on ties)."""
        dist = lambda a, b: math.sqrt(sum([ (i - j) ** 2 for i, j in zip(a, b) ]))
        best = T[0]
        minDist = dist(t1, T[0])
        for tup in T:
            d = dist(t1, tup)
            if d < minDist:
                best = tup
                minDist = d
        return best

    def addToIndex(self, gif=(), name=""):
        """Record `name` under the GIF's average colour (several GIFs may share one)."""
        res = tuple(self.__gifAvgPx(gif))
        # setdefault replaces the membership-test-then-append dance.
        self.__index.setdefault(res, []).append(name)

    # needs to be called when done adding new indexes to build rgb lookup
    def finalize(self):
        """Cluster the indexed colours; call once after the last addToIndex()."""
        if len(self.__index) < 1:
            raise ValueError("finalize() called on an empty index")
        pixels = [ list(rgb) for rgb in self.__index.keys() ]
        # Fit on the list built above; the original rebuilt the same list twice.
        clusters = self.__kmeans.fit_predict(pixels)
        for px, clust in zip(pixels, clusters):
            self.__clusters[clust].append(px)

    def getBestGIF(self, rgb=(0, 0, 0)):
        """Return the filename of a random GIF whose average colour best matches `rgb`."""
        cluster = self.__kmeans.predict([rgb])
        pixel = self.__minRGBDistance(t1=rgb, T=self.__clusters[cluster[0]])
        return random.choice(self.__index[tuple(pixel)])

    # write index data to file
    def saveIndex(self, file=""):
        """Pickle the index, clusters and fitted KMeans model to `file`."""
        saveData = { 'kmeans': self.__kmeans, 'clusters': self.__clusters, 'index': self.__index }
        # "with" closes the handle; the original left that to the GC.
        with open(file, "wb") as fh:
            pickle.dump(saveData, fh)

    # load index data from file
    def loadIndex(self, file=""):
        """Restore index state previously written by saveIndex()."""
        with open(file, "rb") as fh:
            loadData = pickle.load(fh)
        self.__kmeans = loadData['kmeans']
        self.__clusters = loadData['clusters']
        self.__index = loadData['index']
if __name__ == "__main__":
    # Smoke test: load an existing index, re-finalize/save it, query one colour.
    # NOTE(review): calling finalize() on an index loaded from disk appears to
    # re-append cluster members — confirm this double-finalize is intended.
    testIndexer = GIFIndexer(512, "gifs/index")
    testIndexer.finalize()
    testIndexer.saveIndex("gifs/index")
    res = testIndexer.getBestGIF((178,100,255))
    print(res)
61,969 | USBdOngle/Image-2-GIF-Mosaic | refs/heads/master | /gifscraper.py | """
SIMPLE SCRIPT FOR SCRAPING, FORMATTING, AND CREATING INVERTED INDICES FOR GIFS
"""
from PIL import Image, ImageSequence
import requests as req
import json
from tqdm import tqdm
from os import remove
from time import sleep
from gifindexer import GIFIndexer
POLITENESS = 2 # time to sleep between API queries
# user parameters
GIFS_TO_PROCESS = 1000 # max number of GIFS to scrape before purposefully exiting
GIF_TARGET_SIZE = (32, 32) # (width, height) of saved GIFS, if this parameter is changed ALL previous data must be deleted
QUERY = "yes" # query to use when scraping GIFs, best to run multiple times with different QUERY values
TEMP_DIR = "temp/" # dir where work is done, can't be same as GIFS_DIR
GIFS_DIR = "gifs/" # final output location
INDEX_FILE = GIFS_DIR + "index" # index data, generally doesn't need to be changed
MAX_COLORS = 512 # don't modify unless you know what you're doing
# API query parameters
API_KEY = "" # <--- tenor.com GIF private key; must be filled in before running
LOCALE = "en_US"
CONTENT_FILTER = "high"
MEDIA_FILTER = "minimal"
AR_RANGE = "standard"
LIMIT = 50 # max value
# Base request URL; the changing "pos" cursor is appended per request below.
API_URL = "https://api.tenor.com/v1/random?key={}&q={}&locale={}&contentfilter={}&media_filter={}&ar_range={}&limit={}" \
    .format(API_KEY, QUERY, LOCALE, CONTENT_FILTER, MEDIA_FILTER, AR_RANGE, LIMIT)
if __name__ == "__main__":
    # Resume (or start) an index, then scrape square GIFs from tenor until
    # GIFS_TO_PROCESS have been downloaded, resized, indexed and saved.
    indexer = GIFIndexer(MAX_COLORS, loadFile=INDEX_FILE)
    gifs_processed = 0
    nextPos = "0" # string for tenor API to continue search
    while gifs_processed < GIFS_TO_PROCESS:
        res = req.get(API_URL + "&pos={}".format(nextPos))
        if res.status_code != 200:
            # Fixed: the original read res.stats_code here, so any non-200
            # response raised AttributeError instead of printing this message.
            print("Got response code {} exiting...".format(res.status_code))
            exit(1)
        gifs = json.loads(res.content)
        if 'next' not in gifs:
            print("ran out of results... exiting")
            break
        else:
            nextPos = gifs['next']
        print("Processing next {} gifs".format(LIMIT))
        for result in tqdm(gifs['results']):
            gif_media = result['media'][0]['gif']
            if gif_media['dims'][0] != gif_media['dims'][1]:
                continue # only want square gifs (aspect ratio 1:1)
            download = req.get(gif_media['url'], allow_redirects=True)
            if download.status_code != 200:
                continue # skip this one
            gif_fname = "{}{}.gif".format(TEMP_DIR, result['id'])
            # "with" closes the handle; open(...).write(...) leaked it.
            with open(gif_fname, "wb") as fh:
                fh.write(download.content) # save to temp directory
            # resize image with resampling to final directory
            with Image.open(gif_fname) as im:
                output = []
                for frame in ImageSequence.Iterator(im):
                    output.append(frame.resize(GIF_TARGET_SIZE).convert(mode="RGB"))
                final_name = "{}{}".format(GIFS_DIR, gif_fname[gif_fname.find('/')+1:])
                indexer.addToIndex(output, final_name)
                output[0].save(final_name, save_all=True, append_images=output[1:])
            remove(gif_fname) # delete temp file
            gifs_processed += 1
        print("{} / {} gifs acquired!".format(gifs_processed, GIFS_TO_PROCESS))
        sleep(POLITENESS)
    indexer.finalize()
    indexer.saveIndex(INDEX_FILE)
| {"/gifscraper.py": ["/gifindexer.py"], "/main.py": ["/gifindexer.py", "/gifscraper.py"]} |
61,970 | USBdOngle/Image-2-GIF-Mosaic | refs/heads/master | /main.py | from gifindexer import GIFIndexer, ADD_TUPLES
from gifscraper import GIFS_DIR, INDEX_FILE, GIF_TARGET_SIZE, MAX_COLORS
from PIL import Image
from tqdm import tqdm
# input parameters
INPUT_IMAGE = "inputs/lenna.png"
FACTOR = 4 # amount of downsampling to apply to input (increase aggressively for large inputs)
FRAME_COUNT = 12 # frames in the output GIF
FRAME_DURATION = 100 # ms
INPUT_DIR = "inputs/"
OUTPUT_DIR = "outputs/"
# NOTE(review): str.replace swaps *every* occurrence of the extension text,
# not just the trailing one — fine for normal names, verify for odd paths.
OUTPUT_IMAGE = OUTPUT_DIR + INPUT_IMAGE.replace(INPUT_IMAGE[INPUT_IMAGE.rfind('.'):], ".gif")
class GIFWrapper():
    """Keeps a GIF file open and steps through its frames, looping at the end."""
    def __init__(self, path):
        self.__nextFrameIdx = 1  # frame index to seek to on the next seekNext()
        self.__im = Image.open(path)
    def __del__(self):
        # Best-effort close when the wrapper is garbage collected.
        self.__im.close()
    def getIm(self):
        # Return the *current* frame as an RGB image (a converted copy).
        return self.__im.convert(mode="RGB")
    def seekNext(self):
        """Advance one frame; wrap back to frame 0 past the last frame."""
        try:
            self.__im.seek(self.__nextFrameIdx)
            self.__nextFrameIdx += 1
        except:
            # Presumably EOFError from PIL marks the end of the sequence;
            # rewind and restart the cycle.
            self.__im.seek(0)
            self.__nextFrameIdx = 1
def img_2_bitmap(img="", factor=FACTOR):
    """Downsample `img` into a 2-D grid of average-RGB blocks of side `factor`.

    Trailing rows/columns that don't fill a complete block are cropped.
    Returns a list of rows, each a list of [r, g, b] int triples.
    """
    with Image.open(img) as im:
        bitmap = []
        im = im.convert(mode="RGB")
        usable_h = im.height - (im.height % factor)
        usable_w = im.width - (im.width % factor)
        for top in range(0, usable_h, factor):
            current_row = []
            for left in range(0, usable_w, factor):
                # Average the factor x factor block anchored at (left, top).
                total = (0, 0, 0)
                for y in range(top, top + factor):
                    for x in range(left, left + factor):
                        total = ADD_TUPLES(total, im.getpixel((x, y)))
                current_row.append([ int(c / (factor ** 2)) for c in total ])
            bitmap.append(current_row)
        return bitmap
def rgb_2_gifpath(bitmap, index):
    """Map every RGB cell of `bitmap` to the path of its best-matching GIF."""
    path_map = []
    for bitmap_row in bitmap:
        path_map.append([ index.getBestGIF(px) for px in bitmap_row ])
    return path_map
def fill_frame(frame, path_map, cache):
    """Paint one mosaic frame: copy the current frame of each tile GIF into place.

    `cache` maps gif path -> GIFWrapper and is shared across calls so each
    GIF is opened only once and advances exactly one frame per call.
    """
    for row in tqdm(range(len(path_map))):
        for col in range(len(path_map[0])):
            path = path_map[row][col]
            if path not in cache:
                cache[path] = GIFWrapper(path)
            gifIm = cache[path].getIm()
            # Top-left pixel of this tile within the output frame.
            base_y = row * GIF_TARGET_SIZE[1]
            base_x = col * GIF_TARGET_SIZE[0]
            for y in range(GIF_TARGET_SIZE[1]):
                for x in range(GIF_TARGET_SIZE[0]):
                    frame.putpixel((base_x + x, base_y + y), gifIm.getpixel((x, y)))
    # advance each gif by 1 frame
    for gif in cache.values():
        gif.seekNext()
if __name__ == "__main__":
    # Build the mosaic: map each downsampled pixel to a GIF, then render
    # FRAME_COUNT composite frames and save them as one animated GIF.
    index = GIFIndexer(MAX_COLORS, INDEX_FILE)
    gif_paths_map = rgb_2_gifpath(img_2_bitmap(INPUT_IMAGE), index)
    output_frames = []
    cache = {} # will be shared between succesive calls of fill_frame to save io
    # Image.new takes (width, height): width = columns * tile width, height =
    # rows * tile height. The original swapped the tile-size indices, which
    # only worked because GIF_TARGET_SIZE happens to be square.
    frame_size = (len(gif_paths_map[0]) * GIF_TARGET_SIZE[0], len(gif_paths_map) * GIF_TARGET_SIZE[1])
    for i in range(FRAME_COUNT):
        frame = Image.new("RGB", frame_size)
        fill_frame(frame, gif_paths_map, cache)
        output_frames.append(frame)
        print("Done frame {} / {}!".format(i+1, FRAME_COUNT))
    print("Saving output to {}".format(OUTPUT_IMAGE))
    output_frames[0].save(OUTPUT_IMAGE, save_all=True, optimize=False, append_images=output_frames[1:], loop=0, duration=FRAME_DURATION)
61,973 | atorrese/SGAGRO | refs/heads/main | /sale/migrations/0001_initial.py | # Generated by Django 3.1.3 on 2020-11-13 00:22
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the `sale` app: Client, Seller, Invoice, DetailInvoice."""

    initial = True

    dependencies = [
        # catalog.Product is required by the DetailInvoice foreign key below.
        ('catalog', '0001_initial'),
    ]

    operations = [
        # Customer master data.
        migrations.CreateModel(
            name='Client',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('Names', models.CharField(max_length=80, verbose_name='Nombres')),
                ('SurNames', models.CharField(max_length=80, verbose_name='Apellidos')),
                ('IdentificationCard', models.CharField(max_length=10, verbose_name='Cédula')),
                ('City', models.CharField(max_length=80, verbose_name='Ciudad')),
                ('Address', models.CharField(blank=True, max_length=120, null=True, verbose_name='Dirección')),
                ('Phone', models.CharField(max_length=88, verbose_name='Telefono')),
                ('Email', models.EmailField(max_length=200, verbose_name='Correo Electronico')),
            ],
            options={
                'verbose_name': 'Cliente',
                'verbose_name_plural': 'Clientes',
                'ordering': ('-created_at',),
            },
        ),
        # Salesperson master data.
        migrations.CreateModel(
            name='Seller',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('Names', models.CharField(max_length=80, verbose_name='Nombres')),
                ('SurNames', models.CharField(max_length=80, verbose_name='Apellidos')),
                ('IdentificationCard', models.CharField(max_length=10, verbose_name='Cédula')),
                ('Birthdate', models.DateField(blank=True, null=True, verbose_name='Fecha de Nacimiento')),
                ('City', models.CharField(max_length=80, verbose_name='Ciudad')),
                ('Address', models.CharField(max_length=120, verbose_name='Dirección')),
                ('Phone', models.CharField(max_length=88, verbose_name='Telefono')),
                ('Email', models.EmailField(max_length=200, verbose_name='Correo Electronico')),
            ],
            options={
                'verbose_name': 'Vendedor',
                'verbose_name_plural': 'Vendedores',
                'ordering': ('-created_at',),
            },
        ),
        # Invoice header; FKs use PROTECT so clients/sellers with invoices
        # cannot be deleted.
        migrations.CreateModel(
            name='Invoice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('DateInvoice', models.DateField(default=django.utils.timezone.now)),
                ('WeekInvoice', models.PositiveIntegerField(blank=True, null=True, verbose_name='Semana Factura')),
                ('SubTotal', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, null=True)),
                ('TotalPay', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, null=True)),
                ('Discount', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, null=True)),
                ('Num_Porcent_Des', models.IntegerField(blank=True, null=True)),
                ('ClientId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='sale.client', verbose_name='Cliente')),
                ('SellerId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='sale.seller', verbose_name='Vendedor')),
            ],
            options={
                'verbose_name': 'Factura',
                'verbose_name_plural': 'Facturas',
            },
        ),
        # Invoice line items; CASCADE from the invoice, PROTECT on products.
        migrations.CreateModel(
            name='DetailInvoice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('Quantity', models.IntegerField(default=1)),
                ('Price', models.DecimalField(decimal_places=2, max_digits=19)),
                ('Cost', models.DecimalField(decimal_places=2, max_digits=19)),
                ('Utility', models.DecimalField(decimal_places=2, max_digits=19)),
                ('Total', models.DecimalField(decimal_places=2, max_digits=19)),
                ('InvoiceId', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sale.invoice', verbose_name='Factura')),
                ('ProductId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='catalog.product', verbose_name='Producto')),
            ],
            options={
                'verbose_name': 'Detalle Factura',
                'verbose_name_plural': 'Detalles de Factura',
            },
        ),
    ]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,974 | atorrese/SGAGRO | refs/heads/main | /SGAGRO/funciones2.py | STATUS_PAY =(
(1,'Solicitado'),
(2,'Entregado'),
(3,'Pagado'),
)
METHOD_PAYEMENT =(
(1,'Credito'),
(2,'Contado')
) | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,975 | atorrese/SGAGRO | refs/heads/main | /sale/urls.py | from django.urls import path
import sale.client.views as Client
import sale.seller.views as Seller
import sale.invoice.views as Invoice
import sale.order.views as Order
urlpatterns = [
    # Routes client (CRUD)
    path(route='client/', view= Client.Index.as_view(),name='client.index'),
    path(route='client/create', view= Client.Create.as_view(),name='client.store'),
    path(route='client/edit/<pk>', view= Client.Update.as_view(),name='client.update'),
    path(route='client/delete/<pk>', view= Client.Delete.as_view(),name='client.delete'),
    # Routes seller (CRUD)
    path(route='seller/', view=Seller.Index.as_view(), name='seller.index'),
    path(route='seller/create', view=Seller.Create.as_view(), name='seller.store'),
    path(route='seller/edit/<pk>', view=Seller.Update.as_view(), name='seller.update'),
    path(route='seller/delete/<pk>', view=Seller.Delete.as_view(), name='seller.delete'),
    # Routes invoice (CRUD plus a read-only show view)
    path(route='invoice/', view=Invoice.Index.as_view(), name='invoice.index'),
    path(route='invoice/create', view=Invoice.Create.as_view(), name='invoice.store'),
    path(route='invoice/edit/<pk>', view=Invoice.Update.as_view(), name='invoice.update'),
    path(route='invoice/show/<pk>', view=Invoice.Show.as_view(), name='invoice.show'),
    path(route='invoice/delete/<pk>', view=Invoice.Delete.as_view(), name='invoice.delete'),
    # Routes Order Sale (list/detail plus a status-change endpoint)
    path(route='order/', view=Order.Index.as_view(), name='ordersale.index'),
    path(route='order/show/<pk>', view=Order.Show.as_view(), name='ordersale.show'),
    path(route='order/change/', view=Order.change, name='ordersale.change'),
]
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,976 | atorrese/SGAGRO | refs/heads/main | /security/urls.py | """Security Urls"""
# Django
from django.urls import path
# Views
import security.views as security
import security.business.views as Bussiness
# URL routes for the security app: home/auth/profile pages, business
# settings (create + update), an inbound webhook, and the dashboard.
urlpatterns = [
    path(route='', view=security.HomeView.as_view(), name='home'),
    path(route='profile/', view=security.ProfileView.as_view(), name='profile'),
    path(route='login/', view=security.LoginView.as_view(), name='login'),
    path(route='logout/', view=security.LogoutView.as_view(), name='logout'),
    path(route='register/', view=security.RegisterView.as_view(), name='register'),
    # Business settings: update takes the object's <pk>, create has no pk.
    path(route='setting/bussiness/<pk>',view=Bussiness.Update.as_view(),name='setting.update'),
    path(route='setting/bussiness/',view=Bussiness.Create.as_view(),name='setting.store'),
    # NOTE(review): route name 'weekhook' looks like a typo for 'webhook';
    # left unchanged because reverse()/{% url %} callers may depend on it — confirm before renaming.
    path(route='webhook/',view=security.Webhook.as_view(),name='weekhook'),
    # Function-based view (all the others are class-based).
    path('dashboard/',security.Filterdashboard,name='dashboard')
]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,977 | atorrese/SGAGRO | refs/heads/main | /security/models.py | from django.db import models
from django.contrib.auth.models import Group,User
from django.utils.safestring import mark_safe
from utils.mixins import OldDataMixin
#Clase Base Para eliminacion
class ModelBase(models.Model):
    """Abstract base model with audit timestamps and an active flag.

    `status` defaults to True and presumably marks logical (soft)
    deletion when set to False — confirm against the views that filter
    on it.
    """

    created_at = models.DateTimeField(auto_now_add=True)  # set once, on insert
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on every save()
    status = models.BooleanField(default=True)

    class Meta:
        abstract = True
#Clase Abstracta para Persona
'''class Persona(models.Model):
Names = models.CharField(verbose_name='Nombres',max_length=80)
SurNames = models.CharField(verbose_name='Apellidos',max_length=80)
IdentificationCard = models.CharField(verbose_name='Cédula',max_length=10)
Birthdate = models.DateField(verbose_name='Fecha de Nacimiento',null=True,blank=True)
Ciudad = models.ForeignKey(Canton, verbose_name='Ciudad',on_delete=models.PROTECT)
Address = models.CharField(verbose_name='Dirección',max_length=120)
References= models.CharField(verbose_name='Referencia Domiciliaria',max_length=150)
Phone = models.CharField(verbose_name='Telefono',max_length=88)
Email = models.EmailField(verbose_name= 'Correo Electronico',max_length=200)
class Meta:
abstract = True'''
class Module(models.Model):
    """A navigable module (menu entry) of the application."""

    url = models.CharField(max_length=100)
    name = models.CharField(max_length=100)
    icon = models.CharField(max_length=100)
    description = models.CharField(max_length=100)
    available = models.BooleanField(default=True)  # hidden from menus when False
    order = models.IntegerField(default=0)         # display position

    def __str__(self):
        return f'{self.name}'

    class Meta:
        verbose_name = 'Módulo'
        verbose_name_plural = 'Módulos'
        ordering = ('order',)
class GroupModule(models.Model):
    """Bundle of Modules granted to one or more auth Groups."""

    name = models.CharField(max_length=100)
    descripcion = models.CharField(max_length=200, blank=True)
    icon = models.CharField(null=True, blank=True, max_length=100)
    modules = models.ManyToManyField(Module)
    groups = models.ManyToManyField(Group)
    priority = models.IntegerField(null=True, blank=True)  # display ordering

    def __str__(self):
        return f'{self.name}'

    class Meta:
        verbose_name = 'Grupo de Módulos'
        verbose_name_plural = 'Grupos de Módulos'
        ordering = ('priority',)

    def module_active(self):
        """Return this group's available modules, sorted by display order."""
        available = self.modules.filter(available=True)
        return available.order_by('order')
class Business(models.Model):
    """Company (empresa) record with a required icon image."""

    name = models.CharField(max_length=100)
    alias = models.CharField(max_length=20)
    description = models.CharField(max_length=200, blank=True)
    icon = models.ImageField(upload_to='media/Business/icon/', null=False, blank=False,
                             error_messages={'required': 'Cargar Un Imagen Para El icono de la Empresa'})

    def Icon(self):
        """Render the icon as an <img> tag for the admin list display."""
        if self.icon:
            return mark_safe('<img src="%s" style="width:45px; height:45px;"/>' % self.icon.url)
        else:
            return 'imagen no disponible'
    # BUG FIX: the admin column label must be set on the Icon() display
    # method; the original set it on the `icon` ImageField instance, so the
    # admin never saw it.
    Icon.short_description = 'Icon'

    def __str__(self):
        return '{}'.format(self.name)

    class Meta:
        verbose_name = 'Empresa'
        verbose_name_plural = 'Empresas'
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,978 | atorrese/SGAGRO | refs/heads/main | /purchase/models.py | from django.db import models
from django.utils.timezone import now
from django.db import models
# Create your models here.
from SGAGRO.funciones2 import METHOD_PAYEMENT, STATUS_PAY
from catalog.models import Product
from security.models import ModelBase
class Provider(ModelBase):
    """Supplier (proveedor) the company purchases from."""

    BussinessName = models.CharField(verbose_name='Razón Social', max_length=80)
    # BUG FIX: verbose_name was a copy-paste of 'Razón Social' from the field
    # above; the label for the tax-ID field should read 'Ruc'.
    Ruc = models.CharField(verbose_name='Ruc', max_length=13)
    Phone = models.CharField(verbose_name='Telefono', max_length=80)
    Email = models.EmailField(verbose_name='Correo Electronico', max_length=80)

    def __str__(self):
        return '{}'.format(self.BussinessName)

    class Meta:
        verbose_name = 'Proveedor'
        verbose_name_plural = 'Proveedores'
        ordering = ('-created_at',)

    def have_orders(self):
        """True when at least one purchase Order references this provider."""
        return self.order_set.exists()
class Order(ModelBase):
    """A purchase order placed with a Provider."""

    ProviderId = models.ForeignKey(Provider, verbose_name='Proveedor', on_delete=models.PROTECT)
    DateOrder = models.DateField(default=now)
    WeekOrder = models.PositiveIntegerField(verbose_name='Semana Orden', blank=True, null=True)
    DelieverOrder = models.DateField(null=True, blank=True)
    # Disabled for now: payment method / payment status tracking.
    # PaymentMethod = models.IntegerField(choices=METHOD_PAYEMENT, blank=True, null=True)
    # StatusPay = models.IntegerField(choices=STATUS_PAY, blank=True, null=True)
    Delivery = models.BooleanField(default=False)
    TotalPay = models.DecimalField(max_digits=19, decimal_places=2)

    def __str__(self):
        return f'Fecha: {self.DateOrder} Total:{self.TotalPay}'

    class Meta:
        verbose_name = 'Pedido de Compra'
        verbose_name_plural = 'Pedidos de Compras'

    def get_Details(self):
        """All DetailOrder rows belonging to this order."""
        return DetailOrder.objects.filter(OrderId=self)

    def delete_detail(self):
        """Delete every detail line, rolling its quantity back out of stock."""
        for line in self.detailorder_set.all():
            product = line.ProductId
            product.Stock -= line.Quantity
            product.save()
            line.delete()
class DetailOrder(ModelBase):
    """One product line on a purchase Order."""

    ProductId = models.ForeignKey(Product, verbose_name='Producto', on_delete=models.PROTECT)
    OrderId = models.ForeignKey(Order, verbose_name='Orden de Compra', on_delete=models.PROTECT)
    Quantity = models.IntegerField(default=1)
    Price = models.DecimalField(max_digits=19, decimal_places=2)
    Discount = models.DecimalField(max_digits=19, decimal_places=2, default=0.00)
    Total = models.DecimalField(max_digits=19, decimal_places=2)

    def __str__(self):
        return f'{self.ProductId.Name}'

    class Meta:
        verbose_name = 'Detalle Pedido de Compra'
        verbose_name_plural = 'Detalles de Pedidos de Compra'
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,979 | atorrese/SGAGRO | refs/heads/main | /purchase/order/forms.py | """ Client Forms """
# Django
from django import forms
# App
from purchase.models import Order,Provider
from SGAGRO.funciones2 import METHOD_PAYEMENT,STATUS_PAY
class OrderForm(forms.ModelForm):
    """Form and validation for purchase Orders.

    Only active providers (status=True) are selectable.
    """

    ProviderId = forms.ModelChoiceField(queryset=Provider.objects.filter(status=True))
    DateOrder = forms.DateField()
    # Disabled along with the matching model fields:
    # PaymentMethod = forms.ChoiceField(choices=METHOD_PAYEMENT, required=False)
    # StatusPay = forms.ChoiceField(choices=STATUS_PAY, required=False)
    TotalPay = forms.DecimalField()

    def clean(self):
        # No cross-field rules yet; defer entirely to the parent validation.
        return super().clean()

    class Meta:
        model = Order
        fields = ['ProviderId', 'DateOrder', 'TotalPay']
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,980 | atorrese/SGAGRO | refs/heads/main | /sale/client/forms.py | """ Client Forms """
# Django
from django import forms
# App
from sale.models import Client
class ClientForm(forms.ModelForm):
    """Form and validation rules for Client records."""

    Names = forms.CharField(min_length=2)
    SurNames = forms.CharField(min_length=2)
    IdentificationCard = forms.CharField(min_length=10, max_length=13)
    City = forms.CharField(min_length=2)
    Address = forms.CharField(min_length=2)
    Phone = forms.CharField(min_length=10)
    Email = forms.EmailField(min_length=10)

    def clean(self):
        # No cross-field rules yet; defer entirely to the parent validation.
        return super().clean()

    class Meta:
        model = Client
        fields = ['Names', 'SurNames', 'IdentificationCard', 'City', 'Address', 'Phone', 'Email']
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,981 | atorrese/SGAGRO | refs/heads/main | /sale/migrations/0002_invoice_statusinvoice.py | # Generated by Django 3.1.3 on 2020-12-02 03:35
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the StatusInvoice column to sale.Invoice with the workflow states
    # Solicitado(1) / Entregado(2) / Pagado(3); nullable/blank so existing
    # rows migrate without a default.

    dependencies = [
        ('sale', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='invoice',
            name='StatusInvoice',
            field=models.IntegerField(blank=True, choices=[(1, 'Solicitado'), (2, 'Entregado'), (3, 'Pagado')], null=True),
        ),
    ]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.