repo_name string | branch_name string | path string | content string | context string | import_relationships dict |
|---|---|---|---|---|---|
0-1-0/marketbot | refs/heads/master | /tests.py | import utils
import pymongo
import unittest
class MailerTestCase(unittest.TestCase):
    """Smoke test: mail the newest order via SendGrid and expect HTTP 202."""

    def test_basic(self):
        db = pymongo.MongoClient()['marketbot']
        # Fetch only the newest order (by _id, which is monotonically
        # increasing) instead of materialising the whole collection just
        # to index its last element.
        order = db.orders.find_one(sort=[('_id', pymongo.DESCENDING)])
        resp = utils.Mailer().send_order('marketbottest@gmail.com', order)
        # assertEquals is a deprecated alias; assertEqual is the real name.
        self.assertEqual(resp.status_code, 202)
| # -*- coding: utf-8 -*-
import sendgrid
import os
from sendgrid.helpers.mail import *
import re
import requests
import json
# First line of the local `domain` file: the web-admin domain for links.
# NOTE(review): the name looks like a typo for WEB_ADMIN_DOMAIN, but
# views.py imports it under this spelling, so it must not be renamed.
with open('domain') as _domain_file:  # close the handle instead of leaking it
    WED_ADMIN_DOMAIN = _domain_file.read().split('\n')[0]
def get_address(lat, lng):
    """Reverse-geocode (lat, lng) into a Russian-language street address.

    Queries the public Google Maps geocoding endpoint.  Raises IndexError
    when Google returns an empty result list (unchanged from the original
    behaviour; callers wrap this in try/except).
    """
    # Let requests build and escape the query string instead of gluing it
    # together by hand; resp.json() replaces json.loads(resp.content).
    resp = requests.get(
        'http://maps.googleapis.com/maps/api/geocode/json',
        params={'latlng': str(lat) + ',' + str(lng), 'language': 'ru'},
    )
    return resp.json().get('results')[0].get('formatted_address')
class Singleton(object):
    """Base class whose subclasses each lazily cache a single instance."""
    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            # object.__new__ must receive only the class: forwarding
            # *args/**kwargs is deprecated in Python 2.6+ and raises
            # TypeError in Python 3 when __init__ is also defined.
            cls._instance = super(Singleton, cls).__new__(cls)
        return cls._instance
class Mailer(Singleton):
    """Singleton wrapper around the SendGrid client for order e-mails."""
    # Shared client, created once at import time from SENDGRID_API_KEY.
    sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))

    def send(self, mail, subj, txt):
        """Send a plain-text e-mail; returns the SendGrid API response."""
        # `mail` arrives as the recipient address and is rebound below to
        # the sendgrid Mail object - confusing but harmless.
        from_email = Email("order@botmarket.com")
        subject = subj
        to_email = Email(mail)
        content = Content("text/plain", txt)
        mail = Mail(from_email, subject, to_email, content)
        return self.sg.client.mail.send.post(request_body=mail.get())

    def send_order(self, mail, order):
        """E-mail a plain-text summary of `order` (a Mongo order document)."""
        # Python 2: all concatenation below is on utf-8 byte strings.
        res = 'Заказ\n====\n\n\n'
        res += '\n'.join(i['name'].encode('utf-8') + ' x ' + str(i['count']) for i in order['items'])
        res += '\n-----\n Итого: ' + str(order['total']) + ' руб.'
        res += '\n-----\n Детали доставки: \n'
        try:
            # delivery keys/values are normally unicode -> encode to utf-8
            res += '\n\n'.join(k.encode('utf-8') + ': ' + v.encode('utf-8') for k, v in order['delivery'].items())
        except:
            # fallback when values are already byte strings (encode would fail)
            res += '\n\n'.join(k + ': ' + v for k, v in order['delivery'].items())
        res = res.replace('Ваш', '')
        return self.send(mail, 'Новый заказ!', res)
def striphtml(data):
    """Replace simple HTML tags (<br>, <a>/</a>, <i>, <span>/</span>...)
    with newlines and normalise a couple of HTML-paste characters."""
    # NOTE(review): `<[brai].*?>` matches any tag starting with b, r, a or i,
    # which is broader than just <br>/<a>/<i> - presumably intentional here.
    p = re.compile(r'<[brai].*?>|<\/[a].*?>|<span.*?>|<\/span.*?>')
    res = p.sub('\n', data)
    # first replace: non-breaking space -> plain space; then em-dash -> hyphen
    return res.replace(' ', ' ').replace('—', '-')
| {
"imports": [
"/utils.py"
]
} |
0-1-0/marketbot | refs/heads/master | /webhook_listener.py | from gevent import monkey; monkey.patch_all()
import web
from web.wsgiserver import CherryPyWSGIServer
from app import MasterBot
# TLS material so Telegram can POST to our self-signed webhook endpoint.
CherryPyWSGIServer.ssl_certificate = "/home/ubuntu/webhook_cert.pem"
CherryPyWSGIServer.ssl_private_key = "/home/ubuntu/webhook_pkey.pem"
# Route every path to the `hello` handler; the path itself carries the token.
urls = ("/.*", "hello")
app = web.application(urls, globals())
# The master bot; its token lives in a local file named `token`.
mb = MasterBot({'token': open('token').read().replace('\n', '')})
class hello:
    """web.py handler receiving Telegram webhook POSTs for every bot."""

    def POST(self):
        # URL layout is /<bot_token>/ - the first path segment names the bot.
        bot_token = web.ctx.path.split('/')[1]
        mb.route_update(bot_token, web.data())
        return 'ok'
if __name__ == "__main__":
    # Start the web.py application (served by the CherryPy WSGI server).
    app.run()
| # -*- coding: utf-8 -*-
import gevent
from gevent import monkey; monkey.patch_all()
import telebot
from telebot import apihelper
from pymongo import MongoClient
from views import *
from utils import get_address
import botan
import time
# Botan (AppMetrica) analytics token.
# NOTE(review): secret committed in source - should move to env/config.
botan_token = 'BLe0W1GY8SwbNijJ0H-lroERrA9BnK0t'
class Convo(object):
    """One chat's conversation state for a single bot.

    Holds the routing path into the view tree and forwards incoming
    updates (text, media, callbacks) to the currently active view.
    """

    def __init__(self, data, bot):
        self.bot = bot
        self.token = bot.token
        self.db = bot.db
        self.chat_id = data['chat_id']
        self.views = {}               # top-level view registry, filled by subclasses
        self.path = data.get('path')  # persisted route, e.g. ['main_view']
        self.tmpdata = None           # scratch slot for multi-step flows (file uploads)

    def get_current_view(self):
        # Resolve the stored path against the view tree; None when unroutable.
        if self.path and self.path[0] in self.views:
            return self.views[self.path[0]].route(self.path[1:])
        return None

    def get_bot_data(self):
        """The bot's document from the `bots` collection."""
        return self.db.bots.find_one({'token': self.token})

    def _send_msg(self, msg1, markup):
        try:
            apihelper.send_message(self.token, self.chat_id, msg1, reply_markup=markup, parse_mode='HTML')
        except Exception as e:  # `as` spelling works on py2.6+ and py3
            self.bot.log_error({'func': '_send_msg', 'token': self.token, 'chat_id': self.chat_id, 'message': msg1, 'error': str(e)})

    def send_message(self, msg, markup=None):
        """Send `msg` asynchronously; <br /> is normalised to '.\\n'."""
        if self.chat_id:
            msg1 = msg.replace('<br />', '.\n')
            gevent.spawn(self._send_msg, msg1, markup)
        return

    def edit_message(self, message_id, msg, markup=None):
        """Edit an already-sent message in place (asynchronously)."""
        if self.chat_id:
            msg1 = msg.replace('<br />', '.\n')
            gevent.spawn(apihelper.edit_message_text, self.token, msg1, self.chat_id, message_id=message_id, reply_markup=markup, parse_mode='HTML')
        return

    def process_message(self, message):
        """Extract a text payload (text, contact phone, or geocoded location)
        and hand it to the active view."""
        # BUGFIX: txt was previously unbound when the message had neither
        # text, contact nor location (e.g. a voice note), raising NameError.
        txt = None
        try:
            txt = message.text.encode('utf-8')
        except Exception:  # message.text missing or None
            if hasattr(message, 'contact') and message.contact is not None:
                txt = message.contact.phone_number
            if hasattr(message, 'location') and message.location is not None:
                txt = get_address(message.location.latitude, message.location.longitude).encode('utf-8')
        if txt:
            # Echo the derived payload back to the chat (original behaviour).
            self.send_message(txt)
        self.get_current_view().process_message(txt)

    def process_photo(self, photo):
        self.get_current_view().process_photo(photo)

    def process_sticker(self, sticker):
        self.get_current_view().process_sticker(sticker)

    def process_video(self, video):
        self.get_current_view().process_video(video)

    def process_callback(self, callback):
        self.get_current_view().process_callback(callback)

    def process_file(self, doc):
        self.get_current_view().process_file(doc)

    def set_path(self, path):
        self.path = path
        # Persist asynchronously so routing never blocks on Mongo.
        gevent.spawn(self.db.convos.update_one, {'bot_token': self.bot.token, 'chat_id': self.chat_id}, {'$set': {'path': path}})

    def route(self, path):
        """Move the conversation to `path` and activate the target view."""
        self.set_path(path)
        self.get_current_view().activate()
class MarketBotConvo(Convo):
    """Customer-facing conversation tree for a shop bot."""
    def __init__(self, data, bot):
        super(MarketBotConvo, self).__init__(data, bot)
        self.current_basket = None  # BasketNode shared across menu categories
        self.views['delivery'] = OrderCreatorView(self, [], final_message='Заказ сформирован!')
        self.views['menu_cat_view'] = MenuCatView(self, msg="Выберите категорию:")
        self.views['order_info'] = OrderInfoView(self, msg="Тут должны быть условия доставки", links={'Главное меню': ['main_view']})
        self.views['contacts'] = ContactsInfoView(self, links={'Главное меню': ['main_view']})
        self.views['history'] = HistoryView(self)
        self.views['main_view'] = NavigationView(self, links={
            "Меню": ['menu_cat_view'],
            "История": ['history'],
            "Доставка": ['order_info'],
            "Контакты": ['contacts']
        }, msg="Главное меню")
        self.path = data.get('path')
        # Fresh or unroutable conversations start at the main menu.
        if not self.get_current_view():
            self.route(['main_view'])
class MainConvo(Convo):
    """Admin-facing conversation tree for the master bot (shop management)."""
    def __init__(self, data, bot):
        super(MainConvo, self).__init__(data, bot)
        self.views['main_view'] = NavigationView(
            self,
            links={
                "Добавить магазин": ['add_view'],
                "Настройки": ['settings_view'],
                "Заказы": ['orders_view'],
                "Помощь": ['help_view'],
                "Рассылка новостей": ['mailing_view']
            },
            msg="Главное меню"
        )
        self.views['help_view'] = HelpView(self, links={'Назад': ['main_view']})
        # Shop-creation wizard: collects the new bot's token, e-mail,
        # item list, delivery/contact texts and minimum order total.
        self.views['add_view'] = BotCreatorView(self, [
            TokenDetail('shop.token', name='API token.', desc='Для этого перейдите в @BotFather и нажмите /newbot для создания бота. Придумайте название бота (должно быть на русском языке) и ссылку на бот (на английском языке и заканчиваться на bot). Далее вы увидите API token, который нужно скопировать и отправить в этот чат.', ctx=self),
            EmailDetail('shop.email', name='email для приема заказов', ctx=self),
            FileDetail('shop.items', name='файл с описанием товаров или url магазина вконтакте', desc='<a href="https://github.com/0-1-0/marketbot/blob/master/sample.xlsx?raw=true">Пример файла</a>'),
            TextDetail('shop.delivery_info', name='текст с условиями доставки'),
            TextDetail('shop.contacts_info', name='текст с контактами для связи', value='telegram: @' + str(self.bot.bot.get_chat(self.chat_id).username)),
            NumberDetail('shop.total_threshold', name='минимальную сумму заказа', value='0')
        ], final_message='Магазин создан!')
        # Per-shop admin screens: first pick a bot, then open the view.
        self.views['settings_view'] = SelectBotView(self, bot_view={'link': 'settings_view', 'view': SettingsView})
        self.views['orders_view'] = SelectBotView(self, bot_view={'link': 'orders_view', 'view': OrdersView})
        self.views['mailing_view'] = SelectBotView(self, bot_view={'link': 'mailing_view', 'view': MailingView})
        self.path = data.get('path')
        if not self.get_current_view():
            self.route(['main_view'])
class Bot(object):
    """Base bot: records itself in a global registry and registers its
    Telegram webhook in the background."""
    bots = {}  # class-level registry: token -> Bot instance
    WEBHOOK_HOST = 'ec2-52-34-35-240.us-west-2.compute.amazonaws.com'
    WEBHOOK_PORT = 8443
    WEBHOOK_URL_BASE = "https://%s:%s" % (WEBHOOK_HOST, WEBHOOK_PORT)
    WEBHOOK_SSL_CERT = '/home/ubuntu/webhook_cert.pem'

    def __init__(self, token):
        self.token = token
        Bot.bots[self.token] = self
        # Webhook registration hits the Telegram API; do it off-greenlet.
        gevent.spawn(self.set_webhook, self.token)

    def log_error(self, e):
        # Base bot drops errors; MarketBot overrides this to persist them.
        pass

    def set_webhook(self, token, retries=0):
        """Point Telegram's webhook at this host; retry up to two times."""
        try:
            bot = telebot.TeleBot(token)
            bot.remove_webhook()
            # NOTE(review): the certificate file handle is never closed.
            bot.set_webhook(url=self.WEBHOOK_URL_BASE + '/' + bot.token + '/', certificate=open(self.WEBHOOK_SSL_CERT, 'r'))
            print token, 'registered'
        except Exception, e:
            self.log_error(e)
            print token, e
            if retries < 2:
                time.sleep(1)
                self.set_webhook(token, retries+1)
class MarketBot(Bot):
    """A single shop bot: owns its conversations, persists state in Mongo,
    and dispatches webhook updates to per-chat Convo objects."""
    convo_type = MarketBotConvo

    def __init__(self, data, db=None):
        """`data` must contain 'token'; may carry 'email'/'last_update_id'.

        `db` defaults to the local `marketbot` database.  (The previous
        signature default `MongoClient()['marketbot']` opened a Mongo
        connection at import time and was a shared mutable default; `None`
        keeps call-compatibility without the import-time side effect.)
        """
        super(MarketBot, self).__init__(data['token'])
        self.convos = {}
        self.db = db if db is not None else MongoClient()['marketbot']
        # BUGFIX: update_one always returns a truthy UpdateResult, so the
        # old `if not update_one(...): insert_one(...)` never inserted and
        # brand-new bots were never written.  upsert=True does both cases
        # in one atomic call.
        self.db.bots.update_one({'token': self.token}, {'$set': apihelper.get_me(self.token)}, upsert=True)
        self.email = data.get('email')
        self.last_update_id = data.get('last_update_id') or 0
        self._init_bot()
        # Rehydrate every persisted conversation for this bot.
        for convo_data in self.db.convos.find({'bot_token': self.token}):
            self.init_convo(convo_data)

    def log_error(self, e):
        gevent.spawn(self.db.errors.insert_one, {'error': str(e)})

    def _init_bot(self, threaded=False):
        """Create the telebot instance and wire all update handlers."""
        self.bot = telebot.TeleBot(self.token, threaded=threaded, skip_pending=True)
        self.bot.add_message_handler(self.goto_main, commands=['start'])
        self.bot.add_callback_query_handler(self.process_callback, func=lambda call: True)
        self.bot.add_message_handler(self.process_photo, content_types=['photo'])
        self.bot.add_message_handler(self.process_video, content_types=['video'])
        self.bot.add_message_handler(self.process_sticker, content_types=['sticker'])
        self.bot.add_message_handler(self.process_file, content_types=['document'])
        self.bot.add_message_handler(self.process_message, func=lambda message: True, content_types=['text', 'contact', 'location'])

    def init_convo(self, convo_data):
        self.convos[convo_data['chat_id']] = self.convo_type(convo_data, self)

    def get_convo(self, chat_id):
        """Return the chat's Convo, creating and persisting one on demand."""
        if chat_id not in self.convos:
            convo_data = {'chat_id': chat_id, 'bot_token': self.token}
            self.db.convos.insert_one(convo_data)
            self.init_convo(convo_data)
        return self.convos[chat_id]

    def goto_main(self, message):
        # /start handler.
        convo = self.get_convo(message.chat.id)
        convo.route(['main_view'])

    def process_callback(self, callback):
        convo = self.get_convo(callback.message.chat.id)
        gevent.spawn(convo.process_callback, callback)

    def process_message(self, message):
        convo = self.get_convo(message.chat.id)
        gevent.spawn(convo.process_message, message)

    def start_bot(self, bot_data):
        """Boot a freshly created shop bot (called by BotCreatorView)."""
        MarketBot(bot_data, self.db)

    def process_file(self, doc):
        convo = self.get_convo(doc.chat.id)
        convo.process_file(doc)

    def process_sticker(self, sticker):
        convo = self.get_convo(sticker.chat.id)
        convo.process_sticker(sticker)

    def process_video(self, video):
        convo = self.get_convo(video.chat.id)
        convo.process_video(video)

    def process_photo(self, photo):
        convo = self.get_convo(photo.chat.id)
        gevent.spawn(convo.process_photo, photo)

    def update_last_id(self):
        self.db.bots.update_one({'token': self.token}, {'$set': {'last_update_id': self.last_update_id}})

    def process_redis_update(self, update):
        """Deserialize (if needed) and process one webhook update, skipping
        anything at or below the last update id we already handled."""
        if isinstance(update, basestring):  # py2: str or unicode payload
            update = telebot.types.Update.de_json(update.encode('utf-8'))
        if update.update_id > self.last_update_id:
            self.last_update_id = update.update_id
            gevent.spawn(self.bot.process_new_updates, [update])
            gevent.spawn(self.update_last_id)
class MasterBot(MarketBot):
    """The admin bot: boots a MarketBot for every shop in the database and
    routes raw webhook payloads to the owning bot by token."""
    convo_type = MainConvo

    def process_message(self, message):
        # Fire-and-forget analytics, then normal handling.
        gevent.spawn(botan.track, botan_token, message.chat.id, {'from_user': message.from_user.username}, message.text)
        super(MasterBot, self).process_message(message)

    def __init__(self, data):
        super(MasterBot, self).__init__(data)
        # Start every already-registered shop bot (skip ourselves).
        for bot_data in self.db.bots.find():
            if bot_data['token'] != self.token:
                try:
                    MarketBot(bot_data, self.db)
                except Exception, e:
                    self.log_error(e)

    def route_update(self, token, update):
        """Dispatch one webhook update to the bot that owns `token`."""
        if token in Bot.bots:
            gevent.spawn(Bot.bots[token].process_redis_update, update)
        return
if __name__ == "__main__":
    m = MasterBot({'token': open('token').read().replace('\n', '')})
    # NOTE(review): no `run` method is defined on MasterBot/MarketBot in
    # this file - presumably polling lives elsewhere; verify before
    # relying on this entry point.
    gevent.spawn(m.run).join()
| {
"imports": [
"/app.py"
]
} |
0-1-0/marketbot | refs/heads/master | /views.py | # -*- coding: utf-8 -*-
import gevent
from gevent import monkey; monkey.patch_all()
from telebot import types
import telebot
from telebot import apihelper
from validate_email import validate_email
import pymongo
from io import BytesIO
from StringIO import StringIO
from datetime import datetime
from utils import Mailer, striphtml, WED_ADMIN_DOMAIN
from collections import defaultdict
from vk_crawler import Crawler
from pyexcel_xls import get_data
import pandas as pd
import md5
from time import time
class MarkupMixin(object):
    """Helpers for building telebot reply and inline keyboards."""

    def mk_markup(self, command_list):
        """Reply keyboard: first three commands one-per-row, rest paired."""
        markup = types.ReplyKeyboardMarkup(row_width=2)
        buttons = [self.BTN(cmd) for cmd in command_list]
        for button in buttons[:min(3, len(buttons))]:
            markup.row(button)
        markup.add(*buttons[3:])
        return markup

    def BTN(self, txt, request_contact=None, request_location=None):
        """One reply-keyboard button (optionally requesting contact/location)."""
        return types.KeyboardButton(txt, request_contact=request_contact, request_location=request_location)

    def mk_inline_markup(self, command_list):
        """Inline keyboard, one button per row; callback_data equals the label."""
        markup = types.InlineKeyboardMarkup(row_width=2)
        for cmd in command_list:
            markup.row(types.InlineKeyboardButton(cmd, callback_data=cmd))
        return markup

    def btn(self, txt, callback_data):
        """One inline button."""
        return types.InlineKeyboardButton(txt, callback_data=callback_data)
class View(MarkupMixin):
    """Base UI node: a routable, renderable piece of the conversation tree."""
    def __init__(self, ctx, editable=True, msg=''):
        self.ctx = ctx            # the owning Convo
        self.editable = editable  # editable views update their message in place
        self.active = False
        self.message_id = None    # Telegram id of the last rendered message
        self.msg = msg
        self.views = {}           # optional named subviews
    def route(self, path):
        # Walk the remaining path; empty path means "this view".
        if path == []:
            return self
        else:
            return self.get_subview(path[0]).route(path[1:])
    def get_subview(self, _id):
        # Unknown ids fall back to self, so routing never raises.
        return self.views.get(_id) or self
    def process_message(self, message):
        pass
    def process_callback(self, callback):
        pass
    def process_photo(self, photo):
        pass
    def process_file(self, doc):
        pass
    def process_sticker(self, sticker):
        pass
    def process_video(self, video):
        pass
    def activate(self):
        # Deactivate every sibling view, then render this one.
        self.deactivate()
        for v in self.ctx.views.values():
            v.deactivate()
        self.active = True
        self.render()
    def deactivate(self):
        self.active = False
        self.message_id = None
    def get_msg(self):
        return self.msg
    def get_markup(self):
        return None
    def render(self):
        # New message, or edit the previous one for editable views.
        if not (self.editable and self.message_id):
            self.ctx.send_message(self.get_msg(), self.get_markup())
        else:
            self.ctx.edit_message(self.message_id, self.get_msg(), self.get_markup())
class NavigationView(View):
    """Reply-keyboard menu whose buttons route to other view paths."""
    def __init__(self, ctx, links=None, msg=""):
        # BUGFIX: `links={}` was a shared mutable default - every
        # NavigationView constructed without links aliased the same dict.
        self.links = {} if links is None else links
        super(NavigationView, self).__init__(ctx, False, msg)
    def get_markup(self):
        # Keys are utf-8 byte strings in this py2 codebase; decode for telebot.
        return self.mk_markup(sorted([l.decode('utf-8') for l in self.links.keys()]))
    def process_message(self, message):
        # A button press arrives as its label; follow the mapped path.
        if message in self.links:
            self.ctx.route(self.links[message])
class InlineNavigationView(NavigationView):
    """NavigationView variant rendered as an inline keyboard."""
    def get_markup(self):
        markup = types.InlineKeyboardMarkup(row_width=2)
        for k in self.links.keys():
            markup.row(self.btn(k, callback_data=k))
        return markup
    def process_callback(self, callback):
        # Treat the callback payload exactly like a typed command.
        cmd = callback.data
        self.message_id = callback.message.message_id
        self.process_message(cmd)
class OrderView(View):
    """Admin-side card for one order with complete/reactivate buttons."""
    def __init__(self, ctx, data):
        # Deliberately skips View.__init__ (no msg/subviews needed here).
        self.ctx = ctx
        self.data = data          # the raw order document from Mongo
        self.editable = True
        self.message_id = None
    def get_msg(self):
        # Plain-text order summary (py2 utf-8 byte strings).
        res = 'Заказ #' + str(self.data['number']) + '\n'
        res += 'Статус: ' + self.data['status'].encode('utf-8') + '\n'
        res += '\n'.join(i['name'].encode('utf-8') + ' x ' + str(i['count']) for i in self.data['items'])
        res += '\n-----\n Итого: ' + str(self.data['total']) + ' руб.'
        res += '\n-----\n Детали доставки: \n'
        res += '\n\n'.join(k.encode('utf-8') + ': ' + v.encode('utf-8') for k, v in self.data['delivery'].items())
        res = res.replace('Ваш', '')
        return res
    def get_markup(self):
        # One action button depending on the current status.
        markup = types.InlineKeyboardMarkup(row_width=2)
        if self.data['status'] == u'В обработке':
            markup.row(self.btn(u'Завершить', str(self.data['number']) + ':complete'))
        else:
            markup.row(self.btn(u'Перенести в обработку', str(self.data['number']) + ':reactivate'))
        return markup
    def process_callback(self, callback):
        # callback.data is "<order number>:<action>"
        action = callback.data.split(':')[1]
        self.message_id = callback.message.message_id
        if action == 'complete':
            self.ctx.db.orders.update_one({'_id': self.data['_id']}, {'$set': {'status': 'Завершен'}})
            self.data = self.ctx.db.orders.find_one({'_id': self.data['_id']})
            self.render()
        elif action == 'reactivate':
            self.ctx.db.orders.update_one({'_id': self.data['_id']}, {'$set': {'status': 'В обработке'}})
            self.data = self.ctx.db.orders.find_one({'_id': self.data['_id']})
            self.render()
class AdminOrderView(View):
    """Paginated (five at a time) list of a shop's orders for the admin."""
    def __init__(self, ctx, bot_token, status=u'В обработке'):
        # Deliberately skips View.__init__; loads matching orders eagerly.
        self.ctx = ctx
        self.token = bot_token
        self.editable = True
        self.status = status
        self.orders = [OrderView(self.ctx, o) for o in self.ctx.db.orders.find({'token': self.token, 'status': status}).sort('date', pymongo.DESCENDING)]
        self._orders = {}  # order number (str) -> OrderView, for callbacks
        for o in self.orders:
            self._orders[str(o.data['number'])] = o
    def render(self):
        if len(self.orders) > 0:
            self.ctx.send_message('Заказы', markup=self.mk_markup(['Еще 5', 'Главное меню']))
        else:
            self.ctx.send_message('Нет заказов', markup=self.mk_markup(['Главное меню']))
        self.ptr = 0  # pagination cursor; reset on every full render
        self.render_5()
    def render_5(self):
        # Render the next page of up to five order cards.
        for order in self.orders[self.ptr: self.ptr + 5]:
            order.render()
        self.ptr += 5
    def process_message(self, message):
        if message == 'Главное меню':
            self.ctx.route(['main_view'])
        elif message == 'Еще 5':
            self.render_5()
    def process_callback(self, callback):
        # "<number>:<action>" -> forward to that order's own card.
        data = callback.data.encode('utf-8')
        number, action = data.split(':')
        self._orders[number].process_callback(callback)
class DetailsView(View):
    """Wizard that walks the user through a list of *Detail fields one by one,
    then runs prefinalize/finalize hooks once everything is collected."""
    def __init__(self, ctx, details, final_message=""):
        # Deliberately skips View.__init__.
        self.ctx = ctx
        self.details = details        # ordered list of *Detail objects
        self.ptr = 0                  # index of the detail being filled
        self.editable = False
        self.filled = False           # True once every detail is collected
        self.final_message = final_message
    def activate(self):
        # Restart the wizard from scratch on every activation.
        self.filled = False
        for d in self.details:
            d.value = None
        self.ptr = 0
        super(DetailsView, self).activate()
    def details_dict(self):
        return {d._id: d.value for d in self.details}
    def prefinalize(self):
        # Hook: runs just before the final render; overridden by subclasses.
        pass
    def finalize(self):
        # Hook: runs after routing back to the main menu.
        pass
    def current(self):
        return self.details[self.ptr]
    def get_msg(self):
        if self.filled:
            res = self.final_message + '\n'
            if not isinstance(self, BotCreatorView):  # TODO /hack
                for d in self.details:
                    res += (d.name + ": " + d.txt() + '\n')
            return res
        else:
            res = 'Укажите ' + self.current().name
            if self.current().is_filled():
                # Show the current value; it may be bytes or unicode (py2).
                try:
                    res += '\n(Сейчас: ' + self.current().value + ' )'
                except:
                    try:
                        res += '\n(Сейчас: ' + self.current().value.encode('utf-8') + ' )'
                    except:
                        pass
            res += '\n' + self.current().desc
            return res
    def get_markup(self):
        # Keyboard adapts to the current detail: OK / contact / location /
        # preset options / back / main menu.
        if not self.filled:
            markup = types.ReplyKeyboardMarkup()
            if self.current().is_filled() or isinstance(self.current(), FileDetail):
                markup.row(self.BTN('ОК'))
            if self.current()._id == 'phone':
                markup.row(self.BTN('отправить номер', request_contact=True))
            if self.current()._id == 'address':
                markup.row(self.BTN('отправить геолокацию', request_location=True))
            if len(self.current().default_options) > 0:
                markup.row(*[self.BTN(opt) for opt in self.current().default_options])
            if self.ptr > 0:
                markup.row(self.BTN('Назад'))
            markup.row(self.BTN('Главное меню'))
            return markup
        else:
            return None
    def next(self):
        # Advance to the next detail, or finish when this was the last one.
        if self.ptr + 1 < len(self.details):
            if self.current().is_filled():
                self.ptr += 1
                self.render()
        else:
            self.filled = True
            self.prefinalize()
            self.render()
            self.ctx.route(['main_view'])
            self.finalize()
    def prev(self):
        if self.ptr > 0:
            self.ptr -= 1
            self.render()
    def analyze_vk_link(self, url):
        # Crawl a VK shop page into tmpdata, then auto-confirm with 'ОК'.
        self.ctx.tmpdata = Crawler(url).fetch()
        self.process_message('ОК')
    def process_message(self, cmd):
        if cmd == 'ОК':
            # Confirm: accept a crawled/uploaded file, or keep current value.
            if isinstance(self.current(), FileDetail) and self.ctx.tmpdata is not None:
                if self.current().validate(self.ctx.tmpdata):
                    self.current().value = self.ctx.tmpdata
                    self.ctx.tmpdata = None
                    self.next()
                else:
                    self.ctx.send_message('Неверный формат файла')
            elif self.current().is_filled():
                self.next()
            else:
                self.render()
        elif cmd == 'Назад':
            self.prev()
        elif cmd == 'Главное меню':
            self.ctx.route(['main_view'])
        elif isinstance(self.current(), TextDetail):
            if self.current().validate(cmd):
                self.current().value = cmd
                self.next()
            else:
                self.ctx.send_message('Неверный формат')
        elif isinstance(self.current(), NumberDetail):
            if self.current().validate(cmd):
                self.current().value = cmd
                self.next()
            else:
                self.ctx.send_message('Введите целое число')
        elif isinstance(self.current(), FileDetail):
            # A VK shop URL can substitute for a spreadsheet upload.
            if 'vk.com' in cmd:
                try:
                    gevent.spawn(self.analyze_vk_link, cmd)
                    self.ctx.send_message('Анализирую..')
                    self.ctx.tmpdata = None
                except Exception:
                    self.ctx.send_message('Неверный формат магазина')
class BotCreatorView(DetailsView):
    """DetailsView that creates a new shop bot from the collected fields."""
    def prefinalize(self):
        # Temporarily append the new bot's @username to the final message;
        # finalize() restores the original text for the next run.
        self._final_message = self.final_message
        self.final_message += '\n Ссылка на бота: @' + telebot.TeleBot(self.details_dict().get('shop.token') or self.token).get_me().username.encode('utf-8')
    def bot_data(self):
        """Assemble the bot document to persist; `link` is an md5-derived id."""
        dd = self.details_dict()
        link = md5.new()  # py2-only md5 module
        link.update(dd['shop.token'] + dd['shop.token'][::-1])
        return {
            'admin': self.ctx.bot.bot.get_chat(self.ctx.chat_id).username,
            'token': dd['shop.token'],
            'items': dd['shop.items'],
            'email': dd['shop.email'],
            'chat_id': self.ctx.chat_id,
            'delivery_info': dd['shop.delivery_info'],
            'contacts_info': dd['shop.contacts_info'],
            'total_threshold': dd['shop.total_threshold'],
            'link': link.hexdigest()
        }
    def finalize(self):
        self.final_message = self._final_message
        bot_data = self.bot_data()
        self.ctx.db.bots.save(bot_data)
        self.ctx.bot.start_bot(bot_data)
    def process_file(self, doc):
        """Parse an uploaded csv/xls/xlsx item list into normalised item
        dicts and stash them in ctx.tmpdata for the 'ОК' confirmation."""
        fid = doc.document.file_id
        file_info = self.ctx.bot.bot.get_file(fid)
        file_format = file_info.file_path.split('.')[-1]
        if file_format in ['csv', 'xls', 'xlsx']:
            content = self.ctx.bot.bot.download_file(file_info.file_path)
            io = StringIO(content)
            try:
                df = pd.read_csv(io)
            except:
                # Not csv: treat as Excel. py2: dict.values() is a list,
                # so [0] grabs the first (only) sheet; row 0 is the header.
                excel_data = get_data(io)
                _keys = excel_data.values()[0][0]
                _values = excel_data.values()[0][1:]
                _items = [dict(zip(_keys, rec)) for rec in _values]
                df = pd.DataFrame(_items)
            # Map many possible (ru/en) column spellings onto canonical names.
            df_keys = {k.lower(): k for k in df.to_dict().keys()}
            data = pd.DataFrame()
            mapping = {
                'id': ['id', 'product_id'],
                'active': ['active', 'visible', u'активно'],
                'cat': ['category', u'раздел 1', u'категория'],
                'name': [u'наименование', 'name'],
                'desc': [u'описание', 'description', 'description(html)'],
                'price': ['price', u'цена'],
                'img': ['img_url', u'изображение', u'ссылка на изображение']
            }
            for k, values in mapping.items():
                for col_name in values:
                    if col_name in df_keys:
                        data[k] = df[df_keys[col_name]]
            # Normalise the "active" flag to '1'/'0'.
            data['active'] = data['active'].map(lambda x: '1' if x in [1, 'y'] else '0')
            items = data.T.to_dict().values()
            if len(items) == 0:
                raise Exception("no items added")
            self.ctx.tmpdata = items
        else:
            # Unsupported extension: silently ignore the upload.
            pass
class ItemNode(View):
    """Inline card for one shop item with an add-to-basket counter."""
    def __init__(self, menu_item, _id, ctx, menu):
        # Deliberately skips View.__init__.
        self.editable = True
        self.description = menu_item['desc']
        self.img = menu_item['img']
        self.count = 0               # how many units the user picked so far
        self.message_id = None
        self.price = int(menu_item['price'])
        self.name = menu_item['name']
        self._id = _id
        self.ctx = ctx
        self.ordered = False         # True once sent to the basket
        self.menu = menu
        self.menu_item = menu_item
    def to_dict(self):
        # py2: dict.items() returns lists, so `+` concatenates them.
        return dict(self.menu_item.items() + {'count': self.count}.items())
    def get_btn_txt(self):
        res = str(self.price) + ' руб.'
        if self.count > 0:
            res += ' ' + str(self.count) + ' шт.'
        return res
    def get_add_callback(self):
        return 'menu_item:' + str(self._id) + ':add'
    def get_basket_callback(self):
        return 'menu_item:' + str(self._id) + ':basket'
    def sub(self):
        self.count -= 1
        self.render()
    def add(self):
        self.count += 1
        self.render()
    def get_markup(self):
        markup = types.InlineKeyboardMarkup()
        markup.row(types.InlineKeyboardButton(self.get_btn_txt(), callback_data=self.get_add_callback()))
        if self.count > 0:
            markup.row(types.InlineKeyboardButton('Добавить в корзину', callback_data=self.get_basket_callback()))
        return markup
    def get_total(self):
        return self.count * self.price
    def get_msg(self):
        # Image link + name + cleaned description, truncated to 500 chars.
        return (u'<a href="' + self.img + u'">' + self.name + u'</a>\n' + striphtml(self.description))[:500]
    def process_callback(self, call):
        # call.data is "menu_item:<id>:<action>"
        self.message_id = call.message.message_id
        _id, action = call.data.split(':')[1:]
        if action == 'add':
            self.count += 1
            self.render()
        if action == 'basket':
            self.ordered = True
            self.menu.goto_basket(call)
class BasketNode(View):
    """The shopping basket: pages through picked items and totals them."""
    def __init__(self, menu):
        # Deliberately skips View.__init__.
        self.menu = menu
        self.chat_id = menu.ctx.chat_id
        self.message_id = None
        self.ctx = menu.ctx
        self.items = []          # ItemNode objects added to the basket
        self.item_ptr = 0        # which item the basket card currently shows
        self.editable = True
        self.ctx.current_basket = self
    def to_dict(self):
        """Serializable order payload (only items with count > 0)."""
        return {
            'chat_id': self.chat_id,
            'items': [i.to_dict() for i in self.items if i.count > 0],
            'total': self.get_total()
        }
    def get_ordered_items(self):
        return filter(lambda i: i.ordered is True, self.menu.items.values())
    def activate(self):
        # Merge in anything newly ordered from the current menu and reload
        # the shop's minimum-order threshold.
        self.items = list(set(self.items + self.get_ordered_items()))
        self.total_threshold = int(self.ctx.get_bot_data().get('total_threshold') or 0)
        self.item_ptr = 0
    def current_item(self):
        return self.items[self.item_ptr]
    def inc(self):
        if self.item_ptr + 1 < len(self.items):
            self.item_ptr += 1
            self.render()
    def dec(self):
        if self.item_ptr - 1 > -1:
            self.item_ptr -= 1
            self.render()
    def add(self):
        self.current_item().add()
        self.render()
    def sub(self):
        if self.current_item().count > 0:
            self.current_item().sub()
            self.render()
    def get_total(self):
        return sum(i.get_total() for i in self.items)
    def __str__(self):
        res = ""
        for item in self.items:
            if item.count > 0:
                # BUGFIX: each row previously printed
                # self.current_item().get_total() - the subtotal of whichever
                # item the card happened to show - instead of the row's own
                # item subtotal.
                res += item.name.encode('utf-8') + " " + str(item.count) + "шт. " + str(item.get_total()) + ' руб\n'
        res += 'Итого: ' + str(self.get_total()) + 'руб.'
        return res
    def get_msg(self):
        if self.get_total() > 0:
            res = 'Корзина:' + '\n\n'
            res += self.current_item().get_msg().encode('utf-8') + '\n'
            res += str(self.current_item().price) + ' * ' + str(self.current_item().count) + ' = ' + str(self.current_item().get_total()) + ' руб'
            return res
        else:
            return 'В Корзине пусто'
    def process_callback(self, call):
        # Actions: < > page, + - adjust count, << below-minimum notice.
        self.message_id = call.message.message_id
        action = call.data.split(':')[-1]
        if action == '>':
            self.inc()
        elif action == '<':
            self.dec()
        elif action == '+':
            self.add()
        elif action == '-':
            self.sub()
        elif action == '<<':
            self.ctx.send_message('Минимальная сумма заказа ' + str(self.total_threshold) + ' рублей')
    def get_markup(self):
        if self.get_total() > 0:
            markup = types.InlineKeyboardMarkup()
            markup.row(
                self.btn('-', 'basket:-'),
                self.btn(str(self.current_item().count) + ' шт.', 'basket:cnt'),
                self.btn('+', 'basket:+')
            )
            markup.row(self.btn('<', 'basket:<'), self.btn(str(self.item_ptr + 1) + '/' + str(len(self.items)), 'basket:ptr'), self.btn('>', 'basket:>'))
            # Checkout only unlocks at or above the shop's minimum total.
            if self.get_total() < self.total_threshold:
                markup.row(self.btn('Минимальная сумма заказа ' + str(self.total_threshold) + ' рублей', 'basket:<<'))
            else:
                markup.row(self.btn('Заказ на ' + str(self.get_total()) + ' р. Оформить?', 'link:delivery'))
            return markup
        else:
            return None
class MenuNode(View):
    """One menu category: paginates ItemNode cards five at a time and
    routes item/basket/link callbacks to the right child."""
    def __init__(self, msg, menu_items, ctx, links, parent=None):
        # Deliberately skips View.__init__.
        self.ctx = ctx
        self.msg = msg
        self.items = {}
        # Reuse the convo-wide basket if one already exists.
        self.basket = self.ctx.current_basket or BasketNode(self)
        self.links = links
        self.ptr = 0             # pagination cursor
        self.editable = False
        self.parent = parent
        self.message_id = None
        cnt = 0
        for item in menu_items:
            try:
                _id = str(cnt)
                self.items[_id] = ItemNode(item, _id, self.ctx, self)
                cnt += 1
            except Exception:
                # Skip malformed items (e.g. non-numeric price).
                pass
    def render(self):
        super(MenuNode, self).render()
        self.render_5()
    def render_5(self):
        # py2: dict.values() is a list, so slicing works directly.
        for item in self.items.values()[self.ptr:self.ptr + 5]:
            try:
                item.render()
            except Exception:
                pass
        self.ptr += 5
    def process_message(self, message):
        txt = message
        if txt == 'Показать еще 5':
            self.render()
        elif txt == 'Назад':
            self.ctx.route(['menu_cat_view'])
    def get_msg(self):
        return self.msg
    def get_markup(self):
        # NOTE(review): `ptr + 6` looks one off from the page size of 5 -
        # confirm whether the last page can hide a "show more" button.
        if self.ptr + 6 < len(self.items):
            return self.mk_markup(['Показать еще 5', 'Назад'])
        else:
            return self.mk_markup(['Назад'])
    def process_callback(self, call):  # route callback to item node
        self.message_id = call.message.message_id
        data = call.data.encode('utf-8')
        _type = data.split(':')[0]
        if _type == 'menu_item':
            node_id = data.split(':')[1]
            if node_id in self.items:
                self.items[node_id].process_callback(call)
        elif _type == 'basket':
            self.basket.process_callback(call)
        elif _type == 'link':
            ll = data.split(':')[1]
            if ll in self.links:
                self.ctx.route(self.links[ll])
    def goto_basket(self, call):
        # Rebind the basket to this menu and show it as a fresh message.
        self.basket.menu = self
        self.basket.message_id = None
        self.basket.activate()
        self.basket.render()
class OrderCreatorView(DetailsView):
    """Checkout wizard: collects delivery details (pre-filled from the
    user's most recent order) and persists + e-mails the new order."""
    def __init__(self, ctx, details, final_message=""):
        super(OrderCreatorView, self).__init__(ctx, details, final_message)
        self.orders = list(self.ctx.db.orders.find({'token': self.ctx.bot.token, 'chat_id': self.ctx.chat_id}).sort('date', pymongo.DESCENDING))
        if len(self.orders) > 0:
            last_order = self.orders[0]['delivery']
        else:
            last_order = {}
        def _get(v):
            # Pull a previous answer by its (Russian) field name; values may
            # be unicode or bytes, so try encode first.
            try:
                return last_order.get(v.decode('utf-8')).encode('utf-8')
            except:
                return last_order.get(v.decode('utf-8'))
        self.details = [
            TextDetail('delivery_type', ['Доставка до дома', 'Самовывоз'], name='тип доставки', ctx=self.ctx, value=_get('тип доставки')),
            TextDetail('address', name='Ваш адрес', ctx=self.ctx, value=_get('Ваш адрес')),
            TextDetail('phone', name='Ваш телефон', ctx=self.ctx, value=_get('Ваш телефон')),
            TextDetail('delivery_time', name='желаемое время доставки', ctx=self.ctx)
        ]
    def activate(self):
        # Unlike DetailsView.activate, keeps previous values as defaults.
        self.filled = False
        self.ptr = 0
        super(DetailsView, self).activate()
    def finalize(self):
        """Turn the basket + collected details into an order document."""
        order = self.ctx.current_basket.to_dict()
        order['delivery'] = {}
        for d in self.details:
            order['delivery'][d.name] = d.txt()
        order['date'] = datetime.utcnow()
        order['status'] = 'В обработке'
        order['token'] = self.ctx.token
        # NOTE(review): numbering by count can collide if orders are ever
        # deleted or created concurrently - verify acceptable.
        order['number'] = len(self.orders)
        self.ctx.db.orders.insert_one(order)
        gevent.spawn(Mailer().send_order, self.ctx.get_bot_data()['email'], order)
        self.ctx.current_basket = None
class UpdateBotView(BotCreatorView):
    """Single-field editor for an existing shop bot: writes the edited
    detail straight to the bot's Mongo document."""
    def __init__(self, ctx, token, details, final_message=""):
        self.token = token  # the shop bot being edited
        super(UpdateBotView, self).__init__(ctx, details, final_message=final_message)
    def activate(self):
        # Keep previous values as defaults (skips DetailsView's reset).
        self.filled = False
        self.ptr = 0
        super(DetailsView, self).activate()
    def get_markup(self):
        # Like DetailsView.get_markup but always offers 'Назад' and never
        # 'Главное меню'; 'ОК' only for file uploads.
        markup = types.ReplyKeyboardMarkup()
        if isinstance(self.current(), FileDetail):
            markup.row(self.BTN('ОК'))
        if self.current()._id == 'phone':
            markup.row(self.BTN('отправить номер', request_contact=True))
        if self.current()._id == 'address':
            markup.row(self.BTN('отправить геолокацию', request_location=True))
        if len(self.current().default_options) > 0:
            markup.row(*[self.BTN(opt) for opt in self.current().default_options])
        markup.row(self.BTN('Назад'))
        return markup
    def process_message(self, cmd):
        # Mirrors DetailsView.process_message but finalizes after a single
        # field and routes back to the bot's settings screen.
        if cmd == 'ОК':
            if isinstance(self.current(), FileDetail) and self.ctx.tmpdata is not None:
                if self.current().validate(self.ctx.tmpdata):
                    self.current().value = self.ctx.tmpdata
                    self.ctx.tmpdata = None
                    self.next()
                else:
                    self.ctx.send_message('Неверный формат файла')
            elif self.current().is_filled():
                self.finalize()
                self.ctx.route(['settings_view', self.token])
            else:
                self.render()
        elif cmd == 'Назад':
            self.ctx.route(['settings_view', self.token])
        elif cmd == 'Главное меню':
            self.ctx.route(['main_view'])
        elif isinstance(self.current(), TextDetail):
            if self.current().validate(cmd):
                self.current().value = cmd
                self.finalize()
                self.ctx.route(['settings_view', self.token])
            else:
                self.ctx.send_message('Неверный формат')
        elif isinstance(self.current(), NumberDetail):
            if self.current().validate(cmd):
                self.current().value = cmd
                self.finalize()
                self.ctx.route(['settings_view', self.token])
            else:
                self.ctx.send_message('Введите целое число')
        elif isinstance(self.current(), FileDetail):
            if 'vk.com' in cmd:
                try:
                    gevent.spawn(self.analyze_vk_link, cmd)
                    self.ctx.send_message('Анализирую..')
                    self.ctx.tmpdata = None
                except Exception:
                    self.ctx.send_message('Неверный формат магазина')
    def finalize(self):
        # Persist the single edited field; 'shop.items' -> document key 'items'.
        self.ctx.db.bots.update_one({'token': self.token}, {'$set': {self.current()._id.split('.')[-1]: self.current().value}})
class MenuCatView(InlineNavigationView):
    """Category picker: one MenuNode per item category of the shop."""
    def __init__(self, ctx, msg=''):
        super(MenuCatView, self).__init__(ctx, msg=msg)
        self.init_categories()
    def activate(self):
        # Rebuild categories on every activation so item edits show up.
        self.init_categories()
        super(MenuCatView, self).activate()
    def init_categories(self):
        data = self.ctx.get_bot_data()['items']
        self.categories = defaultdict(list)
        for item_data in data:
            # Category name: text before the first '.', capped at 80 chars
            # (Telegram button limit). TODO HACK per original author.
            self.categories[item_data['cat'].split('.')[0][:80]].append(item_data)
        # Drop items with an empty category name.
        if u'' in self.categories:
            del self.categories[u'']
        self.links = {cat: ['menu_cat_view', cat] for cat in self.categories.keys()}
        self.views = {cat: MenuNode(cat, items, self.ctx, links={"delivery": ['delivery']}) for cat, items in self.categories.items()}
    def process_message(self, cmd):
        if cmd == 'Назад' or cmd == 'Главное меню':
            self.ctx.route(['main_view'])
        else:
            super(MenuCatView, self).process_message(cmd)
    def route(self, path):
        # Routing to self resets pagination by rebuilding every MenuNode.
        if path == []:
            self.views = {cat: MenuNode(cat, items, self.ctx, links={"delivery": ['delivery']}) for cat, items in self.categories.items()}
        return super(MenuCatView, self).route(path)
    def render(self):
        self.ctx.send_message('Меню', markup=self.mk_markup(['Назад']))
        super(MenuCatView, self).render()
class OrderInfoView(NavigationView):
    """Shows the shop's delivery terms, or an admin-contact hint if unset."""
    def get_msg(self):
        delivery_info = self.ctx.get_bot_data().get('delivery_info')
        if delivery_info:
            return delivery_info
        return 'Об условиях доставки пишите: @' + self.ctx.get_bot_data().get('admin')
class ContactsInfoView(NavigationView):
    """Shows the shop's contact info, or an admin-contact hint if unset."""
    def get_msg(self):
        contacts_info = self.ctx.get_bot_data().get('contacts_info')
        if contacts_info:
            return contacts_info
        return 'Чтобы узнать подробности свяжитесь с @' + self.ctx.get_bot_data().get('admin')
class HistoryItem(object):
    """Renders one stored order document as a plain-text history entry."""
    def __init__(self, order):
        # `order` is a raw order document from the `orders` collection.
        self.order = order
    def __str__(self):
        # Date without microseconds ('YYYY-MM-DD HH:MM:SS').
        res = str(self.order.get('date')).split('.')[0] + '\n\n'
        res += '\n'.join(i['name'].encode('utf-8') + ' x ' + str(i['count']) for i in self.order['items'])
        res += '\n-----\n Итого: ' + str(self.order['total']) + ' руб.'
        res += '\n-----\n Детали доставки: \n-----\n'
        # Delivery values may be unicode or already-encoded str (Python 2):
        # try encoding first, fall back to a raw join, give up silently.
        try:
            res += '\n'.join(k.encode('utf-8') + ': ' + v.encode('utf-8') for k, v in self.order['delivery'].items())
        except:
            try:
                res += '\n'.join(k + ': ' + v for k, v in self.order['delivery'].items())
            except:
                pass
        return res
class HistoryView(NavigationView):
    """Paginated (5 at a time) history of this chat's past orders."""
    def activate(self):
        # Reset pagination and reload this chat's orders, newest first.
        self.cursor = 0
        self.orders = list(self.ctx.db.orders.find({'token': self.ctx.bot.token, 'chat_id': self.ctx.chat_id}).sort('date', pymongo.DESCENDING))
        self.links = {
            'Главное меню': ['main_view']
        }
        if len(self.orders) > 0:
            self.links['Еще 5'] = ['history']
        super(HistoryView, self).activate()
    def render_5(self):
        # Send the next page of up to five orders and advance the cursor.
        for order in self.orders[self.cursor:self.cursor + 5]:
            self.ctx.send_message(str(HistoryItem(order)))
        self.cursor += 5
    def process_message(self, message):
        if message == 'Еще 5':
            self.render_5()
        if message == 'Главное меню':
            self.ctx.route(['main_view'])
    def get_msg(self):
        # Called during render: pushes the first page as a side effect.
        if len(self.orders) > 0:
            self.render_5()
            return ':)'
        else:
            return 'История заказов пуста'
class SelectBotView(NavigationView):
    """Bot picker: lists the chat's bots and forwards to a per-bot subview.

    `bot_view` is a dict {'link': route-name, 'view': view class} used to
    build a subview lazily for the chosen bot token.
    """
    def __init__(self, ctx, links=None, msg="Выберите бота:", bot_view=None):
        # FIX: `links={}` was a mutable default shared across instances;
        # use a None sentinel and build a fresh dict per instance.
        self.ctx = ctx
        self.links = links if links is not None else {}
        self.msg = msg
        self.bot_view = bot_view
        # NOTE(review): this deliberately names NavigationView in super(),
        # skipping NavigationView.__init__ — confirm the bypass is intended.
        super(NavigationView, self).__init__(ctx, False, msg)
    def get_subview(self, token):
        # Lazily instantiate the per-bot view on first access.
        if token not in self.views:
            self.views[token] = self.bot_view['view'](self.ctx, token)
        return super(SelectBotView, self).get_subview(token)
    def activate(self):
        # Rebuild the keyboard from the bots currently owned by this chat.
        self.links = {}
        self.views = {}
        for bot in self.ctx.db.bots.find({'chat_id': self.ctx.chat_id}):
            self.links['@' + bot['username']] = [self.bot_view['link'], bot['token']]
        self.links['Назад'] = ['main_view']
        super(SelectBotView, self).activate()
class OrdersView(NavigationView):
    """Admin view for browsing a single bot's orders, split by status."""
    def __init__(self, ctx, bot_token):
        self.ctx = ctx
        self.token = bot_token
        self.editable = True
        self.msg = 'Выберите статус заказа'
        self.links = {
            'В обработке': ['orders_view', self.token, 'in_processing'],
            'Завершенные': ['orders_view', self.token, 'done'],
            'Назад': ['orders_view'],
        }
        self.message_id = None
        # One subview per order status, keyed by the route segment above.
        in_processing = AdminOrderView(self.ctx, self.token, status=u'В обработке')
        done = AdminOrderView(self.ctx, self.token, status=u'Завершен')
        self.views = {'in_processing': in_processing, 'done': done}
class SettingsView(NavigationView):
    """Per-bot settings menu: each entry edits one attribute of the bot."""
    def __init__(self, ctx, bot_token):
        self.ctx = ctx
        self.token = bot_token
        self.editable = True
        self.msg = 'Настройки'
        # Menu caption -> route; route tails match the `views` keys below.
        self.links = {
            'API token': ['settings_view', self.token, 'token_view'],
            'E-mail': ['settings_view', self.token, 'email_view'],
            'Загрузка товаров': ['settings_view', self.token, 'items_view'],
            'Условия доставки': ['settings_view', self.token, 'delivery_info_view'],
            'Контакты': ['settings_view', self.token, 'contacts_view'],
            'Минимальная сумма заказа': ['settings_view', self.token, 'total_threshold_view'],
            'Личный кабинет': ['settings_view', self.token, 'cabinet_view'],
            'Назад': ['settings_view']
        }
        self.message_id = None
        # if self.token not in self.views:
        bot = self.ctx.db.bots.find_one({'chat_id': self.ctx.chat_id, 'token': self.token})
        # One UpdateBotView per editable detail, pre-filled from the DB doc.
        self.views = {
            'token_view': UpdateBotView(self.ctx, self.token, [TokenDetail('shop.token', name='API token', ctx=self.ctx, value=bot['token'])]),
            'email_view': UpdateBotView(self.ctx, self.token, [EmailDetail('shop.email', name='email для приема заказов', ctx=self.ctx, value=bot['email'])]),
            'items_view': UpdateBotView(self.ctx, self.token, [FileDetail('shop.items', value=bot['items'], name='файл с описанием товаров или url магазина вконтакте и нажмите \'ОК\'', desc='<a href="https://github.com/0-1-0/marketbot/blob/master/sample.xlsx?raw=true">(Пример файла)</a>')]),
            'delivery_info_view': UpdateBotView(self.ctx, self.token, [TextDetail('shop.delivery_info', name='текст с условиями доставки', value=bot.get('delivery_info'))]),
            'contacts_view': UpdateBotView(self.ctx, self.token, [TextDetail('shop.contacts_info', name='текст с контактами для связи', value=bot.get('contacts_info'))]),
            'total_threshold_view': UpdateBotView(self.ctx, self.token, [NumberDetail('shop.total_threshold', name='минимальную сумму заказа', value=bot.get('total_threshold'))]),
            'cabinet_view': CabinetView(self.ctx, self.token)
        }
    def get_markup(self):
        # Keys are UTF-8 byte strings (Python 2); decode before sorting so
        # the keyboard order is stable for Cyrillic captions.
        return self.mk_markup(sorted([l.decode('utf-8') for l in self.links.keys()]))
class CabinetView(NavigationView):
    """Issues a time-limited web-admin ("cabinet") link for this bot."""
    def __init__(self, ctx, bot_token):
        self.ctx = ctx
        self.token = bot_token
        self.editable = True
        self.msg = 'Ссылка действительна в течение часа'
        self.links = {'Назад': ['settings_view', self.token]}
    def get_markup(self):
        markup = types.ReplyKeyboardMarkup()
        markup.row(self.BTN('Получить ссылку'))
        markup.row(self.BTN('Назад'))
        return markup
    def process_message(self, cmd):
        if cmd == 'Получить ссылку':
            # Link = MD5(current hour) + MD5(token + reversed token).
            # The hour-based hash is what makes the link expire (~1 hour),
            # matching self.msg above.  Uses the Python 2 `md5` module.
            first_part = md5.new()
            second_part = md5.new()
            first_part.update(str(int(time() / 60 / 60)))
            second_part.update(self.token + self.token[::-1])
            link = WED_ADMIN_DOMAIN + first_part.hexdigest() + second_part.hexdigest()
            self.ctx.send_message(link)
        elif cmd == 'Назад':
            self.ctx.route(['settings_view', self.token])
        else:
            # Any other input is ignored on this screen.
            pass
class HelpView(NavigationView):
    """Static help screen pointing users at the support contact."""
    def get_msg(self):
        support_msg = "По всем вопросам обращайтесь к @NikolaII :)"
        return support_msg
class MailingView(NavigationView):
    """Broadcast view: forwards admin-supplied content (text, documents,
    photos, stickers, videos) to every conversation of the selected bot."""
    def __init__(self, ctx, bot_token):
        self.ctx = ctx
        self.token = bot_token
        self.editable = True
        self.msg = 'Введите текст, прикрепите фото или стикер рассылки'
        self.links = {"Назад": ['mailing_view']}
    def process_message(self, message):
        if message in self.links:
            self.ctx.route(self.links[message])
        else:
            # Fan the text out to every subscriber concurrently.
            for convo in self.ctx.db.convos.find({'bot_token': self.token}):
                gevent.spawn(apihelper.send_message, self.token, convo['chat_id'], message, reply_markup=None, parse_mode='HTML')
    def process_file(self, doc):
        """Re-broadcast an uploaded document; only gif/mp4 are forwarded."""
        fid = doc.document.file_id
        file_info = self.ctx.bot.bot.get_file(fid)
        content = self.ctx.bot.bot.download_file(file_info.file_path)
        file_format = file_info.file_path.split('.')[-1]
        if file_format in ['gif', 'mp4']:
            for convo in self.ctx.db.convos.find({'bot_token': self.token}):
                # Fresh BytesIO per recipient: each send consumes the stream.
                doc = BytesIO(content)
                doc.name = fid + '.' + file_format
                gevent.spawn(apihelper.send_data, self.token, convo['chat_id'], doc, 'document', reply_markup=None)
        else:
            # Other document formats are silently dropped.
            pass
    def process_photo(self, photo):
        """Re-broadcast the largest size of an uploaded photo with caption."""
        caption = photo.caption
        fid = photo.photo[-1].file_id
        file_info = self.ctx.bot.bot.get_file(fid)
        content = self.ctx.bot.bot.download_file(file_info.file_path)
        file_format = file_info.file_path.split('.')[-1]
        for convo in self.ctx.db.convos.find({'bot_token': self.token}):
            photo = BytesIO(content)
            photo.name = fid + '.' + file_format
            gevent.spawn(apihelper.send_photo, self.token, convo['chat_id'], photo, caption=caption, reply_markup=None)
    def process_sticker(self, sticker):
        """Re-broadcast an uploaded sticker."""
        fid = sticker.sticker.file_id
        file_info = self.ctx.bot.bot.get_file(fid)
        content = self.ctx.bot.bot.download_file(file_info.file_path)
        file_format = file_info.file_path.split('.')[-1]
        for convo in self.ctx.db.convos.find({'bot_token': self.token}):
            doc = BytesIO(content)
            doc.name = fid + '.' + file_format
            gevent.spawn(apihelper.send_data, self.token, convo['chat_id'], doc, 'sticker', reply_markup=None)
    def process_video(self, video):
        """Re-broadcast an uploaded video, preserving caption and duration."""
        caption = video.caption
        duration = video.video.duration
        fid = video.video.file_id
        file_info = self.ctx.bot.bot.get_file(fid)
        content = self.ctx.bot.bot.download_file(file_info.file_path)
        file_format = file_info.file_path.split('.')[-1]
        for convo in self.ctx.db.convos.find({'bot_token': self.token}):
            video = BytesIO(content)
            video.name = fid + '.' + file_format
            gevent.spawn(apihelper.send_video, self.token, convo['chat_id'], video, caption=caption, duration=duration, reply_markup=None)
class Detail(object):
    """One configurable shop attribute edited via a chat dialog.

    Subclasses override validate()/txt() for type-specific behavior.
    """
    def __init__(self, _id, default_options=None, name='', desc='', value=None, ctx=None):
        # FIX: `default_options=[]` was a mutable default shared across all
        # instances; a None sentinel gives every instance its own list while
        # keeping the old call signature working.
        self._id = _id  # dotted id, e.g. 'shop.email'
        self.default_options = default_options if default_options is not None else []
        self.name = name  # human-readable prompt fragment
        self.desc = desc  # optional HTML hint shown to the user
        self.value = value
        self.ctx = ctx  # conversation context (needed by some validators)
    def is_filled(self):
        """True once a value has been provided."""
        return self.value is not None
    def validate(self, value):
        """Base details accept anything; subclasses restrict this."""
        return True
    def txt(self):
        """Display form of the current value."""
        return str(self.value)
class TextDetail(Detail):
    """Free-form text detail; inherits permissive validation from Detail."""
    pass
class NumberDetail(Detail):
    """Detail holding an integer value (e.g. a minimum order total)."""
    def validate(self, value):
        """Return True iff `value` can be parsed as an int.

        FIX: int() raises TypeError (not ValueError) for None and other
        non-string/non-number inputs; catch both so validation never crashes.
        """
        try:
            int(value)
            return True
        except (ValueError, TypeError):
            return False
class TokenDetail(TextDetail):
    """Telegram bot API token: must be unused here and accepted by Telegram."""
    def validate(self, value):
        # Reject tokens already registered to another shop bot.
        if self.ctx.db.bots.find_one({'token': value}) is not None:
            return False
        # Probe the token against the Telegram API; any failure (bad token,
        # network error) is treated as invalid.
        try:
            b = telebot.TeleBot(value)
            b.get_me()
        except:
            return False
        return True
class EmailDetail(TextDetail):
    """E-mail address detail; delegates to the external validate_email()."""
    def validate(self, value):
        return validate_email(value)
class FileDetail(Detail):
    """Detail whose value is an uploaded items file (or parsed VK shop)."""
    def validate(self, value):
        """Any non-None payload is accepted."""
        return value is not None
    def txt(self):
        """Human-readable fill status for the settings menu."""
        return 'Заполнено' if self.value else 'Не заполнено'
| # -*- coding: utf-8 -*-
import sendgrid
import os
from sendgrid.helpers.mail import *
import re
import requests
import json
WED_ADMIN_DOMAIN = open('domain').read().split('\n')[0]
def get_address(lat, lng):
    """Reverse-geocode (lat, lng) into a Russian-language formatted address
    via the Google Maps Geocoding API."""
    url = ('http://maps.googleapis.com/maps/api/geocode/json?latlng='
           + str(lat) + ',' + str(lng) + '&language=ru')
    payload = json.loads(requests.get(url).content)
    return payload.get('results')[0].get('formatted_address')
class Singleton(object):
    """Lazy singleton base: every instantiation returns one shared object."""
    _instance = None
    def __new__(cls, *args, **kwargs):
        # Create the shared instance on first call only.
        if not cls._instance:
            instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
            cls._instance = instance
        return cls._instance
class Mailer(Singleton):
    """Singleton SendGrid sender for order-notification e-mails."""
    # Client is built at class-creation time from the SENDGRID_API_KEY env var.
    sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))
    def send(self, mail, subj, txt):
        """Send a plain-text e-mail to `mail` with subject `subj`.

        NOTE(review): the `mail` parameter (recipient address) is shadowed
        below by the sendgrid Mail object — works, but confusing.
        """
        from_email = Email("order@botmarket.com")
        subject = subj
        to_email = Email(mail)
        content = Content("text/plain", txt)
        mail = Mail(from_email, subject, to_email, content)
        return self.sg.client.mail.send.post(request_body=mail.get())
    def send_order(self, mail, order):
        """Format an order document as UTF-8 text (Python 2 bytes) and mail it."""
        res = 'Заказ\n====\n\n\n'
        res += '\n'.join(i['name'].encode('utf-8') + ' x ' + str(i['count']) for i in order['items'])
        res += '\n-----\n Итого: ' + str(order['total']) + ' руб.'
        res += '\n-----\n Детали доставки: \n'
        # Delivery values may be unicode or already-encoded str (Python 2).
        try:
            res += '\n\n'.join(k.encode('utf-8') + ': ' + v.encode('utf-8') for k, v in order['delivery'].items())
        except:
            res += '\n\n'.join(k + ': ' + v for k, v in order['delivery'].items())
        # Strip the word 'Ваш' from delivery-field labels.
        res = res.replace('Ваш', '')
        return self.send(mail, 'Новый заказ!', res)
def striphtml(data):
p = re.compile(r'<[brai].*?>|<\/[a].*?>|<span.*?>|<\/span.*?>')
res = p.sub('\n', data)
return res.replace(' ', ' ').replace('—', '-')
--- FILE SEPARATOR ---
from datetime import datetime
from grab.spider import Spider, Task
import json
import logging
import re
from selenium import webdriver
# data = {}
class Crawler(Spider):
    """Grab spider that scrapes a VK market (shop) into a list of item dicts.

    Listing pages are fetched as grab tasks; per-item detail popups are
    rendered with PhantomJS because they require JavaScript.
    """
    def __init__(self, url):
        self.initial_urls = [url]
        self.data = []  # accumulated item dicts
        self.d = webdriver.PhantomJS()
        super(Crawler, self).__init__()
    def task_initial(self, grab, task):
        """Discover the shop id and yield one 'showcase' task per 24-item page."""
        shop_offset = 0
        print("Try to parse: " + task.url)
        # The 'load more' button's onclick handler embeds the market-<id> slug.
        shop_url_selector = grab.doc.select('//*[@id="ui_market_items_load_more"]').attr('onclick')
        re_shop_url = re.compile('market-(\d{1,12})+')
        shop_url = re_shop_url.search(shop_url_selector).group(0) # 'market-NNNNNN'
        shop_number = re_shop_url.search(shop_url_selector).group(1) # 'NNNNNN'
        shop_full_url = ("https://vk.com/" + shop_url)
        print(shop_url)
        shop_itemscount = grab.doc.select('//*[@class="module clear market_module _module"]//*[@class="header_count fl_l"]').text()
        while shop_offset < int(shop_itemscount):
            yield Task('showcase', url=shop_full_url + '?offset=' + str(shop_offset), shop_key=shop_url, shop_num=shop_number, offset=shop_offset)
            shop_offset += 24  # VK shows 24 items per showcase page
    def task_showcase(self, grab, task):
        """Parse one listing page and fetch details for each item on it."""
        print("Go: " + task.url)
        re_price = re.compile('>(\d+)\D(\d*)')
        item_id = 0 + task.offset  # sequential id continues across pages
        for item_node in grab.doc.select('//div[@class="market_list"]/div'):
            item_id += 1
            item_attributes = {}
            item_native_id = item_node.attr('data-id')
            item_img = item_node.select('div/div/a/img').attr('src')
            item_price_raw = item_node.select('*/div[@class="market_row_price"]').html()
            item_price = int(re_price.search(item_price_raw).group(1))
            item_price_2 = re_price.search(item_price_raw).group(2)
            if item_price_2: # remove digit delimiter if price > 1000 (dumb, but working)
                item_price = item_price * 1000 + int(item_price_2)
            item_attributes = {"id": item_id,
                               "native_id": item_native_id,
                               "img": item_img,
                               "price": item_price,
                               "name": "",
                               "cat": ""}
            self.item_details(item_attributes=item_attributes, shop=task.shop_num, item_native_id=item_native_id, item_key=item_id)
    def item_details(self, item_attributes, shop, item_native_id, item_key):
        """Render the item popup in PhantomJS and fill name/desc/category."""
        d = self.d
        url = 'http://vk.com/market-' + str(shop) + '?w=product-' + str(shop) + '_' + str(item_native_id)
        d.get(url)
        d.implicitly_wait(.9)
        item_desc = d.find_element_by_id("market_item_description").text
        item_cat = d.find_element_by_class_name("market_item_category").text
        item_attributes['desc'] = item_desc
        # Name = first sentence of the description, capped at 80 chars.
        item_attributes['name'] = item_desc.split('.')[0][:80] # TODO hack
        item_attributes['cat'] = item_cat
        self.data.append(item_attributes)
    def fetch(self):
        """Run the spider to completion and return the scraped items."""
        self.run()
        return self.data
# def export_file(data,filename):
# filename = filename
# with open(filename, 'w') as f:
# json.dump(data, f)
# return json.dumps(data)
def main():
    # Demo entry point (Python 2 print statement): crawl a sample VK shop
    # and dump the scraped items to stdout.
    print Crawler('https://vk.com/spark.design').fetch()
if __name__ == '__main__':
main()
| {
"imports": [
"/utils.py",
"/vk_crawler.py"
]
} |
0-1-0/marketbot | refs/heads/master | /polling_listener.py | from gevent import monkey; monkey.patch_all()
from utils import Singleton
import telebot
import copy
import json
from app import MasterBot, Bot
class PollingProcessor(Singleton):
    """Long-polls Telegram getUpdates for every running bot and routes each
    new update into the in-process MasterBot."""
    tokens = {}  # token -> highest update_id seen so far
    mb = MasterBot({'token': open('token').read().replace('\n', '')})
    def get_updates(self, silent=False):
        """One polling pass over all registered bots.

        Returns True if at least one previously unseen update arrived.
        With silent=True updates are consumed but not routed (used to
        drain the backlog on startup).
        """
        res = False
        # Copy the key list: bots may register while we iterate.
        for token in copy.copy(Bot.bots.keys()):
            updates = telebot.apihelper.get_updates(token, offset=self.tokens.get(token) or 0)
            for update in updates:
                # FIX: default the stored id to 0 — a bare .get(token)
                # yields None for a new token, making the `>` comparison
                # undefined (always-True on Python 2, TypeError on Python 3).
                if update['update_id'] > (self.tokens.get(token) or 0):
                    self.tokens[token] = update['update_id']
                    res = True
                    if not silent:
                        self.mb.route_update(token, json.dumps(update))
        return res
    def start(self):
        # Drain any pending backlog silently, then poll forever.
        while self.get_updates(silent=True):
            pass
        while True:
            self.get_updates()
if __name__ == "__main__":
PollingProcessor().start()
| # -*- coding: utf-8 -*-
import sendgrid
import os
from sendgrid.helpers.mail import *
import re
import requests
import json
WED_ADMIN_DOMAIN = open('domain').read().split('\n')[0]
def get_address(lat, lng):
    """Reverse-geocode (lat, lng) into a Russian-language formatted address
    via the Google Maps Geocoding API."""
    url = ('http://maps.googleapis.com/maps/api/geocode/json?latlng='
           + str(lat) + ',' + str(lng) + '&language=ru')
    payload = json.loads(requests.get(url).content)
    return payload.get('results')[0].get('formatted_address')
class Singleton(object):
    """Lazy singleton base: every instantiation returns one shared object."""
    _instance = None
    def __new__(cls, *args, **kwargs):
        # Create the shared instance on first call only.
        if not cls._instance:
            instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
            cls._instance = instance
        return cls._instance
class Mailer(Singleton):
    """Singleton SendGrid sender for order-notification e-mails."""
    # Client is built at class-creation time from the SENDGRID_API_KEY env var.
    sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))
    def send(self, mail, subj, txt):
        """Send a plain-text e-mail to `mail` with subject `subj`.

        NOTE(review): the `mail` parameter (recipient address) is shadowed
        below by the sendgrid Mail object — works, but confusing.
        """
        from_email = Email("order@botmarket.com")
        subject = subj
        to_email = Email(mail)
        content = Content("text/plain", txt)
        mail = Mail(from_email, subject, to_email, content)
        return self.sg.client.mail.send.post(request_body=mail.get())
    def send_order(self, mail, order):
        """Format an order document as UTF-8 text (Python 2 bytes) and mail it."""
        res = 'Заказ\n====\n\n\n'
        res += '\n'.join(i['name'].encode('utf-8') + ' x ' + str(i['count']) for i in order['items'])
        res += '\n-----\n Итого: ' + str(order['total']) + ' руб.'
        res += '\n-----\n Детали доставки: \n'
        # Delivery values may be unicode or already-encoded str (Python 2).
        try:
            res += '\n\n'.join(k.encode('utf-8') + ': ' + v.encode('utf-8') for k, v in order['delivery'].items())
        except:
            res += '\n\n'.join(k + ': ' + v for k, v in order['delivery'].items())
        # Strip the word 'Ваш' from delivery-field labels.
        res = res.replace('Ваш', '')
        return self.send(mail, 'Новый заказ!', res)
def striphtml(data):
    """Replace simple HTML tags (<b>, <r...>, <a>, <i>, <span>, closers)
    with newlines and normalize a couple of leftover characters."""
    tag_pattern = re.compile(r'<[brai].*?>|<\/[a].*?>|<span.*?>|<\/span.*?>')
    cleaned = tag_pattern.sub('\n', data)
    cleaned = cleaned.replace(' ', ' ')
    return cleaned.replace('—', '-')
--- FILE SEPARATOR ---
# -*- coding: utf-8 -*-
import gevent
from gevent import monkey; monkey.patch_all()
import telebot
from telebot import apihelper
from pymongo import MongoClient
from views import *
from utils import get_address
import botan
import time
botan_token = 'BLe0W1GY8SwbNijJ0H-lroERrA9BnK0t'
class Convo(object):
def __init__(self, data, bot):
self.bot = bot
self.token = bot.token
self.db = bot.db
self.chat_id = data['chat_id']
self.views = {}
self.path = data.get('path')
self.tmpdata = None
def get_current_view(self):
if self.path and self.path[0] in self.views:
return self.views[self.path[0]].route(self.path[1:])
return None
def get_bot_data(self):
return self.db.bots.find_one({'token': self.token})
def _send_msg(self, msg1, markup):
try:
apihelper.send_message(self.token, self.chat_id, msg1, reply_markup=markup, parse_mode='HTML')
except Exception, e:
self.bot.log_error({'func': '_send_msg', 'token': self.token, 'chat_id': self.chat_id, 'message': msg1, 'error': str(e)})
def send_message(self, msg, markup=None):
if self.chat_id:
msg1 = msg.replace('<br />', '.\n')
gevent.spawn(self._send_msg, msg1, markup)
return
def edit_message(self, message_id, msg, markup=None):
if self.chat_id:
msg1 = msg.replace('<br />', '.\n')
gevent.spawn(apihelper.edit_message_text, self.token, msg1, self.chat_id, message_id=message_id, reply_markup=markup, parse_mode='HTML')
return
def process_message(self, message):
try:
txt = message.text.encode('utf-8')
except:
if hasattr(message, 'contact') and message.contact is not None:
txt = message.contact.phone_number
if hasattr(message, 'location') and message.location is not None:
txt = get_address(message.location.latitude, message.location.longitude).encode('utf-8')
if txt:
self.send_message(txt)
self.get_current_view().process_message(txt)
def process_photo(self, photo):
self.get_current_view().process_photo(photo)
def process_sticker(self, sticker):
self.get_current_view().process_sticker(sticker)
def process_video(self, video):
self.get_current_view().process_video(video)
def process_callback(self, callback):
self.get_current_view().process_callback(callback)
def process_file(self, doc):
self.get_current_view().process_file(doc)
def set_path(self, path):
self.path = path
gevent.spawn(self.db.convos.update_one, {'bot_token': self.bot.token, 'chat_id': self.chat_id}, {'$set': {'path': path}})
def route(self, path):
self.set_path(path)
self.get_current_view().activate()
class MarketBotConvo(Convo):
    """Customer-facing conversation for a shop bot."""
    def __init__(self, data, bot):
        super(MarketBotConvo, self).__init__(data, bot)
        self.current_basket = None
        # Static view tree for the shop chat.
        self.views['delivery'] = OrderCreatorView(self, [], final_message='Заказ сформирован!')
        self.views['menu_cat_view'] = MenuCatView(self, msg="Выберите категорию:")
        self.views['order_info'] = OrderInfoView(self, msg="Тут должны быть условия доставки", links={'Главное меню': ['main_view']})
        self.views['contacts'] = ContactsInfoView(self, links={'Главное меню': ['main_view']})
        self.views['history'] = HistoryView(self)
        self.views['main_view'] = NavigationView(self, links={
            "Меню": ['menu_cat_view'],
            "История": ['history'],
            "Доставка": ['order_info'], # ,
            "Контакты": ['contacts'] # ContactsInfoView(self.ctx)
        }, msg="Главное меню")
        self.path = data.get('path')
        # Fall back to the main menu when the persisted path is stale.
        if not self.get_current_view():
            self.route(['main_view'])
class MainConvo(Convo):
    """Admin-facing conversation with the master bot (shop management)."""
    def __init__(self, data, bot):
        super(MainConvo, self).__init__(data, bot)
        self.views['main_view'] = NavigationView(
            self,
            links={
                "Добавить магазин": ['add_view'],
                "Настройки": ['settings_view'],
                "Заказы": ['orders_view'],
                "Помощь": ['help_view'],
                "Рассылка новостей": ['mailing_view']
            },
            msg="Главное меню"
        )
        self.views['help_view'] = HelpView(self, links={'Назад': ['main_view']})
        # Shop-creation wizard: one Detail per attribute, asked in order.
        self.views['add_view'] = BotCreatorView(self, [
            TokenDetail('shop.token', name='API token.', desc='Для этого перейдите в @BotFather и нажмите /newbot для создания бота. Придумайте название бота (должно быть на русском языке) и ссылку на бот (на английском языке и заканчиваться на bot). Далее вы увидите API token, который нужно скопировать и отправить в этот чат.', ctx=self),
            EmailDetail('shop.email', name='email для приема заказов', ctx=self),
            FileDetail('shop.items', name='файл с описанием товаров или url магазина вконтакте', desc='<a href="https://github.com/0-1-0/marketbot/blob/master/sample.xlsx?raw=true">Пример файла</a>'),
            TextDetail('shop.delivery_info', name='текст с условиями доставки'),
            TextDetail('shop.contacts_info', name='текст с контактами для связи', value='telegram: @' + str(self.bot.bot.get_chat(self.chat_id).username)),
            NumberDetail('shop.total_threshold', name='минимальную сумму заказа', value='0')
        ], final_message='Магазин создан!')
        # Per-bot sections go through a SelectBotView picker first.
        self.views['settings_view'] = SelectBotView(self, bot_view={'link': 'settings_view', 'view': SettingsView})
        self.views['orders_view'] = SelectBotView(self, bot_view={'link': 'orders_view', 'view': OrdersView})
        self.views['mailing_view'] = SelectBotView(self, bot_view={'link': 'mailing_view', 'view': MailingView})
        self.path = data.get('path')
        # Fall back to the main menu when the persisted path is stale.
        if not self.get_current_view():
            self.route(['main_view'])
class Bot(object):
    """Base Telegram bot: registers itself and installs its webhook."""
    bots = {}  # token -> Bot instance (class-level registry)
    WEBHOOK_HOST = 'ec2-52-34-35-240.us-west-2.compute.amazonaws.com'
    WEBHOOK_PORT = 8443
    WEBHOOK_URL_BASE = "https://%s:%s" % (WEBHOOK_HOST, WEBHOOK_PORT)
    WEBHOOK_SSL_CERT = '/home/ubuntu/webhook_cert.pem'
    def __init__(self, token):
        self.token = token
        Bot.bots[self.token] = self
        # Webhook registration happens asynchronously.
        gevent.spawn(self.set_webhook, self.token)
    def log_error(self, e):
        # Overridden in subclasses; the base bot drops errors.
        pass
    def set_webhook(self, token, retries=0):
        """(Re)install the webhook, retrying up to 2 extra times on failure."""
        try:
            bot = telebot.TeleBot(token)
            bot.remove_webhook()
            bot.set_webhook(url=self.WEBHOOK_URL_BASE + '/' + bot.token + '/', certificate=open(self.WEBHOOK_SSL_CERT, 'r'))
            print token, 'registered'
        except Exception, e:
            self.log_error(e)
            print token, e
            if retries < 2:
                time.sleep(1)
                self.set_webhook(token, retries+1)
class MarketBot(Bot):
    """A single shop bot: owns its conversations, DB doc, and telebot handlers."""
    convo_type = MarketBotConvo
    def __init__(self, data, db=MongoClient()['marketbot']):
        # NOTE(review): the MongoClient() default is evaluated once at class
        # definition time and shared by all instances — confirm intended.
        super(MarketBot, self).__init__(data['token'])
        self.convos = {}  # chat_id -> Convo
        self.db = db
        # NOTE(review): update_one returns an UpdateResult (always truthy),
        # so the insert branch below looks unreachable — TODO confirm
        # (probably meant to check matched_count).
        if not self.db.bots.update_one({'token': self.token}, {'$set': apihelper.get_me(self.token)}):
            self.db.bots.insert_one({'token': self.token})
        self.email = data.get('email')
        self.last_update_id = data.get('last_update_id') or 0
        self._init_bot()
        # Restore every persisted conversation for this bot.
        for convo_data in self.db.convos.find({'bot_token': self.token}):
            self.init_convo(convo_data)
    def log_error(self, e):
        gevent.spawn(self.db.errors.insert_one, {'error': str(e)})
    def _init_bot(self, threaded=False):
        # Wire telebot handlers; the generic text/contact/location handler
        # is registered last so specific content types win.
        self.bot = telebot.TeleBot(self.token, threaded=threaded, skip_pending=True)
        self.bot.add_message_handler(self.goto_main, commands=['start'])
        self.bot.add_callback_query_handler(self.process_callback, func=lambda call: True)
        self.bot.add_message_handler(self.process_photo, content_types=['photo'])
        self.bot.add_message_handler(self.process_video, content_types=['video'])
        self.bot.add_message_handler(self.process_sticker, content_types=['sticker'])
        self.bot.add_message_handler(self.process_file, content_types=['document'])
        self.bot.add_message_handler(self.process_message, func=lambda message: True, content_types=['text', 'contact', 'location'])
    def init_convo(self, convo_data):
        self.convos[convo_data['chat_id']] = self.convo_type(convo_data, self)
    def get_convo(self, chat_id):
        """Return the convo for chat_id, creating and persisting it if new."""
        if chat_id not in self.convos:
            convo_data = {'chat_id': chat_id, 'bot_token': self.token}
            self.db.convos.insert_one(convo_data)
            self.init_convo(convo_data)
        return self.convos[chat_id]
    def goto_main(self, message):
        # /start handler: reset the conversation to the main menu.
        convo = self.get_convo(message.chat.id)
        convo.route(['main_view'])
    def process_callback(self, callback):
        convo = self.get_convo(callback.message.chat.id)
        gevent.spawn(convo.process_callback, callback)
    def process_message(self, message):
        convo = self.get_convo(message.chat.id)
        gevent.spawn(convo.process_message, message)
    def start_bot(self, bot_data):
        MarketBot(bot_data, self.db)
    def process_file(self, doc):
        convo = self.get_convo(doc.chat.id)
        convo.process_file(doc)
    def process_sticker(self, sticker):
        convo = self.get_convo(sticker.chat.id)
        convo.process_sticker(sticker)
    def process_video(self, video):
        convo = self.get_convo(video.chat.id)
        convo.process_video(video)
    def process_photo(self, photo):
        convo = self.get_convo(photo.chat.id)
        gevent.spawn(convo.process_photo, photo)
    def update_last_id(self):
        # Persist the high-water mark so restarts skip consumed updates.
        self.db.bots.update_one({'token': self.token}, {'$set': {'last_update_id': self.last_update_id}})
    def process_redis_update(self, update):
        """Feed one raw update (JSON string or Update) into telebot."""
        if isinstance(update, basestring):
            update = telebot.types.Update.de_json(update.encode('utf-8'))
        if update.update_id > self.last_update_id:
            self.last_update_id = update.update_id
            gevent.spawn(self.bot.process_new_updates, [update])
            gevent.spawn(self.update_last_id)
class MasterBot(MarketBot):
    """The admin-facing bot; also boots every shop bot found in the DB."""
    convo_type = MainConvo
    def process_message(self, message):
        # Track usage in Botan analytics, then handle normally.
        gevent.spawn(botan.track, botan_token, message.chat.id, {'from_user': message.from_user.username}, message.text)
        super(MasterBot, self).process_message(message)
    def __init__(self, data):
        super(MasterBot, self).__init__(data)
        # Start a MarketBot for every other bot registered in the DB;
        # one broken bot must not prevent the rest from starting.
        for bot_data in self.db.bots.find():
            if bot_data['token'] != self.token:
                try:
                    MarketBot(bot_data, self.db)
                except Exception, e:
                    self.log_error(e)
    def route_update(self, token, update):
        # Hand a raw update to the owning bot, if it is running.
        if token in Bot.bots:
            gevent.spawn(Bot.bots[token].process_redis_update, update)
        return
if __name__ == "__main__":
m = MasterBot({'token': open('token').read().replace('\n', '')})
gevent.spawn(m.run).join()
| {
"imports": [
"/utils.py",
"/app.py"
]
} |
00-a/Staff | refs/heads/master | /staff/serializers.py | from rest_framework import serializers
from .models import Employee
class RecursiveSerializer(serializers.Serializer):
    """Recursive for employee children: re-serializes each child with the
    enclosing serializer class, producing a nested subordinate tree."""
    def to_representation(self, data):
        # self.parent is the ListSerializer (many=True); its parent is the
        # enclosing serializer whose class we re-instantiate per child.
        serializer = self.parent.parent.__class__(data, context=self.context)
        return serializer.data
class StaffListSerializer(serializers.ModelSerializer):
    """List of staff: each employee with nested subordinates."""
    # Render position by its name rather than its primary key.
    position = serializers.SlugRelatedField(slug_field="name", read_only=True)
    # Recursively nest subordinates (reverse FK `children` on Employee).
    children = RecursiveSerializer(many=True)
    class Meta:
        model = Employee
        fields = '__all__'
class ChildrenEmployeeDetailSerializer(serializers.ModelSerializer):
    """Serializer for employee children in detail view: name/surname only,
    no recursion."""
    class Meta:
        model = Employee
        fields = ('name', 'surname')
class EmployeeDetailSerializer(serializers.ModelSerializer):
    """Details of a single employee, with chief and direct subordinates."""
    # Related records rendered by name instead of primary key.
    position = serializers.SlugRelatedField(slug_field="name", read_only=True)
    parent = serializers.SlugRelatedField(slug_field="name", read_only=True)
    # Flat (non-recursive) list of direct subordinates.
    children = ChildrenEmployeeDetailSerializer(many=True)
    class Meta:
        model = Employee
        fields = '__all__'
class EmployeeCreateSerializer(serializers.ModelSerializer):
    """Create a new employee; accepts all writable model fields."""
    class Meta:
        model = Employee
        fields = '__all__'
| from django.db import models
class Employee(models.Model):
    """Employee record; `parent` points to the employee's chief."""
    name = models.CharField(max_length=50)
    surname = models.CharField(max_length=50)
    # SET_NULL keeps the employee if their position is deleted.
    position = models.ForeignKey('Position', on_delete=models.SET_NULL, null=True)
    salary = models.PositiveIntegerField(default=0)
    # Self-referential chief link; reverse accessor is `children`
    # (used by the serializers for the subordinate tree).
    parent = models.ForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='children',
                               verbose_name='Chief')
    photo = models.ImageField(upload_to='staffphotos/', blank=True)
    # Set automatically on creation; not editable afterwards.
    employment_date = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return f'{self.name} {self.surname}'
    class Meta:
        verbose_name = 'Employee'
        verbose_name_plural = 'Staff'
class Position(models.Model):
    """Employee position (job title)."""
    name = models.CharField(max_length=100, verbose_name='Position name')
    def __str__(self):
        return self.name
| {
"imports": [
"/staff/models.py"
]
} |
00-a/Staff | refs/heads/master | /staff/urls.py | from django.urls import path
from .views import StaffListView, EmployeeDetailView, EmployeeCreateView
urlpatterns = [
path('staff/', StaffListView.as_view()),
path('staff/<int:pk>', EmployeeDetailView.as_view()),
path('staff/create', EmployeeCreateView.as_view())
] | from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import generics
from rest_framework.pagination import PageNumberPagination
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from .models import Employee
from .serializers import StaffListSerializer, EmployeeDetailSerializer, EmployeeCreateSerializer
from .services import StaffFilter
class StaffListView(generics.ListAPIView):
    """List of staff: paginated, filterable (see StaffFilter), admin-only."""
    serializer_class = StaffListSerializer
    filter_backends = (DjangoFilterBackend,)
    filterset_class = StaffFilter
    pagination_class = PageNumberPagination
    permission_classes = [IsAdminUser]
    # Only top-level employees (no chief); subordinates are rendered nested
    # via the serializer's recursive `children` field.
    queryset = Employee.objects.filter(parent=None)
class EmployeeDetailView(generics.RetrieveAPIView):
    """Employee detail by primary key; any authenticated user."""
    queryset = Employee.objects.all()
    permission_classes = [IsAuthenticated]
    serializer_class = EmployeeDetailSerializer
class EmployeeCreateView(generics.CreateAPIView):
    """Create a new employee; any authenticated user."""
    permission_classes = [IsAuthenticated]
    serializer_class = EmployeeCreateSerializer
| {
"imports": [
"/staff/views.py"
]
} |
00-a/Staff | refs/heads/master | /staff/views.py | from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import generics
from rest_framework.pagination import PageNumberPagination
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from .models import Employee
from .serializers import StaffListSerializer, EmployeeDetailSerializer, EmployeeCreateSerializer
from .services import StaffFilter
class StaffListView(generics.ListAPIView):
"""List of staff"""
serializer_class = StaffListSerializer
filter_backends = (DjangoFilterBackend,)
filterset_class = StaffFilter
pagination_class = PageNumberPagination
permission_classes = [IsAdminUser]
queryset = Employee.objects.filter(parent=None)
class EmployeeDetailView(generics.RetrieveAPIView):
"""Employee detail"""
queryset = Employee.objects.all()
permission_classes = [IsAuthenticated]
serializer_class = EmployeeDetailSerializer
class EmployeeCreateView(generics.CreateAPIView):
"""Create a new employee"""
permission_classes = [IsAuthenticated]
serializer_class = EmployeeCreateSerializer
| from django.db import models
class Employee(models.Model):
"""Employee. Parent - employee chief"""
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
position = models.ForeignKey('Position', on_delete=models.SET_NULL, null=True)
salary = models.PositiveIntegerField(default=0)
parent = models.ForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='children',
verbose_name='Chief')
photo = models.ImageField(upload_to='staffphotos/', blank=True)
employment_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return f'{self.name} {self.surname}'
class Meta:
verbose_name = 'Employee'
verbose_name_plural = 'Staff'
class Position(models.Model):
"""Employee position"""
name = models.CharField(max_length=100, verbose_name='Position name')
def __str__(self):
return self.name
--- FILE SEPARATOR ---
from rest_framework import serializers
from .models import Employee
class RecursiveSerializer(serializers.Serializer):
    """Field that re-serializes each child with the declaring serializer.

    Used as ``children = RecursiveSerializer(many=True)`` so that an
    arbitrarily deep reporting tree is rendered with one serializer class.
    """

    def to_representation(self, data):
        # With many=True, self.parent is the implicit ListSerializer; its
        # parent is the model serializer this field was declared on.
        owner_class = self.parent.parent.__class__
        serializer = owner_class(data, context=self.context)
        return serializer.data
class StaffListSerializer(serializers.ModelSerializer):
    """List of staff"""

    # Show the position's name instead of its primary key.
    position = serializers.SlugRelatedField(slug_field="name", read_only=True)
    # Subordinates rendered recursively with this same serializer.
    # NOTE(review): not marked read_only — confirm this serializer is used
    # for reads only (writing through a recursive nested field would fail).
    children = RecursiveSerializer(many=True)

    class Meta:
        model = Employee
        fields = '__all__'
class ChildrenEmployeeDetailSerializer(serializers.ModelSerializer):
    """Serializer for employee children in detail view"""

    class Meta:
        model = Employee
        # Keep the nested payload small: only the child's name and surname.
        fields = ('name', 'surname')
class EmployeeDetailSerializer(serializers.ModelSerializer):
    """Details of employee"""

    # Human-readable related fields: show names instead of primary keys.
    position = serializers.SlugRelatedField(slug_field="name", read_only=True)
    # NOTE(review): only the chief's first name is exposed; surname is not.
    parent = serializers.SlugRelatedField(slug_field="name", read_only=True)
    # Direct subordinates, reduced to name/surname pairs.
    children = ChildrenEmployeeDetailSerializer(many=True)

    class Meta:
        model = Employee
        fields = '__all__'
class EmployeeCreateSerializer(serializers.ModelSerializer):
    """Create a new employee"""

    class Meta:
        model = Employee
        # Exposes every model field; parent and position are written as PKs.
        fields = '__all__'
--- FILE SEPARATOR ---
from django_filters import rest_framework as filters
from staff.models import Employee
class StaffFilter(filters.FilterSet):
    """Filter staff by position, salary range and employment-date range."""

    # RangeFilter exposes min/max query parameters for the field.
    salary = filters.RangeFilter()
    employment_date = filters.RangeFilter()

    class Meta:
        model = Employee
        fields = ('position', 'salary', 'employment_date')
| {
"imports": [
"/staff/models.py",
"/staff/serializers.py",
"/staff/services.py"
]
} |
00-a/Staff | refs/heads/master | /staff/services.py | from django_filters import rest_framework as filters
from staff.models import Employee
class StaffFilter(filters.FilterSet):
    """Filter staff by position, salary range and employment-date range."""

    # RangeFilter exposes min/max query parameters for the field.
    salary = filters.RangeFilter()
    employment_date = filters.RangeFilter()

    class Meta:
        model = Employee
        fields = ('position', 'salary', 'employment_date')
| from django.db import models
class Employee(models.Model):
    """Employee. Parent - employee chief"""

    name = models.CharField(max_length=50)
    surname = models.CharField(max_length=50)
    # Optional: deleting a Position keeps the employee, FK becomes NULL.
    position = models.ForeignKey('Position', on_delete=models.SET_NULL, null=True)
    salary = models.PositiveIntegerField(default=0)
    # Self-referential link to the chief; top-level staff have parent=None.
    # Reverse accessor `children` yields the direct subordinates.
    parent = models.ForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='children',
                               verbose_name='Chief')
    photo = models.ImageField(upload_to='staffphotos/', blank=True)
    # Set automatically once, when the row is first created.
    employment_date = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        # Human-readable representation used in the admin and shell.
        return f'{self.name} {self.surname}'

    class Meta:
        verbose_name = 'Employee'
        verbose_name_plural = 'Staff'
class Position(models.Model):
    """Employee position"""

    name = models.CharField(max_length=100, verbose_name='Position name')

    def __str__(self):
        # Human-readable representation.
        return self.name
| {
"imports": [
"/staff/models.py"
]
} |
0000duck/hpp_source_code | refs/heads/master | /install/lib/python2.7/dist-packages/hpp/corbaserver/manipulation/__init__.py | import hpp_idl.hpp.manipulation_idl
from .client import Client
from .problem_solver import ProblemSolver, newProblem
from .constraint_graph import ConstraintGraph
from .constraint_graph_factory import ConstraintGraphFactory
from .constraints import Constraints
from .robot import CorbaClient, Robot
from hpp.corbaserver import loadServerPlugin, createContext
from hpp_idl.hpp.corbaserver.manipulation import Rule
| #!/usr/bin/env python
#
# Copyright (c) 2014 CNRS
# Author: Florent Lamiraux
#
# This file is part of hpp-manipulation-corba.
# hpp-manipulation-corba is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-manipulation-corba is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-manipulation-corba. If not, see
# <http://www.gnu.org/licenses/>.
from hpp.corbaserver.client import Client as _Parent
from hpp_idl.hpp.corbaserver.manipulation import Graph, Robot, Problem
class Client (_Parent):
    """
    Connect and create clients for hpp-manipulation library.
    """
    # Interface name -> corba stub class; a sub-client is created for each
    # entry by _makeClients.
    defaultClients = {
        'graph' : Graph,
        'problem': Problem,
        'robot' : Robot,
        }
    def __init__ (self, url = None, context = "corbaserver"):
        """
        Initialize CORBA and create default clients.
        :param url: URL in the IOR, corbaloc, corbalocs, and corbanames formats.
        For a remote corba server, use
        url = "corbaloc:iiop:<host>:<port>/NameService"
        """
        # Both helpers are inherited from hpp.corbaserver.client.Client.
        self._initOrb (url)
        self._makeClients ("manipulation", self.defaultClients, context)
--- FILE SEPARATOR ---
#!/usr/bin/env python
#
# Copyright (c) 2014 CNRS
# Author: Florent Lamiraux
#
# This file is part of hpp-manipulation-corba.
# hpp-manipulation-corba is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-manipulation-corba is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-manipulation-corba. If not, see
# <http://www.gnu.org/licenses/>.
def newProblem (client = None, name = None):
    """Reset the manipulation problem on the server.

    Creates a manipulation Client when none is supplied, then delegates to
    hpp.corbaserver.problem_solver.newProblem.
    """
    # Alias the imported function so it cannot shadow this one's name.
    from hpp.corbaserver.problem_solver import newProblem as _basic_new_problem
    if client is None:
        from hpp.corbaserver.manipulation import Client
        client = Client()
    _basic_new_problem (client = client, name = name)
from hpp.corbaserver.problem_solver import _convertToCorbaAny, ProblemSolver as Parent
## Definition of a manipulation planning problem
#
# This class wraps the Corba client to the server implemented by
# libhpp-manipulation-corba.so
#
# Some method implemented by the server can be considered as private. The
# goal of this class is to hide them and to expose those that can be
# considered as public.
class ProblemSolver (Parent):
    """Manipulation planning problem solver.

    Wraps the corba client to the server implemented by
    libhpp-manipulation-corba.so, hiding methods considered private and
    exposing the public API on top of the basic ProblemSolver.
    """
    def __init__ (self, robot):
        # Reuse the basic corba client already owned by the robot.
        super (ProblemSolver, self).__init__ (robot, hppcorbaClient = robot.client.basic)
    ## Select a problem by its name.
    # If no problem with this name exists, a new
    # hpp::manipulation::ProblemSolver is created and selected.
    # \param name the problem name.
    # \return true if a new problem was created.
    def selectProblem (self, name):
        return self.client.manipulation.problem.selectProblem (name)
    ## Return a list of available elements of type type
    # \param type enter "type" to know what types I know of.
    # This is case insensitive.
    def getAvailable (self, type):
        if type.lower () == "type":
            # Special query: concatenate the element types known by both servers.
            res = self.client.basic.problem.getAvailable (type) + \
                self.client.manipulation.problem.getAvailable (type)
            return res
        try:
            return self.client.basic.problem.getAvailable (type)
        except:
            # Broad on purpose: fall back to the manipulation server when the
            # basic server does not know this element type.
            return self.client.manipulation.problem.getAvailable (type)
    ## Return a list of selected elements of type type
    # \param type enter "type" to know what types I know of.
    # This is case insensitive.
    # \note For most of the types, the list will contain only one element.
    def getSelected (self, type):
        try:
            return self.client.basic.problem.getSelected (type)
        except:
            # Same deliberate fallback pattern as getAvailable.
            return self.client.manipulation.problem.getSelected (type)
    ## \name Constraints
    # \{
    ## Create placement and pre-placement constraints
    #
    # \param width set to None to skip creation of pre-placement constraint
    #
    # NOTE(review): returns the pair (name, prename) when width is not None,
    # but the bare name otherwise — callers must handle both shapes.
    # See hpp::corbaserver::manipulation::Problem::createPlacementConstraint
    # and hpp::corbaserver::manipulation::Problem::createPrePlacementConstraint
    def createPlacementConstraints (self, placementName, shapeName, envContactName, width = 0.05):
        name = placementName
        self.client.manipulation.problem.createPlacementConstraint (name, shapeName, envContactName)
        if width is not None:
            prename = "pre_" + name
            self.client.manipulation.problem.createPrePlacementConstraint (prename, shapeName, envContactName, width)
            return name, prename
        return name
    ## Return balance constraints created by method
    # ProblemSolver.createStaticStabilityConstraints
    def balanceConstraints (self):
        # NOTE(review): balanceConstraints_ is never assigned in this class;
        # presumably set by the parent class — verify before relying on it.
        return self.balanceConstraints_
    ## Get whether right hand side of a numerical constraint is constant
    # \param constraintName Name of the numerical constraint,
    # \return whether right hand side is constant
    # \note LockedJoint have non constant right hand side
    def getConstantRightHandSide (self, constraintName) :
        # LockedJoint constraints are short-circuited locally; everything else
        # is asked to the basic server.
        if constraintName in self.getAvailable ('LockedJoint'):
            return False
        return self.client.basic.problem.getConstantRightHandSide \
            (constraintName)
    ## Lock degree of freedom of a FreeFlyer joint
    # \param freeflyerBname base name of the joint
    # (It will be completed by '_xyz' and '_SO3'),
    # \param lockJointBname base name of the LockedJoint constraints
    # (It will be completed by '_xyz' and '_SO3'),
    # \param values config of the locked joints (7 float)
    # NOTE(review): despite the doc above, a single LockedJoint constraint is
    # created here (no '_xyz'/'_SO3' suffixing is visible in this code).
    def lockFreeFlyerJoint (self, freeflyerBname, lockJointBname,
                            values = (0,0,0,0,0,0,1)):
        lockedJoints = list ()
        self.createLockedJoint (lockJointBname, freeflyerBname, values)
        lockedJoints.append (lockJointBname)
        return lockedJoints
    ## Lock degree of freedom of a planar joint
    # \param jointName name of the joint
    # (It will be completed by '_xy' and '_rz'),
    # \param lockJointName name of the LockedJoint constraint
    # \param values config of the locked joints (4 float)
    def lockPlanarJoint (self, jointName, lockJointName, values = (0,0,1,0)):
        lockedJoints = list ()
        self.createLockedJoint (lockJointName, jointName, values)
        lockedJoints.append (lockJointName)
        return lockedJoints
    ## \}
    ## \name Solve problem and get paths
    # \{
    ## Set the problem target to stateId
    # The planner will look for a path from the init configuration to a configuration in
    # state stateId
    def setTargetState (self, stateId):
        self.client.manipulation.problem.setTargetState(stateId)
    ## \}
--- FILE SEPARATOR ---
#!/usr/bin/env python
#
# Copyright (c) 2017 CNRS
# Author: Joseph Mirabel
#
# This file is part of hpp-manipulation-corba.
# hpp-manipulation-corba is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-manipulation-corba is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-manipulation-corba. If not, see
# <http://www.gnu.org/licenses/>.
import re, abc, sys
from .constraints import Constraints
def _removeEmptyConstraints (problem, constraints):
return [ n for n in constraints if problem.getConstraintDimensions(n)[2] > 0 ]
class Rules(object):
    """Compiled grasp-acceptance rules.

    Each input rule maps gripper-name regexps to handle-name regexps and
    carries a boolean status (its ``link`` attribute).  Calling the instance
    with a grasp vector returns the status of the first matching rule, or
    ``defaultAcceptation`` when no rule applies.
    """

    def __init__ (self, grippers, handles, rules):
        compiled = []
        statuses = []
        for rule in rules:
            # An empty pattern would match every name; replace it (in place,
            # on the caller's Rule object) with "^$" so it matches nothing.
            for idx, pattern in enumerate(rule.grippers):
                if pattern == "":
                    rule.grippers[idx] = "^$"
            for idx, pattern in enumerate(rule.handles):
                if pattern == "":
                    rule.handles[idx] = "^$"
            # One compiled handle regexp per gripper, or None when the rule
            # does not constrain that gripper.
            per_gripper = [None] * len(grippers)
            for j, gripper_pattern in enumerate(rule.grippers):
                matcher = re.compile(gripper_pattern)
                for i, gripper_name in enumerate(grippers):
                    if matcher.match(gripper_name):
                        # Two gripper patterns of one rule must not target
                        # the same gripper.
                        assert per_gripper[i] is None
                        per_gripper[i] = re.compile(rule.handles[j])
            statuses.append(rule.link)
            compiled.append(tuple(per_gripper))
        self.rules = tuple(compiled)
        self.status = tuple(statuses)
        self.handles = tuple(handles)
        self.defaultAcceptation = False

    def __call__ (self, grasps):
        """Return the status of the first rule matching *grasps*."""
        for handle_regexps, status in zip(self.rules, self.status):
            # A rule applies when every constrained gripper's handle name
            # (empty string for no grasp) matches its regexp.
            matches = all(
                regexp is None or
                regexp.match("" if grasps[i] is None else self.handles[grasps[i]])
                for i, regexp in enumerate(handle_regexps))
            if matches:
                return status
        return self.defaultAcceptation
# Compatibility shim: provide an ABC base class on both Python 2 and 3.
if sys.version_info.major == 2:
    class ABC:
        """ Python 2.7 equivalent to abc.ABC Python 3 class."""
        __metaclass__ = abc.ABCMeta
else:
    from abc import ABC
## An abstract class which loops over the different (gripper, handle) associations.
#
# The behaviour can be tuned by setting the callback functions:
# - \ref graspIsAllowed (redundant with \ref setRules)
# - \ref constraint_graph_factory_algo_callbacks "Algorithm steps"
class GraphFactoryAbstract(ABC):
    """Abstract driver that enumerates (gripper, handle) associations.

    Subclasses implement makeState / makeLoopTransition / makeTransition to
    materialize the states and transitions found by generate().
    """
    def __init__(self):
        ## Reduces the problem combinatorial.
        # Function called to check whether a grasps is allowed.
        # It takes as input a list of handle indices (or None) such
        # that i-th \ref grippers grasps `grasps[i]`-th \ref handles.
        # It must return a boolean
        #
        # It defaults to: \code lambda x : True
        self.graspIsAllowed = lambda x : True
        ## \name Internal variables
        # \{
        # Memoized states keyed by grasp vector, and the set of created
        # transition name pairs.
        self.states = dict()
        self.transitions = set()
        ## the handle names
        self.handles = tuple() # strings
        ## the gripper names
        self.grippers = tuple() # strings
        ## the names of contact on the environment
        self.envContacts = tuple () # strings
        ## the object names
        self.objects = tuple () # strings
        ## See \ref setObjects
        self.handlesPerObjects = tuple () # object index to handle indices
        ## See \ref setObjects
        self.objectFromHandle = tuple () # handle index to object index
        ## See \ref setObjects
        self.contactsPerObjects = tuple ()# object index to contact names
        ## \}
    ## \name Main API
    # \{
    ## \param grippers list of gripper names to be considered
    def setGrippers(self, grippers):
        assert isinstance (grippers, (list, tuple))
        self.grippers = tuple(grippers)
    ## \param objects list of object names to be considered
    ## \param handlesPerObjects a list of list of handle names.
    ## \param contactsPerObjects a list of list of contact names.
    ## handlesPerObjects and contactsPerObjects must have one list for each object, in the same order.
    def setObjects(self, objects, handlesPerObjects, contactsPerObjects):
        self.objects = tuple(objects)
        handles = []
        hpo = []
        cpo = []
        ofh = []
        for io, o in enumerate (self.objects):
            # Handle indices are global: each object owns a contiguous range.
            hpo.append( tuple(range(len(handles), len(handles) + len(handlesPerObjects[io])) ) )
            handles.extend(handlesPerObjects[io])
            ofh.extend( [ io, ] * len(handlesPerObjects[io]) )
            cpo.append( tuple(contactsPerObjects[io]) )
        self.handles = tuple(handles)
        self.handlesPerObjects = tuple(hpo)
        self.objectFromHandle = tuple(ofh)
        self.contactsPerObjects = tuple(cpo)
    ## \param envContacts contact on the environment to be considered.
    def environmentContacts (self, envContacts):
        self.envContacts = tuple(envContacts)
    ## Set the function \ref graspIsAllowed
    ## \param rules a list of Rule objects
    def setRules (self, rules):
        self.graspIsAllowed = Rules(self.grippers, self.handles, rules)
    ## Go through the combinatorial defined by the grippers and handles
    # and create the states and transitions.
    def generate(self):
        # Start from the "free" state: no gripper grasps anything.
        grasps = ( None, ) * len(self.grippers)
        self._recurse(self.grippers, self.handles, grasps, 0)
    ## \}
    ## \name Abstract methods of the algorithm
    # \anchor constraint_graph_factory_algo_callbacks
    # \{
    ## Create a new state.
    # \param grasps a handle index for each gripper, as in GraphFactoryAbstract.graspIsAllowed.
    # \param priority the state priority.
    # \return an object representing the state.
    @abc.abstractmethod
    def makeState(self, grasps, priority): return grasps
    ## Create a loop transition.
    # \param state: an object returned by \ref makeState which represent the state
    @abc.abstractmethod
    def makeLoopTransition(self, state): pass
    ## Create two transitions between two different states.
    # \param stateFrom: same as grasps in \ref makeState
    # \param stateTo: same as grasps in \ref makeState
    # \param ig: index of the grasp vector that changes, i.e. such that
    # - \f$ stateFrom.grasps[i_g] \neq stateTo.grasps[i_g] \f$
    # - \f$ \forall i \neq i_g, stateFrom.grasps[i] = stateTo.grasps[i] \f$
    @abc.abstractmethod
    def makeTransition(self, stateFrom, stateTo, ig): pass
    ## \}
    def _makeState(self, grasps, priority):
        # Memoize: each distinct grasp vector is materialized exactly once,
        # with its loop transition created on first sight.
        if grasps not in self.states:
            state = self.makeState (grasps, priority)
            self.states[grasps] = state
            # Create loop transition
            self.makeLoopTransition (state)
        else:
            state = self.states [grasps]
        return state
    def _isObjectGrasped(self, grasps, object):
        # True when any handle of `object` appears in the grasp vector.
        for h in self.handlesPerObjects[object]:
            if h in grasps:
                return True
        return False
    def _stateName (self, grasps, abbrev = False):
        # Readable ("g grasps h : ...") or abbreviated ("0-1:...") state name.
        sepGH = "-" if abbrev else " grasps "
        sep = ":" if abbrev else " : "
        name = sep.join([ (str(ig) if abbrev else self.grippers[ig]) + sepGH + (str(ih) if abbrev else self.handles[ih]) for ig,ih in enumerate(grasps) if ih is not None ])
        if len(name) == 0: return "f" if abbrev else "free"
        return name
    def _transitionNames (self, sFrom, sTo, ig):
        # Forward (">", grasp) and backward ("<", release) transition names.
        g = self.grippers[ig]
        h = self.handles[sTo.grasps[ig]]
        sep = " | "
        return (g + " > " + h + sep + self._stateName(sFrom.grasps, True),
                g + " < " + h + sep + self._stateName(sTo.grasps, True),)
    def _loopTransitionName (self, grasps):
        return "Loop | " + self._stateName(grasps, True)
    def _recurse(self, grippers, handles, grasps, depth):
        # Depth-first enumeration: at each level, try assigning every
        # remaining handle to every remaining gripper, linking allowed
        # neighbouring states with a transition.
        isAllowed = self.graspIsAllowed (grasps)
        if isAllowed: current = self._makeState (grasps, depth)
        if len(grippers) == 0 or len(handles) == 0: return
        for ig, g in enumerate(grippers):
            ngrippers = grippers[:ig] + grippers[ig+1:]
            isg = self.grippers.index(g)
            for ih, h in enumerate(handles):
                nhandles = handles[:ih] + handles[ih+1:]
                ish = self.handles.index(h)
                nGrasps = grasps[:isg] + (ish, ) + grasps[isg+1:]
                nextIsAllowed = self.graspIsAllowed (nGrasps)
                if nextIsAllowed: next = self._makeState (nGrasps, depth + 1)
                if isAllowed and nextIsAllowed:
                    self.makeTransition (current, next, isg)
                self._recurse (ngrippers, nhandles, nGrasps, depth + 2)
## An abstract class which stores the constraints.
#
# Child classes are responsible for building them.
# - \ref buildGrasp
# - \ref buildPlacement
class ConstraintFactoryAbstract(ABC):
    """Caches elementary grasp and placement constraints.

    Child classes build the actual constraints via buildGrasp and
    buildPlacement; this class memoizes the results per (gripper, handle)
    pair and per object.
    """
    def __init__(self, graphfactory):
        # Caches: _grasp keyed by (gripper index, handle index),
        # _placement keyed by object index.
        self._grasp = dict()
        self._placement = dict()
        self.graphfactory = graphfactory
    ## \name Accessors to the different elementary constraints
    # \{
    def getGrasp(self, gripper, handle):
        # Accept either names (str) or indices for both arguments.
        if isinstance(gripper, str): ig = self.graphfactory.grippers.index(gripper)
        else: ig = gripper
        if isinstance(handle, str): ih = self.graphfactory.handles.index(handle)
        else: ih = handle
        k = (ig, ih)
        if k not in self._grasp:
            self._grasp[k] = self.buildGrasp(self.graphfactory.grippers[ig], None if ih is None else self.graphfactory.handles[ih])
            assert isinstance (self._grasp[k], dict)
        return self._grasp[k]
    def g (self, gripper, handle, what):
        """Shorthand: one entry of the grasp constraint dict."""
        return self.getGrasp(gripper, handle)[what]
    def getPlacement(self, object):
        # Accept either an object name (str) or an object index.
        if isinstance(object, str): io = self.graphfactory.objects.index(object)
        else: io = object
        k = io
        if k not in self._placement:
            self._placement[k] = self.buildPlacement(self.graphfactory.objects[io])
        return self._placement[k]
    def p (self, object, what):
        """Shorthand: one entry of the placement constraint dict."""
        return self.getPlacement(object)[what]
    ## \}
    ## Function called to create grasp constraints.
    # Must return a tuple of Constraints objects as:
    # - constraint that validates the grasp
    # - constraint that parameterizes the graph
    # - constraint that validates the pre-grasp
    # \param g gripper string
    # \param h handle string
    @abc.abstractmethod
    def buildGrasp (self, g, h):
        return (None, None, None,)
    ## Function called to create placement constraints.
    # Must return a tuple of Constraints objects as:
    # - constraint that validates placement
    # - constraint that parameterizes placement
    # - constraint that validates pre-placement
    # \param o string
    @abc.abstractmethod
    def buildPlacement (self, o):
        return (None, None, None,)
## Default implementation of ConstraintFactoryAbstract
class ConstraintFactory(ConstraintFactoryAbstract):
    """Default implementation of ConstraintFactoryAbstract.

    Creates grasp and placement constraints on the corba server through the
    ConstraintGraph given at construction.
    """
    # Keys of the dicts returned by buildGrasp / buildPlacement.
    gfields = ('grasp', 'graspComplement', 'preGrasp')
    pfields = ('placement', 'placementComplement', 'prePlacement')
    def __init__ (self, graphfactory, graph):
        super (ConstraintFactory, self).__init__(graphfactory)
        self.graph = graph
        ## Select whether placement should be strict or relaxed.
        # \sa buildStrictPlacement, buildRelaxedPlacement
        self.strict = False
    ## Calls ConstraintGraph.createGrasp and ConstraintGraph.createPreGrasp
    ## \param g gripper string
    ## \param h handle string
    def buildGrasp (self, g, h):
        n = g + " grasps " + h
        pn = g + " pregrasps " + h
        self.graph.createGrasp (n, g, h)
        self.graph.createPreGrasp (pn, g, h)
        # Zero-dimensional constraints are dropped so they do not clutter
        # the resulting graph.
        return dict ( list(zip (self.gfields, (
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ n, ])),
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ n + "/complement", ])),
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ pn, ])),
            ))))
    ## Dispatch to strict or relaxed placement depending on \ref strict.
    ## \param o string
    def buildPlacement (self, o):
        if self.strict:
            return self.buildStrictPlacement (o)
        else:
            return self.buildRelaxedPlacement (o)
    ## This implements strict placement manifolds,
    ## where the parameterization constraints is the complement
    ## of the placement constraint.
    ## \param o string
    def buildStrictPlacement (self, o):
        n = "place_" + o
        pn = "preplace_" + o
        width = 0.05
        io = self.graphfactory.objects.index(o)
        placeAlreadyCreated = n in self.graph.clientBasic.problem.getAvailable ("numericalconstraint")
        if (len(self.graphfactory.contactsPerObjects[io]) == 0 or len(self.graphfactory.envContacts) == 0) and not placeAlreadyCreated:
            # No contact surfaces available: fall back to locking every joint
            # of the object.  The loop variable is `jn` (not `n`) so it does
            # not shadow the placement-constraint name above — consistent
            # with buildRelaxedPlacement.
            ljs = []
            for jn in self.graph.clientBasic.robot.getJointNames():
                if jn.startswith(o + "/"):
                    ljs.append(jn)
                    q = self.graph.clientBasic.robot.getJointConfig(jn)
                    self.graph.clientBasic.problem.createLockedJoint(jn, jn, q)
            return dict ( list(zip (self.pfields, (Constraints (), Constraints (lockedJoints = ljs), Constraints (),))))
        if not placeAlreadyCreated:
            self.graph.client.problem.createPlacementConstraint (n, self.graphfactory.contactsPerObjects[io], self.graphfactory.envContacts)
        if pn not in self.graph.clientBasic.problem.getAvailable ("numericalconstraint"):
            self.graph.client.problem.createPrePlacementConstraint (pn, self.graphfactory.contactsPerObjects[io], self.graphfactory.envContacts, width)
        return dict ( list(zip (self.pfields, (
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ n, ])),
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ n + "/complement", ])),
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ pn, ])),
            ))))
    ## This implements relaxed placement manifolds,
    ## where the parameterization constraints is the LockedJoint of
    ## the object root joint
    ## \param o string
    def buildRelaxedPlacement (self, o):
        n = "place_" + o
        pn = "preplace_" + o
        width = 0.05
        io = self.graphfactory.objects.index(o)
        # Lock every joint belonging to the object; these locked joints
        # parameterize the placement manifold.
        ljs = []
        for jn in self.graph.clientBasic.robot.getJointNames():
            if jn.startswith(o + "/"):
                ljs.append(jn)
                q = self.graph.clientBasic.robot.getJointConfig(jn)
                self.graph.clientBasic.problem.createLockedJoint(jn, jn, q)
        placeAlreadyCreated = n in self.graph.clientBasic.problem.getAvailable ("numericalconstraint")
        if (len(self.graphfactory.contactsPerObjects[io]) == 0 or len(self.graphfactory.envContacts) == 0) and not placeAlreadyCreated:
            return dict ( list(zip (self.pfields, (Constraints (), Constraints (lockedJoints = ljs), Constraints (),))))
        if not placeAlreadyCreated:
            self.graph.client.problem.createPlacementConstraint (n, self.graphfactory.contactsPerObjects[io], self.graphfactory.envContacts)
        if pn not in self.graph.clientBasic.problem.getAvailable ("numericalconstraint"):
            self.graph.client.problem.createPrePlacementConstraint (pn, self.graphfactory.contactsPerObjects[io], self.graphfactory.envContacts, width)
        return dict ( list(zip (self.pfields, (
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ n, ])),
            Constraints (lockedJoints = ljs),
            Constraints (numConstraints = _removeEmptyConstraints(self.graph.clientBasic.problem, [ pn, ])),))))
## Default implementation of ConstraintGraphFactory
#
# The minimal usage is the following:
# \code
# graph = ConstraintGraph (robot, "graph")
#
# # Required calls
# factory = ConstraintGraphFactory (graph)
# factory.setGrippers (["gripper1", ... ])
# factory.setObjects (["object1", ], [ [ "object1/handle1", ... ] ], [ [] ])
#
# # Optionally
# factory.environmentContacts (["contact1", ... ])
# factory.setRules ([ Rule (["gripper1", ..], ["handle1", ...], True), ... ])
#
# factory.generate ()
# # graph is initialized
# \endcode
class ConstraintGraphFactory(GraphFactoryAbstract):
    """Default implementation of GraphFactoryAbstract, writing states and
    transitions into a ConstraintGraph."""
    class StateAndManifold:
        """A graph state with its manifold (defining constraints) and
        foliation (parameterizing constraints)."""
        def __init__ (self, factory, grasps, id, name):
            self.grasps = grasps
            self.id = id
            self.name = name
            self.manifold = Constraints()
            self.foliation = Constraints()
            # Add the grasps
            for ig, ih in enumerate(grasps):
                if ih is not None:
                    self.manifold += factory.constraints.g (ig, ih, 'grasp')
                    self.foliation += factory.constraints.g (ig, ih, 'graspComplement')
            # Add the placement constraints
            # (only for objects not held by any gripper in this state).
            for io, object in enumerate(factory.objects):
                if not factory._isObjectGrasped(grasps, io):
                    self.manifold += factory.constraints.p (object, 'placement')
                    self.foliation += factory.constraints.p (object, 'placementComplement')
    ## \param graph an instance of ConstraintGraph
    def __init__(self, graph):
        super (ConstraintGraphFactory, self).__init__()
        ## Stores the constraints in a child class of ConstraintFactoryAbstract
        self.constraints = ConstraintFactory (self, graph)
        self.graph = graph
    ## \name Default functions
    # \{
    def makeState(self, grasps, priority):
        # Create state
        name = self._stateName (grasps)
        nid = self.graph.createNode (name, False, priority)
        state = ConstraintGraphFactory.StateAndManifold (self, grasps, nid, name)
        # Add the constraints
        self.graph.addConstraints (node = name, constraints = state.manifold)
        return state
    def makeLoopTransition(self, state):
        # Weight 0: loop edges are never chosen for extension, only projection.
        n = self._loopTransitionName (state.grasps)
        self.graph.createEdge (state.name, state.name, n, weight = 0, isInNode = state.name)
        self.graph.addConstraints (edge = n, constraints = state.foliation)
    def makeTransition(self, stateFrom, stateTo, ig):
        """Create the forward/backward waypoint transitions between two
        states differing by exactly one grasp (gripper index ig)."""
        sf = stateFrom
        st = stateTo
        grasps = sf.grasps
        nGrasps = st.grasps
        names = self._transitionNames(sf, st, ig)
        if names in self.transitions:
            # This pair of transitions was already created.
            return
        iobj = self.objectFromHandle [st.grasps[ig]]
        obj = self.objects[iobj]
        # No placement constraint needed when the object is already held by
        # another gripper in the source state.
        noPlace = self._isObjectGrasped (sf.grasps, iobj)
        gc = self.constraints.g (ig, st.grasps[ig], 'grasp')
        gcc = self.constraints.g (ig, st.grasps[ig], 'graspComplement')
        pgc = self.constraints.g (ig, st.grasps[ig], 'preGrasp')
        if noPlace:
            pc = Constraints()
            pcc = Constraints()
            ppc = Constraints()
        else:
            pc = self.constraints.p (self.objectFromHandle[st.grasps[ig]], 'placement')
            pcc = self.constraints.p (self.objectFromHandle[st.grasps[ig]], 'placementComplement')
            ppc = self.constraints.p (self.objectFromHandle[st.grasps[ig]], 'prePlacement')
        manifold = sf.manifold - pc
        # The different cases:
        pregrasp = not pgc.empty()
        intersec = (not gc.empty()) and (not pc.empty())
        preplace = not ppc.empty()
        # Booleans are summed as 0/1 here.
        nWaypoints = pregrasp + intersec + preplace
        nTransitions = 1 + nWaypoints
        nStates = 2 + nWaypoints
        def _createWaypointState (name, constraints):
            # Helper: create a waypoint node carrying `constraints`.
            self.graph.createNode (name, True)
            self.graph.addConstraints (node = name, constraints = constraints)
            return name
        # Create waypoint states
        intersection = 0
        wStates = [ sf.name, ]
        if pregrasp:
            wStates.append (_createWaypointState (names[0] + "_pregrasp",
                pc + pgc + manifold))
        if intersec:
            wStates.append (_createWaypointState (names[0] + "_intersec",
                pc + gc + manifold))
        if preplace:
            wStates.append (_createWaypointState (names[0] + "_preplace",
                ppc + gc + manifold))
        wStates.append(st.name)
        # Link waypoints
        transitions = names[:]
        if nWaypoints > 0:
            self.graph.createWaypointEdge (sf.name, st.name, names[0], nWaypoints, automaticBuilder = False)
            self.graph.createWaypointEdge (st.name, sf.name, names[1], nWaypoints, automaticBuilder = False)
            wTransitions = []
            for i in range(nTransitions):
                # Forward edge i->(i+1) and backward edge (i+1)->i between
                # consecutive waypoint states.
                nf = "{0}_{1}{2}".format(names[0], i, i+1)
                nb = "{0}_{2}{1}".format(names[1], i, i+1)
                self.graph.createEdge (wStates[i], wStates[i+1], nf, -1)
                self.graph.createEdge (wStates[i+1], wStates[i], nb, -1)
                self.graph.graph.setWaypoint (self.graph.edges[transitions[0]],
                    i, self.graph.edges[nf], self.graph.nodes[wStates[i+1]])
                self.graph.graph.setWaypoint (self.graph.edges[transitions[1]],
                    nTransitions - 1 - i, self.graph.edges[nb], self.graph.nodes[wStates[i]])
                wTransitions.append ( (nf, nb) )
            # Set states
            # The first M sub-transitions live in the source state's
            # foliation, the rest in the target state's.
            M = 0 if gc.empty() else 1 + pregrasp
            for i in range(M):
                self.graph.setContainingNode (wTransitions[i][0], sf.name)
                self.graph.addConstraints (edge = wTransitions[i][0], constraints = sf.foliation)
                self.graph.setContainingNode (wTransitions[i][1], sf.name)
                self.graph.addConstraints (edge = wTransitions[i][1], constraints = sf.foliation)
            for i in range(M, nTransitions):
                self.graph.setContainingNode (wTransitions[i][0], st.name)
                self.graph.addConstraints (edge = wTransitions[i][0], constraints = st.foliation)
                self.graph.setContainingNode (wTransitions[i][1], st.name)
                self.graph.addConstraints (edge = wTransitions[i][1], constraints = st.foliation)
            # Set all to short except first one.
            for i in range(nTransitions - 1):
                self.graph.setShort (wTransitions[i + 1][0], True)
                self.graph.setShort (wTransitions[i ][1], True)
        else:
            #TODO This case will likely never happen
            raise NotImplementedError("This case has not been implemented")
            # NOTE(review): the two calls below are unreachable (after raise).
            self.graph.createEdge (sf.name, st.name, names[0])
            self.graph.createEdge (st.name, sf.name, names[1])
        self.transitions.add(names)
    ## \}
--- FILE SEPARATOR ---
#!/usr/bin/env python
# Copyright (c) 2014 CNRS
# Author: Florent Lamiraux
#
# This file is part of hpp-manipulation-corba.
# hpp-manipulation-corba is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-manipulation-corba is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-manipulation-corba. If not, see
# <http://www.gnu.org/licenses/>.
import warnings
from hpp import Transform
from hpp.corbaserver.manipulation import Client as ManipulationClient
from hpp.corbaserver import Client as BasicClient
from hpp.corbaserver.robot import Robot as Parent
## Corba clients to the various servers
#
class CorbaClient:
    """
    Container for corba clients to various interfaces.
    """
    def __init__ (self, url = None, context = "corbaserver"):
        # One client per server interface: the basic hpp-corbaserver one and
        # the manipulation-specific one, both on the same url/context.
        self.basic = BasicClient (url = url, context = context)
        self.manipulation = ManipulationClient (url = url, context = context)
## Load and handle a composite robot for manipulation planning
#
# A composite robot is a kinematic chain composed of several sub-kinematic
# chains rooted at an anchor joint.
class Robot (Parent):
## Constructor
# \param robotName name of the first robot that is loaded now,
# \param rootJointType type of root joint among ("freeflyer", "planar",
# "anchor"),
# \param load whether to actually load urdf files. Set to no if you only
# want to initialize a corba client to an already initialized
# problem.
    def __init__ (self, compositeName = None, robotName = None, rootJointType = None, load = True, client = None):
        if client is None: client = CorbaClient()
        # The parent is always constructed with load = False: model loading
        # is handled below so the manipulation server performs it.
        super (Robot, self).__init__ (robotName = compositeName,
                                      rootJointType = rootJointType,
                                      load = False, client = client,
                                      hppcorbaClient = client.basic)
        # Maps inserted robot name -> its root joint type.
        self.rootJointType = dict()
        if compositeName is None:
            # Without a composite name there is nothing to create server-side.
            load = False
        self.load = load
        self.robotNames = list()
        if robotName is None:
            if load:
                self.client.basic.robot.createRobot (self.name)
        else:
            self.loadModel (robotName, rootJointType)
    ## Virtual function to load the robot model
    def loadModel (self, robotName, rootJointType):
        if self.load:
            self.client.basic.robot.createRobot (self.name)
        # Uses urdf/srdf attributes (packageName, urdfName, ...) expected to
        # be defined on the concrete Robot class — TODO confirm.
        self.insertRobotModel (robotName, rootJointType, self.packageName,
                               self.urdfName, self.urdfSuffix, self.srdfSuffix)
## Load robot model and insert it in the device
#
# \param robotName key of the robot in hpp::manipulation::ProblemSolver object
# map (see hpp::manipulation::ProblemSolver::addRobot)
# \param rootJointType type of root joint among "anchor", "freeflyer",
# "planar",
# \param packageName Name of the ROS package containing the model,
# \param modelName Name of the package containing the model
# \param urdfSuffix suffix for urdf file,
#
# The ros url are built as follows:
# \li "package://${packageName}/urdf/${modelName}${urdfSuffix}.urdf"
# \li "package://${packageName}/srdf/${modelName}${srdfSuffix}.srdf"
def insertRobotModel (self, robotName, rootJointType, packageName,
modelName, urdfSuffix, srdfSuffix):
if self.load:
self.client.manipulation.robot.insertRobotModel (robotName,
rootJointType, packageName, modelName, urdfSuffix,
srdfSuffix)
self.robotNames.append (robotName)
self.rootJointType[robotName] = rootJointType
self.rebuildRanks ()
## Insert robot model as a child of a frame of the Device
#
# \param robotName key of the robot in ProblemSolver object map
# (see hpp::manipulation::ProblemSolver::addRobot)
# \param frameName name of the existing frame that will the root of the added robot,
# \param rootJointType type of root joint among "anchor", "freeflyer",
# "planar",
# \param packageName Name of the ROS package containing the model,
# \param modelName Name of the package containing the model
# \param urdfSuffix suffix for urdf file,
#
# The ros url are built as follows:
# "package://${packageName}/urdf/${modelName}${urdfSuffix}.urdf"
# "package://${packageName}/srdf/${modelName}${srdfSuffix}.srdf"
#
def insertRobotModelOnFrame (self, robotName, frameName, rootJointType,
packageName, modelName, urdfSuffix, srdfSuffix):
if self.load:
self.client.manipulation.robot.insertRobotModelOnFrame (robotName,
frameName, rootJointType, packageName, modelName,
urdfSuffix, srdfSuffix)
self.robotNames.append (robotName)
self.rootJointType[robotName] = rootJointType
self.rebuildRanks ()
## Same as Robot.insertRobotModel
#
# \param urdfString XML string of the URDF,
# \param srdfString XML string of the SRDF
def insertRobotModelFromString (self, robotName, rootJointType, urdfString, srdfString):
if self.load:
self.client.manipulation.robot.insertRobotModelFromString (robotName,
rootJointType, urdfString, srdfString)
self.robotNames.append (robotName)
self.rootJointType[robotName] = rootJointType
self.rebuildRanks ()
## Load a SRDF for the robot. Several SRDF can thus be loaded for the same robot
#
# \param robotName key of the robot in hpp::manipulation::Device object
# map (see hpp::manipulation::Device)
# \param packageName Name of the ROS package containing the model,
# \param modelName Name of the package containing the model
# \param srdfSuffix suffix for srdf file,
#
# The ros url are built as follows:
# \li "package://${packageName}/srdf/${modelName}${srdfSuffix}.srdf"
def insertRobotSRDFModel (self, robotName, packageName,
modelName, srdfSuffix):
if self.load:
self.client.manipulation.robot.insertRobotSRDFModel (robotName,
packageName, modelName, srdfSuffix)
## Load humanoid robot model and insert it in the device
#
# \param robotName key of the robot in ProblemSolver object map
# (see hpp::manipulation::ProblemSolver::addRobot)
# \param rootJointType type of root joint among "anchor", "freeflyer",
# "planar",
# \param packageName Name of the ROS package containing the model,
# \param modelName Name of the package containing the model
# \param urdfSuffix suffix for urdf file,
#
# The ros url are built as follows:
# \li "package://${packageName}/urdf/${modelName}${urdfSuffix}.urdf"
# \li "package://${packageName}/srdf/${modelName}${srdfSuffix}.srdf"
def insertHumanoidModel (self, robotName, rootJointType, packageName,
modelName, urdfSuffix, srdfSuffix):
if self.load:
self.client.manipulation.robot.insertHumanoidModel \
(robotName, rootJointType, packageName, modelName,
urdfSuffix, srdfSuffix)
self.robotNames.append (robotName)
self.rootJointType[robotName] = rootJointType
self.rebuildRanks ()
def loadHumanoidModel (self, robotName, rootJointType, packageName,
modelName, urdfSuffix, srdfSuffix):
self.insertHumanoidModel (robotName, rootJointType, packageName,
modelName, urdfSuffix, srdfSuffix)
## Load environment model and store in local map.
# Contact surfaces are build from the corresping srdf file.
# See hpp-manipulation-urdf for more details about contact surface
# specifications.
#
# \param envName key of the object in ProblemSolver object map
# (see hpp::manipulation::ProblemSolver::addRobot)
# \param packageName Name of the ROS package containing the model,
# \param modelName Name of the package containing the model
# \param urdfSuffix suffix for urdf file,
# \param srdfSuffix suffix for srdf file.
#
# The ros url are built as follows:
# \li "package://${packageName}/urdf/${modelName}${urdfSuffix}.urdf"
# \li "package://${packageName}/srdf/${modelName}${srdfSuffix}.srdf"
def loadEnvironmentModel (self, packageName, modelName,
urdfSuffix, srdfSuffix, envName):
if self.load:
self.client.manipulation.robot.loadEnvironmentModel (packageName,
modelName, urdfSuffix, srdfSuffix, envName)
self.rootJointType[envName] = "Anchor"
## \name Joints
#\{
## Set the position of root joint of a robot in world frame
## \param robotName key of the robot in ProblemSolver object map.
## \param position constant position of the root joint in world frame in
## initial configuration.
def setRootJointPosition (self, robotName, position):
return self.client.manipulation.robot.setRootJointPosition (robotName, position)
## \}
## \name Bodies
# \{
## Return the joint name in which a gripper is and the position relatively
# to the joint
def getGripperPositionInJoint (self, gripperName):
return self.client.manipulation.robot.getGripperPositionInJoint (gripperName)
## Return the joint name in which a handle is and the position relatively
# to the joint
def getHandlePositionInJoint (self, handleName):
return self.client.manipulation.robot.getHandlePositionInJoint (handleName)
## \}
from hpp.corbaserver.robot import StaticStabilityConstraintsFactory
class HumanoidRobot (Robot, StaticStabilityConstraintsFactory):
    """Composite robot whose first sub-robot is a humanoid."""
    ## Constructor
    # \param compositeName name of the composite robot that will be built later,
    # \param robotName name of the first robot that is loaded now,
    # \param rootJointType type of root joint among ("freeflyer", "planar",
    #        "anchor"),
    def __init__ (self, compositeName = None, robotName = None, rootJointType = None, load = True, client = None):
        Robot.__init__ (self, compositeName, robotName, rootJointType, load, client)

    ## Load the first robot as a humanoid model.
    #
    # Overrides Robot.loadModel; note it always creates the device and
    # inserts the model (no self.load guard, unlike the parent version).
    def loadModel (self, robotName, rootJointType):
        self.client.basic.robot.createRobot (self.name)
        self.insertHumanoidModel \
            (robotName, rootJointType, self.packageName, self.urdfName,
             self.urdfSuffix, self.srdfSuffix)
| {
"imports": [
"/install/lib/python2.7/dist-packages/hpp/corbaserver/manipulation/client.py",
"/install/lib/python2.7/dist-packages/hpp/corbaserver/manipulation/problem_solver.py",
"/install/lib/python2.7/dist-packages/hpp/corbaserver/manipulation/constraint_graph_factory.py",
"/install/lib/python2.7/dist-packages/hpp/corbaserver/manipulation/robot.py"
]
} |
0023jas/Obelisk-Python-Wallet | refs/heads/main | /obelisk.py | from web3 import Web3
from pubKeyGen import EccMultiply, GPoint
from privKeyGen import genPrivKey
from walletDecryption import walletDecryption
from walletInteractions import getEth
from qr import qrGenerate
import os
import time
import glob
#Used to store and encrypt wallet
from Crypto.Cipher import AES
from Crypto.Protocol.KDF import scrypt
from Crypto.Util.Padding import pad, unpad
from Crypto.Random import get_random_bytes
import json
#Used for Bip39Mnemonic
from bip_utils import Bip39MnemonicGenerator, Bip39SeedGenerator, Bip44, Bip44Coins, Bip44Changes
appRunning = True
#Individual Transaction/Personal Information
def createWallet():
    """Interactively create a new wallet.

    Generates a private key, derives the checksummed Ethereum address,
    encrypts the key with a user-chosen password (scrypt KDF + AES-CBC),
    stores it as JSON under wallets/<address>.txt and renders a QR code.

    Returns:
        True to return to the home screen, False to exit the application.
    """
    os.system('cls' if os.name == 'nt' else 'clear')
    print(" ╔═╗┌─┐┌┐┌┌─┐┬─┐┌─┐┌┬┐┌─┐ ╦ ╦┌─┐┬ ┬ ┌─┐┌┬┐")
    print(" ║ ╦├┤ │││├┤ ├┬┘├─┤ │ ├┤ ║║║├─┤│ │ ├┤ │ ")
    print(" ╚═╝└─┘┘└┘└─┘┴└─┴ ┴ ┴ └─┘ ╚╩╝┴ ┴┴─┘┴─┘└─┘ ┴ ")
    print("")
    print(" ############################################")
    print("")
    print(" > Please Input a Secure Password")
    print("")
    password = input(" > ")
    time.sleep(2)
    print("")
    print(" > Generating Wallet")
    privKey = genPrivKey()
    # Uncompressed public key (x || y), then keccak-256; the address is the
    # last 20 bytes of the hash.
    PublicKey = EccMultiply(GPoint, privKey)
    PublicKey = hex(PublicKey[0])[2:] + hex(PublicKey[1])[2:]
    address = Web3.keccak(hexstr = PublicKey).hex()
    address = "0x" + address[-40:]
    address = Web3.toChecksumAddress(address)
    time.sleep(2)
    print("")
    print(" > Encrypting Wallet")
    salt = get_random_bytes(16)
    # N=2**20 makes the KDF deliberately slow to resist brute-force attacks.
    key = scrypt(password, salt, 32, N=2**20, r=8, p=1)
    data = hex(privKey)[2:].encode('utf-8')
    cipher = AES.new(key, AES.MODE_CBC)
    ct_bytes = cipher.encrypt(pad(data, AES.block_size))
    output = {"salt" : salt.hex(),
              "initialization vector" : cipher.iv.hex(),
              "encrypted private key" : ct_bytes.hex()}
    with open("wallets/" + address + '.txt', 'w') as json_file:
        json.dump(output, json_file)
    qrGenerate(address)
    print("")
    print(" > Wallet Created")
    time.sleep(2)
    print("")
    print(" > Do you want to return to the home screen [y/n]?")
    print("")
    # BUG FIX: the original read the answer only once before the loop, so an
    # invalid answer spun forever printing the error.  Re-prompt each pass.
    while True:
        userInput = input(" > ")
        print("")
        if userInput == 'y':
            return True
        elif userInput == 'n':
            return False
        print(" > Only y and n are correct inputs")
def decodeWallet():
    """List saved wallets, let the user pick one, and hand its encrypted
    JSON payload to walletDecryption.

    Returns:
        True (the caller always returns to the home screen afterwards).
    """
    walletSelect = False
    while walletSelect == False:
        os.system('cls' if os.name == 'nt' else 'clear')
        print(" ╔═╗┌─┐┬ ┌─┐┌─┐┌┬┐ ╦ ╦┌─┐┬ ┬ ┌─┐┌┬┐")
        print(" ╚═╗├┤ │ ├┤ │ │ ║║║├─┤│ │ ├┤ │")
        print(" ╚═╝└─┘┴─┘└─┘└─┘ ┴ ╚╩╝┴ ┴┴─┘┴─┘└─┘ ┴")
        print("")
        print(" ######################################")
        print("")
        availableWallets = os.listdir("wallets")
        # 1-based menu of wallet files; the filename minus ".txt" is the address.
        for wallet in range(len(availableWallets)):
            print(" " + str(wallet+1) + ": " + availableWallets[wallet][:-4])
        print("")
        walletSelector = input(" > ")
        # BUG FIX: non-numeric input used to crash with ValueError; now it
        # simply re-displays the menu.
        try:
            choice = int(walletSelector)
        except ValueError:
            continue
        if 0 < choice <= len(availableWallets):
            walletName = availableWallets[choice-1]
            with open("wallets/" + walletName) as f:
                data = json.load(f)
            address = walletName[:-4]
            walletSelect = True
            walletDecryption(data, address)
    return True
def _saveEncryptedWallet(privKey, password):
    """Derive the checksummed address for ``privKey`` (int), encrypt the hex
    key with ``password`` (scrypt KDF + AES-CBC) and write the result as JSON
    to wallets/<address>.txt.

    Returns:
        The checksummed address string.
    """
    PublicKey = EccMultiply(GPoint, privKey)
    PublicKey = hex(PublicKey[0])[2:] + hex(PublicKey[1])[2:]
    address = Web3.keccak(hexstr = PublicKey).hex()
    address = "0x" + address[-40:]
    address = Web3.toChecksumAddress(address)
    salt = get_random_bytes(16)
    # N=2**20 makes the KDF deliberately slow to resist brute-force attacks.
    key = scrypt(password, salt, 32, N=2**20, r=8, p=1)
    data = hex(privKey)[2:].encode('utf-8')
    cipher = AES.new(key, AES.MODE_CBC)
    ct_bytes = cipher.encrypt(pad(data, AES.block_size))
    output = {"salt" : salt.hex(),
              "initialization vector" : cipher.iv.hex(),
              "encrypted private key" : ct_bytes.hex()}
    with open("wallets/" + address + '.txt', 'w') as json_file:
        json.dump(output, json_file)
    return address


def migrateWallet():
    """Import an existing wallet from a raw private key or a BIP-39 12-word
    mnemonic, encrypt it under a new password, and restart the home screen.
    """
    os.system('cls' if os.name == 'nt' else 'clear')
    print(" ╔╦╗┬┌─┐┬─┐┌─┐┌┬┐┌─┐ ╦ ╦┌─┐┬ ┬ ┌─┐┌┬┐")
    print(" ║║║││ ┬├┬┘├─┤ │ ├┤ ║║║├─┤│ │ ├┤ │ ")
    print(" ╩ ╩┴└─┘┴└─┴ ┴ ┴ └─┘ ╚╩╝┴ ┴┴─┘┴─┘└─┘ ┴ ")
    print("")
    print(" #######################################")
    print("")
    print(" 1: Migrate Through Private Key")
    print("")
    print(" 2: Migrate Through 12 Word Phrase")
    print("")
    userInput = input(" > ")
    print("")
    if userInput == "1":
        print(" Type Private Key:")
        print("")
        privKey = int(input(" > "), 16)
        print("")
        print(" Type Strong Password:")
        print("")
        password = input(" > ")
        time.sleep(2)
        print("")
        print("> Encrypting Wallet")
        _saveEncryptedWallet(privKey, password)
        print("")
        print("> Wallet Created")
        time.sleep(2)
        startWallet()
    elif userInput == "2":
        print(" Type 12 Words:")
        mnemonic = input(" > ")
        seed_bytes = Bip39SeedGenerator(mnemonic).Generate()
        bip_obj_mst = Bip44.FromSeed(seed_bytes, Bip44Coins.ETHEREUM)
        bip_obj_chain = bip_obj_mst.Purpose().Coin().Account(0).Change(Bip44Changes.CHAIN_EXT)
        # Scan the external chain for the first derived address with a balance.
        accountNumber = 0
        while True:
            bip_obj_addr = bip_obj_chain.AddressIndex(accountNumber)
            if getEth(bip_obj_addr.PublicKey().ToAddress()) > 0:
                break
            # BUG FIX: the index was never advanced before, so an empty first
            # address made this loop re-query index 0 forever.
            accountNumber += 1
        privKey = int(bip_obj_addr.PrivateKey().Raw().ToHex(), 16)
        print("")
        print(" > Found Wallet!")
        time.sleep(2)
        print("")
        print(" Type Strong Password:")
        print("")
        password = input(" > ")
        time.sleep(2)
        print("")
        print("> Encrypting Wallet")
        _saveEncryptedWallet(privKey, password)
        print("")
        print("> Wallet Created")
        time.sleep(2)
        startWallet()
def startWallet():
    """Home screen: show the main menu and dispatch the selected action.

    Returns:
        True to show the menu again, False to quit the application
        (None after option 3, see note below).
    """
    userOption = False
    while(userOption == False):
        os.system('cls' if os.name == 'nt' else 'clear')
        print("")
        print(" ██████╗ ██████╗ ███████╗██╗ ██╗███████╗██╗ ██╗")
        print(" ██╔═══██╗██╔══██╗██╔════╝██║ ██║██╔════╝██║ ██╔╝")
        print(" ██║ ██║██████╔╝█████╗ ██║ ██║███████╗█████╔╝ ")
        print(" ██║ ██║██╔══██╗██╔══╝ ██║ ██║╚════██║██╔═██╗ ")
        print(" ╚██████╔╝██████╔╝███████╗███████╗██║███████║██║ ██╗")
        print(" ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚═╝╚══════╝╚═╝ ╚═╝")
        print("")
        print(" ####################################################")
        print("")
        print(" 1: Access Saved Wallet")
        print("")
        print(" 2: Generate New Wallet")
        print("")
        print(" 3: Migrate Wallet")
        print("")
        print(" 4: Exit Obelisk")
        print("")
        userInput = input(" > ")
        if userInput not in ("1", "2", "3", "4"):
            os.system('cls' if os.name == 'nt' else 'clear')
            print("")
            print(" > Oops, wrong input!")
            print("")
            print(" > 1, 2, 3, or 4 is the only acceptable input")
            print("")
            print(" > Hit enter to continue")
            print("")
            errorInput = input(" > ")
        else:
            userOption = True
            # BUG FIX: removed leftover debug `print(userOption)` that leaked
            # "True" onto the screen before the next view rendered.
    if userInput == "1":
        return decodeWallet()
    elif userInput == "2":
        return createWallet()
    elif userInput == "3":
        # NOTE(review): migrateWallet re-enters startWallet itself; when that
        # inner session ends this call returns None, terminating the outer
        # app loop — confirm this is the intended flow.
        migrateWallet()
    elif userInput == "4":
        os.system('cls' if os.name == 'nt' else 'clear')
        return False
# Main loop: keep showing the home screen until startWallet returns a falsy
# value (explicit exit, or the post-migration fall-through).
while appRunning == True:
    appRunning = startWallet()
| from random import randint
def genPrivKey():
    """Return a random Ethereum private key as an int in [1, N].

    N is the order of the secp256k1 curve.  Uses the ``secrets`` CSPRNG:
    ``random.randint`` (used previously) is a deterministic Mersenne-Twister
    generator and is unsafe for cryptographic key material.
    """
    import secrets
    N = int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 16)
    # randbelow(N) yields [0, N-1]; shifting by 1 gives the valid range [1, N].
    return 1 + secrets.randbelow(N)
--- FILE SEPARATOR ---
from Crypto.Cipher import AES
from Crypto.Protocol.KDF import scrypt
from Crypto.Util.Padding import pad, unpad
from Crypto.Random import get_random_bytes
import json
import time
import os
from walletInteractions import sendEth, getEth, receiveEth
from currencyConvert import weiToEth
#Used to run getWalletStats
import requests
from bs4 import BeautifulSoup
import re
import requests
def getWalletStats():
    """Scrape market data and return [eth_price_usd, low, avg, high] where
    the last three are gas prices in gwei.

    NOTE(review): depends on the exact HTML layout of coinmarketcap.com and
    ethgasstation.info; it will raise if either page changes.
    """
    statsList = []
    # Ethereum price in USD: first <td> on the coinmarketcap currency page.
    ethLink = requests.get('https://coinmarketcap.com/currencies/ethereum/')
    ethSoup = BeautifulSoup(ethLink.content, 'html.parser')
    ethPrice = ethSoup.findAll('td')
    ethPrice = ethPrice[0].contents
    # FIX: raw string replaces the invalid "\ " escape in the original
    # pattern (SyntaxWarning on modern Python); the regex is unchanged.
    ethPrice = re.sub(r' USD$', '', ethPrice[0])
    ethPrice = ethPrice[1:]                      # drop the leading "$"
    ethPrice = float(ethPrice.replace(',', ''))  # strip thousands separators
    statsList.append(ethPrice)
    # Low / standard / fast gas prices from ethgasstation.
    gasLink = requests.get('https://ethgasstation.info/')
    gasSoup = BeautifulSoup(gasLink.content, 'html.parser')
    lowFee = gasSoup.find('div', {'class':'safe_low'})
    avgFee = gasSoup.find('div', {'class':'standard'})
    highFee = gasSoup.find('div', {'class':'fast'})
    statsList.append(int(lowFee.contents[0]))
    statsList.append(int(avgFee.contents[0]))
    statsList.append(int(highFee.contents[0]))
    return statsList
def runWallet(privateKey, address):
    """Interactive session for an unlocked wallet: display balances and gas
    prices, then dispatch send/receive/exit actions until the user quits."""
    while True:
        os.system('cls' if os.name == 'nt' else 'clear')
        price_usd, low_gas, avg_gas, high_gas = getWalletStats()
        eth_value = round(weiToEth(getEth(address)), 6)
        usd_value = round(eth_value * price_usd, 2)
        print("")
        print(" ╔═╗┌┐ ┌─┐┬ ┬┌─┐┬┌─ ╦ ╦┌─┐┬ ┬ ┌─┐┌┬┐")
        print(" ║ ║├┴┐├┤ │ │└─┐├┴┐ ║║║├─┤│ │ ├┤ │ ")
        print(" ╚═╝└─┘└─┘┴─┘┴└─┘┴ ┴ ╚╩╝┴ ┴┴─┘┴─┘└─┘ ┴ ")
        print("")
        print(" #######################################")
        print("")
        print(" Address: " + address)
        print(" Ethereum Value: Ξ" + str(eth_value))
        print(" Dollar Value: $" + str(usd_value))
        print("")
        print(" Current Gas Prices in Gwei:")
        print(" Low: " + str(low_gas) + ", Average: " + str(avg_gas) + ", High: " + str(high_gas))
        print("")
        print(" Actions: ")
        print(" 1: Send")
        print(" 2: Receive")
        print(" 3: Exit")
        print("")
        choice = input(" > ")
        if choice == "1":
            sendEth(privateKey, address)
        elif choice == "2":
            receiveEth(address)
        elif choice == "3":
            os.system('cls' if os.name == 'nt' else 'clear')
            break
def walletDecryption(data, address):
    """Prompt for the wallet password, decrypt the stored private key and
    open the wallet session.  Typing 'n' returns to the home screen.

    Args:
        data: dict with hex-encoded 'salt', 'initialization vector' and
              'encrypted private key' entries (as written on wallet creation).
        address: checksummed wallet address (display / QR lookup key).
    """
    salt = bytes.fromhex(data['salt'])
    iv = bytes.fromhex(data['initialization vector'])
    ct = bytes.fromhex(data['encrypted private key'])
    while True:
        print("")
        print(" Enter Password or type n to return:")
        print("")
        password = input(" > ")
        if password == 'n':
            print("")
            print(" > Returning to home")
            time.sleep(2)
            return
        key = scrypt(password, salt, 32, N = 2**20, r = 8, p = 1)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        # BUG FIX: the original wrapped runWallet in a bare `except:`, so any
        # error raised by the wallet UI after a CORRECT password was swallowed
        # and misreported as a wrong password.  Only the unpad ValueError
        # (bad padding => wrong key) is treated as a password failure now.
        try:
            pt = unpad(cipher.decrypt(ct), AES.block_size)
        except ValueError:
            print("")
            print(" > wrong password entered")
            time.sleep(2)
            continue
        runWallet(pt.decode('utf-8'), address)
        return
--- FILE SEPARATOR ---
from web3 import Web3
from PIL import Image
from currencyConvert import ethToWei, gweiToWei
import time
w3 = Web3(Web3.HTTPProvider('https://mainnet.infura.io/v3/5d6be6c002c744358836ac43f459d453'))
def getEth(address):
    """Return the mainnet balance of ``address`` in wei (int)."""
    return w3.eth.get_balance(address)
def sendEth(privateKey, address):
    """Interactively build, sign and broadcast a plain ETH transfer.

    Args:
        privateKey: hex string of the sender's private key.
        address: sender's checksummed address (used for nonce lookup and the
                 etherscan link).
    """
    # Typed as String must Remain String
    print("")
    print(" Enter the Receiver Address:")
    receiveAddress = input(" > ")
    # Typed in Eth needs to be Wei
    print("")
    print(" Enter the Amount you Want to Send in Eth:")
    sendAmount = int(ethToWei(float(input(" > "))))
    # Typed in Gwei needs to be wei
    print("")
    print(" Type Gas Fee in Gwei:")
    gasFee = gweiToWei(int(input(" > ")))
    # CONSISTENCY FIX: use the snake_case web3 API throughout, matching
    # getEth's w3.eth.get_balance; the camelCase aliases (signTransaction,
    # getTransactionCount, sendRawTransaction) were removed in web3.py v6.
    signed_txn = w3.eth.account.sign_transaction(dict(
        nonce=w3.eth.get_transaction_count(address),
        gasPrice=gasFee,
        gas=21000,          # standard gas limit for a plain ETH transfer
        to=receiveAddress,
        value=sendAmount,
        data=b'',
    ),
        privateKey,
    )
    w3.eth.send_raw_transaction(signed_txn.rawTransaction)
    print("")
    print("Your transaction is sent! check it out here: etherscan.io/address/"+address)
    time.sleep(10)
def receiveEth(address):
    """Show the wallet address and open its QR-code image (qr/<address>.png,
    created when the wallet was generated)."""
    print("")
    print(" Wallet Address: " + address)
    print("")
    image = Image.open('qr/' + address + ".png")
    image.show()
    time.sleep(5)
--- FILE SEPARATOR ---
import qrcode
#Takes in Address, Saves QR in /qr
def qrGenerate(inputAddress):
    """Render ``inputAddress`` as a QR code, save it to qr/<address>.png and
    return the address unchanged."""
    qrcode.make(inputAddress).save("qr/" + inputAddress + '.png')
    return inputAddress
| {
"imports": [
"/privKeyGen.py",
"/walletDecryption.py",
"/walletInteractions.py",
"/qr.py"
]
} |
0023jas/Obelisk-Python-Wallet | refs/heads/main | /walletDecryption.py | from Crypto.Cipher import AES
from Crypto.Protocol.KDF import scrypt
from Crypto.Util.Padding import pad, unpad
from Crypto.Random import get_random_bytes
import json
import time
import os
from walletInteractions import sendEth, getEth, receiveEth
from currencyConvert import weiToEth
#Used to run getWalletStats
import requests
from bs4 import BeautifulSoup
import re
import requests
def getWalletStats():
    """Scrape market data and return [eth_price_usd, low, avg, high] where
    the last three are gas prices in gwei.

    NOTE(review): depends on the exact HTML layout of coinmarketcap.com and
    ethgasstation.info; it will raise if either page changes.
    """
    statsList = []
    # Ethereum price in USD: first <td> on the coinmarketcap currency page.
    ethLink = requests.get('https://coinmarketcap.com/currencies/ethereum/')
    ethSoup = BeautifulSoup(ethLink.content, 'html.parser')
    ethPrice = ethSoup.findAll('td')
    ethPrice = ethPrice[0].contents
    # FIX: raw string replaces the invalid "\ " escape in the original
    # pattern (SyntaxWarning on modern Python); the regex is unchanged.
    ethPrice = re.sub(r' USD$', '', ethPrice[0])
    ethPrice = ethPrice[1:]                      # drop the leading "$"
    ethPrice = float(ethPrice.replace(',', ''))  # strip thousands separators
    statsList.append(ethPrice)
    # Low / standard / fast gas prices from ethgasstation.
    gasLink = requests.get('https://ethgasstation.info/')
    gasSoup = BeautifulSoup(gasLink.content, 'html.parser')
    lowFee = gasSoup.find('div', {'class':'safe_low'})
    avgFee = gasSoup.find('div', {'class':'standard'})
    highFee = gasSoup.find('div', {'class':'fast'})
    statsList.append(int(lowFee.contents[0]))
    statsList.append(int(avgFee.contents[0]))
    statsList.append(int(highFee.contents[0]))
    return statsList
def runWallet(privateKey, address):
    """Interactive session for an unlocked wallet: display balances and gas
    prices, then dispatch send/receive/exit actions until the user quits."""
    while True:
        os.system('cls' if os.name == 'nt' else 'clear')
        price_usd, low_gas, avg_gas, high_gas = getWalletStats()
        eth_value = round(weiToEth(getEth(address)), 6)
        usd_value = round(eth_value * price_usd, 2)
        print("")
        print(" ╔═╗┌┐ ┌─┐┬ ┬┌─┐┬┌─ ╦ ╦┌─┐┬ ┬ ┌─┐┌┬┐")
        print(" ║ ║├┴┐├┤ │ │└─┐├┴┐ ║║║├─┤│ │ ├┤ │ ")
        print(" ╚═╝└─┘└─┘┴─┘┴└─┘┴ ┴ ╚╩╝┴ ┴┴─┘┴─┘└─┘ ┴ ")
        print("")
        print(" #######################################")
        print("")
        print(" Address: " + address)
        print(" Ethereum Value: Ξ" + str(eth_value))
        print(" Dollar Value: $" + str(usd_value))
        print("")
        print(" Current Gas Prices in Gwei:")
        print(" Low: " + str(low_gas) + ", Average: " + str(avg_gas) + ", High: " + str(high_gas))
        print("")
        print(" Actions: ")
        print(" 1: Send")
        print(" 2: Receive")
        print(" 3: Exit")
        print("")
        choice = input(" > ")
        if choice == "1":
            sendEth(privateKey, address)
        elif choice == "2":
            receiveEth(address)
        elif choice == "3":
            os.system('cls' if os.name == 'nt' else 'clear')
            break
def walletDecryption(data, address):
    """Prompt for the wallet password, decrypt the stored private key and
    open the wallet session.  Typing 'n' returns to the home screen.

    Args:
        data: dict with hex-encoded 'salt', 'initialization vector' and
              'encrypted private key' entries (as written on wallet creation).
        address: checksummed wallet address (display / QR lookup key).
    """
    salt = bytes.fromhex(data['salt'])
    iv = bytes.fromhex(data['initialization vector'])
    ct = bytes.fromhex(data['encrypted private key'])
    while True:
        print("")
        print(" Enter Password or type n to return:")
        print("")
        password = input(" > ")
        if password == 'n':
            print("")
            print(" > Returning to home")
            time.sleep(2)
            return
        key = scrypt(password, salt, 32, N = 2**20, r = 8, p = 1)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        # BUG FIX: the original wrapped runWallet in a bare `except:`, so any
        # error raised by the wallet UI after a CORRECT password was swallowed
        # and misreported as a wrong password.  Only the unpad ValueError
        # (bad padding => wrong key) is treated as a password failure now.
        try:
            pt = unpad(cipher.decrypt(ct), AES.block_size)
        except ValueError:
            print("")
            print(" > wrong password entered")
            time.sleep(2)
            continue
        runWallet(pt.decode('utf-8'), address)
        return
| from web3 import Web3
from PIL import Image
from currencyConvert import ethToWei, gweiToWei
import time
w3 = Web3(Web3.HTTPProvider('https://mainnet.infura.io/v3/5d6be6c002c744358836ac43f459d453'))
def getEth(address):
    """Return the mainnet balance of ``address`` in wei (int)."""
    return w3.eth.get_balance(address)
def sendEth(privateKey, address):
    """Interactively build, sign and broadcast a plain ETH transfer.

    Args:
        privateKey: hex string of the sender's private key.
        address: sender's checksummed address (used for nonce lookup and the
                 etherscan link).
    """
    # Typed as String must Remain String
    print("")
    print(" Enter the Receiver Address:")
    receiveAddress = input(" > ")
    # Typed in Eth needs to be Wei
    print("")
    print(" Enter the Amount you Want to Send in Eth:")
    sendAmount = int(ethToWei(float(input(" > "))))
    # Typed in Gwei needs to be wei
    print("")
    print(" Type Gas Fee in Gwei:")
    gasFee = gweiToWei(int(input(" > ")))
    # CONSISTENCY FIX: use the snake_case web3 API throughout, matching
    # getEth's w3.eth.get_balance; the camelCase aliases (signTransaction,
    # getTransactionCount, sendRawTransaction) were removed in web3.py v6.
    signed_txn = w3.eth.account.sign_transaction(dict(
        nonce=w3.eth.get_transaction_count(address),
        gasPrice=gasFee,
        gas=21000,          # standard gas limit for a plain ETH transfer
        to=receiveAddress,
        value=sendAmount,
        data=b'',
    ),
        privateKey,
    )
    w3.eth.send_raw_transaction(signed_txn.rawTransaction)
    print("")
    print("Your transaction is sent! check it out here: etherscan.io/address/"+address)
    time.sleep(10)
def receiveEth(address):
    """Show the wallet address and open its QR-code image (qr/<address>.png,
    created when the wallet was generated)."""
    print("")
    print(" Wallet Address: " + address)
    print("")
    image = Image.open('qr/' + address + ".png")
    image.show()
    time.sleep(5)
--- FILE SEPARATOR ---
def weiToEth(amount):
    """Convert an amount in wei to ether (1 ether = 10**18 wei)."""
    return amount / 1000000000000000000
def ethToWei(amount):
    """Convert an amount in ether to wei (1 ether = 10**18 wei)."""
    return amount * 1000000000000000000
def gweiToWei(amount):
    """Convert an amount in gwei to wei (1 gwei = 10**9 wei)."""
    return amount * 1000000000
| {
"imports": [
"/walletInteractions.py",
"/currencyConvert.py"
]
} |
0023jas/Obelisk-Python-Wallet | refs/heads/main | /walletInteractions.py | from web3 import Web3
from PIL import Image
from currencyConvert import ethToWei, gweiToWei
import time
w3 = Web3(Web3.HTTPProvider('https://mainnet.infura.io/v3/5d6be6c002c744358836ac43f459d453'))
def getEth(address):
    """Return the mainnet balance of ``address`` in wei (int)."""
    return w3.eth.get_balance(address)
def sendEth(privateKey, address):
    """Interactively build, sign and broadcast a plain ETH transfer.

    Args:
        privateKey: hex string of the sender's private key.
        address: sender's checksummed address (used for nonce lookup and the
                 etherscan link).
    """
    # Typed as String must Remain String
    print("")
    print(" Enter the Receiver Address:")
    receiveAddress = input(" > ")
    # Typed in Eth needs to be Wei
    print("")
    print(" Enter the Amount you Want to Send in Eth:")
    sendAmount = int(ethToWei(float(input(" > "))))
    # Typed in Gwei needs to be wei
    print("")
    print(" Type Gas Fee in Gwei:")
    gasFee = gweiToWei(int(input(" > ")))
    # CONSISTENCY FIX: use the snake_case web3 API throughout, matching
    # getEth's w3.eth.get_balance; the camelCase aliases (signTransaction,
    # getTransactionCount, sendRawTransaction) were removed in web3.py v6.
    signed_txn = w3.eth.account.sign_transaction(dict(
        nonce=w3.eth.get_transaction_count(address),
        gasPrice=gasFee,
        gas=21000,          # standard gas limit for a plain ETH transfer
        to=receiveAddress,
        value=sendAmount,
        data=b'',
    ),
        privateKey,
    )
    w3.eth.send_raw_transaction(signed_txn.rawTransaction)
    print("")
    print("Your transaction is sent! check it out here: etherscan.io/address/"+address)
    time.sleep(10)
def receiveEth(address):
    """Show the wallet address and open its QR-code image (qr/<address>.png,
    created when the wallet was generated)."""
    print("")
    print(" Wallet Address: " + address)
    print("")
    image = Image.open('qr/' + address + ".png")
    image.show()
    time.sleep(5)
def weiToEth(amount):
    """Convert an amount in wei to ether (1 ether = 10**18 wei)."""
    return amount / 1000000000000000000
def ethToWei(amount):
    """Convert an amount in ether to wei (1 ether = 10**18 wei)."""
    return amount * 1000000000000000000
def gweiToWei(amount):
    """Convert an amount in gwei to wei (1 gwei = 10**9 wei)."""
    return amount * 1000000000
| {
"imports": [
"/currencyConvert.py"
]
} |
007Rohan/Project-Management-using-REST-API-DJANGO | refs/heads/main | /app/serializer.py | from rest_framework import serializers
from .models import addClient,addProject
from django.contrib.auth.models import User
class UserSerializer(serializers.ModelSerializer):
    """Serializes a Django auth User, exposing only id and username."""
    class Meta:
        model=User
        fields=['id','username']
class ClientSerializer(serializers.ModelSerializer):
    """Serializes an addClient record (created_by as the related User's pk)."""
    class Meta:
        model=addClient
        fields=['id','client_name','created_at','created_by']
class ClientProjectSerializer(serializers.ModelSerializer):
    """Read serializer for addProject with contributed users expanded to
    nested {id, username} objects (read-only view of the M2M field)."""
    contributed_users = UserSerializer(many=True)
    class Meta:
        model=addProject
        fields='__all__'
class ClientProjectSerializer1(serializers.ModelSerializer):
    """Write serializer for addProject: contributed_users stay as pk lists,
    so it can be used for creation (unlike ClientProjectSerializer)."""
    class Meta:
        model=addProject
        fields='__all__'
class ProjectSerializer(serializers.ModelSerializer):
    """Flat summary of addProject without the contributed_users relation."""
    class Meta:
        model=addProject
        fields=['id','project_name','created_at','created_by_client']
| from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class addClient(models.Model):
    """A client record (table ``addclients``)."""
    id=models.AutoField(primary_key=True)
    client_name=models.CharField(max_length=30)
    # Set automatically on insert; not updated afterwards.
    created_at=models.DateTimeField(auto_now_add=True)
    # The auth User who created the record; deleting that user cascades.
    created_by=models.ForeignKey(User,on_delete=models.CASCADE)
    objects=models.Manager()
    def __str__(self):
        return self.client_name
    class Meta:
        db_table='addclients'
class addProject(models.Model):
    """A project belonging to one client, with many contributing users
    (table ``addprojects``)."""
    id=models.AutoField(primary_key=True)
    project_name=models.CharField(max_length=100)
    contributed_users=models.ManyToManyField(User)
    # Set automatically on insert; not updated afterwards.
    created_at=models.DateTimeField(auto_now_add=True)
    # Owning client; deleting the client cascades to its projects.
    created_by_client=models.ForeignKey(addClient,unique=False,on_delete=models.CASCADE)
    objects=models.Manager()
    class Meta:
        db_table='addprojects'
| {
"imports": [
"/app/models.py"
]
} |
007Rohan/Project-Management-using-REST-API-DJANGO | refs/heads/main | /app/views.py | from django.shortcuts import render,HttpResponse
from rest_framework import generics
from .models import addClient,addProject
from .serializer import ClientSerializer,ProjectSerializer,ClientProjectSerializer,ClientProjectSerializer1
# Create your views here.
def home(request):
    """Render the index page (front end for the API)."""
    return render(request,"index.html")
class AddClient(generics.CreateAPIView):
    """POST endpoint creating an addClient record."""
    serializer_class=ClientSerializer
class AddProject(generics.CreateAPIView):
    """POST endpoint creating an addProject record (users passed as pk list,
    hence the write-oriented ClientProjectSerializer1)."""
    serializer_class=ClientProjectSerializer1
class ClientList(generics.ListAPIView):
    """GET endpoint listing all clients."""
    queryset=addClient.objects.all()
    serializer_class=ClientSerializer
class DeleteClient(generics.DestroyAPIView):
    """DELETE endpoint removing a client by pk (cascades to its projects)."""
    queryset=addClient.objects.all()
    serializer_class=ClientSerializer
class ProjectList(generics.ListAPIView):
    """GET endpoint listing all projects (flat summary, no user expansion)."""
    queryset=addProject.objects.all()
    serializer_class=ProjectSerializer
class ClientProjectList(generics.ListAPIView):
    """GET endpoint listing all projects with contributors expanded to
    nested user objects."""
    queryset=addProject.objects.all()
    serializer_class=ClientProjectSerializer
| from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class addClient(models.Model):
    """A client record (table ``addclients``)."""
    id=models.AutoField(primary_key=True)
    client_name=models.CharField(max_length=30)
    # Set automatically on insert; not updated afterwards.
    created_at=models.DateTimeField(auto_now_add=True)
    # The auth User who created the record; deleting that user cascades.
    created_by=models.ForeignKey(User,on_delete=models.CASCADE)
    objects=models.Manager()
    def __str__(self):
        return self.client_name
    class Meta:
        db_table='addclients'
class addProject(models.Model):
    """A project belonging to one client, with many contributing users
    (table ``addprojects``)."""
    id=models.AutoField(primary_key=True)
    project_name=models.CharField(max_length=100)
    contributed_users=models.ManyToManyField(User)
    # Set automatically on insert; not updated afterwards.
    created_at=models.DateTimeField(auto_now_add=True)
    # Owning client; deleting the client cascades to its projects.
    created_by_client=models.ForeignKey(addClient,unique=False,on_delete=models.CASCADE)
    objects=models.Manager()
    class Meta:
        db_table='addprojects'
--- FILE SEPARATOR ---
from rest_framework import serializers
from .models import addClient,addProject
from django.contrib.auth.models import User
class UserSerializer(serializers.ModelSerializer):
    """Serializes a Django auth User, exposing only id and username."""
    class Meta:
        model=User
        fields=['id','username']
class ClientSerializer(serializers.ModelSerializer):
    """Serializes an addClient record (created_by as the related User's pk)."""
    class Meta:
        model=addClient
        fields=['id','client_name','created_at','created_by']
class ClientProjectSerializer(serializers.ModelSerializer):
    """Read serializer for addProject with contributed users expanded to
    nested {id, username} objects (read-only view of the M2M field)."""
    contributed_users = UserSerializer(many=True)
    class Meta:
        model=addProject
        fields='__all__'
class ClientProjectSerializer1(serializers.ModelSerializer):
    """Write serializer for addProject: contributed_users stay as pk lists,
    so it can be used for creation (unlike ClientProjectSerializer)."""
    class Meta:
        model=addProject
        fields='__all__'
class ProjectSerializer(serializers.ModelSerializer):
    """Flat summary of addProject without the contributed_users relation."""
    class Meta:
        model=addProject
        fields=['id','project_name','created_at','created_by_client']
| {
"imports": [
"/app/models.py",
"/app/serializer.py"
]
} |
007gzs/xface | refs/heads/master | /xface/model/__init__.py | # encoding: utf-8
from __future__ import absolute_import, unicode_literals
from .face_alignment import FaceAlignment
from .face_detection import FaceDetector
from .face_recognition import FaceRecognition
__all__ = ['FaceAlignment', 'FaceDetector', 'FaceRecognition']
| # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import cv2
import torch
import numpy as np
import torch.backends.cudnn as cudnn
from torchvision import transforms
from .base import Base
class FaceAlignment(Base):
    """Face landmark alignment model.

    Given a full image and a face bounding box, crops a padded square region
    around the box, resizes it to the network input size and predicts 2-D
    landmark coordinates mapped back to the original image frame.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Network input is square: width == height == input_width.
        self.img_size = self.meta_conf['input_width']

    def get(self, image, det):
        """Predict facial landmarks for one detected face.

        Args:
            image: H x W x 3 numpy array (full frame; presumably BGR as
                   produced by cv2 — TODO confirm against callers).
            det: face box (x1, y1, x2, y2) in pixel coordinates.

        Returns:
            numpy array of shape (num_landmarks, 2), (x, y) coordinates in
            the original image frame.
        """
        cudnn.benchmark = True
        assert isinstance(image, np.ndarray)
        # CLEANUP: the original made an unused float32 copy of the image
        # (`img = np.float32(image.copy())`); it has been removed — all
        # cropping below works on `image` directly.
        xy = np.array([det[0], det[1]])
        zz = np.array([det[2], det[3]])
        wh = zz - xy + 1
        center = (xy + wh / 2).astype(np.int32)
        # Square crop 1.2x the larger box side, centered on the face.
        box_size = int(np.max(wh) * 1.2)
        xy = center - box_size // 2
        x1, y1 = xy
        x2, y2 = xy + box_size
        height, width, _ = image.shape
        # Amount by which the crop sticks out of the frame on each side.
        dx = max(0, -x1)
        dy = max(0, -y1)
        x1 = max(0, x1)
        y1 = max(0, y1)
        edx = max(0, x2 - width)
        edy = max(0, y2 - height)
        x2 = min(width, x2)
        y2 = min(height, y2)
        image_t = image[y1:y2, x1:x2]
        if dx > 0 or dy > 0 or edx > 0 or edy > 0:
            # Pad out-of-frame regions with black so the crop stays square.
            image_t = cv2.copyMakeBorder(image_t, dy, edy, dx, edx, cv2.BORDER_CONSTANT, 0)
        image_t = cv2.resize(image_t, (self.img_size, self.img_size))
        t = transforms.Compose([transforms.ToTensor()])
        img_after = t(image_t)
        self.model = self.model.to(self.device)
        img_after = img_after.unsqueeze(0)   # add batch dimension
        with torch.no_grad():
            image_pre = img_after.to(self.device)
            _, landmarks_normal = self.model(image_pre)
        landmarks_normal = landmarks_normal.cpu().numpy()
        landmarks_normal = landmarks_normal.reshape(landmarks_normal.shape[0], -1, 2)
        # Landmarks are predicted normalized to the crop; scale by the crop
        # size and translate by its top-left corner to get image coordinates.
        landmarks = landmarks_normal[0] * [box_size, box_size] + xy
        return landmarks
--- FILE SEPARATOR ---
# encoding: utf-8
from __future__ import absolute_import, unicode_literals
from itertools import product
from math import ceil
import numpy as np
import torch
import torch.backends.cudnn as cudnn
from .base import Base
class FaceDetector(Base):
    """RetinaFace-style detector: returns face boxes with scores plus
    5-point landmarks for an input HWC image."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Anchor-generation / decoding hyper-parameters from the meta file.
        self.min_sizes = self.meta_conf['min_sizes']
        self.steps = self.meta_conf['steps']
        self.variance = self.meta_conf['variance']
        self.in_channel = self.meta_conf['in_channel']
        self.out_channel = self.meta_conf['out_channel']
        self.confidence_threshold = self.meta_conf['confidence_threshold']

    def detect(self, image):
        """Run detection on one HWC ndarray.

        Returns (dets, landmarks): dets is an (N, 5) float32 array of
        [x1, y1, x2, y2, score]; landmarks is an (N, 5, 2) array of
        landmark coordinates paired row-for-row with dets.
        """
        cudnn.benchmark = True
        assert isinstance(image, np.ndarray)
        input_height, input_width, _ = image.shape
        img = np.float32(image)
        # Maps normalised box coords (x, y, x, y) back to pixel units.
        scale_box = torch.Tensor([img.shape[1], img.shape[0], img.shape[1], img.shape[0]])
        # Per-channel mean subtraction; presumably the training-time BGR
        # means -- confirm against the model's training pipeline.
        img -= (104, 117, 123)
        img = img.transpose(2, 0, 1)
        self.model = self.model.to(self.device)
        img = torch.from_numpy(img).unsqueeze(0)
        # (w, h) repeated for each of the 5 landmark points; img is NCHW
        # here, so shape[3] is width and shape[2] is height.
        scale_landms = torch.Tensor([img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                                     img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                                     img.shape[3], img.shape[2]])
        with torch.no_grad():
            img = img.to(self.device)
            scale_box = scale_box.to(self.device)
            scale_landms = scale_landms.to(self.device)
            loc, conf, landms = self.model(img)
            # Anchors are regenerated per call for the current image size.
            priors = self.priorbox_forward(height=input_height, width=input_width)
            priors = priors.to(self.device)
            prior_data = priors.data
            boxes = self.decode(loc.data.squeeze(0), prior_data, self.variance)
            boxes = boxes * scale_box
            boxes = boxes.cpu().numpy()
            # Column 1 is the "face" class probability.
            scores = conf.squeeze(0).data.cpu().numpy()[:, 1]
            landmarks = self.decode_landm(landms.data.squeeze(0), prior_data, self.variance)
            landmarks = landmarks * scale_landms
            landmarks = landmarks.reshape((landmarks.shape[0], 5, 2))
            landmarks = landmarks.cpu().numpy()
        # ignore low scores
        inds = np.where(scores > self.confidence_threshold)[0]
        boxes = boxes[inds]
        scores = scores[inds]
        landmarks = landmarks[inds]
        # keep top-K before NMS (sort by descending score)
        order = scores.argsort()[::-1]
        boxes = boxes[order]
        scores = scores[order]
        landmarks = landmarks[order]
        # do NMS
        nms_threshold = 0.2
        dets = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False)
        keep = self.py_cpu_nms(dets, nms_threshold)
        dets = dets[keep, :]
        landmarks = landmarks[keep]
        return dets, landmarks

    def py_cpu_nms(self, dets, thresh):
        """
        Python version NMS.
        Returns:
            The kept index after NMS.
        """
        x1 = dets[:, 0]
        y1 = dets[:, 1]
        x2 = dets[:, 2]
        y2 = dets[:, 3]
        scores = dets[:, 4]
        # +1 treats box coordinates as inclusive pixel indices.
        areas = (x2 - x1 + 1) * (y2 - y1 + 1)
        order = scores.argsort()[::-1]
        keep = []
        while order.size > 0:
            # Highest remaining score survives; suppress its big overlaps.
            i = order[0]
            keep.append(i)
            xx1 = np.maximum(x1[i], x1[order[1:]])
            yy1 = np.maximum(y1[i], y1[order[1:]])
            xx2 = np.minimum(x2[i], x2[order[1:]])
            yy2 = np.minimum(y2[i], y2[order[1:]])
            w = np.maximum(0.0, xx2 - xx1 + 1)
            h = np.maximum(0.0, yy2 - yy1 + 1)
            inter = w * h
            # Intersection over union against every remaining box.
            ovr = inter / (areas[i] + areas[order[1:]] - inter)
            inds = np.where(ovr <= thresh)[0]
            # +1 because inds indexes order[1:], not order.
            order = order[inds + 1]
        return keep

    # Adapted from https://github.com/Hakuyume/chainer-ssd
    def decode(self, loc, priors, variances):
        """Decode locations from predictions using priors to undo
        the encoding we did for offset regression at train time.
        Args:
            loc (tensor): location predictions for loc layers,
                Shape: [num_priors,4]
            priors (tensor): Prior boxes in center-offset form.
                Shape: [num_priors,4].
            variances: (list[float]) Variances of priorboxes
        Return:
            decoded bounding box predictions
        """
        boxes = torch.cat((
            priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:],
            priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1)
        # Convert (cx, cy, w, h) to corner form (x1, y1, x2, y2) in place.
        boxes[:, :2] -= boxes[:, 2:] / 2
        boxes[:, 2:] += boxes[:, :2]
        return boxes

    def decode_landm(self, pre, priors, variances):
        """Decode landm from predictions using priors to undo
        the encoding we did for offset regression at train time.
        Args:
            pre (tensor): landm predictions for loc layers,
                Shape: [num_priors,10]
            priors (tensor): Prior boxes in center-offset form.
                Shape: [num_priors,4].
            variances: (list[float]) Variances of priorboxes
        Return:
            decoded landm predictions
        """
        # Five (x, y) landmark offsets, each decoded relative to the prior
        # centre and scaled by the prior size.
        landms = torch.cat((priors[:, :2] + pre[:, :2] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 2:4] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 4:6] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 6:8] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 8:10] * variances[0] * priors[:, 2:],
                            ), dim=1)
        return landms

    # https://github.com/biubug6/Pytorch_Retinaface
    def priorbox_forward(self, height, width):
        """Generate normalised (cx, cy, w, h) anchor boxes for every
        feature-map cell at every configured scale."""
        feature_maps = [[ceil(height / step), ceil(width / step)] for step in self.steps]
        anchors = []
        for k, f in enumerate(feature_maps):
            min_sizes = self.min_sizes[k]
            for i, j in product(range(f[0]), range(f[1])):
                for min_size in min_sizes:
                    s_kx = min_size / width
                    s_ky = min_size / height
                    dense_cx = [x * self.steps[k] / width for x in [j + 0.5]]
                    dense_cy = [y * self.steps[k] / height for y in [i + 0.5]]
                    for cy, cx in product(dense_cy, dense_cx):
                        anchors += [cx, cy, s_kx, s_ky]
        return torch.Tensor(anchors).view(-1, 4)
--- FILE SEPARATOR ---
# encoding: utf-8
from __future__ import absolute_import, unicode_literals
import numpy as np
import torch
from .base import Base
class FaceRecognition(Base):
    """Face embedding model: maps an aligned HWC face crop to a feature vector."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Normalisation constants from the model meta file.
        self.mean = self.meta_conf['mean']
        self.std = self.meta_conf['std']

    def load(self, device=None):
        """Load the serialized model; unwrap the saved wrapper on CPU."""
        super().load(device)
        if self.device.type == "cpu":
            # The checkpoint stores a wrapped model exposing .module
            # (e.g. nn.DataParallel); on CPU use the bare inner module.
            self.model = self.model.module.cpu()

    def get(self, image):
        """Return the (squeezed) feature vector for one face image.

        The image must match the model's configured input size; grayscale
        (H, W) input is accepted and expanded with a channel axis.
        """
        assert isinstance(image, np.ndarray)
        # BUG FIX: normalise the channel layout *before* unpacking the shape.
        # The original unpacked (h, w, c) first, which raised ValueError for
        # exactly the 2-D / 4-D inputs these branches were written to handle,
        # making them unreachable.
        if image.ndim == 2:
            image = image[:, :, np.newaxis]
        if image.ndim == 4:
            # NOTE(review): this slices axis 2; presumably intended to drop
            # an alpha channel (RGBA input) -- confirm against callers.
            image = image[:, :, :3]
        assert image.ndim <= 4
        height, width, channels = image.shape
        assert height == self.input_height and width == self.input_width
        # HWC -> CHW, normalise, then add a batch axis.
        image = (image.transpose((2, 0, 1)) - self.mean) / self.std
        image = image.astype(np.float32)
        image = torch.from_numpy(image)
        image = torch.unsqueeze(image, 0)
        image = image.to(self.device)
        with torch.no_grad():
            feature = self.model(image).cpu().numpy()
        feature = np.squeeze(feature)
        return feature
| {
"imports": [
"/xface/model/face_alignment.py",
"/xface/model/face_detection.py",
"/xface/model/face_recognition.py"
]
} |
007gzs/xface | refs/heads/master | /xface/model/face_recognition.py | # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import numpy as np
import torch
from .base import Base
class FaceRecognition(Base):
    """Face embedding model: maps an aligned HWC face crop to a feature vector."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Normalisation constants from the model meta file.
        self.mean = self.meta_conf['mean']
        self.std = self.meta_conf['std']

    def load(self, device=None):
        """Load the serialized model; unwrap the saved wrapper on CPU."""
        super().load(device)
        if self.device.type == "cpu":
            # The checkpoint stores a wrapped model exposing .module
            # (e.g. nn.DataParallel); on CPU use the bare inner module.
            self.model = self.model.module.cpu()

    def get(self, image):
        """Return the (squeezed) feature vector for one face image.

        The image must match the model's configured input size; grayscale
        (H, W) input is accepted and expanded with a channel axis.
        """
        assert isinstance(image, np.ndarray)
        # BUG FIX: normalise the channel layout *before* unpacking the shape.
        # The original unpacked (h, w, c) first, which raised ValueError for
        # exactly the 2-D / 4-D inputs these branches were written to handle,
        # making them unreachable.
        if image.ndim == 2:
            image = image[:, :, np.newaxis]
        if image.ndim == 4:
            # NOTE(review): this slices axis 2; presumably intended to drop
            # an alpha channel (RGBA input) -- confirm against callers.
            image = image[:, :, :3]
        assert image.ndim <= 4
        height, width, channels = image.shape
        assert height == self.input_height and width == self.input_width
        # HWC -> CHW, normalise, then add a batch axis.
        image = (image.transpose((2, 0, 1)) - self.mean) / self.std
        image = image.astype(np.float32)
        image = torch.from_numpy(image)
        image = torch.unsqueeze(image, 0)
        image = image.to(self.device)
        with torch.no_grad():
            feature = self.model(image).cpu().numpy()
        feature = np.squeeze(feature)
        return feature
| # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import json
import os
import sys
import torch
class Config(dict):
    """dict whose items are also readable/writable as attributes.

    Missing keys read as None instead of raising AttributeError.
    """

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            return None

    def __setattr__(self, key, value):
        self[key] = value
class Base:
    """Common loader for on-disk model bundles.

    A bundle lives at ``<model_path>/<model_category>/<model_name>/`` and
    contains a JSON meta file describing the bundle plus the serialized
    model file itself.
    """

    def __init__(self, model_path, model_category, model_name, meta_file='model_meta.json'):
        model_root_dir = os.path.join(model_path, model_category, model_name)
        meta_file_path = os.path.join(model_root_dir, meta_file)
        with open(meta_file_path, 'r') as f:
            self.meta_conf = json.load(f)
        # Make the directory *containing* model_path importable; presumably
        # so classes referenced by the pickled model resolve when
        # torch.load runs -- confirm against the bundle layout.
        model_root = os.path.dirname(model_path)
        if model_root not in sys.path:
            sys.path.append(model_root)
        self.model_path = model_path
        self.model_category = model_category
        self.model_name = model_name
        self.model_file_path = os.path.join(model_root_dir, self.meta_conf['model_file'])
        self.model_type = self.meta_conf['model_type']
        self.model_info = self.meta_conf['model_info']
        self.release_date = self.meta_conf['release_date']
        self.input_height = self.meta_conf['input_height']
        self.input_width = self.meta_conf['input_width']
        # Both populated by load().
        self.device = None
        self.model = None

    def load(self, device=None):
        """Deserialize the model onto ``device``; auto-selects the current
        CUDA device when available, else CPU. May only be called once."""
        assert self.model is None
        if device is None:
            if torch.cuda.is_available():
                device = "cuda:%d" % torch.cuda.current_device()
            else:
                device = "cpu"
        self.device = torch.device(device)
        # NOTE: torch.load unpickles arbitrary code -- only load trusted
        # model files.
        self.model = torch.load(self.model_file_path, map_location=self.device)
        self.model.eval()
| {
"imports": [
"/xface/model/base.py"
]
} |
007gzs/xface | refs/heads/master | /xface/model/face_alignment.py | # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import cv2
import torch
import numpy as np
import torch.backends.cudnn as cudnn
from torchvision import transforms
from .base import Base
class FaceAlignment(Base):
    """Landmark regression model: crops a padded square region around a
    detection box and predicts normalised landmark positions inside it."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The network expects square inputs, so one side length is enough.
        self.img_size = self.meta_conf['input_width']

    def get(self, image, det):
        """Return landmark coordinates (in original-image space) for one
        detection ``det`` = (x1, y1, x2, y2, ...)."""
        cudnn.benchmark = True
        assert isinstance(image, np.ndarray)
        frame = np.float32(image.copy())
        top_left = np.array([det[0], det[1]])
        bottom_right = np.array([det[2], det[3]])
        extent = bottom_right - top_left + 1
        centre = (top_left + extent / 2).astype(np.int32)
        # Square crop 1.2x the larger box side; may overhang the image.
        box_size = int(np.max(extent) * 1.2)
        origin = centre - box_size // 2
        x1, y1 = origin
        x2, y2 = origin + box_size
        height, width, _ = frame.shape
        pad_left, pad_top = max(0, -x1), max(0, -y1)
        pad_right, pad_bottom = max(0, x2 - width), max(0, y2 - height)
        x1, y1 = max(0, x1), max(0, y1)
        x2, y2 = min(width, x2), min(height, y2)
        crop = image[y1:y2, x1:x2]
        if pad_left or pad_top or pad_right or pad_bottom:
            # Zero-pad so the crop is exactly box_size x box_size.
            crop = cv2.copyMakeBorder(crop, pad_top, pad_bottom,
                                      pad_left, pad_right,
                                      cv2.BORDER_CONSTANT, 0)
        crop = cv2.resize(crop, (self.img_size, self.img_size))
        batch = transforms.Compose([transforms.ToTensor()])(crop).unsqueeze(0)
        self.model = self.model.to(self.device)
        with torch.no_grad():
            _, normalised = self.model(batch.to(self.device))
        normalised = normalised.cpu().numpy()
        normalised = normalised.reshape(normalised.shape[0], -1, 2)
        # Scale back using the *unclipped* crop origin so the padding added
        # above is accounted for.
        return normalised[0] * [box_size, box_size] + origin
| # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import json
import os
import sys
import torch
class Config(dict):
    """dict whose items are also readable/writable as attributes.

    Missing keys read as None instead of raising AttributeError.
    """

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            return None

    def __setattr__(self, key, value):
        self[key] = value
class Base:
    """Common loader for on-disk model bundles.

    A bundle lives at ``<model_path>/<model_category>/<model_name>/`` and
    contains a JSON meta file describing the bundle plus the serialized
    model file itself.
    """

    def __init__(self, model_path, model_category, model_name, meta_file='model_meta.json'):
        model_root_dir = os.path.join(model_path, model_category, model_name)
        meta_file_path = os.path.join(model_root_dir, meta_file)
        with open(meta_file_path, 'r') as f:
            self.meta_conf = json.load(f)
        # Make the directory *containing* model_path importable; presumably
        # so classes referenced by the pickled model resolve when
        # torch.load runs -- confirm against the bundle layout.
        model_root = os.path.dirname(model_path)
        if model_root not in sys.path:
            sys.path.append(model_root)
        self.model_path = model_path
        self.model_category = model_category
        self.model_name = model_name
        self.model_file_path = os.path.join(model_root_dir, self.meta_conf['model_file'])
        self.model_type = self.meta_conf['model_type']
        self.model_info = self.meta_conf['model_info']
        self.release_date = self.meta_conf['release_date']
        self.input_height = self.meta_conf['input_height']
        self.input_width = self.meta_conf['input_width']
        # Both populated by load().
        self.device = None
        self.model = None

    def load(self, device=None):
        """Deserialize the model onto ``device``; auto-selects the current
        CUDA device when available, else CPU. May only be called once."""
        assert self.model is None
        if device is None:
            if torch.cuda.is_available():
                device = "cuda:%d" % torch.cuda.current_device()
            else:
                device = "cpu"
        self.device = torch.device(device)
        # NOTE: torch.load unpickles arbitrary code -- only load trusted
        # model files.
        self.model = torch.load(self.model_file_path, map_location=self.device)
        self.model.eval()
| {
"imports": [
"/xface/model/base.py"
]
} |
007gzs/xface | refs/heads/master | /xface/model/face_detection.py | # encoding: utf-8
from __future__ import absolute_import, unicode_literals
from itertools import product
from math import ceil
import numpy as np
import torch
import torch.backends.cudnn as cudnn
from .base import Base
class FaceDetector(Base):
    """RetinaFace-style detector: returns face boxes with scores plus
    5-point landmarks for an input HWC image."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Anchor-generation / decoding hyper-parameters from the meta file.
        self.min_sizes = self.meta_conf['min_sizes']
        self.steps = self.meta_conf['steps']
        self.variance = self.meta_conf['variance']
        self.in_channel = self.meta_conf['in_channel']
        self.out_channel = self.meta_conf['out_channel']
        self.confidence_threshold = self.meta_conf['confidence_threshold']

    def detect(self, image):
        """Run detection on one HWC ndarray.

        Returns (dets, landmarks): dets is an (N, 5) float32 array of
        [x1, y1, x2, y2, score]; landmarks is an (N, 5, 2) array of
        landmark coordinates paired row-for-row with dets.
        """
        cudnn.benchmark = True
        assert isinstance(image, np.ndarray)
        input_height, input_width, _ = image.shape
        img = np.float32(image)
        # Maps normalised box coords (x, y, x, y) back to pixel units.
        scale_box = torch.Tensor([img.shape[1], img.shape[0], img.shape[1], img.shape[0]])
        # Per-channel mean subtraction; presumably the training-time BGR
        # means -- confirm against the model's training pipeline.
        img -= (104, 117, 123)
        img = img.transpose(2, 0, 1)
        self.model = self.model.to(self.device)
        img = torch.from_numpy(img).unsqueeze(0)
        # (w, h) repeated for each of the 5 landmark points; img is NCHW
        # here, so shape[3] is width and shape[2] is height.
        scale_landms = torch.Tensor([img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                                     img.shape[3], img.shape[2], img.shape[3], img.shape[2],
                                     img.shape[3], img.shape[2]])
        with torch.no_grad():
            img = img.to(self.device)
            scale_box = scale_box.to(self.device)
            scale_landms = scale_landms.to(self.device)
            loc, conf, landms = self.model(img)
            # Anchors are regenerated per call for the current image size.
            priors = self.priorbox_forward(height=input_height, width=input_width)
            priors = priors.to(self.device)
            prior_data = priors.data
            boxes = self.decode(loc.data.squeeze(0), prior_data, self.variance)
            boxes = boxes * scale_box
            boxes = boxes.cpu().numpy()
            # Column 1 is the "face" class probability.
            scores = conf.squeeze(0).data.cpu().numpy()[:, 1]
            landmarks = self.decode_landm(landms.data.squeeze(0), prior_data, self.variance)
            landmarks = landmarks * scale_landms
            landmarks = landmarks.reshape((landmarks.shape[0], 5, 2))
            landmarks = landmarks.cpu().numpy()
        # ignore low scores
        inds = np.where(scores > self.confidence_threshold)[0]
        boxes = boxes[inds]
        scores = scores[inds]
        landmarks = landmarks[inds]
        # keep top-K before NMS (sort by descending score)
        order = scores.argsort()[::-1]
        boxes = boxes[order]
        scores = scores[order]
        landmarks = landmarks[order]
        # do NMS
        nms_threshold = 0.2
        dets = np.hstack((boxes, scores[:, np.newaxis])).astype(np.float32, copy=False)
        keep = self.py_cpu_nms(dets, nms_threshold)
        dets = dets[keep, :]
        landmarks = landmarks[keep]
        return dets, landmarks

    def py_cpu_nms(self, dets, thresh):
        """
        Python version NMS.
        Returns:
            The kept index after NMS.
        """
        x1 = dets[:, 0]
        y1 = dets[:, 1]
        x2 = dets[:, 2]
        y2 = dets[:, 3]
        scores = dets[:, 4]
        # +1 treats box coordinates as inclusive pixel indices.
        areas = (x2 - x1 + 1) * (y2 - y1 + 1)
        order = scores.argsort()[::-1]
        keep = []
        while order.size > 0:
            # Highest remaining score survives; suppress its big overlaps.
            i = order[0]
            keep.append(i)
            xx1 = np.maximum(x1[i], x1[order[1:]])
            yy1 = np.maximum(y1[i], y1[order[1:]])
            xx2 = np.minimum(x2[i], x2[order[1:]])
            yy2 = np.minimum(y2[i], y2[order[1:]])
            w = np.maximum(0.0, xx2 - xx1 + 1)
            h = np.maximum(0.0, yy2 - yy1 + 1)
            inter = w * h
            # Intersection over union against every remaining box.
            ovr = inter / (areas[i] + areas[order[1:]] - inter)
            inds = np.where(ovr <= thresh)[0]
            # +1 because inds indexes order[1:], not order.
            order = order[inds + 1]
        return keep

    # Adapted from https://github.com/Hakuyume/chainer-ssd
    def decode(self, loc, priors, variances):
        """Decode locations from predictions using priors to undo
        the encoding we did for offset regression at train time.
        Args:
            loc (tensor): location predictions for loc layers,
                Shape: [num_priors,4]
            priors (tensor): Prior boxes in center-offset form.
                Shape: [num_priors,4].
            variances: (list[float]) Variances of priorboxes
        Return:
            decoded bounding box predictions
        """
        boxes = torch.cat((
            priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:],
            priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1)
        # Convert (cx, cy, w, h) to corner form (x1, y1, x2, y2) in place.
        boxes[:, :2] -= boxes[:, 2:] / 2
        boxes[:, 2:] += boxes[:, :2]
        return boxes

    def decode_landm(self, pre, priors, variances):
        """Decode landm from predictions using priors to undo
        the encoding we did for offset regression at train time.
        Args:
            pre (tensor): landm predictions for loc layers,
                Shape: [num_priors,10]
            priors (tensor): Prior boxes in center-offset form.
                Shape: [num_priors,4].
            variances: (list[float]) Variances of priorboxes
        Return:
            decoded landm predictions
        """
        # Five (x, y) landmark offsets, each decoded relative to the prior
        # centre and scaled by the prior size.
        landms = torch.cat((priors[:, :2] + pre[:, :2] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 2:4] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 4:6] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 6:8] * variances[0] * priors[:, 2:],
                            priors[:, :2] + pre[:, 8:10] * variances[0] * priors[:, 2:],
                            ), dim=1)
        return landms

    # https://github.com/biubug6/Pytorch_Retinaface
    def priorbox_forward(self, height, width):
        """Generate normalised (cx, cy, w, h) anchor boxes for every
        feature-map cell at every configured scale."""
        feature_maps = [[ceil(height / step), ceil(width / step)] for step in self.steps]
        anchors = []
        for k, f in enumerate(feature_maps):
            min_sizes = self.min_sizes[k]
            for i, j in product(range(f[0]), range(f[1])):
                for min_size in min_sizes:
                    s_kx = min_size / width
                    s_ky = min_size / height
                    dense_cx = [x * self.steps[k] / width for x in [j + 0.5]]
                    dense_cy = [y * self.steps[k] / height for y in [i + 0.5]]
                    for cy, cx in product(dense_cy, dense_cx):
                        anchors += [cx, cy, s_kx, s_ky]
        return torch.Tensor(anchors).view(-1, 4)
| # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import json
import os
import sys
import torch
class Config(dict):
    """dict whose items are also readable/writable as attributes.

    Missing keys read as None instead of raising AttributeError.
    """

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            return None

    def __setattr__(self, key, value):
        self[key] = value
class Base:
    """Common loader for on-disk model bundles.

    A bundle lives at ``<model_path>/<model_category>/<model_name>/`` and
    contains a JSON meta file describing the bundle plus the serialized
    model file itself.
    """

    def __init__(self, model_path, model_category, model_name, meta_file='model_meta.json'):
        model_root_dir = os.path.join(model_path, model_category, model_name)
        meta_file_path = os.path.join(model_root_dir, meta_file)
        with open(meta_file_path, 'r') as f:
            self.meta_conf = json.load(f)
        # Make the directory *containing* model_path importable; presumably
        # so classes referenced by the pickled model resolve when
        # torch.load runs -- confirm against the bundle layout.
        model_root = os.path.dirname(model_path)
        if model_root not in sys.path:
            sys.path.append(model_root)
        self.model_path = model_path
        self.model_category = model_category
        self.model_name = model_name
        self.model_file_path = os.path.join(model_root_dir, self.meta_conf['model_file'])
        self.model_type = self.meta_conf['model_type']
        self.model_info = self.meta_conf['model_info']
        self.release_date = self.meta_conf['release_date']
        self.input_height = self.meta_conf['input_height']
        self.input_width = self.meta_conf['input_width']
        # Both populated by load().
        self.device = None
        self.model = None

    def load(self, device=None):
        """Deserialize the model onto ``device``; auto-selects the current
        CUDA device when available, else CPU. May only be called once."""
        assert self.model is None
        if device is None:
            if torch.cuda.is_available():
                device = "cuda:%d" % torch.cuda.current_device()
            else:
                device = "cpu"
        self.device = torch.device(device)
        # NOTE: torch.load unpickles arbitrary code -- only load trusted
        # model files.
        self.model = torch.load(self.model_file_path, map_location=self.device)
        self.model.eval()
| {
"imports": [
"/xface/model/base.py"
]
} |
007gzs/xface | refs/heads/master | /xface/face_analysis.py | # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import os
import numpy as np
from xface.core.image_cropper import crop_image_by_mat
from xface.model import FaceAlignment, FaceDetector, FaceRecognition
class Face:
    """Container for a single detected face.

    Attributes:
        bbox: face bounding box
        det_score: detection confidence score
        landmark: 5-point landmarks
        landmark_106: 106-point landmarks
        feature: recognition feature vector
        sim_face_ids: ids of similar registered faces
    """

    def __init__(self, *, bbox, det_score, landmark, landmark_106, feature, sim_face_ids):
        self.bbox = bbox
        self.det_score = det_score
        self.landmark = landmark
        self.landmark_106 = landmark_106
        self.feature = feature
        self.sim_face_ids = sim_face_ids

    @classmethod
    def compute_sim(cls, face1, face2):
        """Dot-product similarity; each argument may be a Face or a raw
        feature vector."""
        def as_feature(obj):
            return obj.feature if isinstance(obj, Face) else obj
        return np.dot(as_feature(face1), as_feature(face2))
class FaceAnalysis:
    """High-level pipeline: detect faces, then optionally align (106
    landmarks), extract features and match against registered faces."""

    def __init__(self, *, model_path=None, with_mask=False, lock=False, load_alignment=True, load_recognition=True):
        """
        :param model_path: model directory (defaults to the bundled 'models' dir)
        :param with_mask: use the mask-aware ('2.0') model variants
        :param lock: serialize get_faces() with a threading.Lock
        :param load_alignment: load the landmark (alignment) model
        :param load_recognition: load the face-recognition model
        """
        if model_path is None:
            model_path = os.path.join(os.path.dirname(__file__), 'models')
        mask_flag = '2.0' if with_mask else '1.0'
        self.face_detector = FaceDetector(model_path, 'face_detection', 'face_detection_' + mask_flag)
        if load_alignment:
            self.face_alignment = FaceAlignment(model_path, 'face_alignment', 'face_alignment_' + mask_flag)
        else:
            self.face_alignment = None
        if load_recognition:
            self.face_recognition = FaceRecognition(model_path, 'face_recognition', 'face_recognition_' + mask_flag)
        else:
            self.face_recognition = None
        self.registered_faces = list()
        if lock:
            import threading
            self.lock = threading.Lock()
        else:
            # Drop-in no-op context manager when locking is disabled.
            class NoLock:
                def __enter__(self):
                    pass

                def __exit__(self, exc_type, exc_val, exc_tb):
                    pass
            self.lock = NoLock()

    def register_face(self, face_id, face):
        """Register a face for later matching.

        :param face_id: unique identifier
        :param face: Face instance or raw feature vector
        """
        self.registered_faces.append((face_id, face))

    def check_face(self, face, min_sim=0.6, max_count=1):
        """Match ``face`` against the registered faces.

        :param face: Face instance or feature vector
        :param min_sim: minimum similarity to count as a match
        :param max_count: max matches to return (<= 0 means all)
        :return: list of (face_id, similarity) tuples, best match first
        """
        ret = list()
        for face_id, reg_face in self.registered_faces:
            sim = Face.compute_sim(face, reg_face)
            if sim > min_sim:
                ret.append((face_id, sim))
        ret = list(sorted(ret, key=lambda x: -x[1]))
        if max_count > 0:
            return ret[:max_count]
        else:
            return ret

    def load(self, device=None):
        """Load every configured model onto ``device``."""
        self.face_detector.load(device)
        if self.face_alignment is not None:
            self.face_alignment.load(device)
        if self.face_recognition is not None:
            self.face_recognition.load(device)

    def get_faces(
            self,
            image,
            *,
            img_scaled=1.0,
            max_num=0,
            get_landmark_106=True,
            get_feature=True,
            min_sim=0.6,
            match_num=1
    ):
        """Detect and analyse faces in ``image``.

        :param image: input image (HWC ndarray)
        :param img_scaled: scale already applied to the image; returned
            coordinates are divided by it (mapped back to pre-scaling space)
        :param max_num: maximum faces to return (0 = all)
        :param get_landmark_106: also compute 106-point landmarks
        :param get_feature: also compute recognition features and matches
        :param min_sim: similarity lower bound for matching
        :param match_num: number of matches to return per face
        :return: list of Face objects
        """
        with self.lock:
            dets, landmarks = self.face_detector.detect(image)
            ret = list()
            if dets.shape[0] == 0:
                return ret
            if 0 < max_num < dets.shape[0]:
                # Prefer large faces near the image centre.
                area = (dets[:, 2] - dets[:, 0]) * (dets[:, 3] - dets[:, 1])
                img_center = image.shape[0] // 2, image.shape[1] // 2
                offsets = np.vstack([
                    (dets[:, 0] + dets[:, 2]) / 2 - img_center[1],
                    (dets[:, 1] + dets[:, 3]) / 2 - img_center[0]
                ])
                offset_dist_squared = np.sum(np.power(offsets, 2.0), 0)
                values = area - offset_dist_squared * 2.0  # some extra weight on the centering
                bindex = np.argsort(values)[::-1]
                bindex = bindex[0:max_num]
                dets = dets[bindex, :]
                # BUG FIX: keep landmarks paired with the reordered/trimmed
                # detections (previously only dets was filtered by bindex,
                # so each face was given another face's landmarks).
                landmarks = landmarks[bindex]
            for i in range(dets.shape[0]):
                det = dets[i]
                landmark = landmarks[i]
                landmark_106 = None
                feature = None
                sim_face_ids = None
                if get_landmark_106 and self.face_alignment is not None:
                    landmark_106 = self.face_alignment.get(image, det)
                if get_feature and self.face_recognition is not None:
                    cropped_image = crop_image_by_mat(image, landmark.reshape((np.prod(landmark.shape), )).tolist())
                    feature = self.face_recognition.get(cropped_image)
                    sim_face_ids = self.check_face(feature, min_sim=min_sim, max_count=match_num)
                # BUG FIX: np.int was removed in NumPy 1.24; the builtin int
                # is the documented drop-in replacement.
                ret.append(Face(
                    bbox=(det[:4] / img_scaled).astype(int).tolist(),
                    det_score=float(det[4]),
                    landmark=(landmark / img_scaled).astype(int).tolist(),
                    landmark_106=None if landmark_106 is None else (landmark_106 / img_scaled).astype(int).tolist(),
                    feature=feature,
                    sim_face_ids=sim_face_ids
                ))
            return ret
| # encoding: utf-8
from __future__ import absolute_import, unicode_literals
from .face_alignment import FaceAlignment
from .face_detection import FaceDetector
from .face_recognition import FaceRecognition
__all__ = ['FaceAlignment', 'FaceDetector', 'FaceRecognition']
| {
"imports": [
"/xface/model/__init__.py"
]
} |
00ba/KI | refs/heads/master | /test_tree.py | '''
Created on Sep 4, 2016
@author: oobasatoshi
'''
from tree import *
import unittest
class Test(unittest.TestCase):
    def test_tree(self):
        # NOTE(review): this test cannot currently pass -- `ki1` is only
        # created inside tree.py's `if __name__ == '__main__'` guard, so it
        # is undefined when tree is imported; `ki1.view` is a bound method
        # (view() prints and returns None), so it can never equal a dict;
        # and the expected values ('ki2', 'ki3', '1') don't match what
        # Tree.__init__ stores (Tree objects and the int 1). Also,
        # assertEquals is deprecated in favour of assertEqual. Confirm the
        # intended contract before fixing.
        self.assertEquals(ki1.view, {'left': 'ki2', 'right': 'ki3', 'name': 'one', 'number': '1'})
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.test_tree']
unittest.main() | '''
Created on Sep 4, 2016
@author: oobasatoshi
'''
class Tree:
    """Binary-tree node backed by a dict.

    The node keeps its payload and child links in ``self.nord`` under the
    keys 'name', 'number', 'left' and 'right'. Children default to '' and
    are assigned Tree instances directly by callers.
    """

    def __init__(self, name, number):
        # Build the dict in one literal instead of assigning field-by-field
        # (the original set every key to '' and then overwrote two of them).
        self.nord = {'name': name, 'number': number, 'left': '', 'right': ''}

    def view(self):
        """Print this node's dict representation."""
        # BUG FIX: `print self.nord` is Python-2-only syntax (SyntaxError on
        # Python 3); the call form prints identically on both versions.
        print(self.nord)

    def add(self):
        pass  # TODO: not implemented

    def size(self):
        pass  # TODO: not implemented

    def delete(self):
        pass  # TODO: not implemented

    def del_root(self):
        pass  # TODO: not implemented

    def choose_root(self):
        pass  # TODO: not implemented
if __name__ == '__main__':
    # Manual smoke test: build a three-node tree and dump every node.
    ki1 = Tree('one', 1)
    ki2 = Tree('two', 2)
    ki3 = Tree('three', 3)
    # Children are stored as Tree objects (not names), so the view() calls
    # below print dicts that contain Tree instances.
    ki1.nord['left'] = ki2
    ki1.nord['right'] = ki3
    ki1.view()
    ki2.view()
    ki3.view()
    # Same objects as ki2/ki3, reached through the root's child links.
    ki1.nord['left'].view()
    ki1.nord['right'].view()
| {
"imports": [
"/tree.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/mqttconnector.py | import logging
import json
import paho.mqtt.client as mqttc
from ioctlgw import build, version
from ioctlgw.componentstate import ComponentState
LOG = logging.getLogger(__name__)
class MqttConnector(object):
    """Bridges IO controller boards to MQTT: publishes board state/status
    and dispatches digital-output commands received over MQTT."""

    def __init__(self, service):
        self.service = service
        self.config = self.service.config
        self.mqtt_config = self.config["mqtt"]
        self.mqtt = mqttc.Client(client_id=f"iocontrollergw_{service.instance}")
        self.mqtt_base_topic = self.mqtt_config["topic"]
        self.mqtt.on_connect = self.mqtt_on_connect
        self.mqtt.on_disconnect = self.mqtt_on_disconnect
        self.mqtt.on_message = self.mqtt_on_message
        self.mqtt.on_subscribe = self.mqtt_on_subscribe
        # MQTT status jobs: once at startup, then every ~10 seconds.
        self.service.scheduler.add_job(self.publish_status)
        self.service.scheduler.add_job(self.publish_status, 'interval', seconds=10, jitter=5)

    def start(self):
        """Connect to the broker, subscribe to command topics and start the
        paho network loop in a background thread."""
        LOG.info("MQTT Starting")
        if "user" in self.mqtt_config and "pass" in self.mqtt_config:
            self.mqtt.username_pw_set(self.mqtt_config["user"], self.mqtt_config["pass"])
        mqtt_host = self.mqtt_config["host"]
        mqtt_port = self.mqtt_config["port"]
        LOG.info("MQTT Connecting to %s:%s", mqtt_host, mqtt_port)
        self.mqtt.connect(mqtt_host, mqtt_port, 60)
        # Subscribe to interesting MQTT topics
        topics = [
            "/boards/+/digitaloutput/+/command"
        ]
        for topic_suffix in topics:
            self.mqtt.subscribe(f"{self.mqtt_base_topic}{topic_suffix}")
        self.mqtt.loop_start()

    def mqtt_on_connect(self, client, data, flags, rc):
        LOG.info("MQTT Connected %s", rc)

    def mqtt_on_disconnect(self, client, userdata, rc):
        # BUG FIX: rc == 0 is a *clean* (client-requested) disconnect; only
        # a non-zero rc is unexpected. The original logged "Unexpected" on
        # both branches.
        if rc == 0:
            LOG.info("MQTT disconnected")
        else:
            LOG.warning("Unexpected MQTT disconnection. Will auto-reconnect")

    def mqtt_on_subscribe(self, client, userdata, mid, gqos):
        LOG.info("MQTT Subscribed %s", mid)

    def mqtt_on_message(self, client, userdata, msg):
        """Dispatch a digital-output command to the addressed controller.

        Expected topic: <base>/boards/<name>/digitaloutput/<num>/command
        with payload ON or OFF.
        """
        LOG.info("MQTT Message %s %s", msg.topic, str(msg.payload))
        if not msg.topic.startswith(self.mqtt_base_topic):
            return
        topic = msg.topic[len(self.mqtt_base_topic) + 1:]
        parts = topic.split("/")
        # Resolves the original TODO: validate the topic shape up front.
        if len(parts) < 4:
            LOG.warning("Malformed command topic '%s'", msg.topic)
            return
        controller_name = parts[1]
        component = parts[2]
        num = int(parts[3])
        # BUG FIX: validate the controller name *before* indexing the dict;
        # the original did the lookup first and raised KeyError for unknown
        # controllers, never reaching its own guard.
        if controller_name not in self.service.controllers.keys():
            LOG.warning("Message for unknown iocontroller '%s'", controller_name)
            return
        iocontroller = self.service.controllers[controller_name]
        if component not in ["digitaloutput"]:
            LOG.warning("Message for unknown component '%s'", component)
            return
        if num > iocontroller.num_digital_outputs:
            LOG.warning("Output too high for this board: %s", num)
            return
        action = msg.payload.decode('utf-8').strip().upper()
        if action not in ["OFF", "ON"]:
            LOG.warning("Unsupported action '%s'", action)
            return
        LOG.debug("Requesting %s %s %s %s %s", iocontroller, controller_name, component, num, action)
        iocontroller.request_digitaloutput(ComponentState(component="digitaloutput", num=num, status=action))

    def mqtt_publish_message(self, suffix, payload, qos=0):
        topic = "%s/%s" % (self.mqtt_base_topic, suffix)
        # BUG FIX: honour the qos argument; it was previously accepted but
        # ignored (always published at qos=0).
        self.mqtt.publish(topic=topic, payload=payload, qos=qos)
        LOG.info("%s %s", topic, payload)

    def board_connection_event(self, name, event):
        self.mqtt_publish_message(suffix=f"boards/{name}/connection", payload=event)

    def board_io_event(self, name, state):
        self.mqtt_publish_message(suffix=f"boards/{name}/{state.component}/{state.num}/status", payload=state.status)

    def board_status(self, name, raw_msg):
        # Placeholder hook: raw status frames are currently ignored.
        assert True

    def publish_status(self):
        """Publish build/version and uptime JSON documents."""
        status = {
            "build": build(),
            "version": version()
        }
        self.mqtt_publish_message(suffix="status", payload=json.dumps(status))
        uptime = {
            "minutes": self.service.uptime,
            "started": self.service.startup.isoformat()
        }
        self.mqtt_publish_message(suffix="uptime", payload=json.dumps(uptime))
| # NOTE: These variable will be overwritten during the build process by the Github action.
__BUILD__ = "unknown"
__VERSION__ = "localdev"
def build():
    """Return the build identifier stamped in by CI ('unknown' locally)."""
    return __BUILD__
def version():
    """Return the release version stamped in by CI ('localdev' locally)."""
    return __VERSION__
--- FILE SEPARATOR ---
class ComponentState(object):
    """Normalised (component, num, status) triple for one IO channel.

    component is lower-cased, num coerced to int, status upper-cased, so
    equal states compare equal regardless of the caller's formatting.
    """

    def __init__(self, component, num, status):
        self.component = component.strip().lower()
        self.num = int(num)
        self.status = status.strip().upper()

    def __repr__(self):
        return f"{self.component}_{self.num}_{self.status}"

    def __eq__(self, other):
        # BUG FIX: comparing against a non-ComponentState previously raised
        # AttributeError; return NotImplemented so `==` degrades gracefully.
        if not isinstance(other, ComponentState):
            return NotImplemented
        return (self.component, self.num, self.status) == \
               (other.component, other.num, other.status)

    def __hash__(self):
        # BUG FIX: defining __eq__ without __hash__ made instances
        # unhashable in Python 3; hash on the same triple __eq__ compares.
        return hash((self.component, self.num, self.status))
| {
"imports": [
"/ioctlgw/__init__.py",
"/ioctlgw/componentstate.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/boards/tcpinterface.py | from ioctlgw.boards.baseinterface import BaseInterface
class TcpInterface(BaseInterface):
    # Plain TCP transport: all protocol behaviour lives in BaseInterface;
    # this subclass exists only as a named board-interface type.
    pass
|
import logging
import socket
import time
import threading
from apscheduler.schedulers.background import BackgroundScheduler
from multiprocessing import Queue
from ioctlgw.componentstate import ComponentState
DEFAULT_STATUS_INTERVAL = 60
DEFAULT_CONNECTION_TIMEOUT = 5
DEFAULT_CONNECTION_RECONNECT = 2
DEFAULT_COMMAND_PAUSE = 0.1
DEFAULT_DO_ALL_CHECK = 5
LOG = logging.getLogger(__name__)
class BaseInterface(threading.Thread):
STATIC_RESPONSES = {
"01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
"010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
"01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
"0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
"01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
"0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
"01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
"0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
"01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
"0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
"01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
"010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
"01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
"0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
"01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
"0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
}
STATIC_REQUESTS = {
"01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
"010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
"01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
"0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
"01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
"0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
"01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
"0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
"01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
"0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
"01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
"010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
"01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
"0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
"01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
"0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
}
def __init__(self, name, address, service, num_digital_outputs=0, num_digital_inputs=0):
threading.Thread.__init__(self)
self.name = name.strip().lower()
self.service = service
self.requestqueue = Queue()
self.address = address
self.host = address.split(":")[0]
self.port = int(address.split(":")[1])
self.state_di_current = None
self.scheduler = BackgroundScheduler(timezone=service.timezone)
self.scheduler.add_job(self.push_status, 'interval', seconds=DEFAULT_STATUS_INTERVAL, jitter=2)
self._connection_state = "disconnected"
self._connection_count = 0
self.status = {
"digitalinput": {},
"digitaloutput": {}
}
self.num_digital_outputs = num_digital_outputs
self.num_digital_inputs = num_digital_inputs
def connect(self):
while True:
try:
self._connection_state = "disconnected"
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
LOG.info("%s socket connecting %s:%s", self.name, self.host, self.port)
s.settimeout(DEFAULT_CONNECTION_TIMEOUT)
s.connect((self.host, self.port))
LOG.info("%s socket connected", self.name)
self._connection_state = "connected"
self._connection_count += 1
self.service.queue_boards_connection.put({"event": "connected", "name": self.name})
return s
except socket.error as e:
LOG.warning("%s socket error %s reconnecting", self.name, e)
self._connection_state = "disconnected"
self.service.queue_boards_connection.put({"event": "disconnected", "name": self.name})
time.sleep(DEFAULT_CONNECTION_RECONNECT)
except Exception as e:
LOG.warning("%s uncaught exception in socket connect %s", self.name, e)
def run(self):
self.scheduler.start()
dest = self.connect()
do_all_check_count = None
while True:
try:
# request digital input status
dest.send(bytes.fromhex('01 01 00 00 00 08 3d cc'))
data = dest.recv(8)
self.process_response_packets(data=data, response_to="read_di_status_all")
if do_all_check_count is None or int(do_all_check_count * DEFAULT_COMMAND_PAUSE) >= DEFAULT_DO_ALL_CHECK:
# request digital output status
dest.send(bytes.fromhex('01 01 00 10 00 08 3c 09'))
data = dest.recv(8)
self.process_response_packets(data=data, response_to="read_do_status_all")
do_all_check_count = 0
if self.requestqueue.empty() is False:
request = self.requestqueue.get()
for hex, state in self.STATIC_REQUESTS.items():
if request == state:
dest.send(bytes.fromhex(hex))
data = dest.recv(8)
self.process_response_packets(data=data)
else:
time.sleep(DEFAULT_COMMAND_PAUSE)
do_all_check_count += 1
except socket.error as e:
LOG.warning("%s socket error %s reconnecting", self.name, e)
dest = self.connect()
except Exception as e:
LOG.warning("%s uncaught exception in loop %s", self.name, e)
def process_response_packets(self, data, response_to=None):
h = data.hex()
if h in self.STATIC_RESPONSES.keys():
outcome = self.STATIC_RESPONSES[h]
# LOG.info("%s packet response matched in static table", self.name)
pin_changed = self.update_state(state=outcome)
if pin_changed:
self.push_status(component=outcome.component, num=outcome.num)
elif h.startswith("010101"):
# Handle DI / DO responses.
if response_to is None:
LOG.warning("Unknown treble 01 response")
return
elif response_to == "read_di_status_all":
component = "digitalinput"
elif response_to == "read_do_status_all":
component = "digitaloutput"
do_hex = "%s%s" % (h[6], h[7])
t = bin(int(do_hex, 16)).zfill(8)
pins = self.bits_to_pins(bits=t)
for pin, status in pins.items():
pin_changed = self.update_state(ComponentState(component=component, num=pin, status=status))
if pin_changed:
self.push_status(component=component, num=pin)
else:
LOG.warning("%s Response packets unexpected: %s", self.name, h)
return
def request_digitaloutput(self, state):
# called via MQTT
self.requestqueue.put(state)
def update_state(self, state):
current_status = self.status[state.component].get(state.num, None)
self.status[state.component][state.num] = state.status
if current_status != state.status:
return True
else:
return False
def push_status(self, component=None, num=None):
if component is not None and num is not None:
# individual component status
status = self.status[component][num]
self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
else:
# all component status
for component in self.status.keys():
for num, status in self.status[component].items():
self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
# board status
self.service.queue_boards_status.put({"name": self.name, "address": self.address})
def bits_to_pins(self, bits):
sbits = str(bits)
h = {}
for p in range(1, 9):
h[p] = "OFF"
if sbits[len(bits) - p] == "1":
h[p] = "ON"
return h
| {
"imports": [
"/ioctlgw/boards/baseinterface.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/boards/hhc/hhcn8i8o/__init__.py | from ioctlgw.boards.baseinterface import BaseInterface
class HhcN8i8o(BaseInterface):
def __init__(self, **kwargs):
super(HhcN8i8o, self).__init__(num_digital_outputs=8, num_digital_inputs=8, **kwargs)
|
import logging
import socket
import time
import threading
from apscheduler.schedulers.background import BackgroundScheduler
from multiprocessing import Queue
from ioctlgw.componentstate import ComponentState
DEFAULT_STATUS_INTERVAL = 60
DEFAULT_CONNECTION_TIMEOUT = 5
DEFAULT_CONNECTION_RECONNECT = 2
DEFAULT_COMMAND_PAUSE = 0.1
DEFAULT_DO_ALL_CHECK = 5
LOG = logging.getLogger(__name__)
class BaseInterface(threading.Thread):
STATIC_RESPONSES = {
"01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
"010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
"01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
"0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
"01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
"0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
"01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
"0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
"01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
"0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
"01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
"010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
"01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
"0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
"01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
"0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
}
STATIC_REQUESTS = {
"01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
"010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
"01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
"0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
"01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
"0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
"01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
"0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
"01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
"0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
"01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
"010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
"01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
"0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
"01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
"0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
}
def __init__(self, name, address, service, num_digital_outputs=0, num_digital_inputs=0):
threading.Thread.__init__(self)
self.name = name.strip().lower()
self.service = service
self.requestqueue = Queue()
self.address = address
self.host = address.split(":")[0]
self.port = int(address.split(":")[1])
self.state_di_current = None
self.scheduler = BackgroundScheduler(timezone=service.timezone)
self.scheduler.add_job(self.push_status, 'interval', seconds=DEFAULT_STATUS_INTERVAL, jitter=2)
self._connection_state = "disconnected"
self._connection_count = 0
self.status = {
"digitalinput": {},
"digitaloutput": {}
}
self.num_digital_outputs = num_digital_outputs
self.num_digital_inputs = num_digital_inputs
def connect(self):
while True:
try:
self._connection_state = "disconnected"
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
LOG.info("%s socket connecting %s:%s", self.name, self.host, self.port)
s.settimeout(DEFAULT_CONNECTION_TIMEOUT)
s.connect((self.host, self.port))
LOG.info("%s socket connected", self.name)
self._connection_state = "connected"
self._connection_count += 1
self.service.queue_boards_connection.put({"event": "connected", "name": self.name})
return s
except socket.error as e:
LOG.warning("%s socket error %s reconnecting", self.name, e)
self._connection_state = "disconnected"
self.service.queue_boards_connection.put({"event": "disconnected", "name": self.name})
time.sleep(DEFAULT_CONNECTION_RECONNECT)
except Exception as e:
LOG.warning("%s uncaught exception in socket connect %s", self.name, e)
def run(self):
self.scheduler.start()
dest = self.connect()
do_all_check_count = None
while True:
try:
# request digital input status
dest.send(bytes.fromhex('01 01 00 00 00 08 3d cc'))
data = dest.recv(8)
self.process_response_packets(data=data, response_to="read_di_status_all")
if do_all_check_count is None or int(do_all_check_count * DEFAULT_COMMAND_PAUSE) >= DEFAULT_DO_ALL_CHECK:
# request digital output status
dest.send(bytes.fromhex('01 01 00 10 00 08 3c 09'))
data = dest.recv(8)
self.process_response_packets(data=data, response_to="read_do_status_all")
do_all_check_count = 0
if self.requestqueue.empty() is False:
request = self.requestqueue.get()
for hex, state in self.STATIC_REQUESTS.items():
if request == state:
dest.send(bytes.fromhex(hex))
data = dest.recv(8)
self.process_response_packets(data=data)
else:
time.sleep(DEFAULT_COMMAND_PAUSE)
do_all_check_count += 1
except socket.error as e:
LOG.warning("%s socket error %s reconnecting", self.name, e)
dest = self.connect()
except Exception as e:
LOG.warning("%s uncaught exception in loop %s", self.name, e)
def process_response_packets(self, data, response_to=None):
h = data.hex()
if h in self.STATIC_RESPONSES.keys():
outcome = self.STATIC_RESPONSES[h]
# LOG.info("%s packet response matched in static table", self.name)
pin_changed = self.update_state(state=outcome)
if pin_changed:
self.push_status(component=outcome.component, num=outcome.num)
elif h.startswith("010101"):
# Handle DI / DO responses.
if response_to is None:
LOG.warning("Unknown treble 01 response")
return
elif response_to == "read_di_status_all":
component = "digitalinput"
elif response_to == "read_do_status_all":
component = "digitaloutput"
do_hex = "%s%s" % (h[6], h[7])
t = bin(int(do_hex, 16)).zfill(8)
pins = self.bits_to_pins(bits=t)
for pin, status in pins.items():
pin_changed = self.update_state(ComponentState(component=component, num=pin, status=status))
if pin_changed:
self.push_status(component=component, num=pin)
else:
LOG.warning("%s Response packets unexpected: %s", self.name, h)
return
def request_digitaloutput(self, state):
# called via MQTT
self.requestqueue.put(state)
def update_state(self, state):
current_status = self.status[state.component].get(state.num, None)
self.status[state.component][state.num] = state.status
if current_status != state.status:
return True
else:
return False
def push_status(self, component=None, num=None):
if component is not None and num is not None:
# individual component status
status = self.status[component][num]
self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
else:
# all component status
for component in self.status.keys():
for num, status in self.status[component].items():
self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
# board status
self.service.queue_boards_status.put({"name": self.name, "address": self.address})
def bits_to_pins(self, bits):
sbits = str(bits)
h = {}
for p in range(1, 9):
h[p] = "OFF"
if sbits[len(bits) - p] == "1":
h[p] = "ON"
return h
| {
"imports": [
"/ioctlgw/boards/baseinterface.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /run.py | #!/usr/bin/env python3
import argparse
import logging
import os
import sys
import socket
import time
import yaml
from apscheduler.schedulers.background import BackgroundScheduler
from datetime import datetime
from ioctlgw import build, version
from ioctlgw.boards import get_board
from ioctlgw.mqttconnector import MqttConnector
# from ioctlgw.web import WebService
from multiprocessing import Queue
LOG = logging.getLogger(__name__)
class Service(object):
def __init__(self, config):
self.config = config
self.startup = datetime.now()
self.instance = self.config["service"]["instance"]
self.timezone = self.config["service"]["timezone"]
self.scheduler = BackgroundScheduler(timezone=self.timezone)
self.controllers = {}
self.mqtt = MqttConnector(service=self)
self.queue_boards_connection = Queue()
self.queue_boards_io_status = Queue()
self.queue_boards_status = Queue()
def start(self, daemon):
#w = WebService(controllers=self.controllers)
#w.start()
LOG.info("Initialising boards")
for name, controller in self.config["controllers"].items():
address = controller["address"].strip().lower()
identifier = controller["board"].strip().lower()
LOG.info("Initialising '%s' using '%s' at '%s'", name, identifier, address)
board = get_board(identifier=identifier)
LOG.info("Found interface %s", board)
# TODO: handle a miss identified board
self.controllers[name] = board(name=name, address=address, service=self)
if daemon:
LOG.info("Daemon mode, starting services")
LOG.info("Starting primary scheduler")
self.scheduler.start()
LOG.info("Starting MQTT")
self.mqtt.start()
LOG.info("Starting boards")
for name, controller in self.controllers.items():
LOG.info("Starting %s", name)
self.controllers[name].start()
# TODO: handle being unable to start a board
while True:
if self.queue_boards_status.empty() is False:
msg = self.queue_boards_status.get()
self.mqtt.board_status(name=msg["name"], raw_msg=msg)
elif self.queue_boards_io_status.empty() is False:
msg = self.queue_boards_io_status.get()
self.mqtt.board_io_event(name=msg["name"], state=msg["state"])
elif self.queue_boards_connection.empty() is False:
event = self.queue_boards_connection.get()
self.mqtt.board_connection_event( name=event["name"], event=event["event"])
else:
time.sleep(0.05)
else:
LOG.info("Exiting, non-daemon mode")
sys.exit(0)
@property
def uptime(self):
return int((datetime.now()-self.startup).total_seconds()/60)
def main():
logging.basicConfig(level=logging.INFO)
logging.getLogger('apscheduler.executors.default').propagate = False
parser = argparse.ArgumentParser(description=f"IO Controller Gateway v{version()}")
parser.add_argument("-c", "--config", help="Config file", required=True)
parser.add_argument("-v", "--verbose", help="Increase verbosity", action="store_true")
parser.add_argument("-d", "--daemon", help="Daemon mode", action="store_true")
args = parser.parse_args()
LOG.info("IO Controller Gateway v%s %s on Python v%d.%d.%d", version(), build(),
sys.version_info.major, sys.version_info.minor, sys.version_info.micro)
# check config exists
cfgpath = args.config.strip()
if os.path.isfile(cfgpath) is False:
LOG.fatal("Specified config file does not exist: %s", cfgpath)
sys.exit(1)
daemon = False
if args.daemon:
daemon = True
# load the config
with open(cfgpath, 'r') as stream:
try:
config = yaml.load(stream, Loader=yaml.FullLoader)
except yaml.YAMLError as exc:
print(exc)
sys.exit(1)
s = Service(config=config)
LOG.info("Service instance %s ready", s.instance)
s.start(daemon=daemon)
sys.exit(0)
if __name__ == "__main__":
main()
| # NOTE: These variable will be overwritten during the build process by the Github action.
__BUILD__ = "unknown"
__VERSION__ = "localdev"
def build():
return __BUILD__
def version():
return __VERSION__
--- FILE SEPARATOR ---
from ioctlgw.boards.hhc.hhcn8i8o import HhcN8i8o
from ioctlgw.boards.hiflying.hf6508 import HiFlyingHF6508
MAPPING = {
"hhc": {
"hhc-n-8i8o": HhcN8i8o
},
"hiflying": {
"hf6508": HiFlyingHF6508
}
}
def get_board(identifier):
parts = identifier.split(".")
if len(parts) != 2:
raise Exception("Unexpected number of dots in board identifier.")
manufacturer = parts[0]
model = parts[1]
if manufacturer not in MAPPING.keys():
raise Exception(f"Manufacturer '{manufacturer}' not found .")
if model not in MAPPING[manufacturer].keys():
raise Exception(f"Model '{model}' not found for manufacturer '{manufacturer}'")
return MAPPING[manufacturer][model]
--- FILE SEPARATOR ---
import logging
import json
import paho.mqtt.client as mqttc
from ioctlgw import build, version
from ioctlgw.componentstate import ComponentState
LOG = logging.getLogger(__name__)
class MqttConnector(object):
def __init__(self, service):
self.service = service
self.config = self.service.config
self.mqtt_config = self.config["mqtt"]
self.mqtt = mqttc.Client(client_id=f"iocontrollergw_{service.instance}")
self.mqtt_base_topic = self.mqtt_config["topic"]
self.mqtt.on_connect = self.mqtt_on_connect
self.mqtt.on_disconnect = self.mqtt_on_disconnect
self.mqtt.on_message = self.mqtt_on_message
self.mqtt.on_subscribe = self.mqtt_on_subscribe
# MQTT status jobs
self.service.scheduler.add_job(self.publish_status)
self.service.scheduler.add_job(self.publish_status, 'interval', seconds=10, jitter=5)
def start(self):
# Start a background thread to maintain the MQTT connection
LOG.info("MQTT Starting")
if "user" in self.mqtt_config and "pass" in self.mqtt_config:
self.mqtt.username_pw_set(self.mqtt_config["user"], self.mqtt_config["pass"])
mqtt_host = self.mqtt_config["host"]
mqtt_port = self.mqtt_config["port"]
LOG.info("MQTT Connecting to %s:%s", mqtt_host, mqtt_port)
self.mqtt.connect(mqtt_host, mqtt_port, 60)
# Subscribe to interesting MQTT topics
topics = [
"/boards/+/digitaloutput/+/command"
]
for topic_suffix in topics:
self.mqtt.subscribe(f"{self.mqtt_base_topic}{topic_suffix}")
self.mqtt.loop_start()
def mqtt_on_connect(self, client, data, flags, rc):
LOG.info("MQTT Connected %s", rc)
def mqtt_on_disconnect(self, client, userdata, rc):
if rc == 0:
LOG.warning("Unexpected MQTT disconnection.")
else:
LOG.warning("Unexpected MQTT disconnection. Will auto-reconnect")
def mqtt_on_subscribe(self, client, userdata, mid, gqos):
LOG.info("MQTT Subscribed %s", mid)
def mqtt_on_message(self, client, userdata, msg):
LOG.info("MQTT Message %s %s", msg.topic, str(msg.payload))
if msg.topic.startswith(self.mqtt_base_topic):
topic = msg.topic[len(self.mqtt_base_topic) + 1:]
parts = topic.split("/")
# TODO: check number of parts
controller_name = parts[1]
component = parts[2]
num = int(parts[3])
iocontroller = self.service.controllers[controller_name]
if controller_name not in self.service.controllers.keys():
LOG.warning("Message for unknown iocontroller '%s'", controller_name)
return
if component not in ["digitaloutput"]:
LOG.warning("Message for unknown component '%s'", component)
return
if num > iocontroller.num_digital_outputs:
LOG.warning("Output too high for this board: %s", num)
return
action = msg.payload.decode('utf-8').strip().upper()
if action not in ["OFF", "ON"]:
LOG.warning("Unsupported action '%s'", action)
return
LOG.debug("Requesting %s %s %s %s %s", iocontroller, controller_name, component, num, action)
iocontroller.request_digitaloutput(ComponentState(component="digitaloutput", num=num, status=action))
def mqtt_publish_message(self, suffix, payload, qos=0):
topic = "%s/%s" % (self.mqtt_base_topic, suffix)
self.mqtt.publish(topic=topic, payload=payload, qos=0)
LOG.info("%s %s", topic, payload)
def board_connection_event(self, name, event):
self.mqtt_publish_message(suffix=f"boards/{name}/connection", payload=event)
def board_io_event(self, name, state):
self.mqtt_publish_message(suffix=f"boards/{name}/{state.component}/{state.num}/status", payload=state.status)
def board_status(self, name, raw_msg):
assert True
def publish_status(self):
status = {
"build": build(),
"version": version()
}
self.mqtt_publish_message(suffix="status", payload=json.dumps(status))
uptime = {
"minutes": self.service.uptime,
"started": self.service.startup.isoformat()
}
self.mqtt_publish_message(suffix="uptime", payload=json.dumps(uptime))
| {
"imports": [
"/ioctlgw/__init__.py",
"/ioctlgw/boards/__init__.py",
"/ioctlgw/mqttconnector.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/boards/__init__.py | from ioctlgw.boards.hhc.hhcn8i8o import HhcN8i8o
from ioctlgw.boards.hiflying.hf6508 import HiFlyingHF6508
MAPPING = {
"hhc": {
"hhc-n-8i8o": HhcN8i8o
},
"hiflying": {
"hf6508": HiFlyingHF6508
}
}
def get_board(identifier):
parts = identifier.split(".")
if len(parts) != 2:
raise Exception("Unexpected number of dots in board identifier.")
manufacturer = parts[0]
model = parts[1]
if manufacturer not in MAPPING.keys():
raise Exception(f"Manufacturer '{manufacturer}' not found .")
if model not in MAPPING[manufacturer].keys():
raise Exception(f"Model '{model}' not found for manufacturer '{manufacturer}'")
return MAPPING[manufacturer][model]
| from ioctlgw.boards.baseinterface import BaseInterface
class HhcN8i8o(BaseInterface):
def __init__(self, **kwargs):
super(HhcN8i8o, self).__init__(num_digital_outputs=8, num_digital_inputs=8, **kwargs)
--- FILE SEPARATOR ---
from ioctlgw.boards.baseinterface import BaseInterface
class HiFlyingHF6508(BaseInterface):
def __init__(self, **kwargs):
super(HiFlyingHF6508, self).__init__(num_digital_outputs=8, num_digital_inputs=8, **kwargs)
| {
"imports": [
"/ioctlgw/boards/hhc/hhcn8i8o/__init__.py",
"/ioctlgw/boards/hiflying/hf6508/__init__.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/boards/hiflying/hf6508/__init__.py | from ioctlgw.boards.baseinterface import BaseInterface
class HiFlyingHF6508(BaseInterface):
def __init__(self, **kwargs):
super(HiFlyingHF6508, self).__init__(num_digital_outputs=8, num_digital_inputs=8, **kwargs)
|
import logging
import socket
import time
import threading
from apscheduler.schedulers.background import BackgroundScheduler
from multiprocessing import Queue
from ioctlgw.componentstate import ComponentState
DEFAULT_STATUS_INTERVAL = 60
DEFAULT_CONNECTION_TIMEOUT = 5
DEFAULT_CONNECTION_RECONNECT = 2
DEFAULT_COMMAND_PAUSE = 0.1
DEFAULT_DO_ALL_CHECK = 5
LOG = logging.getLogger(__name__)
class BaseInterface(threading.Thread):
STATIC_RESPONSES = {
"01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
"010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
"01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
"0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
"01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
"0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
"01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
"0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
"01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
"0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
"01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
"010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
"01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
"0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
"01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
"0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
}
STATIC_REQUESTS = {
"01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
"010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
"01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
"0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
"01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
"0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
"01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
"0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
"01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
"0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
"01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
"010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
"01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
"0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
"01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
"0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
}
def __init__(self, name, address, service, num_digital_outputs=0, num_digital_inputs=0):
threading.Thread.__init__(self)
self.name = name.strip().lower()
self.service = service
self.requestqueue = Queue()
self.address = address
self.host = address.split(":")[0]
self.port = int(address.split(":")[1])
self.state_di_current = None
self.scheduler = BackgroundScheduler(timezone=service.timezone)
self.scheduler.add_job(self.push_status, 'interval', seconds=DEFAULT_STATUS_INTERVAL, jitter=2)
self._connection_state = "disconnected"
self._connection_count = 0
self.status = {
"digitalinput": {},
"digitaloutput": {}
}
self.num_digital_outputs = num_digital_outputs
self.num_digital_inputs = num_digital_inputs
def connect(self):
while True:
try:
self._connection_state = "disconnected"
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
LOG.info("%s socket connecting %s:%s", self.name, self.host, self.port)
s.settimeout(DEFAULT_CONNECTION_TIMEOUT)
s.connect((self.host, self.port))
LOG.info("%s socket connected", self.name)
self._connection_state = "connected"
self._connection_count += 1
self.service.queue_boards_connection.put({"event": "connected", "name": self.name})
return s
except socket.error as e:
LOG.warning("%s socket error %s reconnecting", self.name, e)
self._connection_state = "disconnected"
self.service.queue_boards_connection.put({"event": "disconnected", "name": self.name})
time.sleep(DEFAULT_CONNECTION_RECONNECT)
except Exception as e:
LOG.warning("%s uncaught exception in socket connect %s", self.name, e)
def run(self):
self.scheduler.start()
dest = self.connect()
do_all_check_count = None
while True:
try:
# request digital input status
dest.send(bytes.fromhex('01 01 00 00 00 08 3d cc'))
data = dest.recv(8)
self.process_response_packets(data=data, response_to="read_di_status_all")
if do_all_check_count is None or int(do_all_check_count * DEFAULT_COMMAND_PAUSE) >= DEFAULT_DO_ALL_CHECK:
# request digital output status
dest.send(bytes.fromhex('01 01 00 10 00 08 3c 09'))
data = dest.recv(8)
self.process_response_packets(data=data, response_to="read_do_status_all")
do_all_check_count = 0
if self.requestqueue.empty() is False:
request = self.requestqueue.get()
for hex, state in self.STATIC_REQUESTS.items():
if request == state:
dest.send(bytes.fromhex(hex))
data = dest.recv(8)
self.process_response_packets(data=data)
else:
time.sleep(DEFAULT_COMMAND_PAUSE)
do_all_check_count += 1
except socket.error as e:
LOG.warning("%s socket error %s reconnecting", self.name, e)
dest = self.connect()
except Exception as e:
LOG.warning("%s uncaught exception in loop %s", self.name, e)
def process_response_packets(self, data, response_to=None):
h = data.hex()
if h in self.STATIC_RESPONSES.keys():
outcome = self.STATIC_RESPONSES[h]
# LOG.info("%s packet response matched in static table", self.name)
pin_changed = self.update_state(state=outcome)
if pin_changed:
self.push_status(component=outcome.component, num=outcome.num)
elif h.startswith("010101"):
# Handle DI / DO responses.
if response_to is None:
LOG.warning("Unknown treble 01 response")
return
elif response_to == "read_di_status_all":
component = "digitalinput"
elif response_to == "read_do_status_all":
component = "digitaloutput"
do_hex = "%s%s" % (h[6], h[7])
t = bin(int(do_hex, 16)).zfill(8)
pins = self.bits_to_pins(bits=t)
for pin, status in pins.items():
pin_changed = self.update_state(ComponentState(component=component, num=pin, status=status))
if pin_changed:
self.push_status(component=component, num=pin)
else:
LOG.warning("%s Response packets unexpected: %s", self.name, h)
return
def request_digitaloutput(self, state):
# called via MQTT
self.requestqueue.put(state)
def update_state(self, state):
current_status = self.status[state.component].get(state.num, None)
self.status[state.component][state.num] = state.status
if current_status != state.status:
return True
else:
return False
def push_status(self, component=None, num=None):
if component is not None and num is not None:
# individual component status
status = self.status[component][num]
self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
else:
# all component status
for component in self.status.keys():
for num, status in self.status[component].items():
self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
# board status
self.service.queue_boards_status.put({"name": self.name, "address": self.address})
def bits_to_pins(self, bits):
sbits = str(bits)
h = {}
for p in range(1, 9):
h[p] = "OFF"
if sbits[len(bits) - p] == "1":
h[p] = "ON"
return h
| {
"imports": [
"/ioctlgw/boards/baseinterface.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/web/api.py | from flask import Flask
from flask_restx import Api, Resource
from ioctlgw import __VERSION__
import threading
class WebService(threading.Thread):
app = Flask(__name__)
api = Api(app, title=f"IO Controller Gateway v{__VERSION__}", description="Multi protocol IO controller gateway", contact="https://github.com/natm/iocontroller-gateway", contact_url="https://github.com/natm/iocontroller-gateway")
def __init__(self, controllers):
threading.Thread.__init__(self)
self.controllers = controllers
def run(self):
self.app.run(port=8080)
@api.route('/controllers/')
class Controllers(Resource):
def get(self):
return self.controllers.keys()
@api.route('/controllers/<string:name>/')
class Controller(Resource):
def get(self):
return {'hello': 'world'}
@api.route('/controllers/<string:name>/relays/')
class Controller(Resource):
def get(self):
return {'hello': 'world'}
@api.route('/controllers/<string:name>/digital_inputs/')
class Controller(Resource):
def get(self):
return {'hello': 'world'}
| # NOTE: These variable will be overwritten during the build process by the Github action.
__BUILD__ = "unknown"
__VERSION__ = "localdev"
def build():
return __BUILD__
def version():
return __VERSION__ | {
"imports": [
"/ioctlgw/__init__.py"
]
} |
00mjk/iocontrollergw | refs/heads/master | /ioctlgw/boards/baseinterface.py |
import logging
import socket
import time
import threading
from apscheduler.schedulers.background import BackgroundScheduler
from multiprocessing import Queue
from ioctlgw.componentstate import ComponentState
DEFAULT_STATUS_INTERVAL = 60      # seconds between scheduled full status pushes
DEFAULT_CONNECTION_TIMEOUT = 5    # socket connect timeout, seconds
DEFAULT_CONNECTION_RECONNECT = 2  # pause before a reconnect attempt, seconds
DEFAULT_COMMAND_PAUSE = 0.1       # idle sleep per poll-loop iteration, seconds
DEFAULT_DO_ALL_CHECK = 5          # approx. seconds between digital-output polls
LOG = logging.getLogger(__name__)
class BaseInterface(threading.Thread):
    """Threaded TCP client for an 8-input/8-output Modbus-style IO board.

    The thread keeps a socket open to the board, continuously polls the
    digital inputs (and the digital outputs every few seconds), pushes any
    state changes onto the owning service's queues, and transmits
    output-change requests queued via request_digitaloutput().
    """

    # Known response frames (hex) mapped to the digital-output state they
    # confirm: <addr 01><func 05><coil addr><value><crc>.
    STATIC_RESPONSES = {
        "01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
        "010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
        "01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
        "0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
        "01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
        "0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
        "01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
        "0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
        "01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
        "0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
        "01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
        "010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
        "01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
        "0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
        "01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
        "0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
    }

    # Frames to transmit in order to set an output.  Mirrors
    # STATIC_RESPONSES — presumably because the board echoes the command
    # frame back verbatim (TODO confirm against the board protocol doc).
    STATIC_REQUESTS = {
        "01050010ff008dff": ComponentState(component="digitaloutput", num=1, status="ON"),
        "010500100000cc0f": ComponentState(component="digitaloutput", num=1, status="OFF"),
        "01050011ff00dc3f": ComponentState(component="digitaloutput", num=2, status="ON"),
        "0105001100009dcf": ComponentState(component="digitaloutput", num=2, status="OFF"),
        "01050012ff002c3f": ComponentState(component="digitaloutput", num=3, status="ON"),
        "0105001200006dcf": ComponentState(component="digitaloutput", num=3, status="OFF"),
        "01050013ff007dff": ComponentState(component="digitaloutput", num=4, status="ON"),
        "0105001300003c0f": ComponentState(component="digitaloutput", num=4, status="OFF"),
        "01050014ff00cc3e": ComponentState(component="digitaloutput", num=5, status="ON"),
        "0105001400008dce": ComponentState(component="digitaloutput", num=5, status="OFF"),
        "01050015ff009dfe": ComponentState(component="digitaloutput", num=6, status="ON"),
        "010500150000dc0e": ComponentState(component="digitaloutput", num=6, status="OFF"),
        "01050016ff006dfe": ComponentState(component="digitaloutput", num=7, status="ON"),
        "0105001600002c0e": ComponentState(component="digitaloutput", num=7, status="OFF"),
        "01050017ff003c3e": ComponentState(component="digitaloutput", num=8, status="ON"),
        "0105001700007dce": ComponentState(component="digitaloutput", num=8, status="OFF")
    }

    def __init__(self, name, address, service, num_digital_outputs=0, num_digital_inputs=0):
        """
        :param name: board identifier (normalised to lower case)
        :param address: ``"host:port"`` of the board's TCP endpoint
        :param service: owner exposing queue_boards_* queues and a timezone
        :param num_digital_outputs: advertised DO channel count
        :param num_digital_inputs: advertised DI channel count
        """
        threading.Thread.__init__(self)
        self.name = name.strip().lower()
        self.service = service
        self.requestqueue = Queue()  # pending output-change ComponentStates
        self.address = address
        self.host = address.split(":")[0]
        self.port = int(address.split(":")[1])
        self.state_di_current = None
        # Periodic full-status broadcast, independent of change detection.
        self.scheduler = BackgroundScheduler(timezone=service.timezone)
        self.scheduler.add_job(self.push_status, 'interval', seconds=DEFAULT_STATUS_INTERVAL, jitter=2)
        self._connection_state = "disconnected"
        self._connection_count = 0
        # Last known status per component type, keyed by pin number.
        self.status = {
            "digitalinput": {},
            "digitaloutput": {}
        }
        self.num_digital_outputs = num_digital_outputs
        self.num_digital_inputs = num_digital_inputs

    def connect(self):
        """Block until a TCP connection to the board succeeds; return the socket.

        Emits connected/disconnected events on the service connection queue
        and retries forever with a fixed back-off.
        """
        while True:
            try:
                self._connection_state = "disconnected"
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                LOG.info("%s socket connecting %s:%s", self.name, self.host, self.port)
                sock.settimeout(DEFAULT_CONNECTION_TIMEOUT)
                sock.connect((self.host, self.port))
                LOG.info("%s socket connected", self.name)
                self._connection_state = "connected"
                self._connection_count += 1
                self.service.queue_boards_connection.put({"event": "connected", "name": self.name})
                return sock
            except socket.error as e:
                LOG.warning("%s socket error %s reconnecting", self.name, e)
                self._connection_state = "disconnected"
                self.service.queue_boards_connection.put({"event": "disconnected", "name": self.name})
                time.sleep(DEFAULT_CONNECTION_RECONNECT)
            except Exception as e:  # deliberately broad: keep the reconnect loop alive
                LOG.warning("%s uncaught exception in socket connect %s", self.name, e)

    def run(self):
        """Main poll loop: DI every pass, DO periodically, plus queued writes."""
        self.scheduler.start()
        dest = self.connect()
        do_all_check_count = None  # None forces an immediate DO poll
        while True:
            try:
                # Request digital input status for all eight inputs.
                dest.send(bytes.fromhex('01 01 00 00 00 08 3d cc'))
                data = dest.recv(8)
                self.process_response_packets(data=data, response_to="read_di_status_all")
                if do_all_check_count is None or int(do_all_check_count * DEFAULT_COMMAND_PAUSE) >= DEFAULT_DO_ALL_CHECK:
                    # Request digital output status for all eight outputs.
                    dest.send(bytes.fromhex('01 01 00 10 00 08 3c 09'))
                    data = dest.recv(8)
                    self.process_response_packets(data=data, response_to="read_do_status_all")
                    do_all_check_count = 0
                if not self.requestqueue.empty():
                    # Transmit the frame whose table entry matches the request.
                    request = self.requestqueue.get()
                    for frame_hex, state in self.STATIC_REQUESTS.items():
                        if request == state:
                            dest.send(bytes.fromhex(frame_hex))
                            data = dest.recv(8)
                            self.process_response_packets(data=data)
                else:
                    time.sleep(DEFAULT_COMMAND_PAUSE)
                    do_all_check_count += 1
            except socket.error as e:
                LOG.warning("%s socket error %s reconnecting", self.name, e)
                dest = self.connect()
            except Exception as e:  # deliberately broad: never let the poll thread die
                LOG.warning("%s uncaught exception in loop %s", self.name, e)

    def process_response_packets(self, data, response_to=None):
        """Decode a response frame and publish any resulting state changes.

        :param data: raw 8-byte frame received from the board
        :param response_to: which bulk poll this answers ("read_di_status_all"
            or "read_do_status_all"), or None for a write echo
        """
        h = data.hex()
        if h in self.STATIC_RESPONSES:
            # Echo of a single-output write command.
            outcome = self.STATIC_RESPONSES[h]
            pin_changed = self.update_state(state=outcome)
            if pin_changed:
                self.push_status(component=outcome.component, num=outcome.num)
        elif h.startswith("010101"):
            # Bulk status frame: addr 01, func 01, byte-count 01, then a
            # bitmask byte covering all eight pins.
            if response_to is None:
                LOG.warning("Unknown treble 01 response")
                return
            elif response_to == "read_di_status_all":
                component = "digitalinput"
            elif response_to == "read_do_status_all":
                component = "digitaloutput"
            else:
                # BUG FIX: an unrecognised response_to previously fell
                # through with `component` unbound and raised NameError.
                LOG.warning("%s unexpected response_to %s", self.name, response_to)
                return
            do_hex = h[6:8]  # the bitmask byte as two hex chars
            # BUG FIX: bin() keeps its "0b" prefix, so bin(x).zfill(8)
            # produced strings like "0000b101" for values < 128 and
            # corrupted the upper pins.  format(..., "08b") yields a clean
            # fixed-width bit string.
            t = format(int(do_hex, 16), "08b")
            pins = self.bits_to_pins(bits=t)
            for pin, status in pins.items():
                pin_changed = self.update_state(ComponentState(component=component, num=pin, status=status))
                if pin_changed:
                    self.push_status(component=component, num=pin)
        else:
            LOG.warning("%s Response packets unexpected: %s", self.name, h)
        return

    def request_digitaloutput(self, state):
        """Queue an output change (called via MQTT); the poll loop transmits it."""
        self.requestqueue.put(state)

    def update_state(self, state):
        """Record *state*; return True if the stored status actually changed."""
        previous = self.status[state.component].get(state.num, None)
        self.status[state.component][state.num] = state.status
        return previous != state.status

    def push_status(self, component=None, num=None):
        """Publish status: one pin when component/num are given, else everything.

        The "everything" path also emits a board heartbeat on
        queue_boards_status.
        """
        if component is not None and num is not None:
            # individual component status
            status = self.status[component][num]
            self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
        else:
            # all component statuses
            for component in self.status.keys():
                for num, status in self.status[component].items():
                    self.service.queue_boards_io_status.put({"name": self.name, "state": ComponentState(component=component, num=num, status=status)})
            # board status heartbeat
            self.service.queue_boards_status.put({"name": self.name, "address": self.address})

    def bits_to_pins(self, bits):
        """Map an 8-char bit string to {pin: "ON"/"OFF"}; pin 1 is the last char."""
        sbits = str(bits)
        return {p: ("ON" if sbits[len(sbits) - p] == "1" else "OFF") for p in range(1, 9)}
|
class ComponentState(object):
    """Normalised (component, pin number, status) triple for one IO pin.

    The component name is lower-cased, the pin number coerced to int and the
    status upper-cased, so states compare predictably however the caller
    formatted them.
    """

    def __init__(self, component, num, status):
        self.component = component.strip().lower()  # e.g. "digitaloutput"
        self.num = int(num)                         # pin number, 1-based
        self.status = status.strip().upper()        # "ON" / "OFF"

    def __repr__(self):
        return f"{self.component}_{self.num}_{self.status}"

    def __eq__(self, other):
        # BUG FIX: the original accessed other.component unconditionally and
        # raised AttributeError when compared against a foreign type.
        if not isinstance(other, ComponentState):
            return NotImplemented
        return (self.component, self.num, self.status) == (other.component, other.num, other.status)

    def __hash__(self):
        # Defining __eq__ alone made instances unhashable in Python 3;
        # restore hashing consistent with equality so states can be used
        # in sets and as dict keys.
        return hash((self.component, self.num, self.status))
| {
"imports": [
"/ioctlgw/componentstate.py"
]
} |
00mjk/tasks-unicode | refs/heads/master | /acc/admin.py | from django.contrib import admin
# Register your models here.
from .models import *
admin.site.register(Num)
admin.site.register(ApiQuery) | from django.db import models
# Create your models here.
class Num(models.Model):
num1 = models.IntegerField()
num2 = models.IntegerField()
class ApiQuery(models.Model):
username = models.CharField(max_length=200)
repo_count = models.IntegerField(default=0)
followers_count = models.IntegerField(default=0)
count = models.IntegerField(default=0)
def __str__(self):
return self.username | {
"imports": [
"/acc/models.py"
]
} |
00mjk/tasks-unicode | refs/heads/master | /acc/forms.py | from django.forms import ModelForm
from .models import *
class NumForm(ModelForm):
    """Form exposing all fields of the Num model (two integers)."""
    class Meta:
        model = Num
        fields = '__all__'

# NOTE(review): class name breaks PascalCase convention; kept because
# views.py references it by this exact name.
class parsedData(ModelForm):
    """Form capturing just the GitHub username to query."""
    class Meta:
        model = ApiQuery
        fields = ['username']

class RepoData(ModelForm):
    """Form capturing a repository count to search ApiQuery rows by."""
    class Meta:
        model = ApiQuery
        fields = ['repo_count']

class FoloData(ModelForm):
    """Form capturing a follower count to search ApiQuery rows by."""
    class Meta:
        model = ApiQuery
        fields = ['followers_count']
| from django.db import models
# Create your models here.
class Num(models.Model):
num1 = models.IntegerField()
num2 = models.IntegerField()
class ApiQuery(models.Model):
username = models.CharField(max_length=200)
repo_count = models.IntegerField(default=0)
followers_count = models.IntegerField(default=0)
count = models.IntegerField(default=0)
def __str__(self):
return self.username | {
"imports": [
"/acc/models.py"
]
} |
00mjk/tasks-unicode | refs/heads/master | /acc/views.py | from django.shortcuts import render, redirect
from django.http import HttpResponse
import requests
# Create your views here.
from . import Check
from .models import *
from .forms import *
def home(request):
    """Render the landing page."""
    return render(request, 'acc/main.html')
def solutionform(request):
    """Render the two-number form; on a valid POST, save and show the solution."""
    form = NumForm()
    if request.method == "POST":
        form = NumForm(request.POST)
        # BUG FIX: the original tested `form.is_valid` — the bound method
        # object, which is always truthy — instead of calling it, so
        # validation never ran before save() and cleaned_data access.
        if form.is_valid():
            form.save()
            num1 = form.cleaned_data['num1']
            num2 = form.cleaned_data['num2']
            return solution(request, num1, num2)
    # GET, or invalid POST: re-render the form (with errors if bound).
    context = {'form': form}
    return render(request, 'acc/forms.html', context)
def solution(request, num1, num2):
    """Run the Check analysis on the two numbers and render the result."""
    # Renamed the local from `dict` — it shadowed the builtin.
    results = Check.Check(num1, num2)
    # The template still expects the context key 'dict'.
    context = {'dict': results}
    return render(request, 'acc/solution.html', context)
def queryuser(request):
    """Show the username form; on a valid POST, save it and run the GitHub query."""
    form = parsedData()
    if request.method == 'POST':
        form = parsedData(request.POST)
        if form.is_valid():
            username = form.cleaned_data['username']
            form.save()
            return Query(request, username)
    context = {'form': form}
    return render(request, 'acc/user.html', context)
def Query(request, username):
    """Fetch a GitHub user's repos and followers, cache counts on ApiQuery.

    Renders the repo names and follower logins; when the username already has
    an ApiQuery row, its repo_count / followers_count are refreshed.
    """
    repos_resp = requests.get('https://api.github.com/users/' + username + '/repos')
    followers_resp = requests.get('https://api.github.com/users/' + username + '/followers')
    known_usernames = [row['username'] for row in ApiQuery.objects.all().values('username')]
    name_list = []
    followers_list = []
    if repos_resp:  # truthy for successful HTTP responses
        name_list = [repo['name'] for repo in repos_resp.json()]
        _save_count(username, known_usernames, 'repo_count', len(name_list))
    if followers_resp:
        followers_list = [follower['login'] for follower in followers_resp.json()]
        _save_count(username, known_usernames, 'followers_count', len(followers_list))
    context = {'names': name_list, 'followers': followers_list}
    return render(request, 'acc/apiquery.html', context)

def _save_count(username, known_usernames, field, value):
    """Persist *value* into *field* of the user's ApiQuery row, if one exists."""
    if username in known_usernames:
        record = ApiQuery.objects.get(username=username)
        setattr(record, field, value)
        record.save()
def repoQuery(request):
    """Find ApiQuery rows matching a repo count and bump their query counters."""
    form = RepoData()
    result = None
    if request.method == 'POST':
        form = RepoData(request.POST)
        if form.is_valid():
            repo_count = form.cleaned_data['repo_count']
            result = ApiQuery.objects.filter(repo_count=repo_count)
            # NOTE(review): read-modify-write increment races under concurrent
            # requests; F('count') + 1 would be atomic.
            for x in result:
                x.count += 1
                x.save()
    context = {'form': form, 'result': result}
    return render(request, 'acc/repo.html', context)
def foloQuery(request):
    """Find ApiQuery rows matching a follower count and bump their query counters."""
    form = FoloData()
    result = None
    if request.method == 'POST':
        form = FoloData(request.POST)
        if form.is_valid():
            followers_count = form.cleaned_data['followers_count']
            result = ApiQuery.objects.filter(followers_count=followers_count)
            # NOTE(review): same non-atomic counter increment as repoQuery.
            for x in result:
                x.count += 1
                x.save()
    context = {'form': form, 'result': result}
    return render(request, 'acc/folo.html', context)
def topthree(request):
    """Show the three most-queried ApiQuery rows."""
    # BUG FIX: order_by('count') sorts ascending, so the view returned the
    # three *least* queried rows; '-count' sorts descending as "top three"
    # implies.
    answer = ApiQuery.objects.order_by('-count')[:3]
    context = {'answer': answer}
    return render(request, 'acc/top.html', context)
| from django.db import models
# Create your models here.
class Num(models.Model):
num1 = models.IntegerField()
num2 = models.IntegerField()
class ApiQuery(models.Model):
username = models.CharField(max_length=200)
repo_count = models.IntegerField(default=0)
followers_count = models.IntegerField(default=0)
count = models.IntegerField(default=0)
def __str__(self):
return self.username
--- FILE SEPARATOR ---
from django.forms import ModelForm
from .models import *
class NumForm(ModelForm):
class Meta:
model = Num
fields = '__all__'
class parsedData(ModelForm):
class Meta:
model = ApiQuery
fields = ['username']
class RepoData(ModelForm):
class Meta:
model = ApiQuery
fields = ['repo_count']
class FoloData(ModelForm):
class Meta:
model = ApiQuery
fields = ['followers_count']
| {
"imports": [
"/acc/models.py",
"/acc/forms.py"
]
} |
00wuyu00/BuildDream | refs/heads/master | /User/models.py | # -*- coding: utf-8 -*-
from django.db import models
from Complex.models import Direction
from django.contrib.auth.models import User
# Create your models here.
class UserInf(models.Model):
    """Profile data extending the built-in auth User (one-to-one)."""
    username=models.CharField(max_length=20)
    # SECURITY NOTE(review): appears to store a plaintext password alongside
    # the hashed one on the linked auth User — confirm this field is needed.
    password=models.CharField(max_length=20)
    email=models.EmailField()
    # NOTE(review): IntegerField does not support max_length (Django ignores
    # or rejects it) — presumably meant as a value-range limit; verify.
    grade=models.IntegerField(max_length=2)
    phone=models.CharField(max_length=11,blank=True)
    student_id=models.CharField(max_length=14)
    good_direction=models.ManyToManyField(Direction,related_name="good_direction")
    follow_direction=models.ManyToManyField(Direction,related_name="follow_direction")
    introduction=models.CharField(max_length=100,blank=True)
    user=models.OneToOneField(User)

class UserImage(models.Model):
    """An image uploaded by a user."""
    description=models.CharField(max_length=999,blank=True)
    address=models.FilePathField()
    flag=models.BooleanField(default=False)  # whether this is the badge/logo image
    user=models.ForeignKey(User)
| from django.db import models
# Create your models here.
Type_CHOICES=(
('T','Type'),
('L','Language')
)
class Direction(models.Model):
name=models.CharField(max_length=20)
type=models.CharField(max_length=1,choices=Type_CHOICES)
| {
"imports": [
"/Complex/models.py"
]
} |
00wuyu00/BuildDream | refs/heads/master | /Competition/models.py | # -*- coding: utf-8 -*-
from django.db import models
from Complex.models import Direction
# Create your models here.
Level_CHOICES=(
('a','院级'),
('s','校级'),
('n','国家级'),
('w','世界级'),
)
class Competition(models.Model):
    """A contest with a level, optional description/website, tagged by direction."""
    name=models.CharField(max_length=100)
    level=models.CharField(max_length=1,choices=Level_CHOICES)
    description=models.CharField(max_length=999,blank=True)
    # case_set: reverse accessor for the timeline events (CompetitionCase)
    website=models.URLField(blank=True)  # competition website
    direction=models.ManyToManyField(Direction)

class CompetitionCase(models.Model):
    """A timeline event in a competition's schedule."""
    time=models.DateTimeField()  # when the event occurs
    description=models.CharField(max_length=999)  # event description
    level=models.SmallIntegerField(default=0)  # event severity level
    competition=models.ForeignKey(Competition)

class CompetitionImage(models.Model):
    """Image attached to a competition."""
    description=models.CharField(max_length=999,blank=True)
    address=models.FilePathField()
    flag=models.BooleanField(default=False)  # whether this is the badge/logo image
    competition=models.ForeignKey(Competition)
| from django.db import models
# Create your models here.
Type_CHOICES=(
('T','Type'),
('L','Language')
)
class Direction(models.Model):
name=models.CharField(max_length=20)
type=models.CharField(max_length=1,choices=Type_CHOICES)
| {
"imports": [
"/Complex/models.py"
]
} |
00wuyu00/BuildDream | refs/heads/master | /Question/models.py | # -*- coding: utf-8 -*-
from django.db import models
from User.models import User
from Complex.models import Direction
# Create your models here.
class Question(models.Model):
    """A user-posted question tagged with directions; commenters are tracked
    through the QuestionComment intermediate model."""
    content=models.CharField(max_length=999)
    time=models.DateTimeField(auto_now_add=True)
    owner=models.ManyToManyField(User,related_name='owner')
    direction=models.ManyToManyField(Direction)
    comment_user=models.ManyToManyField(User,through='QuestionComment')
class QuestionComment(models.Model):
content=models.CharField(max_length=999)
time=models.DateTimeField(auto_now_add=True)
user=models.ForeignKey(User)
question=models.ForeignKey(Question) | # -*- coding: utf-8 -*-
from django.db import models
from Complex.models import Direction
from django.contrib.auth.models import User
# Create your models here.
class UserInf(models.Model):
username=models.CharField(max_length=20)
password=models.CharField(max_length=20)
email=models.EmailField()
grade=models.IntegerField(max_length=2)
phone=models.CharField(max_length=11,blank=True)
student_id=models.CharField(max_length=14)
good_direction=models.ManyToManyField(Direction,related_name="good_direction")
follow_direction=models.ManyToManyField(Direction,related_name="follow_direction")
introduction=models.CharField(max_length=100,blank=True)
user=models.OneToOneField(User)
class UserImage(models.Model):
description=models.CharField(max_length=999,blank=True)
address=models.FilePathField()
flag=models.BooleanField(default=False)#是否为标志
user=models.ForeignKey(User)
--- FILE SEPARATOR ---
from django.db import models
# Create your models here.
Type_CHOICES=(
('T','Type'),
('L','Language')
)
class Direction(models.Model):
name=models.CharField(max_length=20)
type=models.CharField(max_length=1,choices=Type_CHOICES)
| {
"imports": [
"/User/models.py",
"/Complex/models.py"
]
} |
00wuyu00/BuildDream | refs/heads/master | /Project/models.py | # -*- coding: utf-8 -*-
from django.db import models
from User.models import User
from Complex.models import Direction
# Create your models here.
class Project(models.Model):
    """A project with an owner, optional participants and commenters."""
    name=models.CharField(max_length=100)
    description=models.CharField(max_length=999)
    start_time=models.DateField()
    end_time=models.DateField()
    owner=models.ForeignKey(User)
    join_user=models.ManyToManyField(User,related_name='join_user',blank=True)
    comment_user=models.ManyToManyField(User,through='ProjectComment',related_name='comment_user',blank=True)
    direction=models.ManyToManyField(Direction)

class ProjectImage(models.Model):
    """Image attached to a project."""
    description=models.CharField(max_length=999,blank=True)
    address=models.FilePathField()
    flag=models.BooleanField(default=False)  # whether this is the badge/logo image
    project=models.ForeignKey(Project)

class ProjectComment(models.Model):
    """A comment on a project (through-model for Project.comment_user)."""
    content=models.CharField(max_length=999)
    time=models.DateTimeField(auto_now_add=True)
    user=models.ForeignKey(User)
    # NOTE(review): field is named `question` but points at Project —
    # apparent copy-paste from QuestionComment; renaming needs a migration.
    question=models.ForeignKey(Project)
| # -*- coding: utf-8 -*-
from django.db import models
from Complex.models import Direction
from django.contrib.auth.models import User
# Create your models here.
class UserInf(models.Model):
username=models.CharField(max_length=20)
password=models.CharField(max_length=20)
email=models.EmailField()
grade=models.IntegerField(max_length=2)
phone=models.CharField(max_length=11,blank=True)
student_id=models.CharField(max_length=14)
good_direction=models.ManyToManyField(Direction,related_name="good_direction")
follow_direction=models.ManyToManyField(Direction,related_name="follow_direction")
introduction=models.CharField(max_length=100,blank=True)
user=models.OneToOneField(User)
class UserImage(models.Model):
description=models.CharField(max_length=999,blank=True)
address=models.FilePathField()
flag=models.BooleanField(default=False)#是否为标志
user=models.ForeignKey(User)
--- FILE SEPARATOR ---
from django.db import models
# Create your models here.
Type_CHOICES=(
('T','Type'),
('L','Language')
)
class Direction(models.Model):
name=models.CharField(max_length=20)
type=models.CharField(max_length=1,choices=Type_CHOICES)
| {
"imports": [
"/User/models.py",
"/Complex/models.py"
]
} |
010-kshitij/CoinAnalysis--Python | refs/heads/master | /Main.py | import Tkinter
import ScrolledText
# Directory Imports
from App import App
from CoinValues import CoinValues
import analyze_coin
# Initialize Application
# Initialize Application
app = App()

# Per-coin label registries; main() fills each with 'text'/'price'/'percent'
# Tkinter labels, and get_coin_values() updates them.
label_btc_price = dict()
label_bcc_price = dict()
label_dash_price = dict()
label_doge_price = dict()
label_eth_price = dict()
label_ltc_price = dict()
label_nxt_price = dict()
label_str_price = dict()
label_nem_price = dict()
label_xrp_price = dict()

# Coin Selector Variables.
# NOTE(review): the default 'BTC' is upper-case while the radiobutton values
# are lower-case, so no main-coin button shows selected initially — confirm.
main_coin = Tkinter.StringVar(None, 'BTC')
coin = Tkinter.StringVar()

# Scrolled text pane that receives analysis output.
output_frame = Tkinter.Frame()
output = ScrolledText.ScrolledText(
    master=output_frame,
    wrap='word',  # wrap text at full words only
    width=25,  # characters
    height=20,  # text lines
)
output.grid(row=0)

# Sample-limit entry (defaults to 30) and daily/minute granularity selector.
limit_entry = Tkinter.Entry(app.window)
limit_entry.insert(Tkinter.END, "30")
limit_entry.config(width=10)
limit_option_value = Tkinter.StringVar()
limit_option_value.set("daily")
limit_option_menu = ["daily", "minute"]
# FIX: apply() is deprecated (even on Python 2); argument unpacking is the
# direct equivalent.
limit_option = Tkinter.OptionMenu(app.window, limit_option_value, *limit_option_menu)
limit_option.grid(row=11, column=2)
# Coin Selector Command
def do_analyze_coin():
    """Radiobutton callback: analyse the selected coin pair and show the result
    in the scrolled output pane."""
    global app, coin, output, output_frame
    result = analyze_coin.analyze(maincoin=main_coin.get().upper(), othercoin=coin.get().upper(), window=app.window, track_type=limit_option_value.get(), limit=limit_entry.get())
    output.insert('insert', result)
    # Reveal the output pane next to the price table (re-grid is idempotent).
    output_frame.grid(row=1, column=6,rowspan=100)
# Ticking Function to get values for price table
def get_coin_values():
    """Refresh the price-table labels from the coin feed, then reschedule.

    Replaces ten copy-pasted per-coin stanzas with one data-driven loop.
    """
    # Get the coin values for the price table.
    coinValues = CoinValues().get()
    # (feed symbol, label registry) pairs; STR was commented out in the
    # original code and stays disabled here.
    rows = [
        ('BTC', label_btc_price),
        ('BCH', label_bcc_price),
        ('DASH', label_dash_price),
        ('DOGE', label_doge_price),
        ('ETH', label_eth_price),
        ('LTC', label_ltc_price),
        ('NXT', label_nxt_price),
        ('XEM', label_nem_price),
        ('XRP', label_xrp_price),
    ]
    for symbol, labels in rows:
        values = coinValues[symbol]
        # values[2]/values[3] form the displayed price; values[4] is a signed
        # percent string such as "+2.4%" (per the original formatting code).
        labels['price'].config(text=('({} {})'.format(values[2], values[3])))
        if values[4][0] == '+':
            labels['percent'].config(fg="#00ff00", text=u'\u25b2 {}'.format(values[4]))
        else:
            labels['percent'].config(fg="#ff0000", text=u'\u25bc {}'.format(values[4]))
    # Reschedule this function
    app.window.after(60000, get_coin_values)
#
# The main function
#
def main():
    """Build the UI — price table, coin selectors, limit controls — and run.

    The original built each of the ten coin rows with copy-pasted label and
    radiobutton code; one table-driven loop produces the identical layout.
    """
    global app, limit_entry
    global label_btc_price, label_bcc_price, label_dash_price, label_doge_price, label_eth_price, label_ltc_price, label_nxt_price, label_str_price, label_nem_price, label_xrp_price
    # Create Window Header
    app.create_header()
    # (display text, radiobutton value, label registry) per table row 1..10.
    # Note NEM's radiobutton value is "xem", matching the feed symbol.
    coin_rows = [
        ("BTC", "btc", label_btc_price),
        ("BCC", "bcc", label_bcc_price),
        ("DASH", "dash", label_dash_price),
        ("DOGE", "doge", label_doge_price),
        ("ETH", "eth", label_eth_price),
        ("LTC", "ltc", label_ltc_price),
        ("NXT", "nxt", label_nxt_price),
        ("STR", "str", label_str_price),
        ("NEM", "xem", label_nem_price),
        ("XRP", "xrp", label_xrp_price),
    ]
    for row, (text, value, labels) in enumerate(coin_rows, 1):
        # Price-table labels (placeholder price/percent until first refresh).
        labels['text'] = app.create_price_table_label(text)
        labels['price'] = app.create_price_table_label("(0.0002)")
        labels['percent'] = app.create_price_table_label("2.4%")
        app.load_price_table_label(labels['text'], rown=row, columnn=0)
        app.load_price_table_label(labels['price'], rown=row, columnn=1)
        app.load_price_table_label(labels['percent'], rown=row, columnn=2)
        # Main-coin (column 3) and compare-coin (column 4) radio selectors.
        main_rb = Tkinter.Radiobutton(app.window, text=text, variable=main_coin, value=value, command=do_analyze_coin)
        other_rb = Tkinter.Radiobutton(app.window, text=text, variable=coin, value=value, command=do_analyze_coin)
        app.load_coin_selector_checkbox(main_rb, rown=row, columnn=3)
        app.load_coin_selector_checkbox(other_rb, rown=row, columnn=4)
    # Limit controls (the duration OptionMenu is gridded at module level).
    Tkinter.Label(text="Limit (Max. 2000) : ").grid(row=11, column=0)
    limit_entry.grid(row=11, column=1)
    # Schedule the first price refresh, then enter the Tk main loop.
    app.window.after(1000, get_coin_values)
    app.load()

if __name__ == "__main__":
    main()
| import Tkinter
class App:
    """Top-level Tkinter window hosting the coin-analysis UI."""

    # Handle to the root Tk window; assigned in __init__.
    window = None

    def __init__(self):
        """Create and configure a fixed-size root window."""
        root = Tkinter.Tk()
        root.title("Coin Analysis")
        # Forbid resizing in both directions.
        root.resizable(width=False, height=False)
        root.configure(background="#ffffff")
        self.window = root

    def create_header(self):
        """Render the banner label across the top row of the grid."""
        header = Tkinter.Label(
            self.window,
            text="Altcoins Buy/Sell Confidence",
            fg="#ffffff",
            bg="#428bca",
            font="Verdana 14 bold",
            anchor="w",
        )
        header.grid(
            row=0,
            column=0,
            sticky='ew',
            columnspan=50,
            ipadx=100,
            ipady=10,
        )

    def create_price_table_label(self, label_text):
        """Build (without placing) one cell label for the price table."""
        return Tkinter.Label(
            self.window,
            text=label_text,
            fg="#000000",
            bg="#ffffff",
            font="Verdana 10",
            anchor="w",
        )

    def load_price_table_label(self, label, rown=0, columnn=0):
        """Place a previously created price-table label on the grid."""
        label.grid(row=rown, column=columnn, sticky='ew', ipady=2.5)

    def create_coin_selector_checkbox(self, checkbox_text="Text", checkbox_value="Value"):
        """Build (without placing) a coin-selector checkbox."""
        return Tkinter.Checkbutton(
            self.window,
            text=checkbox_text,
            variable=checkbox_value,
        )

    def load_coin_selector_checkbox(self, checkbox, rown=0, columnn=0):
        """Place a coin-selector checkbox on the grid."""
        checkbox.grid(row=rown, column=columnn, sticky='ew', ipady=2.5)

    def load(self):
        """Enter the Tk main loop; blocks until the window is closed."""
        self.window.mainloop()
--- FILE SEPARATOR ---
import requests
from bs4 import BeautifulSoup
import json
class CoinValues:
    """Scrape current coin price data from worldcoinindex.com."""

    def get(self):
        """Return a dict mapping each tracked coin's <h2> heading to the
        list of stripped <span> contents found in its table row.

        Only rows whose ``data-naam`` attribute matches a tracked coin are
        included.  Network errors propagate to the caller.
        """
        coins = ('bitcoin', 'bitcoincash', 'dash', 'dogecoin', 'ethereum', 'litecoin', 'nxt', 'nem', 'ripple')
        url = "https://www.worldcoinindex.com/"
        headers = {'User-Agent': 'Mozilla/5.0'}
        # Bug fix: the User-Agent header was built but never sent, so the
        # request went out with requests' default UA (often blocked by sites).
        page = requests.get(url, headers=headers)
        coin_values = dict()
        soup = BeautifulSoup(page.text, 'html.parser')
        for table in soup.find_all('table'):
            for tr in table.find_all('tr'):
                if tr.get('data-naam') in coins:
                    h2 = tr.h2.decode_contents(formatter="html").strip()
                    coin_values[h2] = [
                        span.decode_contents(formatter="html").strip()
                        for span in tr.find_all('span')
                    ]
        return coin_values
--- FILE SEPARATOR ---
import requests
import pandas
import json
import datetime
import matplotlib.pyplot
# Graph embed stuff
import matplotlib
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from matplotlib.figure import Figure
# Globals
output = ""
def get_dataframe_of(symbol, track_type, limit=30):
    """Fetch USD closing prices for *symbol* from cryptocompare.

    track_type: "daily" uses the histoday endpoint (full datetime index);
    "minute" uses histominute (index keeps only the time of day, matching
    the original behaviour).  Returns a one-column DataFrame named after
    *symbol*, indexed by time.

    Raises ValueError for any other track_type (previously the function
    silently returned None, which crashed later in pandas.concat).
    """
    endpoints = {
        "daily": "https://min-api.cryptocompare.com/data/histoday?fsym={}&tsym=USD&limit={}",
        "minute": "https://min-api.cryptocompare.com/data/histominute?fsym={}&tsym=USD&limit={}",
    }
    if track_type not in endpoints:
        raise ValueError("track_type must be 'daily' or 'minute', got %r" % (track_type,))
    response = requests.get(endpoints[track_type].format(symbol, limit))
    # Get JSON from the url
    data = json.loads(response.text)
    # Get DataFrame from the JSON Object
    df = pandas.DataFrame.from_dict(data['Data'])
    times = pandas.to_datetime(df['time'], unit='s')
    # Minute data keeps only the wall-clock component.
    df['time'] = times.dt.time if track_type == "minute" else times
    df.set_index(['time'], inplace=True)
    df = pandas.DataFrame(df['close'], index=df.index)
    df.columns = [symbol]
    return df
def analyze(maincoin="", othercoin="", window=None, track_type="daily", limit=30):
    """Correlate *othercoin* with *maincoin*, embed a plot of the main
    coin's 1-day percent change into the Tkinter *window*, and append a
    BUY/SELL recommendation to the module-level ``output`` transcript
    (which is also returned).

    The original four sign-combination branches collapse to one rule:
    BUY when the correlation is negative, SELL when positive; no advice
    is emitted when either the latest change or the correlation is 0
    (matching the original's fall-through behaviour).
    """
    global output
    # Fetch all tracked coins in the original concat order.
    symbols = ('BTC', 'BCC', 'DASH', 'DOGE', 'ETH', 'LTC', 'NXT', 'STR', 'XEM', 'XRP')
    frames = [get_dataframe_of(s, track_type=track_type, limit=limit) for s in symbols]
    maindf = pandas.concat(frames, axis=1)
    maindfcorr = maindf.corr()
    days = 1
    for i in range(1, days + 1):
        # Forward percent change over i periods for the main coin.
        maindf["{}_day{}".format(maincoin, i)] = (maindf[maincoin].shift(-i) - maindf[maincoin]) / maindf[maincoin]
    maindf.fillna(0, inplace=True)
    corr = maindfcorr[othercoin][maincoin]
    plotdf = pandas.concat([maindf['{}_day1'.format(maincoin)]], axis=1)
    # Draw Graph embedded in the Tkinter window.
    f = Figure(figsize=(5, 5), dpi=100)
    a = f.add_subplot(111)
    plotdf.plot(ax=a)
    canvas = FigureCanvasTkAgg(f, window)
    canvas.show()  # NOTE: renamed to draw() in matplotlib >= 2.2
    canvas.get_tk_widget().grid(row=1, column=5, rowspan=9)
    currentdf = plotdf[-2:-1]
    # Bug fix: DataFrame.get_value() was removed in pandas 1.0; plain
    # column/iloc indexing reads the same scalar on all versions.
    currentdf_data = currentdf[currentdf.columns[0]].iloc[-1]
    output = output + "Percent Change = {}".format(str(currentdf_data)) + "\n"
    output = output + "Correlation of {} with {} = {}".format(othercoin, maincoin, corr) + "\n"
    if currentdf_data != 0 and corr != 0:
        action = "BUY" if corr < 0 else "SELL"
        output = output + "{} {}".format(action, othercoin) + "\n"
        confidence = corr * 100
        output = output + "Confidence = {}%".format(str(abs(confidence))) + "\n"
    return output
| {
"imports": [
"/App.py",
"/CoinValues.py",
"/analyze_coin.py"
]
} |
01000101/python-vultr | refs/heads/master | /tests/test_vultr.py | import unittest
import os
import warnings
from vultr import Vultr, VultrError
class UnauthenticateTests(unittest.TestCase):
    """Smoke tests for Vultr endpoints that require no API key.

    Each test passes as long as the call does not raise; the returned
    payloads are not asserted on.
    """
    def setUp(self):
        # An empty API key is sufficient for the public endpoints below.
        self.vultr = Vultr('')
    def test_plans_list(self):
        response = self.vultr.plans_list()
    def test_regions_list(self):
        response = self.vultr.regions_list()
    def test_os_list(self):
        response = self.vultr.os_list()
    def test_app_list(self):
        response = self.vultr.app_list()
@unittest.skipIf(not os.environ.get('VULTR_KEY'), 'Skipping AuthenticatedTests')
class AuthenticatedTests(unittest.TestCase):
    """Tests that require a real API key via the VULTR_KEY env var.

    NOTE(review): several tests share state through the class attribute
    ``server_list`` and implicitly rely on unittest's alphabetical method
    ordering (create < destroy < list); they are not independent tests.
    """
    @classmethod
    def setUpClass(cls):
        # One shared client for all tests in this class.
        cls.VULTR_KEY = os.environ.get('VULTR_KEY')
        cls.vultr = Vultr(cls.VULTR_KEY)
        cls.server_list = {}
    def test_get_api_key(self):
        # Any GET endpoint exercises key-based authentication.
        response = self.vultr.iso_list()
    def test_post_api_key(self):
        # A POST with an invalid SUBID should fail with a descriptive error.
        try:
            response = self.vultr.server_label_set('', '')
        except VultrError as e:
            msg = str(e)
            self.assertEqual(msg, "Request failed. Check the response body" +
                             " for a more detailed description. Body:" +
                             " \nInvalid server. Check SUBID value and" +
                             " ensure your API key matches the server's" +
                             " account")
    def test_account_info(self):
        response = self.vultr.account_info()
    def test_server_create(self):
        # WARNING: creates a real (billable) VM; cleaned up later by
        # test_server_destroy thanks to alphabetical ordering.
        response = self.vultr.server_create(vpsplanid=29, dcid=1, osid=191,
                                            label="python-vultr: test")
        warnings.warn("Creating VM: " + str(response) +
                      "\n This will cost money.")
    def test_server_list(self):
        AuthenticatedTests.server_list = self.vultr.server_list()
        warnings.warn(str(AuthenticatedTests.server_list))
    def test_server_list_by_subid(self):
        for subid in AuthenticatedTests.server_list:
            response = self.vultr.server_list(subid=subid)
    def test_server_destroy(self):
        servers = self.vultr.server_list()
        for subid in servers:
            # skip machines not made by tests.
            if servers[subid]['label'] != 'python-vultr: test':
                warnings.warn("skipping [" + subid + "]:\n" +
                              str(servers[subid]))
                continue
            try:
                response = self.vultr.server_destroy(subid=subid)
            except VultrError as e:
                # This should be it's own
                # except VultrServerDestroyWithinFiveMinutesError as e:
                msg = str(e)
                self.assertEqual(msg, "Request failed. Check the response" +
                                 " body for a more detailed" +
                                 " description. Body: \nUnable to" +
                                 " destroy server: Servers cannot be" +
                                 " destroyed within 5 minutes of being" +
                                 " created")
                warnings.warn(msg)
if __name__ == '__main__':
unittest.main()
| from vultr import Vultr
| {
"imports": [
"/vultr/__init__.py"
]
} |
01070/virus_sentiment | refs/heads/master | /main.py | import xlrd
import numpy as np
from data_utils import *
import logging
import os
import torch.nn as nn
import torch
import math
import argparse
import sys
import pandas
from sklearn.metrics import accuracy_score, f1_score
import random
import os
from time import strftime, localtime
import matplotlib.pylab as plt
import pandas as pd
from torch.cuda.amp import autocast as autocast
from Bert_weibo import *
from torch.utils.data import DataLoader, Dataset, random_split
from transformers import BertTokenizer, AutoTokenizer, BertModel
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler(sys.stdout))
class SSTdataset(Dataset):
    """SST-2 sentiment dataset: tokenised sentences plus integer labels.

    Items are dicts with keys 'text_indices' (np.ndarray of token ids,
    padded/truncated to opt.max_seq_len) and 'polarity' (the label column).
    """

    def __init__(self, opt, dataset_class, tokenizer):
        """Read train.tsv (or test.tsv) under opt.dataset_file and tokenise it."""
        split_file = 'train.tsv' if dataset_class == 'train' else 'test.tsv'
        dataset_file = os.path.join(opt.dataset_file, split_file)
        content = pd.read_csv(dataset_file, sep='\t')
        self.data = [
            {
                'text_indices': np.array(
                    tokenizer.encode(sentence, max_length=opt.max_seq_len,
                                     padding='max_length', truncation=True)),
                'polarity': label,
            }
            for sentence, label in zip(content['sentence'], content['label'])
        ]

    def __getitem__(self, item):
        return self.data[item]

    def __len__(self):
        return len(self.data)
class UsualDataset(Dataset):
    """Weibo emotion dataset read from an Excel workbook.

    Sheet column 1 holds the text, column 2 the emotion label (a string),
    which is mapped to an integer class via opt.polarity_dict.  Items are
    dicts with keys 'text_indices' and 'polarity'.
    """

    def __init__(self, opt, dataset_class):
        """Read train.xlsx (or test.xlsx) under opt.dataset_file and tokenise it."""
        split_file = 'train.xlsx' if dataset_class == 'train' else 'test.xlsx'
        dataset_file = os.path.join(opt.dataset_file, split_file)
        tokenizer = BertTokenizer.from_pretrained(opt.pretrained_model)
        sheet = xlrd.open_workbook(dataset_file).sheet_by_index(0)
        # col_values(col, 1) skips the header row.
        texts = sheet.col_values(1, 1)
        labels = sheet.col_values(2, 1)
        self.data = []
        for text, label in zip(texts, labels):
            token_ids = tokenizer.encode(text, max_length=200, padding='max_length', truncation=True)
            self.data.append({
                'text_indices': np.array(token_ids),
                'polarity': opt.polarity_dict[label],
            })

    def __getitem__(self, item):
        return self.data[item]

    def __len__(self):
        return len(self.data)
class Instructor:
    """Wires together dataset, model, optimiser and loss, then runs the
    train/evaluate loop according to the parsed options object."""

    def __init__(self, opt):
        """Build the model and train/test datasets per opt.dataset / opt.model_name."""
        self.opt = opt
        if opt.dataset == 'sst2' and opt.model_name == 'TCN':
            # Non-BERT path: word-level tokenizer + pretrained embedding
            # matrix built from the raw tsv files (cached in .dat files).
            tokenizer = build_tokenizer(
                fnames=[os.path.join(opt.dataset_file, 'train.tsv'),
                        os.path.join(opt.dataset_file, 'train.tsv')],
                max_seq_len=opt.max_seq_len,
                dat_fname='{0}_tokenizer.dat'.format(opt.dataset))
            embedding_matrix = build_embedding_matrix(
                word2idx=tokenizer.word2idx,
                embed_dim=opt.input_dim,
                dat_fname='{0}_{1}_embedding_matrix.dat'.format(str(opt.input_dim), opt.dataset))
            self.train_data = SSTdataset(opt, 'train', tokenizer)
            self.test_data = SSTdataset(opt, 'test', tokenizer)
            if opt.model_name == 'TCN':
                self.model = opt.model_class(opt, embedding_matrix, opt.input_dim, 2, [opt.input_dim]*4, dropout=0.1, emb_dropout=0.5,
                                             kernel_size=3, tied_weights=True)
            else:
                self.model = opt.model_class(opt)
        elif opt.dataset == 'sst2':
            # BERT-family model on SST-2: use the HuggingFace tokenizer.
            tokenizer = BertTokenizer.from_pretrained(opt.pretrained_model)
            self.model = opt.model_class(opt)
            self.train_data = SSTdataset(opt, 'train', tokenizer)
            self.test_data = SSTdataset(opt, 'test', tokenizer)
        else:
            # Chinese weibo datasets ('usual' / 'virus'): xlsx files; the
            # dataset builds its own tokenizer internally.
            self.model = opt.model_class(opt)
            self.train_data = UsualDataset(opt, 'train')
            self.test_data = UsualDataset(opt, 'test')
        self.optimizer = torch.optim.Adam(params=self.model.parameters(), lr=opt.learning_rate)
        self.criterion = nn.CrossEntropyLoss()
        self._print_args()

    def run(self):
        """Train; then either reload the best checkpoint and report test
        metrics (opt.save_weight) or plot the accuracy curves."""
        train_data_loader = DataLoader(dataset=self.train_data, batch_size=self.opt.batch_size, shuffle=True, drop_last=False)
        test_data_loader = DataLoader(dataset=self.test_data, batch_size=self.opt.batch_size, shuffle=True)
        # self._reset_params()
        if self.opt.save_weight is True:
            # _train returns the checkpoint path of the best model.
            best_model_path = self._train(train_data_loader, test_data_loader)
            self.model.load_state_dict(torch.load(best_model_path))
            self.model.eval()
            test_acc, test_f1 = self._evaluate_acc_f1(test_data_loader)
            logger.info('>> test_acc: {:.4f}, test_f1: {:.4f}'.format(test_acc, test_f1))
        else:
            # _train returns (train_acc_list, test_acc_list) for plotting.
            fig, ax = plt.subplots(1, 1, figsize=(9, 9))
            train_acc_list, test_acc_list = self._train(train_data_loader, test_data_loader)
            logger.info('test acc:{}'.format(max(test_acc_list)))
            ax.plot(np.arange(1, self.opt.num_epoch+1), train_acc_list)
            ax.plot(np.arange(1, self.opt.num_epoch+1), test_acc_list)
            plt.show()

    def _train(self, train_data_loader, test_data_loader):
        """Run the epoch loop on GPU.

        Returns the best checkpoint path when opt.save_weight is set,
        otherwise the (train_acc_list, val_acc_list) pair.
        """
        max_val_acc = 0
        max_val_f1 = 0
        global_step = 0
        path = None
        val_acc_list = []
        train_acc_list = []
        self.model.cuda()
        for epoch in range(self.opt.num_epoch):
            self.model.train()
            logger.info('>' * 100)
            logger.info('epoch: {}'.format(epoch + 1))
            n_correct, n_total, loss_total = 0, 0, 0
            for batch in train_data_loader:
                global_step += 1
                self.optimizer.zero_grad()
                input, label = batch['text_indices'].cuda(), batch['polarity'].cuda()
                outputs = self.model(input)
                loss = self.criterion(outputs, label)
                loss.backward()
                self.optimizer.step()
                # Running accuracy/loss over the whole epoch so far.
                n_correct += (torch.argmax(outputs, -1) == label).sum().item()
                n_total += len(outputs)
                loss_total += loss.item() * len(outputs)
                if global_step % self.opt.log_step == 0:
                    train_acc = n_correct / n_total
                    train_loss = loss_total / n_total
                    logger.info('loss: {:.4f}, acc: {:.4f}'.format(train_loss, train_acc))
            train_acc_list.append(n_correct/n_total)
            val_acc, val_f1 = self._evaluate_acc_f1(test_data_loader)
            logger.info('> val_acc: {:.4f}, val_f1: {:.4f}'.format(val_acc, val_f1))
            if self.opt.save_weight is True:
                # Checkpoint whenever validation accuracy improves.
                if val_acc > max_val_acc:
                    max_val_acc = val_acc
                    if not os.path.exists('state_dict'):
                        os.mkdir('state_dict')
                    path = 'state_dict/{0}_{1}_val_acc{2}'.format(self.opt.model_name, self.opt.dataset,
                                                                  round(val_acc, 4))
                    torch.save(self.model.state_dict(), path)
                    logger.info('>> saved: {}'.format(path))
                if val_f1 > max_val_f1:
                    max_val_f1 = val_f1
            else:
                val_acc_list.append(val_acc)
                if val_acc > max_val_acc:
                    max_val_acc = val_acc
                    logger.info('>> best acc: {}'.format(val_acc))
                path = (train_acc_list, val_acc_list)
        return path

    def _evaluate_acc_f1(self, data_loader):
        """Return (accuracy, macro-F1) of the current model on *data_loader*.

        NOTE(review): f1_score is restricted to labels=[0, 1, 2] while the
        weibo polarity_dict defines 6 classes — confirm this is intended.
        """
        n_correct, n_total = 0, 0
        t_targets_all, t_outputs_all = None, None
        # switch model to evaluation mode
        self.model.eval()
        with torch.no_grad():
            for t_batch, t_sample_batched in enumerate(data_loader):
                input, t_targets = t_sample_batched['text_indices'].cuda(), t_sample_batched['polarity'].cuda()
                t_outputs = self.model(input)
                n_correct += (torch.argmax(t_outputs, -1) == t_targets).sum().item()
                n_total += len(t_outputs)
                # Accumulate all targets/outputs to compute F1 at the end.
                if t_targets_all is None:
                    t_targets_all = t_targets
                    t_outputs_all = t_outputs
                else:
                    t_targets_all = torch.cat((t_targets_all, t_targets), dim=0)
                    t_outputs_all = torch.cat((t_outputs_all, t_outputs), dim=0)
        acc = n_correct / n_total
        f1 = f1_score(t_targets_all.cpu(), torch.argmax(t_outputs_all, -1).cpu(), labels=[0, 1, 2], average='macro')
        return acc, f1

    def _print_args(self):
        """Log the parameter counts and every option on self.opt."""
        n_trainable_params, n_nontrainable_params = 0, 0
        for p in self.model.parameters():
            n_params = torch.prod(torch.tensor(p.shape))
            if p.requires_grad:
                n_trainable_params += n_params
            else:
                n_nontrainable_params += n_params
        logger.info('n_trainable_params: {0}, n_nontrainable_params: {1}'.format(n_trainable_params, n_nontrainable_params))
        logger.info('> training arguments:')
        for arg in vars(self.opt):
            logger.info('>>> {0}: {1}'.format(arg, getattr(self.opt, arg)))

    def _reset_params(self):
        """Re-initialise all non-BERT parameters (currently unused; see run())."""
        for child in self.model.children():
            if type(child) != BertModel:  # skip bert params
                for p in child.parameters():
                    if p.requires_grad:
                        if len(p.shape) > 1:
                            self.opt.initializer(p)
                        else:
                            # 1-D params (biases): uniform around 0.
                            stdv = 1. / math.sqrt(p.shape[0])
                            torch.nn.init.uniform_(p, a=-stdv, b=stdv)
def main():
    """Parse hyper-parameters, resolve model/dataset choices, seed RNGs,
    configure logging, and launch an Instructor training run."""
    # Hyper Parameters
    parser = argparse.ArgumentParser()
    parser.add_argument('--optimizer', default='adamw', type=str)
    parser.add_argument('--initializer', default='xavier_uniform_', type=str)
    # parser.add_argument('--train', action='store_false')
    # parser.add_argument('--mixed_precision', default=True, type=bool)
    parser.add_argument('--l2reg', default=0.01, type=float)
    # parser.add_argument('--embed_dim', default=300, type=int)
    # parser.add_argument('--hidden_dim', default=300, type=int)
    parser.add_argument('--bert_dim', default=768, type=int)
    parser.add_argument('--pretrained_bert_name', default='bert-base-uncased', type=str)
    parser.add_argument('--hops', default=3, type=int)
    parser.add_argument('--valset_ratio', default=0, type=float, help='set ratio between 0 and 1 for validation support')
    # The following parameters are only valid for the lcf-bert model
    # parser.add_argument('--local_context_focus', default='cdm', type=str, help='local context focus mode, cdw or cdm')
    # parser.add_argument('--SRD', default=3, type=int, help='semantic-relative-distance, see the paper of LCF-BERT model')
    # GNN config
    parser.add_argument('--save_weight', action='store_true', default=False)
    parser.add_argument('--max_seq_len', default=80, type=int)
    parser.add_argument('--device', default=None, type=str, help='e.g. cuda:0')
    parser.add_argument('--polarities_dim', default=3, type=int)
    parser.add_argument('--log_step', default=5, type=int)
    parser.add_argument('--input_dim', default=768, type=int)
    parser.add_argument('--learning_rate', default=5e-5, type=float, help='try 5e-5, 2e-5 for BERT, 1e-3 for others')
    parser.add_argument('--output_dim', default=2, type=int)
    parser.add_argument('--dataset', default='sst2', type=str)
    parser.add_argument('--model_name', default='TinyBertLSTMAttentionSST', type=str, help='try BertOnly, TinyBert')
    parser.add_argument('--dropout', default=0, type=float)
    parser.add_argument('--batch_size', default=64, type=int, help='try 16, 32, 64 for BERT models')
    parser.add_argument('--num_head', default=12, type=int)
    parser.add_argument('--pool', default='mean', type=str)
    parser.add_argument('--decoder_dim', default=768, type=int)
    parser.add_argument('--num_epoch', default=15, type=int, help='try larger number for non-BERT models')
    parser.add_argument('--seed', default=17, type=int, help='set seed for reproducibility')
    opt = parser.parse_args()
    # Dataset name -> directory of its data files.
    dataset = {
        'usual': './data/usual',
        'virus': './data/virus',
        'sst2': './data/SST-2'
    }
    # Weibo emotion label -> class index ('neural' is a dataset typo kept
    # alongside the correct 'neutral' spelling).
    opt.polarity_dict = {
        'angry': 0,
        'sad': 1,
        'happy': 2,
        'fear': 3,
        'surprise': 4,
        'neural': 5,
        'neutral': 5
    }
    # Model name -> HuggingFace checkpoint used by its tokenizer/encoder.
    pretrained_models = {
        'BertOnly': "bert-base-chinese",
        'TinyBertOnly': "voidful/albert_chinese_tiny",
        'TinyBertLSTM': "voidful/albert_chinese_tiny",
        # 'TinyBertLSTMAttention': "bert-base-chinese",
        # 'TinyBertLSTMAttention': 'sentence-transformers/ce-ms-marco-TinyBERT-L-6',
        'TinyBertLSTMAttention': 'voidful/albert_chinese_tiny',
        'TinyBertCNN': 'voidful/albert_chinese_tiny',
        'TinyBertTCN': 'voidful/albert_chinese_tiny',
        'TinyBertLSTMAttentionSST': 'sentence-transformers/ce-ms-marco-TinyBERT-L-6'
    }
    model_classes = {
        'BertOnly': BertOnly,
        'TinyBertLSTM': TinyBertLSTM,
        'TinyBertOnly': TinyBertOnly,
        'TinyBertLSTMAttention': TinyBertLSTMAttention,
        'TinyBertCNN': TinyBertCNN,
        'TinyBertTCN': TinyBertTCN,
        'TCN': TCN,
        'TinyBertLSTMAttentionSST': TinyBertLSTMAttentionSST
    }
    if 'Bert' in opt.model_name:
        opt.pretrained_model = pretrained_models[opt.model_name]
    opt.model_class = model_classes[opt.model_name]
    if opt.seed is not None:
        # Seed every RNG and force deterministic cuDNN for reproducibility.
        random.seed(opt.seed)
        np.random.seed(opt.seed)
        torch.manual_seed(opt.seed)
        torch.cuda.manual_seed(opt.seed)
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
    opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    opt.dataset_file = dataset[opt.dataset]
    # Bug fix: logging.FileHandler raises FileNotFoundError when ./log
    # does not exist yet; create it up front.
    if not os.path.exists('./log'):
        os.makedirs('./log')
    log_file = './log/{}-{}-{}.log'.format('usual', opt.dataset, strftime("%y%m%d-%H%M", localtime()))
    logger.addHandler(logging.FileHandler(log_file))
    ins = Instructor(opt)
    ins.run()
if __name__ == '__main__':
main()
| import torch
import torch.nn as nn
from tcn import TemporalConvNet
from transformers import BertModel, AutoModel
import torch.nn.functional as F
class BertOnly(nn.Module):
    """Plain bert-base-chinese encoder with a 6-way classification head."""

    def __init__(self, opt):
        super(BertOnly, self).__init__()
        self.bert = BertModel.from_pretrained('bert-base-chinese')
        self.dense = nn.Linear(768, 6)
        self.dropout = nn.Dropout(0.1)

    def forward(self, inputs):
        """Classify token-id batches; outputs[1] is the pooled [CLS] vector."""
        encoded = self.bert(input_ids=inputs)
        pooled = self.dropout(encoded[1])
        return self.dense(pooled)
class TinyBertOnly(nn.Module):
    """albert_chinese_tiny encoder (312-d) with a 6-way classification head."""

    def __init__(self, opt):
        super(TinyBertOnly, self).__init__()
        self.bert = AutoModel.from_pretrained("voidful/albert_chinese_tiny")
        self.dense = nn.Linear(312, 6)
        self.dropout = nn.Dropout(0.1)

    def forward(self, inputs):
        """Classify token-id batches; outputs[1] is the pooled [CLS] vector."""
        encoded = self.bert(input_ids=inputs)
        pooled = self.dropout(encoded[1])
        return self.dense(pooled)
class TinyBertLSTM(nn.Module):
    """Albert-tiny encoder followed by a 1-layer BiLSTM; the last LSTM time
    step feeds a 6-way classification head."""
    def __init__(self, opt):
        super(TinyBertLSTM, self).__init__()
        self.bert = AutoModel.from_pretrained("voidful/albert_chinese_tiny")
        # 312 = albert-tiny hidden size; BiLSTM output is 2 * 128 = 256.
        self.bilstm = nn.LSTM(312, 128, 1, bidirectional=True)
        self.dense1 = nn.Linear(312, 256)  # NOTE(review): unused in forward
        self.softmax = nn.Softmax(dim=-1)  # NOTE(review): unused in forward
        self.dropout = nn.Dropout(0.1)     # NOTE(review): unused in forward
        self.dense2 = nn.Linear(256, 6)
    def init_hidden(self, batch_size):
        # Random (not zero) initial hidden/cell states, placed on GPU.
        return torch.randn(2, batch_size, 128).cuda(), torch.randn(2, batch_size, 128).cuda()
    def forward(self, inputs):
        # assumes old-style transformers tuple output (sequence, pooled) -- TODO confirm
        outputs = self.bert(input_ids=inputs)
        hidden_x, pooled_output = outputs
        h0, c0 = self.init_hidden(hidden_x.size(0))
        # LSTM expects (seq, batch, feature); keep only the last time step.
        lstm_out_x, (h, c) = self.bilstm(hidden_x.permute(1, 0, 2), (h0.cuda(), c0.cuda()))
        logits = self.dense2(F.relu(lstm_out_x[-1]))
        return logits
class TinyBertLSTMAttention(nn.Module):
    """Albert-tiny encoder + BiLSTM, attended with the pooled [CLS] vector
    as the query; the 256-d attention context feeds the output head.

    NOTE(review): near-duplicate of TinyBertLSTMAttentionSST (different
    checkpoint only); consider sharing a single implementation.
    """
    def __init__(self, opt):
        super(TinyBertLSTMAttention, self).__init__()
        self.bert = AutoModel.from_pretrained("voidful/albert_chinese_tiny")
        self.bilstm = nn.LSTM(opt.input_dim, 128, 1, bidirectional=True)
        self.dense1 = nn.Linear(opt.input_dim, 256)
        self.softmax = nn.Softmax(dim=-1)
        self.dropout = nn.Dropout(0)  # p=0: effectively a no-op
        self.dense2 = nn.Linear(256, opt.output_dim)
    def init_hidden(self, batch_size):
        # Random initial hidden/cell states for the 2-direction LSTM, on GPU.
        return torch.randn(2, batch_size, 128).cuda(), torch.randn(2, batch_size, 128).cuda()
    def forward(self, inputs):
        # assumes old-style transformers tuple output (sequence, pooled) -- TODO confirm
        outputs = self.bert(input_ids=inputs)
        hidden_x, pooled_output = outputs
        h0, c0 = self.init_hidden(hidden_x.size(0))
        lstm_out_x, (h, c) = self.bilstm(hidden_x.permute(1, 0, 2), (h0.cuda(), c0.cuda()))
        # Attention scores: pooled query (B,1,256) x LSTM keys (B,256,T).
        pool_drop = self.dense1(self.dropout(pooled_output))
        a = self.softmax(torch.matmul(pool_drop.view(-1, 1, 256), lstm_out_x.permute(1, 2, 0)))
        # Weighted sum over time steps -> (B, 1, 256) context vector.
        a_vec = torch.matmul(a, lstm_out_x.permute(1, 0, 2))
        a_vec_dropout = self.dropout(F.relu(a_vec.view(-1, 256)))
        logits = self.dense2(a_vec_dropout)
        return logits
class TinyBertCNN(nn.Module):
    """Albert-tiny encoder with three parallel Conv1d+MaxPool branches
    (kernel widths 3/4/5) over the token axis, concatenated into the head."""
    def __init__(self, opt):
        super(TinyBertCNN, self).__init__()
        self.bert = AutoModel.from_pretrained("voidful/albert_chinese_tiny")
        self.kernel = 2  # conv output channels per branch
        self.layer1 = nn.Sequential(nn.Conv1d(312, self.kernel, 3),
                                    nn.MaxPool1d(3),
                                    nn.ReLU()
                                    )
        self.layer2 = nn.Sequential(nn.Conv1d(312, self.kernel, 4),
                                    nn.MaxPool1d(3),
                                    nn.ReLU()
                                    )
        self.layer3 = nn.Sequential(nn.Conv1d(312, self.kernel, 5),
                                    nn.MaxPool1d(3),
                                    nn.ReLU()
                                    )
        self.maxpool = nn.MaxPool1d(3)
        # assumes the concatenated pooled features flatten to exactly 128
        # for the sequence length used in training -- TODO confirm
        self.dense = nn.Linear(128, 6)
    def forward(self, inputs):
        layers = [self.layer1, self.layer2, self.layer3]
        # assumes old-style transformers tuple output (sequence, pooled) -- TODO confirm
        hidden_x, pooled_output = self.bert(input_ids=inputs)
        # Conv over the token axis: (B, T, 312) -> (B, 312, T).
        x1, x2, x3 = [layer(hidden_x.permute(0, 2, 1)) for layer in layers]
        max_out4 = F.relu(torch.cat((self.maxpool(x1), self.maxpool(x2), self.maxpool(x3)), dim=-1))
        logits = self.dense(max_out4.view(-1, 128))
        return logits
class TinyBertTCN(nn.Module):
    """Albert-tiny encoder feeding a temporal convolutional network; the
    final TCN time step is projected to *output_size* logits."""
    def __init__(self, opt, embedding_matrix, input_size, output_size, num_channels,
                 kernel_size=2, dropout=0.3, emb_dropout=0.1, tied_weights=False):
        super(TinyBertTCN, self).__init__()
        self.output_size = output_size
        self.bert = AutoModel.from_pretrained("voidful/albert_chinese_tiny")
        # 312 = albert-tiny hidden size (TCN input channels).
        self.tcn = TemporalConvNet(312, num_channels, kernel_size, dropout=dropout)
        self.dense = nn.Linear(num_channels[-1], output_size)
        if tied_weights:
            # Weight tying is only validated here; the actual tie is
            # commented out below.
            if num_channels[-1] != input_size:
                raise ValueError('When using the tied flag, nhid must be equal to emsize')
            # self.dense.weight = self.decode.weight
            print("Weight tied")
        self.drop = nn.Dropout(emb_dropout)
        self.emb_dropout = emb_dropout
        # self.init_weights()
    # def init_weights(self):
    #     self.encoder.weight.data.normal_(0, 0.01)
    #     self.decoder.bias.data.fill_(0)
    #     self.decoder.weight.data.normal_(0, 0.01)
    def forward(self, inputs):
        # assumes old-style transformers tuple output (sequence, pooled) -- TODO confirm
        hidden_x, pooled_output = self.bert(input_ids=inputs)
        # TCN expects (B, C, T); project back to (B, T, out) afterwards.
        y = self.tcn(hidden_x.permute(0, 2, 1))
        y = self.dense(y.permute(0, 2, 1))
        # Keep only the last time step as the sequence representation.
        logits = y[:, -1].view(-1, self.output_size)
        return logits.contiguous()
class TCN(nn.Module):
    """Pretrained-embedding TCN followed by multi-head self-attention and
    a pooled classification head (pooling mode chosen by opt.pool)."""
    def __init__(self, opt, embedding_matrix, input_size, output_size, num_channels,
                 kernel_size=2, dropout=0.3, emb_dropout=0.1, tied_weights=False):
        super(TCN, self).__init__()
        self.encoder = nn.Embedding.from_pretrained(torch.tensor(embedding_matrix, dtype=torch.float))
        self.output_size = output_size
        self.tcn = TemporalConvNet(opt.input_dim, num_channels, kernel_size, dropout=dropout)
        if tied_weights:
            if num_channels[-1] != input_size:
                raise ValueError('When using the tied flag, nhid must be equal to emsize')
            self.decoder = nn.Linear(num_channels[-1], opt.decoder_dim)
            print("Weight tied")
        # NOTE(review): when tied_weights is False, self.decoder is never
        # created, yet init_weights() and forward() both use it — calling
        # this class with tied_weights=False will raise AttributeError.
        self.drop = nn.Dropout(emb_dropout)
        self.mhsa = nn.MultiheadAttention(opt.decoder_dim, opt.num_head)
        # Separate Q/K/V projections feeding the multi-head attention.
        self.lin_q = nn.Linear(opt.decoder_dim, opt.decoder_dim)
        self.lin_k = nn.Linear(opt.decoder_dim, opt.decoder_dim)
        self.lin_v = nn.Linear(opt.decoder_dim, opt.decoder_dim)
        self.emb_dropout = emb_dropout
        self.relu = nn.ReLU()
        self.pool_way = opt.pool
        self.init_weights()
        self.dense = nn.Linear(opt.decoder_dim, output_size)
    def init_weights(self):
        # Small-Gaussian init for embedding + decoder; decoder bias zeroed.
        self.encoder.weight.data.normal_(0, 0.01)
        self.decoder.bias.data.fill_(0)
        self.decoder.weight.data.normal_(0, 0.01)
    def forward(self, inputs):
        hidden_x = self.encoder(inputs)
        # TCN expects (B, C, T); decode back to (B, T, decoder_dim).
        x = self.tcn(hidden_x.permute(0, 2, 1))
        x = self.decoder(x.permute(0, 2, 1))
        # MultiheadAttention expects (T, B, dim).
        query = self.lin_q(x.permute(1, 0, 2))
        key = self.lin_k(x.permute(1, 0, 2))
        value = self.lin_v(x.permute(1, 0, 2))
        output, _ = self.mhsa(query, key, value)
        # Pool over the time dimension per opt.pool ('max' / 'mean' /
        # anything else = average of both).
        if self.pool_way == 'max':
            output = torch.max(output, dim=0)[0]
        elif self.pool_way == 'mean':
            output = torch.mean(output, dim=0)
        else:
            output = (torch.mean(output, dim=0) + torch.max(output, dim=0)[0])/2
        logits = self.dense(self.relu(output))
        return logits.contiguous()
class TinyBertLSTMAttentionSST(nn.Module):
    """English SST-2 variant of TinyBertLSTMAttention: identical BiLSTM +
    pooled-query attention architecture on a TinyBERT-L-6 checkpoint.

    NOTE(review): near-duplicate of TinyBertLSTMAttention; consider
    sharing one implementation parameterised by checkpoint name.
    """
    def __init__(self, opt):
        super(TinyBertLSTMAttentionSST, self).__init__()
        self.bert = AutoModel.from_pretrained("sentence-transformers/ce-ms-marco-TinyBERT-L-6")
        self.bilstm = nn.LSTM(opt.input_dim, 128, 1, bidirectional=True)
        self.dense1 = nn.Linear(opt.input_dim, 256)
        self.softmax = nn.Softmax(dim=-1)
        self.dropout = nn.Dropout(0)  # p=0: effectively a no-op
        self.dense2 = nn.Linear(256, opt.output_dim)
    def init_hidden(self, batch_size):
        # Random initial hidden/cell states for the 2-direction LSTM, on GPU.
        return torch.randn(2, batch_size, 128).cuda(), torch.randn(2, batch_size, 128).cuda()
    def forward(self, inputs):
        # assumes old-style transformers tuple output (sequence, pooled) -- TODO confirm
        outputs = self.bert(input_ids=inputs)
        hidden_x, pooled_output = outputs
        h0, c0 = self.init_hidden(hidden_x.size(0))
        lstm_out_x, (h, c) = self.bilstm(hidden_x.permute(1, 0, 2), (h0.cuda(), c0.cuda()))
        # Attention scores: pooled query (B,1,256) x LSTM keys (B,256,T).
        pool_drop = self.dense1(self.dropout(pooled_output))
        a = self.softmax(torch.matmul(pool_drop.view(-1, 1, 256), lstm_out_x.permute(1, 2, 0)))
        a_vec = torch.matmul(a, lstm_out_x.permute(1, 0, 2))
        a_vec_dropout = self.dropout(F.relu(a_vec.view(-1, 256)))
        logits = self.dense2(a_vec_dropout)
return logits | {
"imports": [
"/Bert_weibo.py"
]
} |
01201617/commons | refs/heads/master | /ml_package/gaussian_process/book/gpr-cauchy.py | import sys
import putil
import numpy as np
from pylab import *
from numpy import exp, log
# TODO ↓このライブラリがわからん!!
from ml_package.gaussian_process.book.elliptical import elliptical
from numpy.linalg import cholesky as chol
def gpr_cauchy(f, param):
    """Likelihood wrapper handed to the elliptical slice sampler.

    Unpacks (x, y, gamma, Kinv) from *param* and evaluates the Cauchy GP
    likelihood on the first M = len(x) entries of f/y, i.e. the training
    points only (the remainder of f covers the prediction grid).
    """
    x, y, gamma, Kinv = param
    M = len(x)
    return gpr_cauchy_lik(y[:M], f[:M], gamma, Kinv)
def gpr_cauchy_lik(y, f, gamma, Kinv):
    """Unnormalised log posterior of a GP with Cauchy-style noise.

    Data term: -sum(log(gamma + (y - f)^2 / gamma));
    prior term: -f' Kinv f / 2.
    """
    residual = (y - f) ** 2 / gamma
    data_term = np.sum(np.log(gamma + residual))
    prior_term = np.dot(f, np.dot(Kinv, f)) / 2
    return -data_term - prior_term
def kgauss(tau, sigma):
    """Return an RBF kernel k(x, y) = e^tau * exp(-(x - y)^2 / e^sigma).

    tau and sigma are log-scale amplitude and length-scale parameters.
    """
    def kernel(x, y):
        return np.exp(tau) * np.exp(-(x - y) ** 2 / np.exp(sigma))
    return kernel
def kernel_matrix(xx, kernel):
    """Gram matrix of *kernel* over the points xx, with a tiny jitter
    (1e-6) added on the diagonal for numerical stability."""
    N = len(xx)
    eta = 1e-6
    K = np.empty((N, N))
    for i, xi in enumerate(xx):
        for j, xj in enumerate(xx):
            K[i, j] = kernel(xi, xj)
    return K + eta * np.eye(N)
def gpr_mcmc(x, y, iters, xmin, xmax, gamma):
    """Sample a Cauchy-noise GP posterior with elliptical slice sampling.

    The latent vector f covers the M training inputs plus a grid of 100
    test points in [xmin, xmax]; each sample's grid section is plotted,
    and the running mean g/iters is drawn in black at the end.
    """
    xx = np.hstack((x, np.linspace(xmin, xmax, 100)))
    M = len(x)
    N = len(xx)
    # Kernel over training + grid points, fixed log-hyperparameters (1, 1).
    K = kernel_matrix(xx, kgauss(1, 1))
    # inv/randn/plot come from the pylab star import at the top of the file.
    Kinv = inv(K[0:M, 0:M])
    S = chol(K)
    f = np.dot(S, randn(N))  # initial draw from the GP prior
    g = np.zeros(len(xx))    # accumulator for the posterior mean
    for iter in range(iters):
        f, lik = elliptical(f, S, gpr_cauchy, (x, y, gamma, Kinv))
        g = g + f
        print('\r[iter %2d]' % (iter + 1))
        plot(xx[M:], f[M:])  # color='gray')
    print('')
    plot(x, y, 'bx', markersize=14)
    plot(xx[M:], g[M:] / iters, 'k', linewidth=3)
    # The following raised an error, so it is commented out for now.
    # putil.simpleaxis()
def usage():
    """Print the command-line synopsis, then terminate with exit status 0."""
    print('usage: gpr-cauchy.py data.xyf iters [output]')
    sys.exit(0)
def main(name_txt=''):
    """Load (x, y, f) columns from *name_txt* and run the Cauchy-GP demo,
    clipping the plot axes to fixed bounds."""
    xmin = -5
    xmax = 5
    ymin = -7.5
    ymax = 12.5
    gamma = 0.2  # Cauchy noise scale
    # Command-line argument handling replaced by in-code settings:
    # if len(sys.argv) < 3:
    #     usage()
    # else:
    #     [x, y, f] = np.loadtxt(sys.argv[1]).T
    #     iters = int(sys.argv[2])
    [x, y, f] = np.loadtxt(name_txt).T
    iters = 100
    gpr_mcmc(x, y, iters, xmin, xmax, gamma)
    axis([xmin, xmax, ymin, ymax])
    # The following raised an error, so it is commented out for now.
    # if len(sys.argv) > 3:
    #     putil.savefig(sys.argv[3])
    show()
if __name__ == "__main__":
main(name_txt="txt_for_gpr-cauchy.txt") | # Elliptical slice sampling.
# $Id: elliptical.py,v 1.1 2018/02/27 03:23:39 daichi Exp $
# based on the code of Iain Murray
# http://homepages.inf.ed.ac.uk/imurray2/pub/10ess/elliptical_slice.m
#
# xx : Dx1 initial vector
# prior : DxD matrix from chol(S)
# likfun : function of likelihood evaluation
# params : parameters passed to likfun (optional)
# curlik : current likelihood (possibly from previous iteration)
# angle : default 0
#
#
import numpy as np
from pylab import *
def elliptical(xx, prior, likfun, params=(), curlik=None, angle=0):
    """One update of elliptical slice sampling (after Iain Murray's code).

    xx     : current state, length-D vector
    prior  : DxD Cholesky factor of the prior covariance, chol(S)
    likfun : log-likelihood function likfun(state, params)
    params : extra arguments forwarded to likfun
    curlik : log-likelihood of xx if already known (saves one evaluation)
    angle  : bracket width; <= 0 means the full [0, 2*pi) ellipse
    Returns (new_state, its_log_likelihood).
    """
    # initialize
    D = len(xx)
    if curlik is None:
        curlik = likfun(xx, params)
    # set up the ellipse: auxiliary prior draw nu, and the log slice
    # height hh = curlik + log(Uniform(0, 1)).
    nu = np.dot(prior, randn(D))
    hh = log(rand()) + curlik
    # set up the bracket of candidate angles
    if angle <= 0:
        phi = rand() * 2 * pi
        min_phi = phi - 2 * pi
        max_phi = phi
    else:
        min_phi = - angle * rand()
        max_phi = min_phi + angle
        phi = min_phi + rand() * (max_phi - min_phi)
    # slice sampling loop: shrink the bracket toward phi = 0 (the current
    # point) until a proposal clears the slice height.
    while True:
        prop = xx * cos(phi) + nu * sin(phi)
        curlik = likfun(prop, params)
        if curlik > hh:
            break
        if phi > 0:
            max_phi = phi
        elif phi < 0:
            min_phi = phi
        else:
            # phi == 0 reproduces xx, whose likelihood exceeds hh by
            # construction — reaching here indicates a bug.
            raise IOError('BUG: slice sampling shrunk to the current position.')
        phi = min_phi + rand() * (max_phi - min_phi)
return (prop, curlik) | {
"imports": [
"/ml_package/gaussian_process/book/elliptical.py"
]
} |
013292/lightweight_tor | refs/heads/master | /relay_server1.py | #!/usr/bin/env python3
# python modules
import socket
import json
import base64
import requests
import traceback
# lightweight_tor modules
import crypt
import network
import logger
DIRECTORY_PORT = 3001
RELAY_PORT = 5002
FORWARDING_PORT = 7002
HASH_DELIMITER = b'###'
DECRYPTED_AES_KEY = ''
PRIVATE_KEY = ''
def main():
    """Relay entry point: load this node's RSA private key into the module
    global, then serve incoming circuits forever."""
    # get RSA private key
    global PRIVATE_KEY
    PRIVATE_KEY = get_pk()
    # open socket connection
    listen()
def listen():
    """Accept onion payloads, peel one layer, forward, and relay replies back."""
    try:
        serversocket = network.start_server('localhost', RELAY_PORT)
        next_ip = None
        while True:
            logger.log('CURRENT RELAY NODE: ' + str(RELAY_PORT))
            logger.log('RECIEVING PORT:' + str(RELAY_PORT) + ' FORWARDING PORT:' + str(FORWARDING_PORT))
            clientsocket, address = serversocket.accept()
            # length-prefixed receive of the (still encrypted) onion payload
            payload = network.recv_by_size(clientsocket)
            previous_ip = parse_address(address)
            logger.log('received payload from: ', previous_ip)
            logger.log('Payload (trunc): ', payload[:100], newline=True)
            logger.header('---- BEGIN DECRYPTION OF RECEIVED PAYLOAD ----')
            # strip one encryption layer; yields next hop address + inner message
            next_ip, message = deserialize_payload(payload)
            logger.log('begin forwarding payload to next node...')
            response = forward_payload(next_ip, message)
            if response is not None:
                '''
                Case: send to previous_ip
                '''
                # re-encrypt the response with this node's AES key and return it upstream
                logger.log('Response returned from: ' + next_ip, newline=True)
                logger.header('---- BEGIN ENCRYPTION OF RETURN PAYLOAD ----')
                logger.log('Payload being encrypted (trunc):', response[:100])
                logger.log('aes_key used:', DECRYPTED_AES_KEY)
                encrypted_payload = network.prepend_length(serialize_payload(response))
                logger.log('send payload to previous node: ', previous_ip)
                clientsocket.sendall(encrypted_payload)
            clientsocket.close()
    except Exception:
        logger.error("Unable to connect to server")
        logger.error(traceback.format_exc())
        return
def deserialize_payload(payload):
    '''
    Peel one onion layer off an incoming payload.

    :param: bytestring payload: base64( rsa_encrypt(aes_key) + '###' + aes_encrypt(message) )
    :returns: (next_ip, message) where message is the still-encrypted inner
              layer (or '' at the exit node).
    Side effect: stores the session AES key in module-global DECRYPTED_AES_KEY
    so the response path can re-encrypt with the same key.
    '''
    decoded_payload = base64.b64decode(payload)
    logger.log('Decoded Payload (rsa_encrypt(aes_key) + aes_encrypt(payload)):', decoded_payload, newline=True)
    # split the RSA-wrapped AES key off from the AES-encrypted body
    encrypted_aes_key, encrypted_message = split_bytes(HASH_DELIMITER, decoded_payload)
    global DECRYPTED_AES_KEY
    DECRYPTED_AES_KEY = crypt.decrypt_rsa(PRIVATE_KEY, encrypted_aes_key)
    next_ip, message = crypt.decrypt_payload(DECRYPTED_AES_KEY, encrypted_message) # decrypted_message = encypted_payload + next_ip
    logger.log('Decrypted AES Key:', DECRYPTED_AES_KEY)
    logger.log('Decrypted Payload:', next_ip, message)
    logger.header('---- END DECRYPTION OF RECEIVED PAYLOD ----', newline=True)
    return next_ip, message
def serialize_payload(message):
    """AES-encrypt a response with this node's session key, base64-encoded.

    :param bytes message: raw response bytes to protect
    :raises TypeError: when message is not bytes
    :rtype: bytes (base64 of the Fernet token)
    """
    if not isinstance(message, bytes):
        # TypeError (still an Exception subclass, so broad handlers keep
        # working) with a readable message instead of the old
        # Exception('...', <class>) two-argument tuple payload
        raise TypeError(f'Message should be of byte format, not {type(message)}')
    aes_encrypted_message = crypt.encrypt_aes(DECRYPTED_AES_KEY, message)
    return base64.b64encode(aes_encrypted_message)
def forward_payload(next_ip, message):
    """Forward one hop: HTTP GET at the exit node, otherwise relay onward.

    :param str next_ip: destination URL (exit) or 'host:port' of the next relay
    :param str message: remaining onion layers ('' at the exit node)
    :returns: response bytes from downstream, to be re-encrypted for upstream
    """
    if is_exit_node(message):
        logger.log('EXIT NODE FOUND')
        logger.log('begin request to destination')
        # exit node: perform the actual web request on behalf of the client
        req = requests.get(next_ip)
        return req.text.encode()
    else:
        logger.log('RELAY NODE FOUND')
        logger.log('next relay node is: ' + next_ip)
        message = message.encode()
        host, port = next_ip.split(':')
        # bind the dedicated forwarding port, then connect to the next relay
        relay_socket = network.connect_server('localhost', FORWARDING_PORT, host, port)
        payload = network.prepend_length(message)
        relay_socket.sendall(payload)
        response = network.recv_by_size(relay_socket)
        relay_socket.close()
        return response
    return  # NOTE(review): unreachable — both branches above already return
def is_exit_node(message): #think of better way to check?
    """Return True when there are no remaining onion layers (empty message).

    Fix: the original used `message is ''`, an object-identity test; equal
    strings are not guaranteed to be the same object, so a dynamically built
    empty string could be misclassified. Equality is the correct check.
    """
    return message == ''
def parse_address(addr):
    """Format a (host, port) tuple from socket.accept() as 'host:port'."""
    host, port = addr
    return f'{host}:{port}'
def split_bytes(delimiter, bytestring):
    """Split bytestring at the first occurrence of delimiter.

    :param bytes delimiter: separator (e.g. b'###')
    :param bytes bytestring: data of the form <left><delimiter><right>
    :returns: (left, right) byte strings
    :raises TypeError: when delimiter is not bytes (was a bare Exception
        carrying a ('msg', type) tuple)
    :raises ValueError: when the delimiter is absent — the old code used
        find() and silently returned a garbage split when it returned -1
    """
    if not isinstance(delimiter, bytes):
        raise TypeError(f'Delimiter used should be of byte format, not {type(delimiter)}')
    left, found, right = bytestring.partition(delimiter)
    if not found:
        raise ValueError('delimiter not found in bytestring')
    return left, right
def get_pk(): # private key lookup from directory
    """Fetch this relay's RSA private key (base64-encoded PEM) from the directory.

    NOTE(review): the directory hands out every node's *private* key over a
    plain socket — acceptable for this demo, unsafe for anything real.
    """
    directory_socket = socket.socket()
    directory_socket.connect(('localhost', DIRECTORY_PORT))
    payload = directory_socket.recv(8192) # payload is received as bytes, decode to get as string
    directory_socket.close()
    relay_nodes = json.loads(payload)
    # table format: { 'host:port': [b64(private_pem), b64(public_pem)] }
    private_key = base64.b64decode(relay_nodes['localhost:' + str(RELAY_PORT)][0])
    return private_key
if __name__ == '__main__':
main()
| import os
import base64
import re
import cryptography
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
import ast
def gen_aes_key():
    '''
    Generate a fresh symmetric key using the Fernet recipe layer.
    :rtype: A URL-safe base64-encoded 32-byte key (bytes)
    '''
    return Fernet.generate_key()
def encrypt_aes(key, plaintext_bytes):
    '''
    Symmetrically encrypt plaintext_bytes under the given Fernet key.
    :param bytes key: AES Fernet key
    :param bytes plaintext_bytes: data to encrypt
    :rtype: bytes (Fernet token)
    '''
    cipher = Fernet(key)
    return cipher.encrypt(plaintext_bytes)
def decrypt_aes(key, token):
    '''
    Inverse of encrypt_aes: recover plaintext bytes from a Fernet token.
    :rtype: bytes
    '''
    cipher = Fernet(key)
    return cipher.decrypt(token)
def gen_rsa_keypair():
    '''
    Generate an RSA keypair used to wrap per-hop AES keys.
    :rtype: (private_key, public_key) cryptography key objects
    '''
    # NOTE(review): 1024-bit RSA is below the modern 2048-bit minimum;
    # kept as-is here to avoid changing this demo's wire format.
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=1024,
        backend=default_backend()
    )
    public_key = private_key.public_key()
    return private_key, public_key
def encrypt_rsa(public_key, message):
    '''
    RSA-OAEP(SHA-256) encrypt `message` under `public_key`.
    :param public_key: cryptography RSA public key object
    :param bytes message: plaintext (must fit the OAEP size limit for the key)
    :rtype: bytes  (docstring previously said str; ciphertext is bytes)
    '''
    ciphertext = public_key.encrypt(
        message,
        padding.OAEP(
            mgf=padding.MGF1(algorithm=hashes.SHA256()),
            algorithm=hashes.SHA256(),
            label=None
        )
    )
    return ciphertext
def decrypt_rsa(private_key, ciphertext):
    '''
    Decrypt RSA-OAEP(SHA-256) ciphertext with the matching private key.
    :param private_key: RSAPrivateKey object or PEM bytes (auto-loaded)
    :param bytes ciphertext: data produced by encrypt_rsa
    :rtype: bytes plaintext
    :raises TypeError: when ciphertext is not bytes — was a bare Exception
        carrying a ('msg', type) tuple; TypeError is still an Exception
        subclass so existing broad handlers keep working
    '''
    if not isinstance(ciphertext, bytes):
        raise TypeError(f'Ciphertext should be of byte format, not {type(ciphertext)}')
    if not isinstance(private_key, rsa.RSAPrivateKey):
        # accept a PEM-serialized key and promote it to a key object
        private_key = load_private_pem(private_key)
    plaintext = private_key.decrypt(
        ciphertext,
        padding.OAEP(
            mgf=padding.MGF1(algorithm=hashes.SHA256()),
            algorithm=hashes.SHA256(),
            label=None
        )
    )
    return plaintext
def get_pem_format(private_key, public_key):
    '''
    Serialize a keypair to PEM.
    :ptype: private_key object, public_key object
    :rtype: private_key PEM bytes, public_key PEM bytes
    '''
    # unencrypted PKCS#1 ("TraditionalOpenSSL") private-key PEM
    private_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption()
    )
    # SubjectPublicKeyInfo public-key PEM
    public_pem = public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo
    )
    return private_pem, public_pem
def load_private_pem(private_key_pem):
    '''
    Deserialize an unencrypted PEM private key into a key object.
    '''
    return serialization.load_pem_private_key(
        private_key_pem,
        password=None,
        backend=default_backend()
    )
def encrypt(AES_key, public_key_pem, payload):
    '''
    Build one onion layer: aes_key_encrypt(payload) + rsa_encrypt(aes_key).
    :param bytes AES_key: Fernet key protecting this layer
    :param bytes public_key_pem: PEM public key of the node that may unwrap it
    :param bytes payload: inner layers + routing info
    :rtype: (encrypted_aes_key, encrypted_payload) byte strings
    '''
    public_key = serialization.load_pem_public_key(public_key_pem, backend=default_backend())
    encrypted_payload = encrypt_aes(AES_key, payload)
    encrypted_aes_key = encrypt_rsa(public_key, AES_key)
    return encrypted_aes_key, encrypted_payload
def decrypt():
    # NOTE(review): unimplemented stub kept for API symmetry with encrypt();
    # callers use decrypt_payload()/decrypt_rsa()/decrypt_aes() directly.
    return
def decrypt_payload(AES_key, payload):
    '''
    Decrypt one layer and classify the routing target embedded in it.
    rtype: (destination_url, '') at the exit node
    rtype: (relay_node_addr, remaining_encrypted_message) otherwise
    :raises Exception: when nothing resembling a target is found
    '''
    decrypted_payload = (decrypt_aes(AES_key, payload)).decode('UTF8')
    ip_addr_match = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', decrypted_payload)
    # fix: 'www.' had an unescaped dot, so any 'www' + one char (e.g. 'wwwx')
    # also matched the optional prefix
    url_match = re.search(r'^((https?|ftp|smtp):\/\/)?(www\.)?[a-z0-9]+\.[a-z]+(\/[a-zA-Z0-9#]+\/?)*$', decrypted_payload)
    localhost_match = re.search(r'localhost:\d{4}', decrypted_payload)
    destination = ''
    message = ''
    if url_match is not None:
        # full-string URL match => exit node; nothing left to relay
        destination = url_match.group()
        message = ''
    elif localhost_match is not None:
        destination = localhost_match.group()
        message = decrypted_payload.replace(destination,'')
    elif ip_addr_match is not None:
        destination = ip_addr_match.group()
        message = decrypted_payload.replace(destination,'')
    else:
        raise Exception('No match was found')
    return destination, message
--- FILE SEPARATOR ---
import struct, socket, sys
def recv_by_size(socket_conn):
    """Receive one length-prefixed message from socket_conn.

    Wire format: a 4-byte big-endian signed length, then that many payload
    bytes (see prepend_length). Returns the payload bytes, or b'' if the
    peer closes before a complete header arrives.

    Fixes two defects in the original loop: (1) after a short first read it
    re-tested only the latest recv() chunk (`len(sock_data) > 4`) rather than
    the accumulated header bytes, so the size was never parsed; (2) a peer
    closing the connection early made recv() return b'' forever, spinning in
    an infinite loop.
    """
    # read exactly 4 header bytes, tolerating short reads
    header = b''
    while len(header) < 4:
        chunk = socket_conn.recv(4 - len(header))
        if not chunk:
            return b''  # connection closed mid-header
        header += chunk
    size = struct.unpack('>i', header)[0]
    # read the payload in capped chunks until `size` bytes have arrived
    payload = b''
    while len(payload) < size:
        chunk = socket_conn.recv(min(size - len(payload), 524288))
        if not chunk:
            break  # peer closed early; return what we have
        payload += chunk
    return payload
def start_server(host, port):
    """Create a reusable TCP listening socket bound to (host, port), backlog 5."""
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind((host, port))
    listener.listen(5)
    return listener
def connect_server(forward_host, foward_port, connect_host, connect_port):
    """Open a TCP connection to (connect_host, connect_port) with the local end
    explicitly bound to (forward_host, foward_port).

    The explicit local bind lets each relay originate traffic from its
    well-known forwarding port. (`foward_port` keeps its original, misspelled
    name because the parameter name is part of the public signature.)
    """
    connection_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    connection_socket.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
    connection_socket.bind((forward_host, foward_port))
    # connect_port may arrive as a string parsed from 'host:port', hence int()
    connection_socket.connect((connect_host, int(connect_port)))
    return connection_socket
def prepend_length(message):
    """Prefix message with its length as a 4-byte big-endian signed int."""
    return struct.pack('>i', len(message)) + message
--- FILE SEPARATOR ---
#!/usr/bin/env python3
class bcolors:
    """ANSI escape sequences used to colorize terminal log output."""
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def header(message, newline=False):
    """Print message in bold; optionally follow with a blank-line separator."""
    print(f'{bcolors.BOLD}{message}{bcolors.ENDC}')
    if newline:
        print('\n')
def log(message, variable='', newline=False, freq=1):
    """Print `message variable` `freq` times; optional blank-line separator."""
    for _ in range(freq):
        print(message, variable)
    if newline:
        print('\n')
def error(message, variable='', newline=False, freq=1):
    """Print a warning-colored message `freq` times; optional separator."""
    for _ in range(freq):
        print(f'{bcolors.WARNING}{message}{bcolors.ENDC}', variable)
    if newline:
        print('\n')
| {
"imports": [
"/crypt.py",
"/network.py",
"/logger.py"
]
} |
013292/lightweight_tor | refs/heads/master | /directory.py | #!/usr/bin/env python3
import base64
import socket
import json
import crypt
# Generate an RSA keypair for each of the three demo relay nodes at import
# time, then publish them keyed by listen address.
keypair1 = crypt.gen_rsa_keypair()
keypair2 = crypt.gen_rsa_keypair()
keypair3 = crypt.gen_rsa_keypair()
# value format per node: [private_pem, public_pem]
RELAY_NODES = {
    'localhost:5001' : list(crypt.get_pem_format(keypair1[0], keypair1[1])),
    'localhost:5002' : list(crypt.get_pem_format(keypair2[0], keypair2[1])),
    'localhost:5003' : list(crypt.get_pem_format(keypair3[0], keypair3[1]))
}
# Re-encode each PEM as base64 text so the table can be JSON-serialized.
# NOTE(review): the directory serves *private* keys to anyone who connects —
# acceptable only for this demo.
RELAY_NODES['localhost:5001'] = [base64.b64encode(RELAY_NODES['localhost:5001'][0]).decode('ascii'), base64.b64encode(RELAY_NODES['localhost:5001'][1]).decode('ascii')]
RELAY_NODES['localhost:5002'] = [base64.b64encode(RELAY_NODES['localhost:5002'][0]).decode('ascii'), base64.b64encode(RELAY_NODES['localhost:5002'][1]).decode('ascii')]
RELAY_NODES['localhost:5003'] = [base64.b64encode(RELAY_NODES['localhost:5003'][0]).decode('ascii'), base64.b64encode(RELAY_NODES['localhost:5003'][1]).decode('ascii')]
def main():
    """Entry point: run the directory service forever."""
    listen()
def listen():
    """Serve the relay-node table: push the JSON payload to every connection."""
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    serversocket.bind(('localhost', 3001))
    serversocket.listen(5)
    while True:
        clientsocket, address = serversocket.accept()
        payload = json.dumps(RELAY_NODES)  # sockets carry bytes in python3, so serialize then encode below
        print ('Payload: ', payload)
        clientsocket.send(payload.encode())
        clientsocket.close()
    return  # NOTE(review): unreachable — the accept loop never exits
def get_private_key(): #delete later
    # Debug helper exposing the full node table (including private keys).
    return RELAY_NODES
if __name__ == '__main__':
main()
| import os
import base64
import re
import cryptography
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
import ast
def gen_aes_key():
'''
Generates an AES key using the Fernet recipe layer
:rtype: A URL-safe base64-encoded 32-byte key
'''
key = Fernet.generate_key()
return key
def encrypt_aes(key, plaintext_bytes):
'''
Encrypts message using AES
:param bytes key: AES Fernet key
:param bytes message: the message in bytes meant to be encrypted
:rtype: bytes
'''
token = Fernet(key).encrypt(plaintext_bytes)
return token
def decrypt_aes(key, token):
'''
:rtype: bytes
'''
plaintext_bytes = Fernet(key).decrypt(token)
return plaintext_bytes
def gen_rsa_keypair():
'''
:rtype: keypair objects
'''
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=1024,
backend=default_backend()
)
public_key = private_key.public_key()
return private_key, public_key
def encrypt_rsa(public_key, message):
'''
:rtype: str
'''
ciphertext = public_key.encrypt(
message,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None
)
)
return ciphertext
def decrypt_rsa(private_key, ciphertext):
'''
Decode ciphertext using RSA private key
:param: bytes/rsa_object private_key
:param: bytes/string ciphertext
'''
if not isinstance(ciphertext, bytes):
raise Exception('Ciphertext should be of byte format, not ' , type(ciphertext))
if not isinstance(private_key, rsa.RSAPrivateKey):
private_key = load_private_pem(private_key)
plaintext = private_key.decrypt(
ciphertext,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None
)
)
return plaintext
def get_pem_format(private_key, public_key):
'''
:ptype: private_key object, pubic_key object
:rtype: private_key str, pubic_key str
'''
private_pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
)
public_pem = public_key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
return private_pem, public_pem
def load_private_pem(private_key_pem):
'''
Converts private_key.pem format to private_key object
'''
private_key = serialization.load_pem_private_key(
private_key_pem,
password=None,
backend=default_backend()
)
return private_key
def encrypt(AES_key, public_key_pem, payload):
'''
aes_key_encrypt(payload) + rsa_encrypt(aes_key)
'''
public_key = serialization.load_pem_public_key(public_key_pem, backend=default_backend())
encrypted_payload = encrypt_aes(AES_key, payload)
encrypted_aes_key = encrypt_rsa(public_key, AES_key)
return encrypted_aes_key, encrypted_payload
def decrypt():
return
def decrypt_payload(AES_key, payload):
'''
decrypt payload, try to match for valid url, else next relay node
rtype: string destination_url, empty string
rtype: string relay_node_ip, next layer of encrypted message
'''
decrypted_payload = (decrypt_aes(AES_key, payload)).decode('UTF8')
ip_addr_match = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', decrypted_payload)
url_match = re.search(r'^((https?|ftp|smtp):\/\/)?(www.)?[a-z0-9]+\.[a-z]+(\/[a-zA-Z0-9#]+\/?)*$', decrypted_payload)
localhost_match = re.search(r'localhost:\d{4}', decrypted_payload)
destination = ''
message = ''
# print(decrypted_payload)
if url_match is not None:
destination = url_match.group()
message = ''
elif localhost_match is not None:
destination = localhost_match.group()
message = decrypted_payload.replace(destination,'')
elif ip_addr_match is not None:
destination = ip_addr_match.group()
message = decrypted_payload.replace(destination,'')
else:
raise Exception('No match was found')
return destination, message
| {
"imports": [
"/crypt.py"
]
} |
013292/lightweight_tor | refs/heads/master | /client_server.py | #!/usr/bin/env python3
import sys
import socket
import json
import crypt
import base64
import struct
import logger
from random import shuffle
from cryptography.fernet import Fernet
DIRECTORY_PORT = 3001
CLIENT_PORT = 4050
DIRECTORY_IP = 'localhost'
HASH_DELIMITER = b'###'
AES_KEY = crypt.gen_aes_key()
def main(message):
    """Fetch `message` (a URL) through the onion circuit, save the response.

    Steps: get the node table from the directory, build a random circuit,
    onion-encrypt the request, send it to the entry node, unwrap the layered
    response, and write the decoded body to ./response.html.

    Fixes: response.html was opened without `with` (handle leaked on any
    write error), and one copy-paste-duplicated log line is dropped.
    """
    logger.header('---- REQUEST RELAY NODES FROM DIRECTORY ----')
    relay_nodes = request_directory()
    logger.log('RELAY NODES: ', relay_nodes, True)
    logger.header('---- GENERATE CIRCUIT FOR ONION ROUTING ----')
    circuit = generate_circuit(relay_nodes)
    logger.log('CIRCUIT IS: ', circuit)
    circuit_copy = list(circuit)  # encrypt_payload consumes its circuit list
    entry_node = circuit[0][0]
    logger.log('ENTRY NODE IS: ', entry_node, True)
    logger.header('---- BEGIN ENCRYPTION PROCESS TO WRAP ONION ----')
    encrypted_message = encrypt_payload(message, circuit, relay_nodes)
    logger.header('---- END ENCRYPTION PROCESS TO WRAP ONION ----')
    logger.log('ENCRYPTED MESSAGE: ', encrypted_message, True)
    logger.header('---- SEND REQUEST TO ENTRY NODE ----')
    response = send_request(encrypted_message, entry_node)
    logger.log('...onion routing via relay nodes', 3, True)
    logger.log('...received response from destination')
    byteStream = decrypt_payload(response, circuit_copy)
    result = byteStream.decode()
    logger.header('---- DECODED RESPONSE FROM DESTINATION ----\n')
    logger.log('', result)
    # write result to html file; `with` guarantees the handle is closed
    logger.header('---- BEGIN WRITE RESULT TO HTML FILE ----')
    with open('response.html', 'w') as f:
        f.write(result)
    logger.header('---- END WRITE RESULT TO HTML FILE ----')
    logger.header('---- OPEN ./response.html TO SEE RESPONSE ----')
def request_directory():
    """
    Fetch the JSON table of relay nodes from the directory service.
    :rtype: dict mapping 'host:port' -> [b64 private_pem, b64 public_pem]
    """
    s = socket.socket()
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.connect((DIRECTORY_IP, DIRECTORY_PORT))
    payload = s.recv(8192).decode() # payload is received as bytes, decode to get str type
    s.close()
    relay_nodes = json.loads(payload)
    return relay_nodes
def generate_circuit(nodes):
    """
    Build a randomly ordered circuit: one (address, fresh AES key) per node.
    """
    hops = []
    for address in nodes.keys():
        hops.append((str(address), crypt.gen_aes_key()))
    shuffle(hops)
    return hops
def serialize_payload(aes_key, message):
    '''
    Join the wrapped AES key and message with the delimiter, then
    base64-encode the result for safe transmission.
    '''
    combined = aes_key + HASH_DELIMITER + message
    return base64.b64encode(combined)
def encrypt_payload(message, circuit, relay_nodes):
    '''
    Wrap the request in one encryption layer per circuit hop (the "onion"):
    rsa_encrypt(AES_key) + aes_encrypt(inner_payload + next_hop).

    Layers are applied exit-node-first (the circuit list is consumed as a
    stack), so the entry node's layer ends up outermost.
    NOTE: pops every element from `circuit`; callers keep a copy for the
    response-decryption pass.
    '''
    node_stack = circuit
    next = message # final plaintext will be the original user request
    payload = b''
    while len(node_stack) != 0:
        curr_node = node_stack.pop()
        curr_node_addr = curr_node[0]
        curr_aes_key_instance = curr_node[1]
        public_key = base64.b64decode(relay_nodes[curr_node_addr][1]) #decode public key here
        if (isinstance(payload, tuple)):
            # previous iteration produced (rsa(aes_key), aes(payload));
            # flatten it into one transmissible byte string first
            encrypted_aes_key, encrypted_payload = payload
            payload = serialize_payload(encrypted_aes_key, encrypted_payload)
        # encrypt payload for the current hop
        payload = crypt.encrypt(curr_aes_key_instance, public_key, (payload + next.encode()))
        next = curr_node_addr
    return serialize_payload(payload[0], payload[1])
def decrypt_payload(payload, circuit):
    '''
    Strip one AES layer per hop, in entry-to-exit order, to recover the
    response plaintext bytes.
    '''
    message = payload
    for i in range(len(circuit)):
        aes_key = circuit[i][1]
        # each relay base64-encoded its layer before sending it upstream
        decoded_message = base64.b64decode(message)
        message = crypt.decrypt_aes(aes_key, decoded_message)
    return message
def send_request(encrypted_message, entry_node):
    '''
    Send the fully wrapped onion to the entry node and collect the reply.

    Frames the request with a 4-byte big-endian length prefix, then reads
    until the relay closes the connection.

    Fix: removed a leftover debug print that dumped every received buffer
    chunk (potentially the whole response body) to stdout.
    '''
    host, port = entry_node.split(':')
    relay_socket = socket.socket()
    relay_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # bind the client's well-known local port before connecting
    relay_socket.bind(('localhost', CLIENT_PORT))
    relay_socket.connect((host, int(port)))
    packet_size = struct.pack('>i', len(encrypted_message))
    relay_socket.sendall(packet_size + encrypted_message)
    response = b""
    while True:
        incomingBuffer = relay_socket.recv(8192)
        if not incomingBuffer:
            break  # relay closed the connection: response complete
        response += incomingBuffer
    relay_socket.close()
    return response
if __name__ == '__main__':
if len(sys.argv) < 2:
raise Exception('No URL entered')
url = sys.argv[1]
main(url)
| import os
import base64
import re
import cryptography
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
import ast
def gen_aes_key():
'''
Generates an AES key using the Fernet recipe layer
:rtype: A URL-safe base64-encoded 32-byte key
'''
key = Fernet.generate_key()
return key
def encrypt_aes(key, plaintext_bytes):
'''
Encrypts message using AES
:param bytes key: AES Fernet key
:param bytes message: the message in bytes meant to be encrypted
:rtype: bytes
'''
token = Fernet(key).encrypt(plaintext_bytes)
return token
def decrypt_aes(key, token):
'''
:rtype: bytes
'''
plaintext_bytes = Fernet(key).decrypt(token)
return plaintext_bytes
def gen_rsa_keypair():
'''
:rtype: keypair objects
'''
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=1024,
backend=default_backend()
)
public_key = private_key.public_key()
return private_key, public_key
def encrypt_rsa(public_key, message):
'''
:rtype: str
'''
ciphertext = public_key.encrypt(
message,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None
)
)
return ciphertext
def decrypt_rsa(private_key, ciphertext):
'''
Decode ciphertext using RSA private key
:param: bytes/rsa_object private_key
:param: bytes/string ciphertext
'''
if not isinstance(ciphertext, bytes):
raise Exception('Ciphertext should be of byte format, not ' , type(ciphertext))
if not isinstance(private_key, rsa.RSAPrivateKey):
private_key = load_private_pem(private_key)
plaintext = private_key.decrypt(
ciphertext,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None
)
)
return plaintext
def get_pem_format(private_key, public_key):
'''
:ptype: private_key object, pubic_key object
:rtype: private_key str, pubic_key str
'''
private_pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
)
public_pem = public_key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
return private_pem, public_pem
def load_private_pem(private_key_pem):
'''
Converts private_key.pem format to private_key object
'''
private_key = serialization.load_pem_private_key(
private_key_pem,
password=None,
backend=default_backend()
)
return private_key
def encrypt(AES_key, public_key_pem, payload):
'''
aes_key_encrypt(payload) + rsa_encrypt(aes_key)
'''
public_key = serialization.load_pem_public_key(public_key_pem, backend=default_backend())
encrypted_payload = encrypt_aes(AES_key, payload)
encrypted_aes_key = encrypt_rsa(public_key, AES_key)
return encrypted_aes_key, encrypted_payload
def decrypt():
return
def decrypt_payload(AES_key, payload):
'''
decrypt payload, try to match for valid url, else next relay node
rtype: string destination_url, empty string
rtype: string relay_node_ip, next layer of encrypted message
'''
decrypted_payload = (decrypt_aes(AES_key, payload)).decode('UTF8')
ip_addr_match = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', decrypted_payload)
url_match = re.search(r'^((https?|ftp|smtp):\/\/)?(www.)?[a-z0-9]+\.[a-z]+(\/[a-zA-Z0-9#]+\/?)*$', decrypted_payload)
localhost_match = re.search(r'localhost:\d{4}', decrypted_payload)
destination = ''
message = ''
# print(decrypted_payload)
if url_match is not None:
destination = url_match.group()
message = ''
elif localhost_match is not None:
destination = localhost_match.group()
message = decrypted_payload.replace(destination,'')
elif ip_addr_match is not None:
destination = ip_addr_match.group()
message = decrypted_payload.replace(destination,'')
else:
raise Exception('No match was found')
return destination, message
--- FILE SEPARATOR ---
#!/usr/bin/env python3
class bcolors:
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def header(message, newline=False):
print(bcolors.BOLD + message + bcolors.ENDC)
if newline:
print('\n')
def log(message, variable='', newline=False, freq=1, ):
for i in range(freq):
print(message, variable)
if newline:
print('\n')
def error(message, variable='', newline=False, freq=1):
for i in range(freq):
print(bcolors.WARNING + message + bcolors.ENDC, variable)
if newline:
print('\n')
| {
"imports": [
"/crypt.py",
"/logger.py"
]
} |
0187773933/RaspiCameraMotionTrackerFrameConsumer | refs/heads/master | /frame_consumer.py | import os
import sys
import signal
import redis
import json
import time
import math
from pprint import pprint
import datetime
from pytz import timezone
import hashlib
import threading
# import pose
import pose_light
# import utils
from sanic import Sanic
from sanic.response import json as sanic_json
from sanic import response
from twilio.rest import Client
class FrameConsumer:
def __init__( self ):
    """Boot the consumer: load config (path in sys.argv[1]) and init all clients."""
    self.config = self.read_json( sys.argv[ 1 ] )
    self.setup_environment()
    self.setup_signal_handlers()
    self.setup_redis_connection()
    self.setup_twilio_client()
    self.setup_time_windows()
def on_shutdown( self , signal ):
    """Signal handler: log the received signal and terminate the process."""
    self.log( f"Frame Consumer Shutting Down === {str(signal)}" )
    sys.exit( 1 )
def get_common_time_string( self ):
now = datetime.datetime.now().astimezone( self.timezone )
milliseconds = round( now.microsecond / 1000 )
milliseconds = str( milliseconds ).zfill( 3 )
now_string = now.strftime( "%d%b%Y === %H:%M:%S" ).upper()
return f"{now_string}.{milliseconds}"
def log( self , message ):
    """Timestamp a message, append it to today's redis log list, and print it."""
    time_string_prefix = self.get_common_time_string()
    log_message = f"{time_string_prefix} === {message}"
    self.redis.rpush( self.log_key , log_message )
    print( log_message )
def read_json( self , file_path ):
    """Parse the JSON document at file_path and return the resulting object."""
    with open( file_path ) as handle:
        return json.load( handle )
def setup_environment( self ):
    """Derive the timezone, redis key names, and quiet TensorFlow logging."""
    self.timezone = timezone( self.config["misc"]["time_zone"] )
    self.most_recent_key = f'{self.config["redis"]["prefix"]}.MOTION_EVENTS.MOST_RECENT'
    now = datetime.datetime.now().astimezone( self.timezone )
    day = now.strftime( "%d" ).zfill( 2 )
    month = now.strftime( "%m" ).zfill( 2 )
    year = now.strftime( "%Y" )
    # one log list per calendar day
    self.log_key = f'{self.config["redis"]["prefix"]}.LOG.{year}.{month}.{day}'
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # suppress TF C++ info/warning spam
def setup_signal_handlers( self ):
signal.signal( signal.SIGABRT , self.on_shutdown )
signal.signal( signal.SIGFPE , self.on_shutdown )
signal.signal( signal.SIGILL , self.on_shutdown )
signal.signal( signal.SIGSEGV , self.on_shutdown )
signal.signal( signal.SIGTERM , self.on_shutdown )
signal.signal( signal.SIGINT , self.on_shutdown )
def setup_redis_connection( self ):
    """Connect to redis using the [redis] section of the config."""
    self.redis = None
    self.redis = redis.StrictRedis(
        host=self.config["redis"]["host"] ,
        port=self.config["redis"]["port"] ,
        db=self.config["redis"]["db"] ,
        password=self.config["redis"]["password"] ,
        decode_responses=True  # return str instead of bytes
    )
def setup_twilio_client( self ):
    """Create the Twilio REST client from the configured SID / auth token."""
    self.twilio_client = None
    self.twilio_client = Client( self.config["twilio"]["sid"] , self.config["twilio"]["auth_token"] )
def setup_time_windows( self ):
    """Index configured time windows by a content hash, seeding notification
    cooldown timestamps 180 s in the past so the first alert fires promptly."""
    self.time_windows = {}
    time_zone = timezone( self.config["misc"]["time_zone"] )
    now = datetime.datetime.now().astimezone( time_zone )
    now = now - datetime.timedelta( hours=0 , minutes=0 , seconds=180 )
    for index , time_window in enumerate( self.config["time_windows"] ):
        # stable id derived from the window's JSON content
        time_window["id"] = hashlib.sha256( json.dumps( time_window ).encode( 'utf-8' ) ).hexdigest()
        if "notifications" in time_window:
            if "sms" in time_window["notifications"]:
                time_window["notifications"]["sms"]["last_notified_time"] = {}
                time_window["notifications"]["sms"]["last_notified_time"]["date_time_object"] = now
            if "voice" in time_window["notifications"]:
                time_window["notifications"]["voice"]["last_notified_time"] = {}
                time_window["notifications"]["voice"]["last_notified_time"]["date_time_object"] = now
        self.time_windows[time_window["id"]] = time_window
        # redis_client.set( f"{config['redis']['prefix']}.TIME_WINDOWS.{time_window['id']}" , json.dumps( time_window ) )
def run_in_background( self , function_pointer , *args , **kwargs ):
t = threading.Thread( target=function_pointer , args=args , kwargs=kwargs , daemon=True )
t.start()
def get_now_time_difference( self , start_date_time_object ):
now = datetime.datetime.now().astimezone( self.timezone )
return math.floor( ( now - start_date_time_object ).total_seconds() )
# Server Stuff
async def route_home( self , request ):
    """GET / — simple liveness check."""
    return response.text( "You Found the Motion Alarm - Motion Frame Consumer!\n" )
async def route_process( self , request ):
    """POST /process — validate the JSON body and hand the frame to decide().

    Expects a 'frame_buffer_b64_string' key; responds with the decision.
    """
    try:
        if request.json == None:
            return response.json( { "result": "failed" , "message": "no json received in request object" } )
        if "frame_buffer_b64_string" not in request.json:
            return response.json( { "result": "failed" , "message": "no 'frame_buffer_b64_string' key in request json" } )
        # NOTE(review): self.decide() is not defined in this visible part of
        # the class — confirm it exists further down the file
        asleep_or_awake_decision = await self.decide( request.json )
        return response.json( { "result": "success" , "message": "successfully received and processed image" , "decision": asleep_or_awake_decision } )
    except Exception as e:
        self.log( e )
        return response.text( f"failed === {str(e)}" )
def init_server( self ):
    """Build the Sanic app and register routes plus static icon assets."""
    self.server = Sanic( name="Motion Alarm - Motion Frame Consumer Server" )
    self.server.add_route( self.route_home , "/" , methods=[ "GET" ] )
    self.server.static( "/favicon.ico" , os.path.abspath( "favicon.ico" ) )
    self.server.static( "/apple-touch-icon.png" , os.path.abspath( "apple-touch-icon.png" ) )
    self.server.static( "/apple-touch-icon-precomposed.png" , os.path.abspath( "apple-touch-icon.png" ) )
    self.server.add_route( self.route_process , "/process" , methods=[ "POST" ] )
def start_server( self ):
    """Initialize and run the HTTP server (blocks until shutdown)."""
    self.init_server()
    self.log( f"Frame Consumer ONLINE === http://{self.config['server']['host']}:{self.config['server']['port']}" )
    self.server.run( host=self.config['server']['host'] , port=self.config['server']['port'] )
def on_sms_finished( self , result ):
    """Callback invoked with the delivery-status dict of a sent SMS."""
    self.log( "SMS Notification Callback()" )
    self.log( result )
def on_voice_call_finished( self , result ):
    """Callback invoked with the outcome dict of a placed voice call."""
    self.log( "Voice Notification Callback()" )
    self.log( result )
def twilio_message( self , from_number , to_number , message ):
    """Send an SMS via Twilio and poll up to ~10 s for delivery confirmation.

    Calls on_sms_finished with the final status; completed_duration stays
    False when delivery was not confirmed within the polling window.
    """
    try:
        start_time = time.time()
        result = self.twilio_client.messages.create(
            to_number ,
            from_=from_number ,
            body=message ,
        )
        result = result.fetch()
        completed_duration = False
        for i in range( 10 ):
            time.sleep( 1 )
            result = result.fetch()  # refresh delivery status from the API
            if result.status == "delivered":
                completed_duration = int( time.time() - start_time )
                break
        self.on_sms_finished( { "result": result.status , "completed_duration": completed_duration } )
        return
    except Exception as e:
        print ( e )
def twilio_voice_call( self , from_number , to_number , server_callback_endpoint ):
    """Place a Twilio voice call and poll its status for up to ~30 s.

    Invokes self.on_voice_call_finished with answer/completion timings.
    Fix: the except path called `callback_function(...)`, a name that does
    not exist anywhere, so any Twilio failure raised a NameError inside the
    handler; the failure is now logged instead.
    """
    try:
        start_time = time.time()
        new_call = self.twilio_client.calls.create(
            from_=from_number ,
            to=to_number ,
            url=server_callback_endpoint ,
            method="POST"
        )
        answered = False
        completed = False
        answer_duration = None
        completed_duration = None
        for i in range( 30 ):
            time.sleep( 1 )
            new_call = new_call.update()  # refresh call state from the API
            status = new_call.status
            self.log( status )
            if status == "in-progress":
                answered = True
                answer_duration = int( time.time() - start_time )
            if status == "completed":
                completed = True
                completed_duration = int( time.time() - start_time )
                break
        self.on_voice_call_finished( { "answered": answered , "completed": completed , "answer_duration": answer_duration , "completed_duration": completed_duration } )
        return
    except Exception as e:
        print( e )
        self.log( "failed to make twilio call" )
def send_sms_notification( self , new_motion_event , key ):
    """Send the SMS alert for time window `key`, honoring its cooldown.

    Skips silently while the window's sms cooldown has not elapsed; otherwise
    stamps the window and fires the Twilio SMS on a background thread.
    """
    self.log( f"=== {key} === SMS Alert ===" )
    seconds_since_last_notification = self.get_now_time_difference( self.time_windows[key]["notifications"]["sms"]["last_notified_time"]["date_time_object"] )
    if seconds_since_last_notification < self.time_windows[key]["notifications"]["sms"]["cool_down"]:
        time_left = ( self.time_windows[key]["notifications"]["sms"]["cool_down"] - seconds_since_last_notification )
        self.log( f"Waiting [{time_left}] Seconds Until Cooldown is Over" )
        return
    else:
        over_time = ( seconds_since_last_notification - self.time_windows[key]["notifications"]["sms"]["cool_down"] )
        self.log( f"It's Been {seconds_since_last_notification} Seconds Since the Last Message , Which is {over_time} Seconds Past the Cooldown Time of {self.time_windows[key]['notifications']['sms']['cool_down']} Seconds" )
        # restart the cooldown clock before dispatching
        self.time_windows[key]["notifications"]["sms"]["last_notified_time"]["date_time_object"] = datetime.datetime.now().astimezone( self.timezone )
        # self.redis.set( f"{config['redis']['prefix']}.TIME_WINDOWS.{self.time_windows[key]['id']}" , json.dumps( self.time_windows[key] ) )
        self.log( "Sending SMS Notification" )
        self.run_in_background(
            self.twilio_message ,
            self.time_windows[key]["notifications"]["sms"]["from_number"] ,
            self.time_windows[key]["notifications"]["sms"]["to_number"] ,
            f'{self.time_windows[key]["notifications"]["sms"]["message_prefix"]} @@ {new_motion_event["date_time_string"]}' ,
        )
# Place a voice-call alert for time window `key`, rate-limited by the
# window's voice cool-down.  NOTE(review): the `now_motion_event` parameter
# is never used in the body — presumably kept for signature symmetry with
# send_sms_notification; confirm before removing.
def send_voice_notification( self , now_motion_event , key ):
self.log( f"=== {key} === Voice Alert ===" )
seconds_since_last_notification = self.get_now_time_difference( self.time_windows[key]["notifications"]["voice"]["last_notified_time"]["date_time_object"] )
if seconds_since_last_notification < self.time_windows[key]["notifications"]["voice"]["cool_down"]:
time_left = ( self.time_windows[key]["notifications"]["voice"]["cool_down"] - seconds_since_last_notification )
self.log( f"Waiting [{time_left}] Seconds Until Cooldown is Over" )
return
else:
over_time = ( seconds_since_last_notification - self.time_windows[key]["notifications"]["voice"]["cool_down"] )
self.log( f"It's Been {seconds_since_last_notification} Seconds Since the Last Message , Which is {over_time} Seconds Past the Cooldown Time of {self.time_windows[key]['notifications']['voice']['cool_down']} Seconds" )
# Stamp the call time before dispatching so overlapping events cannot double-fire.
self.time_windows[key]["notifications"]["voice"]["last_notified_time"]["date_time_object"] = datetime.datetime.now().astimezone( self.timezone )
# self.redis.set( f"{config['redis']['prefix']}.TIME_WINDOWS.{self.time_windows[key]['id']}" , json.dumps( self.time_windows[key] ) )
self.log( "Sending Voice Call Notification" )
# Place the Twilio call on a daemon thread so the caller is not blocked.
self.run_in_background(
self.twilio_voice_call ,
self.time_windows[key]["notifications"]["voice"]["from_number"] ,
self.time_windows[key]["notifications"]["voice"]["to_number"] ,
self.time_windows[key]["notifications"]["voice"]["callback_url"] ,
)
def send_notifications( self , new_motion_event , key ):
    """Dispatch every notification channel configured for this time window."""
    window = self.time_windows[key]
    if "notifications" not in window:
        self.log( "No Notification Info Provided" )
        return
    channels = window["notifications"]
    if "sms" in channels:
        self.send_sms_notification( new_motion_event , key )
    if "voice" in channels:
        self.send_voice_notification( new_motion_event , key )
def redis_get_most_recent( self ):
    """Load the cached list of recent motion events from Redis ([] when absent)."""
    raw = self.redis.get( self.most_recent_key )
    return [] if raw is None else json.loads( raw )
def parse_go_time_stamp( self , time_stamp ):
    """Parse a Go-producer time stamp of the form 'DDMonYYYY === HH:MM:SS.mmm'.

    Returns a dict with the parsed datetime (converted via self.timezone) plus
    its string components, or False when the stamp does not match the layout.

    BUG FIX: the original called strptime *before* the `len(items) < 2` guard,
    so malformed input raised ValueError instead of returning False as the
    guard intended.  Validation now happens first and strptime failures are
    caught.
    NOTE(review): strptime yields a naive datetime; astimezone() interprets it
    as local machine time before converting — confirm that is intended.
    """
    items = time_stamp.split( " === " )
    if len( items ) < 2:
        return False
    try:
        time_object = datetime.datetime.strptime( time_stamp , "%d%b%Y === %H:%M:%S.%f" ).astimezone( self.timezone )
    except ValueError:
        return False
    date = items[ 0 ]
    x_time = items[ 1 ]
    time_items = x_time.split( "." )
    milliseconds = time_items[ 1 ]
    clock_parts = time_items[ 0 ].split( ":" )
    hours = clock_parts[ 0 ]
    minutes = clock_parts[ 1 ]
    seconds = clock_parts[ 2 ]
    return {
        "date_time_object": time_object ,
        "date_time_string": f"{date} === {hours}:{minutes}:{seconds}.{milliseconds}" ,
        "date": date ,
        "hours": hours ,
        "minutes": minutes ,
        "seconds": seconds ,
        "milliseconds": milliseconds ,
    }
# Actual Logic
async def decide( self , json_data ):
"""Score one camera frame and decide whether the subject looks awake.

Pipeline: run the MoveNet pose model on the frame, append the event to the
Redis-backed rolling history, then for each configured time window compare
(a) the count of recent motion events and (b) the moving average of pose
confidence against that window's thresholds; notify when both are exceeded.
Returns the enriched motion-event dict.
"""
new_motion_event = {
"time_stamp": json_data["time_stamp"] ,
"frame_buffer_b64_string": json_data['frame_buffer_b64_string'] ,
"awake": False
}
# 1.) Run 'nets' on Image Buffer
print( "" )
self.log( f"Processing Frame --> SinglePoseLightningv3.tflite( {len( json_data['frame_buffer_b64_string'] )} )" )
# NOTE(review): process_opencv_frame returns False on failure, but the code
# below indexes into pose_scores unconditionally — confirm upstream
# guarantees a dict here.
new_motion_event["pose_scores"] = await pose_light.process_opencv_frame( json_data )
# 2.) Get 'Most Recent' Array of Motion Events
most_recent = self.redis_get_most_recent()
most_recent.append( new_motion_event )
# for index , item in enumerate( most_recent ):
# self.log( f"{index} === {item['time_stamp']} === {item['pose_scores']['average_score']}" )
# 3.) Calculate Time Differences Between 'Most Recent' Frame and Each 'Previous' Frame in the Saved List
new_motion_event_time_object = self.parse_go_time_stamp( json_data["time_stamp"] )
new_motion_event["date_time_string"] = new_motion_event_time_object["date_time_string"]
# time_objects = [ utils.parse_go_time_stamp( self.timezone , x['time_stamp'] ) for x in most_recent[0:-1] ]
time_objects = [ self.parse_go_time_stamp( x['time_stamp'] ) for x in most_recent ]
seconds_between_new_motion_event_and_previous_events = [ int( ( new_motion_event_time_object["date_time_object"] - x["date_time_object"] ).total_seconds() ) for x in time_objects ]
# 4.) Tally Total Motion Events in Each Configed Time Window
# AND Compute Moving Average of Average Pose Scores in Each Configed Time Window
# ONLY IF , Total Events Surpases Maximum , then check if the moving average pose score is greater than Minimum Defined Moving Average
# THEN , Send Notification
for index , key in enumerate( self.time_windows ):
motion_events = 0
# pose_sum = 0.0
# Average pose score over the last N events (N from window config).
pose_sums = [ float( x["pose_scores"]["average_score"] ) for x in most_recent[ ( -1 * self.time_windows[key]["pose"]["total_events_to_pull_from"] ): ] ]
pose_sum = sum( pose_sums )
# NOTE(review): this inner `index` shadows the outer loop variable
# (harmless today since the outer one is unused afterwards).
for index , time_difference in enumerate( seconds_between_new_motion_event_and_previous_events ):
if time_difference < self.time_windows[key]["seconds"]:
motion_events += 1
# pose_sum += float( most_recent[index]["pose_scores"]["average_score"] )
# pose_average = ( pose_sum / float( len( most_recent ) ) )
# NOTE(review): divides by the configured count even when fewer events
# exist yet, which under-weights the average during warm-up — confirm.
pose_average = ( pose_sum / float( self.time_windows[key]["pose"]["total_events_to_pull_from"] ) )
if motion_events > self.time_windows[key]['motion']['max_events']:
self.log( f"Total Motion Events in the Previous {self.time_windows[key]['seconds']} Seconds : {motion_events} === Is GREATER than the defined maximum of {self.time_windows[key]['motion']['max_events']} events" )
if pose_average >= self.time_windows[key]['pose']['minimum_moving_average']:
self.log( f"Moving Pose Score Average : {pose_average} is GREATER than defined Minimum Moving Average of {self.time_windows[key]['pose']['minimum_moving_average']}" )
new_motion_event["awake"] = True
self.send_notifications( new_motion_event , key )
else:
self.log( f"Moving Pose Score Average : {pose_average} is LESS than defined Minimum Moving Average of {self.time_windows[key]['pose']['minimum_moving_average']}" )
else:
self.log( f"Total Motion Events in the Previous {self.time_windows[key]['seconds']} Seconds : {motion_events} === Is LESS than the defined maximum of {self.time_windows[key]['motion']['max_events']} events" )
# 5.) Store Most Recent Array Back into DB
if len( most_recent ) > self.config["misc"]["most_recent_motion_events_total"]:
most_recent.pop( 0 )
self.redis.set( self.most_recent_key , json.dumps( most_recent ) )
return new_motion_event
def Start( self ):
self.start_server() | import tensorflow as tf
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.collections import LineCollection
import matplotlib.patches as patches
from pprint import pprint
# import utils
import base64
# https://blog.tensorflow.org/2021/05/next-generation-pose-detection-with-movenet-and-tensorflowjs.html
# https://tfhub.dev/google/lite-model/movenet/singlepose/lightning/3
# https://github.com/tensorflow/tfjs-models/tree/master/pose-detection/src/movenet
# https://github.com/tensorflow/hub/tree/master/examples/colab
# https://colab.research.google.com/github/tensorflow/hub/blob/master/examples/colab/movenet.ipynb
# https://www.tensorflow.org/hub/tutorials/movenet
# Initialize the TFLite interpreter
# Module-level singleton: loads MoveNet SinglePose Lightning v3 and allocates
# its tensors once at import time.  NOTE(review): the model path is relative,
# so this module must be imported with the model file in the working directory.
interpreter = tf.lite.Interpreter( model_path="single_pose_lightning_v3.tflite" )
interpreter.allocate_tensors()
# https://raw.githubusercontent.com/tensorflow/hub/master/examples/colab/movenet.ipynb
# Render keypoints/edges over an image via matplotlib and return the composited
# frame as a numpy array.
# NOTE(review): `_keypoints_and_edges_for_display` is not defined in this
# module (copied from the TF Hub MoveNet notebook) — it will raise NameError
# unless provided elsewhere.  `cv2` is also referenced below without an
# import.  `close_figure` is accepted but never used.
def draw_prediction_on_image( image, keypoints_with_scores, crop_region=None, close_figure=False , output_image_height = None ):
height, width, channel = image.shape
aspect_ratio = float(width) / height
fig, ax = plt.subplots(figsize=(12 * aspect_ratio, 12))
# To remove the huge white borders
fig.tight_layout(pad=0)
ax.margins(0)
ax.set_yticklabels([])
ax.set_xticklabels([])
plt.axis('off')
im = ax.imshow(image)
line_segments = LineCollection([], linewidths=(4), linestyle='solid')
ax.add_collection(line_segments)
# Turn off tick labels
scat = ax.scatter([], [], s=60, color='#FF1493', zorder=3)
(keypoint_locs, keypoint_edges, edge_colors) = _keypoints_and_edges_for_display( keypoints_with_scores , height , width )
# NOTE(review): segments/colors are set unconditionally here and again inside
# the guards below — the first pair of calls looks redundant.
line_segments.set_segments( keypoint_edges )
line_segments.set_color( edge_colors )
if keypoint_edges.shape[0]:
line_segments.set_segments(keypoint_edges)
line_segments.set_color(edge_colors)
if keypoint_locs.shape[0]:
scat.set_offsets(keypoint_locs)
if crop_region is not None:
# Draw the inference crop region as a blue rectangle.
xmin = max(crop_region['x_min'] * width, 0.0)
ymin = max(crop_region['y_min'] * height, 0.0)
rec_width = min(crop_region['x_max'], 0.99) * width - xmin
rec_height = min(crop_region['y_max'], 0.99) * height - ymin
rect = patches.Rectangle( ( xmin , ymin ) , rec_width , rec_height , linewidth=1 ,edgecolor='b' ,facecolor='none' )
ax.add_patch( rect )
# Rasterize the figure canvas back into an RGB numpy array.
fig.canvas.draw()
image_from_plot = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
image_from_plot = image_from_plot.reshape( fig.canvas.get_width_height()[::-1] + (3,) )
plt.close(fig)
if output_image_height is not None:
output_image_width = int(output_image_height / height * width)
image_from_plot = cv2.resize( image_from_plot, dsize=(output_image_width, output_image_height), interpolation=cv2.INTER_CUBIC)
return image_from_plot
def movenet( input_image ):
    """Run the module-level MoveNet TFLite interpreter on a preprocessed frame.

    Returns the raw keypoints-with-scores tensor produced by the model.
    """
    tensor = tf.cast( input_image , dtype=tf.float32 )
    in_meta = interpreter.get_input_details()
    out_meta = interpreter.get_output_details()
    interpreter.set_tensor( in_meta[0]['index'] , tensor.numpy() )
    interpreter.invoke()
    return interpreter.get_tensor( out_meta[0]['index'] )
# Load a JPEG from disk, run MoveNet on it, and print the raw keypoints.
# The overlay/plot steps are currently commented out, so the matplotlib
# figure created at the end is never drawn or saved.
def run_inference( image_path ):
image = tf.io.read_file( image_path )
image = tf.compat.v1.image.decode_jpeg( image )
input_image = tf.expand_dims( image , axis=0 )
# Resize/pad to the 192x192 input MoveNet Lightning expects.
input_image = tf.image.resize_with_pad( input_image , 192 , 192 )
keypoint_with_scores = movenet( input_image )
print( keypoint_with_scores )
display_image = tf.expand_dims( image , axis=0 )
display_image = tf.cast( tf.image.resize_with_pad( display_image , 500 , 250 ) , dtype=tf.int32 )
#output_overlay = draw_prediction_on_image( np.squeeze( display_image.numpy() , axis=0 ) , keypoint_with_scores )
plt.figure( figsize = ( 5 , 5 ) )
# plt.imshow( output_overlay )
# plt.savefig( 'test.jpg' )
# _ = plt.axis( 'off' )
def process_image_file( image_path ):
    """Decode a JPEG from disk, run MoveNet on it, and print the raw keypoints."""
    raw = tf.io.read_file( image_path )
    decoded = tf.compat.v1.image.decode_jpeg( raw )
    batched = tf.expand_dims( decoded , axis=0 )
    # Resize and pad the image to keep the aspect ratio and fit the expected size.
    batched = tf.image.resize_with_pad( batched , 192 , 192 )
    # TF Lite format expects tensor type of float32.
    model_input = tf.cast( batched , dtype=tf.float32 )
    in_meta = interpreter.get_input_details()
    out_meta = interpreter.get_output_details()
    interpreter.set_tensor( in_meta[0]['index'] , model_input.numpy() )
    interpreter.invoke()
    pprint( type( out_meta ) )
    # Output is a [1, 1, 17, 3] numpy array.
    keypoints_with_scores = interpreter.get_tensor( out_meta[0]['index'] )
    print( keypoints_with_scores )
def OnMotionFrame( frame ):
    """Debug callback: announce receipt of a motion frame and dump it."""
    print( "here in on motion frame callback" , frame , sep="\n" )
# process_image_file( '/home/morphs/DOCKER_IMAGES/RaspiMotionAlarm/PythonMotionFrameConsumer/dataset_original/awake/096.jpeg' )
# print( "\n" )
# process_image_file( '/home/morphs/DOCKER_IMAGES/RaspiMotionAlarm/PythonMotionFrameConsumer/dataset_original/asleep/019.jpeg' )
# run_inference( '/home/morphs/DOCKER_IMAGES/RaspiMotionAlarm/PythonMotionFrameConsumer/dataset_original/awake/324.jpeg' )
# https://storage.googleapis.com/movenet/MoveNet.SinglePose%20Model%20Card.pdf
async def process_opencv_frame( json_data ):
"""Decode a base64 frame, run MoveNet on it, and return per-keypoint scores.

Scores are serialized as strings — presumably so the result can be JSON
encoded downstream without float handling; TODO confirm.  On any failure
the exception is printed and the function falls through to its failure
return below.
"""
try:
# print( f"\nProcessing Frame --> SinglePoseLightningv3.tflite( {len( json_data['frame_buffer_b64_string'] )} )" )
image_data = base64.b64decode( json_data['frame_buffer_b64_string'] )
image = tf.image.decode_image( image_data , channels=3 )
image = tf.expand_dims( image , axis=0 )
# Resize/pad to the 192x192 input MoveNet Lightning expects.
image = tf.image.resize_with_pad( image , 192 , 192 )
input_image = tf.cast( image , dtype=tf.float32 )
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
interpreter.set_tensor( input_details[0]['index'] , input_image.numpy() )
interpreter.invoke()
keypoints_with_scores = interpreter.get_tensor( output_details[0]['index'] )
# Collapse the [1, 1, 17, 3] output to the 17 (y, x, score) rows.
keypoints_with_scores = keypoints_with_scores[0][0]
scores = [ x[2] for x in keypoints_with_scores ]
average_score = np.mean( scores )
# print( f"Average Score = {average_score}" )
return {
"average_score": str( average_score ) ,
"time_stamp": json_data["time_stamp"] ,
"nose": {
"name": "Nose" ,
"score": str( scores[0] ) ,
} ,
"left_eye": {
"name": "Left Eye" ,
"score": str( scores[1] ) ,
} ,
"right_eye": {
"name": "Right Eye" ,
"score": str( scores[2] ) ,
} ,
"left_ear": {
"name": "Left Ear" ,
"score": str( scores[3] ) ,
} ,
"right_ear": {
"name": "Right Ear" ,
"score": str( scores[4] ) ,
} ,
"left_shoulder": {
"name": "Left Shoulder" ,
"score": str( scores[5] ) ,
} ,
"right_shoulder": {
"name": "Right Shoulder" ,
"score": str( scores[6] ) ,
} ,
"left_elbow": {
"name": "Left Elbow" ,
"score": str( scores[7] ) ,
} ,
"right_elbow": {
"name": "Right Elbow" ,
"score": str( scores[8] ) ,
} ,
"left_wrist": {
"name": "Left Wrist" ,
"score": str( scores[9] ) ,
} ,
"right_wrist": {
"name": "Right Wrist" ,
"score": str( scores[10] ) ,
} ,
"left_hip": {
"name": "Left Hip" ,
"score": str( scores[11] ) ,
} ,
"right_hip": {
"name": "Right Hip" ,
"score": str( scores[12] ) ,
} ,
"left_knee": {
"name": "Left Knee" ,
"score": str( scores[13] ) ,
} ,
"right_knee": {
"name": "Right Knee" ,
"score": str( scores[14] ) ,
} ,
"left_ankle": {
"name": "Left Ankle" ,
"score": str( scores[15] ) ,
} ,
"right_ankle": {
"name": "Right Ankle" ,
"score": str( scores[16] ) ,
} ,
}
except Exception as e:
print( e )
return False | {
"imports": [
"/pose_light.py"
]
} |
0187773933/RaspiCameraMotionTrackerFrameConsumer | refs/heads/master | /main.py | import frame_consumer
if __name__ == '__main__':
consumer = frame_consumer.FrameConsumer()
consumer.Start() | import os
import sys
import signal
import redis
import json
import time
import math
from pprint import pprint
import datetime
from pytz import timezone
import hashlib
import threading
# import pose
import pose_light
# import utils
from sanic import Sanic
from sanic.response import json as sanic_json
from sanic import response
from twilio.rest import Client
class FrameConsumer:
def __init__( self ):
"""Wire up config, environment, signals, Redis, Twilio, and alert windows.

Expects the path of a JSON config file as the first CLI argument.
"""
self.config = self.read_json( sys.argv[ 1 ] )
self.setup_environment()
self.setup_signal_handlers()
self.setup_redis_connection()
self.setup_twilio_client()
self.setup_time_windows()
def on_shutdown( self , signal , frame = None ):
    """Signal handler: log which signal arrived, then exit(1).

    BUG FIX: Python invokes signal handlers as handler(signum, frame); the
    original signature had no `frame` parameter, so any trapped signal raised
    TypeError instead of shutting down cleanly.  `frame` defaults to None so
    existing direct calls with a single argument keep working.
    """
    self.log( f"Frame Consumer Shutting Down === {str(signal)}" )
    sys.exit( 1 )
def get_common_time_string( self ):
    """Return now in the project-wide 'DDMONYYYY === HH:MM:SS.mmm' format.

    BUG FIX: the original used round(microsecond / 1000), which yields 1000
    for microseconds >= 999500 and produced a malformed 4-digit '.1000'
    suffix.  Truncating with // keeps the milliseconds field at exactly
    three digits (and round-trippable through parse_go_time_stamp).
    """
    now = datetime.datetime.now().astimezone( self.timezone )
    milliseconds = str( now.microsecond // 1000 ).zfill( 3 )
    now_string = now.strftime( "%d%b%Y === %H:%M:%S" ).upper()
    return f"{now_string}.{milliseconds}"
def log( self , message ):
    """Prefix a message with the common time stamp, push it to the daily
    Redis log list, and echo it to stdout."""
    entry = f"{self.get_common_time_string()} === {message}"
    self.redis.rpush( self.log_key , entry )
    print( entry )
def read_json( self , file_path ):
    """Load and return the JSON document stored at file_path."""
    with open( file_path ) as handle:
        return json.load( handle )
def setup_environment( self ):
"""Derive the timezone, the Redis key names, and quiet TensorFlow logging."""
self.timezone = timezone( self.config["misc"]["time_zone"] )
# Key holding the rolling window of recent motion events.
self.most_recent_key = f'{self.config["redis"]["prefix"]}.MOTION_EVENTS.MOST_RECENT'
now = datetime.datetime.now().astimezone( self.timezone )
day = now.strftime( "%d" ).zfill( 2 )
month = now.strftime( "%m" ).zfill( 2 )
year = now.strftime( "%Y" )
# One log list per calendar day.
self.log_key = f'{self.config["redis"]["prefix"]}.LOG.{year}.{month}.{day}'
# Silence TensorFlow C++ logging ('3' = errors only).
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def setup_signal_handlers( self ):
    """Route fatal and termination signals to on_shutdown for a logged exit."""
    for sig in ( signal.SIGABRT , signal.SIGFPE , signal.SIGILL ,
                 signal.SIGSEGV , signal.SIGTERM , signal.SIGINT ):
        signal.signal( sig , self.on_shutdown )
def setup_redis_connection( self ):
"""Open the StrictRedis connection described by the config's `redis` section."""
self.redis = None
self.redis = redis.StrictRedis(
host=self.config["redis"]["host"] ,
port=self.config["redis"]["port"] ,
db=self.config["redis"]["db"] ,
password=self.config["redis"]["password"] ,
# Return str instead of bytes from every read.
decode_responses=True
)
def setup_twilio_client( self ):
"""Create the Twilio REST client from the configured SID and auth token."""
self.twilio_client = None
self.twilio_client = Client( self.config["twilio"]["sid"] , self.config["twilio"]["auth_token"] )
def setup_time_windows( self ):
"""Index the configured alert time windows by a content hash of their config.

Each channel's `last_notified_time` is back-dated by 180 seconds —
presumably so the first real event is not suppressed by a fresh
cool-down; TODO confirm against the configured cool_down values.
"""
self.time_windows = {}
time_zone = timezone( self.config["misc"]["time_zone"] )
now = datetime.datetime.now().astimezone( time_zone )
now = now - datetime.timedelta( hours=0 , minutes=0 , seconds=180 )
for index , time_window in enumerate( self.config["time_windows"] ):
# Stable id derived from the window's JSON config (hashed BEFORE the
# last_notified_time fields are injected below).
time_window["id"] = hashlib.sha256( json.dumps( time_window ).encode( 'utf-8' ) ).hexdigest()
if "notifications" in time_window:
if "sms" in time_window["notifications"]:
time_window["notifications"]["sms"]["last_notified_time"] = {}
time_window["notifications"]["sms"]["last_notified_time"]["date_time_object"] = now
if "voice" in time_window["notifications"]:
time_window["notifications"]["voice"]["last_notified_time"] = {}
time_window["notifications"]["voice"]["last_notified_time"]["date_time_object"] = now
self.time_windows[time_window["id"]] = time_window
# redis_client.set( f"{config['redis']['prefix']}.TIME_WINDOWS.{time_window['id']}" , json.dumps( time_window ) )
def run_in_background( self , function_pointer , *args , **kwargs ):
    """Invoke function_pointer(*args, **kwargs) on a daemon thread
    (fire-and-forget; no handle is returned)."""
    worker = threading.Thread(
        target=function_pointer , args=args , kwargs=kwargs , daemon=True )
    worker.start()
def get_now_time_difference( self , start_date_time_object ):
    """Return the whole seconds (floored) elapsed since start_date_time_object."""
    elapsed = datetime.datetime.now().astimezone( self.timezone ) - start_date_time_object
    return math.floor( elapsed.total_seconds() )
# Server Stuff
async def route_home( self , request ):
"""Health-check endpoint: plain-text banner served at GET /."""
return response.text( "You Found the Motion Alarm - Motion Frame Consumer!\n" )
async def route_process( self , request ):
    """POST /process: validate the JSON payload and run the decision pipeline.

    Expects a JSON body containing 'frame_buffer_b64_string'; replies with a
    JSON status envelope, or a plain-text error on unexpected failure.
    """
    try:
        # Idiom fix: identity comparison against None, not `==`.
        if request.json is None:
            return response.json( { "result": "failed" , "message": "no json received in request object" } )
        if "frame_buffer_b64_string" not in request.json:
            return response.json( { "result": "failed" , "message": "no 'frame_buffer_b64_string' key in request json" } )
        asleep_or_awake_decision = await self.decide( request.json )
        return response.json( { "result": "success" , "message": "successfully received and processed image" , "decision": asleep_or_awake_decision } )
    except Exception as e:
        # self.log stringifies the exception via its f-string.
        self.log( e )
        return response.text( f"failed === {str(e)}" )
def init_server( self ):
"""Create the Sanic app and register the HTTP routes and icon assets."""
self.server = Sanic( name="Motion Alarm - Motion Frame Consumer Server" )
self.server.add_route( self.route_home , "/" , methods=[ "GET" ] )
self.server.static( "/favicon.ico" , os.path.abspath( "favicon.ico" ) )
self.server.static( "/apple-touch-icon.png" , os.path.abspath( "apple-touch-icon.png" ) )
self.server.static( "/apple-touch-icon-precomposed.png" , os.path.abspath( "apple-touch-icon.png" ) )
self.server.add_route( self.route_process , "/process" , methods=[ "POST" ] )
def start_server( self ):
"""Build the routes and run the Sanic server (blocking) on the configured host/port."""
self.init_server()
self.log( f"Frame Consumer ONLINE === http://{self.config['server']['host']}:{self.config['server']['port']}" )
self.server.run( host=self.config['server']['host'] , port=self.config['server']['port'] )
def on_sms_finished( self , result ):
    """Callback invoked once the background SMS send has finished."""
    for item in ( "SMS Notification Callback()" , result ):
        self.log( item )
def on_voice_call_finished( self , result ):
    """Callback invoked once the background voice call has finished."""
    banner = "Voice Notification Callback()"
    self.log( banner )
    self.log( result )
def twilio_message( self , from_number , to_number , message ):
"""Send an SMS via Twilio and poll delivery status for up to 10 seconds.

Reports {result, completed_duration} through on_sms_finished().
NOTE(review): the broad except only prints, so failures never reach
on_sms_finished — confirm this best-effort behavior is intended.
"""
try:
start_time = time.time()
result = self.twilio_client.messages.create(
to_number ,
from_=from_number ,
body=message ,
)
result = result.fetch()
# Stays False unless delivery is confirmed in the poll loop below.
completed_duration = False
for i in range( 10 ):
time.sleep( 1 )
# Re-fetch to observe server-side status transitions.
result = result.fetch()
if result.status == "delivered":
completed_duration = int( time.time() - start_time )
break
self.on_sms_finished( { "result": result.status , "completed_duration": completed_duration } )
return
except Exception as e:
print ( e )
def twilio_voice_call( self , from_number , to_number , server_callback_endpoint ):
    """Place a Twilio voice call and poll its status for up to 30 seconds.

    Reports {answered, completed, answer_duration, completed_duration}
    through on_voice_call_finished().
    """
    try:
        start_time = time.time()
        new_call = self.twilio_client.calls.create(
            from_=from_number ,
            to=to_number ,
            url=server_callback_endpoint ,
            method="POST"
        )
        answered = False
        completed = False
        answer_duration = None
        completed_duration = None
        # Poll once per second; Twilio call objects must be re-fetched
        # (update()) to observe status transitions.
        for i in range( 30 ):
            time.sleep( 1 )
            new_call = new_call.update()
            status = new_call.status
            self.log( status )
            if status == "in-progress":
                answered = True
                answer_duration = int( time.time() - start_time )
            if status == "completed":
                completed = True
                completed_duration = int( time.time() - start_time )
                break
        self.on_voice_call_finished( { "answered": answered , "completed": completed , "answer_duration": answer_duration , "completed_duration": completed_duration } )
        return
    except Exception as e:
        print( e )
        # BUG FIX: the original called an undefined `callback_function` here,
        # which raised NameError and masked the real failure; log it instead.
        self.log( f"failed to make twilio call === {str(e)}" )
def send_sms_notification( self , new_motion_event , key ):
    """Send the SMS alert for this time window unless its cool-down is active."""
    self.log( f"=== {key} === SMS Alert ===" )
    sms = self.time_windows[key]["notifications"]["sms"]
    elapsed = self.get_now_time_difference( sms["last_notified_time"]["date_time_object"] )
    cool_down = sms["cool_down"]
    if elapsed < cool_down:
        self.log( f"Waiting [{cool_down - elapsed}] Seconds Until Cooldown is Over" )
        return
    over_time = ( elapsed - cool_down )
    self.log( f"It's Been {elapsed} Seconds Since the Last Message , Which is {over_time} Seconds Past the Cooldown Time of {cool_down} Seconds" )
    # Stamp the send time first so overlapping events cannot double-fire.
    sms["last_notified_time"]["date_time_object"] = datetime.datetime.now().astimezone( self.timezone )
    self.log( "Sending SMS Notification" )
    self.run_in_background(
        self.twilio_message ,
        sms["from_number"] ,
        sms["to_number"] ,
        f'{sms["message_prefix"]} @@ {new_motion_event["date_time_string"]}' ,
    )
# Place a voice-call alert for time window `key`, rate-limited by the
# window's voice cool-down.  NOTE(review): the `now_motion_event` parameter
# is never used in the body — presumably kept for signature symmetry with
# send_sms_notification; confirm before removing.
def send_voice_notification( self , now_motion_event , key ):
self.log( f"=== {key} === Voice Alert ===" )
seconds_since_last_notification = self.get_now_time_difference( self.time_windows[key]["notifications"]["voice"]["last_notified_time"]["date_time_object"] )
if seconds_since_last_notification < self.time_windows[key]["notifications"]["voice"]["cool_down"]:
time_left = ( self.time_windows[key]["notifications"]["voice"]["cool_down"] - seconds_since_last_notification )
self.log( f"Waiting [{time_left}] Seconds Until Cooldown is Over" )
return
else:
over_time = ( seconds_since_last_notification - self.time_windows[key]["notifications"]["voice"]["cool_down"] )
self.log( f"It's Been {seconds_since_last_notification} Seconds Since the Last Message , Which is {over_time} Seconds Past the Cooldown Time of {self.time_windows[key]['notifications']['voice']['cool_down']} Seconds" )
# Stamp the call time before dispatching so overlapping events cannot double-fire.
self.time_windows[key]["notifications"]["voice"]["last_notified_time"]["date_time_object"] = datetime.datetime.now().astimezone( self.timezone )
# self.redis.set( f"{config['redis']['prefix']}.TIME_WINDOWS.{self.time_windows[key]['id']}" , json.dumps( self.time_windows[key] ) )
self.log( "Sending Voice Call Notification" )
# Place the Twilio call on a daemon thread so the caller is not blocked.
self.run_in_background(
self.twilio_voice_call ,
self.time_windows[key]["notifications"]["voice"]["from_number"] ,
self.time_windows[key]["notifications"]["voice"]["to_number"] ,
self.time_windows[key]["notifications"]["voice"]["callback_url"] ,
)
def send_notifications( self , new_motion_event , key ):
    """Fan the motion event out to each channel configured for window `key`."""
    window = self.time_windows[key]
    if "notifications" not in window:
        self.log( "No Notification Info Provided" )
        return
    if "sms" in window["notifications"]:
        self.send_sms_notification( new_motion_event , key )
    if "voice" in window["notifications"]:
        self.send_voice_notification( new_motion_event , key )
def redis_get_most_recent( self ):
    """Return the cached list of recent motion events from Redis.

    Returns [] when the key is absent; otherwise the JSON-decoded list.
    """
    raw = self.redis.get( self.most_recent_key )
    # Idiom fix: identity comparison for None, not `==`.
    if raw is None:
        return []
    return json.loads( raw )
def parse_go_time_stamp( self , time_stamp ):
    """Parse a Go-producer time stamp of the form 'DDMonYYYY === HH:MM:SS.mmm'.

    Returns a dict with the parsed datetime (converted via self.timezone) plus
    its string components, or False when the stamp does not match the layout.

    BUG FIX: the original called strptime *before* the `len(items) < 2` guard,
    so malformed input raised ValueError instead of returning False as the
    guard intended.  Validation now happens first and strptime failures are
    caught.
    NOTE(review): strptime yields a naive datetime; astimezone() interprets it
    as local machine time before converting — confirm that is intended.
    """
    items = time_stamp.split( " === " )
    if len( items ) < 2:
        return False
    try:
        time_object = datetime.datetime.strptime( time_stamp , "%d%b%Y === %H:%M:%S.%f" ).astimezone( self.timezone )
    except ValueError:
        return False
    date = items[ 0 ]
    x_time = items[ 1 ]
    time_items = x_time.split( "." )
    milliseconds = time_items[ 1 ]
    clock_parts = time_items[ 0 ].split( ":" )
    hours = clock_parts[ 0 ]
    minutes = clock_parts[ 1 ]
    seconds = clock_parts[ 2 ]
    return {
        "date_time_object": time_object ,
        "date_time_string": f"{date} === {hours}:{minutes}:{seconds}.{milliseconds}" ,
        "date": date ,
        "hours": hours ,
        "minutes": minutes ,
        "seconds": seconds ,
        "milliseconds": milliseconds ,
    }
# Actual Logic
async def decide( self , json_data ):
"""Score one camera frame and decide whether the subject looks awake.

Pipeline: run the MoveNet pose model on the frame, append the event to the
Redis-backed rolling history, then for each configured time window compare
(a) the count of recent motion events and (b) the moving average of pose
confidence against that window's thresholds; notify when both are exceeded.
Returns the enriched motion-event dict.
"""
new_motion_event = {
"time_stamp": json_data["time_stamp"] ,
"frame_buffer_b64_string": json_data['frame_buffer_b64_string'] ,
"awake": False
}
# 1.) Run 'nets' on Image Buffer
print( "" )
self.log( f"Processing Frame --> SinglePoseLightningv3.tflite( {len( json_data['frame_buffer_b64_string'] )} )" )
# NOTE(review): process_opencv_frame returns False on failure, but the code
# below indexes into pose_scores unconditionally — confirm upstream
# guarantees a dict here.
new_motion_event["pose_scores"] = await pose_light.process_opencv_frame( json_data )
# 2.) Get 'Most Recent' Array of Motion Events
most_recent = self.redis_get_most_recent()
most_recent.append( new_motion_event )
# for index , item in enumerate( most_recent ):
# self.log( f"{index} === {item['time_stamp']} === {item['pose_scores']['average_score']}" )
# 3.) Calculate Time Differences Between 'Most Recent' Frame and Each 'Previous' Frame in the Saved List
new_motion_event_time_object = self.parse_go_time_stamp( json_data["time_stamp"] )
new_motion_event["date_time_string"] = new_motion_event_time_object["date_time_string"]
# time_objects = [ utils.parse_go_time_stamp( self.timezone , x['time_stamp'] ) for x in most_recent[0:-1] ]
time_objects = [ self.parse_go_time_stamp( x['time_stamp'] ) for x in most_recent ]
seconds_between_new_motion_event_and_previous_events = [ int( ( new_motion_event_time_object["date_time_object"] - x["date_time_object"] ).total_seconds() ) for x in time_objects ]
# 4.) Tally Total Motion Events in Each Configed Time Window
# AND Compute Moving Average of Average Pose Scores in Each Configed Time Window
# ONLY IF , Total Events Surpases Maximum , then check if the moving average pose score is greater than Minimum Defined Moving Average
# THEN , Send Notification
for index , key in enumerate( self.time_windows ):
motion_events = 0
# pose_sum = 0.0
# Average pose confidence over the last N events (N from window config).
pose_sums = [ float( x["pose_scores"]["average_score"] ) for x in most_recent[ ( -1 * self.time_windows[key]["pose"]["total_events_to_pull_from"] ): ] ]
pose_sum = sum( pose_sums )
# NOTE(review): this inner `index` shadows the outer loop variable
# (harmless today since the outer one is unused afterwards).
for index , time_difference in enumerate( seconds_between_new_motion_event_and_previous_events ):
if time_difference < self.time_windows[key]["seconds"]:
motion_events += 1
# pose_sum += float( most_recent[index]["pose_scores"]["average_score"] )
# pose_average = ( pose_sum / float( len( most_recent ) ) )
# NOTE(review): divides by the configured count even when fewer events
# exist yet, which under-weights the average during warm-up — confirm.
pose_average = ( pose_sum / float( self.time_windows[key]["pose"]["total_events_to_pull_from"] ) )
if motion_events > self.time_windows[key]['motion']['max_events']:
self.log( f"Total Motion Events in the Previous {self.time_windows[key]['seconds']} Seconds : {motion_events} === Is GREATER than the defined maximum of {self.time_windows[key]['motion']['max_events']} events" )
if pose_average >= self.time_windows[key]['pose']['minimum_moving_average']:
self.log( f"Moving Pose Score Average : {pose_average} is GREATER than defined Minimum Moving Average of {self.time_windows[key]['pose']['minimum_moving_average']}" )
new_motion_event["awake"] = True
self.send_notifications( new_motion_event , key )
else:
self.log( f"Moving Pose Score Average : {pose_average} is LESS than defined Minimum Moving Average of {self.time_windows[key]['pose']['minimum_moving_average']}" )
else:
self.log( f"Total Motion Events in the Previous {self.time_windows[key]['seconds']} Seconds : {motion_events} === Is LESS than the defined maximum of {self.time_windows[key]['motion']['max_events']} events" )
# 5.) Store Most Recent Array Back into DB
if len( most_recent ) > self.config["misc"]["most_recent_motion_events_total"]:
most_recent.pop( 0 )
self.redis.set( self.most_recent_key , json.dumps( most_recent ) )
return new_motion_event
def Start( self ):
self.start_server() | {
"imports": [
"/frame_consumer.py"
]
} |
01walid/PackageWay | refs/heads/master | /backend/backend/helps/serializers.py | from rest_framework import serializers
from .models import Call
class CallSerializer(serializers.ModelSerializer):
class Meta:
model = Call
fields = "__all__" | from django.db import models
from backend.general.models import BaseModel
# Create your models here.
class Call(BaseModel):
"""A help-call record with a short title and a free-form description."""
# Short title of the call.
title = models.CharField(max_length=255)
# Longer free-form description of the request.
description = models.TextField()
def __str__(self):
"""Return the call's title as its display string."""
return self.title
| {
"imports": [
"/backend/backend/helps/models.py"
]
} |
01walid/PackageWay | refs/heads/master | /backend/backend/helps/views.py | from django.shortcuts import render
from .models import Call
from rest_framework.views import APIView
from .serializers import CallSerializer
# Create your views here.
class CallView(APIView):
    """List existing Call objects (GET) and create new ones (POST)."""

    @staticmethod
    def get(request):
        """
        List calls
        """
        # BUG FIX: `Response` was used without being imported anywhere in this
        # module, so every request raised NameError; import it locally.
        from rest_framework.response import Response
        calls = Call.objects.all().order_by('id')
        return Response(CallSerializer(calls, many=True).data)

    @staticmethod
    def post(request):
        """
        Create call
        """
        # BUG FIX: `Response` and `status` were also missing here.
        from rest_framework import status
        from rest_framework.response import Response
        serializer = CallSerializer(data=request.data)
        if serializer.is_valid():
            call = serializer.save()
            return Response(CallSerializer(call).data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# call/{id}
class CallDetail(APIView):
@staticmethod
def get(request, id):
"""
View individual user
"""
call = get_object_or_404(Call, pk=id)
return Response(CallSerializer(call).data) | from django.db import models
from backend.general.models import BaseModel
# Create your models here.
class Call(BaseModel):
title = models.CharField(max_length=255)
description = models.TextField()
def __str__(self):
return self.title
--- FILE SEPARATOR ---
from rest_framework import serializers
from .models import Call
class CallSerializer(serializers.ModelSerializer):
class Meta:
model = Call
fields = "__all__" | {
"imports": [
"/backend/backend/helps/models.py",
"/backend/backend/helps/serializers.py"
]
} |
02KellyV/AirBnB_clone | refs/heads/master | /models/city.py | #!/usr/bin/python
""" holds class City"""
from models.base_model import BaseModel
class City(BaseModel):
"""Representation of city"""
# Id of the State this city belongs to (empty until assigned).
state_id = ""
# Human-readable city name.
name = ""
| #!/usr/bin/python3
""" Base Model Module """
from uuid import uuid4
from datetime import datetime
import models
class BaseModel:
    """Base model: common id, timestamps, and serialization for all models."""

    def __init__(self, *args, **kwargs):
        """Build either from a serialized dict (kwargs) or as a new record."""
        if kwargs:
            # Rehydrate from a to_dict()-style mapping; timestamps arrive as
            # ISO strings and are parsed back into datetime objects.
            for name, value in kwargs.items():
                if name == "__class__":
                    continue
                if name in ("created_at", "updated_at"):
                    value = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")
                setattr(self, name, value)
        else:
            # Fresh instance: mint an id, stamp both times, register in storage.
            self.id = str(uuid4())
            self.created_at = datetime.now()
            self.updated_at = self.created_at
            models.storage.new(self)

    def __str__(self):
        """Return '[ClassName] (id) {attribute dict}'."""
        return "[{}] ({}) {}".format(type(self).__name__, self.id, self.__dict__)

    def save(self):
        """Refresh updated_at and persist all objects through storage."""
        self.updated_at = datetime.now()
        models.storage.save()

    def to_dict(self):
        """Return a JSON-serializable dict copy of this instance."""
        snapshot = dict(self.__dict__)
        snapshot["__class__"] = type(self).__name__
        snapshot["created_at"] = self.created_at.isoformat()
        snapshot["updated_at"] = self.updated_at.isoformat()
        return snapshot
| {
"imports": [
"/models/base_model.py"
]
} |
02KellyV/AirBnB_clone | refs/heads/master | /console.py | #!/usr/bin/python3
""" console """
import cmd
import models
from models.base_model import BaseModel
from models.user import User
from models.city import City
from models.place import Place
from models.state import State
from models.review import Review
from models.amenity import Amenity
import shlex
# Registry mapping console class names to their model classes.
classes_dict = {"Amenity": Amenity, "BaseModel": BaseModel, "City": City,
                "Place": Place, "Review": Review, "State": State, "User": User}
class HBNBCommand(cmd.Cmd):
    """ Entry point of the command interpreter """
    collection_keys = classes_dict.keys()
    prompt = '(hbnb)'

    def do_quit(self, _input):
        """Quit command to exit the program"""
        return True

    def do_EOF(self, _input):
        """Exits command console"""
        return True

    def emptyline(self):
        """An empty line + ENTER should not execute anything"""
        return False

    def do_create(self, _input_class_name):
        """Creates a new instance of BaseModel in JSON"""
        if not _input_class_name:
            print("** class name missing **")
            return
        if _input_class_name not in classes_dict.keys():
            print("** class doesn't exist **")
            return
        newinstance = classes_dict[_input_class_name]()
        newinstance.save()
        print(newinstance.id)

    def do_show(self, _input):
        """Prints the string representation of an instance
        based on the class name and id"""
        input2 = _input
        # bug fix: integers must be compared with ==, not 'is' (identity of
        # small ints is an implementation detail; SyntaxWarning on 3.8+)
        if len(input2.split(' ')[0]) == 0:
            print("** class name missing **")
            return
        if input2.split(' ')[0] not in self.collection_keys:
            print("** class doesn't exist **")
            return
        if len(input2.split()) == 1:
            print("** instance id missing **")
            return
        models.storage.reload()
        for key, value in models.storage.all().items():
            if value.__class__.__name__ == input2.split(' ')[0] \
                    and value.id == input2.split(' ')[1]:
                print(value.__str__())
                return
        print("** no instance found **")

    def do_destroy(self, _input):
        """Deletes an instance based on the class name and id
        """
        if len(_input.split(' ')[0]) == 0:
            print("** class name missing **")
            return
        if _input.split(' ')[0] not in self.collection_keys:
            print("** class doesn't exist **")
            return
        if len(_input.split(' ')) == 1:
            print("** instance id missing **")
            return
        class_name, class_id = (_input.split(' ')[0], _input.split(' ')[1])
        query_key = class_name + '.' + class_id
        if query_key not in models.storage.all().keys():
            print("** no instance found **")
            return
        del models.storage.all()[query_key]
        models.storage.save()

    def do_all(self, _input_class):
        """Prints all string representation of all instances
        based or not on the class name
        """
        if _input_class:
            if _input_class not in self.collection_keys:
                print("** class doesn't exist **")
                return
        # NOTE(review): prints every stored object even when a class name is
        # given (no filtering) — confirm whether per-class filtering is wanted.
        for key_items in models.storage.all().keys():
            key_items = models.storage.all()[key_items]
            print(key_items)
        return

    def do_update(self, _input):
        """Updates an instance based on the class name and id by adding
        or updating attribute (save the change into the JSON file)
        """
        _input = shlex.split(_input)
        query_key = ''
        if len(_input) == 0:
            print("** class name missing **")
            return
        if _input[0] not in self.collection_keys:
            print("** class doesn't exist **")
            return
        if len(_input) == 1:
            print("** instance id missing **")
            return
        if len(_input) > 1:
            query_key = _input[0] + '.' + _input[1]
            if query_key not in models.storage.all().keys():
                print("** no instance found **")
                return
        if len(_input) == 2:
            print('** attribute name missing **')
            return
        if len(_input) == 3:
            print('** value missing **')
            return
        key_name = _input[2]
        input_value = _input[3]
        setattr(models.storage.all()[query_key], key_name, input_value)
        models.storage.all()[query_key].save()

    def default(self, inp):
        """Retrieve all instances class using: <class name>.all()"""
        count = 0
        words = inp.split(".")
        # bug fix: input without '.' used to raise IndexError on words[1];
        # it now falls through to the unknown-syntax message.
        if len(words) > 1 and words[0] in classes_dict and words[1] == "all()":
            self.do_all(words[0])
        elif len(words) > 1 and words[0] in classes_dict \
                and words[1] == "count()":
            # (removed dead re-check of words[0] in classes_dict — it was
            # already guaranteed by the elif condition)
            for key in models.storage.all():
                if key.startswith(words[0]):
                    count += 1
            print(count)
        else:
            print("*** Unknown syntax: {}".format(inp))
if __name__ == '__main__':
    # Start the interactive console only when run as a script.
    HBNBCommand().cmdloop()
| #!/usr/bin/python3
"""
Contains the Storage
"""
from models.engine.file_storage import FileStorage
storage = FileStorage()
storage.reload()
--- FILE SEPARATOR ---
#!/usr/bin/python3
""" Base Model Module """
from uuid import uuid4
from datetime import datetime
import models
class BaseModel:
"""A BaseModel class"""
def __init__(self, *args, **kwargs):
"""Initialize class base"""
if kwargs:
for keys, val in kwargs.items():
if keys != "__class__":
if keys == "created_at" or keys == "updated_at":
val = datetime.strptime(val, "%Y-%m-%dT%H:%M:%S.%f")
setattr(self, keys, val)
else:
self.id = str(uuid4())
self.created_at = datetime.now()
self.updated_at = self.created_at
models.storage.new(self)
def __str__(self):
"""print the class name, id and directory"""
return ("[{}] ({}) {}".format(self.__class__.__name__,
self.id, self.__dict__))
def save(self):
"""updates the public instance attribute with the current datetime"""
self.updated_at = datetime.now()
models.storage.save()
def to_dict(self):
"""returns a dictionary """
newbase = self.__dict__.copy()
newbase["__class__"] = self.__class__.__name__
newbase["created_at"] = self.created_at.isoformat()
newbase["updated_at"] = self.updated_at.isoformat()
return newbase
--- FILE SEPARATOR ---
#!/usr/bin/python
""" holds class City"""
from models.base_model import BaseModel
class City(BaseModel):
"""Representation of city"""
state_id = ""
name = ""
--- FILE SEPARATOR ---
#!/usr/bin/python
""" class Amenity"""
from models.base_model import BaseModel
class Amenity(BaseModel):
"""Representation of Amenity"""
name = ""
| {
"imports": [
"/models/__init__.py",
"/models/base_model.py",
"/models/city.py",
"/models/amenity.py"
]
} |
02KellyV/AirBnB_clone | refs/heads/master | /models/amenity.py | #!/usr/bin/python
""" class Amenity"""
from models.base_model import BaseModel
class Amenity(BaseModel):
    """Representation of an amenity."""
    name = ""  # amenity name (empty until assigned)
| #!/usr/bin/python3
""" Base Model Module """
from uuid import uuid4
from datetime import datetime
import models
class BaseModel:
"""A BaseModel class"""
def __init__(self, *args, **kwargs):
"""Initialize class base"""
if kwargs:
for keys, val in kwargs.items():
if keys != "__class__":
if keys == "created_at" or keys == "updated_at":
val = datetime.strptime(val, "%Y-%m-%dT%H:%M:%S.%f")
setattr(self, keys, val)
else:
self.id = str(uuid4())
self.created_at = datetime.now()
self.updated_at = self.created_at
models.storage.new(self)
def __str__(self):
"""print the class name, id and directory"""
return ("[{}] ({}) {}".format(self.__class__.__name__,
self.id, self.__dict__))
def save(self):
"""updates the public instance attribute with the current datetime"""
self.updated_at = datetime.now()
models.storage.save()
def to_dict(self):
"""returns a dictionary """
newbase = self.__dict__.copy()
newbase["__class__"] = self.__class__.__name__
newbase["created_at"] = self.created_at.isoformat()
newbase["updated_at"] = self.updated_at.isoformat()
return newbase
| {
"imports": [
"/models/base_model.py"
]
} |
02KellyV/AirBnB_clone | refs/heads/master | /models/base_model.py | #!/usr/bin/python3
""" Base Model Module """
from uuid import uuid4
from datetime import datetime
import models
class BaseModel:
    """Base class providing id, timestamps, and (de)serialization."""

    def __init__(self, *args, **kwargs):
        """Build an instance from a to_dict()-style mapping, or a fresh one.

        When kwargs are supplied, every key except '__class__' becomes an
        attribute and the two timestamp fields are parsed back from their
        ISO strings. Otherwise a new uuid and timestamps are assigned and
        the object is registered with the storage engine.
        """
        if kwargs:
            for attr, value in kwargs.items():
                if attr == "__class__":
                    continue
                if attr in ("created_at", "updated_at"):
                    value = datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")
                setattr(self, attr, value)
        else:
            self.id = str(uuid4())
            self.created_at = datetime.now()
            self.updated_at = self.created_at
            models.storage.new(self)

    def __str__(self):
        """Return '[ClassName] (id) {attribute dict}'."""
        return "[{}] ({}) {}".format(
            type(self).__name__, self.id, self.__dict__)

    def save(self):
        """Refresh updated_at and persist all objects to storage."""
        self.updated_at = datetime.now()
        models.storage.save()

    def to_dict(self):
        """Return a JSON-ready dict copy with ISO-formatted timestamps."""
        serialized = dict(self.__dict__)
        serialized["__class__"] = type(self).__name__
        serialized["created_at"] = self.created_at.isoformat()
        serialized["updated_at"] = self.updated_at.isoformat()
        return serialized
| #!/usr/bin/python3
"""
Contains the Storage
"""
from models.engine.file_storage import FileStorage
storage = FileStorage()
storage.reload()
| {
"imports": [
"/models/__init__.py"
]
} |
02KellyV/AirBnB_clone | refs/heads/master | /models/engine/file_storage.py | #!/usr/bin/python3
""" File Storage Module"""
import json
import os.path
from models.base_model import BaseModel
from models.amenity import Amenity
from models.city import City
from models.place import Place
from models.state import State
from models.user import User
from models.review import Review
classes = {"Amenity": Amenity, "BaseModel": BaseModel, "City": City,
"Place": Place, "Review": Review, "State": State, "User": User}
class FileStorage:
    """Serializes __objects to a JSON file and deserializes them back."""

    def __init__(self):
        """Set the JSON file path and the in-memory object store."""
        # string - path to the JSON file
        self.__file_path = 'file.json'
        # dictionary - stores all objects keyed by '<class name>.<id>'
        self.__objects = {}

    def all(self):
        """Return the dict __objects."""
        return self.__objects

    def new(self, obj):
        """Store obj in __objects under the key '<class name>.<id>'."""
        main_key = obj.__class__.__name__ + '.' + obj.id
        self.__objects.update({main_key: obj})

    def save(self):
        """Serialize __objects to the JSON file."""
        newdict = {}
        with open(self.__file_path, 'w', encoding="UTF-8") as filejson:
            for key, value in self.__objects.items():
                newdict[key] = value.to_dict()
            filejson.write(json.dumps(newdict))

    def reload(self):
        """Deserialize the JSON file into __objects (no-op if file absent)."""
        if os.path.isfile(self.__file_path):
            with open(self.__file_path) as json_f:
                othrdict_objs = json.load(json_f)
                for key, val in othrdict_objs.items():
                    # security/consistency fix: resolve the class through the
                    # module-level 'classes' registry instead of eval() on
                    # file contents (eval executes arbitrary data as code).
                    self.__objects[key] = classes[val["__class__"]](**val)
| #!/usr/bin/python3
""" Base Model Module """
from uuid import uuid4
from datetime import datetime
import models
class BaseModel:
"""A BaseModel class"""
def __init__(self, *args, **kwargs):
"""Initialize class base"""
if kwargs:
for keys, val in kwargs.items():
if keys != "__class__":
if keys == "created_at" or keys == "updated_at":
val = datetime.strptime(val, "%Y-%m-%dT%H:%M:%S.%f")
setattr(self, keys, val)
else:
self.id = str(uuid4())
self.created_at = datetime.now()
self.updated_at = self.created_at
models.storage.new(self)
def __str__(self):
"""print the class name, id and directory"""
return ("[{}] ({}) {}".format(self.__class__.__name__,
self.id, self.__dict__))
def save(self):
"""updates the public instance attribute with the current datetime"""
self.updated_at = datetime.now()
models.storage.save()
def to_dict(self):
"""returns a dictionary """
newbase = self.__dict__.copy()
newbase["__class__"] = self.__class__.__name__
newbase["created_at"] = self.created_at.isoformat()
newbase["updated_at"] = self.updated_at.isoformat()
return newbase
--- FILE SEPARATOR ---
#!/usr/bin/python
""" class Amenity"""
from models.base_model import BaseModel
class Amenity(BaseModel):
"""Representation of Amenity"""
name = ""
--- FILE SEPARATOR ---
#!/usr/bin/python
""" holds class City"""
from models.base_model import BaseModel
class City(BaseModel):
"""Representation of city"""
state_id = ""
name = ""
| {
"imports": [
"/models/base_model.py",
"/models/amenity.py",
"/models/city.py"
]
} |
02KellyV/AirBnB_clone | refs/heads/master | /models/__init__.py | #!/usr/bin/python3
"""
Contains the Storage
"""
from models.engine.file_storage import FileStorage
storage = FileStorage()
storage.reload()
| #!/usr/bin/python3
""" File Storage Module"""
import json
import os.path
from models.base_model import BaseModel
from models.amenity import Amenity
from models.city import City
from models.place import Place
from models.state import State
from models.user import User
from models.review import Review
classes = {"Amenity": Amenity, "BaseModel": BaseModel, "City": City,
"Place": Place, "Review": Review, "State": State, "User": User}
class FileStorage:
"""Class for Serializes and Deserializes"""
def __init__(self):
"""string - path to the JSON file"""
self.__file_path = 'file.json'
"""dictionary - empty but will store all objects by <class name>.id"""
self.__objects = {}
def all(self):
"""returns the dict __objects"""
return self.__objects
def new(self, obj):
"""sets in __objects, key classname.id val= obj"""
main_key = obj.__class__.__name__ + '.' + obj.id
self.__objects.update({main_key: obj})
def save(self):
"""serializes __objects to JSON"""
newdict = {}
with open(self.__file_path, 'w', encoding="UTF-8") as filejson:
for key, value in self.__objects.items():
newdict[key] = value.to_dict() # json.dump(newdict, filejson)
filejson.write(json.dumps(newdict))
def reload(self):
"""deserializes the JSON file to __objects"""
if os.path.isfile(self.__file_path):
with open(self.__file_path) as json_f:
othrdict_objs = json.load(json_f)
for key, val in othrdict_objs.items():
self.__objects[key] = eval(val["__class__"])(**val)
| {
"imports": [
"/models/engine/file_storage.py"
]
} |
03021980/AMTSAPP-Project | refs/heads/master | /AMTSAPP/views.py | from django.shortcuts import render
from django.http import HttpResponse
from .models import *
# Create your views here.
def homeView(request):
    """Render the landing page."""
    return render(request,'index.html')
def blogView(request):
    """Render the blog page."""
    return render(request,'blog.html')
def contactView(request):
    """Render the contact page."""
    return render(request,'contact.html')
def passView(request):
    """Render the pass overview page."""
    return render(request,'Pass.html')
def servicesView(request):
    """Render the services page."""
    return render(request,'services.html')
def creaatepasspage(request):
    """Render the pass-type selection form."""
    return render(request,'createpass.html')
def createpass(request):
    """Route to the pass-creation form matching the selected pass type.

    Reads the 'select' POST field ('student' / 'job' / 'senior').
    Robustness fix: an unrecognized value previously made the view
    return None, which Django rejects with a 500; it now falls back
    to the selection page.
    """
    select = request.POST['select']
    if select == "student":
        return render(request, 'Studentpass.html')
    elif select == "job":
        return render(request, 'JobPass.html')
    elif select == "senior":
        return render(request, 'SeniorCitizenPass.html')
    # Unknown selection -> show the chooser again instead of returning None.
    return render(request, 'createpass.html')
def studentpass(request):
    """Render the student pass application form."""
    return render(request,'Studentpass.html')
def jobpass(request):
    """Render the job pass application form."""
    return render(request,'JobPass.html')
def seniorcitizenpass(request):
    """Render the senior-citizen pass application form."""
    return render(request,'SeniorCitizenPass.html')
def saveStudentView(request):
    """Persist a student pass application, then show the final page.

    NOTE(review): this mutates data but reads fields from request.GET —
    the submitting form presumably uses method="get"; confirm before
    switching to POST. Missing fields raise MultiValueDictKeyError (500).
    """
    data=StudentModel()
    data.cname=request.GET['cname']
    data.ccourse=request.GET['ccourse']
    data.cyear=request.GET['cyear']
    data.cphoto=request.GET['cphoto']
    data.cfees=request.GET['cfees']
    data.save()
    return render(request,'LastPage.html')
def saveJobView(request):
    """Persist a job pass application and acknowledge with plain text.

    NOTE(review): mutating view reading from request.GET — same caveat
    as saveStudentView; confirm the form's method before changing.
    """
    data=JobModel()
    data.jname=request.GET['jname']
    data.jaddress=request.GET['jaddress']
    data.jletter=request.GET['jletter']
    data.save()
    return HttpResponse("Data Inserted")
def saveSeniorCitizenView(request):
data=SeniorCitizenModel()
data.scname=request.GET['scname']
data.scage=request.GET['scage']
data.save()
return HttpResponse("Data Inserted") | from django.db import models
from django.utils import timezone
# Create your models here.
class LoginModel(models.Model):
    """Student login/profile record, stored in table 'AMTSAPP'."""
    sname=models.CharField(max_length=30)
    scourse=models.CharField(max_length=30)
    # NOTE(review): IntegerField drops leading zeros and limits length;
    # phone numbers are usually better stored as CharField — confirm.
    sphonenumber=models.IntegerField()
    sage=models.IntegerField()
    saddress=models.CharField(max_length=30)
    semail=models.CharField(max_length=30)
    def __str__(self):
        """Show the student name in admin lists."""
        return self.sname
    class Meta:
        db_table = 'AMTSAPP'
        # Add verbose name
        verbose_name = 'STUDENT MODEL'
class StudentModel(models.Model):
    """Student pass application: identity plus uploaded proof files."""
    cname=models.CharField(max_length=30)
    ccourse=models.CharField(max_length=30)
    cyear=models.IntegerField()
    # NOTE(review): FileField(max_length=30) caps the stored upload path at
    # 30 characters, which is tight for generated file names — confirm.
    cphoto=models.FileField(max_length=30)
    cfees=models.FileField(max_length=30)
class JobModel(models.Model):
    """Job pass application: name, address, and an uploaded letter."""
    jname=models.CharField(max_length=30)
    jaddress=models.CharField(max_length=30)
    jletter=models.FileField(max_length=30)
class SeniorCitizenModel(models.Model):
scname=models.CharField(max_length=30)
scage=models.IntegerField() | {
"imports": [
"/AMTSAPP/models.py"
]
} |
03021980/AMTSAPP-Project | refs/heads/master | /AMTSAPP/admin.py | from django.contrib import admin
from AMTSAPP.models import *
# Register your models here.
admin.site.register(StudentModel)
admin.site.register(JobModel)
admin.site.register(SeniorCitizenModel) | from django.db import models
from django.utils import timezone
# Create your models here.
class LoginModel(models.Model):
sname=models.CharField(max_length=30)
scourse=models.CharField(max_length=30)
sphonenumber=models.IntegerField()
sage=models.IntegerField()
saddress=models.CharField(max_length=30)
semail=models.CharField(max_length=30)
def __str__(self):
return self.sname
class Meta:
db_table = 'AMTSAPP'
# Add verbose name
verbose_name = 'STUDENT MODEL'
class StudentModel(models.Model):
cname=models.CharField(max_length=30)
ccourse=models.CharField(max_length=30)
cyear=models.IntegerField()
cphoto=models.FileField(max_length=30)
cfees=models.FileField(max_length=30)
class JobModel(models.Model):
jname=models.CharField(max_length=30)
jaddress=models.CharField(max_length=30)
jletter=models.FileField(max_length=30)
class SeniorCitizenModel(models.Model):
scname=models.CharField(max_length=30)
scage=models.IntegerField() | {
"imports": [
"/AMTSAPP/models.py"
]
} |
03difoha/template_generator | refs/heads/master | /test/test_templateBuild.py | """
/file test_newBuild.py
@brief Unittests for Automated template generator tool.
@author 03difoha
@date 2017
"""
import unittest
from template_generator import template_generator
title = 'Test Title'
Id = 1234567
version = '5'
foo = 'foo'
longVar = '1234567891234567890123' # 22 chars long
empty = ''
special='{*!'
emptyParams = {'autoplay': '',
'muted': '',
'timeStamp': '',
'customSkin': '',
'loop': ''
}
params = {'autoplay': True,
'muted': '',
'timeStamp': True,
'customSkin': '',
'loop': True
}
class TestNewBuild(unittest.TestCase):
    """Unit tests for template_generator validation helpers.

    Uses assertEqual throughout: the assertEquals alias is deprecated
    and removed in Python 3.12.
    """

    def test_TitleCorrect(self):
        """A short non-empty title passes through unchanged."""
        self.assertEqual(template_generator.validateTitle(title=foo), foo)

    def test_TitleLengthIncorrect(self):
        """Titles over 20 characters are rejected."""
        self.assertEqual(template_generator.validateTitle(title=longVar), None)

    def test_TitleEmpty(self):
        """An empty title is rejected."""
        self.assertEqual(template_generator.validateTitle(title=empty), None)

    def test_IdCorrect(self):
        """A 7-digit integer id is accepted."""
        self.assertEqual(template_generator.validateId(Id=Id), Id)

    def test_IdNotInt(self):
        """A non-numeric id is rejected."""
        self.assertEqual(template_generator.validateId(Id=foo), None)

    def test_validateIdLength(self):
        """A valid id round-trips with 7 digits."""
        self.assertEqual(len(str(template_generator.validateId(Id=Id))), 7)

    def test_validateVersion(self):
        """A plain alphanumeric version string is accepted."""
        self.assertIn(template_generator.validateVersion(version), version)

    def test_validateVersionNegative(self):
        """Versions containing special characters are rejected."""
        self.assertEqual(template_generator.validateVersion(version=special), None)

    def test_buildParametersEmpty(self):
        """All-falsy params produce an empty query string."""
        self.assertEqual(template_generator.paramStringConsructor(emptyParams), '')

    def testbuildParameters(self):
        """Truthy params are joined in dict insertion order.

        Bug fix: the old expected string assumed a non-insertion key
        order and fails on CPython 3.7+, where dicts preserve insertion
        order (autoplay, timeStamp, loop).
        """
        self.assertEqual(template_generator.paramStringConsructor(params),
                         '?autoplay=True&timeStamp=True&loop=True')
if __name__ == '__main__':
    # Run the test suite when executed as a script.
    unittest.main()
| """
/file template_generator.py
@brief Template page generator.
@author 03difoha
@date 2017
"""
import os
import sys
import re
from shutil import copyfile
HTML_TEMPLATE = '/templates/index.html'
BUILD_TARGET_LOCATION = '/'
def validateTitle(title):
    """Validate the project title.

    Accepts a non-empty string of at most 20 characters and returns it;
    prints a diagnostic and returns None otherwise.
    """
    if title == '':
        print('No title entered - please enter a title')
        return None
    if len(title) <= 20:
        print('title valid')
        return title
    print(title)
    print('Invalid title - must not contain over 20 characters')
    return None
def validateId(Id):
    """Validate the project id.

    Coerces the input to int when possible and accepts exactly 7-digit
    values, returning the int; prints a diagnostic and returns None
    otherwise.

    Fixes:
    - bare `except:` replaced with the specific conversion errors;
    - previously a 6- or 8-digit int fell through every branch and
      returned None silently; it now gets the length diagnostic.
    """
    try:
        Id = int(Id)
    except (ValueError, TypeError):
        pass  # leave non-numeric input as-is; reported below
    if isinstance(Id, int) and 6 < len(str(Id)) < 8:
        print('Id valid')
        return Id
    if not isinstance(Id, int):
        print(Id)
        print('Invalid ID - numerical characters only')
        return None
    print('Invalid ID - must contain between 6 and 8 characters')
    return None
def validateVersion(version):
    """Validate the build version string.

    Returns the version when it contains only spaces, digits, and
    ASCII letters; prints a diagnostic and returns None otherwise.
    """
    if re.match(r'^[ 0-9a-zA-Z]+$', version):
        print('version valid')
        return version
    print('Invalid version - must not contain special characters.')
    return None
def paramStringConsructor(urlParams):
    """Build a URL query string from a dict of parameters.

    Only truthy values are kept, in dict insertion order. Returns
    '?k1=v1&k2=v2...' or the empty string when nothing is truthy.
    The result is also printed.
    """
    pieces = ['']
    for name, flag in urlParams.items():
        if flag:
            pieces += ['{}={}'.format(str(name), str(flag)), '&']
    if any(pieces):
        # prepend '?' and drop the trailing '&'
        pieces[0:0] = ['?']
        pieces.pop()
    result = ''.join(pieces)
    print(result)
    return result
def generate_build(title, Id, version, urlParams):
    """
    Dynamically generates html page.
    :param validated title:
    :param validated Id:
    :param validated version:
    :return: auto-generated index.html

    NOTE(review): the validators' return values are ignored here, so an
    invalid title/id/version still produces a build — confirm intended.
    """
    validateTitle(title)
    validateId(Id)
    validateVersion(version)
    paramString = paramStringConsructor(urlParams)
    # Remove any previous build so the template copy starts clean.
    if os.path.exists(BUILD_TARGET_LOCATION):
        os.remove(BUILD_TARGET_LOCATION)
    try:
        copyfile(HTML_TEMPLATE, BUILD_TARGET_LOCATION)
        build = open(BUILD_TARGET_LOCATION, "r")
        contents = build.readlines()
        build.close()
        contents.insert(4, '<title>{}</title>'.format(title))
        contents.insert(89, '<p class="main-title">{}</p>'.format(title))
        contents.insert(91, '<p class="sub-title">Version {}</p>'.format(version))
        contents.insert(95, '<iframe src="//example.com/{}/{}"></iframe>'.format(Id, paramString))
        # Magic numbers ^ refer to the line number of the generated html file
        # NOTE(review): "r+" rewrites from the start without truncating; safe
        # here only because content is the original plus insertions.
        build = open(BUILD_TARGET_LOCATION, "r+")
        contents = "".join(contents)
        build.write(contents)
        build.close()
        return
    except IOError as e:
        print('New build generation failed :(')
        print(e)
        return None
if __name__=='__main__':
    # CLI entry: argv[1]=title, argv[2]=Id, argv[3]=version; the remaining
    # positional arguments map to URL parameter flags.
    # NOTE(review): argv[6] and argv[8] are skipped — confirm intended arity.
    urlParams = {'autoplay': sys.argv[4],
                 'muted': sys.argv[5],
                 'timeStamp': sys.argv[7],
                 'customSkin': sys.argv[9],
                 'loop': sys.argv[10]
                 }
    generate_build(title=sys.argv[1], Id=sys.argv[2], version=sys.argv[3], urlParams=urlParams)
| {
"imports": [
"/template_generator.py"
]
} |
0417yjy/InfectionMonitoringSystem | refs/heads/master | /django_monitoring/mainpage/addregions.py | #from django.db import models
#from django.conf import settings
from .models import AbstractRegion, RegionLarge, RegionMedium
def make_medium_region(added_name, parent):
    """Create and save a RegionMedium named added_name under parent.

    All counters start at zero; updated_time is a fixed placeholder
    timestamp (2020-11-12T15:12:43Z).
    """
    added = RegionMedium(
        name=added_name,
        no_infected=0,
        no_deceased=0,
        no_offisolated=0,
        updated_time="2020-11-12T15:12:43Z",
        parent_region=parent
    )
    added.save()
# Seed data: each *_added list holds the medium regions (cities/counties)
# belonging to the RegionLarge fetched just above it.
# NOTE(review): these queries run at import time and raise DoesNotExist if
# the RegionLarge rows have not been created yet — confirm load order.
north_cc = RegionLarge.objects.get(name="충청북도")
north_cc_added = ["청주시", "충주시", "제천시", "보은군", "옥천군", "영동군", "증평군", "진천군", "괴산군", "음성군", "단양군"]
south_cc = RegionLarge.objects.get(name="충청남도")
south_cc_added = ["천안시", "공주시", "보령시", "아산시", "서산시", "논산시", "계룡시", "당진시", "금산군", "부여군", "서천군", "청양군", "홍성군", "에산군", "태안군"]
north_jl = RegionLarge.objects.get(name="전라북도")
north_jl_added = ["전주시", "군산시", "익산시", "정읍시", "남원시", "김제시", "완주군", "진안군", "무주군", "장수군", "임실군", "순창군", "고창군", "부안군"]
south_jl = RegionLarge.objects.get(name="전라남도")
south_jl_added = ["목포시", "여수시", "순천시", "나주시", "광양시", "담양군", "곡성군", "구례군", "고흥군", "보성군", "화순군", "장흥군", "강진군", "해남군", "영암군", "무안군", "함평군", "영관군", "장성군", "완도군", "진도군", "신안군"]
north_ks = RegionLarge.objects.get(name="경상북도")
north_ks_added = ["포항시", "경주시", "김천시", "안동시", "구미시", "영주시", "영천시", "상주시", "문경시", "경산시", "군위군", "의성군", "청송군", "영양군", "영덕군", "청도군", "고령군", "성주군" ,"칠곡군", "에천군", "봉화군", "울진군", "울릉군"]
south_ks = RegionLarge.objects.get(name="경상남도")
south_ks_added = ["창원시", "진주시", "통영시", "사천시", "김해시", "밀양시", "거제시", "양산시", "의령군", "함안군", "창녕군", "고성군", "남해군", "하동군", "산청군", "함양군", "거창군", "합천군"]
jeju = RegionLarge.objects.get(name="제주특별자치도")
jeju_added = ["제주시", "서귀포시"]
def call(region_list, parent):
for i in region_list:
make_medium_region(i, parent) | '''
File: models.py
Project: EDPS
File Created: Sunday, 8th November 2020 11:32:24 am
Author: Jongyeon Yoon (0417yjy@naver.com)
-----
Last Modified: Thursday, 12th November 2020 9:58:07 pm
Modified By: Jongyeon Yoon (0417yjy@naver.com>)
-----
Copyright 2020 Jongyeon Yoon
'''
from django.db import models
# from datetime import datetime
# Create your models here.
class AbstractRegion(models.Model): # Abstract class of region models
name = models.CharField(max_length=16)
no_infected = models.IntegerField() # 각 지역별로도 감염 통계를 저장함
no_deceased = models.IntegerField()
no_offisolated = models.IntegerField()
updated_time = models.DateTimeField()
prev_no_infected = models.IntegerField(default=0) # 각 지역별로도 감염 통계를 저장함
prev_no_deceased = models.IntegerField(default=0)
prev_no_offisolated = models.IntegerField(default=0)
class Meta:
abstract = True
class RegionLarge(AbstractRegion):
# 전체선택 / 특별시 / 광역시 / 도 / 해외유입
def __str__(self):
return self.name
class RegionMedium(AbstractRegion):
# 전체선택 / 구 / 군 / 시
parent_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
def __str__(self):
return self.name
# class RegionSmall(AbstractRegion):
# # 전체선택 / 동 / 읍
# parent_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
# def __str__(self):
# return self.name
class Subscriber(models.Model):
SUBSCRIBE_TYPE_CHOICES = models.TextChoices('SubscribeType','Email Kakao')
address = models.CharField(max_length=100) # kakaotalk id or email address
sub_type = models.CharField(max_length=5, choices=SUBSCRIBE_TYPE_CHOICES.choices)
# Has each region's id
large_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
def __str__(self):
return self.sub_type + ": " + self.address + ": " + str(self.large_region) + "/" + str(self.medium_region)
# class Facility(models.Model):
# # Entity about facility (a kind of map overlay components)
# FACILITY_TYPE_CHOICES = models.TextChoices('FacilityType', 'Hospital Pharmacy Convenience')
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=100)
# fac_type = models.CharField(max_length=12, choices=FACILITY_TYPE_CHOICES.choices)
# #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
# def __str__(self):
# return small_region + ": " + self.name + ": " + self.fac_type
class Infected(models.Model):
STATUS_CHOICES = models.TextChoices('StatusType','Infected Deceased Off-Isolated')
name = models.CharField(max_length=30)
status = models.CharField(max_length=12, choices=STATUS_CHOICES.choices)
class InfectedMovement(models.Model):
infected = models.ForeignKey('Infected', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE, default=1)
moved_date = models.DateTimeField()
exact_address = models.CharField(max_length=100) # 방문 장소의 주소
desc = models.CharField(max_length=20) # 방문 장소 이름
class StatisticValues(models.Model):
# 지역별 감염 정보 외의, 전체 감염 정보 / 검사완료 & 검사중 등 다른 통계 자료를 저장하는 테이블
updateTime = models.CharField(max_length=15, default="2020.11.21", primary_key=True) # 정보 업데이트 시간 data['updateTime'][23:28] -> 월.일(00.00 구조)
TotalCase = models.TextField(default="0") # 총 확진자
TotalDeath = models.TextField(default="0") # 총 사망자
TotalRecovered = models.TextField(default="0") # 총 완치자
NowCase = models.TextField(default="0") # 치료중인 사람
TotalChecking = models.TextField(default="0") # 검사완료
# notcaseCount = models.TextField(default="0") # 결과 음성
TodayCase = models.TextField(default="0") # 전일 대비 확진자, data2["data0_1"]의미
TodayRecovered = models.TextField(default="0") # 전일 대비 완치자
# from django.db import models
# from mainpage.models import StatisticValues
def __str__(self):
return self.updateTime
| {
"imports": [
"/django_monitoring/mainpage/models.py"
]
} |
0417yjy/InfectionMonitoringSystem | refs/heads/master | /django_monitoring/mainpage/LocationInfo.py | from urllib.request import Request, urlopen
import requests, bs4, json
from bs4 import BeautifulSoup
from .models import Infected, InfectedMovement
def get_location(APIresult):
    """Geocode 18 region names from an API result dict via Kakao search.

    Reads keys 'data1_0' .. 'data18_0' from APIresult (each presumably a
    Korean region name — confirm against the caller). Returns
    [lat_list, lng_list] of string coordinates in the same order.
    Special cases: '검역' (quarantine) gets fixed coordinates; '광주' and
    '서울' are expanded to their full administrative names.
    """
    apiresult = APIresult
    lat = []
    lng = []
    resultset = []
    # NOTE(review): API key hard-coded in source; move to settings/env.
    headers = {"Authorization": "KakaoAK f65e7c08cbfe6221c3795ebb8da80931"}  # Kakao local-search API
    for i in range(1,19):
        location_name = 'data'+str(i)+'_0'
        url = "https://dapi.kakao.com/v2/local/search/address.json?"
        if apiresult[location_name]=='광주':
            # disambiguate: Gwangju metropolitan city, not Gwangju-si
            query = "query=광주광역시"
            url = url+query
            result = requests.get(url,headers = headers)
            search_inf = json.loads(result.text)
            lat.append(search_inf['documents'][0]['address']['y'])
            lng.append(search_inf['documents'][0]['address']['x'])
        elif apiresult[location_name]=='검역':
            # quarantine station: fixed offshore placeholder coordinates
            lat.append('34.48321535036117')
            lng.append('129.36847815669026')
        elif apiresult[location_name]=='서울':
            query = "query=서울특별시"
            url = url+query
            result = requests.get(url,headers = headers)
            search_inf = json.loads(result.text)
            lat.append(search_inf['documents'][0]['address']['y'])
            lng.append(search_inf['documents'][0]['address']['x'])
        else:
            query = "query="+apiresult[location_name]
            url = url+query
            result = requests.get(url,headers = headers)
            search_inf = json.loads(result.text)
            lat.append(search_inf['documents'][0]['address']['y'])
            lng.append(search_inf['documents'][0]['address']['x'])
    resultset.append(lat)
    resultset.append(lng)
    return resultset
def scraping_data():
    """Scrape per-district case counts from the Seoul COVID status page.

    Parses the first 'table.pc' table: headers are district (gu) names,
    cells (excluding class 'today') are their counts. Each district name
    is then geocoded via Kakao; '기타' (other) gets fixed coordinates.

    Returns:
        [gu_list, gu_num_list, gu_latlng] — names, counts, and
        [lat, lng] string pairs, index-aligned.
    """
    url = "https://www.seoul.go.kr/coronaV/coronaStatus.do"
    exceptClass = ['today']
    req = requests.get(url)
    html = req.text
    soup = BeautifulSoup(html, 'html.parser')
    table_html = soup.select('table.pc')
    ths = list(table_html[0].select('th'))
    gu_list = []
    for i in ths:
        gu_list.append(i.text)
    # skip cells flagged as "today" deltas; keep cumulative counts only
    tds = list(table_html[0].findAll('td', class_=lambda x: x not in exceptClass))
    gu_num_list = []
    for i in tds:
        gu_num_list.append(i.text)
    gu_latlng = []
    # NOTE(review): API key hard-coded in source; move to settings/env.
    headers = {"Authorization": "KakaoAK f65e7c08cbfe6221c3795ebb8da80931"}  # Kakao local-search API
    for i in gu_list:
        location_name = i
        if i == '기타':
            # "other" bucket: fixed placeholder coordinates
            coords = ['37.6600024610047','126.96954772211683']
            gu_latlng.append(coords)
        else:
            url = "https://dapi.kakao.com/v2/local/search/address.json?"
            query = "query="+ location_name
            url = url+query
            result = requests.get(url,headers = headers)
            search_inf = json.loads(result.text)
            coords = [search_inf['documents'][0]['address']['y'],search_inf['documents'][0]['address']['x']]
            gu_latlng.append(coords)
    result = [gu_list,gu_num_list,gu_latlng]
    return result
def get_patient_path():
    """Return geocoded movement paths for every Infected record.

    For each patient, fetches their InfectedMovement rows and geocodes
    each exact_address through the Kakao local-search API. Falls back to
    the road_address coordinates when the lot-number address is absent.

    Fixes:
    - `== None` comparison replaced with `is None`;
    - `moves[0].infected.name` inside the per-row loop replaced with
      `o.infected.name` (identical value — the queryset is filtered by
      that name — without indexing the queryset).

    Returns:
        list of per-patient lists; each entry is
        [patient name, movement id, address, lat, lng] (all strings).
    """
    patient = Infected.objects.all()
    Movements = []
    for i in patient:
        p_moves = InfectedMovement.objects.filter(infected__name = i.name)
        Movements.append(p_moves)
    results = []
    # NOTE(review): API key hard-coded in source; move to settings/env.
    headers = {"Authorization": "KakaoAK f65e7c08cbfe6221c3795ebb8da80931"}  # Kakao local-search API
    for moves in Movements:
        move = []
        for o in moves:
            coords = [str(o.infected.name), str(o.id), str(o.exact_address)]
            url = "https://dapi.kakao.com/v2/local/search/address.json?"
            query = "query=" + str(o.exact_address)
            url = url + query
            result = requests.get(url, headers=headers)
            search_inf = json.loads(result.text)
            if search_inf['documents'][0]['address'] is None:
                # no lot-number address: use the road-name address instead
                coords.append(search_inf['documents'][0]['road_address']['y'])
                coords.append(search_inf['documents'][0]['road_address']['x'])
            else:
                coords.append(search_inf['documents'][0]['address']['y'])
                coords.append(search_inf['documents'][0]['address']['x'])
            move.append(coords)
        results.append(move)
    return results
| '''
File: models.py
Project: EDPS
File Created: Sunday, 8th November 2020 11:32:24 am
Author: Jongyeon Yoon (0417yjy@naver.com)
-----
Last Modified: Thursday, 12th November 2020 9:58:07 pm
Modified By: Jongyeon Yoon (0417yjy@naver.com>)
-----
Copyright 2020 Jongyeon Yoon
'''
from django.db import models
# from datetime import datetime
# Create your models here.
class AbstractRegion(models.Model):
    """Abstract base for region models: a name plus infection counters."""
    name = models.CharField(max_length=16)
    no_infected = models.IntegerField()  # infection statistics kept per region
    no_deceased = models.IntegerField()
    no_offisolated = models.IntegerField()
    updated_time = models.DateTimeField()
    prev_no_infected = models.IntegerField(default=0)  # previous snapshot (for day-over-day deltas)
    prev_no_deceased = models.IntegerField(default=0)
    prev_no_offisolated = models.IntegerField(default=0)
    class Meta:
        abstract = True
class RegionLarge(AbstractRegion):
    # Top-level region: all / special city / metropolitan city / province / overseas inflow
    def __str__(self):
        return self.name
class RegionMedium(AbstractRegion):
    # Second-level region: all / district (gu) / county (gun) / city (si)
    parent_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
    def __str__(self):
        return self.name
# class RegionSmall(AbstractRegion):
# # 전체선택 / 동 / 읍
# parent_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
# def __str__(self):
# return self.name
class Subscriber(models.Model):
    """One (address, region pair) subscription; a user may own several rows."""
    SUBSCRIBE_TYPE_CHOICES = models.TextChoices('SubscribeType','Email Kakao')
    address = models.CharField(max_length=100) # kakaotalk id or email address
    sub_type = models.CharField(max_length=5, choices=SUBSCRIBE_TYPE_CHOICES.choices)
    # Has each region's id
    large_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
    medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
    #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
    def __str__(self):
        # e.g. "Email: user@example.com: <large region>/<medium region>"
        return self.sub_type + ": " + self.address + ": " + str(self.large_region) + "/" + str(self.medium_region)
# class Facility(models.Model):
# # Entity about facility (a kind of map overlay components)
# FACILITY_TYPE_CHOICES = models.TextChoices('FacilityType', 'Hospital Pharmacy Convenience')
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=100)
# fac_type = models.CharField(max_length=12, choices=FACILITY_TYPE_CHOICES.choices)
# #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
# def __str__(self):
# return small_region + ": " + self.name + ": " + self.fac_type
class Infected(models.Model):
    """A confirmed patient tracked by the monitoring system."""
    # NOTE(review): 'Off-Isolated' is not a valid Python identifier; the
    # functional TextChoices API may reject it at import time — verify.
    STATUS_CHOICES = models.TextChoices('StatusType','Infected Deceased Off-Isolated')
    name = models.CharField(max_length=30)
    status = models.CharField(max_length=12, choices=STATUS_CHOICES.choices)
class InfectedMovement(models.Model):
    """One recorded visit (movement) of an Infected patient."""
    infected = models.ForeignKey('Infected', on_delete=models.CASCADE)
    #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
    medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE, default=1)
    moved_date = models.DateTimeField()
    exact_address = models.CharField(max_length=100) # address of the visited place
    desc = models.CharField(max_length=20) # name of the visited place
class StatisticValues(models.Model):
    """Daily nationwide statistics snapshot; one row per updateTime date."""
    # Country-wide totals / tests etc., beyond the per-region infection data
    updateTime = models.CharField(max_length=15, default="2020.11.21", primary_key=True) # info update time; data['updateTime'][23:28] -> month.day ("MM.DD" shape)
    TotalCase = models.TextField(default="0") # total confirmed cases
    TotalDeath = models.TextField(default="0") # total deaths
    TotalRecovered = models.TextField(default="0") # total recovered
    NowCase = models.TextField(default="0") # currently under treatment
    TotalChecking = models.TextField(default="0") # tests completed
    # notcaseCount = models.TextField(default="0") # tested negative
    TodayCase = models.TextField(default="0") # newly confirmed vs previous day; comes from data2["data0_1"]
    TodayRecovered = models.TextField(default="0") # newly recovered vs previous day
    # from django.db import models
    # from mainpage.models import StatisticValues
    def __str__(self):
        return self.updateTime
| {
"imports": [
"/django_monitoring/mainpage/models.py"
]
} |
0417yjy/InfectionMonitoringSystem | refs/heads/master | /django_monitoring/mainpage/views.py | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.core import serializers
from django.contrib import messages
from . import keyword
from .models import StatisticValues, RegionLarge, RegionMedium, Subscriber
from .forms import SubscirberForm
from datetime import datetime
from . import LocationInfo, kakaosender
import json
def index(request):
    """Render index.html with statistics, map and subscription data.

    Assembles data for four template sections: contents-home (API statistics,
    persisted into StatisticValues), contents-statistics (large regions as
    JSON), content-mapview (location/path data) and contents-subscribe
    (large + medium region lists).
    """
    #============================================ Start of 'contents-home.html' ====================================================
    # Fetch the statistics from the external API
    result = keyword.keywordFindAPI()
    #print(result)
    # 'context' carries the parameters handed to the template as data.
    # For example, putting result, result2, result3 into context makes those
    # variables usable in index.html's HTML tags, javascript code, etc.
    '''
    statisticDB = StatisticValues.objects.all() # 테이블 데이타를 전부 가져오기 위한 메소드
    statisticDBValues = list(StatisticValues.objects.all().values())
    context ={
        'result' : result,
        'statisticDBValues' : statisticDBValues,
    }
    print(statisticDBValues)
    '''
    # Save the API data into the DB table StatisticValues.
    try :
        #statisticValue = StatisticValues(updateTime = request.POST['updateTime'], TotalCase = request.POST['TotalCase'],
        #    TotalDeath = request.POST['TotalDeath'], TotalRecovered = request.POST['TotalRecovered'],
        #    NowCase = request.POST['NowCase'], TotalChecking = request.POST['TotalChecking'],
        #    notcaseCount = request.POST['notcaseCount'])
        YEAR= datetime.today().year # get the current year
        # #print(YEAR)
        # MONTH= datetime.today().month # get the current month
        # #print(MONTH)
        # DAY= datetime.today().day # get the current day
        #print(DAY)
        #TodayDate=str(YEAR)+"."+str(MONTH)+"."+str(DAY)
        #print(TodayDate)
        # if statisticDB.objects.get(updateTime=TodayDate).updateTime != TodayDate:
        #if not statisticDB.objects.filter(updateTime=TodayDate).exists():
        # updateTime is the primary key ("YYYY.MM.DD" built from the API's
        # updateTime slice), so saving twice on one day targets the same row.
        statisticValue = StatisticValues(updateTime = str(YEAR)+"."+result['updateTime'][23:28],
            TotalCase = result['TotalCase'],
            TotalDeath = result['TotalDeath'], TotalRecovered = result['TotalRecovered'],
            NowCase = result['NowCase'], TotalChecking = result['TotalChecking'],
            TodayCase = result['data0_1'], TodayRecovered =result['TodayRecovered'])
        statisticValue.save()
    except Exception as e:
        print(e)
        statisticValue = None
    # updateTime: info update time; data['updateTime'][23:28] -> month.day ("MM.DD" shape)
    statisticDB = StatisticValues.objects.all() # fetch every row of the table
    statisticDBValues = list(StatisticValues.objects.all().values())
    #print(statisticDBValues)
    #print(statisticValue)
    #=============================================== End of 'contents-home.html' ========================================================
    #============================================ Start of 'contents-statistics.html' ===================================================
    largeRegions = RegionLarge.objects.all()
    largeRegionsValues = serializers.serialize('json', largeRegions)
    #============================================= End of 'contents-statistics.html' ====================================================
    #============================================= Start of 'content-mapview.html' ======================================================
    locationset = LocationInfo.get_location(result)
    seoul_gu_results = LocationInfo.scraping_data()
    patient_pathes = LocationInfo.get_patient_path()
    # print(patient_pathes)
    #============================================= End of 'content-mapview.html' ========================================================
    #============================================ Start of 'contents-subscribe.html' ====================================================
    # duplicate
    '''
    largeRegions = RegionLarge.objects.all()
    largeRegionsValues = serializers.serialize('json', largeRegions)
    '''
    mediumRegions = RegionMedium.objects.all()
    mediumRegionsValues = serializers.serialize('json', mediumRegions)
    #============================================= End of 'contents-subscribe.html' =====================================================
    context = {
        # contents-home
        'result' : result,
        'statisticDBValues': statisticDBValues,
        # contents-statistics
        'largeRegions': largeRegionsValues,
        # contents-mapview
        'locationset' : locationset,
        'seoul_gu_result' : seoul_gu_results,
        'patient_pathes' : patient_pathes,
        # contents-subscribe
        # 'largeRegions': largeRegionsValues, # duplicate
        'mediumRegions': mediumRegionsValues
    }
    return render(request, 'index.html', context)
def subscribe_email(request):
    """Register email subscriptions for the region pairs posted by the form.

    POST parameters:
        address             -- subscriber email address
        large_i / med_i     -- RegionLarge / RegionMedium primary keys, i = 0,1,2,...

    Always redirects back to 'index'; on success a Django message reports how
    many regions were added.
    """
    if request.method == 'POST':
        form = SubscirberForm(request.POST)
        if form.is_valid():
            email = request.POST.get('address', 'false')
            #print(email)
            # Walk the posted region pairs: large_0/med_0, large_1/med_1, ...
            i = 0
            while request.POST.get('large_' + str(i)):
                large_pk = request.POST.get('large_' + str(i))
                med_pk = request.POST.get('med_' + str(i))
                #print(str(i) + ': ' + large_pk + ', ' + med_pk)
                # Persist one subscription row per region pair
                new_subscription = Subscriber(
                    address=email,
                    sub_type="Email",
                    large_region=RegionLarge.objects.get(pk=large_pk),
                    medium_region=RegionMedium.objects.get(pk=med_pk)
                )
                new_subscription.save()
                i += 1
            # Report success to the user
            messages.success(request, str(i) + '개 지역이 ' + email + '의 구독 리스트에 추가되었습니다!')
        # BUG FIX: an invalid POST form previously fell through without a
        # return statement, so the view returned None (HTTP 500). Every path
        # now redirects; the dead `form = SubscirberForm(...)` assignment in
        # the non-POST branch was removed.
        return redirect('index')
    return redirect('index')
def subscribe_kakao(request):
    """Register KakaoTalk subscriptions for the region pairs posted by the form.

    POST parameters:
        address             -- Kakao token/id stored in Subscriber.address
        nickname            -- display name used in the success message
        large_i / med_i     -- RegionLarge / RegionMedium primary keys, i = 0,1,2,...

    Always redirects back to 'index'.
    """
    if request.method == 'POST':
        form = SubscirberForm(request.POST)
        if form.is_valid():
            kakao_id = request.POST.get('address', 'false')
            kakao_nickname = request.POST.get('nickname', 'false')
            # Walk the posted region pairs: large_0/med_0, large_1/med_1, ...
            i = 0
            while request.POST.get('large_' + str(i)):
                large_pk = request.POST.get('large_' + str(i))
                med_pk = request.POST.get('med_' + str(i))
                # Persist one subscription row per region pair.
                # (The original fetched the region names into unused locals
                # lr/mr — two extra DB queries per pair — removed here.)
                new_subscription = Subscriber(
                    address=kakao_id,
                    sub_type="Kakao",
                    large_region=RegionLarge.objects.get(pk=large_pk),
                    medium_region=RegionMedium.objects.get(pk=med_pk)
                )
                new_subscription.save()
                i += 1
            # Report success to the user
            messages.success(request, str(i) + '개 지역이 ' + kakao_nickname + '님의 구독 리스트에 추가되었습니다!')
        # BUG FIX: an invalid POST form previously returned None (HTTP 500);
        # every path now redirects to 'index'.
        return redirect('index')
    return redirect('index')
def mapview(request):
    """Render the standalone map page."""
    template_name = 'map.html'
    return render(request, template_name)
#구독을 위한 관심지역 설정을 위한 DB view 설정
#전체설정 안하면 뷰오류나서 나중에 DB구현한다음에
'''def index(request) :
Subscriber = Subscriber.objects.all() # 테이블 데이타를 전부 가져오기 위한 메소드
context = {'SubscribeData' : SubscribeData}
try :
Subscribedatas = SubscribeData(address = request.POST['address'], sub_type = request.POST['sub_type'],
large_region = request.POST['large_region'], medium_region = request.POST['medium_region'],
small_region = request.POST['small_region'])
Subscirbedatas.save()
except :
Subscribedatas = None
return render(request, 'contents-subscribe.html', context) # render는 view에서 템플릿에 전달할 데이타를 Dictionary로 전달한다
''' | '''
File: models.py
Project: EDPS
File Created: Sunday, 8th November 2020 11:32:24 am
Author: Jongyeon Yoon (0417yjy@naver.com)
-----
Last Modified: Thursday, 12th November 2020 9:58:07 pm
Modified By: Jongyeon Yoon (0417yjy@naver.com>)
-----
Copyright 2020 Jongyeon Yoon
'''
from django.db import models
# from datetime import datetime
# Create your models here.
class AbstractRegion(models.Model): # Abstract class of region models
name = models.CharField(max_length=16)
no_infected = models.IntegerField() # 각 지역별로도 감염 통계를 저장함
no_deceased = models.IntegerField()
no_offisolated = models.IntegerField()
updated_time = models.DateTimeField()
prev_no_infected = models.IntegerField(default=0) # 각 지역별로도 감염 통계를 저장함
prev_no_deceased = models.IntegerField(default=0)
prev_no_offisolated = models.IntegerField(default=0)
class Meta:
abstract = True
class RegionLarge(AbstractRegion):
# 전체선택 / 특별시 / 광역시 / 도 / 해외유입
def __str__(self):
return self.name
class RegionMedium(AbstractRegion):
# 전체선택 / 구 / 군 / 시
parent_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
def __str__(self):
return self.name
# class RegionSmall(AbstractRegion):
# # 전체선택 / 동 / 읍
# parent_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
# def __str__(self):
# return self.name
class Subscriber(models.Model):
SUBSCRIBE_TYPE_CHOICES = models.TextChoices('SubscribeType','Email Kakao')
address = models.CharField(max_length=100) # kakaotalk id or email address
sub_type = models.CharField(max_length=5, choices=SUBSCRIBE_TYPE_CHOICES.choices)
# Has each region's id
large_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
def __str__(self):
return self.sub_type + ": " + self.address + ": " + str(self.large_region) + "/" + str(self.medium_region)
# class Facility(models.Model):
# # Entity about facility (a kind of map overlay components)
# FACILITY_TYPE_CHOICES = models.TextChoices('FacilityType', 'Hospital Pharmacy Convenience')
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=100)
# fac_type = models.CharField(max_length=12, choices=FACILITY_TYPE_CHOICES.choices)
# #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
# def __str__(self):
# return small_region + ": " + self.name + ": " + self.fac_type
class Infected(models.Model):
STATUS_CHOICES = models.TextChoices('StatusType','Infected Deceased Off-Isolated')
name = models.CharField(max_length=30)
status = models.CharField(max_length=12, choices=STATUS_CHOICES.choices)
class InfectedMovement(models.Model):
infected = models.ForeignKey('Infected', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE, default=1)
moved_date = models.DateTimeField()
exact_address = models.CharField(max_length=100) # 방문 장소의 주소
desc = models.CharField(max_length=20) # 방문 장소 이름
class StatisticValues(models.Model):
# 지역별 감염 정보 외의, 전체 감염 정보 / 검사완료 & 검사중 등 다른 통계 자료를 저장하는 테이블
updateTime = models.CharField(max_length=15, default="2020.11.21", primary_key=True) # 정보 업데이트 시간 data['updateTime'][23:28] -> 월.일(00.00 구조)
TotalCase = models.TextField(default="0") # 총 확진자
TotalDeath = models.TextField(default="0") # 총 사망자
TotalRecovered = models.TextField(default="0") # 총 완치자
NowCase = models.TextField(default="0") # 치료중인 사람
TotalChecking = models.TextField(default="0") # 검사완료
# notcaseCount = models.TextField(default="0") # 결과 음성
TodayCase = models.TextField(default="0") # 전일 대비 확진자, data2["data0_1"]의미
TodayRecovered = models.TextField(default="0") # 전일 대비 완치자
# from django.db import models
# from mainpage.models import StatisticValues
def __str__(self):
return self.updateTime
--- FILE SEPARATOR ---
# forms.py
from django import forms
from .models import Subscriber
class SubscirberForm(forms.Form):
class Meta:
model = Subscriber
fields = ['address'] | {
"imports": [
"/django_monitoring/mainpage/models.py",
"/django_monitoring/mainpage/forms.py"
]
} |
0417yjy/InfectionMonitoringSystem | refs/heads/master | /django_monitoring/mainpage/admin.py | from django.contrib import admin
from .models import *
# Register your models here.
# Expose the concrete models in the Django admin site.
# (RegionSmall and Facility are commented out, matching their commented-out
# model definitions in models.py.)
admin.site.register(RegionLarge)
admin.site.register(RegionMedium)
#admin.site.register(RegionSmall)
admin.site.register(Subscriber)
#admin.site.register(Facility)
admin.site.register(Infected)
admin.site.register(InfectedMovement)
admin.site.register(StatisticValues) | '''
File: models.py
Project: EDPS
File Created: Sunday, 8th November 2020 11:32:24 am
Author: Jongyeon Yoon (0417yjy@naver.com)
-----
Last Modified: Thursday, 12th November 2020 9:58:07 pm
Modified By: Jongyeon Yoon (0417yjy@naver.com>)
-----
Copyright 2020 Jongyeon Yoon
'''
from django.db import models
# from datetime import datetime
# Create your models here.
class AbstractRegion(models.Model): # Abstract class of region models
name = models.CharField(max_length=16)
no_infected = models.IntegerField() # 각 지역별로도 감염 통계를 저장함
no_deceased = models.IntegerField()
no_offisolated = models.IntegerField()
updated_time = models.DateTimeField()
prev_no_infected = models.IntegerField(default=0) # 각 지역별로도 감염 통계를 저장함
prev_no_deceased = models.IntegerField(default=0)
prev_no_offisolated = models.IntegerField(default=0)
class Meta:
abstract = True
class RegionLarge(AbstractRegion):
# 전체선택 / 특별시 / 광역시 / 도 / 해외유입
def __str__(self):
return self.name
class RegionMedium(AbstractRegion):
# 전체선택 / 구 / 군 / 시
parent_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
def __str__(self):
return self.name
# class RegionSmall(AbstractRegion):
# # 전체선택 / 동 / 읍
# parent_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
# def __str__(self):
# return self.name
class Subscriber(models.Model):
SUBSCRIBE_TYPE_CHOICES = models.TextChoices('SubscribeType','Email Kakao')
address = models.CharField(max_length=100) # kakaotalk id or email address
sub_type = models.CharField(max_length=5, choices=SUBSCRIBE_TYPE_CHOICES.choices)
# Has each region's id
large_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
def __str__(self):
return self.sub_type + ": " + self.address + ": " + str(self.large_region) + "/" + str(self.medium_region)
# class Facility(models.Model):
# # Entity about facility (a kind of map overlay components)
# FACILITY_TYPE_CHOICES = models.TextChoices('FacilityType', 'Hospital Pharmacy Convenience')
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=100)
# fac_type = models.CharField(max_length=12, choices=FACILITY_TYPE_CHOICES.choices)
# #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
# def __str__(self):
# return small_region + ": " + self.name + ": " + self.fac_type
class Infected(models.Model):
STATUS_CHOICES = models.TextChoices('StatusType','Infected Deceased Off-Isolated')
name = models.CharField(max_length=30)
status = models.CharField(max_length=12, choices=STATUS_CHOICES.choices)
class InfectedMovement(models.Model):
infected = models.ForeignKey('Infected', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE, default=1)
moved_date = models.DateTimeField()
exact_address = models.CharField(max_length=100) # 방문 장소의 주소
desc = models.CharField(max_length=20) # 방문 장소 이름
class StatisticValues(models.Model):
# 지역별 감염 정보 외의, 전체 감염 정보 / 검사완료 & 검사중 등 다른 통계 자료를 저장하는 테이블
updateTime = models.CharField(max_length=15, default="2020.11.21", primary_key=True) # 정보 업데이트 시간 data['updateTime'][23:28] -> 월.일(00.00 구조)
TotalCase = models.TextField(default="0") # 총 확진자
TotalDeath = models.TextField(default="0") # 총 사망자
TotalRecovered = models.TextField(default="0") # 총 완치자
NowCase = models.TextField(default="0") # 치료중인 사람
TotalChecking = models.TextField(default="0") # 검사완료
# notcaseCount = models.TextField(default="0") # 결과 음성
TodayCase = models.TextField(default="0") # 전일 대비 확진자, data2["data0_1"]의미
TodayRecovered = models.TextField(default="0") # 전일 대비 완치자
# from django.db import models
# from mainpage.models import StatisticValues
def __str__(self):
return self.updateTime
| {
"imports": [
"/django_monitoring/mainpage/models.py"
]
} |
0417yjy/InfectionMonitoringSystem | refs/heads/master | /django_monitoring/mainpage/signals.py | from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from .models import StatisticValues, RegionLarge, RegionMedium, Subscriber
from . import keyword
from datetime import datetime
from .mailsender import send_safe_mail
from .kakaosender import send_to_kakao
def convert_to_int_with_comma(str_num):
    """Parse an integer from a string that may use ',' as thousands separator."""
    without_separators = str_num.replace(',', '')
    return int(without_separators)
def send_messages(large_obj, increased):
    """Notify subscribers of *large_obj* that *increased* new cases appeared.

    Only subscribers whose medium region is '전체선택' ("select all") receive a
    message; email and Kakao subscribers are handled separately.
    """
    region_name = large_obj.name
    # Restrict to subscribers whose medium region is '전체선택' (select all)
    medium_all_select_obj = RegionMedium.objects.get(name='전체선택')
    subscribers = Subscriber.objects.filter(large_region=large_obj).filter(medium_region=medium_all_select_obj)
    # Email subscribers: one bulk send_safe_mail call
    print("Sending emails for " + region_name)
    email_subs = subscribers.filter(sub_type='Email')
    emails = email_subs.values_list('address', flat=True)
    send_safe_mail(emails, region_name, increased)
    # Kakao subscribers: one API call per subscriber
    print("Sending kakao messages for " + region_name)
    kakao_subs = subscribers.filter(sub_type='Kakao')
    kakao_users = kakao_subs.values_list('address', flat=True)
    for i in kakao_users:
        send_to_kakao(i, region_name, "전체선택", increased)
@receiver(post_save, sender=StatisticValues)
def StatisticValues_post_save(sender, **kwargs):
    """post_save hook for StatisticValues.

    When a new row is created, refresh every RegionLarge's counters from the
    country API (keyword.getCountryData) and, for any region whose confirmed
    count increased, notify its subscribers via send_messages.
    """
    if kwargs['created']:
        result = keyword.getCountryData()
        for region in result:
            # Skip the API's status keys; everything else is a region entry
            if region != 'resultCode' and region != 'resultMessage':
                increased_num = 0
                if region == 'korea':
                    # Nationwide totals map to the '전체선택' (select all) RegionLarge
                    region_name = result[region]['countryName']
                    region_obj = RegionLarge.objects.get(name='전체선택')
                else:
                    # Other regions: match by the first two characters of the name
                    region_name = result[region]['countryName']
                    region_obj = RegionLarge.objects.filter(name__contains=region_name[0]).get(name__contains=region_name[1])
                # New confirmed cases since the stored snapshot
                increased_num = convert_to_int_with_comma(result[region]['totalCase']) - region_obj.no_infected
                # Copy current numbers into the previous-day fields
                region_obj.prev_no_infected = region_obj.no_infected
                region_obj.prev_no_deceased = region_obj.no_deceased
                region_obj.prev_no_offisolated = region_obj.no_offisolated
                # Overwrite with the freshly fetched numbers
                region_obj.no_infected = convert_to_int_with_comma(result[region]['totalCase'])
                region_obj.no_deceased = convert_to_int_with_comma(result[region]['death'])
                region_obj.no_offisolated = convert_to_int_with_comma(result[region]['recovered'])
                region_obj.updated_time = datetime.now()
                # Persist the changes
                region_obj.save()
                if increased_num > 0:
                    send_messages(region_obj, increased_num)
print('Updated RegionLarge instances') | '''
File: models.py
Project: EDPS
File Created: Sunday, 8th November 2020 11:32:24 am
Author: Jongyeon Yoon (0417yjy@naver.com)
-----
Last Modified: Thursday, 12th November 2020 9:58:07 pm
Modified By: Jongyeon Yoon (0417yjy@naver.com>)
-----
Copyright 2020 Jongyeon Yoon
'''
from django.db import models
# from datetime import datetime
# Create your models here.
class AbstractRegion(models.Model): # Abstract class of region models
name = models.CharField(max_length=16)
no_infected = models.IntegerField() # 각 지역별로도 감염 통계를 저장함
no_deceased = models.IntegerField()
no_offisolated = models.IntegerField()
updated_time = models.DateTimeField()
prev_no_infected = models.IntegerField(default=0) # 각 지역별로도 감염 통계를 저장함
prev_no_deceased = models.IntegerField(default=0)
prev_no_offisolated = models.IntegerField(default=0)
class Meta:
abstract = True
class RegionLarge(AbstractRegion):
# 전체선택 / 특별시 / 광역시 / 도 / 해외유입
def __str__(self):
return self.name
class RegionMedium(AbstractRegion):
# 전체선택 / 구 / 군 / 시
parent_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
def __str__(self):
return self.name
# class RegionSmall(AbstractRegion):
# # 전체선택 / 동 / 읍
# parent_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
# def __str__(self):
# return self.name
class Subscriber(models.Model):
SUBSCRIBE_TYPE_CHOICES = models.TextChoices('SubscribeType','Email Kakao')
address = models.CharField(max_length=100) # kakaotalk id or email address
sub_type = models.CharField(max_length=5, choices=SUBSCRIBE_TYPE_CHOICES.choices)
# Has each region's id
large_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
def __str__(self):
return self.sub_type + ": " + self.address + ": " + str(self.large_region) + "/" + str(self.medium_region)
# class Facility(models.Model):
# # Entity about facility (a kind of map overlay components)
# FACILITY_TYPE_CHOICES = models.TextChoices('FacilityType', 'Hospital Pharmacy Convenience')
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=100)
# fac_type = models.CharField(max_length=12, choices=FACILITY_TYPE_CHOICES.choices)
# #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
# def __str__(self):
# return small_region + ": " + self.name + ": " + self.fac_type
class Infected(models.Model):
STATUS_CHOICES = models.TextChoices('StatusType','Infected Deceased Off-Isolated')
name = models.CharField(max_length=30)
status = models.CharField(max_length=12, choices=STATUS_CHOICES.choices)
class InfectedMovement(models.Model):
infected = models.ForeignKey('Infected', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE, default=1)
moved_date = models.DateTimeField()
exact_address = models.CharField(max_length=100) # 방문 장소의 주소
desc = models.CharField(max_length=20) # 방문 장소 이름
class StatisticValues(models.Model):
# 지역별 감염 정보 외의, 전체 감염 정보 / 검사완료 & 검사중 등 다른 통계 자료를 저장하는 테이블
updateTime = models.CharField(max_length=15, default="2020.11.21", primary_key=True) # 정보 업데이트 시간 data['updateTime'][23:28] -> 월.일(00.00 구조)
TotalCase = models.TextField(default="0") # 총 확진자
TotalDeath = models.TextField(default="0") # 총 사망자
TotalRecovered = models.TextField(default="0") # 총 완치자
NowCase = models.TextField(default="0") # 치료중인 사람
TotalChecking = models.TextField(default="0") # 검사완료
# notcaseCount = models.TextField(default="0") # 결과 음성
TodayCase = models.TextField(default="0") # 전일 대비 확진자, data2["data0_1"]의미
TodayRecovered = models.TextField(default="0") # 전일 대비 완치자
# from django.db import models
# from mainpage.models import StatisticValues
def __str__(self):
return self.updateTime
--- FILE SEPARATOR ---
from django_monitoring.settings import EMAIL_HOST_USER
from django.core.mail import send_mail
def send_safe_mail(to, where, how_much):
    """Send an infection-alert email via Django's send_mail.

    to       -- list of recipient addresses (e.g. ['example@email.com', 'example2@email.com'])
    where    -- string, name of the region of interest (e.g. '서울시 중구')
    how_much -- int, number of newly added confirmed cases
    """
    # NOTE(review): the '+ ": "' yields a title like "[EDPS] : <region> ..."
    # with a stray colon — confirm whether that is intended.
    title = '[EDPS] ' + ": " + where + ' 확진자 발생'
    message = """
    안녕하세요, Team Lumos입니다.
    관심 지역으로 설정한 """ + where + """에서 확진자가 """ + str(how_much) + """명 발생하였습니다.
    이 메시지는 자동으로 전송됩니다. 답장하지 마십시오.
    EDPS
    """
    send_mail(title, message, 'EDPS', to, fail_silently=False)
--- FILE SEPARATOR ---
# 도구 - 메세지 템플릿 해서 템플릿 생성 후 Rest API - 메시지 에서 액세스 토큰 발급
import json
import requests
#from models import Subscriber
def send_to_kakao(address, lr, mr, increased):
    """Send a 'new confirmed cases' alert to one user via Kakao's memo API.

    address   -- the user's Kakao access token (stored in Subscriber.address)
    lr / mr   -- large / medium region names; '전체선택' means "select all"
    increased -- int, number of new confirmed cases
    """
    access_token = address # receives the auth key
    send_lists = []  # NOTE(review): unused — presumably leftover; confirm before removing
    url = "https://kapi.kakao.com/v2/api/talk/memo/default/send"
    # User token
    headers = {
        "Authorization": "Bearer " + access_token
    }
    # Message text depends on how specific the region selection is
    if lr == "전체선택" and mr == "전체선택":
        text = "안녕하세요, Team Lumos 입니다. 새 확진자가 " + str(increased) + "명 발생했습니다."
    elif mr == "전체선택":
        my_region = lr
        text = "안녕하세요, Team Lumos 입니다. "+my_region + "에서 새 확진자가 " + str(increased) + "명 발생했습니다."
    else:
        my_region = lr + " " + mr
        text = "안녕하세요, Team Lumos 입니다. "+my_region + "에서 확진자가 " + str(increased) + "명 발생했습니다."
    data = {
        "template_object": json.dumps({"object_type": "text",
            "text": text,
            "link": {
                "web_url": "http://127.0.0.1:8000/mainpage/"
            }
        })
    }
    response = requests.post(url, headers=headers, data=data)
    print(response.status_code)
    # Kakao reports result_code 0 on success
    if response.json().get('result_code') == 0:
        print('메시지를 성공적으로 보냈습니다.')
    else:
        print('메시지를 성공적으로 보내지 못했습니다. 오류메시지 : ' + str(response.json()))
| {
"imports": [
"/django_monitoring/mainpage/models.py",
"/django_monitoring/mainpage/mailsender.py",
"/django_monitoring/mainpage/kakaosender.py"
]
} |
0417yjy/InfectionMonitoringSystem | refs/heads/master | /django_monitoring/mainpage/forms.py | # forms.py
from django import forms
from .models import Subscriber
class SubscirberForm(forms.ModelForm):
    """Validates the subscriber 'address' field.

    BUG FIX: this was declared as forms.Form, which ignores the inner Meta
    class entirely — the form had no fields, so is_valid() always passed.
    forms.ModelForm is required for Meta.model / Meta.fields to take effect.
    """
    class Meta:
        model = Subscriber
fields = ['address'] | '''
File: models.py
Project: EDPS
File Created: Sunday, 8th November 2020 11:32:24 am
Author: Jongyeon Yoon (0417yjy@naver.com)
-----
Last Modified: Thursday, 12th November 2020 9:58:07 pm
Modified By: Jongyeon Yoon (0417yjy@naver.com>)
-----
Copyright 2020 Jongyeon Yoon
'''
from django.db import models
# from datetime import datetime
# Create your models here.
class AbstractRegion(models.Model): # Abstract class of region models
name = models.CharField(max_length=16)
no_infected = models.IntegerField() # 각 지역별로도 감염 통계를 저장함
no_deceased = models.IntegerField()
no_offisolated = models.IntegerField()
updated_time = models.DateTimeField()
prev_no_infected = models.IntegerField(default=0) # 각 지역별로도 감염 통계를 저장함
prev_no_deceased = models.IntegerField(default=0)
prev_no_offisolated = models.IntegerField(default=0)
class Meta:
abstract = True
class RegionLarge(AbstractRegion):
# 전체선택 / 특별시 / 광역시 / 도 / 해외유입
def __str__(self):
return self.name
class RegionMedium(AbstractRegion):
# 전체선택 / 구 / 군 / 시
parent_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
def __str__(self):
return self.name
# class RegionSmall(AbstractRegion):
# # 전체선택 / 동 / 읍
# parent_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
# def __str__(self):
# return self.name
class Subscriber(models.Model):
SUBSCRIBE_TYPE_CHOICES = models.TextChoices('SubscribeType','Email Kakao')
address = models.CharField(max_length=100) # kakaotalk id or email address
sub_type = models.CharField(max_length=5, choices=SUBSCRIBE_TYPE_CHOICES.choices)
# Has each region's id
large_region = models.ForeignKey('RegionLarge', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
def __str__(self):
return self.sub_type + ": " + self.address + ": " + str(self.large_region) + "/" + str(self.medium_region)
# class Facility(models.Model):
# # Entity about facility (a kind of map overlay components)
# FACILITY_TYPE_CHOICES = models.TextChoices('FacilityType', 'Hospital Pharmacy Convenience')
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=100)
# fac_type = models.CharField(max_length=12, choices=FACILITY_TYPE_CHOICES.choices)
# #small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
# def __str__(self):
# return small_region + ": " + self.name + ": " + self.fac_type
class Infected(models.Model):
STATUS_CHOICES = models.TextChoices('StatusType','Infected Deceased Off-Isolated')
name = models.CharField(max_length=30)
status = models.CharField(max_length=12, choices=STATUS_CHOICES.choices)
class InfectedMovement(models.Model):
infected = models.ForeignKey('Infected', on_delete=models.CASCADE)
#small_region = models.ForeignKey('RegionSmall', on_delete=models.CASCADE)
medium_region = models.ForeignKey('RegionMedium', on_delete=models.CASCADE, default=1)
moved_date = models.DateTimeField()
exact_address = models.CharField(max_length=100) # 방문 장소의 주소
desc = models.CharField(max_length=20) # 방문 장소 이름
class StatisticValues(models.Model):
# 지역별 감염 정보 외의, 전체 감염 정보 / 검사완료 & 검사중 등 다른 통계 자료를 저장하는 테이블
updateTime = models.CharField(max_length=15, default="2020.11.21", primary_key=True) # 정보 업데이트 시간 data['updateTime'][23:28] -> 월.일(00.00 구조)
TotalCase = models.TextField(default="0") # 총 확진자
TotalDeath = models.TextField(default="0") # 총 사망자
TotalRecovered = models.TextField(default="0") # 총 완치자
NowCase = models.TextField(default="0") # 치료중인 사람
TotalChecking = models.TextField(default="0") # 검사완료
# notcaseCount = models.TextField(default="0") # 결과 음성
TodayCase = models.TextField(default="0") # 전일 대비 확진자, data2["data0_1"]의미
TodayRecovered = models.TextField(default="0") # 전일 대비 완치자
# from django.db import models
# from mainpage.models import StatisticValues
def __str__(self):
return self.updateTime
| {
"imports": [
"/django_monitoring/mainpage/models.py"
]
} |
0428402001/appserver | refs/heads/master | /redis_cache/redis_cache/flush_hot_category.py | # coding=utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import os
cur_dir = os.path.dirname(__file__)
p_dir = os.path.dirname(cur_dir)
g_dir = os.path.dirname(p_dir)
sys.path.append(g_dir)
sys.path.append(p_dir)
cur_dir = os.path.abspath('.')
p_dir = os.path.dirname(cur_dir)
g_dir = os.path.dirname(p_dir)
sys.path.append(g_dir)
sys.path.append(p_dir)
from redis_con_pool import conn_pool_hot_category
import json
import time
import subprocess
import tornado.httpclient
import tornado.web
import tornado.gen
import tornado.httpserver
import tornado.ioloop
import urlparse
from tornado.httpclient import HTTPError as clientHTTPError
from tornado.web import HTTPError
#from config import web_url, backend_netloc
from sqlalchemy import text
from sqlalchemy import and_ , or_, func, distinct
from sqlalchemy import event
from sqlalchemy.dialects.mysql import VARCHAR
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import scoped_session, sessionmaker, object_mapper
from sqlalchemy.types import TypeDecorator
import sqlalchemy
import redis
import logging
from models.base_orm import change_to_json_1
from models.app_blog_praise import AppBlogPraise
from models.app_comment_praise import AppCommentPraise
from models.app_comment_reply import AppCommentReply
from models.app_devicetoken import AppDevicetoken
from models.app_favorate import AppFavorate
from models.app_info import AppInfo
from models.app_message import AppMessage
from models.app_statistics import AppStatistics
from models.app_subscription import AppSubscription
from models.auth_classify import AuthClassify
from models.auth_group import AuthGroup
from models.auth_group_permissions import AuthGroupPermissions
from models.auth_permission import AuthPermission
from models.content_activity import ContentActivity
from models.content_ad import ContentAd
from models.content_auth import ContentAuth
from models.content_auth_groups import ContentAuthGroups
from models.content_auth_user_permissions import ContentAuthUserPermissions
from models.content_blog import ContentBlog
from models.content_blog_authclassify import ContentBlogAuthclassify
from models.content_blog_comment import ContentBlogComment
from models.content_blog_tagmany import ContentBlogTagmany
from models.content_category import ContentCategory
from models.content_comment import ContentComment
from models.content_hotblog import ContentHotblog
from models.content_hotcate import ContentHotcate
from models.content_hotcomment import ContentHotcomment
from models.content_hotquery import ContentHotquery
from models.content_hottag import ContentHottag
from models.content_indexblog import ContentIndexblog
from models.content_subscribe import ContentSubscribe
from models.content_tag import ContentTag
from models.base_orm import change_to_json
from models.base_orm import change_to_json_1
from models.base_orm import change_to_json_2
conn_hot_category = redis.Redis(connection_pool= conn_pool_hot_category)
import database
if __name__ == "__main__":
    # Rebuild the hot-category Redis cache from the MySQL category table.
    db_engine = create_engine(database.DB_PATH, echo=False)
    session_factory = sessionmaker()
    session_factory.configure(bind=db_engine)
    session = session_factory()
    image_base_url = 'http://o6y4guqxy.bkt.clouddn.com/media'
    # Join each category to its DJ's auth row, rewriting relative image paths
    # to absolute CDN URLs, ordered by ascending sort value.
    rows = session.query(
        ContentCategory.id.label('category_id'),
        ContentCategory.name.label('category_name'),
        func.replace(ContentCategory.cover, 'covers/', '%s/covers/' % image_base_url).label('cover'),
        ContentCategory.introduction,
        ContentAuth.nickname.label('dj_name'),
        func.replace(ContentAuth.head, 'covers', '%s/covers' % image_base_url).label('dj_head'),
        ContentAuth.sign.label('dj_desc'),
        ContentCategory.sort,
    ).join(
        ContentAuth, ContentCategory.dj_id == ContentAuth.id
    ).order_by(ContentCategory.sort.asc()).all()
    session.close()
    records = json.loads(change_to_json_1(rows))
    # Keep only the last four categories, then repopulate the cache list.
    records = records[-4:]
    conn_hot_category.flushdb()
    for item in records:
        conn_hot_category.lpush('0', item)
| import logging
#from json import json_util
import json
from sqlalchemy import event
from sqlalchemy.dialects.mysql import VARCHAR
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import scoped_session, sessionmaker, Session, object_mapper
from sqlalchemy.types import TypeDecorator
import sqlalchemy
import datetime
BASEOBJ = declarative_base()
def change_to_json(session_result):
    """Serialize an iterable of SQLAlchemy result rows to a JSON string.

    datetime/date values are rendered as 'YYYY-MM-DD HH:MM:SS'.  A
    one-element result collapses to a single JSON object instead of a
    one-element array — callers rely on this quirk.
    """
    # Skip private attrs, SQLAlchemy's `metadata`, and the tuple-protocol
    # helpers exposed on KeyedTuple-style rows.
    skipped = ('metadata', 'count', 'keys', 'index', 'conjugate')
    rows = []
    for row in session_result:
        record = {}
        for name in dir(row):
            if name.startswith('_') or name in skipped:
                continue
            value = getattr(row, name)
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.strftime('%Y-%m-%d %H:%M:%S')
            record[name] = value
        rows.append(record)
    if len(rows) == 1:
        rows = rows[0]
    return json.dumps(rows)
def change_to_json_1(session_result):
    """Serialize an iterable of SQLAlchemy result rows to a JSON array string.

    Like change_to_json but dates render as 'YYYY-MM-DD' and a one-element
    result is NOT collapsed — the output is always a JSON array.
    """
    skipped = ('metadata', 'count', 'keys', 'index', 'conjugate')
    rows = []
    for row in session_result:
        record = {}
        for name in dir(row):
            if name.startswith('_') or name in skipped:
                continue
            value = getattr(row, name)
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.strftime('%Y-%m-%d')
            record[name] = value
        rows.append(record)
    return json.dumps(rows)
def date_handler(obj):
    """`json.dumps` default= hook: ISO-format date-like objects, pass through the rest."""
    if hasattr(obj, 'isoformat'):
        return obj.isoformat()
    return obj
def change_to_json_2(clu):
    """Serialize *clu* with the custom SQLAlchemy-aware encoder (see new_alchemy_encoder)."""
    encoder_cls = new_alchemy_encoder()
    return json.dumps(clu, cls=encoder_cls, check_circular=False, default=date_handler)
def new_alchemy_encoder():
    """Return a fresh json.JSONEncoder subclass that can encode SQLAlchemy
    declarative instances as plain dicts.

    NOTE(review): _visited_objs is shared by every encode done with the
    returned class and is never cleared, so already-seen rows encode as
    null on later dumps — confirm each caller creates a new encoder class.
    """
    _visited_objs = []
    class AlchemyEncoder(json.JSONEncoder):
        def default(self, obj):
            # Only instances of declarative models get special handling.
            if isinstance(obj.__class__, DeclarativeMeta):
                # don't re-visit self
                if obj in _visited_objs:
                    return None
                _visited_objs.append(obj)
                # an SQLAlchemy class
                fields = {}
                for field in [x for x in dir(obj) if not x.startswith('_') and x != 'metadata']:
                    data = obj.__getattribute__(field)
                    try:
                        # Dates are stringified; datetime is a date subclass,
                        # so the second isinstance also matches datetimes.
                        if isinstance(data, datetime.date):
                            data=data.strftime('%Y-%m-%d %H:%M:%S')
                        if isinstance(data, datetime.datetime):
                            data=data.strftime('%Y-%m-%d %H:%M:%S')
                        json.dumps(data) # this will fail on non-encodable values, like other classes
                        fields[field] = data
                    except TypeError:
                        # Non-serializable attribute: keep the key, null the value.
                        fields[field] = None
                return fields
            # Anything else falls back to the stock encoder (raises TypeError).
            return json.JSONEncoder.default(self, obj)
    return AlchemyEncoder
class ModelBase(BASEOBJ):
    """Abstract declarative base for the concrete models: forces InnoDB storage."""
    __abstract__ = True
    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
def mysql_checkin(dbapi_connection, connection_record):
    """SQLAlchemy 'checkin' pool-event hook: trace the event, nothing else."""
    message = "DB checkin..."
    logging.debug(message)
def mysql_checkout(dbapi_con, con_record, con_proxy):
    """SQLAlchemy 'checkout' pool-event hook: ping MySQL before handing out
    a pooled connection.

    Runs `select 1`; if the driver raises an OperationalError whose first
    arg is a "server has gone away"-class code (2006/2013/2014/2045/2055),
    raises DisconnectionError so the pool discards and reconnects.  Any
    other OperationalError propagates unchanged.
    """
    try:
        logging.debug("mysql_checkout: Ping MYSQL...")
        dbapi_con.cursor().execute('select 1')
    # Fix: `except X, ex` is Python-2-only syntax; `as` works on 2.6+ and 3.x.
    except dbapi_con.OperationalError as ex:
        if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
            msg = 'Got mysql server has gone away: %s' % ex
            logging.warn(msg)
            raise sqlalchemy.exc.DisconnectionError(msg)
        else:
            raise
def is_db_connection_error(args):
    """Return True if error in connecting to db."""
    # NOTE(adam_g): This is currently MySQL specific and needs to be extended
    # to support Postgres and others.
    conn_err_codes = ('2002', '2003', '2006')
    return any(code in args for code in conn_err_codes)
def wrap_db_error(f):
    """Retry DB connection. Copied from nova and modified.

    Wraps *f* so that OperationalErrors carrying a MySQL connect-failure
    code (2002/2003/2006 — see is_db_connection_error) are retried up to
    10 times at 10-second intervals.  Other DB errors re-raise immediately.
    """
    def _wrap(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        # Fix: `except X, e` is Python-2-only syntax; `as` works on 2.6+/3.x.
        except sqlalchemy.exc.OperationalError as e:
            if not is_db_connection_error(e.args[0]):
                raise
            import time  # fix: `time` was used here but never imported in this module
            _MAX_RETRIES = 10
            _RETRY_INTERVAL = 10
            remaining_attempts = _MAX_RETRIES
            while True:
                logging.warning('SQL connection failed. %d attempts left.' %
                                remaining_attempts)
                remaining_attempts -= 1
                time.sleep(_RETRY_INTERVAL)
                try:
                    return f(*args, **kwargs)
                except sqlalchemy.exc.OperationalError as e:
                    if (remaining_attempts == 0 or
                            not is_db_connection_error(e.args[0])):
                        raise
                except sqlalchemy.exc.DBAPIError:
                    raise
        except sqlalchemy.exc.DBAPIError:
            raise
    # Fix: functions expose __name__ on both Python 2 and 3; func_name is 2-only.
    _wrap.__name__ = f.__name__
    return _wrap
def _create_engine(desc):
    """Build, hook and connectivity-check a SQLAlchemy engine for *desc*.

    Installs the checkin/checkout pool hooks, wraps connect() with the
    retry decorator, and performs one connect to fail fast.  Raises
    whatever the initial connection attempt raises.
    """
    options = {
        'pool_recycle': 10,
        'pool_size': 20,
        #'echo': True,
        'echo': False,
        'convert_unicode': True,
        # 'listeners': [MySQLPingListener()],
    }
    try:
        engine = create_engine(desc, **options)
        for event_name, hook in (('checkin', mysql_checkin),
                                 ('checkout', mysql_checkout)):
            event.listen(engine, event_name, hook)
        engine.connect = wrap_db_error(engine.connect)
        engine.connect()  # fail fast if the DB is unreachable
        return engine
    except Exception as err:
        logging.error("Error connect to db engine: %s" % err)
        raise
# NOTE(review): production DB credentials are hard-coded in source — move to
# environment/config and rotate this password.
wanka_engine = _create_engine('mysql://collegedaily:Zhuoxing1989@rdskhmm9d27q0t1etbxsf.mysql.rds.aliyuncs.com:3306/collapp?charset=utf8')
#wanka_engine = _create_engine('mysql://root:yestem@localhost:3306/collapp?charset=utf8')
# Process-wide session factory; expire_on_commit=False keeps loaded attributes
# usable after commit; autoflush/autocommit disabled for explicit control.
Session = scoped_session(sessionmaker(bind=wanka_engine,
                        expire_on_commit=False,
                        autoflush=False,
                        autocommit=False))
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class AppBlogPraise(ModelBase):
    """ORM model for `app_blog_praise`: one row per (blog_id, author_id) praise."""
    #class ContentAuth(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'app_blog_praise'
    id = Column(INTEGER, nullable=True, primary_key = True)
    # NOTE(review): default='' on INTEGER columns looks suspect — confirm intended.
    blog_id = Column(INTEGER, nullable=True, default='')
    author_id = Column(INTEGER, nullable=True, default='')
if __name__ == '__main__':
    # Manual smoke test: dump every praise row (needs a reachable MySQL).
    clu = Session.query(AppBlogPraise).all()
    for i in clu:
        print(i)  # fix: py2-only `print i` → parenthesized form, valid on 2 and 3
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class AppCommentReply(ModelBase):
    """ORM model for `app_comment_reply`: links a reply row to its parent comment."""
    #class ContentAuth(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'app_comment_reply'
    id = Column(INTEGER, nullable=True, primary_key = True)
    comment_id = Column(INTEGER, nullable=True, default='')
    reply_id = Column(INTEGER, nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class AppDevicetoken(ModelBase):
    """ORM model for `app_devicetoken`: per-user device token for push delivery."""
    #class ContentAuth(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'app_devicetoken'
    id = Column(INTEGER, nullable=True, primary_key = True)
    author_id = Column(INTEGER, nullable=True, default='')
    device_token = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    update_dateline = Column(INTEGER, nullable=True, default='')
    enable_push = Column(INTEGER, nullable=True, default=1)  # presumably 1 = push on; confirm
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class AppInfo(ModelBase):
    """ORM model for `app_info`: a simple key/value settings table."""
    #class AppInfo(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'app'
    __tablename__ = 'app_info'
    id = Column(INTEGER, nullable=True, primary_key = True)
    key = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    value = Column(TEXT, nullable=True, default='')
    #value = Column(VARCHAR(64, charset='ascii'), nullable=True, default='')
if __name__ == "__main__":
    # Manual smoke test: dump every app_info row (needs a reachable MySQL).
    clu = Session.query(AppInfo).all()
    for i in clu:
        print(i)  # fix: py2-only `print i` → parenthesized form, valid on 2 and 3
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class AppMessage(ModelBase):
    """ORM model for `app_message`: user-to-user messages/notifications."""
    #class ContentAuth(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'app_message'
    id = Column(INTEGER, nullable=True, primary_key = True)
    message_from = Column(INTEGER, nullable=True, default='')  # sender id
    message_to = Column(INTEGER, nullable=True, default='')    # recipient id
    subject = Column(VARCHAR(1024, charset='utf8'), nullable=True, default='')
    content = Column(TEXT, nullable=True, default='')
    date = Column(DateTime, nullable=True, default='')
    message_type = Column(INTEGER, nullable=True, default=0)
    is_read = Column(INTEGER, nullable=True, default=0)
    # Optional links back to the blog/comment that triggered the message.
    blog_id = Column(INTEGER, nullable=True, default=0)
    comment_id = Column(INTEGER, nullable=True, default=0)
if __name__ == "__main__":
    # Manual smoke test (fixed: the original queried ContentAuth, which is not
    # imported in this module and raised NameError).
    q = Session.query(AppMessage).filter(AppMessage.id == "1").all()
    for i in q:
        print(vars(i))  # fix: py2-only `print` statement → function form
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class AppStatistics(ModelBase):
    """ORM model for `app_statistics`: per-author/category/blog event records."""
    #class AppInfo(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'app'
    __tablename__ = 'app_statistics'
    id = Column(INTEGER, nullable=True, primary_key = True)
    author_id = Column(INTEGER, nullable=True, default='')
    category_id = Column(INTEGER, nullable=True, default='')
    blog_id = Column(INTEGER, nullable=True, default='')
    dateline = Column(INTEGER, nullable=True, default='')  # presumably a unix timestamp; confirm
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ
class AppSubscription(BASEOBJ):
    """ORM model for `app_subscription`: author-to-category subscription link.

    NOTE(review): derives from BASEOBJ, not ModelBase, so the InnoDB
    __table_args__ default does not apply — confirm intended.
    """
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'app'
    __tablename__ = 'app_subscription'
    id = Column(INTEGER, nullable=True, primary_key = True)
    author_id = Column(INTEGER, nullable=True, default='')
    category_id = Column(INTEGER, nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER, LONGTEXT
from base_orm import BASEOBJ, ModelBase, Session
class ContentActivity(ModelBase):
    """ORM model for `content_activity`: site events with title/cover/content."""
    #class ContentActivity(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'activity'
    __tablename__ = 'content_activity'
    id = Column(INTEGER, nullable=True, primary_key = True)
    title = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    cover = Column(VARCHAR(100, charset='ascii'), nullable=True, default='')
    content = Column(LONGTEXT, nullable=True, default='')
    date = Column(DateTime, nullable=True, default='')
    addr = Column(VARCHAR(30, charset='utf8'), nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentAd(ModelBase):
    """ORM model for `content_ad`: an advertisement banner (title, cover, link)."""
    #class ContentActivity(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'content'
    __tablename__ = 'content_ad'
    id = Column(INTEGER, nullable=True, primary_key = True)
    title = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    cover = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    url = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    is_active = Column(INTEGER, nullable=True)
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, Integer
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class ContentAuth(ModelBase):
    """ORM model for `content_auth`: user accounts (credentials + profile)."""
    _resource_name_ = 'auth'
    __tablename__ = 'content_auth'
    id = Column(Integer, nullable=True, primary_key = True)
    password = Column(VARCHAR(128, charset='utf8'), nullable=True, default='')
    last_login = Column(DateTime, nullable=True, default='')
    is_superuser = Column(Integer, nullable=True, default='')
    username = Column(VARCHAR(30, charset='utf8'), nullable=True, default='')
    last_name = Column(VARCHAR(30, charset='utf8'), nullable=True, default='')
    email = Column(VARCHAR(75, charset='utf8'), nullable=True, default='')
    phone = Column(VARCHAR(75, charset='utf8'), nullable=True, default='')
    is_staff = Column(Integer, nullable=True, default='')
    is_active = Column(Integer, nullable=True, default=1)
    sex = Column(Integer,nullable=True,default=0)
    date_joined = Column(DateTime, nullable=True, default='')
    subscribe = Column(Integer, nullable=True, default=False)
    # Avatar image path; consumers prefix it with a CDN base URL.
    head = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    sign = Column(VARCHAR(200, charset='utf8'), nullable=True, default='')
    area = Column(VARCHAR(20, charset='utf8'), nullable=True, default=0)
    nickname = Column(VARCHAR(20, charset='utf8'), nullable=True, default='')
    identity = Column(Integer, nullable=True, default=7)
    school = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    grade = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    regist_from = Column(Integer, nullable=True, default=0)
    sns_type = Column(Integer, nullable=True, default=0)
    sns_uid = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    @classmethod
    def keys(cls):
        # Names of all class-level attributes (columns plus ORM machinery).
        return cls.__dict__.keys()
class ContentAuth_to(ContentAuth):
    """Same table, exposed under resource name 'to_user' so one query can join content_auth twice."""
    _resource_name_ = 'to_user'
class ContentAuth_from(ContentAuth):
    """Same table, exposed under resource name 'from_user' for self-join queries."""
    _resource_name_ = 'from_user'
if __name__ == "__main__":
    # Manual smoke test: fetch all user ids (requires DB connectivity).
    rows = Session.query(ContentAuth_from.id).all()
    for row in rows:
        pass
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class ContentAuthGroups(ModelBase):
    """ORM model for `content_auth_groups`: user-to-group membership link."""
    #class ContentAuth(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'content_auth_groups'
    id = Column(INTEGER, nullable=True, primary_key = True)
    auth_id = Column(INTEGER, nullable=True, default='')
    group_id = Column(INTEGER, nullable=True, default='')
if __name__ == "__main__":
    # Manual smoke test (fixed: the original referenced ContentAuth, which is
    # not imported in this module and raised NameError; py2 print statements
    # converted to the function form).
    q = Session.query(ContentAuthGroups).filter(ContentAuthGroups.id == "1").all()
    for i in q:
        print(dir(i))
        print(vars(i))
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class ContentAuthUserPermissions(ModelBase):
    """ORM model for `content_auth_user_permissions`: per-user permission link."""
    #class ContentAuth(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'content_auth_user_permissions'
    id = Column(INTEGER, nullable=True, primary_key = True)
    auth_id = Column(INTEGER, nullable=True, default='')
    # NOTE(review): column named group_id in a user-permissions table — confirm schema.
    group_id = Column(INTEGER, nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER, LONGTEXT
from base_orm import BASEOBJ
class ContentBlog(BASEOBJ):
    """ORM model for `content_blog`: an article/post with media and display flags.

    NOTE(review): derives from BASEOBJ, not ModelBase, so the InnoDB
    __table_args__ default does not apply — confirm intended.
    """
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'blog'
    __tablename__ = 'content_blog'
    id = Column(INTEGER, nullable=True, primary_key = True)
    title = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    cover = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    abstract = Column(VARCHAR(1024, charset='utf8'), nullable=True, default='')
    content = Column(LONGTEXT, nullable=True, default='')
    author = Column(VARCHAR(128, charset='utf8'), nullable=True, default='')
    editor = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    tag_id = Column(INTEGER, nullable=True, default='')
    category_id = Column(INTEGER, nullable=True, default='')
    date = Column(DateTime, nullable=True, default='')
    hotness = Column(INTEGER, nullable=True, default='')
    new = Column(INTEGER, nullable=True, default='')
    show_condition = Column(INTEGER, nullable=True, default='')
    praise_count = Column(INTEGER, nullable=True, default='')
    thumb = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    introduction = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    index_pic = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    is_big = Column(INTEGER, nullable=True, default='')
    is_out = Column(INTEGER, nullable=True, default='')
    source = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    source_url = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    sort = Column(INTEGER, nullable=True, default='')
    publish_time = Column(DateTime, nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ
class ContentBlogTagmany(BASEOBJ):
    """ORM model for `content_blog_tagmany`: blog-to-tag many-to-many link."""
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'blog'
    __tablename__ = 'content_blog_tagmany'
    id = Column(INTEGER, nullable=True, primary_key = True)
    blog_id = Column(INTEGER, nullable=True, default='')
    tag_id = Column(INTEGER, nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class ContentCategory(ModelBase):
    """ORM model for `content_category`: a content channel owned by a DJ (dj_id)."""
    #class ContentCategory(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'category'
    __tablename__ = 'content_category'
    id = Column(INTEGER, nullable=True, primary_key = True)
    name = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    # Relative image path; consumers prefix it with a CDN base URL.
    cover = Column(VARCHAR(128, charset='utf8'), nullable=True, default='')
    introduction = Column(VARCHAR(1024, charset='utf8'), nullable=True, default='')
    dj_id = Column(INTEGER, nullable=True, default='')  # joins to content_auth.id
    sort = Column(INTEGER, nullable=True, default='')   # ascending display order
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentComment(ModelBase):
    """ORM model for `content_comment`: a user comment with praise/report counters."""
    #class ContentComment(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'content_comment'
    id = Column(INTEGER, nullable=True , primary_key = True, autoincrement=True)
    content = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    author_id = Column(INTEGER, nullable=True, default='')
    # NOTE(review): 'aauthor' looks like a typo for 'author' but matches the DB
    # column — do not rename without a migration.
    aauthor = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    visible = Column(INTEGER, nullable=True, default=1)
    time = Column(DateTime, nullable=True, default='')
    praise_count = Column(INTEGER, nullable=True, default=0)
    report_count = Column(INTEGER, nullable=True, default=0)
    report_reason = Column(VARCHAR(200, charset='utf8'), nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ
class ContentHotblog(BASEOBJ):
    """ORM model for `content_hotblog`: blog ids promoted as "hot", with sort order."""
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'blog'
    __tablename__ = 'content_hotblog'
    blog_id = Column(INTEGER, nullable=True, primary_key = True)
    sort = Column(INTEGER, nullable=True)
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentHotcate(ModelBase):
    """ORM model for `content_hotcate`: category ids promoted as "hot"."""
    #class ContentHotcate(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'content_hotcate'
    cate_id = Column(INTEGER, nullable=True, primary_key = True)
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentHotcomment(ModelBase):
    """ORM model for `content_hotcomment`: comment ids promoted as "hot"."""
    #class ContentComment(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'content_hotcomment'
    comment_id = Column(INTEGER, nullable=True, primary_key = True)
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentHotquery(ModelBase):
    """ORM model for `content_hotquery`: a promoted search keyword and its detail text."""
    #class ContentHotcate(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'query'
    __tablename__ = 'content_hotquery'
    id = Column(INTEGER, nullable=True, primary_key = True)
    hot_key = Column(VARCHAR(40, charset='utf8'), nullable=True, default='')
    detail = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentHottag(ModelBase):
    """ORM model for `content_hottag`: tag ids promoted as "hot"."""
    #class ContentHotcate(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'content'
    __tablename__ = 'content_hottag'
    tag_id = Column(INTEGER, nullable=True, primary_key = True)
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase
class ContentIndexblog(ModelBase):
    """ORM model for `content_indexblog`: blog ids selected for the index page."""
    #class ContentComment(BASEOBJ):
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'auth'
    __tablename__ = 'content_indexblog'
    blog_id = Column(INTEGER, nullable=True, primary_key = True)
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ
class ContentSubscribe(BASEOBJ):
    """ORM model for `content_subscribe`: an email subscription with active flag."""
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'blog'
    __tablename__ = 'content_subscribe'
    id = Column(INTEGER, nullable=True, primary_key = True)
    mail = Column(VARCHAR(4096, charset='utf8'), nullable=True, default='')
    active = Column(INTEGER, nullable=True, default='')
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ
class ContentTag(BASEOBJ):
    """ORM model for `content_tag`: a tag id/name pair."""
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'blog'
    __tablename__ = 'content_tag'
    id = Column(INTEGER, nullable=True, primary_key = True)
    name = Column(VARCHAR(64, charset='utf8'), nullable=True, default='')
--- FILE SEPARATOR ---
#database configure
#DB_PATH = "mysql://root:yestem@localhost:3306/collapp?charset=utf8"
# NOTE(review): production credentials are hard-coded in source — move to
# environment/config and rotate this password.
DB_PATH = "mysql://collegedaily:Zhuoxing1989@rdskhmm9d27q0t1etbxsf.mysql.rds.aliyuncs.com:3306/collapp?charset=utf8"
| {
"imports": [
"/models/base_orm.py",
"/models/app_blog_praise.py",
"/models/app_comment_reply.py",
"/models/app_devicetoken.py",
"/models/app_info.py",
"/models/app_message.py",
"/models/app_statistics.py",
"/models/app_subscription.py",
"/models/content_activity.py",
"/models/content_ad.py",
"/models/content_auth.py",
"/models/content_auth_groups.py",
"/models/content_auth_user_permissions.py",
"/models/content_blog.py",
"/models/content_blog_tagmany.py",
"/models/content_category.py",
"/models/content_comment.py",
"/models/content_hotblog.py",
"/models/content_hotcate.py",
"/models/content_hotcomment.py",
"/models/content_hotquery.py",
"/models/content_hottag.py",
"/models/content_indexblog.py",
"/models/content_subscribe.py",
"/models/content_tag.py",
"/database.py"
]
} |
0428402001/appserver | refs/heads/master | /start_redis.py | import datetime
import redis
import json
from models.base_orm import Session
from models.content_auth import ContentAuth
# Local Redis, DB 0: one hash per content_auth user, keyed by user id.
pool = redis.ConnectionPool(host='localhost', port=6379, db=0)#db = 0 used to cache the content_auth
r = redis.Redis(connection_pool=pool)
def get_map_field(single_session_result):
    """Return a {field_name: value} dict for one SQLAlchemy result row.

    datetime/date values are rendered as 'YYYY-MM-DD' strings so the dict can
    be stored in a Redis hash; private names and the tuple-protocol helpers
    ('count', 'keys', 'index', 'conjugate') plus 'metadata' are skipped.
    """
    skipped = ('metadata', 'count', 'keys', 'index', 'conjugate')
    fields = {}
    for name in dir(single_session_result):
        if name.startswith('_') or name in skipped:
            continue
        value = getattr(single_session_result, name)
        if isinstance(value, (datetime.datetime, datetime.date)):
            value = value.strftime('%Y-%m-%d')
        fields[name] = value
    return fields
# Warm the cache: write every content_auth row to Redis as a hash keyed by
# the user's primary-key id.
clu = Session.query(ContentAuth).all()
for single_session_result in clu:
    auth_id = single_session_result.id
    map_field = {}
    map_field = get_map_field(single_session_result)
    r.hmset(auth_id, map_field)
| import logging
#from json import json_util
import json
from sqlalchemy import event
from sqlalchemy.dialects.mysql import VARCHAR
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import scoped_session, sessionmaker, Session, object_mapper
from sqlalchemy.types import TypeDecorator
import sqlalchemy
import datetime
BASEOBJ = declarative_base()
def change_to_json(session_result):
    """Serialize an iterable of SQLAlchemy result rows to a JSON string.

    datetime/date values are rendered as 'YYYY-MM-DD HH:MM:SS'.  A
    one-element result collapses to a single JSON object instead of a
    one-element array — callers rely on this quirk.
    """
    # Skip private attrs, SQLAlchemy's `metadata`, and the tuple-protocol
    # helpers exposed on KeyedTuple-style rows.
    skipped = ('metadata', 'count', 'keys', 'index', 'conjugate')
    rows = []
    for row in session_result:
        record = {}
        for name in dir(row):
            if name.startswith('_') or name in skipped:
                continue
            value = getattr(row, name)
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.strftime('%Y-%m-%d %H:%M:%S')
            record[name] = value
        rows.append(record)
    if len(rows) == 1:
        rows = rows[0]
    return json.dumps(rows)
def change_to_json_1(session_result):
    """Serialize an iterable of SQLAlchemy result rows to a JSON array string.

    Like change_to_json but dates render as 'YYYY-MM-DD' and a one-element
    result is NOT collapsed — the output is always a JSON array.
    """
    skipped = ('metadata', 'count', 'keys', 'index', 'conjugate')
    rows = []
    for row in session_result:
        record = {}
        for name in dir(row):
            if name.startswith('_') or name in skipped:
                continue
            value = getattr(row, name)
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.strftime('%Y-%m-%d')
            record[name] = value
        rows.append(record)
    return json.dumps(rows)
def date_handler(obj):
    """`json.dumps` default= hook: ISO-format date-like objects, pass through the rest."""
    if hasattr(obj, 'isoformat'):
        return obj.isoformat()
    return obj
def change_to_json_2(clu):
    """Serialize *clu* with the custom SQLAlchemy-aware encoder (see new_alchemy_encoder)."""
    encoder_cls = new_alchemy_encoder()
    return json.dumps(clu, cls=encoder_cls, check_circular=False, default=date_handler)
def new_alchemy_encoder():
    """Return a fresh json.JSONEncoder subclass that can encode SQLAlchemy
    declarative instances as plain dicts.

    NOTE(review): _visited_objs is shared by every encode done with the
    returned class and is never cleared, so already-seen rows encode as
    null on later dumps — confirm each caller creates a new encoder class.
    """
    _visited_objs = []
    class AlchemyEncoder(json.JSONEncoder):
        def default(self, obj):
            # Only instances of declarative models get special handling.
            if isinstance(obj.__class__, DeclarativeMeta):
                # don't re-visit self
                if obj in _visited_objs:
                    return None
                _visited_objs.append(obj)
                # an SQLAlchemy class
                fields = {}
                for field in [x for x in dir(obj) if not x.startswith('_') and x != 'metadata']:
                    data = obj.__getattribute__(field)
                    try:
                        # Dates are stringified; datetime is a date subclass,
                        # so the second isinstance also matches datetimes.
                        if isinstance(data, datetime.date):
                            data=data.strftime('%Y-%m-%d %H:%M:%S')
                        if isinstance(data, datetime.datetime):
                            data=data.strftime('%Y-%m-%d %H:%M:%S')
                        json.dumps(data) # this will fail on non-encodable values, like other classes
                        fields[field] = data
                    except TypeError:
                        # Non-serializable attribute: keep the key, null the value.
                        fields[field] = None
                return fields
            # Anything else falls back to the stock encoder (raises TypeError).
            return json.JSONEncoder.default(self, obj)
    return AlchemyEncoder
class ModelBase(BASEOBJ):
    """Abstract declarative base for the concrete models: forces InnoDB storage."""
    __abstract__ = True
    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
def mysql_checkin(dbapi_connection, connection_record):
    """SQLAlchemy 'checkin' pool-event hook: trace the event, nothing else."""
    message = "DB checkin..."
    logging.debug(message)
def mysql_checkout(dbapi_con, con_record, con_proxy):
    """SQLAlchemy 'checkout' pool-event hook: ping MySQL before handing out
    a pooled connection.

    Runs `select 1`; if the driver raises an OperationalError whose first
    arg is a "server has gone away"-class code (2006/2013/2014/2045/2055),
    raises DisconnectionError so the pool discards and reconnects.  Any
    other OperationalError propagates unchanged.
    """
    try:
        logging.debug("mysql_checkout: Ping MYSQL...")
        dbapi_con.cursor().execute('select 1')
    # Fix: `except X, ex` is Python-2-only syntax; `as` works on 2.6+ and 3.x.
    except dbapi_con.OperationalError as ex:
        if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
            msg = 'Got mysql server has gone away: %s' % ex
            logging.warn(msg)
            raise sqlalchemy.exc.DisconnectionError(msg)
        else:
            raise
def is_db_connection_error(args):
    """Return True if error in connecting to db."""
    # NOTE(adam_g): This is currently MySQL specific and needs to be extended
    # to support Postgres and others.
    conn_err_codes = ('2002', '2003', '2006')
    return any(code in args for code in conn_err_codes)
def wrap_db_error(f):
    """Retry DB connection. Copied from nova and modified.

    Wraps *f* so that OperationalErrors carrying a MySQL connect-failure
    code (2002/2003/2006 — see is_db_connection_error) are retried up to
    10 times at 10-second intervals.  Other DB errors re-raise immediately.
    """
    def _wrap(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        # Fix: `except X, e` is Python-2-only syntax; `as` works on 2.6+/3.x.
        except sqlalchemy.exc.OperationalError as e:
            if not is_db_connection_error(e.args[0]):
                raise
            import time  # fix: `time` was used here but never imported in this module
            _MAX_RETRIES = 10
            _RETRY_INTERVAL = 10
            remaining_attempts = _MAX_RETRIES
            while True:
                logging.warning('SQL connection failed. %d attempts left.' %
                                remaining_attempts)
                remaining_attempts -= 1
                time.sleep(_RETRY_INTERVAL)
                try:
                    return f(*args, **kwargs)
                except sqlalchemy.exc.OperationalError as e:
                    if (remaining_attempts == 0 or
                            not is_db_connection_error(e.args[0])):
                        raise
                except sqlalchemy.exc.DBAPIError:
                    raise
        except sqlalchemy.exc.DBAPIError:
            raise
    # Fix: functions expose __name__ on both Python 2 and 3; func_name is 2-only.
    _wrap.__name__ = f.__name__
    return _wrap
def _create_engine(desc):
    """Build a SQLAlchemy engine for the URL *desc*, attach the MySQL pool
    ping/trace hooks, wrap connect() with retry logic, and verify
    connectivity once before returning it."""
    options = {
        'pool_recycle': 10,
        'pool_size': 20,
        'echo': False,
        'convert_unicode': True,
    }
    try:
        engine = create_engine(desc, **options)
        event.listen(engine, 'checkin', mysql_checkin)
        event.listen(engine, 'checkout', mysql_checkout)
        engine.connect = wrap_db_error(engine.connect)
        engine.connect()  # fail fast if the database is unreachable
    except Exception as e:
        logging.error("Error connect to db engine: %s" % e)
        raise
    else:
        return engine
# SECURITY NOTE(review): production database credentials are hard-coded in
# source control here -- move them to environment variables or a config file.
wanka_engine = _create_engine('mysql://collegedaily:Zhuoxing1989@rdskhmm9d27q0t1etbxsf.mysql.rds.aliyuncs.com:3306/collapp?charset=utf8')
#wanka_engine = _create_engine('mysql://root:yestem@localhost:3306/collapp?charset=utf8')
# Thread-local session factory shared module-wide; loaded objects stay usable
# after commit, and flush()/commit() must be called explicitly.
Session = scoped_session(sessionmaker(bind=wanka_engine,
                                      expire_on_commit=False,
                                      autoflush=False,
                                      autocommit=False))
--- FILE SEPARATOR ---
#coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, Integer
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER
from base_orm import BASEOBJ, ModelBase, Session
class ContentAuth(ModelBase):
    """ORM model for the `content_auth` user/account table."""
    _resource_name_ = 'auth'
    __tablename__ = 'content_auth'
    # NOTE(review): several Integer/DateTime columns declare default='' (an
    # empty string), and `area` (a VARCHAR) defaults to 0 -- these defaults
    # look wrong for their column types; confirm before relying on them.
    id = Column(Integer, nullable=True, primary_key = True)
    password = Column(VARCHAR(128, charset='utf8'), nullable=True, default='')
    last_login = Column(DateTime, nullable=True, default='')
    is_superuser = Column(Integer, nullable=True, default='')
    username = Column(VARCHAR(30, charset='utf8'), nullable=True, default='')
    last_name = Column(VARCHAR(30, charset='utf8'), nullable=True, default='')
    email = Column(VARCHAR(75, charset='utf8'), nullable=True, default='')
    phone = Column(VARCHAR(75, charset='utf8'), nullable=True, default='')
    is_staff = Column(Integer, nullable=True, default='')
    is_active = Column(Integer, nullable=True, default=1)
    sex = Column(Integer,nullable=True,default=0)
    date_joined = Column(DateTime, nullable=True, default='')
    subscribe = Column(Integer, nullable=True, default=False)
    head = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    sign = Column(VARCHAR(200, charset='utf8'), nullable=True, default='')
    area = Column(VARCHAR(20, charset='utf8'), nullable=True, default=0)
    nickname = Column(VARCHAR(20, charset='utf8'), nullable=True, default='')
    identity = Column(Integer, nullable=True, default=7)
    school = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    grade = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    regist_from = Column(Integer, nullable=True, default=0)
    sns_type = Column(Integer, nullable=True, default=0)
    sns_uid = Column(VARCHAR(32, charset='utf8'), nullable=True, default='')
    @classmethod
    def keys(cls):
        # All class-level attribute names (columns plus dunders); used by
        # callers for generic field introspection.
        return cls.__dict__.keys()
class ContentAuth_to(ContentAuth):
    # Same table/columns as ContentAuth; only the resource name differs so
    # serializers can label the receiving side of a user-to-user relation.
    _resource_name_ = 'to_user'
class ContentAuth_from(ContentAuth):
    # Same table/columns as ContentAuth; only the resource name differs so
    # serializers can label the originating side of a user-to-user relation.
    _resource_name_ = 'from_user'
if __name__ == "__main__":
    # Ad-hoc smoke test: fetch all user ids and iterate without using them.
    clu = Session.query(ContentAuth_from.id).all()
    #clu = Session.query(ContentAuth_from.id.label('from_id'), ContentAuth_from.username.label('from_user'), ContentAuth_to.username.label('to_user')).join(ContentAuth_to, ContentAuth_from.id == ContentAuth_to.id).all()
    for i in clu:
        pass
        # print i
| {
"imports": [
"/models/base_orm.py",
"/models/content_auth.py"
]
} |
0428402001/appserver | refs/heads/master | /redis_cache/redis_cache/flush_all_blog.py | # coding=utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import json
import os
cur_dir = os.path.abspath('.')
p_dir = os.path.dirname(cur_dir)
g_dir = os.path.dirname(p_dir)
sys.path.append(g_dir)
sys.path.append(p_dir)
from redis_con_pool import conn_pool_blog
import time
import subprocess
import tornado.httpclient
import tornado.web
import tornado.gen
import tornado.httpserver
import tornado.ioloop
import urlparse
from tornado.httpclient import HTTPError as clientHTTPError
from tornado.web import HTTPError
#from config import web_url, backend_netloc
from sqlalchemy import text
from sqlalchemy import and_ , or_, func, distinct
from sqlalchemy import event
from sqlalchemy.dialects.mysql import VARCHAR
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import scoped_session, sessionmaker, object_mapper
from sqlalchemy.types import TypeDecorator
import sqlalchemy
import redis
import logging
from models.content_blog import ContentBlog
from models.base_orm import Session
from models.base_orm import change_to_json_1
blog_r = redis.Redis(connection_pool = conn_pool_blog)
def flush_blog_cache(blog_id, blog):
    """Write every field of *blog* (a dict) into the Redis hash keyed by *blog_id*."""
    for field, value in blog.items():
        blog_r.hset(blog_id, field, value)
if __name__ == "__main__":
    image_base_url = 'http://o6y4guqxy.bkt.clouddn.com/media'
    # Pull every non-hot blog row, rewriting relative cover/thumb paths into
    # absolute CDN URLs inside the SQL query itself via func.replace().
    clu = Session.query(ContentBlog.id, ContentBlog.title, ContentBlog.author, func.replace(ContentBlog.cover, 'covers', '%s/covers'%image_base_url).label('cover'), func.replace(ContentBlog.thumb, 'thumb/', '%s/thumb/'%image_base_url).label('thumb'), ContentBlog.date, ContentBlog.category_id, ContentBlog.content, ContentBlog.abstract,ContentBlog.source).filter(ContentBlog.hotness == 0).all()
    # Round-trip through JSON to get plain dicts, then push each blog into Redis.
    clu_json = change_to_json_1(clu)
    clu_dict = json.loads(clu_json)
    for blog in clu_dict:
        blog_id = blog["id"]
        flush_blog_cache(blog_id, blog)
| #coding=utf-8
import datetime
from sqlalchemy import Column, BigInteger, Boolean, DateTime, INTEGER
from sqlalchemy.dialects.mysql import VARCHAR, TEXT, INTEGER, LONGTEXT
from base_orm import BASEOBJ
class ContentBlog(BASEOBJ):
    """ORM model for the `content_blog` article table."""
    #class ContentAuth(StandaloneResourceBase):
    _resource_name_ = 'blog'
    __tablename__ = 'content_blog'
    # NOTE(review): many INTEGER/DateTime columns declare default='' (an empty
    # string) -- the defaults look wrong for their column types; confirm
    # whether they are ever exercised before relying on them.
    id = Column(INTEGER, nullable=True, primary_key = True)
    title = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    cover = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    abstract = Column(VARCHAR(1024, charset='utf8'), nullable=True, default='')
    content = Column(LONGTEXT, nullable=True, default='')
    author = Column(VARCHAR(128, charset='utf8'), nullable=True, default='')
    editor = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    tag_id = Column(INTEGER, nullable=True, default='')
    category_id = Column(INTEGER, nullable=True, default='')
    date = Column(DateTime, nullable=True, default='')
    hotness = Column(INTEGER, nullable=True, default='')
    new = Column(INTEGER, nullable=True, default='')
    show_condition = Column(INTEGER, nullable=True, default='')
    praise_count = Column(INTEGER, nullable=True, default='')
    thumb = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    introduction = Column(VARCHAR(256, charset='utf8'), nullable=True, default='')
    index_pic = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    is_big = Column(INTEGER, nullable=True, default='')
    is_out = Column(INTEGER, nullable=True, default='')
    source = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    source_url = Column(VARCHAR(100, charset='utf8'), nullable=True, default='')
    sort = Column(INTEGER, nullable=True, default='')
    publish_time = Column(DateTime, nullable=True, default='')
--- FILE SEPARATOR ---
import logging
#from json import json_util
import json
from sqlalchemy import event
from sqlalchemy.dialects.mysql import VARCHAR
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import scoped_session, sessionmaker, Session, object_mapper
from sqlalchemy.types import TypeDecorator
import sqlalchemy
import datetime
BASEOBJ = declarative_base()
def change_to_json(session_result):
    """Serialize a list of SQLAlchemy result rows to a JSON string.

    Dates/datetimes become 'YYYY-MM-DD HH:MM:SS' strings. A single-row
    result is emitted as one JSON object instead of a one-element array.
    """
    skip = ('metadata', 'count', 'keys', 'index', 'conjugate')
    rows = []
    for row in session_result:
        record = {}
        for name in dir(row):
            if name.startswith('_') or name in skip:
                continue
            value = getattr(row, name)
            # datetime subclasses date; both get the same format string.
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.strftime('%Y-%m-%d %H:%M:%S')
            record[name] = value
        rows.append(record)
    if len(rows) == 1:
        rows = rows[0]
    return json.dumps(rows)
def change_to_json_1(session_result):
    """Serialize a list of SQLAlchemy result rows to a JSON array string.

    Like change_to_json, but dates/datetimes are formatted 'YYYY-MM-DD'
    (date only) and a single row is NOT collapsed to a bare object.
    """
    skip = ('metadata', 'count', 'keys', 'index', 'conjugate')
    rows = []
    for row in session_result:
        record = {}
        for name in dir(row):
            if name.startswith('_') or name in skip:
                continue
            value = getattr(row, name)
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.strftime('%Y-%m-%d')
            record[name] = value
        rows.append(record)
    return json.dumps(rows)
def date_handler(obj):
    """`json.dumps` default hook: serialize date-like objects via isoformat()."""
    if hasattr(obj, 'isoformat'):
        return obj.isoformat()
    return obj
def change_to_json_2(clu):
    """Serialize SQLAlchemy model objects to JSON via the custom encoder class."""
    encoder_cls = new_alchemy_encoder()
    return json.dumps(clu, cls=encoder_cls, check_circular=False,
                      default=date_handler)
def new_alchemy_encoder():
    """Build a JSONEncoder subclass that flattens SQLAlchemy model objects.

    The returned class shares one `seen` list across instances, so an object
    graph with back-references is serialized once (revisits become null).
    """
    seen = []

    class AlchemyEncoder(json.JSONEncoder):
        def default(self, obj):
            if not isinstance(obj.__class__, DeclarativeMeta):
                # Not a mapped model -- defer to the stock encoder.
                return json.JSONEncoder.default(self, obj)
            if obj in seen:
                return None  # break reference cycles
            seen.append(obj)
            fields = {}
            public_names = [n for n in dir(obj)
                            if not n.startswith('_') and n != 'metadata']
            for name in public_names:
                value = getattr(obj, name)
                # datetime is a subclass of date; one check covers both.
                if isinstance(value, (datetime.date, datetime.datetime)):
                    value = value.strftime('%Y-%m-%d %H:%M:%S')
                try:
                    json.dumps(value)  # probe: fails on non-encodable values
                except TypeError:
                    fields[name] = None
                else:
                    fields[name] = value
            return fields

    return AlchemyEncoder
class ModelBase(BASEOBJ):
    # Abstract declarative base for the project's models: not mapped to a
    # table itself, but makes every subclass use InnoDB on MySQL.
    __abstract__ = True
    __table_args__ = {'mysql_engine': 'InnoDB'}
    # NOTE(review): __table_initialized__ is not a SQLAlchemy-recognized
    # attribute -- presumably project-internal; confirm it is actually read.
    __table_initialized__ = False
def mysql_checkin(dbapi_connection, connection_record):
    """Engine 'checkin' hook: trace a connection being returned to the pool."""
    logging.debug("DB checkin...")
def mysql_checkout(dbapi_con, con_record, con_proxy):
    """Engine 'checkout' hook: ping MySQL so stale pooled connections are
    detected before being handed to the application.

    Raises sqlalchemy.exc.DisconnectionError for the "server has gone away"
    family of MySQL error codes, telling the pool to discard this connection.
    """
    try:
        logging.debug("mysql_checkout: Ping MYSQL...")
        dbapi_con.cursor().execute('select 1')
    # Fix: `except X, ex` is Python-2-only syntax (SyntaxError on Python 3).
    except dbapi_con.OperationalError as ex:
        # 2006/2013/2014/2045/2055: connection dropped by the server.
        if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
            msg = 'Got mysql server has gone away: %s' % ex
            logging.warning(msg)  # logging.warn is deprecated
            raise sqlalchemy.exc.DisconnectionError(msg)
        else:
            raise
def is_db_connection_error(args):
    """Return True if error in connecting to db."""
    # NOTE(adam_g): This is currently MySQL specific and needs to be extended
    # to support Postgres and others.
    conn_err_codes = ('2002', '2003', '2006')
    return any(args.find(code) != -1 for code in conn_err_codes)
def wrap_db_error(f):
    """Retry DB connection. Copied from nova and modified.

    Wraps *f* so that OperationalErrors that look like a lost connection are
    retried every _RETRY_INTERVAL seconds, up to _MAX_RETRIES times, before
    the error propagates.
    """
    # Fix: this module never imports `time` (see the import block above), so
    # the retry path raised NameError at runtime; import locally to keep the
    # fix self-contained.
    import time

    def _wrap(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        # Fix: `except X, e` is Python-2-only syntax.
        except sqlalchemy.exc.OperationalError as e:
            if not is_db_connection_error(e.args[0]):
                raise
            _MAX_RETRIES = 10
            _RETRY_INTERVAL = 10
            remaining_attempts = _MAX_RETRIES
            while True:
                logging.warning('SQL connection failed. %d attempts left.' %
                                remaining_attempts)
                remaining_attempts -= 1
                time.sleep(_RETRY_INTERVAL)
                try:
                    return f(*args, **kwargs)
                except sqlalchemy.exc.OperationalError as e:
                    if (remaining_attempts == 0 or
                            not is_db_connection_error(e.args[0])):
                        raise
                except sqlalchemy.exc.DBAPIError:
                    raise
        except sqlalchemy.exc.DBAPIError:
            raise
    # Fix: func_name is a Python-2-only attribute; __name__ works on 2 and 3.
    _wrap.__name__ = f.__name__
    return _wrap
def _create_engine(desc):
    """Build a SQLAlchemy engine for the URL *desc*, attach the MySQL pool
    ping/trace hooks, wrap connect() with retry logic, and verify
    connectivity once before returning it."""
    options = {
        'pool_recycle': 10,
        'pool_size': 20,
        'echo': False,
        'convert_unicode': True,
    }
    try:
        engine = create_engine(desc, **options)
        event.listen(engine, 'checkin', mysql_checkin)
        event.listen(engine, 'checkout', mysql_checkout)
        engine.connect = wrap_db_error(engine.connect)
        engine.connect()  # fail fast if the database is unreachable
    except Exception as e:
        logging.error("Error connect to db engine: %s" % e)
        raise
    else:
        return engine
# SECURITY NOTE(review): production database credentials are hard-coded in
# source control here -- move them to environment variables or a config file.
wanka_engine = _create_engine('mysql://collegedaily:Zhuoxing1989@rdskhmm9d27q0t1etbxsf.mysql.rds.aliyuncs.com:3306/collapp?charset=utf8')
#wanka_engine = _create_engine('mysql://root:yestem@localhost:3306/collapp?charset=utf8')
# Thread-local session factory shared module-wide; loaded objects stay usable
# after commit, and flush()/commit() must be called explicitly.
Session = scoped_session(sessionmaker(bind=wanka_engine,
                                      expire_on_commit=False,
                                      autoflush=False,
                                      autocommit=False))
| {
"imports": [
"/models/content_blog.py",
"/models/base_orm.py"
]
} |
0429charlie/Ormuco_Challenge | refs/heads/master | /Ching_Chuan_Wu_test.py | import os
from threading import Thread
import QuestionA
import QuestionB
# Test case for Question A
print("Testing Question A. Please refer to Readme file for explanation of each test case")
f = None
try:
    f = open('QuestionA.txt')
except FileNotFoundError:
    pass
if f:
    test_cases_number = int(f.readline().strip())
    for i in range(test_cases_number):
        first_line_start, first_line_end = f.readline().strip().split(" ")
        second_line_start, second_line_end = f.readline().strip().split(" ")
        expected = bool(int(f.readline().strip()))
        print("Testing with line (" +
              first_line_start + "," + first_line_end +
              ") and line (" +
              second_line_start + "," + second_line_end + ")")
        overlap = QuestionA.overlap((float(first_line_start),float(first_line_end)),(float(second_line_start),float(second_line_end)))
        # This assertion must pass
        assert overlap == expected
        if overlap:
            print("They overlap!")
        else:
            print("They don't overlap")
else:
    print("QuestionA.txt not provided and test skipped. Please provide the file in the format explained in Readme file")
# Fix: close() was called unconditionally; when QuestionA.txt is missing, f is
# None and f.close() raised AttributeError before the Question B tests ran.
if f:
    f.close()
print(" ")
print(" ")
# Test case for Question B
print("Testing Question B. Please refer to Readme file for explanation of each test case")
f = None
try:
    f = open('QuestionB.txt')
except FileNotFoundError:
    pass
if f:
    test_cases_number = int(f.readline().strip())
    for i in range(test_cases_number):
        a, b = f.readline().strip().split(" ")
        result = f.readline().strip()
        print("Testing with a = " + a + " and b = " + b)
        equality = QuestionB.greaterthan(a, b)
        # This assertion must pass
        assert equality == result
        if equality=="equal":
            print("a is equal to b")
        elif equality=="greater":
            print("a is greater than b")
        elif equality=="less":
            print("a is less than b")
        else:
            print("Wrong input format. Please enter string that start with number or -/+ sign and only include number after the first character for the first or second input or both")
else:
    print("QuestionB.txt not provided and test skipped. Please provide the file in the format explained in Readme file")
# Fix: same unconditional close() bug as above.
if f:
    f.close()
print(" ")
print(" ")
# Test case for Question C
print("Testing Question C")
# Helper to execute any python program given program name
def run_program(program_name):
    os.system('python ' + program_name)
t1 = Thread(target=run_program, args=('server1.py',))
t1.start()
t1.join()
| # Function that return true if first_line and second_line overlap. first_line and second_line is a tuple where the first element can be the start or end of the line and the second element is the other one
# (Float, Float) (Float, Float) -> Bool
def overlap(first_line, second_line):
    """Return True when the 1-D segments first_line and second_line overlap.

    Each argument is a (float, float) tuple whose endpoints may be given in
    either order.

    Bug fixed: the original only tested whether an endpoint of the second
    segment fell inside the first, so it returned False when the second
    segment fully contained the first (e.g. (2,3) vs (1,10)).
    """
    a_lo, a_hi = min(first_line), max(first_line)
    b_lo, b_hi = min(second_line), max(second_line)
    # Two closed intervals overlap iff each one starts before the other ends.
    return a_lo <= b_hi and b_lo <= a_hi
--- FILE SEPARATOR ---
def greaterthan(s1, s2):
    """Compare two numeric strings.

    Returns 'greater', 'less' or 'equal', or an error message naming the
    offending input when it cannot be parsed as a number.

    float() already understands a leading '+' or '-', so the original
    hand-rolled sign handling was redundant; it also wrongly accepted
    doubled signs such as '--5' (parsed as 5.0), which its own error
    message says should be invalid.
    """
    try:
        int1 = float(s1)
    except (TypeError, ValueError):
        return "Please enter an string that start with number or -/+ sign and only include number after first character for the first input"
    try:
        int2 = float(s2)
    except (TypeError, ValueError):
        return "Please enter an string that start with number or -/+ sign and only include number after first character for the second input"
    if (int1 == int2):
        return "equal"
    elif (int1 > int2):
        return "greater"
    else:
        return "less"
| {
"imports": [
"/QuestionA.py",
"/QuestionB.py"
]
} |
0429charlie/Ormuco_Challenge | refs/heads/master | /server1.py | import time
import os
from threading import Thread
import QuestionC
# Helper to execute any python program given program name
def run_program(program_name):
    """Run the named python script in a shell and block until it exits.

    NOTE(review): builds a shell command by concatenation -- fine for the
    hard-coded script names used here, but never pass untrusted input.
    """
    command = 'python ' + program_name
    os.system(command)
print(" ")
print("server1: creating cache sever")
# From the library, initialize our class to record the cache
# The initialization also make the cache system start listening on the server and allow for 10 clients connection (other cache servers in this case)
# NOTE(review): created with all defaults, so this instance listens on port 83.
server_cache = QuestionC.server_cache()
print("")
def very_expensive_read_from_disk():
    """Stand-in for a costly disk read; the print shows when the real work runs."""
    print("expensive function called from server1")
    return ("Pretend this string have to be read from disk"
            " and is very expensive to do so")
def get_string():
    """Return the expensive string, consulting the distributed cache first.

    Looks up the object id registered under "get_string" in the module-level
    server_cache; on a miss the expensive read runs and the result is cached.
    """
    lookup = None
    if "get_string" in server_cache.dic:
        lookup = server_cache.find_cache(server_cache.dic["get_string"])
    # If not cached
    # NOTE(review): `if not lookup` treats any falsy cached value ('' or 0)
    # as a miss; harmless for this non-empty string, but worth confirming.
    if not lookup:
        lookup = very_expensive_read_from_disk()
        # Update the cache and get the id of the object that this function return
        # Programmer using the library is responsible for putting in the right function name
        server_cache.add_cache(lookup, "get_string")
    return lookup
# Let set up a while loop which act as server running online and listening for input
# The input can be acquired from file. We can change the Server1.txt to specify the input. The valid input are:
# get_string - call the get_string function on the server
# wait - Have this server idle for 20 seconds
# exit - shut down this server
# everything else - prompt the message saying that it is an invalid input and proceed with listening for new input
# If want to test this server manually, please delete Server1.txt file. The program will ask you for the input
f = None
try:
    f = open('Server1.txt')
except FileNotFoundError:
    pass
# i counts get_string calls; the very first call also spawns server2.
i = 0
while True:
    txt = "Some random text"
    if f:
        print("server1: input file detected. Reading from Server1.txt")
        print(' ')
        txt = f.readline()
        txt = txt.strip()
    else:
        txt = input("server1: Please enter get_string, exit, or wait: ")
    print(" ")
    if txt == "exit":
        print("server1: Got exited. Shutting down server1")
        break
    elif txt == "get_string":
        get_string()
        i = i + 1
        if i == 1:
            print("server1: get_string function called\nYou called get_string function " +
                  str(i) +
                  " times now.\n" +
                  "You should see 'expensive function called from serverxxx' above")
            # First call: launch server2 in a child process and wait for it,
            # demonstrating that the cached value replicates across servers.
            t2 = Thread(target=run_program, args=('server2.py',))
            t2.start()
            t2.join()
            print(' ')
            time.sleep(2)
        else:
            print("server1: get_string function called\nYou called get_string function " +
                  str(i) +
                  " times now.\n" +
                  "You should NOT see 'expensive function called' above")
            print(' ')
            time.sleep(2)
    elif txt == "wait":
        print("server1: Got wait. Idling for 20 second")
        print(' ')
        time.sleep(20)
    else:
        print("Server1: Input not recognized. Please enter get_string, exit, or wait when prompted next\n" +
              "The valid inputs are:\n" +
              "get_string - call the get_string function on the server\n" +
              "wait - Have this server idle for 20 seconds\n" +
              "exit - shut down this server")
        print(' ')
        time.sleep(2)
if f:
    f.close()
# Stop the cache server
server_cache.stop_listen()
| # This module is fully created by Ching Chuan Wu from scratch.
# It is part of coding challenge for Ormuco Inc. Hiring process.
# © Copyright 2020, Ching Chuan Wu, All Rights Reserved
import socket, pickle
import threading
import time
from datetime import datetime
from threading import Thread
# Node for linked list working as queue here
# It has attribute linking it to the previous and the next node
# It also know if it is the head or tail of the linked list
# The content is what people save in cache. Type can varies depend on what program this library is running with
class Node:
    """Doubly-linked-list node for the LRU queue (head = most recent)."""
    def __init__(self):
        self.next = None       # neighbour toward the tail (older entries)
        self.pre = None        # neighbour toward the head (newer entries)
        self.head = True       # True while this node is the queue head
        self.tail = False      # True while this node is the queue tail
        self.content = None    # the cached object itself
        self.key = None        # id(content); key into server_cache.mapping
        self.fn_name = None    # function name this cache entry belongs to
        self.timestemp = None  # creation time, used for expiry checks
# The queue is consisted of nodes in the form of linked list
# The map uses id(node) as key and node itself as value
# The dic uses function name (provided by user) as key and id(node) as value
# The class also initialized socket binded on given port (83 by default) which can later be used to listen for incoming request
class server_cache:
    """LRU cache whose entries are replicated to peer servers over TCP.

    The queue is a doubly linked list of Node objects (head = most recently
    used). `mapping` goes from id(object) to its Node; `dic` goes from a
    user-supplied function name to that object id. A daemon thread accepts
    pickled ("command", payload) messages from peers to keep all caches
    consistent.

    Fix applied: del_node previously dereferenced None when removing the
    head node or the only node in the queue (its head/tail cases were not
    mutually exclusive); it now handles the four positions separately.
    """
    # Initialization. It create new thread that listening for incoming request, Initialize all fields, and send notification to other server to register itself.
    def __init__(self, max_size=5, othersevers=None, server_port=83, limit=10, expire=5):
        self.listening = True
        self.head = None
        self.tail = None
        self.max_size = max_size
        self.expire = expire
        # from object id to node holding that object
        self.mapping = {}
        self.allservers = othersevers
        # dictionary to hold mapping from function name to object id (including those from previously established server)
        self.dic = {}
        # Get the server ip
        hostname = socket.gethostname()
        ip = socket.gethostbyname(hostname)
        # socket for server
        self.s = socket.socket()
        # Record the ip and port
        self.ip = ip
        self.server_port = server_port
        # Lock used to ensure atomicity of queue updates
        self.queue_lock = threading.Lock()
        # Start listening
        self.listening_thread = Thread(target=self.start_listen, args=(limit,))
        self.listening_thread.setDaemon(True)
        self.listening_thread.start()
        # Let sleep for 5 second so that new thread can run first and start listening before the main thread proceed
        time.sleep(5)
        # Then notify all servers online
        if self.allservers:
            self.allservers = set(self.allservers)
            count = len(self.allservers)
            for host in self.allservers:
                count = count-1
                h, p = host
                ss = socket.socket()
                ss.connect((h, p))
                # The last peer gets "init_last" so exactly one peer replays
                # its queue back to us (see handle_request).
                if count==0:
                    ss.send(pickle.dumps(("init_last",(ip, server_port))))
                else:
                    ss.send(pickle.dumps(("init",(ip, server_port))))
                ss.shutdown(socket.SHUT_WR)
                ss.close()
        else:
            self.allservers = set()
        # Wait for a while so that self.dic is updated
        time.sleep(5)

    # Function to update the available servers
    # It record the new ip and port that the new server binds on. Intended only for private use for this class
    # String Int -> Void
    def add_server(self, newip, newport):
        self.queue_lock.acquire()
        try:
            self.allservers.add((newip, int(newport)))
        finally:
            self.queue_lock.release()

    # Function to delete the available servers
    # It delete the ip and port of the indicated server. Intended only for private use for this class
    # String Int -> Void
    def del_server(self, ip, port):
        self.queue_lock.acquire()
        try:
            self.allservers.remove((ip, int(port)))
        finally:
            self.queue_lock.release()

    # Function to add the object to the cache. The object can be in the cache already or not
    # It must be called every time the function that use this cache server in the user written program is called
    # Object String Bool -> Void
    def add_cache(self, inobject, fn_name, init=False):
        self.queue_lock.acquire()
        try:
            objectid = id(inobject)
            # Hit
            if objectid in self.mapping:
                # Get the node in the queue
                n = self.mapping[objectid]
                # If the node is not the head
                if not n.head:
                    # If, however, it is the tail
                    if n.tail:
                        # Update the new tail
                        n.pre.next = None
                        n.pre.tail = True
                        self.tail = n.pre
                    # If it is in the middle
                    else:
                        # Remove the node from the linked list
                        n.pre.next = n.next
                        n.next.pre = n.pre
                    # Update the current node to be the head
                    n.tail = False
                    n.pre = None
                    n.head = True
                    n.next = self.head
                    # Update the old head
                    self.head.pre = n
                    self.head.head = False
                    # Update the head in the cache class
                    self.head = n
            # Miss
            else:
                # If the cache is full
                if (len(self.mapping)>=self.max_size) and (self.max_size>0):
                    # Delete the tail and its entry in mapping
                    del self.mapping[self.tail.key]
                    new_tail = self.tail.pre
                    self.tail.tail = False
                    self.tail.head = False
                    self.tail.pre = None
                    self.tail.next = None
                    new_tail.tail = True
                    new_tail.next = None
                    self.tail = new_tail
                # Now, cache is guarantee to not be full. Let just add our new cache
                n = Node()
                n.content = inobject
                n.key = objectid
                n.fn_name = fn_name
                n.timestemp = datetime.now()
                # But first, let see if we have anything in the cache first
                # If we have empty cache
                if len(self.mapping)==0:
                    # Set node to also be the tail
                    n.tail = True
                    self.tail = n
                    self.head = n
                else:
                    # Otherwise, simply add the node as head
                    n.next = self.head
                    self.head.pre = n
                    self.head = n
                self.mapping[n.key] = n
                self.dic[fn_name] = n.key
                # update all server if not initializing or request from other servers so data are consistence
                if not init:
                    for host in self.allservers:
                        h, p = host
                        ss = socket.socket()
                        ss.connect((h, p))
                        ss.send(pickle.dumps(("data",(inobject,fn_name,True))))
                        ss.shutdown(socket.SHUT_WR)
                        ss.close()
        finally:
            self.queue_lock.release()

    # Function to delete the expired node
    def del_node(self, objectid):
        n = self.mapping[objectid]
        # Fix: the original's non-exclusive if-chain dereferenced None when n
        # was the head (n.pre is None) or the sole node; handle each position.
        if n.head and n.tail:
            # Only node in the queue: just clear the queue pointers.
            self.head = None
            self.tail = None
            n.head = False
            n.tail = False
        elif n.head:
            self.head = n.next
            self.head.pre = None
            n.next = None
            n.head = False
        elif n.tail:
            self.tail = n.pre
            self.tail.next = None
            n.pre = None
            n.tail = False
        else:
            n.pre.next = n.next
            n.next.pre = n.pre
            n.pre = None
            n.next = None
        # Also delete from self.mapping
        del self.mapping[objectid]

    # Function to find the requested object given the object id
    # Note that object id can be acquired from self.dic[key] where key is the function name
    # It must be called before every time the function that use this cache server in the user written program execute it expensive calculation
    # INT -> Object or None
    def find_cache(self, objectid):
        # Hit
        if objectid in self.mapping:
            # Check timestemp
            n = self.mapping[objectid]
            time_difference = datetime.now() - n.timestemp
            in_second = time_difference.total_seconds()
            in_minute = in_second / 60
            # Delete the node if expired (expire is in minutes)
            if in_minute >= self.expire:
                self.del_node(objectid)
                return None
            else:
                return self.mapping[objectid].content
        # Miss
        else:
            return None

    # Thread to handle incoming request
    # Intended only for private use for this class
    def handle_request(self, c):
        # Receive data and close the connection
        data = c.recv(4096)
        c.shutdown(socket.SHUT_WR)
        c.close()
        # See what request it is (all request are from other servers)
        pickle_data = pickle.loads(data)
        fn, body = pickle_data
        # New server created and server library need to be updated
        if fn == "init":
            ip, port = body
            self.add_server(ip, port)
        # New function registered in other servers or this server is newly created and cache need to be consistence with other servers
        elif fn == "data":
            obj, fn_name, init = body
            self.add_cache(obj, fn_name, init)
        # One of the servers destroyed and this server need to delete the destroyed server
        elif fn == "del":
            ip, port = body
            self.del_server(ip, port)
        # New server created and server library need to be updated. Also, cache of the new server need to be consistence with this server
        elif fn == "init_last":
            ip, port = body
            self.add_server(ip, port)
            # In addition, we send the queue back so that the newly created server have the same queue
            # It need to be done consecutively (no update before the queue is sent)
            self.queue_lock.acquire()
            current_node = self.head
            while current_node!=None:
                ss = socket.socket()
                ss.connect((ip, port))
                ss.send(pickle.dumps(("data", (current_node.content, current_node.fn_name, True))))
                ss.shutdown(socket.SHUT_WR)
                ss.close()
                current_node = current_node.next
            self.queue_lock.release()

    # Function to start listening the incoming request on new thread
    # Int -> Void
    def start_listen(self, limit):
        # Bind the socket to the port provided (port 83 if not provided)
        self.s.bind((self.ip, self.server_port))
        # Time out for listening so the self.listening flag is re-checked
        self.s.settimeout(5)
        self.s.listen(limit)
        print("start listening on " + self.ip + " port " + str(self.server_port))
        while True:
            if self.listening:
                try:
                    # Get the request and create new thread to handle each request
                    c, address = self.s.accept()
                    t = Thread(target=self.handle_request, args=(c,))
                    t.setDaemon(True)
                    t.start()
                    # Join the thread so that request is completed in order
                    t.join()
                except socket.timeout:
                    pass
            else:
                break
        self.s.close()

    # Function to call when we want to close the cache server
    def stop_listen(self):
        # Allow the thread listening the incoming request to stop
        self.listening = False
        # update all server to delete this server
        for host in self.allservers:
            h, p = host
            ss = socket.socket()
            ss.connect((h, p))
            ss.send(pickle.dumps(("del", (self.ip, self.server_port))))
            ss.shutdown(socket.SHUT_WR)
            ss.close()
        # Wait for the thread listening for the request to stop
        self.listening_thread.join()
        print("chache server closed. Please create a new one to start using the cache server again.")
| {
"imports": [
"/QuestionC.py"
]
} |
0429charlie/Ormuco_Challenge | refs/heads/master | /server2.py | import time
import QuestionC
print(" ")
print("server2: creating cache sever")
# From the library, initialize our class to record the cache
# The initialization also make the cache system start listening on the server and allow for 10 clients connection (other cache servers in this case)
# NOTE(review): the peer address ('10.0.0.134', 83) is hard-coded -- this only
# works on the author's network; confirm before deploying elsewhere.
server_cache = QuestionC.server_cache(5, [('10.0.0.134', 83)], 84, 10)
print(" ")
def very_expensive_read_from_disk():
    # Stand-in for a costly disk read; the print shows when the real work runs.
    print("expensive function called from server2")
    return "Pretend this string have to be read from disk and is very expensive to do so"
def get_string():
    # Return the expensive string, consulting the replicated cache first.
    lookup = None
    if "get_string" in server_cache.dic:
        lookup = server_cache.find_cache(server_cache.dic["get_string"])
    # If not cached
    if not lookup:
        lookup = very_expensive_read_from_disk()
        # Update the cache and get the id of the object that this function return
        # Programmer using the library is responsible for putting in the right function name
        server_cache.add_cache(lookup, "get_string")
    return lookup
# Let set up a while loop which act as server running online and listening for input
# The input can be acquired from file. We can change the Server2.txt to specify the input. The valid input are:
# get_string - call the get_string function on the server
# wait - Have this server idle for 20 seconds
# exit - shut down this server
# everything else - prompt the message saying that it is an invalid input and proceed with listening for new input
# If want to test this server manually, please delete Server1.txt file. The program will ask you for the input
f = None
try:
    f = open('Server2.txt')
except FileNotFoundError:
    pass
# i counts get_string calls; used only to pick the right status message.
i = 0
while True:
    txt = "Some random text"
    if f:
        print("server2: input file detected. Reading from Server2.txt")
        txt = f.readline()
        txt = txt.strip()
    else:
        txt = input("server2: Please enter get_string, exit or wait: ")
    print(" ")
    if txt == "exit":
        print("server2: Got exited. Shutting down server2")
        break
    elif txt == "get_string":
        get_string()
        i = i + 1
        if i == 1:
            print("server2: get_string function called\nYou called get_string function " +
                  str(i) +
                  " times now.\n" +
                  "However, You should NOT see 'expensive function called from serverxxx' above becuase it is called once on other server!!")
            print(' ')
            time.sleep(2)
        else:
            print("server2: get_string function called\nYou called get_string function " +
                  str(i) +
                  " times now.\n" +
                  "You should NOT see 'expensive function called from serverxxx' above")
            print(' ')
            time.sleep(2)
    elif txt == "wait":
        print("server2: Got wait. Idling for 20 second")
        print(' ')
        time.sleep(20)
    else:
        print("Server2: Input not recognized. Please enter get_string, exit, or wait when prompted next\n" +
              "The valid inputs are:\n" +
              "get_string - call the get_string function on the server\n" +
              "wait - Have this server idle for 20 seconds\n" +
              "exit - shut down this server")
        print(' ')
        time.sleep(2)
if f:
    f.close()
# Stop the cache server
server_cache.stop_listen()
| # This module is fully created by Ching Chuan Wu from scratch.
# It is part of coding challenge for Ormuco Inc. Hiring process.
# © Copyright 2020, Ching Chuan Wu, All Rights Reserved
import socket, pickle
import threading
import time
from datetime import datetime
from threading import Thread
# Node for linked list working as queue here
# It has attribute linking it to the previous and the next node
# It also know if it is the head or tail of the linked list
# The content is whatever the caller stores in the cache; its type can vary depending on the program this library is used with
class Node:
    """One entry of the doubly linked list backing the LRU queue.

    A node carries link pointers (``next``/``pre``), position flags
    (``head``/``tail``), the cached payload (``content``), its id-based
    ``key``, the originating function name, and an insertion timestamp.
    A freshly created node is flagged as the head of the list.
    """

    def __init__(self):
        # Link pointers: an unlinked node points nowhere.
        self.next = self.pre = None
        # A brand-new node is considered the head, never the tail.
        self.head, self.tail = True, False
        # Payload and bookkeeping fields, filled in by the cache on insert.
        self.content = self.key = self.fn_name = self.timestemp = None
# The queue consists of nodes forming a doubly linked list
# The mapping dict uses the cached object's id() as key and the Node holding it as value
# The dic dict uses the function name (provided by the user) as key and the cached object's id() as value
# The class also initializes a socket bound to the given port (83 by default), later used to listen for incoming requests
class server_cache:
    """LRU-style cache replicated across a set of peer servers.

    The queue is a doubly linked list of Node objects ordered from most
    recently used (head) to least recently used (tail).  ``mapping`` maps
    ``id(cached object)`` to the Node holding it; ``dic`` maps a function
    name to that id.  A TCP socket bound to ``server_port`` (83 by default)
    listens for pickled replication messages from peer servers.
    """

    def __init__(self, max_size=5, othersevers=None, server_port=83, limit=10, expire=5):
        """Initialize the cache, start the listener thread and register with peers.

        max_size    -- maximum number of cached entries (eviction only when > 0)
        othersevers -- iterable of (host, port) pairs of already-running peers
        server_port -- TCP port this server listens on
        limit       -- backlog passed to socket.listen()
        expire      -- entry lifetime in minutes (see find_cache)
        """
        self.listening = True
        self.head = None
        self.tail = None
        self.max_size = max_size
        self.expire = expire
        # From cached-object id to the node holding that object.
        self.mapping = {}
        self.allservers = othersevers
        # Function name -> object id (including entries replicated from peers).
        self.dic = {}
        # Resolve this machine's address so peers can reach us.
        hostname = socket.gethostname()
        ip = socket.gethostbyname(hostname)
        # Listening socket for incoming peer requests.
        self.s = socket.socket()
        self.ip = ip
        self.server_port = server_port
        # Lock guaranteeing atomic updates of the queue and the server set.
        self.queue_lock = threading.Lock()
        # Start listening on a daemon thread (setDaemon() is deprecated).
        self.listening_thread = Thread(target=self.start_listen, args=(limit,))
        self.listening_thread.daemon = True
        self.listening_thread.start()
        # Give the listener a head start before announcing ourselves.
        time.sleep(5)
        # Notify every peer that we exist; the last one contacted is asked
        # (via "init_last") to stream its queue back to us.
        if self.allservers:
            self.allservers = set(self.allservers)
            count = len(self.allservers)
            for host in self.allservers:
                count = count - 1
                h, p = host
                ss = socket.socket()
                ss.connect((h, p))
                if count == 0:
                    ss.send(pickle.dumps(("init_last", (ip, server_port))))
                else:
                    ss.send(pickle.dumps(("init", (ip, server_port))))
                ss.shutdown(socket.SHUT_WR)
                ss.close()
        else:
            self.allservers = set()
        # Wait a moment so that self.dic has been populated by peer replies.
        time.sleep(5)

    def add_server(self, newip, newport):
        """Register a newly announced peer (ip, port). Intended for private use."""
        with self.queue_lock:
            self.allservers.add((newip, int(newport)))

    def del_server(self, ip, port):
        """Remove a departed peer (ip, port). Intended for private use."""
        with self.queue_lock:
            self.allservers.remove((ip, int(port)))

    def add_cache(self, inobject, fn_name, init=False):
        """Insert *inobject* (or refresh it) as the most recently used entry.

        Must be called every time the cached function runs.  When *init* is
        False the update is also replicated to every peer server; peer-driven
        updates pass init=True to avoid replication loops.
        """
        with self.queue_lock:
            objectid = id(inobject)
            if objectid in self.mapping:
                # Hit: move the node to the head of the queue.
                n = self.mapping[objectid]
                if not n.head:
                    if n.tail:
                        # Node was the tail: its predecessor becomes the tail.
                        n.pre.next = None
                        n.pre.tail = True
                        self.tail = n.pre
                    else:
                        # Node was in the middle: splice it out.
                        n.pre.next = n.next
                        n.next.pre = n.pre
                    # Re-link the node as the new head.
                    n.tail = False
                    n.pre = None
                    n.head = True
                    n.next = self.head
                    self.head.pre = n
                    self.head.head = False
                    self.head = n
            else:
                # Miss: evict the least recently used entry if the cache is full.
                if (len(self.mapping) >= self.max_size) and (self.max_size > 0):
                    del self.mapping[self.tail.key]
                    new_tail = self.tail.pre
                    self.tail.tail = False
                    self.tail.head = False
                    self.tail.pre = None
                    self.tail.next = None
                    if new_tail is None:
                        # Bug fix: with max_size == 1 the evicted node is also
                        # the head, so there is no predecessor — empty the queue
                        # instead of dereferencing None.
                        self.head = None
                        self.tail = None
                    else:
                        new_tail.tail = True
                        new_tail.next = None
                        self.tail = new_tail
                # Build the new node and push it onto the head.
                n = Node()
                n.content = inobject
                n.key = objectid
                n.fn_name = fn_name
                n.timestemp = datetime.now()
                if len(self.mapping) == 0:
                    # Empty queue: the node is both head and tail.
                    n.tail = True
                    self.tail = n
                    self.head = n
                else:
                    n.next = self.head
                    self.head.pre = n
                    self.head = n
                self.mapping[n.key] = n
                self.dic[fn_name] = n.key
            # Replicate to peers unless this update itself came from a peer
            # (init=True), which would otherwise bounce back and forth forever.
            if not init:
                for host in self.allservers:
                    h, p = host
                    ss = socket.socket()
                    ss.connect((h, p))
                    ss.send(pickle.dumps(("data", (inobject, fn_name, True))))
                    ss.shutdown(socket.SHUT_WR)
                    ss.close()

    def del_node(self, objectid):
        """Unlink the node for *objectid* from the queue and drop it from mapping.

        Bug fix: the original fell through from the head case into the tail/middle
        cases, dereferencing None links whenever the deleted node was the head
        (or the only node), and it never set the head/tail flags on the nodes
        that took over those roles, corrupting later add_cache hits.
        """
        n = self.mapping[objectid]
        if n.head and n.tail:
            # Only node in the queue: the queue becomes empty.
            self.head = None
            self.tail = None
        elif n.head:
            # Deleting the head: its successor becomes the head.
            self.head = n.next
            self.head.pre = None
            self.head.head = True
        elif n.tail:
            # Deleting the tail: its predecessor becomes the tail.
            self.tail = n.pre
            self.tail.next = None
            self.tail.tail = True
        else:
            # Middle node: splice it out.
            n.pre.next = n.next
            n.next.pre = n.pre
        n.pre = None
        n.next = None
        # Also delete from self.mapping
        del self.mapping[objectid]

    def find_cache(self, objectid):
        """Return the cached object for *objectid*, or None on miss/expiry.

        The object id can be obtained from self.dic[fn_name].  Entries older
        than ``self.expire`` minutes are deleted and reported as a miss.
        Must be called before the expensive computation in the user program.
        """
        if objectid in self.mapping:
            # Hit: check whether the entry has expired.
            n = self.mapping[objectid]
            time_difference = datetime.now() - n.timestemp
            in_second = time_difference.total_seconds()
            in_minute = in_second / 60
            if in_minute >= self.expire:
                # Expired: drop the stale node and report a miss.
                self.del_node(objectid)
                return None
            else:
                return self.mapping[objectid].content
        else:
            # Miss
            return None

    def handle_request(self, c):
        """Handle one incoming peer connection *c*. Intended for private use.

        All requests are pickled (tag, body) tuples sent by other servers.
        NOTE(review): pickle.loads on network data is unsafe against untrusted
        peers — acceptable only on a trusted network.
        """
        # Receive the payload and close the connection.
        data = c.recv(4096)
        c.shutdown(socket.SHUT_WR)
        c.close()
        pickle_data = pickle.loads(data)
        fn, body = pickle_data
        if fn == "init":
            # A new server came online: register it.
            ip, port = body
            self.add_server(ip, port)
        elif fn == "data":
            # A peer registered a new cached value: mirror it locally.
            obj, fn_name, init = body
            self.add_cache(obj, fn_name, init)
        elif fn == "del":
            # A peer shut down: forget it.
            ip, port = body
            self.del_server(ip, port)
        elif fn == "init_last":
            # A new server came online AND asks us to stream our whole queue
            # back so it starts consistent with this server.
            ip, port = body
            self.add_server(ip, port)
            # Hold the lock so no update interleaves with the transfer.
            self.queue_lock.acquire()
            current_node = self.head
            while current_node is not None:
                ss = socket.socket()
                ss.connect((ip, port))
                ss.send(pickle.dumps(("data", (current_node.content, current_node.fn_name, True))))
                ss.shutdown(socket.SHUT_WR)
                ss.close()
                current_node = current_node.next
            self.queue_lock.release()

    def start_listen(self, limit):
        """Accept loop run on the listener thread.

        Binds the socket to the configured port and serves requests until
        ``self.listening`` is cleared by stop_listen().
        limit -- backlog passed to socket.listen()
        """
        self.s.bind((self.ip, self.server_port))
        # Timeout so the loop can periodically re-check self.listening.
        self.s.settimeout(5)
        self.s.listen(limit)
        print("start listening on " + self.ip + " port " + str(self.server_port))
        while True:
            if self.listening:
                try:
                    c, address = self.s.accept()
                    t = Thread(target=self.handle_request, args=(c,))
                    t.daemon = True
                    t.start()
                    # Join so requests are completed strictly in order.
                    t.join()
                except socket.timeout:
                    # Expected every 5s when idle; just re-check the flag.
                    pass
            else:
                break
        self.s.close()

    def stop_listen(self):
        """Shut the cache server down and deregister from all peers."""
        # Allow the listener loop to exit on its next timeout.
        self.listening = False
        # Tell every peer to forget this server.
        for host in self.allservers:
            h, p = host
            ss = socket.socket()
            ss.connect((h, p))
            ss.send(pickle.dumps(("del", (self.ip, self.server_port))))
            ss.shutdown(socket.SHUT_WR)
            ss.close()
        # Wait for the listener thread to finish.
        self.listening_thread.join()
        print("chache server closed. Please create a new one to start using the cache server again.")
| {
"imports": [
"/QuestionC.py"
]
} |
04fq/Pirate | refs/heads/main | /pirate.py | import requests
import urllib.request
from colorama import *
import random
import socket
import code_analysis as CA
from hashlib import *
from user_agent import generate_user_agent
import sys
import json
import threading
import time
from queue import Queue
import secrets
# Shared state for the tool options below.
# NOTE(review): queue/open_ports look like state for the port-scanner
# option (9), which is not visible in this chunk — confirm against that code.
queue = Queue()
open_ports = []
# One shared HTTP session reused by the requests-based options.
r = requests.session()
# ASCII-art banner.
print(Fore.LIGHTBLUE_EX + '''
██████╗ ██╗██████╗ █████╗ ████████╗███████╗ ██████╗ ██╗ ██╗
██╔══██╗██║██╔══██╗██╔══██╗╚══██╔══╝██╔════╝ ██╔══██╗╚██╗ ██╔╝
██████╔╝██║██████╔╝███████║ ██║ █████╗ ██████╔╝ ╚████╔╝
██╔═══╝ ██║██╔══██╗██╔══██║ ██║ ██╔══╝ ██╔═══╝ ╚██╔╝
██║ ██║██║ ██║██║ ██║ ██║ ███████╗██╗██║ ██║
╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝╚═╝ ╚═╝
''')
# Main menu.
print(Fore.MAGENTA + """
[01] Password Compiling
[02] Admin Panel BruteForce
[03] Hash Me
[04] Site Cloner | Soon .....
[05] Target IP Locator
[06] NumScan[@t8qu_] | Soon .....
[07] Instagram Osint | Info
[08] Scan Web Exploit[PHP][@at9w]
[09] Port Scanner
[10] Exit
""")
# Selected menu entry; kept as a string and compared against '1'..'10' below.
option = input(Fore.LIGHTYELLOW_EX + "[!] Select ? : ")
if option == '1':
    # Password compiling: generate random passwords and append them to Password.txt.
    print(Fore.BLUE + '''
[1] Random Passwords
[2] Victim Information | Soon ......
''')
    option1 = input(Fore.RED + "Select ? : ")
    if option1 == '1':
        chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ123456!£$%^&*(`)"
        password_len = int(input(Fore.RED + "[?] Password Length : "))
        password_count = int(input(Fore.RED + "[?] Password Number : "))
        # Bug fix: the original re-opened and closed Password.txt once per
        # password and reused the same loop variable for both loops.  Open the
        # file once and let the context manager close it reliably.
        # NOTE(review): random.choice is not cryptographically secure; the file
        # already imports secrets, so secrets.choice would be stronger.
        with open("Password.txt", "a") as f:
            for _ in range(0, password_count):
                password = "".join(random.choice(chars) for _ in range(0, password_len))
                f.write(password + '\n')
    elif option1 == '2':
        print(Fore.LIGHTBLUE_EX + "Soon")
elif option == '2':
url = str(input("[?] Enter Website : "))
if url.startswith('http'):
url = url
else:
url = 'http://' + url
if url.endswith('/'):
url = url
else:
url = url + "/"
# create a list of all possible admin panel paths
adminlist = [
'admin/',
'administrator/',
'admin1/',
'admin2/',
'admin3/',
'admin4/',
'admin5/',
'usuarios/',
'usuario/',
'administrator/',
'moderator/',
'webadmin/',
'adminarea/',
'bb-admin/',
'adminLogin/',
'admin_area/',
'panel-administracion/',
'instadmin/',
'memberadmin/',
'administratorlogin/',
'adm/',
'admin/account.php',
'admin/index.php',
'admin/login.php',
'admin/admin.php',
'admin/account.php',
'admin_area/admin.php',
'admin_area/login.php',
'siteadmin/login.php',
'siteadmin/index.php',
'siteadmin/login.html',
'admin/account.html',
'admin/index.html',
'admin/login.html',
'admin/admin.html',
'admin_area/index.php',
'bb-admin/index.php',
'bb-admin/login.php',
'bb-admin/admin.php',
'admin/home.php',
'admin_area/login.html',
'admin_area/index.html',
'admin/controlpanel.php',
'admin.php',
'admincp/index.asp',
'admincp/login.asp',
'admincp/index.html',
'admin/account.html',
'adminpanel.html',
'webadmin.html',
'webadmin/index.html',
'webadmin/admin.html',
'webadmin/login.html',
'admin/admin_login.html',
'admin_login.html',
'panel-administracion/login.html',
'admin/cp.php',
'cp.php',
'administrator/index.php',
'administrator/login.php',
'nsw/admin/login.php',
'webadmin/login.php',
'admin/admin_login.php',
'admin_login.php',
'administrator/account.php',
'administrator.php',
'admin_area/admin.html',
'pages/admin/admin-login.php',
'admin/admin-login.php',
'admin-login.php',
'bb-admin/index.html',
'bb-admin/login.html',
'acceso.php',
'bb-admin/admin.html',
'admin/home.html',
'login.php',
'modelsearch/login.php',
'moderator.php',
'moderator/login.php',
'moderator/admin.php',
'account.php',
'pages/admin/admin-login.html',
'admin/admin-login.html',
'admin-login.html',
'controlpanel.php',
'admincontrol.php',
'admin/adminLogin.html',
'adminLogin.html',
'admin/adminLogin.html',
'home.html',
'rcjakar/admin/login.php',
'adminarea/index.html',
'adminarea/admin.html',
'webadmin.php',
'webadmin/index.php',
'webadmin/admin.php',
'admin/controlpanel.html',
'admin.html',
'admin/cp.html',
'cp.html',
'adminpanel.php',
'moderator.html',
'administrator/index.html',
'administrator/login.html',
'user.html',
'administrator/account.html',
'administrator.html',
'login.html',
'modelsearch/login.html',
'moderator/login.html',
'adminarea/login.html',
'panel-administracion/index.html',
'panel-administracion/admin.html',
'modelsearch/index.html',
'modelsearch/admin.html',
'admincontrol/login.html',
'adm/index.html',
'adm.html',
'moderator/admin.html',
'user.php',
'account.html',
'controlpanel.html',
'admincontrol.html',
'panel-administracion/login.php',
'wp-login.php',
'adminLogin.php',
'admin/adminLogin.php',
'home.php',
'admin.php',
'adminarea/index.php',
'adminarea/admin.php',
'adminarea/login.php',
'panel-administracion/index.php',
'panel-administracion/admin.php',
'modelsearch/index.php',
'modelsearch/admin.php',
'admincontrol/login.php',
'adm/admloginuser.php',
'admloginuser.php',
'admin2.php',
'admin2/login.php',
'admin2/index.php',
'usuarios/login.php',
'adm/index.php',
'adm.php',
'affiliate.php',
'adm_auth.php',
'memberadmin.php',
'administratorlogin.php',
'admin/',
'administrator/',
'admin1/',
'admin2/',
'admin3/',
'admin4/',
'admin5/',
'moderator/',
'webadmin/',
'adminarea/',
'bb-admin/',
'adminLogin/',
'admin_area/',
'panel-administracion/',
'instadmin/',
'memberadmin/',
'administratorlogin/',
'adm/',
'account.asp',
'admin/account.asp',
'admin/index.asp',
'admin/login.asp',
'admin/admin.asp',
'admin_area/admin.asp',
'admin_area/login.asp',
'admin/account.html',
'admin/index.html',
'admin/login.html',
'admin/admin.html',
'admin_area/admin.html',
'admin_area/login.html',
'admin_area/index.html',
'admin_area/index.asp',
'bb-admin/index.asp',
'bb-admin/login.asp',
'bb-admin/admin.asp',
'bb-admin/index.html',
'bb-admin/login.html',
'bb-admin/admin.html',
'admin/home.html',
'admin/controlpanel.html',
'admin.html',
'admin/cp.html',
'cp.html',
'administrator/index.html',
'administrator/login.html',
'administrator/account.html',
'administrator.html',
'login.html',
'modelsearch/login.html',
'moderator.html',
'moderator/login.html',
'moderator/admin.html',
'account.html',
'controlpanel.html',
'admincontrol.html',
'admin_login.html',
'panel-administracion/login.html',
'admin/home.asp',
'admin/controlpanel.asp',
'admin.asp',
'pages/admin/admin-login.asp',
'admin/admin-login.asp',
'admin-login.asp',
'admin/cp.asp',
'cp.asp',
'administrator/account.asp',
'administrator.asp',
'acceso.asp',
'login.asp',
'modelsearch/login.asp',
'moderator.asp',
'moderator/login.asp',
'administrator/login.asp',
'moderator/admin.asp',
'controlpanel.asp',
'admin/account.html',
'adminpanel.html',
'webadmin.html',
'pages/admin/admin-login.html',
'admin/admin-login.html',
'webadmin/index.html',
'webadmin/admin.html',
'webadmin/login.html',
'user.asp',
'user.html',
'admincp/index.asp',
'admincp/login.asp',
'admincp/index.html',
'admin/adminLogin.html',
'adminLogin.html',
'admin/adminLogin.html',
'home.html',
'adminarea/index.html',
'adminarea/admin.html',
'adminarea/login.html',
'panel-administracion/index.html',
'panel-administracion/admin.html',
'modelsearch/index.html',
'modelsearch/admin.html',
'admin/admin_login.html',
'admincontrol/login.html',
'adm/index.html',
'adm.html',
'admincontrol.asp',
'admin/account.asp',
'adminpanel.asp',
'webadmin.asp',
'webadmin/index.asp',
'webadmin/admin.asp',
'webadmin/login.asp',
'admin/admin_login.asp',
'admin_login.asp',
'panel-administracion/login.asp',
'adminLogin.asp',
'admin/adminLogin.asp',
'home.asp',
'admin.asp',
'adminarea/index.asp',
'adminarea/admin.asp',
'adminarea/login.asp',
'admin-login.html',
'panel-administracion/index.asp',
'dvwa',
'panel-administracion/admin.asp',
'modelsearch/index.asp',
'modelsearch/admin.asp',
'administrator/index.asp',
'admincontrol/login.asp',
'adm/admloginuser.asp',
'admloginuser.asp',
'admin2.asp',
'admin2/login.asp',
'admin2/index.asp',
'adm/index.asp',
'adm.asp',
'affiliate.asp',
'adm_auth.asp',
'memberadmin.asp',
'administratorlogin.asp',
'siteadmin/login.asp',
'siteadmin/index.asp',
'siteadmin/login.html',
'admin/',
'administrator/',
'admin1/',
'admin2/',
'admin3/',
'admin4/',
'admin5/',
'usuarios/',
'usuario/',
'administrator/',
'moderator/',
'webadmin/',
'adminarea/',
'bb-admin/',
'adminLogin/',
'admin_area/',
'panel-administracion/',
'instadmin/',
'memberadmin/',
'administratorlogin/',
'adm/',
'admin/account.cfm',
'admin/index.cfm',
'admin/login.cfm',
'admin/admin.cfm',
'admin/account.cfm',
'admin_area/admin.cfm',
'admin_area/login.cfm',
'siteadmin/login.cfm',
'siteadmin/index.cfm',
'siteadmin/login.html',
'admin/account.html',
'admin/index.html',
'admin/login.html',
'admin/admin.html',
'admin_area/index.cfm',
'bb-admin/index.cfm',
'bb-admin/login.cfm',
'bb-admin/admin.cfm',
'admin/home.cfm',
'admin_area/login.html',
'admin_area/index.html',
'admin/controlpanel.cfm',
'admin.cfm',
'admincp/index.asp',
'admincp/login.asp',
'admincp/index.html',
'admin/account.html',
'adminpanel.html',
'webadmin.html',
'webadmin/index.html',
'webadmin/admin.html',
'webadmin/login.html',
'admin/admin_login.html',
'admin_login.html',
'panel-administracion/login.html',
'admin/cp.cfm',
'cp.cfm',
'administrator/index.cfm',
'administrator/login.cfm',
'nsw/admin/login.cfm',
'webadmin/login.cfm',
'admin/admin_login.cfm',
'admin_login.cfm',
'administrator/account.cfm',
'administrator.cfm',
'admin_area/admin.html',
'pages/admin/admin-login.cfm',
'admin/admin-login.cfm',
'admin-login.cfm',
'bb-admin/index.html',
'bb-admin/login.html',
'bb-admin/admin.html',
'admin/home.html',
'login.cfm',
'modelsearch/login.cfm',
'moderator.cfm',
'moderator/login.cfm',
'moderator/admin.cfm',
'account.cfm',
'pages/admin/admin-login.html',
'admin/admin-login.html',
'admin-login.html',
'controlpanel.cfm',
'admincontrol.cfm',
'admin/adminLogin.html',
'acceso.cfm',
'adminLogin.html',
'admin/adminLogin.html',
'home.html',
'rcjakar/admin/login.cfm',
'adminarea/index.html',
'adminarea/admin.html',
'webadmin.cfm',
'webadmin/index.cfm',
'webadmin/admin.cfm',
'admin/controlpanel.html',
'admin.html',
'admin/cp.html',
'cp.html',
'adminpanel.cfm',
'moderator.html',
'administrator/index.html',
'administrator/login.html',
'user.html',
'administrator/account.html',
'administrator.html',
'login.html',
'modelsearch/login.html',
'moderator/login.html',
'adminarea/login.html',
'panel-administracion/index.html',
'panel-administracion/admin.html',
'modelsearch/index.html',
'modelsearch/admin.html',
'admincontrol/login.html',
'adm/index.html',
'adm.html',
'moderator/admin.html',
'user.cfm',
'account.html',
'controlpanel.html',
'admincontrol.html',
'panel-administracion/login.cfm',
'wp-login.cfm',
'adminLogin.cfm',
'admin/adminLogin.cfm',
'home.cfm',
'admin.cfm',
'adminarea/index.cfm',
'adminarea/admin.cfm',
'adminarea/login.cfm',
'panel-administracion/index.cfm',
'panel-administracion/admin.cfm',
'modelsearch/index.cfm',
'modelsearch/admin.cfm',
'admincontrol/login.cfm',
'adm/admloginuser.cfm',
'admloginuser.cfm',
'admin2.cfm',
'admin2/login.cfm',
'admin2/index.cfm',
'usuarios/login.cfm',
'adm/index.cfm',
'adm.cfm',
'affiliate.cfm',
'adm_auth.cfm',
'memberadmin.cfm',
'administratorlogin.cfm',
'admin/',
'administrator/',
'admin1/',
'admin2/',
'admin3/',
'admin4/',
'admin5/',
'usuarios/',
'usuario/',
'administrator/',
'moderator/',
'webadmin/',
'adminarea/',
'bb-admin/',
'adminLogin/',
'admin_area/',
'panel-administracion/',
'instadmin/',
'memberadmin/',
'administratorlogin/',
'adm/',
'admin/account.js',
'admin/index.js',
'admin/login.js',
'admin/admin.js',
'admin/account.js',
'admin_area/admin.js',
'admin_area/login.js',
'siteadmin/login.js',
'siteadmin/index.js',
'siteadmin/login.html',
'admin/account.html',
'admin/index.html',
'admin/login.html',
'admin/admin.html',
'admin_area/index.js',
'bb-admin/index.js',
'bb-admin/login.js',
'bb-admin/admin.js',
'admin/home.js',
'admin_area/login.html',
'admin_area/index.html',
'admin/controlpanel.js',
'admin.js',
'admincp/index.asp',
'admincp/login.asp',
'admincp/index.html',
'admin/account.html',
'adminpanel.html',
'webadmin.html',
'webadmin/index.html',
'webadmin/admin.html',
'webadmin/login.html',
'admin/admin_login.html',
'admin_login.html',
'panel-administracion/login.html',
'admin/cp.js',
'cp.js',
'administrator/index.js',
'administrator/login.js',
'nsw/admin/login.js',
'webadmin/login.js',
'admin/admin_login.js',
'admin_login.js',
'administrator/account.js',
'administrator.js',
'admin_area/admin.html',
'pages/admin/admin-login.js',
'admin/admin-login.js',
'admin-login.js',
'bb-admin/index.html',
'bb-admin/login.html',
'bb-admin/admin.html',
'admin/home.html',
'login.js',
'modelsearch/login.js',
'moderator.js',
'moderator/login.js',
'moderator/admin.js',
'account.js',
'pages/admin/admin-login.html',
'admin/admin-login.html',
'admin-login.html',
'controlpanel.js',
'admincontrol.js',
'admin/adminLogin.html',
'adminLogin.html',
'admin/adminLogin.html',
'home.html',
'rcjakar/admin/login.js',
'adminarea/index.html',
'adminarea/admin.html',
'webadmin.js',
'webadmin/index.js',
'acceso.js',
'webadmin/admin.js',
'admin/controlpanel.html',
'admin.html',
'admin/cp.html',
'cp.html',
'adminpanel.js',
'moderator.html',
'administrator/index.html',
'administrator/login.html',
'user.html',
'administrator/account.html',
'administrator.html',
'login.html',
'modelsearch/login.html',
'moderator/login.html',
'adminarea/login.html',
'panel-administracion/index.html',
'panel-administracion/admin.html',
'modelsearch/index.html',
'modelsearch/admin.html',
'admincontrol/login.html',
'adm/index.html',
'adm.html',
'moderator/admin.html',
'user.js',
'account.html',
'controlpanel.html',
'admincontrol.html',
'panel-administracion/login.js',
'wp-login.js',
'adminLogin.js',
'admin/adminLogin.js',
'home.js',
'admin.js',
'adminarea/index.js',
'adminarea/admin.js',
'adminarea/login.js',
'panel-administracion/index.js',
'panel-administracion/admin.js',
'modelsearch/index.js',
'modelsearch/admin.js',
'admincontrol/login.js',
'adm/admloginuser.js',
'admloginuser.js',
'admin2.js',
'admin2/login.js',
'admin2/index.js',
'usuarios/login.js',
'adm/index.js',
'adm.js',
'affiliate.js',
'adm_auth.js',
'memberadmin.js',
'administratorlogin.js',
'admin/',
'administrator/',
'admin1/',
'admin2/',
'admin3/',
'admin4/',
'admin5/',
'usuarios/',
'usuario/',
'administrator/',
'moderator/',
'webadmin/',
'adminarea/',
'bb-admin/',
'adminLogin/',
'admin_area/',
'panel-administracion/',
'instadmin/',
'memberadmin/',
'administratorlogin/',
'adm/',
'admin/account.cgi',
'admin/index.cgi',
'admin/login.cgi',
'admin/admin.cgi',
'admin/account.cgi',
'admin_area/admin.cgi',
'admin_area/login.cgi',
'siteadmin/login.cgi',
'siteadmin/index.cgi',
'siteadmin/login.html',
'admin/account.html',
'phpmyadmin',
'admin/index.html',
'admin/login.html',
'admin/admin.html',
'admin_area/index.cgi',
'bb-admin/index.cgi',
'bb-admin/login.cgi',
'bb-admin/admin.cgi',
'admin/home.cgi',
'admin_area/login.html',
'admin_area/index.html',
'admin/controlpanel.cgi',
'admin.cgi',
'admincp/index.asp',
'admincp/login.asp',
'admincp/index.html',
'admin/account.html',
'adminpanel.html',
'webadmin.html',
'webadmin/index.html',
'webadmin/admin.html',
'webadmin/login.html',
'admin/admin_login.html',
'admin_login.html',
'panel-administracion/login.html',
'admin/cp.cgi',
'cp.cgi',
'administrator/index.cgi',
'administrator/login.cgi',
'nsw/admin/login.cgi',
'webadmin/login.cgi',
'admin/admin_login.cgi',
'admin_login.cgi',
'administrator/account.cgi',
'administrator.cgi',
'admin_area/admin.html',
'pages/admin/admin-login.cgi',
'admin/admin-login.cgi',
'admin-login.cgi',
'bb-admin/index.html',
'bb-admin/login.html',
'bb-admin/admin.html',
'admin/home.html',
'login.cgi',
'modelsearch/login.cgi',
'moderator.cgi',
'moderator/login.cgi',
'moderator/admin.cgi',
'account.cgi',
'pages/admin/admin-login.html',
'admin/admin-login.html',
'admin-login.html',
'controlpanel.cgi',
'admincontrol.cgi',
'admin/adminLogin.html',
'adminLogin.html',
'admin/adminLogin.html',
'home.html',
'rcjakar/admin/login.cgi',
'adminarea/index.html',
'adminarea/admin.html',
'webadmin.cgi',
'webadmin/index.cgi',
'acceso.cgi',
'webadmin/admin.cgi',
'admin/controlpanel.html',
'admin.html',
'admin/cp.html',
'cp.html',
'adminpanel.cgi',
'moderator.html',
'administrator/index.html',
'administrator/login.html',
'user.html',
'administrator/account.html',
'administrator.html',
'login.html',
'modelsearch/login.html',
'moderator/login.html',
'adminarea/login.html',
'panel-administracion/index.html',
'panel-administracion/admin.html',
'modelsearch/index.html',
'modelsearch/admin.html',
'admincontrol/login.html',
'adm/index.html',
'adm.html',
'moderator/admin.html',
'user.cgi',
'account.html',
'controlpanel.html',
'admincontrol.html',
'panel-administracion/login.cgi',
'wp-login.cgi',
'adminLogin.cgi',
'admin/adminLogin.cgi',
'home.cgi',
'admin.cgi',
'adminarea/index.cgi',
'adminarea/admin.cgi',
'adminarea/login.cgi',
'panel-administracion/index.cgi',
'panel-administracion/admin.cgi',
'modelsearch/index.cgi',
'modelsearch/admin.cgi',
'admincontrol/login.cgi',
'adm/admloginuser.cgi',
'admloginuser.cgi',
'admin2.cgi',
'admin2/login.cgi',
'admin2/index.cgi',
'usuarios/login.cgi',
'adm/index.cgi',
'adm.cgi',
'affiliate.cgi',
'adm_auth.cgi',
'memberadmin.cgi',
'administratorlogin.cgi',
'admin/',
'administrator/',
'admin1/',
'admin2/',
'admin3/',
'admin4/',
'admin5/',
'usuarios/',
'usuario/',
'administrator/',
'moderator/',
'webadmin/',
'adminarea/',
'bb-admin/',
'adminLogin/',
'admin_area/',
'panel-administracion/',
'instadmin/',
'memberadmin/',
'administratorlogin/',
'adm/',
'siteadmin/login.html',
'admin/account.html',
'admin/index.html',
'admin/login.html',
'admin/admin.html',
'admin_area/login.html',
'admin_area/index.html',
'admincp/index.asp',
'admincp/login.asp',
'admincp/index.html',
'admin/account.html',
'adminpanel.html',
'webadmin.html',
'webadmin/index.html',
'webadmin/admin.html',
'webadmin/login.html',
'admin/admin_login.html',
'admin_login.html',
'panel-administracion/login.html',
'admin_area/admin.html',
'bb-admin/index.html',
'bb-admin/login.html',
'bb-admin/admin.html',
'admin/home.html',
'pages/admin/admin-login.html',
'admin/admin-login.html',
'admin-login.html',
'admin/adminLogin.html',
'adminLogin.html',
'admin/adminLogin.html',
'home.html',
'adminarea/index.html',
'adminarea/admin.html',
'admin/controlpanel.html',
'admin.html',
'admin/cp.html',
'cp.html',
'moderator.html',
'administrator/index.html',
'administrator/login.html',
'user.html',
'administrator/account.html',
'administrator.html',
'login.html',
'modelsearch/login.html',
'moderator/login.html',
'adminarea/login.html',
'panel-administracion/index.html',
'panel-administracion/admin.html',
'modelsearch/index.html',
'modelsearch/admin.html',
'admincontrol/login.html',
'adm/index.html',
'adm.html',
'moderator/admin.html',
'account.html',
'controlpanel.html',
'admincontrol.html',
'wordpress',
'btslab'
]
print(Fore.BLUE + "[*] Trying : \n\n")
# Probe each candidate admin path and report those that respond.
for i in adminlist:
    url_try = url + i
    try:
        openurl = urllib.request.urlopen(url_try)
    except Exception:
        # HTTP error / connection failure: this path does not exist.
        print(Fore.LIGHTRED_EX + "[!] Nothing Found : " + url_try)
    else:
        print("")
        # Bug fix: the original used Fore.GREEn (AttributeError) and
        # concatenated the HTTPResponse object to a str (TypeError); both
        # crashes were swallowed by the bare except, so every hit was
        # reported as "Nothing Found".  Print the probed URL instead.
        print(Fore.GREEN + "[+] Found Something : " + url_try)
        print("")
elif option == '3':
texter = input(Fore.LIGHTYELLOW_EX +'[#] Enter Text To Encrypt It : ')
print(Fore.LIGHTBLUE_EX +"""
[01] SHA1
[02] MD5
[03] SHA384
[04] SHA3_512
[05] SHA224
[06] SHA512
[07] SHA256""")
option1000 = input(Fore.LIGHTMAGENTA_EX +'[!] Select ? : ')
if option1000 == '1':
sha1er = sha1(texter.encode()).hexdigest()
print(sha1er)
if option1000 == '2':
scripter = md5(texter.encode()).hexdigest()
print(scripter)
if option1000 == '3':
scripter = sha384(texter.encode()).hexdigest()
print(scripter)
if option1000 == '4':
scripter = sha3_512(texter.encode()).hexdigest()
print(scripter)
if option1000 == '5':
scripter = sha224(texter.encode()).hexdigest()
print(scripter)
if option1000 == '6':
scripter = sha512(texter.encode()).hexdigest()
print(scripter)
if option1000 == '7':
scripter = sha256(texter.encode()).hexdigest()
print(scripter)
elif option == '4':
    # Site cloner: not implemented yet; announce and quit.
    print('Soon')
    exit()
elif option == '5':
tarip = input(Fore.LIGHTGREEN_EX +"[$] Target IP : ")
IP = requests.get("https://get.geojs.io/v1/ip.json")
loc = requests.get("https://get.geojs.io/v1/ip/geo/" + tarip + ".json")
loc2 = loc.json()
print(Fore.LIGHTBLUE_EX +"[!] Target Country : " + loc2["country"])
print(Fore.LIGHTBLUE_EX +"[!] Target Time Zone : " + loc2["timezone"])
print(Fore.LIGHTBLUE_EX +"[!] Target Longitude Location : " + loc2["longitude"])
print(Fore.LIGHTBLUE_EX +"[!] Target Latitude Location : " + loc2["latitude"])
elif option == '6':
    # NumScan: not implemented yet; announce and quit.
    print(Fore.LIGHTBLUE_EX + 'Soon')
    exit()
elif option == '7':
print(Fore.CYAN +"[!] Get Instagram Info using ?")
print(Fore.LIGHTCYAN_EX +"[1] Username [2] sessionId")
maop = input(Fore.LIGHTYELLOW_EX +"[?] Select ? : ")
if maop == '1':
username = input(Fore.LIGHTGREEN_EX +"[!] Enter Username : ")
head = {
'HOST': "www.instagram.com",
'KeepAlive': 'True',
'user-agent': generate_user_agent(),
'Cookie': 'ig_did=B229D588-7641-44E0-8035-467C7BEC3282; ig_nrcb=1; mid=YGWX3wALAAFc7t0VgtJpoYSw1rEc; csrftoken=wn59xw8BMRIceNQOBzjqjhoTPhTXENBB; ds_user_id=11675767944; sessionid=11675767944%3Ao1hjlJiSGKw9vR%3A21; shbid=19303; shbts=1617412509.6840706; rur=FTW',
'Accept': "*/*",
'ContentType': "application/x-www-form-urlencoded",
"X-Requested-With": "XMLHttpRequest",
"X-IG-App-ID": "936619743392459",
"X-Instagram-AJAX": "missing",
"X-CSRFToken": "wn59xw8BMRIceNQOBzjqjhoTPhTXENBB",
"Accept-Language": "en-US,en;q=0.9"
}
cookie = secrets.token_hex(8) * 2
url_id = f'https://www.instagram.com/{username}/?__a=1'
req_id = r.get(url_id, headers=head).json()
bio = str(req_id['graphql']['user']['biography'])
url = str(req_id['graphql']['user']['external_url'])
nam = str(req_id['graphql']['user']['full_name'])
idd = str(req_id['graphql']['user']['id'])
isp = str(req_id['graphql']['user']['is_private'])
isv = str(req_id['graphql']['user']['is_verified'])
pro = str(req_id['graphql']['user']['profile_pic_url'])
print(Fore.LIGHTBLUE_EX +'''[$] Simple Info For {}
[$] Name / {}
[$] Url / {}
[$] Bio / {}
[$] ID / {}
[$] Private ? / {}
[$] Verified ? / {}
[$] Profile Picture / {}
'''.format(username,nam,url,bio,idd,isp,isv,pro))
elif maop == '2':
sessionid = input(Fore.LIGHTBLUE_EX +'[!] Enter Session ID : ')
hydrated = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': generate_user_agent(),
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/accounts/access_tool/former_usernames?__a=1'
response = requests.request("GET", url, data=data, headers=hydrated, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Users Since Account Created : " + str(info["data"]))
hydrated2 = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/accounts/access_tool/former_phones?__a=1'
response = requests.request("GET", url, data=data, headers=hydrated2, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Phone Numbers Since Account Created : " + str(info["data"]))
hydrated3 = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/session/login_activity/?__a=1'
response = requests.request("GET", url, data=data, headers=hydrated3, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Login Activity Since Account Created : " + str(info["data"]))
hydrated4 = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/accounts/access_tool/former_emails?__a=1'
response = requests.request("GET", url, data=data, headers=hydrated4, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Emails Activity Since Account Created : " + str(info["data"]))
hydrated5 = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/emails/emails_sent/?__a=1'
response = requests.request("GET", url, data=data, headers=hydrated5, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Last Activity : " + str(info["data"]))
headers = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/accounts/access_tool/accounts_you_blocked?__a=1'
response = requests.request("GET", url, data=data, headers=headers, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + '[-] Blocked Accounts : ' + str(info["data"]))
headers = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/accounts/access_tool/former_bio_texts?__a=1'
response = requests.request("GET", url, data=data, headers=headers, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Bio Since Account Created : " + str(info["data"]))
headers = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/accounts/access_tool/search_history?__a=1'
response = requests.request("GET", url, data=data, headers=headers, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Search History : " + str(info["data"]))
headers = {'Host': 'www.instagram.com',
'Content-Type': 'application/json; charset=utf-8',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Accept-Language': 'ar,en-US;q=0.7,en;q=0.3',
'Connection': 'close',
'X-IG-App-ID': '936619743392459',
'X-Requested-With': 'XMLHttpRequest',
'X-IG-WWW-Claim': 'hmac.AR0uQ3YRnOII5ROjBT7pKkMy1bjATWrSkfZCgwbaUBjNv-rw',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0',
'Referer': 'https://www.instagram.com/accounts/access_tool/former_phones',
'Cookie': 'ig_cb=1; ig_did=69205DC3-D787-47E5-B250-1C4A7ADC3A05; csrftoken=HKmRQ2ZwuqCZjpMycM3xOVIjDUBo5HWd; mid=XoSg-AAEAAHRshuq4BlxladvlcbE; datr=HmBKXyPTk86RJpmkaUQ7eM5w; urlgen="{\"51.36.8.205\": 43766}:1kLnkz:ZHs58RZnu4USDtTqolZcEXDJp7s"; rur=ATN; ds_user_id=37466401585',
'DNT': '1'
}
cookies = {'sessionid': sessionid}
data = ''
url = 'https://www.instagram.com/download/request/?__a=1'
response = requests.request("GET", url, data=data, headers=headers, cookies=cookies)
info = json.loads(response.text)
print(Fore.LIGHTBLUE_EX + "[-] Verified Email : " + str(info["email_hint"]))
elif option == '8':
def read_all_file():
    """Return the full text of the PHP file named by ``sys.argv[1]``.

    Uses a context manager so the handle is closed deterministically
    (the original opened the file and never closed it).
    """
    with open(sys.argv[1], 'r', encoding="utf8") as fp:
        return fp.read()
# information about php code
CA.info.GET_parameters(read_all_file())
CA.info.POST_parameters(read_all_file())
# scan SQL injection
file = open(sys.argv[1], 'r', encoding="utf8")
line_number = 0
print(Fore.RED +'vulnerability found:\n')
CA.search.SQLi(read_all_file())
for line in file:
CA.search.command_injection(line.strip('\n'), line_number)
CA.search.LFI(line.strip('\n'), line_number)
CA.search.XSS(line.strip('\n'), line_number)
CA.search.SSRF(line.strip('\n'), line_number)
CA.search.open_redirect(line.strip('\n'), line_number)
CA.search.ID(line.strip('\n'), line_number)
line_number = line_number + 1
##### Port Scanner For osama.a.m.y
elif option == '9':
tarurl = input(Fore.LIGHTBLUE_EX +'[%] Enter Url (Ex : website.com): ')
if tarurl == "":
print(Fore.RED +'[!] No Url')
time.sleep(3)
exit()
else:
pass
nport = input(Fore.LIGHTMAGENTA_EX +'[?] Range Of Ports : '+ Style.RESET_ALL)
if nport == "":
nport = 1024
else:
nport = nport
def getIP():
    """Resolve the module-global ``tarurl`` into an IP stored in ``hostIP``.

    On DNS failure the user is prompted once: answering 't' lets them enter
    a new URL (resolved immediately); any other answer exits the program.
    Returns the resolved IP string.
    """
    global tarurl
    global hostIP
    try:
        hostIP = socket.gethostbyname(tarurl)
    except socket.gaierror:
        # DNS lookup failed — offer one interactive retry.
        print('[!]')
        what = input(
            Fore.CYAN + '[#] [t] Try Again : ' + Style.RESET_ALL)
        if what == 't':
            tarurl = input(
                Fore.CYAN + '[!] Enter Url (Ex : example.com): ' + Style.RESET_ALL)
            if tarurl == "":
                print(Fore.RED +'[!] No Url')
                time.sleep(3)
                exit()
            else:
                tarurl = tarurl
                # NOTE(review): a second failure here raises uncaught
                # socket.gaierror — confirm that is acceptable.
                hostIP = socket.gethostbyname(tarurl)
        elif what == "":
            time.sleep(3)
            exit()
        else:
            time.sleep(3)
            exit()
    return (hostIP)
getIP()
def portscan(port):
    """Return True if TCP ``port`` on the module-global ``hostIP`` accepts a connection.

    Fixes a descriptor leak: the original created one socket per probed port
    and never closed it. The bare ``except`` is also narrowed to ``OSError``
    so Ctrl-C is no longer swallowed mid-scan.
    """
    global hostIP
    try:
        # Context manager guarantees the socket is closed on every path.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.connect((hostIP, port))
            return True
    except OSError:
        # Refused / unreachable / reset: treat the port as closed.
        return False
def fill(port_list):
    """Load every port number from *port_list* into the shared work queue."""
    for candidate in port_list:
        queue.put(candidate)
def scan():
    """Worker loop: drain the shared ``queue``, probing each port via portscan.

    Open ports are printed and appended to the shared ``open_ports`` list.
    NOTE(review): ``empty()`` followed by ``get()`` is not atomic across
    threads — a worker can block/raise if another thread empties the queue
    between the two calls; confirm this race is acceptable here.
    """
    while not queue.empty():
        port = queue.get()
        if portscan(port):
            print(Fore.GREEN + f'[-] Port {port} is Opened' + Style.RESET_ALL)
            open_ports.append(port)
port_list = range(1, int(nport))
fill(port_list)
thread_list = []
for t in range(int(100)):
thread = threading.Thread(target=scan)
thread_list.append(thread)
for thread in thread_list:
thread.start()
for thread in thread_list:
thread.join()
elif option == '10':
print(Fore.LIGHTRED_EX +'Goodbye Baby ')
exit()
| import re,colors
class search():
    """Namespace of scanner routines that flag risky constructs in PHP source.

    The methods are deliberately written without ``self`` and invoked unbound
    (e.g. ``search.XSS(line, n)``): the first parameter is the PHP source
    line (or whole file for ``SQLi``), not an instance. On a match the
    finding is printed via the project ``colors`` helper; every method
    returns None either way.

    All patterns are now raw strings — the originals relied on invalid
    escape sequences such as "\\(" inside plain strings, which emit
    SyntaxWarning/DeprecationWarning on modern Python.
    """

    def command_injection(line, line_number):
        """Flag shell-execution / eval sinks on one source line."""
        regex = r"system\(.*\)|passthru\(.*\)|escapeshellcmd\(.*\)|pcntl_exec\(.*\)|exec\(.*\)|eval\(.*\)|assert\(.*\)"
        if re.findall(regex, line):
            print(colors.color.red(f'found execute functions on line: {line_number+1}'))
            colors.color.reset()

    def LFI(line, line_number):
        """Flag local-file-inclusion: include/require/readfile fed from superglobals."""
        regex = r"include \$_.*|include_once \$_.*|require \$_.*|require_once \$_.*|readfile \$_.*"
        if re.findall(regex, line):
            print(colors.color.red(f'found LFI on line {line_number+1}'))
            colors.color.reset()

    def XSS(line, line_number):
        """Flag unescaped echo of variables / superglobals / PHP_SELF."""
        regex = r"echo \$.*|echo \$_[A-Z]{2,6}\[.*\]|echo \$\_SERVER\[\'PHP_SELF\'\]"
        if re.findall(regex, line):
            print(colors.color.red(f'found XSS on line {line_number+1}'))
            colors.color.reset()

    def SSRF(line, line_number):
        """Flag fetch/open primitives whose target comes from a variable."""
        regex = r"file_get_contents\(\$.*\)|fopen\(\$.*\)|fread\(\$.*\)|fsockopen\(\$.*\)|curl_exec\(\$.*\)"
        if re.findall(regex, line):
            print(colors.color.red(f'found SSRF on line {line_number+1}'))
            colors.color.reset()

    def open_redirect(line, line_number):
        """Flag header() calls driven by a variable (Location redirects)."""
        regex = r"header\(\$.*\)"
        if re.findall(regex, line):
            print(colors.color.red(f'found open redirect on line {line_number+1}'))
            colors.color.reset()

    def ID(line, line_number):
        """Flag insecure-deserialization entry points.

        Bug fix: the original pattern used ``\(\.*\)`` (literal dots only)
        plus a stray trailing space after the first alternative, so real
        calls like ``unserialize($x)`` were never matched. The pattern now
        matches any argument list, like the sibling scanners.
        """
        regex = r"unserialize\(.*\)|__wakeup\(.*\)|__destruct\(.*\)|__toString\(.*\)"
        if re.findall(regex, line):
            print(colors.color.red(f'found Insecure Deserialization on line {line_number+1}'))
            colors.color.reset()

    def SQLi(file):
        """Flag query-execution calls anywhere in the whole file's text."""
        regex = r"query\(.*\)|mysql_query\(.*\)|get_results\(.*\)|get_var\(.*\)"
        black_list = re.findall(regex, file)
        if black_list:
            for vuln in black_list:
                print(colors.color.red(f'found SQLi :{vuln}'))
            colors.color.reset()
class info():
    """Namespace of reporters that list request parameters used by a PHP file.

    Like ``search``, methods are called unbound with the whole file text as
    the first argument; matches are printed via the project ``colors``
    helper and None is returned. Patterns are raw strings (the originals
    used invalid escape sequences such as "\\$" in plain strings).
    """

    def GET_parameters(file):
        """Print the name of every $_GET[...] parameter found in *file*."""
        regex = r"\$_GET\[.*\]"
        black_list = re.findall(regex, file)
        if black_list:
            for vuln in black_list:
                # Slice strips the "$_GET['" prefix and "']" suffix.
                print(colors.color.green(f'GET parameter :{vuln[7:-2]}'))
            colors.color.reset()

    def POST_parameters(file):
        """Print the name of every $_POST[...] parameter found in *file*."""
        regex = r"\$_POST\[.*\]"
        black_list = re.findall(regex, file)
        if black_list:
            for vuln in black_list:
                # Slice strips the "$_POST['" prefix and "']" suffix.
                print(colors.color.green(f'POST parameter :{vuln[8:-2]}'))
            colors.color.reset()
| {
"imports": [
"/code_analysis.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/catalogo_general/urls.py | from django.urls import path
from .views import catalogo_General ,AuthorDetail
from . import views
from django.contrib.auth.decorators import login_required, permission_required
from django.views.decorators.http import require_POST
urlpatterns = [
path('general/',catalogo_General.as_view(),name="general"),
path('<pk>/detalles/',AuthorDetail.as_view(), name='detalles')
] | from django.shortcuts import render
from django.views.generic import View
from django.db import transaction
from django.urls import reverse_lazy
from django.shortcuts import redirect
from django.contrib.auth.models import User
from catalogo_personal.models import Videojuegos
from django.shortcuts import get_list_or_404, get_object_or_404
from django.views.generic import CreateView, DetailView, ListView
from django.views.generic.edit import FormMixin
from .forms import ComentariosForm
from django.contrib import messages
# Create your views here.
'''def catalogo_General(request):
catalogo_General = Videojuegos.objects.all()
return render(request,"catalogo_general/general.html",{'catalogo_General':catalogo_General})'''
class catalogo_General(ListView):
    """Public catalogue: lists every Videojuegos record for all users."""
    template_name = 'catalogo_general/general.html'
    model = Videojuegos
    # Name the template uses to iterate the game list.
    context_object_name = 'catalogo_General'
class detalles_videojuegos(DetailView):
    """Read-only detail page for one Videojuegos record."""
    template_name = 'catalogo_general/detalles.html'
    model = Videojuegos
    # Template variable name for the selected game.
    context_object_name = 'videojuego'
class AuthorDetail(FormMixin, DetailView):
    """Detail page for one Videojuegos record with an inline comment form.

    GET renders the game plus an empty ComentariosForm; POST stores the
    comment (authenticated users only) and redirects to the catalogue.
    """
    template_name = 'catalogo_general/detalles.html'
    model = Videojuegos
    form_class = ComentariosForm
    context_object_name = 'videojuego'

    def get_success_url(self):
        from django.urls import reverse
        # Bug fix: the original reversed on the class attribute
        # ``Videojuegos.id`` (an unbound model field, not a pk), which can
        # never build a valid URL. Use the object being displayed.
        return reverse('detalles', kwargs={'pk': self.object.pk})

    def get_context_data(self, **kwargs):
        """Expose the comment form to the template alongside the game."""
        context = super().get_context_data(**kwargs)
        context['form'] = self.get_form()
        return context

    def post(self, request, pk, *args, **kwargs):
        """Persist a comment for game ``pk`` from the logged-in user."""
        form = self.get_form()
        id_videojuego = get_object_or_404(Videojuegos, pk=pk)
        # (Removed the redundant request.method check — Django only
        # dispatches POST requests here — and a leftover debug print.)
        if form.is_valid():
            if request.user.is_authenticated:
                comment = form.save(commit=False)
                comment.owner = request.user
                comment.videojuego = id_videojuego
                comment.save()
                return redirect(reverse_lazy('catalogo'))
            # Anonymous users are bounced back to the catalogue unchanged.
            return redirect(reverse_lazy('catalogo'))
        return self.form_invalid(form)

    def form_valid(self, form):
        # Delegates to FormMixin, which redirects to get_success_url().
        return super().form_valid(form)
'''
class ComentariosViews(View):
def get(self, request):
comentarios_form = ComentariosForm()
return render(request, 'catalogo_personal/videojuego.html', {
'comentarios_form':comentarios_form,
})
@transaction.atomic
def post(self,request,pk):
id_videojuego = get_object_or_404(Videojuegos, pk=pk)
if request.method == 'POST':
comenarios_form= ComentariosForm(request.POST,request.FILES)
if comentarios_form.is_valid():
new_comentario = comenarios_form.save(commit=False)
new_comentario.owner = request.user
new_comentario.videojuego = id_videojuego
new_videojuego.save()
return redirect('detalles',id_videojuego.pk)
return render(request, 'catalogo_general/detalles.html', {
'comentarios_form':comentarios_form,
})
'''
def ComentariosViews(request, pk):
    """Create a comment on the Videojuegos identified by ``pk``.

    On a valid POST the comment is stored with the current user as owner
    and the browser is redirected to the detail page; otherwise the form
    (bound with errors on a failed POST, empty on GET) is re-rendered.
    """
    post = get_object_or_404(Videojuegos, pk=pk)
    if request.method == "POST":
        # Fix: only bind the form on POST — the original bound it with an
        # empty request.POST on GET as well, then discarded it.
        comentario_form = ComentariosForm(request.POST)
        if comentario_form.is_valid():
            comment = comentario_form.save(commit=False)
            # NOTE(review): assumes an authenticated user; an anonymous
            # request would fail saving owner — confirm the URL is
            # login-guarded.
            comment.owner = request.user
            comment.videojuego = post
            comment.save()
            return redirect('detalles', pk=post.pk)
    else:
        comentario_form = ComentariosForm()
    return render(request, 'catalogo_general/detalles.html', {'form': comentario_form})
| {
"imports": [
"/BussinessGamess/catalogo_general/views.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/catalogo_personal/views.py | from django.shortcuts import render
from django.views.generic import View
from .forms import VideojuegosForm
from django.db import transaction
from django.urls import reverse_lazy
from django.shortcuts import redirect
from django.contrib.auth.models import User
from .models import Videojuegos
class VideojuegosViews(View):
    """Create-game view: renders the upload form and handles submission."""

    def get(self, request):
        # Fresh, unbound form for the creation page.
        videojuegos_form = VideojuegosForm()
        return render(request, 'catalogo_personal/videojuego.html', {
            'videojuegos_form':videojuegos_form,
        })

    @transaction.atomic
    def post(self,request):
        # NOTE(review): the method check is redundant inside post();
        # Django only dispatches POST requests here.
        if request.method == 'POST':
            # request.FILES carries the cover-image upload.
            videojuegos_form= VideojuegosForm(request.POST,request.FILES)
            if videojuegos_form.is_valid():
                new_videojuego =videojuegos_form.save(commit=False)
                # Stamp the logged-in user as owner before persisting.
                new_videojuego.owner = request.user
                new_videojuego.save()
                return redirect(reverse_lazy('catalogo') +'?register')
        # Invalid input: re-render with the bound form so errors display.
        return render(request, 'catalogo_personal/videojuego.html', {
            'videojuegos_form':videojuegos_form,
        })
def catalogo(request):
    """Render the personal catalogue: games owned by the current user."""
    catalogo = Videojuegos.objects.filter(owner_id=request.user.id)
    # Removed a leftover debug print of request.user.
    return render(request,"catalogo_personal/catalogo.html",{'catalogo':catalogo,'title': "Catalogo personal"})
| from django import forms
from .models import Videojuegos
class VideojuegosForm(forms.ModelForm):
    """ModelForm for creating/editing a Videojuegos entry (incl. image upload)."""
    class Meta:
        model = Videojuegos
        # owner/created are set by the view, not the user.
        fields = ('title','FechaLanzamiento','categoria','duracionPromedio','sinopsis','image')
--- FILE SEPARATOR ---
from django.db import models
from django.contrib.auth.models import User
from django.db import models
from django.forms import ModelForm
# Create your models here.
class Videojuegos(models.Model):
    """A video game entry in a user's personal catalogue."""
    # User who catalogued the game; deleting the user deletes their entries.
    owner = models.ForeignKey(User,on_delete=models.CASCADE)
    title = models.CharField(max_length = 200,verbose_name="Titulo")
    sinopsis = models.TextField(default='',verbose_name="Ingrese una breve sinopsis del Videojuego")
    categoria = models.CharField(max_length = 200,verbose_name="Categoria")
    FechaLanzamiento= models.DateField( verbose_name="Año de lanzamiento")
    # Cover image, stored under MEDIA_ROOT/image_juegos.
    image = models.ImageField(verbose_name="Imagen", upload_to ="image_juegos", height_field=None, width_field=None)
    duracionPromedio = models.CharField(max_length = 200,verbose_name="Duracion Promedio del videojuego")
    created = models.DateTimeField(auto_now_add=True,verbose_name="Fecha de creación",blank=True,null=True)

    class Meta:
        verbose_name = "videojuego"
        verbose_name_plural = "catalogopersonal"
        # Oldest entries first.
        ordering = ["created"]

    def get_absolute_url(self):
        """Return the public detail-page URL for this game."""
        from django.urls import reverse
        return reverse('detalles', kwargs={'pk': self.id})

    def __str__(self):
        return self.title
| {
"imports": [
"/BussinessGamess/catalogo_personal/forms.py",
"/BussinessGamess/catalogo_personal/models.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/catalogo_general/forms.py | from django import forms
from .models import Comentarios
class ComentariosForm(forms.ModelForm):
class Meta:
model = Comentarios
fields = ('text',) | from django.db import models
from django.contrib.auth.models import User
from django.db import models
from django.forms import ModelForm
from catalogo_personal.models import Videojuegos
# Create your models here.
class Comentarios (models.Model):
    """A user comment attached to one Videojuegos entry."""
    # Comment author; deleting the user removes their comments.
    owner = models.ForeignKey(User,on_delete=models.CASCADE)
    # Lazy string reference avoids a circular import with catalogo_personal.
    videojuego = models.ForeignKey('catalogo_personal.Videojuegos',on_delete=models.CASCADE)
    text = models.TextField()
    created = models.DateTimeField(auto_now_add=True,verbose_name="Fecha de creación",blank=True,null=True)

    class Meta:
        verbose_name = "comentario"
        verbose_name_plural = "comentarios"
        # Oldest comments first.
        ordering = ["created"]

    def __str__(self):
        return self.text

    def get_absolute_url(self):
        """Return the URL for this comment.

        Bug fix: ``reverse`` was never imported at module level, so this
        method always raised NameError. Import it locally, matching the
        style used by the Videojuegos model.
        """
        from django.urls import reverse
        return reverse('Videojuegos:comentarios', kwargs={'pk': self.pk})
| {
"imports": [
"/BussinessGamess/catalogo_general/models.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/core/urls.py | from django.urls import path
from .views import HomePageView ,BasePageView,BienvenidaPageView
from django.contrib.auth.decorators import login_required, permission_required
#manejo urls de cada vista
urlpatterns = [
path('', HomePageView.as_view(), name="home"),
path('', BasePageView.as_view(), name="base"),
path('bienvenida/', login_required(BienvenidaPageView.as_view()),name="bienvenida"),
] | from django.shortcuts import render
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
# Create your views here.
#vista del home
class HomePageView(TemplateView):
    """Landing page rendered from core/home.html."""
    template_name = "core/home.html"

    # Typo fix: the catch-all was spelled **kwaegs; it is internal to the
    # method, so renaming it to the conventional **kwargs is safe.
    def get(self,request,*args,**kwargs):
        return render(request,self.template_name,{'title': "BusinessGames"})
#vista base del proyecto se vera en cada cambio de template
class BasePageView(TemplateView):
    """Base layout view rendered from core/base.html (shared chrome)."""
    template_name = "core/base.html"

    # Typo fix: **kwaegs -> **kwargs (internal name only).
    def get(self,request,*args,**kwargs):
        return render(request,self.template_name,{'title': "BusinessGames"})
#vista de bienvenida cuando el usuario se registea con exito
class BienvenidaPageView(TemplateView):
template_name = "core/bienvenida.html"
@method_decorator(login_required)
def get(self,request,*args,**kwaegs):
return render(request,self.template_name,{'title': "Bienvenida"}) | {
"imports": [
"/BussinessGamess/core/views.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/registration/urls.py | from django.urls import path
from .views import SignUpView
from . import views
urlpatterns = [
path('signup/',SignUpView.as_view(), name="registro")
] |
from django.shortcuts import render
from django import forms
from django.contrib.auth.models import User
from .forms import ProfileForm,UserForm
from django.db import transaction
from django.urls import reverse_lazy
from django.views.generic import View
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.contrib.auth import authenticate, login
class SignUpView(View):
    """User registration: renders and processes the signup + profile forms."""

    def get(self, request):
        # Fresh, unbound forms for the registration page.
        user_form = UserForm()
        profile_form = ProfileForm()
        return render(request, 'registration/signup.html', {
            'user_form': user_form,
            'profile_form': profile_form
        })

    @transaction.atomic
    def post(self,request):
        # NOTE(review): redundant — Django only dispatches POST here; and
        # on a non-POST request user_form below would be unbound.
        if request.method == 'POST':
            user_form = UserForm(request.POST)
            profile_form = ProfileForm(request.POST)
            if user_form.is_valid() and profile_form.is_valid():
                new_user = user_form.save()
                profile = profile_form.save(commit=False)
                # Link the profile to the just-created user if unset.
                if profile.user_id is None:
                    profile.user_id = new_user.id
                # NOTE(review): user_form.save() re-saves the user a second
                # time — appears redundant; profile_form.save() persists
                # the linked profile. Confirm before removing.
                user_form.save()
                profile_form.save()
                # Authenticate with the raw credentials and log the user in.
                username = request.POST['username']
                password = request.POST['password1']
                user = authenticate(request, username=username, password=password)
                if user is not None:
                    login(request, user)
                return redirect(reverse_lazy('core:bienvenida') +'?register')
        # Invalid input: re-render with bound forms so errors display.
        return render(request, 'registration/signup.html', {
            'user_form': user_form,
            'profile_form': profile_form
        })
| {
"imports": [
"/BussinessGamess/registration/views.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/catalogo_personal/urls.py | from django.urls import path
from .views import VideojuegosViews, catalogo
from . import views
from django.contrib.auth.decorators import login_required, permission_required
urlpatterns = [
path('videojuegos/', login_required(VideojuegosViews.as_view()),name="videojuego"),
path('catalogo/',login_required(views.catalogo),name="catalogo")
] | from django.shortcuts import render
from django.views.generic import View
from .forms import VideojuegosForm
from django.db import transaction
from django.urls import reverse_lazy
from django.shortcuts import redirect
from django.contrib.auth.models import User
from .models import Videojuegos
class VideojuegosViews(View):
    """Create-game view: renders the upload form and handles submission."""

    def get(self, request):
        # Fresh, unbound form for the creation page.
        videojuegos_form = VideojuegosForm()
        return render(request, 'catalogo_personal/videojuego.html', {
            'videojuegos_form':videojuegos_form,
        })

    @transaction.atomic
    def post(self,request):
        # NOTE(review): the method check is redundant inside post();
        # Django only dispatches POST requests here.
        if request.method == 'POST':
            # request.FILES carries the cover-image upload.
            videojuegos_form= VideojuegosForm(request.POST,request.FILES)
            if videojuegos_form.is_valid():
                new_videojuego =videojuegos_form.save(commit=False)
                # Stamp the logged-in user as owner before persisting.
                new_videojuego.owner = request.user
                new_videojuego.save()
                return redirect(reverse_lazy('catalogo') +'?register')
        # Invalid input: re-render with the bound form so errors display.
        return render(request, 'catalogo_personal/videojuego.html', {
            'videojuegos_form':videojuegos_form,
        })
def catalogo(request):
    """Render the personal catalogue: games owned by the current user."""
    catalogo = Videojuegos.objects.filter(owner_id=request.user.id)
    # Removed a leftover debug print of request.user.
    return render(request,"catalogo_personal/catalogo.html",{'catalogo':catalogo,'title': "Catalogo personal"})
| {
"imports": [
"/BussinessGamess/catalogo_personal/views.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/catalogo_general/views.py | from django.shortcuts import render
from django.views.generic import View
from django.db import transaction
from django.urls import reverse_lazy
from django.shortcuts import redirect
from django.contrib.auth.models import User
from catalogo_personal.models import Videojuegos
from django.shortcuts import get_list_or_404, get_object_or_404
from django.views.generic import CreateView, DetailView, ListView
from django.views.generic.edit import FormMixin
from .forms import ComentariosForm
from django.contrib import messages
# Create your views here.
'''def catalogo_General(request):
catalogo_General = Videojuegos.objects.all()
return render(request,"catalogo_general/general.html",{'catalogo_General':catalogo_General})'''
class catalogo_General(ListView):
    """Public catalogue: lists every Videojuegos record for all users."""
    template_name = 'catalogo_general/general.html'
    model = Videojuegos
    # Name the template uses to iterate the game list.
    context_object_name = 'catalogo_General'
class detalles_videojuegos(DetailView):
    """Read-only detail page for one Videojuegos record."""
    template_name = 'catalogo_general/detalles.html'
    model = Videojuegos
    # Template variable name for the selected game.
    context_object_name = 'videojuego'
class AuthorDetail(FormMixin, DetailView):
    """Detail page for one Videojuegos record with an inline comment form.

    GET renders the game plus an empty ComentariosForm; POST stores the
    comment (authenticated users only) and redirects to the catalogue.
    """
    template_name = 'catalogo_general/detalles.html'
    model = Videojuegos
    form_class = ComentariosForm
    context_object_name = 'videojuego'

    def get_success_url(self):
        from django.urls import reverse
        # Bug fix: the original reversed on the class attribute
        # ``Videojuegos.id`` (an unbound model field, not a pk), which can
        # never build a valid URL. Use the object being displayed.
        return reverse('detalles', kwargs={'pk': self.object.pk})

    def get_context_data(self, **kwargs):
        """Expose the comment form to the template alongside the game."""
        context = super().get_context_data(**kwargs)
        context['form'] = self.get_form()
        return context

    def post(self, request, pk, *args, **kwargs):
        """Persist a comment for game ``pk`` from the logged-in user."""
        form = self.get_form()
        id_videojuego = get_object_or_404(Videojuegos, pk=pk)
        # (Removed the redundant request.method check — Django only
        # dispatches POST requests here — and a leftover debug print.)
        if form.is_valid():
            if request.user.is_authenticated:
                comment = form.save(commit=False)
                comment.owner = request.user
                comment.videojuego = id_videojuego
                comment.save()
                return redirect(reverse_lazy('catalogo'))
            # Anonymous users are bounced back to the catalogue unchanged.
            return redirect(reverse_lazy('catalogo'))
        return self.form_invalid(form)

    def form_valid(self, form):
        # Delegates to FormMixin, which redirects to get_success_url().
        return super().form_valid(form)
'''
class ComentariosViews(View):
def get(self, request):
comentarios_form = ComentariosForm()
return render(request, 'catalogo_personal/videojuego.html', {
'comentarios_form':comentarios_form,
})
@transaction.atomic
def post(self,request,pk):
id_videojuego = get_object_or_404(Videojuegos, pk=pk)
if request.method == 'POST':
comenarios_form= ComentariosForm(request.POST,request.FILES)
if comentarios_form.is_valid():
new_comentario = comenarios_form.save(commit=False)
new_comentario.owner = request.user
new_comentario.videojuego = id_videojuego
new_videojuego.save()
return redirect('detalles',id_videojuego.pk)
return render(request, 'catalogo_general/detalles.html', {
'comentarios_form':comentarios_form,
})
'''
def ComentariosViews(request, pk):
    """Create a comment on the Videojuegos identified by ``pk``.

    On a valid POST the comment is stored with the current user as owner
    and the browser is redirected to the detail page; otherwise the form
    (bound with errors on a failed POST, empty on GET) is re-rendered.
    """
    post = get_object_or_404(Videojuegos, pk=pk)
    if request.method == "POST":
        # Fix: only bind the form on POST — the original bound it with an
        # empty request.POST on GET as well, then discarded it.
        comentario_form = ComentariosForm(request.POST)
        if comentario_form.is_valid():
            comment = comentario_form.save(commit=False)
            # NOTE(review): assumes an authenticated user; an anonymous
            # request would fail saving owner — confirm the URL is
            # login-guarded.
            comment.owner = request.user
            comment.videojuego = post
            comment.save()
            return redirect('detalles', pk=post.pk)
    else:
        comentario_form = ComentariosForm()
    return render(request, 'catalogo_general/detalles.html',{'form': comentario_form})
| from django import forms
from .models import Comentarios
class ComentariosForm(forms.ModelForm):
class Meta:
model = Comentarios
fields = ('text',) | {
"imports": [
"/BussinessGamess/catalogo_general/forms.py"
]
} |
05andres/laboratorio-de-software | refs/heads/master | /BussinessGamess/catalogo_personal/forms.py | from django import forms
from .models import Videojuegos
class VideojuegosForm(forms.ModelForm):
    """ModelForm for creating/editing a Videojuegos entry (incl. image upload)."""
    class Meta:
        model = Videojuegos
        # owner/created are set by the view, not the user.
        fields = ('title','FechaLanzamiento','categoria','duracionPromedio','sinopsis','image')
| from django.db import models
from django.contrib.auth.models import User
from django.db import models
from django.forms import ModelForm
# Create your models here.
class Videojuegos(models.Model):
    """A video game entry in a user's personal catalogue."""
    # User who catalogued the game; deleting the user deletes their entries.
    owner = models.ForeignKey(User,on_delete=models.CASCADE)
    title = models.CharField(max_length = 200,verbose_name="Titulo")
    sinopsis = models.TextField(default='',verbose_name="Ingrese una breve sinopsis del Videojuego")
    categoria = models.CharField(max_length = 200,verbose_name="Categoria")
    FechaLanzamiento= models.DateField( verbose_name="Año de lanzamiento")
    # Cover image, stored under MEDIA_ROOT/image_juegos.
    image = models.ImageField(verbose_name="Imagen", upload_to ="image_juegos", height_field=None, width_field=None)
    duracionPromedio = models.CharField(max_length = 200,verbose_name="Duracion Promedio del videojuego")
    created = models.DateTimeField(auto_now_add=True,verbose_name="Fecha de creación",blank=True,null=True)

    class Meta:
        verbose_name = "videojuego"
        verbose_name_plural = "catalogopersonal"
        # Oldest entries first.
        ordering = ["created"]

    def get_absolute_url(self):
        """Return the public detail-page URL for this game."""
        from django.urls import reverse
        return reverse('detalles', kwargs={'pk': self.id})

    def __str__(self):
        return self.title
| {
"imports": [
"/BussinessGamess/catalogo_personal/models.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/CommonViews.py | from django.http import HttpResponse
from django.shortcuts import render
from django.views import View
from django.utils.safestring import mark_safe
from .util import visit, paging, PAGE_SIZE
class PageListView(View):
    """Base paginated article-list view.

    Subclasses override ``victim`` to parse ``self.data`` (the request),
    populate ``self.articles`` (an ordered queryset), ``self.root_router``
    and ``self.classification``, and return -1 to reject a bad request.
    """

    def __init__(self):
        self.classification = ""        # current category label
        self.root_router = ""           # root route used by the template pager
        self.page_size = PAGE_SIZE      # items per page
        self.articles = None            # article queryset selected by victim()
        self.cur_page = 1               # current page number
        self.data = None                # the incoming request
        self.result = {}                # template context built in get()

    def victim(self):
        """Hook for subclasses: select articles from ``self.data``.

        Example::

            self.cur_page = self.data.GET.get('pk', -1)
            tag = self.data.GET.get('tag', -1)
            self.articles = Article.objects.filter(tags__name=tag).order_by("-date")

        Return -1 to signal an unprocessable request.
        """
        pass

    def get(self, request):
        self.data = request
        if self.victim() == -1:
            return HttpResponse("搞不懂你的请求!")
        visit(self.classification, request, "访问了" + self.classification + "文章列表")
        # Ceiling division. Bug fix: round() under-counted pages because
        # of banker's rounding (e.g. 25 articles / 10 per page -> round(2.5)
        # == 2, hiding the last page).
        page_count = -(-self.articles.count() // self.page_size)
        page_num = request.GET.get('page_num', 1)
        articles = paging(self.articles, page_num)
        self.result = {
            "articles": articles,
            "page_count": list(range(page_count)),
            "cur_page": self.cur_page,
            "root_router": self.root_router,
            "class": self.classification,
        }
        return render(request, "showlist.html", self.result)
| from .models import Visitor, Article
PAGE_SIZE = 10  # default number of articles per page


def visit(v_type, request, remark):
    """Persist a Visitor row recording who accessed what.

    :param v_type: visit category (e.g. a classification name)
    :param request: Django request; the client IP is read from META
    :param remark: free-text description of the visit
    """
    Visitor.objects.create(
        ip_address=request.META['REMOTE_ADDR'],
        type=v_type,
        remark=remark,
    )


def str2int(data, default=0):
    """Convert *data* to int; ``None`` or an empty string yields *default*."""
    return default if data is None or len(data) == 0 else int(data)


def paging(data_list, page_number):
    """Return one page of *data_list*.

    :param data_list: queryset (or any sliceable sequence)
    :param page_number: 1-based page index as int or str; the special values
        ``'all'`` / ``'first'`` return everything / the first item.
    :return: the selected slice (or ``.all()`` / ``.first()`` result)
    """
    if page_number == 'all':
        return data_list.all()
    if page_number == 'first':
        return data_list.first()
    # isinstance() instead of ``type(...) is int``; non-int strings fall
    # back to page 1 via str2int.
    page_index = page_number if isinstance(page_number, int) else str2int(page_number, 1)
    begin_index = (page_index - 1) * PAGE_SIZE
    return data_list[begin_index: begin_index + PAGE_SIZE]
| {
"imports": [
"/app/myblog/app_blog/util.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/admin.py | from django.contrib import admin
from .models import Article, Comment, Tag, Classification, Visitor
import requests
import json
from .forms import ArticleForm
# Register your models here.
class VisitorAdmin(admin.ModelAdmin):
    """Admin list view for Visitor rows (registered below via admin.site.register)."""
    list_display = ('ip_address', 'date', 'remark')
@admin.register(Classification)
class ClassificationAdmin(admin.ModelAdmin):
    """Admin list view for Classification rows."""
    list_display = ("name", "date", "description")
@admin.register(Tag)
class TagAdmin(admin.ModelAdmin):
    """Admin list view for Tag rows."""
    list_display = ("name", "date", "description")
admin.site.register(Visitor, VisitorAdmin)
# 注册数据表Articles
@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
    """Admin for Article: custom form, grouped fieldsets, markdown->HTML on save."""
    list_display = ('date', 'title', 'volume')
    # fields = ['title', 'description', 'tags', 'content']
    form = ArticleForm
    fieldsets = (
        (u'基本信息', {
            'fields': (
                ('title', 'description', 'classification', ),  # one row
                ('tags', 'date'),  # another row
                # a new row
            ),
            'classes': ('person',),  # html class attribute of the fieldset
        }),
        (u'内容', {  # another fieldset
            'fields': (
                ('content',),
            ),
        }),
    )
    '''
    重写model保存方法
    @param
    @request:请求实例
    @obj:Articles model实例
    @form:提交表单
    @change:改变
    '''
    def save_model(self, request, obj, form, change):
        """Render obj.content (markdown) to HTML, then do the normal admin save."""
        # Fill Article.html_content from the markdown source.
        obj.html_content = self.md2html(obj.content)
        super().save_model(request, obj, form, change)
    '''
    通过github api将markdown转化为html
    '''
    def md2html(self, md: str):
        """Convert markdown to HTML via the GitHub markdown API.

        NOTE(review): no timeout and no status check -- a GitHub outage or
        rate limit blocks or garbles the admin save; confirm acceptable.
        """
        return requests.post("https://api.github.com/markdown",
                             data=json.dumps({"text": md, "mode": "markdown"}),
                             headers={'Content-Type': 'application/json'}).text
| from django.db import models
import django.utils.timezone as timezone
# Create your models here.
from mdeditor.fields import MDTextField
from .widget import MyTextInput
class Article(models.Model):
    """A blog article; ``content`` is markdown, ``html_content`` its rendered form."""
    # whether the article is shown
    display = models.BooleanField(default=True)
    # title
    title = models.CharField(max_length=100)
    # short description
    description = models.CharField(max_length=200)
    # classification; SET_NULL keeps articles when their classification is deleted
    classification = models.ForeignKey("Classification", null=True, on_delete=models.SET_NULL, related_name="classification")
    # tags (many-to-many)
    tags = models.ManyToManyField("Tag", related_name='tags')
    # publication date
    date = models.DateTimeField(default=timezone.now)
    # markdown content
    content = MDTextField()
    # rendered HTML content (filled by ArticleAdmin.save_model)
    html_content = models.TextField(blank=True, null=True)
    # read count
    volume = models.IntegerField(default=0)
    # def save(self, *args, **kwargs):
    #     self.html_content = self.content
    #     super(Articles, self).save(*args, **kwargs)
    class Meta:
        db_table = 'blog_article'
    def __str__(self):
        return self.title
class Comment(models.Model):
    """A comment on an Article; ``father`` enables threaded replies."""
    # models.CASCADE: deleting the article deletes its comments
    article = models.ForeignKey("Article", on_delete=models.CASCADE)
    # null=True stores NULL for empty values;
    # blank is form-validation only, null is the database side
    father = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)
    date = models.DateTimeField(auto_now=True)
    username = models.CharField(max_length=50)
    email = models.EmailField(blank=False)
    content = models.TextField(blank=False)
    device = models.TextField(blank=False)
    class Meta:
        db_table = 'blog_comment'
    def __str__(self):
        return self.content
class Visitor(models.Model):
    """One access-log row: who (ip) visited what (type/remark) and when."""
    # class Meta:
    #     abstract = True
    #
    # @classmethod
    # def setDb_table(cls, tableName):
    #     class Meta:
    #         # db_table names the generated table for this model.
    #         db_table = tableName
    #
    #     attrs = {
    #         '__module__': cls.__module__,
    #         'Meta': Meta
    #     }
    #     return type(tableName, (cls,), attrs)
    # visitor ip
    ip_address = models.GenericIPAddressField()
    # visit date
    date = models.DateTimeField(default=timezone.now)
    # visit category: an article or a comment
    type = models.CharField(max_length=50)
    # remark
    remark = models.CharField(max_length=100)
    class Meta:
        db_table = 'blog_visitor'
    def __str__(self):
        return self.ip_address
class Classification(models.Model):
    """Article category (e.g. "blog", "note"); looked up by name in the views."""
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100, blank=True, null=True)
    # creation date
    date = models.DateTimeField(default=timezone.now)
    class Meta:
        db_table = "blog_classification"
    def __str__(self):
        return self.name
class Tag(models.Model):
    """Free-form article tag; articles reference it via Article.tags."""
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100, blank=True, null=True)
    # creation date
    date = models.DateTimeField(default=timezone.now)
    class Meta:
        db_table = "blog_tag"
    def __str__(self):
        return self.name
--- FILE SEPARATOR ---
from django import forms
from .models import Article
from .widget import MyTextInput
class ArticleForm(forms.ModelForm):
    """Admin form for Article.

    MyTextInput pulls in js/save.js, which guards against leaving the admin
    page with an unsaved form (per the original author's note).
    """
    title = forms.CharField(initial=0, widget=MyTextInput())

    class Meta:
        # BUG FIX: the original wrote ``forms.model = Article``, which set an
        # attribute on the django.forms *module* instead of binding this
        # form to the model; the Meta below actually attaches the model.
        model = Article
        fields = '__all__'
| {
"imports": [
"/app/myblog/app_blog/models.py",
"/app/myblog/app_blog/forms.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/urls.py | from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
from .views import home, show, Note, Blog, BlogFeed, Link, About, Tag, Search
# URL table for the blog app.  url() is the legacy regex matcher
# (django.conf.urls); path() is the literal matcher.
urlpatterns = [
    path('', home),
    url(r'^show/', show),                # single article, selected by ?pk=
    url(r'^search/', Search.as_view()),
    url(r'^note/', Note.as_view()),
    url(r'^blog/', Blog.as_view()),
    url(r'^link/', Link.as_view()),
    url(r'^tag/', Tag.as_view()),
    url(r'^about/', About.as_view()),
    url(r'^rss/', BlogFeed()),           # RSS feed
    # url(r'^show/([0-9]*)/$', show),
    # path('list/', showlist),
]
| from django.db.models import Q
from django.shortcuts import render
from django.http import HttpResponseNotFound, HttpResponse
from django.utils.safestring import mark_safe
from django.views.decorators.cache import cache_page
from .CommonViews import PageListView
# Create your views here.
# @cache_page(60 * 15)
def home(request):
articles = Article.objects.filter(classification__name="blog").order_by("-date")
return render(request, "home.html", {"articles": articles})
@cache_page(60)
def show(request):
    """Render one article by ?pk=, bumping its read count and logging the visit."""
    from .util import visit  # BUG FIX: ``visit`` was never imported in this module
    # fetch the article by id
    if request.GET.get('pk', -1) != -1:
        pk = request.GET.get('pk')
        art = Article.objects.get(id=pk)
        art.volume += 1
        art.save()
        art.html_content = mark_safe(art.html_content)
        # BUG FIX: ``visit_type["ARTICLE"]`` referenced a name defined nowhere
        # (NameError at runtime); log with a literal category instead.
        # TODO(review): confirm the desired category label.
        visit("article", request, "访问了id为"+pk+"的文章")
        return render(request, "show.html", {"article": art})
    return HttpResponse("搞不懂你的请求")
class Search(PageListView):
def victim(self):
self.root_router = "search"
self.classification = "search"
self.articles = None
self.cur_page = self.data.GET.get('page_num', 1)
if self.data.GET.get('q', -1) == -1:
return -1
q = self.data.GET.get('q', -1)
# self.articles = Article.objects.filter(tags__name=tag).order_by("-date")
self.articles = Article.objects.filter(Q(content__icontains=q) |
Q(title__icontains=q) |
Q(description__icontains=q)).order_by("-date")
class Note(PageListView):
def victim(self):
self.root_router = "note"
self.classification = "note"
self.articles = None
self.cur_page = self.data.GET.get('page_num', 1)
self.articles = Article.objects.filter(classification__name=self.classification).order_by("-date")
class Blog(PageListView):
def victim(self):
self.root_router = "blog"
self.classification = "blog"
self.articles = None
self.cur_page = self.data.GET.get('page_num', 1)
self.articles = Article.objects.filter(classification__name=self.classification).order_by("-date")
class Link(PageListView):
def victim(self):
self.root_router = "link"
self.classification = "link"
self.articles = None
self.cur_page = self.data.GET.get('page_num', 1)
self.articles = Article.objects.filter(classification__name=self.classification).order_by("-date")
class Tag(PageListView):
    """Paginated list of articles carrying the tag given in ?tag=."""
    def victim(self):
        self.root_router = "tag"
        self.classification = "tag"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        if self.data.GET.get('tag', -1) == -1:
            # BUG FIX: previously returned None, so PageListView.get()
            # carried on with self.articles == None and crashed on
            # .count(); -1 is the reject signal get() actually checks.
            return -1
        tag = self.data.GET.get('tag', -1)
        self.articles = Article.objects.filter(tags__name=tag).order_by("-date")
class About(PageListView):
def victim(self):
self.root_router = "about"
self.classification = "about"
self.articles = None
self.cur_page = self.data.GET.get('page_num', 1)
self.articles = Article.objects.filter(classification__name=self.classification).order_by("-date")
from django.contrib.syndication.views import Feed
from .models import Article
class BlogFeed(Feed):
    """RSS feed of articles classified as "blog"."""
    title = "丁丁哈哈的博客"
    link = "/show/"
    # BUG FIX: a ``description(self, obj)`` method used to shadow this
    # attribute and ``return self.description`` -- i.e. it returned the bound
    # method object itself, never a string.  Django's Feed reads the plain
    # attribute fine, so the broken method is removed.
    description = "一只垃圾的網站."
    def items(self):
        return Article.objects.filter(classification__name="blog").order_by("-date")
    def item_title(self, item):
        return item.title
    def item_description(self, item):
        return item.description
    def item_link(self, item):
        # Articles are served by /show/?pk=<id>.
        return self.link + "?pk=" + str(item.id)
    def item_updateddate(self, item):
        # Date-only precision for the feed's updated stamp.
        import datetime
        date = [int(i) for i in str(item.date).split(" ")[0].split("-")]
        return datetime.datetime(date[0], date[1], date[2])
| {
"imports": [
"/app/myblog/app_blog/views.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_api/urls.py | from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
from app_api import urls
from .views import ArticleListView, ArticleViewDetail, TagListView, ClassificationListView, get_token
from rest_framework import routers
# router = routers.DefaultRouter()
# router.register(r'article', ArticleViewSet) #连接uri /users/和view userviewset
'''
api接口
article/?page= # 获取文章列表
article/id # 获取指定文章
comment/?article_id= # 获取文章评论
comment/id # 获取指定评论
visitor/?page=&type=&target=&date= # 获取访客列表
link/?type= # 获取链接
'''
urlpatterns = [
    # path('', include(router.urls)),
    # path('blog/', include(urls))
    path('article/', ArticleListView.as_view()),                  # list / create
    url(r"article/(?P<pk>\d+)/$", ArticleViewDetail.as_view()),  # detail by pk
    path('tag/', TagListView.as_view()),
    path('class/', ClassificationListView.as_view()),
    # path('visitor/', VisitorListView.as_view()),
    path('gettoken/', get_token),                                # CSRF token endpoint
]
| import django
from django.http import JsonResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from rest_framework.pagination import PageNumberPagination
from rest_framework.views import APIView
from app_blog.models import Article, Visitor, Tag, Classification
from rest_framework import mixins, generics
from rest_framework import filters
from rest_framework.response import Response
from rest_framework import authentication, permissions
from .serializers import FactorySerializer
from .decotator import wrap_permission
from django_filters.rest_framework import DjangoFilterBackend
# Create your views here.
class CommonPagination(PageNumberPagination):
page_size = 10
page_size_query_param = 'page_size'
max_page_size = 100
def get_token(request):
token = django.middleware.csrf.get_token(request)
return JsonResponse({'token': token})
from rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication
class CsrfExemptSessionAuthentication(SessionAuthentication):
def enforce_csrf(self, request):
return # To not perform the csrf check previously happening
class ArticleListView(
generics.ListAPIView,
mixins.CreateModelMixin
):
serializer_class = FactorySerializer.get_serializer(Article, attr_exclude=('content', 'html_content'))
queryset = Article.objects.all()
pagination_class = CommonPagination
filter_backends = (DjangoFilterBackend, filters.SearchFilter, )
filter_fields = ('classification__name', 'tags__name')
search_fields = ('title', 'content')
authentication_classes = (TokenAuthentication, )
def get(self, request, *args, **kw):
self.serializer_class = FactorySerializer.get_serializer(Article, attr_exclude=('content', 'html_content'))
return self.list(request, *args, **kw)
@wrap_permission(permissions.IsAdminUser)
def post(self, request, *args, **kwargs):
self.serializer_class = FactorySerializer.get_serializer(Article, attr_exclude=('volume', ))
return self.create(request, *args, **kwargs)
# def get_queryset(self):
# """
# Optionally restricts the returned purchases to a given user,
# by filtering against a `username` query parameter in the URL.
# """
# queryset = Article.objects.all()
# tag = self.request.query_params.get('tag', None)
# classification = self.request.query_params.get('classification', None)
# if classification is not None:
# # 外键过滤
# queryset = queryset.filter(classification__name=classification)
# if tag is not None:
# # 外键过滤
# queryset = queryset.filter(tags__name=tag)
# return queryset
class ArticleViewDetail(
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
mixins.UpdateModelMixin,
mixins.CreateModelMixin,
generics.GenericAPIView,
):
queryset = Article.objects.all()
serializer_class = FactorySerializer.get_serializer(Article)
# authentication_classes = (authentication.BaseAuthentication, )
def get(self, request, *args, **kwargs):
self.permission_classes = ()
return self.retrieve(request, *args, **kwargs)
@wrap_permission(permissions.IsAdminUser)
def delete(self, request, *args, **kwargs):
self.permission_classes = (permissions.IsAdminUser, )
return self.destroy(request, *args, **kwargs)
# def put(self, request, *args, **kwargs):
# return self.update(request, *args, **kwargs)
#
# def post(self, request, *args, **kw):
# return self.create(request, *args, **kw)
# class ArticleViewSet(viewsets.ModelViewSet):
# """
# 查看、编辑用户的界面
# """
# queryset = Article.objects.all()
# serializer_class = ArticleSerializer
class TagListView(generics.ListAPIView):
queryset = Tag.objects.all()
serializer_class = FactorySerializer.get_serializer(Tag)
def get(self, request, *args, **kwargs):
return self.list(self, request, *args, **kwargs)
@wrap_permission(permissions.IsAdminUser)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class ClassificationListView(generics.ListAPIView):
queryset = Classification.objects.all()
serializer_class = FactorySerializer.get_serializer(Classification)
def get(self, request, *args, **kwargs):
return self.list(self, request, *args, **kwargs)
@wrap_permission(permissions.IsAdminUser)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
# class VisitorListView(generics.ListAPIView):
# table = Visitor.setDb_table("visit-2018-11")
# queryset = table.objects.all()
# serializer_class = FactorySerializer.get_serializer(table)
# # authentication_classes = (authentication.TokenAuthentication,)
# permission_classes = (permissions.IsAdminUser,)
#
# def get(self, request, *args, **kwargs):
# return self.list(self, request, *args, **kwargs)
#
# def post(self, request, *args, **kwargs):
# return self.create(request, *args, **kwargs)
#
# def delete(self, request, *args, **kwargs):
# return self.delete(request, *args, **kwargs)
from rest_framework.authtoken.models import Token
from django.db.models.signals import post_save
from django.dispatch import receiver
from myblog import settings
# 创建user时生成Token
@receiver(post_save, sender=django.contrib.auth.models.User)
def create_auth_token(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from rest_framework.response import Response
class CustomAuthToken(ObtainAuthToken):
authentication_classes = ()
def post(self, request, *args, **kwargs):
serializer = self.serializer_class(data=request.data,
context={'request': request})
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({
'token': token.key,
'user_id': user.pk,
'email': user.email
})
| {
"imports": [
"/app/myblog/app_api/views.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/forms.py | from django import forms
from .models import Article
from .widget import MyTextInput
class ArticleForm(forms.ModelForm):
    """Admin form for Article.

    MyTextInput pulls in js/save.js, which guards against leaving the admin
    page with an unsaved form (per the original author's note).
    """
    title = forms.CharField(initial=0, widget=MyTextInput())

    class Meta:
        # BUG FIX: the original wrote ``forms.model = Article``, which set an
        # attribute on the django.forms *module* instead of binding this
        # form to the model; the Meta below actually attaches the model.
        model = Article
        fields = '__all__'
| from django.db import models
import django.utils.timezone as timezone
# Create your models here.
from mdeditor.fields import MDTextField
from .widget import MyTextInput
class Article(models.Model):
# 是否显示
display = models.BooleanField(default=True)
# 标题
title = models.CharField(max_length=100)
# 描述
description = models.CharField(max_length=200)
# 分类
classification = models.ForeignKey("Classification", null=True, on_delete=models.SET_NULL, related_name="classification")
# 标签
tags = models.ManyToManyField("Tag", related_name='tags')
# 日期
date = models.DateTimeField(default=timezone.now)
# 内容
content = MDTextField()
# html内容
html_content = models.TextField(blank=True, null=True)
# 阅读量
volume = models.IntegerField(default=0)
# def save(self, *args, **kwargs):
# self.html_content = self.content
# super(Articles, self).save(*args, **kwargs)
class Meta:
db_table = 'blog_article'
def __str__(self):
return self.title
class Comment(models.Model):
# models.CASCADE是级联删除
article = models.ForeignKey("Article", on_delete=models.CASCADE)
# 如果null为True,django会用null来存储空值
# blank和null一样,只不过blank只与验证相关,而null是与数据库相关的,如果blank=False,则后台表单必填
father = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)
date = models.DateTimeField(auto_now=True)
username = models.CharField(max_length=50)
email = models.EmailField(blank=False)
content = models.TextField(blank=False)
device = models.TextField(blank=False)
class Meta:
db_table = 'blog_comment'
def __str__(self):
return self.content
class Visitor(models.Model):
# class Meta:
# abstract = True
#
# @classmethod
# def setDb_table(cls, tableName):
# class Meta:
# # db_table指定在数据库中,当前模型生成的数据表的表名。
# db_table = tableName
#
# attrs = {
# '__module__': cls.__module__,
# 'Meta': Meta
# }
# return type(tableName, (cls,), attrs)
# 访问者ip
ip_address = models.GenericIPAddressField()
# 访问日期
date = models.DateTimeField(default=timezone.now)
# 访问类别,文章或评论
type = models.CharField(max_length=50)
# 备注
remark = models.CharField(max_length=100)
class Meta:
db_table = 'blog_visitor'
def __str__(self):
return self.ip_address
class Classification(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100, blank=True, null=True)
# 日期
date = models.DateTimeField(default=timezone.now)
class Meta:
db_table = "blog_classification"
def __str__(self):
return self.name
class Tag(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100, blank=True, null=True)
# 日期
date = models.DateTimeField(default=timezone.now)
class Meta:
db_table = "blog_tag"
def __str__(self):
return self.name
--- FILE SEPARATOR ---
from django.forms import TextInput
class MyTextInput(TextInput):
    """TextInput that also loads js/save.js wherever it is rendered.

    Per the original author's note, the script guards against leaving the
    admin page with an unsaved form.
    """
    class Media:
        # asset attached to any page using this widget
        js = ("js/save.js",)
| {
"imports": [
"/app/myblog/app_blog/models.py",
"/app/myblog/app_blog/widget.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_api/views.py | import django
from django.http import JsonResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from rest_framework.pagination import PageNumberPagination
from rest_framework.views import APIView
from app_blog.models import Article, Visitor, Tag, Classification
from rest_framework import mixins, generics
from rest_framework import filters
from rest_framework.response import Response
from rest_framework import authentication, permissions
from .serializers import FactorySerializer
from .decotator import wrap_permission
from django_filters.rest_framework import DjangoFilterBackend
# Create your views here.
class CommonPagination(PageNumberPagination):
    """Shared DRF pagination: 10 per page; the client may raise it to at most 100 via ?page_size=."""
    page_size = 10
    page_size_query_param = 'page_size'
    max_page_size = 100
def get_token(request):
    """Return a CSRF token as JSON for script/API clients.

    NOTE(review): this relies on the bare ``import django`` exposing
    ``django.middleware.csrf``; that submodule is only reachable because
    something else imported it earlier -- confirm, or import it explicitly.
    """
    token = django.middleware.csrf.get_token(request)
    return JsonResponse({'token': token})
from rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication
class CsrfExemptSessionAuthentication(SessionAuthentication):
    """SessionAuthentication minus the CSRF check (for non-browser API clients)."""
    def enforce_csrf(self, request):
        # Intentionally a no-op: skip the CSRF validation the parent performs.
        return
class ArticleListView(
    generics.ListAPIView,
    mixins.CreateModelMixin
):
    """List articles (public, paginated, filterable) and create them (admin only)."""
    # Listing omits the heavy body fields.
    serializer_class = FactorySerializer.get_serializer(Article, attr_exclude=('content', 'html_content'))
    queryset = Article.objects.all()
    pagination_class = CommonPagination
    filter_backends = (DjangoFilterBackend, filters.SearchFilter, )
    # NOTE(review): ``filter_fields`` was renamed ``filterset_fields`` in
    # newer django-filter releases -- confirm against the pinned version.
    filter_fields = ('classification__name', 'tags__name')
    search_fields = ('title', 'content')
    authentication_classes = (TokenAuthentication, )
    def get(self, request, *args, **kw):
        # Presumably re-set because post() switches serializers; DRF builds a
        # fresh view instance per request, so this looks redundant -- verify.
        self.serializer_class = FactorySerializer.get_serializer(Article, attr_exclude=('content', 'html_content'))
        return self.list(request, *args, **kw)
    @wrap_permission(permissions.IsAdminUser)
    def post(self, request, *args, **kwargs):
        # Creation accepts the body fields but never a client-supplied read count.
        self.serializer_class = FactorySerializer.get_serializer(Article, attr_exclude=('volume', ))
        return self.create(request, *args, **kwargs)
    # def get_queryset(self):
    #     """
    #     Optionally restricts the returned purchases to a given user,
    #     by filtering against a `username` query parameter in the URL.
    #     """
    #     queryset = Article.objects.all()
    #     tag = self.request.query_params.get('tag', None)
    #     classification = self.request.query_params.get('classification', None)
    #     if classification is not None:
    #         # filter across the foreign key
    #         queryset = queryset.filter(classification__name=classification)
    #     if tag is not None:
    #         # filter across the foreign key
    #         queryset = queryset.filter(tags__name=tag)
    #     return queryset
class ArticleViewDetail(
    mixins.RetrieveModelMixin,
    mixins.DestroyModelMixin,
    mixins.UpdateModelMixin,
    mixins.CreateModelMixin,
    generics.GenericAPIView,
):
    """Retrieve (public) or delete (admin only) a single article by pk."""
    queryset = Article.objects.all()
    serializer_class = FactorySerializer.get_serializer(Article)
    # authentication_classes = (authentication.BaseAuthentication, )
    def get(self, request, *args, **kwargs):
        self.permission_classes = ()  # anyone may read
        return self.retrieve(request, *args, **kwargs)
    @wrap_permission(permissions.IsAdminUser)
    def delete(self, request, *args, **kwargs):
        # NOTE(review): redundant with the decorator above -- confirm which
        # one is meant to be authoritative.
        self.permission_classes = (permissions.IsAdminUser, )
        return self.destroy(request, *args, **kwargs)
    # def put(self, request, *args, **kwargs):
    #     return self.update(request, *args, **kwargs)
    #
    # def post(self, request, *args, **kw):
    #     return self.create(request, *args, **kw)
# class ArticleViewSet(viewsets.ModelViewSet):
# """
# 查看、编辑用户的界面
# """
# queryset = Article.objects.all()
# serializer_class = ArticleSerializer
class TagListView(generics.ListAPIView):
    """List tags (public) or create one (admin only)."""
    queryset = Tag.objects.all()
    serializer_class = FactorySerializer.get_serializer(Tag)
    def get(self, request, *args, **kwargs):
        # BUG FIX: was ``self.list(self, request, ...)``, which passed the
        # view instance as the ``request`` argument of ListModelMixin.list().
        return self.list(request, *args, **kwargs)
    @wrap_permission(permissions.IsAdminUser)
    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)
class ClassificationListView(generics.ListAPIView):
    """List classifications (public) or create one (admin only)."""
    queryset = Classification.objects.all()
    serializer_class = FactorySerializer.get_serializer(Classification)
    def get(self, request, *args, **kwargs):
        # BUG FIX: was ``self.list(self, request, ...)``, which passed the
        # view instance as the ``request`` argument of ListModelMixin.list().
        return self.list(request, *args, **kwargs)
    @wrap_permission(permissions.IsAdminUser)
    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)
# class VisitorListView(generics.ListAPIView):
# table = Visitor.setDb_table("visit-2018-11")
# queryset = table.objects.all()
# serializer_class = FactorySerializer.get_serializer(table)
# # authentication_classes = (authentication.TokenAuthentication,)
# permission_classes = (permissions.IsAdminUser,)
#
# def get(self, request, *args, **kwargs):
# return self.list(self, request, *args, **kwargs)
#
# def post(self, request, *args, **kwargs):
# return self.create(request, *args, **kwargs)
#
# def delete(self, request, *args, **kwargs):
# return self.delete(request, *args, **kwargs)
from rest_framework.authtoken.models import Token
from django.db.models.signals import post_save
from django.dispatch import receiver
from myblog import settings
# Issue a DRF auth Token whenever a new User is created.
@receiver(post_save, sender=django.contrib.auth.models.User)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    """post_save receiver: create a Token for every newly created user.

    NOTE(review): ``django.contrib.auth.models`` is reachable through the
    bare ``import django`` only because an earlier import pulled it in --
    confirm, or import User explicitly.
    """
    if created:
        Token.objects.create(user=instance)
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from rest_framework.response import Response
class CustomAuthToken(ObtainAuthToken):
    """Token login endpoint that also returns the user's id and email."""
    authentication_classes = ()  # credentials come in the body; no pre-auth needed
    def post(self, request, *args, **kwargs):
        serializer = self.serializer_class(data=request.data,
                                           context={'request': request})
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data['user']
        # get_or_create: one persistent token per user
        token, created = Token.objects.get_or_create(user=user)
        return Response({
            'token': token.key,
            'user_id': user.pk,
            'email': user.email
        })
| # from django.contrib.auth.models import User, Group
from rest_framework import serializers
# Factory pattern: lets the views build per-model serializer classes on demand.
class FactorySerializer(serializers.ModelSerializer):
    """ModelSerializer factory.

    ``get_serializer(Model)`` returns a fresh serializer class for *Model*.
    ``attr_exclude`` takes precedence over ``attr_field`` because DRF forbids
    declaring both ``fields`` and ``exclude`` on the same Meta.
    """
    @classmethod
    def get_serializer(cls, attr_model, attr_field='__all__', attr_exclude=()):
        class Meta:
            model = attr_model
            # Class bodies execute like code: define exactly one of
            # exclude / fields on the resulting Meta.
            if attr_exclude:
                exclude = attr_exclude
            else:
                fields = attr_field
            depth = 1  # nest related objects one level deep
        attrs = {
            '__module__': cls.__module__,
            'Meta': Meta
        }
        # Dynamic subclass of this serializer, named e.g. "ArticleSerializer".
        return type(attr_model.__name__+'Serializer', (cls,), attrs)
| {
"imports": [
"/app/myblog/app_api/serializers.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/util.py | from .models import Visitor, Article
PAGE_SIZE = 10  # default number of articles per page


def visit(v_type, request, remark):
    """Persist a Visitor row recording who accessed what.

    :param v_type: visit category (e.g. a classification name)
    :param request: Django request; the client IP is read from META
    :param remark: free-text description of the visit
    """
    Visitor.objects.create(
        ip_address=request.META['REMOTE_ADDR'],
        type=v_type,
        remark=remark,
    )


def str2int(data, default=0):
    """Convert *data* to int; ``None`` or an empty string yields *default*."""
    return default if data is None or len(data) == 0 else int(data)


def paging(data_list, page_number):
    """Return one page of *data_list*.

    :param data_list: queryset (or any sliceable sequence)
    :param page_number: 1-based page index as int or str; the special values
        ``'all'`` / ``'first'`` return everything / the first item.
    :return: the selected slice (or ``.all()`` / ``.first()`` result)
    """
    if page_number == 'all':
        return data_list.all()
    if page_number == 'first':
        return data_list.first()
    # isinstance() instead of ``type(...) is int``; non-int strings fall
    # back to page 1 via str2int.
    page_index = page_number if isinstance(page_number, int) else str2int(page_number, 1)
    begin_index = (page_index - 1) * PAGE_SIZE
    return data_list[begin_index: begin_index + PAGE_SIZE]
| from django.db import models
import django.utils.timezone as timezone
# Create your models here.
from mdeditor.fields import MDTextField
from .widget import MyTextInput
class Article(models.Model):
# 是否显示
display = models.BooleanField(default=True)
# 标题
title = models.CharField(max_length=100)
# 描述
description = models.CharField(max_length=200)
# 分类
classification = models.ForeignKey("Classification", null=True, on_delete=models.SET_NULL, related_name="classification")
# 标签
tags = models.ManyToManyField("Tag", related_name='tags')
# 日期
date = models.DateTimeField(default=timezone.now)
# 内容
content = MDTextField()
# html内容
html_content = models.TextField(blank=True, null=True)
# 阅读量
volume = models.IntegerField(default=0)
# def save(self, *args, **kwargs):
# self.html_content = self.content
# super(Articles, self).save(*args, **kwargs)
class Meta:
db_table = 'blog_article'
def __str__(self):
return self.title
class Comment(models.Model):
# models.CASCADE是级联删除
article = models.ForeignKey("Article", on_delete=models.CASCADE)
# 如果null为True,django会用null来存储空值
# blank和null一样,只不过blank只与验证相关,而null是与数据库相关的,如果blank=False,则后台表单必填
father = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)
date = models.DateTimeField(auto_now=True)
username = models.CharField(max_length=50)
email = models.EmailField(blank=False)
content = models.TextField(blank=False)
device = models.TextField(blank=False)
class Meta:
db_table = 'blog_comment'
def __str__(self):
return self.content
class Visitor(models.Model):
# class Meta:
# abstract = True
#
# @classmethod
# def setDb_table(cls, tableName):
# class Meta:
# # db_table指定在数据库中,当前模型生成的数据表的表名。
# db_table = tableName
#
# attrs = {
# '__module__': cls.__module__,
# 'Meta': Meta
# }
# return type(tableName, (cls,), attrs)
# 访问者ip
ip_address = models.GenericIPAddressField()
# 访问日期
date = models.DateTimeField(default=timezone.now)
# 访问类别,文章或评论
type = models.CharField(max_length=50)
# 备注
remark = models.CharField(max_length=100)
class Meta:
db_table = 'blog_visitor'
def __str__(self):
return self.ip_address
class Classification(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100, blank=True, null=True)
# 日期
date = models.DateTimeField(default=timezone.now)
class Meta:
db_table = "blog_classification"
def __str__(self):
return self.name
class Tag(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=100, blank=True, null=True)
# 日期
date = models.DateTimeField(default=timezone.now)
class Meta:
db_table = "blog_tag"
def __str__(self):
return self.name
| {
"imports": [
"/app/myblog/app_blog/models.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/views.py | from django.db.models import Q
from django.shortcuts import render
from django.http import HttpResponseNotFound, HttpResponse
from django.utils.safestring import mark_safe
from django.views.decorators.cache import cache_page
from .CommonViews import PageListView
# Create your views here.
# @cache_page(60 * 15)
def home(request):
    """Home page: all "blog"-classified articles, newest first.

    NOTE(review): ``Article`` is imported near the bottom of this module,
    so the name resolves by the time a request is served -- fragile but works.
    """
    articles = Article.objects.filter(classification__name="blog").order_by("-date")
    return render(request, "home.html", {"articles": articles})
@cache_page(60)
def show(request):
    """Render one article by ?pk=, bumping its read count and logging the visit."""
    from .util import visit  # BUG FIX: ``visit`` was never imported in this module
    # fetch the article by id
    if request.GET.get('pk', -1) != -1:
        pk = request.GET.get('pk')
        art = Article.objects.get(id=pk)
        art.volume += 1
        art.save()
        art.html_content = mark_safe(art.html_content)
        # BUG FIX: ``visit_type["ARTICLE"]`` referenced a name defined nowhere
        # (NameError at runtime); log with a literal category instead.
        # TODO(review): confirm the desired category label.
        visit("article", request, "访问了id为"+pk+"的文章")
        return render(request, "show.html", {"article": art})
    return HttpResponse("搞不懂你的请求")
class Search(PageListView):
    """Search over content/title/description via the ?q= parameter."""
    def victim(self):
        self.root_router = "search"
        self.classification = "search"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        if self.data.GET.get('q', -1) == -1:
            return -1  # no query -> reject the request
        q = self.data.GET.get('q', -1)
        # self.articles = Article.objects.filter(tags__name=tag).order_by("-date")
        self.articles = Article.objects.filter(Q(content__icontains=q) |
                                               Q(title__icontains=q) |
                                               Q(description__icontains=q)).order_by("-date")
class Note(PageListView):
    """Paginated list of articles classified as "note"."""
    def victim(self):
        self.root_router = "note"
        self.classification = "note"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        self.articles = Article.objects.filter(classification__name=self.classification).order_by("-date")
class Blog(PageListView):
    """Paginated list of articles classified as "blog"."""
    def victim(self):
        self.root_router = "blog"
        self.classification = "blog"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        self.articles = Article.objects.filter(classification__name=self.classification).order_by("-date")
class Link(PageListView):
    """Paginated list of articles classified as "link"."""
    def victim(self):
        self.root_router = "link"
        self.classification = "link"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        qs = Article.objects.filter(classification__name=self.classification)
        self.articles = qs.order_by("-date")
class Tag(PageListView):
    """Paginated list of articles carrying a given tag (?tag=<name>).

    Bug fix: a missing ``tag`` parameter used to ``return None``, but
    PageListView.get() only recognises -1 as the error sentinel, so it
    proceeded to call ``None.count()`` and crashed.  Return -1 instead,
    matching the Search view's convention.
    """
    def victim(self):
        self.root_router = "tag"
        self.classification = "tag"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        tag = self.data.GET.get('tag', -1)
        if tag == -1:
            return -1
        self.articles = Article.objects.filter(tags__name=tag).order_by("-date")
class About(PageListView):
    """Paginated list of articles classified as "about"."""
    def victim(self):
        self.root_router = "about"
        self.classification = "about"
        self.articles = None
        self.cur_page = self.data.GET.get('page_num', 1)
        qs = Article.objects.filter(classification__name=self.classification)
        self.articles = qs.order_by("-date")
from django.contrib.syndication.views import Feed
from .models import Article
class BlogFeed(Feed):
    """RSS/Atom feed of "blog" articles.

    Bug fix: the original defined both a ``description`` class attribute and
    a ``description(self, obj)`` method; the method shadowed the attribute
    and returned ``self.description`` — i.e. the bound method object itself,
    not a string.  The broken override is removed so the framework uses the
    plain attribute, as intended.
    """
    title = "丁丁哈哈的博客"
    link = "/show/"
    description = "一只垃圾的網站."

    def items(self):
        # Newest "blog" articles first.
        return Article.objects.filter(classification__name="blog").order_by("-date")

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.description

    def item_link(self, item):
        # The show view resolves articles via the ?pk= query parameter.
        return self.link + "?pk=" + str(item.id)

    def item_updateddate(self, item):
        import datetime
        # Truncate the stored datetime to midnight of its day
        # (equivalent to the old string-splitting round-trip, minus the parsing).
        return datetime.datetime(item.date.year, item.date.month, item.date.day)
| from django.http import HttpResponse
from django.shortcuts import render
from django.views import View
from django.utils.safestring import mark_safe
from .util import visit, paging, PAGE_SIZE
class PageListView(View):
    """Generic paginated article-list view.

    Subclasses implement victim() to populate ``self.articles`` (a queryset),
    ``self.classification`` and ``self.root_router``.  A victim() return of
    -1 signals an unusable request and yields an error response.
    """
    def __init__(self):
        self.classification = ""    # current classification name
        self.root_router = ""       # root URL route of the list
        self.page_size = PAGE_SIZE  # pagination page size
        self.articles = None        # article queryset, set by victim()
        self.cur_page = 1           # current page number
        self.data = None            # the incoming request
        self.result = {}            # template context, filled in get()

    def victim(self):
        """Subclass hook: set articles/classification/root_router.

        Return -1 to signal a bad request.  Example::

            self.root_router = "tag"
            self.classification = "tag"
            self.cur_page = self.data.GET.get('page_num', 1)
            tag = self.data.GET.get('tag', -1)
            if tag == -1:
                return -1
            self.articles = Article.objects.filter(tags__name=tag).order_by("-date")
        """
        pass

    def get(self, request):
        self.data = request
        if self.victim() == -1:
            return HttpResponse("搞不懂你的请求!")
        visit(self.classification, request, "访问了" + self.classification + "文章列表")
        # Pagination.  Bug fix: round() under-counted pages — e.g. 3 articles
        # with page_size 10 gave round(0.3) == 0 pages, and 26/10 gave 3 vs
        # round(2.6) == 3 but 25/10 gave round(2.5) == 2, dropping the last
        # partial page.  Use ceiling division instead.
        page_count = -(-self.articles.count() // self.page_size)
        page_num = request.GET.get('page_num', 1)
        articles = paging(self.articles, page_num)
        self.result = {
            "articles": articles,
            "page_count": list(range(page_count)),
            "cur_page": self.cur_page,
            "root_router": self.root_router,
            "class": self.classification,
        }
        return render(request, "showlist.html", self.result)
--- FILE SEPARATOR ---
from django.db import models
import django.utils.timezone as timezone
# Create your models here.
from mdeditor.fields import MDTextField
from .widget import MyTextInput
class Article(models.Model):
    """A site entry; its Classification decides which list page shows it."""
    # Whether the article is visible on the site.
    display = models.BooleanField(default=True)
    # Title.
    title = models.CharField(max_length=100)
    # Short description / summary.
    description = models.CharField(max_length=200)
    # Classification (blog/note/link/about...); kept (set NULL) on classification delete.
    classification = models.ForeignKey("Classification", null=True, on_delete=models.SET_NULL, related_name="classification")
    # Tags (many-to-many).
    tags = models.ManyToManyField("Tag", related_name='tags')
    # Publication date.
    date = models.DateTimeField(default=timezone.now)
    # Markdown source content.
    content = MDTextField()
    # Rendered HTML content (populated elsewhere; nullable).
    html_content = models.TextField(blank=True, null=True)
    # Read counter, incremented by the show view.
    volume = models.IntegerField(default=0)
    class Meta:
        db_table = 'blog_article'
    def __str__(self):
        return self.title
class Comment(models.Model):
    """A comment on an article; ``father`` enables threaded replies."""
    # CASCADE: comments are deleted together with their article.
    article = models.ForeignKey("Article", on_delete=models.CASCADE)
    # Parent comment for threads; NULL marks a top-level comment.
    # (blank= affects form validation only; null= affects the database.)
    father = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)
    date = models.DateTimeField(auto_now=True)
    username = models.CharField(max_length=50)
    email = models.EmailField(blank=False)
    content = models.TextField(blank=False)
    # Free-form client/device description.
    device = models.TextField(blank=False)
    class Meta:
        db_table = 'blog_comment'
    def __str__(self):
        return self.content
class Visitor(models.Model):
    """One logged visit (a simple access-log row written by visit())."""
    # Visitor IP address.
    ip_address = models.GenericIPAddressField()
    # Visit timestamp.
    date = models.DateTimeField(default=timezone.now)
    # Visit category, e.g. article or comment.
    type = models.CharField(max_length=50)
    # Free-form remark describing what was visited.
    remark = models.CharField(max_length=100)
    class Meta:
        db_table = 'blog_visitor'
    def __str__(self):
        return self.ip_address
class Classification(models.Model):
    """Top-level article category (blog/note/link/about...)."""
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100, blank=True, null=True)
    # Creation date.
    date = models.DateTimeField(default=timezone.now)
    class Meta:
        db_table = "blog_classification"
    def __str__(self):
        return self.name
class Tag(models.Model):
    """Article tag (many-to-many with Article)."""
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100, blank=True, null=True)
    # Creation date.
    date = models.DateTimeField(default=timezone.now)
    class Meta:
        db_table = "blog_tag"
    def __str__(self):
        return self.name
| {
"imports": [
"/app/myblog/app_blog/CommonViews.py",
"/app/myblog/app_blog/models.py"
]
} |
068089dy/myblog | refs/heads/master | /app/myblog/app_blog/models.py | from django.db import models
import django.utils.timezone as timezone
# Create your models here.
from mdeditor.fields import MDTextField
from .widget import MyTextInput
class Article(models.Model):
    """A site entry; its Classification decides which list page shows it."""
    # Whether the article is visible on the site.
    display = models.BooleanField(default=True)
    # Title.
    title = models.CharField(max_length=100)
    # Short description / summary.
    description = models.CharField(max_length=200)
    # Classification (blog/note/link/about...); kept (set NULL) on classification delete.
    classification = models.ForeignKey("Classification", null=True, on_delete=models.SET_NULL, related_name="classification")
    # Tags (many-to-many).
    tags = models.ManyToManyField("Tag", related_name='tags')
    # Publication date.
    date = models.DateTimeField(default=timezone.now)
    # Markdown source content.
    content = MDTextField()
    # Rendered HTML content (populated elsewhere; nullable).
    html_content = models.TextField(blank=True, null=True)
    # Read counter, incremented by the show view.
    volume = models.IntegerField(default=0)
    class Meta:
        db_table = 'blog_article'
    def __str__(self):
        return self.title
class Comment(models.Model):
    """A comment on an article; ``father`` enables threaded replies."""
    # CASCADE: comments are deleted together with their article.
    article = models.ForeignKey("Article", on_delete=models.CASCADE)
    # Parent comment for threads; NULL marks a top-level comment.
    # (blank= affects form validation only; null= affects the database.)
    father = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)
    date = models.DateTimeField(auto_now=True)
    username = models.CharField(max_length=50)
    email = models.EmailField(blank=False)
    content = models.TextField(blank=False)
    # Free-form client/device description.
    device = models.TextField(blank=False)
    class Meta:
        db_table = 'blog_comment'
    def __str__(self):
        return self.content
class Visitor(models.Model):
    """One logged visit (a simple access-log row written by visit())."""
    # Visitor IP address.
    ip_address = models.GenericIPAddressField()
    # Visit timestamp.
    date = models.DateTimeField(default=timezone.now)
    # Visit category, e.g. article or comment.
    type = models.CharField(max_length=50)
    # Free-form remark describing what was visited.
    remark = models.CharField(max_length=100)
    class Meta:
        db_table = 'blog_visitor'
    def __str__(self):
        return self.ip_address
class Classification(models.Model):
    """Top-level article category (blog/note/link/about...)."""
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100, blank=True, null=True)
    # Creation date.
    date = models.DateTimeField(default=timezone.now)
    class Meta:
        db_table = "blog_classification"
    def __str__(self):
        return self.name
class Tag(models.Model):
    """Article tag (many-to-many with Article)."""
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100, blank=True, null=True)
    # Creation date.
    date = models.DateTimeField(default=timezone.now)
    class Meta:
        db_table = "blog_tag"
    def __str__(self):
        return self.name
| from django.forms import TextInput
class MyTextInput(TextInput):
    """TextInput widget that additionally loads save.js in the admin."""
    class Media:
        # save.js guards against leaving the admin form page with unsaved changes.
        js = ("js/save.js",)
| {
"imports": [
"/app/myblog/app_blog/widget.py"
]
} |
06hong/clothing-inventory | refs/heads/main | /drone_inventory/__init__.py | from flask import Flask
from config import Config
from .site.routes import site
from .authentication.routes import auth
from flask_migrate import Migrate
from .models import db, login_manager
# Single-module app setup: create the app, load config, register the two
# blueprints, then bind the extensions (SQLAlchemy, LoginManager, Migrate).
app = Flask(__name__)
app.config.from_object(Config)
app.register_blueprint(site)
app.register_blueprint(auth)
db.init_app(app)
login_manager.init_app(app)
login_manager.login_view = 'signin'  # specify what page to load for NON-AUTH users
migrate = Migrate(app, db)
# NOTE(review): imported at the bottom presumably to avoid a circular import
# between this module and .models — confirm before moving.
from .models import User
| from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from datetime import datetime
import uuid
#adding flask security for passwords
from werkzeug.security import generate_password_hash, check_password_hash
#creates hex token for our API access
import secrets
#imports login manager from flask_login package
from flask_login import LoginManager, UserMixin
# Extension singletons, bound to the app later via init_app().
db = SQLAlchemy()
login_manager = LoginManager()

@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a User by primary key on each request."""
    return User.query.get(user_id)
#new_user= User('honggao@gmail.com','1234')
class User(db.Model, UserMixin):
    """Application user; stores a werkzeug password hash and an API token."""
    # UUID4 string primary key, generated in __init__.
    id = db.Column(db.String, primary_key=True)
    email = db.Column(db.String(150), nullable=False, unique=True)
    # Holds the password *hash*, not the plain password.
    password = db.Column(db.String, nullable=False)
    # Hex API token (48 chars: 24 random bytes).
    token = db.Column(db.String, unique=True, default='')
    date_created = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)

    def __init__(self, email, password, token='', id=''):
        # NOTE(review): the token/id parameters are accepted but ignored —
        # fresh values are always generated.  Confirm callers expect this.
        self.id = self.set_id()
        self.email = email
        self.password = self.set_password(password)
        self.token = self.set_token(24)

    def set_id(self):
        """Return a new UUID4 string."""
        return str(uuid.uuid4())

    def set_password(self, password):
        """Hash ``password`` and return the hash (also kept on ``pw_hash``)."""
        self.pw_hash = generate_password_hash(password)
        return self.pw_hash

    def set_token(self, length):
        """Return a hex token built from ``length`` random bytes."""
        return secrets.token_hex(length)
| {
"imports": [
"/drone_inventory/models.py"
]
} |
06milena/Parcial-1 | refs/heads/main | /menu.py |
from usuarios.crear_usuario import crear
def menuPrincipal():
    """Console main menu: register (1), sign in (2) or quit (3/0).

    Bug fix: a non-numeric menu choice used to crash with a ValueError from
    ``int(input(...))``; it is now treated like an invalid option and the
    menu is shown again.
    """
    opcion = 1
    while opcion != 0:
        print('---------------------------------')
        print('          Menu')
        print('1. Registrar')
        print('2. Iniciar sesion')
        print('3. Salir')
        try:
            opcion = int(input('SELECCIONE UNA OPCION: '))
        except ValueError:
            print("Opcion invalida ")
            continue
        if opcion == 1:
            crear()
        elif opcion == 2:
            # NOTE(review): credentials are read but never checked here —
            # sign-in looks unfinished.
            email = input('Ingrese el correo: ')
            contrasena = input('Ingrese la contraseña: ')
        elif opcion == 3:
            break
        else:
            print("Opcion invalida ")
menuPrincipal() | from config.db import DB
import re
def crearUsuario(nombre, email, contrasena):
    """Insert a new row into the ``usuarios`` table and commit.

    NOTE(review): the password is stored exactly as given — no hashing is
    visible here; confirm whether hashing happens elsewhere.
    """
    cursor = DB.cursor()
    # Parameterized query: values are bound, not string-interpolated.
    cursor.execute('''insert into
    usuarios(nombre, email, contrasena)
    values(%s, %s, %s)''', (
        nombre,
        email,
        contrasena,
    ))
    DB.commit()
    print("Registro Correctamente")
    cursor.close()
def crear():
    """Prompt for name, email and password on the console and register the user."""
    datos = (
        input('Ingrese el nombre: '),
        input('Ingrese el correo: '),
        input('Ingrese la contraseña: '),
    )
    crearUsuario(*datos)


bandera = 1
def validar_contraseña():
    """Prompt until a valid password is entered and return it.

    Rules: at least 8 characters, at least one digit and one uppercase letter.

    Bug fix: the original read the password once *before* its ``while True``
    loop and never re-prompted nor broke out of the loop, so any invalid
    password printed the same message forever (and a valid one looped too).
    """
    while True:
        contrasena = input('Ingrese la contraseña: ')
        if len(contrasena) < 8:
            print('contraseña de 8 caracteres')
        elif re.search('[0-9]', contrasena) is None:
            print('que tenga un numero')
        elif re.search('[A-Z]', contrasena) is None:
            print('que tenga una mayuscula')
        else:
            print('es correcto')
            return contrasena
| {
"imports": [
"/usuarios/crear_usuario.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /lib/baseenricher/enricher.py | """Purely abstract base enricher class."""
from models.idf import InternalDataFormat
class BaseEnricher:
    """Abstract base enricher: defines the interface; enrich() is the identity."""
    def __init__(self):
        pass
    def enrich(self, idf: InternalDataFormat) -> InternalDataFormat:
        """Return the IDF record unchanged; subclasses override this hook."""
        return idf
| from typing import List, Optional
from pydantic import BaseModel, IPvAnyAddress
class InternalDataFormat(BaseModel):
    """The Internal Data Format (IDF).

    One leaked-credential record; only ``email`` is mandatory.  Parsers
    produce IDF records and enrichers fill in the flag fields below.
    """
    leak_id: Optional[str]  # the leak(id) reference
    email: str
    password: Optional[str]  # not mandatory yet
    password_plain: Optional[str]
    password_hashed: Optional[str]
    hash_algo: Optional[str]  # name of the hashing algorithm, if known
    ticket_id: Optional[str]
    email_verified: Optional[bool] = False
    password_verified_ok: Optional[bool] = False
    ip: Optional[IPvAnyAddress]
    domain: Optional[str]
    target_domain: Optional[str]
    browser: Optional[str]
    malware_name: Optional[str]
    infected_machine: Optional[str]
    #
    # flags set by the enrichers
    dg: Optional[str]
    external_user: Optional[bool]
    is_vip: Optional[bool]
    is_active_account: Optional[bool]
    credential_type: Optional[List[str]]  # External, EU Login, etc.
    report_to: Optional[List[str]]  # whom to report this to?
    #
    # meta stuff and things for error reporting
    count_seen: Optional[int] = 1
    original_line: Optional[str]  # raw input line, kept for error reporting
    error_msg: Optional[str]
    notify: Optional[bool]
    needs_human_intervention: Optional[bool]
| {
"imports": [
"/models/idf.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/lib/test_helpers.py | from lib.helpers import anonymize_password
def test_anonymize_password():
    """anonymize_password keeps the first char and last two, stars the rest,
    and passes through inputs shorter than 4 chars (including ''/None)."""
    cases = [
        ("12345678", "1*****78"),
        ("123", "123"),
        ("12", "12"),
        ("", ""),
        (None, None),
    ]
    for given, expected in cases:
        assert anonymize_password(given) == expected
| import csv
import logging
from pathlib import Path
# Fix: the first LOG_FORMAT assignment was dead code (immediately
# overwritten); only the richer format string is kept.
LOG_FORMAT = '%(asctime)s - [%(name)s:%(module)s:%(funcName)s] - %(levelname)s - %(message)s'


def getlogger(name: str, log_level=logging.INFO) -> logging.Logger:
    """This is how we do logging. How to use it:
    Add the following code snippet to every module
    ```
    logger = getlogger(__name__)
    logger.info("foobar")
    ```
    :param name - name of the logger
    :param log_level - default log level
    :returns logging.Logger object

    Bug fix: the original attached a new StreamHandler on *every* call, so
    calling getlogger() twice with the same name duplicated each log line.
    A console handler is now attached only once per logger.
    """
    logger = logging.getLogger(name)
    logger.setLevel(log_level)
    if not logger.handlers:
        # create console handler (only on first configuration)
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter(LOG_FORMAT))
        logger.addHandler(ch)
        logger.info('Logger ready')
    return logger
def peek_into_file(fname: Path) -> csv.Dialect:
    """
    Sniff a CSV file's dialect so it can be handed to pandas.read_csv()
    or the csv module.

    :param fname: a Path object for the filename
    :return: a csv.Dialect
    """
    sniffer = csv.Sniffer()
    with fname.open(mode='r') as f:
        first_line = f.readline()
        logging.debug("has apikeyheader: %s", sniffer.has_header(first_line))
        f.seek(0)
        dialect = sniffer.sniff(f.readline(50))
    logging.debug("delim: '%s'", dialect.delimiter)
    logging.debug("quotechar: '%s'", dialect.quotechar)
    logging.debug("doublequote: %s", dialect.doublequote)
    logging.debug("escapechar: '%s'", dialect.escapechar)
    logging.debug("lineterminator: %r", dialect.lineterminator)
    logging.debug("quoting: %s", dialect.quoting)
    logging.debug("skipinitialspace: %s", dialect.skipinitialspace)
    # noinspection PyTypeChecker
    return dialect
def anonymize_password(password: str) -> str:
    """
    Mask a password with "*", keeping the first and the last two characters.
    Inputs shorter than 4 characters (or falsy, e.g. None/"") pass through
    unchanged.

    :param password: str
    :returns anonymized password (str):
    """
    if not password or len(password) < 4:
        return password
    return password[0] + "*" * (len(password) - 3) + password[-2:]
| {
"imports": [
"/lib/helpers.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/lib/baseenricher/test_enricher.py | import unittest
from lib.baseenricher.enricher import BaseEnricher
from models.idf import InternalDataFormat
class TestBaseEnricher(unittest.TestCase):
    """BaseEnricher.enrich() must behave as the identity transform."""
    def test_enrich(self):
        # Minimal IDF record (only email is mandatory).
        idf = InternalDataFormat(email="foo@example.com", password = "12345", notify = True)
        te = BaseEnricher()
        result = te.enrich(idf)
assert result == idf | """Purely abstract base enricher class."""
from models.idf import InternalDataFormat
class BaseEnricher:
    """Abstract base enricher: defines the interface; enrich() is the identity."""
    def __init__(self):
        pass
    def enrich(self, idf: InternalDataFormat) -> InternalDataFormat:
        """Return the IDF record unchanged; subclasses override this hook."""
        return idf
--- FILE SEPARATOR ---
from typing import List, Optional
from pydantic import BaseModel, IPvAnyAddress
class InternalDataFormat(BaseModel):
    """The Internal Data Format (IDF).

    One leaked-credential record; only ``email`` is mandatory.  Parsers
    produce IDF records and enrichers fill in the flag fields below.
    """
    leak_id: Optional[str]  # the leak(id) reference
    email: str
    password: Optional[str]  # not mandatory yet
    password_plain: Optional[str]
    password_hashed: Optional[str]
    hash_algo: Optional[str]  # name of the hashing algorithm, if known
    ticket_id: Optional[str]
    email_verified: Optional[bool] = False
    password_verified_ok: Optional[bool] = False
    ip: Optional[IPvAnyAddress]
    domain: Optional[str]
    target_domain: Optional[str]
    browser: Optional[str]
    malware_name: Optional[str]
    infected_machine: Optional[str]
    #
    # flags set by the enrichers
    dg: Optional[str]
    external_user: Optional[bool]
    is_vip: Optional[bool]
    is_active_account: Optional[bool]
    credential_type: Optional[List[str]]  # External, EU Login, etc.
    report_to: Optional[List[str]]  # whom to report this to?
    #
    # meta stuff and things for error reporting
    count_seen: Optional[int] = 1
    original_line: Optional[str]  # raw input line, kept for error reporting
    error_msg: Optional[str]
    notify: Optional[bool]
    needs_human_intervention: Optional[bool]
| {
"imports": [
"/lib/baseenricher/enricher.py",
"/models/idf.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /api/enrichment.py | """
Enrichment code
Author: Aaron Kaplan
License: see LICENSE
This basically just pulls in the enricher classes.
"""
from modules.enrichers.ldap_lib import CEDQuery
from modules.enrichers.ldap import LDAPEnricher
from modules.enrichers.vip import VIPEnricher
from modules.enrichers.external_email import ExternalEmailEnricher
| import sys
import os
import logging
from ldap3 import Server, Connection, ALL
import json
from typing import List
class CEDQuery:
    """ CEDQuery class. Encapsulates the LDAP connect and queries to CED.

    Author: L. Aaron Kaplan <leon-aaron.kaplan@ext.ec.europa.eu>

    Fixes: removed a stray debug print(); connect_ldap() no longer calls
    conn.bind() a *second* time just to log its result (bind is a server
    round-trip, not logging fodder); fixed the "could ot connect" typo.
    """
    is_connected = False
    conn = None

    def __init__(self):
        """ init() function. Automatically connects to LDAP (calls the connect_ldap() function). """
        if not self.is_connected:
            # Connection parameters come from the environment.
            self.server = os.getenv('CED_SERVER', default='localhost')
            self.port = int(os.getenv('CED_PORT', default=389))
            self.user = os.getenv('CED_USER')
            self.password = os.getenv('CED_PASSWORD')
            self.base_dn = os.getenv('CED_BASEDN')
            try:
                self.connect_ldap(self.server, self.port, self.user, self.password)
            except Exception as ex:
                logging.error("could not connect to LDAP. Reason: %s" % str(ex))
                self.is_connected = False

    def connect_ldap(self, server="ldap.example.com", port=389, user=None, password=None):
        """ Connects to the CED LDAP server. Returns None on failure. """
        try:
            ldap_server = Server(server, port=port, get_info=ALL)
            self.conn = Connection(ldap_server, user=user, password=password)
            # bind() exactly once; its result doubles as the connected flag.
            self.is_connected = self.conn.bind()
            logging.info("connect_ldap(): self.conn = %s" % (self.conn,))
            logging.info("connect_ldap(): conn.bind() = %s" % (self.is_connected,))
        except Exception as ex:
            logging.error("error connecting to CED. Reason: %s" % (str(ex)))
            self.is_connected = False
            return None

    def search_by_mail(self, email: str) -> List[dict]:
        """Search CED by mail address; returns a list of entry dicts.

        :raises Exception: when not connected, or when the LDAP search fails.
        """
        attributes = ['cn', 'dg', 'uid', 'ecMoniker', 'employeeType', 'recordStatus', 'sn', 'givenName', 'mail']
        if not self.is_connected:
            logging.error("Could not search via email. Not connected to LDAP.")
            raise Exception("Could not search via email. Not connected to LDAP.")
        try:
            self.conn.search(self.base_dn, "(mail=%s)" % (email,), attributes=attributes)
        except Exception as ex:
            logging.error("could not search LDAP. error: %s" % str(ex))
            raise ex
        logging.info("search_by_mail(): %s" % (self.conn.entries,))
        return [json.loads(entry.entry_to_json()) for entry in self.conn.entries]
if __name__ == "__main__":
    # Ad-hoc CLI: look up the email address given as the first argument.
    ced = CEDQuery()
    email = sys.argv[1]
    print(ced.search_by_mail(email))
--- FILE SEPARATOR ---
import logging
import os
from typing import Union
from modules.enrichers.ldap_lib import CEDQuery
class LDAPEnricher:
    """LDAP Enricher can query LDAP and offers multiple functions such as email-> dg.

    Fixes: SIMULATE_LDAP is now parsed as a real boolean — the original used
    ``bool(os.getenv(...))``, which is True for *any* non-empty value,
    including "0" and "False".  email_to_status() also gained an explicit
    return on the no-result path instead of an implicit fall-through None.
    """
    simulate_ldap: bool = False

    def __init__(self):
        # Accept common truthy spellings; anything else means "use real LDAP".
        self.simulate_ldap = os.getenv('SIMULATE_LDAP', default='').strip().lower() in ('1', 'true', 'yes', 'on')
        self.ced = CEDQuery()

    def email_to_dg(self, email: str) -> str:
        """Return the DG of an email. Note that there might be multiple DGs, we just return the first one here."""
        if self.simulate_ldap:
            return "Not connected to LDAP"
        try:
            results = self.ced.search_by_mail(email)
            if results and results[0]['attributes'] and results[0]['attributes']['dg'] and \
                    results[0]['attributes']['dg'][0]:
                return results[0]['attributes']['dg'][0]
            else:
                return "Unknown"
        except Exception as ex:
            logging.error("could not query LDAP/CED. Reason: %s" % str(ex))
            raise ex

    def email_to_user_id(self, email: str) -> Union[str, None]:
        """Return the userID of an email. """
        if self.simulate_ldap:
            return "Not connected to LDAP"
        try:
            results = self.ced.search_by_mail(email)
            if results and results[0]['attributes'] and results[0]['attributes']['ecMoniker'] and \
                    results[0]['attributes']['ecMoniker'][0]:
                return results[0]['attributes']['ecMoniker'][0]
            else:
                return None
        except Exception as ex:
            logging.error("could not query LDAP/CED. Reason: %s" % str(ex))
            raise ex

    def email_to_status(self, email: str) -> str:
        """Return the record status ("A" = active), or None when unknown."""
        if self.simulate_ldap:
            return "Not connected to LDAP"
        try:
            results = self.ced.search_by_mail(email)
            if results and results[0]['attributes'] and results[0]['attributes']['recordStatus'] and \
                    results[0]['attributes']['recordStatus'][0]:
                return results[0]['attributes']['recordStatus'][0]
            return None  # explicit: no record / empty status
        except Exception as ex:
            logging.error("could not query LDAP/CED. Reason: %s" % str(ex))
            raise ex

    def exists(self, email: str) -> bool:
        """Check if a user exists (record status "A")."""
        if self.simulate_ldap:
            return False
        status = self.email_to_status(email)
        return bool(status and status.upper() == "A")
--- FILE SEPARATOR ---
"""VIP Enricher. Can determine if an email addr. is a VIP and needs to be treated specially."""
import os
import logging
from pathlib import Path
from typing import List
class VIPEnricher:
    """Can determine if an Email Address is a VIP. Super trivial code."""
    vips = []

    def __init__(self, vipfile: Path = Path('VIPs.txt')):
        # VIPLIST env var overrides the default file location.
        try:
            self.load_vips(os.getenv('VIPLIST', default=vipfile))
        except Exception as ex:
            logging.error("Could not load VIP list. Using an empty list and continuing. Exception: %s" % str(ex))

    def load_vips(self, path: Path) -> List[str]:
        """Load the external reference data set of the known VIPs."""
        with open(path, 'r') as handle:
            self.vips = [line.strip().upper() for line in handle]
        return self.vips

    def is_vip(self, email: str) -> bool:
        """Check if an email address is a VIP (case-insensitive)."""
        return email.upper() in self.vips

    def __str__(self):
        return ",".join(self.vips)

    # repr and str render identically: the comma-joined VIP list.
    __repr__ = __str__
--- FILE SEPARATOR ---
"""ExternalEmailEnricher"""
class ExternalEmailEnricher:
    """Can determine if an Email Address is an (organisation-) external email address.

    Fix: the original condition ``email and email.endswith('europa.eu') or
    email.endswith('jrc.it')`` mixed ``and``/``or`` without parentheses, so
    the ``email and`` guard did not cover the second suffix check (and a
    None input crashed on .lower()).  A tuple argument to str.endswith makes
    the intent explicit and None/empty inputs now count as external.
    """

    @staticmethod
    def is_internal_email(email: str) -> bool:
        """True for addresses ending in europa.eu or jrc.it (case-insensitive)."""
        if not email:
            return False
        return email.lower().endswith(('europa.eu', 'jrc.it'))

    @staticmethod
    def is_external_email(email: str) -> bool:
        """Inverse of is_internal_email()."""
        return not ExternalEmailEnricher.is_internal_email(email)
| {
"imports": [
"/modules/enrichers/ldap_lib.py",
"/modules/enrichers/ldap.py",
"/modules/enrichers/vip.py",
"/modules/enrichers/external_email.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/lib/basecollector/test_collector.py | import unittest
import pandas as pd
from lib.basecollector.collector import *
class TestBaseCollector(unittest.TestCase):
    """Exercises BaseCollector.collect() with an existing and a missing CSV."""
    def test_collect(self):
        valid_csv_file = 'tests/fixtures/data.csv'
        invalid_csv_file = 'tests/fixtures/dataDOESNTEXIST.csv'
        tc = BaseCollector()
        df: pd.DataFrame
        status, df = tc.collect(valid_csv_file)
        # Happy path: "OK" plus a non-empty DataFrame with more than one row.
        assert status == "OK"
        assert not df.empty
        assert df.shape[0] > 1
        # Missing file: an error string plus an empty DataFrame.
        status, df = tc.collect(invalid_csv_file)
        assert status != "OK"
        assert df.empty
| """
BaseCollector
This implements the abstract collector interface
"""
import pandas as pd
import logging
class BaseCollector:
    """
    BaseCollector: purely abstract class which defines the interface:

        collect(input_source)

    The collector's role is to:
      1. fetch the data,
      2. check that it is complete,
      3. load it into an internal representation (a pandas DataFrame),
      4. hand the DataFrame to the next processing step,
      5. surface validation / IO errors.

    collect() returns a tuple of a status string ("OK" on success, an error
    message otherwise) and a DataFrame (empty on error).  Note the data is
    *not* yet in the internal data format (IDF).
    """

    def __init__(self):
        pass

    def collect(self, input_file: str, **kwargs) -> (str, pd.DataFrame):
        """
        Collect the data from input_file and return a pandas DF.

        :rtype: tuple return code ("OK" in case of success) and pandas DataFrame with the data
        """
        try:
            with open(input_file, "r") as handle:
                frame = pd.read_csv(handle, **kwargs)
        except Exception as ex:
            logging.exception("could not parse CSV file. Reason: %r" % (str(ex),))
            return str(ex), pd.DataFrame()
        return "OK", frame
| {
"imports": [
"/lib/basecollector/collector.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /modules/collectors/parser.py | #!/usr/bin/env python3
"""importer.parser """
from lib.helpers import getlogger
from pathlib import Path
import csv
import time
import pandas as pd
debug = True
logger = getlogger(__name__)
# noinspection PyTypeChecker
def peek_into_file(fname: Path) -> csv.Dialect:
    """
    Peek into a file in order to determine the dialect for pandas.read_csv() / csv functions.

    :param fname: a Path object for the filename
    :return: a csv.Dialect
    """
    with fname.open(mode='r') as f:
        sniffer = csv.Sniffer()
        # Header detection is only logged, not returned.
        logger.debug("has apikeyheader: %s", sniffer.has_header(f.readline()))
        f.seek(0)
        # Sniff the dialect from (at most) the first 50 chars of line one.
        dialect = sniffer.sniff(f.readline(50))
        logger.debug("delim: '%s'", dialect.delimiter)
        logger.debug("quotechar: '%s'", dialect.quotechar)
        logger.debug("doublequote: %s", dialect.doublequote)
        logger.debug("escapechar: '%s'", dialect.escapechar)
        logger.debug("lineterminator: %r", dialect.lineterminator)
        logger.debug("quoting: %s", dialect.quoting)
        logger.debug("skipinitialspace: %s", dialect.skipinitialspace)
        return dialect
class BaseParser:
    """The abstract Parser class: turns a CSV dump file into a DataFrame."""

    def __init__(self):
        pass

    def parse_file(self, fname: Path, leak_id: int = None, csv_dialect=None) -> pd.DataFrame:
        """Parse file (non-recursive) and returns a DataFrame with the contents.
        Overwrite this method in YOUR Parser subclass.

        # Parameters
          * fname: a Path object with the filename of the CSV file which should be parsed.
          * leak_id: the leak_id in the DB which is associated with that CSV dump file.
          * csv_dialect: optional csv.Dialect; sniffed from the file when absent.

        # Returns
          a DataFrame (with a leading ``leak_id`` column)

        # Raises
          re-raises whatever pandas.read_csv raised, after logging it.
        """
        logger.info("Parsing file %s..." % fname)
        try:
            dialect = csv_dialect if csv_dialect else peek_into_file(fname)  # try to guess
            # Fix: pandas' error_bad_lines=False/warn_bad_lines=True pair was
            # deprecated in pandas 1.3 and removed in 2.0; on_bad_lines="warn"
            # keeps the old behaviour (skip malformed rows, but report them).
            df = pd.read_csv(fname, dialect=dialect, on_bad_lines="warn")
            logger.debug(df.head())
            logger.debug(df.info())
            logger.debug("Parsing file 2...")
            # Tag every row with the leak it came from.
            df.insert(0, 'leak_id', leak_id)
            logger.debug(df.head())
            logger.debug("parsed %s", fname)
            return df
        except Exception as ex:
            logger.error("could not pandas.read_csv(%s). Reason: %s. Skipping file." % (fname, str(ex)))
            raise ex  # pass it on

    def normalize_data(self, df: pd.DataFrame, leak_id: int = None) -> pd.DataFrame:
        """
        Normalize the given data / data frame

        :param df: a pandas df with the leak_data
        :param leak_id: foreign key to the leak table
        :return: a pandas df
        """
        # replace NaN with None
        return df.where(pd.notnull(df), None)
if __name__ == "__main__":
    # Smoke-test scaffold: time a (currently commented-out) recursive parse.
    p = BaseParser()
    t0 = time.time()
    # p.parse_recursively('test_leaks', '*.txt')
    t1 = time.time()
    logger.info("processed everything in %f [sec]", (t1 - t0))
| import csv
import logging
from pathlib import Path
# Fix: the first LOG_FORMAT assignment was dead code (immediately
# overwritten); only the richer format string is kept.
LOG_FORMAT = '%(asctime)s - [%(name)s:%(module)s:%(funcName)s] - %(levelname)s - %(message)s'


def getlogger(name: str, log_level=logging.INFO) -> logging.Logger:
    """This is how we do logging. How to use it:
    Add the following code snippet to every module
    ```
    logger = getlogger(__name__)
    logger.info("foobar")
    ```
    :param name - name of the logger
    :param log_level - default log level
    :returns logging.Logger object

    Bug fix: the original attached a new StreamHandler on *every* call, so
    calling getlogger() twice with the same name duplicated each log line.
    A console handler is now attached only once per logger.
    """
    logger = logging.getLogger(name)
    logger.setLevel(log_level)
    if not logger.handlers:
        # create console handler (only on first configuration)
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter(LOG_FORMAT))
        logger.addHandler(ch)
        logger.info('Logger ready')
    return logger
def peek_into_file(fname: Path) -> csv.Dialect:
    """
    Sniff a CSV file's dialect so it can be handed to pandas.read_csv()
    or the csv module.

    :param fname: a Path object for the filename
    :return: a csv.Dialect
    """
    sniffer = csv.Sniffer()
    with fname.open(mode='r') as f:
        first_line = f.readline()
        logging.debug("has apikeyheader: %s", sniffer.has_header(first_line))
        f.seek(0)
        dialect = sniffer.sniff(f.readline(50))
    logging.debug("delim: '%s'", dialect.delimiter)
    logging.debug("quotechar: '%s'", dialect.quotechar)
    logging.debug("doublequote: %s", dialect.doublequote)
    logging.debug("escapechar: '%s'", dialect.escapechar)
    logging.debug("lineterminator: %r", dialect.lineterminator)
    logging.debug("quoting: %s", dialect.quoting)
    logging.debug("skipinitialspace: %s", dialect.skipinitialspace)
    # noinspection PyTypeChecker
    return dialect
def anonymize_password(password: str) -> str:
    """
    Mask a password with "*", keeping the first and the last two characters.
    Inputs shorter than 4 characters (or falsy, e.g. None/"") pass through
    unchanged.

    :param password: str
    :returns anonymized password (str):
    """
    if not password or len(password) < 4:
        return password
    return password[0] + "*" * (len(password) - 3) + password[-2:]
| {
"imports": [
"/lib/helpers.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /modules/enrichers/ldap.py | import logging
import os
from typing import Union
from modules.enrichers.ldap_lib import CEDQuery
class LDAPEnricher:
    """LDAP Enricher can query LDAP and offers multiple functions such as email-> dg.

    Fixes: SIMULATE_LDAP is now parsed as a real boolean — the original used
    ``bool(os.getenv(...))``, which is True for *any* non-empty value,
    including "0" and "False".  email_to_status() also gained an explicit
    return on the no-result path instead of an implicit fall-through None.
    """
    simulate_ldap: bool = False

    def __init__(self):
        # Accept common truthy spellings; anything else means "use real LDAP".
        self.simulate_ldap = os.getenv('SIMULATE_LDAP', default='').strip().lower() in ('1', 'true', 'yes', 'on')
        self.ced = CEDQuery()

    def email_to_dg(self, email: str) -> str:
        """Return the DG of an email. Note that there might be multiple DGs, we just return the first one here."""
        if self.simulate_ldap:
            return "Not connected to LDAP"
        try:
            results = self.ced.search_by_mail(email)
            if results and results[0]['attributes'] and results[0]['attributes']['dg'] and \
                    results[0]['attributes']['dg'][0]:
                return results[0]['attributes']['dg'][0]
            else:
                return "Unknown"
        except Exception as ex:
            logging.error("could not query LDAP/CED. Reason: %s" % str(ex))
            raise ex

    def email_to_user_id(self, email: str) -> Union[str, None]:
        """Return the userID of an email. """
        if self.simulate_ldap:
            return "Not connected to LDAP"
        try:
            results = self.ced.search_by_mail(email)
            if results and results[0]['attributes'] and results[0]['attributes']['ecMoniker'] and \
                    results[0]['attributes']['ecMoniker'][0]:
                return results[0]['attributes']['ecMoniker'][0]
            else:
                return None
        except Exception as ex:
            logging.error("could not query LDAP/CED. Reason: %s" % str(ex))
            raise ex

    def email_to_status(self, email: str) -> str:
        """Return the record status ("A" = active), or None when unknown."""
        if self.simulate_ldap:
            return "Not connected to LDAP"
        try:
            results = self.ced.search_by_mail(email)
            if results and results[0]['attributes'] and results[0]['attributes']['recordStatus'] and \
                    results[0]['attributes']['recordStatus'][0]:
                return results[0]['attributes']['recordStatus'][0]
            return None  # explicit: no record / empty status
        except Exception as ex:
            logging.error("could not query LDAP/CED. Reason: %s" % str(ex))
            raise ex

    def exists(self, email: str) -> bool:
        """Check if a user exists (record status "A")."""
        if self.simulate_ldap:
            return False
        status = self.email_to_status(email)
        return bool(status and status.upper() == "A")
| import sys
import os
import logging
from ldap3 import Server, Connection, ALL
import json
from typing import List
class CEDQuery:
    """ CEDQuery class. Encapsulates the LDAP connect and queries to CED.

    Author: L. Aaron Kaplan <leon-aaron.kaplan@ext.ec.europa.eu>
    """

    is_connected = False  # set by connect_ldap() to the result of conn.bind()
    conn = None           # the ldap3 Connection object

    def __init__(self):
        """ init() function. Automatically connects to LDAP (calls the connect_ldap() function). """
        if not self.is_connected:
            self.server = os.getenv('CED_SERVER', default='localhost')
            self.port = int(os.getenv('CED_PORT', default=389))
            self.user = os.getenv('CED_USER')
            self.password = os.getenv('CED_PASSWORD')
            self.base_dn = os.getenv('CED_BASEDN')
            try:
                self.connect_ldap(self.server, self.port, self.user, self.password)
            except Exception as ex:
                # typo fixed: "could ot" -> "could not"
                logging.error("could not connect to LDAP. Reason: %s" % str(ex))
                self.is_connected = False

    def connect_ldap(self, server="ldap.example.com", port=389, user=None, password=None):
        """ Connects to the CED LDAP server. Returns None on failure.

        :param server: LDAP host
        :param port: LDAP port
        :param user: bind DN / user (None -> anonymous bind)
        :param password: bind password
        """
        try:
            ldap_server = Server(server, port=port, get_info=ALL)
            self.conn = Connection(ldap_server, user=user, password=password)
            # BUGFIX: bind exactly once and remember the result. The original
            # called conn.bind() a second time just for the log line below,
            # which re-binds against the server needlessly. The stray debug
            # print() was removed as well.
            self.is_connected = self.conn.bind()
            logging.info("connect_ldap(): self.conn = %s" % (self.conn,))
            logging.info("connect_ldap(): conn.bind() = %s" % (self.is_connected,))
        except Exception as ex:
            logging.error("error connecting to CED. Reason: %s" % (str(ex)))
            self.is_connected = False
        return None

    def search_by_mail(self, email: str) -> List[dict]:
        """Search CED by mail address; return matching entries as a list of dicts.

        :param email: the email address to search for
        :raises Exception: when not connected, or re-raised when the search fails
        """
        attributes = ['cn', 'dg', 'uid', 'ecMoniker', 'employeeType', 'recordStatus', 'sn', 'givenName', 'mail']
        if not self.is_connected:
            logging.error("Could not search via email. Not connected to LDAP.")
            raise Exception("Could not search via email. Not connected to LDAP.")
        try:
            # NOTE(review): *email* is interpolated unescaped into the LDAP
            # filter -- confirm callers pass sanitized addresses (ldap3 offers
            # escape_filter_chars for untrusted input).
            self.conn.search(self.base_dn, "(mail=%s)" % (email,), attributes=attributes)
        except Exception as ex:
            logging.error("could not search LDAP. error: %s" % str(ex))
            raise ex
        logging.info("search_by_mail(): %s" % (self.conn.entries,))
        return [json.loads(entry.entry_to_json()) for entry in self.conn.entries]
if __name__ == "__main__":
    # Ad-hoc CLI smoke test: `python ldap_lib.py someone@example.com`
    # prints the raw CED entries for the given address.
    ced = CEDQuery()
    email = sys.argv[1]
    print(ced.search_by_mail(email))
| {
"imports": [
"/modules/enrichers/ldap_lib.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/test_parser_spycloud.py | import unittest
from pathlib import Path
from modules.parsers.spycloud import SpyCloudParser
from modules.collectors.spycloud.collector import SpyCloudCollector
class SpyCloudParserTest(unittest.TestCase):
    """Exercise the SpyCloud collector + parser pipeline on an anonymized fixture file."""

    def test_parse(self):
        fixture = 'tests/fixtures/data_anonymized_spycloud.csv'
        collector = SpyCloudCollector()
        statuscode, frame = collector.collect(Path(fixture))
        assert statuscode == "OK"
        parsed = SpyCloudParser().parse(frame)
        assert parsed
        for entry in parsed:
            if "error_msg" in entry.dict() and entry.error_msg:
                print("error_msg: %s" % entry.error_msg)
                print("orig_line: %s" % entry.original_line)
| """
SpyCloud Parser
Accepts a pandas DF, parses and validates it against the *IN*put format and returns it in the *internal* IDF format
"""
import logging
# from typing import List
from pydantic import parse_obj_as, ValidationError
import pandas as pd
import numpy as np
from typing import List
from lib.baseparser.parser import BaseParser
from models.indf import SpyCloudInputEntry
from models.idf import InternalDataFormat
class SpyCloudParser(BaseParser):
    """Parses/validates a SpyCloud pandas DataFrame into the internal IDF format."""

    def __init__(self):
        """init"""
        super().__init__()

    def parse(self, df: pd.DataFrame) -> List[InternalDataFormat]:
        """parse a pandas DF and return the data in the Internal Data Format.

        :param df: the collected SpyCloud DataFrame
        :returns: list of InternalDataFormat items; rows which failed input
            validation are returned with needs_human_intervention=True and
            the original row preserved in original_line.
        """
        # First, map empty columns to None so that it fits nicely into the IDF
        df.replace({"-": None}, inplace=True)
        df.replace({"nan": None}, inplace=True)
        df.replace({np.nan: None}, inplace=True)
        df.replace({'breach_date': {'Unknown': None}}, inplace=True)
        # validate via pydantic, row by row
        items = []
        for row in df.reset_index().to_dict(orient='records'):
            logging.debug("row=%s" % row)
            # pessimistic defaults: flagged for a human until validation succeeds
            idf_dict = dict(email=None, password=None, notify=False, domain=None, error_msg="incomplete data",
                            needs_human_intervention=True)
            idf_dict['original_line'] = str(row)
            try:
                input_data_item = parse_obj_as(SpyCloudInputEntry, row)  # here the validation magic happens
                idf_dict = input_data_item.dict()  # conversion between input format and internal df
                idf_dict['domain'] = input_data_item.email_domain  # map specific fields
            except Exception as ex:
                idf_dict['needs_human_intervention'] = True
                idf_dict['notify'] = False
                idf_dict['error_msg'] = str(ex)
                logging.error("could not parse CSV row. Original line: %r.\nReason: %s" % (repr(row), str(ex)))
                logging.debug("idf_dict = %s" % idf_dict)
            else:
                # BUGFIX: the success message was logged at ERROR level,
                # flooding the error log on every valid row -- debug is correct.
                logging.debug("everything successfully converted")
                idf_dict['needs_human_intervention'] = False
                idf_dict['notify'] = True
                idf_dict['error_msg'] = None
            finally:
                try:
                    idf = InternalDataFormat(**idf_dict)  # another step of validation happens here
                    logging.debug("idf = %r" % idf)
                except Exception as ex2:
                    logging.error("Exception in finally. idf_dict = %r" % idf_dict)
                    raise ex2
                else:
                    items.append(idf)
        return items
--- FILE SEPARATOR ---
"""
Spycloud collector
This code implements a SpyCloud collector (inherits from BaseCollector)
Upon running a SpyCloud parser on a CSV, the result will be a
"""
from pathlib import Path
import logging
import pandas as pd
from lib.basecollector.collector import BaseCollector
from lib.helpers import peek_into_file
# Strings that pandas should treat as NaN/missing when reading SpyCloud CSVs.
# Since collect() passes keep_default_na=False, this explicit list (note the
# extra '-' entry used by SpyCloud) replaces pandas' default NA set entirely.
NaN_values = ['', '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN', '-NaN', '-nan', '1.#IND', '1.#QNAN', '<NA>', 'N/A',
              'NA', 'NULL', 'NaN', 'n/a', 'null', '-']
class SpyCloudCollector(BaseCollector):
    """Collector which reads a SpyCloud CSV export from disk into a pandas DataFrame."""

    def __init__(self):
        super().__init__()

    def collect(self, input_file: Path, **kwargs) -> "tuple[str, pd.DataFrame]":
        """Read *input_file* as CSV, auto-detecting the dialect via peek_into_file().

        :param input_file: path of the SpyCloud CSV export
        :returns: ("OK", DataFrame) on success, or (error message, empty DataFrame)
            when pandas could not parse the file at all
        """
        try:
            dialect = peek_into_file(input_file)
            # NOTE(review): error_bad_lines/warn_bad_lines were deprecated in
            # pandas 1.3 and removed in 2.0 (replaced by on_bad_lines) -- this
            # code assumes an older pandas; confirm before upgrading.
            df = pd.read_csv(input_file, dialect=dialect, na_values=NaN_values,
                             keep_default_na=False, error_bad_lines=False, warn_bad_lines=True)
            # XXX FIXME: need to collect the list of (pandas-) unparseable rows and present to user.
            # For now we simply fail on the whole file. Good enough for the moment.
        except pd.errors.ParserError as ex:
            logging.error("could not parse CSV file. Reason: %r" % (str(ex),))
            return str(ex), pd.DataFrame()
        return "OK", df
| {
"imports": [
"/modules/parsers/spycloud.py",
"/modules/collectors/spycloud/collector.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/test_filter.py | from models.idf import InternalDataFormat
from modules.filters.filter import Filter
def test_filter():
    """The Filter stage is currently a NOP: it must return its input unchanged."""
    flt = Filter()
    entry = InternalDataFormat(email="aaron@example.com", password="12345", notify=False,
                               needs_human_intervention=False)
    assert flt.filter(entry) == entry
| from typing import List, Optional
from pydantic import BaseModel, IPvAnyAddress
class InternalDataFormat(BaseModel):
    """The Internal Data Format (IDF).

    Every parser emits this format; enrichers fill in the optional flag fields.
    """
    leak_id: Optional[str]  # the leak(id) reference
    email: str  # the only mandatory field
    password: Optional[str]  # not mandatory yet
    password_plain: Optional[str]
    password_hashed: Optional[str]
    hash_algo: Optional[str]
    ticket_id: Optional[str]
    email_verified: Optional[bool] = False
    password_verified_ok: Optional[bool] = False
    ip: Optional[IPvAnyAddress]
    domain: Optional[str]
    target_domain: Optional[str]
    browser: Optional[str]
    malware_name: Optional[str]
    infected_machine: Optional[str]
    #
    # flags set by the enrichers
    dg: Optional[str]
    external_user: Optional[bool]
    is_vip: Optional[bool]
    is_active_account: Optional[bool]
    credential_type: Optional[List[str]]  # External, EU Login, etc.
    report_to: Optional[List[str]]  # whom to report this to?
    #
    # meta stuff and things for error reporting
    count_seen: Optional[int] = 1
    original_line: Optional[str]  # original CSV row, kept for error reporting
    error_msg: Optional[str]  # validation/parse error message, if any
    notify: Optional[bool]  # should the affected user be notified?
    needs_human_intervention: Optional[bool]  # set when automated parsing failed
--- FILE SEPARATOR ---
from typing import Union
from models.idf import InternalDataFormat
class Filter:
    """Filter stage of the pipeline; currently a pure pass-through."""

    def __init__(self):
        pass

    def filter(self, idf: InternalDataFormat) -> Union[None, InternalDataFormat]:
        """Here we could implement all kinds of filters on data elements or whole rows.

        At the moment this is a NOP: the input item is returned unchanged.
        """
        return idf
| {
"imports": [
"/models/idf.py",
"/modules/filters/filter.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /modules/output/db.py | """Database output module. Stores an IDF item to the DB."""
from lib.helpers import getlogger
import psycopg2
import psycopg2.extras
from lib.baseoutput.output import BaseOutput
from lib.db.db import _get_db
from models.outdf import LeakData
# module-level logger (lib.helpers.getlogger attaches a stream handler)
logger = getlogger(__name__)
class PostgresqlOutput(BaseOutput):
    """Database output module. Upserts a LeakData row into the leak_data table."""

    dbconn = None  # psycopg2 connection, obtained lazily via _get_db()

    def __init__(self):
        super().__init__()
        self.dbconn = _get_db()

    def process(self, data: LeakData) -> bool:
        """Store the output format data into Postgresql.

        :param data: the LeakData row to persist (falsy data is a no-op)
        :returns True on success
        :raises psycopg2.Error exception
        """
        sql = """
        INSERT into leak_data(
            leak_id, email, password, password_plain, password_hashed, hash_algo, ticket_id, email_verified,
            password_verified_ok, ip, domain, browser , malware_name, infected_machine, dg
            )
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s )
        ON CONFLICT ON CONSTRAINT constr_unique_leak_data_leak_id_email_password_domain
        DO UPDATE SET count_seen = leak_data.count_seen + 1
        RETURNING id
        """
        if data:
            # BUGFIX: the 5th column is password_hashed, but the original code
            # bound data.password there (twice, in mogrify and execute), so the
            # hashed password was never stored. Bind the tuple once.
            params = (
                data.leak_id, data.email, data.password, data.password_plain, data.password_hashed,
                data.hash_algo, data.ticket_id, data.email_verified, data.password_verified_ok,
                data.ip, data.domain, data.browser, data.malware_name, data.infected_machine, data.dg)
            try:
                with self.dbconn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                    # debug prints replaced by the module logger
                    logger.debug(cur.mogrify(sql, params))
                    cur.execute(sql, params)
                    leak_data_id = int(cur.fetchone()['id'])
                    logger.debug("leak_data_id: %s", leak_data_id)
            except psycopg2.Error as ex:
                logger.error("%s(): error: %s" % (self.process.__name__, ex.pgerror))
                raise ex
        return True
| import csv
import logging
from pathlib import Path
# Log line layout used by getlogger(). The original code assigned a simpler
# format string first and immediately overwrote it; the dead assignment is gone.
LOG_FORMAT = '%(asctime)s - [%(name)s:%(module)s:%(funcName)s] - %(levelname)s - %(message)s'
def getlogger(name: str, log_level=logging.INFO) -> logging.Logger:
    """This is how we do logging. How to use it:
    Add the following code snippet to every module

    ```
    logger = getlogger(__name__)
    logger.info("foobar")
    ```

    :param name - name of the logger
    :param log_level - default log level
    :returns logging.Logger object
    """
    logger = logging.getLogger(name)
    logger.setLevel(log_level)
    # BUGFIX: logging.getLogger() returns the same instance per name, and the
    # original code attached a fresh StreamHandler on EVERY call -- so each
    # record was emitted once per getlogger() call. Attach the handler only
    # if the logger has none yet.
    if not logger.handlers:
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter(LOG_FORMAT))
        logger.addHandler(ch)
    logger.info('Logger ready')
    return logger
def peek_into_file(fname: Path) -> csv.Dialect:
    """
    Peek into a file in order to determine the dialect for pandas.read_csv() / csv functions.
    :param fname: a Path object for the filename
    :return: a csv.Dialect
    """
    with fname.open(mode = 'r') as f:
        sniffer = csv.Sniffer()
        # NOTE(review): has_header() is given a single line here; the sniffer
        # normally needs several rows to decide -- result is informational only.
        logging.debug("has apikeyheader: %s", sniffer.has_header(f.readline()))
        f.seek(0)
        # NOTE(review): readline(50) caps the sniff sample at the first 50
        # characters of line 1; a delimiter appearing only later in the line
        # would be missed -- confirm this limit is intentional.
        dialect = sniffer.sniff(f.readline(50))
        logging.debug("delim: '%s'", dialect.delimiter)
        logging.debug("quotechar: '%s'", dialect.quotechar)
        logging.debug("doublequote: %s", dialect.doublequote)
        logging.debug("escapechar: '%s'", dialect.escapechar)
        logging.debug("lineterminator: %r", dialect.lineterminator)
        logging.debug("quoting: %s", dialect.quoting)
        logging.debug("skipinitialspace: %s", dialect.skipinitialspace)
        # noinspection PyTypeChecker
        return dialect
def anonymize_password(password: str) -> str:
    """
    "*"-out the characters of a password. Must be 4 chars in length at least.
    :param password: str
    :returns anonymized password (str):
    """
    masked = password
    if password and len(password) >= 4:
        # first char + stars + last two chars
        stars = "*" * (len(password) - 3)
        masked = "".join((password[:1], stars, password[-2:]))
    return masked
--- FILE SEPARATOR ---
"""Base, abstract Output class"""
from models.outdf import Answer
class BaseOutput:
    """Base, abstract Output class; concrete output modules override process()."""

    def __init__(self):
        pass

    def process(self, output_data: Answer) -> bool:
        """Process the output_data and do something with it.

        :returns bool... True on success.
        """
        return True
--- FILE SEPARATOR ---
"""Very very lightweight DB abstraction"""
import os
import psycopg2
import psycopg2.extras
from fastapi import HTTPException
import logging
#################################
# DB functions

# Module-level connection handle; created lazily by _get_db().
db_conn = None

# NOTE(review): DSN is built once at import time. If DBPASSWORD is unset,
# os.getenv() returns None and the string becomes "password=None" -- confirm
# the deployment always sets DBPASSWORD.
DSN = "host=%s dbname=%s user=%s password=%s" % (os.getenv('DBHOST', 'localhost'),
                                                 os.getenv('DBNAME', 'credentialleakdb'),
                                                 os.getenv('DBUSER', 'credentialleakdb'),
                                                 os.getenv('DBPASSWORD'))
def _get_db():
    """
    Open a new database connection if there is none yet for the
    current application context.

    :returns: the DB handle."""
    global db_conn
    if db_conn:
        return db_conn
    # no cached connection yet -- create and memoize one
    db_conn = _connect_db(DSN)
    return db_conn
# noinspection PyUnresolvedReferences
def _close_db():
    """Closes the database again at the end of the request."""
    global db_conn
    logging.info('shutting down....')
    if not db_conn:
        return db_conn
    db_conn.close()
    db_conn = None
    return db_conn
def _connect_db(dsn: str):
    """Connects to the specific database.

    :param dsn: the database connection string.
    :returns: the DB connection handle
    :raises HTTPException: HTTP 500 when the connection attempt fails
    """
    try:
        conn = psycopg2.connect(dsn)
        # autocommit: every statement is committed immediately
        conn.set_session(autocommit=True)
    except Exception as ex:
        # chain the original exception (`from ex`) so the root cause is
        # preserved in tracebacks instead of being swallowed
        raise HTTPException(status_code=500,
                            detail="could not connect to the DB. Reason: %s" % (str(ex))) from ex
    logging.info("connection to DB established")
    return conn
--- FILE SEPARATOR ---
import datetime
from enum import Enum
from typing import Optional, Dict, List # Union
from pydantic import BaseModel, EmailStr
class Leak(BaseModel):
    """A single leak / breach incident (output data format)."""
    id: Optional[int]  # DB primary key; None before the row is inserted
    ticket_id: Optional[str]  # reference in the external ticketing system
    summary: str  # short human-readable description (mandatory)
    reporter_name: Optional[str]
    source_name: Optional[str]
    breach_ts: Optional[datetime.datetime]  # when the breach happened, if known
    source_publish_ts: Optional[datetime.datetime]  # when the source published it
class CredentialType(Enum):
    """Closed set of credential categories a leaked login can belong to."""
    is_external = "External"
    is_proxy_login = "Proxy"
    is_EU_login = "EU Login"
    is_domain_login = "Domain"
    is_secem_login = "SECEM"
class LeakData(BaseModel):
    """One leaked credential row (output data format; see the leak_data INSERT
    in modules/output/db.py for the corresponding table columns)."""
    id: Optional[int]  # DB primary key; None before INSERT
    leak_id: int  # FK to the leak this row belongs to
    email: EmailStr
    password: str
    password_plain: Optional[str]
    password_hashed: Optional[str]
    hash_algo: Optional[str]
    ticket_id: Optional[str]
    email_verified: Optional[bool]
    password_verified_ok: Optional[bool]
    ip: Optional[str]
    domain: Optional[str]
    target_domain: Optional[str]  # new
    browser: Optional[str]
    malware_name: Optional[str]
    infected_machine: Optional[str]
    dg: Optional[str]
    is_vip: Optional[bool]
    credential_type: Optional[List[CredentialType]]
    report_to: Optional[List[str]]  # the security contact to report this to, in case it's not the user directly.
    #
    # meta stuff and things for error reporting
    count_seen: Optional[int] = 1
    original_line: Optional[str]  # the original CSV file in case of errors
    error_msg: Optional[str]
    notify: bool
    needs_human_intervention: bool
class AnswerMeta(BaseModel):
    """Metadata accompanying an API Answer."""
    version: str  # API/application version string
    duration: float  # processing duration -- presumably seconds; TODO confirm
    count: int  # number of items returned -- presumably len(Answer.data); TODO confirm
class Answer(BaseModel):
    """Generic API response envelope."""
    meta: Optional[AnswerMeta]
    data: List[Dict]  # Union[Dict,List]
    success: bool  # overall success flag for the request
    errormsg: Optional[str] = ""  # empty string when there was no error
| {
"imports": [
"/lib/helpers.py",
"/lib/baseoutput/output.py",
"/lib/db/db.py",
"/models/outdf.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /modules/collectors/spycloud.py | #!/usr/bin/env python3
"""Spycloud parser"""
import collections
import logging
from pathlib import Path
import pandas as pd
# from parser import BaseParser
from .parser import BaseParser
class SpycloudParser(BaseParser):
    """Parse Spycloud CSVs"""

    def parse_file(self, fname: Path, csv_dialect='excel', leak_id=None) -> pd.DataFrame:
        """Parse the Spycloud CSV files, which are in the form:
        breach_title,spycloud_publish_date,breach_date,email,domain,username,password,salt,target_domain,target_url,password_plaintext,sighting,severity,status,password_type,cc_number,infected_path,infected_machine_id,email_domain,cc_expiration,cc_last_four,email_username,user_browser,infected_time,ip_addresses

        :param fname: path of the CSV file
        :param csv_dialect: currently unused (pandas infers the format); kept for interface compatibility
        :param leak_id: currently unused here; kept for interface compatibility

        Returns:
            a DataFrame (empty DataFrame when pandas could not read the file)
        """
        logging.debug("Parsing SPYCLOUD file %s...", fname)
        try:
            df = pd.read_csv(fname, error_bad_lines=False, warn_bad_lines=True)
            logging.debug(df)
            return df
        except Exception as ex:
            logging.error("could not pandas.read_csv(%s). Reason: %s. Skipping file." % (fname, str(ex)))
            return pd.DataFrame()

    def normalize_data(self, df: pd.DataFrame, leak_id=None) -> pd.DataFrame:
        """Bring the pandas DataFrame into an internal data format.

        Spycloud headers:
        breach_title, spycloud_publish_date, breach_date, email, domain, username, password, salt, target_domain, target_url, password_plaintext, sighting, severity, status, password_type, cc_number, infected_path, infected_machine_id, email_domain, cc_expiration, cc_last_four, email_username, user_browser, infected_time, ip_addresses
        map to:
        _, leak.source_publish_ts, leak.breach_ts, email, domain, _, password, _, target_domain, _, password_plain, _, _, _, hash_algo, _, _, infected_machine, _ , _, _, _, browser, _, ip
        """
        # None means: drop the column; a string is the internal column name.
        mapping_tbl = collections.OrderedDict({
            "breach_title": None,
            "spycloud_publish_date": None,
            "breach_date": None,
            "email": "email",
            "domain": None,
            "username": None,
            "password": "password",
            "salt": None,
            "target_domain": "target_domain",
            "target_url": None,
            "password_plaintext": "password_plain",
            "sighting": None,
            "severity": None,
            "status": None,
            "password_type": "hash_algo",
            "cc_number": None,
            "infected_path": None,
            "infected_machine_id": "infected_machine",
            "email_domain": "domain",
            "cc_expiration": None,
            "cc_last_four": None,
            "email_username": None,
            "user_browser": "browser",
            "infected_time": None,
            "ip_addresses": "ip"
        })
        # FIX: the original appended one pd.Series per row via DataFrame.append(),
        # which is O(n^2) and was removed in pandas 2.0. Collect plain dicts and
        # build the DataFrame once at the end instead.
        rows = []
        for _, r in df.iterrows():  # go over all df rows
            retrow = dict()  # build up what we want to return
            for k, v in r.items():  # go over all key-val items in the row
                map_to = mapping_tbl.get(k)
                if k == 'ip_addresses' and v == '-':
                    v = None  # SpyCloud uses '-' as "no IP"
                if map_to:
                    retrow[map_to] = v
                # columns mapped to None (or unknown columns) are dropped
            logging.debug("retrow = %r" % retrow)
            rows.append(retrow)
        retdf = pd.DataFrame(rows)
        # retdf[:,'leak_id'] = leak_id
        logging.debug("retdf: %s" % retdf)
        return retdf
| #!/usr/bin/env python3
"""importer.parser """
from lib.helpers import getlogger
from pathlib import Path
import csv
import time
import pandas as pd
# Module-wide debug switch -- presumably toggles verbose behavior elsewhere;
# not referenced in this visible chunk (TODO confirm it is still used).
debug = True
# module-level logger (lib.helpers.getlogger attaches a stream handler)
logger = getlogger(__name__)
# noinspection PyTypeChecker
def peek_into_file(fname: Path) -> csv.Dialect:
    """
    Peek into a file in order to determine the dialect for pandas.read_csv() / csv functions.
    :param fname: a Path object for the filename
    :return: a csv.Dialect
    """
    with fname.open(mode='r') as f:
        sniffer = csv.Sniffer()
        # NOTE(review): has_header() gets only a single line; the sniffer
        # normally needs several rows to decide -- result is informational only.
        logger.debug("has apikeyheader: %s", sniffer.has_header(f.readline()))
        f.seek(0)
        # NOTE(review): readline(50) caps the sniff sample at the first 50
        # characters of line 1 -- delimiters appearing later would be missed;
        # confirm this limit is intentional.
        dialect = sniffer.sniff(f.readline(50))
        logger.debug("delim: '%s'", dialect.delimiter)
        logger.debug("quotechar: '%s'", dialect.quotechar)
        logger.debug("doublequote: %s", dialect.doublequote)
        logger.debug("escapechar: '%s'", dialect.escapechar)
        logger.debug("lineterminator: %r", dialect.lineterminator)
        logger.debug("quoting: %s", dialect.quoting)
        logger.debug("skipinitialspace: %s", dialect.skipinitialspace)
        return dialect
class BaseParser:
    """The abstract Parser class."""

    def __init__(self):
        pass

    def parse_file(self, fname: Path, leak_id: int = None, csv_dialect=None) -> pd.DataFrame:
        """Parse file (non-recursive) and returns a DataFrame with the contents.
        Overwrite this method in YOUR Parser subclass.

        # Parameters
          * fname: a Path object with the filename of the CSV file which should be parsed.
          * leak_id: the leak_id in the DB which is associated with that CSV dump file.
        # Returns
          a DataFrame
          number of errors while parsing
        """
        logger.info("Parsing file %s..." % fname)
        try:
            # use the caller-supplied dialect if given, otherwise sniff it
            dialect = csv_dialect if csv_dialect else peek_into_file(fname)
            frame = pd.read_csv(fname, dialect=dialect, error_bad_lines=False, warn_bad_lines=True)  # , usecols=range(2))
            logger.debug(frame.head())
            logger.debug(frame.info())
            logger.debug("Parsing file 2...")
            frame.insert(0, 'leak_id', leak_id)
            logger.debug(frame.head())
            logger.debug("parsed %s", fname)
            return frame
        except Exception as ex:
            logger.error("could not pandas.read_csv(%s). Reason: %s. Skipping file." % (fname, str(ex)))
            raise ex  # pass it on

    def normalize_data(self, df: pd.DataFrame, leak_id: int = None) -> pd.DataFrame:
        """
        Normalize the given data / data frame

        :param df: a pandas df with the leak_data
        :param leak_id: foreign key to the leak table
        :return: a pandas df
        """
        # replace NaN with None
        not_null_mask = pd.notnull(df)
        return df.where(not_null_mask, None)
if __name__ == "__main__":
    # Ad-hoc timing harness for manual runs of the base parser.
    p = BaseParser()
    t0 = time.time()
    # p.parse_recursively('test_leaks', '*.txt')
    t1 = time.time()
    logger.info("processed everything in %f [sec]", (t1 - t0))
| {
"imports": [
"/modules/collectors/parser.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/lib/baseoutput/test_output.py | import unittest
from lib.baseoutput.output import BaseOutput
class TestBaseOutput(unittest.TestCase):
    """Smoke test for the abstract BaseOutput class."""

    def test_process(self):
        output = BaseOutput()
        self.assertTrue(output.process("test_outputfile.txt"))
| """Base, abstract Output class"""
from models.outdf import Answer
class BaseOutput:
    """Abstract base for output modules; subclasses implement process()."""

    def __init__(self):
        pass

    def process(self, output_data: Answer) -> bool:
        """Process the output_data and do something with it.
        :returns bool... True on success.
        """
        return True
| {
"imports": [
"/lib/baseoutput/output.py"
]
} |
06opoTeHb/credentialLeakDB | refs/heads/main | /tests/modules/enrichers/test_external_email.py | import unittest
from modules.enrichers.external_email import ExternalEmailEnricher
class TestExternalEmailEnricher(unittest.TestCase):
    """Check classification of internal vs. external email addresses."""

    def test_is_external(self):
        enricher = ExternalEmailEnricher()
        self.assertTrue(enricher.is_external_email("foobar@example.com"))
        self.assertTrue(enricher.is_internal_email("foobar.example@ec.europa.eu"))
| """ExternalEmailEnricher"""
class ExternalEmailEnricher:
    """Can determine if an Email Adress is an (organisation-) external email address. Also super trivial code."""

    @staticmethod
    def is_internal_email(email: str) -> bool:
        """True when *email* belongs to an internal domain (europa.eu / jrc.it).

        BUGFIX: the original condition `email and email.endswith('europa.eu')
        or email.endswith('jrc.it')` bound as `(a and b) or c` -- for a falsy
        email the guard was skipped and `.endswith` (or the preceding
        `.lower()`) raised AttributeError on None. Empty/None now returns False.
        """
        if not email:
            return False
        # endswith accepts a tuple: one call covers both internal domains
        return email.lower().endswith(('europa.eu', 'jrc.it'))

    @staticmethod
    def is_external_email(email: str) -> bool:
        """True when *email* is NOT an internal address."""
        return not ExternalEmailEnricher.is_internal_email(email)
| {
"imports": [
"/modules/enrichers/external_email.py"
]
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.