index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
71,630 | thirdpin-hackaton/telegram-bot | refs/heads/master | /bot.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import json
import time
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
from telegram import ChatAction, ReplyKeyboardMarkup
import logging
from logging.handlers import RotatingFileHandler
from mbdevs.dooropener import DoorOpener, Action
from mbdevs.dooropener import Action as DoorAction
from mbdevs.trafflight import TrafficLight
from mbdevs.emergency import Emergency
from mbdevs import ivitmrs
from mbdevs.ivitmrs import IvitMRS
from mbdevs.ivitmrs import REGS as IVIT_MRS_REGS
class _BotLogger():
    """Lazily configured process-wide logger for the bot.

    The first instantiation attaches a console handler (INFO) and a
    rotating file handler (DEBUG, ``log.txt``); later instantiations
    reuse the already-configured ``logging`` logger.
    """

    # Class-level guard so the handlers are attached exactly once.
    is_inited = False

    def __init__(self):
        if not _BotLogger.is_inited:
            formatter = logging.Formatter(
                '%(asctime)s_%(name)s_%(levelname)s: %(message)s')
            ch = logging.StreamHandler()
            ch.setFormatter(formatter)
            ch.setLevel(logging.INFO)
            fh = RotatingFileHandler(
                "log.txt", maxBytes=1048576, backupCount=5)
            fh.setFormatter(formatter)
            fh.setLevel(logging.DEBUG)
            self._logger = logging.getLogger(__name__)
            self._logger.addHandler(fh)
            self._logger.addHandler(ch)
            self._logger.setLevel(logging.DEBUG)
            _BotLogger.is_inited = True
        else:
            self._logger = logging.getLogger(__name__)

    @property
    def logger(self):
        """The wrapped ``logging.Logger``."""
        return self._logger

    @staticmethod
    def instance():
        """Return the shared, fully configured ``logging.Logger``."""
        # Fixed: was a plain function relying on being called off the
        # class object; now an explicit @staticmethod.
        return _BotLogger()._logger
# JSON file shaped {"ids": [...]} listing the Telegram chat ids that are
# allowed to use restricted commands (see _check_user_access).
FULL_ACCESS_USER_IDS_FILE = "ids.json"
class Bot(object):
    """Factory wrapper around the actual bot implementation.

    Use :meth:`make_bot` rather than instantiating ``_BotImpl`` directly.
    """

    @classmethod
    def make_bot(
        cls,
        full_access_ids_file=FULL_ACCESS_USER_IDS_FILE,
    ):
        """Build a ``_BotImpl`` connected to all hardware devices.

        :param full_access_ids_file: JSON file shaped ``{"ids": [...]}``
            with the Telegram chat ids allowed to open the door.
        """
        # Instantiated purely for its side effect: ensures log handlers
        # are attached before the devices start talking.
        _BotLogger.instance()
        return cls._BotImpl(full_access_ids_file)

    class _BotImpl(object):
        def __init__(self, full_access_ids_file):
            """Connect to every Modbus device and load the access list.

            :raises FileNotFoundError: if the ids file is missing.
            """
            self._full_access_users = list()
            self._log = _BotLogger.instance()
            # All devices sit behind the same FTDI USB-serial VID:PID;
            # each device class selects its own Modbus slave address.
            self._ivt_mrs = IvitMRS.from_vid_pid(0x0403, 0x6015)
            self._door = DoorOpener.from_vid_pid(0x0403, 0x6015)
            self._trafflight = TrafficLight.from_vid_pid(0x0403, 0x6015)
            self._emergency = Emergency.from_vid_pid(0x0403, 0x6015)
            try:
                with open(full_access_ids_file) as f:
                    self._full_access_users = json.load(f)["ids"]
            except FileNotFoundError:
                self._log.error(
                    "File \"{}\" with full access IDs is not found!".format(
                        full_access_ids_file))
                # Bare raise preserves the original traceback.
                raise

        def start(self, bot, update):
            """Send a message when the command /start is issued."""
            custom_keyboard = [["/open_door"],
                               ["/get_temperature_and_humidity"]]
            reply_markup = ReplyKeyboardMarkup(
                custom_keyboard, resize_keyboard=True)
            update.message.reply_text('Hi!', reply_markup=reply_markup)

        def _traffic_light(self):
            """Blink a short color sequence, then switch all lamps off.

            Just for lulz aka test.
            """
            self._trafflight.tell({
                "action":
                TrafficLight.Action.SEQUENCE,
                "sleep_time":
                0.1,
                "colors":
                (TrafficLight.Color.GREEN, TrafficLight.Color.YELLOW,
                 TrafficLight.Color.RED, TrafficLight.Color.GREEN,
                 TrafficLight.Color.YELLOW, TrafficLight.Color.YELLOW,
                 TrafficLight.Color.GREEN, TrafficLight.Color.RED,
                 TrafficLight.Color.YELLOW, TrafficLight.Color.GREEN)
            })
            self._trafflight.tell({
                "action": TrafficLight.Action.OFF,
                "color": TrafficLight.Color.ALL
            })

        def get_temperature_and_humidity(self, bot, update):
            """Reply with the current sensor readings.

            Handler for /get_temperature_and_humidity.
            """
            try:
                msg = 'Temperature: {t:0.1f}{t_units:s}. '\
                      'Humidity: {h:0.1f}{h_units:s}.'.format(
                          t=self._ivt_mrs.temp,
                          t_units=IVIT_MRS_REGS.temp.unit,
                          h=self._ivt_mrs.humidity,
                          h_units=IVIT_MRS_REGS.humidity.unit)
                update.message.reply_text(msg)
            except Exception:
                self._log.error(
                    "Error while connection with a temp sensor!",
                    exc_info=True)
                update.message.reply_text('Something goes wrong!')
            # Blinks on success AND failure (intentionally outside the try).
            self._traffic_light()

        def open_door(self, bot, update):
            """Open the door for authorized users (handler for /open_door)."""
            self._log.info("User opening door: {}".format(
                update.message.chat.id))
            if not self._check_user_access(update):
                return
            update.message.reply_text('Opening the door...')
            try:
                # ask() returns True when the actor started an opening
                # cycle, False when the door was already open.  Renamed
                # from the misleading "not_is_opened".
                opening_started = self._door.ask({"action": DoorAction.OPEN})
                if opening_started:
                    update.message.reply_text('The door was opened.')
                    self._trafflight.tell({
                        "action":
                        TrafficLight.Action.SEQUENCE,
                        "sleep_time":
                        0.5,
                        "colors":
                        (TrafficLight.Color.GREEN, TrafficLight.Color.GREEN,
                         TrafficLight.Color.GREEN, TrafficLight.Color.GREEN,
                         TrafficLight.Color.GREEN, TrafficLight.Color.GREEN)
                    })
                else:
                    update.message.reply_text('The door is already opened.')
            except Exception:
                self._log.error(
                    "Error while connection with a door opener!",
                    exc_info=True)
                update.message.reply_text('Cannot open the door.')

        def error(self, bot, update, error):
            """Log Errors caused by Updates."""
            self._log.warning('Update "%s" caused error "%s"', update, error)

        def _check_user_access(self, update):
            """Return True if the sender's chat id is in the access list."""
            if update.message.chat.id not in self._full_access_users:
                update.message.reply_text('Sorry, but this function is not '
                                          'avaliable for you, pal.')
                # Fixed: logger.warn is deprecated in favor of warning.
                self._log.warning(
                    'An attempt of a restricted access, user {}'.format(
                        update.message.chat.id))
                return False
            else:
                return True
def main():
    """Start the bot.

    Usage: ``bot.py <telegram-bot-token>`` — the token is read from argv[1].
    """
    log = _BotLogger.instance()

    # Fixed: previously a missing token crashed with a raw IndexError.
    if len(sys.argv) < 2:
        log.error("Usage: bot.py <telegram-bot-token>")
        sys.exit(1)

    # Make a bot instance
    try:
        bot = Bot.make_bot()
    except Exception:
        log.error("Can not create a bot instance:", exc_info=True)
        raise

    # Create the EventHandler and pass it your bot's token.
    updater = Updater(sys.argv[1])

    # Get the dispatcher to register handlers
    dp = updater.dispatcher

    # On different commands - answer in Telegram
    dp.add_handler(CommandHandler("start", bot.start))
    dp.add_handler(CommandHandler("open_door", bot.open_door))
    dp.add_handler(
        CommandHandler("get_temperature_and_humidity",
                       bot.get_temperature_and_humidity))

    # Log all errors
    dp.add_error_handler(bot.error)

    # Start the Bot
    updater.start_polling()

    # Run the bot until you press Ctrl-C or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()


if __name__ == '__main__':
    main()
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,631 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/emergency.py | #!/usr/bin/env python
import time
import enum
import minimalmodbus
import serial.tools.list_ports
from threading import Thread
from collections import namedtuple
from pykka import ThreadingActor
from functools import partial
from .common import Logger, find_device
from .exceptions import ComDeviceNotFound
from .modbus import FunctionalCodes, Register, Modbus, Action, ModbusUser
# Modbus register map of the emergency unit.  Addresses and function codes
# presumably mirror the device firmware — TODO confirm against the hardware.
EmergencyRegs = namedtuple(
    'EmergencyRegs', ['button', 'sound', 'button_config', 'sound_config'])

REGS = EmergencyRegs(
    # Read-only discrete input reflecting the physical button state.
    button=Register(
        name="Emergency button",
        addr=4103,
        func_code=FunctionalCodes.DISCRETE,
        count=1,
        value_type=bool,
        unit=''),
    # Coil driving the siren ("dudka").
    sound=Register(
        name="Dudka",
        addr=11,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''),
    # GPIO direction/configuration coils (written once at startup).
    button_config=Register(
        name="Emergency button config",
        addr=7,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''),
    sound_config=Register(
        name="Dudka config",
        addr=3,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''))
class Emergency(ModbusUser, ThreadingActor):
    """Pykka actor driving the emergency button and the siren ("dudka").

    A background thread polls the button register twice a second and
    posts BUTTON_STATE messages back to the actor on changes.
    """

    class Action(enum.Enum):
        # Message types understood by on_receive.
        SOUND_ON = 1
        SOUND_OFF = 0
        SUBSCRIBE_TO_BUTTON = 2  # NOTE(review): no handler exists for this
        BUTTON_STATE = 3

    class State(enum.Enum):
        ON = 1
        OFF = 0

    @classmethod
    def from_vid_pid(cls, vip, pid, dev_addr=2):
        """Locate the USB-serial adapter by VID/PID and start the actor.

        Returns a pykka ActorRef (via ``cls.start``), not an instance.
        """
        Logger.for_name(__name__).info("Device search...")
        dev = find_device(vip, pid)
        return cls.start(dev.device, dev_addr)

    def __init__(self, port, dev_addr):
        ThreadingActor.__init__(self)
        self._log = Logger.for_name(__name__)
        try:
            ModbusUser.__init__(
                self, minimalmodbus.Instrument(
                    str(port), dev_addr, mode='rtu'))
        except Exception as e:
            self._log.error(str(e), exc_info=True)
            raise e
        self._initialize_gpio()
        self._button_state = Emergency.State.OFF
        # Fixed: daemon thread so the endless polling loop below cannot
        # block interpreter shutdown.
        self._button_check_th = Thread(
            target=self._button_check_thread, daemon=True)
        self._button_check_th.start()

    def on_receive(self, msg):
        """Dispatch an actor message; 'action' selects the handler."""
        action = msg.pop('action')
        self._match_action(action, **msg)

    def sound_on(self):
        """Turn the siren on."""
        self._log.info("Dudka on!")
        self._write_reg(REGS.sound, 1)

    def sound_off(self):
        """Turn the siren off."""
        self._log.info("Dudka off!")
        self._write_reg(REGS.sound, 0)

    def _button_handler(self, state):
        """React to a button state change: siren follows the button."""
        self._button_state = state
        if state == Emergency.State.ON:
            self._log.info("Emergency button pressed!")
            self.sound_on()
        else:
            self._log.info("Emergency button disabled")
            self.sound_off()

    def _match_action(self, action, **kwarg):
        """Route an Action to its handler, forwarding extra kwargs."""
        try:
            {
                Emergency.Action.SOUND_ON:
                self.sound_on,
                Emergency.Action.SOUND_OFF:
                self.sound_off,
                Emergency.Action.BUTTON_STATE:
                lambda: self._button_handler(**kwarg)
            }[action]()
        except Exception:
            # Fixed: was a bare "except:" logging an empty message at INFO,
            # which silently hid unknown actions and handler failures.
            self._log.error(
                "Failed to handle action {}".format(action), exc_info=True)

    def _initialize_gpio(self):
        """Configure the button pin as input (0) and siren pin as output (1)."""
        self._write_reg(REGS.button_config, 0)
        self._write_reg(REGS.sound_config, 1)

    def _button_check_thread(self):
        """Poll the button register forever; notify the actor on changes."""
        while True:
            # The input is active-low: a read of 0 means "pressed".
            is_button_on = not self._read_reg(REGS.button)
            btn_state = Emergency.State.ON if is_button_on else Emergency.State.OFF
            if self._button_state != btn_state:
                self.actor_ref.tell({
                    "action": Emergency.Action.BUTTON_STATE,
                    "state": btn_state
                })
            time.sleep(0.5)
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,632 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/dooropener.py | #!/usr/bin/env python
import time
import minimalmodbus
import serial.tools.list_ports
import threading
from enum import Enum
from collections import namedtuple
from pykka import ThreadingActor
from .common import Logger, find_device
from .exceptions import CannotReadARegisterValue
from .modbus import FunctionalCodes, Register, Modbus, ModbusUser
# Modbus register map of the door opener unit.  Addresses presumably mirror
# the device firmware — TODO confirm against the hardware.
DoorOpenerRegs = namedtuple('DoorOpenerRegs',
                            ['light', 'light_config', 'door', 'door_config'])

REGS = DoorOpenerRegs(
    # Coil driving the indicator light.
    light=Register(
        name="Light",
        addr=9,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''),
    light_config=Register(
        name="Light config",
        addr=1,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''),
    # Coil driving the door relay.
    door=Register(
        name="Door",
        addr=8,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''),
    # GPIO configuration coils (written once at startup).
    door_config=Register(
        name="Door config",
        addr=0,
        func_code=FunctionalCodes.COIL,
        count=1,
        value_type=bool,
        unit=''),
)
class Action(Enum):
    """Messages accepted by the DoorOpener actor (see its on_receive)."""
    OPEN = 1
    CLOSE = 0
    # Internal: posted by a timer to mark the end of the closing cycle.
    FINALIZE_CLOSING = -1
class DoorState(Enum):
    """Door position tracked by the DoorOpener actor."""
    # Fixed: a stray trailing comma made OPENED's value the tuple (1,)
    # instead of the int 1.  All in-file comparisons are by identity,
    # so behavior of existing callers is unchanged.
    OPENED = 1
    CLOSED = 0
class DoorOpener(ModbusUser, ThreadingActor):
    """Pykka actor driving the door relay and its indicator light.

    State machine: OPEN starts an opening cycle (light on, relay on) and
    schedules CLOSE after 0.5 s; CLOSE drops the relay and schedules
    FINALIZE_CLOSING after 2.5 s, which turns the light off and marks the
    door CLOSED.
    """

    @classmethod
    def from_vid_pid(cls, vip, pid, dev_addr=1):
        """Locate the USB-serial adapter by VID/PID and start the actor.

        Returns a pykka ActorRef (via ``cls.start``), not an instance.
        """
        Logger.for_name(__name__).info("Device search...")
        dev = find_device(vip, pid)
        return cls.start(dev.device, dev_addr)

    def __init__(self, port, dev_addr):
        ThreadingActor.__init__(self)
        self._logger = Logger.for_name(__name__)
        try:
            self._mb = minimalmodbus.Instrument(
                str(port), dev_addr, mode='rtu')
            ModbusUser.__init__(self, self._mb)
        except Exception as e:
            self._logger.error(str(e), exc_info=True)
            raise e
        self._initialize_gpio()
        self._state = DoorState.CLOSED

    def _open_serial(self):
        # NOTE(review): never called anywhere in this file — looks vestigial.
        if not self._mb.serial.is_open:
            self._mb.serial.open()
            self._logger.info(
                'Device {device} opened'.format(device=self._mb.serial.port))
            return True
        else:
            # Already open: cycle it closed/open to get a fresh handle.
            self._mb.serial.close()
            self._mb.serial.open()
            self._logger.info(
                'Device {device} reopened'.format(device=self._mb.serial.port))
            return True

    def _initialize_gpio(self):
        """Configure the door and light pins as outputs (1)."""
        self._write_reg(REGS.door_config, 1)
        self._write_reg(REGS.light_config, 1)

    def on_receive(self, msg):
        """Handle an actor message; returns True/False only for OPEN.

        OPEN -> True if an opening cycle was started, False if already open.
        """
        if msg["action"] == Action.OPEN:
            if self._state == DoorState.OPENED:
                return False
            else:
                self._start_opening_door_proc()
                return True
        elif msg["action"] == Action.CLOSE:
            if self._state == DoorState.OPENED:
                self._start_closing_door_proc()
        elif msg["action"] == Action.FINALIZE_CLOSING:
            if self._state == DoorState.OPENED:
                self._traffic_light_off()
                self._state = DoorState.CLOSED
                self._logger.info("Door closed...")

    def _start_opening_door_proc(self):
        """Opens/closes the door and blinks traffic light."""
        self._state = DoorState.OPENED
        self._traffic_light_on()
        self._open_door()
        self._logger.info("Door opened...")
        # Auto-close: the relay is released 0.5 s after opening.
        threading.Timer(0.5, self._door_close_timer_handler,
                        [Action.CLOSE]).start()

    def _start_closing_door_proc(self):
        """Drop the relay, then finalize state 2.5 s later."""
        self._close_door()
        threading.Timer(2.5, self._door_close_timer_handler,
                        [Action.FINALIZE_CLOSING]).start()

    def _open_door(self):
        self._write_reg(REGS.door, 1)

    def _close_door(self):
        self._write_reg(REGS.door, 0)

    def _traffic_light_on(self):
        self._write_reg(REGS.light, 1)

    def _traffic_light_off(self):
        self._write_reg(REGS.light, 0)

    def _door_close_timer_handler(self, action):
        # Runs on a Timer thread: re-enter the actor via a message so all
        # state changes still happen on the actor's own thread.
        self.actor_ref.tell({"action": action})
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,633 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/__init__.py | from collections import namedtuple
import minimalmodbus
# Package-wide serial defaults for minimalmodbus.
ModBusDefaults = namedtuple(
    "BotDefaults",
    ["MB_BAUDRATE", "MB_PARITY", "MB_TIMEOUT", "CLOSE_PORT_AFTER_EACH_CALL"])

MB_DEFAULTS = ModBusDefaults(
    MB_BAUDRATE=115200,
    MB_PARITY='N',
    MB_TIMEOUT=3,
    CLOSE_PORT_AFTER_EACH_CALL=True)

# Applied globally as an import side effect; mb_init() can override later.
minimalmodbus.BAUDRATE = MB_DEFAULTS.MB_BAUDRATE
minimalmodbus.TIMEOUT = MB_DEFAULTS.MB_TIMEOUT
minimalmodbus.PARITY = MB_DEFAULTS.MB_PARITY
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = MB_DEFAULTS.CLOSE_PORT_AFTER_EACH_CALL
def mb_init(baudrate=MB_DEFAULTS.MB_BAUDRATE,
            parity=MB_DEFAULTS.MB_PARITY,
            timeout=MB_DEFAULTS.MB_TIMEOUT,
            close_port_after_each_call=MB_DEFAULTS.CLOSE_PORT_AFTER_EACH_CALL):
    """Override the module-wide minimalmodbus serial settings.

    Each argument defaults to the package defaults in ``MB_DEFAULTS``.
    """
    settings = (
        ("BAUDRATE", baudrate),
        ("PARITY", parity),
        ("TIMEOUT", timeout),
        ("CLOSE_PORT_AFTER_EACH_CALL", close_port_after_each_call),
    )
    for attribute, value in settings:
        setattr(minimalmodbus, attribute, value)
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,634 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/ivitmrs.py | import minimalmodbus
import serial.tools.list_ports
from collections import namedtuple
from .common import find_device, Logger
from .modbus import FunctionalCodes, Register, Modbus, Action, ModbusUser
# Register map of the IVIT-MRS temperature/humidity sensor.  Positional
# Register fields: name, addr, func_code, count, value_type, unit.
IvitMRSRegs = namedtuple('IvitMRSRegs', [
    'humidity', 'humidity_no_correction', 'humidity_no_adjustment', 'temp',
    'temp_sht', 'temp_no_correction', 'temp_no_adjustment'
])

REGS = IvitMRSRegs(
    humidity=Register("Relative humidity", 0x0016, FunctionalCodes.INPUT, 2,
                      float, '%'),
    humidity_no_correction=Register("Relative humidity (no correction)",
                                    0x0014, FunctionalCodes.INPUT, 2, float,
                                    '%'),
    humidity_no_adjustment=Register("Relative humidity (no adjustment)",
                                    0x0012, FunctionalCodes.INPUT, 2, float,
                                    '%'),
    temp=Register("Temperature", 0x0022, FunctionalCodes.INPUT, 2, float, 'C'),
    temp_sht=Register("Temperature SHT", 0x0034, FunctionalCodes.INPUT, 2,
                      float, 'C'),
    temp_no_correction=Register("Temperature (no correction)", 0x0020,
                                FunctionalCodes.INPUT, 2, float, 'C'),
    temp_no_adjustment=Register("Temperature (no adjustment)", 0x0018,
                                FunctionalCodes.INPUT, 2, float, 'C'),
)
# REGS = IvitMRSRegs(
# humidity=Register("Relative humidity", 0x0016, FunctionalCodes.INPUT, 2,
# ">f", '%'),
# humidity_no_correction=Register("Relative humidity (no correction)",
# 0x0014, FunctionalCodes.INPUT, 2, ">f",
# '%'),
# humidity_no_adjustment=Register("Relative humidity (no adjustment)",
# 0x0012, FunctionalCodes.INPUT, 2, ">f",
# '%'),
# temp=Register("Temperature", 0x0022, FunctionalCodes.INPUT, 2, ">f", 'C'),
# temp_sht=Register("Temperature SHT", 0x0034, FunctionalCodes.INPUT, 2,
# float, 'C'),
# temp_no_correction=Register("Temperature (no correction)", 0x0020,
# FunctionalCodes.INPUT, 2, ">f", 'C'),
# temp_no_adjustment=Register("Temperature (no adjustment)", 0x0018,
# FunctionalCodes.INPUT, 2, ">f", 'C'),
# )
class IvitMRS(ModbusUser):
    """Synchronous driver for the IVIT-MRS temperature/humidity sensor.

    Unlike the other device classes this is not an actor: each property
    performs a blocking read through the shared Modbus actor.
    """

    @classmethod
    def from_vid_pid(cls, vip, pid, dev_addr=247):
        """Locate the USB-serial adapter by VID/PID and build a driver."""
        Logger.for_name(__name__).info("Device search...")
        dev = find_device(vip, pid)
        return cls(dev.device, dev_addr)

    def __init__(self, port, dev_addr=247):
        # :raises: whatever minimalmodbus.Instrument raises on a bad port.
        log = Logger.for_name(__name__)
        try:
            self._mb = minimalmodbus.Instrument(
                str(port), dev_addr, mode='rtu')
            super().__init__(self._mb)
            # super().__init__(dev_addr, str(port), 115200)
        except Exception as e:
            log.error(str(e), exc_info=True)
            raise e

    @property
    def humidity(self):
        # Relative humidity in % (corrected/adjusted reading).
        return self._read_reg(REGS.humidity)

    @property
    def humidity_no_correction(self):
        return self._read_reg(REGS.humidity_no_correction)

    @property
    def humidity_no_adjustment(self):
        return self._read_reg(REGS.humidity_no_adjustment)

    @property
    def temp(self):
        # Temperature in degrees C (corrected/adjusted reading).
        return self._read_reg(REGS.temp)

    @property
    def temp_sht(self):
        return self._read_reg(REGS.temp_sht)

    @property
    def temp_no_correction(self):
        return self._read_reg(REGS.temp_no_correction)

    @property
    def temp_no_adjustment(self):
        return self._read_reg(REGS.temp_no_adjustment)

    def poll_sesors_and_print(self):
        """Log one reading of every register.

        Name keeps its historical typo ("sesors") for caller compatibility.
        """
        log = Logger.for_name(__name__)
        log.info(
            '%s: %.1f%s\t' % (REGS.temp.name, self.temp, REGS.temp.unit))
        log.info('%s: %.1f%s\t' % (REGS.temp_sht.name, self.temp_sht,
                                   REGS.temp_sht.unit))
        log.info('%s: %.1f%s\t' %
                 (REGS.temp_no_correction.name, self.temp_no_correction,
                  REGS.temp_no_correction.unit))
        log.info('%s: %.1f%s\t' %
                 (REGS.temp_no_adjustment.name, self.temp_no_adjustment,
                  REGS.temp_no_adjustment.unit))
        log.info('%s: %.1f%s\t' % (REGS.humidity.name, self.humidity,
                                   REGS.humidity.unit))
        log.info('%s: %.1f%s\t' %
                 (REGS.humidity_no_adjustment.name,
                  self.humidity_no_adjustment, REGS.humidity.unit))
        log.info('%s: %.1f%s\t' %
                 (REGS.humidity_no_correction.name,
                  self.humidity_no_correction, REGS.humidity.unit))
        log.info('\n')
if __name__ == "__main__":
    # Stand-alone smoke test: poll the sensor once a second forever.
    # Fixed: sys and time were never imported by this module, so running
    # it as a script crashed with NameError; import them locally here.
    import sys
    import time

    minimalmodbus.BAUDRATE = 9600
    minimalmodbus.TIMEOUT = 1
    minimalmodbus.PARITY = 'E'
    dev_handler = find_device(0x0403, 0x6015)
    # NOTE(review): find_device raises when nothing matches, so the else
    # branch below is effectively unreachable; kept for safety.
    if dev_handler:
        ivt_mrs = IvitMRS(dev_handler.device)
    else:
        sys.exit(1)
    while True:
        ivt_mrs.poll_sesors_and_print()
        time.sleep(1)
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,635 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/common.py | import time
import logging
import serial.tools.list_ports
from collections import namedtuple
from .exceptions import ComDeviceNotFound
# Format string and level shared by every logger created in this package.
LoggerDefaults = namedtuple("LoggerDefaults", ["str_format", "logger_level"])

LOGGER_DEFAULTS = LoggerDefaults(
    str_format='%(asctime)s_%(name)s_%(levelname)s: %(message)s',
    logger_level=logging.DEBUG)
class _Logger():
    """Per-name logger wrapper; attaches one console handler per name.

    ``hasHandlers()`` guards against attaching duplicate handlers when the
    same name is requested repeatedly.
    """

    # Vestigial flag kept for backwards compatibility; nothing reads it.
    is_inited = False

    def __init__(self, name):
        self._logger = logging.getLogger(name)
        if not self._logger.hasHandlers():
            formatter = logging.Formatter(LOGGER_DEFAULTS.str_format)
            handler = logging.StreamHandler()
            handler.setFormatter(formatter)
            handler.setLevel(LOGGER_DEFAULTS.logger_level)
            self._logger.setLevel(LOGGER_DEFAULTS.logger_level)
            self._logger.addHandler(handler)

    @property
    def logger(self):
        """The wrapped ``logging.Logger``."""
        return self._logger

    @staticmethod
    def instance(name=__name__):
        """Return a configured ``logging.Logger``.

        Fixed: the old implementation called ``_Logger()`` without the
        required ``name`` argument and raised TypeError whenever used.
        """
        return _Logger(name)._logger
class Logger():
    """Public facade for obtaining configured loggers."""

    @staticmethod
    def for_name(name):
        """Return a configured ``logging.Logger`` for ``name``."""
        return _Logger(name).logger
def find_device(vid, pid):
    """Scan the serial ports for a device with the given USB VID/PID.

    :param vid: USB vendor id to match.
    :param pid: USB product id to match.
    :returns: the first matching port info object (has a ``.device`` path).
    :raises ComDeviceNotFound: if no attached port matches.
    """
    log = Logger.for_name(__name__)
    # comports() already returns a finite iterable; no list() copy needed.
    for port in serial.tools.list_ports.comports():
        if port.vid == vid and port.pid == pid:
            log.info("Device {vid}:{pid} found: {com}!".format(
                vid=vid, pid=pid, com=port.device))
            return port
    log.error("Device not found!")
    # Fixed: explicit keyword arguments instead of the fragile
    # ``.format(**locals())`` hack.
    raise ComDeviceNotFound(
        "Not found any devices with VID:PID = {vid}:{pid}".format(
            vid=vid, pid=pid))
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,636 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/modbus.py | from collections import namedtuple
from enum import Enum
from .common import Logger
from .exceptions import CannotReadARegisterValue
import pykka
import minimalmodbus
# Pair of Modbus function codes; ``write`` is None for read-only tables.
FunctionCode = namedtuple('FunctionCodes', ['read', 'write'])


class FunctionalCodes(Enum):
    """The four standard Modbus data tables with their function codes."""
    COIL = FunctionCode(1, 5)
    DISCRETE = FunctionCode(2, None)
    INPUT = FunctionCode(4, None)
    HOLDING = FunctionCode(3, 6)


# Static description of one device register: display name, address, data
# table, word count, Python value type and display unit.
Register = namedtuple(
    'MbRegister', ['name', 'addr', 'func_code', 'count', 'value_type', 'unit'])
class Action(Enum):
    """Operation requested from the Modbus actor (see Modbus.on_receive)."""
    READ = 0
    WRITE = 1
class Modbus(pykka.ThreadingActor):
    """Singleton actor serializing every Modbus transaction.

    All device classes funnel reads/writes through this one actor so only
    a single request is on the serial bus at a time.
    """

    __instance = None

    @classmethod
    def modbus(cls):
        """Return the process-wide ActorRef, starting the actor on first use."""
        if not cls.__instance:
            cls.__instance = cls.start()
        return cls.__instance

    def __init__(self):
        super().__init__()
        self._log = Logger.for_name(__name__)
        self._mb = None

    def on_receive(self, msg):
        """Handle one request.

        ``msg`` keys: "mb" (the caller's Instrument), "action" (Action),
        "reg" (Register) and, for writes, "value".  Returns the value read,
        or None on failure/unknown action.
        """
        self._mb = msg["mb"]
        if self._mb:
            # Removed a dead local ("reg = msg['reg']") that was never used.
            if msg["action"] == Action.READ:
                return self._read(msg["reg"])
            elif msg["action"] == Action.WRITE:
                return self._write(msg["reg"], msg["value"])

    def _read(self, reg):
        """Read ``reg``, dispatching on table and value type; None on error."""
        try:
            if reg.func_code == FunctionalCodes.COIL or\
               reg.func_code == FunctionalCodes.DISCRETE:
                return bool(
                    self._mb.read_bit(reg.addr, reg.func_code.value.read))
            elif reg.value_type is float:
                return reg.value_type(
                    self._mb.read_float(reg.addr, reg.func_code.value.read))
            elif reg.value_type is str:
                return reg.value_type(
                    self._mb.read_string(reg.addr, reg.count,
                                         reg.func_code.value.read))
            else:
                return reg.value_type(
                    self._mb.read_register(reg.addr, reg.count,
                                           reg.func_code.value.read))
        except Exception:
            self._log.error(
                "Cannot read a \"{}\" register!".format(reg.name),
                exc_info=True)
            return None

    def _write(self, reg, val):
        """Write ``val`` to ``reg``; None on error.

        NOTE(review): the fallback branch passes a scalar to
        ``write_registers`` — confirm against minimalmodbus, which expects
        a list of register values there.
        """
        try:
            if reg.func_code == FunctionalCodes.COIL:
                self._mb.write_bit(reg.addr, val, reg.func_code.value.write)
            elif reg.value_type is float:
                self._mb.write_float(reg.addr, reg.value_type(val),
                                     reg.func_code.value.write)
            elif reg.value_type is str:
                self._mb.write_string(reg.addr, reg.value_type(val))
            else:
                return self._mb.write_registers(reg.addr, val)
        except Exception:
            self._log.error(
                "Cannot write to a \"{}\" register!".format(reg.name),
                exc_info=True)
            return None
class ModbusUser:
    """Mixin giving a device class access to the shared Modbus actor."""

    def __init__(self, mb_instrument):
        # Keep our own Instrument; the shared actor is handed it per request.
        self._mb = mb_instrument
        self._mb_actor = Modbus.modbus()

    def _read_reg(self, reg):
        """Read ``reg`` synchronously through the actor.

        :raises CannotReadARegisterValue: if the actor reports a failure.
        """
        request = {"mb": self._mb, "action": Action.READ, "reg": reg}
        value = self._mb_actor.ask(request)
        if value is None:
            raise CannotReadARegisterValue(reg)
        return value

    def _write_reg(self, reg, val):
        """Write ``val`` to ``reg`` synchronously; returns the actor's answer."""
        request = {
            "mb": self._mb,
            "action": Action.WRITE,
            "reg": reg,
            "value": val
        }
        return self._mb_actor.ask(request)
| {"/mbdevs/trafflight.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/bot.py": ["/mbdevs/dooropener.py", "/mbdevs/trafflight.py", "/mbdevs/emergency.py", "/mbdevs/__init__.py", "/mbdevs/ivitmrs.py"], "/mbdevs/emergency.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/dooropener.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py", "/mbdevs/modbus.py"], "/mbdevs/ivitmrs.py": ["/mbdevs/common.py", "/mbdevs/modbus.py"], "/mbdevs/common.py": ["/mbdevs/exceptions.py"], "/mbdevs/modbus.py": ["/mbdevs/common.py", "/mbdevs/exceptions.py"]} |
71,637 | thirdpin-hackaton/telegram-bot | refs/heads/master | /mbdevs/exceptions.py | class ComDeviceNotFound(IOError):
pass
class CannotReadARegisterValue(Exception):
    """Raised when a Modbus register read returns no value.

    :param reg: the Register that failed (only its ``name`` is used).
    """

    def __init__(self, reg):
        # Fixed the garbled wording of the original message
        # ("Cannot a value read ...").
        msg = "Cannot read a value from {} register".format(reg.name)
        super().__init__(msg)
71,638 | ahmad-abdellatif/miraiml | refs/heads/master | /miraiml/__init__.py | """
:mod:`miraiml` provides the following components:
- :class:`miraiml.SearchSpace` represents the search space for a base model
- :class:`miraiml.Config` defines the general behavior for :class:`miraiml.Engine`
- :class:`miraiml.Engine` manages the optimization process
- :mod:`miraiml.pipeline` has some features related to pipelines **(hot!)**
"""
# Package version string.
__version__ = '3.0.0'

# Names re-exported as the public API of the package.
__all__ = ['SearchSpace', 'Config', 'Engine', 'Status', 'pipeline']
from miraiml.main import SearchSpace, Config, Engine, Status
from miraiml import pipeline
| {"/miraiml/__init__.py": ["/miraiml/main.py"], "/miraiml/pipeline.py": ["/miraiml/util.py", "/miraiml/core.py"], "/miraiml/main.py": ["/miraiml/util.py", "/miraiml/core.py"], "/tests/run.py": ["/miraiml/__init__.py", "/miraiml/pipeline.py"], "/miraiml/core.py": ["/miraiml/util.py"]} |
71,639 | ahmad-abdellatif/miraiml | refs/heads/master | /miraiml/pipeline.py | """
:mod:`miraiml.pipeline` contains a function that lets you build your own
pipeline classes. It also contains a few pre-defined pipelines for baselines.
"""
from sklearn.preprocessing import OneHotEncoder, MinMaxScaler
from sklearn.impute import SimpleImputer
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LinearRegression
from miraiml.util import is_valid_pipeline_name
from miraiml.core import BasePipelineClass
def compose(steps):
    """
    A function that defines pipeline classes dynamically. It builds a pipeline
    class that can be instantiated with particular parameters for each of its
    transformers/estimator without needing to call ``set_params`` as you would
    do with scikit-learn's Pipeline when performing hyperparameters optimizations.

    Similarly to scikit-learn's Pipeline, ``steps`` is a list of tuples
    containing an alias and the respective pipeline element. Although, since
    this function is a class factory, you shouldn't instantiate the
    transformer/estimator as you would do with scikit-learn's Pipeline. Thus,
    this is how :func:`compose` should be called:

    ::

        >>> from sklearn.ensemble import RandomForestClassifier
        >>> from sklearn.preprocessing import StandardScaler
        >>> from miraiml.pipeline import compose

        >>> MyPipelineClass = compose(
        ...     steps = [
        ...         ('scaler', StandardScaler), # StandardScaler instead of StandardScaler()
        ...         ('rfc', RandomForestClassifier) # No instantiation either
        ...     ]
        ... )

    And then, in order to instantiate ``MyPipelineClass`` with the desired
    parameters, you just need to refer to them as a concatenation of their
    respective class aliases and their names, separated by ``'__'``.

    ::

        >>> pipeline = MyPipelineClass(scaler__with_mean=False, rfc__max_depth=3)

    If you want to know which parameters you're allowed to play with, just call
    ``get_params``:

    ::

        >>> params = pipeline.get_params()
        >>> print("\\n".join(params))
        scaler__with_mean
        scaler__with_std
        rfc__bootstrap
        rfc__class_weight
        rfc__criterion
        rfc__max_depth
        rfc__max_features
        rfc__max_leaf_nodes
        rfc__min_impurity_decrease
        rfc__min_impurity_split
        rfc__min_samples_leaf
        rfc__min_samples_split
        rfc__min_weight_fraction_leaf
        rfc__n_estimators
        rfc__n_jobs
        rfc__oob_score
        rfc__random_state
        rfc__verbose
        rfc__warm_start

    You can check the available methods for your instantiated pipelines on the
    documentation for :class:`miraiml.core.BasePipelineClass`, which is the
    class from which the composed classes inherit from.

    **The intended purpose** of such pipeline classes is that they can work as
    base models to build instances of :class:`miraiml.SearchSpace`.

    ::

        >>> from miraiml import SearchSpace

        >>> search_space = SearchSpace(
        ...     id='MyPipelineClass',
        ...     model_class=MyPipelineClass,
        ...     parameters_values=dict(
        ...         scaler__with_mean=[True, False],
        ...         scaler__with_std=[True, False],
        ...         rfc__max_depth=[3, 4, 5, 6]
        ...     )
        ... )

    :type steps: list
    :param steps: The list of pairs (alias, class) to define the pipeline.

        .. warning::
            Repeated aliases are not allowed and none of the aliases can start
            with numbers or contain ``'__'``.

            The classes used to compose a pipeline **must** implement ``get_params``
            and ``set_params``, such as scikit-learn's classes, or :func:`compose`
            **will break**.

    :rtype: type
    :returns: The composed pipeline class

    :raises: ``TypeError`` if an alias is not a string.

    :raises: ``ValueError`` if an alias has an invalid name.

    :raises: ``NotImplementedError`` if some class of the pipeline does not implement
        the required methods.
    """
    aliases = []
    for alias, class_type in steps:
        if not isinstance(alias, str):
            raise TypeError('{} is not a string'.format(alias))
        if not is_valid_pipeline_name(alias):
            raise ValueError('{} is not allowed for an alias'.format(alias))
        class_content = dir(class_type)
        if 'fit' not in class_content:
            raise NotImplementedError('{} must implement fit'.format(class_type.__name__))
        aliases.append(alias)
        # Every step but the last must be a transformer; only the final step
        # (when len(aliases) == len(steps)) must be an estimator.
        if len(aliases) < len(steps):
            if 'transform' not in class_content:
                raise NotImplementedError(
                    '{} must implement transform'.format(class_type.__name__)
                )
        else:
            if 'predict' not in class_content and 'predict_proba' not in class_content:
                raise NotImplementedError(
                    '{} must implement predict or predict_proba'.format(class_type.__name__)
                )
    if len(set(aliases)) != len(aliases):
        raise ValueError('Repeated aliases are not allowed')
    # Build the concrete class: BasePipelineClass reads the "steps" attribute.
    return type('MiraiPipeline', (BasePipelineClass,), dict(steps=steps))
# Shared preprocessing front-end for the baseline pipelines below:
# one-hot encode, impute missing values, then scale to [0, 1].
__initial_steps__ = [
    ('ohe', OneHotEncoder),
    ('impute', SimpleImputer),
    ('min_max', MinMaxScaler)
]
class NaiveBayesBaseliner(compose(__initial_steps__ + [('naive', GaussianNB)])):
    """
    This is a baseline pipeline for classification problems. It's composed by
    the following transformers/estimator:

    1. ``sklearn.preprocessing.OneHotEncoder``
    2. ``sklearn.impute.SimpleImputer``
    3. ``sklearn.preprocessing.MinMaxScaler``
    4. ``sklearn.naive_bayes.GaussianNB``

    The available parameters to tweak are:

    ::

        >>> from miraiml.pipeline import NaiveBayesBaseliner
        >>> for param in NaiveBayesBaseliner().get_params():
        ...     print(param)
        ...
        ohe__categorical_features
        ohe__categories
        ohe__drop
        ohe__dtype
        ohe__handle_unknown
        ohe__n_values
        ohe__sparse
        impute__add_indicator
        impute__fill_value
        impute__missing_values
        impute__strategy
        impute__verbose
        min_max__feature_range
        naive__priors
        naive__var_smoothing
    """

    def __init__(self):
        # No extra configuration here: simply delegate to the pipeline base
        # class that ``compose`` built for the class statement above.
        super().__init__()
class LinearRegressionBaseliner(compose(__initial_steps__ + [('lin_reg', LinearRegression)])):
    """
    This is a baseline pipeline for regression problems. It's composed by the
    following transformers/estimator:

    1. ``sklearn.preprocessing.OneHotEncoder``
    2. ``sklearn.impute.SimpleImputer``
    3. ``sklearn.preprocessing.MinMaxScaler``
    4. ``sklearn.linear_model.LinearRegression``

    The available parameters to tweak are:

    ::

        >>> from miraiml.pipeline import LinearRegressionBaseliner
        >>> for param in LinearRegressionBaseliner().get_params():
        ...     print(param)
        ...
        ohe__categorical_features
        ohe__categories
        ohe__drop
        ohe__dtype
        ohe__handle_unknown
        ohe__n_values
        ohe__sparse
        impute__add_indicator
        impute__fill_value
        impute__missing_values
        impute__strategy
        impute__verbose
        min_max__feature_range
        lin_reg__fit_intercept
        lin_reg__n_jobs
        lin_reg__normalize
    """

    def __init__(self):
        # No extra configuration here: simply delegate to the pipeline base
        # class that ``compose`` built for the class statement above.
        super().__init__()
| {"/miraiml/__init__.py": ["/miraiml/main.py"], "/miraiml/pipeline.py": ["/miraiml/util.py", "/miraiml/core.py"], "/miraiml/main.py": ["/miraiml/util.py", "/miraiml/core.py"], "/tests/run.py": ["/miraiml/__init__.py", "/miraiml/pipeline.py"], "/miraiml/core.py": ["/miraiml/util.py"]} |
71,640 | ahmad-abdellatif/miraiml | refs/heads/master | /miraiml/util.py | """
:mod:`miraiml.util` provides utility functions that are used by higher level
modules.
"""
from threading import Thread
import random as rnd
import pickle
import string
import math
def load(path):
    """
    A clean ``pickle.load`` wrapper for binary files.

    :type path: string
    :param path: The path of the binary file to be loaded.

    :rtype: object
    :returns: The loaded object.
    """
    # Use a context manager so the file handle is closed even if unpickling
    # raises (the original ``pickle.load(open(path, 'rb'))`` leaked the handle).
    with open(path, 'rb') as binary_file:
        return pickle.load(binary_file)
def dump(obj, path):
    """
    Optimizes the process of writing objects on disc by triggering a thread.

    :type obj: object
    :param obj: The object to be dumped to the binary file.

    :type path: string
    :param path: The path of the binary file to be written.

    :rtype: threading.Thread
    :returns: The started writer thread, so callers may ``join`` it if they
        need the file to be fully written. (The original returned ``None``;
        returning the thread is backward-compatible.)
    """
    def _write():
        # Context manager guarantees the handle is flushed and closed; the
        # original never closed the file opened inside the lambda.
        with open(path, 'wb') as binary_file:
            pickle.dump(obj, binary_file)

    writer = Thread(target=_write)
    writer.start()
    return writer
def sample_random_len(lst):
    """
    Returns a sample of random size from the list ``lst``. The minimum length
    of the returned list is 1, unless ``lst`` is empty.

    :type lst: list
    :param lst: A list containing the elements to be sampled.

    :rtype: list
    :returns: The randomly sampled elements from ``lst``.
    """
    # Guard clause: nothing to sample from.
    if not lst:
        return []
    # Draw a uniform fraction of the list's length, never below 1.
    sample_size = max(1, math.ceil(rnd.random() * len(lst)))
    return rnd.sample(lst, sample_size)
# Whitelist of characters considered safe for file names.
__valid_filename_chars__ = frozenset('-_.() %s%s' % (string.ascii_letters,
                                                     string.digits))


def is_valid_filename(filename):
    """
    Tells whether a string can be used as a safe file name or not.

    :type filename: str
    :param filename: The file name.

    :rtype: bool
    :returns: Whether ``filename`` is a valid file name or not.
    """
    stripped = filename.strip()
    # Reject empty names, the current-directory alias and any attempt at
    # parent-directory traversal.
    if not stripped or stripped == '.' or '..' in stripped:
        return False
    # Every remaining character must come from the whitelist.
    return all(char in __valid_filename_chars__ for char in stripped)
# Whitelist of characters allowed in pipeline step aliases.
__valid_pipeline_chars__ = frozenset('_%s%s' % (string.ascii_letters,
                                                string.digits))


def is_valid_pipeline_name(pipeline_name):
    """
    Tells whether a string can be used to compose pipelines or not.

    :type pipeline_name: str
    :param pipeline_name: The file name.

    :rtype: bool
    :returns: Whether ``pipeline_name`` is a valid name or not.
    """
    # Empty names, the '__' separator (reserved by get_params/set_params)
    # and a leading digit are all forbidden. The length check short-circuits
    # before the [0] subscript, so empty input is safe.
    malformed = (
        len(pipeline_name) == 0
        or '__' in pipeline_name
        or pipeline_name[0] in string.digits
    )
    if malformed:
        return False
    return all(char in __valid_pipeline_chars__ for char in pipeline_name)
| {"/miraiml/__init__.py": ["/miraiml/main.py"], "/miraiml/pipeline.py": ["/miraiml/util.py", "/miraiml/core.py"], "/miraiml/main.py": ["/miraiml/util.py", "/miraiml/core.py"], "/tests/run.py": ["/miraiml/__init__.py", "/miraiml/pipeline.py"], "/miraiml/core.py": ["/miraiml/util.py"]} |
71,641 | ahmad-abdellatif/miraiml | refs/heads/master | /miraiml/main.py | from threading import Thread
import pandas as pd
import warnings
import time
import os
import gc
from miraiml.util import is_valid_filename
from miraiml.core import MiraiSeeker, Ensembler
from miraiml.core import load_base_model, dump_base_model
class SearchSpace:
    """
    This class represents the search space of hyperparameters for a base model.

    :type id: str
    :param id: The id that will be associated with the models generated within
        this search space.

    :type model_class: type
    :param model_class: Any class that represents a statistical model. It must
        implement the methods ``fit`` as well as ``predict`` for regression or
        ``predict_proba`` for classification problems.

    :type parameters_values: dict, optional, default=None
    :param parameters_values: A dictionary containing lists of values to be
        tested as parameters when instantiating objects of ``model_class`` for
        ``id``.

    :type parameters_rules: function, optional, default=lambda x: None
    :param parameters_rules: A function that constrains certain parameters because
        of the values assumed by others. It must receive a dictionary as input and
        doesn't need to return anything. Not used if ``parameters_values`` has no
        keys.

        .. warning::
            Make sure that the parameters accessed in ``parameters_rules`` exist
            in the set of parameters defined on ``parameters_values``, otherwise
            the engine will attempt to access an invalid key.

    :raises: ``NotImplementedError`` if a model class does not implement ``fit``
        or none of ``predict`` or ``predict_proba``.

    :raises: ``TypeError`` if some parameter is of a prohibited type.

    :raises: ``ValueError`` if a provided ``id`` is not allowed.

    :Example:

    ::

        >>> import numpy as np
        >>> from sklearn.linear_model import LogisticRegression
        >>> from miraiml import SearchSpace

        >>> def logistic_regression_parameters_rules(parameters):
        ...     if parameters['solver'] in ['newton-cg', 'sag', 'lbfgs']:
        ...         parameters['penalty'] = 'l2'

        >>> search_space = SearchSpace(
        ...     id = 'Logistic Regression',
        ...     model_class = LogisticRegression,
        ...     parameters_values = {
        ...         'penalty': ['l1', 'l2'],
        ...         'C': np.arange(0.1, 2, 0.1),
        ...         'max_iter': np.arange(50, 300),
        ...         'solver': ['newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga'],
        ...         'random_state': [0]
        ...     },
        ...     parameters_rules = logistic_regression_parameters_rules
        ... )

    .. warning::
        **Do not** allow ``random_state`` assume multiple values. If ``model_class``
        has a ``random_state`` parameter, force the engine to always choose the
        same value by providing a list with a single element.

        Allowing ``random_state`` to assume multiple values will confuse the engine
        because the scores will be unstable even with the same choice of
        hyperparameters and features.
    """

    # NOTE: the parameter name ``id`` shadows the builtin, but it is part of
    # the public keyword interface and must be kept.
    def __init__(self, id, model_class, parameters_values=None,
                 parameters_rules=lambda x: None):
        self.__validate__(id, model_class, parameters_values, parameters_rules)
        self.model_class = model_class
        self.id = id
        # Normalize a missing dict to an empty one so the rest of the engine
        # can iterate over it without None checks.
        if parameters_values is None:
            parameters_values = {}
        self.parameters_values = parameters_values
        self.parameters_rules = parameters_rules

    @staticmethod
    def __validate__(id, model_class, parameters_values, parameters_rules):
        """
        Validates the constructor parameters.

        Raises ``TypeError``, ``ValueError`` or ``NotImplementedError`` as
        documented on the class; returns nothing on success.
        """
        if not isinstance(id, str):
            raise TypeError('id must be a string')
        # ids double as checkpoint file names, hence the filename check.
        if not is_valid_filename(id):
            raise ValueError('Invalid id: {}'.format(id))
        # dir() is used (instead of hasattr on an instance) so model_class
        # never has to be instantiated during validation.
        dir_model_class = dir(model_class)
        if 'fit' not in dir_model_class:
            raise NotImplementedError('model_class must implement fit')
        if 'predict' not in dir_model_class and 'predict_proba' not in dir_model_class:
            raise NotImplementedError('model_class must implement predict or predict_proba')
        if parameters_values is not None and not isinstance(parameters_values, dict):
            raise TypeError('parameters_values must be None or a dictionary')
        if not callable(parameters_rules):
            raise TypeError('parameters_rules must be a function')
class Config:
    """
    This class defines the general behavior of the engine.

    :type local_dir: str
    :param local_dir: The name of the folder in which the engine will save its
        internal files. If the directory doesn't exist, it will be created
        automatically. ``..`` and ``/`` are not allowed to compose ``local_dir``.

    :type problem_type: str
    :param problem_type: ``'classification'`` or ``'regression'``. The problem
        type. Multi-class classification problems are not supported.

    :type search_spaces: list
    :param search_spaces: The list of :class:`miraiml.SearchSpace`
        objects to optimize. If ``search_spaces`` has length 1, the engine
        will not run ensemble cycles.

    :type score_function: function
    :param score_function: A function that receives the "truth" and the predictions
        (in this order) and returns the score. Bigger scores must mean better models.

    :type use_all_features: bool, optional, default=False
    :param use_all_features: Whether to force MiraiML to always use all features
        or not.

    :type n_folds: int, optional, default=5
    :param n_folds: The number of folds for the fitting/predicting process. The
        minimum value allowed is 2.

    :type stratified: bool, optional, default=True
    :param stratified: Whether to stratify folds on target or not. Only used if
        ``problem_type == 'classification'``.

    :type ensemble_id: str, optional, default=None
    :param ensemble_id: The id for the ensemble. If none is given, the engine will
        not ensemble base models.

    :type stagnation: int or float, optional, default=60
    :param stagnation: The amount of time (in minutes) for the engine to
        automatically interrupt itself if no improvement happens. Negative numbers
        are interpreted as "infinite".

        .. warning::
            Stagnation checks only happen after the engine finishes at least one
            optimization cycle. In other words, every base model and the ensemble
            (if set) must be scored at least once.

    :raises: ``NotImplementedError`` if a model class does not implement the proper
        method for prediction.

    :raises: ``TypeError`` if some parameter is not of its allowed type.

    :raises: ``ValueError`` if some parameter has an invalid value.

    :Example:

    ::

        >>> from sklearn.metrics import roc_auc_score
        >>> from sklearn.naive_bayes import GaussianNB
        >>> from sklearn.tree import DecisionTreeClassifier
        >>> from miraiml import SearchSpace, Config

        >>> search_spaces = [
        ...     SearchSpace('Naive Bayes', GaussianNB),
        ...     SearchSpace('Decicion Tree', DecisionTreeClassifier)
        ... ]

        >>> config = Config(
        ...     local_dir = 'miraiml_local',
        ...     problem_type = 'classification',
        ...     score_function = roc_auc_score,
        ...     search_spaces = search_spaces,
        ...     use_all_features = False,
        ...     n_folds = 5,
        ...     stratified = True,
        ...     ensemble_id = 'Ensemble',
        ...     stagnation = -1
        ... )
    """

    def __init__(self, local_dir, problem_type, score_function, search_spaces,
                 use_all_features=False, n_folds=5, stratified=True,
                 ensemble_id=None, stagnation=60):
        self.__validate__(local_dir, problem_type, score_function, search_spaces,
                          use_all_features, n_folds, stratified, ensemble_id,
                          stagnation)
        self.local_dir = local_dir
        # Normalize so the rest of the engine can blindly concatenate
        # file names onto local_dir.
        if self.local_dir[-1] != '/':
            self.local_dir += '/'
        self.problem_type = problem_type
        self.search_spaces = search_spaces
        self.score_function = score_function
        self.use_all_features = use_all_features
        self.n_folds = n_folds
        self.stratified = stratified
        self.ensemble_id = ensemble_id
        self.stagnation = stagnation

    @staticmethod
    def __validate__(local_dir, problem_type, score_function, search_spaces,
                     use_all_features, n_folds, stratified, ensemble_id,
                     stagnation):
        """
        Validates the constructor parameters.

        Raises ``TypeError``, ``ValueError`` or ``NotImplementedError`` as
        documented on the class; returns nothing on success.
        """
        if not isinstance(local_dir, str):
            raise TypeError('local_dir must be a string')
        if not is_valid_filename(local_dir):
            raise ValueError('Invalid directory name: {}'.format(local_dir))
        if not isinstance(problem_type, str):
            raise TypeError('problem_type must be a string')
        if problem_type not in ('classification', 'regression'):
            raise ValueError('Invalid problem type')
        if not callable(score_function):
            raise TypeError('score_function must be a function')
        if not isinstance(search_spaces, list):
            raise TypeError('search_spaces must be a list')
        if len(search_spaces) == 0:
            raise ValueError('No search spaces')
        # Collect ids while validating so duplicates (and a clashing
        # ensemble_id below) can be detected in a single pass.
        ids = []
        for search_space in search_spaces:
            if not isinstance(search_space, SearchSpace):
                raise TypeError('All search spaces must be objects of ' +
                                'miraiml.SearchSpace')
            id = search_space.id
            if id in ids:
                raise ValueError('Duplicated search space id: {}'.format(id))
            ids.append(id)
            # The prediction method required depends on the problem type,
            # so it is re-checked here even though SearchSpace validated
            # fit/predict generically.
            dir_model_class = dir(search_space.model_class)
            if problem_type == 'classification' and 'predict_proba' not in dir_model_class:
                raise NotImplementedError('Model class of id {} '.format(id) +
                                          'must implement predict_proba for ' +
                                          'classification problems')
            if problem_type == 'regression' and 'predict' not in dir_model_class:
                raise NotImplementedError('Model class of id {} '.format(id) +
                                          'must implement predict for regression problems')
        if not isinstance(use_all_features, bool):
            raise TypeError('use_all_features must be a boolean')
        if not isinstance(n_folds, int):
            raise TypeError('n_folds must be an integer')
        if n_folds < 2:
            raise ValueError('n_folds must be greater than 1')
        if not isinstance(stratified, bool):
            raise TypeError('stratified must be a boolean')
        if ensemble_id is not None and not isinstance(ensemble_id, str):
            raise TypeError('ensemble_id must be None or a string')
        if isinstance(ensemble_id, str) and not is_valid_filename(ensemble_id):
            raise ValueError('invalid ensemble_id')
        if ensemble_id in ids:
            raise ValueError('ensemble_id cannot have the same id of a ' +
                             'search space')
        # NOTE(review): isinstance(stagnation, int) also accepts bool
        # (bool subclasses int) — confirm whether that is intended.
        if not isinstance(stagnation, int) and not isinstance(stagnation, float):
            raise TypeError('stagnation must be an integer or a float')
class Engine:
    """
    This class offers the controls for the engine.

    :type config: miraiml.Config
    :param config: The configurations for the behavior of the engine.

    :type on_improvement: function, optional, default=None
    :param on_improvement: A function that will be executed everytime the engine
        finds an improvement for some id. It must receive a ``status`` parameter,
        which is the return of the method :func:`request_status` (an instance of
        :class:`miraiml.Status`).

    :raises: ``TypeError`` if ``config`` is not an instance of :class:`miraiml.Config`
        or ``on_improvement`` (if provided) is not callable.

    :Example:

    ::

        >>> from sklearn.metrics import roc_auc_score
        >>> from sklearn.naive_bayes import GaussianNB
        >>> from sklearn.tree import DecisionTreeClassifier
        >>> from miraiml import SearchSpace, Config, Engine

        >>> search_spaces = [
        ...     SearchSpace('Naive Bayes', GaussianNB),
        ...     SearchSpace('Decision Tree', DecisionTreeClassifier)
        ... ]

        >>> config = Config(
        ...     local_dir = 'miraiml_local',
        ...     problem_type = 'classification',
        ...     score_function = roc_auc_score,
        ...     search_spaces = search_spaces,
        ...     ensemble_id = 'Ensemble'
        ... )

        >>> def on_improvement(status):
        ...     print('Scores:', status.scores)

        >>> engine = Engine(config, on_improvement=on_improvement)
    """

    def __init__(self, config, on_improvement=None):
        self.__validate__(config, on_improvement)
        self.config = config
        self.on_improvement = on_improvement
        self.train_predictions_df = None
        self.test_predictions_df = None
        self.__is_running__ = False
        self.must_interrupt = False
        self.mirai_seeker = None
        self.models_dir = config.local_dir + 'models/'
        self.train_data = None
        # Fix: test_data was never initialized, so clean_test_data,
        # request_status and __main_loop__ raised AttributeError when
        # load_test_data had not been called yet.
        self.test_data = None
        self.ensembler = None
        self.n_cycles = 0
        self.last_improvement_timestamp = None

    @staticmethod
    def __validate__(config, on_improvement):
        """
        Validates the constructor parameters.
        """
        if not isinstance(config, Config):
            raise TypeError('miraiml.Engine\'s constructor requires an object ' +
                            'of miraiml.Config')
        if on_improvement is not None and not callable(on_improvement):
            raise TypeError('on_improvement must be None or a function')

    def is_running(self):
        """
        Tells whether the engine is running or not.

        :rtype: bool
        :returns: ``True`` if the engine is running and ``False`` otherwise.
        """
        return self.__is_running__

    def interrupt(self):
        """
        Makes the engine stop on the first opportunity.

        .. note::
            This method is **not** asynchronous. It will wait until the engine
            stops.
        """
        self.must_interrupt = True
        if self.ensembler is not None:
            self.ensembler.interrupt()
        # Busy-wait until the worker thread notices the flag and exits.
        while self.__is_running__:
            time.sleep(.1)
        self.must_interrupt = False

    def load_train_data(self, train_data, target_column, restart=False):
        """
        Interrupts the engine and loads the train dataset. All of its columns must
        be either instances of ``str`` or ``int``.

        .. warning::
            Loading new training data will **always** trigger the loss of history
            for optimization.

        :type train_data: pandas.DataFrame
        :param train_data: The training data.

        :type target_column: str or int
        :param target_column: The target column identifier.

        :type restart: bool, optional, default=False
        :param restart: Whether to restart the engine after updating data or not.

        :raises: ``TypeError`` if ``train_data`` is not an instance of
            ``pandas.DataFrame``.

        :raises: ``ValueError`` if ``target_column`` is not a column of
            ``train_data`` or if some column name is of a prohibited type.
        """
        self.__validate_train_data__(train_data, target_column)
        # Internally every column is addressed by its str() form; the maps
        # let request_status translate back to the caller's original names.
        self.columns_renaming_map = {}
        self.columns_renaming_unmap = {}
        for column in train_data.columns:
            column_renamed = str(column)
            self.columns_renaming_map[column] = column_renamed
            self.columns_renaming_unmap[column_renamed] = column
        self.target_column = target_column
        train_data = train_data.rename(columns=self.columns_renaming_map)
        self.interrupt()
        self.train_data = train_data.drop(columns=target_column)
        self.train_target = train_data[target_column]
        self.all_features = list(self.train_data.columns)
        if self.mirai_seeker is not None:
            self.mirai_seeker.reset()
        if restart:
            self.restart()

    @staticmethod
    def __validate_train_data__(train_data, target_column):
        """
        Validates the train data.
        """
        if not isinstance(train_data, pd.DataFrame):
            raise TypeError('Training data must be an object of pandas.DataFrame')
        train_columns = train_data.columns
        if target_column not in train_columns:
            raise ValueError('target_column must be a column of train_data')
        for column in train_columns:
            if not isinstance(column, str) and not isinstance(column, int):
                raise ValueError('All columns names must be either str or int')

    def load_test_data(self, test_data, restart=False):
        """
        Interrupts the engine and loads the test dataset. All of its columns must
        be columns in the train data.

        The test dataset is the one for which we don't have the values for the
        target column. This method should be used to load data in production.

        .. warning::
            This method can only be called after
            :func:`miraiml.Engine.load_train_data`

        :type test_data: pandas.DataFrame, optional, default=None
        :param test_data: The testing data. Use the default value if you don't
            need to make predictions for data with unknown labels.

        :type restart: bool, optional, default=False
        :param restart: Whether to restart the engine after loading data or not.

        :raises: ``RuntimeError`` if this method is called before loading the
            train data.

        :raises: ``ValueError`` if the column names are not consistent.
        """
        if self.train_data is None:
            raise RuntimeError('This method cannot be called before load_train_data')
        self.__validate_test_data__(test_data)
        self.test_data = test_data.rename(columns=self.columns_renaming_map)
        if restart:
            self.restart()

    def __validate_test_data__(self, test_data):
        """
        Validates the test data.
        """
        for column in self.columns_renaming_map:
            if column != self.target_column and column not in test_data.columns:
                raise ValueError(
                    'Column {} is not a column in the train data'.format(column)
                )

    def clean_test_data(self, restart=False):
        """
        Cleans the test data from the buffer.

        .. note::
            Keep in mind that if you don't intend to make predictions for
            unlabeled data, the engine will run faster with a clean test data
            buffer.

        :type restart: bool, optional, default=False
        :param restart: Whether to restart the engine after cleaning test data or
            not.
        """
        self.interrupt()
        self.test_data = None
        if restart:
            self.restart()

    def shuffle_train_data(self, restart=False):
        """
        Interrupts the engine and shuffles the training data.

        :type restart: bool, optional, default=False
        :param restart: Whether to restart the engine after shuffling data or not.

        :raises: ``RuntimeError`` if the engine has no data loaded.

        .. note::
            It's a good practice to shuffle the training data periodically to avoid
            overfitting on a particular folding pattern.
        """
        if self.train_data is None:
            raise RuntimeError('No data to shuffle')
        self.interrupt()
        # The same seed keeps features and target aligned after shuffling.
        seed = int(time.time())
        self.train_data = self.train_data.sample(frac=1, random_state=seed)
        self.train_target = self.train_target.sample(frac=1, random_state=seed)
        if restart:
            self.restart()

    def reconfigure(self, config, restart=False):
        """
        Interrupts the engine and loads a new configuration.

        .. warning::
            Reconfiguring the engine will **always** trigger the loss of history
            for optimization.

        :type config: miraiml.Config
        :param config: The configurations for the behavior of the engine.

        :type restart: bool, optional, default=False
        :param restart: Whether to restart the engine after reconfiguring it or
            not.
        """
        self.interrupt()
        self.config = config
        if self.mirai_seeker is not None:
            self.mirai_seeker.reset()
        if restart:
            self.restart()

    def restart(self):
        """
        Interrupts the engine and starts again from last checkpoint (if any). It
        is also used to start the engine for the first time.

        :raises: ``RuntimeError`` if no data is loaded.
        """
        if self.train_data is None:
            raise RuntimeError('No data to train')
        self.interrupt()

        def starter():
            # Clear the running flag if the loop dies, otherwise interrupt()
            # would spin forever; re-raise so the failure is not swallowed.
            try:
                self.__main_loop__()
            except Exception:
                self.__is_running__ = False
                raise

        Thread(target=starter).start()

    def __improvement_trigger__(self):
        """
        Called when an improvement happens.
        """
        self.last_improvement_timestamp = time.time()
        if self.on_improvement is not None:
            self.on_improvement(self.request_status())

    def __update_best__(self, score, id):
        """
        Updates the best id of the engine.
        """
        if self.best_score is None or score > self.best_score:
            self.best_score = score
            self.best_id = id

    def __check_stagnation__(self):
        """
        Checks whether the engine has reached stagnation or not. If so, the
        engine is interrupted.
        """
        if self.config.stagnation >= 0:
            diff_in_seconds = time.time() - self.last_improvement_timestamp
            if diff_in_seconds/60 > self.config.stagnation:
                self.interrupt()

    def __main_loop__(self):
        """
        Main optimization loop.

        First scores every search space once (loading checkpoints when
        available), then keeps seeking better base models and re-optimizing
        the ensemble until interrupted.
        """
        self.__is_running__ = True
        if not os.path.exists(self.models_dir):
            os.makedirs(self.models_dir)
        self.base_models = {}
        self.train_predictions_df = pd.DataFrame()
        self.test_predictions_df = pd.DataFrame()
        self.scores = {}
        self.best_score = None
        self.best_id = None
        self.ensembler = None
        self.mirai_seeker = MiraiSeeker(
            self.config.search_spaces,
            self.all_features,
            self.config
        )
        self.n_cycles = 0
        self.last_improvement_timestamp = time.time()
        start = time.time()
        # First pass: score each id once, reusing on-disk checkpoints.
        for search_space in self.config.search_spaces:
            if self.must_interrupt:
                break
            id = search_space.id
            base_model_path = self.models_dir + id
            base_model_class = search_space.model_class
            if os.path.exists(base_model_path):
                base_model = load_base_model(base_model_class, base_model_path)
                # Warn (not fail) if the checkpoint no longer fits the
                # provided search space.
                parameters = base_model.parameters
                parameters_values = search_space.parameters_values
                for key, value in parameters.items():
                    if key not in parameters_values:
                        warnings.warn(
                            'Parameter ' + str(key) + ', set with value ' +
                            str(value) + ', from checkpoint is not on the ' +
                            'provided search space for the id ' + str(id),
                            RuntimeWarning
                        )
                    else:
                        if value not in parameters_values[key]:
                            warnings.warn(
                                'Value ' + str(value) + ' for parameter ' + str(key) +
                                ' from checkpoint is not on the provided ' +
                                'search space for the id ' + str(id),
                                RuntimeWarning
                            )
            else:
                base_model = self.mirai_seeker.seek(search_space.id)
                dump_base_model(base_model, base_model_path)
            self.base_models[id] = base_model
            train_predictions, test_predictions, score = base_model.predict(
                self.train_data, self.train_target, self.test_data, self.config)
            self.train_predictions_df[id] = train_predictions
            self.test_predictions_df[id] = test_predictions
            self.scores[id] = score
            self.__update_best__(self.scores[id], id)
        total_cycles_duration = time.time() - start
        will_ensemble = len(self.base_models) > 1 and\
            self.config.ensemble_id is not None
        if will_ensemble:
            self.ensembler = Ensembler(
                list(self.base_models),
                self.train_target,
                self.train_predictions_df,
                self.test_predictions_df,
                self.scores,
                self.config
            )
            ensemble_id = self.config.ensemble_id
            if self.ensembler.optimize(total_cycles_duration):
                self.__update_best__(self.scores[ensemble_id], ensemble_id)
        self.__improvement_trigger__()
        self.n_cycles = 1
        # Optimization cycles: keep trying new base models per id.
        while not self.must_interrupt:
            gc.collect()
            start = time.time()
            for search_space in self.config.search_spaces:
                self.__check_stagnation__()
                if self.must_interrupt:
                    break
                id = search_space.id
                base_model = self.mirai_seeker.seek(id)
                train_predictions, test_predictions, score = base_model.predict(
                    self.train_data, self.train_target,
                    self.test_data, self.config)
                self.mirai_seeker.register_base_model(id, base_model, score)
                # Accept a strictly better score, or an equal score reached
                # with fewer features.
                if score > self.scores[id] or (
                        score == self.scores[id] and
                        len(base_model.features) < len(self.base_models[id].features)
                ):
                    self.scores[id] = score
                    # Fix: keep the in-memory model in sync with the
                    # checkpoint dumped below; previously only the disk
                    # copy was updated, so request_status reported stale
                    # models and the feature-count tie-break compared
                    # against an outdated base model.
                    self.base_models[id] = base_model
                    self.train_predictions_df[id] = train_predictions
                    self.test_predictions_df[id] = test_predictions
                    self.__update_best__(score, id)
                    if will_ensemble:
                        self.ensembler.update()
                        self.__update_best__(self.scores[ensemble_id], ensemble_id)
                    self.__improvement_trigger__()
                    dump_base_model(base_model, self.models_dir + id)
                else:
                    del train_predictions, test_predictions
            total_cycles_duration += time.time() - start
            self.n_cycles += 1
            if will_ensemble:
                if self.ensembler.optimize(total_cycles_duration/self.n_cycles):
                    self.__update_best__(self.scores[ensemble_id], ensemble_id)
                    self.__improvement_trigger__()
        self.__is_running__ = False

    def request_status(self):
        """
        Queries the current status of the engine.

        :rtype: miraiml.Status
        :returns: The current status of the engine in the form of a dictionary.
            If no score has been computed yet, returns ``None``.
        """
        if self.best_id is None:
            return None
        # Copies everywhere so the caller cannot mutate engine internals.
        train_predictions = None
        if self.train_predictions_df is not None:
            train_predictions = self.train_predictions_df.copy()
        test_predictions = None
        if self.test_data is not None and self.test_predictions_df is not None:
            test_predictions = self.test_predictions_df.copy()
        ensemble_weights = None
        if self.ensembler is not None:
            ensemble_weights = self.ensembler.weights.copy()
        base_models = {}
        for id in self.base_models:
            base_model = self.base_models[id]
            base_models[id] = dict(
                model_class=base_model.model_class.__name__,
                parameters=base_model.parameters.copy()
            )
            # Translate internal (str) column names back to the caller's
            # original column identifiers.
            base_models[id]['features'] = [
                self.columns_renaming_unmap[col] for col in base_model.features
            ]
        histories = None
        if self.mirai_seeker is not None:
            histories = {}
            for id in self.mirai_seeker.histories:
                histories[id] = self.mirai_seeker.histories[id].copy()
        return Status(
            best_id=self.best_id,
            scores=self.scores.copy(),
            train_predictions=train_predictions,
            test_predictions=test_predictions,
            ensemble_weights=ensemble_weights,
            base_models=base_models,
            histories=histories
        )
class Status:
    """
    Represents the current status of the engine. Objects of this class are
    not supposed to be instantiated by the user. Rather, they are returned
    by the :func:`miraiml.Engine.request_status()` method.

    The following attributes are accessible:

    * ``best_id``: the id of the best base model (or ensemble)

    * ``scores``: a dictionary containing the current score of each id

    * ``train_predictions``: a ``pandas.DataFrame`` object containing the predictions\
        for the train data for each id

    * ``test_predictions``: a ``pandas.DataFrame`` object containing the predictions\
        for the test data for each id

    * ``ensemble_weights``: a dictionary containing the ensemble weights for\
        each base model id

    * ``base_models``: a dictionary containing the characteristics of each base\
        model (accessed by its respective id)

    * ``histories``: a dictionary of ``pandas.DataFrame`` objects for each id,\
        containing the history of base models attempts and their respective scores.\
        Hyperparameters columns end with the ``'__(hyperparameter)'`` suffix and\
        features columns end with the ``'__(feature)'`` suffix. The score column\
        can be accessed with the key ``'score'``. For more information, please\
        check the :ref:`User Guide <mirai_seeker>`.

    The characteristics of each base model are represent by dictionaries, containing
    the following keys:

    * ``'model_class'``: The name of the base model's modeling class

    * ``'parameters'``: The dictionary of hyperparameters values

    * ``'features'``: The list of features used
    """

    def __init__(self, **kwargs):
        # The engine passes every attribute as a keyword argument.
        self.__dict__ = kwargs

    def build_report(self, include_features=False):
        """
        Returns the report of the current status of the engine in a formatted
        string.

        :type include_features: bool, optional, default=False
        :param include_features: Whether to include the list of features on the
            report or not (may cause some visual mess).

        :rtype: str
        :returns: The formatted report.
        """
        output = '########################\n'
        output += ('best id: {}\n'.format(self.best_id))
        output += ('best score: {}\n'.format(self.scores[self.best_id]))
        if self.ensemble_weights is not None:
            output += ('########################\n')
            output += ('ensemble weights:\n')
            # Fix: sort the (id, weight) pairs directly, in descending order
            # of weight. The original inverted the dict (weight -> id),
            # which silently dropped every id that shared a weight with
            # another id.
            for id, weight in sorted(self.ensemble_weights.items(),
                                     key=lambda item: item[1], reverse=True):
                output += (' {}: {}\n'.format(id, weight))
        output += ('########################\n')
        output += ('all scores:\n')
        # Same fix as above for scores: ids with tied scores must all be
        # listed, still in descending order of score.
        for id, score in sorted(self.scores.items(),
                                key=lambda item: item[1], reverse=True):
            output += (' {}: {}\n'.format(id, score))
        for id in sorted(self.base_models):
            base_model = self.base_models[id]
            features = sorted([str(feature) for feature in base_model['features']])
            output += ('########################\n')
            output += ('id: {}\n'.format(id))
            output += ('model class: {}\n'.format(base_model['model_class']))
            output += ('n features: {}\n'.format(len(features)))
            output += ('parameters:\n')
            parameters = base_model['parameters']
            for parameter in sorted(parameters):
                value = parameters[parameter]
                output += (' {}: {}\n'.format(parameter, value))
            if include_features:
                output += ('features: {}\n'.format(', '.join(features)))
        return output
| {"/miraiml/__init__.py": ["/miraiml/main.py"], "/miraiml/pipeline.py": ["/miraiml/util.py", "/miraiml/core.py"], "/miraiml/main.py": ["/miraiml/util.py", "/miraiml/core.py"], "/tests/run.py": ["/miraiml/__init__.py", "/miraiml/pipeline.py"], "/miraiml/core.py": ["/miraiml/util.py"]} |
71,642 | ahmad-abdellatif/miraiml | refs/heads/master | /tests/run.py | from sklearn.linear_model import LinearRegression, Lasso
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from time import sleep
import pandas as pd
from miraiml import SearchSpace, Config, Engine
from miraiml.pipeline import compose
# Reuse pytest's cache directory as scratch space so test artifacts
# (downloaded dataset, engine checkpoints) stay out of the source tree.
TEST_FOLDER = '.pytest_cache'
def test_run():
    """End-to-end smoke test of the MiraiML engine lifecycle.

    Builds a regression problem from the California housing dataset, runs the
    engine with three search spaces (two plain estimators and one composed
    pipeline), and checks start/interrupt/restart semantics, status-report
    shapes, and that the engine does not mutate the caller's dataframes.

    NOTE(review): relies on wall-clock ``sleep`` calls for the background
    optimization thread to make progress — timing-sensitive on slow machines.
    """
    # --- Build the dataset with the target as a named column. ---
    X, y = fetch_california_housing(data_home=TEST_FOLDER, return_X_y=True)
    data = pd.DataFrame(X)
    data['target'] = y
    # Composed pipeline class: scaler followed by a linear regressor.
    Pipeline = compose(
        [('scaler', StandardScaler), ('lin_reg', LinearRegression)]
    )
    search_spaces = [
        SearchSpace(id='Linear Regression', model_class=LinearRegression),
        SearchSpace(id='Lasso', model_class=Lasso),
        SearchSpace(
            id='Pipeline',
            model_class=Pipeline,
            # step-prefixed hyperparameters, sklearn-pipeline style
            parameters_values=dict(
                scaler__with_mean=[True, False],
                scaler__with_std=[True, False],
                lin_reg__fit_intercept=[True, False],
                lin_reg__normalize=[True, False]
            )
        )
    ]
    config = Config(
        local_dir=TEST_FOLDER,
        problem_type='regression',
        score_function=r2_score,
        search_spaces=search_spaces,
        ensemble_id='Ensemble',
        stagnation=1
    )
    engine = Engine(config)
    train_data, test_data = train_test_split(data, test_size=0.2)
    # Keep copies to assert the engine never mutates caller-owned frames.
    train_data_original, test_data_original = train_data.copy(), test_data.copy()
    engine.load_train_data(train_data, 'target')
    engine.load_test_data(test_data)
    # Engine must be idle before restart() is called.
    if engine.is_running():
        raise AssertionError()
    engine.restart()
    sleep(2)
    if not engine.is_running():
        raise AssertionError()
    sleep(5)
    status = engine.request_status()
    # scores include one entry per search space plus the ensemble;
    # ensemble weights cover exactly the base models.
    if len(status.scores) != len(search_spaces) + 1 or \
            len(status.ensemble_weights) != len(search_spaces):
        raise AssertionError()
    if status.train_predictions.shape[0] != train_data.shape[0]:
        raise AssertionError()
    if status.test_predictions.shape[0] != test_data.shape[0]:
        raise AssertionError()
    # Every feature used by a base model must exist in both datasets.
    for base_model in status.base_models.values():
        for feature in base_model['features']:
            if feature not in test_data.columns or feature not in train_data.columns:
                raise AssertionError()
    engine.interrupt()
    if engine.is_running():
        raise AssertionError()
    # Clearing test data and shuffling train data both support auto-restart.
    engine.clean_test_data(restart=True)
    sleep(5)
    if not engine.is_running():
        raise AssertionError()
    engine.shuffle_train_data(restart=True)
    sleep(5)
    status = engine.request_status()
    # With test data cleaned, no test predictions should be reported.
    if status.test_predictions is not None:
        raise AssertionError()
    engine.interrupt()
    # Reports must build with and without the feature listing.
    status.build_report()
    status.build_report(include_features=True)
    # The engine must not have mutated the caller's dataframes in place.
    pd.testing.assert_frame_equal(train_data, train_data_original)
    pd.testing.assert_frame_equal(test_data, test_data_original)
| {"/miraiml/__init__.py": ["/miraiml/main.py"], "/miraiml/pipeline.py": ["/miraiml/util.py", "/miraiml/core.py"], "/miraiml/main.py": ["/miraiml/util.py", "/miraiml/core.py"], "/tests/run.py": ["/miraiml/__init__.py", "/miraiml/pipeline.py"], "/miraiml/core.py": ["/miraiml/util.py"]} |
71,643 | ahmad-abdellatif/miraiml | refs/heads/master | /miraiml/core.py | """
:mod:`miraiml.core` contains internal classes responsible for the optimization
process.
"""
import random as rnd
import pandas as pd
import numpy as np
import time
import os
from sklearn.model_selection import StratifiedKFold, KFold
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
from miraiml.util import load, dump, sample_random_len
class BaseModel:
    """
    Represents an element from the search space, defined by an instance of
    :class:`miraiml.SearchSpace` and a set of features.
    Read more in the :ref:`User Guide <base_model>`.
    :type model_class: type
    :param model_class: A statistical model class that must implement the methods
        ``fit`` and ``predict`` for regression or ``predict_proba`` classification
        problems.
    :type parameters: dict
    :param parameters: The parameters that will be used to instantiate objects of
        ``model_class``.
    :type features: list
    :param features: The list of features that will be used to train the statistical
        model.
    """
    def __init__(self, model_class, parameters, features):
        self.model_class = model_class
        self.parameters = parameters
        self.features = features
    def predict(self, X_train, y_train, X_test, config):
        """
        Performs the predictions for the training and testing datasets and also
        computes the score of the model.
        :type X_train: pandas.DataFrame
        :param X_train: The dataframe that contains the training inputs for the
            model.
        :type y_train: pandas.Series or numpy.ndarray
        :param y_train: The training targets for the model.
        :type X_test: pandas.DataFrame
        :param X_test: The dataframe that contains the testing inputs for the model.
        :type config: miraiml.Config
        :param config: The configuration of the engine.
        :rtype: tuple
        :returns: ``(train_predictions, test_predictions, score)``
            * ``train_predictions``: The predictions for the training dataset
            * ``test_predictions``: The predictions for the testing dataset
            * ``score``: The score of the model on the training dataset
        :raises: ``RuntimeError`` when fitting or predicting doesn't work.
        """
        # Restrict both datasets to this base model's feature subset.
        X_train = X_train[self.features]
        train_predictions = np.zeros(X_train.shape[0])
        test_predictions = None
        if X_test is not None:
            X_test = X_test[self.features]
            test_predictions = np.zeros(X_test.shape[0])
        # NOTE(review): `fold` is only assigned for 'classification'/'regression'
        # problem types — presumably Config validates problem_type upstream.
        if config.problem_type == 'classification' and config.stratified:
            fold = StratifiedKFold(n_splits=config.n_folds, shuffle=False)
        elif config.problem_type == 'regression' or not config.stratified:
            fold = KFold(n_splits=config.n_folds, shuffle=False)
        # Out-of-fold scheme: each fold's model predicts the held-out slice of
        # the training set and contributes one full pass over the test set.
        for big_part, small_part in fold.split(X_train, y_train):
            X_train_big, y_train_big = X_train.iloc[big_part], y_train.iloc[big_part]
            X_train_small = X_train.iloc[small_part]
            # A fresh model instance per fold, configured with this base
            # model's hyperparameters.
            model = self.model_class(**self.parameters)
            class_name = self.model_class.__name__
            try:
                model.fit(X_train_big, y_train_big)
            except Exception:
                raise RuntimeError('Error when fitting with model class {}'.format(class_name))
            try:
                if config.problem_type == 'classification':
                    # Positive-class probability only (binary classification).
                    train_predictions[small_part] = model.predict_proba(X_train_small)[:, 1]
                    if X_test is not None:
                        test_predictions += model.predict_proba(X_test)[:, 1]
                elif config.problem_type == 'regression':
                    train_predictions[small_part] = model.predict(X_train_small)
                    if X_test is not None:
                        test_predictions += model.predict(X_test)
            except Exception:
                raise RuntimeError('Error when predicting with model class {}'.format(
                    class_name
                ))
        if X_test is not None:
            # Average the per-fold test predictions.
            test_predictions /= config.n_folds
        # Score is computed on the out-of-fold training predictions.
        return (train_predictions, test_predictions,
                config.score_function(y_train, train_predictions))
def dump_base_model(base_model, path):
    """
    Saves the characteristics of a base model as a checkpoint.
    Only the parameters and the features are persisted; the model class must
    be supplied again by the caller of :func:`load_base_model`.
    :type base_model: miraiml.core.BaseModel
    :param base_model: The base model to be saved
    :type path: str
    :param path: The path to save the base model
    """
    # NOTE: the previous docstring claimed a (train_predictions,
    # test_predictions, score) return value; this function returns None.
    attributes = dict(parameters=base_model.parameters, features=base_model.features)
    dump(attributes, path)
def load_base_model(model_class, path):
    """
    Reads a base model checkpoint from disk and rebuilds the corresponding
    instance of :class:`miraiml.core.BaseModel`.
    :type model_class: type
    :param model_class: The model class related to the base model
    :type path: str
    :param path: The path to load the base model from
    :rtype: miraiml.core.BaseModel
    :returns: The base model loaded from disk
    """
    stored = load(path)
    parameters, features = stored['parameters'], stored['features']
    return BaseModel(model_class, parameters, features)
class MiraiSeeker:
    """
    This class implements a smarter way of searching good parameters and sets of
    features.
    Read more in the :ref:`User Guide <mirai_seeker>`.
    :param search_spaces: The list of :class:`miraiml.SearchSpace` instances to
        keep track of.
    :type search_spaces: list
    :param all_features: A list containing all available features.
    :type all_features: list
    :param config: The configuration of the engine.
    :type config: miraiml.Config
    """
    def __init__(self, search_spaces, all_features, config):
        self.all_features = all_features
        self.config = config
        histories_path = config.local_dir + 'histories/'
        if not os.path.exists(histories_path):
            os.makedirs(histories_path)
        self.search_spaces_dict = {}
        self.histories = {}
        self.histories_paths = {}
        for search_space in search_spaces:
            id = search_space.id
            self.search_spaces_dict[id] = search_space
            self.histories_paths[id] = histories_path + id
            # Resume a persisted history if one exists; otherwise start fresh
            # and persist the empty history immediately.
            if os.path.exists(self.histories_paths[id]):
                self.histories[id] = load(self.histories_paths[id])
            else:
                self.histories[id] = pd.DataFrame()
                dump(self.histories[id], self.histories_paths[id])
    def reset(self):
        """
        Deletes all base models registries.
        """
        for id in self.search_spaces_dict:
            self.histories[id] = pd.DataFrame()
            dump(self.histories[id], self.histories_paths[id])
    def parameters_features_to_dataframe(self, parameters, features, score):
        """
        Creates an entry for a history.
        :type parameters: dict
        :param parameters: The set of parameters to transform.
        :type features: list
        :param features: The set of features to transform.
        :type score: float
        :param score: The score to transform.
        :rtype: pandas.DataFrame
        :returns: A single-row dataframe encoding the parameters, the one-hot
            feature membership and the score.
        """
        entry = {'score': score}
        # Suffixes disambiguate hyperparameter columns from feature columns.
        for parameter in parameters:
            entry[parameter+'__(hyperparameter)'] = parameters[parameter]
        for feature in self.all_features:
            entry[feature+'__(feature)'] = 1 if feature in features else 0
        return pd.DataFrame([entry])
    def register_base_model(self, id, base_model, score):
        """
        Registers the performance of a base model and its characteristics.
        :type id: str
        :param id: The id associated with the base model.
        :type base_model: miraiml.core.BaseModel
        :param base_model: The base model being registered.
        :type score: float
        :param score: The score of ``base_model``.
        """
        new_entry = self.parameters_features_to_dataframe(
            base_model.parameters,
            base_model.features, score)
        self.histories[id] = pd.concat([self.histories[id], new_entry], sort=True)
        self.histories[id].drop_duplicates(inplace=True)
        # Persist after every registration so histories survive restarts.
        dump(self.histories[id], self.histories_paths[id])
    def is_ready(self, id):
        """
        Tells whether the history of an id is large enough for more advanced
        strategies or not.
        :type id: str
        :param id: The id to be inspected.
        :rtype: bool
        :returns: Whether ``id`` can be used to generate parameters and features
            lists or not.
        """
        return self.histories[id].shape[0] > 1
    def seek(self, id):
        """
        Manages the search strategy for better solutions.
        With a probability of 0.5, the random strategy will be chosen. If it's
        not, the other strategies will be chosen with equal probabilities.
        :type id: str
        :param id: The id for which a new base model is required.
        :rtype: miraiml.core.BaseModel
        :returns: The next base model for exploration.
        :raises: ``KeyError`` if ``parameters_rules`` tries to access an invalid
            key.
        """
        if rnd.choice([0, 1]) == 1 or not self.is_ready(id):
            parameters, features = self.random_search(id)
        else:
            # Any method named *_search (except random_search) is a strategy.
            available_method_names = [method_name for method_name in dir(self)
                                      if method_name.endswith('_search')
                                      and method_name != 'random_search']
            method_name = rnd.choice(available_method_names)
            parameters, features = getattr(self, method_name)(id)
        search_space = self.search_spaces_dict[id]
        if len(parameters) > 0:
            # Give the user-provided rules a chance to adjust the parameters.
            try:
                search_space.parameters_rules(parameters)
            except Exception:
                raise KeyError('Error on parameters rules for the id {}'.format(id))
        model_class = search_space.model_class
        return BaseModel(model_class, parameters, features)
    def random_search(self, id):
        """
        Generates completely random sets of parameters and features.
        :type id: str
        :param id: The id for which we want a new set of parameters and features.
        :rtype: tuple
        :returns: ``(parameters, features)``
            Respectively, the dictionary of parameters and the list of features
            that can be used to generate a new base model.
        """
        search_space = self.search_spaces_dict[id]
        parameters = {}
        for parameter in search_space.parameters_values:
            parameters[parameter] = rnd.choice(
                search_space.parameters_values[parameter])
        if self.config.use_all_features:
            features = self.all_features
        else:
            features = sample_random_len(self.all_features)
        return (parameters, features)
    def naive_search(self, id):
        """
        Characteristics that achieved higher scores have independently higher
        chances of being chosen again.
        :type id: str
        :param id: The id for which we want a new set of parameters and features.
        :rtype: tuple
        :returns: ``(parameters, features)``
            Respectively, the dictionary of parameters and the list of features
            that can be used to generate a new base model.
        """
        history = self.histories[id]
        parameters = {}
        features = []
        for column in history.columns:
            if column == 'score':
                continue
            # Mean score per observed value of this characteristic; sampling
            # weight is proportional to that mean score.
            # NOTE(review): rnd.choices cum_weights assumes non-decreasing
            # cumulative sums, i.e. non-negative mean scores — confirm for
            # score functions that can go negative (e.g. r2_score).
            dist = history[[column, 'score']].groupby(column).mean().reset_index()
            chosen_value = rnd.choices(
                dist[column].values,
                cum_weights=dist['score'].cumsum().values)[0]
            del dist
            if column.endswith('__(hyperparameter)'):
                parameter = column.split('__(')[0]
                parameters[parameter] = chosen_value
            elif column.endswith('__(feature)'):
                feature = column.split('__(')[0]
                if self.config.use_all_features:
                    features.append(feature)
                else:
                    if chosen_value:
                        features.append(feature)
        # Never return an empty feature set.
        if len(features) == 0:
            features = sample_random_len(self.all_features)
        return (parameters, features)
    @staticmethod
    def __dataframe_to_parameters_features__(dataframe):
        """
        Transforms a history entry in a pair of parameters and features.
        :type dataframe: pandas.DataFrame
        :param dataframe: The history entry to be transformed,
        :rtype: tuple
        :returns: ``(parameters, features)``. The transformed sets of parameters
            and features, respectively.
        """
        parameters = {}
        features = []
        for column in dataframe.columns:
            if column == 'score':
                continue
            column_filtered = column.split('__(')[0]
            value = dataframe[column].values[0]
            if column.endswith('__(hyperparameter)'):
                parameters[column_filtered] = value
            elif column.endswith('__(feature)'):
                if value:
                    features.append(column_filtered)
        return (parameters, features)
    def linear_regression_search(self, id):
        """
        Uses the history to model the score with a linear regression. Guesses the
        scores of `n`/2 random sets of parameters and features, where `n` is the
        size of the history. The one with the highest score is chosen.
        :type id: str
        :param id: The id for which we want a new set of parameters and features.
        :rtype: tuple
        :returns: ``(parameters, features)``
            Respectively, the dictionary of parameters and the list of features
            that can be used to generate a new base model.
        """
        history = self.histories[id]
        n_guesses = history.shape[0]//2
        # Creating guesses:
        guesses_df = pd.DataFrame()
        for _ in range(n_guesses):
            guess_parameters, guess_features = self.random_search(id)
            guess_df = self.parameters_features_to_dataframe(
                guess_parameters, guess_features, np.nan)
            guesses_df = pd.concat([guesses_df, guess_df], sort=True)
        # Concatenating data to perform one-hot-encoding:
        data = pd.concat([history, guesses_df], sort=True)
        object_columns = [col for col in data.columns if data[col].dtype == object]
        data_ohe = pd.get_dummies(data, columns=object_columns, drop_first=True)
        # Separating train and test (guesses carry NaN scores):
        train_mask = data_ohe['score'].notna()
        data_ohe_train = data_ohe[train_mask]
        data_ohe_test = data_ohe[~train_mask].drop(columns='score')
        y = data_ohe_train.pop('score')
        # Fitting and predicting scores.
        # Fix: LinearRegression(normalize=True) — the `normalize` parameter was
        # deprecated in scikit-learn 1.0 and removed in 1.2, so this line
        # crashed on current scikit-learn. OLS predictions are invariant to
        # feature centering/scaling when an intercept is fit (the default), so
        # a plain LinearRegression yields the same predictions.
        model = LinearRegression()
        model.fit(data_ohe_train, y)
        guesses_df['score'] = model.predict(data_ohe_test)
        # Choosing the best guess:
        best_guess = guesses_df.sort_values('score', ascending=False).head(1).copy()
        del guesses_df, data, data_ohe, data_ohe_train, data_ohe_test, y, model
        return self.__dataframe_to_parameters_features__(best_guess)
class Ensembler:
    """
    Performs the ensemble of the base models and optimizes its weights.
    Read more in the :ref:`User Guide <ensemble>`.
    :type base_models_ids: list
    :param base_models_ids: The list of base models' ids to keep track of.
    :type y_train: pandas.Series or numpy.ndarray
    :param y_train: The target column.
    :type train_predictions_df: pandas.DataFrame
    :param train_predictions_df: The dataframe of predictions for the training
        dataset.
    :type test_predictions_df: pandas.DataFrame
    :param test_predictions_df: The dataframe of predictions for the testing
        dataset.
    :type scores: dict
    :param scores: The dictionary of scores.
    :type config: miraiml.Config
    :param config: The configuration of the engine.
    """
    def __init__(self, base_models_ids, y_train, train_predictions_df,
                 test_predictions_df, scores, config):
        self.y_train = y_train
        # Sorted for a deterministic column/weight ordering.
        self.base_models_ids = sorted(base_models_ids)
        self.train_predictions_df = train_predictions_df
        self.test_predictions_df = test_predictions_df
        self.scores = scores
        self.config = config
        self.id = config.ensemble_id
        self.weights_path = config.local_dir + 'models/' + self.id
        self.must_interrupt = False
        # Resume persisted weights if available; otherwise draw fresh ones
        # and persist them right away.
        if os.path.exists(self.weights_path):
            self.weights = load(self.weights_path)
        else:
            self.weights = self.gen_weights()
            dump(self.weights, self.weights_path)
    def interrupt(self):
        """
        Sets an internal flag to interrupt the optimization process on the first
        opportunity.
        """
        self.must_interrupt = True
    def update(self):
        """
        Updates the ensemble with the newest predictions from the base models.
        """
        train_predictions, test_predictions, score = self.ensemble(self.weights)
        self.train_predictions_df[self.id] = train_predictions
        self.test_predictions_df[self.id] = test_predictions
        self.scores[self.id] = score
    def gen_weights(self):
        """
        Generates the ensemble weights according to the score of each base model.
        Higher scores have higher chances of generating higher weights.
        :rtype: dict
        :returns: A dictionary containing the weights for each base model id.
        """
        weights = {}
        if len(self.scores) > 0:
            min_score, max_score = np.inf, -np.inf
            for id in self.base_models_ids:
                score = self.scores[id]
                min_score = min(min_score, score)
                max_score = max(max_score, score)
            diff_score = max_score - min_score
            for id in self.base_models_ids:
                if self.scores[id] == max_score:
                    # The best model's weight is skewed towards 1.
                    weights[id] = rnd.triangular(0, 1, 1)
                else:
                    # Other models draw from triangular distributions whose
                    # upper bound shrinks with their (normalized) score, so
                    # weaker models tend to get smaller weights.
                    normalized_score = (self.scores[id]-min_score)/diff_score
                    range_ = rnd.triangular(0, 1, normalized_score)
                    weights[id] = rnd.triangular(0, range_, 0)
        else:
            # No scores yet: equal weights.
            for id in self.base_models_ids:
                weights[id] = 1
        return weights
    def ensemble(self, weights):
        """
        Performs the ensemble of the current predictions of each base model.
        :type weights: dict
        :param weights: A dictionary containing the weights related to the id of
            each base model.
        :rtype: tuple
        :returns: ``(train_predictions, test_predictions, score)``
            * ``train_predictions``: The ensemble predictions for the training dataset
            * ``test_predictions``: The ensemble predictions for the testing dataset
            * ``score``: The score of the ensemble on the training dataset
        """
        # Weighted average of the base models' prediction columns.
        weights_list = [weights[id] for id in self.base_models_ids]
        train_predictions = np.average(
            self.train_predictions_df[self.base_models_ids],
            axis=1, weights=weights_list)
        test_predictions = None
        if self.test_predictions_df.shape[0] > 0:
            test_predictions = np.average(
                self.test_predictions_df[self.base_models_ids],
                axis=1, weights=weights_list)
        return (train_predictions, test_predictions,
                self.config.score_function(self.y_train, train_predictions))
    def optimize(self, max_duration):
        """
        Performs ensembling cycles for ``max_duration`` seconds.
        :type max_duration: float
        :param max_duration: The maximum duration allowed for the optimization
            process.
        :rtype: bool
        :returns: ``True`` if a better set of weights was found and ``False``
            otherwise.
        """
        optimized = False
        start = time.time()
        # Random restart search: keep drawing weight sets until time runs out
        # or an interruption is requested; persist every improvement.
        while time.time() - start < max_duration and not self.must_interrupt:
            weights = self.gen_weights()
            train_predictions, test_predictions, score = self.ensemble(weights)
            if self.id not in self.scores or score > self.scores[self.id]:
                self.scores[self.id] = score
                self.weights = weights
                self.train_predictions_df[self.id] = train_predictions
                self.test_predictions_df[self.id] = test_predictions
                dump(self.weights, self.weights_path)
                optimized = True
            else:
                del weights, train_predictions, test_predictions
        return optimized
class BasePipelineClass:
    """
    This is the base class for your custom pipeline classes.
    .. warning::
        Instantiating this class directly **does not work**.
    """
    def __init__(self, **params):
        self.pipeline = Pipeline(
            # self.steps has been set from outside at this point!
            [(alias, class_type()) for (alias, class_type) in self.steps]
        )
        self.set_params(**params)
    def get_params(self):
        """
        Gets the list of parameters that can be set.
        :rtype: list
        :returns: The list of allowed parameters
        """
        # Skip 'copy'-style parameters and keep only step-prefixed ones
        # (e.g. 'scaler__with_mean'), excluding the step objects themselves.
        params = [param for param in self.pipeline.get_params() if
                  'copy' not in param]
        prefixes = [alias + '__' for alias, _ in self.steps]
        return [param for param in params if
                any([param.startswith(prefix) for prefix in prefixes])]
    def set_params(self, **params):
        """
        Sets the parameters for the pipeline. You can check the parameters that
        are allowed to be set by calling :func:`get_params`.
        :param params: Step-prefixed keyword parameters to forward to the
            underlying :class:`sklearn.pipeline.Pipeline`.
        :raises: ``ValueError`` if a parameter is not in :func:`get_params`.
        :rtype: miraiml.core.BasePipelineClass
        :returns: self
        """
        allowed_params = self.get_params()
        for param in params:
            if param not in allowed_params:
                raise ValueError(
                    'Parameter ' + param + ' is incompatible. The allowed ' +
                    'parameters are:\n' + ', '.join(allowed_params)
                )
        self.pipeline.set_params(**params)
        return self
    def fit(self, X, y):
        """
        Fits the pipeline to ``X`` using ``y`` as the target.
        :type X: iterable
        :param X: The training data.
        :type y: iterable
        :param y: The target.
        :rtype: miraiml.core.BasePipelineClass
        :returns: self
        """
        self.pipeline.fit(X, y)
        return self
    def predict(self, X):
        """
        Predicts the class for each element of ``X`` in case of classification
        problems or the estimated target value in case of regression problems.
        :type X: iterable
        :param X: Data to predict on.
        :rtype: numpy.ndarray
        :returns: The set of predictions
        """
        return self.pipeline.predict(X)
    def predict_proba(self, X):
        """
        Returns the probabilities for each class. Available only if your end
        estimator implements it.
        :type X: iterable
        :param X: Data to predict on.
        :rtype: numpy.ndarray
        :returns: The probabilities for each class
        """
        return self.pipeline.predict_proba(X)
| {"/miraiml/__init__.py": ["/miraiml/main.py"], "/miraiml/pipeline.py": ["/miraiml/util.py", "/miraiml/core.py"], "/miraiml/main.py": ["/miraiml/util.py", "/miraiml/core.py"], "/tests/run.py": ["/miraiml/__init__.py", "/miraiml/pipeline.py"], "/miraiml/core.py": ["/miraiml/util.py"]} |
71,649 | colinschoen/slack-tic-tac-toe | refs/heads/master | /utils.py | HORIZONAL_BORDER = "-"
CORNER_BORDER = "+"
VERTICAL_BORDER = "|"
def getCurrentTurn(board):
    """Build a human-readable reminder of whose turn it is.

    Args:
        board (Board): The current board object

    Returns:
        str: The current players turn in a nice pretty string.
    """
    turn_is_player0 = board.player_turn == board.player0_id
    name = board.player0_nickname if turn_is_player0 else board.player1_nickname
    return "It is {}'s turn.".format(name)
def getBoard(board):
    """ Returns a pretty ASCII tic tac toe board.

    Args:
        board (list): The current board encoded in a list of lists

    Returns:
        str: The current ASCII representation of the board.

    >>> getBoard([["X", "O", "X"], ["X", "X", "X"], ["O", "X", "X"]])
    '|X|O|X|\\n|X|X|X|\\n|O|X|X|'
    >>> getBoard([[" ", " ", " "], [" ", " ", " "], [" ", " ", " "]])
    '| | | |\\n| | | |\\n| | | |'
    """
    assert type(board) is list, "The board must be a list"
    # Render each row as |a|b|c|; falsy cells (None or "") display as a
    # blank space. Joining over rows replaces the previous 9 hand-written
    # index accesses and also generalizes beyond 3x3 boards.
    rows = ["|{}|".format("|".join(cell if cell else " " for cell in row))
            for row in board]
    return "\n".join(rows)
| {"/board.py": ["/app.py", "/utils.py"], "/app.py": ["/utils.py", "/board.py"]} |
71,650 | colinschoen/slack-tic-tac-toe | refs/heads/master | /board.py | from app import db
import time
import utils
class Board(db.Model):
    """Persisted tic-tac-toe game state for a single Slack channel."""
    id = db.Column(db.Integer, primary_key=True)
    player0_id = db.Column(db.String(100))
    player1_id = db.Column(db.String(100))
    player0_nickname = db.Column(db.String(40))
    player1_nickname = db.Column(db.String(40))
    # Slack user id of the player whose turn it currently is.
    player_turn = db.Column(db.String(100))
    channel_id = db.Column(db.String(100))
    # Serialized 3x3 board: exactly 9 characters, each "X", "O" or " ".
    state = db.Column(db.String(9))
    updated_at = db.Column(db.String(100))
    created_at = db.Column(db.String(100))
    STARTING_BOARD = [[None, None, None], [None, None, None], [None, None, None]]
    def __init__(self, player0_id=None, player1_id=None, player1_nickname=None,
            player0_nickname=None, player_turn=None, channel_id=None, state=None,
            updated_at=None, created_at=None):
        """
        Creates a new board. Player 0 always plays "X" and moves first.
        """
        self.player0_id = player0_id
        self.player1_id = player1_id
        self.player0_nickname = player0_nickname
        self.player1_nickname = player1_nickname
        self.player_turn = player_turn
        if not player_turn:
            self.player_turn = player0_id
        self.channel_id = channel_id
        self.state = state
        # Fix: the timestamps used to be default arguments (time.ctime()),
        # which Python evaluates once at import time — every board got the
        # same frozen timestamps. Compute them per instance instead.
        now = time.ctime()
        self.updated_at = updated_at if updated_at is not None else now
        self.created_at = created_at if created_at is not None else now
    @staticmethod
    def encode_state(state):
        """
        Encode a game state by flattening it to a string to be stored in the DB
        args:
            state(list) The state to encode
        returns:
            strState(str) A string representation of the state
        >>> encode_state([["X", "O", "X"], ["X", "O", "X"], ["X", "O", "X"]])
        'XOXXOXXOX'
        >>> encode_state([[None, "O", "X"], ["X", None, "X"], ["X", "O", "X"]])
        ' OXX XXOX'
        """
        strState = ""
        for row in state:
            for column in row:
                # Empty cells (None) are stored as a single space.
                strState += column if column is not None else " "
        return strState
    @staticmethod
    def decode_state(state):
        """
        Decode the serialized game state by creating the list structure
        args:
            state(str) The serialized string state to decode
        returns:
            state(list) A list representation of the state
        >>> decode_state("XOXOXOXOX")
        [["X", "O", "X"], ["X", "O", "X"], ["X", "O", "X"]]
        >>> decode_state(" O OXOXOX")
        [[None, "O", None], ["X", "O", "X"], ["X", "O", "X"]]
        """
        assert type(state) is str, "state must be a string"
        state = list(state)
        lstState = []
        for _ in range(3):
            row = []
            for _ in range(3):
                # Fix: append the decoded value. The raw character was
                # appended before (leaving " " for empty cells), so blanks
                # were never translated back to None as documented above.
                element = state[0] if state[0] != " " else None
                row.append(element)
                state = state[1:]
            lstState.append(row)
        return lstState
    def isGameOver(self):
        """
        Returns true or false depending on if the game is over
        (either a player completed a line or the board is full).
        Warning -- This is really messy and can be greatly improved.
        returns:
            over(bool) - True if game is over False otherwise
        """
        #TODO(@colinschoen) Make this actually legible and more efficient
        # Perhaps use magic square
        state = str(self.state)
        # A full board (no blanks left) ends the game, winner or not.
        if " " not in state:
            return True
        state = self.decode_state(state)
        # Rows: three of the same mark wins.
        for row in state:
            player0_row_score = 0
            player1_row_score = 0
            for element in row:
                if element == "X":
                    player0_row_score += 1
                elif element == "O":
                    player1_row_score += 1
            if player0_row_score == 3 or player1_row_score == 3:
                return True
        # Check diagonals
        d1 = [state[0][0], state[1][1], state[2][2]]
        d2 = [state[2][0], state[1][1], state[0][2]]
        if d1 == ["X", "X", "X"] or d1 == ["O", "O", "O"] or d2 == ["X", "X", "X"] or d2 == ["O", "O", "O"]:
            return True
        # Finally check the columns (columns of state == rows of transpose)
        transpose = list(zip(*state))
        return any([c == ("O", "O", "O") or c == ("X", "X", "X") for c in transpose])
    @staticmethod
    def help(payload=None, args=None):
        """
        Prints a help message detailing the commands for the user
        args:
            payload (dict) - Dictionary containing POST payload from Slack.
            args (list) - List containing arguments or flags passed after Slack
            commands.
        returns:
            help (str) - A "helpful" help string
        """
        return """
        Available commands:
        board - (E.g. /ttt board) Display the current board in the channel
        move [row index] [col index] - (E.g. /ttt move 0 3) Make a move to the 0th row and 3rd column
        start [user] - (E.g. /ttt start @colin) Start a game with opponent @Colin
        """
    @staticmethod
    def start(payload, args):
        """
        Starts a new game if one doesn't already exist in the channel
        args:
            payload (dict) - Dictionary containing POST payload from Slack.
            args (list) - List containing arguments or flags passed after Slack
            commands.
        returns:
            str - The starting board and turn message, or an error message.
        """
        if len(args) < 1:
            return "Error: You must specify an opponent"
        # Does a game already exist in this channel?
        channel_id = payload['channel_id']
        board = Board.query.filter_by(channel_id=channel_id).first()
        if board and not board.isGameOver():
            return "Error: An active game already exists in this channel."
        opponent = args[0]
        if opponent[1:].lower() == payload['user_name'].lower():
            return "Error: You can't challenge yourself."
        # There isn't a way to properly validate that a user with the handle
        # exists, so just ensure the format is correct.
        if opponent[0] != '@':
            return 'Error: You must specify an opponent by their @handle'
        state = Board.encode_state(Board.STARTING_BOARD)
        # The challenger is player 0 ("X") and moves first.
        board = Board(player0_id=payload['user_id'],
                      player0_nickname=payload['user_name'],
                      player1_nickname=opponent[1:],
                      player_turn=payload['user_id'],
                      channel_id=payload['channel_id'],
                      state=state
                      )
        db.session.add(board)
        db.session.commit()
        state = Board.decode_state(state)
        return "{}\n{}".format(utils.getBoard(state), utils.getCurrentTurn(board))
    @staticmethod
    def move(payload, args):
        """
        Makes a move for a player if it is the players turn and a game exists.
        args:
            payload (dict) - Dictionary containing POST payload from Slack.
            args (list) - List containing arguments or flags passed after Slack
            commands.
        returns:
            str - The updated board, or an error message.
        """
        # Were the proper arguments passed?
        if len(args) < 2:
            return 'Error: You must specific a position to make your move (E.g. /ttt move 0 3)'
        # Make our arguments ints.
        # Fix: non-numeric arguments used to raise an uncaught ValueError;
        # report them like any other invalid position instead.
        try:
            args = [int(arg) for arg in args]
        except ValueError:
            return 'Error: Invalid row or column specified'
        # Do we have valid arguments?
        if not all([arg <= 2 and arg >= 0 for arg in args]):
            return 'Error: Invalid row or column specified'
        # Does a game exist?
        channel_id = payload['channel_id']
        board = Board.query.filter_by(channel_id=channel_id).first()
        if not board:
            return 'Error: No game exists in current channel. "Try /ttt start @opponent"'
        # Player 1's id is only learned on their first interaction; record it.
        if not board.player1_id and str(board.player1_nickname) == payload['user_name'].lower():
            board.player1_id = payload['user_id']
            db.session.commit()
        # Is this user even a player involved in the game?
        if payload['user_id'] != str(board.player0_id) and payload['user_id'] != str(board.player1_id):
            return 'Error: You are not a player in this game'
        # Is it the "invoking" players turn?
        if payload['user_id'] == board.player0_id and board.player_turn != board.player0_id or ( payload['user_id'] == board.player1_id and board.player_turn and board.player_turn != board.player1_id):
            return "Error: It is your opponents turn."
        state = Board.decode_state(str(board.state))
        row, col = args[0], args[1]
        if state[row][col] == "O" or state[row][col] == "X":
            return "Error: You can't move here."
        # Player 0 plays "X", player 1 plays "O".
        state[row][col] = "X" if payload['user_id'] == str(board.player0_id) else "O"
        # Update our board
        board.state = Board.encode_state(state)
        # Update our current players turn
        board.player_turn = board.player0_id if payload['user_id'] != str(board.player0_id) else board.player1_id
        db.session.commit()
        if board.isGameOver():
            # Delete the board so a new game can be started in this channel.
            db.session.delete(board)
            db.session.commit()
            return utils.getBoard(state) + "\n Game is over!"
        return "{}\n{}".format(utils.getBoard(state), utils.getCurrentTurn(board))
    @staticmethod
    def board(payload, args=None):
        """
        Fetches and outputs a pretty version of the current game state (board)
        args:
            payload (dict) - Dictionary containing POST payload from Slack.
            args (list) - List containing arguments or flags passed after Slack
            commands.
        returns:
            str - The rendered board and turn message, or an error message.
        """
        channel_id = payload['channel_id']
        board = Board.query.filter_by(channel_id=channel_id).first()
        if not board:
            return 'Error: No game exists in this channel'
        state = Board.decode_state(str(board.state))
        return "{}\n{}".format(utils.getBoard(state), utils.getCurrentTurn(board))
| {"/board.py": ["/app.py", "/utils.py"], "/app.py": ["/utils.py", "/board.py"]} |
71,651 | colinschoen/slack-tic-tac-toe | refs/heads/master | /app.py | from flask import Flask, jsonify, request
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Resource, Api
import utils
# API version prefix used when mounting resources (e.g. /v1/hook).
API_VERSION = 'v1'
# Empty 3x3 board template shared via app.config.
STARTING_BOARD = [[None, None, None], [None, None, None], [None, None, None]]
# Slash-command verbs the webhook will dispatch to Board methods.
VALID_COMMANDS = ['start', 'board', 'move', 'help', 'pony']
app = Flask(__name__)
api = Api(app)
# config.cfg supplies SLACK_TOKEN and the DB_* credentials used below.
app.config.from_pyfile('config.cfg')
app.config['API_VERSION'] = API_VERSION
app.config['STARTING_BOARD'] = STARTING_BOARD
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://{}:{}@{}/{}'.format(
    app.config['DB_USERNAME'],
    app.config['DB_PASSWORD'],
    app.config['DB_SERVER'],
    app.config['DB_NAME'],
)
db = SQLAlchemy(app)
# Imported after `db` is created — board.py imports `db` from this module,
# so moving this import to the top would break the circular dependency.
from board import Board
class Hook(Resource):
    """REST resource that receives Slack slash-command webhooks."""

    def post(self):
        """Validate the Slack payload and dispatch to the matching Board command."""
        payload = request.form
        # Reject requests that do not carry our configured Slack token.
        if payload["token"] != app.config['SLACK_TOKEN']:
            return "Error: Invalid Slack API Token"
        tokens = payload['text'].split()
        if not tokens:
            return """Please specify a command {} and any argument/s."""\
                .format(str(VALID_COMMANDS))
        command, args = tokens[0], tokens[1:]
        if command not in VALID_COMMANDS:
            return """{} is not a valid command. The valid commands are {}."""\
                .format(command, str(VALID_COMMANDS))
        # Dispatch to the Board static method named after the command.
        handler = getattr(Board, command)
        return jsonify({
            'response_type': 'in_channel',
            'text': handler(payload, args)
        })
# Expose the webhook at /<API_VERSION>/hook.
api.add_resource(Hook, '/{}/hook'.format(app.config['API_VERSION']))
if __name__ == "__main__":
    # Listen on all interfaces for local/dev use.
    app.run(host='0.0.0.0')
| {"/board.py": ["/app.py", "/utils.py"], "/app.py": ["/utils.py", "/board.py"]} |
71,654 | AugustoRucle/xor_or_and_gate_neural | refs/heads/master | /main.py | from Neural import Neural
from Draw import Draw
# Perceptron hyper-parameters: bias/threshold, stop tolerance, learning rate.
THETA = 1
tolerancia = 0.00005
etha = 1
# Seed the loop with an error above tolerance so training starts.
error = 2 * tolerancia
# Input
# [ value_theta, value_and, value_and ]
input_values = [[1, 0, 0, 0], [1, 0, 0, 1], [1, 0, 1, 0], [1, 1, 1, 1]]
# Output
output_values = [0 , 1, 1, 0]
# Train on the XOR-style truth table and draw its decision surface.
neural = Neural(input_values, output_values)
neural.Start(THETA, tolerancia, etha, error, 'XOR')
Draw.Draw_Error_LineChart(neural.array_error) | {"/main.py": ["/Neural.py", "/Draw.py"], "/Neural.py": ["/Draw.py"]} |
71,655 | AugustoRucle/xor_or_and_gate_neural | refs/heads/master | /Neural.py | import numpy as np
import matplotlib.pyplot as plt
from Draw import Draw
class Neural:
    """Single-layer perceptron trained with the delta rule on binary targets."""

    def __init__(self, array_input, array_output):
        # Samples and targets as numpy arrays; weights start uniform-random.
        self.array_input = np.asarray(array_input)
        self.array_output = np.asarray(array_output)
        self.array_weight = self.InitWeights(len(array_input[0]))
        self.array_error = []

    def InitWeights(self, amount_inputs):
        """Column vector of uniform random weights, one per input component."""
        return np.asarray(np.random.rand(amount_inputs, 1))

    def Start(self, THETA, tolerancia, etha, error, draw=''):
        """Iterate delta-rule updates until the squared-error sum drops
        below `tolerancia`, then optionally draw the decision boundary."""
        while error >= tolerancia:
            # Forward pass: weighted sums, then hard-limit activation.
            weighted_sums = np.dot(self.array_input, self.array_weight)
            predictions = self.Hardlim(weighted_sums)
            # Delta rule: w += etha * X^T * (target - prediction).
            error = self.GetDifference(predictions)
            DW = np.array([etha * np.dot(self.array_input.transpose(), error)]).transpose()
            self.array_weight = self.array_weight + DW
            error = self.GetEuclideanNorm(error)
            self.array_error.append(error)
        # Plot the learned boundary for the requested gate, if any.
        if draw == 'AND':
            Draw.Draw_AND_LineChart(self.array_weight, self.array_input)
        elif draw == 'OR':
            Draw.Draw_OR_LineChart(self.array_weight, self.array_input)
        elif draw == 'XOR':
            Draw.Draw_XOR_LineChart(self.array_weight)

    def Hardlim(self, array_product):
        """Step activation: 1 for a strictly positive sum, else 0."""
        return [1 if value > 0 else 0 for value in array_product]

    def GetDifference(self, result_function_activation):
        """Element-wise (target - prediction) error list."""
        return [self.array_output[i] - result_function_activation[i]
                for i in range(len(self.array_output))]

    def GetEuclideanNorm(self, array_error):
        """Sum of squared errors (squared Euclidean norm of the error vector)."""
        return sum(component ** 2 for component in array_error)
71,656 | AugustoRucle/xor_or_and_gate_neural | refs/heads/master | /Draw.py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
class Draw:
    """Static matplotlib helpers: perceptron decision boundaries and error curve."""

    @staticmethod
    def Draw_OR_LineChart(array_weight, array_x):
        """Plot the 2-D decision line for OR together with the training points."""
        size_array_input = len(array_x)
        # Two x endpoints; boundary is w1*x + w0 + w2*y = 0 solved for y.
        values_X = [-1, 2]
        values_Y = -(array_weight[1] * values_X + array_weight[0]) / array_weight[2]
        #Draw line
        plt.plot(values_X, values_Y)
        #Draw point
        # First sample in red, the remaining samples in blue.
        plt.plot(array_x[0][1], array_x[0][2], 'ro')
        for i in range(size_array_input-1):
            plt.plot(array_x[i+1][1], array_x[i+1][2], 'bo')
        plt.axis([-1, 2, -1, 2])
        plt.show()

    @staticmethod
    def Draw_AND_LineChart(array_weight, array_x):
        """Plot the 2-D decision line for AND together with the training points."""
        size_array_input = len(array_x)
        values_X = [-1, 2]
        values_Y = -(array_weight[1] * values_X + array_weight[0]) / array_weight[2]
        #Draw line
        plt.plot(values_X, values_Y)
        #Draw point
        # Last sample in blue, the others in red.
        plt.plot(array_x[3][1], array_x[3][2], 'bo')
        for i in range(size_array_input-1):
            plt.plot(array_x[i][1], array_x[i][2], 'ro')
        plt.axis([-1, 2, -1, 2])
        plt.show()

    @staticmethod
    def Draw_XOR_LineChart(array_weight):
        """Scatter four corner points in 3-D and draw the separating plane."""
        # Initialita share 3D
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        #Draw point
        ax.scatter3D(0, 0, 0, c='r');
        ax.scatter3D(0, 1, 0, c='g');
        ax.scatter3D(1, 0, 0, c='g');
        ax.scatter3D(1, 1, 1, c='r');
        #Set Labels
        ax.set_xlabel('X Label')
        ax.set_ylabel('Y Label')
        ax.set_zlabel('Z Label')
        #Create grid
        xx, yy = np.meshgrid([0, 1], [0, 1])
        # Plane: w1*x + w0*y + w2 + w3*z = 0 solved for z.
        zz = -(array_weight[1] * xx + array_weight[0] * yy + array_weight[2]) / array_weight[3]
        ax.plot_surface(xx, yy, zz, rstride=1, cstride=1, cmap='cubehelix', edgecolor='none')
        plt.show()

    @staticmethod
    def Draw_Error_LineChart(list_error):
        """Plot the recorded training error, one value per epoch."""
        array_error = np.asarray(list_error)
        plt.plot(array_error)
plt.show() | {"/main.py": ["/Neural.py", "/Draw.py"], "/Neural.py": ["/Draw.py"]} |
71,682 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0004_auto_20201027_1427.py | # Generated by Django 3.0.1 on 2020-10-27 13:27
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add Newsletter.author (FK to the user model) and make Subscriber.email
    unique with a custom uniqueness error message."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('signalsection', '0003_auto_20201027_0750'),
    ]
    operations = [
        migrations.AddField(
            model_name='newsletter',
            name='author',
            # NOTE(review): default=False for a ForeignKey is suspicious — a
            # user primary key was presumably intended; confirm.
            field=models.ForeignKey(default=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='subscriber',
            name='email',
            field=models.EmailField(blank=True, error_messages={'unique': 'This email is already registered in the database.'}, max_length=50, null=True, unique=True),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,683 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0011_auto_20201030_1511.py | # Generated by Django 3.0.1 on 2020-10-30 14:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop the explicit default from Signal.body (plain TextField)."""

    dependencies = [
        ('signalsection', '0010_auto_20201030_1508'),
    ]
    operations = [
        migrations.AlterField(
            model_name='signal',
            name='body',
            field=models.TextField(),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,684 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/admin.py | from django.contrib import admin
from .models import Signal, Category, About, Introduction, Announcement, Term, Customer, Brokers, Social, Email, Address, Number, Subscriber, Newsletter
# Register every content model with the default ModelAdmin.
admin.site.register(Signal)
admin.site.register(Category)
admin.site.register(About)
admin.site.register(Introduction)
admin.site.register(Term)
admin.site.register(Customer)
admin.site.register(Brokers)
admin.site.register(Email)
admin.site.register(Number)
admin.site.register(Address)
admin.site.register(Social)
admin.site.register(Subscriber)
admin.site.register(Newsletter)
admin.site.register(Announcement)
# Admin bulk action: deliver each selected Newsletter via its model's send().
def send_newsletter(modeladmin, request, queryset):
    for newsletter in queryset:
        newsletter.send(request)
send_newsletter.short_description = "Send selected Newsletters to all subscribers"
# NOTE(review): Newsletter was registered above with the default ModelAdmin,
# and NewsletterAdmin is never registered — this action is currently
# unreachable from the admin; confirm intent.
class NewsletterAdmin(admin.ModelAdmin):
    actions = [send_newsletter]
# Admin bulk action: deliver each selected Signal via its model's send().
def send_signal(modeladmin, request, queryset):
    for signal in queryset:
        signal.send(request)
send_signal.short_description = "Send selected Signals to Customers"
# NOTE(review): Signal was registered above with the default ModelAdmin, and
# SignalsAdmin is never registered — this action is currently unreachable
# from the admin; confirm intent.
class SignalsAdmin(admin.ModelAdmin):
    actions = [send_signal]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,685 | TJLSUDAD/Heavypips | refs/heads/master | /users/migrations/0005_auto_20201025_2033.py | # Generated by Django 3.0.1 on 2020-10-25 19:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give the CustomUser privilege flags an explicit default of False."""

    dependencies = [
        ('users', '0004_auto_20201025_1716'),
    ]
    operations = [
        migrations.AlterField(
            model_name='customuser',
            name='allowaccess',
            field=models.BooleanField(blank=True, default=False, help_text='allow access to signals', null=True, verbose_name='allow access'),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='is_newslettermanager',
            field=models.BooleanField(blank=True, default=False, help_text='let this user send newsletters', null=True, verbose_name='is newsletter manager'),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='is_signalmanager',
            field=models.BooleanField(blank=True, default=False, help_text='let this user post signals', null=True, verbose_name='is signal manager'),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,686 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0003_auto_20201027_0750.py | # Generated by Django 3.0.1 on 2020-10-27 06:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make Subscriber.email optional (blank/null), max length 50."""

    dependencies = [
        ('signalsection', '0002_auto_20201027_0639'),
    ]
    operations = [
        migrations.AlterField(
            model_name='subscriber',
            name='email',
            field=models.EmailField(blank=True, max_length=50, null=True),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,687 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0010_auto_20201030_1508.py | # Generated by Django 3.0.1 on 2020-10-30 14:08
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give Signal.body an explicit empty-string default."""

    dependencies = [
        ('signalsection', '0009_social_linkedin'),
    ]
    operations = [
        migrations.AlterField(
            model_name='signal',
            name='body',
            field=models.TextField(default=''),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,688 | TJLSUDAD/Heavypips | refs/heads/master | /users/migrations/0004_auto_20201025_1716.py | # Generated by Django 3.0.1 on 2020-10-25 16:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the signal-manager / newsletter-manager flags and make
    allowaccess nullable (defaults arrive in migration 0005)."""

    dependencies = [
        ('users', '0003_customuser_allowaccess'),
    ]
    operations = [
        migrations.AddField(
            model_name='customuser',
            name='is_newslettermanager',
            field=models.BooleanField(blank=True, help_text='let this user send newsletters', null=True, verbose_name='is newsletter manager'),
        ),
        migrations.AddField(
            model_name='customuser',
            name='is_signalmanager',
            field=models.BooleanField(blank=True, help_text='let this user post signals', null=True, verbose_name='is signal manager'),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='allowaccess',
            field=models.BooleanField(blank=True, help_text='allow access to signals', null=True, verbose_name='allow access'),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,689 | TJLSUDAD/Heavypips | refs/heads/master | /users/views.py | from django.shortcuts import render, redirect
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from .forms import UserRegisterForm, ContactForm, SubscriberForm, UserUpdaetForm
from django.conf import settings
from django.core.mail import send_mail
from .models import CustomUser
from signalsection.models import Email
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMessage
from django.http import HttpResponse
from .tokens import AccountActivationTokenGenerator
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.template.loader import render_to_string
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.auth import get_user_model
def register(request):
    """Handle user sign-up: create an inactive account and email an activation link.

    GET renders the empty registration form; POST validates it, saves the
    user with ``is_active=False`` and sends a tokenized activation email.
    Fix: the success message was previously flashed before the account was
    saved or the email sent; it is now emitted only after the send succeeds.
    """
    if request.method == 'POST':
        form = UserRegisterForm(request.POST)
        if form.is_valid():
            # Account stays disabled until the emailed link is followed.
            user = form.save(commit=False)
            user.is_active = False
            user.save()
            current_site = get_current_site(request)
            mail_subject = 'Account Activation'
            message = render_to_string('users/account_activation.html', {
                'user': user,
                'domain': current_site.domain,
                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                'token': default_token_generator.make_token(user),
            })
            to_email = form.cleaned_data.get('email')
            send_link = EmailMessage(
                mail_subject, message, to=[to_email]
            )
            send_link.send()
            # Only report success once the activation mail has been sent.
            messages.success(request, 'Your Account has been created and an Activation link has been sent to your email')
            return redirect('login')
    else:
        form = UserRegisterForm()
    return render(request, 'users/register.html', {'form': form})
def activate(request, uidb64, token):
    """Validate an emailed activation link and enable the matching account."""
    UserModel = get_user_model()
    try:
        pk = urlsafe_base64_decode(uidb64).decode()
        user = UserModel._default_manager.get(pk=pk)
    except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
        user = None
    # Guard clause: any decoding failure or stale token is rejected.
    if user is None or not default_token_generator.check_token(user, token):
        return HttpResponse('Activation link is invalid')
    user.is_active = True
    user.save()
    messages.success(request, f'Your Account has been activated you can now login')
    return redirect('login')
def ContactView(request):
    """Render the contact form; on a valid POST, email the feedback.

    Cleanup: removed the dead initial name/email/comment assignments and the
    duplicated context/render in both branches.
    """
    form = ContactForm(request.POST or None)
    if form.is_valid():
        name = form.cleaned_data.get('name')
        user_email = form.cleaned_data.get('user_email')
        comment = form.cleaned_data.get('comment')
        subject = "Heavypips Contact Form Feedback"
        # Prefix the body with the sender's identity for the recipient.
        comment = name + " with email address " + user_email + " says:\n " + comment
        send_mail(subject=subject, from_email=user_email, message=comment,
                  recipient_list=['hussainmaina27@gmail.com'])
    # Same page is rendered for GET, invalid POST, and after sending.
    context = {'form': form, 'title': 'Contact Us'}
    return render(request, 'users/contact.html', context)
@login_required
def settings(request):
    """Let the logged-in user view and update their own account details."""
    # NOTE(review): this view name shadows `settings` imported from
    # django.conf at the top of this module; renaming would break the
    # URLconf reference, so it is documented rather than changed — confirm
    # nothing later in this module needs django.conf.settings.
    if request.method == 'POST':
        form = UserUpdaetForm(request.POST, instance=request.user)
        if form.is_valid():
            form.save()
            messages.success(request, f'Your Account Has Been Updated!')
            return redirect('settings')
    else:
        form = UserUpdaetForm(instance=request.user)
    # Falls through with a bound (invalid) form on a failed POST.
    context = {'form': form, 'title': 'Settings'}
    return render(request, 'users/settings.html', context)
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,690 | TJLSUDAD/Heavypips | refs/heads/master | /users/models.py | from django.db import models
from django.contrib.auth.models import AbstractUser, BaseUserManager
from django.utils.translation import ugettext_lazy as _
from phonenumber_field.modelfields import PhoneNumberField
class CustomUserManager(BaseUserManager):
    """Model manager for the email-keyed User model (no username field)."""

    # Flags defaulted to False for regular users and required True for
    # superusers; consolidates the previous copy-pasted setdefault/check pairs.
    _PRIVILEGE_FLAGS = ('is_staff', 'is_superuser', 'is_signalmanager',
                        'is_newslettermanager', 'allowaccess')

    def _create_user(self, email, password=None, **extra_fields):
        """Create and save a User with the given email and password."""
        if not email:
            raise ValueError('The given email must be set')
        email = self.normalize_email(email)
        user = self.model(email=email, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_user(self, email, password=None, **extra_fields):
        """Create a regular user; all privilege flags default to False."""
        for flag in self._PRIVILEGE_FLAGS:
            extra_fields.setdefault(flag, False)
        return self._create_user(email, password, **extra_fields)

    def create_superuser(self, email, password=None, **extra_fields):
        """Create and save a SuperUser with the given email and password."""
        for flag in self._PRIVILEGE_FLAGS:
            extra_fields.setdefault(flag, True)
            # Messages match the originals, e.g. "Superuser must have is_staff=True."
            if extra_fields.get(flag) is not True:
                raise ValueError('Superuser must have {}=True.'.format(flag))
        return self._create_user(email, password, **extra_fields)
class CustomUser(AbstractUser):
    """Auth user keyed by email instead of username, with signal-access flags."""

    # Remove the inherited username column entirely; email is the identifier.
    username = None
    email = models.EmailField(_('email address'), unique=True,
        error_messages={
            'unique': _("A user with that email already exists."),
        },
        max_length=150,
    )
    # SMS destination for trading signals.
    phone = PhoneNumberField(max_length=30, unique=True, null=True, blank=False,
        error_messages={
            'unique': _("A user with that phone number already exists."),
        },
        help_text='We will be sending you signals through this phone number.'
    )
    # Comped access to signals without a paid membership.
    allowaccess = models.BooleanField(_('allow access'), default=False, null=True, blank=True, help_text='allow access to signals')
    # Role flags: posting signals / sending newsletters.
    is_signalmanager = models.BooleanField(_('is signal manager'), default=False,
        help_text='let this user post signals', null=True, blank=True)
    is_newslettermanager = models.BooleanField(_('is newsletter manager'), default=False,
        help_text='let this user send newsletters', null=True, blank=True)
    birth_date = models.DateField(null=True, blank=True)

    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []

    objects = CustomUserManager()
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,691 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/views.py | from django.shortcuts import render, get_object_or_404, redirect, HttpResponse
from .models import About, Introduction, Term, Privacy, Signal, Customer, Brokers, Subscriber, Newsletter, Announcement
from django.contrib.auth.decorators import login_required, user_passes_test
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.contrib.auth import logout
from celery.schedules import crontab
from django.contrib import messages
from celery.task import periodic_task
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin, PermissionRequiredMixin, AccessMixin
from paystackapi.paystack import Paystack
from django.utils.decorators import method_decorator
from django_twilio.decorators import twilio_view
from users.forms import SubscriberForm
from django.conf import settings
from twilio.rest import Client
import random
from django.core.mail import EmailMessage
from django.views.decorators.csrf import csrf_exempt
from signalsection.models import Subscriber
from users.decorators import signal_required, newsletter_required
from users.forms import SubscriberForm, NewsletterForm
from django.contrib.auth import get_user_model
import uuid
import datetime
pstk_key = settings.PAYSTACK_KEY
pstk = Paystack(secret_key=pstk_key)
@periodic_task(run_every=crontab(minute=0))
def updateaccount(request):
    """Hourly task: sync each customer's membership flag with Paystack.

    Bug fix: the original iterated over ``Customer.objects`` (a Manager,
    which is not iterable); it must be ``Customer.objects.all()``.
    NOTE(review): a celery periodic task does not receive ``request``; the
    parameter and HttpResponse return look vestigial — confirm.
    """
    for customer in Customer.objects.all():
        subscription = pstk.subscription.fetch(customer.paystack_customer_subscription_code)
        # Membership mirrors whether the Paystack subscription is active.
        customer.membership = subscription['data']['status'] == 'active'
        customer.save()
    return HttpResponse('completed')
def randoms_digits():
    """Return a random 12-digit, zero-padded numeric string.

    Bug fix: the original evaluated ``"0.12d" % random.randint(...)`` — a
    malformed printf-style format with no ``%`` conversion, which raises
    TypeError on every call. The intended conversion is ``"%012d"``
    (zero-padded to 12 digits).
    """
    return "%012d" % random.randint(0, 999999999999)
def random_digits():
    """Return a fresh random UUID, used as a subscriber confirmation token."""
    return uuid.uuid4()
@csrf_exempt
def home(request):
    """Landing page; a valid POST subscribes an email to the newsletter.

    Sends a confirmation email with a tokenized link; duplicates are
    rejected. Bug fixes: ``HttpResponse('...', 200/404)`` passed the status
    as the ``content_type`` positional argument — it is now ``status=`` —
    and the duplicate-email message had a missing space ("registeredin").
    """
    form = SubscriberForm(request.POST)
    if form.is_valid():
        email = form.cleaned_data.get('email')
        # EAFP duplicate check against existing subscribers.
        try:
            existing = Subscriber.objects.get(email=email)
        except Subscriber.DoesNotExist:
            existing = None
        if existing is not None:
            return HttpResponse('This email is already registered in the database, please use a different one.', status=404)
        sub = Subscriber()
        sub.email = email
        sub.conf_num = random_digits()
        sub.save()
        message = EmailMessage(
            from_email='noreply@heavypips.com',
            to=[sub.email],
            subject='Newsletter Confirmation',
            body='Thank you for signing up for our email newsletter! \
            Please complete the process by \
            <a href="{}?email={}&conf_num={}"> \
            clicking here to confirm your registration</a>'.format(
                request.build_absolute_uri('/confirm/'),
                sub.email,
                sub.conf_num))
        message.send()
        return HttpResponse('An Activation link has been sent to your email with instructions.', status=200)
    return render(request, 'signalsection/home.html', {'form': form, 'title': 'Home'})
def confirm(request):
    """Mark a subscriber as confirmed when the emailed token matches."""
    sub = Subscriber.objects.get(email=request.GET['email'])
    # Guard clause: token mismatch short-circuits.
    if sub.conf_num != request.GET['conf_num']:
        return HttpResponse('Invalid Confirmation Details')
    sub.confirmed = True
    sub.save()
    return HttpResponse('You have successfully subscribe to our newsletter')
def delete(request):
    """Unsubscribe: remove the subscriber when email and token both match."""
    # EAFP lookup instead of the pre-check/None-flag pattern.
    try:
        sub = Subscriber.objects.get(email=request.GET['email'])
    except Subscriber.DoesNotExist:
        return HttpResponse('Subscriber does not exist')
    if sub.conf_num != request.GET['conf_num']:
        return HttpResponse('Invalid details.')
    sub.delete()
    return HttpResponse('You have successfully unsubscribed.')
@newsletter_required
def newsletter(request):
    """Compose a newsletter from an uploaded file and mail every confirmed
    subscriber, recording the Newsletter in the database.

    Cleanup: the success message now fires after the send loop (it previously
    fired before any email went out) and the duplicated render was removed.
    """
    form = NewsletterForm(request.POST or None)
    if form.is_valid():
        subject = form.cleaned_data.get('subject')
        # The content field is an uploaded file; read it as UTF-8 HTML.
        content = form.cleaned_data.get('content').read().decode('utf-8')
        Newsletter(subject=subject, content=content).save()
        for sub in Subscriber.objects.filter(confirmed=True):
            message = EmailMessage(
                from_email='hussainmaina27@gmail.com',
                to=[sub.email],
                subject=subject,
                body=content + (
                    '<br><a href="{}/delete/?email={}&conf_num={}">Unsubscribe</a>.').format(
                    request.build_absolute_uri('/delete/'),
                    sub.email,
                    sub.conf_num))
            message.send()
        messages.success(request, 'Newsletter has been sent!')
    return render(request, 'signalsection/newsletter.html', {'title': 'Newsletter', 'form': form})
def about(request):
    """Render the About page."""
    context = {'title': 'About', 'about': About.objects.all}
    return render(request, 'signalsection/about.html', context)


def brokers(request):
    """Render the recommended-brokers page."""
    context = {'title': 'Brokers', 'brokers': Brokers.objects.all}
    return render(request, 'signalsection/brokers.html', context)


def introduction(request):
    """Render the introduction page."""
    context = {'title': 'Introduction', 'intro': Introduction.objects.all}
    return render(request, 'signalsection/introduction.html', context)


def charts(request):
    """Render the charts page."""
    return render(request, 'signalsection/charts.html', {'title': 'Charts'})


def terms(request):
    """Render the terms-of-use page."""
    context = {'terms': Term.objects.all, 'title': 'Terms of Use'}
    return render(request, 'signalsection/terms.html', context)


def privacy(request):
    """Render the privacy-policy page."""
    context = {'privacy': Privacy.objects.all, 'title': 'Privacy Policy'}
    return render(request, 'signalsection/privacy.html', context)


def subscribe(request):
    """Render the subscription-plans page."""
    return render(request, 'signalsection/subscribe.html', {'title': 'Subscribe'})


def signalsub(request):
    """Render the signals subscription page."""
    return render(request, 'signalsection/signalsub.html', {'title': 'Signals'})
@login_required
def checkout(request):
    """Paystack checkout: GET renders the plan page, POST creates the
    Paystack customer plus subscription and the local Customer record."""
    try:
        # Members with an active subscription go straight to the dashboard.
        if request.user.customer.membership:
            return redirect('dashboard')
    except Customer.DoesNotExist:
        pass
    if request.method == 'POST':
        pstk_user = pstk.customer.create(email=request.user.email,
                                         first_name=request.user.first_name,
                                         last_name=request.user.last_name,
                                         phone= str(request.user.phone),
                                         )
        plan_code = '***********'
        # NOTE(review): when plan == 'signalyearly' only plan_code is set and
        # the subscription/Customer creation below (the `else` branch) is
        # skipped entirely — this looks unintended; confirm.
        if request.POST.get('plan') == 'signalyearly':
            plan_code = '******'
        else:
            customer_subscription = pstk.subscription.create(customer=pstk_user['data']['customer_code'],
                                                             plan=plan_code)
            customer = Customer()
            customer.user = request.user
            customer.paystack_customer_code = pstk_user['data']['customer_code']
            customer.phone = request.user.phone
            customer_code = pstk_user['data']['customer_code']
            customer.membership = True
            # Paystack holds the subscription code on the customer record.
            subscription_code = pstk.customer.get(customer_code)
            customer.paystack_customer_subscription_code = subscription_code['data']['subscriptions'][0]['subscription_code']
            customer.save()
        return redirect('dashboard')
    else:
        # GET: default to the monthly plan, switch to yearly when requested.
        plan = 'signalmonthly'
        name = 'Monthly Signal'
        price = 5000
        plan_code = '********'
        title_plan = 'Monthly Plan'
        if request.method == 'GET':
            if request.GET['plan'] == 'signalyearly':
                plan = 'signalyearly'
                price = 50000
                name = 'Yearly Signals'
                plan_code = '**********'
                title_plan = 'Yearly Plan'
        return render(request, 'signalsection/checkout.html',
                      {'plan_code': plan_code, 'price': price, 'title': f'{ title_plan }', 'name': name, })
class MembershipUpdateMixin(AccessMixin):
    """Sync customer membership flags with Paystack before dispatching.

    Bug fix: the original iterated over ``Customer.objects.all`` (the bound
    method object itself, not its result), which raises TypeError; the call
    parentheses were missing.
    """

    def dispatch(self, request, *args, **kwargs):
        # NOTE(review): as in the original, the FIRST customer examined
        # short-circuits the loop with a redirect — confirm that is intended.
        for customer in Customer.objects.all():
            subscription = pstk.subscription.fetch(customer.paystack_customer_subscription_code)
            if subscription['data']['status'] == 'active':
                customer.membership = True
                customer.save()
                return redirect('dashboard')
            customer.membership = False
            customer.save()
            return redirect('checkout')
        return super(MembershipUpdateMixin, self).dispatch(request, *args, **kwargs)
class Dashboard(LoginRequiredMixin, ListView):
    """Paginated signal feed plus announcements from the last two days."""

    model = Signal
    template_name = 'signalsection/dashboard.html'
    context_object_name = 'signals'
    ordering = ['-date_posted']
    paginate_by = 9

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        # Only announcements posted within the last two days are surfaced.
        cutoff = datetime.datetime.now() - datetime.timedelta(days=2)
        context['announcement'] = Announcement.objects.filter(
            date_posted__gte=cutoff).order_by('-date_posted')
        context['title'] = 'Dashboard'
        return context
class LogoutIfNotStaffMixin(AccessMixin):
    """Force-logout any non-staff user and deny access to the view."""

    def dispatch(self, request, *args, **kwargs):
        if request.user.is_staff:
            return super(LogoutIfNotStaffMixin, self).dispatch(request, *args, **kwargs)
        logout(request)
        return self.handle_no_permission()
@method_decorator([signal_required], name='dispatch')
class SignalCreateView(UserPassesTestMixin, CreateView):
    """Create a Signal and SMS it to paying customers and comped users.

    Bug fixes: ``get_user_model().filter(...)`` was missing the ``.objects``
    manager (AttributeError), and the trailing ``redirect()`` was called with
    no URL name (TypeError) — it now targets 'dashboard' like success_url.
    """
    model = Signal
    fields = ['category', 'pair', 'body']
    success_url = reverse_lazy('dashboard')

    def form_valid(self, form):
        form.instance.author = self.request.user
        messages.success(self.request, 'signal created with success!')
        return super().form_valid(form)

    def test_func(self):
        # Only signal managers may post signals.
        if self.request.user.is_signalmanager:
            return True
        return False

    @method_decorator(twilio_view)
    def dispatch(self, request, *args, **kwargs):
        if request.method == 'POST':
            # Re-sync membership flags from Paystack before broadcasting.
            for customer in Customer.objects.all():
                subscription = pstk.subscription.fetch(customer.paystack_customer_subscription_code)
                customer.membership = subscription['data']['status'] == 'active'
                customer.save()
            receiver = Customer.objects.filter(membership=True)
            # Bug fix: query via the user model's manager.
            access = get_user_model().objects.filter(allowaccess=True)
            client = Client(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN)
            for recepient in receiver:
                client.messages.create(to=recepient.phone, from_='Heavypips',
                                       body=f"{request.POST.get('pair')}:\n {request.POST.get('body')} \n Happy Trading")
            for user in access:
                client.messages.create(to=user.phone, from_='Heavypips',
                                       body=f"{request.POST.get('pair')}:\n {request.POST.get('body')}")
            messages.success(request, f'signal sent successfully')
            # Bug fix: redirect() requires a target.
            return redirect('dashboard')
        return super().dispatch(request, *args, **kwargs)
@method_decorator([signal_required], name='dispatch')
class SignalUpdateView(UserPassesTestMixin, UpdateView):
    """Let the signal's author (or any staff member) edit it."""

    model = Signal
    fields = ['category', 'pair', 'body']
    success_url = reverse_lazy('dashboard')

    def form_valid(self, form):
        form.instance.author = self.request.user
        messages.success(self.request, 'signal updated with success!')
        return super().form_valid(form)

    def test_func(self):
        target = self.get_object()
        # Author or staff only.
        return self.request.user == target.author or self.request.user.is_staff
@method_decorator([signal_required], name='dispatch')
class SignalDeleteView(UserPassesTestMixin, DeleteView):
    """Let the signal's author (or any staff member) delete it.

    Bug fix: the original called ``messages.ERROR(...)`` — ``ERROR`` is an
    integer level constant, not callable, so every delete raised TypeError.
    The flash text announces success, so ``messages.success`` (matching the
    sibling create/update views) is the intended call; swap to
    ``messages.error`` if a red-styled flash was actually wanted.
    """
    model = Signal
    success_url = reverse_lazy('dashboard')

    def form_valid(self, form):
        form.instance.author = self.request.user
        messages.success(self.request, 'signal deleted with success!')
        return super().form_valid(form)

    def test_func(self):
        signal = self.get_object()
        if self.request.user == signal.author or self.request.user.is_staff:
            return True
        return False
@method_decorator([signal_required], name='dispatch')
class AnnouncementCreateView(UserPassesTestMixin, CreateView):
    """Signal managers can post announcements shown on the dashboard."""

    model = Announcement
    fields = ['body']
    template_name = 'signalsection/announcement_form.html'
    success_url = reverse_lazy('dashboard')

    def form_valid(self, form):
        form.instance.author = self.request.user
        messages.success(self.request, 'Added Announcement!')
        return super().form_valid(form)

    def test_func(self):
        # Guard clause: only signal managers pass.
        if not self.request.user.is_signalmanager:
            return False
        return True
@method_decorator([signal_required], name='dispatch')
class UserSignalsListView(ListView):
    """Paginated list of all signals authored by the user named in the URL."""

    model = Signal
    template_name = 'signalsection/user_signals.html'
    context_object_name = 'signals'
    ordering = ['-date_posted']
    paginate_by = 9

    def get_queryset(self):
        # 404 when the email in the URL matches no user.
        author = get_object_or_404(get_user_model(), email=self.kwargs.get('email'))
        return Signal.objects.filter(author=author).order_by('-date_posted')
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,692 | TJLSUDAD/Heavypips | refs/heads/master | /users/forms.py | from django import forms
from django.conf import settings
from django.utils import timezone
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm, PasswordResetForm
from phonenumber_field.formfields import PhoneNumberField
from phonenumber_field.widgets import PhoneNumberPrefixWidget
from django.contrib.auth import get_user_model
from django.utils.translation import gettext, gettext_lazy as _
#from ckeditor.widgets import CKEditorWidget
class UserRegisterForm(UserCreationForm):
    """Sign-up form for the custom user model, with placeholder-styled widgets."""
    email = forms.EmailField(label='', widget=forms.EmailInput(attrs={'placeholder': 'Email'}))
    first_name = forms.CharField(label='', widget=forms.TextInput(attrs={'placeholder': 'First Name'}))
    last_name = forms.CharField(label='', widget=forms.TextInput(attrs={'placeholder': 'Last Name'}))
    # NOTE(review): a NumberInput widget may strip the leading "+" of an
    # international number before PhoneNumberField validates — confirm; a
    # TextInput is usually safer for phone numbers.
    phone = PhoneNumberField(required=True, label='',
                             widget=forms.NumberInput(attrs={'placeholder': 'Phone: +234 8035873345'}))
    birth_date = forms.DateField(label='', widget=forms.DateInput(attrs={'placeholder': 'DOB: YYYY-MM-DD '}))
    password1 = forms.CharField(label='', widget=forms.PasswordInput(attrs={'placeholder': 'Password'}))
    password2 = forms.CharField(label='', widget=forms.PasswordInput(attrs={'placeholder': 'Password Confirmation'}))

    class Meta:
        model = get_user_model()
        # BUG FIX: 'phone' was listed twice in the original fields list.
        fields = ['first_name', 'last_name', 'phone', 'birth_date', 'email', 'password1', 'password2']
class UserLoginForm(AuthenticationForm):
    """Email/password login form with placeholder-styled widgets.

    The original defined an ``__init__`` that only called ``super()`` — pure
    boilerplate, removed with no behavior change.
    """
    username = forms.EmailField(label='', widget=forms.EmailInput(attrs={'placeholder': 'Email'}))
    password = forms.CharField(label='', widget=forms.PasswordInput(attrs={'placeholder': 'Password'}))
class MyPasswordResetForm(PasswordResetForm):
    """Password-reset form; only restyles the email widget.

    Redundant super()-only ``__init__`` removed (no behavior change).
    """
    email = forms.EmailField(label='', widget=forms.EmailInput(attrs={'placeholder': 'Email'}))
class MyPasswordChangeForm(PasswordChangeForm):
    """Password-change form with placeholder-styled widgets.

    Redundant super()-only ``__init__`` removed (no behavior change).
    """
    old_password = forms.CharField(label='', widget=forms.PasswordInput(attrs={'placeholder': 'Old Password'}))
    new_password1 = forms.CharField(label='', widget=forms.PasswordInput(attrs={'placeholder': 'New Password'}))
    new_password2 = forms.CharField(label='',
                                    widget=forms.PasswordInput(attrs={'placeholder': 'New Password Confirmation'}))
class ContactForm(forms.Form):
    """Simple contact form: name, email, free-text comment.

    Redundant super()-only ``__init__`` removed (no behavior change).
    """
    name = forms.CharField(max_length=50, label='', widget=forms.TextInput(attrs={'placeholder': 'Name'}))
    user_email = forms.EmailField(max_length=50, label='', widget=forms.EmailInput(attrs={'placeholder': 'Email'}))
    comment = forms.CharField(label='', widget=forms.Textarea(attrs={'placeholder': 'Comment'}))
class SubscriberForm(forms.Form):
    """Newsletter subscription form: a single email field.

    Redundant super()-only ``__init__`` removed (no behavior change).
    """
    email = forms.EmailField(max_length=50, label='', widget=forms.EmailInput(attrs={'placeholder': 'Email'}))
class NewsletterForm(forms.Form):
    """Newsletter composition form: subject plus an uploaded content file.

    Redundant super()-only ``__init__`` removed (no behavior change).
    """
    subject = forms.CharField(max_length=150, label='', widget=forms.TextInput(attrs={'placeholder': 'Subject'}))
    content = forms.FileField(label='', widget=forms.FileInput(attrs={'placeholder': 'Content'}))
class UserUpdaetForm(forms.ModelForm):
    """Profile-update form for the custom user model.

    NOTE(review): the class name has a typo ("Updaet"); it is kept as-is
    because other modules import it by this name — rename in a coordinated
    change if desired. Redundant super()-only ``__init__`` removed.
    """
    email = forms.EmailField(label='', widget=forms.EmailInput(attrs={'placeholder': 'Email'}))
    first_name = forms.CharField(label='', widget=forms.TextInput(attrs={'placeholder': 'First Name'}))
    last_name = forms.CharField(label='', widget=forms.TextInput(attrs={'placeholder': 'Last Name'}))
    phone = PhoneNumberField(required=True, label='',
                             widget=forms.NumberInput(attrs={'placeholder': 'Phone: +234 8035873345'}))

    class Meta:
        model = get_user_model()
        fields = ['email', 'phone', 'first_name', 'last_name']
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,693 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0001_initial.py | # Generated by Django 3.0.1 on 2020-10-25 12:33
import ckeditor_uploader.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial schema for the signalsection app.
    # Do not hand-edit: this is applied migration history, not live code.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='About',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='AboutPage', max_length=150)),
                ('text', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True)),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('address', models.CharField(default='', max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Brokers',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='Brokers Page', max_length=150)),
                ('url', models.CharField(default='https://', max_length=1000)),
                ('description', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True)),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('slug', models.SlugField(max_length=150, unique=True)),
            ],
            options={
                'verbose_name': 'category',
                'verbose_name_plural': 'categories',
            },
        ),
        migrations.CreateModel(
            name='Email',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(default='', max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Introduction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='IntroductionPage', max_length=150)),
                ('text', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True)),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Newsletter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('subject', models.CharField(max_length=150)),
                ('content', models.FileField(upload_to='newsletter_uploads/')),
            ],
        ),
        migrations.CreateModel(
            name='Number',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('number', models.CharField(default='', max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Privacy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='PrivacyPage', max_length=150)),
                ('text', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True)),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Social',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('instagram', models.CharField(default='', max_length=255)),
                ('twitter', models.CharField(default='', max_length=255)),
                ('facebook', models.CharField(default='', max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Subscriber',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254, unique=True)),
                ('conf_num', models.CharField(max_length=15)),
                ('confirmed', models.BooleanField(default=False)),
                ('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Term',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='TermsPage', max_length=150)),
                ('text', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True)),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Signal',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pair', models.CharField(max_length=50)),
                ('body', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True)),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='signalsection.Category')),
            ],
        ),
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('phone', models.CharField(max_length=50)),
                ('paystack_customer_code', models.CharField(max_length=255)),
                ('paystack_customer_subscription_code', models.CharField(max_length=255)),
                ('membership', models.BooleanField(default=False)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,694 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0012_auto_20201030_1514.py | # Generated by Django 3.0.1 on 2020-10-30 14:14
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: make the four Social link fields optional
    # (blank/null) instead of defaulting to ''. Do not hand-edit.
    dependencies = [
        ('signalsection', '0011_auto_20201030_1511'),
    ]
    operations = [
        migrations.AlterField(
            model_name='social',
            name='facebook',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='social',
            name='instagram',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='social',
            name='linkedin',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='social',
            name='twitter',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,695 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/processor.py | from . models import Email, Number, Address, Social
def context(request):
    """Context processor exposing the latest contact/social records to all templates.

    BUG FIX: the original assigned the bound ``.last`` method itself (no call),
    relying on Django templates to invoke the callable at render time. Calling
    it explicitly returns the actual model instance (or None), so the values
    are also usable from Python code, not just templates.
    """
    return {
        'number': Number.objects.last(),
        'email': Email.objects.last(),
        'address': Address.objects.last(),
        'social': Social.objects.last(),
    }
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,696 | TJLSUDAD/Heavypips | refs/heads/master | /users/decorators.py | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
def newsletter_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url='login'):
    '''
    View decorator requiring an active user with the newsletter-manager flag;
    anyone else is redirected to the login page.
    Usable bare (@newsletter_required) or with arguments.
    '''
    check = user_passes_test(
        lambda u: u.is_active and u.is_newslettermanager,
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    # Support both @newsletter_required and @newsletter_required(...).
    return check(function) if function else check
def signal_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url='login'):
    '''
    View decorator requiring an active user with the signal-manager flag;
    anyone else is redirected to the login page.
    Usable bare (@signal_required) or with arguments.
    '''
    check = user_passes_test(
        lambda u: u.is_active and u.is_signalmanager,
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    # Support both @signal_required and @signal_required(...).
    return check(function) if function else check
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,697 | TJLSUDAD/Heavypips | refs/heads/master | /users/migrations/0003_customuser_allowaccess.py | # Generated by Django 3.0.1 on 2020-10-25 16:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: add nullable boolean "allowaccess" to CustomUser.
    # Do not hand-edit applied migration history.
    dependencies = [
        ('users', '0002_auto_20201025_1420'),
    ]
    operations = [
        migrations.AddField(
            model_name='customuser',
            name='allowaccess',
            field=models.BooleanField(blank=True, null=True),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,698 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/urls.py | from django.urls import path
from . import views
from .views import Dashboard, SignalCreateView, SignalDeleteView, SignalUpdateView, AnnouncementCreateView, UserSignalsListView
urlpatterns = [
    path('', views.home, name='home'),
    path('user/<str:email>/', UserSignalsListView.as_view(), name='user-signals'),
    path('signal/<int:pk>/update/', SignalUpdateView.as_view(), name='signal-update'),
    path('signal/new/', SignalCreateView.as_view(), name='signal-create'),
    path('signal/<int:pk>/delete/', SignalDeleteView.as_view(), name='signal-delete'),
    path('about-us/', views.about, name='about'),
    path('introduction/', views.introduction, name='introduction'),
    path('brokers/', views.brokers, name='brokers'),
    path('charts/', views.charts, name='charts'),
    path('terms-of-use/', views.terms, name='terms'),
    # NOTE(review): the privacy URL is routed to views.terms — this looks like a
    # copy/paste slip; confirm whether a views.privacy view exists and should be
    # wired here instead.
    path('privacy-policy/', views.terms, name='privacy'),
    path('subscribe/', views.subscribe, name='subscribe'),
    path('dashboard/', Dashboard.as_view(), name='dashboard'),
    path('announcement/new/', AnnouncementCreateView.as_view(), name='announcement'),
    path('signal-subscription/', views.signalsub, name='signalsub'),
    path('checkout/', views.checkout, name='checkout'),
    path('confirm/', views.confirm, name='confirm'),
    path('delete/', views.delete, name='delete'),
    path('newsletter/', views.newsletter, name='newsletter'),
]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,699 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/migrations/0008_auto_20201028_0622.py | # Generated by Django 3.0.1 on 2020-10-28 05:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: tighten Subscriber.email (max_length 50, custom unique
    # error message). Do not hand-edit applied migration history.
    dependencies = [
        ('signalsection', '0007_auto_20201027_1431'),
    ]
    operations = [
        migrations.AlterField(
            model_name='subscriber',
            name='email',
            field=models.EmailField(error_messages={'unique': 'This email is already registered in the database.'}, max_length=50, null=True, unique=True),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,700 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/apps.py | from django.apps import AppConfig
class SignalsectionConfig(AppConfig):
    # Standard Django application configuration for the "signalsection" app.
    name = 'signalsection'
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,701 | TJLSUDAD/Heavypips | refs/heads/master | /signalsection/models.py | from django.db import models
from django.utils import timezone
from django.urls import reverse
from ckeditor_uploader.fields import RichTextUploadingField
from django.core.mail import EmailMessage
from django.conf import settings
from twilio.rest import Client
class Customer(models.Model):
    # Payment/subscription profile, linked one-to-one to a user account.
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    phone = models.CharField(max_length=50)
    # Identifiers assigned by the Paystack payment provider.
    paystack_customer_code = models.CharField(max_length=255)
    paystack_customer_subscription_code = models.CharField(max_length=255)
    # True while the customer has an active paid membership; Signal.send()
    # only SMSes customers with membership=True.
    membership = models.BooleanField(default=False)
    def __str__(self):
        return self.user.email
class About(models.Model):
    # CMS-style "About" page: rich text edited via CKEditor (with YouTube plugin).
    title = models.CharField(default='AboutPage', max_length=150)
    text = RichTextUploadingField(blank=True, null=True, config_name='special', external_plugin_resources=[(
        'youtube',
        '/static/signalsection/vendor/ckeditor_plugins/youtube/youtube/',
        'plugin.js'
    )])
    date_posted = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        # NOTE(review): reverses 'signal-detail', which is not among the URL
        # names visible in signalsection/urls.py — confirm the route exists or
        # this will raise NoReverseMatch at runtime.
        return reverse('signal-detail', kwargs={'pk': self.pk})
class Introduction(models.Model):
    # CMS-style "Introduction" page (same CKEditor setup as About).
    title = models.CharField(default='IntroductionPage', max_length=150)
    text = RichTextUploadingField(blank=True, null=True, config_name='special', external_plugin_resources=[(
        'youtube',
        '/static/signalsection/vendor/ckeditor_plugins/youtube/youtube/',
        'plugin.js'
    )])
    date_posted = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.title
class Brokers(models.Model):
    # A recommended broker: display name, external link, rich description.
    name = models.CharField(default='Brokers Page', max_length=150)
    url = models.CharField(default='https://', max_length=1000)
    description = RichTextUploadingField(blank=True, null=True, config_name='special', external_plugin_resources=[(
        'youtube',
        '/static/signalsection/vendor/ckeditor_plugins/youtube/youtube/',
        'plugin.js'
    )])
    date_posted = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.name
class Announcement(models.Model):
    # A site announcement authored by a user; body is required rich text.
    body = RichTextUploadingField(blank=False, null=False, config_name='special', external_plugin_resources=[(
        'youtube',
        '/static/signalsection/vendor/ckeditor_plugins/youtube/youtube/',
        'plugin.js'
    )])
    date_posted = models.DateTimeField(default=timezone.now)
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    def __str__(self):
        return f'({self.author}) ,{self.body}, {self.date_posted}'
class Signal(models.Model):
    # A trading signal (currency pair + text body), optionally categorised.
    pair = models.CharField(max_length=50)
    body = models.TextField(null=False, blank=False)
    date_posted = models.DateTimeField(default=timezone.now)
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    category = models.ForeignKey('Category', on_delete=models.SET_NULL, null=True)
    def __str__(self):
        return f'({self.author}) ,{self.category}, {self.body}'
    def send(self, request):
        # SMS this signal to every customer with an active membership via Twilio.
        # NOTE(review): from_='Heavypips' is an alphanumeric sender id — not
        # supported by all carriers/countries; confirm for the target market.
        # NOTE(review): one synchronous API call per recipient; consider a task
        # queue if the customer list grows. `request` is currently unused.
        receiver = Customer.objects.filter(membership=True)
        client = Client(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN)
        for recepient in receiver:
            client.messages.create(to=recepient.phone, from_='Heavypips',
                body=f"{self.pair}, {self.body} ")
class Category(models.Model):
    # A signal category, addressed by unique slug.
    name = models.CharField(max_length=100)
    slug = models.SlugField(max_length=150, unique=True)
    class Meta:
        verbose_name = 'category'
        verbose_name_plural = 'categories'
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        # NOTE(review): reverses 'signal-category-detail', which is not among
        # the URL names visible in signalsection/urls.py — confirm it exists.
        return reverse('signal-category-detail', kwargs={'slug': self.slug})
class Term(models.Model):
    # CMS-style "Terms of use" page (CKEditor rich text).
    title = models.CharField(default='TermsPage', max_length=150)
    text = RichTextUploadingField(blank=True, null=True, config_name='special', external_plugin_resources=[(
        'youtube',
        '/static/signalsection/vendor/ckeditor_plugins/youtube/youtube/',
        'plugin.js'
    )])
    date_posted = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.title
class Privacy(models.Model):
    # CMS-style "Privacy policy" page (CKEditor rich text).
    title = models.CharField(default='PrivacyPage', max_length=150)
    text = RichTextUploadingField(blank=True, null=True, config_name='special', external_plugin_resources=[(
        'youtube',
        '/static/signalsection/vendor/ckeditor_plugins/youtube/youtube/',
        'plugin.js'
    )])
    date_posted = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.title
class Email(models.Model):
    # Site contact email shown via the context processor (latest row wins).
    email = models.EmailField(default='', max_length=255)
    def __str__(self):
        return self.email
class Number(models.Model):
    # Site contact phone number shown via the context processor.
    number = models.CharField(default='', max_length=50)
    def __str__(self):
        return self.number
class Address(models.Model):
    # Site contact address shown via the context processor.
    address = models.CharField(default='', max_length=50)
    def __str__(self):
        return self.address
class Social(models.Model):
    # Social-media links shown site-wide; `name` is a fixed display label.
    name = 'Social Media'
    instagram = models.CharField(max_length=255, null=True,blank=True)
    twitter = models.CharField(max_length=255, null=True,blank=True)
    facebook = models.CharField(max_length=255, null=True,blank=True)
    linkedin = models.CharField(max_length=255, null=True,blank=True)
    def __str__(self):
        return self.name
class Subscriber(models.Model):
    # Newsletter subscriber; `conf_num` is the opt-in/opt-out confirmation code
    # and `confirmed` flips once the subscriber verifies the email.
    email = models.EmailField(null=True, blank=False, max_length=50, unique=True,
        error_messages={'unique': 'This email is already registered in the database.'})
    conf_num = models.CharField(max_length=15)
    confirmed = models.BooleanField(default=False)
    timestamp = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return self.email + "(" + str(self.confirmed) + ")"
class Newsletter(models.Model):
    """An uploaded newsletter that can be emailed to all confirmed subscribers."""
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    subject = models.CharField(max_length=150)
    content = models.FileField(upload_to='newsletter_uploads/')
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)

    def __str__(self):
        return f"{self.subject} {self.created_at.strftime('%B %d, %Y')} by {self.author}"

    def send(self, request):
        """Email this newsletter to every confirmed subscriber.

        BUG FIXES versus the original:
        * ``to=sub.emal`` (typo) raised AttributeError — now ``to=[sub.email]``
          (EmailMessage expects a list/tuple of recipients, not a bare string).
        * the body concatenated the FileField object (TypeError) instead of the
          decoded text already read into ``contents``.
        * the unsubscribe link prepended ``/delete/`` twice: the template was
          ``"{}/delete/?..."`` formatted with ``build_absolute_uri('/delete/')``.
        """
        contents = self.content.read().decode('utf-8')
        subscribers = Subscriber.objects.filter(confirmed=True)
        for sub in subscribers:
            # Per-subscriber unsubscribe footer carrying email + confirmation code.
            unsubscribe = '<br><a href="{}?email={}&conf_num={}">Unsubscribe</a>.'.format(
                request.build_absolute_uri('/delete/'), sub.email, sub.conf_num)
            message = EmailMessage(
                from_email=settings.FROM_EMAIL,
                to=[sub.email],
                subject=self.subject,
                body=contents + unsubscribe)
            message.send()
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,702 | TJLSUDAD/Heavypips | refs/heads/master | /users/migrations/0006_auto_20201025_2056.py | # Generated by Django 3.0.1 on 2020-10-25 19:56
from django.db import migrations
import phonenumber_field.modelfields
class Migration(migrations.Migration):
    # Auto-generated: add CustomUser.phone_number and convert CustomUser.phone
    # to a unique PhoneNumberField. Do not hand-edit applied migration history.
    dependencies = [
        ('users', '0005_auto_20201025_2033'),
    ]
    operations = [
        migrations.AddField(
            model_name='customuser',
            name='phone_number',
            field=phonenumber_field.modelfields.PhoneNumberField(blank=True, error_messages={'unique': 'A user with that phone number already exists.'}, help_text='We will be sending you signals through this phone number.', max_length=30, null=True, region=None, unique=True),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='phone',
            field=phonenumber_field.modelfields.PhoneNumberField(blank=True, error_messages={'unique': 'A user with that phone number already exists.'}, help_text='We will be sending you signals through this phone number.', max_length=30, null=True, region=None, unique=True),
        ),
    ]
| {"/signalsection/admin.py": ["/signalsection/models.py"], "/users/views.py": ["/users/forms.py", "/users/models.py", "/signalsection/models.py"], "/signalsection/views.py": ["/signalsection/models.py", "/users/forms.py", "/users/decorators.py"], "/signalsection/processor.py": ["/signalsection/models.py"], "/signalsection/urls.py": ["/signalsection/views.py"]} |
71,709 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0005_auto_20190911_1941.py | # Generated by Django 2.2.5 on 2019-09-11 19:41
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated: add PlaySession.started and relax used_cards to blank.
    # Do not hand-edit applied migration history.
    dependencies = [
        ('api', '0004_auto_20190911_1939'),
    ]
    operations = [
        migrations.AddField(
            model_name='playsession',
            name='started',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='playsession',
            name='used_cards',
            field=models.ManyToManyField(blank=True, to='api.Card'),
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,710 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0003_playsession.py | # Generated by Django 2.2.5 on 2019-09-11 19:28
import api.models
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: create PlaySession (later renamed to Session in 0006).
    # Do not hand-edit applied migration history.
    dependencies = [
        ('api', '0002_auto_20190911_1744'),
    ]
    operations = [
        migrations.CreateModel(
            name='PlaySession',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('started', models.DateTimeField(auto_now=True)),
                ('secret', models.CharField(default=api.models.random_secret, max_length=16)),
                ('used_cards', models.ManyToManyField(to='api.Card')),
            ],
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,711 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0004_auto_20190911_1939.py | # Generated by Django 2.2.5 on 2019-09-11 19:39
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: rename started -> last_activity; mark used_cards optional.
    # NOTE(review): null=True on a ManyToManyField has no effect (Django warns
    # about it); removed again by migration 0005. Do not hand-edit.
    dependencies = [
        ('api', '0003_playsession'),
    ]
    operations = [
        migrations.RenameField(
            model_name='playsession',
            old_name='started',
            new_name='last_activity',
        ),
        migrations.AlterField(
            model_name='playsession',
            name='used_cards',
            field=models.ManyToManyField(blank=True, null=True, to='api.Card'),
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,712 | LotteSuz/intseconds30 | refs/heads/master | /game/urls.py | from django.urls import path
from . import views
urlpatterns = [
    # Landing page and the main "30 seconds" game view.
    path('', views.index, name='index'),
    path('seconds', views.seconds, name='seconds'),
]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,713 | LotteSuz/intseconds30 | refs/heads/master | /api/admin.py | from django.contrib import admin
from .models import Word, Card, Session, Category, Pack
# Admin wiring: Words are edited inline on Card/Category/Pack pages.
class WordInline(admin.TabularInline):
    model = Word
    extra = 0
class CardAdmin(admin.ModelAdmin):
    inlines = (WordInline,)
class SessionAdmin(admin.ModelAdmin):
    # Timestamps are auto-managed; keep them read-only in the admin form.
    readonly_fields = ('started', 'last_activity',)
class CategoryAdmin(admin.ModelAdmin):
    inlines = (WordInline,)
class PackAdmin(admin.ModelAdmin):
    inlines = (WordInline,)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Card, CardAdmin)
admin.site.register(Session, SessionAdmin)
admin.site.register(Word)
admin.site.register(Pack, PackAdmin)
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,714 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0013_word_pack.py | # Generated by Django 2.2.5 on 2019-09-12 10:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: add Word.pack FK (nullable, SET_NULL). Do not hand-edit.
    dependencies = [
        ('api', '0012_auto_20190912_1026'),
    ]
    operations = [
        migrations.AddField(
            model_name='word',
            name='pack',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='words', to='api.Pack'),
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,715 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0012_auto_20190912_1026.py | # Generated by Django 2.2.5 on 2019-09-12 10:26
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: allow Word.description to be blank. Do not hand-edit.
    dependencies = [
        ('api', '0011_auto_20190912_1026'),
    ]
    operations = [
        migrations.AlterField(
            model_name='word',
            name='description',
            field=models.CharField(blank=True, max_length=512),
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,716 | LotteSuz/intseconds30 | refs/heads/master | /api/urls.py | from django.urls import path
from . import views
app_name = 'api'
urlpatterns = [
    # API root plus card lookup and the token-based game-session endpoints.
    path('', views.index, name='index'),
    path('card/<int:card_nr>', views.card_by_id, name='card'),
    path('get_token', views.get_token, name='get-token'),
    path('get_card', views.get_card, name='get-card'),
]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,717 | LotteSuz/intseconds30 | refs/heads/master | /api/management/commands/cleanup.py | from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from ...models import Session
class Command(BaseCommand):
    """Management command that purges play sessions inactive for over 6 hours."""
    help = "Removes session older than 6 hours"

    def handle(self, *args, **options):
        """Delete stale sessions and report how many were removed.

        BUG FIX: the original compared ``sessions_deleted is 1`` — identity
        comparison against an int literal, which is implementation-dependent
        (and a SyntaxWarning on modern Python); now ``== 1``.
        """
        threshold = timezone.now() - timedelta(hours=6)
        sessions = Session.objects.filter(last_activity__lt=threshold)
        # count() issues COUNT(*) instead of fetching every row like len() did.
        sessions_deleted = sessions.count()
        sessions.delete()
        if sessions_deleted > 1:
            self.stdout.write(self.style.SUCCESS(f"Removed {sessions_deleted} sessions"))
        elif sessions_deleted == 1:
            self.stdout.write(self.style.SUCCESS(f"Removed {sessions_deleted} session"))
        else:
            # Plain string: the original used an f-string with no placeholders.
            self.stdout.write(self.style.WARNING("No sessions to remove"))
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,718 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0006_auto_20190911_2031.py | # Generated by Django 2.2.5 on 2019-09-11 20:31
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the PlaySession model to Session."""

    # NOTE(review): presumably disabled because the table rename cannot run
    # inside a transaction on the target database backend — confirm.
    atomic = False

    dependencies = [
        ('api', '0005_auto_20190911_1941'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='PlaySession',
            new_name='Session',
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,719 | LotteSuz/intseconds30 | refs/heads/master | /api/views.py | from django.core.exceptions import ObjectDoesNotExist
from django.http import JsonResponse, HttpRequest, HttpResponse
from django.urls import reverse
from .models import Card, Session
def index(request):
    """Plain landing page for the API root."""
    body = "<pre>int seconds = 30;</pre>"
    return HttpResponse(body)
def card_by_id(request, card_nr: int):
    """Return card *card_nr*'s words (with descriptions) as JSON,
    or an error payload when no such card exists."""
    try:
        card = Card.objects.get(number=card_nr)
    except ObjectDoesNotExist:
        return JsonResponse({"error": f"Card {card_nr} does not exist!"})
    return JsonResponse({'card': card_nr, 'words': card.words_list_with_description})
def get_token(request):
    """Create a fresh play session and hand back its credentials,
    including a ready-made URL for drawing cards."""
    session = Session()
    session.save()
    card_url = f"{reverse('api:get-card')}?token={session.id}&secret={session.secret}"
    return JsonResponse({
        'token': session.id,
        'secret': session.secret,
        'get_card': card_url,
    })
def get_card(request: HttpRequest):
    """Serve a card as JSON.

    With a valid token/secret pair, deal a card the session has not seen;
    with neither, serve a random card; with only one of the two, reject.
    """
    token = request.GET.get('token')
    secret = request.GET.get('secret')
    have_token = bool(token)
    have_secret = bool(secret)

    if have_token != have_secret:
        # Exactly one of the pair supplied (XOR) -> reject.
        return JsonResponse({'error': 'No token or secret'})

    if not have_token:
        # Anonymous caller: just a random question.
        return JsonResponse(Card.random().json())

    # Verify token and secret match an existing session.
    try:
        session = Session.objects.get(id=token, secret=secret)
    except ObjectDoesNotExist:
        return JsonResponse({"error": "Token expired or invalid"})

    # None signals the session has already seen every card.
    card = session.get_card()
    if card:
        return JsonResponse(card.json())
    return JsonResponse({"error": "All cards have been used"})
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,720 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0015_auto_20190913_1051.py | # Generated by Django 2.2.5 on 2019-09-13 10:51
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Word.title to Word.word."""

    dependencies = [
        ('api', '0014_auto_20190913_1043'),
    ]

    operations = [
        migrations.RenameField(
            model_name='word',
            old_name='title',
            new_name='word',
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,721 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0016_auto_20191114_1943.py | # Generated by Django 2.2.7 on 2019-11-14 19:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Word's card/category/pack foreign keys optional (nullable,
    SET_NULL on delete) with a 'words' reverse accessor."""

    dependencies = [
        ('api', '0015_auto_20190913_1051'),
    ]

    operations = [
        migrations.AlterField(
            model_name='word',
            name='card',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='words', to='api.Card'),
        ),
        migrations.AlterField(
            model_name='word',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='words', to='api.Category'),
        ),
        migrations.AlterField(
            model_name='word',
            name='pack',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='words', to='api.Pack'),
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,722 | LotteSuz/intseconds30 | refs/heads/master | /api/migrations/0011_auto_20190912_1026.py | # Generated by Django 2.2.5 on 2019-09-12 10:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change Word.description to a blank-able TextField (max_length=200)."""

    dependencies = [
        ('api', '0010_auto_20190912_0856'),
    ]

    operations = [
        migrations.AlterField(
            model_name='word',
            name='description',
            field=models.TextField(blank=True, max_length=200),
        ),
    ]
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,723 | LotteSuz/intseconds30 | refs/heads/master | /game/views.py | from django.shortcuts import redirect, render
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from api.models import Session
max_rounds = 8
def index(request):
    """Show the team sign-up page that starts a new game."""
    return render(request, "game/teams.html")
@csrf_exempt
@require_http_methods(["POST"])
def seconds(request):
    """Run one turn of the "30 seconds" game.

    One POST endpoint handles three phases:
      1. lazily (re)initialises per-browser game state on round 0,
      2. records the points scored by the team whose turn just ended,
      3. either renders the final standings after ``max_rounds`` rounds,
         or deals the next card to the next player.

    Fixes over the previous version: removed the needless ``global
    max_rounds`` (it is only read), removed dead ``message`` assignments
    that were never rendered, and collapsed the two duplicated
    team-reading loops into one helper.
    """
    if 'round' not in request.session or request.session['round'] == 0:
        _start_new_game(request)

    # Read (and re-store) both team rosters from the posted form.
    team1_players = _read_team(request, 't1')
    if team1_players is None:
        return redirect('index')
    request.session['team1'] = team1_players

    team2_players = _read_team(request, 't2')
    if team2_players is None:
        return redirect('index')
    request.session['team2'] = team2_players

    # Words ticked off during the turn that just finished; one point each.
    hits = request.POST.getlist("words")
    round = request.session['round']
    points = len(hits)

    # Even rounds score for team 1, odd rounds for team 2; the next player
    # in the scoring team takes the following turn.
    if round % 2 == 0:
        request.session['scoreteam1'] += points
        if round != max_rounds:
            request.session['turn'] = request.session['team1'][int(round / 2)]
    else:
        request.session['scoreteam2'] += points
        if round != max_rounds:
            request.session['turn'] = request.session['team2'][int(round / 2)]

    # Game over: reset the session and show the final standings.
    if round == max_rounds:
        return _finish_game(request)

    # Deal an unseen card for this session straight from the model layer.
    token = request.session['token']
    secret = request.session['secret']
    player = request.session['turn']
    card = Session.objects.get(id__exact=token, secret__exact=secret).get_card().json()['words']
    request.session['round'] += 1
    return render(request, "game/seconds.html", {'card': card, 'player': player})


def _start_new_game(request):
    """Reset all per-session game state and attach a fresh API session."""
    request.session['round'] = 0
    request.session['player'] = 0
    request.session['scoreteam1'] = 0
    request.session['scoreteam2'] = 0
    request.session['team1'] = []
    request.session['team2'] = []
    api_session = Session()
    api_session.save()
    request.session['token'] = api_session.id
    request.session['secret'] = api_session.secret


def _read_team(request, prefix):
    """Return the four player names posted as ``<prefix>1``..``<prefix>4``,
    or None when any field was left blank (caller redirects back)."""
    players = []
    for i in range(1, 5):
        name = request.POST[f'{prefix}{i}']
        if name == '':
            return None
        players.append(name)
    return players


def _finish_game(request):
    """Clear the stored scores and render the end-of-game page."""
    team1_points = request.session['scoreteam1']
    team2_points = request.session['scoreteam2']
    request.session['round'] = 0
    request.session['scoreteam1'] = 0
    request.session['scoreteam2'] = 0
    request.session['turn'] = 0
    request.session['team1'] = []
    request.session['team2'] = []
    if team1_points == team2_points:
        return render(request, "game/end.html", {'tie': True})
    if team1_points > team2_points:
        winner_name, winner_points = 'Team 1', team1_points
        loser_name, loser_points = 'Team 2', team2_points
    else:
        winner_name, winner_points = 'Team 2', team2_points
        loser_name, loser_points = 'Team 1', team1_points
    return render(request, "game/end.html",
                  {'winner_name': winner_name, 'winner_points': winner_points,
                   'loser_name': loser_name, 'loser_points': loser_points})
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,724 | LotteSuz/intseconds30 | refs/heads/master | /api/models.py | import binascii
import os
from random import randint, shuffle
from django.db import models
from django.db.models import Max
class Card(models.Model):
    """A playing card: a numbered collection of Word rows (reverse FK
    accessor ``words``)."""

    number = models.fields.IntegerField(primary_key=True)

    def __str__(self):
        return f"Card {self.number} ({self.words.count()})"

    @property
    def words_list(self):
        """All word strings printed on this card."""
        return list(self.words.all().values_list('word', flat=True))

    @property
    def words_list_with_description(self):
        """List of {'word': ..., 'description': ...} dicts for this card."""
        return list(self.words.all().values('word', 'description'))

    @property
    def words_list_shuffled(self):
        """The card's words in random order."""
        words = self.words_list
        shuffle(words)
        return words

    def json(self):
        """JSON-serialisable representation served by the API."""
        return {'card': self.number, 'words': self.words_list_shuffled}

    @staticmethod
    def random():
        """Return a uniformly random existing card.

        BUG FIX: the previous implementation used ``objects.get``, which
        raises DoesNotExist whenever the card numbers have gaps, making its
        ``if card`` guard dead code. ``filter(...).first()`` yields None
        instead, so the loop retries until it hits an existing number.
        NOTE(review): still loops forever if no cards exist at all.
        """
        max_number = Card.objects.all().aggregate(max_id=Max("number"))['max_id']
        while True:
            pk = randint(1, max_number)
            card = Card.objects.filter(number=pk).first()
            if card:
                return card
class Category(models.Model):
    """A named grouping of words (see Word.category)."""

    name = models.fields.CharField(max_length=32)

    def __str__(self):
        return self.name

    class Meta:
        # Django's default pluralisation would be "Categorys".
        verbose_name_plural = "Categories"
class Pack(models.Model):
    """A named collection of words (see Word.pack)."""

    name = models.fields.CharField(max_length=32)

    def __str__(self):
        return self.name
class Word(models.Model):
    """A single guessable word, optionally bound to a card, category and pack."""

    word = models.fields.CharField(max_length=50)
    description = models.fields.CharField(blank=True, max_length=512)
    # All three FKs are optional and survive deletion of their target
    # (SET_NULL), so words are never cascade-deleted.
    category = models.ForeignKey(
        Category,
        related_name="words",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    card = models.ForeignKey(
        Card,
        related_name="words",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    pack = models.ForeignKey(
        Pack,
        related_name="words",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )

    def __str__(self):
        # e.g. "apple (fruit) @ base-pack"; empty parens/suffix when unset.
        return f"{self.word} ({self.category.name if self.category else ''}) @ {self.pack.name if self.pack else ''}"
def random_secret(length: int = 16) -> str:
    """Return a random lowercase-hex string of *length* characters.

    Draws length // 2 bytes from the OS CSPRNG and hex-encodes them, so an
    odd *length* yields one character fewer (same as the original).
    """
    return os.urandom(length // 2).hex()
class Session(models.Model):
    """A player's API session: remembers which cards have been dealt."""

    # Touched on every save so stale sessions can be culled (see the
    # cleanup management command, which removes sessions older than 6h).
    last_activity = models.fields.DateTimeField(auto_now=True)
    started = models.fields.DateTimeField(auto_now_add=True)
    # Shared secret the client must echo back alongside its token.
    secret = models.fields.CharField(max_length=16, default=random_secret)
    used_cards = models.ManyToManyField(Card, blank=True)

    def get_card(self):
        """Deal a card this session has not seen yet.

        Returns None once every card has been used; otherwise draws random
        cards until an unseen one turns up and records it as used.
        NOTE(review): rejection sampling slows down as the unseen pool
        shrinks — acceptable for small decks, confirm for large ones.
        """
        if Card.objects.count() == self.used_cards.count():
            return None
        while True:
            card = Card.random()
            if card not in self.used_cards.all():
                self.used_cards.add(card)
                return card
| {"/api/migrations/0003_playsession.py": ["/api/models.py"], "/api/admin.py": ["/api/models.py"], "/api/management/commands/cleanup.py": ["/api/models.py"], "/api/views.py": ["/api/models.py"], "/game/views.py": ["/api/models.py"]} |
71,750 | neeral/cs221-flappybird | refs/heads/master | /agent.py | from init import *
from game_objects import *
def bangbang(bird_y, pipe_y, bird_height):
    """Bang-bang controller: return 1 (climb) when the bird's vertical
    centre lies past the pipe target, 0 otherwise.

    NOTE(review): assumes screen coordinates where y grows downward, so a
    positive offset means the bird sits below the target — confirm.
    """
    centre_offset = (bird_y + bird_height / 2) - pipe_y
    if centre_offset > 0:
        return 1
    return 0
class FlappySearch:
    """Search-problem formulation of Flappy Bird: plan jump/stay actions
    that carry the bird across the window without colliding."""

    def __init__(self, start):
        self.start = start
        # Number of frames simulated per search step.
        self.delta_frames = 1

    # Return the start state.
    def startState(self):
        return self.start

    # Return whether |state| is a goal state or not.
    def isGoal(self, state):
        # Goal: the bird has reached the right edge of the window.
        return WIN_WIDTH <= state.bird.x

    def succAndCost(self, state):
        """Return a list of (action, newState, cost) edges leaving |state|.

        The previous version duplicated ~10 lines for the 'jump' and 'stay'
        branches; the only real difference is the climb time given to the
        simulated bird, so that choice is factored out here.
        """
        successors = []
        bird = state.bird
        for action in ('jump', 'stay'):
            # A jump restarts the climb timer; staying keeps the current one.
            climb = Bird.CLIMB_DURATION if action == 'jump' else bird.msec_to_climb
            newbird = Bird(bird.x, bird.y, climb,
                           (bird._img_wingup, bird._img_wingdown))
            newbird.update(self.delta_frames)  # only updates y-position
            newbird.x += ANIMATION_SPEED * frames_to_msec(self.delta_frames)
            newState = FlappyState(newbird, state.pipes)

            if newState.isCollide():
                # Collisions are modelled as infinitely expensive edges.
                successors.append((action, newState, float('Inf')))
                continue

            # Centre of the gap in the nearest pipe pair.
            gap_y = (state.pipes[0].top_pipe_end_y
                     + state.pipes[0].bottom_pipe_end_y) / 2
            # Step cost: squared distance moved, adjusted by the squared
            # distance to the gap before/after the move.
            # NOTE(review): looks like potential-based cost shaping toward
            # (WIN_WIDTH, gap_y); the ingoal() factors disable the term once
            # past the goal line — confirm intent.
            cost = (euclideanDistance_state(state, newState)
                    - ingoal(state.bird.x)
                    * euclideanDistance((state.bird.x, state.bird.y), (WIN_WIDTH, gap_y))
                    + ingoal(newState.bird.x)
                    * euclideanDistance((newState.bird.x, newState.bird.y), (WIN_WIDTH, gap_y)))
            successors.append((action, newState, cost))
        return successors
def ingoal(state_x):
    """Return 0 when *state_x* has crossed the goal line (WIN_WIDTH),
    1 otherwise — i.e. an indicator that the bird is still in play."""
    return 0 if state_x >= WIN_WIDTH else 1
def euclideanDistance_state(state1, state2):
    """Squared Euclidean distance between the bird positions of two states
    (note: squared, despite the name)."""
    dx = state1.bird.x - state2.bird.x
    dy = state1.bird.y - state2.bird.y
    return dx * dx + dy * dy
def euclideanDistance(x1, x2):
    """Squared Euclidean distance between two 2-D points given as
    (x, y) pairs (note: squared, despite the name)."""
    dx = x1[0] - x2[0]
    dy = x1[1] - x2[1]
    return dx * dx + dy * dy
class FlappyState:
    """Planner state: the bird plus the queue of upcoming pipe pairs."""

    def __init__(self, bird, pipes):
        self.bird = bird
        self.pipes = pipes  # queue of pipe pairs objects

    def isCollide(self):
        """Return True when the bird is off-screen vertically or its
        bounding box overlaps a pipe outside the gap."""
        x = self.bird.x
        y = self.bird.y
        if y <= 0 or y + Bird.HEIGHT >= WIN_HEIGHT:
            return True  # gone off the top or bottom of the screen
        for pipePair in self.pipes:
            # pipePair: bottom_pipe_end_y, top_pipe_end_y, x
            # PipePair.WIDTH
            # Horizontal overlap with the pipe column AND the bird poking
            # outside the vertical gap counts as a crash.
            if x + Bird.WIDTH >= pipePair.x and x <= pipePair.x + PipePair.WIDTH and \
                    (y <= pipePair.top_pipe_end_y or y + Bird.HEIGHT >= pipePair.bottom_pipe_end_y):
                return True  # collided with a pipe
        return False

    def __str__(self):
        return 'Bird({},{}) with {}ms to climb, {} pipes'.format(self.bird.x, self.bird.y, self.bird.msec_to_climb, len(self.pipes))

    def __lt__(self, other):
        # Constant ordering. NOTE(review): presumably only present so states
        # can be pushed onto a heap-based search frontier without raising a
        # comparison TypeError — confirm.
        return True
| {"/agent.py": ["/init.py"], "/QLearning.py": ["/init.py"], "/flappybird.py": ["/init.py", "/agent.py"]} |
71,751 | neeral/cs221-flappybird | refs/heads/master | /flappybirdQLscript.py | #! /usr/bin/env python3
"""Flappy Bird, implemented using Pygame."""
from init import *
from game_objects import *
import agent
import search
import QLearning
from collections import Counter
import time
import timeit
def main(maxGames, gamma, epsilon, bird_has_learned, q_values_counter):
    """Run *maxGames* games of Flappy Bird driven by tabular Q-learning.

    Args:
        maxGames: number of games to play.
        gamma: discount factor handed to the Q-value table.
        epsilon: initial exploration rate; decays as epsilon/(games+1),
            capped at 0.1, and is forced to 0.0 in replay mode.
        bird_has_learned: 1 to replay greedily with a pre-trained table
            (no learning updates), anything else to train from scratch.
        q_values_counter: pre-trained Q table used when bird_has_learned == 1.

    Returns:
        The learned Q table, so a training run can feed a replay run.
        BUG FIX: previously nothing was returned, so the caller's
        ``Qvalues = main(...)`` received None and the replay run crashed
        when that None was used as the Q table.

    Side effects: opens a pygame window per game and writes the score
    history plus the final Q table to ./q-attempt-auto-<gamma>-<epsilon>.txt.
    (Unused leftovers agent_y/agent_status/time_taken/ActionList/lastPipes
    from the search-agent script were removed.)
    """
    counter = 0  # games completed so far
    QL = QLearning.Qvalue(gamma)
    if bird_has_learned == 1:
        QL.Q = q_values_counter

    # Reward shaping constants.
    reward = 10         # surviving a sampled step
    reward_die = -1000  # terminal transition (crash)
    reward_pass = 1     # NOTE(review): currently unused
    reward_ingap = 200  # next state vertically close to the pipe gap

    scoreList = []  # score of every game
    avgScore = []   # running average score after each game

    filename_prefix = './q-attempt-auto-'
    filename = filename_prefix + str(gamma) + '-' + str(epsilon) + '.txt'
    f = open(filename, 'w+')

    pygame.init()
    while counter < maxGames:
        episode = []  # (state, action) pairs sampled during this game

        display_surface = pygame.display.set_mode((WIN_WIDTH, WIN_HEIGHT))
        pygame.display.set_caption('Pygame Flappy Bird')

        clock = pygame.time.Clock()
        score_font = pygame.font.SysFont(None, 32, bold=True)  # default font
        images = load_images()

        # the bird stays in the same x position, so bird.x is a constant
        # center bird on screen
        bird = Bird(50, int(WIN_HEIGHT/2 - Bird.HEIGHT/2), 2,
                    (images['bird-wingup'], images['bird-wingdown']))

        pipes = deque()
        nextPipes = deque()

        fcounter = 0     # frames seen; used to subsample decision points
        frame_clock = 0  # this counter is only incremented if the game isn't paused
        score = 0
        done = paused = False
        while not done:
            clock.tick(FPS)

            # Handle this 'manually'. If we used pygame.time.set_timer(),
            # pipe addition would be messed up when paused.
            if not (paused or frame_clock % msec_to_frames(PipePair.ADD_INTERVAL)):
                pp = PipePair(images['pipe-end'], images['pipe-body'])
                pipes.append(pp)
                nextPipes.append(pp)

            for e in pygame.event.get():
                if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
                    done = True
                    break
                elif e.type == KEYUP and e.key in (K_PAUSE, K_p):
                    paused = not paused
                elif e.type == MOUSEBUTTONUP or (e.type == KEYUP and
                        e.key in (K_UP, K_RETURN, K_SPACE)):
                    bird.msec_to_climb = Bird.CLIMB_DURATION

            # Q-learning agent: sample a decision every FPS/4 frames.
            if (fcounter % (FPS/4) == 0):
                newState = QLearning.QLState(bird, pipes)
                if bird_has_learned == 1:
                    # Replay mode: pure exploitation.
                    newAction = QLearning.epsilon_greedy(QL, 0.0, newState)
                else:
                    # Exploration decays with the number of games played.
                    newAction = QLearning.epsilon_greedy(
                        QL, min(0.1, epsilon/float(counter+1)), newState)
                if newAction == 'jump':
                    bird.msec_to_climb = Bird.CLIMB_DURATION
                episode.append((newState.short(), newAction))
            fcounter += 1

            if paused:
                continue  # don't draw anything

            # check for collisions
            pipe_collision = any(p.collides_with(bird) for p in pipes)
            if pipe_collision or 0 >= bird.y or bird.y >= WIN_HEIGHT - Bird.HEIGHT:
                done = True

            for x in (0, WIN_WIDTH / 2):
                display_surface.blit(images['background'], (x, 0))

            while pipes and not pipes[0].visible:
                pipes.popleft()

            for p in pipes:
                p.update()
                display_surface.blit(p.image, p.rect)

            bird.update()
            display_surface.blit(bird.image, bird.rect)

            # update and display score
            for p in pipes:
                if p.x + PipePair.WIDTH < bird.x and not p.score_counted:
                    score += 1
                    p.score_counted = True
                    nextPipes.popleft()

            score_surface = score_font.render(str(score), True, (255, 255, 255))
            score_x = WIN_WIDTH/2 - score_surface.get_width()/2
            display_surface.blit(score_surface, (score_x, PipePair.PIECE_HEIGHT))

            pygame.display.flip()
            frame_clock += 1

        # Learning phase: back up the sampled episode. States whose y-tile
        # lies in [0, 3] (near the gap) earn the bonus reward; the final
        # transition earns the crash penalty.
        if bird_has_learned != 1:
            # NOTE(review): assumes len(episode) >= 2; a game ending before
            # two decisions were sampled would index oddly — confirm.
            for i in range(len(episode)-2):
                if episode[i+1][0][1] >= 0 and episode[i+1][0][1] <= 3:
                    QL.update(episode[i][0], episode[i][1],
                              reward_ingap, episode[i+1][0], counter)
                else:
                    QL.update(episode[i][0], episode[i][1],
                              reward, episode[i+1][0], counter)
            QL.update(episode[len(episode)-2][0], episode[len(episode)-2][1],
                      reward_die, episode[len(episode)-1][0], counter)

        print('Game over! Score: %i\tnum states:%i\tnum games:%i' % (score, len(QL.Q), counter))
        counter += 1

        # Maintain the running average of all scores so far.
        if len(avgScore) == 0:
            avgScore.append(score)
        else:
            avgScore.append((avgScore[-1]*(counter-1) + score)/float(counter))
        scoreList.append(score)

    pygame.quit()
    print(scoreList)
    print(avgScore)
    f.write(str(avgScore))
    f.write('\n')
    f.write(str(scoreList))
    f.write('\n')
    f.write(str(QL.Q))
    f.write('\n')
    f.close()  # BUG FIX: file handle was previously never closed
    return QL.Q
if __name__ == '__main__':
    # If this module had been imported, __name__ would be 'flappybird'.
    # It was executed (e.g. by double-clicking the file), so call main:
    # first a training run, then a greedy replay with the learned table.
    maxGames = 2000
    gamma = 0.6
    epsilon = 0.8
    # BUG FIX: these were Python 2 `print` statements — a SyntaxError under
    # the file's python3 shebang.
    print('now running for training: main(maxGames=%d, gamma=%f, epsilon=%f, learning=0, None)' % (maxGames, gamma, epsilon))
    Qvalues = main(maxGames, gamma, epsilon, 0, None)
    print('now running for test: main(maxGames=%d, gamma=%f, epsilon=%f, learning=1, Qvalues)' % (maxGames, gamma, epsilon))
    main(maxGames, gamma, epsilon, 1, Qvalues)
| {"/agent.py": ["/init.py"], "/QLearning.py": ["/init.py"], "/flappybird.py": ["/init.py", "/agent.py"]} |
71,752 | neeral/cs221-flappybird | refs/heads/master | /init.py | #! /usr/bin/env python3
"""Flappy Bird, implemented using Pygame."""
import math
import os
from random import randint
from collections import deque
import pygame
from pygame.locals import *
# NOTE(review): unusually low frame rate — presumably to leave the planning
# agents time to think each frame; confirm before reuse.
FPS = 5
ANIMATION_SPEED = 0.18  # pixels per millisecond
WIN_WIDTH = 284 * 2     # BG image size: 284x512 px; tiled twice
WIN_HEIGHT = 512        # matches the background image height
| {"/agent.py": ["/init.py"], "/QLearning.py": ["/init.py"], "/flappybird.py": ["/init.py", "/agent.py"]} |
71,753 | neeral/cs221-flappybird | refs/heads/master | /QLearning.py | from init import *
from game_objects import *
from collections import Counter
import random
class QLState:
    """Discretised state for tabular Q-learning: the bird's offset from the
    first pipe pair, snapped onto a NUM_TILES_X x NUM_TILES_Y grid."""

    # Grid resolution used to discretise continuous pixel coordinates.
    NUM_TILES_X = 40
    NUM_TILES_Y = 40

    def __init__(self, bird, pipes):
        # Horizontal tile offset from the bird to the first pipe pair.
        self.x = int(pipes[0].x*QLState.NUM_TILES_X/float(WIN_WIDTH) - bird.x*QLState.NUM_TILES_X/float(WIN_WIDTH))
        # Vertical tile offset from the bird to the bottom pipe's gap edge.
        self.y = int(pipes[0].bottom_pipe_end_y*QLState.NUM_TILES_Y/float(WIN_HEIGHT) - bird.y*QLState.NUM_TILES_Y/float(WIN_HEIGHT))

    def short(self):
        """Hashable (x, y) tuple used as the Q-table key."""
        return (self.x,self.y)

    def __str__(self):
        return 'Bird({},{}) '.format(self.x, self.y)

    def __lt__(self, other):
        # Constant ordering. NOTE(review): presumably only needed so states
        # compare cleanly inside a priority queue — confirm.
        return True
class Qvalue:
    """Tabular Q-value store with an inverse-square-root learning-rate
    schedule. Actions are the strings 'jump' and 'stay'."""

    # Default discount factor, used when the constructor gets gamma=None.
    GAMMA = 1.0

    def __init__(self, gamma):
        # Q maps (state, action) -> value; Counter defaults unseen pairs to 0.
        self.Q = Counter()
        if gamma is not None:
            self.GAMMA = gamma  # instance attribute shadows the class default

    def update(self, state, action, reward, nextState, N):
        """One Q-learning backup for (state, action) given the observed
        reward and successor state; N is the episode index, so the
        learning rate decays as 1/sqrt(N+1)."""
        ETA = 1/math.sqrt(N+1)
        # BUG FIX: this previously read Qvalue.GAMMA (the class attribute,
        # always 1.0), silently ignoring the gamma passed to __init__.
        self.Q[(state, action)] = (1-ETA)*self.Q[(state, action)] + \
            ETA*(reward + self.GAMMA*max(self.Q[(nextState, 'jump')],
                                         self.Q[(nextState, 'stay')]))

    def policy(self, state):
        """Greedy action for *state*; ties resolve to 'stay'."""
        if self.Q[(state, 'jump')] > self.Q[(state, 'stay')]:
            return 'jump'
        return 'stay'
def epsilon_greedy(Qvalue, epsilon, state):
    """With probability *epsilon*, explore with a uniformly random action;
    otherwise exploit *Qvalue*'s greedy policy for the discretised state."""
    if random.uniform(0, 1) < epsilon:
        return random.choice(['jump', 'stay'])
    return Qvalue.policy(state.short())
| {"/agent.py": ["/init.py"], "/QLearning.py": ["/init.py"], "/flappybird.py": ["/init.py", "/agent.py"]} |
71,754 | neeral/cs221-flappybird | refs/heads/master | /flappybird.py | #! /usr/bin/env python3
"""Flappy Bird, implemented using Pygame."""
from init import *
from game_objects import *
import agent
import search
import time
import timeit
def main():
    """The application's entry point.

    If someone executes this module (instead of importing it, for
    example), this function is called.

    Runs one game in which a uniform-cost-search agent plans the bird's
    jump/stay actions, replanning whenever a new pipe appears or the
    current plan runs out.
    """
    pygame.init()

    display_surface = pygame.display.set_mode((WIN_WIDTH, WIN_HEIGHT))
    pygame.display.set_caption('Pygame Flappy Bird')

    clock = pygame.time.Clock()
    score_font = pygame.font.SysFont(None, 32, bold=True)  # default font
    images = load_images()

    # the bird stays in the same x position, so bird.x is a constant
    # center bird on screen
    bird = Bird(50, int(WIN_HEIGHT/2 - Bird.HEIGHT/2), 2,
                (images['bird-wingup'], images['bird-wingdown']))

    pipes = deque()
    nextPipes = deque()  # pipes the bird has not yet passed

    # agent_y / agent_status belong to the commented-out bang-bang
    # controller below; unused by the search agent.
    agent_y = None
    agent_status = True
    time_taken = []  # wall-clock seconds spent in each search call
    ActionList = []  # plan from the last search; one action consumed per frame
    lastPipes = 0    # pipe count at the time of the last replan

    frame_clock = 0  # this counter is only incremented if the game isn't paused
    score = 0
    done = paused = False
    while not done:
        clock.tick(FPS)

        # Handle this 'manually'. If we used pygame.time.set_timer(),
        # pipe addition would be messed up when paused.
        if not (paused or frame_clock % msec_to_frames(PipePair.ADD_INTERVAL)):
            pp = PipePair(images['pipe-end'], images['pipe-body'])
            pipes.append(pp)
            nextPipes.append(pp)

        for e in pygame.event.get():
            if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
                done = True
                break
            elif e.type == KEYUP and e.key in (K_PAUSE, K_p):
                paused = not paused
            elif e.type == MOUSEBUTTONUP or (e.type == KEYUP and
                    e.key in (K_UP, K_RETURN, K_SPACE)):
                bird.msec_to_climb = Bird.CLIMB_DURATION

        ############################### AGENT CODE ####################################################
        ########################################################################
        #### Bangbang controller
        ########################################################################
        # if agent_y != None and agent_y - bird.y > 10:
        #     agent_status = True
        #
        # if agent_y == None or agent_y - bird.y > 10 or agent_status:
        #     if agent.bangbang(bird.y, nextPipes[0].bottom_pipe_end_y, Bird.HEIGHT) == 1:
        #         bird.msec_to_climb = Bird.CLIMB_DURATION
        #     agent_y = bird.y
        #     agent_status = False
        ########################################################################
        #### UniformCostSearch
        ########################################################################
        # Replan when a new pipe appeared or the previous plan ran out.
        if len(pipes)-lastPipes >= 1 or len(ActionList) == 0:
            flappyProblem = agent.FlappySearch(agent.FlappyState(bird, pipes))
            ucs = search.UniformCostSearch()
            start_time = time.time()
            ucs.solve(flappyProblem)
            time_taken.append(time.time() - start_time)
            ActionList = ucs.actions
            # NOTE(review): only consumed by the commented-out path display.
            predState = ucs.optStates
        # Execute the next planned action.
        currAction = ActionList.pop(0)
        if currAction == 'jump':
            bird.msec_to_climb = Bird.CLIMB_DURATION
        lastPipes = len(pipes)
        ######################################################################################################

        if paused:
            continue  # don't draw anything

        # check for collisions
        pipe_collision = any(p.collides_with(bird) for p in pipes)
        if pipe_collision or 0 >= bird.y or bird.y >= WIN_HEIGHT - Bird.HEIGHT:
            done = True

        for x in (0, WIN_WIDTH / 2):
            display_surface.blit(images['background'], (x, 0))

        ############################## display predicted path ###################
        # for state in predState:
        #     display_surface.blit(state.bird.image,state.bird.rect)
        # predState.pop(0)
        ##########################################################################

        while pipes and not pipes[0].visible:
            pipes.popleft()

        for p in pipes:
            p.update()
            display_surface.blit(p.image, p.rect)

        bird.update()
        display_surface.blit(bird.image, bird.rect)

        # update and display score
        for p in pipes:
            if p.x + PipePair.WIDTH < bird.x and not p.score_counted:
                score += 1
                p.score_counted = True
                nextPipes.popleft()
                # Report cumulative search timing each time a pipe is cleared.
                print('Times: total=%fs from %i iterations. Average=%fs' % (sum(time_taken), len(time_taken), (sum(time_taken)/len(time_taken))))

        score_surface = score_font.render(str(score), True, (255, 255, 255))
        score_x = WIN_WIDTH/2 - score_surface.get_width()/2
        display_surface.blit(score_surface, (score_x, PipePair.PIECE_HEIGHT))

        pygame.display.flip()
        frame_clock += 1
    print('Game over! Score: %i' % score)
    pygame.quit()
if __name__ == '__main__':
    # If this module had been imported, __name__ would be 'flappybird'.
    # It was executed (e.g. by double-clicking the file), so call main.
    main()
| {"/agent.py": ["/init.py"], "/QLearning.py": ["/init.py"], "/flappybird.py": ["/init.py", "/agent.py"]} |
71,756 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/actors.py | """
homeassistant.actors
~~~~~~~~~~~~~~~~~~~~
This module provides actors that will react
to events happening within homeassistant.
"""
import os
import logging
from datetime import datetime, timedelta
import re
import requests
import homeassistant as ha
import homeassistant.util as util
from homeassistant.observers import (
STATE_CATEGORY_SUN, SUN_STATE_BELOW_HORIZON, SUN_STATE_ABOVE_HORIZON,
STATE_CATEGORY_ALL_DEVICES, DEVICE_STATE_HOME, DEVICE_STATE_NOT_HOME,
STATE_ATTRIBUTE_NEXT_SUN_SETTING)
# How long one light takes to fade in before sunset; also used as the
# stagger between consecutive lights (see LightTrigger).
LIGHT_TRANSITION_TIME = timedelta(minutes=15)

# Event types that actors in this module emit or listen for.
EVENT_DOWNLOAD_FILE = "download_file"
EVENT_BROWSE_URL = "browse_url"
EVENT_CHROMECAST_YOUTUBE_VIDEO = "chromecast.play_youtube_video"
EVENT_TURN_LIGHT_ON = "turn_light_on"
EVENT_TURN_LIGHT_OFF = "turn_light_off"
# Keyboard/media-key event types.
EVENT_KEYBOARD_VOLUME_UP = "keyboard.volume_up"
EVENT_KEYBOARD_VOLUME_DOWN = "keyboard.volume_down"
EVENT_KEYBOARD_VOLUME_MUTE = "keyboard.volume_mute"
EVENT_KEYBOARD_MEDIA_PLAY_PAUSE = "keyboard.media_play_pause"
EVENT_KEYBOARD_MEDIA_NEXT_TRACK = "keyboard.media_next_track"
EVENT_KEYBOARD_MEDIA_PREV_TRACK = "keyboard.media_prev_track"
def _hue_process_transition_time(transition_seconds):
""" Transition time is in 1/10th seconds
and cannot exceed MAX_TRANSITION_TIME. """
# Max transition time for Hue is 900 seconds/15 minutes
return min(9000, transition_seconds * 10)
# pylint: disable=too-few-public-methods
class LightTrigger(object):
    """ Class to turn on lights based on state of devices and the sun
    or triggered by light events. """

    def __init__(self, eventbus, statemachine, device_tracker, light_control):
        self.eventbus = eventbus
        self.statemachine = statemachine
        self.light_control = light_control

        self.logger = logging.getLogger(__name__)

        # Track home coming of each seperate device
        for category in device_tracker.device_state_categories:
            ha.track_state_change(eventbus, category,
                                  DEVICE_STATE_NOT_HOME, DEVICE_STATE_HOME,
                                  self._handle_device_state_change)

        # Track when all devices are gone to shut down lights
        ha.track_state_change(eventbus, STATE_CATEGORY_ALL_DEVICES,
                              DEVICE_STATE_HOME, DEVICE_STATE_NOT_HOME,
                              self._handle_device_state_change)

        # Track every time sun rises so we can schedule a time-based
        # pre-sun set event
        ha.track_state_change(eventbus, STATE_CATEGORY_SUN,
                              SUN_STATE_BELOW_HORIZON, SUN_STATE_ABOVE_HORIZON,
                              self._handle_sun_rising)

        # If the sun is already above horizon
        # schedule the time-based pre-sun set event
        if statemachine.is_state(STATE_CATEGORY_SUN, SUN_STATE_ABOVE_HORIZON):
            # The handler ignores its category/state arguments, so None is fine.
            self._handle_sun_rising(None, None, None)

        def handle_light_event(event):
            """ Handle a turn light on or off event. """
            light_id = event.data.get("light_id", None)
            transition_seconds = event.data.get("transition_seconds", None)

            if event.event_type == EVENT_TURN_LIGHT_ON:
                self.light_control.turn_light_on(light_id, transition_seconds)
            else:
                self.light_control.turn_light_off(light_id, transition_seconds)

        # Listen for light on and light off events
        eventbus.listen(EVENT_TURN_LIGHT_ON, handle_light_event)
        eventbus.listen(EVENT_TURN_LIGHT_OFF, handle_light_event)

    # pylint: disable=unused-argument
    def _handle_sun_rising(self, category, old_state, new_state):
        """The moment sun sets we want to have all the lights on.
        We will schedule to have each light start after one another
        and slowly transition in."""
        start_point = self._time_for_light_before_sun_set()

        def turn_on(light):
            """ Lambda can keep track of function parameters but not local
            parameters. If we put the lambda directly in the below statement
            only the last light would be turned on.. """
            return lambda now: self._turn_light_on_before_sunset(light)

        # Stagger the lights: each starts LIGHT_TRANSITION_TIME after the
        # previous one, so the last finishes fading in at sunset (see
        # _time_for_light_before_sun_set).
        for index, light_id in enumerate(self.light_control.light_ids):
            ha.track_time_change(self.eventbus, turn_on(light_id),
                                 point_in_time=(start_point +
                                                index * LIGHT_TRANSITION_TIME))

    def _turn_light_on_before_sunset(self, light_id=None):
        """ Helper function to turn on lights slowly if there
        are devices home and the light is not on yet. """
        if self.statemachine.is_state(STATE_CATEGORY_ALL_DEVICES,
                DEVICE_STATE_HOME) and not self.light_control.is_light_on(light_id):
            self.light_control.turn_light_on(light_id,
                                             LIGHT_TRANSITION_TIME.seconds)

    def _handle_device_state_change(self, category, old_state, new_state):
        """ Function to handle tracked device state changes. """
        lights_are_on = self.light_control.is_light_on()

        light_needed = (not lights_are_on and
                        self.statemachine.is_state(STATE_CATEGORY_SUN,
                                                   SUN_STATE_BELOW_HORIZON))

        # Specific device came home ?
        if (category != STATE_CATEGORY_ALL_DEVICES and
                new_state['state'] == DEVICE_STATE_HOME):

            # These variables are needed for the elif check
            now = datetime.now()
            start_point = self._time_for_light_before_sun_set()

            # Do we need lights?
            if light_needed:
                self.logger.info(
                    "Home coming event for {}. Turning lights on".
                    format(category))
                self.light_control.turn_light_on()

            # Are we in the time span were we would turn on the lights
            # if someone would be home?
            # Check this by seeing if current time is later then the point
            # in time when we would start putting the lights on.
            elif start_point < now < self._next_sun_setting():

                # Check for every light if it would be on if someone was home
                # when the fading in started and turn it on if so
                for index, light_id in enumerate(self.light_control.light_ids):
                    if now > start_point + index * LIGHT_TRANSITION_TIME:
                        self.light_control.turn_light_on(light_id)
                    else:
                        # If this light didn't happen to be turned on yet so
                        # will all the following then, break.
                        break

        # Did all devices leave the house?
        elif (category == STATE_CATEGORY_ALL_DEVICES and
                new_state['state'] == DEVICE_STATE_NOT_HOME and lights_are_on):
            self.logger.info(("Everyone has left but lights are on. "
                              "Turning lights off"))
            self.light_control.turn_light_off()

    def _next_sun_setting(self):
        """ Returns the datetime object representing the next sun setting. """
        state = self.statemachine.get_state(STATE_CATEGORY_SUN)
        return ha.str_to_datetime(
            state['attributes'][STATE_ATTRIBUTE_NEXT_SUN_SETTING])

    def _time_for_light_before_sun_set(self):
        """ Helper method to calculate the point in time we have to start
        fading in lights so that all the lights are on the moment the sun
        sets.
        """
        return (self._next_sun_setting() -
                LIGHT_TRANSITION_TIME * len(self.light_control.light_ids))
class HueLightControl(object):
    """ Class to interface with the Hue light system.

    Attributes set on successful init:
        bridge:       phue.Bridge connection.
        lights:       phue light objects known to the bridge.
        light_ids:    ids of those lights, used to address them via the API.
        success_init: False if the phue dependency could not be imported.
    """

    def __init__(self, host=None):
        try:
            import phue
        except ImportError:
            logging.getLogger(__name__).exception(
                "HueLightControl: Error while importing dependency phue.")

            self.success_init = False

            return

        self.bridge = phue.Bridge(host)
        self.lights = self.bridge.get_light_objects()
        self.light_ids = [light.light_id for light in self.lights]
        self.success_init = True

    def is_light_on(self, light_id=None):
        """ Returns if specified or all light are on. """
        if not light_id:
            # any() short-circuits on the first lit light instead of
            # counting all of them as the old sum(...) > 0 did.
            return any(light.on for light in self.lights)

        return self.bridge.get_light(light_id, 'on')

    def turn_light_on(self, light_id=None, transition_seconds=None):
        """ Turn the specified or all lights on. """
        self._send_command(light_id,
                           {'on': True, 'xy': [0.5119, 0.4147], 'bri': 164},
                           transition_seconds)

    def turn_light_off(self, light_id=None, transition_seconds=None):
        """ Turn the specified or all lights off. """
        self._send_command(light_id, {'on': False}, transition_seconds)

    def _send_command(self, light_id, command, transition_seconds):
        """ Send `command` to one light, or to all known lights when
        `light_id` is falsy, optionally adding a transition time.
        Shared by turn_light_on/turn_light_off. """
        if not light_id:
            light_id = self.light_ids

        if transition_seconds:
            command['transitiontime'] = \
                _hue_process_transition_time(transition_seconds)

        self.bridge.set_light(light_id, command)
def setup_file_downloader(eventbus, download_path):
    """ Listens for download events to download files.

    Registers a listener for EVENT_DOWNLOAD_FILE that fetches
    event.data['url'] into `download_path`. Returns False (and registers
    nothing) when the download directory does not exist, True otherwise.
    """
    logger = logging.getLogger(__name__)

    if not os.path.isdir(download_path):
        logger.error(
            ("FileDownloader:"
             "Download path {} does not exist. File Downloader not active.").
            format(download_path))
        return False

    def download_file(event):
        """ Downloads file specified in the url. """
        try:
            # stream=True so large files are written chunk by chunk below.
            # NOTE(review): the response is never explicitly closed and a
            # non-200 status is silently ignored — confirm that is intended.
            req = requests.get(event.data['url'], stream=True)
            if req.status_code == 200:
                filename = None

                # Prefer the server-supplied filename, if any.
                if 'content-disposition' in req.headers:
                    match = re.findall(r"filename=(\S+)",
                                       req.headers['content-disposition'])

                    if len(match) > 0:
                        filename = match[0].strip("'\" ")

                # Fall back to the last path component of the url.
                if not filename:
                    filename = os.path.basename(event.data['url']).strip()

                # Last resort: a fixed placeholder name.
                if not filename:
                    filename = "ha_download"

                # Remove stuff to ruin paths
                filename = util.sanitize_filename(filename)

                path, ext = os.path.splitext(os.path.join(download_path,
                                                          filename))

                # If file exist append a number. We test filename, filename_2..
                tries = 0
                while True:
                    tries += 1

                    name_suffix = "" if tries == 1 else "_{}".format(tries)
                    final_path = path + name_suffix + ext

                    if not os.path.isfile(final_path):
                        break

                logger.info("FileDownloader:{} -> {}".format(
                            event.data['url'], final_path))

                # Stream the body to disk in 1 KiB chunks.
                with open(final_path, 'wb') as fil:
                    for chunk in req.iter_content(1024):
                        fil.write(chunk)

        except requests.exceptions.ConnectionError:
            logger.exception("FileDownloader:ConnectionError occured for {}".
                             format(event.data['url']))

    eventbus.listen(EVENT_DOWNLOAD_FILE, download_file)

    return True
def setup_webbrowser(eventbus):
    """ Listen for browse_url events and open
        the url in the default webbrowser. """
    import webbrowser

    def open_url(event):
        """ Open the url carried by the event in the default browser. """
        webbrowser.open(event.data['url'])

    eventbus.listen(EVENT_BROWSE_URL, open_url)

    return True
def setup_chromecast(eventbus, host):
    """ Listen for chromecast events and play the matching YouTube video
    on the Chromecast at `host`. """
    from homeassistant.packages import pychromecast

    def play_fixed_video(video_id):
        """ Build a listener that always plays the given YouTube video. """
        return lambda event: pychromecast.play_youtube_video(host, video_id)

    eventbus.listen("start_fireplace", play_fixed_video("eyU3bRy2x44"))
    eventbus.listen("start_epic_sax", play_fixed_video("kxopViU98Xo"))

    # Generic event: the video id travels in the event data.
    eventbus.listen(EVENT_CHROMECAST_YOUTUBE_VIDEO,
                    lambda event:
                    pychromecast.play_youtube_video(host,
                                                    event.data['video']))

    return True
def setup_media_buttons(eventbus):
    """ Listen for keyboard events. """
    try:
        import pykeyboard
    except ImportError:
        logging.getLogger(__name__).exception(
            "MediaButtons: Error while importing dependency PyUserInput.")

        return False

    keyboard = pykeyboard.PyKeyboard()
    keyboard.special_key_assignment()

    # Map each bus event to the special key that should be tapped.
    event_to_key = (
        (EVENT_KEYBOARD_VOLUME_UP, keyboard.volume_up_key),
        (EVENT_KEYBOARD_VOLUME_DOWN, keyboard.volume_down_key),
        (EVENT_KEYBOARD_VOLUME_MUTE, keyboard.volume_mute_key),
        (EVENT_KEYBOARD_MEDIA_PLAY_PAUSE, keyboard.media_play_pause_key),
        (EVENT_KEYBOARD_MEDIA_NEXT_TRACK, keyboard.media_next_track_key),
        (EVENT_KEYBOARD_MEDIA_PREV_TRACK, keyboard.media_prev_track_key),
    )

    for event_type, special_key in event_to_key:
        # Bind the key as a default argument so each lambda keeps its own
        # key instead of the last value of the loop variable.
        eventbus.listen(event_type,
                        lambda event, key=special_key: keyboard.tap_key(key))

    return True
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,757 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/observers.py | """
homeassistant.observers
~~~~~~~~~~~~~~~~~~~~~~~
This module provides observers that can change the state or fire
events based on observations.
"""
import logging
import csv
import os
from datetime import datetime, timedelta
import threading
import re
import json
import requests
import homeassistant as ha
# Category and attribute names published by the sun tracker.
STATE_CATEGORY_SUN = "weather.sun"
STATE_ATTRIBUTE_NEXT_SUN_RISING = "next_rising"
STATE_ATTRIBUTE_NEXT_SUN_SETTING = "next_setting"

# Aggregate category: home if any tracked device is home.
STATE_CATEGORY_ALL_DEVICES = 'all_devices'
# Format string that turns a device name into its state category.
STATE_CATEGORY_DEVICE_FORMAT = '{}'

# Possible values for the sun state category.
SUN_STATE_ABOVE_HORIZON = "above_horizon"
SUN_STATE_BELOW_HORIZON = "below_horizon"

# Possible values for a tracked device's state category.
DEVICE_STATE_NOT_HOME = 'device_not_home'
DEVICE_STATE_HOME = 'device_home'

# After how much time do we consider a device not home if
# it does not show up on scans
TIME_SPAN_FOR_ERROR_IN_SCANNING = timedelta(minutes=1)

# Return cached results if last scan was less then this time ago
TOMATO_MIN_TIME_BETWEEN_SCANS = timedelta(seconds=5)

# Filename to save known devices to
KNOWN_DEVICES_FILE = "known_devices.csv"
def track_sun(eventbus, statemachine, latitude, longitude):
    """ Tracks the state of the sun.

    Publishes STATE_CATEGORY_SUN (above/below horizon) with the next
    rising and setting times as attributes, and re-schedules itself for
    the next change. Returns False if the ephem dependency is missing.
    """
    logger = logging.getLogger(__name__)

    try:
        import ephem
    except ImportError:
        logger.exception("TrackSun:Error while importing dependency ephem.")
        return False

    sun = ephem.Sun()  # pylint: disable=no-member

    def update_sun_state(now):  # pylint: disable=unused-argument
        """ Method to update the current state of the sun and
            set time of next setting and rising. """
        observer = ephem.Observer()
        observer.lat = latitude
        observer.long = longitude

        next_rising = ephem.localtime(observer.next_rising(sun))
        next_setting = ephem.localtime(observer.next_setting(sun))

        # If the next rising comes after the next setting, the sun is
        # currently up (it has to set before it can rise again).
        if next_rising > next_setting:
            new_state = SUN_STATE_ABOVE_HORIZON
            next_change = next_setting
        else:
            new_state = SUN_STATE_BELOW_HORIZON
            next_change = next_rising

        logger.info(
            "Sun:{}. Next change: {}".format(new_state,
                                             next_change.strftime("%H:%M")))

        state_attributes = {
            STATE_ATTRIBUTE_NEXT_SUN_RISING: ha.datetime_to_str(next_rising),
            STATE_ATTRIBUTE_NEXT_SUN_SETTING: ha.datetime_to_str(next_setting)
        }

        statemachine.set_state(STATE_CATEGORY_SUN, new_state, state_attributes)

        # Re-schedule this very function for the next sun change.
        # +10 seconds to be sure that the change has occured
        ha.track_time_change(eventbus, update_sun_state,
                             point_in_time=next_change + timedelta(seconds=10))

    # Prime the state immediately at setup time.
    update_sun_state(None)

    return True
class DeviceTracker(object):
    """ Class that tracks which devices are home and which are not.

    Reads tracked devices from KNOWN_DEVICES_FILE, rescans via
    `device_scanner` on every time tick and mirrors each device's
    home/not-home status into the state machine, plus an aggregate
    STATE_CATEGORY_ALL_DEVICES state.
    """

    def __init__(self, eventbus, statemachine, device_scanner):
        self.statemachine = statemachine
        self.eventbus = eventbus
        self.device_scanner = device_scanner
        self.logger = logging.getLogger(__name__)

        self.lock = threading.Lock()

        # Dictionary to keep track of known devices and devices we track
        self.known_devices = {}

        # Did we encounter an invalid known devices file
        self.invalid_known_devices_file = False

        self._read_known_devices_file()

        if len(self.device_state_categories) == 0:
            self.logger.warning(
                "No devices to track. Please update {}.".format(
                    KNOWN_DEVICES_FILE))

        # Rescan the network on every time_changed event.
        ha.track_time_change(eventbus,
                             lambda time:
                             self.update_devices(
                                 device_scanner.scan_devices()))

    def _read_known_devices_file(self):
        """ Populate self.known_devices from KNOWN_DEVICES_FILE.

        A malformed file sets self.invalid_known_devices_file so that
        update_devices will not append new devices to it. """
        if not os.path.isfile(KNOWN_DEVICES_FILE):
            return

        with open(KNOWN_DEVICES_FILE) as inp:
            default_last_seen = datetime(1990, 1, 1)

            # Temp variable to keep track of which categories we use
            # so we can ensure we have unique categories.
            used_categories = []

            try:
                for row in csv.DictReader(inp):
                    device = row['device']

                    row['track'] = True if row['track'] == '1' else False

                    # If we track this device setup tracking variables
                    if row['track']:
                        row['last_seen'] = default_last_seen

                        # Make sure that each device is mapped
                        # to a unique category name
                        name = row['name']

                        if not name:
                            name = "unnamed_device"

                        tries = 0
                        suffix = ""
                        while True:
                            tries += 1

                            if tries > 1:
                                suffix = "_{}".format(tries)

                            category = STATE_CATEGORY_DEVICE_FORMAT.format(
                                name + suffix)

                            if category not in used_categories:
                                break

                        row['category'] = category
                        used_categories.append(category)

                    self.known_devices[device] = row

            except KeyError:
                # BUGFIX: this must be True (it was assigned False, a
                # no-op since the attribute is initialised to False),
                # otherwise a file we failed to parse would still be
                # appended to by update_devices.
                self.invalid_known_devices_file = True
                self.logger.warning(
                    ("Invalid {} found. "
                     "We won't update it with new found devices.").
                    format(KNOWN_DEVICES_FILE))

    @property
    def device_state_categories(self):
        """ Returns a list containing all categories
        that are maintained for devices. """
        return [self.known_devices[device]['category'] for device
                in self.known_devices if self.known_devices[device]['track']]

    def update_devices(self, found_devices):
        """ Update device states based on the found devices. """
        # `with` releases the lock even if a state update raises; the old
        # acquire()/release() pair would have left it held forever.
        with self.lock:
            temp_tracking_devices = [
                device for device in self.known_devices
                if self.known_devices[device]['track']]

            for device in found_devices:
                # Are we tracking this device?
                if device in temp_tracking_devices:
                    temp_tracking_devices.remove(device)

                    self.known_devices[device]['last_seen'] = datetime.now()

                    self.statemachine.set_state(
                        self.known_devices[device]['category'],
                        DEVICE_STATE_HOME)

            # For all devices we did not find, set state to NH
            # But only if they have been gone for longer then the error
            # time span, because we do not want to have stuff happening
            # when the device does not show up for 1 scan because of
            # reboot etc.
            for device in temp_tracking_devices:
                if (datetime.now() - self.known_devices[device]['last_seen'] >
                   TIME_SPAN_FOR_ERROR_IN_SCANNING):

                    self.statemachine.set_state(
                        self.known_devices[device]['category'],
                        DEVICE_STATE_NOT_HOME)

            # Get the currently used statuses
            states_of_devices = [
                self.statemachine.get_state(category)['state']
                for category in self.device_state_categories]

            # Aggregate state: home as soon as any single device is home.
            all_devices_state = (DEVICE_STATE_HOME if DEVICE_STATE_HOME
                                 in states_of_devices
                                 else DEVICE_STATE_NOT_HOME)

            self.statemachine.set_state(STATE_CATEGORY_ALL_DEVICES,
                                        all_devices_state)

            # If we come along any unknown devices we will write them to the
            # known devices file but only if we did not encounter an invalid
            # known devices file
            if not self.invalid_known_devices_file:
                self._append_unknown_devices(found_devices)

    def _append_unknown_devices(self, found_devices):
        """ Append devices we have not seen before to KNOWN_DEVICES_FILE
        with tracking disabled, so the user can opt in later. """
        unknown_devices = [device for device in found_devices
                           if device not in self.known_devices]

        if len(unknown_devices) == 0:
            return

        try:
            # If file does not exist we will write the header too
            is_new_file = not os.path.isfile(KNOWN_DEVICES_FILE)

            with open(KNOWN_DEVICES_FILE, 'a') as outp:
                self.logger.info((
                    "DeviceTracker:Found {} new devices,"
                    " updating {}").format(len(unknown_devices),
                                           KNOWN_DEVICES_FILE))

                writer = csv.writer(outp)

                if is_new_file:
                    writer.writerow(("device", "name", "track"))

                for device in unknown_devices:
                    # See if the device scanner knows the name
                    temp_name = \
                        self.device_scanner.get_device_name(device)

                    name = temp_name if temp_name else "unknown_device"

                    writer.writerow((device, name, 0))
                    self.known_devices[device] = {'name': name,
                                                  'track': False}

        except IOError:
            self.logger.exception((
                "DeviceTracker:Error updating {}"
                "with {} new devices").format(
                KNOWN_DEVICES_FILE, len(unknown_devices)))
class TomatoDeviceScanner(object):
    """ This class queries a wireless router running Tomato firmware
    for connected devices.

    A description of the Tomato API can be found on
    http://paulusschoutsen.nl/blog/2013/10/tomato-api-documentation/
    """

    def __init__(self, host, username, password, http_id):
        # Prepare the POST request once; it is re-sent on every scan.
        self.req = requests.Request('POST',
                                    'http://{}/update.cgi'.format(host),
                                    data={'_http_id': http_id,
                                          'exec': 'devlist'},
                                    auth=requests.auth.HTTPBasicAuth(
                                        username, password)).prepare()

        # The Tomato response is a series of "name = value;" assignments.
        self.parse_api_pattern = re.compile(r"(?P<param>\w*) = (?P<value>.*);")

        self.logger = logging.getLogger(__name__)
        self.lock = threading.Lock()

        # Timestamp of the last successful scan; None means never scanned.
        self.date_updated = None
        # Cached parse results: connected wireless devices and DHCP leases.
        self.last_results = {"wldev": [], "dhcpd_lease": []}

        # Do an initial scan so callers can check success_init.
        self.success_init = self._update_tomato_info()

    def scan_devices(self):
        """ Scans for new devices and return a
            list containing found device ids (MAC addresses). """
        self._update_tomato_info()

        return [item[1] for item in self.last_results['wldev']]

    def get_device_name(self, device):
        """ Returns the name of the given device or None if we don't know. """
        # Make sure there are results
        if not self.date_updated:
            self._update_tomato_info()

        filter_named = [item[0] for item in self.last_results['dhcpd_lease']
                        if item[2] == device]

        if len(filter_named) == 0 or filter_named[0] == "":
            return None
        else:
            return filter_named[0]

    def _update_tomato_info(self):
        """ Ensures the information from the Tomato router is up to date.
            Returns boolean if scanning successful.

        Results are cached: a scan is skipped when the last one was less
        than TOMATO_MIN_TIME_BETWEEN_SCANS ago.

        NOTE(review): an unexpected status code (anything other than
        200/401) falls out of the try block without a return statement,
        so the method returns None (falsy) in that case.
        """
        self.lock.acquire()

        # if date_updated is None or the date is too old we scan for new data
        if (not self.date_updated or datetime.now() - self.date_updated >
           TOMATO_MIN_TIME_BETWEEN_SCANS):

            self.logger.info("Tomato:Scanning")

            try:
                response = requests.Session().send(self.req, timeout=3)

                # Calling and parsing the Tomato api here. We only need the
                # wldev and dhcpd_lease values. For API description see:
                # http://paulusschoutsen.nl/
                # blog/2013/10/tomato-api-documentation/
                if response.status_code == 200:

                    for param, value in \
                            self.parse_api_pattern.findall(response.text):

                        if param == 'wldev' or param == 'dhcpd_lease':
                            # Tomato emits single-quoted pseudo-JSON.
                            self.last_results[param] = \
                                json.loads(value.replace("'", '"'))

                    self.date_updated = datetime.now()

                    return True

                elif response.status_code == 401:
                    # Authentication error
                    self.logger.exception((
                        "Tomato:Failed to authenticate, "
                        "please check your username and password"))

                    return False

            except requests.exceptions.ConnectionError:
                # We get this if we could not connect to the router or
                # an invalid http_id was supplied
                self.logger.exception((
                    "Tomato:Failed to connect to the router"
                    " or invalid http_id supplied"))

                return False

            except requests.exceptions.Timeout:
                # We get this if we could not connect to the router or
                # an invalid http_id was supplied
                self.logger.exception(
                    "Tomato:Connection to the router timed out")

                return False

            except ValueError:
                # If json decoder could not parse the response
                self.logger.exception(
                    "Tomato:Failed to parse response from router")

                return False

            finally:
                # Released here for every exit path of the scan branch.
                self.lock.release()

        else:
            # We acquired the lock before the IF check,
            # release it before we return True
            self.lock.release()

            return True
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,758 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/__init__.py | """
homeassistant
~~~~~~~~~~~~~
Module to control the lights based on devices at home and the state of the sun.
"""
import time
import logging
import threading
from collections import defaultdict, namedtuple
from datetime import datetime
# Configure root logging once, at import time.
logging.basicConfig(level=logging.INFO)

# Wildcard used to listen for, or match against, every event/value.
ALL_EVENTS = '*'

# Core lifecycle and bus event types.
EVENT_HOMEASSISTANT_START = "homeassistant.start"
EVENT_HOMEASSISTANT_STOP = "homeassistant.stop"
EVENT_STATE_CHANGED = "state_changed"
EVENT_TIME_CHANGED = "time_changed"

TIMER_INTERVAL = 10  # seconds

# We want to be able to fire every time a minute starts (seconds=0).
# We want this so other modules can use that to make sure they fire
# every minute.
assert 60 % TIMER_INTERVAL == 0, "60 % TIMER_INTERVAL should be 0!"

# Canonical string format used when (de)serializing datetimes on the bus.
DATE_STR_FORMAT = "%H:%M:%S %d-%m-%Y"
def start_home_assistant(eventbus):
    """ Start home assistant.

    Starts the timer, fires EVENT_HOMEASSISTANT_START and then blocks
    until EVENT_HOMEASSISTANT_STOP is fired or Ctrl-C is pressed.
    """
    request_shutdown = threading.Event()

    eventbus.listen_once(EVENT_HOMEASSISTANT_STOP,
                         lambda event: request_shutdown.set())

    Timer(eventbus)

    eventbus.fire(EVENT_HOMEASSISTANT_START)

    while True:
        try:
            time.sleep(1)

            # is_set() instead of the deprecated camelCase alias isSet()
            if request_shutdown.is_set():
                break
        except KeyboardInterrupt:
            break
def datetime_to_str(dattim):
    """ Render a datetime in the canonical bus string format.

    @rtype : str
    """
    # format() delegates to datetime.__format__, i.e. strftime.
    return format(dattim, DATE_STR_FORMAT)
def str_to_datetime(dt_str):
    """ Parse a canonical-format string back into a datetime object.

    @rtype: datetime
    """
    parsed = datetime.strptime(dt_str, DATE_STR_FORMAT)

    return parsed
def ensure_list(parameter):
    """ Wraps parameter in a list if it is not one and returns it.

    @rtype : list
    """
    if isinstance(parameter, list):
        return parameter

    return [parameter]
def matcher(subject, pattern):
    """ Returns True if subject matches the pattern.

    Pattern is either a list of allowed subjects or a '*'.
    @rtype : bool
    """
    if '*' in pattern:
        return True

    return subject in pattern
def create_state(state, attributes=None, last_changed=None):
    """ Creates a new state and initializes defaults where necessary. """
    if not attributes:
        attributes = {}
    if not last_changed:
        last_changed = datetime.now()

    return {
        'state': state,
        'attributes': attributes,
        'last_changed': datetime_to_str(last_changed),
    }
def track_state_change(eventbus, category, from_state, to_state, action):
    """ Helper method to track specific state changes. """
    from_state = ensure_list(from_state)
    to_state = ensure_list(to_state)

    def listener(event):
        """ State change listener that listens for specific state changes. """
        data = event.data

        # Guard clauses: bail out on the first non-matching condition.
        if category != data['category']:
            return

        if not matcher(data['old_state']['state'], from_state):
            return

        if not matcher(data['new_state']['state'], to_state):
            return

        action(data['category'], data['old_state'], data['new_state'])

    eventbus.listen(EVENT_STATE_CHANGED, listener)
# pylint: disable=too-many-arguments
def track_time_change(eventbus, action,
                      year='*', month='*', day='*',
                      hour='*', minute='*', second='*',
                      point_in_time=None, listen_once=False):
    """ Adds a listener that will listen for a specified or matching time.

    Two modes:
      * point_in_time given: fire `action` once, on the first time_changed
        event strictly after that moment, then remove the listener.
      * otherwise: fire on every time_changed event whose fields match the
        given values/lists/wildcards; remove after the first fire only when
        listen_once is set.
    """
    year, month, day = ensure_list(year), ensure_list(month), ensure_list(day)
    hour, minute = ensure_list(hour), ensure_list(minute)
    second = ensure_list(second)

    def listener(event):
        """ Listens for matching time_changed events. """
        now = str_to_datetime(event.data['now'])

        if (point_in_time and now > point_in_time) or \
           (not point_in_time and
                matcher(now.year, year) and
                matcher(now.month, month) and
                matcher(now.day, day) and
                matcher(now.hour, hour) and
                matcher(now.minute, minute) and
                matcher(now.second, second)):

            # Remove ourselves BEFORE calling action so a raising action
            # cannot cause a one-shot listener to fire twice.
            # point_in_time are exact points in time
            # so we always remove it after fire
            if listen_once or point_in_time:
                event.eventbus.remove_listener(EVENT_TIME_CHANGED, listener)

            action(now)

    eventbus.listen(EVENT_TIME_CHANGED, listener)
# Immutable container handed to every listener: the bus that fired the
# event, the event type string and its data dict.
Event = namedtuple("Event", ["eventbus", "event_type", "data"])
class EventBus(object):
    """ Class that allows code to listen for- and fire events. """

    def __init__(self):
        # Maps event_type -> list of listener callables.
        self._listeners = defaultdict(list)
        self.logger = logging.getLogger(__name__)

    @property
    def listeners(self):
        """ Dict mapping each event type to its number of listeners;
        types without listeners are omitted. """
        return {key: len(self._listeners[key])
                for key in self._listeners.keys()
                if len(self._listeners[key]) > 0}

    def fire(self, event_type, event_data=None):
        """ Fire an event.

        Listeners run asynchronously on a fresh thread; an exception in
        one listener is logged and does not stop the others. """
        if not event_data:
            event_data = {}

        self.logger.info("EventBus:Event {}: {}".format(
            event_type, event_data))

        def run():
            """ Fire listeners for event. """
            event = Event(self, event_type, event_data)

            # We do not use itertools.chain() because some listeners might
            # choose to remove themselves as a listener while being executed
            for listener in self._listeners[ALL_EVENTS] + \
                    self._listeners[event.event_type]:
                try:
                    listener(event)

                except Exception:  # pylint: disable=broad-except
                    self.logger.exception("EventBus:Exception in listener")

        # We dont want the eventbus to be blocking - run in a thread.
        threading.Thread(target=run).start()

    def listen(self, event_type, listener):
        """ Listen for all events or events of a specific type.

        To listen to all events specify the constant ``ALL_EVENTS``
        as event_type.
        """
        self._listeners[event_type].append(listener)

    def listen_once(self, event_type, listener):
        """ Listen once for event of a specific type.

        To listen to all events specify the constant ``ALL_EVENTS``
        as event_type.

        Note: at the moment it is impossible to remove a one time listener.
        """
        def onetime_listener(event):
            """ Removes listener from eventbus and then fires listener. """
            self.remove_listener(event_type, onetime_listener)
            listener(event)

        self.listen(event_type, onetime_listener)

    def remove_listener(self, event_type, listener):
        """ Removes a listener of a specific event_type. """
        try:
            self._listeners[event_type].remove(listener)

            # Drop the key entirely when no listeners remain so the
            # `listeners` property stays accurate.
            if len(self._listeners[event_type]) == 0:
                del self._listeners[event_type]

        except ValueError:
            # Listener was not registered for this event type; ignore.
            pass
class StateMachine(object):
    """ Helper class that tracks the state of different categories.

    State changes are broadcast as EVENT_STATE_CHANGED on the eventbus.
    """

    def __init__(self, eventbus):
        self.states = dict()
        self.eventbus = eventbus
        self.lock = threading.Lock()

    @property
    def categories(self):
        """ List of categories which states are being tracked. """
        return self.states.keys()

    def set_state(self, category, new_state, attributes=None):
        """ Set the state of a category, add category if it does not exist.

        Attributes is an optional dict to specify attributes of this state.
        Fires EVENT_STATE_CHANGED only when state or attributes actually
        changed. """
        attributes = attributes or {}

        # `with` guarantees the lock is released even if create_state or
        # eventbus.fire raises; the old acquire()/release() pair would
        # have left the lock held forever in that case.
        with self.lock:
            if category not in self.states:
                # Add category if it does not exist
                self.states[category] = create_state(new_state, attributes)

            else:
                # Change state and fire listeners
                old_state = self.states[category]

                if old_state['state'] != new_state or \
                   old_state['attributes'] != attributes:

                    self.states[category] = create_state(
                        new_state, attributes)

                    self.eventbus.fire(
                        EVENT_STATE_CHANGED,
                        {'category': category,
                         'old_state': old_state,
                         'new_state': self.states[category]})

    def get_state(self, category):
        """ Returns a dict (state, last_changed, attributes) describing
        the state of the specified category, or None if unknown. """
        if category not in self.states:
            return None

        # Make a copy so people won't accidently mutate the state
        return dict(self.states[category])

    def is_state(self, category, state):
        """ Returns True if category exists and is specified state. """
        cur_state = self.get_state(category)

        return cur_state and cur_state['state'] == state
class Timer(threading.Thread):
    """ Timer will sent out an event every TIMER_INTERVAL seconds. """

    def __init__(self, eventbus):
        threading.Thread.__init__(self)

        # Daemon thread: must not keep the process alive at shutdown.
        self.daemon = True
        self.eventbus = eventbus

        # Only start ticking once home assistant itself has started.
        eventbus.listen_once(EVENT_HOMEASSISTANT_START,
                             lambda event: self.start())

    def run(self):
        """ Start the timer. """

        logging.getLogger(__name__).info("Timer:starting")

        # Second-of-minute we last fired on; -1 means never fired.
        last_fired_on_second = -1

        while True:
            # Sleep till it is the next time that we have to fire an event.
            # Aim for halfway through the second that matches TIMER_INTERVAL.
            # So if TIMER_INTERVAL is 10 fire at .5, 10.5, 20.5, etc seconds.
            # This will yield the best results because time.sleep() is not
            # 100% accurate because of non-realtime OS's
            now = datetime.now()

            if now.second % TIMER_INTERVAL > 0 or \
               now.second == last_fired_on_second:

                slp_seconds = TIMER_INTERVAL - now.second % TIMER_INTERVAL + \
                    .5 - now.microsecond/1000000.0

                time.sleep(slp_seconds)

                now = datetime.now()

            # Remember the second we fired on so we never fire twice
            # within the same matching second.
            last_fired_on_second = now.second

            self.eventbus.fire(EVENT_TIME_CHANGED,
                               {'now': datetime_to_str(now)})
class HomeAssistantException(Exception):
    """ General Home Assistant exception occurred. """
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,759 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/remote.py | """
homeassistant.remote
~~~~~~~~~~~~~~~~~~~~
A module containing drop in replacements for core parts that will interface
with a remote instance of home assistant.
If a connection error occurs while communicating with the API a
HomeAssistantException will be raised.
"""
import threading
import logging
import json
import urlparse
import requests
import homeassistant as ha
import homeassistant.httpinterface as hah
# HTTP verbs used when talking to the remote Home Assistant API.
METHOD_GET = "get"
METHOD_POST = "post"
def _setup_call_api(host, port, api_password):
    """ Helper method to setup a call api method.

    Returns a closure that performs authenticated requests against the
    remote Home Assistant instance at host:port. """
    port = port or hah.SERVER_PORT

    api_root = "http://{}:{}".format(host, port)

    def _call_api(method, path, data=None):
        """ Makes a call to the Home Assistant api. """
        payload = data or {}
        payload['api_password'] = api_password

        url = urlparse.urljoin(api_root, path)

        if method == METHOD_GET:
            return requests.get(url, params=payload)

        return requests.request(method, url, data=payload)

    return _call_api
class EventBus(ha.EventBus):
    """ Drop-in replacement for a normal eventbus that will forward events to
    a remote eventbus.
    """

    def __init__(self, host, api_password, port=None):
        ha.EventBus.__init__(self)

        self._call_api = _setup_call_api(host, port, api_password)

        self.logger = logging.getLogger(__name__)

    @property
    def listeners(self):
        """ List of events that is being listened for on the remote bus.

        Raises HomeAssistantException on connection problems or an
        unparseable/unexpected response. """
        try:
            req = self._call_api(METHOD_GET, hah.URL_API_EVENTS)

            if req.status_code == 200:
                data = req.json()

                return data['listeners']
            else:
                raise ha.HomeAssistantException(
                    "Got unexpected result (3): {}.".format(req.text))

        except requests.exceptions.ConnectionError:
            self.logger.exception("EventBus:Error connecting to server")
            raise ha.HomeAssistantException("Error connecting to server")

        except ValueError:  # If req.json() can't parse the json
            self.logger.exception("EventBus:Got unexpected result")
            raise ha.HomeAssistantException(
                "Got unexpected result: {}".format(req.text))

        except KeyError:  # If not all expected keys are in the returned JSON
            self.logger.exception("EventBus:Got unexpected result (2)")
            raise ha.HomeAssistantException(
                "Got unexpected result (2): {}".format(req.text))

    def fire(self, event_type, event_data=None):
        """ Fire an event on the remote bus.

        The event data is JSON-encoded into the POST body. Connection
        errors are logged, not raised; a non-200 response raises. """
        data = {'event_data': json.dumps(event_data)} if event_data else None

        try:
            req = self._call_api(METHOD_POST,
                                 hah.URL_API_EVENTS_EVENT.format(event_type),
                                 data)

            if req.status_code != 200:
                error = "Error firing event: {} - {}".format(
                    req.status_code, req.text)

                self.logger.error("EventBus:{}".format(error))
                raise ha.HomeAssistantException(error)

        except requests.exceptions.ConnectionError:
            self.logger.exception("EventBus:Error connecting to server")

    def listen(self, event_type, listener):
        """ Not implemented for remote eventbus.

        Will throw NotImplementedError. """
        raise NotImplementedError

    def remove_listener(self, event_type, listener):
        """ Not implemented for remote eventbus.

        Will throw NotImplementedError. """
        raise NotImplementedError
class StateMachine(ha.StateMachine):
    """ Drop-in replacement for a normal statemachine that communicates with a
    remote statemachine.
    """

    def __init__(self, host, api_password, port=None):
        # Base class gets no eventbus: all state lives on the remote side.
        ha.StateMachine.__init__(self, None)

        self._call_api = _setup_call_api(host, port, api_password)

        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    @property
    def categories(self):
        """ List of categories which states are being tracked.

        Returns an empty list on any communication or parse error. """
        try:
            req = self._call_api(METHOD_GET, hah.URL_API_STATES)

            return req.json()['categories']

        except requests.exceptions.ConnectionError:
            self.logger.exception("StateMachine:Error connecting to server")
            return []

        except ValueError:  # If req.json() can't parse the json
            self.logger.exception("StateMachine:Got unexpected result")
            return []

        except KeyError:  # If 'categories' key not in parsed json
            self.logger.exception("StateMachine:Got unexpected result (2)")
            return []

    def set_state(self, category, new_state, attributes=None):
        """ Set the state of a category, add category if it does not exist.

        Attributes is an optional dict to specify attributes of this state.
        Raises HomeAssistantException unless the server answers 201. """
        attributes = attributes or {}

        self.lock.acquire()

        data = {'new_state': new_state,
                'attributes': json.dumps(attributes)}

        try:
            req = self._call_api(METHOD_POST,
                                 hah.URL_API_STATES_CATEGORY.format(category),
                                 data)

            if req.status_code != 201:
                error = "Error changing state: {} - {}".format(
                    req.status_code, req.text)

                self.logger.error("StateMachine:{}".format(error))
                raise ha.HomeAssistantException(error)

        except requests.exceptions.ConnectionError:
            self.logger.exception("StateMachine:Error connecting to server")
            raise ha.HomeAssistantException("Error connecting to server")

        finally:
            # Always release, whatever the request outcome.
            self.lock.release()

    def get_state(self, category):
        """ Returns a dict (state, last_changed, attributes) describing
        the state of the specified category, or None when the server
        reports the category does not exist (HTTP 422). """
        try:
            req = self._call_api(METHOD_GET,
                                 hah.URL_API_STATES_CATEGORY.format(category))

            if req.status_code == 200:
                data = req.json()

                return ha.create_state(data['state'], data['attributes'],
                                       ha.str_to_datetime(
                                           data['last_changed']))

            elif req.status_code == 422:
                # Category does not exist
                return None

            else:
                raise ha.HomeAssistantException(
                    "Got unexpected result (3): {}.".format(req.text))

        except requests.exceptions.ConnectionError:
            self.logger.exception("StateMachine:Error connecting to server")
            raise ha.HomeAssistantException("Error connecting to server")

        except ValueError:  # If req.json() can't parse the json
            self.logger.exception("StateMachine:Got unexpected result")
            raise ha.HomeAssistantException(
                "Got unexpected result: {}".format(req.text))

        except KeyError:  # If not all expected keys are in the returned JSON
            self.logger.exception("StateMachine:Got unexpected result (2)")
            raise ha.HomeAssistantException(
                "Got unexpected result (2): {}".format(req.text))
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,760 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/packages/__init__.py | """
Not all external Git repositories that we depend on are
available as a package for pip. That is why we include
them here.
PyChromecast
------------
https://github.com/balloob/pychromecast
"""
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,761 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/util.py | """ Helper methods for various modules. """
import re
def sanitize_filename(filename):
    """ Sanitizes a filename by removing ~ .. / \\ and +.

    Strips home-expansion and path-traversal tokens so the result is
    safe to join onto a static-file directory (see httpinterface's
    static file handler).
    """
    # The previous pattern omitted the backslash even though the
    # docstring promised to remove it, leaving Windows-style
    # separators ("..\\etc") partially intact. It is included now.
    return re.sub(r"(~|(\.\.)|/|\\|\+)", "", filename)
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,762 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/bootstrap.py | """
Provides methods to bootstrap a home assistant instance.
"""
import ConfigParser
import logging
import homeassistant as ha
import homeassistant.observers as observers
import homeassistant.actors as actors
import homeassistant.httpinterface as httpinterface
# pylint: disable=too-many-branches
def from_config_file(config_path):
    """ Starts home assistant with all possible functionality
    based on a config file.

    config_path -- path to an INI-style configuration file.

    Each optional component is initialized only when its config
    section/options are present; a (name, success) pair is recorded
    for every attempted component and logged at the end.
    """
    # (component name, success flag) pairs for the summary log below.
    statusses = []

    # Read config
    config = ConfigParser.SafeConfigParser()
    config.read(config_path)

    # Init core
    eventbus = ha.EventBus()
    statemachine = ha.StateMachine(eventbus)

    # Init observers
    # Device scanner
    if config.has_option('tomato', 'host') and \
       config.has_option('tomato', 'username') and \
       config.has_option('tomato', 'password') and \
       config.has_option('tomato', 'http_id'):

        device_scanner = observers.TomatoDeviceScanner(
            config.get('tomato', 'host'),
            config.get('tomato', 'username'),
            config.get('tomato', 'password'),
            config.get('tomato', 'http_id'))

        if device_scanner.success_init:
            statusses.append(("Device Scanner - Tomato", True))
        else:
            # Scanner failed to init; drop it so the device tracker
            # below is not wired to a broken scanner.
            statusses.append(("Device Scanner - Tomato", False))
            device_scanner = None
    else:
        device_scanner = None

    # Device Tracker (only useful with a working scanner)
    if device_scanner:
        device_tracker = observers.DeviceTracker(
            eventbus, statemachine, device_scanner)

        statusses.append(("Device Tracker", True))
    else:
        device_tracker = None

    # Sun tracker
    if config.has_option("common", "latitude") and \
       config.has_option("common", "longitude"):

        statusses.append(("Weather - Ephem",
                          observers.track_sun(
                              eventbus, statemachine,
                              config.get("common", "latitude"),
                              config.get("common", "longitude"))))

    # --------------------------
    # Init actors
    # Light control
    if config.has_section("hue"):
        if config.has_option("hue", "host"):
            light_control = actors.HueLightControl(config.get("hue", "host"))
        else:
            # No host configured: let HueLightControl use its default.
            light_control = actors.HueLightControl()

        statusses.append(("Light Control - Hue", light_control.success_init))
    else:
        light_control = None

    # Light trigger (needs light control; device_tracker may be None)
    if light_control:
        actors.LightTrigger(eventbus, statemachine,
                            device_tracker, light_control)

        statusses.append(("Light Trigger", True))

    if config.has_option("chromecast", "host"):
        statusses.append(("Chromecast",
                          actors.setup_chromecast(
                              eventbus, config.get("chromecast", "host"))))

    if config.has_option("downloader", "download_dir"):
        result = actors.setup_file_downloader(
            eventbus, config.get("downloader", "download_dir"))

        statusses.append(("Downloader", result))

    # These two have no configuration and are always attempted.
    statusses.append(("Webbrowser", actors.setup_webbrowser(eventbus)))

    statusses.append(("Media Buttons", actors.setup_media_buttons(eventbus)))

    # Init HTTP interface
    if config.has_option("httpinterface", "api_password"):
        httpinterface.HTTPInterface(
            eventbus, statemachine,
            config.get("httpinterface", "api_password"))

        statusses.append(("HTTPInterface", True))

    # One-line init status per component.
    logger = logging.getLogger(__name__)

    for component, success_init in statusses:
        status = "initialized" if success_init else "Failed to initialize"

        logger.info("{}: {}".format(component, status))

    # Blocks and runs the event loop until shutdown.
    ha.start_home_assistant(eventbus)
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,763 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/httpinterface.py | """
homeassistant.httpinterface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module provides an API and a HTTP interface for debug purposes.
By default it will run on port 8123.
All API calls have to be accompanied by an 'api_password' parameter and will
return JSON. If successful, calls will return status code 200 or 201.
Other status codes that can occur are:
- 400 (Bad Request)
- 401 (Unauthorized)
- 404 (Not Found)
- 405 (Method not allowed)
The api supports the following actions:
/api/states - GET
Returns a list of categories for which a state is available
Example result:
{
"categories": [
"Paulus_Nexus_4",
"weather.sun",
"all_devices"
]
}
/api/states/<category> - GET
Returns the current state from a category
Example result:
{
"attributes": {
"next_rising": "07:04:15 29-10-2013",
"next_setting": "18:00:31 29-10-2013"
},
"category": "weather.sun",
"last_changed": "23:24:33 28-10-2013",
"state": "below_horizon"
}
/api/states/<category> - POST
Updates the current state of a category. Returns status code 201 if successful
with location header of updated resource and as body the new state.
parameter: new_state - string
optional parameter: attributes - JSON encoded object
Example result:
{
"attributes": {
"next_rising": "07:04:15 29-10-2013",
"next_setting": "18:00:31 29-10-2013"
},
"category": "weather.sun",
"last_changed": "23:24:33 28-10-2013",
"state": "below_horizon"
}
/api/events/<event_type> - POST
Fires an event with event_type
optional parameter: event_data - JSON encoded object
Example result:
{
"message": "Event download_file fired."
}
"""
import json
import threading
import logging
import re
import os
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from urlparse import urlparse, parse_qs
import homeassistant as ha
import homeassistant.util as util
SERVER_PORT = 8123
HTTP_OK = 200
HTTP_CREATED = 201
HTTP_MOVED_PERMANENTLY = 301
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_NOT_FOUND = 404
HTTP_METHOD_NOT_ALLOWED = 405
HTTP_UNPROCESSABLE_ENTITY = 422
URL_ROOT = "/"
URL_CHANGE_STATE = "/change_state"
URL_FIRE_EVENT = "/fire_event"
URL_API_STATES = "/api/states"
URL_API_STATES_CATEGORY = "/api/states/{}"
URL_API_EVENTS = "/api/events"
URL_API_EVENTS_EVENT = "/api/events/{}"
URL_STATIC = "/static/{}"
class HTTPInterface(threading.Thread):
    """ Threaded HTTP server exposing Home Assistant's debug UI and API. """

    # pylint: disable=too-many-arguments
    def __init__(self, eventbus, statemachine, api_password,
                 server_port=None, server_host=None):
        """ Prepares the server; serving begins once
        EVENT_HOMEASSISTANT_START fires on the eventbus. """
        threading.Thread.__init__(self)

        self.daemon = True

        # Default port, and bind to every interface when no host given.
        server_port = server_port or SERVER_PORT
        server_host = server_host or '0.0.0.0'

        self.server = HTTPServer((server_host, server_port), RequestHandler)

        # Attach shared state so RequestHandler instances can reach it
        # through self.server.
        self.server.api_password = api_password
        self.server.statemachine = statemachine
        self.server.eventbus = eventbus
        self.server.logger = logging.getLogger(__name__)
        self.server.flash_message = None

        eventbus.listen_once(ha.EVENT_HOMEASSISTANT_START,
                             lambda event: self.start())

    def run(self):
        """ Serve requests until the process exits. """
        self.server.logger.info("Starting")
        self.server.serve_forever()
class RequestHandler(BaseHTTPRequestHandler):
    """ Handles incoming HTTP requests.

    Routing is driven by PATHS; shared state (eventbus, statemachine,
    api_password, logger, flash_message) lives on self.server, attached
    by HTTPInterface.
    """

    # Routing table of (method, path, handler attribute name). A path
    # is either an exact string or a compiled regex whose named groups
    # are handed to the handler via `path_match`.
    PATHS = [  # debug interface
        ('GET', '/', '_handle_get_root'),
        ('POST', re.compile(r'/change_state'), '_handle_change_state'),
        ('POST', re.compile(r'/fire_event'), '_handle_fire_event'),

        # /states
        ('GET', '/api/states', '_handle_get_api_states'),
        ('GET',
         re.compile(r'/api/states/(?P<category>[a-zA-Z\._0-9]+)'),
         '_handle_get_api_states_category'),
        ('POST',
         re.compile(r'/api/states/(?P<category>[a-zA-Z\._0-9]+)'),
         '_handle_change_state'),

        # /events
        ('GET', '/api/events', '_handle_get_api_events'),
        ('POST',
         re.compile(r'/api/events/(?P<event_type>[a-zA-Z\._0-9]+)'),
         '_handle_fire_event'),

        # Static files
        ('GET', re.compile(r'/static/(?P<file>[a-zA-Z\._\-0-9/]+)'),
         '_handle_get_static')
    ]

    # Set to True when the request path starts with /api/ - responses
    # are then rendered as JSON instead of HTML.
    use_json = False

    def _handle_request(self, method):  # pylint: disable=too-many-branches
        """ Does some common checks and calls appropriate method. """
        url = urlparse(self.path)

        # Read query input
        data = parse_qs(url.query)

        # Did we get post input ?
        content_length = int(self.headers.get('Content-Length', 0))

        if content_length:
            # POST body parameters override/extend the query string.
            data.update(parse_qs(self.rfile.read(content_length)))

        try:
            api_password = data['api_password'][0]
        except KeyError:
            api_password = ''

        if url.path.startswith('/api/'):
            self.use_json = True

        # Var to keep track if we found a path that matched a handler but
        # the method was different
        path_matched_but_not_method = False

        # Var to hold the handler for this path and method if found
        handle_request_method = False

        # Check every handler to find matching result
        for t_method, t_path, t_handler in RequestHandler.PATHS:

            # we either do string-comparison or regular expression matching
            if isinstance(t_path, str):
                path_match = url.path == t_path
            else:
                # pylint: disable=maybe-no-member
                path_match = t_path.match(url.path)

            if path_match and method == t_method:
                # Call the method
                handle_request_method = getattr(self, t_handler)

                break

            elif path_match:
                path_matched_but_not_method = True

        # Did we find a handler for the incoming request?
        if handle_request_method:

            # Do not enforce api password for static files
            if handle_request_method == self._handle_get_static or \
               self._verify_api_password(api_password):

                handle_request_method(path_match, data)

        elif path_matched_but_not_method:
            self.send_response(HTTP_METHOD_NOT_ALLOWED)

        else:
            self.send_response(HTTP_NOT_FOUND)

    def do_GET(self):  # pylint: disable=invalid-name
        """ GET request handler. """
        self._handle_request('GET')

    def do_POST(self):  # pylint: disable=invalid-name
        """ POST request handler. """
        self._handle_request('POST')

    def _verify_api_password(self, api_password):
        """ Helper method to verify the API password
        and take action if incorrect. """
        if api_password == self.server.api_password:
            return True

        elif self.use_json:
            self._message(
                "API password missing or incorrect.", HTTP_UNAUTHORIZED)

        else:
            # HTML clients get a login form (status 200, not 401) so the
            # debug interface remains usable from a plain browser.
            self.send_response(HTTP_OK)
            self.send_header('Content-type', 'text/html')
            self.end_headers()

            self.wfile.write((
                "<html>"
                "<head><title>Home Assistant</title>"
                "<link rel='stylesheet' type='text/css' "
                "href='/static/style.css'>"
                "<link rel='icon' href='/static/favicon.ico' "
                "type='image/x-icon' />"
                "</head>"
                "<body>"
                "<div class='container'>"
                "<form class='form-signin' action='{}' method='GET'>"
                "<input type='text' class='form-control' name='api_password' "
                "placeholder='API Password for Home Assistant' "
                "required autofocus>"
                "<button class='btn btn-lg btn-primary btn-block' "
                "type='submit'>Enter</button>"
                "</form>"
                "</div>"
                "</body></html>").format(self.path))

        return False

    # pylint: disable=unused-argument
    def _handle_get_root(self, path_match, data):
        """ Renders the debug interface. """
        write = lambda txt: self.wfile.write(txt+"\n")

        self.send_response(HTTP_OK)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

        write(("<html>"
               "<head><title>Home Assistant</title>"
               "<link rel='stylesheet' type='text/css' "
               "href='/static/style.css'>"
               "<link rel='icon' href='/static/favicon.ico' "
               "type='image/x-icon' />"
               "</head>"
               "<body>"
               "<div class='container'>"
               "<div class='page-header'><h1>Home Assistant</h1></div>"
               ))

        # Flash message support (one-shot notice set by _message)
        if self.server.flash_message:
            write(("<div class='row'><div class='col-xs-12'>"
                   "<div class='alert alert-success'>"
                   "{}</div></div></div>").format(self.server.flash_message))

            self.server.flash_message = None

        # Describe state machine:
        categories = []

        write(("<div class='row'>"
               "<div class='col-xs-12'>"
               "<div class='panel panel-primary'>"
               "<div class='panel-heading'><h2 class='panel-title'>"
               "States</h2></div>"
               "<form method='post' action='/change_state' "
               "class='form-change-state'>"
               "<input type='hidden' name='api_password' value='{}'>"
               "<table class='table'><tr>"
               "<th>Category</th><th>State</th>"
               "<th>Attributes</th><th>Last Changed</th>"
               "</tr>").format(self.server.api_password))

        for category in \
            sorted(self.server.statemachine.categories,
                   key=lambda key: key.lower()):

            categories.append(category)

            state = self.server.statemachine.get_state(category)

            attributes = "<br>".join(
                ["{}: {}".format(attr, state['attributes'][attr])
                 for attr in state['attributes']])

            write(("<tr>"
                   "<td>{}</td><td>{}</td><td>{}</td><td>{}</td>"
                   "</tr>").format(
                  category,
                  state['state'],
                  attributes,
                  state['last_changed']))

        # Change state form
        write(("<tr><td><input name='category' class='form-control' "
               "placeholder='Category'></td>"
               "<td><input name='new_state' class='form-control' "
               "placeholder='New State'></td>"
               "<td><textarea rows='3' name='attributes' class='form-control' "
               "placeholder='State Attributes (JSON, optional)'>"
               "</textarea></td>"
               "<td><button type='submit' class='btn btn-default'>"
               "Set State</button></td></tr>"
               "</table></form></div>"
               "</div></div>"))

        # Describe event bus:
        write(("<div class='row'>"
               "<div class='col-xs-6'>"
               "<div class='panel panel-primary'>"
               "<div class='panel-heading'><h2 class='panel-title'>"
               "Events</h2></div>"
               "<table class='table'>"
               "<tr><th>Event Type</th><th>Listeners</th></tr>"))

        for event_type, count in sorted(
                self.server.eventbus.listeners.items()):

            write("<tr><td>{}</td><td>{}</td></tr>".format(event_type, count))

        # Form to allow firing events
        write(("</table></div></div>"
               "<div class='col-xs-6'>"
               "<div class='panel panel-primary'>"
               "<div class='panel-heading'><h2 class='panel-title'>"
               "Fire Event</h2></div>"
               "<div class='panel-body'>"
               "<form method='post' action='/fire_event' "
               "class='form-horizontal form-fire-event'>"
               "<input type='hidden' name='api_password' value='{}'>"
               "<div class='form-group'>"
               "<label for='event_type' class='col-xs-3 control-label'>"
               "Event type</label>"
               "<div class='col-xs-9'>"
               "<input type='text' class='form-control' id='event_type'"
               " name='event_type' placeholder='Event Type'>"
               "</div>"
               "</div>"
               "<div class='form-group'>"
               "<label for='event_data' class='col-xs-3 control-label'>"
               "Event data</label>"
               "<div class='col-xs-9'>"
               "<textarea rows='3' class='form-control' id='event_data'"
               " name='event_data' placeholder='Event Data "
               "(JSON, optional)'></textarea>"
               "</div>"
               "</div>"
               "<div class='form-group'>"
               "<div class='col-xs-offset-3 col-xs-9'>"
               "<button type='submit' class='btn btn-default'>"
               "Fire Event</button>"
               "</div>"
               "</div>"
               "</form>"
               "</div></div></div>"
               "</div>").format(self.server.api_password))

        write("</div></body></html>")

    # pylint: disable=invalid-name
    def _handle_change_state(self, path_match, data):
        """ Handles updating the state of a category.

        This handles the following paths:
        /change_state
        /api/states/<category>
        """
        try:
            try:
                category = path_match.group('category')
            except IndexError:
                # If group 'category' does not exist in path_match
                # (i.e. the /change_state form), read it from the data.
                category = data['category'][0]

            new_state = data['new_state'][0]

            try:
                attributes = json.loads(data['attributes'][0])
            except KeyError:
                # Happens if key 'attributes' does not exist
                attributes = None

            # Write state
            self.server.statemachine.set_state(category,
                                               new_state,
                                               attributes)

            # Return state if json, else redirect to main page
            if self.use_json:
                state = self.server.statemachine.get_state(category)

                state['category'] = category

                self._write_json(state, status_code=HTTP_CREATED,
                                 location=
                                 URL_API_STATES_CATEGORY.format(category))
            else:
                self._message(
                    "State of {} changed to {}".format(category, new_state))

        except KeyError:
            # If new_state don't exist in post data
            self._message(
                "No new_state submitted.", HTTP_BAD_REQUEST)

        except ValueError:
            # Occurs during error parsing json
            self._message(
                "Invalid JSON for attributes", HTTP_UNPROCESSABLE_ENTITY)

    # pylint: disable=invalid-name
    def _handle_fire_event(self, path_match, data):
        """ Handles firing of an event.

        This handles the following paths:
        /fire_event
        /api/events/<event_type>
        """
        try:
            try:
                event_type = path_match.group('event_type')
            except IndexError:
                # If group event_type does not exist in path_match
                event_type = data['event_type'][0]

            try:
                event_data = json.loads(data['event_data'][0])
            except KeyError:
                # Happens if key 'event_data' does not exist
                event_data = None

            self.server.eventbus.fire(event_type, event_data)

            self._message("Event {} fired.".format(event_type))

        except KeyError:
            # Occurs if event_type does not exist in data
            self._message("No event_type received.", HTTP_BAD_REQUEST)

        except ValueError:
            # Occurs during error parsing json
            self._message(
                "Invalid JSON for event_data", HTTP_UNPROCESSABLE_ENTITY)

    # pylint: disable=unused-argument
    def _handle_get_api_states(self, path_match, data):
        """ Returns the categories which state is being tracked. """
        self._write_json({'categories': self.server.statemachine.categories})

    # pylint: disable=unused-argument
    def _handle_get_api_states_category(self, path_match, data):
        """ Returns the state of a specific category. """
        category = path_match.group('category')

        state = self.server.statemachine.get_state(category)

        if state:
            state['category'] = category

            self._write_json(state)
        else:
            # If category does not exist
            self._message("State does not exist.", HTTP_UNPROCESSABLE_ENTITY)

    def _handle_get_api_events(self, path_match, data):
        """ Handles getting overview of event listeners. """
        self._write_json({'listeners': self.server.eventbus.listeners})

    def _handle_get_static(self, path_match, data):
        """ Returns a static file. """
        # Sanitize the requested name so it cannot escape www_static.
        req_file = util.sanitize_filename(path_match.group('file'))

        path = os.path.join(os.path.dirname(__file__), 'www_static', req_file)

        if os.path.isfile(path):
            self.send_response(HTTP_OK)

            # TODO: correct header for mime-type and caching
            self.end_headers()

            # Stream the file out in 1 KiB chunks.
            with open(path, 'rb') as inp:
                data = inp.read(1024)

                while data:
                    self.wfile.write(data)

                    data = inp.read(1024)

        else:
            self.send_response(HTTP_NOT_FOUND)
            self.end_headers()

    def _message(self, message, status_code=HTTP_OK):
        """ Helper method to return a message to the caller. """
        if self.use_json:
            self._write_json({'message': message}, status_code=status_code)
        elif status_code == HTTP_OK:
            # For the HTML UI a success message becomes a flash message
            # rendered by the next page load.
            self.server.flash_message = message

            self._redirect('/')
        else:
            self.send_error(status_code, message)

    def _redirect(self, location):
        """ Helper method to redirect caller. """
        self.send_response(HTTP_MOVED_PERMANENTLY)

        # Carry the api_password along so the redirect target is
        # still authenticated.
        self.send_header(
            "Location", "{}?api_password={}".format(
                location, self.server.api_password))

        self.end_headers()

    def _write_json(self, data=None, status_code=HTTP_OK, location=None):
        """ Helper method to return JSON to the caller. """
        self.send_response(status_code)
        self.send_header('Content-type', 'application/json')

        if location:
            self.send_header('Location', location)

        self.end_headers()

        if data:
            self.wfile.write(json.dumps(data, indent=4, sort_keys=True))
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,764 | jwveldhuis/home-assistant | refs/heads/master | /homeassistant/test.py | """
homeassistant.test
~~~~~~~~~~~~~~~~~~
Provides tests to verify that Home Assistant modules do what they should do.
"""
import unittest
import time
import requests
import homeassistant as ha
import homeassistant.remote as remote
import homeassistant.httpinterface as hah
API_PASSWORD = "test1234"
HTTP_BASE_URL = "http://127.0.0.1:{}".format(hah.SERVER_PORT)
def _url(path=""):
    """ Build an absolute URL for *path* on the local test server. """
    return "{}{}".format(HTTP_BASE_URL, path)
class HAHelper(object):  # pylint: disable=too-few-public-methods
    """ Helper class to keep track of current running HA instance. """

    # Shared dict holding 'eventbus' and 'statemachine' once
    # ensure_homeassistant_started() has run; None before that.
    core = None
def ensure_homeassistant_started():
    """ Ensures home assistant is started.

    Lazily boots a single shared instance (event bus, state machine,
    HTTP interface) on first call and returns (eventbus, statemachine).
    """
    if not HAHelper.core:
        core = {'eventbus': ha.EventBus()}
        core['statemachine'] = ha.StateMachine(core['eventbus'])

        # Seed one listener and one state so list endpoints have data.
        core['eventbus'].listen('test_event', len)
        core['statemachine'].set_state('test', 'a_state')

        hah.HTTPInterface(core['eventbus'], core['statemachine'],
                          API_PASSWORD)

        core['eventbus'].fire(ha.EVENT_HOMEASSISTANT_START)

        # Give objects time to startup
        time.sleep(1)

        HAHelper.core = core

    return HAHelper.core['eventbus'], HAHelper.core['statemachine']
# pylint: disable=too-many-public-methods
class TestHTTPInterface(unittest.TestCase):
    """ Test the HTTP debug interface and API.

    All tests run against the single shared HA instance started by
    ensure_homeassistant_started(); requests go over real HTTP to the
    local server.
    """

    @classmethod
    def setUpClass(cls):  # pylint: disable=invalid-name
        """ things to be run when tests are started. """
        cls.eventbus, cls.statemachine = ensure_homeassistant_started()

    def test_debug_interface(self):
        """ Test if we can login by comparing not logged in screen to
            logged in screen. """
        with_pw = requests.get(
            _url("/?api_password={}".format(API_PASSWORD)))

        without_pw = requests.get(_url())

        self.assertNotEqual(without_pw.text, with_pw.text)

    def test_api_password(self):
        """ Test if we get access denied if we omit or provide
            a wrong api password. """
        req = requests.get(
            _url(hah.URL_API_STATES_CATEGORY.format("test")))

        self.assertEqual(req.status_code, 401)

        req = requests.get(
            _url(hah.URL_API_STATES_CATEGORY.format("test")),
            params={"api_password": "not the password"})

        self.assertEqual(req.status_code, 401)

    def test_debug_change_state(self):
        """ Test if we can change a state from the debug interface. """
        # Pre-set a state so the change below is observable.
        self.statemachine.set_state("test.test", "not_to_be_set_state")

        requests.post(_url(hah.URL_CHANGE_STATE),
                      data={"category": "test.test",
                            "new_state": "debug_state_change2",
                            "api_password": API_PASSWORD})

        self.assertEqual(self.statemachine.get_state("test.test")['state'],
                         "debug_state_change2")

    def test_debug_fire_event(self):
        """ Test if we can fire an event from the debug interface. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify that our event got called and
                that test if our data came through. """
            if "test" in event.data:
                test_value.append(1)

        self.eventbus.listen_once("test_event_with_data", listener)

        requests.post(
            _url(hah.URL_FIRE_EVENT),
            data={"event_type": "test_event_with_data",
                  "event_data": '{"test": 1}',
                  "api_password": API_PASSWORD})

        # Allow the event to take place
        time.sleep(1)

        self.assertEqual(len(test_value), 1)

    def test_api_list_state_categories(self):
        """ Test if the debug interface allows us to list state categories. """
        req = requests.get(_url(hah.URL_API_STATES),
                           data={"api_password": API_PASSWORD})

        data = req.json()

        self.assertEqual(self.statemachine.categories,
                         data['categories'])

    def test_api_get_state(self):
        """ Test if the debug interface allows us to get a state. """
        req = requests.get(
            _url(hah.URL_API_STATES_CATEGORY.format("test")),
            data={"api_password": API_PASSWORD})

        data = req.json()

        state = self.statemachine.get_state("test")

        self.assertEqual(data['category'], "test")
        self.assertEqual(data['state'], state['state'])
        self.assertEqual(data['last_changed'], state['last_changed'])
        self.assertEqual(data['attributes'], state['attributes'])

    def test_api_get_non_existing_state(self):
        """ Test if the debug interface allows us to get a state. """
        req = requests.get(
            _url(hah.URL_API_STATES_CATEGORY.format("does_not_exist")),
            params={"api_password": API_PASSWORD})

        # Unknown category is reported as 422 by the API.
        self.assertEqual(req.status_code, 422)

    def test_api_state_change(self):
        """ Test if we can change the state of a category that exists. """
        self.statemachine.set_state("test.test", "not_to_be_set_state")

        requests.post(_url(hah.URL_API_STATES_CATEGORY.format("test.test")),
                      data={"new_state": "debug_state_change2",
                            "api_password": API_PASSWORD})

        self.assertEqual(self.statemachine.get_state("test.test")['state'],
                         "debug_state_change2")

    # pylint: disable=invalid-name
    def test_api_state_change_of_non_existing_category(self):
        """ Test if the API allows us to change a state of
            a non existing category. """
        new_state = "debug_state_change"

        req = requests.post(
            _url(hah.URL_API_STATES_CATEGORY.format(
                "test_category_that_does_not_exist")),
            data={"new_state": new_state,
                  "api_password": API_PASSWORD})

        cur_state = (self.statemachine.
                     get_state("test_category_that_does_not_exist")['state'])

        # POSTing to an unknown category creates it (201 Created).
        self.assertEqual(req.status_code, 201)
        self.assertEqual(cur_state, new_state)

    # pylint: disable=invalid-name
    def test_api_fire_event_with_no_data(self):
        """ Test if the API allows us to fire an event. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify our event got called. """
            test_value.append(1)

        self.eventbus.listen_once("test.event_no_data", listener)

        requests.post(
            _url(hah.URL_API_EVENTS_EVENT.format("test.event_no_data")),
            data={"api_password": API_PASSWORD})

        # Allow the event to take place
        time.sleep(1)

        self.assertEqual(len(test_value), 1)

    # pylint: disable=invalid-name
    def test_api_fire_event_with_data(self):
        """ Test if the API allows us to fire an event. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify that our event got called and
                that test if our data came through. """
            if "test" in event.data:
                test_value.append(1)

        self.eventbus.listen_once("test_event_with_data", listener)

        requests.post(
            _url(hah.URL_API_EVENTS_EVENT.format("test_event_with_data")),
            data={"event_data": '{"test": 1}',
                  "api_password": API_PASSWORD})

        # Allow the event to take place
        time.sleep(1)

        self.assertEqual(len(test_value), 1)

    # pylint: disable=invalid-name
    def test_api_fire_event_with_invalid_json(self):
        """ Test if the API allows us to fire an event. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify our event got called. """
            test_value.append(1)

        self.eventbus.listen_once("test_event_with_bad_data", listener)

        req = requests.post(
            _url(hah.URL_API_EVENTS_EVENT.format("test_event")),
            data={"event_data": 'not json',
                  "api_password": API_PASSWORD})

        # It shouldn't but if it fires, allow the event to take place
        time.sleep(1)

        # Bad JSON payload is rejected with 422 and no event fires.
        self.assertEqual(req.status_code, 422)
        self.assertEqual(len(test_value), 0)

    def test_api_get_event_listeners(self):
        """ Test if we can get the list of events being listened for. """
        req = requests.get(_url(hah.URL_API_EVENTS),
                           params={"api_password": API_PASSWORD})

        data = req.json()

        self.assertEqual(data['listeners'], self.eventbus.listeners)
class TestRemote(unittest.TestCase):
    """ Test the homeassistant.remote module.

    Compares the remote (HTTP-backed) StateMachine/EventBus against the
    local shared instance they proxy to.
    """

    @classmethod
    def setUpClass(cls):  # pylint: disable=invalid-name
        """ things to be run when tests are started. """
        cls.eventbus, cls.statemachine = ensure_homeassistant_started()

        # Remote proxies pointed at the local test server.
        cls.remote_sm = remote.StateMachine("127.0.0.1", API_PASSWORD)
        cls.remote_eb = remote.EventBus("127.0.0.1", API_PASSWORD)

        # A local state machine wired to the remote event bus.
        cls.sm_with_remote_eb = ha.StateMachine(cls.remote_eb)
        cls.sm_with_remote_eb.set_state("test", "a_state")

    # pylint: disable=invalid-name
    def test_remote_sm_list_state_categories(self):
        """ Test if the debug interface allows us to list state categories. """
        self.assertEqual(self.statemachine.categories,
                         self.remote_sm.categories)

    def test_remote_sm_get_state(self):
        """ Test if the debug interface allows us to list state categories. """
        remote_state = self.remote_sm.get_state("test")

        state = self.statemachine.get_state("test")

        self.assertEqual(remote_state['state'], state['state'])
        self.assertEqual(remote_state['last_changed'], state['last_changed'])
        self.assertEqual(remote_state['attributes'], state['attributes'])

    def test_remote_sm_get_non_existing_state(self):
        """ Test if the debug interface allows us to list state categories. """
        self.assertEqual(self.remote_sm.get_state("test_does_not_exist"), None)

    def test_remote_sm_state_change(self):
        """ Test if we can change the state of a category that exists. """
        self.remote_sm.set_state("test", "set_remotely", {"test": 1})

        state = self.statemachine.get_state("test")

        self.assertEqual(state['state'], "set_remotely")
        self.assertEqual(state['attributes']['test'], 1)

    def test_remote_eb_listening_for_same(self):
        """ Test if remote EB correctly reports listener overview. """
        self.assertEqual(self.eventbus.listeners, self.remote_eb.listeners)

    # pylint: disable=invalid-name
    def test_remote_eb_fire_event_with_no_data(self):
        """ Test if the remote eventbus allows us to fire an event. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify our event got called. """
            test_value.append(1)

        self.eventbus.listen_once("test_event_no_data", listener)

        self.remote_eb.fire("test_event_no_data")

        # Allow the event to take place
        time.sleep(1)

        self.assertEqual(len(test_value), 1)

    # pylint: disable=invalid-name
    def test_remote_eb_fire_event_with_data(self):
        """ Test if the remote eventbus allows us to fire an event. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify our event got called. """
            if event.data["test"] == 1:
                test_value.append(1)

        self.eventbus.listen_once("test_event_with_data", listener)

        self.remote_eb.fire("test_event_with_data", {"test": 1})

        # Allow the event to take place
        time.sleep(1)

        self.assertEqual(len(test_value), 1)

    def test_local_sm_with_remote_eb(self):
        """ Test if we get the event if we change a state on a
        StateMachine connected to a remote eventbus. """
        test_value = []

        def listener(event):  # pylint: disable=unused-argument
            """ Helper method that will verify our event got called. """
            test_value.append(1)

        self.eventbus.listen_once(ha.EVENT_STATE_CHANGED, listener)

        self.sm_with_remote_eb.set_state("test", "local sm with remote eb")

        # Allow the event to take place
        time.sleep(1)

        self.assertEqual(len(test_value), 1)
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,765 | jwveldhuis/home-assistant | refs/heads/master | /start.py | #!/usr/bin/python2
""" Starts home assistant with all possible functionality. """
import homeassistant.bootstrap
homeassistant.bootstrap.from_config_file("home-assistant.conf")
| {"/homeassistant/actors.py": ["/homeassistant/__init__.py", "/homeassistant/util.py", "/homeassistant/observers.py", "/homeassistant/packages/__init__.py"], "/homeassistant/observers.py": ["/homeassistant/__init__.py"], "/homeassistant/remote.py": ["/homeassistant/__init__.py", "/homeassistant/httpinterface.py"], "/homeassistant/bootstrap.py": ["/homeassistant/__init__.py", "/homeassistant/observers.py", "/homeassistant/actors.py", "/homeassistant/httpinterface.py"], "/homeassistant/httpinterface.py": ["/homeassistant/__init__.py", "/homeassistant/util.py"], "/homeassistant/test.py": ["/homeassistant/__init__.py", "/homeassistant/remote.py", "/homeassistant/httpinterface.py"], "/start.py": ["/homeassistant/bootstrap.py"]} |
71,766 | SamirIngley/tweet-gen | refs/heads/master | /sentence.py | import sample
def gen_sentence(source, words):
    """Generate a sentence of *words* stochastically sampled words.

    Each word is drawn via sample.stochastic_sample(source); the first
    is capitalized, the rest lowercased, and a period is appended.  The
    finished sentence is printed and returned.
    """
    tokens = []
    for position in range(words):
        token = sample.stochastic_sample(source)
        tokens.append(token.capitalize() if position == 0 else token.lower())
    sentence = " ".join(tokens) + "."
    print(sentence)
    return sentence
if __name__ == '__main__':
gen_sentence("source_text.txt", 8) | {"/sentence.py": ["/sample.py"], "/sample.py": ["/histogram.py"], "/app.py": ["/sentence.py", "/markov_chain.py"]} |
71,767 | SamirIngley/tweet-gen | refs/heads/master | /histogram.py | # import sys
from clean_text import clean
def invert_hist(source):
    """Build an inverted histogram of the cleaned text of *source*.

    Returns a list of (count, [words]) tuples: each tuple groups the
    words that occur exactly *count* times.  Counting is O(n^2) over the
    token list.
    """
    text = clean(source)
    histogram = []
    word_cache = []       # unique words seen so far
    instance_cache = []   # [count, word] pairs, one per unique word
    hist_cache = []       # words already placed into an output group
    # Pass 1: count occurrences of every unique word.
    for word in text:
        num_occur = 0
        if word not in word_cache:  # counts occurances of unique words
            word_cache.append(word)
            for word2 in text:
                if word == word2:
                    num_occur += 1
            instance = [num_occur, word]
            instance_cache.append(instance)
        else: pass
    # Pass 2: group words that share the same count.
    # NOTE(review): the guard checks only item[1] against hist_cache, but
    # every word with the same count is appended inside the inner loop;
    # later items whose word was already grouped are skipped, which seems
    # to be the intended dedup -- verify no duplicate groups can appear.
    for item in instance_cache:
        lst = []
        if item[1] not in hist_cache:
            for item2 in instance_cache:
                if item[0] == item2[0]:
                    hist_cache.append(item2[1])
                    lst.append(item2[1])
            tup = (item[0], lst)
            histogram.append(tuple(tup))
    return histogram
def dict_hist(source):
    """Return a {word: count} histogram of the cleaned text of *source*."""
    counts = {}
    for token in clean(source):
        counts[token] = counts.get(token, 0) + 1
    return counts
def list_hist(source):
    """Return a histogram as [word, count] pairs, one per unique word,
    in first-appearance order."""
    tokens = clean(source)
    pairs = []
    for token in tokens:
        entry = [token, tokens.count(token)]
        if entry not in pairs:
            pairs.append(entry)
    return pairs
def tuple_hist(source):
    """Return a histogram as (word, count) tuples, one per unique word,
    in first-appearance order."""
    tokens = clean(source)
    seen = []
    result = []
    for token in tokens:
        if token in seen:
            continue
        seen.append(token)
        result.append((token, tokens.count(token)))
    return result
def unique_words(histo):
    """Print and return the number of entries (unique words) in *histo*."""
    count = len(histo)
    print("unique words: {}".format(count))
    return count
def frequency(word, histo):
    """Print and return the count stored for *word* in histogram *histo*.

    If the word appears in several entries the last one wins (matching
    the original scan-to-the-end behaviour); missing words yield 0.
    """
    hits = [entry[1] for entry in histo if entry[0] == word]
    freq = hits[-1] if hits else 0
    print("freq of {}: {}".format(word, freq))
    return freq
if __name__ == '__main__':
dicto_histo = dict_hist("source_text.txt")
listo_histo = list_hist("source_text.txt")
tuple_histo = tuple_hist("source_text.txt")
invert_histo = invert_hist("source_text.txt")
print(" ")
print("INVERT: ", invert_histo)
print(" ")
print("TUP: ", tuple_histo)
print(" ")
print("DICTO: ", dicto_histo)
print(" ")
print("LIST: ", listo_histo)
print(" ")
unique_words(listo_histo)
frequency("'the'", listo_histo)
print(" ")
| {"/sentence.py": ["/sample.py"], "/sample.py": ["/histogram.py"], "/app.py": ["/sentence.py", "/markov_chain.py"]} |
71,768 | SamirIngley/tweet-gen | refs/heads/master | /sample.py | from histogram import invert_hist
from clean_text import clean
import random
# def gen_sentence(source):
# # text = clean(source)
# word_count = 7
# counter = 1
# sentence = []
# while counter <= word_count:
# if counter == 1: # upper case the first character
# word1 = (stochastic_sample(source))
# word = word1.capitalize()
# sentence.append(word)
# counter += 1
# # print(word1)
# # elif word_count > counter > 0 : # lowercase the rest?
# else:
# word2 = stochastic_sample(source)
# sentence.append(word2.lower())
# counter += 1
# # add a period to the last word
# period = "."
# # counter += 1
# # print(sentence)
# sentence = " ".join(sentence)
# sentence += period
# print(sentence)
# return sentence
def stochastic_sample(source):
    ''' Histogram -> percentages -> random item

    Draw one word from *source* with probability proportional to its
    frequency: build the inverted histogram, convert each word's count
    to a percentage of the total word mass, then walk the cumulative
    percentages until a random "dart" is crossed.
    '''
    hist = invert_hist(source)
    percentages = {}   # word -> percentage of total occurrences
    text = clean(source)
    length = len(text)  # unused; kept as in original
    # NOTE(review): randint(0, 100) spans 101 values while the cumulative
    # mass sums to ~100, so the endpoints are slightly biased -- verify.
    dart = random.randint(0, 100)  # index is not length
    # Total word mass: group count times number of words in the group.
    total = 0
    for item in hist:
        total += (len(item[1]) * item[0])
    # Per-word percentages (item[0] is the occurrence count of the group).
    for item in hist:
        for word in item[1]:
            word_percentage = item[0]/total
            percentages[word] = word_percentage * 100
    # Walk the cumulative distribution; the word at which the running sum
    # first exceeds the dart is the sample.  Relies on deterministic dict
    # insertion order (guaranteed in Python 3.7+).
    num = 0
    target = None
    for word in percentages:
        num += percentages[word]
        target = word
        if num > dart:  # we add til we break it. if we cross the line, we return
            break
    return str(target)
if __name__ == '__main__':
stochastic_sample("source_text.txt") | {"/sentence.py": ["/sample.py"], "/sample.py": ["/histogram.py"], "/app.py": ["/sentence.py", "/markov_chain.py"]} |
71,769 | SamirIngley/tweet-gen | refs/heads/master | /test.py | # import sys
# import source_text.txt
# source = sys.argv[1]
from use import *
print(use('source_text'))
| {"/sentence.py": ["/sample.py"], "/sample.py": ["/histogram.py"], "/app.py": ["/sentence.py", "/markov_chain.py"]} |
71,770 | SamirIngley/tweet-gen | refs/heads/master | /app.py | from flask import Flask
import sentence
from markov_chain import Markov, random_walk
app = Flask(__name__)
@app.route('/')
def sentences():
    """Serve one randomly generated sentence at the site root.

    Bug fix: the original returned the ``random_walk`` *function object*
    itself, which is not a valid Flask response; ``random_walk`` prints
    its sentence and returns None, so stdout is captured here to obtain
    the text to serve.

    NOTE(review): ``Markov`` is fed the literal filename string, so the
    chain is built over the string's characters rather than the file's
    words -- verify against the intended corpus loading.
    """
    import io
    from contextlib import redirect_stdout

    markov_instance = Markov("source_text.txt")
    buffer = io.StringIO()
    with redirect_stdout(buffer):
        random_walk(14, markov_instance)
    return buffer.getvalue()
| {"/sentence.py": ["/sample.py"], "/sample.py": ["/histogram.py"], "/app.py": ["/sentence.py", "/markov_chain.py"]} |
71,771 | SamirIngley/tweet-gen | refs/heads/master | /markov_chain.py | from random import choice
from pprint import pprint
from dictogram import Dictogram
from listogram import Listogram
class Markov:
    """First-order Markov chain over the words of an iterable corpus.

    ``states`` maps each word to a Dictogram of the words that follow it.
    """

    def __init__(self, corpus):
        self.corpus = corpus
        self.states = {}
        self.chain()

    def chain(self):
        """Populate ``self.states`` from consecutive word pairs."""
        previous = None
        for current in self.corpus:
            # The very first word has no predecessor to record.
            if previous is not None:
                if previous not in self.states:
                    self.states[previous] = Dictogram()
                self.states[previous].add_count(current)
            previous = current

    def __str__(self):
        return str(self.states)
def random_walk(num_words, markov):
    """Random-walk *num_words* steps over ``markov.states``; print and
    return the resulting sentence.

    The first word is drawn uniformly from the state keys and
    capitalized; each later word is drawn from the successor histogram
    of the previous word.

    Fixes vs. original: removed the unused ``length`` variable, replaced
    ``== None`` with ``is None``, returned the sentence (was None), and
    broke the infinite loop that occurred when the walk reached a
    terminal word with no outgoing transitions (it now re-seeds
    uniformly in that case).
    """
    words = []
    last_word = None
    count = 0
    while count < num_words:
        if last_word is None or last_word not in markov.states:
            # Start of the walk, or a dead-end word: draw a fresh state.
            last_word = choice(list(markov.states.keys()))
            words.append(last_word.capitalize() if count == 0 else last_word)
        else:
            last_word = choice(list(markov.states[last_word]))
            words.append(last_word)
        count += 1
    sentence = " ".join(words) + "."
    print(sentence)
    return sentence
# def second_order_markov(words):
# states ={}
# for i in range(len(words) -2):
# first_word = words[i]
# second_word = words[i +1]
# # pairs = (first_word, second_word)
# # print(pairs)
# third_word = words[i + 2]
# # states[first_word] = markov_dict
# if first_word not in states.keys():
# histo = []
# states[(first_word, second_word)] = histo
# states[(first_word, second_word)].append(third_word)
# values = states.items()
# for key, value in values:
# # markov_dict = {} # can't use a regular dict
# states[key] = Dictogram(value) # to use a dictogram method (add_count), we must first establish a Dictogram() object
# # markov_dict.add_count(second_word)
# # print(markov_dict)
# return states
# # print(states)
if __name__ == '__main__':
corpus = ("The ride of a lifetime by Robert Iger. The best book I read this year. A good example of a Nice guy coming first. What you do is who you are by Ben Horowitz. This book is going to make it fashionable to use the world “Culture” in boardrooms Who is Michael Ovitz by Michael Ovitz. A window into managing Hollywood talent - Future of managing top technical talent The dog who took me up a mountain by Rick Crandall .").split()
# corpus2 = (source_text.txt).split()
markov_instance = Markov(corpus)
# print(markov_instance.states)
# print(type(markov_instance.chain))
random_walk(14, markov_instance) | {"/sentence.py": ["/sample.py"], "/sample.py": ["/histogram.py"], "/app.py": ["/sentence.py", "/markov_chain.py"]} |
71,828 | carpyncho/bmdiff | refs/heads/master | /bmdiff.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
# =============================================================================
# DOCS
# =============================================================================
__doc__ = """Make band merges difference and union."""
__version__ = "0.0.2"
# =============================================================================
# IMPORTS
# =============================================================================
import sys
import argparse
import logging
import warnings
import numpy as np
with warnings.catch_warnings():
warnings.simplefilter("ignore")
from astropysics import coords
# =============================================================================
# LOG
# =============================================================================
logger = logging.getLogger("mmatch")
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
# =============================================================================
# CONSTANTS
# =============================================================================
DOC = __doc__
VERSION = __version__
EPILOG = "BSD-3 Licensed - IATE-OAC: http://iate.oac.uncor.edu/"
DEFAULT_RADIUS = 3 * 9.2592592592592588e-5
EPOCHS = 3
SOURCE_DTYPE = {
'names': ['ra_h', 'dec_h', 'ra_j', 'dec_j', 'ra_k', 'dec_k'],
'formats': [float, float, float, float, float, float]
}
USECOLS = [0, 1, 2, 3, 4, 5]
FORMATS = ['%i', '%1.18e', '%1.18e', '%1.18e', '%1.18e', '%1.18e', '%1.18e']
# =============================================================================
# MAIN
# =============================================================================
def add_columns(arr, extra_cols, append=False):
    """Return a copy of structured array *arr* with the named columns in
    *extra_cols* (a list of (name, values) pairs) prepended.

    The ``append`` flag is accepted but unused, kept for interface
    compatibility.
    """
    combined_dtype = (
        [(name, values.dtype) for name, values in extra_cols]
        + [(field, fmt) for field, fmt in arr.dtype.descr])
    lookup = dict(extra_cols)
    # Allocate the widened array, then copy column by column.
    out = np.empty(len(arr), dtype=combined_dtype)
    for field in out.dtype.names:
        out[field] = lookup[field] if field in lookup else arr[field]
    return out
def read_bm(fp, band="k", **kwargs):
    """Load a band-merge file and return its valid sources.

    Reads *fp* with ``np.genfromtxt`` (defaults: SOURCE_DTYPE, EPOCHS
    header rows skipped, USECOLS columns), tags every row with its
    original index in an ``idx`` column, and drops rows whose RA in the
    requested *band* equals the -9999.0 sentinel.
    """
    logger.info("- Reading {}...".format(fp))
    kwargs.setdefault("dtype", SOURCE_DTYPE)
    kwargs.setdefault("skip_header", EPOCHS)
    kwargs.setdefault("usecols", USECOLS)
    arr = np.genfromtxt(fp, **kwargs)
    # A single-row file yields a 0-d structured scalar; normalize to 1-d.
    if arr.ndim == 0:
        arr = arr.flatten()
    indexed = add_columns(arr, [("idx", np.arange(len(arr)))])
    ra = "ra_{}".format(band)
    # -9999.0 marks a missing detection in this band.
    flt = (indexed[ra] != -9999.0)
    filtered = indexed[flt]
    logger.info("Found {}/{} valid sources".format(len(filtered), len(arr)))
    return filtered
def match(bm0_ra, bm0_dec, bm1_ra, bm1_dec, radius=DEFAULT_RADIUS):
    """Yield (idx_bm0, idx_bm1) pairs of mutual nearest-neighbour matches
    between the two coordinate sets, within *radius*.

    NOTE(review): assumes astropysics' match_coords returns arrays
    indexed by its *first* coordinate-set argument (nearest index,
    distance, matched flag) -- verify against the astropysics docs.
    """
    logger.info("- Matching max distance of radius {}...".format(radius))
    # Nearest bm1 neighbour for every bm0 source ...
    nearestind_bm1, distance_bm1, match_bm1 = coords.match_coords(
        bm0_ra, bm0_dec, bm1_ra, bm1_dec, eps=radius, mode="nearest")
    # ... and nearest bm0 neighbour for every bm1 source.
    nearestind_bm0, distance_bm0, match_bm0 = coords.match_coords(
        bm1_ra, bm1_dec, bm0_ra, bm0_dec, eps=radius, mode="nearest")
    # Keep only symmetric ("mutual best") pairs.
    for idx_bm1, idx_bm0 in enumerate(nearestind_bm0):
        if match_bm0[idx_bm1] and \
                nearestind_bm1[idx_bm0] == idx_bm1 \
                and match_bm1[idx_bm0]:
            yield idx_bm0, idx_bm1
def difference(ibm, flts, radius=DEFAULT_RADIUS, band="k"):
    """Return the rows of *ibm* that have no mutual positional match in
    any of the band-merge arrays in *flts* (set difference on *band*)."""
    ra, dec = "ra_{}".format(band), "dec_{}".format(band)
    to_remove = None
    logger.info("[MATCH]")
    for flt in flts:
        # ibm-side indices of every mutual match against this filter.
        matches = np.fromiter(
            match(ibm[ra], ibm[dec], flt[ra], flt[dec], radius=radius),
            dtype=[("idx_ibm", int), ("idx_flt", int)])
        logger.info("Found {} sources matches".format(len(matches)))
        if to_remove is None:
            to_remove = matches["idx_ibm"]
        else:
            to_remove = np.append(to_remove, matches["idx_ibm"])
    logger.info("[FILTERING]")
    uto_remove = np.unique(to_remove)
    logger.info("{} unique sources to remove".format(len(uto_remove)))
    # NOTE(review): np.in1d is deprecated in modern NumPy (np.isin).
    clean_mask = ~np.in1d(np.arange(len(ibm)), uto_remove)
    return ibm[clean_mask]
def union(bms, radius=DEFAULT_RADIUS, band="k"):
    """Merge the band-merge arrays in *bms*, keeping each physical source
    once; every row is tagged with a ``bm_idx`` column naming the array
    it came from."""
    ra, dec = "ra_{}".format(band), "dec_{}".format(band)
    united = None
    for idx, bm in enumerate(bms):
        bm_len = len(bm)
        # Tag the origin catalogue of every row.
        bm_idx = np.zeros(bm_len, dtype=int) + idx
        bm = add_columns(bm, [("bm_idx", bm_idx)])
        if united is None:
            united = bm
        else:
            # Mutual matches are duplicates already present in `united`.
            matches = np.fromiter(
                match(united[ra], united[dec], bm[ra], bm[dec], radius=radius),
                dtype=[("idx_united", int), ("idx_bm", int)])
            logger.info("Found {} sources matches".format(matches.size))
            logger.info("Filtering...")
            # Append only the rows of `bm` that did not match.
            clean_mask = ~np.in1d(np.arange(bm_len), matches["idx_bm"])
            united = np.append(united, bm[clean_mask])
    return united
| {"/test_bmdiff.py": ["/bmdiff.py"]} |
71,829 | carpyncho/bmdiff | refs/heads/master | /test_bmdiff.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import numpy as np
import bmdiff
bmdiff.logger.setLevel(logging.INFO)
def test_difference():
    """Regression test: bmdiff.difference against pre-computed fixtures
    in data/ (requires the data files; row-wise exact comparison)."""
    print("TESTING DIFFERENCE")
    inputbm = bmdiff.read_bm("data/ibm.dat", "k")
    filters = [
        bmdiff.read_bm(flt, band="k")
        for flt in ("data/flt0.dat", "data/flt1.dat")]
    expected = np.loadtxt("data/diff.dat")
    diff = bmdiff.difference(inputbm, filters, band="k")
    # Compare each structured row (as a plain tuple) to the expected row.
    for de, ee in zip(diff, expected):
        np.testing.assert_array_equal(
            np.asarray(de.tolist()), ee)
def test_union():
    """Regression test: bmdiff.union against pre-computed fixtures in
    data/ (requires the data files; row-wise exact comparison)."""
    print("TESTING UNION")
    bms = filters = [
        bmdiff.read_bm(flt, band="k")
        for flt in ("data/union0.dat", "data/union1.dat", "data/union2.dat")]
    expected = np.loadtxt("data/union.dat")
    union = bmdiff.union(bms, band="k")
    for de, ee in zip(union, expected):
        np.testing.assert_array_equal(
            np.asarray(de.tolist()), ee)
| {"/test_bmdiff.py": ["/bmdiff.py"]} |
71,859 | flaxeater/downtime_event_generator | refs/heads/master | /runner.py | from EventEngine import EventEngine
from event_data_service import EventObjects
import owners as o
from owners import PlayerHoldings as p
from sys import exit;
def get_all_events_tables_for_printing():
    """Return every event table in a printable shape:
    [{'name': table_name,
      'table': [{'range': (low, high), 'body': text}, ...]}, ...]
    with tables sorted by name and rows by their lower roll bound."""
    ret = []
    for event in sorted(EventObjects.keys()):
        table=[]
        eventRows=EventObjects[event].eventRows
        # Python 2 only: passes a cmp-style comparator positionally;
        # under Python 3 sorted() takes key= and this call raises.
        # Keys are (low, high) string tuples, so x[0] is the lower bound.
        for key in sorted(eventRows.keys(), lambda x,y:int(x[0]) - int(y[0])):
            table.append({'range':key,'body':eventRows[key][-1]})
        ret.append({'name':event,'table':table})
    return ret
encoded_player_data=[
p('Cain',[o.MILITARY_ACADEMY,o.MERCENARY_COMPANY]),
p('Oz',[o.MAGICAL_ACADEMY,o.ALCHEMIST,o.LIBRARY]),
p('sirus',[o.TEMPLE,o.SHOP,o.HOUSE]),
p('shazaman',[o.THIEVES_GUILD,o.CABAL,o.CULT])
]
engine = EventEngine(encoded_player_data,EventObjects);
events=[]
for x in range(365):
events.append(engine.next());
output="""
<html>
<head>
<title>High Level Campaign Rannick Event List</title>
</head>
<body>
<table border="1">
"""
for table in get_all_events_tables_for_printing():
output+='<tr style="background-color: lightgray;"><td colspan="2">%s</td></tr>' %table['name']
output+="\n";
for row in table['table']:
output+='<tr><td width="200px" align="center">%s</td><td>%s</td></tr>' %(str(row['range']),row['body'])
output+="\n";
output+="</table>"
output+="""
<table border="2">
<thead>
<td colspan="4"><h1>EVENTS</h1></td>
</thead>
<tr><td>Day</td><td>Name</td><td>Origin</td><td>Description</td></tr>
"""
days = 1;
for event in events:
if event:
output+="""
<tr>
<td>%s</td>
<td>%s</td>
<td>%s</td>
<td>%s</td>
</tr>
""" %(str(days),str(event[0]),str(event[1]),str(event[2]))
else:
output+="""
<tr><td>%s</td><td colspan="3"></td></tr>
""" %days
days+=1
output+="</body></html>"
print output
| {"/EventEngine.py": ["/event_data_service.py", "/owners.py"]} |
71,860 | flaxeater/downtime_event_generator | refs/heads/master | /EventEngine.py | from event_data_service import EventObjects
from owners import PlayerHoldings
import owners as o
import random
from random import choice
import sys
class EventTumbler:
    """Daily event trigger with an escalating probability ("pity timer").

    Each call to next() fires an event with probability
    ``current_chance``.  Every eventless day raises the chance by five
    percentage points; a fired event resets it to the 20% base.

    Bug fix: the original comparison was inverted
    (``current_chance <= random.random()``), which fired ~80% of the
    time and made events *less* likely as eventless days accumulated --
    the opposite of what the escalating chance counter implies.
    """

    def __init__(self):
        self.starting_chance = .2
        self.current_chance = self.starting_chance

    def __iter__(self):
        return self

    def next(self):
        # Fire with the current probability; reset on success.
        if random.random() <= self.current_chance:
            self.current_chance = self.starting_chance
            return True
        self.current_chance += .05
        return False
class EventEngine:
    """Produces one (player, holding, event) tuple per simulated day, or
    None on days without an event."""

    def __init__(self, players, events):
        self.p = players
        self.e = events
        self.tumbler = EventTumbler()

    def __iter__(self):
        return self

    def next(self):
        """Advance one day; return (player, eventTable, event) or None.

        Fix: event tables are now looked up on the injected ``self.e``
        instead of the module-global ``EventObjects`` -- the constructor
        argument was previously ignored.
        """
        if not self.tumbler.next():
            return None
        player = choice(self.p)
        eventTable = choice(player.holdings)
        # Roll the generic table first; only a building-specific result
        # redirects to the holding's own table.
        generic_event = self.e[o.GENERIC].getEvent()
        # getEvent() may return the raw die roll (an int) when no row
        # matches, so guard the string test against an AttributeError.
        if isinstance(generic_event, str) and \
                generic_event.startswith("Building-Specific Event:"):
            return player, eventTable, self.e[eventTable].getEvent()
        return player, eventTable, generic_event
| {"/EventEngine.py": ["/event_data_service.py", "/owners.py"]} |
71,861 | flaxeater/downtime_event_generator | refs/heads/master | /owners.py | ALCHEMIST='Alchemist'
BARDIC_COLLEGE='Bardic College'
CABAL='Cabal'
CASTERS_TOWER="Caster's Tower"
CASTLE='Castle'
CULT='Cult'
DANCE_HALL='Dance Hall'
GENERIC='Generic'
GUILDHALL='Guildhall'
HERBALIST='Herbalist'
HOUSE='House'
INN='Inn'
LIBRARY='Library'
MAGICAL_ACADEMY='Magical Academy'
MAGIC_SHOP='Magic Shop'
MENAGERIE='Menagerie'
MERCENARY_COMPANY='Mercenary Company'
MILITARY_ACADEMY='Military Academy'
MONASTERY='Monastery'
SHOP='Shop'
SMITHY='Smithy'
STABLE='Stable'
TAVERN='Tavern'
TEMPLE='Temple'
THEATER='Theater'
THIEVES_GUILD="Thieves' Guild"
class PlayerHoldings:
    """A player's name together with the holdings (buildings) they own."""

    def __init__(self, name, holdings):
        self.name = name
        self.holdings = holdings

    def __str__(self):
        return self.name

    def __repr__(self):
        # Same rendering as str(): just the player's name.
        return str(self)
| {"/EventEngine.py": ["/event_data_service.py", "/owners.py"]} |
71,862 | flaxeater/downtime_event_generator | refs/heads/master | /event_data_service.py | #CONFIG
DATA_FILE='data/events.csv'
#/CONFIG
import csv
from random import choice
import sys
d100=lambda:choice(range(1,101))
class EventTable:
    """One d100 event table: maps (low, high) roll-range strings, as read
    from the CSV, to the remaining columns of the row."""

    def __init__(self):
        # keys: (low, high) bound strings; values: row payload columns
        self.eventRows = {}

    def __str__(self):
        return repr(self.eventRows)

    def __repr__(self):
        return repr(self.eventRows)

    def addRow(self, row):
        """Register a CSV row: row[1]/row[2] are the roll bounds, the
        remaining columns are the payload."""
        self.eventRows[(row[1], row[2])] = row[3:]

    def getEvent(self):
        """Roll d100 and return the matching row's last column.

        NOTE(review): when no range covers the roll, the raw integer roll
        is returned instead of a string; callers doing string operations
        on the result will crash -- verify the tables cover 1-100.
        """
        roll = d100()
        for low, high in self.eventRows:
            if int(low) <= roll <= int(high):
                return self.eventRows[(low, high)][-1]
        return roll
# Module-level side effect: DATA_FILE is parsed at import time into
# EventObjects, a dict mapping table names (CSV column 0) to EventTable
# instances.  NOTE(review): Python 2 only -- reader.next(), dict.has_key
# and the print statements below do not run under Python 3.
EventObjects = {}
with open(DATA_FILE) as csvfile:
    reader = csv.reader(csvfile)
    # Skip the header row.
    reader.next()
    for row in reader:
        if not EventObjects.has_key(row[0]):
            EventObjects[row[0]]=EventTable()
        EventObjects[row[0]].addRow(row)

if __name__ == '__main__':
    # Quick smoke check of the parsed tables.
    print EventObjects['Guildhall']
    print EventObjects
| {"/EventEngine.py": ["/event_data_service.py", "/owners.py"]} |
71,869 | bellyfat/Adaptive-Schedule | refs/heads/main | /markdown_helper.py | import dash_html_components as html
import dash_core_components as dcc
from textwrap import dedent
def markdown_popup():
return html.Div(
id='markdown',
className='modal',
style={'display': 'none'},
children=(
html.Div(
className='markdown-container',
children=[
html.Div(
className='close-container',
children=html.Button(
'Close',
id='markdown_close',
n_clicks=0,
className='closeButton',
),
),
html.Div(
className='markdown-text',
children=[
dcc.Markdown(
children=dedent(
r"""
##### What am I looking at?
This app finds an optimal schedule for $n$ clients, that is, a sequence of arrival epochs $t_1,\dots,t_n$
that minimizes the objective function
$$\sum_{i=1}^{n}\omega\mathbb{E}I_i + (1-\omega)\mathbb{E}W_i.$$
In here, $I_i$ is the idle time prior to the arrival of client $i$ and $W_i$ is the waiting time of client $i$.
The factor $\omega$ reflects the relative importance of both components. The arrival epoch of client $i$ is
denoted by $t_i$, whereas the corresponding interarrival times are denoted by $x_i$.
The minimization is done by performing a so-called phase-type fit. The scheduler inserts the mean and squared
coefficient of variation (that is, the ratio of the variance to the mean) of the service time $B$ of the clients.
For ease, the webapp assumes that all service times $B_i$ are independent and stem from the same distribution $B$.
Given these parameters of the true distribution, we determine the optimal schedule for a phase-type distribution
with the same parameters. It has been shown that the error introduced by this fit can be considered as negligible.
After obtaining the initial schedule, the schedule can be made adaptive by rescheduling at any time point. The
key difference between precalculated and adaptive schedules is that the relevant information at each point of
rescheduling can be used. While the number of clients $n$ that still remain to be scheduled also needs to be updated,
the (relevant) information at each time point is the number of clients that are waiting in the system \#$wis$ and the
time $u$ that the currently served client is in service (if any).
##### More about this app
The purpose of this app is to determine optimal schedules at the start of and during any service process with a single
server given the characteristics of the clients. The schedules are generated in real time using Python. To read
more about it, please send an email to Roshan Mahes ([roshan-1@live.com](mailto:roshan-1@live.com)), Michel Mandjes
([m.r.h.mandjes@uva.nl](mailto:M.R.H.Mandjes@uva.nl)) or Marko Boon ([m.a.a.boon@tue.nl](mailto:m.a.a.boon@tue.nl)).
"""
)
)
],
),
],
)
),
)
| {"/app.py": ["/markdown_helper.py", "/adaptive_scheduling.py"]} |
71,870 | bellyfat/Adaptive-Schedule | refs/heads/main | /adaptive_scheduling.py | import math
import numpy as np
from scipy.stats import poisson
from scipy.optimize import minimize, LinearConstraint # optimization
from scipy.linalg.blas import dgemm, dgemv # matrix multiplication
from scipy.linalg import inv # matrix inversion
from scipy.sparse.linalg import expm # matrix exponential
def find_Salpha(mean, SCV, u):
    """
    Returns the transition rate matrix, initial distribution
    and parameters of the phase-fitted service times given
    the mean, SCV, and the time that the client is in service at time 0.

    SCV < 1 uses a weighted Erlang(K, K+1) mixture; SCV >= 1 uses a
    two-phase hyperexponential.  ``alpha_start`` is the phase
    distribution of the client already in service for *u* time units
    (conditioned on not having finished); ``alpha`` is the fresh-client
    phase distribution.
    """
    # weighted Erlang case
    if SCV < 1:
        # Moment-matched parameters (K phases w.p. p, K+1 w.p. 1-p).
        K = math.floor(1/SCV)
        p = ((K + 1) * SCV - math.sqrt((K + 1) * (1 - K * SCV))) / (SCV + 1)
        mu = (K + 1 - p) / mean
        # Initial dist. of the client in service: phase occupancy after
        # elapsed time u, normalized by the service-time survival B_sf.
        alpha_start = np.zeros((1,K+1))
        B_sf = poisson.cdf(K-1, mu*u) + (1 - p) * poisson.pmf(K,mu*u)
        for z in range(K+1):
            alpha_start[0,z] = poisson.pmf(z,mu*u) / B_sf
        alpha_start[0,K] *= (1 - p)
        # Initial dist. of the other clients: always start in phase 0.
        alpha = np.zeros((1,K+1))
        alpha[0,0] = 1
        # Transition rate matrix: sequential phases at rate mu, with the
        # last transition taken with probability 1-p.
        S = -mu * np.eye(K+1)
        for i in range(K-1):
            S[i,i+1] = mu
        S[K-1,K] = (1-p) * mu
    # hyperexponential case
    else:
        # Balanced-means two-phase fit.
        p = (1 + np.sqrt((SCV - 1) / (SCV + 1))) / 2
        mu1 = 2 * p / mean
        mu2 = 2 * (1 - p) / mean
        # Initial dist. of the client in service, conditioned on survival.
        alpha_start = np.zeros((1,2))
        B_sf = p * np.exp(-mu1 * u) + (1 - p) * np.exp(-mu2 * u)
        alpha_start[0,0] = p * np.exp(-mu1 * u) / B_sf
        alpha_start[0,1] = 1 - alpha_start[0,0]
        # Initial dist. of the other clients.
        alpha = np.zeros((1,2))
        alpha[0,0] = p
        alpha[0,1] = 1 - p
        # Two independent exponential phases.
        S = np.zeros((2,2))
        S[0,0] = -mu1
        S[1,1] = -mu2

    return S, alpha_start, alpha
def create_Sn(S, alpha_start, alpha, N):
    """
    Creates the matrix Sn as given in Kuiper, Kemper, Mandjes, Sect. 3.2.

    Builds the (N+1)m x (N+1)m block-bidiagonal generator: S on the
    diagonal, and exit-rate x restart-distribution coupling blocks on
    the superdiagonal.

    Modernized: the deprecated ``np.matrix`` construction was replaced
    by plain ndarray operations (``sum(S.T)`` equals the row sums of S).
    """
    # Exit-rate column vector t = -S 1 (rate of leaving each phase).
    exit_rates = -S.sum(axis=1).reshape(-1, 1)
    B = exit_rates @ alpha
    m = S.shape[0]
    S_new = np.zeros(((N+1)*m, (N+1)*m))

    # First two diagonal blocks and the alpha_start coupling block.
    S_new[0:m, 0:m] = S
    S_new[m:2*m, m:2*m] = S
    # NOTE(review): for i == 1 the loop below overwrites this
    # alpha_start coupling block with B (original behavior, preserved
    # here) -- verify against Kuiper et al., Sect. 3.2.
    S_new[0:m, m:2*m] = exit_rates @ alpha_start

    # Remaining diagonal and coupling blocks.
    for i in range(1, N+1):
        S_new[i*m:(i+1)*m, i*m:(i+1)*m] = S
        S_new[(i-1)*m:i*m, i*m:(i+1)*m] = B

    return S_new
def Transient_EIEW(x, alpha_start, alpha, Sn, Sn_inv, omega, wis):
    """
    Evaluates the cost function given all parameters.

    Computes omega * E[idle] + (1-omega) * E[wait] for the schedule of
    interarrival times *x*, with *wis* clients already waiting at time 0.
    In here, we used the FORTRAN dgem-functions
    instead of @ for efficient matrix multiplication.
    """
    N = x.shape[0]
    m = alpha.shape[1]

    # Running phase distribution / defect vector of the system state.
    P_alpha_F = alpha_start
    # Idle time can be at most the scheduled interarrival mass.
    cost = omega * np.sum(x)

    # cost of clients already entered (only waiting time)
    for i in range(1,wis+1):
        # -1 * row-sum of P @ Sn^{-1} is the expected absorption time.
        cost += (omega - 1) * np.sum(dgemm(1, P_alpha_F, Sn_inv[0:i*m,0:i*m]))
        F = 1 - np.sum(P_alpha_F)
        P_alpha_F = np.hstack((np.matrix(P_alpha_F), alpha * F))

    # cost of clients to be scheduled
    for i in range(wis+1,N+wis+1):
        # Evolve the state over the next interarrival time x[i-wis-1].
        exp_Si = expm(Sn[0:i*m,0:i*m] * x[i-wis-1])
        cost += float(dgemv(1, dgemm(1, P_alpha_F, Sn_inv[0:i*m,0:i*m]), np.sum(omega * np.eye(i*m) - exp_Si,1)))
        P = dgemm(1, P_alpha_F, exp_Si)
        # F is the probability the system emptied before the arrival;
        # the newcomer then starts a fresh service (distribution alpha).
        F = 1 - np.sum(P)
        P_alpha_F = np.hstack((np.matrix(P), alpha * F))

    return cost
def Transient_IA(SCV, u, omega, N, x0, wis=0, tol=1e-4):
    """
    Computes the optimal schedule.
    wis = waiting in system (number of clients already present at time 0).

    Returns (x, fval): the optimal interarrival times for the N - wis
    clients still to be scheduled, and the attained cost.
    """
    # sojourn time distribution transition rate matrices
    S, alpha_start, alpha = find_Salpha(1, SCV, u)
    Sn = create_Sn(S, alpha_start, alpha, N)
    Sn_inv = inv(Sn)

    # minimization
    # NOTE(review): `not x0` assumes x0 is a list/None; a non-empty
    # numpy array here would raise on truth testing -- callers pass [].
    if not x0:
        x0 = np.array([1.5 + wis] + [1.5] * (N - wis - 1))

    Trans_EIEW = lambda x: Transient_EIEW(x, alpha_start, alpha, Sn, Sn_inv, omega, wis)
    # Nonnegativity of all interarrival times.
    lin_cons = LinearConstraint(np.eye(N - wis), 0, np.inf)
    optimization = minimize(Trans_EIEW, x0, constraints=lin_cons, method='SLSQP', tol=tol)

    x = optimization.x
    fval = optimization.fun

    return x, fval
| {"/app.py": ["/markdown_helper.py", "/adaptive_scheduling.py"]} |
71,871 | bellyfat/Adaptive-Schedule | refs/heads/main | /app.py | # imports
from logging import PlaceHolder
import dash
import dash_table as dt
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
from markdown_helper import markdown_popup
import numpy as np
import pandas as pd
from adaptive_scheduling import Transient_IA
import plotly.graph_objs as go
# import plotly.io as pio
# pio.templates.default = 'plotly_white'
# initial table & figure
df = pd.DataFrame({r'Client (\(i\))': [''],
r'Interarrival time (\(x_i\))': ['Computing appointment schedule...'],
r'Arrival time (\(t_i\))': ['']})
df = df.to_dict('records')
no_fig = {
'layout': {
'xaxis': {'visible': False},
'yaxis': {'visible': False},
'paper_bgcolor': 'rgba(0,0,0,0)',
'plot_bgcolor': 'rgba(0,0,0,0)'
}
}
columns = [{'name': [f'Appointment Schedule', k], 'id': k} for k in df[0].keys()]
# main app
# app = dash.Dash(__name__, external_scripts=['https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js?config=TeX-MML-AM_CHTML'])
app = dash.Dash(__name__, external_scripts=['https://cdn.jsdelivr.net/npm/mathjax@2.7.9/MathJax.js?config=TeX-MML-AM_CHTML'])
# app = dash.Dash(__name__, external_scripts=['https://cdn.jsdelivr.net/npm/mathjax@3.2.0/es5/node-main.min.js?config=TeX-MML-AM_CHTML'])
app.title = 'Adaptive Schedule'
server = app.server
def app_layout():
app_layout = html.Div(id='main',children=[
dcc.Interval(id='interval-updating-graphs', interval=1000, n_intervals=0),
html.Div(id='top-bar'),
html.Div(
className='container',
children=[
html.Div(
id='left-side-column',
className='eight columns',
children=[
html.H4('Adaptive Schedule'),
html.P(
['This webapp solves the minimization problem' +
r'$$\min_{t_1,\dots,t_n}\omega \sum_{i=1}^{n}\mathbb{E}I_i + (1 - \omega)\sum_{i=1}^{n}\mathbb{E}W_i,$$' +
r'where \(I_i\) and \(W_i\) are the idle time and waiting time associated to client \(i\), respectively. ' +
r'The sequence of arrival epochs \(t_1,\dots,t_n\) is called the schedule. ' +
r'By entering the state information \((k, u)\), ' +
'this application can be used to generate adaptive schedules. ',
'Click ', html.A('here', id='learn-more-button', n_clicks=0), ' to learn more.']
),
html.P('Please fill in the parameters below.'),
html.Table(
id='my_table',
children=
# Header
[html.Tr([html.Td(''), html.Th('Parameter'), html.Th('Value'), html.Th('Range'), html.Th('Explanation')])] +
# Body
# [html.Tr([html.Td('test', style={'text-align': 'center'})])] +
[html.Tr([html.Th('Schedule Characteristics'),
html.Td(r'\(\omega\)'),
dcc.Input(id='omega', min=0, max=1, type='number', placeholder="e.g. '0.5'"),
html.Td(r'\((0,1)\)'),
html.Td('idle : waiting time')])] +
[html.Tr([html.Td(''),
html.Td(r'\(n\)'),
dcc.Input(id='n', min=1, max=20, step=1, type='number', placeholder="e.g. '4'"),
html.Td(r'\(\mathbb{N}_{\leq 20}\)'),
html.Td('#clients to serve')])] +
[html.Tr([html.Th('Patient Characteristics'),
html.Td(r'\(\mathbb{E}B_i \)'),
html.Div(dcc.Input(id='mean', type='text', placeholder="e.g. '1' or '(1,1,1,1)'")), ### TODO: eval, list
html.Td(r'\([0,\infty)^n\)'),
html.Td('mean(s)')])] +
[html.Tr([html.Td(''),
html.Td(r'\(\mathbb{S}(B_i)\)'),
html.Div(dcc.Input(id='SCV', type='text', placeholder="e.g. '(0.8,1.1,0.9,1.0)'")), ### TODO: eval, list
html.Td(r'\([0.2,2]^n\)'),
html.Td('SCV(s)')])] +
[html.Tr([html.Th('State Information'),
html.Td(r'\(k\)'),
dcc.Input(id='wis', min=0, max=5, step=1, type='number', placeholder="optional, e.g. '2'"), ### TODO: wis should be k!!!
html.Td(r'\(\mathbb{N}_{\leq 5}\)'), ### TODO: optional -> empty == 0
html.Td('#clients in system')])] +
[html.Tr([html.Td(''),
html.Td(r'\(u\)'),
dcc.Input(id='u', min=0, type='number', placeholder="optional, e.g. '0.33'"), ### TODO: optional -> empty == 0
html.Td(r'\([0,\infty)\)'),
html.Td('elapsed service time')])] +
[html.Tr([html.Th('Optional Constraints'),
html.Td(r'\(k\)'),
dcc.Input(id='wis2', min=0, max=5, step=1, type='number', placeholder="optional, e.g. '2'"), ### TODO: wis should be k!!!
html.Td(r'\([0,\infty)\times \dots\times [0,\infty)\)'), ### TODO: optional -> empty == 0
html.Td('fixed arrivals')])] +
[html.Tr([html.Td(''),
html.Td(r'\(u\)'),
dcc.Input(id='u2', min=0, type='number', placeholder="optional, e.g. '0.33'"), ### TODO: optional -> empty == 0
html.Td(r'\([0,\infty)\)'),
html.Td('first arrival moment')])], style={'width': '100%'}
),
html.Button(id='submit-button', n_clicks=0, children='Compute Appointment Schedule', style={'font-style': 'italic'}),
]
),
html.Div(
id='right-side-column',
className='dynamic schedule',
children=[
html.Div(
dt.DataTable(
id='schedule_df',
columns=columns,
data=df,
merge_duplicate_headers=True,
style_header={'textAlign': 'center', 'backgroundColor': '#f9f9f9', 'fontWeight': 'bold'},
style_cell={'textAlign': 'center'},
style_data_conditional=[
{
'if': {'row_index': 'odd'},
'backgroundColor': '#f9f9f9'
},
{
'if': {'state': 'selected'},
'backgroundColor': '#dce9f9',
'border': '1px solid #242582',
}
],
),
),
html.Div([
dcc.Graph(
id='graph_df',
figure = no_fig,
config={'displayModeBar': False},
)], className='graphic'),
],
),
],
),
markdown_popup(),
])
return app_layout
# learn more popup
@app.callback(
    Output('markdown', 'style'),
    [Input('learn-more-button', 'n_clicks'), Input('markdown_close', 'n_clicks')],
)
def update_click_output(button_click, close_click):
    """Toggle the learn-more modal: visible after the 'learn more' link
    fired the callback, hidden otherwise (including the close button)."""
    ctx = dash.callback_context
    trigger = ctx.triggered[0]['prop_id'].split(".")[0] if ctx.triggered else ""
    visible = trigger == 'learn-more-button'
    return {'display': 'block' if visible else 'none'}
# schedule & graph
@app.callback(
    [Output('schedule_df', 'columns'), Output('schedule_df', 'data'), Output('graph_df', 'figure')],
    [Input('submit-button', 'n_clicks')],
    [State('mean', 'value'), State('SCV', 'value'), State('omega', 'value'),
    State('n', 'value'), State('wis', 'value'), State('u', 'value')],
)
def updateTable(n_clicks, mean, SCV, omega, n, wis, u):
    """Recompute the optimal schedule from the form state and return the
    refreshed table columns, table rows and interarrival-time figure.

    Bug fix: the state inputs `wis` and `u` are optional and arrive as
    None when the user leaves them empty; the original crashed on
    ``n + None`` and ``None / mean``.  They now default to 0.
    """
    # SECURITY: eval() on user-supplied text executes arbitrary code.
    # Flagged for replacement with ast.literal_eval (which handles the
    # documented "(0.8,1.1,...)" tuple inputs); left as-is here.
    mean = eval(mean)
    SCV = eval(SCV)
    # Optional state inputs default to "empty system".
    wis = wis or 0
    u = u or 0

    N = n + wis
    tol = None if N < 15 else 1e-4
    # Work on the time scale of a unit-mean service time.
    u = u / mean

    if not u and not wis:
        # Empty system: the first client arrives at time 0, so only
        # N - 1 interarrival times need to be optimized.
        N = N - 1
        x, y = Transient_IA(SCV, u, omega, N, [], wis, tol)
        x = np.pad(x, (1,0))
    else:
        x, y = Transient_IA(SCV, u, omega, N, [], wis, tol)

    # Back to the original time scale.
    x = x * mean

    df = pd.DataFrame({r'Client (\(i\))': list(np.arange(1,len(x)+1)),
                       r'Interarrival time (\(x_i\))': [f'{np.round(i,4):.4f}' for i in x],
                       r'Arrival time (\(t_i\))': [f'{np.round(i,4):.4f}' for i in np.cumsum(x)]})

    figure = go.Figure(data=[go.Scatter(x=df.iloc[:,0], y=x, marker={'color': '#242582'})],
                       layout=go.Layout(
                           title=go.layout.Title(text=r'$\text{Optimal interarrival times } (x_i)$', x=0.5, xanchor='center'),
                           xaxis={'title': r'$\text{Client } (i)$', 'tick0': 1, 'dtick': 1, 'range': [0.7,len(x) + 0.3]},
                           yaxis={'title': r'$\text{Interarrival time } (x_i)$'},
                           paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)'))

    columns = [{'name': [f'Appointment Schedule (Cost: {y * mean:.4f})', k], 'id': k} for k in df.columns]

    return columns, df.to_dict('records'), figure
# Dash accepts a callable layout; it is re-invoked on every page load
# (app_layout is presumably the layout factory defined above -- confirm).
app.layout = app_layout
# Start the built-in development server when executed directly.
if __name__ == '__main__':
    app.run_server()
| {"/app.py": ["/markdown_helper.py", "/adaptive_scheduling.py"]} |
71,919 | blu-barry/notion-google-task-integration | refs/heads/main | /Main.py |
# imports for google client
from __future__ import print_function
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from GoogleClient import TaskLists
from GoogleClient import Tasks
from GoogleClient import GoogleClient
import pandas as pd
import requests
import json
from datetime import datetime
import configparser
from getKeys import get_Keys
from NotionPyApi import Database, Page, Properties
from NotionFunctions import readDatabaase, createNotionPage, updateNotionPage
# Load secrets/identifiers from the local key file (see getKeys.py).
user_keys = get_Keys()
NOTION_DATABASE_ID = user_keys["NOTION_DATABASE_ID"]
NOTION_INTEGRATION_TOKEN = user_keys["NOTION_INTEGRATION_TOKEN"]
GOOGLE_TO_DO_DAILY_TASKLIST_ID = user_keys["GOOGLE_TO_DO_DAILY_TASKLIST_ID"]
# Debug output; the integration token itself is deliberately kept unprinted.
print(NOTION_DATABASE_ID)
#print(NOTION_INTEGRATION_TOKEN)
print(GOOGLE_TO_DO_DAILY_TASKLIST_ID)
#checks if a task in notion exists in google
# -> also need to check if the task is archieved
def convert_notion_status(notion_status):
    """Map a Notion checkbox value to a Google Tasks status string.

    True -> "completed", False -> "needsAction"; any other value falls
    through and yields None (implicit return), as before.
    """
    if notion_status == True:
        return "completed"
    if notion_status == False:
        return "needsAction"
def convert_google_status(google_status):
    """Map a Google Tasks status string to a Notion checkbox boolean.

    "completed" -> True, "needsAction" -> False; any other value falls
    through and yields None (implicit return), as before.
    """
    if google_status == "completed":
        return True
    if google_status == "needsAction":
        return False
# establish class instances of clients
# set up notion client
# Step 1: pull every page (task row) from the Notion database.
pDatabasejson = Database(NOTION_DATABASE_ID,
                NOTION_INTEGRATION_TOKEN)
pDatabasejson.query_database() #print(pDatabasejson.results)
all_notion_task_json = pDatabasejson.results # all_task_json is a list of pages ( which are tasks - the properties portion of page hopes the data for each column)
# Index Notion tasks by their stored Google task id for O(1) matching below.
all_notion_tasks_by_nKey = {}
all_notion_tasks_by_google_key = {}
created_goog_tasks = {}
for task in all_notion_task_json: # task is a dictionary that reprents the task row but also holds the properties which is another dictionary that holds the task stuff
    task_page = Page(task, NOTION_INTEGRATION_TOKEN) # retrieves the page object for the the task
    task_properties = Properties(task_page.properties, task['id'], task_page.last_edited_time, task_page.archived, task, NOTION_INTEGRATION_TOKEN) # retrieves the task page object's properties object
    print("line 55 - notion task due date")
    print(task_properties.Task_Name)
    print(task_properties.Due_Date)
    try:
        # Tasks never pushed to Google have Google_Task_ID == None.
        # NOTE(review): a None dict key does not raise TypeError -- confirm what
        # failure mode this guard was actually meant to catch.
        all_notion_tasks_by_google_key[task_properties.Google_Task_ID] = task_properties
        #print("line 57 main") # stores all notion task properties objects by google task id key
        #print(task_properties.Google_Task_ID)
    except TypeError:
        print("google_task_id field is empty")
        #print(task_properties)
# set up google client
# Step 2: pull the Google task list (including deleted/hidden so archive state syncs).
gClient = GoogleClient() # google client object
gClient.set_up_service() # sets up oauth connection with google api -- NOTE(review): __init__ already did this; second setup looks redundant
daily_to_do = TaskLists(gClient.service.tasks().list(tasklist=GOOGLE_TO_DO_DAILY_TASKLIST_ID, showDeleted = True, showHidden = True).execute()) # gets daily to do list from google tasks
#print(daily_to_do.items)
all_google_tasks_by_google_key = {} # key google_task_id, val object -> holds all of the google task objects in a dictionary
# Step 3: reconcile -- newer side wins; Google-only tasks get a new Notion page.
for task in daily_to_do.items: # puts all of the google task objects into a dictionary with the id as the key and the object as the value
    print("task")
    print(task)
    daily_task = Tasks(task)
    all_google_tasks_by_google_key[daily_task.id] = daily_task
    if daily_task.id in all_notion_tasks_by_google_key: # task already exists in notion -> if notion task google key matches the google task id then update the older task with the newer tasks info
        print("all_notion_tasks_by_google_key[daily_task.id].last_edited_time")
        print(all_notion_tasks_by_google_key[daily_task.id].last_edited_time)
        print("all_google_tasks_by_google_key[daily_task.id].updated")
        print(all_google_tasks_by_google_key[daily_task.id].updated)
        print("notion task version was edited last")
        # NOTE(review): string comparison of timestamps -- valid only if both sides
        # are ISO-8601 in the same timezone; confirm formats match.
        if all_notion_tasks_by_google_key[daily_task.id].last_edited_time > all_google_tasks_by_google_key[daily_task.id].updated: # notion task version was edited last
            updated_task_body = {'status' : convert_notion_status(all_notion_tasks_by_google_key[daily_task.id].Status),
                                 'kind' : "tasks#task",
                                 'title' : all_notion_tasks_by_google_key[daily_task.id].Task_Name,
                                 'due' : all_notion_tasks_by_google_key[daily_task.id].Due_Date,
                                 'notes' : all_notion_tasks_by_google_key[daily_task.id].Notes,
                                 'deleted' : all_notion_tasks_by_google_key[daily_task.id].archived,
                                 'id': all_notion_tasks_by_google_key[daily_task.id].Google_Task_ID}
            print("updated_task_body")
            print(updated_task_body)
            temp_new_task_google = gClient.service.tasks().update(tasklist=GOOGLE_TO_DO_DAILY_TASKLIST_ID, task = daily_task.id, body = updated_task_body).execute() # update google task on google
        else: # google task is newer -> update notion task
            print("google task is newer -> update notion task")
            print("all_notion_tasks_by_google_key[daily_task.id].Notes")
            print(all_notion_tasks_by_google_key[daily_task.id].Notes)
            updateNotionPage(all_notion_tasks_by_google_key[daily_task.id].id, all_google_tasks_by_google_key[daily_task.id].id, all_google_tasks_by_google_key[daily_task.id].notes, all_google_tasks_by_google_key[daily_task.id].title, all_google_tasks_by_google_key[daily_task.id].due, convert_google_status(all_google_tasks_by_google_key[daily_task.id].status), all_google_tasks_by_google_key[daily_task.id].deleted)
    else:# Google task does not exist in Notion
        print("Google task does not exist in Notion")
        print(all_google_tasks_by_google_key[daily_task.id])
        createNotionPage(all_google_tasks_by_google_key[daily_task.id].id, all_google_tasks_by_google_key[daily_task.id].notes, all_google_tasks_by_google_key[daily_task.id].title, all_google_tasks_by_google_key[daily_task.id].due, convert_google_status(all_google_tasks_by_google_key[daily_task.id].status), all_google_tasks_by_google_key[daily_task.id].deleted)
| {"/Main.py": ["/GoogleClient.py", "/getKeys.py", "/NotionPyApi.py", "/NotionFunctions.py"], "/NotionFunctions.py": ["/getKeys.py"]} |
71,920 | blu-barry/notion-google-task-integration | refs/heads/main | /getKeys.py | import json
import ast
from keyPath import PATH as PATH
def get_Keys():
    """Load and return the API-key dict from the secrets file at PATH.

    Bug fix: set_Keys() double-encodes (json.dumps then json.dump), so
    json.load yields a JSON *string* that must be decoded a second time.
    The old ast.literal_eval cannot parse JSON literals such as true/false/
    null; json.loads can.  A correctly single-encoded file (a dict straight
    from json.load) is also accepted.
    """
    with open(PATH) as f:
        data = json.load(f)
    parsed_data = json.loads(data) if isinstance(data, str) else data
    print("***SECRET***")
    return parsed_data
def set_Keys(dictionary_of_keys):
    """Persist the given API-key dict to secret_keys.json."""
    # NOTE(review): json.dumps followed by json.dump double-encodes the payload:
    # the file ends up holding a quoted JSON string rather than a JSON object.
    # get_Keys() compensates by decoding twice, so changing this in isolation
    # would break the round trip.
    keys_dict = dictionary_of_keys
    print(keys_dict)
    secret_keys_json = json.dumps(keys_dict)
    with open('secret_keys.json', 'w') as json_file:
        json.dump(secret_keys_json, json_file)
def convert_to_RFC_datetime(year=1900, month=1, day=1, hour=0, minute=0):
    """Return an RFC 3339 / ISO-8601 UTC timestamp string ('...Z') for the given fields.

    Bug fix: this module never imported datetime, so calling this raised
    NameError; import locally to leave the module's top-level imports untouched.
    """
    import datetime
    dt = datetime.datetime(year, month, day, hour, minute, 0, 0).isoformat() + 'Z'
    return dt
#set_Keys({"NOTION_DATABASE_ID" : "", "NOTION_INTEGRATION_TOKEN" : "", "GOOGLE_TO_DO_DAILY_TASKLIST_ID" : ""})
#get_Keys()
| {"/Main.py": ["/GoogleClient.py", "/getKeys.py", "/NotionPyApi.py", "/NotionFunctions.py"], "/NotionFunctions.py": ["/getKeys.py"]} |
71,921 | blu-barry/notion-google-task-integration | refs/heads/main | /GoogleClient.py | from __future__ import print_function
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
class GoogleClient():
    """Thin wrapper owning an authenticated Google Tasks API service object."""

    # Full read/write OAuth scope for Google Tasks (per the official quickstart).
    # Bug fix: SCOPES was referenced below but never defined, raising NameError
    # on the first-time authorisation path.
    SCOPES = ['https://www.googleapis.com/auth/tasks']

    def __init__(self):
        # Performs the OAuth dance (may open a browser) and builds the service.
        self.service = self.set_up_service()

    def set_up_service(self):
        """Authenticate via OAuth and return a built 'tasks' v1 service object."""
        # quickstart from goog api - begin
        creds = None
        # token.json caches the user's access and refresh tokens; it is created
        # automatically when the authorization flow completes the first time.
        if os.path.exists('token.json'):
            creds = Credentials.from_authorized_user_file('token.json', self.SCOPES)
        # If there are no (valid) credentials available, refresh or let the user log in.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', self.SCOPES)
                creds = flow.run_local_server(port=0)
            # Save the credentials for the next run (covers the refresh path too).
            with open('token.json', 'w') as token:
                token.write(creds.to_json())
        return build('tasks', 'v1', credentials=creds)
class TaskLists():
    """Parsed tasks.list API response.

    Bug fix: 'nextPageToken' is absent from the final page of results (and
    'items' is omitted for an empty list), which previously raised KeyError;
    all fields are now read defensively with .get().
    """
    def __init__(self, results_json):
        self.kind = results_json.get('kind', "")
        self.etag = results_json.get('etag', "")
        # None on the last (or only) page of results.
        self.nextPageToken = results_json.get('nextPageToken')
        self.items = results_json.get('items', [])  # list of raw task dicts
class Tasks():
    """One Google Task resource, with every field defaulted when absent.

    Replaces eleven copy-pasted try/except KeyError blocks with a single
    data-driven loop; behaviour (field names and defaults) is unchanged.
    """

    # field name -> default used when the API response omits that field
    _DEFAULTS = {
        'kind': "",
        'id': "",
        'etag': "",
        'title': "",
        'updated': "",
        'selfLink': "",
        'position': "",
        'status': "",
        'due': "",
        'notes': "",
        'deleted': False,  # the API omits 'deleted' unless the task was deleted
    }

    def __init__(self, items):
        for field, default in self._DEFAULTS.items():
            setattr(self, field, items.get(field, default))
"""
gClient = GoogleClient() # google client object
gClient.set_up_service()
results = gClient.service.tasks().list(tasklist='MDgyMDQzMTQxMzIyMTgyMDcxMzI6MDow', maxResults=10).execute()
print(results)"""
| {"/Main.py": ["/GoogleClient.py", "/getKeys.py", "/NotionPyApi.py", "/NotionFunctions.py"], "/NotionFunctions.py": ["/getKeys.py"]} |
71,922 | blu-barry/notion-google-task-integration | refs/heads/main | /NotionPyApi.py | import requests
#emulate the structure of the js apis
class Database():
    """Parent node: wraps one Notion database and caches its query results."""

    def __init__(self, database_id, integration_token):
        # Endpoint pieces used for syncing this database.
        self.database_id = database_id
        self.notion_db_url = "https://api.notion.com/v1/databases/"
        self.integration_token = integration_token
        self.cmplt_database_url = self.notion_db_url + self.database_id
        #self.data_json = self.query_database(database_url, integration_token)
        # Local caches mirroring the structure of the Notion JS API.
        self.database = {}
        self.all_task_ids_dict = {}
        self.all_task_ids_list = []
        # Populated by query_database(): raw response JSON, its outer 'object'
        # type, and the list of result pages.
        self.data_json = None
        self.dObject = None
        self.results = None

    def query_database(self):
        """POST a query for all pages in the database and cache the response."""
        query_url = self.cmplt_database_url + "/query"
        # The Notion API requires POST (not GET) for database queries.
        # NOTE(review): no 'Bearer ' prefix here, unlike NotionFunctions.py --
        # confirm which form the stored token uses.
        response = requests.post(query_url, headers={"Authorization": f"{self.integration_token}", "Notion-Version": "2021-08-16" })
        payload = response.json()
        self.data_json = payload
        print(self.data_json)
        self.dObject = payload['object']
        self.results = payload['results']
        #return(response.json())

    def initialize_results(self, data_json_results):
        """Return the pages of a raw query result as a plain list."""
        return [page for page in data_json_results]
class Page():
    """Child node: one Notion page (a task row) parsed from the raw API JSON."""

    def __init__(self, page_data_json, integration_token):
        self.page = {}
        self.notion_pg_url = "https://api.notion.com/v1/pages/"
        self.integration_token = integration_token
        # Core page fields lifted from the raw response.
        self.pObject = page_data_json['object']
        self.id = page_data_json['id']
        self.created_time = page_data_json['created_time']
        self.last_edited_time = page_data_json['last_edited_time']
        self.cover = page_data_json['cover']
        self.parent = page_data_json['parent']['database_id']
        self.archived = page_data_json['archived']
        self.properties = page_data_json['properties']

    def _headers(self):
        # Shared request headers.  NOTE(review): no 'Bearer ' prefix, unlike
        # NotionFunctions.py -- confirm which form the stored token uses.
        return {"Authorization": f"{self.integration_token}",
                "Notion-Version": "2021-08-16",
                "Content-Type": "application/json"}

    def update_page(self):
        """PATCH this page's property values back to Notion.

        Bug fixes: requests has no PATCH attribute (use requests.patch); several
        names were referenced without self. (NameError); 'archived' belongs in
        the JSON body, not as a requests keyword.  The attribute values used
        here (Due_Date, Task_Name, ...) are supplied by the Properties subclass.
        NOTE(review): Notion expects rich_text/title values as arrays of rich
        text objects -- confirm the payload shape against the API reference.
        """
        url = self.notion_pg_url + self.id
        body = {'archived': self.archived,
                'properties': {'Due Date': {'date': {'start': self.Due_Date}},
                               'Google Task ID': {'rich_text': self.Google_Task_ID},
                               'Task Name': {'title': [{'text': {'content': self.Task_Name},
                                                        'plain_text': self.Task_Name}]},
                               'Status': {'checkbox': self.Status},
                               'Notes': {'rich_text': self.Notes}}}
        return requests.patch(url, headers=self._headers(), json=body)

    def create_page(self, Due_Date, Google_Task_ID, Task_Name, Notes, archived_input, status):
        """POST a new page built from the given field values.

        Bug fixes: requests has no POST attribute (use requests.post); the body
        mixed undefined self.* attributes with the parameters -- the parameters
        are now used throughout; creation posts to the /v1/pages endpoint.
        TODO(review): Notion page creation also requires a 'parent' key in the
        body -- confirm against the API reference before relying on this method.
        """
        body = {'archived': archived_input,
                'properties': {'Due Date': {'date': {'start': Due_Date}},
                               'Google Task ID': {'rich_text': Google_Task_ID},
                               'Task Name': {'title': [{'text': {'content': Task_Name},
                                                        'plain_text': Task_Name}]},
                               'Status': {'checkbox': status},
                               'Notes': {'rich_text': Notes}}}
        return requests.post(self.notion_pg_url, headers=self._headers(), json=body)
# do I need an archieve/delete function?
"""Add function to delete (archieve) a page"""
class Properties(Page):
    """Typed view of a task page's property columns (extends Page)."""

    def __init__(self, properties_data_json, page_id, last_edited_time, archived, page_data_json, integration_token):
        self.properties_dict = {}
        self.last_edited_time = last_edited_time
        self.archived = archived
        self.id = page_id
        # Task_Name must be assigned before set_due_date(), which prints it.
        self.Task_Name = properties_data_json['Task Name']['title'][0]['plain_text']
        self.Notes = self.set_Notes(properties_data_json['Notes']['rich_text'])
        self.Due_Date = self.set_due_date(properties_data_json['Due Date'])
        self.Google_Task_ID = self.set_Google_Task_ID(properties_data_json['Google Task ID']['rich_text'])
        self.Status = properties_data_json['Status']['checkbox']
        self.page_data_json = page_data_json
        Page.__init__(self, page_data_json, integration_token)

    def set_due_date(self, properties_data_due_date):
        """Return the due date's start string, or None when the cell is empty."""
        try:
            temp_due_date = properties_data_due_date['date']
            print("temp_due_date")
            print(self.Task_Name)
            print(properties_data_due_date['date'])
            ddate = temp_due_date['start']
            print("ddate")
            print(ddate)
        except TypeError:
            # An empty cell gives 'date': None; subscripting None raises TypeError.
            print("error in set due date")
            print(self.Task_Name)
            ddate = None
            print("ddate")
            print(ddate)
        return ddate

    def set_Google_Task_ID(self, properties_data_json_Google_Task_ID_rich_text):
        """Return the stored Google task id, or None when the rich_text list is empty."""
        try:
            return properties_data_json_Google_Task_ID_rich_text[0]['plain_text']
        except IndexError:
            return None

    def set_Notes(self, properties_data_json_Notes_rich_text):
        """Return the notes text, or '' when the cell is empty.

        Bug fix: an empty Notes cell arrives as [] and raised an uncaught
        IndexError (only TypeError was handled); catch both.
        """
        try:
            return properties_data_json_Notes_rich_text[0]['plain_text']
        except (IndexError, TypeError):
            return ""
| {"/Main.py": ["/GoogleClient.py", "/getKeys.py", "/NotionPyApi.py", "/NotionFunctions.py"], "/NotionFunctions.py": ["/getKeys.py"]} |
71,923 | blu-barry/notion-google-task-integration | refs/heads/main | /NotionFunctions.py |
import re
import requests
import json
from datetime import datetime
from getKeys import get_Keys
# Load secrets from the local key file (see getKeys.py).
user_keys = get_Keys()
NOTION_DATABASE_ID = user_keys["NOTION_DATABASE_ID"]
NOTION_INTEGRATION_TOKEN = user_keys["NOTION_INTEGRATION_TOKEN"]
# Shared headers for every Notion REST call: bearer token plus pinned API version.
headers = {
    'Authorization' : 'Bearer '+NOTION_INTEGRATION_TOKEN,
    'Content-Type' : "application/json",
    'Notion-Version': '2021-08-16'
}
def readDatabaase():
    """GET the database object (schema/metadata) and print the HTTP status.

    Bug fix: called bare request() (NameError) -- use requests.request.
    Now also returns the Response so callers can inspect it (previously None,
    which remains backward-compatible for callers that ignore the return).
    (Name keeps its original typo for import compatibility.)
    """
    readUrl = f'https://api.notion.com/v1/databases/{NOTION_DATABASE_ID}'
    results = requests.request("GET", readUrl, headers=headers)
    print(results.status_code)
    return results
def createNotionPage(google_id, notes, task_title, due_date, status, archived):
    """Create a Notion page (task row) mirroring a Google task.

    The 'Due Date' property is included only when due_date is a valid
    ISO-8601 timestamp (Google returns '' for undated tasks).  *archived*
    is accepted for signature parity but is not sent on creation.
    The two near-identical 40-line payloads were collapsed into one base
    payload plus a conditional Due Date entry; request body is unchanged.
    api info: https://developers.notion.com/reference/page#property-value-object
    """
    print(google_id)
    print(notes)
    print(task_title)
    print(due_date)
    print(status)
    print(archived)
    # Google returns [] for empty notes; Notion wants a string.
    if notes == []:
        notes = ""
    createUrl = 'https://api.notion.com/v1/pages'
    newPageData = {
        "parent": {"database_id": NOTION_DATABASE_ID},
        "properties": {
            "Task Name": {
                "title": [{
                    "type": "text",
                    "text": {"content": task_title}
                }]
            },
            "Google Task ID": {
                "rich_text": [{"text": {"content": google_id}}]
            },
            "Notes": {
                "rich_text": [{"text": {"content": notes}}]
            },
            "Status": {"checkbox": status}
        }
    }
    if validate_iso8601(due_date):
        newPageData["properties"]["Due Date"] = {"date": {"start": due_date}}
    else:
        print("validate_iso8601 is false - createNotionPage")
    data = json.dumps(newPageData)
    results = requests.request("POST", createUrl, headers=headers, data=data)
    print(results.status_code)
    print(results.json())
def updateNotionPage(page_id, google_id, notes, task_title, due_date, status, archived):
    """PATCH an existing Notion page with the given Google-task field values.

    The 'Due Date' property is included only when due_date is a valid
    ISO-8601 timestamp.  NOTE(review): *archived* is accepted but never sent
    (same as before) -- confirm whether archive state should be synced here.
    The two near-identical payload branches were collapsed into one base
    payload plus a conditional Due Date entry; request body is unchanged.
    api info: https://developers.notion.com/reference/page#property-value-object
    """
    print(page_id)
    print(google_id)
    print(notes)
    print(task_title)
    print(due_date)
    print(status)
    print(archived)
    # Google returns [] for empty notes; Notion wants a string.
    if notes == []:
        notes = ""
    createUrl = f'https://api.notion.com/v1/pages/{page_id}'
    newPageData = {
        "parent": {"database_id": NOTION_DATABASE_ID},
        "properties": {
            "Task Name": {
                "title": [{
                    "type": "text",
                    "text": {"content": task_title}
                }]
            },
            "Google Task ID": {
                "rich_text": [{"text": {"content": google_id}}]
            },
            "Notes": {
                "rich_text": [{"text": {"content": notes}}]
            },
            "Status": {"checkbox": status}
        }
    }
    if validate_iso8601(due_date):
        newPageData["properties"]["Due Date"] = {"date": {"start": due_date}}
    else:
        print("validate_iso8601 is False - updateNotionPage")
    data = json.dumps(newPageData)
    print("headers")
    print(headers)
    results = requests.request("PATCH", createUrl, headers=headers, data=data)
    print(results.status_code)
def validate_iso8601(date):
    """Return True iff *date* is a full ISO-8601 / RFC 3339 datetime string.

    Bug fix: the bare 'except: pass' silently swallowed every error; the only
    expected failure is TypeError from matching a non-string (e.g. None), so
    catch exactly that.
    """
    regex = r'^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$'
    try:
        return re.match(regex, date) is not None
    except TypeError:
        return False
| {"/Main.py": ["/GoogleClient.py", "/getKeys.py", "/NotionPyApi.py", "/NotionFunctions.py"], "/NotionFunctions.py": ["/getKeys.py"]} |
71,926 | PGCHM/python_ndvi | refs/heads/master | /plot_me.py | # Dr. M. Disney, Aug 2011
# C. Peng, Sep 2013
from matplotlib.pyplot import *
import matplotlib.pyplot as plt
def plot(data, title, xlabel, ylabel, colorbar_label, ofile, min, max):
    """Render a 2-D array as an image with a labelled colourbar.

    data: 2-D array to display; min/max: colour-scale (clim) limits -- NOTE:
    these parameter names shadow the builtins min/max inside this function.
    ofile: output filename; if falsy the figure is shown interactively instead.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111)
    cax = ax.imshow(data)
    cax.set_clim(min, max)
    ax.set_title('%s'%(title))
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    cbar = fig.colorbar(cax, orientation='vertical')
    cbar.set_label(colorbar_label)
    if ofile:
        # will save as emf, eps, pdf, png, ps, raw, rgba, svg, svgz
        plt.savefig(ofile)
    else:
        plt.show()
    plt.close()
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
71,927 | PGCHM/python_ndvi | refs/heads/master | /do_ndvi.py | #!/usr/bin/env python
# http://pysclint.sourceforge.net/pyhdf/example.html
# Dr. M. Disney, Aug 2011
# C. Peng, Sep 2013
import sys
import argparse
import numpy as np
from pyhdf.SD import SD, SDC
from matplotlib.pyplot import *
import matplotlib.pyplot as plt
# local files
import ndvi, hdf_utils, plot_me
def do_ndvi():
    """Read a MODIS reflectance HDF file, compute NDVI, and optionally plot it.

    All settings come from the module-level argparse 'options'; the literals
    below are the defaults used when a flag is not supplied.
    """
    ifile = 'MOD09GA.A2004154.h19v10.005.2008152141836.hdf'
    title = ifile
    ofile = 0
    red = 'sur_refl_b01_1'   # MODIS band 1 (red) SDS name
    nir = 'sur_refl_b02_1'   # MODIS band 2 (NIR) SDS name
    xmin = 0
    xmax = 500
    ymin = 0
    ymax = 500
    xlabel = 'Col'
    ylabel = 'Row'
    # min and max values for colourbar on plot
    ndvimin = 0.1
    ndvimax = 0.8
    # default plot value
    plot = 1
    if options.ifile:
        ifile = options.ifile
    if options.ofile:
        ofile = options.ofile
    if options.sdsName:
        sdsName = options.sdsName
    # read HDF file
    md = hdf_utils.r(ifile)
    if options.v:
        # o/p file datasets
        sys.stderr.write('ifile: %s\n'%(ifile))
        md.datasets()
    sds_red = md.select(red)
    sds_nir = md.select(nir)
    # Bug fix: argparse stores None when a flag is absent, so test 'is not None'
    # rather than truthiness -- otherwise an explicit 0 (e.g. '--xmin 0') was
    # silently ignored.
    if options.xmin is not None:
        xmin = options.xmin
    if options.ymin is not None:
        ymin = options.ymin
    if options.xmax is not None:
        xmax = options.xmax
    if options.ymax is not None:
        ymax = options.ymax
    # scale ndvi colourbar limits?
    if options.ndvimin is not None:
        ndvimin = options.ndvimin
    if options.ndvimax is not None:
        ndvimax = options.ndvimax
    if options.xlabel:
        xlabel = options.xlabel
    if options.ylabel:
        ylabel = options.ylabel
    if options.title:
        title = options.title
    # check that the red and NIR grids have identical dimensions
    if (sds_red.dimensions('XDim').values()[0][0] != sds_nir.dimensions('XDim').values()[0][0]) or (sds_red.dimensions('YDim').values()[0][0] != sds_nir.dimensions('YDim').values()[0][0]):
        sys.stderr.write('%s: dimension error - x, y dims of SDS %s and %s do not match\n'%(ifile,red,nir))
    # get extract if required, and cast to float32 (any explicit bound triggers the extract)
    if any(v is not None for v in (options.xmin, options.ymin, options.xmax, options.ymax)):
        sds_red = np.float32(sds_red[xmin:xmax,ymin:ymax])
        sds_nir = np.float32(sds_nir[xmin:xmax,ymin:ymax])
        n = np.zeros([xmax-xmin,ymax-ymin])
    else:
        xdim = sds_red.dimensions('XDim').values()[0][0]
        ydim = sds_red.dimensions('YDim').values()[0][0]
        sds_red = np.float32(sds_red[0:xdim, 0:ydim])
        sds_nir = np.float32(sds_nir[0:xdim, 0:ydim])
        n = np.zeros([xdim, ydim])
    # calculate ndvi from bands 1, 2
    n = ndvi.ndvi(sds_red, sds_nir)
    # clip to the valid NDVI range
    np.clip(n,-1.,1.,out=n)
    if options.v:
        # o/p file datasets
        sys.stderr.write('ifile: %s\n'%(ifile))
        md.datasets()
    if options.plot:
        plot = 1
    if plot:
        plot_me.plot(n, title, xlabel, ylabel, 'NDVI', ofile, ndvimin, ndvimax)
def main ():
    # Entry point: all configuration is read from the module-level 'options'.
    do_ndvi()
# parser - note how this is virtually the same as for hdf_read_example.py - reduce, reuse, recycle!
if __name__ == "__main__":
    # Parse command-line flags into the module-level 'options' used by do_ndvi().
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--ifile", help="read in data from FILE", metavar="FILE")
    parser.add_argument("-o", "--ofile", help="output file: emf, eps, pdf, png, ps, raw, rgba, svg, svgz", metavar="FILE")
    parser.add_argument("-s", "--sdsName", help="SDS name")
    parser.add_argument("-t", "--title", help="title")
    parser.add_argument("--xlabel", help="xlabel name")
    parser.add_argument("--ylabel", help="ylabel name")
    parser.add_argument("--xmin", type=int, help="xmin")
    parser.add_argument("--xmax", type=int, help="xmax")
    parser.add_argument("--ymin", type=int, help="ymin")
    parser.add_argument("--ymax", type=int, help="ymax")
    parser.add_argument("--ndvimin", type=float, help="ndvimin")
    parser.add_argument("--ndvimax", type=float, help="ndvimax")
    parser.add_argument("-p", "--plot", action="store_true", help="switch plotting on")
    parser.add_argument("-v", action="store_true", help="switch verbose on")
    options = parser.parse_args()
    main()
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
71,928 | PGCHM/python_ndvi | refs/heads/master | /ndvi.py | # called within do_ndvi
def ndvi(red, nir):
    """Normalised Difference Vegetation Index: (nir - red) / (nir + red).

    Works element-wise on numpy arrays as well as on plain scalars.
    """
    return (nir - red) / (nir + red)
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
71,929 | PGCHM/python_ndvi | refs/heads/master | /hdf_utils.py | from pyhdf.SD import SD, SDC
import numpy as np
# http://pysclint.sourceforge.net/pyhdf/example.html
# http://hdfeos.org/software/pyhdf.php
# Dr. M. Disney, Aug 2011
# C. Peng, Sep 2013
def r(ifile):
    """Open the HDF4 file *ifile* read-only and return the pyhdf SD object."""
    # read hd file; caller is responsible for ending access when done
    md = SD(ifile, SDC.READ)
    return(md)
def w(ofile, data, sds_base_name, nbands, xrange, yrange, units, rowdimname, coldimname):
    "hdf_utils.w(ofile, 'sds_base_name', nbands, xdim, ydim, 'units', 'rowdimname', 'coldimname')"
    "Function to create hdf file ofile and write data to it"
    # Dr. M. Disney, Sep 2011
    # create and write hdf file via SD; TRUNC overwrites any existing file
    dataopf = SD(ofile, SDC.WRITE | SDC.CREATE | SDC.TRUNC)
    # one FLOAT32 SDS per band, named sds_base_name + band index
    for i in np.arange(0,nbands):
        sds = dataopf.create(sds_base_name + str(i), SDC.FLOAT32, (xrange, yrange))
        sds.name = '' + str(i)
        sds.units = units
        sds.setfillvalue(0)
        dim1 = sds.dim(0)
        dim1.setname(rowdimname)
        dim2 = sds.dim(1)
        dim2.setname(coldimname)
        # data is a stack of 2-D bands when nbands > 1, otherwise a single 2-D array
        if nbands > 1:
            sds[:] = np.float32(data[i])
        else:
            sds[:] = np.float32(data)
        sds.endaccess()
    dataopf.end()
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
71,930 | PGCHM/python_ndvi | refs/heads/master | /hdf_read_example.py | #!/usr/bin/env python
# http://pysclint.sourceforge.net/pyhdf/example.html
# Dr. M. Disney, Aug 2011
# C. Peng, Sep 2013
import sys
import argparse
import numpy as np
from pyhdf.SD import SD, SDC
from matplotlib.pyplot import *
import matplotlib.pyplot as plt
def hdf_read_example():
    """Read an SDS from a MODIS HDF file, take an extract and plot it.

    Defaults below apply when the corresponding command-line flag (module-level
    argparse 'options') is not supplied.
    """
    ifile = 'C:\\Users\\Janie\\Downloads\\MOD09GA.A2004154.h19v10.005.2008152141836.hdf'
    sdsName = 'sur_refl_b01_1'
    ofile = ''
    xmin = 0
    xmax = 2000
    ymin = 0
    ymax = 2000
    # default plot value
    plot = 1
    if options.ifile:
        ifile = options.ifile
    if options.ofile:
        ofile = options.ofile
    # Bug fix: argparse stores None when a flag is absent; truthiness also
    # rejected a legitimate explicit 0, so compare against None instead.
    if options.xmin is not None:
        xmin = options.xmin
    if options.xmax is not None:
        xmax = options.xmax
    if options.ymin is not None:
        ymin = options.ymin
    if options.ymax is not None:
        ymax = options.ymax
    if options.sdsName:
        sdsName = options.sdsName
    # read sds
    md = SD(ifile, SDC.READ)
    sds = md.select(sdsName)
    if options.v:
        # o/p file datasets
        sys.stderr.write('ifile: %s\n'%(ifile))
        md.datasets()
    if options.plot:
        # o/p file datasets
        plot = 1
    if plot:
        ex = sds[xmin:xmax,ymin:ymax]
        # clamp to the valid reflectance range before display
        np.clip(ex,0.,10000,out=ex)
        imshow(ex)
        plt.colorbar(drawedges="True")
        plt.title('%s'%(sdsName))
        plt.ylabel("Row")
        plt.xlabel("Col")
        if ofile:
            # will save as emf, eps, pdf, png, ps, raw, rgba, svg, svgz
            plt.savefig(ofile)
        else:
            plt.show()
def main ():
    # Entry point: reads the module-level 'options' parsed below.
    hdf_read_example()
# parser - this but checks to see what arguments are passed to the script and then handles them accordingly
# The resulting values are then stored in an object called 'options', which we can then access in our program
# above as options.ifile, options.ofile, ptions.xmin etc.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--ifile", help="read in data from FILE", metavar="FILE")
    parser.add_argument("-o", "--ofile", help="output file: emf, eps, pdf, png, ps, raw, rgba, svg, svgz", metavar="FILE")
    parser.add_argument("-s", "--sdsName", help="SDS name")
    parser.add_argument("--xmin", type=int, help="xmin")
    parser.add_argument("--xmax", type=int, help="xmax")
    parser.add_argument("--ymin", type=int, help="ymin")
    parser.add_argument("--ymax", type=int, help="ymax")
    parser.add_argument("-p", "--plot", action="store_true", help="switch plotting on")
    parser.add_argument("-v", action="store_true", help="switch verbose on")
    options = parser.parse_args()
    main()
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
71,931 | PGCHM/python_ndvi | refs/heads/master | /plot_pixel.py | #!/usr/bin/env python
# Dr. M. Disney, Aug 2011
# C. Peng, Sep 2013
import sys
import numpy as np
from pyhdf.SD import SD, SDC
from matplotlib.pyplot import *
import matplotlib.pyplot as plt
import matplotlib.font_manager
from optparse import OptionParser
# local files
import hdf_utils
# def modis_swap(roh): return(np.array([roh[2], roh[3], roh[0], roh[1], roh[4], roh[5], roh[6]]))
def plot_pixel ():
    "plot_pixel(): module to plot a pixel from two images (orig and fwd model)"
    # --- load MODIS wavebands and the four end-member spectra (kernels) ---
    wbfile = 'wb.modis.dat'
    wb = np.genfromtxt(wbfile)
    nbands = len(wb)
    scale = 10000  # MODIS reflectance scale factor
    kfiles = ['ksoil.dat.modis', 'kdrygrass.dat.modis', 'kleaf.dat.modis', 'koffset.dat.modis']
    nparams = len(kfiles)
    kdat = np.zeros((nparams,nbands))
    for i in np.arange(nparams):
        # column 1 of each kernel file holds the reflectance values
        kdat[i] = np.genfromtxt(kfiles[i], unpack=True)[1]
    orig = 'C:\\Users\\Janie\\Downloads\\MOD09GA.A2004154.h19v10.005.2008152141836.hdf'
    # fwd = 'MOD09GA.A2004154.h19v10.005.2008152141836.hdf.fwd'
    opdat = 'op.plot.dat'
    saveplot = 'op.plot.dat.png'
    origbase = 'sur_refl_b0'
    if options.wbfile:
        wbfile = options.wbfile
        wb = np.genfromtxt(wbfile)
        # swap for plotting
        # NOTE(review): the default-path wb (loaded above) is never swapped,
        # while origdata always is -- confirm whether that asymmetry is intended.
        wb = modis_swap(wb)
    if options.orig:
        orig = options.orig
    if options.fwd:
        # NOTE(review): fwd is assigned but unused (fwdip below is commented out).
        fwd = options.fwd
    if options.saveplot:
        saveplot = options.saveplot
    if options.opdat:
        opdat = options.opdat
    if options.origbase:
        origbase = options.origbase
    if options.fwdbase:
        fwdbase = options.fwdbase
    origip = hdf_utils.r(orig)
    # fwdip = hdf_utils.r(fwd)
    # pixel location defaults
    x = 1000
    y = 300
    if options.x:
        x = options.x
    if options.y:
        y = options.y
    origdata = np.zeros(nbands)
    # read in pixel values (bands are 1-indexed in the SDS names), scaled to [0,1]
    for i in np.arange(0,nbands):
        origsds = origip.select(origbase + str(i+1) + '_1')
        origdata[i] = np.float32(origsds[x,y])/scale
    origdata = modis_swap(origdata)
    # do the inversion - np.linalg.lstsq() returns an array of 4 things, the most important of which are the first two:
    # params[0] = an array of our 4 model parameters
    # params[1] = sum of residuals i.e. the sum of the differences between original data the fwd model values
    # so RMSE (root mean square error) = np.sqrt(params[1]/nbands)
    # NOTE(review): newer numpy warns unless rcond is passed explicitly.
    params = np.linalg.lstsq(np.transpose(kdat), origdata)
    # calculate our fwd model values and RMSE (multiply by 100 to get %)
    fwddata = np.dot(params[0],kdat)
    rmse = np.sqrt(params[1]/nbands) * 100.
    # set up a plot figure, and a subplot ax that we can modify
    fig = plt.figure()
    ax = plt.subplot(111)
    # plot the original data and fwd modelled values
    ax.plot(wb, origdata, 'ro', label=r'$\rho_{orig}$')
    ax.plot(wb, fwddata, 'k+', label=r'$\rho_{fwd}$')
    # if we want them, plot our end member spectra too, to compare
    if options.e:
        for i in np.arange(nparams):
            ax.plot(wb, kdat[i], label='%s'%(kfiles[i]))
    #box = ax.get_position()
    #ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    # plotting niceties. Set the y axis limits, x and y axis labels, a title and then add some
    # text to the plot showing the RMSE and the parameter values
    ax.set_ylim(0, 0.9)
    ax.set_xlabel('$\lambda$ (nm)')
    ax.set_ylabel(r'Reflectance, $\rho$')
    ax.set_title('Simple linear mixture model')
    ax.text(1250, 0.1, 'RMSE (%%) = %f'%(rmse))
    ax.text(1250, 0.05, 'params = %.2f, %.2f, %.2f, %.2f'%(params[0][0], params[0][1], params[0][2], params[0][3]))
    # set the font size and then scale the legend accordingly, and put it at 0.5, 0.95 (in relative plot coordinates)
    # i.e. 50% along (x axis) and 5% from the top (y axis)
    prop = matplotlib.font_manager.FontProperties(size=10)
    ax.legend(loc='upper center', bbox_to_anchor=(0.5, 0.95), ncol=3, fancybox=True, shadow=True, prop=prop)
    # do we want to save to file or plot to the screen?
    if options.saveplot:
        plt.savefig(saveplot)
    else:
        plt.show()
    # plt.close(1)
    # optionally dump (waveband, original, modelled) triples to a text file
    if options.opdat:
        tmp = np.zeros((nbands, 3))
        for i in np.arange(0,nbands):
            tmp[i, 0] = wb[i]
            tmp[i, 1] = origdata[i]
            tmp[i, 2] = fwddata[i]
            # print "%i %f %f"%(wb[i], origdata[i], fwddata[i])
        np.savetxt(opdat, tmp)
def modis_swap(roh):
    """Reorder a 7-band MODIS reflectance vector into ascending-wavelength
    order, i.e. map band order
        660, 840, 485, 570, 1240, 1650, 2220
    to
        485, 570, 660, 840, 1240, 1650, 2220
    """
    # fancy-index with the fixed band permutation instead of listing
    # each element by hand
    order = [2, 3, 0, 1, 4, 5, 6]
    return np.asarray(roh)[order]
def main ():
    """Script entry point: plot original vs. forward-modelled reflectance
    for a single pixel (reads the module-global `options` set in __main__)."""
    plot_pixel()
if __name__ == "__main__":
    # Command-line interface. NOTE: `options` is left as a module-level
    # global that plot_pixel() reads directly.
    # NOTE(review): optparse is deprecated since Python 2.7 in favour of
    # argparse (which the companion do_linmod script already uses).
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-o", "--original", dest="orig", help="original MODIS reflectance file", metavar="FILE")
    parser.add_option("-f", "--forward", dest="fwd", help="fwd model reflectance file", metavar="FILE")
    parser.add_option("-w", "--wavebands", dest="wbfile", help="File containing MODIS wavebands", metavar="FILE")
    parser.add_option("-p", "--plot", dest="saveplot", help="output plotfile: emf, eps, pdf, png, ps, raw, rgba, svg, svgz", metavar="FILE")
    parser.add_option("-d", "--data", dest="opdat", help="output orig & fwd model data", metavar="FILE")
    parser.add_option("--fwdbase", dest="fwdbase", help="fwd model SDS name")
    parser.add_option("--origbase", dest="origbase", help="orig refl SDS name")
    # image-extent options (parsed but presumably consumed elsewhere —
    # TODO confirm they are used by plot_pixel)
    parser.add_option("--xmin", type="int", dest="xmin", help="xmin")
    parser.add_option("--xmax", type="int", dest="xmax", help="xmax")
    parser.add_option("--ymin", type="int", dest="ymin", help="ymin")
    parser.add_option("--ymax", type="int", dest="ymax", help="ymax")
    # single-pixel location to plot
    parser.add_option("-x", type="int", dest="x", help="x location")
    parser.add_option("-y", type="int", dest="y", help="y location")
    parser.add_option("-e", action="store_true", help="plot end members")
    (options, args) = parser.parse_args()
    main()
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
#!/usr/bin/env python
# Dr. M. Disney, Aug 2011
# C. Peng, Sep 2013
import sys
import argparse
import numpy as np
from pyhdf.SD import SD, SDC
from matplotlib.pyplot import *
import matplotlib.pyplot as plt
# local files
import hdf_utils, plot_me, linmod_me
def do_linmod():
    """Read a MODIS reflectance HDF file and invert a linear spectral
    mixture model (soil / dry grass / green leaf / offset end-members)
    against the reflectance data, per pixel.

    Reads configuration from the module-global `options` (set in
    __main__), writes forward-model / RMSE / parameter HDF outputs via
    hdf_utils.w, and renders PNG maps via plot_me.plot.
    """
    # default input/output file names (overridden by command-line options)
    ifile = 'MOD09GA.A2004154.h19v10.005.2008152141836.hdf'
    ofile = ifile + str('.fwd')
    rmsefig = ifile + str('.rmse.png')
    paramfig = ifile + str('.params')
    # reflectance value scalar for modis data (stored ints -> reflectance)
    scale = 10000
    # Default multi-linear model in this example is:
    # roh(lambda) = p1 * soil(lambda) + p2 * dry_grass(lambda) + p3 * green_leaf(lambda) + p4 * offset(lambda)
    # where offset is a scaling term (effectively a sort of overall brightness). We can add (or remove) end-members
    # as we see fit (see below).
    # spectral end-member file defaults list - file format is 2 columns: lambda, roh(lambda)
    # kfiles = ['ksoil.dat.modis', 'kleaf.dat.modis']
    kfiles = ['ksoil.dat.modis', 'kdrygrass.dat.modis', 'kleaf.dat.modis', 'koffset.dat.modis']
    # default no. of spectral endmembers (params)
    nparams = len(kfiles)
    # default no. of bands - modis bands
    nbands = 7
    # default sds base name
    sdsName = 'sur_refl_b0'
    # default extents of image (pixel window to invert)
    xmin = 0
    xmax = 250
    ymin = 0
    ymax = 250
    xlabel = 'Col'
    ylabel = 'Row'
    # end-member matrix: one row per end-member, one column per band
    kdat = np.zeros((nparams,nbands))
    # get wb (waveband centres) from first end-member file
    wb = np.genfromtxt(kfiles[0], unpack=True)[0]
    # read in end-member spectra (second column of each file)
    for i in np.arange(nparams):
        kdat[i] = np.genfromtxt(kfiles[i], unpack=True)[1]
    # We can now plot the end-member files interactively if we want to, as we have a wb array (wb) and
    # each of the endmember files - you don't need the labels and legend, but these can be useful.....
    #for i in np.arange(nparams):
    #    plt.plot(wb, kdat[i], label='%s'%(kfiles[i]))
    #plt.xlabel('Wavelength ($\lambda$)')
    #plt.ylabel(r'Reflectance, $\mathrm{\rho}$')
    #plt.legend()
    #plt.show()
    #plt.savefig('endmembers.png')
    # default plot value
    # NOTE(review): `plot` is assigned but never used below — confirm
    # whether plotting was meant to be conditional on it.
    plot = 1
    # override defaults from command-line options where supplied
    if options.ifile:
        ifile = options.ifile
    if options.ofile:
        ofile = options.ofile
    if options.rmsefile:
        # NOTE(review): rmsefile is captured but never used afterwards;
        # the RMSE HDF output below is written to `rmsefig` (the .png
        # name) — this looks like a mix-up, verify intent.
        rmsefile = options.rmsefile
    if options.rmsefig:
        rmsefig = options.rmsefig
    if options.paramfig:
        paramfig = options.paramfig
    if options.sdsName:
        sdsName = options.sdsName
    # read HDF file (project helper; returns an object with select()/datasets())
    hdfip = hdf_utils.r(ifile)
    if options.v:
        # o/p file datasets
        sys.stderr.write('ifile: %s\n'%(ifile))
        hdfip.datasets()
    # xdim sds_red.dimensions('XDim').values()[0][0]
    # ydim sds_red.dimensions('YDim').values()[0][0]
    # do we have extract i.e. xmin, xmax, ymin, ymax?
    if options.xmin:
        xmin = options.xmin
    if options.ymin:
        ymin = options.ymin
    if options.xmax:
        xmax = options.xmax
    if options.ymax:
        ymax = options.ymax
    if options.xlabel:
        xlabel = options.xlabel
    if options.ylabel:
        ylabel = options.ylabel
    # window size; NOTE: `xrange` shadows the Python 2 builtin of the
    # same name (harmless here since np.arange is used for iteration)
    xrange = xmax - xmin
    yrange = ymax - ymin
    # input and output data arrays
    data = np.zeros((nbands,xrange,yrange))       # observed reflectance
    dataop = np.zeros((nbands,xrange,yrange))     # forward-modelled reflectance
    rmseop = np.zeros((xrange,yrange))            # per-pixel RMSE (%)
    paramsop = np.zeros((nparams,xrange,yrange))  # per-pixel model params
    for i in np.arange(1,nbands+1):
        # sort out name of SDS (bands are 1-based in the HDF file)
        sds = hdfip.select(sdsName + str(i) + '_1')
        # read required extract of SDS into data array AND divide by scalar
        data[i-1] = np.float32(sds[xmin:xmax,ymin:ymax])/scale
    # now do inversion for each pixel to calculate parameters
    for x in np.arange(xrange):
        for y in np.arange(yrange):
            # pass obs, kerns to linmod BUT remember to change order of refl data to wb order
            rho = modis_swap(data[:,x,y])
            # this is the line that does the inversion
            pp = np.linalg.lstsq(np.transpose(kdat), rho)
            # returns: array containing:
            # pp[0] = params
            # pp[1] = sum of residuals, so rmse = np.sqrt(pp[1]/nbands)
            # pp[2] = rank of matrix (nparams)
            # Singular values of param vector (p[0]) from inversion procedure. A cut-off for these values rcond, can be specified
            # np.linalg.lstsq(np.transpose(kdat), rho, rcond=0.01) so that values < rcond will be set to 0.
            # so get params, rmse and fwd model
            params = pp[0]
            rmse = np.sqrt(pp[1]/nbands)
            fwd = np.dot(params, kdat)
            dataop[:,x,y] = fwd
            rmseop[x,y] = rmse * 100.
            paramsop[:,x,y] = params
        # keep us informed of progress per row
        if options.v:
            sys.stderr.write('\b\b\b\b\b\b(%d%%)'%(int(100.*x/xrange)))
    # open/create o/p files and write data to hdf datasets
    hdf_utils.w(ofile, dataop, 'fwd_model', nbands, xrange, yrange, 'reflectance', 'row', 'col')
    hdf_utils.w(rmsefig, rmseop, 'RMSE', 1, xrange, yrange, '%', 'row', 'col')
    hdf_utils.w(paramfig, paramsop, 'params', nparams, xrange, yrange, 'val.', 'row', 'col')
    # now o/p the data to plot (png files if required) - scale each for plotting i.e. mean +/- 1 sd for rmse set lo to 0 if below.
    hi, lo = hilo(rmseop, 1.)
    lo = 0 if lo < 0 else lo
    plot_me.plot(rmseop, rmsefig, xlabel, ylabel, 'RMSE %', rmsefig, lo, hi)
    hi, lo = hilo(dataop, 2.)
    lo = 0 if lo < 0 else lo
    for i in np.arange(nbands):
        plot_me.plot(dataop[i], ofile + 'b: ' + str(i), xlabel, ylabel, 'refl.', ofile + '.b' + str(i) + '.png', lo, hi)
    for i in np.arange(nparams):
        # parameter maps get their own per-parameter display range
        hi, lo = hilo(paramsop[i], 2)
        plot_me.plot(paramsop[i], 'param: ' + str(i), xlabel, ylabel, 'param val.', paramfig + '.' + str(i) + '.png', lo, hi)
def modis_swap(rho):
    '''
    modis_swap(rho): reorder a 7-element reflectance array from MODIS
    band order into ascending-wavelength order, i.e. swap:
    660, 840, 485, 570, 1240, 1650, 2220
    to
    485, 570, 660, 840, 1240, 1650, 2220
    '''
    # band permutation expressed as an index list rather than
    # element-by-element construction
    wb_order = (2, 3, 0, 1, 4, 5, 6)
    return np.array([rho[k] for k in wb_order])
def hilo(array, n):
    '''
    hilo(array, n): return (array_mean + n*stdev, array_mean - n*stdev).

    Args:
        array: numpy array of values (any shape).
        n: number of standard deviations either side of the mean.

    Returns:
        (hi, lo) tuple of floats.
    '''
    # compute mean and scaled stdev once each (the original evaluated
    # array.mean() and np.std(array) twice apiece)
    mean = array.mean()
    spread = n * np.std(array)
    return (mean + spread, mean - spread)
def main ():
    """Script entry point: run the per-pixel linear mixture model
    inversion (reads the module-global `options` set in __main__)."""
    do_linmod()
if __name__ == "__main__":
    # Command-line interface. NOTE: `options` is left as a module-level
    # global that do_linmod() reads directly.
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--ifile", help="read in data from FILE", metavar="FILE")
    parser.add_argument("-o", "--ofile", help="output fwd model file", metavar="FILE")
    parser.add_argument("-r", "--rmsefile", help="rmse file", metavar="FILE")
    parser.add_argument("-p", "--paramfig", help="param fig base name: emf, eps, pdf, png, ps, raw, rgba, svg, svgz", metavar="FILE")
    parser.add_argument("-m", "--rmsefig", help="rmse fig: emf, eps, pdf, png, ps, raw, rgba, svg, svgz", metavar="FILE")
    parser.add_argument("-s", "--sdsName", help="SDS name")
    # axis labels for the output maps
    parser.add_argument("--xlabel", help="xlabel name")
    parser.add_argument("--ylabel", help="ylabel name")
    # pixel window to extract and invert
    parser.add_argument("--xmin", type=int, help="xmin")
    parser.add_argument("--xmax", type=int, help="xmax")
    parser.add_argument("--ymin", type=int, help="ymin")
    parser.add_argument("--ymax", type=int, help="ymax")
    parser.add_argument("-v", action="store_true", help="switch verbose on")
    options = parser.parse_args()
    main()
| {"/do_ndvi.py": ["/ndvi.py", "/hdf_utils.py", "/plot_me.py"], "/plot_pixel.py": ["/hdf_utils.py"]} |
"""
Knuth-Plass Text Justification (Dynamic Programming)
Benjamin Yates, 2016
Inspired by my favorite of the MIT OpenCourseWare lectures
for 6.006 Introduction to Algorithms:
https://www.youtube.com/watch?v=ENyox7kNKeY
The last line of the paragraph remains unjustified. To
do this, the last line does not contribute to the total
badness, and the output pass does not expand it. I'm not
sure how it is implemented in LaTeX, but this method seems
to work well.
Because this is a fixed-width justification routine, it may
distribute uneven amounts of padding as it expands lines.
A PRNG seeded with the line-text itself is used to give
stable, uniform distribution of the padding spaces.
This docstring was formatted by this code.
"""
from random import Random
DEMO_WIDTH = 60
MOBY = """
No, when I go to sea, I go as a simple sailor, right before the mast,
plumb down into the forecastle, aloft there to the royal mast-head. True,
they rather order me about some, and make me jump from spar to spar, like
a grasshopper in a May meadow. And at first, this sort of thing is
unpleasant enough. It touches one's sense of honour, particularly if you
come of an old established family in the land, the Van Rensselaers, or
Randolphs, or Hardicanutes. And more than all, if just previous to putting
your hand into the tar-pot, you have been lording it as a country
schoolmaster, making the tallest boys stand in awe of you. The transition
is a keen one, I assure you, from a schoolmaster to a sailor, and requires
a strong decoction of Seneca and the Stoics to enable you to grin and bear
it. But even this wears off in time.
""".strip()
def main():
    """Demo entry point: justify the MOBY paragraph at DEMO_WIDTH
    columns and print the result to stdout."""
    # parenthesized call form works under both Python 2 (where the bare
    # `print x` statement form was used originally) and Python 3
    print(KnuthPlassFormatter(DEMO_WIDTH).format(MOBY))
class KnuthPlassFormatter(object):
    """Fixed-width full-justification formatter using Knuth-Plass-style
    dynamic programming to choose line breaks that minimise total
    'badness' (cubed slack) across the paragraph. The final line is
    left unjustified and contributes no badness."""

    def __init__(self, width):
        # target line width, in characters
        self.width = width

    def format(self, text):
        """
        Format a paragraph string as fully justified text
        Args:
            text: one paragraph of text to format
        Returns:
            formatted text string
        """
        # per-call state: memoised badness scores and break links
        self._memo = {}
        self._parent = {}
        self.words = text.split()
        self.best_break(0, len(self.words))
        return '\n'.join(self.wrapped_lines())

    def packed(self, words):
        """Fit set of words as tightly as possible (single spaces)."""
        return ' '.join(words)

    def expanded(self, words, width):
        """Fit set of words in <width> chars, padding gaps as needed.

        Padding is distributed round-robin across the inter-word gaps,
        then shuffled with a PRNG seeded on the line's own text so the
        output is stable across runs but visually uniform."""
        if len(words) == 1:
            # a single word cannot be padded between gaps
            return words[0]
        unspaced_words = ''.join(words)
        length = len(unspaced_words)
        # total space characters to distribute (includes the normal
        # single inter-word spaces)
        space_left = width - length
        gaps = [0 for _ in range(len(words) - 1)]
        while space_left:
            for idx, gap in enumerate(gaps):
                if not space_left:
                    break
                gaps[idx] += 1
                space_left -= 1
        # stable, random distribution of spaces
        Random(unspaced_words).shuffle(gaps)
        gaps.append(0)  # one empty gap for zip()
        spaces = (gap * ' ' for gap in gaps)
        return ''.join(word + space for word, space in zip(words, spaces))

    def badness(self, i, j):
        """LaTeX 'badness' function for packing words[i:j] on one line:
        infinite if the line overflows, otherwise slack cubed."""
        # fun: try adding a non-negative value to length
        length = len(self.packed(self.words[i:j]))  # + 20
        if length > self.width:
            return float('inf')
        else:
            return (self.width - length) ** 3.0

    def best_break(self, i, j):
        """
        dynamic program for finding the best locations to place
        line-breaks in a paragraph when producing fully justified
        text
        Args:
            i: start word index, inclusive
            j: end word index, exclusive (len(self.words) at top level)
        Returns:
            best (minimum) badness score found
        Side-effect:
            _memo & _parent are updated with scores and links
            for finding the final path through the graph after
            all line-breaks are found.
        """
        try:
            # memo hit: this (i, j) subproblem was already solved
            return self._memo[(i, j)]
        except KeyError:
            pass
        if j == len(self.words):
            length = len(self.packed(self.words[i:j]))
            if length <= self.width:
                # base-case: this is the last line.
                # it doesn't contribute badness
                self._memo[(i, j)] = 0
                self._parent[i] = j
                return 0
        # evaluate every possible break position
        vals = []
        for n in reversed(range(i, j)):
            total_badness = self.badness(i, n + 1) + self.best_break(n + 1, j)
            vals.append((total_badness, n + 1))
        # choose the break with the minimum total badness
        best_val, best_idx = min(vals, key=lambda pair: pair[0])
        self._memo[(i, j)] = best_val
        self._parent[i] = best_idx
        return best_val

    def wrapped_lines(self):
        """
        render a paragraph of justified text using the graph
        constructed by best_break()
        Yields:
            one formatted line at a time (the last line is packed,
            not expanded)
        """
        a = 0
        b = self._parent[0]
        while True:
            words = self.words[a:b]
            if b == len(self.words):
                # this is the last line, so
                # we don't justify the text
                yield self.packed(words)
                return
            yield self.expanded(words, self.width)
            # follow the parent links to the next break
            a = b
            b = self._parent[a]
class GreedyFormatter(KnuthPlassFormatter):
    """First-fit greedy variant: packs each line with as many words as
    fit, instead of globally minimising badness."""

    def format(self, text):
        """
        Format a paragraph string as fully justified text
        using a greedy method
        Args:
            text: one parapgraph of text to format
        Returns:
            formatted text string
        """
        self._memo = {}
        self._parent = {}
        self.words = text.split()
        self.lines = []
        pending = []
        for token in self.words:
            # accept the word onto the current line only if the packed
            # line still fits; otherwise flush and start a new line
            if len(self.packed(pending + [token])) <= self.width:
                pending.append(token)
            else:
                self.lines.append(pending)
                pending = [token]
        if pending:
            self.lines.append(pending)
        return '\n'.join(self.wrapped_lines())

    def wrapped_lines(self):
        """Yield every line fully justified, except the final line,
        which is packed (left unjustified)."""
        if not self.lines:
            return
        for words in self.lines[:-1]:
            yield self.expanded(words, self.width)
        yield self.packed(self.lines[-1])
# Default formatter exported to importers of this module.
TextFormatter = KnuthPlassFormatter

if __name__ == '__main__':
    main()

# vim: tabstop=4 shiftwidth=4 softtabstop=4 expandtab
| {"/justified/__init__.py": ["/justified/justified.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.