index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
51,464 | nirvguy/pyS | refs/heads/master | /pyS/S/math.py | import math
import operator
from functools import reduce
from itertools import count
def product(nums):
    """Return the product of all values in `nums`.

    Args:
        nums (iterable): iterable of integers (fixes the "intergers" typo).

    Returns:
        int: the running product; 1 for an empty iterable (multiplicative
        identity, matching the original `reduce` initializer).
    """
    return reduce(operator.mul, nums, 1)
def is_prime(num):
    """Return True if `num` is a prime number.

    Trial division only needs to test candidates up to sqrt(num): any
    composite has a factor no larger than its square root. The original
    tested every value below `num` (O(n) per call).

    Args:
        num (int): number to test; values below 2 are never prime.
    """
    if num < 2:
        return False
    # int(math.sqrt(...)) + 1 makes the range inclusive of the exact root
    # (e.g. 49 tests 7).
    for candidate in range(2, int(math.sqrt(num)) + 1):
        if num % candidate == 0:
            return False
    return True
def primes():
    """Return a lazy iterator over all prime numbers, in increasing order."""
    # filter() over count() is equivalent to the generator-expression form:
    # both yield exactly the x with is_prime(x), lazily and unbounded.
    return filter(is_prime, count())
def factorize(num):
    """Prime factorization of a number.

    Args:
        num (int): number to factorize.

    Yields:
        int: the exponent of each successive prime (2, 3, 5, ...) in the
        factorization of `num`. Stops once the current prime exceeds the
        remaining cofactor, so no trailing zeros are produced.

    Example:
        list(factorize(126)) == [1, 2, 0, 1]   # 2**1 * 3**2 * 5**0 * 7**1
    """
    remaining = num
    for prime in primes():
        if prime > remaining:
            # Nothing left to divide out; end the generator here.
            return
        exponent = 0
        while remaining % prime == 0:
            remaining //= prime
            exponent += 1
        yield exponent
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,465 | nirvguy/pyS | refs/heads/master | /pyS/parser/lexer_rules.py | from .tokens import *
class LexerException(Exception):
    """Raised when the lexer encounters input it cannot tokenize."""

    def __init__(self, lineno, colno, msg):
        self._lineno = lineno
        self._colno = colno
        self._msg = msg

    def __str__(self):
        template = "Lexer error at line {}, column {}: {}"
        return template.format(self._lineno, self._colno, self._msg)
def t_error(tok):
    """PLY error hook: abort lexing with a descriptive LexerException.

    `tok.lexpos` is a character offset, passed as the column number.
    """
    raise LexerException(tok.lineno, tok.lexpos,
                         "Unknown token " + str(tok.value))
def t_IGNORE(tok):
    r"\n+"
    # PLY token rule (the docstring IS the regex): swallow runs of newlines
    # and keep the lexer's line counter in sync. Returning None discards
    # the token.
    tok.lexer.lineno += len(tok.value)
def t_LABEL(tok):
    r"E_[1-9][0-9]*"
    # Label token: "E_<n>" with n >= 1. Store just the integer part as the
    # token value.
    tok.value = int(tok.value[2:])
    return tok
def t_VAR(tok):
    r"V_[0-9]+"
    # Variable token: "V_<n>" (n >= 0 here, unlike labels). Store just the
    # integer part as the token value.
    tok.value = int(tok.value[2:])
    return tok
# --- Simple PLY token definitions (one regex string per token) ---
# NOTE(review): t_ignore_WHITESPACES matches a single blank/tab; PLY discards
# "ignore_"-prefixed tokens, so runs are skipped one character at a time —
# r"[ \t]+" would do it in one match. Confirm before changing.
t_ignore_WHITESPACES = r"[ \t]"
t_ASSIGN = r"<-"
t_ADD = r"\+"
t_SUB = r"\-"
t_NOT = r"=/="
t_COLON = r":"
t_ZERO = r"0"
t_ONE = r"1"
t_IF = r"IF"
t_GOTO = r"GOTO"
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,466 | nirvguy/pyS | refs/heads/master | /setup.py | #!/usr/bin/env python3
from setuptools import setup
import pyS
from pyS import config
# Distribution metadata; console_scripts expose the compiler and dumper CLIs.
# NOTE(review): Sdbg/Srun entry points are not declared here — confirm
# whether they are meant to be installable commands too.
setup(name = 'pyS',
      version = config.VERSION,
      description = 'A parser and interpreter for the Turing Complete S language (Davis/Sigal/Weyuker)',
      author = 'Juan Cruz Sosa',
      author_email = 'nirvguy@gmail.com',
      packages = ['pyS'],
      install_requires = ['ply'],
      entry_points={
          'console_scripts': ['Scompiler = pyS.Scompiler:main',
                              'Sdump = pyS.Sdump:main'],
      }
      )
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,467 | nirvguy/pyS | refs/heads/master | /pyS/Sdbg.py | #!/usr/bin/env python3
import sys
import io
import getopt
import re
from .S.language import SProgram
from .S.interp import StopProgram, SInterp
from . import config
def usage():
    """Print Sdbg's command-line help to stdout."""
    # Bug fix: the banner said "./Sdump" — copied from the dump tool; this
    # is the debugger (Sdbg).
    print("./Sdbg [-h | -v] FILE")
    print()
    print(" -h, --help Displays this message")
    print(" -v, --version Displays the version of pyS")
    print()
class Sdb:
    """Interactive gdb-style debugger shell for an S program.

    Wraps an SInterp and drives it with run/break/step/continue/list/
    display/quit commands read from stdin.
    """

    def __init__(self, filename, s_program):
        self._filename = filename
        self._interp = SInterp(s_program)
        self._breakpoints = set()       # instruction numbers to stop at
        self._display_vars = set()      # V_n numbers echoed after each stop
        self._list_pos = 0              # cursor used by the `list` command
        self._is_running = False

    def step(self):
        """Execute one instruction, then show the new position and watches."""
        try:
            self._interp.step()
            self._list_pos = self._interp.pos()
            self.display_current_line()
            self.display_vars()
        except StopProgram:
            self._is_running = False
            print("The program is no longer running")

    def _run_until_breakpoint(self):
        """Run instructions until a breakpoint is hit or the program ends."""
        while True:
            try:
                self._interp.step()
                if self._interp.pos() in self._breakpoints:
                    self._list_pos = self._interp.pos()
                    self.display_current_line()
                    break
            except StopProgram:
                self._is_running = False
                print("Program finished")
                break
        self._list_pos = self._interp.pos()
        self.display_vars()

    def _load_program(self, args):
        """Reset the interpreter with input values `args` and start running."""
        self._interp.rewind(args)
        self._list_pos = 0
        self._is_running = True
        self._run_until_breakpoint()

    def add_breakpoint(self, lineno):
        """Register a breakpoint at instruction number `lineno`."""
        self._breakpoints.add(lineno)

    def display_current_line(self):
        """Print just the instruction at the current list cursor."""
        self.list_lines(self._list_pos, 1)

    def display_vars(self, local_var_nums = None, sep='\n'):
        """Print the watched variables (or `local_var_nums` when given),
        joined by `sep`. Does nothing while no watches are registered."""
        if len(self._display_vars) == 0:
            return
        if local_var_nums is None:
            dvars = self._display_vars
        else:
            dvars = local_var_nums
        print(sep.join("V_{} : {}".format(nvar, self._interp.var_value(nvar))
                       for nvar in dvars))

    def list_lines(self, line, n = config.LIST_LINES):
        """Print `n` instructions starting at `line`.

        NOTE(review): the marker logic prefixes NON-current lines with a
        space (current line gets none) — confirm that is the intended
        highlighting.
        """
        try:
            for i, rt_inst in self._interp.list(line, n):
                print("{}{}. {}".format(" " if i != self._interp.pos() else "",
                                        i, rt_inst))
        except StopProgram:
            print("Line number {} out of range; "
                  "{} has {} lines".format(line, self._filename,
                                           self._interp.lines()))

    def run(self):
        """Read-eval command loop; empty input repeats the previous command."""
        history = []
        while True:
            orig_command = input('(sdb) ').strip()
            if len(orig_command) == 0:
                orig_command = history[-1]
            else:
                history.append(orig_command)
            commands = orig_command.split(' ', 1)
            command = commands[0].strip()
            args = []
            if len(commands) > 1:
                args = [arg.strip() for arg in commands[1].split(",")]
            if command in ('r', 'run'):
                self.cmd_run(args)
            elif command in ('c', 'continue'):
                self.cmd_continue(args)
            elif command in ('b', 'break'):
                self.cmd_break(args)
            elif command in ('s', 'step'):
                # Bug fix: the long form was misspelled 'steap', which made
                # the documented `step` command unreachable.
                self.cmd_step(args)
            elif command in ('l', 'list'):
                self.cmd_list(args)
            elif command in ('d', 'display'):
                self.cmd_display(args)
            elif command in ('q', 'quit'):
                self.cmd_quit(args)
            else:
                print("Error: command {} unknown".format(command))
                continue

    def cmd_run(self, args):
        """(r)un: start the program, optionally with space-separated inputs."""
        if len(args) > 1:
            print("Invalid syntax error")
            return  # Bug fix: previously fell through and ran anyway.
        if self._is_running:
            while True:
                print("The program is already started.")
                resp = input("Start it from the beginning? (y or n) ")
                if resp in ('n', 'no'):
                    print("Program not restarted.")
                    return
                elif resp in ('y', 'yes'):
                    break
        s_args = [] if len(args) == 0 else args[0].split(" ")
        in_params = {}
        # Input variables are 1-based: V_1 gets the first argument, etc.
        for i, s_arg in enumerate(s_args):
            in_params[i+1] = int(s_arg)
        print("Starting program: {}".format(self._filename))
        self._load_program(in_params)

    def cmd_break(self, args):
        """(b)reak N: add a breakpoint at instruction N."""
        if len(args) != 1:
            print("Error: invalid breakpoint")
            return
        self.add_breakpoint(int(args[0]))

    def cmd_step(self, args):
        """(s)tep: execute a single instruction."""
        if len(args) != 0:
            print("Error: invalid step call")
            return
        self.step()

    def cmd_continue(self, args):
        """(c)ontinue: resume until the next breakpoint or termination."""
        if len(args) != 0:
            print("Error: invalid continue call")
            return
        self._run_until_breakpoint()

    def cmd_list(self, args):
        """(l)ist [N]: show source around the cursor or around line N."""
        line = self._list_pos
        if len(args) > 1:
            print("Error: invalid list")
            return
        if len(args) == 1:
            # Bug fix: was `len(args) == 2`, unreachable after the guard
            # above, so an explicit line argument was silently ignored.
            line = int(args[0])
        self.list_lines(line, config.LIST_LINES)
        # Bug fix: integer division keeps the cursor an int (/ yields float).
        self._list_pos += min(config.LIST_LINES // 2, self._interp.lines())

    def cmd_display(self, args):
        """(d)isplay [V_n, ...]: show watches, optionally adding new ones."""
        if len(args) == 0:
            self.display_vars()
        else:
            local_disp_vars = set()
            exp_digits = re.compile(r"^V_\d+$")
            for var in args:
                var = var.strip()
                if not exp_digits.match(var):
                    print("{} is not a valid name."
                          "Valid variables are: V_[0-9]+".format(var))
                    return
                local_disp_vars.add(int(var[2:]))
                self._display_vars.add(int(var[2:]))
            self.display_vars(local_disp_vars, ', ')

    def cmd_quit(self, args):
        """(q)uit: exit the debugger."""
        sys.exit(0)
def main():
    """Entry point: parse options, load the S program, start the debugger.

    Reads the program (a single integer Goedel number) from FILE or stdin.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hv", ["help", "version"])
    except getopt.GetoptError as err:
        print(str(err))
        usage()
        sys.exit(1)
    outfile = sys.stdout
    infile = sys.stdin
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif o in ("-v", "--version"):
            # Bug fix: was printf(...), which does not exist in Python.
            print("pyS {}".format(config.VERSION))
            sys.exit(0)
    if len(args) > 1:
        sys.stderr.write("Too many sources passed\n")
    filename = 'stdin'
    if len(args) == 1:
        try:
            filename = args[0]
            infile = open(filename, 'r')
        except IOError as e:
            sys.stderr.write("Could not open file for read {} (error {}): {}".format(args[0], e.errno, e.strerror))
            sys.exit(e.errno)
    # The whole program is encoded as one integer.
    s_program = SProgram(int(infile.read()))
    sdb = Sdb(filename, s_program)
    sdb.run()
    infile.close()
    outfile.close()
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,468 | nirvguy/pyS | refs/heads/master | /pyS/Sdump.py | #!/usr/bin/env python3
import sys
import io
import getopt
import S.language
def usage():
    """Print Sdump's command-line help to stdout."""
    for line in ("./Sdump [-h] [-o OUTPUT] FILE",
                 "",
                 " -h, --help Displays this message",
                 " -o OUTPUT Output file to S",
                 " -v, --version Displays the version of pyS",
                 ""):
        print(line)
def main():
    """Entry point: read an encoded S program and dump its decoded source.

    Reads the program (one integer) from FILE or stdin and writes the
    decoded listing to stdout or the -o target.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hvpo:", ["help", "version"])
    except getopt.GetoptError as err:
        print(str(err))
        usage()
        sys.exit(1)
    outfile = sys.stdout
    infile = sys.stdin
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif o in ("-v", "--version"):
            # Bug fix: was printf(...), which does not exist in Python.
            # NOTE(review): `config` is never imported in this module, so -v
            # still NameErrors; confirm where it lives (Sdbg uses
            # `from . import config`) and add the import.
            print("pyS {}".format(config.VERSION))
            sys.exit(0)
        elif o == "-o":
            try:
                outfile = open(a, 'w')
            except IOError as e:
                sys.stderr.write("Could not open file for write {} (error {}): {}\n".format(a, e.errno, e.strerror))
                sys.exit(e.errno)
        else:
            sys.stderr.write("Unknown option {0}\n".format(o))
    if len(args) > 1:
        sys.stderr.write("Too many sources passed\n")
    if len(args) == 1:
        try:
            infile = open(args[0], 'r')
        except IOError as e:
            sys.stderr.write("Could not open file for read {} (error {}): {}\n".format(args[0], e.errno, e.strerror))
            sys.exit(e.errno)
    s_program = S.language.SProgram(int(infile.read()))
    outfile.write(str(s_program)+'\n')
    infile.close()
    outfile.close()
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,469 | nirvguy/pyS | refs/heads/master | /testing/test_math.py | #!/usr/bin/env python3
import sys
import unittest
from itertools import islice
sys.path.append("..")
from pyS.S import math
import primes_list
# Expected factorize() output for small inputs: the exponents of the
# successive primes (2, 3, 5, ...) with no trailing zeros; 0 and 1 have an
# empty factorization.
factorization_result = { 0 : [],
                         1 : [],
                         2 : [1],
                         3 : [0, 1],
                         4 : [2],
                         5 : [0, 0, 1],
                         6 : [1, 1],
                         7 : [0, 0, 0, 1],
                         8 : [3],
                         9 : [0, 2] }
class TestPrimes(unittest.TestCase):
    """Checks for is_prime / primes / factorize against known data."""

    def test_is_prime(self):
        """0 and 1 are not prime; every listed prime is; gaps are composite."""
        self.assertFalse(math.is_prime(0))
        self.assertFalse(math.is_prime(1))
        known = primes_list.PRIMES
        for idx, p in enumerate(known):
            self.assertTrue(math.is_prime(p))
            if idx < len(known) - 1:
                nxt = known[idx + 1]
                for candidate in range(p + 1, nxt):
                    self.assertFalse(math.is_prime(candidate))
                self.assertTrue(math.is_prime(nxt))

    def test_iter_primos(self):
        """primes() enumerates exactly the known prime list, in order."""
        expected = primes_list.PRIMES
        generated = list(islice(math.primes(), 0, len(expected)))
        self.assertEqual(generated, expected)

    def test_factorizar(self):
        """factorize() matches the expected table for every tabulated input."""
        for number, expected in factorization_result.items():
            self.assertEqual(list(math.factorize(number)), expected)
# Allow running this suite directly as a script.
if __name__ == '__main__':
    unittest.main()
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,470 | nirvguy/pyS | refs/heads/master | /pyS/Srun.py | #!/usr/bin/env python3
import sys
import io
import getopt
from .S.language import SProgram
from .S.interp import StopProgram, SInterp
def usage():
    """Print Srun's command-line help to stdout."""
    # Bug fix: the banner said "./Sdump" — copied from the dump tool; this
    # is the runner (Srun).
    print("./Srun [-h | -v] FILE")
    print()
    print(" -h, --help Displays this message")
    print(" -v, --version Displays the version of pyS")
    print()
def main():
    """Entry point: run an encoded S program to completion and print V_1.

    Reads the program (one integer Goedel number) from FILE or stdin.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hv", ["help", "version"])
    except getopt.GetoptError as err:
        print(str(err))
        usage()
        sys.exit(1)
    outfile = sys.stdout
    infile = sys.stdin
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif o in ("-v", "--version"):
            # Bug fix: was printf(...) with `config` never imported — both
            # raised NameError whenever -v was used. Import it the same way
            # Sdbg does.
            from . import config
            print("pyS {}".format(config.VERSION))
            sys.exit(0)
    if len(args) > 1:
        sys.stderr.write("Too many sources passed\n")
    if len(args) == 1:
        try:
            infile = open(args[0], 'r')
        except IOError as e:
            sys.stderr.write("Could not open file for read {} (error {}): {}\n".format(args[0], e.errno, e.strerror))
            sys.exit(e.errno)
    s_program = SProgram(int(infile.read()))
    interp = SInterp(s_program)
    # Run to completion; StopProgram signals normal termination.
    while True:
        try:
            interp.step()
        except StopProgram:
            break
    # By S convention V_1 holds the program's output value.
    print(interp.var_value(1))
    infile.close()
    outfile.close()
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,471 | nirvguy/pyS | refs/heads/master | /pyS/S/datatypes.py | from .math import product, primes, factorize
class SInt:
    """S integer datatype: a thin wrapper over a Python int.

    Base class for the richer S encodings (STuple, SList).
    """

    def __init__(self, z):
        """Wrap `z`, which may be a plain int or another SInt (copied).

        Raises:
            TypeError: if `z` is neither (was a bare Exception; TypeError is
            the accurate category and still caught by `except Exception`).
        """
        if isinstance(z, SInt):
            self.z = z.z
        elif isinstance(z, int):
            self.z = z
        else:
            raise TypeError("wrong type")

    def decode(self):
        """Return the wrapped Python int."""
        return self.z

    @staticmethod
    def encode(x):
        """Wrap `x` in an SInt.

        Bug fix: the original returned SInt(self.z), but a staticmethod has
        no `self`, so every call raised NameError.
        """
        return SInt(x)

    def __str__(self):
        return str(self.decode())
class STuple(SInt):
    """S tuple datatype: encodes a pair (x, y) as the single integer
    2**x * (2*y + 1) - 1."""

    def __init__(self, z):
        """Wrap the raw code `z` (int or SInt)."""
        SInt.__init__(self, z)

    def decode(self):
        """Decode the S tuple into a Python pair (x, y).

        x is the exponent of 2 in z + 1 (via l()); y is recovered directly
        from the remaining odd cofactor, without using r().
        """
        x = self.l()
        y = ((self.z + 1) // 2 ** x - 1) // 2
        return (x, y)

    def l(self):
        """Return the left component: the largest t with 2**t dividing z + 1.

        The key maps non-divisor exponents to 0, so max() picks the highest
        valid t (t = 0 always qualifies since 2**0 divides everything).
        """
        return max(range(0, self.z + 1),
                   key=lambda t: t if (self.z + 1) % 2**t == 0 else 0)

    def r(self):
        """Return the right component of the tuple.

        NOTE(review): this computes the largest t with (2*t + 1) dividing
        z + 1, which does not look like the inverse of encode()'s
        (2*y + 1) factor — decode() bypasses it and derives y directly.
        Confirm whether r() is correct and actually used anywhere.
        """
        return max(range(0, self.z + 1),
                   key=lambda t: t if (self.z + 1) % (2*t + 1) == 0 else 0)

    @staticmethod
    def encode(v):
        """Encode the pair v = (x, y) as an STuple: 2**x * (2*y + 1) - 1."""
        return STuple(2**v[0] * (2 * v[1] + 1) - 1)
class SList(SInt):
    """S list datatype: a sequence encoded as prime-power exponents
    (Goedel numbering)."""

    def __init__(self, z):
        """Wrap the raw code `z` (int or SInt)."""
        SInt.__init__(self, z)

    def decode(self):
        """Return an iterator over the encoded values: the exponents of the
        successive primes in z's factorization."""
        return factorize(self.z)

    @staticmethod
    def encode(zs):
        """Encode the sequence `zs` by raising the n-th prime to zs[n] and
        multiplying the powers together."""
        powers = (prime ** exponent for prime, exponent in zip(primes(), zs))
        return SList(product(powers))

    def __str__(self):
        # Render as a Python list of string values, matching the original
        # str(list(map(str, ...))) form.
        decoded = [str(value) for value in self.decode()]
        return str(decoded)
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,472 | nirvguy/pyS | refs/heads/master | /testing/test_datatypes.py | #!/usr/bin/env python3
import sys
import unittest
import itertools
sys.path.append('..')
from pyS.S.datatypes import STuple, SList
class TestSTuple(unittest.TestCase):
    """Round-trip checks for the STuple pair encoding."""

    # Search-space bounds for the round-trip checks.
    RANGE_X = 20
    RANGE_Y = 20
    RANGE = RANGE_X * RANGE_Y

    def test_decode_encode(self):
        """decode followed by encode is the identity on raw codes."""
        for code in range(TestSTuple.RANGE):
            self.assertEqual(STuple.encode(STuple(code).decode()).z, code)

    def test_encode_decode(self):
        """encode followed by decode is the identity on pairs."""
        for a in range(TestSTuple.RANGE_X):
            for b in range(TestSTuple.RANGE_Y):
                self.assertEqual(STuple.encode((a, b)).decode(), (a, b))
def all_sequences(k, elems):
    """Yield every tuple over `elems` of length 0 through k, inclusive.

    Bug fix: the original called the bare name `product`, which is never
    imported here (the file imports the `itertools` module, not its
    members), so any call raised NameError.
    """
    return (seq for length in range(0, k + 1)
            for seq in itertools.product(elems, repeat=length))
class TestSList(unittest.TestCase):
    """Round-trip checks for the SList Goedel-number encoding."""

    RANGE = 1000     # codes tested in decode->encode
    RANGE_N = 5      # exclusive bound on element values
    RANGE_K = 8      # maximum sequence length

    def test_decode_encode(self):
        """decode followed by encode round-trips every positive code."""
        for code in range(1, TestSList.RANGE):
            self.assertEqual(SList.encode(SList(code).decode()).z, code)

    def test_encode_decode(self):
        """encode followed by decode round-trips every short sequence."""
        for seq in all_sequences(TestSList.RANGE_K, range(1, TestSList.RANGE_N)):
            # Bug fix: all_sequences yields tuples, and assertEqual(list,
            # tuple) always fails — normalise both sides to lists.
            self.assertEqual(list(SList.encode(seq).decode()), list(seq))
# Allow running this suite directly as a script.
if __name__ == '__main__':
    unittest.main()
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,473 | nirvguy/pyS | refs/heads/master | /pyS/parser/parser_rules.py | from .tokens import *
from ..S import RT
class ParserBaseException(Exception):
    """Generic parse failure carrying no location information."""

    def __str__(self):
        return "Parsing error"
class ParserException(ParserBaseException):
    """Parse failure with the offending subexpression, a cause, and detail."""

    def __init__(self, se, couse, msg):
        self._se = se
        self._couse = couse
        self._msg = msg

    def __str__(self):
        template = "Parsing error at line {}: {}: {}"
        return template.format(self._se.lineno(1), self._couse, self._msg)
class NotSameVarException(ParserException):
    """Raised when the two variables in an instruction differ: the S language
    only allows self-referential forms like V_n <- V_n + 1."""

    def __init__(self, se, tok, var1, var2):
        ParserException.__init__(self,
                                 se,
                                 tok,
                                 'V_{0}, V_{1} : Different variables'.format(var1, var2))
def p_error(se):
    """PLY error hook: raise a parser exception for the bad subexpression.

    A None `se` means an unexpected end of input (no location available).
    """
    if se is None:
        raise ParserBaseException
    # Tuple-valued tokens report their first element; others their value.
    value = se.value[0] if type(se.value) is tuple else se.value
    raise ParserException(se, "rule " + se.type,
                          "\n\tsubexpression value: " + value)
def p_instrlist(se):
    """
    instrlist :
              | linstr instrlist
    """
    # PLY rule (the docstring IS the grammar): an instruction list is either
    # empty or one labelled instruction followed by the rest; the result is
    # a plain Python list of instructions.
    if len(se) == 3: # linstr instrlist
        se[0] = [se[1]] + se[2]
    else:
        se[0] = []
def p_linstr(se):
    """
    linstr : instr
           | LABEL COLON instr
    """
    # A bare instruction passes through unchanged; a labelled one has its
    # label attribute filled in from the LABEL token.
    if len(se) == 2: # instr
        se[0] = se[1]
    else: # LABEL COLON instr
        temp = se[3]
        temp.label = se[1]
        se[0] = temp
def p_instr_incr(se):
    """
    instr : VAR ASSIGN VAR
    """
    # V_n <- V_n: the no-op assignment. Both sides must name the same
    # variable, per the S language definition.
    # NOTE(review): the function name says "incr" but this builds an Assign
    # instruction — looks like a misnomer; confirm.
    if not se[1] == se[3]:
        raise NotSameVarException(se, se[2], se[1], se[3])
    se[0] = RT.Instruction(label=None,
                           nvar=se[1],
                           instr_t=RT.InstructionType.Assign,
                           glabel = None)
def p_instr_inc(se):
    """
    instr : VAR ASSIGN VAR ADD ONE
    """
    # V_n <- V_n + 1: increment. Both variables must match.
    if not se[1] == se[3]:
        raise NotSameVarException(se, se[4], se[1], se[3])
    se[0] = RT.Instruction(label=None,
                           nvar=se[1],
                           instr_t=RT.InstructionType.Increment,
                           glabel = None)
def p_instr_dec(se):
    """
    instr : VAR ASSIGN VAR SUB ONE
    """
    # V_n <- V_n - 1: decrement. Both variables must match.
    if not se[1] == se[3]:
        raise NotSameVarException(se, se[4], se[1], se[3])
    se[0] = RT.Instruction(label=None,
                           nvar=se[1],
                           instr_t=RT.InstructionType.Decrement,
                           glabel = None)
def p_instr_goto(se):
    """
    instr : IF VAR NOT ZERO GOTO LABEL
    """
    # IF V_n =/= 0 GOTO E_m: the conditional jump, targeting label se[6].
    se[0] = RT.Instruction(label=None,
                           nvar=se[2],
                           instr_t=RT.InstructionType.Goto,
                           glabel=se[6])
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,474 | nirvguy/pyS | refs/heads/master | /pyS/parser/tokens.py | tokens = [
'IF',
'GOTO',
'VAR',
'LABEL',
'COLON',
'ASSIGN',
'NOT',
'ADD',
'SUB',
'ZERO',
'ONE'
]
| {"/pyS/S/language.py": ["/pyS/S/datatypes.py"], "/pyS/S/interp.py": ["/pyS/S/language.py", "/pyS/S/RT.py"], "/pyS/parser/parser.py": ["/pyS/S/language.py"], "/pyS/parser/lexer_rules.py": ["/pyS/parser/tokens.py"], "/pyS/Sdbg.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/Srun.py": ["/pyS/S/language.py", "/pyS/S/interp.py"], "/pyS/S/datatypes.py": ["/pyS/S/math.py"], "/testing/test_datatypes.py": ["/pyS/S/datatypes.py"], "/pyS/parser/parser_rules.py": ["/pyS/parser/tokens.py"]} |
51,487 | stevethehat/flow | refs/heads/master | /flow/server.py | import imp
import sys
import app.config as installation_config
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response
from server_base import ServerBase
class App(ServerBase):
    """Main application server; all behaviour is inherited from ServerBase."""
    pass
def app_start():
    """Build the App from the installation config and run it (Python 2)."""
    print "starting app..."
    app = App(installation_config)
    app.start()
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,488 | stevethehat/flow | refs/heads/master | /flow/web/page.py | class Page:
def __init__():
pass | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,489 | stevethehat/flow | refs/heads/master | /modules/test_module.py | def test_dynamic_loaded_handler(request):
return(Response("hello this is a dynamic loaded page"))
def setup_web_routes(config):
    """Register this test module's route and view on the Pyramid
    configurator (Python 2).

    NOTE(review): the view references Response, which is never imported in
    this module — requests to the route would NameError; confirm.
    """
    print "setup test module routes..."
    config.add_route("dynamicloadtest1", "/dynamicload{url}")
    config.add_view(test_dynamic_loaded_handler, route_name="dynamicloadtest1")
def init_web(config):
    """Module entry point invoked by the web server at startup; delegates
    route registration via Pyramid's config.include (Python 2)."""
    print "init test module...."
    config.include(setup_web_routes)
51,490 | stevethehat/flow | refs/heads/master | /flow/web/server.py | import imp
import flow.web.config as installation_config
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response
from flow.util.server_base import ServerBase
def page_server(request):
    """Debug view: echo the request's subpath, user agent and registry."""
    body = "<h1>Test</h1><p>[%s]</p><p>[%s]</p>[%s]" % (
        "/".join(request.subpath), request.user_agent, str(request.registry))
    return Response(body)
class WebApp(ServerBase):
    """Web front-end server; all behaviour is inherited from ServerBase."""
    pass
def web_app_start():
    """Build the WebApp from the installation config and run it (Python 2)."""
    print "starting web app..."
    web_app = WebApp(installation_config)
    web_app.start()
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,491 | stevethehat/flow | refs/heads/master | /flow/app/nodetypes.py | import os, ast, imp, sys, pprint, inspect
from decorators import BaseDecorator
from flowbase import FlowBase
"""
nodetypes
"""
sys.path.append("/Users/stevethehat/Development/flow")
class Nodetypes(FlowBase):
    """Registry of node definitions, classes and actions found under the
    installation's ``modules`` directory (Python 2).

    Scans ``*.nodedef`` files for declarative node definitions and ``*.py``
    files for @NodeClass / @NodeAction decorated code, caching the results
    in three literal files so the scan only reruns when a cache is missing.
    """

    def __init__(self, flow):
        super(Nodetypes, self).__init__(flow)
        # All scanned paths and cache files live under <approot>/modules.
        self.root_path = os.path.join(self.env.config["approot"], "modules")
        self.node_definitions_filename = os.path.join(self.root_path, "nodedefinitions")
        self.node_classes_filename = os.path.join(self.root_path, "nodeclasses")
        self.node_actions_filename = os.path.join(self.root_path, "nodeactions")
        self.definitions = {}
        self.classes = []
        self.actions = []
        # Rebuild the caches if any one of the three is missing.
        if not(os.path.exists(self.node_definitions_filename)) or not(os.path.exists(self.node_classes_filename)) or not(os.path.exists(self.node_actions_filename)):
            self.rebuild()
        self.load()

    def load(self):
        """Load the three caches from disk (empty defaults when absent)."""
        self.definitions = self.load_object(self.node_definitions_filename, {})
        self.classes = self.load_object(self.node_classes_filename, [])
        self.actions = self.load_object(self.node_actions_filename, [])

    def nodedefinition_processor(self, file_name):
        """Parse one .nodedef literal file and register it under its name,
        filling in defaults for any missing keys."""
        print "nodedefinition_processor %s" % file_name
        try:
            definition_file = open(file_name, "r")
            definition = ast.literal_eval(definition_file.read())
            definition_file.close()
            # Fill in a default for any key the definition omitted.
            def ensure_definition_element(key, default):
                if not(definition.has_key(key)):
                    definition[key] = default
            ensure_definition_element("description", "No description specified")
            ensure_definition_element("icon", "default")
            ensure_definition_element("editor", [])
            ensure_definition_element("parentnodes", [])
            ensure_definition_element("childnodes", [])
            ensure_definition_element("actions", [])
            self.definitions[definition["name"]] = definition
        except Exception, e:
            # Bad definition files are reported but do not abort the scan.
            print "Exception processing %s\n%s" % (file_name, e.message)

    def nodemodule_processor(self, file_name):
        """Statically scan one .py file for node classes/actions (the module
        is parsed with ast, never imported/executed)."""
        print "nodeclass_processor %s" % file_name
        module_file = open(file_name, "r")
        code = module_file.read()
        module_file.close()
        module_processor = ModuleProcessor()
        module_processor.visit(ast.parse(code))
        self.classes = self.classes + module_processor.node_classes
        self.actions = self.actions + module_processor.node_actions

    def process_definitions(self):
        """Invert the parent links: register each definition as a child of
        every definition it names in 'parentnodes'."""
        def process_parent_nodes(definition):
            parent_nodes = definition["parentnodes"]
            for parent_node in parent_nodes:
                self.definitions[parent_node]["childnodes"].append(definition["name"])
        for name in self.definitions:
            definition = self.definitions[name]
            process_parent_nodes(definition)

    def rebuild(self):
        """Rescan the modules tree from scratch and rewrite all three caches."""
        print "rebuild nodetypes..."
        if os.path.exists(self.node_definitions_filename):
            os.remove(self.node_definitions_filename)
        if os.path.exists(self.node_classes_filename):
            os.remove(self.node_classes_filename)
        if os.path.exists(self.node_actions_filename):
            os.remove(self.node_actions_filename)
        print "\nbuild nodedefs"
        self.walk_directories(".nodedef", self.nodedefinition_processor)
        print "\nbuild classes"
        self.walk_directories(".py", self.nodemodule_processor)
        self.process_definitions()
        self.save_object(self.node_definitions_filename, self.definitions)
        self.save_object(self.node_classes_filename, self.classes)
        self.save_object(self.node_actions_filename, self.actions)
        self.load()

    def walk_directory(self, path, file_type, processor):
        """Recursively apply `processor` to every file under `path` whose
        name ends with `file_type`."""
        print "walking node dir '%s'" % path
        # NOTE(review): [:len(file_type)] is a no-op slice — ext equals
        # file_type; probably leftover from an intended transformation.
        ext = file_type[:len(file_type)]
        for file_name in os.listdir(path):
            full_file_name = os.path.join(path, file_name)
            if os.path.isdir(full_file_name):
                self.walk_directory(full_file_name, file_type, processor)
            if os.path.isfile(full_file_name):
                if full_file_name.endswith(ext):
                    processor(full_file_name)

    def walk_directories(self, file_type, processor):
        """Walk the whole modules tree from the configured root."""
        self.walk_directory(self.root_path, file_type, processor)

    def output(self):
        """Dump the three registries for debugging."""
        print ""
        print "definitions\n%s" % pprint.pformat(self.definitions)
        print "nodeclasses\n%s" % pprint.pformat(self.classes)
        print "nodeactions\n%s" % pprint.pformat(self.actions)
class ModuleProcessor(ast.NodeVisitor):
    """AST visitor that collects @NodeClass classes and @NodeAction methods
    from a module's source without importing it."""

    def __init__(self):
        self.current_class = ""   # name of the most recent ClassDef visited
        self.node_classes = []
        self.node_actions = []

    def get_decorator_info(self, function_def, required_decorator):
        """Return the literal string arguments of `required_decorator` on the
        given def/class node, or None when the decorator is absent.

        NOTE(review): assumes every decorator is a call with a plain Name
        func (e.g. @NodeAction("x")); a bare @decorator would raise
        AttributeError on .func — confirm that never occurs in modules.
        """
        result = None
        for decorator in function_def.decorator_list:
            if str(decorator.func.id) == str(required_decorator):
                result = []
                for arg in decorator.args:
                    result.append(arg.s)
        return(result)

    def process_possible_node_action(self, function_def):
        """Record the function as an action if it carries @NodeAction."""
        decorator_info = self.get_decorator_info(function_def, "NodeAction")
        if decorator_info != None:
            self.node_actions.append(
                {
                    "arguments": decorator_info,
                    "name": function_def.name,
                    "class": self.current_class
                }
            )

    def process_possible_node_class(self, class_def):
        """Record the class if it carries @NodeClass; either way remember it
        as the current class so later actions are attributed to it."""
        decorator_info = self.get_decorator_info(class_def, "NodeClass")
        if decorator_info != None:
            self.node_classes.append(
                {
                    "class": class_def.name,
                    "arguments": decorator_info
                }
            )
        self.current_class = class_def.name

    def generic_visit(self, node):
        # Dispatch on the node type name, then continue the default walk so
        # nested classes/functions are also visited.
        node_type = type(node).__name__
        if node_type == "ClassDef":
            self.process_possible_node_class(node)
        if node_type == "FunctionDef":
            self.process_possible_node_action(node)
        ast.NodeVisitor.generic_visit(self, node)
class NodeDecorator(BaseDecorator):
    """Common marker base for the node-related decorators below. Arguments
    are read statically from the AST, not at runtime."""
    def __init__(self):
        pass
class NodeClass(NodeDecorator):
    """Marks a class as the implementation of the node type `nodetype`."""
    def __init__(self, nodetype):
        pass
class NodeAction(NodeDecorator):
    """Marks a method as a node action; `description` is its display text."""
    def __init__(self, description):
        pass
if __name__ == "__main__":
os.system("clear")
nt = NodeTypes("/Users/stevethehat/Development/flow/modules")
nt.rebuild()
nt.output() | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,492 | stevethehat/flow | refs/heads/master | /flow/app/flowbase.py | import os, ast
class FlowBase(object):
    """Common base for flow components: wires in the shared environment and
    provides tiny literal-file persistence helpers.

    Objects are serialized with ``str()`` and parsed back with
    ``ast.literal_eval``, so only Python literals (dicts, lists, strings,
    numbers, ...) round-trip.
    """

    def __init__(self, flow):
        self.flow = flow
        self.env = flow.env
        self.config = flow.env.config

    def load_object(self, full_file_name, default):
        """Return the literal stored in `full_file_name`, or `default` when
        the file does not exist."""
        if not os.path.exists(full_file_name):
            return default
        # `with` guarantees the handle is closed even if parsing raises
        # (the original leaked the handle on a literal_eval failure).
        with open(full_file_name, "r") as object_file:
            return ast.literal_eval(object_file.read())

    def save_object(self, full_file_name, obj):
        """Write `obj` to `full_file_name` as a Python literal, replacing
        any existing file (mode "w" truncates, so the original's explicit
        os.remove was redundant)."""
        with open(full_file_name, "w") as object_file:
            object_file.write(str(obj))
51,493 | stevethehat/flow | refs/heads/master | /flow/app/flow.py | from env import Env
from hierarchy import Hierarchy
from nodetypes import Nodetypes
class Flow(object):
    """Application root object: owns the environment, the node hierarchy
    and the nodetype registry."""

    def __init__(self, config):
        self.env = Env(config)
        self.env.log("init Flow")
        self.hierarchy = Hierarchy(self)
        self.nodetypes = Nodetypes(self)
51,494 | stevethehat/flow | refs/heads/master | /flow/tests/automated/node_loading_1.py | from flow.app.hierarchy import Hierarchy
import shutil
def test_1(flow):
    """Smoke test: build a tiny node hierarchy and dump the results
    (Python 2)."""
    flow.hierarchy.init()
    print "load root node..."
    root = flow.hierarchy.get_node(1)
    print "add admin node..."
    print ""
    admin = flow.hierarchy.add_node("admin", "Administration", parent_node = root)
    websites = flow.hierarchy.add_node("websites", "Websites", parent_node = root)
    website = flow.hierarchy.add_node("website", "Steves Website", parent_node = websites)
    print "root = '%s'\n" % root
    print "admin = '%s'\n" % admin
    print "websites = '%s'\n" % websites
    children = root.children()
    print "children = '%s'\n" % children
    print flow.nodetypes.definitions
    print ""
    print admin.definition
    # NOTE(review): admin.definition is printed twice — possibly leftover
    # from debugging; confirm.
    print admin.definition
51,495 | stevethehat/flow | refs/heads/master | /flow/web/site.py | class Site:
def __init__():
pass
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,496 | stevethehat/flow | refs/heads/master | /flow/tests/asttest.py | import ast, pprint, os
code = """
from flow.app.nodetypes import *
@NodeClass("page")
class Page:
def __init__(self):
pass
@NodeAction("Edit")
def edit(self):
pass
@NodeAction("Edit Content")
def edit_content(self):
pass
"""
#os.system("clear")
class v(ast.NodeVisitor):
    """Scratch AST visitor collecting @NodeClass / @NodeAction occurrences.

    NOTE(review): this duplicates ModuleProcessor in flow/app/nodetypes.py —
    presumably the prototype it was developed from.
    """

    def __init__(self):
        self.current_class = ""   # name of the most recent ClassDef visited
        self.node_classes = []
        self.node_actions = []

    def get_decorator_info(self, function_def, required_decorator):
        """Return the literal string args of `required_decorator` on the
        node, or None when the decorator is absent."""
        result = None
        for decorator in function_def.decorator_list:
            if str(decorator.func.id) == str(required_decorator):
                result = []
                for arg in decorator.args:
                    result.append(arg.s)
        return(result)

    def process_possible_node_action(self, function_def):
        """Record the function as an action if it carries @NodeAction."""
        decorator_info = self.get_decorator_info(function_def, "NodeAction")
        if decorator_info != None:
            self.node_actions.append(
                {
                    "arguments": decorator_info,
                    "name": function_def.name,
                    "class": self.current_class
                }
            )

    def process_possible_node_class(self, class_def):
        """Record the class if it carries @NodeClass and remember it as the
        current class for subsequent actions."""
        decorator_info = self.get_decorator_info(class_def, "NodeClass")
        if decorator_info != None:
            self.node_classes.append(
                {
                    "class": class_def.name,
                    "arguments": decorator_info
                }
            )
        self.current_class = class_def.name

    def generic_visit(self, node):
        # Dispatch on node type, then continue the default recursive walk.
        node_type = type(node).__name__
        if node_type == "ClassDef":
            self.process_possible_node_class(node)
        if node_type == "FunctionDef":
            self.process_possible_node_action(node)
        ast.NodeVisitor.generic_visit(self, node)
# Parse the sample source and run the visitor over it, then dump what it
# collected. (The "acrions" typo below is runtime output; left untouched.)
tree = ast.parse(code)
os.system("clear")
test = v()
test.visit(tree)
print "classes"
print test.node_classes
print ""
print "acrions"
print test.node_actions
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,497 | stevethehat/flow | refs/heads/master | /flow/web/config.py | from pyramid.response import Response
# example config
static_paths = [
{ "url_prefix": "assets", "local_path": "C:\\Development\\Personal\\flow\\static\\frontend\\assets" },
{ "url_prefix": "js", "local_path": "C:\\Development\\Personal\\flow\\static\\shared\\js" },
{ "url_prefix": "css", "local_path": "C:\\Development\\Personal\\flow\\static\\shared\\css" },
]
def test_dynamic_handler(request):
    # Sample Pyramid view for the dynamic-route demo; ignores the request.
    return(Response("hello this is a dynamic page"))
dynamic_url_handlers = [
{ "route": "/dynamic/{url}", "name": "dynamic_test", "view": test_dynamic_handler}
] | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,498 | stevethehat/flow | refs/heads/master | /flow/app/nodestore_file.py | import os, ast, shutil
from flowbase import FlowBase
class NodeStore_File(FlowBase):
    """
    Basic NodeStore implementation based on filesystem.
    """
    def __init__(self, env):
        # All records live under <approot>/nodes.
        self.env = env
        self.root_path = os.path.join(env.config["approot"], "nodes")
    def get_uid(self):
        # Monotonic counter kept in a "uid" file: read+1, rewrite.
        # NOTE(review): not atomic -- concurrent callers can mint the
        # same uid.
        uid_file_name = os.path.join(self.root_path, "uid")
        new_uid = 1
        if os.path.exists(uid_file_name):
            uid_file = open(uid_file_name, "r")
            new_uid = int(uid_file.read()) + 1
            uid_file.close()
            os.remove(uid_file_name)
        uid_file = open(uid_file_name, "w")
        uid_file.write(str(new_uid))
        uid_file.close()
        return(str(new_uid))
    def init(self):
        # Destructive: wipes the whole store and recreates an empty root.
        if os.path.exists(self.root_path):
            shutil.rmtree(self.root_path)
        os.makedirs(self.root_path)
    def get_node_directory_path(self, uid):
        node_directory_path = os.path.join(self.root_path, str(uid))
        return(node_directory_path)
    def get_node_data_path(self, uid):
        # Record data is kept in a ".node" file inside the uid directory.
        return(os.path.join(self.get_node_directory_path(uid), ".node"))
    def get(self, uid):
        # Returns the stored record dict, or None when the uid is unknown.
        result = None
        full_file_name = self.get_node_data_path(uid)
        if os.path.exists(full_file_name):
            result = self.load_object(full_file_name, None)
        return(result)
    def add(self, data):
        # NOTE(review): "uid = uid =" double assignment -- harmless typo.
        uid = uid = self.get_uid()
        node_directory_path = self.get_node_directory_path(uid)
        if not(os.path.exists(node_directory_path)):
            os.makedirs(node_directory_path)
        self.update(uid, data)
        return(uid)
    def update(self, uid, data):
        full_file_name = self.get_node_data_path(uid)
        self.save_object(full_file_name, data)
    def delete(self, uid):
        # Not implemented yet.
        pass
    def children(self, uid):
        # NOTE(review): loads via bare str(uid), not get_node_data_path()
        # as get()/update() do -- looks stale; verify before use.
        print "load children of '%s'" % uid
        node = self.load_object(str(uid), None)
        print "node = %s" % node
        print "children = %s" % node["child_uids"]
        results = []
        if node != None:
            for child_node_uid in node["child_uids"]:
                print "add child"
                results.append(self.load_object(child_node_uid, None))
        return(results)
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,499 | stevethehat/flow | refs/heads/master | /flow/server_base.py | import imp
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response
def page_server(request):
    # Catch-all demo view: reflects sub-path, user agent and registry
    # into the page (debug only).
    response = """
        <h1>/admin</h1>
        <p>[%s]</p>
        <p>[%s]</p>
        <p>[%s]</p>""" % ("/".join(request.subpath), request.user_agent, str(request.registry))
    return Response(response)
class ServerBase:
    """Shared Pyramid bootstrap: builds a Configurator from an installation
    config module (static paths, dynamic handlers) and serves via wsgiref."""
    def __init__(self, installation_config):
        """
        initialize Configurator and setup required routes
        """
        self._app_config = Configurator()
        self._installation_config = installation_config
        # setup routes
        self.init_static_routes()
        self.init_modules()
        self.init_dynamic_routes()
        self.init_default_routes()
    def init_default_routes(self):
        # Catch-all route, registered last so specific routes win.
        self._app_config.add_route("page_server", "/*subpath")
        self._app_config.add_view(page_server, route_name="page_server")
    def init_static_routes(self):
        # Mount each configured static directory under its URL prefix.
        if self._installation_config.static_paths:
            for static_path in self._installation_config.static_paths:
                print "adding static path '/%s' > '%s'" % (static_path["url_prefix"], static_path["local_path"])
                self._app_config.add_static_view(name=static_path["url_prefix"], path=static_path["local_path"])
    def init_dynamic_routes(self):
        # Register each configured route/view pair.
        if self._installation_config.dynamic_url_handlers:
            for dynamic_url_handler in self._installation_config.dynamic_url_handlers:
                print "adding dynamic url handler '%s'" % (dynamic_url_handler["route"])
                self._app_config.add_route(dynamic_url_handler["name"], dynamic_url_handler["route"])
                self._app_config.add_view(dynamic_url_handler["view"], route_name=dynamic_url_handler["name"])
    def init_modules(self):
        #module = imp.load_source("test_module", "C:\\Development\\Personal\\flow\\modules\\test_module.py")
        #module.init_web(self._app_config)
        pass
    def start(self):
        # Blocks forever; dev server only (wsgiref, port 8080, all ifaces).
        self.app = self._app_config.make_wsgi_app()
        self.server = make_server("0.0.0.0", 8080, self.app)
        self.server.serve_forever()
def app_start():
    # NOTE(review): `App` is not defined in this module -- calling this
    # would raise NameError; App appears to be defined elsewhere.
    print "starting web app..."
    app = App()
    app.start()
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,500 | stevethehat/flow | refs/heads/master | /flow/app/hierarchy.py | import os
from nodestore_file import NodeStore_File
from node import Node
from flowbase import FlowBase
class Hierarchy(FlowBase):
    """Node-tree facade over a NodeStore: seeds the root node and hands
    out Node wrappers by uid."""
    def __init__(self, flow):
        super(Hierarchy, self).__init__(flow);
        self.env.log("init Hierarchy")
        self.store = NodeStore_File(self.env)
    def init(self):
        # Destroys and recreates the store, then seeds the root record.
        self.store.init()
        self.store.add({ "nodetype": "root", "description": "Flow Root", "child_uids": []})
    def get_node(self, uid):
        # Node loads its own data from the store on construction.
        node = Node(self.flow, uid)
        return(node)
    def add_node(self, nodetype, description, parent_node = None, parent_uid = None):
        # Caller may pass either the parent Node or just its uid.
        if parent_node == None:
            parent_node = self.get_node(parent_uid)
        result = parent_node.add_child(nodetype, description)
        return(result)
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,501 | stevethehat/flow | refs/heads/master | /flow/app/config.py | import os
from pyramid.response import Response
import admin_handler
root_path = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir))
# example config
static_paths = [
{ "url_prefix": "js", "local_path": os.path.join(root_path, "static", "app", "js") },
{ "url_prefix": "css", "local_path": os.path.join(root_path, "static", "app", "css") },
{ "url_prefix": "html", "local_path": os.path.join(root_path, "static", "app", "html") },
{ "url_prefix": "fonts", "local_path": os.path.join(root_path, "static", "app", "fonts") },
{ "url_prefix": "assets", "local_path": os.path.join(root_path, "static", "app", "assets") },
]
def test_dynamic_handler(request):
return(Response("hello this is a dynamic page"))
def nodes_handler(request):
return(admin_handler.admin_handler(request))
dynamic_url_handlers = [
{ "route": "/nodes/{action}/*subpath", "name": "nodes_handler", "view": nodes_handler}
] | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,502 | stevethehat/flow | refs/heads/master | /flow/tests/introspection.py | class Test(object):
"""
this is the docstring for the class Test
"""
def __init__(self):
pass
#code
test = Test()
print test
print test.__doc__
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,503 | stevethehat/flow | refs/heads/master | /tests.py | import os
from flow.app.flow import Flow
from flow.tests.automated.node_loading_1 import *
os.system("clear")
flow = Flow(
{
"approot": os.path.dirname(os.path.realpath(__file__))
}
)
test_1(flow) | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,504 | stevethehat/flow | refs/heads/master | /flow/app/node.py | from decorators import *
from nodestore_file import NodeStore_File
from flowbase import FlowBase
class Node(FlowBase):
    """Wrapper around one stored node record (dict with parent_uid,
    nodetype, description and child_uids)."""
    def __init__(self, flow, uid, data = None):
        super(Node, self).__init__(flow)
        self.hierarchy = self.flow.hierarchy
        self.uid = uid
        # Load from the store unless the caller already has the record.
        if data != None:
            self.data = data
        else:
            self.data = self.hierarchy.store.get(uid)
        self.nodetype = self.data["nodetype"]
    @cacheprop
    def definition(self):
        # Cached lookup of this node's type definition.
        return(self.flow.nodetypes.definitions[self.nodetype])
    def edit(self):
        # Placeholder action.
        return("the result of the edit...")
    def update(self):
        # Persist the in-memory record.
        self.hierarchy.store.update(self.uid, self.data)
    def add_child(self, nodetype, description):
        # Create the child record, link it here, persist, return wrapper.
        data = { "parent_uid": self.uid, "nodetype": nodetype, "description": description, "child_uids": []}
        uid = self.hierarchy.store.add(data)
        self.data["child_uids"].append(uid)
        self.update()
        return(self.hierarchy.get_node(uid))
    def children(self):
        # One store round-trip per child uid.
        children = []
        for child_node_uid in self.data["child_uids"]:
            children.append(self.hierarchy.get_node(child_node_uid))
        return(children)
    def __repr__(self):
        return("\n%s" % {"uid": self.uid, "data": self.data})
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,505 | stevethehat/flow | refs/heads/master | /web.py | from flow.web.server import *
web_app_start()
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,506 | stevethehat/flow | refs/heads/master | /modules/web/page/page.py | from flow.app.nodetypes import *
@NodeClass("page")
class Page:
def __init__(self):
pass
@NodeAction("Edit")
def edit(self):
pass
@NodeAction("Edit Content")
def edit_content(self):
pass
def other_method(self):
pass
class NotANodeClass():
def __init__(self):
pass | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,507 | stevethehat/flow | refs/heads/master | /flow/app/env.py | class Env():
    def __init__(self, config):
        # config: plain dict of installation settings (e.g. "approot").
        self.config = config
    def log(self, message):
        # Minimal logging: straight to stdout (Python 2 print statement).
        print message
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,508 | stevethehat/flow | refs/heads/master | /flow/app/decorators.py | class BaseDecorator:
def __call__(self, f):
#if self.oncall:
# self.oncall(f)
def wrapper(*args):
wrapped_f = f(*args)
return(wrapped_f)
return(wrapper)
def cacheprop(fn):
    """Read-only property that computes *fn* once per instance and
    memoizes the result on the instance under '_lazy_<name>'."""
    attr_name = '_lazy_' + fn.__name__

    def _cacheprop(self):
        # First access computes and stores; later accesses reuse the slot.
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)

    return property(_cacheprop)
51,509 | stevethehat/flow | refs/heads/master | /flow/tests/decorators.py | import os
os.system("cls")
class BaseDecorator:
def __init__(self):
self._message = ""
def __call__(self, f):
def wrapper(*args):
print "\n>>%s" % self._message
wrapped_f = f(*args)
print "<<%s\n" % self._message
return(wrapped_f)
return(wrapper)
class NodeClass(BaseDecorator):
def __init__(self, nodetype):
print "we have a node implementor!!! '%s'" % nodetype
self._message = "NodeClass init"
class NodeAction(BaseDecorator):
def __init__(self, description):
print "we have a node action implementor!!! '%s'" % description
self._message = "NodeAction call"
@NodeClass("page")
class Page:
def __init__(self):
print "in page init"
@NodeAction("Edit Content")
def EditContent(self):
print "Edit content on page"
print "test page 1"
page = Page()
page.EditContent()
print "test page 2"
page2 = Page()
page2.EditContent()
print "decorators test" | {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,510 | stevethehat/flow | refs/heads/master | /flow/app/server.py | import imp
import sys
import config as installation_config
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response
from flow.util.server_base import ServerBase
def page_server(request):
    # Demo catch-all view: reflects sub-path, user agent and registry.
    response = "<h1>Test 1</h1><p>[%s]</p><p>[%s]</p>[%s]" % ("/".join(request.subpath), request.user_agent, str(request.registry))
    return Response(response)
class App(ServerBase):
pass
def app_start():
    """Entry point: build App around this package's config module and serve."""
    print "starting app..."
    app = App(installation_config)
    app.start()
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,511 | stevethehat/flow | refs/heads/master | /flow/app/admin_handler.py | from pyramid.response import Response
from flow import Flow
def admin_handler(request):
    """Pyramid view: load the node named by the URL sub-path and invoke the
    action named by the route's {action} match on it.

    SECURITY NOTE(review): `action` comes straight from the URL and is
    dispatched via getattr -- any public attribute of the node is callable.
    Also, a missing action falls back to [] and `[]()` raises TypeError.
    """
    # NOTE(review): builds a fresh Flow per request with a hard-coded path.
    flow = Flow(
        { "approot": "C:\\Development\\Personal\\flow" }
    )
    uid = "/".join(request.subpath)
    action = request.matchdict["action"]
    node = flow.hierarchy.get_node(uid)
    print node
    action_response = getattr(node, action, [])()
    response = """
    <h1>node test response</h1>
    <p>uid = '%s'</p>
    <p>action = '%s'</p>
    <p>result = '%s'</p>
    """ % (uid, action, action_response)
    return(Response(response))
| {"/tests.py": ["/flow/app/flow.py", "/flow/tests/automated/node_loading_1.py"], "/web.py": ["/flow/web/server.py"], "/modules/web/page/page.py": ["/flow/app/nodetypes.py"]} |
51,512 | mzdzioch/hpebomcenverting | refs/heads/master | /hpeconverter.py | import tkinter as tk
from tkinter import filedialog as fd
from exconv import run
root = tk.Tk()
root.title("ABB Power Grid HPE Converter")
canvas1 = tk.Canvas(root, width=300, height=300)
canvas1.pack()
def hello():
    """Button callback: ask for a file, run the converter on it, and show
    a green/red status label on the canvas."""
    filename = fd.askopenfilename()
    print(filename)
    if(run(filename)):
        label1 = tk.Label(root, text='Done', fg='green', font=('helvetica', 12, 'bold'))
    else:
        label1 = tk.Label(root, text='Please find correct file!', fg='red', font=('helvetica', 12, 'bold'))
    canvas1.create_window(150, 200, window=label1)
button1 = tk.Button(text='Find a file and convert', command=hello, bg='brown', fg='white')
canvas1.create_window(150, 150, window=button1)
root.mainloop() | {"/hpeconverter.py": ["/exconv.py"]} |
51,513 | mzdzioch/hpebomcenverting | refs/heads/master | /exconv.py | from openpyxl.styles import Alignment, Font
from expy import open_xls_as_xlsx as open_xls_as_xlsx
import re
def swap(sheets, columnfirst, columnsecond):
    """Swap two adjacent worksheet columns, using the column six cells to
    the right of *columnfirst* as scratch space (openpyxl cell.offset)."""
    # Stash columnfirst six columns to the right.
    for a in sheets[columnfirst]:
        new_cell = a.offset(column=6)
        new_cell.value = a.value
    # Copy columnsecond one column left (into columnfirst's position).
    for b in sheets[columnsecond]:
        new_cell_2 = b.offset(column=-1)
        new_cell_2.value = b.value
    # Pull the stashed values into columnsecond and clear the scratch area.
    for c in sheets[columnsecond]:
        c.value = c.offset(column=5).value
        c.offset(column=5).value = None
def cellalign(worksheet, column):
    """Center-align and bold every cell in *column* (openpyxl styles)."""
    for c in worksheet[column]:
        c.alignment = Alignment(horizontal='center')
        c.font = Font(bold=True)
def setcloumnwidth(worksheet):
    """Apply the fixed column widths used on the reworked summary sheet."""
    widths = {'A': 53, 'B': 60.6423, 'C': 20, 'D': 20, 'E': 30, 'F': 30}
    for column, width in widths.items():
        worksheet.column_dimensions[column].width = width
def setcloumnwidthforfirstsheet(worksheet):
    """Apply the fixed column widths used on the original (first) sheet."""
    widths = {'A': 53, 'B': 40, 'C': 30, 'D': 30, 'E': 30, 'F': 30}
    for column, width in widths.items():
        worksheet.column_dimensions[column].width = width
def setwrapping(worksheet, column):
    """Enable text wrapping on every cell in *column*."""
    for c in worksheet[column]:
        c.alignment = Alignment(wrap_text=True)
def removestringafterchar(worksheet, column, char):
    """Truncate every cell value in *column* at the first occurrence of *char*.

    Bug fix: the original guard ``if c.value.find(char):`` treated -1
    ("not found") as true and index 0 as false, i.e. it skipped exactly
    the wrong rows; a real membership test is used instead. (The absent
    case was harmless before -- split on a missing char is a no-op -- but
    a leading *char* was never truncated.)
    """
    for cell in worksheet[column]:
        text = cell.value
        if char in text:
            cell.value = text.split(char, 1)[0]
def findtext(worksheet, column):
    """Rewrite memory/disk description cells in *column* into Excel
    formulas showing total capacity (quantity from column D times the
    per-item GB/TB size parsed from the text)."""
    count = 1
    for c in worksheet[column]:
        # NOTE(review): the character classes here were likely intended as
        # groups ("(GB|TB)") -- see findvolume/findunit; verify matches.
        if(re.match("HPE [\d, 0-9]{1,3}[GB|TB]", c.value)):
            if (re.search("Memory", c.value)):
                # Memory rows: insert the total after the closing ")".
                splitted = c.value.split(")", 1)
                c.value = '="' + splitted[0] + ') (in total "&D'+str(count) + '*' + str(findvolume(c.value)) + '&"' + findunit(c.value) + ')' + splitted[1] + '"'
            else:
                # Other rows: insert the total after the second word.
                splitted = c.value.split(" ", 2)
                c.value = '="' + splitted[0] + splitted[1] + ' (in total "&D'+str(count) + '*' + str(findvolume(c.value)) + '&"' + findunit(c.value) + ') ' + splitted[2] + '"'
        else:
            print()  # no-op branch; leftover debug
        count = count + 1
def findvolume(text):
    """Return the numeric part of the first capacity token ("16GB", "2TB")
    in *text*, or 0 when no such token exists.

    Bug fix: the original pattern ``[\\d, 0-9]{1,3}[GB|TB]`` used character
    classes where groups were intended, so commas/spaces counted as digits
    and any single one of G, B, |, T terminated the match.
    """
    for word in text.split(" "):
        match = re.match(r'(\d{1,3})(GB|TB)', word)
        if match:
            return int(match.group(1))
    return 0
def findunit(text):
    """Return "TB" or "GB" for the first capacity token in *text*, or the
    string "null" when none is found (TB is checked first, as before).

    Bug fix: the original pattern ``[\\d, 0-9]{1,3}TB`` let commas and
    spaces count as digits; the digit class is now just ``\\d``.
    """
    for word in text.split(" "):
        if re.match(r'\d{1,3}TB', word):
            return "TB"
        if re.match(r'\d{1,3}GB', word):
            return "GB"
    return "null"
def run(filename, true=None):
    """Convert an HPE .XLS quote in place: copy the active sheet, swap
    columns C/D, set widths, strip '#...' suffixes and inject the
    capacity-total formulas. Returns True on success, False when the file
    is not a recognisable .XLS configuration summary.

    NOTE(review): the `true` parameter is unused; the ".XLS" check is
    case-sensitive and uses re.search with an unescaped dot.
    """
    # wb = open_xls_as_xlsx('/home/michal/Downloads/DL380_ESX_Host.XLS')
    if(re.search(".XLS", filename)):
        wb = open_xls_as_xlsx(filename)
        source = wb.active
        sheets = wb.copy_worksheet(source)
        if(re.search("Configuration Summary", sheets['A1'].value)):
            #sheets = wb.get_sheet_by_name(wb.sheetnames[0])
            cellalign(sheets, 'D')
            swap(sheets, 'C', 'D')
            setcloumnwidth(sheets)
            setcloumnwidthforfirstsheet(source)
            removestringafterchar(sheets, 'C', '#')
            findtext(sheets, 'B')
        else:
            return False
        wb.save(filename)
        return True
    else:
        return False
| {"/hpeconverter.py": ["/exconv.py"]} |
51,522 | mahtabfarrokh/compiler | refs/heads/master | /test.py |
#print ('\u627 \u6AF \u631'.decode('unicode_escape'))
# print ('\u0628\u0631\u0646\u0627\u0645\u0647')
#
# dic = {
# }
# dic ['\u0628\u0631\u0646\u0627\u0645\u0647'] = "hey"
# print(dic)
# "کلید"
import unicodedata
print("hello")
q = 'A'
print(ord(q))
q = 'Z'
print(ord(q))
q = 'a'
print(ord(q))
q = 'z'
print(ord(q))
q = 'ا'
print(ord(q))
q = 'ب'
print(ord(q))
q = 'پ'
print(ord(q))
q = 'ی'
print(ord(q)) | {"/parsercompiler.py": ["/lexercompiler.py", "/QuadRuples.py", "/SymbolTable.py"]} |
51,523 | mahtabfarrokh/compiler | refs/heads/master | /parsercompiler.py | import ply.yacc as yacc
from lexercompiler import tokens
import lexercompiler
from QuadRuples import QuadRuple
from SymbolTable import Entry
class Parser:
tokens = tokens
    def __init__(self):
        # Emitted three-address code, in order.
        self.quadruples = []
        # Counters for generated temporaries.
        self.counter = 0
        self.counter2 = 0
        self.definedvar = []
        # Rows of [name, type, true_list, false_list].
        self.symbolTable = []
        # Pending initialisations: [name, value-place] pairs.
        self.meghdardehi = []
    def printSymbol(self):
        """Debug dump of the symbol table (two lines per entry)."""
        print("Symbol Table : ")
        for i in self.symbolTable:
            print(i[0], i[1])
            print(i[2], i[3])
    def goto_detect (self , myP ):
        # Given the 1-based quad index recorded by a marker rule, drop the
        # speculative goto quads emitted at that point; always returns 0 so
        # the caller can reset its marker. myP == 0 means "nothing to undo".
        if myP == 0 :
            return 0
        else:
            x = myP - 1
            print("=======================================")
            self.printQuadruples()
            print("=======================================")
            # Remove the quad at the marker, then (bounds permitting) the
            # one just before it. NOTE(review): deleting by index shifts
            # later backpatch targets -- confirm this is intended.
            if x < len(self.quadruples) and x >= 0:
                del self.quadruples[x]
            else:
                print(" :( " , len(self.quadruples) , x-1 )
            if x-1 < len(self.quadruples) and x > 0:
                del self.quadruples[x-1]
            print("=======================================")
            self.printQuadruples()
            print("=======================================")
            return 0
    def printQuadruples(self):
        """Debug dump of all emitted quadruples."""
        print("QuadRuples : ")
        for i in self.quadruples:
            print("op : ", i.op, "arg1 : ", i.arg_one, "arg2 : ", i.arg_two, "result : ",i.result)
            # NOTE(review): result printed twice (also above) -- leftover debug?
            print(i.result)
def newTemp(self):
self.counter += 1
return 'Temp' + str(self.counter)
    def varType(self, newName):
        # Look up a symbol's type; implicitly returns None when absent.
        # NOTE(review): near-duplicate of findType(), which returns [] instead.
        for i in self.symbolTable:
            if i[0] == newName:
                return i[1]
    def findTList(self, newName):
        """Return the stored true-list for *newName*, or [] when unknown."""
        for i in self.symbolTable:
            if i[0] == newName:
                print(i[2])  # leftover debug output
                return i[2]
        return []
def findFList(self, newName):
for i in self.symbolTable:
if i[0] == newName:
return i[3]
return []
    def findType(self, name):
        # Like varType() but returns [] (not None) when the name is unknown.
        for i in self.symbolTable:
            if i[0] == name:
                return i[1]
        return []
    def updateTList(self, newName , trueL):
        # Overwrite the true-list of every matching symbol-table row.
        for i in self.symbolTable:
            if i[0] == newName:
                i[2] = trueL
    def updateFList(self, newName , falseL):
        # Overwrite the false-list of every matching symbol-table row.
        for i in self.symbolTable:
            if i[0] == newName:
                i[3] = falseL
    def handle_goto(self):
        # Patch every still-dangling "goto" quad to jump just past the
        # current end of the quadruple list.
        for q in self.quadruples:
            if q.result.replace(" ", "") == "goto":
                print("---------------")
                print("yaaaaaaaaaafffftaaaaaaaaaaammmm")
                print("---------------")
                q.result = q.result + str(len(self.quadruples))
def p_R1(self, p):
'''
barnameh : PROGRAM ID tarifha
'''
print("Rule 1: barnameh -> PROGRAM ID tarifha")
def p_R2(self, p):
'''
tarifha : tarifha tarif
| tarif
'''
print("Rule 2: tarifha -> tarifha tarif | tarif ")
def p_R3(self, p):
'''
tarif : STRUCT ID LBRACE tarifhayeMahalli RBRACE
| STRUCT ID LBRACE RBRACE
| jens tarifhayeMotheghayyerha SEMI
| jens ID LPAREN vorudiha RPAREN jomle
| jens ID LPAREN RPAREN jomle
| ID LPAREN vorudiha RPAREN jomle
| ID LPAREN RPAREN jomle
'''
self.handle_goto()
print("Rule 3: STRUCT ID LBRACE tarifhayeMahalli RBRACE"
"| STRUCT ID LBRACE RBRACE"
"| jens tarifhayeMotheghayyerha SEMI"
"| jens ID LPAREN vorudiha RPAREN jomle"
"| jens ID LPAREN RPAREN jomle"
"| ID LPAREN vorudiha RPAREN jomle"
"| ID LPAREN RPAREN jomle")
def p_R5(self, p):
'''
tarifhayeMahalli : tarifhayeMahalli tarifeMoteghayyereMahdud
| tarifeMoteghayyereMahdud
'''
print("Rule 5: tarifhayeMahalli -> tarifhayeMahalli tarifeMoteghayyereMahdud "
"| tarifeMoteghayyereMahdud ")
def p_R6(self, p):
'''
tarifeMoteghayyereMahdud : jenseMahdud tarifhayeMotheghayyerha SEMI
'''
p[0] = Entry()
p[0].type = p[1].type
for name in p[2]:
if not int(name.lenght) <= 1:
base = name.place
for i in range(0,int(name.lenght)):
var = [base + str(i), p[1].type, name.true_list, name.false_list]
print(var)
self.symbolTable.append(var)
else:
var = [name.place, p[1].type, name.true_list, name.false_list]
self.symbolTable.append(var)
print("Rule 6: tarifeMoteghayyereMahdud -> jenseMahdud tarifhayeMotheghayyerha SEMI")
def p_R7(self, p):
'''
jenseMahdud : CONSTANT jens
| jens
'''
if len(p) == 2:
p[0] = p[1]
else:
p[0] = p[2]
# TODO
print("Rule 7: jenseMahdud -> CONSTANT jens | jens")
def p_R8_0(self, p):
'''
jens : BOOL
'''
p[0] = Entry()
p[0].type = 'bool'
print("Rule 8_0 : jens : BOOL ")
def p_R8_1(self, p):
'''
jens : INT
'''
p[0] = Entry()
p[0].type = 'arith'
print("Rule 8: jens : INT")
def p_R8_2(self, p):
'''
jens : FLOAT
'''
p[0] = Entry()
p[0].type = 'arith'
print("Rule 8: jens : FLOAT")
def p_R8(self, p):
'''
jens : CHAR
'''
p[0] = Entry()
p[0].type = 'char'
print("Rule 8: jens : CHAR ")
def p_R10(self, p):
'''
tarifhayeMotheghayyerha : tarifeMeghdareAvvalie
| tarifhayeMotheghayyerha COMMA tarifeMeghdareAvvalie
'''
# p[0] = Entry()
if len(p) == 2:
print("kkkkkk" , p[1].place )
p[0] = []
p[0].append(p[1])
else:
print(":|||||||||")
p[1].append(p[3])
p[0] = p[1]
print("Rule 10: tarifhayeMotheghayyerha -> tarifeMeghdareAvvalie"
"| tarifhayeMotheghayyerha COMMA tarifeMeghdareAvvalie ")
def p_R11(self, p):
'''
tarifeMeghdareAvvalie : tarifeShenaseyeMoteghayyer
| tarifeShenaseyeMoteghayyer EQUALS ebarateSade
'''
p[0] = p[1]
if len(p) == 2 :
self.meghdardehi.append([p[1].place, '1111111111111111'])
if len(p) == 4:
self.meghdardehi.append([p[1].place, p[3].place])
p[0].true_list = p[3].true_list
p[0].false_list = p[3].false_list
p[0].detect = self.goto_detect(p[3].detect)
print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
p[0].true_list, p[0].false_list)
self.updateTList(p[0].place, p[0].true_list)
self.updateFList(p[0].place, p[0].false_list)
print("Rule 11: tarifeMeghdareAvvalie -> tarifeShenaseyeMoteghayyer"
"| tarifeShenaseyeMoteghayyer = ebarateSade ")
def p_R12(self, p):
'''
tarifeShenaseyeMoteghayyer : ID
| ID LBR ICONST RBR
'''
p[0] = Entry()
p[0].place = p[1]
print("bemir ;) " , len(p))
if len(p) == 5:
p[0].lenght = p[3]
print("Rule 12: tarifeShenaseyeMoteghayyer -> ID | ID [ ICONST ] ")
def p_R15(self, p):
'''
vorudiha : vorudiha SEMI jensVorudiha
| jensVorudiha
'''
print("Rule 15: vorudiha -> vorudiha ; jensVorudiha | jensVorudiha ")
def p_R16(self, p):
'''
jensVorudiha : jens shenaseyeVorudiha
'''
print("Rule 16: jensVorudiha -> jens shenaseyeVorudiha ")
def p_R17(self, p):
'''
shenaseyeVorudiha : shenaseyeVorudiha COMMA shenaseyeVorudi
| shenaseyeVorudi
'''
print("Rule 17: shenaseyeVorudiha -> shenaseyeVorudiha COMMA shenaseyeVorudi"
"| shenaseyeVorudi ")
def p_R18(self, p):
'''shenaseyeVorudi : ID
| ID LBR RBR
'''
p[0] = Entry()
p[0].place = p[1]
print("Rule 42: shenaseyeVorudi -> id | id [] ")
def p_19_0(self, p):
'''
jomle : unmatched
| matched
'''
p[0] = p[1]
print ('to gaveeeee mani ')
print("rule 19: jomle -> matched | unmatched ")
def p_19_1(self, p):
''' matched : IF ebarateSade THEN M matched Nelse M matched
| otherJomle
'''
p[0] = Entry()
p[0].type = 'bool'
if len (p) == 2 :
p[0] = p[1]
else:
if p[2] is None :
p[2].type = self.findType(p[2].place)
if p[2].type == 'bool':
print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[2].detect,
p[2].true_list, p[2].false_list)
print("keeeeeeeeeeeeyliii khaaariiiiiiiiii" , p[4].quad , p[7].quad)
self.printQuadruples()
Entry.backpatch(p[2].true_list, self.quadruples, p[4].quad)
Entry.backpatch(p[2].false_list, self.quadruples, p[7].quad)
p[0].next_list = p[5].next_list + p[6].next_list + p[8].next_list
self.printQuadruples()
else:
pass
self.handle_goto()
print("rule 19_1: matched : IF ebarateSade THEN matched ELSE matched | otherJomle")
def p_19_2(self, p):
''' unmatched : IF ebarateSade THEN M jomle
| IF ebarateSade THEN M matched Nelse M unmatched
'''
p[0] = Entry()
if len(p) == 6 :
Entry.backpatch(p[2].true_list, self.quadruples, p[4].quad)
p[0].next_list = p[2].false_list + p[5].next_list
self.handle_goto()
else:
if p[2] is None:
p[2].type = self.findType(p[2].place)
if p[2].type == 'bool':
Entry.backpatch(p[2].true_list, self.quadruples, p[4].quad)
Entry.backpatch(p[2].false_list, self.quadruples, p[7].quad)
p[0].next_list = p[5].next_list + p[6].next_list + p[8].next_list
self.printQuadruples()
print("rule 19_2: unmatched : IF ebarateSade THEN jomle |"
" IF ebarateSade THEN matched ELSE unmatched")
self.handle_goto()
    def p_Nelse(self, p):
        '''
        Nelse : ELSE
        '''
        # Marker rule: emit a placeholder goto after the THEN branch and
        # record its index for later backpatching.
        # (The docstring above is the PLY grammar rule -- do not edit it.)
        p[0] = Entry()
        p[0].next_list.append(len(self.quadruples))
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
    def p_N(self, p):
        '''
        N :
        '''
        # Empty marker rule: emit a placeholder goto and record the quad
        # index just after it. (Docstring is the PLY grammar rule -- keep.)
        print('saaaaaaaaaaaaaaaaaaaaaaaaaaaalllllllllllllllllllllllllllllllllllllaaaaaaaaaaaaaaaaaaam')
        p[0] = Entry()
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
        p[0].quad = (len(self.quadruples))
def p_R20(self, p):
'''otherJomle : LBRACE tarifhayeMahalli jomleha RBRACE
| LBRACE tarifhayeMahalli RBRACE
| ebarat SEMI
| SEMI
| jomleyeTekrar
| RETURN SEMI
| RETURN ebarat SEMI
| BREAK SEMI
'''
p[0] = Entry()
print("Rule 20: jomle -> LBRACE tarifhayeMahalli jomleha RBRACE"
"| LBRACE tarifhayeMahalli RBRACE"
"| ebarat SEMI"
"| SEMI"
"| KEY LPAREN ebarateSade RPAREN onsoreHalat onsorePishfarz END"
"| KEY LPAREN ebarateSade RPAREN onsoreHalat END"
"| WHILE LPAREN ebarateSade RPAREN jomle"
"| RETURN SEMI"
"| RETURN ebarat SEMI"
"| BREAK SEMI")
def p_R20_1(self, p):
'''jomleyeTekrar : WHILE M ebarateSade M jomle '''
p[0] = Entry()
if p[3].true_list is None:
p[3].true_list = self.findTList(p[3].place)
# print (p[7].next_list , p[4].next_list , 'ablahhhhhhhhhhhhh')
Entry.backpatch(p[5].next_list, self.quadruples, p[3].quad)
Entry.backpatch(p[3].true_list, self.quadruples, p[4].quad)
p[0].next_list = p[3].false_list
self.quadruples.append(QuadRuple('', '', '', 'goto' + str(p[3].quad )))
def p_R20_2(self, p):
'''otherJomle : KEY LPAREN ebarateSade RPAREN onsoreHalat END
'''
p[0] = Entry()
c = 0
for q in self.quadruples:
if q.op == '!=' and q.arg_one == 'x' and q.result[:4] == 'goto' :
if not self.quadruples[c+2].op == "=":
print(" ================================================")
print(" :) ")
print(" ================================================")
self.quadruples[c+1] = QuadRuple('', '', '', 'goto' + str(c+2))
break
c += 1
self.handle_goto()
print("Rule 20_2 : otherJomle -> KEY LPAREN ebarateSade RPAREN onsoreHalat END ")
def p_R20_3 (self,p):
'''otherJomle : KEY LPAREN ebarateSade RPAREN onsoreHalat onsorePishfarz END'''
print("Rule 20_3 : otherJomle -> KEY LPAREN ebarateSade RPAREN onsoreHalat onsorePishfarz END ")
def p_R21(self, p):
'''
jomleha : jomleha jomle
| jomle
'''
print("Rule 21: jomleha -> jomleha jomle")
def p_R24(self, p):
'''onsoreHalat : stateKW2
| onsoreHalat stateKW2
'''
# self.quadruples.append(QuadRuple('', '', '', 'goto'))
print("Rule 24: onsoreHalat -> state ADAD: jomle ;"
"| onsoreHalat state ADAD : jomle ; ")
where = 0
def p_R24_0 (self, p):
'''stateKW1 : STATE ICONST COLON
'''
p[0] = Entry()
p[0].IDdetect = False
p[0].true_list.append(len(self.quadruples))
p[0].false_list.append(len(self.quadruples) + 1)
self.quadruples.append(QuadRuple('!=', 'x', str(p[2]), 'goto' ))
self.quadruples.append(QuadRuple('', '', '', 'goto'))
self.where = [len(self.quadruples) - 2]
self.printQuadruples()
p[0].type = 'bool'
print("Rule 24_0 : stateKW1 -> STATE ICONST COLON")
def p_R24_1 (self, p):
'''stateKW2 : stateKW1 jomle N SEMI
'''
# s elf.quadruples.append(QuadRuple('', '', '', 'goto'))
Entry.backpatch(self.where, self.quadruples, p[3].quad )
print("Rule 24_1 : stateKW2 -> stateKW1 jomle N SEMI")
def p_R25(self, p):
'onsorePishfarz : DEFAULT COLON jomle SEMI'
# TODO after a while :D
print("Rule 25: onsorePishfarz -> default : jomle ; ")
def p_R29_1(self, p):
'''ebarat : taghirpazir EQUALS ebarat
'''
p[0] = Entry()
p[0].place = p[1].place
print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
p[0].true_list, p[0].false_list)
p[0].detect = self.goto_detect(p[3].detect)
print ('---------------',p[3].place,'--------------')
self.quadruples.append(QuadRuple('=', p[3].place, '', p[1].place))
self.updateTList(p[0].place, p[3].true_list)
self.updateFList(p[0].place, p[3].false_list)
self.printQuadruples()
print('salam salam hale bozghale ')
print("Rule 29: ebarat -> taghirpazir = ebarat")
def p_R29_2(self, p):
'''ebarat : taghirpazir PLUSEQUAL ebarat
'''
p[0] = Entry()
self.quadruples.append(QuadRuple('+', p[1].place, p[3].place, p[1].place))
print("Rule 29: ebarat -> taghirpazir += ebarat")
def p_R29_3(self, p):
'''ebarat : taghirpazir MINUSEQUAL ebarat
'''
p[0] = Entry()
self.quadruples.append(QuadRuple('-', p[1].place, p[3].place, p[1].place))
print("Rule 29: ebarat -> taghirpazir -= ebarat")
def p_R29_4(self, p):
'''ebarat : taghirpazir TIMESEQUAL ebarat
'''
p[0] = Entry()
self.quadruples.append(QuadRuple('*', p[1].place, p[3].place, p[1].place))
print("Rule 29: ebarat -> taghirpazir *= ebarat")
def p_R29_5(self, p):
'''ebarat : taghirpazir DIVEQUAL ebarat
'''
p[0] = Entry()
self.quadruples.append(QuadRuple('/', p[1].place, p[3].place, p[1].place))
print("Rule 29: ebarat -> taghirpazir /= ebarat")
def p_R29_6(self, p):
'''ebarat : taghirpazir PLUSPLUS
'''
p[0] = Entry()
p[0] = p[1]
self.quadruples.append(QuadRuple('+', p[1].place, '1', p[1].place))
print("Rule 29: ebarat -> taghirpazir ++ ")
def p_R29_7(self, p):
'''ebarat : taghirpazir MINUSMINUS
'''
p[0] = Entry()
p[0] = p[1]
self.quadruples.append(QuadRuple('-', p[1].place, '1', p[1].place))
print("Rule 29: ebarat -> taghirpazir -- ")
    def p_R29_8(self, p):
        '''ebarat : ebarateSade
        '''
        # Plain expression: forward the child's attributes unchanged.
        p[0] = p[1]
        print("Rule 29: ebarat -> ebarateSade ")
    def p_R30(self, p):
        '''
        ebarateSade : ebarateSade3
                    | ebarateRiaziManteghi2 amalgareRiazi2 ebarateRiaziManteghi2
                    | ebarateRiaziManteghi1 amalgareRiazi1 ebarateRiaziManteghi1
                    | ebaratRabetei
        '''
        # Simple expression.  Single-child alternatives forward the entry;
        # the 3-symbol arithmetic forms store the result in a new temp and
        # emit quads whose shape depends on the operands' arith/bool types.
        if len(p) == 2:
            p[0] = p[1]
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
                  p[0].true_list, p[0].false_list)
        else:
            p[1].detect = self.goto_detect(p[1].detect)
            p[3].detect = self.goto_detect(p[3].detect)
            p[0] = Entry()
            p[0].place = self.newTemp()
            p[0].type = 'arith'
            # Recover jump lists recorded for these places, if any.
            p[1].false_list = self.findFList(p[1].place)
            p[3].false_list = self.findFList(p[3].place)
            p[1].true_list = self.findTList(p[1].place)
            p[3].true_list = self.findTList(p[3].place)
            # Char literals are coerced to their code point; the place is a
            # quoted literal, so x[1] is the character between the quotes.
            if p[1].type == 'char':
                print("heeelooooo jooonemadaret ")
                p[1].type = 'arith'
                x = p[1].place
                p[1].place = str(ord(x[1]))
            if p[3].type == 'char':
                print("heeelooooo jooonemadaret ")
                p[3].type = 'arith'
                x = p[3].place
                p[3].place = str(ord(x[1]))
            if p[3].type is None:
                p[3].type = self.varType(p[3].place)
            if p[1].type is None:
                p[1].type = self.varType(p[1].place)
            if p[1].type == 'arith' and p[3].type == 'arith':
                # Plain arithmetic: one quad.
                self.quadruples.append(QuadRuple(p[2].place, p[1].place, p[3].place, p[0].place))
            elif p[1].type == 'arith' and p[3].type == 'bool':
                # Materialise the bool operand as 1/0 via its jump lists.
                Entry.backpatch(p[3].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[1].place, '1', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[3].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[1].place, '0', p[0].place))
            elif p[1].type == 'bool' and p[3].type == 'arith':
                Entry.backpatch(p[1].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[3].place, '1', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[1].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[3].place, '0', p[0].place))
            elif p[3].type == 'bool' and p[1].type == 'bool':
                # Both bool: left operand is spilled into a temp first.
                temp = self.newTemp()
                print("ahmagh : ", p[1].true_list, p[1].false_list, p[3].true_list, p[3].false_list)
                self.printQuadruples()
                Entry.backpatch(p[1].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple('', '1', '', temp))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[1].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple('', '0', '', temp))
                print(p[3].place, 'tu ruhe in zendegiiiiiiiiiiiii')
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(p[3].true_list[0])))
                Entry.backpatch(p[3].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, temp, '1', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[3].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple('', temp, '', p[0].place))
            else:
                # Unhandled type combination — left as a loud debug print.
                print(' shit :| ')
        print("keeeeeeeeeeeeyliii khaaariiiiiiiiii")
        self.printQuadruples()
        print("Rule 30: ebarateSade -> ebarateSade3 "
              "| ebarateRiaziManteghi2 amalgareRiazi2 ebarateRiaziManteghi2"
              "| ebarateRiaziManteghi1 amalgareRiazi1 ebarateRiaziManteghi1")
    def p_30_0_0(self, p):
        '''
        ebarateSade0 : amalgareYegani ebarateYegani
        '''
        # Unary expression: only '-' is folded, by textually prefixing the
        # operand's place (e.g. '-' + '5' -> '-5'); other unary operators
        # are passed through untouched.
        if p[1].place == '-':
            p[1].place = p[1].place + p[2].place
        p[1].type = 'arith'
        p[0] = p[1]
        # TODO ssssaaaaaaaaggggg
        print("Rule 30_0_0: ebarateSade0 -> amalgareYegani ebarateYegani ")
    def p_30_0_1(self, p):
        '''
        ebarateSade0 : ID
        '''
        # Bare identifier: look up its recorded jump lists/type, mark it as
        # ID-derived, and reserve a placeholder goto quad (detect points at
        # the quad after it).
        p[0] = Entry()
        p[0].place = p[1]
        p[0].true_list = self.findTList(p[0].place)
        p[0].false_list = self.findFList(p[0].place)
        p[0].type = self.findType(p[0].place)
        p[0].IDdetect = True
        p[0].detect = len(self.quadruples) + 1
        print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect, p[0].true_list,
              p[0].false_list)
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
        # self.updateTList(p[0].place, p[0].true_list)
        # self.updateFList(p[0].place, p[0].false_list)
        print("Rule 30_0_1: ebarateSade0 -> ID ")
    def p_30_0_2(self, p):
        '''
        ebarateSade0 : taghirpazir LBR ebarat RBR
        '''
        # Array element: the place is the base name concatenated with the
        # index expression's place; otherwise handled like a bare ID.
        p[0] = Entry()
        p[0].place = p[1].place + str(p[3].place)
        p[0].true_list = self.findTList(p[0].place)
        p[0].false_list = self.findFList(p[0].place)
        p[0].type = self.findType(p[0].place)
        p[0].detect = len(self.quadruples) + 1
        p[0].IDdetect = True
        print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect, p[0].true_list,
              p[0].false_list)
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
        print("Rule 30_0_2: ebarateSade0 -> taghirpazir LBR ebarat RBR ")
    def p_30_0_3(self, p):
        '''
        ebarateSade0 : taghirpazir DOT ID
        '''
        # Member access — code generation not implemented yet.
        print("Rule 30_0_3: ebarateSade0 -> taghirpazir DOT ID ")
    def p_30_0_4(self, p):
        '''
        ebarateSade0 : LPAREN ebarat RPAREN
        '''
        # Parenthesised expression.  If the inner expression came straight
        # from an ID, convert it to a boolean by emitting `== 1` jump quads.
        p[0] = p[2]
        if p[2].IDdetect == True :
            print("khaaaaaaaheeesh mikonam dorost bash :( ")
            place = '1'
            detect = len(self.quadruples) + 1
            # NOTE(review): this local `true_list` is never read afterwards.
            true_list = [len(self.quadruples)]
            self.quadruples.append(QuadRuple('', '', '', 'goto'))
            print("Rule 43 : meghdaresabet -> TRUE ")
            p[0] = Entry()
            p[0].IDdetect = False
            p[2].detect = self.goto_detect(p[2].detect)
            detect = self.goto_detect(detect)
            # Conditional jump pair: `if p[2] == 1 goto ...` / unconditional
            # fall-through goto; targets are backpatched later.
            p[0].true_list.append(len(self.quadruples))
            p[0].false_list.append(len(self.quadruples) + 1)
            self.quadruples.append(QuadRuple('==', p[2].place, place, 'goto'))
            self.quadruples.append(QuadRuple('', '', '', 'goto'))
            p[0].type = 'bool'
        print("Rule 30_0_4 : ebarateSade0 -> LPAREN ebarat RPAREN ")
    def p_30_0_5(self, p):
        '''
        ebarateSade0 : sedaZadan
        '''
        # Function-call expression — code generation not implemented yet.
        # TODO after a while :D
        print("Rule 30_0: ebarateSade0 -> sedaZadan ")
    def p_30_6(self, p):
        '''
        ebarateSade0 : meghdaresabet
        '''
        # Constant: forward the entry but clear the ID flag.
        p[0] = p[1]
        p[0].IDdetect = False
        print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> " , p[0].detect , p[0].true_list , p[0].false_list)
        print("Rule 30_0: ebarateSade0 -> meghdaresabet")
    def p_R30_1(self, p):
        '''
        ebarateSade1 : NOT ebarateSade1
        | ebarateSade0
        '''
        # Logical NOT: implemented by swapping the operand's true and false
        # jump lists; the single-child form forwards the entry.
        if len(p) == 2:
            p[0] = p[1]
            p[0].IDdetect = True and p[1].IDdetect
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
                  p[0].true_list, p[0].false_list)
        else:
            p[0] = p[1]
            p[0].IDdetect = False
            # Swap lists (p is re-read, so this is a straight exchange).
            p[0].true_list = p[1].false_list
            p[0].false_list = p[1].true_list
            self.updateTList(p[0].place, p[0].true_list)
            self.updateFList(p[0].place, p[0].false_list)
        print("Rule 30_1: ebarateSade1 -> | NOT ebarateSade1| ebarateSade0 ")
    def p_R30_2(self, p):
        '''
        ebarateSade2 : ebarateSade2 AND M ebarateSade2
        | ebarateSade1
        '''
        # Logical AND with short-circuit backpatching: the left operand's
        # true list is patched to marker M (start of the right operand);
        # mixed arith/bool operands are first turned into 1/0 tests.
        if len(p) == 2:
            p[0] = p[1]
            p[0].IDdetect = True and p[1].IDdetect
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
                  p[0].true_list, p[0].false_list)
        else:
            # p[1].detect = self.goto_detect(p[1].detect)
            # p[3].detect = self.goto_detect(p[3].detect)
            p[0] = Entry()
            p[0].place = self.newTemp()
            p[0].type = 'bool'
            p[0].IDdetect = False
            # `nakonim` flags whether any jump list was recovered from the
            # bookkeeping; it is only consumed by commented-out code below.
            nakonim = False
            if p[1].false_list == [] :
                p[1].false_list = self.findFList(p[1].place)
                if not p[1].false_list == []:
                    nakonim = True
            if p[1].true_list == [] :
                p[1].true_list = self.findTList(p[1].place)
                if not p[1].true_list == []:
                    nakonim = True
            if p[4].false_list == [] :
                p[4].false_list = self.findFList(p[4].place)
                if not p[4].false_list == []:
                    nakonim = True
            if p[4].true_list == []:
                p[4].true_list = self.findTList(p[4].place)
                if not p[4].true_list == []:
                    nakonim = True
            if p[4].type == None:
                p[4].type = self.varType(p[4].place)
            if p[1].type == None:
                p[1].type = self.varType(p[1].place)
            # NOTE(review): the next two checks duplicate the two above.
            if p[4].type == None:
                p[4].type = self.varType(p[4].place)
            if p[1].type == None:
                p[1].type = self.varType(p[1].place)
            if p[1].type == 'bool' and p[4].type == 'bool':
                print ('saaaaalaaaaaaaaaaaaaaaaaaaaam')
                # if nakonim:
                #     self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #
                #     self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #     self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                # Standard AND: left-true falls into the right operand (M).
                Entry.backpatch(p[1].true_list, self.quadruples, p[3].quad)
                # if nakonim:
                #     self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[4].true_list
                p[0].false_list = p[1].false_list + p[4].false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
            elif p[1].type == 'arith' and p[4].type == 'bool':
                # Turn the arith operand into a `> 0` test first.
                t = Entry()
                t.true_list.append(len(self.quadruples))
                t.false_list.append(len(self.quadruples) + 1)
                self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                Entry.backpatch(t.true_list, self.quadruples, p[3].quad)
                self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[4].true_list
                p[0].false_list = p[4].false_list + t.false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
            elif p[1].type == 'bool' and p[4].type == 'arith':
                t = Entry()
                t.true_list.append(len(self.quadruples))
                t.false_list.append(len(self.quadruples) + 1)
                self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + (str(len(self.quadruples) + 2 ))))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + (str(len(self.quadruples) + 2 ))))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                Entry.backpatch(p[1].true_list, self.quadruples, p[3].quad)
                self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = t.true_list
                p[0].false_list = p[1].false_list + t.false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
            elif p[1].type == 'arith' and p[4].type == 'arith':
                # Two arith operands: normalise each constant to 1/0, then
                # emit `> 0` tests for both.
                if int ( p[1].place ) > 0 :
                    p[1].place = '1'
                else :
                    p[1].place = '0'
                if int(p[4].place) > 0:
                    p[4].place = '1'
                else:
                    p[4].place = '0'
                self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                Entry.backpatch(p[1].true_list, self.quadruples, p[3].quad)
                self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[4].true_list
                p[0].false_list = p[1].false_list + p[4].false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
                # t = Entry()
                # t.true_list.append(len(self.quadruples))
                # t.false_list.append(len(self.quadruples) + 1)
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                # self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                # self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + (str(len(self.quadruples) + 2 ))))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #
                # t1 = Entry()
                # t1.true_list.append(len(self.quadruples))
                # t1.false_list.append(len(self.quadruples) + 1)
                # self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + (str(len(self.quadruples) + 2 ))))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                # self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #
                # Entry.backpatch(t.true_list, self.quadruples, p[3].quad)
                # self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                # p[0].true_list = t1.true_list
                # p[0].false_list = t.false_list + t1.false_list
                # self.updateTList(p[0].place, p[0].true_list)
                # self.updateFList(p[0].place, p[0].false_list)
        print("Rule 30_2: ebarateSade2 -> ebarateSade2 AND ebarateSade2 | ebarateSade1 ")
    def p_R30_3(self, p):
        '''
        ebarateSade2 : ebarateSade2 ANDTHEN ebarateSade2
        '''
        # Short-circuit "and then" — code generation not implemented yet.
        # TODO what we should DO ? :(
        print("Rule 30_2: ebarateSade2 -> ebarateSade2 ANDTHEN ebarateSade2 ")
    def p_R30_4(self, p):
        '''
        ebarateSade3 : ebarateSade3 OR M ebarateSade3
        | ebarateSade2
        '''
        # Logical OR with short-circuit backpatching: the left operand's
        # false list is patched to marker M (start of the right operand);
        # mixed arith/bool operands are first turned into 1/0 tests.
        if len(p) == 2:
            p[0] = p[1]
            p[0].IDdetect = True and p[1].IDdetect
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
                  p[0].true_list, p[0].false_list)
        else:
            # p[1].detect = self.goto_detect(p[1].detect)
            # p[3].detect = self.goto_detect(p[3].detect)
            p[0] = Entry()
            p[0].place = self.newTemp()
            p[0].type = 'bool'
            p[0].IDdetect = False
            # `nakonim` flags whether any jump list was recovered from the
            # bookkeeping; it is only consumed by commented-out code below.
            nakonim = False
            if p[1].false_list == [] :
                p[1].false_list = self.findFList(p[1].place)
                if not p[1].false_list == []:
                    nakonim = True
            if p[1].true_list == [] :
                p[1].true_list = self.findTList(p[1].place)
                if not p[1].true_list == []:
                    nakonim = True
            if p[4].false_list == [] :
                p[4].false_list = self.findFList(p[4].place)
                if not p[4].false_list == []:
                    nakonim = True
            if p[4].true_list == []:
                p[4].true_list = self.findTList(p[4].place)
                if not p[4].true_list == []:
                    nakonim = True
            if p[4].type == None:
                p[4].type = self.varType(p[4].place)
            if p[1].type == None:
                p[1].type = self.varType(p[1].place)
            if p[1].type == 'bool' and p[4].type == 'bool':
                # print('://///////////////////////////////////////////////////////////')
                # if nakonim:
                #     self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #     self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #
                #     self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #     self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                #     self.quadruples.append(QuadRuple('', '', '', 'goto'))
                # Standard OR: left-false falls into the right operand (M).
                Entry.backpatch(p[1].false_list, self.quadruples, p[3].quad)
                # if nakonim:
                #     self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[1].true_list + p[4].true_list
                p[0].false_list = p[4].false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
            elif p[1].type == 'arith' and p[4].type == 'bool':
                # Turn the arith operand into a `> 0` test first.
                t = Entry()
                t.true_list.append(len(self.quadruples))
                t.false_list.append(len(self.quadruples) + 1)
                self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                Entry.backpatch(p[4].false_list, self.quadruples, p[3].quad)
                self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[4].true_list + t.true_list
                p[0].false_list = t.false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
                p[0].type = 'bool'
            elif p[1].type == 'bool' and p[4].type == 'arith':
                t = Entry()
                t.true_list.append(len(self.quadruples))
                t.false_list.append(len(self.quadruples) + 1)
                self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                Entry.backpatch(p[1].false_list, self.quadruples, p[3].quad)
                self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[1].true_list + t.true_list
                p[0].false_list = t.false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
                p[0].type = 'bool'
            elif p[1].type == 'arith' and p[4].type == 'arith':
                # Two arith operands: normalise each constant to 1/0, then
                # emit `> 0` tests for both.
                if int ( p[1].place ) > 0 :
                    p[1].place = '1'
                else :
                    p[1].place = '0'
                if int(p[4].place) > 0:
                    p[4].place = '1'
                else:
                    p[4].place = '0'
                self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto' + str(len(self.quadruples) + 3)))
                # p[1].false_list.append(str(len(self.quadruples) + 3))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + str(len(self.quadruples) + 2)))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto'))
                Entry.backpatch(p[1].false_list, self.quadruples, p[3].quad)
                self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                p[0].true_list = p[1].true_list + p[4].true_list
                p[0].false_list = p[4].false_list
                self.updateTList(p[0].place, p[0].true_list)
                self.updateFList(p[0].place, p[0].false_list)
                # t1 = Entry()
                # t1.true_list.append(len(self.quadruples))
                # t1.false_list.append(len(self.quadruples) + 1)
                # self.quadruples.append(QuadRuple('>', p[1].place, '0', 'goto' + (str(len(self.quadruples) + 2 ))))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                # self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #
                # t2 = Entry()
                # t2.true_list.append(len(self.quadruples))
                # t2.false_list.append(len(self.quadruples) + 1)
                # self.quadruples.append(QuadRuple('>', p[4].place, '0', 'goto' + (str(len(self.quadruples) + 2 ))))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                # self.quadruples.append(QuadRuple('=', '1', '', p[0].place))
                # self.quadruples.append(QuadRuple('', '', '', 'goto'))
                #
                # Entry.backpatch(t1.false_list, self.quadruples, p[3].quad)
                # self.quadruples.append(QuadRuple('=', '0', '', p[0].place))
                #
                # p[0].true_list = t1.true_list + t2.true_list
                # p[0].false_list = t2.false_list
                # self.updateTList(p[0].place, p[0].true_list)
                # self.updateFList(p[0].place, p[0].false_list)
                p[0].type = 'bool'
        print("Rule 30_3: ebarateSade3 -> ebarateSade3 OR ebarateSade3 | ebarateSade2 ")
    def p_M(self, p):
        '''
        M :
        '''
        # Marker (empty) rule: records the index of the next quadruple so
        # surrounding rules can backpatch jumps to this point.
        print("MMMMMMMMMMMMM")
        p[0] = Entry()
        p[0].quad = len(self.quadruples)
        # print("empty rule")
    def p_R30_5(self, p):
        '''
        ebarateSade3 : ebarateSade3 ORELSE ebarateSade3
        '''
        # Short-circuit "or else" — code generation not implemented yet.
        # TODO what we should DO ? :(
        print("Rule 30_3: ebarateSade3 -> ebarateSade3 ORELSE ebarateSade3 ")
    def p_R31(self, p):
        ''' ebaratRabetei : ebarateRiaziManteghi1 amalgareRabetei ebarateRiaziManteghi1
        | ebarateRiaziManteghi2 amalgareRabetei ebarateRiaziManteghi2
        '''
        # Relational expression: emit the classic conditional/unconditional
        # goto pair; targets are filled in later via the jump lists.
        p[0] = Entry()
        p[0].IDdetect = False
        p[1].detect = self.goto_detect(p[1].detect)
        p[3].detect = self.goto_detect(p[3].detect)
        p[0].true_list.append(len(self.quadruples))
        p[0].false_list.append(len(self.quadruples) + 1)
        self.quadruples.append(QuadRuple(p[2].place, p[1].place, p[3].place, 'goto'))
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
        p[0].type = 'bool'
        print("Rule 31_3: ebarateRiaziManteghi1 amalgareRabetei ebarateRiaziManteghi1 "
              "| ebarateRiaziManteghi2 amalgareRabetei ebarateRiaziManteghi2 ")
def p_R32_0(self, p):
'''amalgareRabetei : LT '''
p[0] = Entry()
p[0].place = '<'
print("Rule 32_0: amalgareRabetei -> < ")
def p_R32_1(self, p):
'''amalgareRabetei : LE '''
p[0] = Entry()
p[0].place = '<='
print("Rule 32: amalgareRabetei -> <= ")
def p_R32_2(self, p):
'''amalgareRabetei : GE '''
p[0] = Entry()
p[0].place = '>='
print("Rule 32: amalgareRabetei -> >= ")
def p_R32_3(self, p):
'''amalgareRabetei : EQ '''
p[0] = Entry()
p[0].place = '=='
print("Rule 32: amalgareRabetei -> == ")
def p_R32_4(self, p):
'''amalgareRabetei : GT'''
p[0] = Entry()
p[0].place = '>'
print("Rule 32: amalgareRabetei -> >")
def p_R33_1(self, p):
'''ebarateRiaziManteghi1 : ebarateSade0
| ebarateRiaziManteghi1 amalgareRiazi1 ebarateRiaziManteghi1 '''
if len(p) == 2:
p[0] = p[1]
p[0].IDdetect = True and p[1].IDdetect
else:
p[1].detect = self.goto_detect(p[1].detect)
p[3].detect = self.goto_detect(p[3].detect)
p[0] = Entry()
p[0].IDdetect = False
p[0].place = self.newTemp()
p[0].type = 'arith'
p[1].false_list = self.findFList(p[1].place)
p[3].false_list = self.findFList(p[3].place)
p[1].true_list = self.findTList(p[1].place)
p[3].true_list = self.findTList(p[3].place)
if p[1].type == 'char':
print("heeelooooo jooonemadaret ")
p[1].type = 'arith'
x = p[1].place
p[1].place = str(ord(x[0]))
if p[3].type == 'char':
print("heeelooooo jooonemadaret ")
p[3].type = 'arith'
x = p[3].place
p[3].place = str(ord(x[2]))
if p[3].type == None:
p[3].type = self.varType(p[3].place)
if p[1].type == None:
p[1].type = self.varType(p[1].place)
if p[1].type == 'arith' and p[3].type == 'arith':
self.quadruples.append(QuadRuple(p[2].place, p[1].place, p[3].place, p[0].place))
elif p[1].type == 'arith' and p[3].type == 'bool':
Entry.backpatch(p[3].true_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple(p[2].place, p[1].place, '1', p[0].place))
self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
Entry.backpatch(p[3].false_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple(p[2].place, p[1].place, '0', p[0].place))
elif p[1].type == 'bool' and p[3].type == 'arith':
Entry.backpatch(p[1].true_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple(p[2].place, p[3].place, '1', p[0].place))
self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
Entry.backpatch(p[1].false_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple(p[2].place, p[3].place, '0', p[0].place))
elif p[3].type == 'bool' and p[1].type == 'bool':
temp = self.newTemp()
Entry.backpatch(p[1].true_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple('', '1', '', temp))
self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
Entry.backpatch(p[1].false_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple('', '0', '', temp))
self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(p[3].true_list[0])))
Entry.backpatch(p[3].true_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple(p[2].place, temp, '1', p[0].place))
self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
Entry.backpatch(p[3].false_list, self.quadruples, len(self.quadruples))
self.quadruples.append(QuadRuple('', temp, '', p[0].place))
else:
print(' shit :| ')
print(
"Rule 33_1: ebarateRiaziManteghi1 -> ebarateSade0 "
" | ebarateRiaziManteghi1 amalgareRiazi1 ebarateRiaziManteghi1 ")
    def p_R33_2(self, p):
        '''ebarateRiaziManteghi2 : ebarateRiaziManteghi1
        | ebarateRiaziManteghi2 amalgareRiazi2 ebarateRiaziManteghi2
        '''
        # Additive-level expression: same shape as p_R33_1 (single-child
        # forwards; binary form emits quads by the operands' types).
        if len(p) == 2:
            p[0] = p[1]
            p[0].IDdetect = True and p[1].IDdetect
        else:
            p[1].detect = self.goto_detect(p[1].detect)
            p[3].detect = self.goto_detect(p[3].detect)
            p[0] = Entry()
            p[0].IDdetect = False
            p[0].place = self.newTemp()
            p[1].false_list = self.findFList(p[1].place)
            p[3].false_list = self.findFList(p[3].place)
            p[1].true_list = self.findTList(p[1].place)
            p[3].true_list = self.findTList(p[3].place)
            # Char literals are coerced to their code point (x[1] is the
            # character between the quotes).
            if p[1].type == 'char':
                p[1].type = 'arith'
                print("heeelooooo jooonemadaret ")
                x = p[1].place
                p[1].place = str(ord(x[1]))
            if p[3].type == 'char':
                print("heeelooooo jooonemadaret ")
                p[3].type = 'arith'
                x = p[3].place
                p[3].place = str(ord(x[1]))
            if p[3].type == None:
                p[3].type = self.varType(p[3].place)
            if p[1].type == None:
                p[1].type = self.varType(p[1].place)
            if p[1].type == 'arith' and p[3].type == 'arith':
                self.quadruples.append(QuadRuple(p[2].place, p[1].place, p[3].place, p[0].place))
            elif p[1].type == 'arith' and p[3].type == 'bool':
                # Materialise the bool operand as 1/0 via its jump lists.
                Entry.backpatch(p[3].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[1].place, '1', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[3].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[1].place, '0', p[0].place))
            elif p[1].type == 'bool' and p[3].type == 'arith':
                Entry.backpatch(p[1].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[3].place, '1', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[1].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, p[3].place, '0', p[0].place))
            elif p[3].type == 'bool' and p[1].type == 'bool':
                # Both bool: spill the left operand into a temp first.
                temp = self.newTemp()
                Entry.backpatch(p[1].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple('', '1', '', temp))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[1].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple('', '0', '', temp))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(p[3].true_list[0])))
                Entry.backpatch(p[3].true_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple(p[2].place, temp, '1', p[0].place))
                self.quadruples.append(QuadRuple('', '', '', 'goto ' + str(len(self.quadruples) + 2)))
                Entry.backpatch(p[3].false_list, self.quadruples, len(self.quadruples))
                self.quadruples.append(QuadRuple('', temp, '', p[0].place))
            else:
                # Unhandled type combination — left as a loud debug print.
                print(' shit :| ')
        print("Rule 33_2: ebarateRiaziManteghi2 -> ebarateRiaziManteghi1 "
              " | ebarateRiaziManteghi2 amalgareRiazi2 ebarateRiaziManteghi2 ")
def p_R34_0(self, p):
'''
amalgareRiazi1 : TIMES
'''
p[0] = Entry()
p[0].place = '*'
print("Rule 34_0: amalgareRiazi1 -> * ")
def p_R34_1(self, p):
'''
amalgareRiazi1 : DIVIDE
'''
p[0] = Entry()
p[0].place = '/'
print("Rule 34_1: amalgareRiazi1 -> / ")
def p_R34_2(self, p):
'''
amalgareRiazi1 : MOD
'''
p[0] = Entry()
p[0].place = '%'
print("Rule 34_2: amalgareRiazi1 -> % ")
def p_R34_3(self, p):
'''
amalgareRiazi2 : PLUS
'''
p[0] = Entry()
p[0].place = '+'
print("Rule 34_3 : amalgareRiazi -> + ")
def p_R34_4(self, p):
'''
amalgareRiazi2 : MINUS
'''
p[0] = Entry()
p[0].place = '-'
print("Rule 34_4 : amalgareRiazi -> - ")
    def p_R35_0(self, p):
        '''
        ebarateYegani : amalgareYegani ebarateYegani
        '''
        # Unary expression: only '-' is folded, by textually prefixing the
        # operand's place (same handling as p_30_0_0).
        if p[1].place == '-':
            p[1].place = p[1].place + p[2].place
        p[1].type = 'arith'
        p[0] = p[1]
        # TODO ssssaaaaaaaaggggg
        print("Rule 35_0 : ebarateYegani -> amalgareYegani ebarateYegani")
def p_R35_1(self, p):
'''
ebarateYegani : ID
'''
p[0] = Entry()
p[0].IDdetect = True
p[0].place = p[1]
print("Rule 35_1: ebarateYegani -> ID ")
    def p_R35_2(self, p):
        '''
        ebarateYegani : taghirpazir LBR ebarat RBR
        '''
        # Array element as a unary operand — code generation not implemented.
        print("Rule 35_2: ebarateYegani -> taghirpazir LBR ebarat RBR ")
    def p_R35_3(self, p):
        '''
        ebarateYegani : taghirpazir DOT ID
        '''
        # Member access as a unary operand — code generation not implemented.
        # TODO after a while :D
        print("Rule 35_3: ebarateYegani -> taghirpazir DOT ID ")
    def p_R35_4(self, p):
        '''
        ebarateYegani : taghirnapazir
        '''
        # Immutable operand: forward the entry unchanged.
        p[0] = p[1]
        print("Rule 35_4: ebarateYegani -> taghirnapazir")
def p_R36_0(self, p):
'''
amalgareYegani : MINUS
'''
p[0] = Entry()
p[0].place = '-'
print("Rule 36: amalgareYegani -> - ")
def p_R36_1(self, p):
'''
amalgareYegani : TIMES
'''
p[0] = Entry()
p[0].place = '*'
print("Rule 36: amalgareYegani -> * ")
def p_R36_2(self, p):
'''
amalgareYegani : QM
'''
p[0] = Entry()
p[0].place = '?'
print("Rule 36: amalgareYegani -> ? ")
def p_R38_0(self, p):
'''
taghirpazir : ID
'''
p[0] = Entry()
p[0].place = p[1]
p[0].IDdetect = True
print("Rule 38_0: taghirpazir -> ID")
    def p_R38_1(self, p):
        '''
        taghirpazir : taghirpazir LBR ebarat RBR
        '''
        # Array-element lvalue: the place is the base name concatenated
        # with the index expression's place.
        p[0] = Entry()
        p[0].place = p[1].place + str(p[3].place)
        p[0].true_list = self.findTList(p[0].place)
        p[0].false_list = self.findFList(p[0].place)
        p[0].type = self.findType(p[0].place)
        p[0].detect = len(self.quadruples) + 1
        print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect, p[0].true_list,
              p[0].false_list)
        # self.quadruples.append(QuadRuple('', '', '', 'goto'))
        print("Rule 38_1: taghirpazir -> taghirpazir LBR ebarat RBR ")
    def p_R38_2(self, p):
        '''
        taghirpazir : taghirpazir DOT ID
        '''
        # Member-access lvalue — code generation not implemented yet.
        # TODO after a while :D
        print("Rule 38_2: taghirpazir -> taghirpazir DOT ID ")
    def p_R39_0(self, p):
        '''
        taghirnapazir : LPAREN ebarat RPAREN
        '''
        # Parenthesised expression: unwrap.
        p[0] = p[2]
        print("Rule 39: taghirnapazir -> LPAREN ebarat RPAREN ")
    def p_R39_1(self, p):
        '''
        taghirnapazir : sedaZadan
        '''
        # Function call as an rvalue — code generation not implemented yet.
        # TODO after a while :D
        print("Rule 39_1: taghirnapazir -> sedaZadan ")
    def p_R39_2(self, p):
        '''
        taghirnapazir : meghdaresabet
        '''
        # Constant: forward the entry but clear the ID flag.
        p[0] = p[1]
        p[0].IDdetect = False
        print("Rule 39_2: taghirnapazir -> meghdaresabet")
    def p_R40(self, p):
        '''
        sedaZadan : ID LPAREN bordareVorudiha RPAREN
        | ID LPAREN RPAREN
        '''
        # Function call — code generation not implemented yet.
        # TODO after a while :D
        print("Rule 40: sedaZadan -> ID LPAREN bordareVorudiha RPAREN | ID LPAREN RPAREN ")
def p_R42(self, p):
'''
bordareVorudiha : bordareVorudiha COMMA ebarat
| ebarat
'''
if len(p) == 2:
p[0] = p[1]
else:
p[0] = p[3]
print("Rule 42: bordareVorudiha -> bordareVorudiha COMMA ebarat | ebarat ")
    def p_R43(self, p):
        '''
        meghdaresabet : ICONST
        '''
        # Integer constant.  It is first given boolean jump-list info
        # (positive counts as true) before the type is finally forced back
        # to 'arith' at the end.
        p[0] = Entry()
        p[0].place = p[1]
        num = int(p[1])
        if num > 0 :
            p[0].detect = len(self.quadruples) + 1
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
                  p[0].true_list, p[0].false_list)
            p[0].type = 'bool'
            p[0].true_list = [len(self.quadruples)]
        else:
            p[0].detect = len(self.quadruples) + 1
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ", p[0].detect,
                  p[0].true_list, p[0].false_list)
            p[0].type = 'bool'
            p[0].false_list = [len(self.quadruples)]
        self.printQuadruples()
        # NOTE(review): 'bool' assigned above is deliberately(?) overwritten
        # here; the true/false lists survive — confirm this is intended.
        p[0].type = 'arith'
        print("Rule 43 : meghdaresabet -> ICONST ")
def p_R44(self, p):
'''
meghdaresabet : CCONST
'''
print("heeeeeeeeeeey : ", p[1])
p[0] = Entry()
p[0].place = p[1]
p[0].type = 'char'
print("Rule 44 : meghdaresabet -> CCONST ")
    def p_R45(self, p):
        '''
        meghdaresabet : TRUE
        '''
        # Boolean literal "true": reserve an unfilled goto quad and record
        # it in the constant's true list for later backpatching.
        p[0] = Entry()
        p[0].place = p[1]
        p[0].detect = len(self.quadruples) + 1
        print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> " , p[0].detect , p[0].true_list , p[0].false_list)
        p[0].type = 'bool'
        p[0].true_list = [len(self.quadruples)]
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
        # var = [p[0].place,p[0].type, p[0].true_list, p[0].false_list]
        # self.symbolTable.append(var)
        print("Rule 43 : meghdaresabet -> TRUE ")
    def p_R46(self, p):
        '''
        meghdaresabet : FALSE
        '''
        # Boolean literal "false": reserve an unfilled goto quad and record
        # it in the constant's false list for later backpatching.
        p[0] = Entry()
        p[0].place = p[1]
        p[0].detect = len(self.quadruples) + 1
        print(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> " , p[0].detect , p[0].true_list , p[0].false_list)
        p[0].type = 'bool'
        p[0].false_list = [len(self.quadruples)]
        self.quadruples.append(QuadRuple('', '', '', 'goto'))
        # var = [p[0].place, p[0].type, p[0].true_list, p[0].false_list]
        # self.symbolTable.append(var)
        print("Rule 43 : meghdaresabet -> FALSE ")
def p_error(self, t):
print("what the fuck ... whoa whoa jesus christ", t)
    def build(self, **kwargs):
        '''
        build the parser
        '''
        # Construct the LALR parser from this object's p_* rule methods via
        # PLY's yacc; extra kwargs are forwarded (e.g. debug, write_tables).
        self.parser = yacc.yacc(module=self, **kwargs)
        return self.parser
def var_handler(self, word):
    # Map a source identifier to a generated C variable name, reusing an
    # existing mapping when one was already created for `word`.
    for known, alias in self.definedvar:
        if known == word:
            return alias
    alias = 'var' + str(self.counter2)
    self.definedvar.append([word, alias])
    self.counter2 += 1
    return alias
def numconverter(self, num):
    # Translate one Persian (Extended Arabic-Indic) digit to its ASCII digit.
    # Returns False (a sentinel, kept for caller compatibility) for anything else.
    persian_digits = {
        '\u06f0': '0', '\u06f1': '1', '\u06f2': '2', '\u06f3': '3',
        '\u06f4': '4', '\u06f5': '5', '\u06f6': '6', '\u06f7': '7',
        '\u06f8': '8', '\u06f9': '9',
    }
    return persian_digits.get(num, False)
def checkinSym(self, word):
    # True when `word` is already recorded (first column) in the symbol table.
    return any(entry[0] == word for entry in self.symbolTable)
def generateC(self):
    """Emit (print) a crude C translation of the parsed program.

    Declarations come from self.symbolTable, initial assignments from
    self.meghdardehi, and statements from self.quadruples.  Output lines
    are numbered ('L n :') so goto targets can be rebased from quadruple
    indexes to output line numbers.
    """
    qNum = 0
    line = 0
    line += 1
    print('L', line, ': ', "int main () {")
    # --- declarations and initial assignments -----------------------------
    for s in self.symbolTable:
        type = ""
        if s[1] == 'arith':
            type = "int"
        elif s[1] == "bool":
            type = "bool"
        elif s[1] == "char":
            type = "char"
        # The boolean literals درست/غلط are not variables; skip them.
        if s[0] == "غلط" or s[0] == "درست":
            continue
        name = self.var_handler(s[0])
        line += 1
        print('L', line, ': ', type, name, ";")
        line += 1
        for var in self.meghdardehi:
            if s[0] == var[0]:
                # Translate the initial value: Persian digit, ASCII number,
                # boolean literal, or a bare character.
                kooft = ""
                if not (self.numconverter(var[1]) == False):
                    kooft = self.numconverter(var[1])
                elif var[1].lstrip('-+').isdigit():
                    kooft = var[1]
                elif var[1] == 'درست':
                    kooft = '1'
                elif var[1] == 'غلط':
                    kooft = '0'
                elif var[1].isalpha():
                    kooft = var[1]  # TODO man convert nakardam age char farsi bashe !
                if kooft != '1111111111111111':
                    print('L', line, ': ', name, ' = ', kooft, " ;")
    # --- statements from the quadruples -----------------------------------
    for q in self.quadruples:
        qNum += 1
        arg1 = ""
        arg2 = ""
        op = ""
        result = ""
        # Resolve the first operand: temp, symbol, Persian digit, number,
        # boolean literal, or character.
        if q.arg_one.startswith("Temp"):
            arg1 = q.arg_one
        elif self.checkinSym(q.arg_one):
            arg1 = self.var_handler(q.arg_one)
        elif not (self.numconverter(q.arg_one) == False):
            arg1 = self.numconverter(q.arg_one)
        elif q.arg_one.lstrip('-+').isdigit():
            arg1 = q.arg_one
        elif q.arg_one == 'درست':
            arg1 = '1'
        elif q.arg_one == 'غلط':
            arg1 = '0'
        elif q.arg_one.isalpha():
            arg1 = q.arg_one  # TODO man convert nakardam age char farsi bashe !
        else:
            arg1 = "aaannn e man :) "
        # Same resolution for the second operand.
        if q.arg_two.startswith("Temp"):
            arg2 = q.arg_two
        elif self.checkinSym(q.arg_two):
            arg2 = self.var_handler(q.arg_two)
        elif not (self.numconverter(q.arg_two) == False):
            arg2 = self.numconverter(q.arg_two)
        elif q.arg_two.lstrip('-+').isdigit():
            arg2 = q.arg_two
        elif q.arg_two == 'درست':
            arg2 = '1'
        elif q.arg_two == 'غلط':
            arg2 = '0'
        elif q.arg_two.isalpha():
            arg2 = q.arg_two  # TODO man convert nakardam age char farsi bashe !
        else:
            arg2 = "aaannn e man :) "
        # The result slot is either a destination name or a goto target list.
        if q.result.startswith("Temp"):
            result = q.result
        elif self.checkinSym(q.result):
            result = self.var_handler(q.result)
        elif q.result == "goto":
            pass  # TODO
        else:
            result = "aaannn e man :) "
        # BUGFIX: the original tested q.op == '-' twice and never matched '/',
        # so division statements were silently dropped.
        if q.op == '+' or q.op == '-' or q.op == '*' or q.op == '/':
            line += 1
            print('L', line, ': ', result, " = ", arg1, q.op, arg2, ";")
        elif q.op == '=':
            line += 1
            print('L', line, ': ', result, " = ", arg1, ";")
        elif (q.op == '<' or q.op == '>' or q.op == '<=' or q.op == '>=' or q.op == '==' or q.op == '!='):
            line += 1
            if q.result == 'goto' or q.result == 'goto ':
                print('L', line, ': ', "if ", arg1, q.op, arg2, q.result)
            else:
                # "goto i j k" -> rebase the largest backpatched quad index
                # onto output line numbering.
                n1 = (q.result.strip()[4:])
                max = 0
                for i in n1.split(" "):
                    if not i == "":
                        if max < int(i):
                            max = int(i)
                newLine = max - qNum + 1 + line
                print('L', line, ': ', "if ", arg1, q.op, arg2, 'goto', newLine)
        elif q.op == '' and q.result.strip()[:4] == 'goto':
            line += 1
            if q.result == 'goto' or q.result == 'goto ':
                print('L', line, ': ', q.result)
            else:
                n2 = q.result.strip()[4:]
                max = 0
                for i in n2.split(" "):
                    if not i == "":
                        if max < int(i):
                            max = int(i)
                newLine = max - qNum + 1 + line
                print('L', line, ': ', 'goto', newLine)
    print("}")
if __name__ == "__main__":
    # Standalone smoke test: parse a sample Persian program and dump results.
    data = '''برنامه تست
حرف اصلي ( ) {
صحیح منیره = 1 ;
کلید ( منیره )
حالت 0 : اگر (منیره == 4)
انگاه
منیره = منیره + 1 ;;
حالت 2 : منیره = 4 ;;
تمام
وقتی (منیره < 8)
منیره = 9;
}
'''
    path = "code.txt"
    # NOTE(review): the file is opened and closed but never read - the
    # `data = f.read()` line below is commented out, so the inline sample
    # program above is what actually gets parsed.
    f = open(path)
    # data = f.read()
    f.close()
    parser = Parser()
    p = parser.build()
    p.parse(data, lexer=lexercompiler.build(), debug=False)
    parser.printQuadruples()
    parser.printSymbol()
    print()
    parser.generateC()
import ply.lex as lex
import io
# Reserved words
# Reserved-word token names (the lexemes themselves are Persian; see t_ID).
reserved = (
    'PROGRAM', 'MAIN', 'STRUCT', 'CONSTANT', 'CHAR', 'BOOL',
    'FLOAT', 'INT', 'IF', 'THEN', 'ELSE', 'KEY', 'END', 'STATE',
    'DEFAULT', 'WHILE', 'RETURN', 'BREAK', 'OR', 'AND',
    'ORELSE', 'ANDTHEN', 'NOT', 'FALSE', 'TRUE'
)

# Full token list consumed by PLY (reserved words plus literals/operators).
tokens = reserved + (
    # Literals (identifier, integer constant, float constant, string constant,
    # char const)
    'ID', 'TYPEID', 'ICONST', 'CCONST',
    # Operators (+,-,*,/,%, !, <, <=, >, >=, == ,?)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'QM',
    # Assignment (=, *=, /=, +=, -= )
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    # Increment/decrement (++,--)
    'PLUSPLUS', 'MINUSMINUS',
    # Delimeters ( ) { } , ; : . [ ]
    'LPAREN', 'RPAREN',
    'LBRACE', 'RBRACE',
    'COMMA', 'SEMI', 'COLON', 'DOT',
    'LBR', 'RBR'
)

# Completely ignored characters (space, tab, form feed).
t_ignore = ' \t\x0c'
# Newlines
# Newlines: PLY uses the docstring as the token regex; track line numbers
# so error messages can report a location.
def t_NEWLINE(t):
    r'\n+'
    t.lexer.lineno += t.value.count("\n")


# Override table consulted by t_ID before its built-in keyword checks;
# currently never populated.
reserved_map = {}
#identifier & reserved words
# Maps each Persian reserved word to its token type.  Replaces the original
# 25-branch elif chain; every branch did `reserved_map.get(t.value, TYPE)`,
# which this table reproduces via the default argument below.
_KEYWORD_TYPES = {
    '\u0628\u0631\u0646\u0627\u0645\u0647': 'PROGRAM',                # برنامه
    '\u0627\u0635\u0644\u06CC': 'MAIN',                               # اصلی
    '\u0633\u0627\u062E\u062A\u0627\u0631': 'STRUCT',                 # ساختار
    '\u062B\u0627\u0628\u062A': 'CONSTANT',                           # ثابت
    '\u062D\u0631\u0641': 'CHAR',                                     # حرف
    '\u0645\u0646\u0637\u0642\u06CC': 'BOOL',                         # منطقی
    '\u0627\u0639\u0634\u0627\u0631\u06CC': 'FLOAT',                  # اعشاری
    '\u0635\u062D\u06CC\u062D': 'INT',                                # صحیح
    '\u0627\u06AF\u0631': 'IF',                                       # اگر
    '\u0627\u0646\u06AF\u0627\u0647': 'THEN',                         # آنگاه
    '\u0648\u06AF\u0631\u0646\u0647': 'ELSE',                         # وگرنه
    '\u06A9\u0644\u06CC\u062F': 'KEY',                                # کلید
    '\u062A\u0645\u0627\u0645': 'END',                                # تمام
    '\u062D\u0627\u0644\u062A': 'STATE',                              # حالت
    '\u067E\u06CC\u0634\u0641\u0631\u0636': 'DEFAULT',                # پیشفرض
    '\u0648\u0642\u062A\u06CC': 'WHILE',                              # وقتی
    '\u0628\u0631\u06AF\u0631\u062F\u0627\u0646': 'RETURN',           # برگردان
    '\u0628\u0634\u06A9\u0646': 'BREAK',                              # بشکن
    '\u06CC\u0627': 'OR',                                             # یا
    '\u0648': 'AND',                                                  # و
    '\u06CC\u0627\u0648\u06AF\u0631\u0646\u0647': 'ORELSE',           # یا وگرنه
    '\u0648\u0647\u0645\u0686\u0646\u06CC\u0646': 'ANDTHEN',          # و همچنین
    '\u062E\u0644\u0627\u0641': 'NOT',                                # خلاف
    '\u063A\u0644\u0637': 'FALSE',                                    # غلط
    '\u062F\u0631\u0633\u062A': 'TRUE',                               # درست
}


def t_ID(t):
    r'[\u0627\u0628\u067E\u062A\u062B\u062C\u0686\u062D' \
    r'\u062E\u062F\u0630\u0631\u0632\u0698\u0633\u0634\u0635\u0636\u0637' \
    r'\u0638\u0639\u063A\u0641\u0642\u06A9\u06AF\u0644\u0645\u0646\u0648\u0647\u06CC' \
    r'\u0061\u0062\u0063\u0064\u0065\u0066\u0067\u0068\u0069\u006a\u006b\u006c\u006d' \
    r'\u006e\u006f\u0070\u0071\u0072\u0073\u0074\u0075\u0076\u0077\u0078\u0079\u007a_][\w_]*'
    # NOTE: the (raw-string) docstring above is the PLY token regex - do not edit.
    # Reserved Persian words get their keyword token type; anything else is ID.
    # reserved_map still takes precedence, as in the original elif chain.
    t.type = reserved_map.get(t.value, _KEYWORD_TYPES.get(t.value, "ID"))
    return t
# Operators
# NOTE: PLY sorts simple-string rules by decreasing regex length, so the
# two-character rules (<=, >=, ==, +=, ++, ...) correctly win over their
# one-character prefixes regardless of declaration order here.
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_QM = r'\?'

# Assignment operators
t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='

# Increment/decrement
t_PLUSPLUS = r'\+\+'
t_MINUSMINUS = r'--'

# Delimeters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_COMMA = r','
t_SEMI = r';'
t_COLON = r':'
t_DOT = r'\.'
t_LBR = r'\['
t_RBR = r'\]'

# Integer literal (optional C-style u/l suffixes)
t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
# Set of letters allowed in identifiers.
# NOTE(review): this set is never used - Python does not interpolate names
# into raw strings, so it cannot feed the regex below.
L2 = {'\u0627', '\u0628', '\u067E', '\u062A', '\u062B', '\u062C', '\u0686', '\u062D',
      '\u062E', '\u062F', '\u0630', '\u0631', '\u0632', '\u0698', '\u0633', '\u0634', '\u0635', '\u0636', '\u0637',
      '\u0638', '\u0639', '\u063A', '\u0641', '\u0642', '\u06A9', '\u06AF', '\u0644', '\u0645', '\u0646', '\u0648',
      '\u0647', '\u06CC',
      '\u0061', '\u0062', '\u0063', '\u0064', '\u0065', '\u0066', '\u0067', '\u0068', '\u0069', '\u006a', '\u006b',
      '\u006c', '\u006d', '\u006e', '\u006f', '\u0070', '\u0071', '\u0072', '\u0073', '\u0074', '\u0075', '\u0076',
      '\u0077', '\u0078', '\u0079', '\u007a'}
# Character constant 'c' or L'c'
# NOTE(review): `(L2)?` matches the two literal characters "L2", not the set
# above - presumably an optional wide-char prefix was intended; confirm.
t_CCONST = r'(L2)?\'([^\\\n]|(\\.))*?\''
# Comments
# Comments: /* ... */ blocks or a bare // marker.  The docstring is the PLY
# regex; newlines inside a block comment still advance the line counter.
def t_comment(t):
    r'(/\*(.|\n)*?\*/)|//'
    t.lexer.lineno += t.value.count('\n')
# Preprocessor directive (ignored)
# Preprocessor directive (ignored): consumes a whole '#...' line.
def t_preprocessor(t):
    r'\#(.)*?\n'
    t.lexer.lineno += 1
# PLY error hook: report the offending character and skip past it so lexing
# can continue instead of aborting.
def t_error(t):
    print("Illegal character %s" % repr(t.value[0]))
    t.lexer.skip(1)
def build():
    '''
    build the lexer
    '''
    # Assemble a lexer from this module's t_* rules and hand it back.
    return lex.lex()
if __name__ == "__main__":
    # lex.runmain(lexer)
    # Standalone smoke test: tokenize code.txt and print every token.
    path = "code.txt"
    f = open(path)
    # with io.open(path, 'r', encoding='utf8') as f:
    #     text = f.read()
    text = f.read()
    f.close()
    lexer = lex.lex()
    lexer.input(text)
    #f2 = open("res.txt" , "w+")
    for token in lexer:
        s = str (token)
        x = s.find(",")  # only needed by the commented-out f2.write below
        print(s)
        # f2.write(s[9:x] + "\n")
    #f2.close()
class QuadRuple:
def __init__(self, op, arg1, arg2, result):
self.op = op
self.arg_one = arg1
self.arg_two = arg2
self.result = result
| {"/parsercompiler.py": ["/lexercompiler.py", "/QuadRuples.py", "/SymbolTable.py"]} |
class Entry:
    def __init__(self):
        """Semantic record for one grammar symbol during parsing."""
        self.true_list = []    # quadruple indexes to backpatch when the condition is true
        self.false_list = []   # quadruple indexes to backpatch when it is false
        self.next_list = []    # indexes of jumps to the statement following this one
        self.type = None       # 'arith' / 'bool' / 'char' (set by the grammar rules)
        self.value = None
        self.quad = 0
        self.place = ""        # lexeme or temporary name carrying the value
        self.lenght = 1        # sic - misspelling kept; external code reads this name
        self.detect = 0
        self.IDdetect = False
        # self.case_dict = []
@staticmethod
def backpatch(indexes, quad_list, target):
for index in indexes:
quad_list[index].result += ' ' + str(target) | {"/parsercompiler.py": ["/lexercompiler.py", "/QuadRuples.py", "/SymbolTable.py"]} |
# Atlas cell (column, row) for each tile id.
tiles = {
    '1': (0, 0),
    '3': (2, 0),
}

# The texture atlas is tex_tiles x tex_tiles cells.
tex_tiles = 8.0

# Two triangles covering the unit square (six vertices).
tri_coords = [(0, 0), (1, 0), (0, 1), (1, 0), (1, 1), (0, 1)]

# Per-tile list of (tex_u, tex_v, corner_u, corner_v), one tuple per vertex.
vertex_coords = {}
for _tid, (_x, _y) in tiles.items():
    vertex_coords[_tid] = [((_x + u) / tex_tiles, (_y + 1 - v) / tex_tiles, u, v)
                           for (u, v) in tri_coords]
import random
def gen_map():
    """Carve up to 20 random rooms ('3') out of a 40x40 wall grid ('1').

    A room is only placed when it, plus a one-tile border, is still solid
    wall, so rooms never touch each other or the map edge.
    """
    grid = [['1'] * 40 for _ in range(40)]
    for _ in range(20):
        w = random.randrange(3, 12)
        h = random.randrange(3, 12)
        x = random.randrange(1, 39 - w)
        y = random.randrange(1, 39 - h)
        clear = all(grid[u][v] == '1'
                    for u in range(x - 1, x + w + 1)
                    for v in range(y - 1, y + h + 1))
        if clear:
            for u in range(x, x + w):
                for v in range(y, y + h):
                    grid[u][v] = '3'
    return grid
class DungeonMap:
    """A randomly generated dungeon grid plus a vertex stream for rendering it."""

    def __init__(self):
        # Fresh random 40x40 layout of walls and rooms.
        self.map = gen_map()

    def vertex_data(self):
        """Yield interleaved floats (x, y, z, w, tu, tv, 0, 0) per tile vertex."""
        for row_idx, row in enumerate(self.map):
            for col_idx, tile_id in enumerate(row):
                for tu, tv, u, v in vertex_coords[tile_id]:
                    for component in (col_idx + u, row_idx + v, 0, 1, tu, tv, 0, 0):
                        yield component
| {"/main.py": ["/shaders.py", "/gltools/glprogram.py", "/dungeon_map.py"]} |
vertex_sprite = b'''
#version 130
attribute vec4 position;
attribute vec4 tex_coord;
out vec4 frag_tex_coord;
uniform vec2 scale;
uniform vec2 offset;
void main()
{
gl_Position = vec4((position.x-offset.x) * scale.x,
(position.y-offset.y) * scale.y,
position.z / 256.0,
1.0);
frag_tex_coord = tex_coord;
}
'''
# Fragment shader: samples the bound 2D texture at the interpolated coordinate.
fragment_sprite = b'''
#version 130
uniform sampler2D tex;
varying vec4 frag_tex_coord;
out vec4 FragColor;
void main()
{
FragColor = texture2D(tex, frag_tex_coord.xy);
}
'''
| {"/main.py": ["/shaders.py", "/gltools/glprogram.py", "/dungeon_map.py"]} |
import PIL.Image
from pyglet import gl
import logging
import ctypes
def make_texture(filename, indexed=False):
    """Load an image file into a new OpenGL 2D texture and return its GL name.

    With indexed=True the file must be a palette ('P') image and is uploaded
    as a single-channel GL_R8 texture; otherwise it is converted to RGBA and
    uploaded as GL_RGBA8.  Nearest filtering and clamp-to-edge wrapping are
    applied.  (Consolidates the two near-identical upload branches of the
    original into one glTexImage2D call.)
    """
    name = gl.GLuint(0)
    gl.glGenTextures(1, ctypes.pointer(name))
    gl.glBindTexture(gl.GL_TEXTURE_2D, name)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_NEAREST)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_EDGE)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_EDGE)

    image = PIL.Image.open(filename)
    if indexed:
        # Palette images are uploaded raw: one byte per pixel.
        assert image.mode == 'P'
        channels, internal_format, pixel_format = 1, gl.GL_R8, gl.GL_RED
    else:
        image = image.convert('RGBA')
        channels, internal_format, pixel_format = 4, gl.GL_RGBA8, gl.GL_RGBA
    logging.debug('loading %s mode=%s', filename, image.mode)

    width, height = image.size
    data = image.tobytes()
    # Sanity check: raw buffer size must match the expected pixel count.
    assert len(data) == width * height * channels
    gl.glTexImage2D(gl.GL_TEXTURE_2D,
                    0,  # mipmap level
                    internal_format,
                    width,
                    height,
                    0,  # border (must be 0)
                    pixel_format,
                    gl.GL_UNSIGNED_BYTE,
                    ctypes.create_string_buffer(data))
    gl.glBindTexture(gl.GL_TEXTURE_2D, 0)  # unbind to avoid leaking state
    return name
| {"/main.py": ["/shaders.py", "/gltools/glprogram.py", "/dungeon_map.py"]} |
import sys
import logging.config
import ctypes
import pyglet
from pyglet import gl
from pyglet.window import key
import shaders
from gltools.glprogram import GlProgram
from gltools import gltexture
from dungeon_map import DungeonMap
class Application:
    """Pyglet window event handler: owns GL resources and the dungeon map."""

    def __init__(self, window):
        self.window = window
        self.initialize_gl()
        self.dungeon_map = DungeonMap()

    def update(self, dt):
        # Scheduled every 10 ms by main(); no per-frame simulation yet.
        pass

    def initialize_gl(self):
        # Compile the sprite shader, create the vertex buffer and atlas texture.
        self.program = GlProgram(shaders.vertex_sprite, shaders.fragment_sprite)
        self.program.uniform2f(b'offset', 0, 0)
        self.buffer = gl.GLuint(0)
        gl.glGenBuffers(1, ctypes.pointer(self.buffer))
        gl.glActiveTexture(gl.GL_TEXTURE0)
        self.sprite_texture = gltexture.make_texture('sprites.png')
        gl.glBindTexture(gl.GL_TEXTURE_2D, self.sprite_texture)
        self.program.uniform1i(b'tex', 0)
        gl.glEnable(gl.GL_BLEND)
        gl.glEnable(gl.GL_DEPTH_TEST)
        gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)

    def on_resize(self, width, height):
        logging.debug('window resized to %sx%s', width, height)
        gl.glViewport(0, 0, width, height)
        TILE_SIZE = 64
        # Scale maps one map tile to TILE_SIZE pixels in clip space.
        self.program.uniform2f(b'scale', 2*TILE_SIZE/width, 2*TILE_SIZE/height)
        self.vp_width = width
        self.vp_height = height

    def on_draw(self):
        gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
        # Interleaved layout: 4 position floats + 4 texcoord floats per vertex.
        STRIDE = 8
        self.program.use()
        self.program.vertex_attrib_pointer(self.buffer, b'position', 4, stride=STRIDE * ctypes.sizeof(gl.GLfloat))
        self.program.vertex_attrib_pointer(self.buffer, b'tex_coord', 4, stride=STRIDE * ctypes.sizeof(gl.GLfloat), offset=4 * ctypes.sizeof(gl.GLfloat))
        # 6 vertices per tile on a 40x40 map; geometry is re-uploaded each frame.
        nb_vertices = 6*40*40
        data = self.dungeon_map.vertex_data()
        data = (gl.GLfloat * (STRIDE * nb_vertices))(*data)
        gl.glBufferData(gl.GL_ARRAY_BUFFER, ctypes.sizeof(data), data, gl.GL_DYNAMIC_DRAW)
        gl.glDrawArrays(gl.GL_TRIANGLES, 0, nb_vertices)

    def on_key_press(self, symbol, modifiers):
        logging.debug('Key Press {} {}'.format(symbol, modifiers))
        if symbol == key.I:
            # Press 'I' to log the current frame rate.
            logging.info('FPS: {}'.format(pyglet.clock.get_fps()))
def initialize_gl(context):
    # One-time context setup: log the driver version and set the clear color.
    logging.info('OpenGL Version {}'.format(context.get_info().get_version()))
    gl.glClearColor(0.5, 0.5, 0.35, 1)
def main():
    """Configure logging, create the window and application, run the event loop.

    Exits with status 1 after logging any exception that escapes the loop.
    """
    logging.config.fileConfig('logging.conf')
    try:
        # On Windows, opt out of DPI virtualization so pixel sizes are exact.
        if not ctypes.windll.user32.SetProcessDPIAware():
            logging.error('could not set dpi awareness')
    except AttributeError:
        # Not on Windows: ctypes.windll does not exist.
        pass
    try:
        window = pyglet.window.Window(resizable=True)
        initialize_gl(window.context)
        app = Application(window)
        window.push_handlers(app)
        pyglet.clock.schedule_interval(app.update, 0.01)
        pyglet.app.run()
    except Exception:
        # BUGFIX: was a bare `except:`; narrowed so KeyboardInterrupt and
        # SystemExit propagate instead of being logged as crashes.
        logging.exception('Uncaught Exception')
        sys.exit(1)


if __name__ == '__main__':
    main()
| {"/main.py": ["/shaders.py", "/gltools/glprogram.py", "/dungeon_map.py"]} |
import logging
import ctypes
from pyglet import gl
def shader(stype, src):
    '''
    create and compile a shader of type stype with source code src
    return the shader (the return value of glCreateShader)

    The GLSL info log is written to the debug log whether or not compilation
    succeeds; on failure an Exception is raised.
    '''
    handle = gl.glCreateShader(stype)
    buffer = ctypes.create_string_buffer(src)
    # glShaderSource expects a char** plus an array of source lengths.
    buf_pointer = ctypes.cast(ctypes.pointer(ctypes.pointer(buffer)), ctypes.POINTER(ctypes.POINTER(ctypes.c_char)))
    length = ctypes.c_int(len(src) + 1)
    gl.glShaderSource(handle, 1, buf_pointer, ctypes.byref(length))
    gl.glCompileShader(handle)
    success = gl.GLint(0)
    gl.glGetShaderiv(handle, gl.GL_COMPILE_STATUS, ctypes.pointer(success))
    # Fetch and log the info log even on success - warnings matter too.
    length = gl.GLint(0)
    gl.glGetShaderiv(handle, gl.GL_INFO_LOG_LENGTH, ctypes.pointer(length))
    buffer = ctypes.create_string_buffer(length.value)
    gl.glGetShaderInfoLog(handle, length, None, buffer)
    log = buffer.value[:length.value].decode('ascii')
    for line in log.splitlines():
        logging.debug('GLSL: ' + line)
    if not success:
        raise Exception('Compiling of the shader failed.')
    return handle
class GlProgram:
    """Thin wrapper around an OpenGL shader program (link, bind, set inputs)."""

    def __init__(self, vertex_shader, fragment_shader):
        self.handle = gl.glCreateProgram()
        logging.debug('GLProgram.__init__: self.handle = %r', self.handle)
        gl.glAttachShader(self.handle, shader(gl.GL_VERTEX_SHADER, vertex_shader))
        gl.glAttachShader(self.handle, shader(gl.GL_FRAGMENT_SHADER, fragment_shader))
        gl.glLinkProgram(self.handle)
        # call use() early so some programm errors are caught early
        self.use()

    def use(self):
        """Make this program the active one."""
        gl.glUseProgram(self.handle)

    def vertex_attrib_pointer(self, buffer, name, size, dtype=gl.GL_FLOAT, normalized=False, stride=0, offset=0):
        """Bind `buffer` and describe attribute `name` (a bytes object) for it."""
        self.use()
        loc = gl.glGetAttribLocation(self.handle, ctypes.create_string_buffer(name))
        if loc < 0:
            logging.warning('Attribute %s is not in the shader.', name)
            return
        gl.glEnableVertexAttribArray(loc)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, buffer)
        gl.glVertexAttribPointer(loc, size, dtype, normalized, stride, ctypes.c_void_p(offset))

    def _uniform_location(self, name):
        """Return the location of uniform `name`; negative (and logged) if absent.

        Shared helper - the lookup/warn sequence was duplicated in every
        uniform setter.
        """
        self.use()
        loc = gl.glGetUniformLocation(self.handle, ctypes.create_string_buffer(name))
        if loc < 0:
            logging.warning('Uniform %s is not in the shader.', name)
        return loc

    def uniform1i(self, name, value):
        """Set an int (or sampler) uniform."""
        loc = self._uniform_location(name)
        if loc < 0:
            return
        gl.glUniform1i(loc, value)

    def uniform2f(self, name, v0, v1):
        """Set a vec2 uniform."""
        loc = self._uniform_location(name)
        if loc < 0:
            return
        gl.glUniform2f(loc, v0, v1)
| {"/main.py": ["/shaders.py", "/gltools/glprogram.py", "/dungeon_map.py"]} |
# 28 Nov 2017 | Zomato Client
"""Zomato Client
Library that:
1. From database, retrieves parameters that restrict data fetched from Zomato.com
2. Fetches data from Zomato.com via Zomato's public APIs
3. Populates the data into the Zomato datamart
API Documentation: https://developers.zomato.com/api#headline1
"""
import requests
import logging
import json
from mylibrary.db_oracle import OracleClient
from mylibrary.zomato_db_oracle import ZomatoDBInsertOracle
from time import gmtime, strftime
# Define Zomato Base URL
base_url = "https://developers.zomato.com/api/v2.1"

# Define Oracle Variables
# NOTE: importing this module opens a database connection and two cursors as
# module-level singletons, shared by every class below.
DB = OracleClient()
db_conn = DB.db_login()
db_cur_one = db_conn.cursor()
db_cur_two = db_conn.cursor()
ZmtInsert = ZomatoDBInsertOracle()

log = logging.getLogger(__name__)
class ZomatoParameters:
    """Reads run-scoping parameters (city, localities) from ZMT_PARAMETERS."""

    def getparam_city_names(self):
        """Return the active city name configured in ZMT_PARAMETERS.

        Returns '' and logs a hint when no active city is configured.  If
        several active cities exist, the last row fetched wins (original
        behavior).
        """
        log.debug("getparam_city_names() | <START>")
        city = ''
        db_cur_one.execute("select count(distinct CITY_NAME) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
        for count in db_cur_one:
            # BUGFIX: was `count[0] is 0` - identity comparison with an int
            # relies on CPython's small-int caching; use equality.
            if count[0] == 0:
                log.info("getparam_city_names() | Parameter: CITY_NAME missing. Please define.")
            else:
                db_cur_two.execute("select distinct CITY_NAME from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
                for city_name in db_cur_two:
                    city = city_name[0]
        log.info("getparam_city_names() | PARAMETER City: " + city)
        log.debug("getparam_city_names() | <END>")
        return city

    def getparam_localities(self):
        """Return the list of active localities configured in ZMT_PARAMETERS."""
        log.debug("getparam_localities() | <START>")
        localities = []
        db_cur_one.execute("select count(distinct LOCALITY) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
        for count in db_cur_one:
            if count[0] == 0:  # BUGFIX: was `is 0`
                log.info("getparam_localities() | Parameter: LOCALITY missing. Please define.")
            else:
                db_cur_two.execute("select distinct LOCALITY from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
                for locality in db_cur_two:
                    localities.append(locality[0])
        log.info("getparam_localities() | PARAMETER Locality: " + str(localities))
        log.debug("getparam_localities() | <END>")
        return localities
class ZomatoClient:
def get_categories(self, headers):
    """Refresh ZMT_CATEGORIES from the /categories endpoint.

    The refresh is skipped when rows inserted during the current year
    already exist; otherwise the table is truncated and reloaded.
    """
    log.debug("get_categories() | <START>")
    db_cur_one.execute("select COUNT(*) from zmt_categories where TO_CHAR(INSERT_DT,'YYYY') = "
                       "TO_CHAR(SYSDATE, 'YYYY')")
    for values in db_cur_one:
        # BUGFIX: was `values[0] is 0` (int identity test); use equality.
        if values[0] == 0:
            log.info("get_categories() | Data stale/unavailable. Refreshing...")
            response = requests.get(base_url + '/categories', params='', headers=headers).json()
            db_cur_two.execute("truncate table ZMT_CATEGORIES")
            for category in range(len(response['categories'])):
                log.info("get_categories() | Adding Category: "
                         + response['categories'][category]['categories']['name'])
                ZmtInsert.insert_categories(response['categories'][category]['categories']['id'],
                                            response['categories'][category]['categories']['name'])
        else:
            log.info("get_categories() | Data is current. Refresh skipped.")
    log.debug("get_categories() | <END>")
def get_cities(self, headers, query):
    """Look up `query` on /cities, insert it into ZMT_CITIES when new,
    and return the Zomato city id as a string."""
    log.debug("get_cities() | <START>")
    response = requests.get(base_url + '/cities?q=' + query + '&count=1', params='', headers=headers).json()
    # Insert only when the city is not already present.
    db_cur_one.execute("select count(*) from ZMT_CITIES where CITY_NAME = :name", name=query)
    for values in db_cur_one:
        if values[0] == 0:  # BUGFIX: was `is 0` (int identity test)
            log.info("get_cities() | Adding City: " + query)
            ZmtInsert.insert_cities(response['location_suggestions'][0]['id'],
                                    response['location_suggestions'][0]['name'],
                                    response['location_suggestions'][0]['country_id'],
                                    response['location_suggestions'][0]['country_name'])
    log.debug("get_cities() | <END>")
    return str(response['location_suggestions'][0]['id'])
def get_cuisines(self, headers, city_id):
    """Refresh ZMT_CUISINES for `city_id` from /cuisines; returns 0.

    The table is truncated and reloaded only when no rows were inserted
    during the current year.
    """
    log.debug("get_cuisines() | <START>")
    db_cur_one.execute("select COUNT(*) from zmt_cuisines where TO_CHAR(INSERT_DT,'YYYY') = "
                       "TO_CHAR(SYSDATE, 'YYYY')")
    for values in db_cur_one:
        # BUGFIX: was `values[0] is 0` (int identity test); use equality.
        if values[0] == 0:
            log.info("get_cuisines() | Data is stale/unavailable. Refreshing...")
            response = requests.get(base_url + '/cuisines?city_id=' + city_id, params='', headers=headers).json()
            db_cur_two.execute("truncate table ZMT_CUISINES")
            for cuisine in range(len(response['cuisines'])):
                log.info("get_cuisines() | Adding Cuisine: "
                         + response['cuisines'][cuisine]['cuisine']['cuisine_name'])
                ZmtInsert.insert_cuisines(city_id,
                                          response['cuisines'][cuisine]['cuisine']['cuisine_id'],
                                          response['cuisines'][cuisine]['cuisine']['cuisine_name'])
        else:
            log.info("get_cuisines() | Data is current. Refresh skipped.")
    log.debug("get_cuisines() | <END>")
    return 0
def get_establishments(self, headers, city_id):
    """Refresh ZMT_ESTABLISHMENTS for `city_id` from /establishments; returns 0.

    The table is truncated and reloaded only when no rows were inserted
    during the current year.
    """
    log.debug("get_establishments() | <START>")
    db_cur_one.execute("select COUNT(*) from zmt_establishments where TO_CHAR(INSERT_DT,'YYYY') = "
                       "TO_CHAR(SYSDATE, 'YYYY')")
    for values in db_cur_one:
        # BUGFIX: was `values[0] is 0` (int identity test); use equality.
        if values[0] == 0:
            log.info("get_establishments() | Data is stale/unavailable. Refreshing...")
            response = requests.get(base_url + '/establishments?city_id=' + city_id, params='',
                                    headers=headers).json()
            db_cur_two.execute("truncate table ZMT_ESTABLISHMENTS")
            for establishment in range(len(response['establishments'])):
                log.info("get_establishments() | Adding Establishment: "
                         + response['establishments'][establishment]['establishment']['name'])
                ZmtInsert.insert_establishments(city_id,
                                                response['establishments'][establishment]['establishment']['id'],
                                                response['establishments'][establishment]['establishment']['name'])
        else:
            log.info("get_establishments() | Data is current. Refresh skipped.")
    log.debug("get_establishments() | <END>")
    return 0
def get_collections(self, headers, city_id):
    """Refresh ZMT_COLLECTIONS for `city_id` for the current YYYYMM period; returns 0.

    Existing rows for (current period, city) are deleted and reloaded only
    when none exist yet; otherwise the refresh is skipped.
    """
    log.debug("get_collections() | <START>")
    db_cur_one.execute("select COUNT(*) from ZMT_COLLECTIONS where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
                       "CITY_ID = :city_id",
                       city_id=city_id)
    for values in db_cur_one:
        # BUGFIX: was `values[0] is 0` (int identity test); use equality.
        if values[0] == 0:
            log.info("get_collections() | Data stale/unavailable. Refreshing...")
            response = requests.get(base_url + '/collections?city_id=' + city_id, params='', headers=headers).json()
            db_cur_one.execute("delete from ZMT_COLLECTIONS where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
                               "CITY_ID = :city_id",
                               city_id=city_id)
            for collection in range(len(response['collections'])):
                log.info("get_collections() | Adding Collection: "
                         + response['collections'][collection]['collection']['title'])
                ZmtInsert.insert_collections(city_id,
                                             response['collections'][collection]['collection']['collection_id'],
                                             response['collections'][collection]['collection']['title'],
                                             response['collections'][collection]['collection']['description'],
                                             response['collections'][collection]['collection']['url'],
                                             response['collections'][collection]['collection']['share_url'],
                                             response['collections'][collection]['collection']['res_count'])
        else:
            log.info("get_collections() | Data is current. Refresh skipped.")
    log.debug("get_collections() | <END>")
    return 0
def get_locations(self, headers, query):
    """Resolve `query` via /locations, upsert the top suggestion into
    ZMT_LOCATIONS, and return (entity_id_as_str, entity_type)."""
    log.debug("get_locations() | <START>")
    response = requests.get(base_url + '/locations?query=' + query + '&count=1', params='', headers=headers).json()
    top = response['location_suggestions'][0]
    # Delete-then-insert keeps exactly one row per entity.
    db_cur_one.execute("delete from ZMT_LOCATIONS where ENTITY_ID = :entity_id ",
                       entity_id=str(top['entity_id']))
    log.info("get_locations() | Adding Location: " + top['title'])
    ZmtInsert.insert_locations(top['entity_id'],
                               top['entity_type'],
                               top['title'],
                               top['latitude'],
                               top['longitude'],
                               top['city_id'],
                               top['city_name'],
                               top['country_id'],
                               top['country_name'])
    log.debug("get_locations() | <END>")
    return str(top['entity_id']), top['entity_type']
def get_location_details(self, headers, entity_id, entity_type):
    """Refresh ZMT_LOCATIONS_EXT for the entity for the current YYYYMM period; returns 0.

    Existing rows for (current period, entity) are deleted before the fresh
    metrics from /location_details are inserted.
    """
    log.debug("get_locations_details() | <START>")
    response = requests.get(base_url + '/location_details?entity_id=' + entity_id + '&entity_type=' + entity_type,
                            params='', headers=headers).json()
    db_cur_one.execute("delete from ZMT_LOCATIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
                       "ENTITY_ID = :entity_id", entity_id=entity_id)
    # Trace the raw metric values before inserting.
    log.debug(str(response['location']['entity_id'])
              + ' ' + response['location']['entity_type']
              + ' ' + str(response['popularity'])
              + ' ' + str(response['nightlife_index'])
              + ' ' + str(response['top_cuisines'])
              + ' ' + str(response['popularity_res'])
              + ' ' + str(response['nightlife_res'])
              + ' ' + str(response['num_restaurant']))
    ZmtInsert.insert_locations_ext(entity_id,
                                   response['popularity'],
                                   response['nightlife_index'],
                                   str(response['top_cuisines']),
                                   response['popularity_res'],
                                   response['nightlife_res'],
                                   response['num_restaurant'])
    log.debug("get_locations_details() | <END>")
    return 0
def get_search_bylocation(self, headers, query, entity_id, entity_type):
"""Search Zomato Restaurants by Location"""
log.debug("get_search_bylocation() | <START>")
search_parameters = ('entity_id=' + entity_id + '&entity_type=' + entity_type + '&q=' + query)
results_start = 0
results_end = 100
results_shown = 20
# Due to API restriction, request restricted to <= 20 records
while results_start < results_end:
response = requests.get(base_url + '/search?' + search_parameters + '&start=' + str(results_start) +
'&count=' + str(results_shown) + '&sort=rating&order=desc', params='',
headers=headers).json()
# results_found = response['results_found']
results_start = response['results_start']
results_shown = response['results_shown']
log.debug("Results Start:" + str(results_start))
log.debug("Results Shown:" + str(results_shown))
# Loop through response and populate table
for restaurant in range(len(response['restaurants'])):
log.debug(str(response['restaurants'][restaurant]['restaurant']['id'])
+ ' ' + response['restaurants'][restaurant]['restaurant']['name']
+ ' ' + response['restaurants'][restaurant]['restaurant']['url']
+ ' ' + response['restaurants'][restaurant]['restaurant']['location']['locality']
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['latitude'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['longitude'])
+ ' ' + response['restaurants'][restaurant]['restaurant']['cuisines']
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['average_cost_for_two'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['user_rating']
['aggregate_rating'])
+ ' ' + response['restaurants'][restaurant]['restaurant']['user_rating']['rating_text']
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['user_rating']['votes'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['has_online_delivery'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['has_table_booking']))
# Check if Restaurant data exists. Populate table if no, ignore otherwise.
db_cur_one.execute("select count(*) from ZMT_RESTAURANTS where RESTAURANT_ID = :restaurant_id",
restaurant_id=response['restaurants'][restaurant]['restaurant']['id'])
for values in db_cur_one:
if values[0] is 0:
log.info("get_search_bylocation() | Adding Restaurant: "
+ response['restaurants'][restaurant]['restaurant']['name'] + ', '
+ response['restaurants'][restaurant]['restaurant']['location']['locality'])
#db_cur_two.execute("insert into ZMT_RESTAURANTS values (:restaurant_id, :restaurant_name, "
# ":url, :locality, :city_id, :latitude, :longitude, :search_parameters, "
# "SYSDATE)",
# restaurant_id=response['restaurants'][restaurant]['restaurant']['id'],
# restaurant_name=response['restaurants'][restaurant]['restaurant']['name'],
# url=response['restaurants'][restaurant]['restaurant']['url'],
# locality=response['restaurants'][restaurant]['restaurant']['location']
# ['locality'],
# city_id=response['restaurants'][restaurant]['restaurant']['location']
# ['city_id'],
# latitude=response['restaurants'][restaurant]['restaurant']['location']
# ['latitude'],
# longitude=response['restaurants'][restaurant]['restaurant']['location']
# ['longitude'],
# search_parameters=search_parameters)
ZmtInsert.insert_restaurants(response['restaurants'][restaurant]['restaurant']['id'],
response['restaurants'][restaurant]['restaurant']['name'],
response['restaurants'][restaurant]['restaurant']['url'],
response['restaurants'][restaurant]['restaurant']['location']
['locality'],
response['restaurants'][restaurant]['restaurant']['location']
['city_id'],
response['restaurants'][restaurant]['restaurant']['location']
['latitude'],
response['restaurants'][restaurant]['restaurant']['location']
['longitude'],
search_parameters)
# Cleanup current month's data, if any
db_cur_one.execute("""delete from ZMT_RESTAURANTS_EXT
where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')
and RESTAURANT_ID = :restaurant_id""",
restaurant_id=response['restaurants'][restaurant]['restaurant']['id'])
# Populate table
#db_cur_one.execute("insert into ZMT_RESTAURANTS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), "
# ":restaurant_id, :cuisines, :average_cost_for_two, :user_rating_aggregate, "
# ":user_rating_text, :user_rating_votes, :has_online_delivery, :has_table_booking, "
# "SYSDATE)",
# restaurant_id=response['restaurants'][restaurant]['restaurant']['id'],
# cuisines=response['restaurants'][restaurant]['restaurant']['cuisines'],
# average_cost_for_two=response['restaurants'][restaurant]['restaurant']
# ['average_cost_for_two'],
# user_rating_aggregate=response['restaurants'][restaurant]['restaurant']
# ['user_rating']['aggregate_rating'],
# user_rating_text=response['restaurants'][restaurant]['restaurant']['user_rating']
# ['rating_text'],
# user_rating_votes=response['restaurants'][restaurant]['restaurant']['user_rating']
# ['votes'],
# has_online_delivery=response['restaurants'][restaurant]['restaurant']
# ['has_online_delivery'],
# has_table_booking=response['restaurants'][restaurant]['restaurant']
# ['has_table_booking'])
ZmtInsert.insert_restaurants_ext(response['restaurants'][restaurant]['restaurant']['id'],
response['restaurants'][restaurant]['restaurant']['cuisines'],
response['restaurants'][restaurant]['restaurant']
['average_cost_for_two'],
response['restaurants'][restaurant]['restaurant']['user_rating']
['aggregate_rating'],
response['restaurants'][restaurant]['restaurant']['user_rating']
['rating_text'],
response['restaurants'][restaurant]['restaurant']['user_rating']
['votes'],
response['restaurants'][restaurant]['restaurant']
['has_online_delivery'],
response['restaurants'][restaurant]['restaurant']['has_table_booking'])
results_start = results_start + 20
# Determine request limit
if results_end - results_start < 20:
results_shown = results_end - results_start
#db_conn.commit()
log.debug("get_search_bylocation() | <END>")
return 0
def get_search_bycollection(self, headers, query):
"""Search Zomato Restaurants by Collections"""
log.debug("get_search_bycollection() | <START>")
# Cleanup current month's data, if any
# db_cur_one.execute("delete from ZMT_COLLECTIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from ZMT_COLLECTIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')")
for values in db_cur_one:
if values[0] is 0:
log.info("get_search_bycollection() | Data stale/unavailable. Refreshing...")
# Loop through Collection list
db_cur_two.execute("select distinct CITY_ID, COLLECTION_ID from ZMT_COLLECTIONS order by CITY_ID, "
"COLLECTION_ID")
for values in db_cur_two:
collection_id = values[1]
search_parameters = ('collection_id=' + str(collection_id) + '&q=' + query)
results_start = 0
results_end = 100
results_shown = 20
# Due to API restriction, request restricted to <= 20 records
while results_start < results_end:
response = requests.get(
base_url + '/search?' + search_parameters + '&start=' + str(results_start)
+ '&count=' + str(results_shown) + '&sort=rating&order=desc', params='',
headers=headers).json()
# results_found = response['results_found']
results_start = response['results_start']
results_shown = response['results_shown']
log.debug("Results Start:" + str(results_start))
log.debug("Results Shown:" + str(results_shown))
# Loop through response and populate table
for restaurant in range(len(response['restaurants'])):
log.debug(str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
+ ' ' + str(collection_id)
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['id']))
#db_cur_one.execute(
# "insert into ZMT_COLLECTIONS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :city_id, "
# ":collection_id, :restaurant_id, :search_parameters, SYSDATE)",
# city_id=response['restaurants'][restaurant]['restaurant']['location']['city_id'],
# collection_id=collection_id,
# restaurant_id=response['restaurants'][restaurant]['restaurant']['id'],
# search_parameters=search_parameters)
ZmtInsert.insert_collections_ext(response['restaurants'][restaurant]['restaurant']
['location']['city_id'],
collection_id,
response['restaurants'][restaurant]['restaurant']['id'],
search_parameters)
results_start = results_start + 20
# Determine request limit
if results_end - results_start < 20:
results_shown = results_end - results_start
db_conn.commit()
else:
log.info("get_collections_ext() | Data is current. Refresh skipped.")
log.debug("get_search_bycollection() | <END>")
return 0
def get_restaurant_bycollection(self, headers):
"""Retrieve Zomato Restaurants data for Collections"""
log.debug("get_restaurant_bycollection() | <START>")
# Determine Restaurants for which data is not available
db_cur_one.execute("""select distinct RESTAURANT_ID
from ZMT_COLLECTIONS_EXT
where RESTAURANT_ID not in (select distinct RESTAURANT_ID from ZMT_RESTAURANTS)
order by RESTAURANT_ID""")
# Loop through Restaurant list, request data and populate tables
for values in db_cur_one:
res_id = values[0]
search_parameters = ('res_id=' + str(res_id))
response = requests.get(base_url + '/restaurant?' + search_parameters, params='', headers=headers).json()
log.debug(str(response['id'])
+ ' ' + response['name']
+ ' ' + response['url']
+ ' ' + response['location']['locality']
+ ' ' + str(response['location']['city_id'])
+ ' ' + str(response['location']['latitude'])
+ ' ' + str(response['location']['longitude'])
+ ' ' + response['cuisines']
+ ' ' + str(response['average_cost_for_two'])
+ ' ' + str(response['user_rating']['aggregate_rating'])
+ ' ' + response['user_rating']['rating_text']
+ ' ' + str(response['user_rating']['votes'])
+ ' ' + str(response['has_online_delivery'])
+ ' ' + str(response['has_table_booking']))
log.info("get_restaurant_bycollection() | Adding Restaurant: " + response['name'] + ', '
+ response['location']['locality'])
#db_cur_two.execute("insert into ZMT_RESTAURANTS values (:restaurant_id, :restaurant_name, :url, "
# ":locality, :city_id, :latitude, :longitude, :search_parameters, SYSDATE)",
# restaurant_id=str(response['id']),
# restaurant_name=response['name'],
# url=response['url'],
# locality=response['location']['locality'],
# city_id=str(response['location']['city_id']),
# latitude=str(response['location']['latitude']),
# longitude=str(response['location']['longitude']),
# search_parameters=search_parameters)
ZmtInsert.insert_restaurants_ext(str(response['id']),
response['name'],
response['url'],
response['location']['locality'],
str(response['location']['city_id']),
str(response['location']['latitude']),
str(response['location']['longitude']),
search_parameters)
#db_cur_two.execute("insert into ZMT_RESTAURANTS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :restaurant_id, "
# ":cuisines, :average_cost_for_two, :user_rating_aggregate, :user_rating_text, "
# ":user_rating_votes, :has_online_delivery, :has_table_booking, SYSDATE)",
# restaurant_id=str(response['id']),
# cuisines=response['cuisines'],
# average_cost_for_two=str(response['average_cost_for_two']),
# user_rating_aggregate=str(response['user_rating']['aggregate_rating']),
# user_rating_text=response['user_rating']['rating_text'],
# user_rating_votes=str(response['user_rating']['votes']),
# has_online_delivery=str(response['has_online_delivery']),
# has_table_booking=str(response['has_table_booking']))
ZmtInsert.insert_restaurants_ext(str(response['id']),
response['cuisines'],
str(response['average_cost_for_two']),
str(response['user_rating']['aggregate_rating']),
response['user_rating']['rating_text'],
str(response['user_rating']['votes']),
str(response['has_online_delivery']),
str(response['has_table_booking']))
#db_conn.commit()
log.debug("get_restaurant_bycollection() | <END>")
return 0
class ZomatoAlerts:
    """Composes and mails per-locality new-restaurant alerts."""

    def compose_alert(self, locality):
        """Compose Alert.

        Builds HTML table rows for restaurants first recorded in the
        current month for *locality*; returns "0" when no data exists.
        """
        log.debug("compose_alert() " + locality + " | <START>")
        alert_body = ""
        # Check if data exists
        db_cur_one.execute("select COUNT(*)"
                           " from ZMT_RESTAURANTS ZR, ZMT_RESTAURANTS_EXT ZR_EXT"
                           " where ZR.RESTAURANT_ID = ZR_EXT.RESTAURANT_ID"
                           " and TO_CHAR(ZR.INSERT_DT, 'YYYYMM') = TO_CHAR(SYSDATE, 'YYYYMM')"
                           " and ZR_EXT.PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')"
                           " and ZR.LOC_LOCALITY like :locality", locality=locality)
        for count in db_cur_one:
            # BUG FIX: was `count[0] is 0` — identity on ints is a CPython
            # small-int artefact; equality is the correct comparison.
            if count[0] == 0:
                log.info("compose_alert() | " + locality + " | Data unavailable. Alert skipped.")
                alert_body = "0"
            else:
                db_cur_two.execute("select ZR.LOC_LOCALITY, ZR.RESTAURANT_NAME, ZR_EXT.USER_RATING_AGGREGATE, "
                                   " ZR_EXT.AVERAGE_COST_FOR_TWO, ZR_EXT.CUISINES, ZR.URL"
                                   " from ZMT_RESTAURANTS ZR, ZMT_RESTAURANTS_EXT ZR_EXT"
                                   " where ZR.RESTAURANT_ID = ZR_EXT.RESTAURANT_ID"
                                   " and TO_CHAR(ZR.INSERT_DT, 'YYYYMM') = TO_CHAR(SYSDATE, 'YYYYMM')"
                                   " and ZR_EXT.PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')"
                                   " and ZR.LOC_LOCALITY like :locality", locality=locality)
                for values in db_cur_two:
                    res_locality = values[0]
                    res_name = values[1]
                    res_user_rating = values[2]
                    res_cost_for_two = values[3]
                    res_cuisines = values[4]
                    res_url = values[5]
                    alert_body += '<tr>' \
                                  + '<td>' + res_locality + '</td>' \
                                  + '<td>' + '<a href=' + res_url + '>' + res_name + '</a>' + '</td>' \
                                  + '<td>' + str(res_user_rating) + '</td>' \
                                  + '<td>' + str(res_cost_for_two) + '</td>' \
                                  + '<td>' + res_cuisines + '</td>' \
                                  + '</tr>'
                alert_body += '</table></body>'
        log.debug("compose_alert() " + locality + " | <END>")
        return alert_body

    def send_alert(self, api_key, alert_body, locality):
        """Send Alert.

        Prefixes *alert_body* with the HTML table header and mails it to
        every subscriber in ZMT_ALERTS via Mailgun. A body of "0" (no
        data) is a no-op.
        """
        log.debug("send_alert() " + locality + " | <START>")
        alert_header = "<head>" \
                       " <style>" \
                       " table {font-family: arial, sans-serif; border-collapse: collapse; width: 100%; } " \
                       " td, th {border: 1px solid #dddddd; text-align: left; padding: 8px; } " \
                       " tr:nth-child(even) {background-color: #dddddd; } " \
                       " </style>" \
                       "</head>" \
                       "<body>" \
                       " <table style='width:100%'>" \
                       " <tr>" \
                       " <th>Locality</th>" \
                       " <th>Restaurant Name</th>" \
                       " <th>Rating</th>" \
                       " <th>Cost For Two</th>" \
                       " <th>Cuisines</th>" \
                       " </tr>"
        if alert_body != "0":
            # Check if subscribers exists
            db_cur_one.execute("select COUNT(*) from ZMT_ALERTS")
            for count in db_cur_one:
                # BUG FIX: was `count[0] is 0` — use equality, not identity.
                if count[0] == 0:
                    # BUG FIX: log tag corrected from "compose_alert()".
                    log.info("send_alert() | " + locality + " | No subscribers. Alert skipped.")
                    alert_body = "0"
                else:
                    db_cur_two.execute("select SUBS_NAME, SUBS_MAIL_ID from ZMT_ALERTS")
                    for values in db_cur_two:
                        subs_name = values[0]
                        subs_mail_id = values[1]
                        requests.post(
                            "https://api.mailgun.net/v3/sandboxd7ddf28978bc465596fa4cad095cb3ac.mailgun.org/messages",
                            auth=("api", api_key),
                            data={"from": "Mailgun Sandbox "
                                          "<postmaster@sandboxd7ddf28978bc465596fa4cad095cb3ac.mailgun.org>",
                                  "to": subs_name + " <" + subs_mail_id + ">",
                                  "subject": "Zomato Alert | " + locality,
                                  "html": alert_header + alert_body})
        log.debug("send_alert() " + locality + " | <END>")
        return 0
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,548 | nitinx/zomato-mart | refs/heads/master | /mylibrary/zmt_parameters.py | # 31 Mar 2018 | Zomato Parameters
"""Zomato Parameters
Fetches user defined parameters (including city, localities, etc) from a relational database. These parameters are
used to restrict data fetched from Zomato.com
"""
import logging
from mylibrary.db_oracle import OracleClient
# Define Oracle Variables
# Shared Oracle connection plus two cursors, created once at import time:
# db_cur_one drives the existence/count lookups, db_cur_two the dependent
# detail queries, so the two can be iterated independently.
DB = OracleClient()
db_conn = DB.db_login()
db_cur_one = db_conn.cursor()
db_cur_two = db_conn.cursor()
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
class ZomatoParameters:
    """Reads user-defined fetch parameters (city, localities) from Oracle."""

    def getparam_city_names(self):
        """Retrieve Parameter | City Names.

        Returns the active CITY_NAME from ZMT_PARAMETERS (the last one
        when several exist), or an empty string when none are defined.
        """
        log.debug("getparam_city_names() | <START>")
        city = ''
        # Retrieve Parameter | City Names
        db_cur_one.execute("select count(distinct CITY_NAME) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
        for count in db_cur_one:
            # BUG FIX: was `count[0] is 0` — use equality, not identity,
            # for the integer comparison.
            if count[0] == 0:
                log.info("getparam_city_names() | Parameter: CITY_NAME missing. Please define.")
            else:
                db_cur_two.execute("select distinct CITY_NAME from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
                for city_name in db_cur_two:
                    city = city_name[0]
                    log.info("getparam_city_names() | PARAMETER City: " + city)
        log.debug("getparam_city_names() | <END>")
        return city

    def getparam_localities(self):
        """Retrieve Parameter | Localities.

        Returns the list of active LOCALITY values from ZMT_PARAMETERS
        (empty when none are defined).
        """
        log.debug("getparam_localities() | <START>")
        localities = []
        # Retrieve Parameter | Localities
        db_cur_one.execute("select count(distinct LOCALITY) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
        for count in db_cur_one:
            # BUG FIX: was `count[0] is 0` — use equality, not identity.
            if count[0] == 0:
                log.info("getparam_localities() | Parameter: LOCALITY missing. Please define.")
            else:
                db_cur_two.execute("select distinct LOCALITY from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
                for locality in db_cur_two:
                    localities.append(locality[0])
                log.info("getparam_localities() | PARAMETER Locality: " + str(localities))
        log.debug("getparam_localities() | <END>")
        return localities
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,549 | nitinx/zomato-mart | refs/heads/master | /mylibrary/zmt_alerts.py | # 31 Mar 2018 | Zomato Alerts
"""Zomato Alerts
Composes and sends out New Restaurant alerts by locality to subscribers
"""
import logging
import requests
from mylibrary.db_oracle import OracleClient
# Define Oracle Variables
# Shared Oracle connection plus two cursors, created once at import time:
# db_cur_one drives the existence/count lookups, db_cur_two the dependent
# detail queries, so the two can be iterated independently.
DB = OracleClient()
db_conn = DB.db_login()
db_cur_one = db_conn.cursor()
db_cur_two = db_conn.cursor()
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
class ZomatoAlerts:
    """Composes and mails new/closed restaurant alerts and analytics."""

    def compose_alert(self, locality):
        """Compose Alert.

        Builds HTML rows for restaurants newly recorded this month (NEW)
        and for restaurants present in the previous period but missing
        from the latest one (CLOSED). Returns "0" when no data exists for
        *locality*.
        """
        log.debug("compose_alert() " + locality + " | <START>")
        alert_body = ""
        # Check if data exists
        db_cur_one.execute("select COUNT(*)"
                           " from ZMT_RESTAURANTS ZR, ZMT_RESTAURANTS_EXT ZR_EXT"
                           " where ZR.RESTAURANT_ID = ZR_EXT.RESTAURANT_ID"
                           " and TO_CHAR(ZR.INSERT_DT, 'YYYYMM') = TO_CHAR(SYSDATE, 'YYYYMM')"
                           " and ZR_EXT.PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')"
                           " and ZR.LOC_LOCALITY like :locality", locality=locality)
        for count in db_cur_one:
            # BUG FIX: was `count[0] is 0` — identity on ints is a CPython
            # small-int artefact; equality is the correct comparison.
            if count[0] == 0:
                log.info("compose_alert() | " + locality + " | Data unavailable. Alert skipped.")
                alert_body = "0"
            else:
                # NEW: restaurants first inserted in the current month.
                db_cur_two.execute("select ZR.LOC_LOCALITY, ZR.RESTAURANT_NAME, ZR_EXT.USER_RATING_AGGREGATE, "
                                   " ZR_EXT.AVERAGE_COST_FOR_TWO, ZR_EXT.CUISINES, ZR.URL"
                                   " from ZMT_RESTAURANTS ZR, ZMT_RESTAURANTS_EXT ZR_EXT"
                                   " where ZR.RESTAURANT_ID = ZR_EXT.RESTAURANT_ID"
                                   " and TO_CHAR(ZR.INSERT_DT, 'YYYYMM') = TO_CHAR(SYSDATE, 'YYYYMM')"
                                   " and ZR_EXT.PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')"
                                   " and ZR.LOC_LOCALITY like :locality", locality=locality)
                for values in db_cur_two:
                    res_locality = values[0]
                    res_name = values[1]
                    res_user_rating = values[2]
                    res_cost_for_two = values[3]
                    res_cuisines = values[4]
                    res_url = values[5]
                    alert_body += '<tr>' \
                                  + '<td> NEW </td>' \
                                  + '<td>' + res_locality + '</td>' \
                                  + '<td>' + '<a href=' + res_url + '>' + res_name + '</a>' + '</td>' \
                                  + '<td>' + str(res_user_rating) + '</td>' \
                                  + '<td>' + str(res_cost_for_two) + '</td>' \
                                  + '<td>' + res_cuisines + '</td>' \
                                  + '</tr>'
                # CLOSED: restaurants present in the previous period but
                # absent from the latest one.
                db_cur_two.execute("select ZR.LOC_LOCALITY, ZR.RESTAURANT_NAME, ZR_EXT.USER_RATING_AGGREGATE, "
                                   " ZR_EXT.AVERAGE_COST_FOR_TWO, ZR_EXT.CUISINES, ZR.URL"
                                   " from ZMT_RESTAURANTS ZR, ZMT_RESTAURANTS_EXT ZR_EXT "
                                   " where ZR.RESTAURANT_ID = ZR_EXT.RESTAURANT_ID "
                                   " and ZR_EXT.PERIOD = (select max(PERIOD) "
                                   "                      from ZMT_RESTAURANTS_EXT "
                                   "                      where PERIOD < (select max(PERIOD) "
                                   "                                      from ZMT_RESTAURANTS_EXT))"
                                   " and NOT EXISTS (select 'X' "
                                   "                 from ZMT_RESTAURANTS_EXT ZR_EXT_IN "
                                   "                 where ZR_EXT_IN.RESTAURANT_ID = ZR_EXT.RESTAURANT_ID "
                                   "                 and PERIOD = (select max(PERIOD) from ZMT_RESTAURANTS_EXT))"
                                   " and ZR.LOC_LOCALITY like :locality", locality=locality)
                for values in db_cur_two:
                    res_locality = values[0]
                    res_name = values[1]
                    res_user_rating = values[2]
                    res_cost_for_two = values[3]
                    res_cuisines = values[4]
                    res_url = values[5]
                    alert_body += '<tr>' \
                                  + '<td> CLOSED </td>' \
                                  + '<td>' + res_locality + '</td>' \
                                  + '<td>' + '<a href=' + res_url + '>' + res_name + '</a>' + '</td>' \
                                  + '<td>' + str(res_user_rating) + '</td>' \
                                  + '<td>' + str(res_cost_for_two) + '</td>' \
                                  + '<td>' + res_cuisines + '</td>' \
                                  + '</tr>'
                alert_body += '</table></body>'
        log.debug("compose_alert() " + locality + " | <END>")
        return alert_body

    def send_alert(self, api_key, alert_body, locality):
        """Send Alert.

        Prefixes *alert_body* with the HTML table header and mails it to
        every subscriber in ZMT_ALERTS via Mailgun. A body of "0" (no
        data) is a no-op.
        """
        log.debug("send_alert() " + locality + " | <START>")
        alert_header = "<head>" \
                       " <style>" \
                       " table {font-family: arial, sans-serif; border-collapse: collapse; width: 100%; } " \
                       " td, th {border: 1px solid #dddddd; text-align: left; padding: 8px; } " \
                       " tr:nth-child(even) {background-color: #dddddd; } " \
                       " </style>" \
                       "</head>" \
                       "<body>" \
                       " <table style='width:100%'>" \
                       " <tr>" \
                       " <th>Status</th>" \
                       " <th>Locality</th>" \
                       " <th>Restaurant Name</th>" \
                       " <th>Rating</th>" \
                       " <th>Cost For Two</th>" \
                       " <th>Cuisines</th>" \
                       " </tr>"
        if alert_body != "0":
            # Check if subscribers exists
            db_cur_one.execute("select COUNT(*) from ZMT_ALERTS")
            for count in db_cur_one:
                # BUG FIX: was `count[0] is 0` — use equality, not identity.
                if count[0] == 0:
                    # BUG FIX: log tag corrected from "compose_alert()".
                    log.info("send_alert() | " + locality + " | No subscribers. Alert skipped.")
                    alert_body = "0"
                else:
                    db_cur_two.execute("select SUBS_NAME, SUBS_MAIL_ID from ZMT_ALERTS")
                    for values in db_cur_two:
                        subs_name = values[0]
                        subs_mail_id = values[1]
                        requests.post(
                            "https://api.mailgun.net/v3/sandboxd7ddf28978bc465596fa4cad095cb3ac.mailgun.org/messages",
                            auth=("api", api_key),
                            data={"from": "Mailgun Sandbox "
                                          "<postmaster@sandboxd7ddf28978bc465596fa4cad095cb3ac.mailgun.org>",
                                  "to": subs_name + " <" + subs_mail_id + ">",
                                  "subject": "Zomato Alert | " + locality,
                                  "html": alert_header + alert_body})
        log.debug("send_alert() " + locality + " | <END>")
        return 0

    def send_analytics(self, api_key, mail_body):
        """Send Analytics.

        Mails the locality-ratings plot (plot.png) to every subscriber in
        ZMT_ALERTS via Mailgun. A *mail_body* of "0" is a no-op.
        """
        log.debug("send_analytics() | <START>")
        # BUG FIX: the original string had a trailing line-continuation
        # backslash after "</head>", which glued this assignment onto the
        # following `if` statement and made the module a SyntaxError.
        # NOTE(review): mail_header is currently unused by the message
        # below — the html payload only embeds the inline image.
        mail_header = "<head>" \
                      " <style>" \
                      " table {font-family: arial, sans-serif; border-collapse: collapse; width: 100%; } " \
                      " td, th {border: 1px solid #dddddd; text-align: left; padding: 8px; } " \
                      " tr:nth-child(even) {background-color: #dddddd; } " \
                      " </style>" \
                      "</head>"
        if mail_body != "0":
            # Check if subscribers exists
            db_cur_one.execute("select COUNT(*) from ZMT_ALERTS")
            for count in db_cur_one:
                # BUG FIX: was `count[0] is 0` — use equality, not identity.
                if count[0] == 0:
                    # BUG FIX: the original message referenced an undefined
                    # `locality` (NameError); this method is not
                    # locality-scoped.
                    log.info("send_analytics() | No subscribers. Alert skipped.")
                    mail_body = "0"
                else:
                    db_cur_two.execute("select SUBS_NAME, SUBS_MAIL_ID from ZMT_ALERTS")
                    for values in db_cur_two:
                        subs_name = values[0]
                        subs_mail_id = values[1]
                        # BUG FIX: close plot.png after the upload (the
                        # handle was previously leaked).
                        with open("plot.png", 'rb') as plot_file:
                            requests.post(
                                "https://api.mailgun.net/v3/sandboxd7ddf28978bc465596fa4cad095cb3ac.mailgun.org/messages",
                                auth=("api", api_key),
                                files=[("inline", plot_file)],
                                data={"from": "Mailgun Sandbox "
                                              "<postmaster@sandboxd7ddf28978bc465596fa4cad095cb3ac.mailgun.org>",
                                      "to": subs_name + " <" + subs_mail_id + ">",
                                      "subject": "Zomato | Rating Analytics | Across Localities",
                                      "html": '<html><img src="cid:plot.png"></html>'})
        log.debug("send_analytics() | <END>")
        return 0
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,550 | nitinx/zomato-mart | refs/heads/master | /mylibrary/zmt_requests.py | # 31 Mar 2018 | Zomato Requests
"""Zomato Requests
Library that requests data from Zomato's API endpoints
API Documentation: https://developers.zomato.com/api#headline1
"""
import requests
import logging
import json
# Define Zomato Base URL
base_url = "https://developers.zomato.com/api/v2.1"
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
class ZomatoRequests(object):
    """Thin wrapper over Zomato's v2.1 REST API.

    Every method performs a single GET against one endpoint and returns
    the decoded JSON payload. The headers supplied at construction
    (API user-key) are attached to each request.
    """

    def __init__(self, headers):
        # Request headers (incl. the user-key) reused on every call.
        self.headers = headers

    def _fetch(self, path_and_query):
        """GET ``base_url + path_and_query`` and return the JSON body."""
        return requests.get(base_url + path_and_query, params='', headers=self.headers).json()

    def get_categories(self):
        """Get Categories"""
        return self._fetch('/categories')

    def get_cities(self, query):
        """Get Cities"""
        return self._fetch('/cities?q=' + query + '&count=1')

    def get_cuisines(self, city_id):
        """Get Cuisines"""
        return self._fetch('/cuisines?city_id=' + city_id)

    def get_establishments(self, city_id):
        """Get Establishments"""
        return self._fetch('/establishments?city_id=' + city_id)

    def get_collections(self, city_id):
        """Get Collections"""
        return self._fetch('/collections?city_id=' + city_id)

    def get_locations(self, query):
        """Get Locations"""
        return self._fetch('/locations?query=' + query + '&count=1')

    def get_location_details(self, entity_id, entity_type):
        """Get Location Details"""
        return self._fetch('/location_details?entity_id=' + entity_id + '&entity_type=' + entity_type)

    def get_search(self, search_parameters, results_start, results_shown):
        """Get Search"""
        return self._fetch('/search?' + search_parameters + '&start=' + results_start + '&count='
                           + results_shown + '&sort=rating&order=desc')

    def get_restaurant(self, search_parameters):
        """Get Restaurant"""
        return self._fetch('/restaurant?' + search_parameters)
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,551 | nitinx/zomato-mart | refs/heads/master | /mylibrary/apikey.py | # 11 Mar 2018 | API Key Retrieval
import json
import logging
from time import gmtime, strftime
# Directory holding the JSON key files (one <category>.key per service).
base_dir = 'e:\\GitHub\\python\\keys\\'
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
class APIKey:
    """Loads API keys from JSON key files on local disk."""

    def retrieve_key(self, category):
        """Return the parsed JSON contents of ``<base_dir>/<category>.key``."""
        log.debug("[API Key] " + category + " | Retrieval Initiated")
        key_path = base_dir + category + '.key'
        with open(key_path) as key_file:
            key = json.load(key_file)
        log.debug("[API Key] " + str(key) + " >")
        log.debug("[API Key] " + category + " | Retrieval Completed")
        return key
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,552 | nitinx/zomato-mart | refs/heads/master | /mylibrary/http.py | import logging
import requests
log = logging.getLogger(__name__)
class BaseHTTPClient(object):
    """Minimal JSON HTTP client bound to a fixed base URL.

    Instance headers always include ``Accept: application/json``; on each
    call they are merged into the per-call headers (instance headers take
    precedence, matching the original behavior).
    """

    def __init__(self, base_url, headers):
        self._base_url = base_url
        self._headers = {'Accept': 'application/json'}
        self._headers.update(headers)

    def get(self, url, *args, **kwargs):
        """GET ``base_url + url`` and return the decoded JSON body."""
        res_url = self._base_url + url
        # BUG FIX: copy the caller's headers mapping before merging so the
        # caller's dict is never mutated in place.
        cur_headers = dict(kwargs.get("headers", {}))
        cur_headers.update(self._headers)  # instance headers win, as before
        kwargs["headers"] = cur_headers
        return requests.get(res_url, *args, **kwargs).json()
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,553 | nitinx/zomato-mart | refs/heads/master | /application.py | # 28 Nov 2017 | Zomato Data Mart
"""Zomato Datamart
Application that:
1. Fetches data from Zomato.com via Zomato's public APIs
2. Populates the data into the Zomato datamart
3. Maintains history at a monthly time grain
4. Fetch is restricted via parameters
5. Sends out new restaurant alerts to subscribers
API Documentation: https://developers.zomato.com/api#headline1
"""
import logging
from mylibrary.apikey import APIKey
from mylibrary.zmt_parameters import ZomatoParameters
from mylibrary.zmt_client import ZomatoClient
from mylibrary.zmt_alerts import ZomatoAlerts
from mylibrary.zmt_analytics import ZomatoAnalytics
from time import gmtime, strftime
log = logging.getLogger(__name__)
if __name__ == '__main__':
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] <START>")

    # Logger | Initialize: route the mylibrary package loggers to stderr.
    fmt_string = "%(asctime)s | %(levelname)s | %(module)s | %(message)s"
    fmtr = logging.Formatter(fmt=fmt_string)
    sh = logging.StreamHandler()
    sh.setFormatter(fmtr)
    my_lib_logger = logging.getLogger("mylibrary")
    my_lib_logger.addHandler(sh)
    # Logger | Set Level
    my_lib_logger.setLevel("DEBUG")

    # Initialize Zomato API Key Objects
    ZomatoAPIKey = APIKey()
    api_key_zomato = ZomatoAPIKey.retrieve_key("zomato")[0]['API_KEY']
    headers = {'Accept': 'application/json', 'user-key': api_key_zomato}

    # Initialize Mailgun API Key Objects
    MailgunAPIKey = APIKey()
    api_key_mailgun = MailgunAPIKey.retrieve_key("mailgun")[0]['API_KEY']

    # Initialize Zomato Objects
    ZmtParams = ZomatoParameters()
    ZmtClient = ZomatoClient(headers)
    ZmtAlert = ZomatoAlerts()
    ZmtPlot = ZomatoAnalytics()

    # Retrieve Parameters (city name plus the list of locality names).
    # IDIOM FIX: dropped the redundant `city = ''` / `localities = []`
    # pre-initializers — both are unconditionally assigned here.
    city = ZmtParams.getparam_city_names()
    localities = ZmtParams.getparam_localities()

    # Fetch Category data
    ZmtClient.get_categories()

    # Fetch City data
    city_id = ZmtClient.get_cities(city)
    ZmtClient.get_cuisines(city_id)
    ZmtClient.get_establishments(city_id)

    # Fetch Location/Restaurant data.
    # IDIOM FIX: iterate the locality names directly rather than indexing
    # with range(len(...)).
    for locality in localities:
        print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Processing Locality: " + locality)
        entity = ZmtClient.get_locations(locality)
        ZmtClient.get_location_details(entity[0], entity[1])
        ZmtClient.get_search_bylocation(locality, entity[0], entity[1])

    # Fetch Collection/Restaurant data (currently disabled)
    #ZmtClient.get_collections(city_id)
    #ZmtClient.get_search_bycollection(city)
    #ZmtClient.get_restaurant_bycollection()

    # Send New Restaurant Alert(s) per locality, then the analytics plot.
    for locality in localities:
        ZmtAlert.send_alert(api_key_mailgun, ZmtAlert.compose_alert('%' + locality + '%'),
                            locality)
    ZmtAlert.send_analytics(api_key_mailgun, ZmtPlot.plot_locality_stats())
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] <END>")
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,554 | nitinx/zomato-mart | refs/heads/master | /zomatomart.py | # 28 Nov 2017 | Zomato Data Mart
"""Zomato Datamart
Program that:
1. Fetches data from Zomato.com via Zomato's public APIs
2. Populates the data into the Zomato datamart
3. Maintains history at a monthly time grain
4. Fetch is currently restricted via parameters
API Documentation: https://developers.zomato.com/api#headline1
"""
import requests
import logging
import json
from mylibrary.nxcommon import NXKey
from mylibrary.nxcommon import NXOracle
from mylibrary.http import BaseHTTPClient
from time import gmtime, strftime
# Define Zomato Base URL
base_url = "https://developers.zomato.com/api/v2.1"
# Define Oracle Variables
db_conn = NXOracle().db_login()
db_cur_one = db_conn.cursor()
db_cur_two = db_conn.cursor()
log = logging.getLogger(__name__)
def get_user_key():
    """Return the Zomato API key.

    Looks up the stored key records via :class:`NXKey` and returns the
    ``API_KEY`` field of the first record.
    """
    key_records = NXKey().key_zomato()
    return key_records[0]['API_KEY']
class ZomatoClient:
    """Thin client over the Zomato v2.1 REST API.

    Each ``get_*`` method fetches one resource type and persists it into the
    matching ZMT_* Oracle table through the module-level connection/cursors
    (``db_conn``, ``db_cur_one``, ``db_cur_two``).  Reference data
    (categories, cuisines, establishments) is refreshed at most once per
    calendar year (the SQL compares INSERT_DT's year with SYSDATE's year);
    time-variant tables are keyed on the current month
    ``TO_CHAR(SYSDATE, 'YYYYMM')``.

    ``headers`` is always a dict carrying ``Accept`` and ``user-key``.
    """

    def get_categories(self, headers):
        """Refresh Zomato Categories; skipped when current-year rows exist."""
        log.info("get_categories() | <START>")
        # Skip refresh when rows were already inserted this calendar year.
        db_cur_one.execute("select COUNT(*) from zmt_categories where TO_CHAR(INSERT_DT,'YYYY') = TO_CHAR(SYSDATE, 'YYYY')")
        for values in db_cur_one:
            # BUGFIX: value equality ('=='), not identity ('is'), for ints.
            if values[0] == 0:
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_categories()] Data stale/unavailable. "
                                                                "Refreshing...")
                # Request data and cleanup table
                response = requests.get(base_url + '/categories', params='', headers=headers).json()
                db_cur_two.execute("truncate table ZMT_CATEGORIES")
                # Loop through response and populate table
                for category in range(len(response['categories'])):
                    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_categories()] Adding Category: "
                          + response['categories'][category]['categories']['name'])
                    db_cur_two.execute("insert into ZMT_CATEGORIES values (:category_id, :category_name, SYSDATE)",
                                       category_id=response['categories'][category]['categories']['id'],
                                       category_name=response['categories'][category]['categories']['name'])
                db_conn.commit()
            else:
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_categories()] Data is current. Refresh skipped.")
        log.info("get_categories() | <END>")

    def get_cities(self, headers, query):
        """Refresh Zomato Cities for *query*; return the matched city id as str."""
        log.info("get_cities() | <START>")
        # Request data
        response = requests.get(base_url + '/cities?q=' + query + '&count=1', params='', headers=headers).json()
        # Insert only when the city is not already stored.
        db_cur_one.execute("select count(*) from ZMT_CITIES where CITY_NAME = :name", name=query)
        for values in db_cur_one:
            # BUGFIX: '==' instead of 'is' for int comparison.
            if values[0] == 0:
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_cities()] Adding City: " + query)
                db_cur_two.execute("insert into ZMT_CITIES values (:city_id, :city_name, :country_id, :country_name, "
                                   "SYSDATE)",
                                   city_id=response['location_suggestions'][0]['id'],
                                   city_name=response['location_suggestions'][0]['name'],
                                   country_id=response['location_suggestions'][0]['country_id'],
                                   country_name=response['location_suggestions'][0]['country_name'])
                db_conn.commit()
        log.info("get_cities() | <END>")
        return str(response['location_suggestions'][0]['id'])

    def get_cuisines(self, headers, city_id):
        """Refresh Zomato Cuisines for *city_id*; skipped when current-year rows exist."""
        log.info("get_cuisines() | <START>")
        # Skip refresh when rows were already inserted this calendar year.
        db_cur_one.execute("select COUNT(*) from zmt_cuisines where TO_CHAR(INSERT_DT,'YYYY') = TO_CHAR(SYSDATE, 'YYYY')")
        for values in db_cur_one:
            if values[0] == 0:  # BUGFIX: '==' not 'is'
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_cuisines()] Data is stale/unavailable. "
                                                                "Refreshing...")
                # Request data and cleanup table
                response = requests.get(base_url + '/cuisines?city_id=' + city_id, params='', headers=headers).json()
                db_cur_two.execute("truncate table ZMT_CUISINES")
                # Loop through response and populate table
                for cuisine in range(len(response['cuisines'])):
                    # BUGFIX: log tag previously said [get_categories()] (copy/paste slip).
                    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_cuisines()] Adding Cuisine: "
                          + response['cuisines'][cuisine]['cuisine']['cuisine_name'])
                    db_cur_two.execute("insert into ZMT_CUISINES values (:city_id, :cuisine_id, :cuisine_name, SYSDATE)",
                                       city_id=city_id,
                                       cuisine_id=response['cuisines'][cuisine]['cuisine']['cuisine_id'],
                                       cuisine_name=response['cuisines'][cuisine]['cuisine']['cuisine_name'])
                db_conn.commit()
            else:
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_cuisines()] Data is current. Refresh skipped.")
        log.info("get_cuisines() | <END>")
        return 0

    def get_establishments(self, headers, city_id):
        """Refresh Zomato Establishments for *city_id*; skipped when current-year rows exist."""
        log.info("get_establishments() | <START>")
        # Skip refresh when rows were already inserted this calendar year.
        db_cur_one.execute("select COUNT(*) from zmt_establishments where TO_CHAR(INSERT_DT,'YYYY') = "
                           "TO_CHAR(SYSDATE, 'YYYY')")
        for values in db_cur_one:
            if values[0] == 0:  # BUGFIX: '==' not 'is'
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_establishments()] Data is stale/unavailable. "
                                                                "Refreshing...")
                # Request data and cleanup table
                response = requests.get(base_url + '/establishments?city_id=' + city_id, params='', headers=headers).json()
                db_cur_two.execute("truncate table ZMT_ESTABLISHMENTS")
                # Loop through response and populate table
                for establishment in range(len(response['establishments'])):
                    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_establishments()] Adding Establishment: "
                          + response['establishments'][establishment]['establishment']['name'])
                    db_cur_two.execute("insert into ZMT_ESTABLISHMENTS values (:city_id, :establishment_id, "
                                       ":establishment_name, SYSDATE)",
                                       city_id=city_id,
                                       establishment_id=response['establishments'][establishment]['establishment']['id'],
                                       establishment_name=response['establishments'][establishment]['establishment'][
                                           'name'])
                db_conn.commit()
            else:
                print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_establishments()] Data is current. "
                                                                "Refresh skipped.")
        log.info("get_establishments() | <END>")
        return 0

    def get_collections(self, headers, city_id):
        """Refresh the current month's Zomato Collections for *city_id*."""
        log.info("get_collections() | <START>")
        # Request data and cleanup current month's rows for this city
        response = requests.get(base_url + '/collections?city_id=' + city_id, params='', headers=headers).json()
        db_cur_one.execute("delete from ZMT_COLLECTIONS where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and CITY_ID = :city_id",
                           city_id=city_id)
        # Loop through response and populate table
        for collection in range(len(response['collections'])):
            print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_collections()] Adding Collection: "
                  + response['collections'][collection]['collection']['title'])
            db_cur_one.execute("insert into ZMT_COLLECTIONS values (TO_CHAR(SYSDATE, 'YYYYMM'), :city_id, :collection_id, "
                               ":title, :description, :url, :share_url, :res_count, SYSDATE)",
                               city_id=city_id,
                               collection_id=response['collections'][collection]['collection']['collection_id'],
                               title=response['collections'][collection]['collection']['title'],
                               description=response['collections'][collection]['collection']['description'],
                               url=response['collections'][collection]['collection']['url'],
                               share_url=response['collections'][collection]['collection']['share_url'],
                               res_count=response['collections'][collection]['collection']['res_count'])
        db_conn.commit()
        log.info("get_collections() | <END>")
        return 0

    def get_locations(self, headers, query):
        """Refresh Zomato Locations for *query*; return (entity_id as str, entity_type)."""
        log.info("get_locations() | <START>")
        # Request data and cleanup any existing row for this entity
        response = requests.get(base_url + '/locations?query=' + query + '&count=1', params='', headers=headers).json()
        db_cur_one.execute("delete from ZMT_LOCATIONS where ENTITY_ID = :entity_id ",
                           entity_id=str(response['location_suggestions'][0]['entity_id']))
        # Populate table
        print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_locations()] Adding Location: "
              + response['location_suggestions'][0]['title'])
        db_cur_one.execute("insert into ZMT_LOCATIONS values (:entity_id, :entity_type, :title, :latitude, :longitude, "
                           ":city_id, :city_name, :country_id, :country_name, SYSDATE)",
                           entity_id=response['location_suggestions'][0]['entity_id'],
                           entity_type=response['location_suggestions'][0]['entity_type'],
                           title=response['location_suggestions'][0]['title'],
                           latitude=response['location_suggestions'][0]['latitude'],
                           longitude=response['location_suggestions'][0]['longitude'],
                           city_id=response['location_suggestions'][0]['city_id'],
                           city_name=response['location_suggestions'][0]['city_name'],
                           country_id=response['location_suggestions'][0]['country_id'],
                           country_name=response['location_suggestions'][0]['country_name'])
        db_conn.commit()
        log.info("get_locations() | <END>")
        return str(response['location_suggestions'][0]['entity_id']), response['location_suggestions'][0]['entity_type']

    def get_location_details(self, headers, entity_id, entity_type, debug_mode):
        """Refresh the current month's Zomato Location Details for an entity."""
        # BUGFIX: log tag previously said get_locations_details().
        log.info("get_location_details() | <START>")
        # Request data and cleanup current month's row
        response = requests.get(base_url + '/location_details?entity_id=' + entity_id + '&entity_type=' + entity_type,
                                params='', headers=headers).json()
        db_cur_one.execute("delete from ZMT_LOCATIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
                           "ENTITY_ID = :entity_id", entity_id=entity_id)
        # BUGFIX: string comparison with '==' rather than identity ('is').
        if debug_mode == 'Y':
            print(str(response['location']['entity_id'])
                  + ' ' + response['location']['entity_type']
                  + ' ' + str(response['popularity'])
                  + ' ' + str(response['nightlife_index'])
                  + ' ' + str(response['top_cuisines'])
                  + ' ' + str(response['popularity_res'])
                  + ' ' + str(response['nightlife_res'])
                  + ' ' + str(response['num_restaurant']))
        # Populate table
        db_cur_one.execute("insert into ZMT_LOCATIONS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :entity_id, :popularity, "
                           ":nightlife_index, :top_cuisines, :popularity_res, :nightlife_res, :num_restaurant, SYSDATE)",
                           entity_id=entity_id,
                           popularity=response['popularity'],
                           nightlife_index=response['nightlife_index'],
                           top_cuisines=str(response['top_cuisines']),
                           popularity_res=response['popularity_res'],
                           nightlife_res=response['nightlife_res'],
                           num_restaurant=response['num_restaurant'])
        db_conn.commit()
        log.info("get_location_details() | <END>")
        return 0

    def get_search_bylocation(self, headers, query, entity_id, entity_type, debug_mode):
        """Search Zomato Restaurants by location and persist them (<=100, rating desc)."""
        log.info("get_search_bylocation() | <START>")
        search_parameters = ('entity_id=' + entity_id + '&entity_type=' + entity_type + '&q=' + query)
        results_start = 0
        results_end = 100
        results_shown = 20
        # Due to API restriction, each request is limited to <= 20 records
        while results_start < results_end:
            response = requests.get(base_url + '/search?' + search_parameters + '&start=' + str(results_start) + '&count='
                                    + str(results_shown) + '&sort=rating&order=desc', params='', headers=headers).json()
            results_start = response['results_start']
            results_shown = response['results_shown']
            if debug_mode == 'Y':  # BUGFIX: '==' not 'is' for strings
                print("Results Start:" + str(results_start))
                print("Results Shown:" + str(results_shown))
            # Loop through response and populate table
            for restaurant in range(len(response['restaurants'])):
                if debug_mode == 'Y':
                    print(str(response['restaurants'][restaurant]['restaurant']['id'])
                          + ' ' + response['restaurants'][restaurant]['restaurant']['name']
                          + ' ' + response['restaurants'][restaurant]['restaurant']['url']
                          + ' ' + response['restaurants'][restaurant]['restaurant']['location']['locality']
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['latitude'])
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['longitude'])
                          + ' ' + response['restaurants'][restaurant]['restaurant']['cuisines']
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['average_cost_for_two'])
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['user_rating']['aggregate_rating'])
                          + ' ' + response['restaurants'][restaurant]['restaurant']['user_rating']['rating_text']
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['user_rating']['votes'])
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['has_online_delivery'])
                          + ' ' + str(response['restaurants'][restaurant]['restaurant']['has_table_booking']))
                # Insert the restaurant master row only when it is not already stored.
                db_cur_one.execute("select count(*) from ZMT_RESTAURANTS where RESTAURANT_ID = :restaurant_id",
                                   restaurant_id=response['restaurants'][restaurant]['restaurant']['id'])
                for values in db_cur_one:
                    if values[0] == 0:  # BUGFIX: '==' not 'is'
                        print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_search_bylocation()] Adding Restaurant: "
                              + response['restaurants'][restaurant]['restaurant']['name'] + ', '
                              + response['restaurants'][restaurant]['restaurant']['location']['locality'])
                        db_cur_two.execute("insert into ZMT_RESTAURANTS values (:restaurant_id, :restaurant_name, :url, "
                                           ":locality, :city_id, :latitude, :longitude, :search_parameters, SYSDATE)",
                                           restaurant_id=response['restaurants'][restaurant]['restaurant']['id'],
                                           restaurant_name=response['restaurants'][restaurant]['restaurant']['name'],
                                           url=response['restaurants'][restaurant]['restaurant']['url'],
                                           locality=response['restaurants'][restaurant]['restaurant']['location'][
                                               'locality'],
                                           city_id=response['restaurants'][restaurant]['restaurant']['location']['city_id'],
                                           latitude=response['restaurants'][restaurant]['restaurant']['location'][
                                               'latitude'],
                                           longitude=response['restaurants'][restaurant]['restaurant']['location'][
                                               'longitude'],
                                           search_parameters=search_parameters)
                # Cleanup current month's monthly-grain data, if any
                db_cur_one.execute("""delete from ZMT_RESTAURANTS_EXT
                                       where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')
                                       and RESTAURANT_ID = :restaurant_id""",
                                   restaurant_id=response['restaurants'][restaurant]['restaurant']['id'])
                # Populate monthly-grain table
                db_cur_one.execute("insert into ZMT_RESTAURANTS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :restaurant_id, "
                                   ":cuisines, :average_cost_for_two, :user_rating_aggregate, :user_rating_text, "
                                   ":user_rating_votes, :has_online_delivery, :has_table_booking, SYSDATE)",
                                   restaurant_id=response['restaurants'][restaurant]['restaurant']['id'],
                                   cuisines=response['restaurants'][restaurant]['restaurant']['cuisines'],
                                   average_cost_for_two=response['restaurants'][restaurant]['restaurant']
                                   ['average_cost_for_two'],
                                   user_rating_aggregate=response['restaurants'][restaurant]['restaurant']['user_rating']
                                   ['aggregate_rating'],
                                   user_rating_text=response['restaurants'][restaurant]['restaurant']['user_rating']
                                   ['rating_text'],
                                   user_rating_votes=response['restaurants'][restaurant]['restaurant']['user_rating'][
                                       'votes'],
                                   has_online_delivery=response['restaurants'][restaurant]['restaurant'][
                                       'has_online_delivery'],
                                   has_table_booking=response['restaurants'][restaurant]['restaurant']['has_table_booking'])
            results_start = results_start + 20
            # Determine request limit for the final partial page
            if results_end - results_start < 20:
                results_shown = results_end - results_start
        db_conn.commit()
        log.info("get_search_bylocation() | <END>")
        return 0

    def get_search_bycollection(self, headers, query, debug_mode):
        """Search Zomato Restaurants for every stored Collection and persist the links."""
        log.info("get_search_bycollection() | <START>")
        # Cleanup current month's data, if any
        db_cur_one.execute("delete from ZMT_COLLECTIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')")
        # Loop through Collection list
        db_cur_two.execute("select distinct CITY_ID, COLLECTION_ID from ZMT_COLLECTIONS order by CITY_ID, COLLECTION_ID")
        for values in db_cur_two:
            collection_id = values[1]
            search_parameters = ('collection_id=' + str(collection_id) + '&q=' + query)
            results_start = 0
            results_end = 100
            results_shown = 20
            # Due to API restriction, each request is limited to <= 20 records
            while results_start < results_end:
                response = requests.get(base_url + '/search?' + search_parameters + '&start=' + str(results_start)
                                        + '&count=' + str(results_shown) + '&sort=rating&order=desc', params='',
                                        headers=headers).json()
                results_start = response['results_start']
                results_shown = response['results_shown']
                if debug_mode == 'Y':  # BUGFIX: '==' not 'is'
                    print("Results Start:" + str(results_start))
                    print("Results Shown:" + str(results_shown))
                # Loop through response and populate table
                for restaurant in range(len(response['restaurants'])):
                    if debug_mode == 'Y':
                        print(str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
                              + ' ' + str(collection_id)
                              + ' ' + str(response['restaurants'][restaurant]['restaurant']['id']))
                    db_cur_one.execute("insert into ZMT_COLLECTIONS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :city_id, "
                                       ":collection_id, :restaurant_id, :search_parameters, SYSDATE)",
                                       city_id=response['restaurants'][restaurant]['restaurant']['location']['city_id'],
                                       collection_id=collection_id,
                                       restaurant_id=response['restaurants'][restaurant]['restaurant']['id'],
                                       search_parameters=search_parameters)
                results_start = results_start + 20
                # Determine request limit for the final partial page
                if results_end - results_start < 20:
                    results_shown = results_end - results_start
        db_conn.commit()
        log.info("get_search_bycollection() | <END>")
        return 0

    def get_restaurant_bycollection(self, headers, debug_mode):
        """Fetch details for collection restaurants missing from ZMT_RESTAURANTS."""
        log.info("get_restaurant_bycollection() | <START>")
        # Determine Restaurants for which data is not available
        db_cur_one.execute("""select distinct RESTAURANT_ID
                              from ZMT_COLLECTIONS_EXT
                              where RESTAURANT_ID not in (select distinct RESTAURANT_ID from ZMT_RESTAURANTS)
                              order by RESTAURANT_ID""")
        # Loop through Restaurant list, request data and populate tables
        for values in db_cur_one:
            res_id = values[0]
            search_parameters = ('res_id=' + str(res_id))
            response = requests.get(base_url + '/restaurant?' + search_parameters, params='', headers=headers).json()
            if debug_mode == 'Y':  # BUGFIX: '==' not 'is'
                print(str(response['id'])
                      + ' ' + response['name']
                      + ' ' + response['url']
                      + ' ' + response['location']['locality']
                      + ' ' + str(response['location']['city_id'])
                      + ' ' + str(response['location']['latitude'])
                      + ' ' + str(response['location']['longitude'])
                      + ' ' + response['cuisines']
                      + ' ' + str(response['average_cost_for_two'])
                      + ' ' + str(response['user_rating']['aggregate_rating'])
                      + ' ' + response['user_rating']['rating_text']
                      + ' ' + str(response['user_rating']['votes'])
                      + ' ' + str(response['has_online_delivery'])
                      + ' ' + str(response['has_table_booking']))
            # BUGFIX: log tag previously said [get_search_bylocation()] (copy/paste slip).
            print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [get_restaurant_bycollection()] Adding Restaurant: "
                  + response['name'] + ', '
                  + response['location']['locality'])
            db_cur_two.execute("insert into ZMT_RESTAURANTS values (:restaurant_id, :restaurant_name, :url, "
                               ":locality, :city_id, :latitude, :longitude, :search_parameters, SYSDATE)",
                               restaurant_id=str(response['id']),
                               restaurant_name=response['name'],
                               url=response['url'],
                               locality=response['location']['locality'],
                               city_id=str(response['location']['city_id']),
                               latitude=str(response['location']['latitude']),
                               longitude=str(response['location']['longitude']),
                               search_parameters=search_parameters)
            db_cur_two.execute("insert into ZMT_RESTAURANTS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :restaurant_id, "
                               ":cuisines, :average_cost_for_two, :user_rating_aggregate, :user_rating_text, "
                               ":user_rating_votes, :has_online_delivery, :has_table_booking, SYSDATE)",
                               restaurant_id=str(response['id']),
                               cuisines=response['cuisines'],
                               average_cost_for_two=str(response['average_cost_for_two']),
                               user_rating_aggregate=str(response['user_rating']['aggregate_rating']),
                               user_rating_text=response['user_rating']['rating_text'],
                               user_rating_votes=str(response['user_rating']['votes']),
                               has_online_delivery=str(response['has_online_delivery']),
                               has_table_booking=str(response['has_table_booking']))
        db_conn.commit()
        log.info("get_restaurant_bycollection() | <END>")
        return 0
# NOTE(review): dead code — a pre-refactor main() kept as a module-level
# triple-quoted string. It has no runtime effect; consider deleting it.
'''
def main():
    """Run App"""
    # Initialize variables
    headers = {'Accept': 'application/json', 'user-key': get_user_key()}
    debug_mode = 'N'
    city = ''
    localities = []
    #print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] <START>")
    log.info("main() | <START>")
    log.debug("main() | <START>")
    # Retrieve Parameter | City Names
    db_cur_one.execute("select count(distinct CITY_NAME) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
    for count in db_cur_one:
        if count[0] is 0:
            print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Parameter: CITY_NAME missing. Please define. ")
        else:
            db_cur_two.execute("select distinct CITY_NAME from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
            for city_name in db_cur_two:
                city = city_name[0]
    # Retrieve Parameter | Localities
    db_cur_one.execute("select count(distinct LOCALITY) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
    for count in db_cur_one:
        if count[0] is 0:
            print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Parameter: LOCALITY missing. Please define. ")
        else:
            db_cur_two.execute("select distinct LOCALITY from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
            for locality in db_cur_two:
                localities.append(locality[0])
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] PARAMETER City: " + city)
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] PARAMETER Localities: " + str(localities))
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] PARAMETER Debug Mode: " + debug_mode)
    # Fetch Category data
    get_categories(headers)
    # Fetch City data
    city_id = get_cities(headers, city)
    get_cuisines(headers, city_id)
    get_establishments(headers, city_id)
    # Fetch Location/Restaurant data
    for locality in range(len(localities)):
        print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Processing Locality: " + localities[locality])
        entity = get_locations(headers, localities[locality])
        get_location_details(headers, entity[0], entity[1], debug_mode)
        get_search_bylocation(headers, localities[locality], entity[0], entity[1], debug_mode)
    # Fetch Collection/Restaurant data
    get_collections(headers, city_id)
    get_search_bycollection(headers, city, debug_mode)
    get_restaurant_bycollection(headers, debug_mode)
    # Close Oracle Connections
    db_cur_one.close()
    db_cur_two.close()
    db_conn.close()
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] <END>")
'''
# NOTE(review): dead code — a pre-refactor __main__ driver kept as a
# module-level triple-quoted string. No runtime effect; consider deleting.
'''
if __name__ == '__main__':
    fmt_string = "%(asctime)s | %(levelname)s | %(module)s | %(message)s"
    fmtr = logging.Formatter(fmt=fmt_string)
    sh = logging.StreamHandler()
    sh.setFormatter(fmtr)
    my_lib_logger = logging.getLogger("mylibrary")
    my_lib_logger.addHandler(sh)
    my_lib_logger.setLevel("DEBUG")
    zom = ZomatoClient()
    #main(zom)
    # Initialize variables
    headers = {'Accept': 'application/json', 'user-key': get_user_key()}
    debug_mode = 'N'
    city = ''
    localities = []
    #print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] <START>")
    log.info("main() | <START>")
    log.debug("main() | <START>")
    # Retrieve Parameter | City Names
    db_cur_one.execute("select count(distinct CITY_NAME) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
    for count in db_cur_one:
        if count[0] is 0:
            print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Parameter: CITY_NAME missing. Please define. ")
        else:
            db_cur_two.execute("select distinct CITY_NAME from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
            for city_name in db_cur_two:
                city = city_name[0]
    # Retrieve Parameter | Localities
    db_cur_one.execute("select count(distinct LOCALITY) from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
    for count in db_cur_one:
        if count[0] is 0:
            print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Parameter: LOCALITY missing. Please define. ")
        else:
            db_cur_two.execute("select distinct LOCALITY from ZMT_PARAMETERS where ACTIVE_FLAG = 'Y'")
            for locality in db_cur_two:
                localities.append(locality[0])
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] PARAMETER City: " + city)
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] PARAMETER Localities: " + str(localities))
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] PARAMETER Debug Mode: " + debug_mode)
    # Fetch Category data
    zom.get_categories(headers)
    # Fetch City data
    city_id = zom.get_cities(headers, city)
    zom.get_cuisines(headers, city_id)
    zom.get_establishments(headers, city_id)
    # Fetch Location/Restaurant data
    for locality in range(len(localities)):
        print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] Processing Locality: " + localities[locality])
        entity = zom.get_locations(headers, localities[locality])
        zom.get_location_details(headers, entity[0], entity[1], debug_mode)
        zom.get_search_bylocation(headers, localities[locality], entity[0], entity[1], debug_mode)
    # Fetch Collection/Restaurant data
    zom.get_collections(headers, city_id)
    zom.get_search_bycollection(headers, city, debug_mode)
    zom.get_restaurant_bycollection(headers, debug_mode)
    # Close Oracle Connections
    db_cur_one.close()
    db_cur_two.close()
    db_conn.close()
    print(strftime("%Y-%b-%d %H:%M:%S", gmtime()) + " | [main()] <END>")
'''
51,555 | nitinx/zomato-mart | refs/heads/master | /mylibrary/zmt_analytics.py | # 09 Feb 2019 | Zomato Data Analytics
"""Zomato Client
Library that:
1. From database, retrieves parameters that restrict data fetched from Zomato.com
2. Fetches data from Zomato.com via Zomato's public APIs
3. Populates the data into the Zomato datamart
API Documentation: https://developers.zomato.com/api#headline1
"""
import logging
from mylibrary.db_oracle import OracleClient
from mylibrary.zmt_db_oracle import ZomatoDBSelectOracle
import matplotlib.pyplot as plt, mpld3
import pandas as pd
# Define Oracle Variables
DB = OracleClient()
db_conn = DB.db_login()
db_cur = db_conn.cursor()
ZmtSelect = ZomatoDBSelectOracle()
log = logging.getLogger(__name__)
class ZomatoAnalytics(object):
    """Builds the monthly locality analytics figure from the Zomato datamart."""

    def plot_locality_stats(self):
        """Plot locality statistics and save the figure to ``plot.png``.

        Pulls pre-aggregated locality stats from Oracle (via ``ZmtSelect``),
        builds per-locality restaurant-count bars plus top-rated-percentage
        and average-rating trend lines, and writes the composite figure to
        ``plot.png``.  Returns None.
        """
        log.debug("plot_locality_stats() | <START>")
        # Retrieve Stats from Database
        loc_analytics = ZmtSelect.select_locality_stats()
        # Create DataFrame
        df = pd.DataFrame(loc_analytics, columns=['LOCALITY', 'PERIOD', 'RSTRNT_CNT_OTH', 'RSTRNT_CNT_TOP',
                                                  'RSTRNT_PCT_TOP', 'AVG_COST_FOR_TWO', 'AVG_RTNG_ALL', 'TOP_RTNG_ALL'])
        df = df.astype({"PERIOD": int, "RSTRNT_CNT_OTH": int, "RSTRNT_CNT_TOP": int, "RSTRNT_PCT_TOP": int})
        # Create DataFrame by Locality (locality names are hard-coded to the
        # three tracked localities — keep in sync with ZMT_PARAMETERS)
        df1 = df[df['LOCALITY'] == 'HSR']
        df2 = df[df['LOCALITY'] == 'Indiranagar']
        df3 = df[df['LOCALITY'] == 'Sarjapur Road']
        # BUGFIX: select multiple GroupBy columns with a list — the tuple-style
        # groupby(...)['A', 'B'] selection is deprecated and removed in pandas 2.0.
        count_cols = ['RSTRNT_CNT_TOP', 'RSTRNT_CNT_OTH']
        df1_all = df1.groupby(['PERIOD'])[count_cols].mean()
        df2_all = df2.groupby(['PERIOD'])[count_cols].mean()
        df3_all = df3.groupby(['PERIOD'])[count_cols].mean()
        # Create Matplotlib Figures & Axes
        fig = plt.figure(figsize=(10, 6.5))
        ax1 = fig.add_axes([0.10, 0.6, 0.2, 0.3])
        ax2 = fig.add_axes([0.35, 0.6, 0.2, 0.3])
        ax3 = fig.add_axes([0.60, 0.6, 0.2, 0.3])
        ax4 = fig.add_axes([0.2, 0.1, 0.2, 0.3])
        ax5 = fig.add_axes([0.5, 0.1, 0.2, 0.3])
        # Plot Restaurant Distribution by Locality
        df1_all.plot(kind='bar', stacked=True, ax=ax1, legend=None, ylim=(0, 120), grid=True, title='HSR')
        df2_all.plot(kind='bar', stacked=True, ax=ax2, legend=None, ylim=(0, 120), grid=True, title='Indiranagar')
        df3_all.plot(kind='bar', stacked=True, ax=ax3, legend=None, ylim=(0, 120), grid=True, title='Sarjapur Road')
        ax3.legend(('Rating >= 4', 'Rating < 4'), loc='center left', bbox_to_anchor=(1, 0.5))
        ax1.set_ylabel('# of Restaurants')
        ax1.set_xlabel('Month of Year')
        ax2.set_xlabel('Month of Year')
        ax3.set_xlabel('Month of Year')
        # Plot Top Rated Restaurants % across all localities
        df1.plot(kind='line', x='PERIOD', y='RSTRNT_PCT_TOP', ax=ax4, legend=None, grid=True, title='Top Rated %')
        df2.plot(kind='line', x='PERIOD', y='RSTRNT_PCT_TOP', ax=ax4, legend=None, grid=True)
        df3.plot(kind='line', x='PERIOD', y='RSTRNT_PCT_TOP', ax=ax4, legend=None, grid=True)
        # Plot Average Restaurant Rating across all localities
        df1.plot(kind='line', x='PERIOD', y='AVG_RTNG_ALL', ax=ax5, legend=None, grid=True, title='Average Rating',
                 label='HSR')
        df2.plot(kind='line', x='PERIOD', y='AVG_RTNG_ALL', ax=ax5, legend=None, grid=True, label='Indiranagar')
        df3.plot(kind='line', x='PERIOD', y='AVG_RTNG_ALL', ax=ax5, legend=None, grid=True, label='Sarjapur Road')
        ax5.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        # Persist the figure for the e-mail attachment
        plt.savefig('plot.png')
        log.debug("plot_locality_stats() | <END>")
51,556 | nitinx/zomato-mart | refs/heads/master | /mylibrary/zmt_db_oracle.py | # 28 Nov 2017 | Zomato Client
"""Zomato Client
Library that:
1. From database, retrieves parameters that restrict data fetched from Zomato.com
2. Fetches data from Zomato.com via Zomato's public APIs
3. Populates the data into the Zomato datamart
API Documentation: https://developers.zomato.com/api#headline1
"""
import requests
import logging
import json
from mylibrary.db_oracle import OracleClient
from time import gmtime, strftime
# Define Zomato Base URL
base_url = "https://developers.zomato.com/api/v2.1"
# Define Oracle Variables
DB = OracleClient()
db_conn = DB.db_login()
db_cur_one = db_conn.cursor()
db_cur_two = db_conn.cursor()
log = logging.getLogger(__name__)
class ZomatoDBInsertOracle:
    """INSERT helpers for the Zomato datamart tables (Oracle).

    Each method writes one row through the module-level connection and
    commits immediately.  Snapshot tables carry a PERIOD column derived
    from SYSDATE (YYYYMM).
    """

    def insert_categories(self, category_id, category_name):
        """Insert one row into ZMT_CATEGORIES."""
        log.debug("insert_categories() | <START>")
        db_cur_two.execute("insert into ZMT_CATEGORIES values (:category_id, :category_name, SYSDATE)",
                           category_id=category_id,
                           category_name=category_name)
        db_conn.commit()
        log.debug("insert_categories() | <END>")

    def insert_cities(self, city_id, city_name, country_id, country_name):
        """Insert one row into ZMT_CITIES."""
        log.debug("insert_cities() | <START>")
        db_cur_two.execute("insert into ZMT_CITIES values (:city_id, :city_name, :country_id, :country_name, "
                           "SYSDATE)",
                           city_id=city_id,
                           city_name=city_name,
                           country_id=country_id,
                           country_name=country_name)
        db_conn.commit()
        log.debug("insert_cities() | <END>")

    def insert_cuisines(self, city_id, cuisine_id, cuisine_name):
        """Insert one row into ZMT_CUISINES."""
        log.debug("insert_cuisines() | <START>")
        db_cur_two.execute("insert into ZMT_CUISINES values (:city_id, :cuisine_id, :cuisine_name, "
                           "SYSDATE)",
                           city_id=city_id,
                           cuisine_id=cuisine_id,
                           cuisine_name=cuisine_name)
        db_conn.commit()
        log.debug("insert_cuisines() | <END>")

    def insert_establishments(self, city_id, establishment_id, establishment_name):
        """Insert one row into ZMT_ESTABLISHMENTS."""
        log.debug("insert_establishments() | <START>")
        db_cur_two.execute("insert into ZMT_ESTABLISHMENTS values (:city_id, :establishment_id, "
                           ":establishment_name, SYSDATE)",
                           city_id=city_id,
                           establishment_id=establishment_id,
                           establishment_name=establishment_name)
        db_conn.commit()
        log.debug("insert_establishments() | <END>")

    def insert_collections(self, city_id, collection_id, title, description, url, share_url, res_count):
        """Insert one row into ZMT_COLLECTIONS (current YYYYMM snapshot)."""
        log.debug("insert_collections() | <START>")
        db_cur_one.execute("insert into ZMT_COLLECTIONS values (TO_CHAR(SYSDATE, 'YYYYMM'), :city_id, "
                           ":collection_id, :title, :description, :url, :share_url, :res_count, SYSDATE)",
                           city_id=city_id,
                           collection_id=collection_id,
                           title=title,
                           description=description,
                           url=url,
                           share_url=share_url,
                           res_count=res_count)
        db_conn.commit()
        log.debug("insert_collections() | <END>")

    def insert_collections_ext(self, city_id, collection_id, restaurant_id, search_parameters):
        """Insert one collection→restaurant link into ZMT_COLLECTIONS_EXT."""
        log.debug("insert_collections_ext() | <START>")
        db_cur_one.execute(
            "insert into ZMT_COLLECTIONS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :city_id, "
            ":collection_id, :restaurant_id, :search_parameters, SYSDATE)",
            city_id=city_id,
            collection_id=collection_id,
            restaurant_id=restaurant_id,
            search_parameters=search_parameters)
        db_conn.commit()
        log.debug("insert_collections_ext() | <END>")

    def insert_locations(self, entity_id, entity_type, title, latitude, longitude, city_id, city_name, country_id,
                         country_name):
        """Insert one row into ZMT_LOCATIONS."""
        log.debug("insert_locations() | <START>")
        db_cur_one.execute("insert into ZMT_LOCATIONS values (:entity_id, :entity_type, :title, :latitude, :longitude, "
                           ":city_id, :city_name, :country_id, :country_name, SYSDATE)",
                           entity_id=entity_id,
                           entity_type=entity_type,
                           title=title,
                           latitude=latitude,
                           longitude=longitude,
                           city_id=city_id,
                           city_name=city_name,
                           country_id=country_id,
                           country_name=country_name)
        db_conn.commit()
        log.debug("insert_locations() | <END>")

    def insert_locations_ext(self, entity_id, popularity, nightlife_index, top_cuisines, popularity_res, nightlife_res,
                             num_restaurant):
        """Insert one row into ZMT_LOCATIONS_EXT (current YYYYMM snapshot)."""
        log.debug("insert_locations_ext() | <START>")
        db_cur_one.execute("insert into ZMT_LOCATIONS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :entity_id, :popularity, "
                           ":nightlife_index, :top_cuisines, :popularity_res, :nightlife_res, :num_restaurant, "
                           "SYSDATE)",
                           entity_id=entity_id,
                           popularity=popularity,
                           nightlife_index=nightlife_index,
                           top_cuisines=top_cuisines,
                           popularity_res=popularity_res,
                           nightlife_res=nightlife_res,
                           num_restaurant=num_restaurant)
        db_conn.commit()
        log.debug("insert_locations_ext() | <END>")

    def insert_restaurants(self, restaurant_id, restaurant_name, url, locality, city_id, latitude, longitude,
                           search_parameters):
        """Insert one base row into ZMT_RESTAURANTS (ESTABLISHMENT_ID left NULL)."""
        log.debug("insert_restaurants() | <START>")
        db_cur_two.execute("insert into ZMT_RESTAURANTS values (:restaurant_id, :restaurant_name, "
                           ":url, :locality, :city_id, :latitude, :longitude, :search_parameters, "
                           "SYSDATE, NULL)",
                           restaurant_id=restaurant_id,
                           restaurant_name=restaurant_name,
                           url=url,
                           locality=locality,
                           city_id=city_id,
                           latitude=latitude,
                           longitude=longitude,
                           search_parameters=search_parameters)
        db_conn.commit()
        log.debug("insert_restaurants() | <END>")

    def insert_restaurants_ext(self, restaurant_id, cuisines, average_cost_for_two, user_rating_aggregate,
                               user_rating_text, user_rating_votes, has_online_delivery, has_table_booking):
        """Insert one monthly snapshot row into ZMT_RESTAURANTS_EXT.

        Best-effort: a failed insert (e.g. a duplicate PERIOD/RESTAURANT_ID)
        is logged and skipped instead of aborting the caller's loop.
        """
        log.debug("insert_restaurants_ext() | <START>")
        try:
            db_cur_two.execute("insert into ZMT_RESTAURANTS_EXT values (TO_CHAR(SYSDATE, 'YYYYMM'), :restaurant_id, "
                               ":cuisines, :average_cost_for_two, :user_rating_aggregate, :user_rating_text, "
                               ":user_rating_votes, :has_online_delivery, :has_table_booking, SYSDATE)",
                               restaurant_id=restaurant_id,
                               cuisines=cuisines,
                               average_cost_for_two=average_cost_for_two,
                               user_rating_aggregate=user_rating_aggregate,
                               user_rating_text=user_rating_text,
                               user_rating_votes=user_rating_votes,
                               has_online_delivery=has_online_delivery,
                               has_table_booking=has_table_booking)
            db_conn.commit()
        except Exception:
            # Was a bare `except: pass`, which also swallowed KeyboardInterrupt
            # and hid every failure; keep the best-effort behaviour but log it.
            log.exception("insert_restaurants_ext() | insert failed for restaurant_id=%s", restaurant_id)
        log.debug("insert_restaurants_ext() | <END>")
class ZomatoDBUpdateOracle:
    """UPDATE helpers for the Zomato datamart tables (Oracle)."""

    def update_restaurants(self, restaurant_id, establishment_id):
        """Set ESTABLISHMENT_ID on an existing ZMT_RESTAURANTS row."""
        log.debug("update_restaurants() | <START>")
        # The statement previously ended with a stray ')' after the bind
        # variable, making the SQL invalid on every call.
        db_cur_two.execute("update ZMT_RESTAURANTS set ESTABLISHMENT_ID = :establishment_id "
                           "where RESTAURANT_ID = :restaurant_id",
                           restaurant_id=restaurant_id,
                           establishment_id=establishment_id)
        db_conn.commit()
        log.debug("update_restaurants() | <END>")
class ZomatoDBSelectOracle:
    """SELECT helpers for reporting over the Zomato datamart."""
    def select_locality_stats(self):
        """Per-locality, per-period restaurant statistics for active localities.

        Returns a list of tuples:
        (locality, period, rstrnt_cnt_oth, rstrnt_cnt_top, rstrnt_pct_top,
         avg_cost_for_two, avg_rtng_all, top_rtng_all)
        where "top" means an aggregate user rating >= 4.
        """
        log.debug("select_locality_stats() | <START>")
        loc_analytics = []
        # Inner query aggregates per locality/period, restricted to localities
        # flagged active in ZMT_PARAMETERS; the outer query derives the
        # top-rated percentage from the two counts.
        db_cur_one.execute("SELECT locality, period, rstrnt_cnt_oth, rstrnt_cnt_top, "
                           " round( (rstrnt_cnt_top / (rstrnt_cnt_top + rstrnt_cnt_oth) * 100),0) rstrnt_pct_top, "
                           " avg_cost_for_two, avg_rtng_all, top_rtng_all "
                           " FROM (SELECT zre.period, zr.loc_locality AS locality, "
                           " SUM(CASE "
                           " WHEN to_number(zre.user_rating_aggregate,'9.9') >= 4 "
                           " THEN 1 "
                           " ELSE 0 END) AS rstrnt_cnt_top, "
                           " SUM(CASE "
                           " WHEN to_number(zre.user_rating_aggregate,'9.9') < 4 "
                           " THEN 1 "
                           " ELSE 0 END) AS rstrnt_cnt_oth, "
                           " round(AVG(zre.average_cost_for_two),-2) AS avg_cost_for_two, "
                           " round(AVG(to_number(zre.user_rating_aggregate,'9.9') ),1) AS avg_rtng_all, "
                           " round(MAX(to_number(zre.user_rating_aggregate,'9.9') ),1) AS top_rtng_all"
                           " FROM zmt_restaurants zr, zmt_restaurants_ext zre, zmt_parameters zp "
                           " WHERE zr.restaurant_id = zre.restaurant_id "
                           " AND zr.loc_locality = zp.locality "
                           " AND zp.active_flag = 'Y' "
                           " GROUP BY zr.loc_locality, zre.period ) "
                           "ORDER BY locality, period")
        for row in db_cur_one:
            loc_analytics.append(row)
        log.debug("select_locality_stats() | <END>")
        return loc_analytics
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,557 | nitinx/zomato-mart | refs/heads/master | /mylibrary/zmt_client.py | # 28 Nov 2017 | Zomato Main Library
"""Zomato Client
Library that:
1. From database, retrieves parameters that restrict data fetched from Zomato.com
2. Fetches data from Zomato.com via Zomato's public APIs
3. Populates the data into the Zomato datamart
API Documentation: https://developers.zomato.com/api#headline1
"""
import logging
from mylibrary.db_oracle import OracleClient
from mylibrary.zmt_requests import ZomatoRequests
from mylibrary.zmt_db_oracle import ZomatoDBInsertOracle
# Define Oracle Variables
# Two cursors: the client methods below iterate one result set while issuing
# DML through the other cursor (or through ZmtInsert).
DB = OracleClient()
db_conn = DB.db_login()
db_cur_one = db_conn.cursor()
db_cur_two = db_conn.cursor()
# Shared insert helper used by every ZomatoClient method.
ZmtInsert = ZomatoDBInsertOracle()
log = logging.getLogger(__name__)
class ZomatoClient(object):
    """Pulls reference and search data from the Zomato API into the datamart.

    Methods follow a common pattern: check staleness in the datamart, fetch
    via ZomatoRequests, then insert through ZomatoDBInsertOracle.
    """
    def __init__(self, headers):
        # HTTP headers forwarded to every API request.
        self.headers = headers
        self.ZmtRequest = ZomatoRequests(self.headers)
def get_categories(self):
"""Refresh Zomato Categories data"""
log.debug("get_categories() | <START>")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from zmt_categories where TO_CHAR(INSERT_DT,'YYYY') = "
"TO_CHAR(SYSDATE, 'YYYY')")
for values in db_cur_one:
if values[0] is 0:
log.info("get_categories() | Data stale/unavailable. Refreshing...")
# Request data and cleanup table
response = self.ZmtRequest.get_categories()
db_cur_two.execute("truncate table ZMT_CATEGORIES")
# Loop through response and populate table
for category in range(len(response['categories'])):
log.info("get_categories() | Adding Category: "
+ response['categories'][category]['categories']['name'])
ZmtInsert.insert_categories(response['categories'][category]['categories']['id'],
response['categories'][category]['categories']['name'])
else:
log.info("get_categories() | Data is current. Refresh skipped.")
log.debug("get_categories() | <END>")
def get_cities(self, query):
"""Refresh Zomato Cities data"""
log.debug("get_cities() | <START>")
# Request data
response = self.ZmtRequest.get_cities(query)
# Check if data exists. Populate table if yes, ignore response otherwise.
db_cur_one.execute("select count(*) from ZMT_CITIES where CITY_NAME = :name", name=query)
for values in db_cur_one:
if values[0] is 0:
log.info("get_cities() | Adding City: " + query)
ZmtInsert.insert_cities(response['location_suggestions'][0]['id'],
response['location_suggestions'][0]['name'],
response['location_suggestions'][0]['country_id'],
response['location_suggestions'][0]['country_name'])
log.debug("get_cities() | <END>")
return str(response['location_suggestions'][0]['id'])
def get_cuisines(self, city_id):
"""Refresh Zomato Cuisines data"""
log.debug("get_cuisines() | <START>")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from zmt_cuisines where TO_CHAR(INSERT_DT,'YYYY') = "
"TO_CHAR(SYSDATE, 'YYYY')")
for values in db_cur_one:
if values[0] is 0:
log.info("get_cuisines() | Data is stale/unavailable. Refreshing...")
# Request data and cleanup table
response = self.ZmtRequest.get_cuisines(city_id)
db_cur_two.execute("truncate table ZMT_CUISINES")
# Loop through response and populate table
for cuisine in range(len(response['cuisines'])):
log.info("get_cuisines() | Adding Cuisine: "
+ response['cuisines'][cuisine]['cuisine']['cuisine_name'])
ZmtInsert.insert_cuisines(city_id,
response['cuisines'][cuisine]['cuisine']['cuisine_id'],
response['cuisines'][cuisine]['cuisine']['cuisine_name'])
else:
log.info("get_cuisines() | Data is current. Refresh skipped.")
log.debug("get_cuisines() | <END>")
return 0
def get_establishments(self, city_id):
"""Refresh Zomato Establishments data"""
log.debug("get_establishments() | <START>")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from zmt_establishments where TO_CHAR(INSERT_DT,'YYYY') = "
"TO_CHAR(SYSDATE, 'YYYY')")
for values in db_cur_one:
if values[0] is 0:
log.info("get_establishments() | Data is stale/unavailable. Refreshing...")
# Request data and cleanup table
response = self.ZmtRequest.get_establishments(city_id)
db_cur_two.execute("truncate table ZMT_ESTABLISHMENTS")
# Loop through response and populate table
for establishment in range(len(response['establishments'])):
log.info("get_establishments() | Adding Establishment: "
+ response['establishments'][establishment]['establishment']['name'])
ZmtInsert.insert_establishments(city_id,
response['establishments'][establishment]['establishment']['id'],
response['establishments'][establishment]['establishment']['name'])
else:
log.info("get_establishments() | Data is current. Refresh skipped.")
log.debug("get_establishments() | <END>")
return 0
def get_collections(self, city_id):
"""Refresh Zomato Collections data"""
log.debug("get_collections() | <START>")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from ZMT_COLLECTIONS where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
"CITY_ID = :city_id",
city_id=city_id)
for values in db_cur_one:
if values[0] is 0:
log.info("get_collections() | Data stale/unavailable. Refreshing...")
# Request data and cleanup table
response = self.ZmtRequest.get_collections(city_id)
db_cur_one.execute("delete from ZMT_COLLECTIONS where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
"CITY_ID = :city_id",
city_id=city_id)
# Loop through response and populate table
for collection in range(len(response['collections'])):
log.info("get_collections() | Adding Collection: "
+ response['collections'][collection]['collection']['title'])
ZmtInsert.insert_collections(city_id,
response['collections'][collection]['collection']['collection_id'],
response['collections'][collection]['collection']['title'],
response['collections'][collection]['collection']['description'],
response['collections'][collection]['collection']['url'],
response['collections'][collection]['collection']['share_url'],
response['collections'][collection]['collection']['res_count'])
else:
log.info("get_collections() | Data is current. Refresh skipped.")
log.debug("get_collections() | <END>")
return 0
    def get_locations(self, query):
        """Upsert the API's top location match for `query` into ZMT_LOCATIONS.

        Returns (entity_id as str, entity_type) of the best suggestion.
        """
        log.debug("get_locations() | <START>")
        # Request data and cleanup table
        response = self.ZmtRequest.get_locations(query)
        # Delete-then-insert keeps exactly one row per entity.
        db_cur_one.execute("delete from ZMT_LOCATIONS where ENTITY_ID = :entity_id ",
                           entity_id=str(response['location_suggestions'][0]['entity_id']))
        # Populate table
        log.info("get_locations() | Adding Location: " + response['location_suggestions'][0]['title'])
        ZmtInsert.insert_locations(response['location_suggestions'][0]['entity_id'],
                                   response['location_suggestions'][0]['entity_type'],
                                   response['location_suggestions'][0]['title'],
                                   response['location_suggestions'][0]['latitude'],
                                   response['location_suggestions'][0]['longitude'],
                                   response['location_suggestions'][0]['city_id'],
                                   response['location_suggestions'][0]['city_name'],
                                   response['location_suggestions'][0]['country_id'],
                                   response['location_suggestions'][0]['country_name'])
        log.debug("get_locations() | <END>")
        return str(response['location_suggestions'][0]['entity_id']), response['location_suggestions'][0]['entity_type']
def get_location_details(self, entity_id, entity_type):
"""Refresh Zomato Location Details data"""
log.debug("get_locations_details() | <START>")
# Request data and cleanup table
response = self.ZmtRequest.get_location_details(entity_id, entity_type)
db_cur_one.execute("delete from ZMT_LOCATIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM') and "
"ENTITY_ID = :entity_id", entity_id=entity_id)
# Populate table
try:
log.debug(str(response['location']['entity_id'])
+ ' ' + response['location']['entity_type']
+ ' ' + str(response['popularity'])
+ ' ' + str(response['nightlife_index'])
+ ' ' + str(response['top_cuisines'])
+ ' ' + str(response['popularity_res'])
+ ' ' + str(response['nightlife_res'])
+ ' ' + str(response['num_restaurant']))
ZmtInsert.insert_locations_ext(entity_id,
response['popularity'],
response['nightlife_index'],
str(response['top_cuisines']),
response['popularity_res'],
response['nightlife_res'],
response['num_restaurant'])
except KeyError:
print("KeyError: " + str(response))
log.debug("get_locations_details() | <END>")
return 0
def get_search_bylocation(self, query, entity_id, entity_type):
"""Search Zomato Restaurants by Location"""
log.debug("get_search_bylocation() | <START>")
search_parameters = ('entity_id=' + entity_id + '&entity_type=' + entity_type + '&q=' + query)
results_start = 0
results_end = 100
results_shown = 20
# Due to API restriction, request restricted to <= 20 records
while results_start < results_end:
response = self.ZmtRequest.get_search(search_parameters, str(results_start), str(results_shown))
# results_found = response['results_found']
results_start = response['results_start']
results_shown = response['results_shown']
log.debug("Results Start:" + str(results_start))
log.debug("Results Shown:" + str(results_shown))
# Loop through response and populate table
for restaurant in range(len(response['restaurants'])):
log.debug(str(response['restaurants'][restaurant]['restaurant']['id'])
+ ' ' + response['restaurants'][restaurant]['restaurant']['name']
+ ' ' + response['restaurants'][restaurant]['restaurant']['url']
+ ' ' + response['restaurants'][restaurant]['restaurant']['location']['locality']
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['latitude'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['location']['longitude'])
+ ' ' + response['restaurants'][restaurant]['restaurant']['cuisines']
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['average_cost_for_two'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['user_rating']
['aggregate_rating'])
+ ' ' + response['restaurants'][restaurant]['restaurant']['user_rating']['rating_text']
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['user_rating']['votes'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['has_online_delivery'])
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['has_table_booking']))
# Check if Restaurant data exists. Populate table if no, ignore otherwise.
db_cur_one.execute("select count(*) from ZMT_RESTAURANTS where RESTAURANT_ID = :restaurant_id",
restaurant_id=response['restaurants'][restaurant]['restaurant']['id'])
for values in db_cur_one:
if values[0] is 0:
log.info("get_search_bylocation() | Adding Restaurant: "
+ response['restaurants'][restaurant]['restaurant']['name'] + ', '
+ response['restaurants'][restaurant]['restaurant']['location']['locality'])
ZmtInsert.insert_restaurants(response['restaurants'][restaurant]['restaurant']['id'],
response['restaurants'][restaurant]['restaurant']['name'],
response['restaurants'][restaurant]['restaurant']['url'],
response['restaurants'][restaurant]['restaurant']['location']
['locality'],
response['restaurants'][restaurant]['restaurant']['location']
['city_id'],
response['restaurants'][restaurant]['restaurant']['location']
['latitude'],
response['restaurants'][restaurant]['restaurant']['location']
['longitude'],
search_parameters)
# Cleanup current month's data, if any
db_cur_one.execute("""delete from ZMT_RESTAURANTS_EXT
where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')
and RESTAURANT_ID = :restaurant_id""",
restaurant_id=response['restaurants'][restaurant]['restaurant']['id'])
# Populate table
ZmtInsert.insert_restaurants_ext(response['restaurants'][restaurant]['restaurant']['id'],
response['restaurants'][restaurant]['restaurant']['cuisines'],
response['restaurants'][restaurant]['restaurant']
['average_cost_for_two'],
response['restaurants'][restaurant]['restaurant']['user_rating']
['aggregate_rating'],
response['restaurants'][restaurant]['restaurant']['user_rating']
['rating_text'],
response['restaurants'][restaurant]['restaurant']['user_rating']
['votes'],
response['restaurants'][restaurant]['restaurant']
['has_online_delivery'],
response['restaurants'][restaurant]['restaurant']['has_table_booking'])
results_start = results_start + 20
# Determine request limit
if results_end - results_start < 20:
results_shown = results_end - results_start
log.debug("get_search_bylocation() | <END>")
return 0
def get_search_bycollection(self, query):
"""Search Zomato Restaurants by Collections"""
log.debug("get_search_bycollection() | <START>")
# Cleanup current month's data, if any
# db_cur_one.execute("delete from ZMT_COLLECTIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from ZMT_COLLECTIONS_EXT where PERIOD = TO_CHAR(SYSDATE, 'YYYYMM')")
for count in db_cur_one:
if count[0] is 0:
log.info("get_search_bycollection() | Data stale/unavailable. Refreshing...")
# Loop through Collection list
db_cur_two.execute("select distinct CITY_ID, COLLECTION_ID from ZMT_COLLECTIONS order by CITY_ID, "
"COLLECTION_ID")
for values in db_cur_two:
collection_id = values[1]
search_parameters = ('collection_id=' + str(collection_id) + '&q=' + query)
results_start = 0
results_end = 100
results_shown = 20
# Due to API restriction, request restricted to <= 20 records
while results_start < results_end:
response = self.ZmtRequest.get_search(search_parameters, str(results_start), str(results_shown))
# results_found = response['results_found']
results_start = response['results_start']
results_shown = response['results_shown']
log.debug("Results Start:" + str(results_start))
log.debug("Results Shown:" + str(results_shown))
# Loop through response and populate table
for restaurant in range(len(response['restaurants'])):
log.debug(str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
+ ' ' + str(collection_id)
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['id']))
ZmtInsert.insert_collections_ext(response['restaurants'][restaurant]['restaurant']
['location']['city_id'],
collection_id,
response['restaurants'][restaurant]['restaurant']['id'],
search_parameters)
results_start = results_start + 20
# Determine request limit
if results_end - results_start < 20:
results_shown = results_end - results_start
else:
log.info("get_collections_ext() | Data is current. Refresh skipped.")
log.debug("get_search_bycollection() | <END>")
return 0
def get_search_byestablishmenttype(self, query):
"""Search Zomato Restaurants by Establishment Type"""
log.debug("get_search_byestablishmenttype() | <START>")
# Check if data exists / is stale (> 1 month)
db_cur_one.execute("select COUNT(*) from ZMT_RESTAURANTS where ESTABLISHMENT_ID is NULL "
"and LOC_LOCALITY in (select LOCALITY from zmt_parameters)")
for count in db_cur_one:
if count[0] is 0:
log.info("get_search_byestablishmenttype() | Data stale/unavailable. Refreshing...")
# Loop through Collection list
db_cur_two.execute("select RESTAURANT_ID from ZMT_RESTAURANTS where ESTABLISHMENT_ID is NULL "
"and LOC_LOCALITY in (select LOCALITY from zmt_parameters)")
for values in db_cur_two:
collection_id = values[1]
search_parameters = ('collection_id=' + str(collection_id) + '&q=' + query)
results_start = 0
results_end = 100
results_shown = 20
# Due to API restriction, request restricted to <= 20 records
while results_start < results_end:
response = self.ZmtRequest.get_search(search_parameters, str(results_start), str(results_shown))
# results_found = response['results_found']
results_start = response['results_start']
results_shown = response['results_shown']
log.debug("Results Start:" + str(results_start))
log.debug("Results Shown:" + str(results_shown))
# Loop through response and populate table
for restaurant in range(len(response['restaurants'])):
log.debug(str(response['restaurants'][restaurant]['restaurant']['location']['city_id'])
+ ' ' + str(collection_id)
+ ' ' + str(response['restaurants'][restaurant]['restaurant']['id']))
ZmtInsert.insert_collections_ext(response['restaurants'][restaurant]['restaurant']
['location']['city_id'],
collection_id,
response['restaurants'][restaurant]['restaurant']['id'],
search_parameters)
results_start = results_start + 20
# Determine request limit
if results_end - results_start < 20:
results_shown = results_end - results_start
else:
log.info("get_search_byestablishmenttype() | Data is current. Refresh skipped.")
log.debug("get_search_byestablishmenttype() | <END>")
return 0
def get_restaurant_bycollection(self):
"""Retrieve Zomato Restaurants data for Collections"""
log.debug("get_restaurant_bycollection() | <START>")
# Determine Restaurants for which data is not available
db_cur_one.execute("""select distinct RESTAURANT_ID
from ZMT_COLLECTIONS_EXT
where RESTAURANT_ID not in (select distinct RESTAURANT_ID from ZMT_RESTAURANTS)
order by RESTAURANT_ID""")
# Loop through Restaurant list, request data and populate tables
for values in db_cur_one:
res_id = values[0]
search_parameters = ('res_id=' + str(res_id))
response = self.ZmtRequest.get_restaurant(search_parameters)
log.debug(str(response['id'])
+ ' ' + response['name']
+ ' ' + response['url']
+ ' ' + response['location']['locality']
+ ' ' + str(response['location']['city_id'])
+ ' ' + str(response['location']['latitude'])
+ ' ' + str(response['location']['longitude'])
+ ' ' + response['cuisines']
+ ' ' + str(response['average_cost_for_two'])
+ ' ' + str(response['user_rating']['aggregate_rating'])
+ ' ' + response['user_rating']['rating_text']
+ ' ' + str(response['user_rating']['votes'])
+ ' ' + str(response['has_online_delivery'])
+ ' ' + str(response['has_table_booking']))
log.info("get_restaurant_bycollection() | Adding Restaurant: " + response['name'] + ', '
+ response['location']['locality'])
ZmtInsert.insert_restaurants_ext(str(response['id']),
response['name'],
response['url'],
response['location']['locality'],
str(response['location']['city_id']),
str(response['location']['latitude']),
str(response['location']['longitude']),
search_parameters)
ZmtInsert.insert_restaurants_ext(str(response['id']),
response['cuisines'],
str(response['average_cost_for_two']),
str(response['user_rating']['aggregate_rating']),
response['user_rating']['rating_text'],
str(response['user_rating']['votes']),
str(response['has_online_delivery']),
str(response['has_table_booking']))
log.debug("get_restaurant_bycollection() | <END>")
return 0
| {"/application.py": ["/mylibrary/apikey.py", "/mylibrary/zmt_parameters.py", "/mylibrary/zmt_client.py", "/mylibrary/zmt_alerts.py", "/mylibrary/zmt_analytics.py"], "/zomatomart.py": ["/mylibrary/http.py"], "/mylibrary/zmt_analytics.py": ["/mylibrary/zmt_db_oracle.py"], "/mylibrary/zmt_client.py": ["/mylibrary/zmt_requests.py", "/mylibrary/zmt_db_oracle.py"]} |
51,585 | J0seca/freshgrowpi | refs/heads/master | /scripts/vent_control.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
#Automatiza ventiladores adicionales.
#Ventiladores se prenden cuando
#se supera cierta temperatura o humedad
import time
import Adafruit_DHT
import RPi.GPIO as GPIO
from config.variables import vent_temp_max, vent_hum_max
# GPIO setup: BCM numbering; pin 24 is the output toggled by the
# prende/apaga functions below.
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(24, GPIO.OUT)
# Sensor configuration: DHT11 on GPIO 4; thresholds come from config.variables.
sensor = Adafruit_DHT.DHT11
pin = 4
temp_max = vent_temp_max
hum_max = vent_hum_max
#funciones de encendido/apagado 1/0
def prende_ventilador():
    """Switch the fans on (GPIO 24 high)."""
    GPIO.output(24, True)
def apaga_ventilador():
    """Switch the fans off (GPIO 24 low)."""
    GPIO.output(24, False)
def main():
    """Poll the DHT11 and run the fans for 10 minutes when a threshold is exceeded.

    Runs forever; sensor-read errors switch the fans off as a safe default.
    """
    while True:
        hum, temp = Adafruit_DHT.read_retry(sensor, pin)
        temp = str(temp)[0:2]
        # Keep two digits of humidity: the previous [0:1] slice truncated
        # e.g. 85% to "8", so the hum_max (80) branch could never trigger.
        hum = str(hum)[0:2]
        head = "Temp:", temp, "Hum:", hum, "Temp max:", temp_max, "Hum max:", hum_max
        print(head)
        estado_v = GPIO.input(24)
        print("estado de ventilador:", estado_v)
        try:
            if int(temp) >= temp_max and int(estado_v) == 0:
                print("Temperatura muy alta. Prendiendo ventiladores.")
                prende_ventilador()
                time.sleep(600)  # fans on for 10 minutes
                print("Apagando ventiladores.")
                apaga_ventilador()
            elif int(hum) >= hum_max and int(estado_v) == 0:
                print("Humedad muy alta. Prendiendo ventiladores.")
                prende_ventilador()
                time.sleep(600)  # fans on for 10 minutes
                print("Apagando ventiladores.")
                apaga_ventilador()
            else:
                # NOTE(review): the message says the fan is being switched off,
                # but nothing is toggled here — confirm whether
                # apaga_ventilador() was intended.
                print("Parámetros correctos. Apagando ventilador.")
        except (ValueError, TypeError):
            # A failed read yields None → "No" after slicing, which int()
            # rejects; the previous bare `except:` also swallowed
            # KeyboardInterrupt and every other error.
            print("Error de datos")
            apaga_ventilador()
        print("Descansando")
        time.sleep(30)


main()
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,586 | J0seca/freshgrowpi | refs/heads/master | /flask/bin/flask_reporte.py | #!/usr/bin/python3
# -*- coding: utf8 -*-
import os
import time
import pandas as pd
import numpy as np
def procesa_datos(archivo_log):
    """Load one day's climate log (';'-separated CSV) and summarise it.

    Returns (temp_min, temp_max, temp_media, hum_min, hum_max, hum_media,
    prop_vent, prop_ext); the two "% of samples with the relay On"
    proportions are '-' when there are too few rows (<= 2) to be meaningful.
    """
    datos = pd.read_csv(archivo_log, sep=";", header=None)
    datos.columns = ['Fecha', 'Hora', 'Temperatura', 'Humedad',
                     'Ventilador', 'Extractor', 'Luz']
    # The date column is not used by any statistic below.
    del datos['Fecha']
    # Relay states arrive as 1/0; present them as On/Off strings.
    datos = datos.astype({"Ventilador": 'object', "Extractor": 'object', "Luz": 'object'})
    for col in ('Ventilador', 'Extractor', 'Luz'):
        datos.loc[datos[col] == 1, col] = 'On'
        datos.loc[datos[col] == 0, col] = 'Off'
    temperatura = datos['Temperatura']
    humedad = datos['Humedad']
    temp_min = temperatura.min()
    temp_max = temperatura.max()
    temp_media = int(round(temperatura.mean(), 1))
    hum_min = int(humedad.min())
    hum_max = int(humedad.max())
    hum_media = int(round(humedad.mean(), 1))
    total = len(datos)
    if total > 2:
        prop_vent = int(round(len(datos.loc[datos['Ventilador'] == "On"]) / total * 100, 1))
        prop_ext = int(round(len(datos.loc[datos['Extractor'] == "On"]) / total * 100, 1))
    else:
        prop_vent = "-"
        prop_ext = "-"
    return temp_min, temp_max, temp_media, hum_min, hum_max, hum_media, prop_vent, prop_ext
def reporte():
    """Summarise today's climate log.

    Returns the 8-tuple produced by procesa_datos(), or eight "0" strings
    when no log file exists for today.
    """
    archivo_log = "/home/pi/freshgrowpi/log/log_clima_" + time.strftime("%d-%m-%y") + ".csv"
    if not os.path.isfile(archivo_log):
        # No log yet today: every statistic reported as the string "0".
        return ("0",) * 8
    return procesa_datos(archivo_log)
#reporte()
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,587 | J0seca/freshgrowpi | refs/heads/master | /scripts/config/variables.py | vent_temp_max = 21
vent_hum_max = 80  # humidity (%) at or above which the extra fans switch on
ext_temp_max = 23  # temperature threshold — presumably for the extractor script; confirm
ext_hum_max = 90  # humidity threshold — presumably for the extractor script; confirm
luz_hora_encendido = "10:00"  # lights-on time (HH:MM)
luz_hora_apagado = "23:00"  # lights-off time (HH:MM)
correo_datos = "carlos@freshgrowpi.cl"  # notification e-mail recipient
frecuencia_correos = "12"  # e-mail frequency (units not shown here — confirm)
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,588 | J0seca/freshgrowpi | refs/heads/master | /scripts/log_service.py | #!/usr/bin/python3
# -*- coding: utf8 -*-
import os
import time
import Adafruit_DHT
import RPi.GPIO as GPIO
# GPIO setup: BCM numbering; pins 24/25/21 are read below as the relay states
# recorded in each log row.
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(24,GPIO.OUT)
GPIO.setup(25,GPIO.OUT)
GPIO.setup(21,GPIO.OUT)
# DHT11 temperature/humidity sensor on GPIO 4.
sensor = Adafruit_DHT.DHT11
pin = 4
sleep = 1800  # seconds between samples (1800 = half an hour)
# Sampling loop: every `sleep` seconds append one ';'-separated row
# (date;time;temp;hum;pin24;pin25;pin21) to today's CSV log.
while True:
    try:
        hum, temp = Adafruit_DHT.read_retry(sensor, pin)
        temp = str(temp)[0:2]
        hum = str(hum)[0:2]
        fecha = time.strftime("%d-%m-%y")
        hora = time.strftime("%H:%M")
        # A failed read yields None, whose str() slice is "No": record as 0.
        if hum == "No":
            hum = "0"
        if temp == "No":
            temp = "0"
        log_data = fecha + ";" + hora + ";" + temp + ";" + hum + ";" + str(GPIO.input(24)) + ";" + str(GPIO.input(25)) + ";" + str(GPIO.input(21))
    except ValueError:
        # NOTE(review): this fallback row has only 4 fields (no GPIO states),
        # so the CSV gains a short row the reporting code may choke on.
        temp = "0"
        hum = "0"
        fecha = time.strftime("%d-%m-%y")
        hora = time.strftime("%H:%M")
        log_data = fecha + ";" + hora + ";" + temp + ";" + hum
    logfile = "/home/pi/freshgrowpi/log/log_clima_" + time.strftime("%d-%m-%y") + ".csv"
    if os.path.isfile(logfile):
        print("Archivo encontrado: ", logfile, " Escribiendo datos.")
        # NOTE(review): the whole file is re-read and rewritten just to append
        # one row (O(n) per sample); opening with mode "a" would do the same.
        _log = open(logfile, "r")
        _log_lines = _log.readlines()
        _log.close()
        _log = open(logfile, "w")
        for line in _log_lines:
            line = line.strip("\n")
            _log.write(line + "\n")
        _log.write(log_data)
        _log.close()
    else:
        print("Archivo de registro no existe. Creando nuevo archivo CSV.")
        _log = open(logfile, "w")
        print("Archivo creado. Escribiendo datos...")
        _log.write(log_data)
        _log.close()
    time.sleep(sleep)
51,589 | J0seca/freshgrowpi | refs/heads/master | /configure.py | #!/usr/bin/python3
# --*-- coding: utf-8 --*--
from scripts.config.variables import vent_temp_max, vent_hum_max, ext_temp_max, ext_hum_max, luz_hora_encendido, luz_hora_apagado, correo_datos, frecuencia_correos
import os
import time
# Working copies of the persisted settings; the menu functions below mutate
# these and — presumably — guarda_configuracion() writes them back; confirm.
nvent_temp_max = vent_temp_max
nvent_hum_max = vent_hum_max
next_temp_max = ext_temp_max
next_hum_max = ext_hum_max
nluz_hora_encendido = luz_hora_encendido
nluz_hora_apagado = luz_hora_apagado
ncorreo_datos = correo_datos
nfrecuencia_correos = frecuencia_correos
#primero que todo:
def clr():
    """Clear the terminal via the Unix `clear` command."""
    os.system("clear")
def inicio():
    """Main interactive menu; dispatches to the configuration screens.

    Invalid input re-enters the menu recursively.
    """
    clr()
    print("""
---------------------*---------------------
___ _ ___ ___ _
| __| _ ___ __| |_ / __|_ _ _____ __ _| _ (_)
| _| '_/ -_|_-< ' \ (_ | '_/ _ \ V V / _/ |
|_||_| \___/__/_||_\___|_| \___/\_/\_/|_| |_|
---------------------*---------------------
Configuración:
1- Humedad y temperatura máxima ventilador.
2- Humedad y temperatura máxima extractor.
3- Fotoperiodo.
4- Notificación por correo electrónico.
5- Guardar nueva configuración.
6- Salir.
""")
    main_option = input("Ingrese opción: ")
    # Validate that the user typed an integer; re-show the menu otherwise.
    try:
        main_option = int(main_option)
    except ValueError:
        print("\n\n Opción incorrecta!")
        time.sleep(1)
        # NOTE(review): after this recursive call returns, execution falls
        # through to the if-chain below with main_option still a string, so
        # the else branch fires a second "incorrect" pass — confirm intent.
        inicio()
    if main_option == 1:
        configura_ventilador()
    elif main_option == 2:
        configura_extractor()
    elif main_option == 3:
        configura_fotoperiodo()
    elif main_option == 4:
        configura_correo()
    elif main_option == 5:
        guarda_configuracion()
    elif main_option == 6:
        clr()
        print("""
__ _ _
/ _|_ _ ___ __| |_ __ _ _ _ _____ __ ___ __(_)
| _| '_/ -_|_-< ' \/ _` | '_/ _ \ V V / '_ \ |
|_| |_| \___/__/_||_\__, |_| \___/\_/\_/| .__/_|
|___/ |_|
Saliendo del programa! """)
        time.sleep(1)
        exit()
    else:
        print("\n\n Opción incorrecta!*" , main_option)
        time.sleep(1)
        inicio()
def configura_ventilador():
    """Interactively update the fan trigger thresholds.

    Prompts for a new trigger temperature (1-44 °C) and humidity (1-98 %),
    storing them in the module-level nvent_temp_max / nvent_hum_max.
    Invalid input re-enters this function recursively from the top, so a
    typo on the humidity prompt also re-asks the temperature.
    Returns (nvent_temp_max, nvent_hum_max).
    """
    clr()
    print("""
 _ _ _ _
 __ _____ _ _| |_(_) |__ _ __| |___ _ _
 \ V / -_) ' \ _| | / _` / _` / _ \ '_|
 \_/\___|_||_\__|_|_\__,_\__,_\___/_|
 """)
    # Configure the trigger temperature:
    print("-"*25)
    print("Temperatura de encendido actual:", vent_temp_max, "°C")
    print("-"*25)
    global nvent_temp_max
    nvent_temp_max = input("Ingrese nueva temperatura de encendido: ")
    try:
        nvent_temp_max = int(nvent_temp_max)
        if (1 <= nvent_temp_max < 45):
            ##### ERROR vent_temp_max = nvent_temp_max
            print("Nueva temperatura registrada:", nvent_temp_max, "°C\n")
            time.sleep(2)
            #input("Enter para continuar...\n\n")
            #inicio()
        else:
            print("-"*25)
            print("Datos fuera de Rango. Rango permitido (1 °C a 44 °C)")
            time.sleep(3)
            configura_ventilador()
    except ValueError:
        print("-"*25)
        print("Error ingresando datos.")
        time.sleep(2)
        configura_ventilador()
    # Configure the trigger humidity:
    print("-"*25)
    print("Humedad de encendido actual:", vent_hum_max, "%")
    print("-"*25)
    global nvent_hum_max
    nvent_hum_max = input("Ingrese nueva humedad de encendido: ")
    try:
        nvent_hum_max = int(nvent_hum_max)
        if (1 <= nvent_hum_max < 99):
            print("Nueva humedad registrada:", nvent_hum_max, "%")
            time.sleep(2)
            input("\nEnter para continuar...")
            inicio()
        else:
            print("-"*25)
            print("Datos fuera de Rango. Rango permitido (1% a 99%)")
            time.sleep(3)
            configura_ventilador()
    except ValueError:
        print("-"*25)
        print("Error ingresando datos.")
        time.sleep(2)
        configura_ventilador()
    return nvent_temp_max, nvent_hum_max
def configura_extractor():
    """Interactively update the extractor trigger thresholds.

    Mirror of configura_ventilador(): prompts for trigger temperature
    (1-44 °C) and humidity (1-98 %), stores them in the module-level
    next_temp_max / next_hum_max, retries recursively on bad input and
    returns (next_temp_max, next_hum_max).
    """
    clr()
    print("""
 _ _
 _____ _| |_ _ _ __ _ __| |_ ___ _ _
 / -_) \ / _| '_/ _` / _| _/ _ \ '_|
 \___/_\_\\__|_| \__,_\__|\__\___/_|
 """)
    # Configure the trigger temperature:
    print("-"*25)
    print("Temperatura de encendido actual:", ext_temp_max, "°C")
    print("-"*25)
    global next_temp_max
    next_temp_max = input("Ingrese nueva temperatura de encendido: ")
    try:
        next_temp_max = int(next_temp_max)
        if (1 <= next_temp_max < 45):
            print("Nueva temperatura registrada:", next_temp_max, "°C\n")
            time.sleep(2)
            #input("Enter para continuar...\n\n")
        else:
            print("-"*25)
            print("Datos fuera de Rango. Rango permitido (1 °C a 44 °C)")
            time.sleep(3)
            configura_extractor()
    except ValueError:
        print("-"*25)
        print("Error ingresando datos.")
        time.sleep(2)
        configura_extractor()
    # Configure the trigger humidity:
    print("-"*25)
    print("Humedad de encendido actual:", ext_hum_max, "%")
    print("-"*25)
    global next_hum_max
    next_hum_max = input("Ingrese nueva humedad de encendido: ")
    try:
        next_hum_max = int(next_hum_max)
        if (1 <= next_hum_max < 99):
            print("Nueva humedad registrada:", next_hum_max, "%")
            time.sleep(2)
            input("\nEnter para continuar...")
            inicio()
        else:
            print("-"*25)
            print("Datos fuera de Rango. Rango permitido (1% a 99%)")
            time.sleep(3)
            configura_extractor()
    except ValueError:
        print("-"*25)
        print("Error ingresando datos.")
        time.sleep(2)
        configura_extractor()
    return next_temp_max, next_hum_max
def configura_fotoperiodo():
    """Interactively update the light on/off schedule.

    Prompts for the new switch-on and switch-off times, validates the
    strict HH:MM format (hour 0-23, minute 0-59, ':' separator, exactly
    5 characters), stores them in the module-level nluz_hora_encendido /
    nluz_hora_apagado, and returns both. Invalid input re-enters the
    function recursively from the top.
    """
    clr()
    print("""
 __ _ _ _
 / _|___| |_ ___ _ __ ___ _ _(_)___ __| |___
| _/ _ \ _/ _ \ '_ \/ -_) '_| / _ \/ _` / _ \\
|_| \___/\__\___/ .__/\___|_| |_\___/\__,_\___/
 |_|
 """)
    global nluz_hora_encendido
    print("-"*25)
    print("Hora de encendido registrada:", luz_hora_encendido)
    print("-"*25)
    nluz_hora_encendido = input("Ingrese nueva hora de encendido: ")
    # Validate the HH:MM format:
    try:
        hora = int(nluz_hora_encendido[0:2])
        minuto = int(nluz_hora_encendido[3:5])
        sep = nluz_hora_encendido[2]
        if(0 <= hora <= 23) and (0 <= minuto <= 59) and (sep == ":") and (len(nluz_hora_encendido) == 5):
            print("\nRegistrando nueva hora de endendido:", nluz_hora_encendido)
            time.sleep(3)
            #inicio()
        else:
            print("\nError de formato!, Ejemplo: 01:34")
            time.sleep(3)
            configura_fotoperiodo()
    except ValueError:
        print("\nError de formato!, Ejemplo: 01:34")
        time.sleep(3)
        configura_fotoperiodo()
    global nluz_hora_apagado
    print("-"*25)
    print("Hora de apagado registrada:", luz_hora_apagado)
    print("-"*25)
    nluz_hora_apagado = input("Ingrese nueva hora de apagado: ")
    # Validate the HH:MM format:
    try:
        hora = int(nluz_hora_apagado[0:2])
        minuto = int(nluz_hora_apagado[3:5])
        sep = nluz_hora_apagado[2]
        if(0 <= hora <= 23) and (0 <= minuto <= 59) and (sep == ":") and (len(nluz_hora_apagado) == 5):
            print("\nRegistrando nueva hora de apagado:", nluz_hora_apagado)
            time.sleep(3)
            input("\nEnter para continuar...")
            inicio()
        else:
            print("\nError de formato!, Ejemplo: 01:34")
            time.sleep(3)
            configura_fotoperiodo()
    except ValueError:
        print("\nError de formato!, Ejemplo: 01:34")
        time.sleep(3)
        configura_fotoperiodo()
    return nluz_hora_encendido, nluz_hora_apagado
def configura_correo():
    """Interactively update the notification e-mail address and frequency.

    The address check is shallow (an '@' after position 4 and any '.'),
    the frequency must be an int in 1-24 hours. Stores the values in the
    module-level ncorreo_datos / nfrecuencia_correos and returns both.
    """
    clr()
    print("""
 __ ___ _ _ _ _ ___ ___
 / _/ _ \ '_| '_/ -_) _ \\
 \__\___/_| |_| \___\___/
 """)
    global ncorreo_datos, nfrecuencia_correos
    print("-"*25)
    print("Dirección de correo registrada:", correo_datos)
    print("Frecuencia actual de correo diario (en horas):", frecuencia_correos)
    print("-"*25)
    ncorreo_datos = input("\nIngrese nuevo correo: ")
    # Shallow address sanity check:
    if (ncorreo_datos.find("@") > 4 and ncorreo_datos.find(".") > 0):
        print("Registrando nuevo correo:", ncorreo_datos)
        time.sleep(1)
    else:
        print("Error en formato de correo!")
        time.sleep(2)
        configura_correo()
    # Update the sending frequency:
    print("-"*25)
    nfrecuencia_correos = input("\nIngrese nueva frecuencia en horas (1 - 24): ")
    # Validate the entered value:
    try:
        nfrecuencia_correos = int(nfrecuencia_correos)
        if(1 <= nfrecuencia_correos <= 24):
            print("Nueva frecuencia registrada:", nfrecuencia_correos)
            input("Enter para continuar...")
            inicio()
        else:
            print("\nError en frecuencia ingresada. Debe ser entre 1 y 24 hrs.")
            input("Enter para reintentar...")
            configura_correo()
    except ValueError:
        print("\nError en frecuencia ingresada. Debe ser entre 1 y 24 hrs.")
        input("Enter para reintentar...")
        configura_correo()
    return ncorreo_datos, nfrecuencia_correos
def rellena(r):
    """Left-justify *r* (stringified) in a 30-character column.

    Used to align the "old -> new" columns of the configuration diff
    table. Values longer than 30 characters come back unpadded, matching
    the original ``30 - len`` arithmetic (a negative repeat count yields
    an empty padding string).
    """
    return str(r).ljust(30)
def guardando_datos():
    """Persist the pending configuration to scripts/config/variables.py.

    Rewrites the whole module as one assignment per line; string-valued
    settings (times, e-mail) are single-quoted, numbers are written bare.
    The last line carries no trailing newline, exactly like the original
    writer.
    """
    asignaciones = [
        "vent_temp_max = " + str(nvent_temp_max),
        "vent_hum_max = " + str(nvent_hum_max),
        "ext_temp_max = " + str(next_temp_max),
        "ext_hum_max = " + str(next_hum_max),
        "luz_hora_encendido = " + "'" + nluz_hora_encendido + "'",
        "luz_hora_apagado = " + "'" + nluz_hora_apagado + "'",
        "correo_datos = " + "'" + ncorreo_datos + "'",
        "frecuencia_correos = " + str(nfrecuencia_correos),
    ]
    var_file = open("./scripts/config/variables.py", "w")
    var_file.write("\n".join(asignaciones))
    var_file.close()
def reinicia_servicios():
    """Restart every climate-control systemd unit so the services reload
    the freshly written values from variables.py (requires sudo)."""
    os.system('sudo systemctl restart ext_control.service fotoperiodo.service clima_log.service vent_control.service')
def guarda_configuracion():
    """Show an old-vs-new diff of the settings, then save and restart.

    Compares each current module-level value against its pending n*-prefixed
    counterpart; if nothing changed, returns to the menu. Otherwise asks
    for confirmation ("Si"), writes the file via guardando_datos() and
    restarts the services.
    NOTE(review): `global correo_datos` below is declared but the variable
    is never assigned in this function -- presumably a leftover; confirm
    before removing.
    """
    clr()
    print("""
 ___ _ _
 / __|_ _ __ _ _ _ __| |__ _ _ _ __| |___
| (_ | || / _` | '_/ _` / _` | ' \/ _` / _ \\
 \___|\_,_\__,_|_| \__,_\__,_|_||_\__,_\___/
 """)
    print("Estos son los cambios realizados:\n")
    global correo_datos
    print(rellena("Datos Anteriores") + "-> Datos nuevos")
    print(30*"-" + "+" + 30*"-")
    cambios = 0
    # Diff each setting against its pending replacement:
    if(vent_temp_max != nvent_temp_max):
        print("\nTemperatura máxima de ventilador")
        print(rellena(vent_temp_max) + "-> " + str(nvent_temp_max))
        cambios = 1
    if(vent_hum_max != nvent_hum_max):
        print("\nHumedad máxima de ventilador")
        print(rellena(vent_hum_max) + "-> " + str(nvent_hum_max))
        cambios = 1
    if(ext_temp_max != next_temp_max):
        print("\nTemperatura máxima de extractor")
        print(rellena(ext_temp_max) + "-> " + str(next_temp_max))
        cambios = 1
    if(ext_hum_max != next_hum_max):
        print("\nHumedad máxima de extractor")
        print(rellena(ext_hum_max) + "-> " + str(next_hum_max))
        cambios = 1
    if(luz_hora_encendido != nluz_hora_encendido):
        print("\nHora encendido de luces")
        print(rellena(luz_hora_encendido) + "-> " + str(nluz_hora_encendido))
        cambios = 1
    if(luz_hora_apagado != nluz_hora_apagado):
        print("\nHora apagado de luces")
        print(rellena(luz_hora_apagado) + "-> " + str(nluz_hora_apagado))
        cambios = 1
    if(correo_datos != ncorreo_datos):
        print("\nCorreo para envío de datos")
        print(rellena(correo_datos) + "-> " + str(ncorreo_datos))
        cambios = 1
    if(frecuencia_correos != nfrecuencia_correos):
        print("\nFrecuencia de envío de datos")
        print(rellena(frecuencia_correos) + "-> " + str(nfrecuencia_correos))
        cambios = 1
    if(cambios == 0):
        input("No se encontraron diferencias. Enter para continuar...")
        #time.sleep(3)
        inicio()
    guarda = input("\nEscriba Si para guardar: ")
    if guarda.upper() == "SI":
        print("\nGuardando nuevos datos de configuración.")
        guardando_datos()
        print("\nDatos guardados!. Reiniciando servicios")
        reinicia_servicios()
        input("Nueva configuración aplicada. Enter para continuar...")
        inicio()
    else:
        input("No se han guardado los datos. Enter para continuar...")
        #time.sleep(3)
        inicio()
def main():
    """Entry point: show the interactive configuration menu."""
    inicio()
main()
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,590 | J0seca/freshgrowpi | refs/heads/master | /scripts/ext_control.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
#Automatiza extractores adicionales.
#Extractores se prenden cuando
#se supera cierta temperatura o humedad
import time
import Adafruit_DHT
import RPi.GPIO as GPIO
from config.variables import ext_temp_max, ext_hum_max
#configurando GPIO
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(25, GPIO.OUT)
#variables de sensor
sensor = Adafruit_DHT.DHT11
pin = 4
temp_max = ext_temp_max
hum_max = ext_hum_max
#funciones de encendido/apagado 1/0
def prende_extractor():
    """Energise the extractor relay (BCM pin 25 high)."""
    GPIO.output(25, True)
    #print(GPIO.input(25))
def apaga_extractor():
    """De-energise the extractor relay (BCM pin 25 low)."""
    GPIO.output(25, False)
    #print(GPIO.input(25))
def main():
    """Poll the DHT11 forever and drive the extractor relay.

    Every 30 s: read temperature/humidity; if either reaches its
    configured maximum while the extractor (pin 25) is off, run the
    extractor for 10 minutes, then switch it off. The extractor is also
    forced off at the end of every cycle.
    """
    while True:
        hum, temp = Adafruit_DHT.read_retry(sensor, pin)
        # Readings arrive as floats (or None on sensor failure); keep the
        # two leading integer digits for comparison with the thresholds.
        temp = str(temp)[0:2]
        # Was [0:1], which dropped the second digit of the humidity (e.g.
        # "45.0" -> "4"); the sibling consulta_estado.py uses [0:2].
        hum = str(hum)[0:2]
        print(temp, hum, temp_max, hum_max)
        estado_e = GPIO.input(25)
        try:
            if int(temp) >= int(temp_max) and int(estado_e) == 0:
                print("Temp muy alta. Prendiendo extractores.")
                prende_extractor()
                time.sleep(600) #extractores encendidos por 10 min.
                print("Apagando extractores.")
                apaga_extractor()
            elif int(hum) >= int(hum_max) and int(estado_e) == 0:
                print("Hum muy alta. Prendiendo extractores.")
                prende_extractor()
                time.sleep(600) #extractores encendidos por 10 min.
                print("Apagando extractores.")
                apaga_extractor()
            else:
                print("Estado correcto.")
        except (TypeError, ValueError):
            # A failed read yields None -> "No", which int() rejects; skip
            # the cycle. (The previous bare `except` also swallowed
            # KeyboardInterrupt, making the service impossible to Ctrl-C.)
            print("Error")
        print("Descansando")
        time.sleep(30)
        apaga_extractor()
main()
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,591 | J0seca/freshgrowpi | refs/heads/master | /flask/bin/recogedatos.py | #!/usr/bin/python3
from flask_reporte import *
from flask_log import *
#var_reporte = reporte()
#print(var_reporte)
var_log = flask_log()
print(var_log)
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,592 | J0seca/freshgrowpi | refs/heads/master | /flask/bin/log_historico.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import os
import pandas as pd
def lista_log():
    """Return the date stamp of every climate-log file.

    File names follow ``log_clima_<dd-mm-yy>.csv``; characters 10-17 of
    each name are the dd-mm-yy portion, which is what gets returned.
    """
    return [nombre[10:18] for nombre in os.listdir("/home/pi/freshgrowpi/log/")]
def consulta_log(fecha):
    """Load the climate log for *fecha* (dd-mm-yy) as a display-ready frame.

    Drops the redundant date column, appends the measurement units to the
    readings and renders the 0/1 relay flags as Off/On strings.
    """
    ruta = "/home/pi/freshgrowpi/log/log_clima_" + fecha + ".csv"
    df = pd.read_csv(ruta, sep=";", header=None)
    df.columns = ['Fecha', 'Hora', 'Temp', 'Hum', 'Vent', 'Ext', 'Luz']
    df = df.drop(columns='Fecha')
    # Relay columns must be object-typed before 0/1 becomes text; Hum is
    # normalised to int so "60 %" never shows a float.
    df = df.astype({"Vent": 'object', "Ext": 'object', "Luz": 'object', "Hum": 'int64'})
    df['Temp'] = df['Temp'].astype(str) + " °C"
    df['Hum'] = df['Hum'].astype(str) + " %"
    for columna in ('Vent', 'Ext', 'Luz'):
        df[columna] = df[columna].replace({1: 'On', 0: 'Off'})
    return df
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,593 | J0seca/freshgrowpi | refs/heads/master | /scripts/fotoperiodo.old.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import os
import time
import RPi.GPIO as GPIO
from config.variables import luz_hora_encendido, luz_hora_apagado
#configurando GPIO
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(21, GPIO.OUT)
#GPIO.input(21)
def comienza_dia():
    """Switch the grow lights on (BCM pin 21 high) and log the pin state."""
    GPIO.output(21, True)
    print("Comenzando el día!, Luces ON!", GPIO.input(21))
def comienza_noche():
    """Switch the grow lights off (BCM pin 21 low) and log the pin state."""
    GPIO.output(21, False)
    print("Terminando el día. Buenas noches", GPIO.input(21))
print("Encendiendo a las: ", luz_hora_encendido)
print("Apagando a las: ", luz_hora_apagado)
while True:
hora_actual = int(time.strftime("%H"))
minuto_actual = int(time.strftime("%M"))
hora_encendido = int(luz_hora_encendido[0:2])
minuto_encendido = int(luz_hora_encendido[3:5])
hora_apagado = int(luz_hora_apagado[0:2])
minuto_apagado = int(luz_hora_apagado[3:5])
print("Hora actual:", hora_actual)
print("Minuto actual:", minuto_actual)
print("Hora encendido:", hora_encendido)
print("Minuto encendido:", minuto_encendido)
print("Hora apagado:", hora_apagado)
print("Minuto apagado:", minuto_apagado)
if (hora_actual == hora_encendido) and (minuto_actual == minuto_encendido) and (GPIO.input(21) == 0):
comienza_dia()
if (hora_actual == hora_apagado) and (minuto_actual == minuto_apagado) and (GPIO.input(21) == 1):
comienza_noche()
else:
print("Luces en estado correcto:", GPIO.input(21))
time.sleep(30)
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,594 | J0seca/freshgrowpi | refs/heads/master | /scripts/ajustar_hora.py | #!/usr/bin/python3
import os
os.system('sudo timedatectl set-timezone America/Santiago')
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,595 | J0seca/freshgrowpi | refs/heads/master | /flask/bin/consulta_estado.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import RPi.GPIO as GPIO
import time
import Adafruit_DHT
import sys
#importando datos del archivo de variables
#primero agregamos directorio a los directorios del sistema
sys.path.append('/home/pi/freshgrowpi/scripts/config/')
from variables import *
sensor = Adafruit_DHT.DHT11
pin = 4
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(25,GPIO.OUT) #extractor
GPIO.setup(24,GPIO.OUT) #ventilador
GPIO.setup(21,GPIO.OUT) #luces
def consulta():
    """Snapshot the whole system state for the Flask /control page.

    Reads the three relay pins and the DHT11 sensor and returns a flat
    tuple consumed positionally by the view:
    (estado_ext, estado_vent, estado_luces, temp_actual, hum_actual,
     hora_actual, correo_datos, vent_temp_max, vent_hum_max, ext_temp_max,
     ext_hum_max, luz_hora_encendido, luz_hora_apagado, correo_datos,
     frecuencia_correos).
    NOTE(review): correo_datos appears at both index 6 and 13 -- the
    /control view skips index 6; confirm before deduplicating.
    """
    # Extractor relay state:
    if GPIO.input(25) == 1:
        estado_ext = "on"
    elif GPIO.input(25) == 0:
        estado_ext = "off"
    else:
        estado_ext = "Error"
    # Fan relay state:
    if GPIO.input(24) == 1:
        estado_vent = "on"
    elif GPIO.input(24) == 0:
        estado_vent = "off"
    else:
        estado_vent = "Error"
    # Lights relay state:
    if GPIO.input(21) == 1:
        estado_luces = "on"
    elif GPIO.input(21) == 0:
        estado_luces = "off"
    else:
        estado_luces = "Error"
    try:
        hum, temp = Adafruit_DHT.read_retry(sensor, pin)
        # Keep only the two leading digits of each float reading.
        temp_actual = str(temp)[0:2]
        hum_actual = str(hum)[0:2]
        hora_actual = time.strftime("%H:%M")
    except:
        temp_actual = "Error"
        hum_actual = "Error"
        hora_actual = "Error"
    return estado_ext, estado_vent, estado_luces, temp_actual, hum_actual, hora_actual, correo_datos, vent_temp_max, vent_hum_max, ext_temp_max, ext_hum_max, luz_hora_encendido, luz_hora_apagado, correo_datos, frecuencia_correos
#print(consulta())
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,596 | J0seca/freshgrowpi | refs/heads/master | /flask/bin/flask_log.py | #!/usr/bin/python3
# -*- coding: utf8 -*-
import os
import time
import pandas as pd
import numpy as np
def procesa_datos(archivo_log):
    """Parse one semicolon-separated climate log into a display DataFrame.

    Expected columns in the file: Fecha;Hora;Temp;Hum;Vent;Ext;Luz. The
    date column is dropped, readings get their units appended, and the
    0/1 relay flags are rendered as Off/On.
    """
    tabla = pd.read_csv(archivo_log, sep=";", header=None,
                        names=['Fecha', 'Hora', 'Temp', 'Hum', 'Vent', 'Ext', 'Luz'])
    tabla = tabla.drop(columns='Fecha')
    tabla['Temp'] = tabla['Temp'].astype(str) + " °C"
    tabla['Hum'] = tabla['Hum'].astype(str) + " %"
    for rele in ('Vent', 'Ext', 'Luz'):
        tabla[rele] = tabla[rele].replace({1: 'On', 0: 'Off'})
    return tabla
def flask_log():
    """Return today's climate log as a DataFrame, or a header list.

    When today's log file does not exist yet, the plain column-name list
    (with the sentinel 'Luz_Err' final entry) is returned so the template
    still has something to render.
    """
    archivo_log = "/home/pi/freshgrowpi/log/log_clima_" + time.strftime("%d-%m-%y") + ".csv"
    if not os.path.isfile(archivo_log):
        return ['Fecha', 'Hora', 'Temp', 'Hum', 'Vent', 'Ext', 'Luz_Err']
    return procesa_datos(archivo_log)
def test():
    """Manual smoke test: print today's log path, handle and parsed frame.

    NOTE(review): the file handle is opened only to prove the file exists
    and is never closed -- acceptable for a throwaway debug helper.
    """
    archivo_log = "/home/pi/freshgrowpi/log/log_clima_" + time.strftime("%d-%m-%y") + ".csv"
    arch = open(archivo_log,"r")
    print(archivo_log)
    print(arch)
    print(procesa_datos(archivo_log))
test() #para pruebas
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,597 | J0seca/freshgrowpi | refs/heads/master | /flask/flaskgrowpi.py | #!/usr/bin/python3
from flask import Flask as F
from flask import render_template, redirect
from bin.flask_log import *
from bin.flask_reporte import *
from bin.consulta_estado import *
from bin.flask_control import *
from bin.log_historico import *
app = F(__name__)
####INDEX
@app.route('/')
def flaskgrowpi():
#return redirect('/reporte')
return render_template('inicio.html')
####PAGINA REPORTE
@app.route('/reporte')
def reporte_web():
datos_reporte = reporte()
return render_template('reporte.html',
temp_min=datos_reporte[0],
temp_max=datos_reporte[1],
temp_prom=datos_reporte[2],
hum_min=datos_reporte[3],
hum_max=datos_reporte[4],
hum_prom=datos_reporte[5],
prop_vent=datos_reporte[6],
prop_ext=datos_reporte[7])
estilo_tabla = """
<style type="text/css" media="screen">
table.tablalog {
font-family: "Trebuchet MS", Arial;
font-size: 1em;
background: #6F9D76;
border-collapse: collapse;
text-align: left;
border: 2px solid purple;
width: 100%;
}
th {
color: #68D21A;
background-color: #64006C;
}
td {
color: #64006C;
}
td.on {
color:#68D21A;
background-color: #64006C;
}
}
</style>
"""
####PAGINA LOG
@app.route('/log')
def log():
fecha = time.strftime("%d-%m-%y")
datos_log = flask_log()
try:
#intenta modificar estilo, si no puede es por que dataframe está vacío
datos = estilo_tabla + datos_log.to_html(index=False).replace("dataframe", "tablalog")
datos = datos.replace("<td>On</td>", "<td class='on'><b>On</b></td>")
except:
datos = estilo_tabla + """
<table border="1" class="tablalog">
<thead>
<tr style="text-align: right;">
<th>Hora</th>
<th>Temp</th>
<th>Hum</th>
<th>Vent</th>
<th>Ext</th>
<th>Luz</th>
</tr>
</thead>"""
return render_template('log.html', fecha=fecha, datos_log=datos)
###LOG HISTORICO
@app.route('/lista_log')
def listando_log():
lista = lista_log()
return render_template('lista_log.html', lista=lista)
@app.route('/log_consulta/<fecha>', methods=['GET', 'POST'])
def log_consulta(fecha):
datos = consulta_log(fecha)
datos = estilo_tabla + datos.to_html(index=False).replace("dataframe", "tablalog")
datos = datos.replace("<td>On</td>", "<td class='on'><b>On</b></td>")
return render_template('log.html', fecha=fecha, datos_log=datos)
####PAGINA CONTROL
@app.route('/control')
def control():
datos_actualizados = consulta()
return render_template('control.html', estado_ext=datos_actualizados[0],
estado_vent=datos_actualizados[1],
estado_luces=datos_actualizados[2],
temp_actual=datos_actualizados[3],
hum_actual=datos_actualizados[4],
hora_actual=datos_actualizados[5],
vent_temp_max=datos_actualizados[7],
vent_hum_max=datos_actualizados[8],
ext_temp_max=datos_actualizados[9],
ext_hum_max=datos_actualizados[10],
luz_hora_encendido=datos_actualizados[11],
luz_hora_apagado=datos_actualizados[12])
@app.route('/vent_on')
def vent_on():
prende_ventilador()
return redirect('/control')
@app.route('/vent_off')
def vent_off():
apaga_ventilador()
return redirect('/control')
@app.route('/ext_on')
def ext_on():
prende_extractor()
return redirect('/control')
@app.route('/ext_off')
def ext_off():
apaga_extractor()
return redirect('/control')
@app.route('/luces_on')
def luces_on():
prende_luces()
return redirect('/control')
@app.route('/luces_off')
def luces_off():
apaga_luces()
return redirect('/control')
if __name__ == '__main__':
app.run(host='192.168.0.13', port=8888, debug=True)
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,598 | J0seca/freshgrowpi | refs/heads/master | /flask/bin/flask_control.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
#controla dispositivos desde flask
import RPi.GPIO as GPIO
#configurando GPIO
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(25, GPIO.OUT)
GPIO.setup(24, GPIO.OUT)
GPIO.setup(21, GPIO.OUT)
#funciones de encendido/apagado 1/0
def prende_ventilador():
GPIO.output(24, True)
def apaga_ventilador():
GPIO.output(24, False)
def prende_extractor():
GPIO.output(25, True)
def apaga_extractor():
GPIO.output(25, False)
def prende_luces():
GPIO.output(21, True)
def apaga_luces():
GPIO.output(21, False)
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,599 | J0seca/freshgrowpi | refs/heads/master | /scripts/fotoperiodo.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import os
import time
import datetime
import RPi.GPIO as GPIO
from config.variables import luz_hora_encendido, luz_hora_apagado
#configurando GPIO
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(21, GPIO.OUT)
#GPIO.input(21)
def comienza_dia():
    """Switch the grow lights on (BCM pin 21 high) and log the pin state."""
    GPIO.output(21, True)
    print("Comenzando el día!, Luces ON!", GPIO.input(21))
def comienza_noche():
    """Switch the grow lights off (BCM pin 21 low) and log the pin state."""
    GPIO.output(21, False)
    print("Terminando el día. Buenas noches", GPIO.input(21))
print("Encendiendo a las: ", luz_hora_encendido)
print("Apagando a las: ", luz_hora_apagado)
hora_encendido = luz_hora_encendido
hora_apagado = luz_hora_apagado
hora_encendido = datetime.datetime.strptime(hora_encendido, '%H:%M')
hora_apagado = datetime.datetime.strptime(hora_apagado, '%H:%M')
#haciendo rango de horas de día en minutos segun rango:
rango_encendido = []
hora = hora_encendido
while hora != hora_apagado:
#agregamos hora a lista de rango:
rango_encendido.append(hora)
#print(hora)
#sumamos un minuto:
hora = hora + datetime.timedelta(minutes=1)
#aca buscamos el limite del otro día, ya que el horario puede pasar las 00:00
#por lo que se reinicia a las 00:00 del día 1
limite = "00:00"
limite = datetime.datetime.strptime(limite, '%H:%M') +datetime.timedelta(days=1)
if hora == limite:
hora = "00:00" #reseteamos a día 1
hora = datetime.datetime.strptime(hora, '%H:%M') #aplicamos formato
#time.sleep(0.2)
#verificando luces prendidas
while True:
hora_consulta = datetime.datetime.now().strftime("%H:%M")
hora_consulta = datetime.datetime.strptime(hora_consulta, '%H:%M')
if hora_consulta in rango_encendido:
if (GPIO.input(21) == 0):
comienza_dia()
else:
print("Estado de luces correcto.")
else:
if (GPIO.input(21) == 1):
comienza_noche()
else:
print("Estado de luces correcto.")
time.sleep(58)
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,600 | J0seca/freshgrowpi | refs/heads/master | /scripts/envio_log.py | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import smtplib
import time
import pandas as pd
#from email.mime.text import MIMEText
#from email.mime.multipart import MiMEMultipart
from config.variables import correo_datos, frecuencia_correos
horas_espera = int(frecuencia_correos) * 3600
smtp_server= 'smtp.gmail.com'
port = 587 #465
smtp_user = 'freshgrowpi@gmail.com'
smtp_pass = '************'
html="""
<style>h1.p{color: blue;}</style>
<h1 class="p">
Informe clima
</h1>
"""
fecha = time.strftime("%d-%m-%y")
text = fecha + """
Datos del día:
"""
mensaje = text
#mensaje = MIMEText(mensaje, "html")
#mensaje = MIMEMultipart()
#mensaje["Subjet"] = "Control clima"
s = smtplib.SMTP(smtp_server + ":" + str(port))
s.starttls()
s.login(smtp_user,smtp_pass)
s.sendmail(smtp_user, correo_datos, mensaje)
s.quit()
print('Correo enviado.')
| {"/configure.py": ["/scripts/config/variables.py"]} |
51,644 | ParentJA/strangedeck | refs/heads/master | /strangedeck/forms.py | __author__ = 'jason.parent@carneylabs.com (Jason Parent)'
# Django imports...
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import AuthenticationForm
User = get_user_model()
class BootstrapMixin(object):
    """Form mixin that tags every widget with Bootstrap's form-control class.

    Pass ``use_bootstrap=False`` (keyword only) to leave widgets untouched.
    """

    def __init__(self, *args, **kwargs):
        # `use_bootstrap` must be popped from kwargs rather than declared
        # as the first positional parameter: Django forms are routinely
        # constructed with a positional first argument (``LogInForm(request)``,
        # ``SignUpForm(request.POST)``), which the old signature silently
        # captured as the flag instead of forwarding to the form.
        use_bootstrap = kwargs.pop('use_bootstrap', True)
        super(BootstrapMixin, self).__init__(*args, **kwargs)
        if use_bootstrap:
            for key in self.fields:
                self.fields[key].widget.attrs.update({
                    'class': 'form-control'
                })
class SignUpForm(BootstrapMixin, forms.ModelForm):
    """Registration form: name + e-mail plus a double-entry password.

    The e-mail doubles as the username (set in save()); uniqueness is
    enforced in clean_email and the two password fields must match.
    """

    error_messages = {
        'duplicate_email': 'A user with that email already exists.',
        'password_mismatch': 'The two password fields didn\'t match.'
    }

    password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
    password2 = forms.CharField(
        label='Password Confirmation',
        widget=forms.PasswordInput,
        help_text='Enter your password again.'
    )

    class Meta:
        model = User
        fields = ('first_name', 'last_name', 'email')

    def clean_email(self):
        """Reject an e-mail that already belongs to an existing user."""
        email = self.cleaned_data['email']

        try:
            User.objects.get(email=email)
        except User.DoesNotExist:
            # No clash: the address is free to use.
            return email

        raise forms.ValidationError(
            self.error_messages['duplicate_email'],
            code='duplicate_email'
        )

    def clean_password2(self):
        """Ensure both password entries are present and identical."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')

        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            )

        return password2

    def save(self, commit=True):
        """Create the user with email as username and a hashed password."""
        user = super(SignUpForm, self).save(commit=False)
        user.username = self.cleaned_data['email']
        user.set_password(self.cleaned_data['password1'])

        if commit:
            user.save()

        return user
class LogInForm(BootstrapMixin, AuthenticationForm):
    """Standard Django authentication form with Bootstrap widget styling."""
    pass
51,645 | ParentJA/strangedeck | refs/heads/master | /ninetynine/urls.py | __author__ = 'jason.a.parent@gmail.com (Jason Parent)'
# Django imports...
from django.conf.urls import patterns
from django.conf.urls import url
urlpatterns = patterns('ninetynine.views',
url(r'^home/$', 'home', name='home'),
url(r'^game/$', 'game', name='game'),
) | {"/strangedeck/views.py": ["/strangedeck/forms.py"]} |
51,646 | ParentJA/strangedeck | refs/heads/master | /strangedeck/urls.py | __author__ = 'jason.a.parent@gmail.com (Jason Parent)'
# Django imports...
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^$', 'strangedeck.views.home', name='home'),
url(r'^sign_up/$', 'strangedeck.views.sign_up', name='sign_up'),
url(r'^log_out/$', 'strangedeck.views.log_out', name='log_out'),
url(r'^ninetynine/', include('ninetynine.urls', namespace='ninetynine')),
url(r'^admin/', include(admin.site.urls)),
) | {"/strangedeck/views.py": ["/strangedeck/forms.py"]} |
51,647 | ParentJA/strangedeck | refs/heads/master | /ninetynine/views.py | __author__ = 'jason.a.parent@gmail (Jason Parent)'
# Django imports...
from django.shortcuts import render
def home(request):
    """Render the ninetynine landing page."""
    return render(request, 'ninetynine/home.html')
def game(request):
    """Render the ninetynine game page."""
    return render(request, 'ninetynine/game.html')
51,648 | ParentJA/strangedeck | refs/heads/master | /strangedeck/backends.py | __author__ = 'jason.parent@carneylabs.com (Jason Parent)'
class GoogleBackend(object):
"""Creates a user with a Google account."""
pass
class FacebookBackend(object):
"""Creates a user with a Facebook account."""
pass
class TwitterBackend(object):
"""Creates a user with a Twitter account."""
pass | {"/strangedeck/views.py": ["/strangedeck/forms.py"]} |
51,649 | ParentJA/strangedeck | refs/heads/master | /ninetynine/tests/test_views.py | __author__ = 'jason.a.parent@gmail.com (Jason Parent)'
# Django imports...
from django.test import TestCase
class HomeTest(TestCase):
def test_home_view_renders_home_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'ninetynine/home.html') | {"/strangedeck/views.py": ["/strangedeck/forms.py"]} |
51,650 | ParentJA/strangedeck | refs/heads/master | /strangedeck/views.py | __author__ = 'jason.parent@carneylabs.com (Jason Parent)'
# Django imports...
from django.contrib import messages
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.shortcuts import render
# Local imports...
from .forms import LogInForm
from .forms import SignUpForm
def sign_up(request):
    """Render the registration form; create and log in the user on success.

    GET shows an empty SignUpForm; a valid POST saves the user and hands
    off to log_in(), which redirects to home.
    """
    form = SignUpForm()

    if request.method == 'POST':
        form = SignUpForm(data=request.POST)

        if form.is_valid():
            form.save()
            username = form.cleaned_data['email']
            password = form.cleaned_data['password1']

            # Log in new user...
            return log_in(request, username, password)

    return render(request, 'strangedeck/sign_up.html', {
        'form': form
    })
def log_in(request, username, password):
    """Authenticate and start a session, flashing an error on failure.

    Always redirects to the strangedeck home page regardless of outcome.
    """
    user = authenticate(username=username, password=password)

    if user is not None:
        if user.is_active:
            login(request, user)
        else:
            messages.error(request, 'Account disabled.')
    else:
        messages.error(request, 'Invalid login.')

    return redirect(reverse('home', current_app='strangedeck'))
@login_required
def log_out(request):
    """End the current session and return to the public home page."""
    logout(request)

    return redirect(reverse('home', current_app='strangedeck'))
def home(request):
    """Show the dashboard to authenticated users, the log-in form otherwise.

    POSTs are treated as log-in attempts; both valid and invalid attempts
    redirect back to home (with a flashed error when invalid) rather than
    re-rendering the bound form.
    """
    if request.user.is_authenticated():
        return render(request, 'strangedeck/home.html')
    else:
        form = LogInForm(request)

        if request.method == 'POST':
            form = LogInForm(data=request.POST)

            if form.is_valid():
                login(request, form.get_user())
            else:
                messages.error(request, 'Invalid login.')

            return redirect(reverse('home', current_app='strangedeck'))

        return render(request, 'strangedeck/log_in.html', {
            'form': form
        })
51,651 | ParentJA/strangedeck | refs/heads/master | /functional_tests.py | __author__ = 'jason.a.parent@gmail.com (Jason Parent)'
# Standard library imports...
import unittest
# Third-party imports...
from selenium import webdriver
class NewVisitorTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_can_visit_the_homepage_and_start_a_new_game(self):
# Jason navigates to the Strangedeck homepage...
self.browser.get('http://localhost:8100')
# He notices that the page title says the brand name...
self.assertIn('Strangedeck', self.browser.title)
# He also notices that the header says the brand name too...
header_text = self.browser.find_element_by_tag_name('h1').text
self.assertIn('Strangedeck', header_text)
# He notices a button that says 'Start a new game'...
start_button = self.browser.find_element_by_id('id-start-button')
self.assertEqual('Start a new game', start_button.text)
# He presses the 'Start a new game' button and it disappears...
start_button.click()
self.assertFalse(start_button.is_displayed())
# He notices that a new view appears that asks him to enter his name
# and then press a button labeled 'Play'...
name_input = self.browser.find_element_by_id('id-name-input')
self.assertTrue(name_input)
if __name__ == '__main__':
unittest.main() | {"/strangedeck/views.py": ["/strangedeck/forms.py"]} |
51,652 | quakkels/rssdiscoveryengine | refs/heads/master | /tests/test_helpers.py | import pytest
import rssfinderasync.rssfinderhelpers as helpers
def test_build_possible_rss_url():
assert helpers.build_possible_rss_url('mailto:rdengine@example.invalid') is None
assert helpers.build_possible_rss_url('https://example.invalid/') == 'https://example.invalid/feed'
assert helpers.build_possible_rss_url('https://example.invalid/subpath/') == 'https://example.invalid/feed'
assert helpers.build_possible_rss_url('https://example.invalid/?q=5') == 'https://example.invalid/feed'
assert helpers.build_possible_rss_url('https://example.invalid/#fragment') == 'https://example.invalid/feed'
def test_add_protocol_urlprefix():
    """add_protocol_urlprefix resolves relative/protocol-relative feed paths against the blog URL."""
    resolved = 'https://blog.example.com/path/feed'
    cases = [
        ('https://blog.example.com', '//blog.example.com/path/feed', resolved),
        ('https://blog.example.com/', '//blog.example.com/path/feed', resolved),
        ('https://blog.example.com/index.xyz', '//blog.example.com/path/feed', resolved),
        ('https://blog.example.com', '/path/feed', resolved),
        ('https://blog.example.com/index.xyz', '/path/feed', resolved),
        ('https://blog.example.com', 'path/feed', resolved),
        ('https://blog.example.com/slug/#asdf', 'path/feed', 'https://blog.example.com/slug/path/feed'),
        ('https://blog.example.com?dsa=asd', 'path/feed', resolved),
        ('https://blog.example.com/index.xyz', 'path/feed', resolved),
        ('https://blog.example.com/index.xyz?sdf=dsa', 'path/feed', resolved),
    ]
    for base, rel, expected in cases:
        assert helpers.add_protocol_urlprefix(base, rel) == expected
# Content-Type header values and whether they denote a feed.
FEED_CONTENT_TYPE_CASES = [
    ('application/rss', True),
    ('application/rss; charset=UTF-8', True),
    ('application/atom', True),
    ('application/xml', True),
    ('text/html', False),
]


@pytest.mark.parametrize('content_type, expected', FEED_CONTENT_TYPE_CASES)
def test_is_feed_content_type(content_type, expected):
    """Feed MIME types (rss/atom/xml) are recognised; HTML is not."""
    result = helpers.is_feed_content_type(content_type)
    assert result is expected
| {"/tests/test_helpers.py": ["/rssfinderasync/rssfinderhelpers.py"], "/rssfinderasync/rfasync.py": ["/rssdiscoveryengine_app/headers.py"], "/rssfinderasync/rssfinderhelpers.py": ["/rssdiscoveryengine_app/headers.py"]} |
51,653 | quakkels/rssdiscoveryengine | refs/heads/master | /rssdiscoveryengine_app/__init__.py | from flask import Flask
def create_app():
    """Application factory: build the Flask app and register the home blueprint."""
    application = Flask(__name__)
    # Imported here to avoid a circular import at module load time.
    from . import home
    application.register_blueprint(home.bp)
    return application
51,654 | quakkels/rssdiscoveryengine | refs/heads/master | /setup.py | from setuptools import find_packages, setup
# Packaging metadata for the 'tiaamd' distribution.
setup(
    name='tiaamd',
    version='0.1.2',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    # Runtime dependencies: web layer (flask/gunicorn), HTML/feed parsing
    # (beautifulsoup4/feedparser), and HTTP clients (requests/aiohttp).
    # NOTE(review): 'asyncio' is part of the standard library since 3.4 and
    # should not be listed as a pip dependency.
    install_requires=[
        'flask',
        'beautifulsoup4',
        'feedparser',
        'requests',
        'aiohttp',
        'asyncio',
        'gunicorn'
    ],
    python_requires='>=3.7'
)
| {"/tests/test_helpers.py": ["/rssfinderasync/rssfinderhelpers.py"], "/rssfinderasync/rfasync.py": ["/rssdiscoveryengine_app/headers.py"], "/rssfinderasync/rssfinderhelpers.py": ["/rssdiscoveryengine_app/headers.py"]} |
51,655 | quakkels/rssdiscoveryengine | refs/heads/master | /rssfinderasync/rfasync.py | import asyncio
import aiohttp
import feedparser
from rssdiscoveryengine_app.headers import HTTP_HEADERS
from rssfinderasync import rssfinderhelpers as helpers
async def fetch(blog_url, session):
    """Locate and parse the RSS/Atom feed for a single blog URL.

    Downloads the blog page, looks for a feed <link> in its HTML, falls back
    to guessing '<origin>/feed', then fetches and parses the candidate feed.
    Returns the unescaped feedparser feed object, or None on any failure.
    """
    rss_url = None
    result = None
    try:
        async with session.get(blog_url, timeout=4) as response:
            result = await response.read()
    except Exception as e:
        print(f"<Exception>{e}</Exception><blog_url>{blog_url}</blog_url>")
    if result:
        rss_url = helpers.find_rss_url_in_html(result)
    if not rss_url:
        rss_url = helpers.build_possible_rss_url(blog_url)
    # BUG FIX: build_possible_rss_url returns None for non-http(s) URLs;
    # without this guard urljoin() inside add_protocol_urlprefix raises
    # on a None rss_url.
    if not rss_url:
        return
    rss_url = helpers.add_protocol_urlprefix(blog_url, rss_url)
    try:
        async with session.get(rss_url) as response:
            if response.status != 200 \
                    or not helpers.is_feed_content_type(response.headers["Content-Type"]):
                return
            feed = feedparser.parse(await response.read())
            if feed.bozo > 0:
                # bozo flags malformed XML; skip feeds we cannot trust.
                print(f"BOZO FOUND: {rss_url}")
                return
            return helpers.unescape_feed(feed.feed)
    except Exception as e:
        print(f"<Exception>{e}</Exception><blog_url>{blog_url}</blog_url>")
async def fetch_bound_async(sem, url, session):
    """Run fetch() while holding the semaphore, to cap concurrent requests."""
    async with sem:
        return await fetch(url, session)
async def run(urls):
    """Probe every URL for a feed concurrently (at most 1000 in flight)."""
    sem = asyncio.Semaphore(1000)
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout, headers=HTTP_HEADERS) as session:
        tasks = [
            asyncio.ensure_future(fetch_bound_async(sem, url, session))
            for url in urls
        ]
        return await asyncio.gather(*tasks)
def initiate_finder(blog_url):
    """Entry point: given a blog URL, discover feeds of the blogs it links to.

    Reads the blog's own feed, collects every outbound link from its entries,
    then concurrently probes each linked site for an RSS/Atom feed. Returns
    an iterable of unique feed objects, or None if nothing could be read.
    """
    blog_url = blog_url.strip()
    html = helpers.get_response_content(blog_url)
    if not html:
        return
    rss_url = helpers.find_rss_url_in_html(html)
    if not rss_url:
        rss_url = helpers.build_possible_rss_url(blog_url)
    # BUG FIX: guard against a None fallback (non-http(s) input) before
    # handing it to urljoin via add_protocol_urlprefix.
    if not rss_url:
        return
    rss_url = helpers.add_protocol_urlprefix(blog_url, rss_url)
    urls = helpers.get_urls_from_rss_feed(rss_url)
    if not urls:
        return
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        result = loop.run_until_complete(asyncio.ensure_future(run(urls)))
    finally:
        # BUG FIX: the original leaked one event loop per request; close it.
        loop.close()
    return helpers.find_unique_results(result)
| {"/tests/test_helpers.py": ["/rssfinderasync/rssfinderhelpers.py"], "/rssfinderasync/rfasync.py": ["/rssdiscoveryengine_app/headers.py"], "/rssfinderasync/rssfinderhelpers.py": ["/rssdiscoveryengine_app/headers.py"]} |
51,656 | quakkels/rssdiscoveryengine | refs/heads/master | /rssdiscoveryengine_app/home.py | from flask import (
Blueprint,
render_template,
abort,
request,
flash
)
from jinja2 import TemplateNotFound
from rssfinderasync import rfasync
bp = Blueprint('home', __name__,
template_folder='templates')
@bp.route('/', methods=(['GET']))
def index():
    """Render the search page; run the feed finder when a valid blog_url is given."""
    results = None
    blog_url = request.args.get('blog_url')
    print(f'blog_url: {blog_url}')
    if blog_url is None:
        blog_url = ''
    elif is_blog_url_valid(blog_url):
        results = rfasync.initiate_finder(blog_url)
        print("finished getting results")
    return render_template('home.html', blog_url=blog_url, results=results)
def is_blog_url_valid(blog_url):
    """A usable blog URL starts with http/https (case-insensitive) and contains '://'."""
    lowered = blog_url.lower()
    return lowered.startswith("http") and "://" in blog_url
51,657 | quakkels/rssdiscoveryengine | refs/heads/master | /rssdiscoveryengine_app/headers.py | USER_AGENT = 'RSS Discovery Engine 0.1'
# Default headers attached to every outbound HTTP request made by the
# crawler/finder modules.
HTTP_HEADERS = {
    'User-Agent': USER_AGENT,
}
| {"/tests/test_helpers.py": ["/rssfinderasync/rssfinderhelpers.py"], "/rssfinderasync/rfasync.py": ["/rssdiscoveryengine_app/headers.py"], "/rssfinderasync/rssfinderhelpers.py": ["/rssdiscoveryengine_app/headers.py"]} |
51,658 | quakkels/rssdiscoveryengine | refs/heads/master | /rssfinderasync/rssfinderhelpers.py | from urllib.parse import urljoin, urlparse
import feedparser
import requests
import html
from bs4 import BeautifulSoup
from rssdiscoveryengine_app.headers import HTTP_HEADERS
def get_response_content(url):
    """GET `url` and return the response body as bytes, or None on any failure.

    Failures include network/transport errors and non-OK status codes.
    """
    try:
        response = requests.get(url, headers=HTTP_HEADERS)
    except requests.RequestException:
        # BUG FIX: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt and SystemExit.
        return
    if not response.ok:
        return
    return response.content
def find_rss_url_in_html(html):
    """Return the href of the first RSS (preferred) or Atom <link> tag, else None."""
    soup = BeautifulSoup(html, "html.parser", from_encoding="iso-8859-1")
    for feed_type in ("application/rss+xml", "application/atom+xml"):
        link_tag = soup.find("link", {"type": feed_type})
        if link_tag is not None:
            return link_tag.get("href")
    return None
def build_possible_rss_url(url):
    """Guess a feed location for `url`: '<scheme>://<host>/feed'.

    Returns None for anything that is not an http(s) URL.
    """
    parsed = urlparse(url)
    if parsed.scheme not in ('http', 'https'):
        return None
    # Drop query-string & fragment noise and point at the conventional /feed.
    return parsed._replace(path='/feed', query='', fragment='').geturl()
def add_protocol_urlprefix(blog_url, rss_url):
    """Resolve a possibly relative or protocol-relative feed URL against the blog URL."""
    resolved = urljoin(blog_url, rss_url)
    return resolved
def get_urls_from_rss_feed(rss_url):
    """Parse the feed at `rss_url` and collect every http(s) link found in its
    entries' HTML. Returns None when the feed is malformed (bozo)."""
    feed = feedparser.parse(rss_url, request_headers=HTTP_HEADERS)
    if feed.bozo > 0:
        return
    urls = []
    for entry in feed.entries:
        for anchor in find_anchors(entry):
            href = anchor.get("href")
            if is_valid_url(href):
                urls.append(href)
    return urls
def find_anchors(entry):
    # Collect every <a> tag from a feed entry's HTML: always from the
    # description, plus each content block when the entry has one.
    soup = BeautifulSoup(entry.description, "html.parser")
    anchors = soup.find_all('a')
    # feedparser's FeedParserDict still exposes the Python-2-style has_key();
    # NOTE(review): `'content' in entry` would be the modern spelling.
    if entry.has_key('content'):
        for content in entry.content:
            soup = BeautifulSoup(content.value, "html.parser")
            anchors += soup.find_all('a')
    return anchors
def is_valid_url(url):
    """Truthy for a non-empty URL starting with 'http'; falsy input is returned as-is."""
    if not url:
        return url
    return url.startswith("http")
def is_feed_content_type(content_type):
    """True when the Content-Type header denotes an RSS/Atom/XML feed."""
    # str.startswith accepts a tuple: one call covers all three MIME prefixes
    # (charset suffixes like '; charset=UTF-8' still match).
    feed_prefixes = ("application/rss", "application/atom", "application/xml")
    return content_type.startswith(feed_prefixes)
# NOTE: mutates the feedparser object passed in.
def unescape_feed(feed):
    """Decode HTML entities in the feed's title fields, in place.

    Returns the same (mutated) feed object for convenience.
    """
    if "title" in feed:
        feed["title"] = html.unescape(feed["title"])
    if "title_detail" in feed:
        detail = feed["title_detail"]
        if "value" in detail:
            detail["value"] = html.unescape(detail["value"])
    return feed
def find_unique_results(results):
    """Drop None entries and deduplicate feed results by their `link` attribute.

    The first result seen for each link wins; returns a dict view of survivors.
    """
    unique = {}
    for result in results:
        if result is None:
            continue
        unique.setdefault(result.link, result)
    return unique.values()
| {"/tests/test_helpers.py": ["/rssfinderasync/rssfinderhelpers.py"], "/rssfinderasync/rfasync.py": ["/rssdiscoveryengine_app/headers.py"], "/rssfinderasync/rssfinderhelpers.py": ["/rssdiscoveryengine_app/headers.py"]} |
51,809 | christoflemke/flaskplan | refs/heads/master | /website/rejseplanen.py | import urllib
import json
class RejseplanClient:
    'Access to the rejseplanen.dk api (http://labs.rejseplanen.dk/api)'
    # NOTE(review): uses the Python 2 urllib API (urlopen/urlencode on the
    # top-level module), matching the rest of this codebase.

    def __init__(self, baseurl):
        self.baseurl = baseurl

    def execute(self, path, params):
        # Build "<base>/<path>?<querystring>", fetch it and decode the JSON body.
        url = self.baseurl + "/" + path + "?%s" % params
        f = urllib.urlopen(url)
        response = json.loads(f.read())
        return response

    def stopsNearby(self, coordX, coordY, maxRadius=500, maxNumbers=30):
        'retrieve the stops closest to the location'
        'the coordinates are given as floats in the WSG 84 system'
        # The API expects coordinates as micro-degrees (degrees * 10^6).
        params = urllib.urlencode({
            'coordX': int(coordX * 1000000),
            'coordY': int(coordY * 1000000),
            'maxRadius': maxRadius,
            'maxNumbers': maxNumbers,
            'format': 'json'})
        return self.execute('stopsNearby', params)

    def departureBoard(self, stationId):
        # BUG FIX: the original signature omitted `self`, so calling this as a
        # method bound the instance to `stationId` and the body's `self`
        # reference raised NameError at runtime.
        params = urllib.urlencode({'id': stationId, 'format': 'json'})
        return self.execute('departureBoard', params)
def convertDepartures(json):
    """Flatten a departureBoard API response into [{name, time, direction}, ...]."""
    departures = []
    for record in json['DepartureBoard']['Departure']:
        departures.append({
            'name': record['name'],
            'time': record['time'],
            'direction': record['direction'],
        })
    return departures
def convertStops(json):
    """Convert a stopsNearby response into dicts with plain-degree coordinates.

    The API reports x/y in micro-degrees; lat/lng are scaled back to floats.
    """
    stops = []
    for location in json['LocationList']['StopLocation']:
        stops.append({
            'id': location['id'],
            'name': location['name'],
            'lat': float(location['y']) / 1000000.0,
            'lng': float(location['x']) / 1000000.0,
            'distance': location['distance'],
        })
    return stops
| {"/test/test_app.py": ["/website/__init__.py"], "/test/test_rejseplanen.py": ["/website/rejseplanen.py"]} |
51,810 | christoflemke/flaskplan | refs/heads/master | /website/__init__.py | from flask import Flask, render_template, request
import urllib
import os
from flask import json
from rejseplanen import RejseplanClient, convertStops, convertDepartures
app = Flask(__name__)
# Base endpoint of the rejseplanen.dk REST API.
baseurl = "http://xmlopen.rejseplanen.dk/bin/rest.exe"
rejseplanClient = RejseplanClient(baseurl)
# SECURITY(review): Google Maps API key committed in source — move it to
# configuration/environment and rotate the key.
apikey = 'AIzaSyCyOJLtB3rOMlCrdipSb5G4IWvS5Rwx6DM'
@app.route("/")
def hello():
app.logger.debug('index')
if apikey:
return render_template("index.html", apikey = apikey)
else:
return render_template("index.html")
@app.route("/location")
def locaiton():
app.logger.debug('location')
x = request.args.get('lat')
y = request.args.get('lon')
if not x:
return ('missing parameter lat',400)
if not y:
return ('missing paremeter lon',400)
app.logger.debug("got lat: %s, lon: %s",x,y)
coordX=float(x)
coordY=float(y)
response = rejseplanClient.stopsNearby(coordX,coordY);
app.logger.debug(response)
converted=convertStops(response);
app.logger.debug(converted)
return (json.dumps(converted),200, {
'Content-Type' : 'application/json'
})
@app.route("/departures")
def departures():
app.logger.debug('departures')
stationId = request.args.get('id');
if not stationId:
return ('missing parameter stationid',400)
app.logger.debug('got id: %s',stationId);
params = urllib.urlencode({'id' : stationId, 'format' : 'json'})
url = baseurl+"/departureBoard?%s" % params
app.logger.debug(url)
f = urllib.urlopen(url)
rawResponse = f.read()
jsonRsp = json.loads(rawResponse);
app.logger.debug(jsonRsp);
converted = convertDepartures(jsonRsp);
app.logger.debug(converted)
return (json.dumps(converted), 200, {
'Content-Type' : 'application/json'
})
if __name__ == "__main__":
app.run()
| {"/test/test_app.py": ["/website/__init__.py"], "/test/test_rejseplanen.py": ["/website/rejseplanen.py"]} |
51,811 | christoflemke/flaskplan | refs/heads/master | /test/test_app.py | # -*- coding: utf-8 -*-
import sys
import os
import unittest
import json
sys.path.insert(0, os.path.abspath('..'))
from website import app
class TestApp(unittest.TestCase):
    """Integration tests for the Flask app.

    These exercise the endpoints end-to-end against the live rejseplanen.dk
    API, so they require network access and can break when live data changes.
    """
    def setUp(self):
        # Flask test client with TESTING/DEBUG on; no server process needed.
        app.config['TESTING'] = True
        app.config['DEBUG'] = True
        self.app = app.test_client()
    def test_index(self):
        # The index page should render with the site title.
        rv = self.app.get('/')
        assert b'<title>Flaskplan</title>' in rv.data
    def test_missing_lat(self):
        # /location must reject a request that omits lat.
        rv = self.app.get('/location?lon=10.197282399999999')
        self.assertEqual(rv.status,'400 BAD REQUEST')
    def test_missing_lon(self):
        # ...and a request that omits lon.
        rv = self.app.get('/location?lat=56.17118730000001')
        self.assertEqual(rv.status,'400 BAD REQUEST')
    def test_stops(self):
        # Live query for stops near a fixed coordinate.
        rv = self.app.get('/location?lat=56.17118730000001&lon=10.197282399999999')
        app.logger.debug(rv.data)
        jsn = json.loads(rv.data)
        self.assertIsInstance(jsn,list)
        # it would be strange if the request would return less than 2 results
        self.assertGreater(len(jsn),1)
        # test the structure of the response
        for location in jsn:
            self.assertIsInstance(location,dict)
            keys=set(location.keys())
            self.assertSetEqual(keys, set(['distance','id','lat','lng','name']))
    def test_missing_id(self):
        # /departures must reject a request without a stop id.
        rv = self.app.get('departures')
        self.assertEqual(rv.status,'400 BAD REQUEST')
    def test_departures(self):
        # this id is likely to change in the future...
        rv = self.app.get('departures?id=751465300')
        app.logger.debug(rv.data)
        jsn = json.loads(rv.data)
        self.assertIsInstance(jsn,list)
        # should be more than 2 if the id is still valid
        self.assertGreater(len(jsn),1)
        # test the structure of the response
        for departure in jsn:
            self.assertIsInstance(departure,dict)
            keys=set(departure.keys())
            self.assertSetEqual(keys, set(['name','direction','time']))
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| {"/test/test_app.py": ["/website/__init__.py"], "/test/test_rejseplanen.py": ["/website/rejseplanen.py"]} |
51,812 | christoflemke/flaskplan | refs/heads/master | /test/test_rejseplanen.py | # -*- coding: utf-8 -*-
import sys
import os
import unittest
import json
sys.path.insert(0, os.path.abspath('..'))
from website.rejseplanen import convertStops,convertDepartures
class TestRejseplanen(unittest.TestCase):
    """Unit tests for the API response converters, driven by canned JSON fixtures."""
    def testConvertStops(self):
        with open('data/stopsNearby.json','r') as f:
            # NOTE(review): json.load(fp, encoding) is the Python 2 signature;
            # under Python 3 the extra positional argument raises TypeError.
            j=json.load(f,'UTF-8')
            stops = convertStops(j);
            # micro-degree x/y in the fixture become float lng/lat here.
            self.assertEqual(stops[0], {
                u"name": u"Halmstadgade v. Skøjtehallen (Aarhus)",
                u"lng": 10.188778,
                u"lat": 56.181871,
                u"id":u"751419500",
                u"distance":u"115"
            })
    def testConvertDepartures(self):
        with open('data/departures.json','r') as f:
            j=json.load(f,'UTF-8')
            departures = convertDepartures(j)
            self.assertEqual(departures[0], {
                'name' : 'Bybus 1A',
                'time' : '21:56',
                'direction' : 'Trige via AUH Skejby'
            })
if __name__ == '__main__':
unittest.main()
| {"/test/test_app.py": ["/website/__init__.py"], "/test/test_rejseplanen.py": ["/website/rejseplanen.py"]} |
51,816 | abeedshaik786/confidencial | refs/heads/master | /models.py | from django.db import models
# Create your models here.
class Customer(models.Model):
    """A customer record; Address and Contacts rows point at it via FK."""
    # NOTE(review): TextField is unbounded; CharField(max_length=...) would be
    # the conventional choice for names — renaming/retyping needs a migration.
    FirstName = models.TextField()
    SecondName = models.TextField()
    CompanyName = models.TextField()
    GstTax = models.IntegerField(null=True,)
    def __str__(self):
        return self.FirstName
class Address(models.Model):
    """A postal address belonging to a Customer (removed with it via CASCADE)."""
    FirstAddress = models.CharField(max_length=500)
    # NOTE(review): 'ScondAddress' looks like a typo for 'SecondAddress';
    # renaming the field requires a migration, so it is only flagged here.
    ScondAddress = models.CharField(max_length=500)
    customer = models.ForeignKey(Customer,on_delete=models.CASCADE, null=True)
    def __str__(self):
        return self.FirstAddress
class Contacts(models.Model):
    """A contact entry (phone, email, role) attached to a Customer."""
    # NOTE(review): IntegerField strips leading zeros and caps length for
    # phone numbers; CharField is the usual choice — confirm before migrating.
    MobileNumber = models.IntegerField()
    EmailId = models.EmailField()
    Position = models.TextField(max_length=100)
    customer = models.ForeignKey(Customer,on_delete=models.CASCADE, null=True)
| {"/views.py": ["/models.py"]} |
51,817 | abeedshaik786/confidencial | refs/heads/master | /views.py | from django.shortcuts import render
from .models import Customer,Address,Contacts
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import redirect
from django.urls import reverse
from django.http import JsonResponse
from django.core import serializers
from django.views.decorators.csrf import csrf_protect
import json
# Create your views here.
def customer_rigistration(request):
    """Render the customer list view with every Customer row."""
    customers = Customer.objects.all()
    return render(request, 'customer-listview.html', {'subject': customers})
def customer_view(request, customer_id):
    """Show one customer's detail page with their addresses and contacts."""
    cust_obj = Customer.objects.filter(id=customer_id).first()
    add_obj = None
    concat_obj = None
    if cust_obj:
        add_obj = Address.objects.filter(customer=cust_obj)
        concat_obj = Contacts.objects.filter(customer=cust_obj)
    return render(request, 'customer.html', {
        'cust_obj': cust_obj,
        'add_obj': add_obj,
        # BUG FIX: the contacts queryset was computed but never handed to the
        # template; expose it so the page can render the customer's contacts.
        'concat_obj': concat_obj,
    })
def customer_saving(request):
    """Create (or update, when 'subject' carries an id) a Customer from POST data,
    then render the refreshed customer list.

    Invalid data (ValueError) yields a simple error page.
    """
    try:
        if request.method == 'POST':
            product_id = request.POST.get("subject")
            firstname = request.POST.get('firstname')
            secondname = request.POST.get('secondname')
            companyname = request.POST.get('companyname')
            gsttax = request.POST.get('gsttax')
            if product_id:
                obj = Customer.objects.get(id=product_id)
            else:
                obj = Customer()
            # BUG FIX: the original assignments ended with trailing commas,
            # which stored 1-tuples like ('John',) instead of strings.
            obj.FirstName = firstname
            obj.SecondName = secondname
            obj.CompanyName = companyname
            obj.GstTax = gsttax
            obj.save()
        subject = Customer.objects.all()
        return render(request, 'customer-listview.html', {'subject': subject})
    except ValueError:
        return HttpResponse("<html><body bgcolor=red><h1 style=text-align:center;>invalid data</h1></body></html>")
@csrf_protect
def Customer_Edite(request):
    """Update a Customer from an AJAX POST and return it serialized as JSON."""
    if request.method == 'POST':
        customer_id = request.POST.get('customer_id')
        firstname = request.POST.get('firstname')
        secondname = request.POST.get('secondname')
        companyname = request.POST.get('companyname')
        gsttax = request.POST.get('gsttax')
        obj = Customer.objects.get(id=customer_id)
        obj.FirstName = firstname
        obj.SecondName = secondname
        obj.CompanyName = companyname
        obj.GstTax = gsttax
        obj.save()
        # BUG FIX: `objects.fillter` was a typo for `objects.filter` and raised
        # AttributeError. Also removed the leftover `import pdb;pdb.set_trace`
        # debug line (it was a no-op: the call parentheses were missing).
        response_data = serializers.serialize('json', Customer.objects.filter(id=customer_id))
        return JsonResponse(response_data, safe=False)
def Address_saving(request):
    """Create an Address for the customer named in the POST; ack with JSON.

    Debug residue removed: a `print(request.POST)`, a no-op
    `import pdb;pdb.set_trace` (missing call parentheses), and several blocks
    of commented-out alternative return paths.
    """
    if request.method == "POST":
        customer_id = request.POST.get('customer_id')
        firstaddress = request.POST.get('firstaddress')
        secondaddress = request.POST.get('secondaddress')
        cust_obj = Customer.objects.get(id=customer_id)
        subj = Address()
        subj.FirstAddress = firstaddress
        subj.ScondAddress = secondaddress
        subj.customer = cust_obj
        subj.save()
        return JsonResponse({'success': True})
def Address_Edite(request):
    """Return the Address selected by ?address_id= serialized as JSON."""
    address_pk = request.GET.get('address_id')
    payload = serializers.serialize('json', Address.objects.filter(id=address_pk))
    return JsonResponse(payload, safe=False)
def Address_Edite_Saving(request):
    """Apply edited address fields from the POST, then redirect to the customer page."""
    if request.method == "POST":
        address_id = request.POST.get('Address_id')
        customer_id = request.POST.get('customer_id')
        firstaddress = request.POST.get('firstaddress')
        secondaddress = request.POST.get('secondaddress')
        cust_obj = Customer.objects.get(id=customer_id)
        address = Address.objects.get(id=address_id)
        address.FirstAddress = firstaddress
        address.ScondAddress = secondaddress
        address.save()
        return HttpResponseRedirect(reverse('Customer:customer_view', args={cust_obj.id}))
def Contacts_Edite_Saving(request):
    """Apply edited contact fields from the POST, then redirect to the customer page."""
    if request.method == "POST":
        contacts_id = request.POST.get('contacts_id')
        customer_id = request.POST.get('customer_id')
        mobile = request.POST.get('mobilenumber')
        email = request.POST.get('email')
        position = request.POST.get('position')
        cust_obj = Customer.objects.get(id=customer_id)
        contact = Contacts.objects.get(id=contacts_id)
        contact.MobileNumber = mobile
        contact.EmailId = email
        contact.Position = position
        contact.save()
        return HttpResponseRedirect(reverse('Customer:customer_view', args={cust_obj.id}))
def Contacts_Edite(request):
    """Return the Contacts row selected by ?contact_id= serialized as JSON."""
    # Removed the leftover `import pdb;pdb.set_trace` debug line (a no-op —
    # the call parentheses were missing).
    contacts = request.GET.get('contact_id')
    data = serializers.serialize('json', Contacts.objects.filter(id=contacts))
    return JsonResponse(data, safe=False)
def Contacts_saving(request):
    """Create a Contacts row for the customer named in the POST, then redirect
    to that customer's detail page.

    NOTE(review): unlike its sibling views this one never checks
    request.method, so a GET reaches the DB lookup with id=None and fails —
    confirm all callers POST.
    """
    # Removed the leftover `import pdb;pdb.set_trace` debug line (a no-op —
    # the call parentheses were missing) and dead commented-out code.
    customer_id = request.POST.get('customer_id')
    mobile = request.POST.get('mobilenumber')
    email = request.POST.get('email')
    position = request.POST.get('position')
    cust_obj = Customer.objects.get(id=customer_id)
    cobj = Contacts()
    cobj.MobileNumber = mobile
    cobj.EmailId = email
    cobj.Position = position
    cobj.customer = cust_obj
    cobj.save()
    return HttpResponseRedirect(reverse('Customer:customer_view', args={cust_obj.id}))
def customer_Delete(request, idd):
    """Delete the customer with pk `idd` and re-render the customer list."""
    Customer.objects.get(id=idd).delete()
    remaining = Customer.objects.all()
    return render(request, 'customer-listview.html', {'subject': remaining})
def Address_Delete(request, idd):
    """Delete the address with pk `idd` and render the (context-less) customer page."""
    Address.objects.get(id=idd).delete()
    return render(request, 'customer.html')
| {"/views.py": ["/models.py"]} |
51,819 | STrucks/EventAnalysis | refs/heads/master | /data_crawler/crawler.py | from datetime import datetime
import requests
from bs4 import BeautifulSoup
from data_crawler.configurations.config_loader import ConfigLoader
from data_crawler.models.reddit_post import RedditPost
class RedditNewsCrawler:
    """
    Crawls the newest post headlines from the reddit page configured under
    the 'reddit_base_url' config key and returns them as RedditPost objects.
    (The previous docstring incorrectly described this as a 9gag crawler
    that saves to the DB; persistence happens elsewhere.)
    """

    def __init__(self):
        # Base URL from config plus a browser-like User-Agent header.
        cl = ConfigLoader()
        self.base_url = cl.get("reddit_base_url")
        self.headers = {'User-Agent': 'Mozilla/5.0'}

    def crawl(self, amount=5):
        """
        Scrape the first `amount` post headlines from the configured page and
        wrap them as RedditPost objects stamped with the current date/time
        and the section "News".
        """
        session = requests.session()
        response = session.get(self.base_url, headers=self.headers)
        html = BeautifulSoup(response.text, 'html.parser')
        # find the current `amount` newest posts:
        # NOTE(review): '_eYtD2XCVieq6emjKBH3m' is an obfuscated, build-specific
        # reddit CSS class and will break whenever reddit redeploys.
        newest_posts = html.find_all("h3", {"class": "_eYtD2XCVieq6emjKBH3m"})[0:amount]
        newest_posts_text = [post.text for post in newest_posts]
        posts = [RedditPost(headline=text, date=datetime.today().strftime('%Y-%m-%d'),
                            time=datetime.today().strftime('%H:%M:%S'),
                            section="News") for text in newest_posts_text]
        return posts
| {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
51,820 | STrucks/EventAnalysis | refs/heads/master | /data_crawler/data_manager.py | import logging
import time
from data_crawler.crawler import RedditNewsCrawler
from data_crawler.database_handler import DatabaseHandler
class DataManager:
    """
    Orchestrates one crawl-and-store cycle: pulls the newest reddit posts,
    inserts the ones not yet in the DB, then sleeps before the next batch.
    """

    def __init__(self):
        self.crawler = RedditNewsCrawler()
        self.dbh = DatabaseHandler()

    def next_batch(self):
        """Crawl the 5 newest posts, upload any not yet stored, then wait."""
        # get the 5 newest posts:
        posts = self.crawler.crawl(amount=5)
        # try to upload them into DB (skip duplicates already present):
        for post in posts:
            _post = self.dbh.find("REDDITPOSTS", post)
            if _post is None:
                # the post is not in the DB, we can insert them:
                self.dbh.upload_post("REDDITPOSTS", post)
            else:
                logging.debug("Post with headline %s already exists..." % _post.headline)
        self.dbh.summary("REDDITPOSTS")
        self.wait()

    def wait(self, seconds=30):
        """Pause between batches.

        `seconds` generalizes the formerly hard-coded 30-second delay while
        keeping the no-argument call in next_batch() behaving identically.
        """
        time.sleep(seconds)
| {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
51,821 | STrucks/EventAnalysis | refs/heads/master | /data_crawler/configurations/config_loader.py | import json
import logging
class ConfigLoader:
    """
    Loads the shared and machine-local JSON config files and merges them
    into a single lookup store (local values win on key clashes).
    """

    def __init__(self):
        with open("configurations/conf.json", "r", encoding="utf-8") as f:
            shared = json.load(f)
        with open("configurations/local_conf.json", "r", encoding="utf-8") as f:
            local = json.load(f)
        # The later mapping wins, so local_conf overrides conf.
        self.config_store = {**shared, **local}

    def get(self, key):
        """Look up a configuration value by key (KeyError if absent)."""
        return self.config_store[key]

    def get_logging_level(self):
        """Map the 'logging_level' config string to a logging-module constant."""
        level_name = self.config_store['logging_level']
        if level_name == "debug":
            return logging.DEBUG
        if level_name == "warn":
            return logging.WARNING
        return logging.INFO
| {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
51,822 | STrucks/EventAnalysis | refs/heads/master | /data_crawler/models/reddit_post.py |
class RedditPost:
"""
This model contains basic information about a Reddit post.
"""
    def __init__(self, headline, section=None, date=None, time=None):
        # Plain value object: headline text plus an optional section label
        # and crawl timestamps. NOTE(review): date/time appear to be stored
        # as preformatted strings — confirm against the callers.
        self.headline = headline
        self.section = section
        self.date = date
        self.time = time
def to_dict(self):
return {
"headline": self.headline,
"section": self.section,
"date": self.date,
"time": self.time
} | {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.