index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
20,873
|
chshaiiith/PySimulator
|
refs/heads/master
|
/simulator.py
|
import Queue
import stats
# XXX: Override the comparator of priority queue to support our functionality
# Global event queue; entries are (event_time, event_dict) tuples so the
# PriorityQueue dequeues events in scheduled-time order.
Q = Queue.PriorityQueue()
# Current simulation clock, advanced as events are dequeued in run().
time = 0
# Number of requests the caller asked run() to process (set in run(),
# not consulted by this module itself).
required_request_count = 0
def schedule(event):
    """Enqueue *event* on the global priority queue, keyed by its "time"."""
    due_at = event["time"]
    Q.put((due_at, event))
def run(no_of_request = None):
    """Drain the global event queue, dispatching each event's callback in
    time order, then reset simulator state for the next run.

    Does nothing when no_of_request is falsy. The count itself is only
    stored (for other modules to read), not used as a stop condition here.
    """
    global time
    global required_request_count
    required_request_count = no_of_request
    if not no_of_request:
        return
    while not Q.empty():
        time, event = Q.get()
        event["callback"](event)
    reset()
def reset():
    """Print and clear stats, rewind the clock, and empty the event queue
    so the next simulation run starts from a clean state."""
    stats.print_stat()
    stats.global_reset()
    global time
    time = 0
    # Drain and discard any events still pending on the global queue.
    while not Q.empty():
        temp = Q.get()
    # Resetting request count
    # NOTE(review): this zeroes a global named current_request_count, but the
    # counter defined at module level is required_request_count — confirm
    # which name consumers actually read; this may be a latent bug.
    global current_request_count
    current_request_count = 0
    # Little hack not a good way but it works
    # XXX: Resetting the completion time of servers here . Circular dependencies :-(
    # XXX: Ideally we should reset it for request_handler not request_handler_fifo. Very small thing do ASAP
    # Local import to dodge the circular dependency mentioned above.
    import request_handler_fifo
    request_handler_fifo.reset()
    return
|
{"/request_stream.py": ["/distribution.py", "/arrival.py", "/request.py", "/request_handler.py", "/simulator.py"], "/request_handler.py": ["/request_handler_fifo.py", "/request_handler_priority.py"], "/distribution.py": ["/deterministic.py", "/possion.py"], "/simulator.py": ["/stats.py", "/request_handler_fifo.py"], "/shortest_job_first_policy.py": ["/request_handler_fifo.py"], "/arrival.py": ["/distribution.py", "/simulator.py"], "/request_handler_fifo.py": ["/simulator.py", "/allocation_policy.py", "/stats.py", "/distribution.py"]}
|
20,874
|
chshaiiith/PySimulator
|
refs/heads/master
|
/request_handler_priority.py
|
import Queue
import json
import random
import simulator
from allocation_policy import AllocationPolicy
from stats import Stats
from arrival import Arrival
from distribution import Distribution
# Maps request id -> number of completion callbacks seen so far for that
# request (shared by callback/removal below).
event_map = {}
class RequesthandlerPriority:
    """Request handler backed by one priority queue per server.

    Requests are fanned out to the servers chosen by the configured
    allocation policy, and a completion event is scheduled on the global
    simulator for each copy.
    """

    def __init__(self):
        # All tuning comes from properties.json in the working directory.
        with open("properties.json") as fp:
            config = json.load(fp)
        # self.q = Queue.Queue()
        # # current completion time of a queue
        policy = config["server"]["allocationPolicy"]
        self.allocation_policy = AllocationPolicy.get_policy(policy)
        self.stat = Stats()
        # TODO : Use this code if we want to use multiple queues
        self.write_server = config["server"]["writeServer"]
        self.read_server = config["server"]["readServer"]
        self.no_of_read_response_required = config["server"]["noOfReadResponse"]
        self.no_of_write_response_required = config["server"]["noOfWriteResponse"]
        # One priority queue and one pending-completion timestamp per server.
        self.server_queues = []
        self.completion_time = []
        for i in range(0, config["server"]["numberOfServers"]):
            self.server_queues.append(Queue.PriorityQueue())
            self.completion_time.append(0)
        # NOTE(review): rate is hard-coded to 1 here while the FIFO handler
        # reads its rate from config["job"]["rate"] — confirm intentional.
        self.dist = Distribution.get_distribution(config["request"]["distribution"] , rate=1)

    def add_request(self, request):
        """Dispatch *request* to every server chosen by the allocation
        policy and schedule a completion event for each copy."""
        servers = self.allocation_policy.get_server(request["type"])
        for i in servers:
            # NOTE(review): the request is enqueued before request_size is
            # assigned (the FIFO handler sizes it first) — verify ordering.
            self.server_queues[i].put(request)
            # Draw this copy's service time from the configured distribution.
            request["request_size"] = self.dist.next()
            print "priority size: " + request["id"] + " " + str(request["request_size"])
            # Busy server: queue behind its pending work; idle: start now.
            if self.completion_time[i] > simulator.time:
                self.completion_time[i] = self.completion_time[i] + request["request_size"]
            else:
                self.completion_time[i] = simulator.time + request["request_size"]
            event = {
                "time": self.completion_time[i],
                "request": request,
                "callback": callback,
                "handler": self,
                "index": i
            }
            simulator.schedule(event)
def callback(event):
    """Handle a completion event for a priority-queue request: count the
    response and, once enough copies have finished, record stats and
    schedule removal of the request's remaining pending events."""
    global event_map
    with open("properties.json") as fp:
        config = json.load(fp)
    server_cfg = config["server"]
    req = event["request"]
    # I assumed Type1 is read request
    if req["type"] == "read":
        no_of_request_required = server_cfg["noOfReadResponse"]
        total_request = server_cfg["readServer"]
    else:
        no_of_request_required = server_cfg["noOfWriteResponse"]
        total_request = server_cfg["writeServer"]
    # Processing of request and deleting once we reached max
    req_id = req["id"]
    event_map[req_id] = event_map.get(req_id, 0) + 1
    if event_map[req_id] == no_of_request_required:
        event["handler"].stat.collect_stats(event)
        # Enough responses: purge the stragglers via a removal event.
        simulator.schedule({
            "time": simulator.time,
            "callback": removal,
            "request": req,
            "handler": event["handler"],
            "index": event["index"]
        })
    elif event_map[req_id] == total_request:
        del event_map[req_id]
    return
def removal(event):
    """Purge every still-pending event for this request id by rebuilding
    the simulator's global queue without them."""
    target_id = event["request"]["id"]
    filtered = Queue.PriorityQueue()
    while not simulator.Q.empty():
        entry = simulator.Q.get()
        # entry is a (time, event) tuple; keep events for other requests.
        if entry[1]["request"]["id"] != target_id:
            filtered.put(entry)
    simulator.Q = filtered
|
{"/request_stream.py": ["/distribution.py", "/arrival.py", "/request.py", "/request_handler.py", "/simulator.py"], "/request_handler.py": ["/request_handler_fifo.py", "/request_handler_priority.py"], "/distribution.py": ["/deterministic.py", "/possion.py"], "/simulator.py": ["/stats.py", "/request_handler_fifo.py"], "/shortest_job_first_policy.py": ["/request_handler_fifo.py"], "/arrival.py": ["/distribution.py", "/simulator.py"], "/request_handler_fifo.py": ["/simulator.py", "/allocation_policy.py", "/stats.py", "/distribution.py"]}
|
20,875
|
chshaiiith/PySimulator
|
refs/heads/master
|
/shortest_job_first_policy.py
|
import random
import json
from operator import itemgetter
# Ideally it should be request handler .
# XXX: Move all global variables to request_handler than RequestFiFo Handler
import request_handler_fifo
class SJF:
    """Shortest-job-first allocation policy: prefer the servers with the
    smallest pending completion time (read from request_handler_fifo)."""

    def __init__(self):
        with open("properties.json") as fp:
            config = json.load(fp)
        self.number_of_servers = config["server"]["numberOfServers"]
        self.read_server = config["server"]["readServer"]
        self.write_server = config["server"]["writeServer"]
        # All server indices; the candidate pool for write requests.
        self.serverlist = [x for x in range(0, self.number_of_servers)]

    def get_server(self, type_of_request, possible_servers=None):
        """Return the indices of the least-loaded servers for this request.

        Reads pick from the caller-supplied *possible_servers*; writes pick
        from every server.
        NOTE(review): assumes callers always supply possible_servers for
        reads — a None value would raise here. Confirm against callers.
        """
        if type_of_request == "read":
            count = self.read_server
            candidates = possible_servers
        else:
            count = self.write_server
            candidates = self.serverlist
        sorted_server = self.sort_server_on_completion_time(candidates)
        return sorted_server[:count]

    def sort_server_on_completion_time(self, servers):
        """Return *servers* ordered by ascending pending completion time."""
        # Pair each server with its completion time, sort on the time, then
        # strip the times off. (Replaces a manual append loop that shadowed
        # the builtin name `dict`.)
        keyed = [[request_handler_fifo.completion_time[server], server]
                 for server in servers]
        return [entry[1] for entry in sorted(keyed, key=itemgetter(0))]
|
{"/request_stream.py": ["/distribution.py", "/arrival.py", "/request.py", "/request_handler.py", "/simulator.py"], "/request_handler.py": ["/request_handler_fifo.py", "/request_handler_priority.py"], "/distribution.py": ["/deterministic.py", "/possion.py"], "/simulator.py": ["/stats.py", "/request_handler_fifo.py"], "/shortest_job_first_policy.py": ["/request_handler_fifo.py"], "/arrival.py": ["/distribution.py", "/simulator.py"], "/request_handler_fifo.py": ["/simulator.py", "/allocation_policy.py", "/stats.py", "/distribution.py"]}
|
20,876
|
chshaiiith/PySimulator
|
refs/heads/master
|
/arrival.py
|
from distribution import Distribution
import simulator
import json
class Arrival:
    """Produces successive arrival times from the configured distribution."""

    def __init__(self):
        # Pull the arrival distribution name and rate from properties.json.
        with open("properties.json") as fp:
            config = json.load(fp)
        arrival_cfg = config["arrival"]
        rate = arrival_cfg["rate"]
        self.distribution = Distribution.get_distribution(arrival_cfg["distribution"], rate=rate)

    def next_arrival(self):
        """Return the absolute simulation time of the next arrival."""
        offset = self.distribution.next()
        return offset + simulator.time
|
{"/request_stream.py": ["/distribution.py", "/arrival.py", "/request.py", "/request_handler.py", "/simulator.py"], "/request_handler.py": ["/request_handler_fifo.py", "/request_handler_priority.py"], "/distribution.py": ["/deterministic.py", "/possion.py"], "/simulator.py": ["/stats.py", "/request_handler_fifo.py"], "/shortest_job_first_policy.py": ["/request_handler_fifo.py"], "/arrival.py": ["/distribution.py", "/simulator.py"], "/request_handler_fifo.py": ["/simulator.py", "/allocation_policy.py", "/stats.py", "/distribution.py"]}
|
20,877
|
chshaiiith/PySimulator
|
refs/heads/master
|
/request_handler_fifo.py
|
import Queue
import json
import simulator
from allocation_policy import AllocationPolicy
from stats import Stats
import sys
from distribution import Distribution
# Maps request id -> count of completion callbacks seen for that request.
event_map = {}
# Maps request id -> the server indices first assigned to it, so repeat
# dispatches of the same id go back to the same servers.
request_to_server_map = {}
# Per-server pending completion time, indexed by server number; module-level
# so other modules (e.g. the SJF policy) can read it directly.
completion_time = []
class RequesthandlerFiFo:
    """Request handler backed by one FIFO queue per server.

    Pending completion times live in the module-level completion_time list
    so allocation policies (e.g. SJF) can inspect them.
    """

    def __init__(self):
        global completion_time
        with open("properties.json") as fp:
            config = json.load(fp)
        policy = config["server"]["allocationPolicy"]
        self.allocation_policy = AllocationPolicy.get_policy(policy)
        self.stat = Stats()
        # Todo : Use this code if we want to use multiple queues
        self.write_server = config["server"]["writeServer"]
        self.read_server = config["server"]["readServer"]
        self.no_of_read_response_required = config["server"]["noOfReadResponse"]
        self.no_of_write_response_required = config["server"]["noOfWriteResponse"]
        # One FIFO queue per server; completion_time grows in step with it.
        self.server_queues = []
        for i in range(0, config["server"]["numberOfServers"]):
            self.server_queues.append(Queue.Queue())
            completion_time.append(0)
        rate = config["job"]["rate"]
        self.dist = Distribution.get_distribution(config["job"]["distribution"] , rate=rate)

    def add_request(self, request):
        """Dispatch *request* to the servers chosen by the allocation
        policy and schedule one completion event per copy."""
        global completion_time
        # Todo: Make it for both read and write. Currently all request read type
        # Reuse the original server assignment for a request id already
        # seen; otherwise ask the policy and remember the choice.
        if request["id"] in request_to_server_map:
            servers = self.allocation_policy.get_server(request["type"],
                request_to_server_map[request["id"]])
            # print servers
        else:
            servers = self.allocation_policy.get_server(request["type"])
            request_to_server_map[request["id"]] = servers
        for i in servers:
            # Draw this copy's service time before enqueueing it.
            request["request_size"] = self.dist.next()
            # print "fifo size: " + str(request["id"]) + " " + str(request["request_size"])
            self.server_queues[i].put(request)
            # Busy server: queue behind its pending work; idle: start now.
            if completion_time[i] > simulator.time:
                completion_time[i] = completion_time[i] + request["request_size"]
            else:
                completion_time[i] = simulator.time + request["request_size"]
            event = {
                "time": completion_time[i],
                "request": request,
                "callback": callback,
                "handler": self,
                "index": i,
                "type" : "completion"
            }
            simulator.schedule(event)
def callback(event):
    """Handle a completion event: pop the finished request off its server's
    FIFO queue, count the response, and record stats once enough copies of
    the request have completed."""
    global event_map
    with open("properties.json") as fp:
        config = json.load(fp)
    server_cfg = config["server"]
    req = event["request"]
    # I assumed Type1 is read request
    if req["type"] == "read":
        no_of_request_required = server_cfg["noOfReadResponse"]
        total_request = server_cfg["readServer"]
    else:
        no_of_request_required = server_cfg["noOfWriteResponse"]
        total_request = server_cfg["writeServer"]
    # Pop the head of this server's queue; FIFO order means it must be the
    # request this event was scheduled for.
    current_request = event["handler"].server_queues[event["index"]].get()
    assert(current_request["id"] == req["id"])
    req_id = req["id"]
    event_map[req_id] = event_map.get(req_id, 0) + 1
    if event_map[req_id] == no_of_request_required:
        event["handler"].stat.collect_stats(event)
    # Separate check (not elif): when required == total both branches run.
    if event_map[req_id] == total_request:
        del event_map[req_id]
    return
def reset():
    """Zero every server's pending completion time, mutating the list in
    place so modules holding a reference to it observe the reset."""
    global completion_time
    completion_time[:] = [0] * len(completion_time)
|
{"/request_stream.py": ["/distribution.py", "/arrival.py", "/request.py", "/request_handler.py", "/simulator.py"], "/request_handler.py": ["/request_handler_fifo.py", "/request_handler_priority.py"], "/distribution.py": ["/deterministic.py", "/possion.py"], "/simulator.py": ["/stats.py", "/request_handler_fifo.py"], "/shortest_job_first_policy.py": ["/request_handler_fifo.py"], "/arrival.py": ["/distribution.py", "/simulator.py"], "/request_handler_fifo.py": ["/simulator.py", "/allocation_policy.py", "/stats.py", "/distribution.py"]}
|
20,878
|
dougsc/gp
|
refs/heads/master
|
/engine/terminals/basic.py
|
from terminal_set import TerminalSet
from random import randint
def t_basic_terminals():
    """Build the basic terminal set: one 'rand_int' generator terminal."""
    terminal_set = TerminalSet(__name__)
    terminal_set.add_terminal_function(
        name='rand_int', func_ref=t_basic_rand_int, value_type='int', args=[0, 9])
    return terminal_set
def t_basic_rand_int(lower_bound, upper_bound):
    """Return a uniformly random integer in [lower_bound, upper_bound]."""
    value = randint(lower_bound, upper_bound)
    return value
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,879
|
dougsc/gp
|
refs/heads/master
|
/engine/utils/stats.py
|
from redis import Redis
class RedisWrap:
    """Best-effort redis client: if the connection fails at construction,
    every later operation silently becomes a no-op."""

    def __init__(self):
        try:
            self.redis_cli = Redis()
            # Probe the connection now so failures surface here, not later.
            self.redis_cli.info()
        except Exception, e:
            print 'failed to connect to redis: %s' % (str(e))
            # Flag the client as unusable; methods check this before acting.
            self.redis_cli = None

    def append(self, key, value, timestamp=False):
        """Append *value* to the redis list at *key*; when timestamp is
        True, stamp it first with the redis server time under 'ts'."""
        if self.redis_cli:
            if timestamp:
                value['ts'] = self.redis_cli.time()
            res = self.redis_cli.rpush(key, value)

    def delete(self, key):
        # Remove the key entirely (used to restart a stats series).
        if self.redis_cli:
            self.redis_cli.delete(key)
class Stats:
    """Namespaced stats-series writer backed by a best-effort redis client."""

    def __init__(self, key_root):
        self.stats_cli = RedisWrap()
        # Every series written through this object is prefixed with key_root.
        self.key_root = key_root

    def _get_full_key(self, key):
        """Prefix *key* with this object's root namespace."""
        return '%s:%s' % (self.key_root, key)

    def init_series(self, key):
        """Clear the named series; *key* may be one name or a list of names."""
        # and/or idiom kept from the original: a single name gets wrapped in
        # a one-element list, a list is used as-is.
        key_list = isinstance(key, list) and key or [key]
        for series_name in key_list:
            self.stats_cli.delete(self._get_full_key(series_name))

    def add_to_series(self, key, value, timestamp=False):
        """Append *value* to the named series, optionally timestamped."""
        self.stats_cli.append(self._get_full_key(key), value, timestamp)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,880
|
dougsc/gp
|
refs/heads/master
|
/engine/terminal_set.py
|
class TerminalSet:
    """A collection of terminal-node descriptors for GP expression trees."""

    NODE_TYPE = 'terminal'

    @classmethod
    def is_terminal_value(cls, node):
        """Return True when *node* is a plain numeric value terminal."""
        # was node.has_key('value'): has_key was removed in Python 3,
        # `in` behaves identically on dicts in both versions.
        return (node['node_type'] == cls.NODE_TYPE
                and 'value' in node
                and (node['type'] in ['int', 'float']))

    @classmethod
    def terminal_value(cls, value):
        """Wrap a raw value in a terminal-node dict."""
        return {'node_type': cls.NODE_TYPE, 'name': str(value), 'value': value, 'type': type(value).__name__}

    def __init__(self, name=None):
        # *name* is accepted for compatibility with callers that pass a
        # module name (e.g. TerminalSet(__name__) in terminals/basic.py,
        # which the previous zero-arg signature rejected); it is stored but
        # otherwise unused.
        self.name = name
        self.terminal_set = []

    def add_terminal_value(self, name, value):
        """Add a constant-value terminal."""
        self.terminal_set.append({'node_type': self.NODE_TYPE, 'name': name, 'value': value, 'type': type(value).__name__})

    def add_terminal_function(self, name, func_ref, value_type, args=None):
        """Add a terminal whose value comes from calling func_ref(*args)."""
        # None sentinel instead of a mutable default: each call gets a
        # fresh list rather than one shared across all calls.
        self.terminal_set.append({'node_type': self.NODE_TYPE, 'name': name, 'function': func_ref,
                                  'type': value_type, 'args': [] if args is None else args})

    def add_terminal_function_to_value(self, func_ref, args=None):
        """Add an anonymous generator terminal (no name, no fixed type)."""
        self.terminal_set.append({'node_type': self.NODE_TYPE, 'function': func_ref,
                                  'args': [] if args is None else args})

    def get(self):
        """Return the accumulated list of terminal descriptors."""
        return self.terminal_set
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,881
|
dougsc/gp
|
refs/heads/master
|
/run_gp.py
|
import importlib
from engine.individual import Individual
from engine.runner import Runner
import argparse
def run(cls_path, cls_args, tree_depth, pop_size, max_gen, tourny_size, error_threshold):
    """Build a population for the experiment class named by *cls_path* and
    run the GP loop; returns the Runner for inspection/debugging."""
    print "debug with: run('%s', '%s', %d, %d, %d, %d, %f)" % (cls_path, cls_args, tree_depth, pop_size, max_gen, tourny_size, error_threshold)
    # Import the module half of the dotted path, then pull the class off it.
    exp_lib = importlib.import_module('.'.join(cls_path.split('.')[:-1]))
    exp_cls = getattr(exp_lib, cls_path.split('.')[-1])
    # Constructor args arrive as one comma-separated string (may be None).
    exp_args = cls_args and cls_args.split(',') or []
    print 'Using class: %s, args: %s' % (exp_cls.__name__, exp_args)
    # Round the population down to a multiple of 24 (see --pop-size help).
    pop_size = pop_size - (pop_size % 24)
    print 'Using population size: %d, tree depth: %d, max generations: %d' % (pop_size, tree_depth, max_gen)
    # Python 2 map is eager, so these build/initialise the population now.
    population = map(lambda x:Individual(exp_cls, exp_args), range(pop_size))
    map(lambda x:x.generate(tree_depth=tree_depth), population)
    r = Runner(population, termination_error_threshold=error_threshold, max_generations=max_gen, tournament_size=tourny_size)
    r.run()
    return r
if __name__ == "__main__":
    # CLI entry point: each dest= maps 1:1 onto a run() parameter, so the
    # parsed namespace can be splatted straight into run().
    parser = argparse.ArgumentParser(description='Run GP experiments.')
    parser.add_argument('--class-path', help='class path for experiment', required=True, dest='cls_path')
    parser.add_argument('--class-args', help='constructor args for experiment', dest='cls_args')
    parser.add_argument('--tree-depth', help='Max tree depth', dest='tree_depth', default=4, type=int)
    parser.add_argument('--pop-size', help='Population Size (rounded down to mod 24)', dest='pop_size', default=100, type=int)
    parser.add_argument('--max-gens', help='Maximum number of generations', dest='max_gen', default=500, type=int)
    parser.add_argument('--tourney-size', help='Tournament Size (factor of 24)', dest='tourny_size', default=2, type=int)
    parser.add_argument('--threshold', help='Error threshold', dest='error_threshold', default=0, type=float)
    args = parser.parse_args()
    run(**vars(args))
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,882
|
dougsc/gp
|
refs/heads/master
|
/exp/line.py
|
# def get_terminal_set(self):
# def get_function_set(self):
# def initialize(self):
# def next(self):
# def function_lookup(self):
# def error(self, value):
from engine import *
from random import randint
from os import path
import csv
from engine.function_set import FunctionSet
import engine.functions.signs as gp_f_signs
import engine.functions.trig as gp_f_trig
from engine.experiment import Experiment
class LineExp(Experiment):
    """Symbolic-regression experiment: fit a function to samples loaded
    from a CSV file.

    The function and terminal sets are built once at class-definition time
    and shared by every instance, as is the loaded target data.
    """

    # Class-level cache of the target samples, shared by all instances.
    _target_data = None

    function_set = FunctionSet()
    gp_f_signs.add_functions(function_set)
    gp_f_trig.add_functions(function_set)
    terminal_set = TerminalSet()
    # 'get_x' is a method name resolved at execution time (see get_x below).
    terminal_set.add_terminal_function(name='x_var', func_ref='get_x', value_type=int.__name__)
    # Random integer constants in [-9, 9].
    terminal_set.add_terminal_function_to_value(func_ref=randint, args=[-9,9])

    def __init__(self, filename):
        self.current_data_index = 0
        self.read_target_data(filename)

    @classmethod
    def set_target_data(cls, data):
        # Guard against accidentally replacing the shared cache.
        assert cls._target_data == None, 'attempt to reset target data'
        cls._target_data = data

    @classmethod
    def read_target_data(cls, filename):
        """Load target samples from *filename* (CSV only) into the class
        cache; a no-op once data has already been loaded."""
        if cls._target_data != None:
            return
        # NOTE(review): fh is never closed — consider a with-block.
        fh = open(filename)
        (_, ext) = path.splitext(filename)
        if ext == '.csv':
            csv_data = csv.reader(fh)
            # Convert every cell to float; index()/error() below read the
            # rows as [x, y] pairs.
            cls.set_target_data(map(lambda raw_data:map(lambda x:float(x), raw_data), csv_data))
        else:
            # NOTE(review): typo "unknonw" in this error message.
            raise Exception('unknonw data file type: %s' % (ext))

    def get_x(self):
        # Terminal callback: the current sample's x value.
        return self.index()

    def initialize(self):
        # Rewind to the first sample.
        self.current_data_index = 0

    def next(self):
        """Advance to the next sample; return False when exhausted."""
        if (self.current_data_index + 1) < len(self._target_data):
            self.current_data_index += 1
            return True
        return False

    def index(self):
        # x value (column 0) of the current sample.
        return self._target_data[self.current_data_index][0]

    def norm_error(self, value):
        # Absolute error against the current sample's y value.
        return abs(self.error(value))

    def error(self, value):
        # Signed error: target y (column 1) minus the candidate's value.
        return (self._target_data[self.current_data_index][1] - value)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,883
|
dougsc/gp
|
refs/heads/master
|
/engine/functions/trig.py
|
from math import tan,sin,cos
def add_functions(function_set):
    """Register the trig wrappers (tan, sin, cos) with *function_set*."""
    for fn_name, fn_ref in (('tan', f_trig_tan), ('sin', f_trig_sin), ('cos', f_trig_cos)):
        function_set.add_function(name=fn_name, func_ref=fn_ref, arity=1)
def f_trig_tan(a):
    """Tangent wrapper used as a GP function node."""
    result = tan(a)
    return result
def f_trig_sin(a):
    """Sine wrapper used as a GP function node."""
    result = sin(a)
    return result
def f_trig_cos(a):
    """Cosine wrapper used as a GP function node."""
    result = cos(a)
    return result
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,884
|
dougsc/gp
|
refs/heads/master
|
/engine/individual.py
|
from tree import Tree
import numpy
class Individual:
    """One GP population member: an experiment instance plus an expression
    tree, with a tournament 'standing' used for selection weighting."""

    # Bounds and starting value for tournament standing.
    STANDING_LIMITS = {'min': 1, 'max': 10, 'starting': 5}

    def __init__(self, exp_class, exp_args=[]):
        # NOTE(review): mutable default exp_args=[] is shared across calls;
        # it is only read here, but callers must not mutate it.
        self.exp_class = exp_class
        self.exp_args = exp_args
        self._error = 0
        self._standing = None

    @property
    def error(self):
        # Total error accumulated by the last evaluate() run.
        return self._error

    @property
    def standing(self):
        # Selection weight; None until generate()/clone()/mutate() sets it.
        return self._standing

    def increment_standing(self):
        # Clamp at the configured maximum.
        self._standing = min(self._standing + 1, self.STANDING_LIMITS['max'])

    def decrement_standing(self):
        # Clamp at the configured minimum.
        self._standing = max(self._standing - 1, self.STANDING_LIMITS['min'])

    def init_experiment(self):
        """Reset accumulated error and build a fresh experiment instance."""
        self._error = 0
        self.experiment = self.exp_class(*self.exp_args)
        self.experiment.initialize()

    def generate(self, extra_terminal_set=[], extra_function_set=[], tree_depth=3, tree_function_bias=1):
        """Create a brand-new random tree from the experiment's terminal
        and function sets (plus any extras)."""
        self._standing = self.STANDING_LIMITS['starting']
        self.init_experiment()
        self.tree = Tree()
        self.tree.create(self.experiment.get_terminal_set() + extra_terminal_set,
            self.experiment.get_function_set() + extra_function_set,
            function_bias=tree_function_bias, max_depth=tree_depth)

    def clone(self):
        """Return a copy with an identical tree and the same standing."""
        clone = self.__class__(self.exp_class, self.exp_args)
        clone._standing = self._standing
        clone.init_experiment()
        clone.tree = Tree()
        clone.tree.clone(self.tree)
        return clone

    def mutate(self):
        """Return a new individual whose tree is a mutation of this one's."""
        mutant = self.__class__(self.exp_class, self.exp_args)
        mutant._standing = self._standing
        mutant.init_experiment()
        mutant.tree = Tree()
        mutant.tree.mutate(self.tree)
        return mutant

    def reproduce(self, other_individual):
        """Return a child produced by subtree crossover with
        *other_individual*; its standing is the parents' (int) average."""
        child = self.__class__(self.exp_class, self.exp_args)
        child._standing = int(numpy.average([self._standing, other_individual._standing]))
        child.init_experiment()
        child.tree = Tree()
        child.tree.subtree_crossover(self.tree, other_individual.tree)
        return child

    def get_func(self, function_name):
        # Resolve a function name through the experiment's lookup.
        return self.experiment.function_lookup(function_name)

    def evaluate(self):
        """Run the tree over every data point the experiment yields,
        accumulating normalised error into self._error."""
        loop = True
        while loop:
            self._error += self.experiment.norm_error(self.tree.execute(self))
            loop = self.experiment.next()

    def evaluate_data(self):
        """Return per-sample {value, error[, index]} dicts over the whole
        data set (used to plot actual vs target)."""
        samples = []
        loop = True
        self.experiment.initialize()
        while loop:
            actual_value = self.tree.execute(self)
            sample = {'value': actual_value, 'error': self.experiment.norm_error(actual_value)}
            if self.experiment.index() != None:
                sample['index'] = self.experiment.index()
            samples.append(sample)
            loop = self.experiment.next()
        return samples

    def simplify(self):
        # Delegate algebraic simplification to the tree.
        self.tree.simplify(self)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,885
|
dougsc/gp
|
refs/heads/master
|
/engine/runner.py
|
import numpy
import bisect
import random
import sys
from pprint import pformat
from utils.logger import GP_Logger
from utils.stats import Stats
class Runner:
    """Drives the GP loop: evaluate the population, breed a new generation,
    and stop on success or after max_generations."""

    # Fraction of the new generation produced by each operator; the
    # remainder is filled with clones (see generate_new_population).
    NEW_GEN_DIST = {'mutate': 0.05, 'reproduce': 0.5}
    # Stats Keys:
    SK_LOWEST_ERROR = 'lowest_error'
    SK_BEST_INDIVIDUAL = 'best_individual'
    SK_TARGET_SAMPLES = 'target_samples'
    SK_ACTUAL_SAMPLES = 'actual_samples'
    SK_BEST_TREE = 'best_tree'

    @classmethod
    def log(cls):
        # Class-named logger so log lines identify the component.
        return GP_Logger.logger(cls.__name__)

    def __init__(self, population, termination_error_threshold, max_generations, tournament_size=2):
        self.population = population
        self.termination_error_threshold = termination_error_threshold
        self.current_generation = 1
        # NOTE: sys.maxint is Python 2 only (sys.maxsize on Python 3).
        self.current_best_error = sys.maxint
        self.max_generations = max_generations
        self.tournament_size = tournament_size
        self.stats = Stats(self.__class__.__name__)
        # Start every tracked series from scratch for this run.
        self.stats.init_series([self.SK_LOWEST_ERROR, self.SK_BEST_INDIVIDUAL, self.SK_TARGET_SAMPLES, self.SK_ACTUAL_SAMPLES,
            self.SK_BEST_TREE])

    def store_target_samples(self):
        """Record the experiment's target curve once, for the viewer."""
        experiment = self.population[0].exp_class(*self.population[0].exp_args)
        map(lambda x:self.stats.add_to_series(self.SK_TARGET_SAMPLES, x), experiment.target_data())

    def store_actual_samples(self, individual):
        """Replace the stored samples with *individual*'s current outputs."""
        self.stats.init_series(self.SK_ACTUAL_SAMPLES)
        map(lambda x:self.stats.add_to_series(self.SK_ACTUAL_SAMPLES, x), individual.evaluate_data())

    def findIndexOfBest(self):
        # Index of the population member with the lowest accumulated error.
        return numpy.argmin(map(lambda x:x.error, self.population))

    def evaluate(self):
        """Evaluate every individual, record per-generation stats, and
        return the best individual."""
        self.log().debug('evaluating generation %d' % (self.current_generation))
        for individual in self.population:
            individual.evaluate()
        best = self.findIndexOfBest()
        self.log().debug('population member %d was best with %d error (target: %d)' % (best, self.population[best].error, self.termination_error_threshold))
        self.stats.add_to_series(self.SK_LOWEST_ERROR, {'error': self.population[best].error, 'index': self.current_generation}, timestamp=True)
        self.stats.add_to_series(self.SK_BEST_INDIVIDUAL, {'best_ix': best, 'index': self.current_generation}, timestamp=True)
        return self.population[best]

    def update_standings(self):
        """Run fixed tournaments over consecutive population slices,
        raising winners' standing and lowering losers' (ties all win)."""
        for i in xrange(0, len(self.population), self.tournament_size):
            lowest_error = min(map(lambda x:x.error, self.population[i:i+self.tournament_size]))
            winners = filter(lambda x:x.error == lowest_error, self.population[i:i+self.tournament_size])
            losers = filter(lambda x:x.error > lowest_error, self.population[i:i+self.tournament_size])
            assert len(winners) + len(losers) == self.tournament_size, 'Expected winners (%d) + losers (%d) = tournament size (%d)' % (len(winners), len(losers), self.tournament_size)
            # All tied: nobody's standing changes.
            if len(losers) == 0:
                continue
            self.log().debug('best in tournament [%d:%d](error: %d): %d winners' % (i, i+self.tournament_size, lowest_error, len(winners)))
            map(lambda x:x.increment_standing(), winners)
            map(lambda x:x.decrement_standing(), losers)

    def random_select_n_unique(self, number, weight_list):
        """Pick *number* distinct population indices, weighted by the
        cumulative standings in *weight_list*."""
        selection = []
        assert number < len(weight_list), 'attemt to get %d unique values from a list of %d elements' % (number, len(weight_list))
        weight_max = weight_list[-1]
        while len(selection) < number:
            # bisect on the cumulative weights implements roulette-wheel
            # selection; duplicates are retried until enough unique picks.
            ix = bisect.bisect_right(weight_list, random.uniform(0, weight_max))
            if not ix in selection:
                selection.append(ix)
        return selection

    def generate_new_population(self):
        """Build the next generation: ~50% by crossover, ~5% by mutation,
        the remainder clones — all standing-weighted selections."""
        new_population = []
        self.update_standings()
        weight_list = list(numpy.cumsum(map(lambda x:x.standing, self.population)))
        pop_size = len(self.population)
        chosen_number = int(pop_size * self.NEW_GEN_DIST['reproduce'])
        # Crossover consumes pairs, so round down to an even count.
        chosen_number = chosen_number - (chosen_number % 2)
        individuals_chosen = self.random_select_n_unique(chosen_number, weight_list)
        self.log().debug('%d indiviuals chosen to reproduce - %s' % (len(individuals_chosen), sorted(individuals_chosen)))
        for ix in xrange(0, len(individuals_chosen), 2):
            new_population.append(self.population[individuals_chosen[ix]].reproduce(self.population[individuals_chosen[ix+1]]))
        chosen_number = int(pop_size * self.NEW_GEN_DIST['mutate'])
        individuals_chosen = self.random_select_n_unique(chosen_number, weight_list)
        self.log().debug('%d indiviuals chosen to mutate - %s' % (len(individuals_chosen), sorted(individuals_chosen)))
        for ix in xrange(0, len(individuals_chosen)):
            new_population.append(self.population[individuals_chosen[ix]].mutate())
        # Top up with clones so the population size stays constant.
        chosen_number = len(self.population) - len(new_population)
        individuals_chosen = self.random_select_n_unique(chosen_number, weight_list)
        self.log().debug('%d indiviuals chosen to clone - %s' % (len(individuals_chosen), sorted(individuals_chosen)))
        for ix in xrange(0, len(individuals_chosen)):
            new_population.append(self.population[individuals_chosen[ix]].clone())
        assert len(self.population) == len(new_population), 'new population size does not match original'
        self.population = new_population
        self.current_generation += 1

    def check_evaluation(self, best):
        """Record *best* when it matches or improves the best error so far;
        return True when it reaches the termination threshold."""
        if best.error <= self.current_best_error:
            self.current_best_error = best.error
            self.stats.add_to_series(self.SK_BEST_TREE, {'tree': best.tree.dump_structure()})
            self.store_actual_samples(best)
        return (best.error <= self.termination_error_threshold)

    def run(self):
        """Main loop: evaluate, then breed/evaluate until success or the
        generation limit is reached."""
        self.store_target_samples()
        success = self.check_evaluation(self.evaluate())
        while self.current_generation <= self.max_generations and success == False:
            self.generate_new_population()
            self.log().debug('average standing for generation %d: %f' % (self.current_generation,
                numpy.average(map(lambda x:x.standing, self.population))))
            success = self.check_evaluation(self.evaluate())
        print 'success: %s' % (success)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,886
|
dougsc/gp
|
refs/heads/master
|
/viewer/app/utils/stats.py
|
from datetime import datetime as dt
from redis import Redis
def convert_sample(raw_sample):
    """Deserialize one raw redis list entry into a sample dict.

    Entries are stored as the repr() of a dict; when present, 'ts' is a
    two-int pair (seconds, microseconds — presumably from redis TIME;
    confirm against the writer) converted to a local datetime.
    """
    # XXX(security): eval() on data read back from redis trusts the store
    # completely. Prefer ast.literal_eval (or storing JSON) if this data
    # could ever come from an untrusted source.
    sample = eval(raw_sample)
    # was sample.has_key('ts'): has_key was removed in Python 3; `in` is
    # equivalent on dicts in both versions.
    if 'ts' in sample:
        sample['ts'] = dt.fromtimestamp(float('%d.%d' % (sample['ts'])))
    return sample
def get_data(key):
    """Fetch and deserialize every sample stored under the redis list *key*."""
    client = Redis()
    raw_entries = client.lrange(key, 0, -1)
    return [convert_sample(entry) for entry in raw_entries]
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,887
|
dougsc/gp
|
refs/heads/master
|
/engine/functions/__init__.py
|
# Module definition
import signs
import trig
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,888
|
dougsc/gp
|
refs/heads/master
|
/viewer/app/utils/__init__.py
|
# module def
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,889
|
dougsc/gp
|
refs/heads/master
|
/engine/experiment.py
|
class Experiment:
function_set = None
terminal_set = None
@classmethod
def get_terminal_set(cls):
return cls.terminal_set.get()
@classmethod
def get_function_set(cls):
return cls.function_set.get()
def function_lookup(self, name):
return getattr(self, name)
def index(self):
return None
def target_data(self):
self.initialize()
samples = []
loop = True
while loop:
sample = {'value': self.error(0)}
if self.index() != None:
sample['index'] = self.index()
samples.append(sample)
loop = self.next()
return samples
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,890
|
dougsc/gp
|
refs/heads/master
|
/viewer/app/utils/tree_render.py
|
import pydot
import tempfile
class TreeRender:
    """Renders a nested tree structure ({'name', 'lower_nodes'} dicts) to
    SVG via pydot, storing the SVG text in self.data."""

    def __init__(self, tree_data):
        self.tree_data = tree_data
        # Monotonic counter used to give every dot node a unique id.
        self.dot_index = 0
        self.data = None

    def _create_dot_node(self, layer, name):
        """Create a uniquely-named pydot node labelled *name*."""
        print 'add node: ix: %d, lyr: %d, name: %s' % (self.dot_index, layer, name)
        dot_node = pydot.Node('index_%d_layer_%d' % (self.dot_index, layer), label=name)
        self.dot_index += 1
        return dot_node

    def _draw_nodes(self, nodes, graph, parent_dot_node, layer):
        """Recursively add *nodes* and their subtrees under the parent."""
        for node in nodes:
            dot_node = self._create_dot_node(layer, node['name'])
            graph.add_node(dot_node)
            graph.add_edge(pydot.Edge(parent_dot_node, dot_node))
            self._draw_nodes(node['lower_nodes'], graph, dot_node, layer+1)

    def create(self):
        """Build the graph, write it to a temp SVG file, and read the SVG
        text back into self.data."""
        graph = pydot.Dot(graph_type='graph')
        layer = 0
        self.dot_index = 0
        dot_node = self._create_dot_node(layer, self.tree_data['name'])
        graph.add_node(dot_node)
        self._draw_nodes(self.tree_data['lower_nodes'], graph, dot_node, layer+1)
        # NOTE(review): the temp file is never deleted (os.remove is
        # commented out below), so files accumulate in the temp dir.
        (_, filename) = tempfile.mkstemp()
        graph.write_svg(filename)
        print 'writing to: %s' % (filename)
        with open(filename) as fh:
            self.data = fh.read()
        # os.remove(filename)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,891
|
dougsc/gp
|
refs/heads/master
|
/viewer/app/views.py
|
from flask import render_template, jsonify
from app import app
from utils import stats
from utils.tree_render import TreeRender
def _tree_render(index):
    """Render the stats-recorded best tree at *index* to SVG and return it as JSON."""
    # Fallback body; only visible if the assignment below never happens.
    resp_data = {'index': index, 'tree': 'tree index %s not found' % (index)}
    tree_data = stats.get_data('Runner:best_tree')
    # NOTE(review): an out-of-range index raises IndexError here rather than
    # returning the not-found message above — confirm intended.
    tree_render = TreeRender(tree_data[index]['tree'])
    tree_render.create()
    resp_data['tree'] = tree_render.data
    return jsonify(**resp_data)
@app.route('/tree/<int:index>')
def tree_render_ix(index):
    """Render the best tree recorded at generation *index*."""
    return _tree_render(index)
@app.route('/tree/latest')
def tree_render_latest():
    """Render the most recently recorded best tree (index -1)."""
    return _tree_render(-1)
@app.route('/')
@app.route('/graph')
def graph():
    """Serve the line-graph dashboard page."""
    return render_template('graph.html',
      title='Home')
@app.route('/graph/line/<string:data_id>')
def graph_data(data_id):
    """Return the line-chart payload (titles, axis labels, data) for *data_id*.

    NOTE(review): an unknown data_id raises KeyError (HTTP 500) — confirm
    whether a 404 response would be preferred.
    """
    # Chart metadata keyed by URL slug; 'data_key' selects the stats series.
    data_id_map = {
      'lowest_error': {
        'data_key': 'Runner:lowest_error',
        'title': 'Lowest Error',
        'x_title': 'Generation',
        'y_title': 'Abs Error',
        'data_label': 'Abs Error'
      },
      'best_individual': {
        'data_key': 'Runner:best_individual',
        'title': 'Best Individual',
        'x_title': 'Generation',
        'y_title': 'Individual Index',
        'data_label': 'Best Individual'
      },
      'target_samples': {
        'data_key': 'Runner:target_samples',
        'title': 'Target Samples',
        'x_title': 'Index',
        'y_title': 'Value',
        'data_label': 'Target'
      },
      'actual_samples': {
        'data_key': 'Runner:actual_samples',
        'title': 'Actual Samples',
        'x_title': 'Index',
        'y_title': 'Value',
        'data_label': 'Actual'
      },
    }
    resp_data = data_id_map[data_id]
    resp_data['data'] = stats.get_data(resp_data['data_key'])
    return jsonify(**resp_data)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,892
|
dougsc/gp
|
refs/heads/master
|
/exp/test.py
|
# def get_terminal_set(self):
# def get_function_set(self):
# def initialize(self):
# def next(self):
# def function_lookup(self):
# def error(self, value):
from engine import *
from random import randint
from engine.function_set import FunctionSet
import engine.functions.signs as gp_f_signs
from engine.experiment import Experiment
class TestExp(Experiment):
    """Experiment targeting f(x) = x**3 + x + 1 over the integer domain [-5, 5]."""
    # Function set: the basic arithmetic operators (add/sub/mul/protected divide).
    function_set = FunctionSet()
    gp_f_signs.add_functions(function_set)
    # Terminal set: the variable x (resolved by name 'get_x') plus random
    # integer constants drawn from [0, 4].
    terminal_set = TerminalSet()
    terminal_set.add_terminal_function(name='x_var', func_ref='get_x', value_type=int.__name__)
    terminal_set.add_terminal_function_to_value(func_ref=randint, args=[0,4])
    def __init__(self):
        # Current domain position of the sweep.
        self.x = 0
    def get_x(self):
        # Referenced by name from the terminal set above.
        return self.x
    def initialize(self):
        # Reset the sweep to the left edge of the domain.
        self.x = -5
    def next(self):
        """Advance the sweep; returns False once past the right edge."""
        if self.x <= 5:
            self.x += 1
            return True
        return False
    def index(self):
        # Domain index for plotting: simply the current x.
        return self.x
    def norm_error(self, value):
        """Absolute error of *value* against the target at the current x."""
        return abs(self.error(value))
    def error(self, value):
        """Signed (target - estimate) error at the current x."""
        # print 'value: %f, error: %f' % (value, abs(((self.x * self.x) + self.x + 1) - value))
        return (((self.x * self.x * self.x) + self.x + 1) - value)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,893
|
dougsc/gp
|
refs/heads/master
|
/engine/function_set.py
|
class FunctionSet:
    """Registry of callable (function) node descriptors for GP trees."""

    NODE_TYPE = 'function'

    def __init__(self):
        # Ordered registry of descriptor dicts, in insertion order.
        self.function_set = []

    def add_function(self, name, func_ref, arity):
        """Register *func_ref* under *name*, taking *arity* arguments."""
        descriptor = {
            'node_type': self.NODE_TYPE,
            'name': name,
            'function': func_ref,
            'arity': arity,
        }
        self.function_set.append(descriptor)

    def get(self):
        """Return the list of registered function descriptors."""
        return self.function_set
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,894
|
dougsc/gp
|
refs/heads/master
|
/engine/utils/logger.py
|
import logging
import os
class GP_Logger:
    """Per-name file loggers: one gp-<name>.log file under LOG_DIR each."""
    # NOTE(review): hardcoded user-specific path — parameterize before running
    # on another machine.
    LOG_DIR = '/Users/dclark/code/logs'
    @classmethod
    def logger(cls, name):
        """Return (creating on first use) the DEBUG-level file logger for *name*."""
        logger = logging.getLogger(name)
        if len(logger.handlers) == 0:
            # initialize the logger — only runs the first time *name* is requested
            print 'creating new logger for: %s' % (name)
            logger.setLevel(logging.DEBUG)
            fileLogger = logging.FileHandler(os.path.join(cls.LOG_DIR, 'gp-%s.log' % (name)))
            formatter = logging.Formatter('%(asctime)s-%(name)s-%(levelname)s-%(message)s')
            fileLogger.setFormatter(formatter)
            logger.addHandler(fileLogger)
        return logger
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,895
|
dougsc/gp
|
refs/heads/master
|
/engine/functions/basic_ops.py
|
from numpy import average, median, product
def add_functions(function_set, arity):
    """Register the aggregate operators (max/min/sum/prod/ave/median) at *arity*."""
    aggregates = (
        ('max', f_max),
        ('min', f_min),
        ('sum', f_sum),
        ('prod', f_prod),
        ('ave', f_ave),
        ('median', f_median),
    )
    for label, fn in aggregates:
        function_set.add_function(name='%s<%d>' % (label, arity), func_ref=fn, arity=arity)
def f_max(*args):
    """Largest of the supplied values."""
    return max(args)

def f_min(*args):
    """Smallest of the supplied values."""
    return min(args)

def f_sum(*args):
    """Sum of the supplied values."""
    return sum(args)
def f_prod(*args):
    """Product of the supplied values.

    Fix: the old code called product(*args), which unpacked the values into
    separate positional arguments of numpy's product — the second value was
    silently treated as the `axis` argument. Pass the tuple as one sequence.
    """
    return product(args)
def f_ave(*args):
    """Arithmetic mean of the supplied values.

    Fix: average(*args) passed the second value as numpy's `axis` argument;
    the tuple must be passed as a single sequence.
    """
    return average(args)
def f_median(*args):
    """Median of the supplied values.

    Fix: `median` was never imported (every call raised NameError); it is now
    imported from numpy at the top of the file, and the value tuple is passed
    as a single sequence rather than unpacked.
    """
    return median(args)
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,896
|
dougsc/gp
|
refs/heads/master
|
/engine/functions/signs.py
|
def add_functions(function_set):
    """Register the four binary arithmetic operators with *function_set*."""
    operators = (
        ('add', f_math_add),
        ('subtract', f_math_sub),
        ('multiply', f_math_times),
        ('divide', f_math_divide),
    )
    for label, fn in operators:
        function_set.add_function(name=label, func_ref=fn, arity=2)
def f_math_add(a, b):
    """Binary addition node."""
    return a + b

def f_math_sub(a, b):
    """Binary subtraction node."""
    return a - b

def f_math_times(a, b):
    """Binary multiplication node."""
    return a * b
def f_math_divide(a, b):
    """Protected division node: returns 1 when the divisor is zero.

    Rewritten from the fragile `b == 0 and 1 or float(a)/b` idiom (which only
    works because 1 is truthy) to an explicit conditional expression.
    """
    return 1 if b == 0 else float(a) / b
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,897
|
dougsc/gp
|
refs/heads/master
|
/engine/terminals/__init__.py
|
# Module definition
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,898
|
dougsc/gp
|
refs/heads/master
|
/engine/__init__.py
|
# Module definition
# Expose the basic interfaces
from individual import Individual
from runner import Runner
from terminal_set import TerminalSet
from function_set import FunctionSet
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,899
|
dougsc/gp
|
refs/heads/master
|
/engine/tree.py
|
import random
from pprint import pformat
from copy import deepcopy
from utils.logger import GP_Logger
from terminal_set import TerminalSet
class Tree:
    """A randomly generated GP expression tree.

    Each tree node is a dict: {'node': descriptor, 'lower_nodes': [children],
    'depth': int}; descriptors come from the function/terminal sets.
    Python 2 code (has_key, print statements, list-returning map/filter).
    """
    @classmethod
    def log(cls):
        # Shared logger named after the class.
        return GP_Logger.logger(cls.__name__)
    def __init__(self):
        # Populated by create()/clone(); an un-built tree has tree == None.
        self.terminal_set=None
        self.function_set=None
        self.function_bias=None
        self.max_depth=None
        self.tree = None
    def clone(self, clone_tree):
        """Deep-copy clone_tree's structure and settings into this tree."""
        assert clone_tree.tree != None, 'trying to clone from an uninitialized tree'
        self.terminal_set = clone_tree.terminal_set
        self.function_set = clone_tree.function_set
        self.function_bias = clone_tree.function_bias
        self.max_depth = clone_tree.max_depth
        self.tree = deepcopy(clone_tree.tree)
    def mutate(self, clone_tree):
        """Clone clone_tree, then replace one random non-root node and regrow below it."""
        self.clone(clone_tree)
        mutation_node = random.choice(self.get_node_list())
        self.log().debug('mutating at node %s - current depth: %d' % (mutation_node['node']['name'], mutation_node['depth']))
        # _create_new_node resets 'lower_nodes', discarding the old subtree.
        self._create_new_node(mutation_node['depth'], mutation_node)
        self.log().debug('node mutated to %s' % (mutation_node['node']['name']))
        self._add_layer(mutation_node)
    def subtree_crossover(self, clone_tree, other_tree):
        """Clone clone_tree and graft a random subtree from other_tree into it."""
        self.clone(clone_tree)
        this_crossover_node = random.choice(self.get_node_list())
        other_crossover_node = random.choice(other_tree.get_node_list())
        self.log().debug('x-over node 1: %s (depth: %d), node 2: %s (depth: %d)' % (this_crossover_node['node']['name'],
                                                                                   this_crossover_node['depth'],
                                                                                   other_crossover_node['node']['name'],
                                                                                   other_crossover_node['depth']))
        this_crossover_node['node'] = deepcopy(other_crossover_node['node'])
        this_crossover_node['lower_nodes'] = deepcopy(other_crossover_node['lower_nodes'])
        # Keep this tree's depth numbering consistent below the graft point.
        self.recalculate_depth(this_crossover_node['lower_nodes'], this_crossover_node['depth'] + 1)
    def create(self, terminal_set=[], function_set=[], function_bias=1, max_depth=3):
        """Build a fresh random tree (the root is always a function node).

        NOTE(review): mutable default arguments are shared across calls —
        harmless while the sets are only read, but worth fixing.
        """
        self.terminal_set=terminal_set
        self.function_set=function_set
        self.function_bias=function_bias
        self.max_depth=max_depth
        self.tree = {}
        self._create_new_node(1, self.tree)
        self._add_layer(current_node=self.tree)
    def _create_new_node(self, depth, node):
        """Fill *node* in place with a random descriptor appropriate to *depth*."""
        node_set = []
        if depth == 1:
            # Root must be a function node.
            node_set = self.function_set
        elif depth >= self.max_depth:
            # Leaves at max depth must be terminals.
            node_set = self.terminal_set
        else:
            # Bias the draw toward functions by repeating the function set.
            node_set = self.function_set * self.function_bias + self.terminal_set
        chosen_node = random.choice(node_set)
        if not chosen_node.has_key('name'):
            # this needs converting to a named node
            value = chosen_node['function'](*chosen_node['args'])
            chosen_node = TerminalSet.terminal_value(value)
        node['node'] = chosen_node
        node['lower_nodes'] = []
        node['depth'] = depth
    def _add_layer(self, current_node):
        """Recursively create current_node's children (one per unit of arity)."""
        new_node_count = current_node['node'].has_key('arity') and current_node['node']['arity'] or 0
        self.log().debug('adding %d nodes below %s - current depth = %d' % (new_node_count, current_node['node']['name'], current_node['depth']))
        for i in range(new_node_count):
            new_node = {}
            self._create_new_node(current_node['depth'] + 1, new_node)
            current_node['lower_nodes'].append(new_node)
        map(lambda x:self._add_layer(x), current_node['lower_nodes'])
    def dump(self):
        # Debug pretty-print of the raw tree dict.
        print 'Tree: \n%s' % pformat(self.tree)
    def _dump_structure(self, from_nodes, to_nodes):
        # Copy the name/children skeleton (no functions or values).
        for from_node in from_nodes:
            new_node = {'name': from_node['node']['name'], 'lower_nodes': []}
            to_nodes.append(new_node)
            self._dump_structure(from_node['lower_nodes'], new_node['lower_nodes'])
    def dump_structure(self):
        """Return a names-only skeleton of the tree (consumed by the viewer)."""
        structure = {'name': self.tree['node']['name'], 'lower_nodes': []}
        self._dump_structure(self.tree['lower_nodes'], structure['lower_nodes'])
        return structure
    def execute_node(self, node, function_lookup, args=None):
        """Evaluate one descriptor: literal value, named function, or callable."""
        assert node.has_key('value') or node.has_key('function'), 'node does not have a function or value'
        value = None
        if node.has_key('value'):
            value = node['value']
        else:
            if args == None:
                args = node['args']
            if isinstance(node['function'], str):
                # Named functions are resolved through the experiment's lookup.
                value = function_lookup.get_func(node['function'])(*args)
            else:
                value = node['function'](*args)
        return value
    def get_lower_node_value(self, function_lookup, lower_node):
        """Recursively evaluate a subtree bottom-up and return its value."""
        if lower_node['node']['node_type'] == 'terminal':
            return self.execute_node(lower_node['node'], function_lookup)
        else:
            result_list = map(lambda x:self.get_lower_node_value(function_lookup, x), lower_node['lower_nodes'])
            return self.execute_node(lower_node['node'], function_lookup, result_list)
    def execute(self, function_lookup):
        """Evaluate the whole tree and return the root's value."""
        result_list = map(lambda x:self.get_lower_node_value(function_lookup, x), self.tree['lower_nodes'])
        return self.execute_node(self.tree['node'], function_lookup, result_list)
    def iterate_tree(self, nodes, callback):
        # Pre-order walk applying callback to every node dict.
        for node in nodes:
            callback(node)
            self.iterate_tree(node['lower_nodes'], callback)
    def recalculate_depth(self, nodes, depth):
        # Renumber 'depth' for a (grafted) subtree.
        for node in nodes:
            node['depth'] = depth
            self.recalculate_depth(node['lower_nodes'], depth+1)
    def _get_node_list(self, nodes, node_list):
        # Accumulate every node dict below *nodes* into node_list.
        for node in nodes:
            node_list.append(node)
            self._get_node_list(node['lower_nodes'], node_list)
    def get_node_list(self):
        """Flat list of every node except the root (walk starts below it)."""
        node_list = []
        self._get_node_list(self.tree['lower_nodes'], node_list)
        return node_list
    def _simplify(self, node, function_lookup):
        """Collapse a function node whose children are all literal values."""
        if len(node['lower_nodes']) == 0:
            return
        # NOTE(review): filter() returns a list here (Py2), so the comparison
        # below matches an int against a list and is always False — likely
        # intended len(filter(...)). As written, constant folding never fires.
        terminal_value_count = filter(lambda x:TerminalSet.is_terminal_value(x['node']), node['lower_nodes'])
        if node['node']['arity'] == terminal_value_count:
            # NOTE(review): execute_node is handed the wrapper dict here, not
            # node['node'] — it would trip its assert if this branch ever ran.
            value = self.execute_node(node, function_lookup, args=map(lambda x:x['node']['value'], node['lower_nodes']))
            self.log().debug('Replacing existing node: %s' % pformat(node['node']))
            node['lower_nodes'] = []
            node['node'] = TerminalSet.terminal_value(value)
            self.log().debug(' -- with node: %s' % pformat(node['node']))
            self.is_simplified = False
        else:
            map(lambda x:self._simplify(x, function_lookup), node['lower_nodes'])
    def simplify(self, function_lookup):
        """Repeatedly fold constant subtrees until a pass makes no change."""
        self.is_simplified = False
        simplify_loop_count = 1
        while not self.is_simplified:
            self.log().debug('Simplification %d' % (simplify_loop_count))
            self.is_simplified = True
            self._simplify(self.tree, function_lookup)
            simplify_loop_count += 1
|
{"/exp/line.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/functions/trig.py", "/engine/experiment.py"], "/exp/test.py": ["/engine/__init__.py", "/engine/function_set.py", "/engine/functions/signs.py", "/engine/experiment.py"]}
|
20,955
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/apps.py
|
from django.apps import AppConfig
class DjRiverAppConfig(AppConfig):
    """Django application configuration for dj_river_app."""
    name = 'dj_river_app'
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,956
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/migrations/0001_initial.py
|
# Generated by Django 2.2.13 on 2021-02-23 03:24
from django.db import migrations, models
import django.db.models.deletion
import river.models.fields.state
import uuid
class Migration(migrations.Migration):
    """Initial schema: creates Ticket with a django-river managed state field."""
    initial = True
    dependencies = [
        ('river', '0002_auto_20210222_1224'),
    ]
    operations = [
        migrations.CreateModel(
            name='Ticket',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('no', models.CharField(default=uuid.uuid4, editable=False, max_length=50, unique=True, verbose_name='Ticket Number')),
                ('subject', models.CharField(max_length=100, verbose_name='Subject')),
                ('description', models.TextField(blank=True, max_length=500, null=True, verbose_name='Description')),
                ('status', river.models.fields.state.StateField(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='river.State')),
            ],
        ),
    ]
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,957
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/migrations/0004_mymodel.py
|
# Generated by Django 2.2.13 on 2021-03-01 04:51
from django.db import migrations, models
import django.db.models.deletion
import river.models.fields.state
class Migration(migrations.Migration):
    """Adds the (later removed) MyModel with a river state field."""
    dependencies = [
        ('river', '0002_auto_20210222_1224'),
        ('dj_river_app', '0003_auto_20210223_1217'),
    ]
    operations = [
        migrations.CreateModel(
            name='MyModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('my_state_field', river.models.fields.state.StateField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='river.State')),
            ],
        ),
    ]
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,958
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/models.py
|
from django.db import models
# Create your models here.
import uuid
# Create your models here.
from river.models.fields.state import StateField
class Ticket(models.Model):
    """A fake-Jira ticket whose workflow status is driven by django-river."""
    # Auto-generated, immutable ticket identifier (uuid4 rendered as a string).
    no = models.CharField("Ticket Number", max_length=50, default=uuid.uuid4, null=False, blank=False, editable=False,
                          unique=True)
    subject = models.CharField("Subject", max_length=100, null=False, blank=False)
    description = models.TextField("Description", max_length=500, null=True, blank=True)
    # Workflow state managed by django-river; not editable via forms.
    status = StateField(editable=False)
    def natural_key(self):
        # NOTE(review): Django convention is for natural_key to return a
        # tuple, e.g. (self.no,) — confirm any serializers relying on this.
        return self.no
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,959
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def Sample(request):
    """Smoke-test view: returns a static banner string."""
    return HttpResponse(' Django River Example')
# django river
from django.urls import reverse
from django.shortcuts import get_object_or_404, redirect
from river.models import State
from dj_river_app.models import Ticket
def approve_ticket(request, ticket_id, next_state_id=None):
    """Approve *ticket_id*'s transition to *next_state_id* as request.user.

    Redirects back to the admin changelist on success; on failure returns
    the error text as the response body.
    """
    ticket = get_object_or_404(Ticket, pk=ticket_id)
    next_state = get_object_or_404(State, pk=next_state_id)
    try:
        ticket.river.status.approve(as_user=request.user, next_state=next_state)
        return redirect(reverse('admin:dj_river_app_ticket_changelist'))
    except Exception as e:
        # Fix: Exception.message does not exist in Python 3, so the old code
        # raised AttributeError instead of reporting the real error.
        # Broad catch kept: django-river raises several transition errors.
        return HttpResponse(str(e))
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,960
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/admin.py
|
from django.contrib import admin
import river_admin
# Register your models here.
from django.urls import reverse
from django.utils.safestring import mark_safe
from dj_river_app.models import Ticket
# here shows the river action functionality ie, what will happen after click on each button in the given actions
# here shows the river action functionality ie, what will happen after click on each button in the given actions
def create_river_button(obj, transition_approval):
    """Build an HTML button that links to *obj*'s transition-approval view."""
    approve_ticket_url = reverse('approve_ticket', kwargs={'ticket_id': obj.pk, 'next_state_id': transition_approval.transition.destination_state.pk})
    # NOTE(review): state names are interpolated into HTML without escaping —
    # acceptable only if state names are trusted admin-defined input; confirm.
    return f"""
        <input
            type="button"
            style="margin:2px;2px;2px;2px;"
            value="{transition_approval.transition.source_state} >> {transition_approval.transition.destination_state}"
            onclick="location.href=\'{approve_ticket_url}\'"
        />
    """
class TicketAdmin(admin.ModelAdmin):
    """Ticket changelist with one action button per available river transition."""
    list_display = ('no', 'subject', 'description', 'status', 'river_actions')
    def get_list_display(self, request):
        # NOTE(review): ModelAdmin instances are shared across requests —
        # stashing request.user on self is race-prone under concurrency.
        self.user = request.user
        return super(TicketAdmin, self).get_list_display(request)
    def river_actions(self, obj):
        """Render the approval buttons for every transition open to self.user."""
        content = ""
        for transition_approval in obj.river.status.get_available_approvals(as_user=self.user):# get_available_approvals :to fetch all available approvals waitiong for a specific user according to given source and destination states.
            content += create_river_button(obj, transition_approval)
        return mark_safe(content) #marksafe: mark a string as safe for output purpose.
# Expose Ticket in the standard Django admin with the river action buttons.
admin.site.register(Ticket, TicketAdmin)
class TicketRiverAdmin(river_admin.RiverAdmin):
    """river-admin dashboard configuration for Ticket workflows."""
    name = "Django River Fakejira"
    # icon = "mdi-ticket-account"
    list_displays = ['pk', 'no', 'subject', 'description', 'status']
# Register Ticket's workflow against its 'status' state field.
river_admin.site.register(Ticket, "status", TicketRiverAdmin)
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,961
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/migrations/0003_auto_20210223_1217.py
|
# Generated by Django 2.2.13 on 2021-02-23 06:47
from django.db import migrations
class Migration(migrations.Migration):
    """Renames Ticket.my_state_field to the final name 'status'."""
    dependencies = [
        ('dj_river_app', '0002_auto_20210223_1157'),
    ]
    operations = [
        migrations.RenameField(
            model_name='ticket',
            old_name='my_state_field',
            new_name='status',
        ),
    ]
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,962
|
Arsha-Meenu/2021_Project_django_river_fakejira
|
refs/heads/master
|
/dj_river_app/migrations/0005_delete_mymodel.py
|
# Generated by Django 2.2.13 on 2021-03-01 08:21
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the experimental MyModel added in migration 0004."""
    dependencies = [
        ('dj_river_app', '0004_mymodel'),
    ]
    operations = [
        migrations.DeleteModel(
            name='MyModel',
        ),
    ]
|
{"/dj_river_app/views.py": ["/dj_river_app/models.py"], "/dj_river_app/admin.py": ["/dj_river_app/models.py"]}
|
20,986
|
nhlinh99/SSD
|
refs/heads/master
|
/ssd/utils/misc.py
|
import errno
import os
from PIL import Image
def str2bool(s):
    """Interpret 'true' or '1' (case-insensitive) as True; anything else is False."""
    lowered = s.lower()
    return lowered == 'true' or lowered == '1'
def mkdir(path):
    """Create *path* (with parents) if missing; an existing directory is fine.

    Fix: the old EAFP version swallowed every EEXIST error, including the case
    where *path* exists but is a regular file. makedirs(exist_ok=True) keeps
    the "already a directory" case silent while still raising when the path
    is blocked by a non-directory. FileExistsError subclasses OSError, so
    callers catching OSError are unaffected.
    """
    os.makedirs(path, exist_ok=True)
def reorient_image(im):
    """Apply the transpose steps implied by the EXIF orientation tag (274).

    Images without usable EXIF data are returned unchanged.
    """
    try:
        orientation = im._getexif()[274]
        # Transpose recipe for each of the non-identity EXIF orientations.
        plan = {
            2: (Image.FLIP_LEFT_RIGHT,),
            3: (Image.ROTATE_180,),
            4: (Image.FLIP_TOP_BOTTOM,),
            5: (Image.ROTATE_90, Image.FLIP_TOP_BOTTOM),
            6: (Image.ROTATE_270,),
            7: (Image.ROTATE_270, Image.FLIP_TOP_BOTTOM),
            8: (Image.ROTATE_90,),
        }
        # Some EXIF readers hand the tag back as a string ('2'..'8').
        plan.update({str(k): ops for k, ops in list(plan.items())})
        for op in plan.get(orientation, ()):
            im = im.transpose(op)
        return im
    except (KeyError, AttributeError, TypeError, IndexError):
        return im
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,987
|
nhlinh99/SSD
|
refs/heads/master
|
/convert_pascalvoc_dataset/data_preprocess.py
|
from argparse import ArgumentParser
from tqdm import tqdm
import numpy as np
import random
import json
import math
import os
ALLOWED_EXTENSIONS = [".jpg", ".jpeg", ".png", ".gif"]
def parse_inputs():
    """Parse the CLI arguments: data directory, annotation output path, test ratio."""
    cli = ArgumentParser(description='Argument: python data_preprocess.py <data_direction> <output_annotation_path> <test_ratio>')
    cli.add_argument('data_dir', type=str,
                     help='Enter path to data direction.')
    cli.add_argument('output_annotation_path', type=str,
                     help='Enter the path of the output of annotation files.')
    cli.add_argument('test_ratio', default=0.1, type=float,
                     help='Test Ratio.')
    parsed = cli.parse_args()
    return (parsed.data_dir, parsed.output_annotation_path, parsed.test_ratio)
def distance_two_points(p1, p2):
    """Euclidean distance between two 2-D points."""
    dx_sq = math.pow(p1[0] - p2[0], 2)
    dy_sq = math.pow(p1[1] - p2[1], 2)
    return math.sqrt(dx_sq + dy_sq)
def get_center_point(points):
    """Centroid of *points* as a numpy array [x, y]."""
    count = len(points)
    cx = 0
    cy = 0
    for p in points:
        # Accumulate each point's per-coordinate share of the mean.
        cx += p[0] / count
        cy += p[1] / count
    return np.array([cx, cy])
def shrinking_points(points, change_pixel):
    """Move every point toward/away from the centroid by *change_pixel* pixels.

    A negative change_pixel shrinks the polygon; positive grows it.
    Returns plain [x, y] lists.
    """
    center = get_center_point(points)
    corner_distance = distance_two_points(points[0], center)
    scale = (change_pixel + corner_distance) / corner_distance
    return [((np.array(p) - center) * scale + center).tolist() for p in points]
def train_test_split(image_dir, test_ratio, val_ratio = 0.2):
    """Split the ID-card images into (train, val, test) lists of file paths.

    The shuffle is seeded (1234) so the split is reproducible, and the train
    count is rounded down to a multiple of the batch size (32).
    """
    types_id = ["cccd", "cmnd"]
    types_face = ["back", "top"]
    # One folder per (card type, card face) combination.
    folder_dirs = [os.path.join(image_dir + "/", card + "/", face)
                   for card in types_id for face in types_face]
    images = [os.path.join(folder + "/", fname)
              for folder in folder_dirs
              for tail_img in ALLOWED_EXTENSIONS
              for fname in os.listdir(folder) if tail_img in fname]
    random.seed(1234)
    random.shuffle(images)
    batch_size = 32
    train_num = int(len(images) * (1 - val_ratio - test_ratio) // batch_size * batch_size)
    val_num = int(len(images) * val_ratio)
    return (images[:train_num],
            images[train_num:train_num + val_num],
            images[train_num + val_num:])
def parse_annotation(data_dir, image_list, output_annotation):
    """Write one annotation line per image: path,label,x1,y1,x2,y2,...

    Corner labels come from the labelme JSON stored next to each image; every
    corner point becomes a small square box clamped to the image bounds.
    """
    json_file = []
    # Derive the labelme JSON filename from each image filename.
    for tail_img in ALLOWED_EXTENSIONS:
        json_file.extend([f.replace(tail_img, ".json") for f in image_list if tail_img in f])
    result_str = []
    print("Getting Annotations {}...".format(output_annotation))
    for f in tqdm(json_file):
        base_folder_path = os.path.dirname(f)
        # NOTE(review): fi is never closed — consider a with-statement.
        fi = open(os.path.join(data_dir, f), "r", encoding = "utf-8")
        data = json.load(fi)
        str_data = []
        str_data.append(os.path.join(base_folder_path + "/", data["imagePath"]))
        annotations = data["shapes"]
        width = data["imageWidth"]
        height = data["imageHeight"]
        points = []
        for i in range(len(annotations)):
            points.append(annotations[i]["points"][0])
        # Box half-size scales with the card size (corner-to-centre distance).
        center_point = get_center_point(points)
        thresh = distance_two_points(center_point, points[0]) / 8
        shrinking_thresh = thresh * 5 / 4
        # Pull the corner points slightly toward the centre before boxing.
        points = shrinking_points(points, -shrinking_thresh)
        for i in range(len(annotations)):
            label = annotations[i]["label"]
            if (label not in ["top_left", "top_right", "bottom_left", "bottom_right"]):
                continue
            point = points[i]
            # Clamp the corner box to the image bounds.
            x1 = int(max(point[0] - thresh, 0))
            x2 = int(min(point[0] + thresh, width - 1))
            y1 = int(max(point[1] - thresh, 0))
            y2 = int(min(point[1] + thresh, height - 1))
            str_data.extend([label, str(x1), str(y1), str(x2), str(y2)])
        str_data = ",".join(str_data)
        result_str.append(str_data)
    result_str = "\n".join(result_str)
    fo = open(output_annotation, "w", encoding = "utf-8")
    fo.write(result_str)
    fo.close()
if __name__ == "__main__":
    #!python convert_pascalvoc_dataset/data_preprocess.py "/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/dataset" "/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/Annotations" 0.1
    # OR test on local:
    # python convert_pascalvoc_dataset/data_preprocess.py "../dataset" "../Annotations" 0.1
    data_dir, output_annotation_path, test_ratio = parse_inputs()
    if not os.path.isdir(output_annotation_path):
        os.mkdir(output_annotation_path)
    # One annotation file per split.
    train_annotation_file = os.path.join(output_annotation_path, "Train_annotation.txt")
    val_annotation_file = os.path.join(output_annotation_path, "Val_annotation.txt")
    test_annotation_file = os.path.join(output_annotation_path, "Test_annotation.txt")
    train_img, val_img, test_img = train_test_split(data_dir, test_ratio)
    parse_annotation(data_dir, train_img, train_annotation_file)
    parse_annotation(data_dir, val_img, val_annotation_file)
    parse_annotation(data_dir, test_img, test_annotation_file)
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,988
|
nhlinh99/SSD
|
refs/heads/master
|
/convert_pascalvoc_dataset/pascal_voc/pascal_voc.py
|
#! -*- coding: utf-8 -*-
import os
from PIL import Image
from utils.file_utils import create_if_not_exists, copy_file
from utils.xml_utils import create_xml_file
from tqdm import tqdm
import json
def reorient_image(im):
    """Apply the transpose steps implied by the EXIF orientation tag (274).

    Images without usable EXIF data are returned unchanged.
    NOTE(review): duplicated from ssd/utils/misc.py — consider sharing.
    """
    try:
        orientation = im._getexif()[274]
        # Transpose recipe for each of the non-identity EXIF orientations.
        plan = {
            2: (Image.FLIP_LEFT_RIGHT,),
            3: (Image.ROTATE_180,),
            4: (Image.FLIP_TOP_BOTTOM,),
            5: (Image.ROTATE_90, Image.FLIP_TOP_BOTTOM),
            6: (Image.ROTATE_270,),
            7: (Image.ROTATE_270, Image.FLIP_TOP_BOTTOM),
            8: (Image.ROTATE_90,),
        }
        # Some EXIF readers hand the tag back as a string ('2'..'8').
        plan.update({str(k): ops for k, ops in list(plan.items())})
        for op in plan.get(orientation, ()):
            im = im.transpose(op)
        return im
    except (KeyError, AttributeError, TypeError, IndexError):
        return im
class PASCALVOC07(object):
    """Converts comma-separated annotation files into the PASCAL VOC 2007 layout."""
    def __init__(self, trainval_anno, val_anno, test_anno, out_dir, attrs):
        # Paths to the three CSV-style annotation files (train/val/test).
        self._trainval_anno = trainval_anno
        self._val_anno = val_anno
        self._test_anno = test_anno
        self._out_dir = out_dir
        # Fixed extra attributes copied into every generated XML annotation.
        self._attrs = attrs
        # Filled in by _build_voc_dir().
        self._jpegimages_dir = None
        self._imagesets_dir = None
        self._annotations_dir = None
        self._img_idx = 0
    def _build_voc_dir(self):
        """Create the standard VOC directory skeleton under out_dir."""
        self._out_dir = self._out_dir  # NOTE(review): no-op assignment
        create_if_not_exists(os.path.join(self._out_dir, 'Annotations'))
        create_if_not_exists(os.path.join(self._out_dir, 'ImageSets'))
        create_if_not_exists(os.path.join(self._out_dir, 'ImageSets', 'Layout'))
        create_if_not_exists(os.path.join(self._out_dir, 'ImageSets', 'Main'))
        create_if_not_exists(os.path.join(self._out_dir, 'ImageSets', 'Segmentation'))
        create_if_not_exists(os.path.join(self._out_dir, 'JPEGImages'))
        create_if_not_exists(os.path.join(self._out_dir, 'SegmentationClass'))
        create_if_not_exists(os.path.join(self._out_dir, 'SegmentationObject'))
        self._annotations_dir = os.path.join(self._out_dir, 'Annotations')
        self._jpegimages_dir = os.path.join(self._out_dir, 'JPEGImages')
        self._imagesets_dir = os.path.join(self._out_dir, 'ImageSets', 'Main')
    def _create_annotation(self, image_idx, boxes):
        """Write Annotations/<idx>.xml for the already-copied image and its boxes."""
        anno_file = os.path.join(self._annotations_dir, "{:06d}.xml".format(image_idx))
        attrs = dict()
        attrs['image_name'] = "{:06d}.jpg".format(image_idx)
        attrs['boxes'] = boxes
        # Re-open the copied image to record its (EXIF-corrected) dimensions.
        img = Image.open(os.path.join(self._jpegimages_dir, "{:06d}.jpg".format(image_idx)))
        img = reorient_image(img)
        width, height = img.size
        attrs['width'] = str(width)
        attrs['height'] = str(height)
        for k, v in self._attrs.items():
            attrs[k] = v
        create_xml_file(anno_file, attrs)
    def _build_subset(self, start_idx, phase, anno_file, verbose=True, delimiter=' '):
        """Process one split: copy/rename images, write the imageset list and XMLs.

        Returns the number of images processed. NOTE(review): `verbose` is unused.
        """
        fout = open(os.path.join(self._imagesets_dir, '{}.txt'.format(phase)), 'w')
        # dictionary_image_id = {}
        n = 0
        with open(anno_file, 'r', encoding = "utf-8") as anno_f:
            for line in tqdm(anno_f):
                line_split = line.strip().split(delimiter)
                # image saved path
                image_path = line_split[0]
                # a ground truth with bounding box
                # Remaining fields come in groups of five: label,x1,y1,x2,y2.
                boxes = []
                for i in range(int((len(line_split) - 1) / 5)):
                    category = line_split[1 + i * 5 + 0]
                    x1 = line_split[1 + i * 5 + 1]
                    y1 = line_split[1 + i * 5 + 2]
                    x2 = line_split[1 + i * 5 + 3]
                    y2 = line_split[1 + i * 5 + 4]
                    boxes.append((category, x1, y1, x2, y2))
                image_idx = start_idx + n
                n += 1
                # copy and rename image by index number
                copy_file(image_path, self._jpegimages_dir, '{:06}.jpg'.format(image_idx))
                # dictionary_image_id[image_path.split("/")[-1]] = '{:06}.jpg'.format(image_idx)
                # write image idx to imagesets file
                fout.write('{:06}'.format(image_idx) + '\n')
                # create annotation file
                self._create_annotation(image_idx, boxes)
        fout.close()
        if (phase == "test"):
            # NOTE(review): test_full.txt lists indices 1..n regardless of
            # start_idx, so it does not match the copied test images — confirm.
            fout = open(os.path.join(self._imagesets_dir, 'test_full.txt'), 'w')
            for image_idx in range(1, n + 1):
                fout.write('{:06}'.format(image_idx) + '\n')
            fout.close()
        # with open("../data/dictionary_image.json", "a") as outfile:
        #     json.dump(dictionary_image_id, outfile)
        #     outfile.close()
        return n
    def build(self, start_idx=1, verbose=True):
        """Build the full VOC tree: train, then val, then test, contiguous indices."""
        self._build_voc_dir()
        n_train = self._build_subset(start_idx, "train", self._trainval_anno, verbose, delimiter = ",")
        n_val = self._build_subset(n_train + start_idx, "val", self._val_anno, verbose, delimiter = ",")
        self._build_subset(n_train + n_val + start_idx, "test", self._test_anno, verbose, delimiter = ",")
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,989
|
nhlinh99/SSD
|
refs/heads/master
|
/convert_pascalvoc_dataset/transform_images.py
|
# import cv2
# import os
# from tqdm import tqdm
# import json
# import numpy as np
# from argparse import ArgumentParser
# import math
# import random
# ALLOWED_EXTENSIONS = [".jpg", ".jpeg", ".png", ".gif"]
# pixel_border = 40
# def parse_inputs():
# """ Parser function to take care of the inputs """
# parser = ArgumentParser(description='Argument: python transform_images.py <data_direction> <output_dir>')
# parser.add_argument('data_dir', type=str, default="../cmnd_back",
# help='Enter path to data direction.')
# parser.add_argument('output_dir', type=str, default="../cmnd_back_transform",
# help='Enter the path of the output of transformation.')
# args = parser.parse_args()
# return (args.data_dir, args.output_dir)
# def distance_two_points(p1, p2):
# return math.sqrt(math.pow(p1[0] - p2[0], 2) + math.pow(p1[1] - p2[1], 2))
# def get_center_point(points):
# center_point = [0, 0]
# for point in points:
# center_point[0] += point[0] / len(points)
# center_point[1] += point[1] / len(points)
# return np.array(center_point)
# def adjust_gamma(image, gamma=1.0):
# invGamma = 1.0 / gamma
# table = np.array([((i / 255.0) ** invGamma) * 255
# for i in np.arange(0, 256)]).astype("uint8")
# return cv2.LUT(image, table)
# def preprocess_image(image):
# dst = cv2.detailEnhance(image, sigma_s=10, sigma_r=0.15)
# dst = cv2.copyMakeBorder(dst, pixel_border, pixel_border, pixel_border, pixel_border, cv2.BORDER_CONSTANT,value=(255,255,255))
# return dst
# def rotate_box_in_image(corners, angle, width, height, nW, nH):
# center_image = [width//2, height//2]
# dW = nW - width
# dH = nH - height
# rad = angle * math.pi / 180
# result = []
# for corner in corners:
# x_new = center_image[0] + (corner[0] - center_image[0])*math.cos(rad) + (corner[1] - center_image[1])*math.sin(rad) + dW / 2
# y_new = center_image[1] - (corner[0] - center_image[0])*math.sin(rad) + (corner[1] - center_image[1])*math.cos(rad) + dH / 2
# result.append([x_new, y_new])
# return result
# def rotate_image(image, angle, gamma):
# height, width, _ = image.shape
# image = adjust_gamma(image, gamma)
# M = cv2.getRotationMatrix2D((width//2, height//2), angle, 1.0)
# cos = np.abs(M[0, 0])
# sin = np.abs(M[0, 1])
# nW = int((height * sin) + (width * cos))
# nH = int((height * cos) + (width * sin))
# M[0, 2] += (nW / 2) - width//2
# M[1, 2] += (nH / 2) - height//2
# new_img = cv2.warpAffine(image, M, (nW, nH), borderValue=(255,255,255))
# return new_img
# def augmentation(image, points, labels):
# height, width, _ = image.shape
# angles = [random.uniform(-180, 180) for i in range(5)]
# gammas = [random.uniform(0.5, 1.7) for i in range(5)]
# res_images = [image]
# res_points = [points]
# res_labels = [labels]
# for i in range(len(angles)):
# new_image = rotate_image(image, angles[i], gammas[i])
# nH, nW, _ = new_image.shape
# new_points = rotate_box_in_image(points, angles[i], width, height, nW, nH)
# res_images.append(new_image)
# res_points.append(new_points)
# res_labels.append(labels)
# return res_images, res_points, res_labels
# def transform_images(input_dir, output_dir, augmentation_check = True):
# if (not os.path.isdir(output_dir)):
# os.mkdir(output_dir)
# img_list = []
# for extension in ALLOWED_EXTENSIONS:
# img_list.extend([f for f in os.listdir(input_dir) if extension in f])
# for img_name in tqdm(img_list):
# img = cv2.imdecode(np.fromfile(os.path.join(input_dir, img_name), dtype=np.uint8), cv2.IMREAD_COLOR)
# dst = preprocess_image(img)
# for extension in ALLOWED_EXTENSIONS:
# if (extension in img_name):
# fi = open(os.path.join(input_dir, img_name).replace(extension, ".json"), "r", encoding = "utf-8")
# data = json.load(fi)
# annotations = data["shapes"]
# points = []
# labels = []
# for i in range(len(annotations)):
# point = annotations[i]["points"][0]
# point[0] += pixel_border
# point[1] += pixel_border
# points.append(point)
# labels.append(annotations[i]["label"])
# if (augmentation_check):
# images, new_set_points, set_labels = augmentation(dst, points, labels)
# for i in range(len(images)):
# new_points = new_set_points[i]
# new_labels = set_labels[i]
# img = images[i]
# nH, nW, _ = img.shape
# new_img_name = "{}_".format(i + 1) + img_name
# for k in range(len(data["shapes"])):
# label_name = data["shapes"][k]["label"]
# data["shapes"][k]["points"][0] = new_points[new_labels.index(label_name)]
# data["imageHeight"] = nH
# data["imageWidth"] = nW
# data["imagePath"] = new_img_name
# for extension in ALLOWED_EXTENSIONS:
# if (extension in img_name):
# fo = open(os.path.join(output_dir, new_img_name).replace(extension, ".json"), "w", encoding = "utf-8")
# json.dump(data, fo, indent = 4)
# fo.close()
# cv2.imwrite(os.path.join(output_dir, new_img_name), img)
# if __name__ == "__main__":
# input_dir, output_dir = parse_inputs()
# transform_images(input_dir, output_dir, augmentation_check = True)
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,990
|
nhlinh99/SSD
|
refs/heads/master
|
/demo.py
|
from vizer.draw import draw_boxes
from PIL import Image
import numpy as np
import collections
import argparse
import torch
import glob
import time
import cv2
import os
from ssd.config import cfg
from ssd.data.datasets import COCODataset, VOCDataset, MyDataset
from ssd.data.transforms import build_transforms
from ssd.modeling.detector import build_detection_model
from ssd.utils import mkdir
from ssd.utils.checkpoint import CheckPointer
def distance_two_points(point_1, point_2):
    """Euclidean distance between two 2-D points given as (x, y) pairs."""
    dx = point_1[0] - point_2[0]
    dy = point_1[1] - point_2[1]
    return np.sqrt(dx ** 2 + dy ** 2)
def get_center_bbox(box):
    """Center point of an (x1, y1, x2, y2) box as a numpy [cx, cy] array."""
    return np.array([(box[0] + box[2]) / 2, (box[1] + box[3]) / 2])
def check_point(point, image):
    """Clamp a 2-D point into the image, mutating and returning `point`.

    Negative coordinates snap to 0; coordinates strictly beyond the image
    size snap to size-1 (a coordinate exactly equal to the size is kept,
    matching the original asymmetric behavior).
    """
    h, w = image.shape[0], image.shape[1]
    for axis, limit in ((0, w), (1, h)):
        if point[axis] < 0:
            point[axis] = 0
        elif point[axis] > limit:
            point[axis] = limit - 1
    return point
def perspective_transform(image, source_points):
    """Warp the quadrilateral `source_points` (float32, TL-TR-BR-BL order)
    of `image` onto a fixed 500x300 canvas and return the warped crop."""
    dest_points = np.float32([[0, 0], [500, 0], [500, 300], [0, 300]])
    M = cv2.getPerspectiveTransform(source_points, dest_points)
    dst = cv2.warpPerspective(image, M, (500, 300))
    # NOTE(review): cvtColor returns a new array, so this call is a no-op —
    # if a BGR->RGB conversion was intended it should be `dst = cv2.cvtColor(...)`.
    # Fixing it would change the color channels of every saved crop; confirm intent.
    cv2.cvtColor(dst, cv2.COLOR_BGR2RGB)
    return dst
def align_image(image, top_left, top_right, bottom_right, bottom_left, expand_alignment = False):
    """Perspective-align a card given its four corner boxes.

    Each corner argument is an (x1, y1, x2, y2) box whose center is used as
    the corner point.  With expand_alignment=True the quad is inflated away
    from its centroid by ~1/4.5 of the corner-to-centroid distance, so the
    crop keeps a margin around the card.  Returns the 500x300 warped crop.
    """
    top_left_point = get_center_bbox(top_left)
    top_right_point = get_center_bbox(top_right)
    bottom_right_point = get_center_bbox(bottom_right)
    bottom_left_point = get_center_bbox(bottom_left)
    if (expand_alignment):
        # Centroid of the four corner points.
        x_val = (top_left_point[0] + top_right_point[0] + bottom_right_point[0] + bottom_left_point[0]) / 4
        y_val = (top_left_point[1] + top_right_point[1] + bottom_right_point[1] + bottom_left_point[1]) / 4
        center_point = np.array([x_val, y_val])
        # Inflation ratio is derived from the top-left corner's distance only;
        # the same ratio is applied radially to all four corners.
        distance_from_corner_to_center = distance_two_points(top_left_point, center_point)
        increase_pixel = distance_from_corner_to_center / 4.5
        increase_ratio = (increase_pixel + distance_from_corner_to_center) / distance_from_corner_to_center
        top_left_point = (top_left_point - center_point) * increase_ratio + center_point
        top_right_point = (top_right_point - center_point) * increase_ratio + center_point
        bottom_right_point = (bottom_right_point - center_point) * increase_ratio + center_point
        bottom_left_point = (bottom_left_point - center_point) * increase_ratio + center_point
    # Clamp points back inside the image (inflation can overshoot the border).
    top_left_point = check_point(top_left_point, image)
    top_right_point = check_point(top_right_point, image)
    bottom_right_point = check_point(bottom_right_point, image)
    bottom_left_point = check_point(bottom_left_point, image)
    source_points = np.float32(
        [top_left_point, top_right_point, bottom_right_point, bottom_left_point]
    )
    crop = perspective_transform(image, source_points)
    return crop
def image_processing(image):
    """Convert a BGR OpenCV image to an RGB numpy array (via a PIL round-trip)."""
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    pil_image = Image.fromarray(rgb)
    return np.asarray(pil_image)
def process_duplicate_labels(labels, scores, boxes, check_9_labels):
    """De-duplicate corner detections.

    With check_9_labels=True, labels 1-4 belong to the card's top face and
    5-8 to its back face: whichever face has fewer detections (or, on a tie,
    the lower total confidence) is discarded entirely.  Then, for any label
    detected more than once, only the highest-scoring box is kept.

    Returns the filtered (labels, scores, boxes) arrays.
    """
    # Delete duplicate 2 sides of id card
    if (check_9_labels):
        group_items = np.array([(i - 1)//4 for i in labels]) # There are 8 labels including (TL, TR, BR, BL) for each side of id card
        list_indices_top = np.where(group_items==0)
        list_indices_back = np.where(group_items==1)
        num_top = np.count_nonzero(group_items==0)
        num_back = np.count_nonzero(group_items==1)
        if (num_top > num_back):
            list_del_indices = list_indices_back
        elif (num_top < num_back):
            list_del_indices = list_indices_top
        else:
            # Tie on count: keep the face with the higher total confidence.
            value_top = np.take(scores, list_indices_top)
            # BUG FIX: was np.take(scores, list_indices_top) — compared the top
            # face's scores against themselves, so the back face always won ties.
            value_back = np.take(scores, list_indices_back)
            if (np.sum(value_top) > np.sum(value_back)):
                list_del_indices = list_indices_back
            else:
                list_del_indices = list_indices_top
        labels = np.delete(labels, list_del_indices)
        scores = np.delete(scores, list_del_indices)
        boxes = np.delete(boxes, list_del_indices, 0)
    # Delete duplicate of labels for one side: keep the highest-scoring box.
    list_duplicate = [item for item, count in collections.Counter(labels).items() if count > 1]
    for dup in list_duplicate:
        list_indices = [i for (i, item) in enumerate(labels) if item == dup]
        max_conf_indice = list_indices[0]
        for indice in list_indices:
            if scores[indice] > scores[max_conf_indice]:
                max_conf_indice = indice
        list_indices.remove(max_conf_indice)
        labels = np.delete(labels, list_indices)
        scores = np.delete(scores, list_indices)
        boxes = np.delete(boxes, list_indices, 0)
    return labels, scores, boxes
@torch.no_grad()
def run_demo(cfg, ckpt, score_threshold, images_dir, output_dir, dataset_type, check_9_labels=False):
    """Detect id-card corners over an image tree and save aligned crops.

    Expects images under <images_dir>/{cccd,cmnd}/{back,top}/*.jpg.  For each
    image: run the SSD model, keep detections above `score_threshold`,
    de-duplicate corner labels, draw the boxes, and perspective-warp the card
    into <output_dir>/crop/...; annotated images go to <output_dir>/result/....
    A missing fourth corner is extrapolated by mirroring across the diagonal
    midpoint; images with fewer than 3 corners are skipped and reported.
    """
    # Pick the class-name list matching the dataset flavor.
    if dataset_type == "voc":
        class_names = VOCDataset.class_names
    elif dataset_type == 'coco':
        class_names = COCODataset.class_names
    elif dataset_type == "custom":
        if (check_9_labels):
            class_names = MyDataset.class_names_9_labels
        else:
            class_names = MyDataset.class_names_5_labels
    else:
        raise NotImplementedError('Not implemented now.')
    device = torch.device(cfg.MODEL.DEVICE)
    model = build_detection_model(cfg)
    model = model.to(device)
    checkpointer = CheckPointer(model, save_dir=cfg.OUTPUT_DIR)
    # Fall back to the latest checkpoint when no explicit weight file is given.
    checkpointer.load(ckpt, use_latest=ckpt is None)
    weight_file = ckpt if ckpt else checkpointer.get_checkpoint_file()
    print('Loaded weights from {}'.format(weight_file))
    types_id = ["cccd", "cmnd"]
    types_face = ["back", "top"]
    folder_dirs = []
    for id in types_id:
        for face in types_face:
            folder_dirs.append(os.path.join(id + "/", face))
    image_paths = []
    for folder in folder_dirs:
        image_paths.extend(glob.glob(os.path.join(images_dir + "/", folder + "/", '*.jpg')))
        # Pre-create the mirrored output folders for results and crops.
        result_output_dir = os.path.join(output_dir, "result/", folder)
        mkdir(result_output_dir)
        output_dir_crop = os.path.join(output_dir, 'crop/', folder)
        mkdir(output_dir_crop)
    cpu_device = torch.device("cpu")
    transforms = build_transforms(cfg, is_train=False)
    model.eval()
    # Counters for the end-of-run summary.
    count_true = 0
    count_error_1 = 0
    count_error_more_2 = 0
    error_images = []
    images_missing_1_corner = []
    for i, image_path in enumerate(image_paths):
        start = time.time()
        image_name = os.path.basename(image_path)
        # imdecode(fromfile(...)) handles non-ASCII paths that cv2.imread cannot.
        image = cv2.imdecode(np.fromfile(image_path, dtype=np.uint8), cv2.IMREAD_COLOR)
        # image_show = image.copy()
        # cv2.cvtColor(image_show, cv2.COLOR_BGR2RGB)
        # width = image.shape[1]
        # height = image.shape[0]
        # ratio_resize = 1
        # if (width * height > 6 * 10**6):
        #     ratio_resize = 4
        # elif (width * height > 8 * 10**5):
        #     ratio_resize = 1.5
        # image = cv2.resize(image, (int(width / ratio_resize), int(height / ratio_resize)))
        preprocessed_image = image_processing(image)
        height, width = preprocessed_image.shape[:2]
        images = transforms(preprocessed_image)[0].unsqueeze(0)
        load_time = time.time() - start
        start = time.time()
        result = model(images.to(device))[0]
        inference_time = time.time() - start
        # Scale detections back to the original image size.
        result = result.resize((width, height)).to(cpu_device).numpy()
        boxes, labels, scores = result['boxes'], result['labels'], result['scores']
        indices = scores > score_threshold
        boxes = boxes[indices]
        labels = labels[indices]
        scores = scores[indices]
        meters = ' | '.join(
            [
                'objects {:02d}'.format(len(boxes)),
                'load {:03d}ms'.format(round(load_time * 1000)),
                'inference {:03d}ms'.format(round(inference_time * 1000)),
                'FPS {}'.format(round(1.0 / inference_time))
            ]
        )
        print('({:04d}/{:04d}) {}: {}'.format(i + 1, len(image_paths), image_name, meters))
        labels, scores, boxes = process_duplicate_labels(labels, scores, boxes, check_9_labels)
        # for i in range(len(boxes)):
        #     for k in range(len(boxes[i])):
        #         boxes[i][k] -= pixel_border
        #         boxes[i][k] *= ratio_resize
        drawn_bounding_box_image = draw_boxes(image, boxes, labels, scores, class_names).astype(np.uint8)
        # Crop image: sort boxes by label id so boxes[0..3] = TL, TR, BR, BL.
        pair = zip(labels, boxes)
        sort_pair = sorted(pair)
        boxes = [element for _, element in sort_pair]
        labels = [element for element, _ in sort_pair]
        labels_name = [class_names[i] for i in labels]
        if len(boxes) == 4:
            count_true += 1
            crop = align_image(image, boxes[0], boxes[1], boxes[2], boxes[3], True)
        elif len(boxes) == 3:
            # Find fourth missed corner by mirroring the opposite corner
            # across the midpoint of the remaining diagonal.
            thresh = 0
            images_missing_1_corner.append(os.path.join(os.path.basename(os.path.dirname(image_path)), image_name))
            count_error_1 += 1
            if "top_left" not in ",".join(labels_name):
                midpoint = np.add(get_center_bbox(boxes[0]), get_center_bbox(boxes[2])) / 2
                y = int(2 * midpoint[1] - get_center_bbox(boxes[1])[1] + thresh)
                x = int(2 * midpoint[0] - get_center_bbox(boxes[1])[0] + thresh)
                TL = np.array([x, y, x, y])
                crop = align_image(image, TL, boxes[0], boxes[1], boxes[2], True)
            elif "top_right" not in ",".join(labels_name):
                midpoint = np.add(get_center_bbox(boxes[0]), get_center_bbox(boxes[1])) / 2
                y = int(2 * midpoint[1] - get_center_bbox(boxes[2])[1] + thresh)
                x = int(2 * midpoint[0] - get_center_bbox(boxes[2])[0] + thresh)
                TR = np.array([x, y, x, y])
                crop = align_image(image, boxes[0], TR, boxes[1], boxes[2], True)
            elif "bottom_right" not in ",".join(labels_name):
                midpoint = np.add(get_center_bbox(boxes[2]), get_center_bbox(boxes[1])) / 2
                y = int(2 * midpoint[1] - get_center_bbox(boxes[0])[1] + thresh)
                x = int(2 * midpoint[0] - get_center_bbox(boxes[0])[0] + thresh)
                BR = np.array([x, y, x, y])
                crop = align_image(image, boxes[0], boxes[1], BR, boxes[2], True)
            elif "bottom_left" not in ",".join(labels_name):
                midpoint = np.add(get_center_bbox(boxes[0]), get_center_bbox(boxes[2])) / 2
                y = int(2 * midpoint[1] - get_center_bbox(boxes[1])[1] + thresh)
                x = int(2 * midpoint[0] - get_center_bbox(boxes[1])[0] + thresh)
                BL = np.array([x, y, x, y])
                crop = align_image(image, boxes[0], boxes[1], boxes[2], BL, True)
        else:
            # Too few corners to reconstruct the card — skip this image.
            count_error_more_2 += 1
            error_images.append(os.path.join(os.path.basename(os.path.dirname(image_path)), image_name))
            print("Please take a photo again, number of detected corners is:", len(boxes))
            continue
        face_type = os.path.basename(os.path.dirname(image_path))
        id_type = os.path.basename(os.path.dirname(os.path.dirname(image_path)))
        cv2.imwrite(os.path.join(output_dir, "crop", face_type, id_type, image_name), crop)
        cv2.imwrite(os.path.join(output_dir, "result", face_type, id_type, image_name), drawn_bounding_box_image)
    print("Number of true images: {}".format(count_true))
    print("Number of 3 corner images: {}".format(count_error_1))
    print("Number of 2 corner images: {}".format(count_error_more_2))
    print("Image have 3 corners: {}".format(images_missing_1_corner))
    print("Error Images: {}".format(error_images))
def main():
    """CLI entry point: parse arguments, build the frozen config, run the demo."""
    parser = argparse.ArgumentParser(description="SSD Demo.")
    parser.add_argument(
        "--config-file",
        default="",
        metavar="FILE",
        help="path to config file",
        type=str,
    )
    parser.add_argument("--ckpt", type=str, default=None, help="Trained weights.")
    parser.add_argument("--score_threshold", type=float, default=0.7)
    parser.add_argument("--images_dir", default='demo', type=str, help='Specify a image dir to do prediction.')
    parser.add_argument("--output_dir", default='demo/result/', type=str, help='Specify a image dir to save predicted images.')
    parser.add_argument("--dataset_type", default="custom", type=str, help='Specify dataset type. Currently support voc and coco.')
    parser.add_argument("--check_9_labels", default=False, action="store_true", help='Allow the dataset of 9 labels (4 corners of 2 face including top and back of id card)')
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()
    # Config precedence: file values first, then command-line overrides, then freeze.
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    print("Loaded configuration file {}".format(args.config_file))
    with open(args.config_file, "r") as cf:
        config_str = "\n" + cf.read()
    print(config_str)
    print("Running with config:\n{}".format(cfg))
    run_demo(cfg=cfg,
             ckpt=args.ckpt,
             score_threshold=args.score_threshold,
             images_dir=args.images_dir,
             output_dir=args.output_dir,
             dataset_type=args.dataset_type,
             check_9_labels=args.check_9_labels)
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,991
|
nhlinh99/SSD
|
refs/heads/master
|
/ssd/data/datasets/my_dataset.py
|
import os
import torch.utils.data
import numpy as np
import xml.etree.ElementTree as ET
from PIL import Image
from ssd.utils.misc import reorient_image
from ssd.structures.container import Container
class MyDataset(torch.utils.data.Dataset):
    """PASCAL-VOC-style dataset of id-card corner annotations.

    Supports either 5 class names (background + 4 corners of one card face)
    or 9 (background + 4 corners for each of the two faces).
    """
    class_names_5_labels = ('__background__', 'top_left', 'top_right', 'bottom_right', 'bottom_left')
    class_names_9_labels = ('__background__', 'top_left_top', 'top_right_top', 'bottom_right_top', 'bottom_left_top',
                            'top_left_back', 'top_right_back', 'bottom_right_back', 'bottom_left_back')

    def __init__(self, data_dir, split, transform=None, target_transform=None, keep_difficult=True, check_9_labels = False):
        """Dataset for VOC data.
        Args:
            data_dir: the root of the dataset, the directory contains the following sub-directories:
                Annotations, ImageSets, JPEGImages, SegmentationClass, SegmentationObject.
            split: name of the ImageSets/Main/<split>.txt file listing image ids.
            transform: optional joint transform applied to (image, boxes, labels).
            target_transform: optional transform applied to (boxes, labels).
            keep_difficult: when False, objects flagged `difficult` are dropped.
            check_9_labels: select the 9-label class list instead of the 5-label one.
        """
        if (check_9_labels):
            self.class_names = self.class_names_9_labels
        else:
            self.class_names = self.class_names_5_labels
        self.data_dir = data_dir
        self.split = split
        self.transform = transform
        self.target_transform = target_transform
        image_sets_file = os.path.join(self.data_dir, "ImageSets", "Main", "%s.txt" % self.split)
        self.ids = MyDataset._read_image_ids(image_sets_file)
        self.keep_difficult = keep_difficult
        # Map class name -> integer label (0 is the background class).
        self.class_dict = {class_name: i for i, class_name in enumerate(self.class_names)}

    def __getitem__(self, index):
        """Return (transformed image, Container(boxes, labels), index)."""
        image_id = self.ids[index]
        boxes, labels, is_difficult = self._get_annotation(image_id)
        if not self.keep_difficult:
            boxes = boxes[is_difficult == 0]
            labels = labels[is_difficult == 0]
        image = self._read_image(image_id)
        if self.transform:
            image, boxes, labels = self.transform(image, boxes, labels)
        if self.target_transform:
            boxes, labels = self.target_transform(boxes, labels)
        targets = Container(
            boxes=boxes,
            labels=labels,
        )
        return image, targets, index

    def get_annotation(self, index):
        """Return (image_id, (boxes, labels, is_difficult)) for a dataset index."""
        image_id = self.ids[index]
        return image_id, self._get_annotation(image_id)

    def __len__(self):
        return len(self.ids)

    @staticmethod
    def _read_image_ids(image_sets_file):
        """Read one image id per line from an ImageSets list file."""
        ids = []
        with open(image_sets_file) as f:
            for line in f:
                ids.append(line.rstrip())
        return ids

    def _get_annotation(self, image_id):
        """Parse Annotations/<image_id>.xml into numpy (boxes, labels, is_difficult)."""
        annotation_file = os.path.join(self.data_dir, "Annotations", "%s.xml" % image_id)
        objects = ET.parse(annotation_file).findall("object")
        boxes = []
        labels = []
        is_difficult = []
        for obj in objects:
            class_name = obj.find('name').text.lower().strip()
            bbox = obj.find('bndbox')
            x1 = float(bbox.find('xmin').text)
            y1 = float(bbox.find('ymin').text)
            x2 = float(bbox.find('xmax').text)
            y2 = float(bbox.find('ymax').text)
            boxes.append([x1, y1, x2, y2])
            labels.append(self.class_dict[class_name])
            # Missing/empty <difficult> tag defaults to 0 (not difficult).
            is_difficult_str = obj.find('difficult').text
            is_difficult.append(int(is_difficult_str) if is_difficult_str else 0)
        return (np.array(boxes, dtype=np.float32),
                np.array(labels, dtype=np.int64),
                np.array(is_difficult, dtype=np.uint8))

    def get_img_info(self, index):
        """Return {'height': h, 'width': w} read from the annotation XML."""
        img_id = self.ids[index]
        annotation_file = os.path.join(self.data_dir, "Annotations", "%s.xml" % img_id)
        anno = ET.parse(annotation_file).getroot()
        size = anno.find("size")
        im_info = tuple(map(int, (size.find("height").text, size.find("width").text)))
        return {"height": im_info[0], "width": im_info[1]}

    def _read_image(self, image_id):
        """Load JPEGImages/<image_id>.jpg as an RGB numpy array (EXIF-reoriented)."""
        image_file = os.path.join(self.data_dir, "JPEGImages", "%s.jpg" % image_id)
        image = Image.open(image_file)
        image = reorient_image(image).convert("RGB")
        image = np.array(image)
        return image
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,992
|
nhlinh99/SSD
|
refs/heads/master
|
/ssd/engine/inference.py
|
import logging
import os
import torch
import torch.utils.data
from tqdm import tqdm
from ssd.data.build import make_data_loader
from ssd.data.datasets.evaluation import evaluate
from ssd.utils import dist_util, mkdir
from ssd.utils.dist_util import synchronize, is_main_process
import cv2
def _accumulate_predictions_from_multiple_gpus(predictions_per_gpu):
    """Gather per-GPU prediction dicts onto the main process.

    Returns the predictions as a list ordered by image id (main process only;
    other processes return None).
    """
    gathered = dist_util.all_gather(predictions_per_gpu)
    if not dist_util.is_main_process():
        return
    # merge the list of dicts
    merged = {}
    for part in gathered:
        merged.update(part)
    # convert a dict where the key is the index in a list
    image_ids = sorted(merged.keys())
    if len(image_ids) != image_ids[-1] + 1:
        # A gap in the ids means some process dropped images.
        logger = logging.getLogger("SSD.inference")
        logger.warning(
            "Number of images that were gathered from multiple processes is not "
            "a contiguous set. Some images might be missing from the evaluation"
        )
    # convert to a list
    return [merged[i] for i in image_ids]
def compute_on_dataset(model, data_loader, device):
    """Run `model` over every batch of `data_loader` on `device`.

    Returns a dict mapping int image id -> detection result moved to CPU.
    """
    results_dict = {}
    # Hoisted out of the loop: the CPU device object never changes per batch.
    cpu_device = torch.device("cpu")
    for batch in tqdm(data_loader):
        images, targets, image_ids = batch
        with torch.no_grad():
            outputs = model(images.to(device))
            outputs = [o.to(cpu_device) for o in outputs]
        results_dict.update(
            {int(img_id): result for img_id, result in zip(image_ids, outputs)}
        )
    return results_dict
def inference(model, data_loader, dataset_name, device, output_folder=None, use_cached=False, allow_write_img = False, image_size = 512, **kwargs):
    """Compute (or load cached) predictions for one dataset and evaluate them.

    With allow_write_img=True, every prediction is rendered onto its source
    image and written under ./eval_results.  Returns the evaluation metrics
    (main process only).
    """
    dataset = data_loader.dataset
    logger = logging.getLogger("SSD.inference")
    logger.info("Evaluating {} dataset({} images):".format(dataset_name, len(dataset)))
    predictions_path = os.path.join(output_folder, 'predictions.pth')
    if use_cached and os.path.exists(predictions_path):
        # Reuse previously computed predictions instead of re-running the model.
        predictions = torch.load(predictions_path, map_location='cpu')
    else:
        predictions = compute_on_dataset(model, data_loader, device)
        synchronize()
        predictions = _accumulate_predictions_from_multiple_gpus(predictions)
    if not is_main_process():
        return
    if output_folder:
        torch.save(predictions, predictions_path)
    if (allow_write_img):
        if (not os.path.isdir("eval_results")):
            os.mkdir("eval_results")
        LABEL = dataset.class_names
        for i in range(len(dataset)):
            image_id, annotation = dataset.get_annotation(i)
            img = dataset._read_image(image_id)
            img_info = dataset.get_img_info(i)
            prediction = predictions[i]
            boxes, labels, scores = prediction['boxes'], prediction['labels'], prediction['scores']
            # NOTE(review): the inner loop reuses `i`, clobbering the dataset
            # index; harmless because the outer `for` reassigns it, but confusing.
            for i in range(len(boxes)):
                # Scale boxes from network input size back to original image
                # dimensions, clamped to the image bounds.
                b1 = int(max(boxes[i][0] * img_info["width"] / image_size, 0))
                b2 = int(max(boxes[i][1] * img_info["height"] / image_size, 0))
                b3 = int(min(boxes[i][2] * img_info["width"] / image_size, img_info["width"]))
                b4 = int(min(boxes[i][3] * img_info["height"] / image_size, img_info["height"]))
                img = cv2.rectangle(img, (b1, b2), (b3, b4), (255, 0, 0), 2)
                img = cv2.putText(img, "{}".format(LABEL[labels[i]]), (b1, b2 - 30), cv2.FONT_HERSHEY_SIMPLEX,
                                  0.8, (0, 0, 255), 2, cv2.LINE_AA)
                img = cv2.putText(img, "{}".format(round(float(scores[i]), 2)), (b1, b2 - 5), cv2.FONT_HERSHEY_SIMPLEX,
                                  0.8, (0, 0, 255), 2, cv2.LINE_AA)
            cv2.imwrite(os.path.join("eval_results", "{}.jpg".format(image_id)), img)
    return evaluate(dataset=dataset, predictions=predictions, output_dir=output_folder, **kwargs)
@torch.no_grad()
def do_evaluation(cfg, model, distributed, check_write_img = False, check_9_labels = False, **kwargs):
    """Evaluate `model` on every dataset named in cfg.DATASETS.TEST.

    Returns a list with one evaluation result per test dataset.
    """
    if isinstance(model, torch.nn.parallel.DistributedDataParallel):
        # Unwrap DDP so inference sees the plain module.
        model = model.module
    model.eval()
    device = torch.device(cfg.MODEL.DEVICE)
    loaders = make_data_loader(cfg, is_train=False, distributed=distributed, check_9_labels=check_9_labels)
    eval_results = []
    for name, loader in zip(cfg.DATASETS.TEST, loaders):
        folder = os.path.join(cfg.OUTPUT_DIR, "inference", name)
        if not os.path.exists(folder):
            mkdir(folder)
        result = inference(model, loader, name, device, folder,
                           allow_write_img=check_write_img,
                           image_size = cfg.INPUT.IMAGE_SIZE, **kwargs)
        eval_results.append(result)
    return eval_results
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,993
|
nhlinh99/SSD
|
refs/heads/master
|
/convert_pascalvoc_dataset/build.py
|
#! -*- coding: utf-8 -*-
import os
import sys
from argparse import ArgumentParser
from easydict import EasyDict as edict
from pascal_voc.pascal_voc import PASCALVOC07
# Metadata stamped into every generated PASCAL-VOC XML annotation file.
config = edict()
config.author = "Sunshine Tech"
config.root = "annotation"
config.folder = "VOC2007"
config.annotation = "PASCAL VOC2007"
config.segmented = "0"
config.difficult = "0"     # all objects marked not-difficult
config.truncated = "0"     # all objects marked not-truncated
config.pose = "Unspecified"
config.database = "CMND_BACK"
config.depth = "3"         # 3-channel (RGB) images
def parse_inputs():
    """Parse the CLI arguments; return (output_dir, annotation_dir)."""
    parser = ArgumentParser(description='Argument: python data_preprocess.py <annotation_path> <output_direction>')
    parser.add_argument('annotation_dir', default="Annotations", type=str,
                        help='Enter the path of annotation files.')
    parser.add_argument('output_dir', type=str,
                        help='Enter the path of the output.')
    parsed = parser.parse_args()
    # NOTE: returned in (output, annotation) order — opposite of the CLI order.
    return (parsed.output_dir, parsed.annotation_dir)
if __name__ == "__main__":
# !python convert_pascalvoc_dataset/build.py "/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/Annotations" "/content/drive/MyDrive/Colab Notebooks/Sunshine Tech/data"
# OR test on local
# python convert_pascalvoc_dataset/build.py "Annotations/" "data/"
output_dir, annotation_dir = parse_inputs()
if (not os.path.isdir(output_dir)):
os.mkdir(output_dir)
print("Building PASCAL VOC 2007...")
trainval_anno = os.path.join(annotation_dir, 'Train_annotation.txt')
val_anno = os.path.join(annotation_dir, 'Val_annotation.txt')
test_anno = os.path.join(annotation_dir, 'Test_annotation.txt')
p = PASCALVOC07(trainval_anno, val_anno, test_anno, output_dir, config)
p.build(True)
|
{"/ssd/data/datasets/my_dataset.py": ["/ssd/utils/misc.py"]}
|
20,994
|
akashkulkarni1192/Project-Text-Classification-using-RNN
|
refs/heads/master
|
/rnn_momentum_poetry_classifier.py
|
import theano
import theano.tensor as T
import numpy as np
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
import util as myutil
class RNN_class:
    """Elman-style recurrent network (theano) that classifies a whole sequence.

    Trained sample-by-sample with classical-momentum SGD and a per-sample
    learning-rate decay.
    """

    def __init__(self, hidden_layer_size, vocab_size):
        # M: hidden layer width; V: input vocabulary size (one row of Wx per symbol).
        self.M = hidden_layer_size
        self.V = vocab_size

    def fit(self, X, Y, learning_rate=10e-1, mu=0.99, activation=T.tanh, epochs=500):
        """Train on sequences X (lists of symbol indices) with int labels Y.

        The last 10 shuffled samples are held out for validation; training and
        validation accuracy are printed every epoch, and the cost curve is
        plotted at the end.
        """
        M = self.M
        V = self.V
        K = len(set(Y))  # number of output classes
        X, Y = shuffle(X, Y)
        N_entries_valid = 10
        Xvalid, Yvalid = X[-N_entries_valid:], Y[-N_entries_valid:]
        X, Y = X[:-N_entries_valid], Y[:-N_entries_valid]
        N = len(X)
        # initialize weights
        # NOTE(review): util.py defines `init_weights` while this calls
        # `init_weight` — confirm which name the imported module exposes.
        Wx = myutil.init_weight(V, M)
        Wh = myutil.init_weight(M, M)
        bh = np.zeros(M)
        h0 = np.zeros(M)
        Wo = myutil.init_weight(M, K)
        bo = np.zeros(K)
        # Create theano variables
        thX, thY, py_x, prediction = self.set(Wx, Wh, bh, h0, Wo, bo, activation)
        cost = -T.mean(T.log(py_x[thY]))  # negative log-likelihood of the true class
        grads = T.grad(cost, self.params)
        dparams = [theano.shared(p.get_value() * 0) for p in self.params]  # momentum buffers
        lr = T.scalar('learning_rate')  # decayed by a factor of 0.9999 after every sample
        # Classical momentum: velocity dp accumulates, parameters follow it.
        updates = [
            (p, p + mu * dp - lr * g) for p, dp, g in zip(self.params, dparams, grads)
        ] + [
            (dp, mu * dp - lr * g) for dp, g in zip(dparams, grads)
        ]
        self.train_op = theano.function(
            inputs=[thX, thY, lr],
            outputs=[cost, prediction],
            updates=updates,
            allow_input_downcast=True,
        )
        costs = []
        for i in range(epochs):
            X, Y = shuffle(X, Y)
            n_correct = 0
            cost = 0
            # stochastic gradient descent, one sequence at a time
            for j in range(N):
                c, p = self.train_op(X[j], Y[j], learning_rate)
                cost += c
                if p == Y[j]:
                    n_correct += 1
                # Decrement the learning rate
                learning_rate *= 0.9999
            # calculate validation accuracy
            n_correct_valid = 0
            for j in range(N_entries_valid):
                p = self.predict_op(Xvalid[j])
                if p == Yvalid[j]:
                    n_correct_valid += 1
            print("i:{0} cost:{1} correction_rate:{2}".format(i, cost, (float(n_correct) / N)))
            # BUG FIX: was `Nvalid` (undefined) — raised NameError after epoch 0.
            print("Validation correction rate:{0}".format(float(n_correct_valid) / N_entries_valid))
            costs.append(cost)
        plt.plot(costs)
        plt.show()

    def set(self, Wx, Wh, bh, h0, Wo, bo, activation):
        """Wrap numpy weights in theano shared variables and build the graph.

        Compiles self.predict_op and returns the symbolic
        (thX, thY, py_x, prediction) used by fit() to build the training op.
        """
        self.f = activation
        # redundant - see how you can improve it
        self.Wx = theano.shared(Wx)
        self.Wh = theano.shared(Wh)
        self.bh = theano.shared(bh)
        self.h0 = theano.shared(h0)
        self.Wo = theano.shared(Wo)
        self.bo = theano.shared(bo)
        self.params = [self.Wx, self.Wh, self.bh, self.h0, self.Wo, self.bo]
        thX = T.ivector('X')
        thY = T.iscalar('Y')

        def recurrence(x, prev_h):
            # here Wx[x] will get the one hot encoded vector of POS tagging of a sentence x and then perform the recurrent operation
            h = self.f(self.Wx[x] + prev_h.dot(self.Wh) + self.bh)
            y = T.nnet.softmax(h.dot(self.Wo) + self.bo)
            return h, y

        [h, y], _ = theano.scan(
            fn=recurrence,
            outputs_info=[self.h0, None],
            sequences=thX,
            n_steps=thX.shape[0],
        )
        py_x = y[-1, 0, :]  # only interested in the final classification of the sequence
        prediction = T.argmax(py_x)
        self.predict_op = theano.function(
            inputs=[thX],
            outputs=prediction,
            allow_input_downcast=True,
        )
        return thX, thY, py_x, prediction
def train_poetry():
    """Train a 2-hidden-unit RNN to tell Robert Frost from Edgar Allan Poe."""
    X, Y, vocab_size = myutil.get_poetry_classifier_data(samples_per_class=500)
    classifier = RNN_class(2, vocab_size)
    classifier.fit(X, Y, learning_rate=10e-7, activation=T.nnet.relu, epochs=10)
# Script entry point.
if __name__ == '__main__':
    train_poetry()
|
{"/rnn_momentum_poetry_classifier.py": ["/util.py"]}
|
20,995
|
akashkulkarni1192/Project-Text-Classification-using-RNN
|
refs/heads/master
|
/util.py
|
import string
import nltk
import numpy as np
from sklearn.utils import shuffle
def init_weights(fi, fo):
    """Xavier-style init: N(0, 1) entries scaled by 1/sqrt(fan_in + fan_out).

    NOTE(review): rnn_momentum_poetry_classifier.py calls `init_weight`
    (singular) — confirm the intended public name.
    """
    scale = np.sqrt(fi + fo)
    return np.random.randn(fi, fo) / scale
def remove_punctuation(s):
    """Return *s* with all ASCII punctuation characters removed.

    BUG FIX: the old call `s.translate(string.punctuation)` passed the
    punctuation string itself as the translation table; `str.translate`
    expects a mapping of code points, so punctuation was never removed
    (and low code points could be corrupted).  Build a proper deletion
    table with str.maketrans instead.
    """
    return s.translate(str.maketrans('', '', string.punctuation))
def get_poetry_classifier_data(samples_per_class=700):
    """Build POS-tag sequence data for the two-author poetry classifier.

    Reads robert_frost.txt and edgar_allan.txt from the working directory,
    POS-tags up to `samples_per_class` non-empty lines per author, and maps
    each POS tag to a shared integer index.

    Returns (X, Y, V): X is a list of tag-index sequences, Y the int32 labels
    (0 = Robert Frost, 1 = Edgar Allan Poe), V the POS vocabulary size.
    Fixes: the two identical per-author loops are de-duplicated, and the file
    handles (previously never closed) are managed with `with`.
    """
    X = []
    pos2idx = {}

    def tag_lines(path, limit):
        # Tag up to `limit` taggable lines from `path`, appending their
        # tag-index sequences to X and growing the shared pos2idx vocabulary.
        # Returns the number of lines actually used.
        count = 0
        with open(path) as f:
            for raw in f:
                s = remove_punctuation(raw.strip().lower())
                tokens = nltk.pos_tag(s.split())
                if tokens:
                    seq = []
                    for (label, val) in tokens:
                        if val not in pos2idx:
                            # Same effect as the old cur_idx counter: indices
                            # are assigned in first-seen order.
                            pos2idx[val] = len(pos2idx)
                        seq.append(pos2idx[val])
                    X.append(seq)
                    count += 1
                    if count == limit:
                        break
        return count

    rf_line_count = tag_lines('robert_frost.txt', samples_per_class)
    ea_line_count = tag_lines('edgar_allan.txt', samples_per_class)
    # Set Y to 0 for robert frost poems and 1 for edgar allan poems
    Y = np.array([0] * rf_line_count + [1] * ea_line_count).astype(np.int32)
    X, Y = shuffle(X, Y)
    return X, Y, len(pos2idx)
|
{"/rnn_momentum_poetry_classifier.py": ["/util.py"]}
|
20,997
|
swiich/face_recognize
|
refs/heads/master
|
/opencv.py
|
import cv2

# Extract still frames from 'test.avi': every `timF`-th frame is written to
# the image/ directory as <frame-number>.jpg.
vc = cv2.VideoCapture('test.avi')
c = 1  # frame counter
# Prime the loop with one read; rval stays False if the file failed to open.
if vc.isOpened():
    rval, frame = vc.read()
else:
    rval = False
timF = 1000  # sampling interval in frames
while rval:
    rval,frame = vc.read()
    # NOTE(review): on the final failed read `frame` may be None; imwrite
    # would raise if the sampling interval lands on that iteration — confirm.
    if(c%timF == 0):
        cv2.imwrite('image/'+ str(c) + '.jpg', frame)
    c += 1
    cv2.waitKey(1)  # brief wait so OpenCV can service its event loop
vc.release()
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
20,998
|
swiich/face_recognize
|
refs/heads/master
|
/main.py
|
import avHash
import hamming

# Compare two images by average-hash fingerprint.  Hamming distance 0 means
# identical, <= 5 is treated as similar, anything larger as different.
img_path = ('.\img\\timg1.jpg', '.\img\\timg0.jpg')
if __name__ == '__main__':
    # Distance between the two 16-hex-digit aHash fingerprints.
    ham = hamming.hamming(avHash.get_hash(img_path[0]), avHash.get_hash(img_path[1]))
    print(avHash.get_hash(img_path[0]))
    print(avHash.get_hash(img_path[1]))
    print(ham)
    if ham == 0:
        print('the same pic')
    elif ham <= 5:
        print("image alike")
    else:
        print('not alike')
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
20,999
|
swiich/face_recognize
|
refs/heads/master
|
/faceDetection.py
|
#-*-coding:utf8-*-#
import cv2
from PIL import Image, ImageDraw
import numpy as np
def detectFaces(image_name):
    """Run a Haar-cascade detector on *image_name*.

    Returns a list of face bounding boxes as (x1, y1, x2, y2) tuples.
    """
    #img = cv2.imread(image_name)
    img = Image.open(image_name)
    # Alternative cascade files tried during development:
    # face_cascade = cv2.CascadeClassifier("C:\\Users\Asshole\Anaconda3\pkgs\opencv-3.2.0-np112py36_203\Library\etc\haarcascades\haarcascade_frontalface_default.xml") #face
    # face_cascade = cv2.CascadeClassifier("E:\Python\PycharmProjects\ImgHash\img\ma\\negdata\data\cascade.xml") #mayun
    face_cascade = cv2.CascadeClassifier("E:\Python\PycharmProjects\ImgHash\img\\brand\\negdata\data\cascade.xml")
    #gray = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
    # Convert to grayscale through PIL, then expose a numpy view to OpenCV.
    img = img.convert('L')
    gray = np.asarray(img)
    '''
    参数3:scaleFactor - -表示在前后两次相继的扫描中,搜索窗口的比例系数。默认为1.1
    即每次搜索窗口依次扩大10 %;
    参数4:minNeighbors - -表示构成检测目标的相邻矩形的最小个数(默认为3个)。
    如果组成检测目标的小矩形的个数和小于min_neighbors - 1都会被排除。
    如果min_neighbors为0, 则函数不做任何操作就返回所有的被检候选矩形框,
    这种设定值一般用在用户自定义对检测结果的组合程序上
    '''
    # (Above, translated: scaleFactor is the search-window scaling between
    # successive scans, default 1.1; minNeighbors is the minimum number of
    # neighbouring rectangles required to accept a detection, default 3.)
    # 1.07: shrink the detection window ~7% per pass.
    # 5: a candidate must be hit this many times to count as a real face.
    faces = face_cascade.detectMultiScale(gray, 1.07, 5)
    result = []
    # detectMultiScale returns (x, y, w, h); convert to corner coordinates.
    for (x, y, width, height) in faces:
        result.append((x, y, x+width, y+height))
    return result


def drawFaces(image_name):
    """Outline every detected face in red and display the annotated image."""
    faces = detectFaces(image_name)
    if faces:
        img = Image.open(image_name)
        draw_instance = ImageDraw.Draw(img)
        for (x1,y1,x2,y2) in faces:
            # region = (x1, y1, x2, y2)
            # cropImg = img.crop(region)
            draw_instance.rectangle((x1,y1,x2,y2), outline=(255, 0, 0))
            # cropImg.save('E:\Python\PycharmProjects\ImgHash\img\\faces\\'+str(x1)+'.jpg')
        # img.save('drawfaces_'+image_name)
        # NOTE(review): Image._show is a private PIL helper — prefer img.show().
        Image._show(img)


if __name__ == '__main__':
    folder = 'E:\\Python\\PycharmProjects\\ImgHash\\img\\'
    picName = 'brd.jpg'
    path = folder + picName
    drawFaces(path)
    # Previously-tested images:
    # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\whereisma.jpg')
    # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\arrow.jpg')
    # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\bruce.jpg')
    # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\bat.jpg')
    # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\gakki.jpg') #gakki 1.07
    # # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\prison.jpg')
    # drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\brd.jpg')
    drawFaces('E:\\Python\\PycharmProjects\\ImgHash\\img\\s.jpg')
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,000
|
swiich/face_recognize
|
refs/heads/master
|
/recognition.py
|
import dlib, os, glob, numpy
from skimage import io
from tkinter import filedialog, messagebox
def descriptor(path):
    """Compute a 128-D dlib face descriptor for every face in every image
    matched by the glob *path*; returns them as a list of numpy arrays."""
    detector = dlib.get_frontal_face_detector()
    shape = dlib.shape_predictor('..\shape_predictor_68_face_landmarks.dat')
    faceRecog = dlib.face_recognition_model_v1('..\dlib_face_recognition_resnet_model_v1.dat')
    windows = dlib.image_window()
    descriptors = []
    for i in glob.glob(os.path.join(path)):
        print('Processing file: {}'.format(i))
        img = io.imread(i)
        windows.clear_overlay()
        windows.set_image(img)
        detect = detector(img, 1)  # face detection (upsample once)
        print('Number of faces detected: {}'.format(len(detect)))
        for d in detect:
            sp = shape(img, d)  # 68-point landmark detection
            windows.clear_overlay()
            windows.add_overlay(d)
            windows.add_overlay(sp)
            face_descriptor = faceRecog.compute_face_descriptor(img, sp)  # descriptor extraction
            arr = numpy.array(face_descriptor)
            descriptors.append(arr)
    return descriptors


def recogFace():
    """Identify the person in a user-selected image by the smallest Euclidean
    distance to the known face descriptors, then show the name in a dialog."""
    detector = dlib.get_frontal_face_detector()  # frontal-face detector
    shape = dlib.shape_predictor('..\shape_predictor_68_face_landmarks.dat')  # landmark detector
    # Face recognition (descriptor) model.
    faceRecog = dlib.face_recognition_model_v1('..\dlib_face_recognition_resnet_model_v1.dat')
    # descriptors = []
    descriptors = descriptor('..\img\\faces\*.jpg')
    # Read the image to identify (chosen through a file dialog).
    img = io.imread(openFile())
    # img = io.imread('..\img\\faces\\torec\\6.jpg')
    detect = detector(img, 1)
    dist = []
    for d in detect:
        sp = shape(img, d)
        face_descriptor = faceRecog.compute_face_descriptor(img, sp)
        d_test = numpy.array(face_descriptor)
        # Euclidean distance to every known descriptor.
        for i in descriptors:
            dist_ = numpy.linalg.norm(i - d_test)
            dist.append(dist_)
    # NOTE(review): this label list must stay aligned, by position, with the
    # glob order of '..\img\faces\*.jpg' — fragile; confirm on any change.
    candidate = ['施瓦辛格','马云','马云','斯嘉丽约翰逊','施瓦辛格',
    '斯嘉丽约翰逊','奥巴马','奥巴马','奥巴马','山下智久','金正恩','金正恩', \
    '库里', '库里']
    dt = dict(zip(candidate, dist))
    dt_sorted = sorted(dt.items(), key=lambda d:d[1])
    messagebox.showinfo('whosthis',str(dt_sorted[0][0]))
    # print('its: ',dt_sorted[0][0])
    # dlib.hit_enter_to_continue()


def openFile():
    """Show an open-file dialog restricted to images; return the chosen path."""
    op = filedialog.askopenfilename(title='打开文件',filetypes = [('Img','*.bmp *.jpg')])
    return op


if __name__ == '__main__':
    recogFace()
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,001
|
swiich/face_recognize
|
refs/heads/master
|
/PCAfail.py
|
import os, glob, cv2
import numpy as np
from PIL import Image
folder = 'E:\\Python\\PycharmProjects\\ImgHash\\img\\ma'
def pca(data, k):
    """Project *data* (rows = samples) onto its first *k* principal components.

    Uses the small (rows x rows) Gram matrix trick instead of the full
    feature-space covariance matrix.

    Returns:
        (projected_data, column_means, normalized_components)
    """
    samples = np.float32(np.mat(data))
    n_rows, _n_cols = samples.shape
    col_mean = np.mean(samples, 0)
    # Center the data around the per-column mean.
    centered = samples - np.tile(col_mean, (n_rows, 1))
    gram = centered * centered.T
    _eigvals, eigvecs = np.linalg.eig(gram)  # eigenvalues / eigenvectors
    # Map the first k Gram-matrix eigenvectors back into feature space,
    # then normalize each column to unit length.
    components = centered.T * eigvecs[:, :k]
    for col in range(k):
        components[:, col] /= np.linalg.norm(components[:, col])
    return np.dot(centered, components), col_mean, components
#covert image to vector
def img2vector(filename):
#img = cv2.imread(filename, 0) #read as 'gray'
img = Image.open(filename)
img = img.convert('L')
rows, cols = img.size
imgVector = np.zeros((1,rows*cols)) #create a none vectore:to raise speed
imgVector = np.reshape(img,(1,rows*cols)) #change img from 2D to 1D
return imgVector
def convertL(file):
    """Open *file* and return it converted to 8-bit grayscale ('L' mode)."""
    return Image.open(file).convert('L')
def loadImgSet(folder):
    """Build (trainData, yTrain, testData, yTest) arrays from the image folder.

    NOTE(review): the ``folder`` parameter is ignored — the glob path is
    hard-coded; confirm that is intended.  The outer loop also re-reads the
    same glob for every class k, and ``testData.extend(np.ravel(data[0]))``
    extends with individual pixel values rather than appending one flattened
    image — this function lives in 'PCAfail.py' and appears to be the broken
    experiment it is named for.
    """
    trainData = []; testData = []; yTrain = []; yTest = []
    # k enumerates the 10 nominal classes.
    for k in range(10):
        data = [convertL(d) for d in glob.glob('E:\\Python\\PycharmProjects\\ImgHash\\img\\ma\\*.jpg')]
        # First 10 globbed images flattened as training rows for class k.
        trainData.extend(np.ravel(data[i]) for i in range(10))
        testData.extend(np.ravel(data[0]))
        yTest.extend([k]*1)
        yTrain.extend([k]*10)
    return np.array(trainData), np.array(yTrain), np.array(testData), np.array(yTest)
def main():
    """PCA + nearest-neighbour face classification over the image folder."""
    xTrain_, yTrain, xTest_, yTest = loadImgSet(folder)
    num_train, num_test = xTrain_.shape[0], xTest_.shape[0]
    # Project training data onto the first 10 principal components.
    xTrain, data_mean, V = pca(xTrain_, 10)
    # Project the test data using the training mean and components.
    xTest = np.array((xTest_- np.tile(data_mean, (num_test, 1))) * V)
    # Nearest neighbour in PCA space (squared Euclidean distance).
    yPredict = [yTrain[np.sum((xTrain-np.tile(d, (num_train, 1)))**2, 1).argmin()] for d in xTest]
    # Prints the Euclidean-distance recognition rate (message is in Chinese).
    print("欧式距离法识别率:%.2f%%"% ((yPredict == np.array(yTest)).mean()*100))


if __name__ == '__main__':
    main()
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,002
|
swiich/face_recognize
|
refs/heads/master
|
/hamming.py
|
def hamming(str, str1):
    """Return the Hamming distance between two aligned hash strings.

    BUGFIX: the original looped ``range(0, 15)``, which ignored the 16th
    character of the 16-hex-digit average hash produced by avHash.get_hash
    (and raised IndexError for shorter inputs).  ``zip`` compares every
    aligned character pair and stops safely at the shorter string.

    NOTE: the parameter names shadow the builtin ``str``; they are kept
    unchanged for backward compatibility with keyword callers.
    """
    ham = 0
    for a, b in zip(str, str1):
        if a != b:
            ham += 1
    return ham
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,003
|
swiich/face_recognize
|
refs/heads/master
|
/ffmpeg.py
|
import subprocess

# Launch ffplay on a hard-coded media file by joining the executable path
# (note the trailing space) and the media path into one command string.
ffmpegPath = "E:\Python\PycharmProjects\ImgHash\\ffmpeg-20170605-4705edb-win64-static\\bin\\ffplay.exe "
curMediaPath = "E:\Python\PycharmProjects\ImgHash\\img\\test.mp4"
cmd = ffmpegPath + curMediaPath
# Earlier attempts:
# os.popen(cmd)
# os.system(cmd)
# NOTE(review): passing one command *string* (not an argument list) relies on
# Windows subprocess semantics; on POSIX this would fail — confirm
# Windows-only use, or switch to subprocess.call([ffmpegPath, curMediaPath]).
subprocess.call(cmd)
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,004
|
swiich/face_recognize
|
refs/heads/master
|
/recognitionFail.py
|
import numpy as np
import cv2, os
def loadImg(path):
    """Load the .jpg files in *path* into a 9 x 4096 matrix, one image per row.

    NOTE(review): assumes exactly 64x64 grayscale images and at most 9 of
    them — a 10th jpg would index past the pre-allocated matrix.  Confirm.
    """
    matrix = np.mat(np.zeros((9, 64*64)))
    j = 0  # next free row in `matrix`
    for i in os.listdir(path):
        if i.split('.')[1] == 'jpg':
            try:
                img = cv2.imread(path+i, 0)  # 0 = load as grayscale
            except:
                print('load %s failed' % i)
            # NOTE(review): if imread failed above, `img` is stale or
            # undefined here yet is still written into the matrix — confirm.
            matrix[j, :] = np.mat(img).flatten()
            j += 1
    return matrix
'''
step1: load the face image data ,get the matrix consists of all image
step2: average the FaceMat
step3: calculate the difference of avgimg and all image data(FaceMat)
step4: calculate eigenvector of covariance matrix (because covariance matrix will cause memory error)
'''
def recogVector(selecthr = 0.8):
    """Build an eigenface basis from the training images.

    ``selecthr`` is the fraction of total eigenvalue mass to retain.

    Returns:
        avgImg:    mean image (column vector).
        covVects:  eigenvectors of the covariance matrix (eigenfaces).
        diffTrain: mean-subtracted training matrix.
    """
    faceMat = loadImg('E:\Python\PycharmProjects\ImgHash\img\m').T  ######
    avgImg = np.mean(faceMat, 1)
    diffTrain = faceMat - avgImg
    # Decompose the small (N x N) matrix instead of the full covariance
    # matrix, which would not fit in memory.
    eigVals, eigVects = np.linalg.eig(np.mat(diffTrain.T * diffTrain))
    eigSortIndex = np.argsort(-eigVals)  # indices sorted by descending eigenvalue
    for i in range(np.shape(faceMat)[1]):  ########
        # Keep just enough leading eigenvectors to cover `selecthr` of the
        # total eigenvalue energy.
        if (eigVals[eigSortIndex[:i]]/eigVals.sum()).sum() >= selecthr:
            eigSortIndex = eigSortIndex[:i]
            break
    covVects = diffTrain * eigVects[:,eigSortIndex]  # eigenvectors of the covariance matrix
    # avgImg: mean image; covVects: covariance eigenvectors; diffTrain: deviation matrix
    return avgImg, covVects, diffTrain
def whosthis(oringinImg, faceVector, avgImg, difftrain):
    """Return the 1-based index of the training face nearest to *oringinImg*.

    The probe image is mean-subtracted and projected into eigenface space,
    then compared (squared Euclidean distance) against the projection of
    each of the 9 training columns in *difftrain*.  Ties keep the earliest
    index.
    """
    # Project the mean-subtracted probe into eigenface space.
    projected = faceVector.T * (oringinImg.T - avgImg)
    best_idx = 0
    best_dist = np.inf
    for idx in range(9):
        train_proj = faceVector.T * difftrain[:, idx]
        d = (np.array(projected - train_proj) ** 2).sum()
        if d < best_dist:
            best_idx = idx
            best_dist = d
    return best_idx + 1
def similar(oriImg):
    """Return True if the image at path *oriImg* is classified as training
    identity #1 by the eigenface nearest-neighbour matcher."""
    avgImg, faceVector, diffTrain = recogVector()
    oriImg = cv2.imread(oriImg, 0)  # 0 = load as grayscale
    gray = np.mat(oriImg).flatten()  # 1 x (w*h) row vector
    if whosthis(gray, faceVector, avgImg, diffTrain) == 1:
        return True
    else:
        return False


if __name__ == '__main__':
    if similar('D:\\6.bmp'):
        print('1111')
    else:
        print('0')
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,005
|
swiich/face_recognize
|
refs/heads/master
|
/avHash.py
|
#coding:utf-8
'''
第一步,缩小尺寸。
将图片缩小到8×8的尺寸,总共64个像素。这一步的作用是去除图片的细节,只保留结构、明暗等基本信息,摒弃不同尺寸、比例带来的图片差异。
第二步,简化色彩。
将缩小后的图片,转为64级灰度。也就是说,所有像素点总共只有64种颜色。
第三步,计算平均值。
计算所有64个像素的灰度平均值。
第四步,比较像素的灰度。
将每个像素的灰度,与平均值进行比较。大于或等于平均值,记为1;小于平均值,记为0。
第五步,计算哈希值。
将上一步的比较结果,组合在一起,就构成了一个64位的整数,这就是这张图片的指纹。组合的次序并不重要,只要保证所有图片都采用同样次序就行了。
得到指纹以后,就可以对比不同的图片,看看64位中有多少位是不一样的。在理论上,这等同于计算“汉明距离”(Hamming distance)。如果不相同的数据位不超过5,就说明两张图片很相似;如果大于10,就说明这是两张不同的图片。
'''
from PIL import Image
def get_hash(img_path):
    """Return the 16-hex-digit average hash (aHash) of the image at *img_path*.

    Steps: shrink to 8x8 (64 pixels), convert to grayscale, threshold each
    pixel against the mean to get a 64-bit fingerprint, then pack each group
    of 4 bits into one hex digit.
    """
    img = Image.open(img_path)
    # ANTIALIAS = high-quality downscale; 'L' = grayscale.
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 — newer Pillow
    # requires Image.Resampling.LANCZOS.  Kept as-is for the pinned version.
    img = img.resize((8, 8), Image.ANTIALIAS).convert('L')
    # Mean gray value of the 64 pixels.
    avg = sum(list(img.getdata()))/64.0
    # One bit per pixel: '1' if >= average, else '0'.  (Renamed from `str`,
    # which shadowed the builtin.)
    bits = ''.join(map(lambda i: '0' if i < avg else '1', img.getdata()))
    # Pack every 4 bits into a hex digit -> 16-character fingerprint.
    return ''.join(map(lambda x: '%x' % int(bits[x:x+4], 2), range(0, 64, 4)))
|
{"/main.py": ["/avHash.py", "/hamming.py"]}
|
21,023
|
havy-nguyen/FLASK-Book-Search
|
refs/heads/master
|
/project/routes.py
|
import os
import requests
from project.forms import RegistrationForm, LoginForm, SearchForm, ReviewForm
from project.models import User, Book, Review
from flask_sqlalchemy import SQLAlchemy
from project import app, db, bcrypt
from sqlalchemy import and_
from flask import render_template, url_for, flash, redirect, request, abort, jsonify
from flask_login import login_user, current_user, logout_user, login_required
@app.route("/", methods=['GET', 'POST'])
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account; already-authenticated users go to the index."""
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = RegistrationForm()
    if form.validate_on_submit():
        # Store only the bcrypt hash, never the plaintext password.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user = User(username=form.username.data, email=form.email.data, password=hashed_password)
        db.session.add(user)
        db.session.commit()
        return redirect(url_for('login'))
    return render_template("register.html", pageTitle="Register Page", form=form)


@app.route("/login", methods=['GET', 'POST'])
def login():
    """Authenticate by email and bcrypt-checked password."""
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user, remember=form.remember.data)
            return redirect(url_for('index'))
        else:
            flash('Login unsuccessful. Please check email and password', 'danger')
    return render_template('login.html', pageTitle='Sign In', form=form)


@app.route("/logout")
def logout():
    """End the current session and return to the login page."""
    logout_user()
    return redirect(url_for('login'))
@app.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Book search page.

    On a submitted search form, filters books by AND-combined substring
    matches on isbn/title/author and renders page 1 of the results; otherwise
    reads the search terms (and page number) from the query string so
    pagination links keep working.
    """
    form = SearchForm()
    if form.validate_on_submit():
        input_isbn = str(form.isbn.data)
        input_title = str(form.title.data)
        input_author = str(form.author.data)
        isbn = request.args.get("isbn", input_isbn)
        title = request.args.get("title", input_title)
        author = request.args.get("author", input_author)
        # Title-case the title/author terms to match how books are stored.
        books = Book.query.filter(and_(Book.isbn.like("%" + input_isbn.strip() + "%"),
                                       Book.title.like("%" + input_title.title().strip() + "%"),
                                       Book.author.like("%" + input_author.title().strip() + "%"))).paginate(page=1, per_page=8, error_out=False)
        return render_template("index.html", pageTitle="Search Results",
                               books=books, form=form, isbn=input_isbn, title=input_title, author=input_author)
    else:
        # GET / pagination path: search terms come from the query string.
        page = request.args.get('page', 2, type=int)
        isbn = request.args.get("isbn", "isbn")
        title = request.args.get("title", "title")
        author = request.args.get("author", "author")
        books = Book.query.filter(and_(Book.isbn.like("%" + isbn.strip() + "%"),
                                       Book.title.like("%" + title.title().strip() + "%"),
                                       Book.author.like("%" + author.title().strip() + "%"))).paginate(page=page, per_page=8, error_out=False)
        return render_template("results.html", pageTitle="Search Results",
                               books=books, form=form, isbn=isbn, title=title, author=author)
@app.route("/index/<int:id>", methods=['GET', 'POST'])
@login_required
def book(id):
    """Book detail page: local reviews (paginated, newest first), Goodreads
    aggregate ratings, and a form to post a new review."""
    book = Book.query.get_or_404(id)
    page = request.args.get('page', 1, type=int)
    reviews = Review.query.join(Book).filter(Book.id == book.id).order_by(Review.id.desc()).paginate(page=page, per_page=4, error_out=False)
    # Fetch external rating statistics from the Goodreads API.
    res = requests.get("https://www.goodreads.com/book/review_counts.json", params={"key": os.environ.get('GOODREADS_KEY'), "isbns": book.isbn})
    goodreads_json = res.json()
    goodreads_ratings = goodreads_json['books'][0]['work_ratings_count']
    goodreads_avg = goodreads_json['books'][0]['average_rating']
    form = ReviewForm()
    # is_submitted (not validate_on_submit) so a missing rating can be
    # reported with a flash message instead of silently re-rendering.
    if form.is_submitted():
        review = Review(content=form.content.data, rate=form.rate.data, reviewer=current_user, book=book)
        if form.validate_on_submit():
            flash("Your review has been added.", 'info')
            db.session.add(review)
            db.session.commit()
            return redirect(url_for('book', id=book.id))
        else:
            flash("Please also rate book.", 'info')
    return render_template('book.html', pageTitle=book.title, book=book, form=form,
                           reviews=reviews, goodreads_ratings=goodreads_ratings, goodreads_avg=goodreads_avg)
@app.route("/index/api/<isbn>", methods=['GET', 'POST'])
def book_api(isbn):
    """JSON API: book details plus the local review count and average rating.

    Returns 422 with an error payload for an unknown ISBN; books without
    reviews report "no rating" for both statistics (legacy payload shape).

    BUGFIX: ``review_count`` previously reported the *sum* of the ratings
    rather than the number of reviews.  The zero-review case is now an
    explicit guard instead of catching ZeroDivisionError.
    """
    book = Book.query.filter(Book.isbn == isbn).first()
    if book is None:
        return jsonify({"Error": "Invalid Isbn"}), 422
    review_count = len(book.reviews)
    if review_count == 0:
        return jsonify({"title": book.title,
                        "author": book.author,
                        "year": book.year,
                        "isbn": book.isbn,
                        "review_count": "no rating",
                        "average_score": "no rating"})
    total_rating = sum(review.rate for review in book.reviews)
    avg_rating = round(total_rating / review_count, 2)
    return jsonify({"title": book.title,
                    "author": book.author,
                    "year": book.year,
                    "isbn": book.isbn,
                    "review_count": review_count,
                    "average_score": avg_rating})
|
{"/project/routes.py": ["/project/forms.py", "/project/models.py", "/project/__init__.py"], "/project/forms.py": ["/project/models.py"], "/project/models.py": ["/project/__init__.py"]}
|
21,024
|
havy-nguyen/FLASK-Book-Search
|
refs/heads/master
|
/project/__init__.py
|
import os
from flask import Flask, session
from flask_session import Session
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from email_validator import *
from flask_login import LoginManager
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
# Application factory-style module: builds the Flask app and all extensions.
app = Flask(__name__)
# Check for environment variable
if not os.getenv("DATABASE_URL"):
    raise RuntimeError("DATABASE_URL is not set")
# Bind engine to perform query
engine = create_engine(os.getenv("DATABASE_URL"))
database = scoped_session(sessionmaker(bind=engine))
# Configure session to use filesystem
app.config["SESSION_PERMANENT"] = False
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["SESSION_TYPE"] = "filesystem"
# NOTE(review): hard-coded SECRET_KEY committed to source control — move it
# to an environment variable and rotate the key.
app.config['SECRET_KEY'] = "cff2150872a5e6d41b0c019021cc31fa38a2e86"
ss = Session(app)
# Set up database
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL')
db = SQLAlchemy(app)
# Hash password
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)
login_manager.login_view = "login"
login_manager.login_message_category = "info"
# Imported last so routes can see the fully-configured app/db (avoids a
# circular-import failure at startup).
from project import routes
|
{"/project/routes.py": ["/project/forms.py", "/project/models.py", "/project/__init__.py"], "/project/forms.py": ["/project/models.py"], "/project/models.py": ["/project/__init__.py"]}
|
21,025
|
havy-nguyen/FLASK-Book-Search
|
refs/heads/master
|
/project/forms.py
|
from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from flask_login import current_user
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextAreaField, RadioField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from project.models import User, Book, Review
class RegistrationForm(FlaskForm):
    """Sign-up form with uniqueness checks on username and email."""
    username = StringField("Username",
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField("Email", validators=[DataRequired(), Email()])
    password = PasswordField("Password", validators=[DataRequired()])
    confirmPassword = PasswordField("Confirm Password", validators=[DataRequired(), EqualTo("password")])
    submit = SubmitField("Register")

    def validate_username(self, username):
        # WTForms calls validate_<field> hooks automatically during validation.
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError("Username is not available!")

    def validate_email(self, email):
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError("Email is not available!")


class LoginForm(FlaskForm):
    """Sign-in form; rejects emails with no matching account up front."""
    email = StringField("Email", validators=[DataRequired(), Email()])
    password = PasswordField("Password", validators=[DataRequired()])
    remember = BooleanField("Remember me")
    submit = SubmitField("Sign in")

    def validate_email(self, email):
        user = User.query.filter_by(email=email.data).first()
        if not user:
            raise ValidationError("Email is incorrect!")


class SearchForm(FlaskForm):
    """Book search form; all three fields are optional substrings."""
    isbn = StringField("Isbn", validators=[Length(max=30)])
    title = StringField("Title", validators=[Length(max=150)])
    author = StringField("Author", validators=[Length(max=120)])
    submit = SubmitField("Search")


class ReviewForm(FlaskForm):
    """New-review form: free-text content plus a required 1-5 rating."""
    content = TextAreaField('Write a review', validators=[DataRequired()])
    # Choice values are "1".."5"; every visible label is "rate".
    rate = RadioField("Rate book", validators=[DataRequired()], choices=[("1","rate"),("2","rate"),("3","rate"),("4","rate"),("5","rate")])
    submit = SubmitField('Post review')
|
{"/project/routes.py": ["/project/forms.py", "/project/models.py", "/project/__init__.py"], "/project/forms.py": ["/project/models.py"], "/project/models.py": ["/project/__init__.py"]}
|
21,026
|
havy-nguyen/FLASK-Book-Search
|
refs/heads/master
|
/project/models.py
|
from project import db, login_manager
from datetime import datetime
from email_validator import *
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: resolve the session's stored user id to a User."""
    return User.query.get(int(user_id))


class User(db.Model, UserMixin):
    """Registered account; `password` holds a bcrypt hash."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), unique=True, nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
    password = db.Column(db.String(60), nullable=False)
    # One-to-many: a user's reviews, reachable back via Review.reviewer.
    reviews = db.relationship("Review", backref="reviewer", lazy=True)

    def __repr__(self):
        return f"User('{self.username}', '{self.email}')"


class Book(db.Model, UserMixin):
    """Catalog entry.  NOTE(review): inheriting UserMixin here (and on
    Review) looks like copy-paste from User — harmless but confusing."""
    id = db.Column(db.Integer, primary_key=True)
    isbn = db.Column(db.String(30), nullable=False)
    title = db.Column(db.String(150), nullable=False)
    author = db.Column(db.String(120), nullable=False)
    year = db.Column(db.Integer, nullable=False)
    # One-to-many: the book's reviews, reachable back via Review.book.
    reviews = db.relationship("Review", backref="book", lazy=True)

    def __repr__(self):
        return f"Book('{self.id}', '{self.isbn}', '{self.title}', '{self.author}', '{self.year}')"


class Review(db.Model, UserMixin):
    """A user's rating + text for one book."""
    id = db.Column(db.Integer, primary_key=True)
    content = db.Column(db.Text, nullable=False)
    date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    rate = db.Column(db.Integer, nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
    book_id = db.Column(db.Integer, db.ForeignKey("book.id"), nullable=False)

    def __repr__(self):
        return f"Review('{self.content}', '{self.date}', '{self.rate}')"
|
{"/project/routes.py": ["/project/forms.py", "/project/models.py", "/project/__init__.py"], "/project/forms.py": ["/project/models.py"], "/project/models.py": ["/project/__init__.py"]}
|
21,027
|
voonshunzhi/world
|
refs/heads/master
|
/migrations/versions/398de4f723c4_.py
|
"""empty message
Revision ID: 398de4f723c4
Revises: b6a4b6f45fc8
Create Date: 2019-04-12 09:37:07.818252
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '398de4f723c4'
down_revision = 'b6a4b6f45fc8'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Adds the `language` table and links CountryLanguage.language_id to it.
    op.create_table('language',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('country_code', sa.String(length=128), nullable=True),
    sa.Column('language', sa.String(length=128), nullable=True),
    sa.Column('percentage_of_use', sa.Float(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_foreign_key(None, 'CountryLanguage', 'language', ['language_id'], ['id'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): drop_constraint with name None cannot resolve the FK at
    # runtime on most backends — give the constraint an explicit name.
    op.drop_constraint(None, 'CountryLanguage', type_='foreignkey')
    op.drop_table('language')
    # ### end Alembic commands ###
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,028
|
voonshunzhi/world
|
refs/heads/master
|
/migrations/versions/7cdfe2d4f328_.py
|
"""empty message
Revision ID: 7cdfe2d4f328
Revises: 5409c24fd08e
Create Date: 2019-04-15 13:06:21.520423
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7cdfe2d4f328'
down_revision = '5409c24fd08e'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Recreates city.country_code's FK with ON DELETE CASCADE so deleting a
    # country removes its cities.
    op.drop_constraint('city_country_code_fkey', 'city', type_='foreignkey')
    op.create_foreign_key(None, 'city', 'country', ['country_code'], ['country_code'], ondelete='CASCADE')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): dropping a constraint named None may fail — name the FK
    # created in upgrade() so this is reversible.
    op.drop_constraint(None, 'city', type_='foreignkey')
    op.create_foreign_key('city_country_code_fkey', 'city', 'country', ['country_code'], ['country_code'])
    # ### end Alembic commands ###
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,029
|
voonshunzhi/world
|
refs/heads/master
|
/world/models/seed/seedCountry.py
|
import csv
from world.models.models import Country;
from world import db;
# Seed script: loads ../country.csv into the `country` table row by row.
f = open('../country.csv')
csv_f = csv.reader(f)
# Skip the single header row.
for _ in range(1):
    next(csv_f)
for row in csv_f:
    print(row)
    # Columns 4-9 are numeric but may be 'NULL'/empty in the CSV; coerce
    # those to None so the DB stores real NULLs.
    row[4] = None if row[4] == 'NULL' or row[4] == '' else int(row[4])
    row[5] = None if row[5] == 'NULL' or row[5] == '' else int(row[5])
    row[6] = None if row[6] == 'NULL' or row[6] == '' else int(row[6])
    row[7] = None if row[7] == 'NULL' or row[7] == '' else float(row[7])
    row[8] = None if row[8] == 'NULL' or row[8] == '' else float(row[8])
    row[9] = None if row[9] == 'NULL' or row[9] == '' else int(row[9])
    country = Country(row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13])
    db.session.add(country);
# Single commit after the loop keeps the seed one transaction.
db.session.commit();
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,030
|
voonshunzhi/world
|
refs/heads/master
|
/migrations/versions/d4c7203bf19c_.py
|
"""empty message
Revision ID: d4c7203bf19c
Revises:
Create Date: 2019-04-10 19:34:28.957803
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd4c7203bf19c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Initial schema: country, language and city tables plus the FKs that
    # wire the CountryLanguage association table to both sides.
    op.create_table('country',
    sa.Column('id', sa.Integer(), nullable=True),
    sa.Column('country_code', sa.String(length=128), nullable=False),
    sa.Column('country_name', sa.String(length=128), nullable=True),
    sa.Column('continent', sa.String(length=128), nullable=True),
    sa.Column('region', sa.String(length=128), nullable=True),
    sa.Column('area', sa.Integer(), nullable=True),
    sa.Column('year_of_independence', sa.Integer(), nullable=True),
    sa.Column('population', sa.Float(), nullable=True),
    sa.Column('life_expectancy', sa.Float(), nullable=True),
    sa.Column('gnp', sa.Float(), nullable=True),
    sa.Column('gnpid', sa.Float(), nullable=True),
    sa.Column('alternative_name', sa.Text(), nullable=True),
    sa.Column('ruling_system', sa.Text(), nullable=True),
    sa.Column('founder', sa.Text(), nullable=True),
    sa.Column('iso_code', sa.String(length=128), nullable=True),
    sa.PrimaryKeyConstraint('country_code'),
    sa.UniqueConstraint('country_code')
    )
    op.create_table('language',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('country_code', sa.String(length=128), nullable=True),
    sa.Column('language', sa.String(length=128), nullable=True),
    sa.Column('percentage_of_use', sa.Float(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('city',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('city_name', sa.String(length=128), nullable=True),
    sa.Column('province', sa.String(length=128), nullable=True),
    sa.Column('population', sa.Float(), nullable=True),
    sa.Column('country_code', sa.String(length=128), nullable=False),
    sa.ForeignKeyConstraint(['country_code'], ['country.country_code'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_foreign_key(None, 'CountryLanguage', 'country', ['country_code'], ['country_code'])
    op.create_foreign_key(None, 'CountryLanguage', 'language', ['language_id'], ['id'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): two drop_constraint(None, ...) calls cannot identify
    # which FK each should drop — name the constraints in upgrade().
    op.drop_constraint(None, 'CountryLanguage', type_='foreignkey')
    op.drop_constraint(None, 'CountryLanguage', type_='foreignkey')
    op.drop_table('city')
    op.drop_table('language')
    op.drop_table('country')
    # ### end Alembic commands ###
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,031
|
voonshunzhi/world
|
refs/heads/master
|
/world/models/models.py
|
from world import db;
# Association table for the many-to-many Country <-> Language relationship;
# the composite primary key is (language_id, country_code).
CountryLanguage = db.Table('CountryLanguage',
    db.Column('language_id', db.Integer, db.ForeignKey('language.id'), primary_key=True),
    db.Column('country_code', db.String(128), db.ForeignKey('country.country_code'), primary_key=True),
)
class Country(db.Model):
    """World country record; the primary key is the string country_code."""
    __tablename__ = "country"
    # Plain integer column — NOT the primary key (country_code is).
    id = db.Column(db.Integer);
    # One-to-many to City; passive_deletes defers to the DB's ON DELETE CASCADE.
    cities = db.relationship('City', backref='country',lazy=True,passive_deletes=True)
    country_code = db.Column(db.String(128),unique=True,primary_key=True)
    country_name = db.Column(db.String(128));
    continent = db.Column(db.String(128));
    region = db.Column(db.String(128));
    area = db.Column(db.Integer);
    year_of_independence = db.Column(db.Integer,nullable=True);
    population = db.Column(db.Float);
    life_expectancy = db.Column(db.Float,nullable=True);
    gnp = db.Column(db.Float);
    gnpid = db.Column(db.Float,nullable=True);
    alternative_name = db.Column(db.Text);
    ruling_system = db.Column(db.Text);
    founder = db.Column(db.Text);
    iso_code = db.Column(db.String(128))

    def __init__(self,country_code,country_name,continent,region,area,year_of_independence,population,life_expectancy,gnp,gnpid,alternative_name,ruling_system,founder,iso_code):
        self.country_code = country_code;
        self.country_name = country_name
        self.continent = continent;
        self.region = region;
        self.area = area;
        self.year_of_independence = year_of_independence;
        self.population = population;
        self.life_expectancy = life_expectancy
        self.gnp = gnp;
        self.gnpid = gnpid;
        self.alternative_name = alternative_name;
        self.ruling_system = ruling_system;
        self.founder = founder;
        self.iso_code = iso_code;

    def __repr__(self):
        # NOTE(review): says "language of" on a Country — likely copy-paste
        # from Language.__repr__.
        return "This is the language of " + self.country_name;
class City(db.Model):
    """City belonging to one country; rows are removed when the country is
    deleted (ON DELETE CASCADE on the FK)."""
    __tablename__ = "city"
    id = db.Column(db.Integer, primary_key=True);
    city_name = db.Column(db.String(128));
    province= db.Column(db.String(128));
    population = db.Column(db.Float);
    country_code = db.Column(db.String(128), db.ForeignKey('country.country_code',ondelete='CASCADE'),nullable=False)

    def __init__(self,city_name,country_code,province,population):
        self.city_name = city_name;
        self.population = population;
        self.country_code = country_code;
        self.province = province;

    def __repr__(self):
        # NOTE(review): says "language of" on a City — likely copy-paste
        # from Language.__repr__.
        return "This is the language of " + self.city_name;
class Language(db.Model):
    """A language spoken in a country, with its share of speakers."""
    __tablename__ = "language"
    id = db.Column(db.Integer, primary_key=True)
    # Plain string code (no FK here); the CountryLanguage table carries the FK.
    country_code = db.Column(db.String(128))
    language = db.Column(db.String(128))
    # BUGFIX: the original line ended with a trailing comma, which made this
    # attribute a 1-tuple *containing* the Column, so SQLAlchemy never mapped
    # an `official_language` column at all (the generated migrations confirm
    # it is missing).  The comma has been removed.
    official_language = db.Column(db.Boolean, default=False, nullable=False)
    percentage_of_use = db.Column(db.Float)

    def __init__(self, country_code, language, official_language, percentage_of_use):
        self.language = language
        self.country_code = country_code
        self.official_language = official_language
        self.percentage_of_use = percentage_of_use

    def __repr__(self):
        return "This is the language of " + self.language
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,032
|
voonshunzhi/world
|
refs/heads/master
|
/migrations/versions/b6a4b6f45fc8_.py
|
"""empty message
Revision ID: b6a4b6f45fc8
Revises: d4c7203bf19c
Create Date: 2019-04-12 09:34:23.479972
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b6a4b6f45fc8'
down_revision = 'd4c7203bf19c'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Enforces uniqueness of country.country_code at the DB level.
    op.create_unique_constraint(None, 'country', ['country_code'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): dropping an unnamed constraint may fail at runtime —
    # name it in upgrade() so the migration is reversible.
    op.drop_constraint(None, 'country', type_='unique')
    # ### end Alembic commands ###
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,033
|
voonshunzhi/world
|
refs/heads/master
|
/app.py
|
from world import app;
from world.models.models import Language,CountryLanguage,City,Country;
from flask import render_template,request,redirect,url_for,abort;
import csv
from world import db;
@app.route("/")
def index():
return render_template("index.html")
@app.route("/search")
def search():
    """Search languages, countries and cities for a title-cased substring."""
    search = request.args.get('search')
    pattern = '%' + search.title() + '%'
    languages = Language.query.filter(Language.language.like(pattern)).distinct(Language.language).group_by(Language.id, Language.language).all()
    countries = Country.query.filter(Country.country_name.like(pattern)).distinct(Country.country_name).group_by(Country.id, Country.country_name, Country.country_code).all()
    cites = City.query.filter(City.city_name.like(pattern)).distinct(City.city_name).group_by(City.id, City.city_name).all()
    totalLength = len(cites + countries + languages)
    # Bug fix: the old condition (`len(languages) == 0 or languages == 1`)
    # compared a list to an int (always False) and only looked at languages;
    # pluralise on the total hit count, singular only for exactly one result.
    results = "result" if totalLength == 1 else "results"
    return render_template("search.html", totalLength=totalLength, result=results, search=search, countries=countries, languages=languages, cities=cites)
@app.route("/country/<id>")
def country(id):
    """Detail page for a single country."""
    record = Country.query.get(id)
    return render_template("country.html", country=record)
@app.route("/language/<id>")
def language(id):
    """Show one language row plus every country speaking the same language."""
    language = Language.query.get(id)
    languages = Language.query.filter_by(language=language.language).distinct(Language.country_code).group_by(Language.id, Language.country_code).all()
    # Removed leftover debug print(languages).
    return render_template("language.html", language=language, languages=enumerate(languages))
@app.route("/city/<id>")
def city(id):
    """Detail page for a single city."""
    record = City.query.get(id)
    return render_template("city.html", city=record)
@app.route("/update/city/<id>",methods=['GET','POST'])
def updateCity(id):
    """Render the city edit form on GET, apply the submitted changes on POST."""
    city = City.query.get(id)
    if request.method == 'GET':
        return render_template("update/city.html", city=city)
    # Copy each editable field from the form, in the same order as before.
    for field in ('city_name', 'province', 'population'):
        setattr(city, field, request.form[field])
    db.session.commit()
    return redirect(url_for('city', id=city.id), code=302)
@app.route("/update/country/<id>",methods=['GET','POST'])
def updateCountry(id):
    """Render the country edit form on GET, apply the submitted changes on POST."""
    country = Country.query.get(id)
    if request.method == 'GET':
        return render_template("update/country.html", country=country)
    # Copy each editable field from the form; order matches the original code
    # so a missing form key raises on the same field as before.
    editable = (
        'continent', 'region', 'area', 'year_of_independence', 'population',
        'life_expectancy', 'gnp', 'gnpid', 'alternative_name',
        'ruling_system', 'founder', 'iso_code',
    )
    for field in editable:
        setattr(country, field, request.form[field])
    db.session.commit()
    return redirect(url_for('country', id=country.country_code), code=302)
@app.route("/update/language/<id>",methods=['GET','POST'])
def updateLanguage(id):
    """Render the language edit form on GET, apply the submitted change on POST."""
    record = Language.query.get(id)
    if request.method == 'GET':
        return render_template('update/language.html', language=record)
    record.percentage_of_use = request.form['percentage_of_use']
    db.session.commit()
    return redirect(url_for('language', id=record.id), code=302)
@app.route("/delete/country/<id>",methods=['POST'])
def deleteCountry(id):
    """Delete a country and all of its language rows."""
    if request.method == 'POST':
        country = Country.query.get(id)
        # Delete the dependent language rows first, then the country, and
        # commit once so the whole removal is atomic (the old code issued a
        # commit per language row, leaving partial deletes on failure).
        for language in Language.query.filter_by(country_code=id):
            db.session.delete(language)
        db.session.delete(country)
        db.session.commit()
        return redirect(url_for('index'))
    else:
        # Unreachable in practice: Flask answers non-POST with 405 before the
        # view runs; kept for parity with the original defensive check.
        return abort(404)
# Run the Flask development server when executed directly (debug=True enables
# the reloader and interactive debugger — not for production use).
if __name__ == "__main__":
    app.run(debug=True);
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,034
|
voonshunzhi/world
|
refs/heads/master
|
/world/models/seed/seedLanguage.py
|
import csv
from world.models.models import Language;
from world import db;
# Seed the language table from ../language.csv (header row skipped).
# Bug fix: the original opened the file without ever closing it; a context
# manager guarantees the handle is released.
with open('../language.csv') as f:
    csv_f = csv.reader(f)
    next(csv_f)  # skip the header row
    for row in csv_f:
        print(row)
        # Column 2 is stored as 'T'/'F' in the CSV; map it to a real boolean.
        row[2] = row[2] == 'T'
        row[3] = float(row[3])
        db.session.add(Language(row[0], row[1], row[2], row[3]))
db.session.commit()
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,035
|
voonshunzhi/world
|
refs/heads/master
|
/world/models/seed/seedCity.py
|
import csv
from world.models.models import City;
from world import db;
# Seed the city table from ../city.csv (header row skipped).
# Bug fix: the original opened the file without ever closing it; a context
# manager guarantees the handle is released.
with open('../city.csv') as f:
    csv_f = csv.reader(f)
    next(csv_f)  # skip the header row
    for row in csv_f:
        print(row)
        # Columns 1..4 feed the City constructor (column 0 is unused here).
        db.session.add(City(row[1], row[2], row[3], row[4]))
db.session.commit()
|
{"/world/models/seed/seedCountry.py": ["/world/models/models.py"], "/app.py": ["/world/models/models.py"], "/world/models/seed/seedLanguage.py": ["/world/models/models.py"], "/world/models/seed/seedCity.py": ["/world/models/models.py"]}
|
21,049
|
Rnazx/Assignment-08
|
refs/heads/main
|
/Q1.py
|
import library as rands
import matplotlib.pyplot as plt
import math
# Random-walk experiment: five step counts (250, 500, ..., 1250), l walks
# each, drawn on a 3x2 subplot grid, followed by an RMS-distance vs sqrt(N)
# summary plot.
figure, axes = plt.subplots(nrows=3, ncols=2)  # grid of per-experiment plots
RMS = []    # RMS displacement per experiment
Nroot = []  # sqrt(step count) per experiment
l = 100     # walks per experiment (the printed banner says 5; left as-is)
N = 250     # steps in the first experiment
banner = "************************************************************************************************************"
for i in range(1, 6):
    print(banner)
    print(" For Steps = "+str(N)+" and Number of walks = 5 ")
    print(banner)
    X, Y, rms, avgx, avgy, radialdis = rands.randomwalk(l, N)
    print("The root mean square distance = ", rms)
    print("Average displacement in the x direction = ", avgx)
    print("Average displacement in the y direction = ", avgy)
    print("The Radial distance R = ", radialdis)
    RMS.append(rms)
    Nroot.append(math.sqrt(N))
    # Map experiment 1..5 onto grid cells (0,0),(0,1),(1,0),(1,1),(2,0) —
    # same mapping as the original if/elif ladder, via divmod.
    u, k = divmod(i - 1, 2)
    for j in range(5):
        axes[u, k].set_xlabel('X')
        axes[u, k].set_ylabel('Y')
        axes[u, k].grid(True)
        axes[u, k].set_title("No. of steps = "+ str(N))
        axes[u, k].plot(X[j], Y[j])
    N += 250
plt.figure()
plt.title("RMS distance vs root of N plot for different number of steps starting from 250 ")
plt.ylabel('RMS distance')
plt.xlabel('squareroot of N')
plt.plot(Nroot, RMS)
plt.grid(True)
plt.show()
'''************************************************************************************************************
For Steps = 250 and Number of walks = 5
************************************************************************************************************
The root mean square distance = 15.629352039696183
Average displacement in the x direction = 2.880434476082775
Average displacement in the y direction = -1.2758385256133062
The Radial distance R = 3.1503439041548127
************************************************************************************************************
For Steps = 500 and Number of walks = 5
************************************************************************************************************
The root mean square distance = 22.76072031959625
Average displacement in the x direction = 0.7067189887553609
Average displacement in the y direction = 2.151122020124929
The Radial distance R = 2.2642388731169145
************************************************************************************************************
For Steps = 750 and Number of walks = 5
************************************************************************************************************
The root mean square distance = 26.504581980149055
Average displacement in the x direction = 1.2664351771738056
Average displacement in the y direction = -0.349450693936544
The Radial distance R = 1.3137632379831536
************************************************************************************************************
For Steps = 1000 and Number of walks = 5
************************************************************************************************************
The root mean square distance = 32.50692594653746
Average displacement in the x direction = 4.3253278465174425
Average displacement in the y direction = -1.743836441343079
The Radial distance R = 4.663628041987837
************************************************************************************************************
For Steps = 1250 and Number of walks = 5
************************************************************************************************************
The root mean square distance = 33.981880998320044
Average displacement in the x direction = -0.4284515473756484
Average displacement in the y direction = 1.4508808279631595
The Radial distance R = 1.5128205132796326'''
|
{"/Q1.py": ["/library.py"], "/Q2.py": ["/library.py"]}
|
21,050
|
Rnazx/Assignment-08
|
refs/heads/main
|
/Q2.py
|
import matplotlib.pyplot as plt
import library as lib
# Monte-Carlo estimate of an ellipsoid's volume, swept over sample sizes.
n = []      # sample sizes tried
V = []      # volume estimate per sample size
error = []  # fractional error per sample size
anavol = 12.56637  # analytical volume of this ellipsoid (4*pi for a*b*c = 3)
a = 1    # semi-axis along x
b = 1.5  # semi-axis along y
c = 2    # semi-axis along z
def elip(x, y, z):
    """Ellipsoid indicator: value <= 1 exactly when (x, y, z) lies inside."""
    return ((x**2)/(a**2)) + ((y**2)/(b**2)) + ((z**2)/(c**2))
# Sweep the sample size from 100 to 45000 in steps of 100, recording the
# volume estimate and its fractional error at each size.
N = 45000  # largest sample size in the sweep
stars = "**************************************************************************************************************"
for i in range(100, N + 1, 100):
    X1, Y1, Z1, err, vol = lib.montecarlovolume(-a, a, -b, b, -c, c, elip, i, anavol)
    n.append(i)
    V.append(vol)
    error.append(err)
    print(stars)
    print("The volume of the ellipsoid obtained from montecarlo method is", vol, "and the fractional error in the estimation is ", err)
    print(stars)
# Volume estimate vs sample size, against the analytical value.
plt.figure()
plt.plot(n, V)
plt.axhline(12.56637, color='r')
plt.text(30000, 12.6, "Analytical value = 12.56637", size=16,
         va="baseline", ha="left", multialignment="left")
plt.title("Estimated volume of an ellipsoid vs number of points")
plt.xlabel("Number of points")
plt.ylabel("Volume of ellipsoid")
# Fractional error vs sample size.
plt.figure()
plt.plot(n, error)
plt.title("Fractional error as a function of number of points")
plt.xlabel("No. of points")
plt.ylabel("Fractional Error")
plt.show()
# 3-D scatter of the accepted sample points for a fresh run with N = 10,000.
X, Y, Z, f, V = lib.montecarlovolume(-a, a, -b, b, -c, c, elip, 10000, anavol)
fig = plt.figure(figsize=(16, 9))
ax = plt.axes(projection="3d")
ax.scatter3D(X, Y, Z, color="green")
plt.title("3D scatter plot of ellipsoid for N=10,000")
plt.show()
'''
**************************************************************************************************************
The volume of the ellipsoid obtained from montecarlo method is 12.585066666666668 and the fractional error in the estimation is 0.0014878335324098213
**************************************************************************************************************'''
|
{"/Q1.py": ["/library.py"], "/Q2.py": ["/library.py"]}
|
21,051
|
Rnazx/Assignment-08
|
refs/heads/main
|
/library.py
|
import math
import random
import time
import matplotlib.pyplot as plt
def randomwalk(iterations, steps):
    """Simulate `iterations` 2-D random walks of `steps` unit steps each.

    Returns (X, Y, rms, avgx, avgy, radialdis): X[j]/Y[j] are the coordinate
    lists of walk j (steps + 1 points, starting at the origin), rms is the
    root-mean-square end-to-end distance over all walks, avgx/avgy the mean
    final displacements, and radialdis = sqrt(avgx**2 + avgy**2).
    """
    random.seed(None)  # reseed from system entropy (runs are non-deterministic)
    X = []  # one trajectory (list of x coordinates) per walk
    Y = []  # one trajectory (list of y coordinates) per walk
    sumdis = 0.0
    for _ in range(iterations):
        # Bug fix: the old code reused X/Y for a single walk and then did
        # X.append(X) / Y.append(Y) — appending each list to itself — so the
        # per-walk trajectories were lost and X[i][-1] below crashed on a
        # float. Collect each walk's trajectory into the outer lists instead.
        walk_x = [0.0]
        walk_y = [0.0]
        x = 0.0
        y = 0.0
        for _ in range(steps):
            theta = 2 * math.pi * random.random()  # uniform step direction
            x += math.cos(theta)
            y += math.sin(theta)
            walk_x.append(x)
            walk_y.append(y)
        X.append(walk_x)
        Y.append(walk_y)
        sumdis += x ** 2 + y ** 2
    rms = math.sqrt(sumdis / iterations)
    avgx = sum(w[-1] for w in X) / iterations
    avgy = sum(w[-1] for w in Y) / iterations
    radialdis = math.sqrt(avgx ** 2 + avgy ** 2)
    return X, Y, rms, avgx, avgy, radialdis
def montecarlovolume(x1, x2, y1, y2, z1, z2, f, N, analytical):
    """Estimate the volume of the region f(x, y, z) <= 1 inside a box.

    Draws N uniform points in [x1,x2]x[y1,y2]x[z1,z2]; the accepted points
    are returned in X, Y, Z, together with the fractional error against
    `analytical` and the volume estimate itself.
    """
    X, Y, Z = [], [], []
    volbox = (x2 - x1) * (y2 - y1) * (z2 - z1)  # bounding-box volume
    hits = 0
    for _ in range(N):
        px = random.uniform(x1, x2)
        py = random.uniform(y1, y2)
        pz = random.uniform(z1, z2)
        if f(px, py, pz) <= 1:
            X.append(px)
            Y.append(py)
            Z.append(pz)
            hits += 1
    # Hit fraction times the box volume gives the enclosed-volume estimate.
    volcurve = (volbox / float(N)) * hits
    fracerror = abs(volcurve - analytical) / analytical
    return X, Y, Z, fracerror, volcurve
|
{"/Q1.py": ["/library.py"], "/Q2.py": ["/library.py"]}
|
21,054
|
ShipraShalini/social_connect
|
refs/heads/main
|
/post/models.py
|
import uuid
from django.contrib.auth.models import User
from django.db.models import (
CASCADE,
PROTECT,
CharField,
DateTimeField,
ForeignKey,
Model,
TextField,
UUIDField,
)
class Post(Model):
uuid = UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = ForeignKey(
User, related_name="posts", on_delete=CASCADE, null=False, blank=False
)
title = CharField(max_length=510)
message = TextField()
created_at = DateTimeField(auto_now_add=True)
updated_at = DateTimeField(auto_now=True)
created_by = ForeignKey(
User, on_delete=PROTECT, null=False, blank=False, related_name="created_posts"
)
updated_by = ForeignKey(
User, on_delete=PROTECT, null=False, blank=False, related_name="updated_posts"
)
class Meta:
ordering = ["-created_at"]
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,055
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/utils.py
|
from user_agents import parse
def get_user_agent(headers):
"""Get user agent from the request."""
raw_agent = headers.get("HTTP_USER_AGENT") or ""
pretty_agent = str(parse(raw_agent))
return raw_agent, pretty_agent
def get_ip(headers):
"""Get IP from the request headers."""
return headers.get("HTTP_X_FORWARDED_FOR") or headers.get("REMOTE_ADDR")
def is_api_request(request):
"""Check if the request is consuming an API."""
return "api" in request.path
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,056
|
ShipraShalini/social_connect
|
refs/heads/main
|
/post/v1/urls.py
|
from django.urls import path
from post.v1.views import AdminPostViewSet, PostViewSet
app_name = "post"
post_list = PostViewSet.as_view({"get": "list", "post": "create"})
post_detail = PostViewSet.as_view(
{"get": "retrieve", "put": "update", "patch": "partial_update", "delete": "destroy"}
)
admin_post_list = AdminPostViewSet.as_view({"get": "list", "post": "create"})
admin_post_detail = AdminPostViewSet.as_view(
{"get": "retrieve", "put": "update", "patch": "partial_update", "delete": "destroy"}
)
urlpatterns = [
path("post/", post_list, name="post-list"),
path("post/<uuid:pk>/", post_detail, name="post-detail"),
path("admin/post/", admin_post_list, name="admin-post-list"),
path("admin/post/<uuid:pk>/", admin_post_detail, name="admin-post-detail"),
]
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,057
|
ShipraShalini/social_connect
|
refs/heads/main
|
/post/v1/views.py
|
from rest_framework.permissions import IsAuthenticated
from post.serializers import PostSerializer
from social_connect.admin_override_views import AbstractAdminOverrideViewSet
from social_connect.custom_views import CustomModelViewSet
class PostViewSet(CustomModelViewSet):
"""A simple ViewSet for Post CRUD"""
serializer_class = PostSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return self.request.user.posts.all()
def create(self, request, *args, **kwargs):
request.data["user"] = request.user.id
request.data["created_by"] = request.user.id
request.data["updated_by"] = request.user.id
return super().create(request, *args, **kwargs)
def partial_update(self, request, *args, **kwargs):
request.data["updated_by"] = request.user.id
return super().partial_update(request, *args, **kwargs)
class AdminPostViewSet(AbstractAdminOverrideViewSet):
"""
A Post CRUD for the admins.
"""
serializer_class = PostSerializer
def get_queryset(self):
return self.request.access_req.user.posts.all()
def create(self, request, *args, **kwargs):
request.data["user"] = request.access_req.user_id
request.data["created_by"] = request.access_req.admin_id
request.data["updated_by"] = request.access_req.admin_id
return super().create(request, *args, **kwargs)
def partial_update(self, request, *args, **kwargs):
request.data["updated_by"] = request.access_req.admin_id
return super().partial_update(request, *args, **kwargs)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,058
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/exception_handler.py
|
import logging
from datetime import datetime
from urllib.parse import quote
from django.views.defaults import page_not_found, permission_denied
from rest_framework import status
from social_connect.api_response import APIResponse
from social_connect.constants import BUILTIN_ERROR_MESSAGE, CLIENT_ERROR_SET
from social_connect.utils import get_ip, get_user_agent, is_api_request
logger = logging.getLogger("access_log")
def get_exception_message(exception):
"""Get error message from the exception."""
exception_name = exception.__class__.__name__
message = BUILTIN_ERROR_MESSAGE.get(exception_name)
if message:
return message
message = getattr(exception, "message", None)
if message is not None:
return str(message)
message = getattr(exception, "args", None)
if message:
return str(message[0] if isinstance(message, tuple) else message)
else:
return exception_name
class ExceptionHandler:
"""Exception handler for the API requests."""
def get_status_code(self, exc):
"""Get HTTP status code for the exception."""
status_code = getattr(exc, "status_code", None)
if status_code is not None:
return status_code
if exc.__class__.__name__ in CLIENT_ERROR_SET:
return status.HTTP_400_BAD_REQUEST
else:
return status.HTTP_500_INTERNAL_SERVER_ERROR
def handle_exception(self, request, exception):
headers = request.headers
status_code = self.get_status_code(exception)
_, user_agent = get_user_agent(headers)
error_data = {
"status": status_code,
"date": datetime.utcnow(),
"IP": get_ip(headers),
"user_agent": user_agent,
"user": getattr(request.user, "username", "AnonymousUser"),
"error": exception.__class__.__name__,
"error_msg": get_exception_message(exception),
}
logger.error("error_log", extra=error_data, exc_info=True)
return error_data
def drf_exception_handler(exception, context):
"""Custom exception handler for DRF."""
request = context["request"]
error_data = ExceptionHandler().handle_exception(request, exception)
return APIResponse(error_data, is_success=False, status=error_data["status"])
def json_page_not_found(request, exception, *args, **kwargs):
"""Override 404 error to return a JSON Error"""
if not is_api_request(request):
return page_not_found(request, exception, *args, **kwargs)
context = {
"request_path": quote(request.path),
"exception": get_exception_message(exception),
}
return APIResponse(context, is_success=False, status=status.HTTP_404_NOT_FOUND)
def json_permission_denied(request, exception, *args, **kwargs):
"""Override 403 error to return a JSON Error"""
if not is_api_request(request):
return permission_denied(request, exception, *args, **kwargs)
context = {
"request_path": quote(request.path),
"exception": get_exception_message(exception),
}
return APIResponse(context, is_success=False, status=status.HTTP_403_FORBIDDEN)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,059
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/admin_override_views.py
|
from django.db import transaction
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from access.access_request_handler import AccessRequestHandler
from access.constants import STATUS_APPROVED, STATUS_IN_USE, STATUS_USED
from social_connect.custom_views import CustomModelViewSet
class AbstractAdminOverrideViewSet(CustomModelViewSet):
"""
A CRUD viewset for the admins.
Checks for a valid approved access request for the request to be authorized.
SuperAdmins also need a valid approved access request for record.
"""
permission_classes = [IsAuthenticated, IsAdminUser]
def dispatch(self, request, *args, **kwargs):
"""
Adds attribute `access_req` to request before checking permissions.
`access_req` attribute can be `None`.
Updates AccessRequest status at different stages of request processing along
with regular dispatch functions.
"""
self.args = args
self.kwargs = kwargs
request = self.initialize_request(request, *args, **kwargs)
self.request = request
self.headers = self.default_response_headers # deprecate?
try:
self.initial(request, *args, **kwargs)
# Get the appropriate handler method
if request.method.lower() in self.http_method_names:
handler = getattr(
self, request.method.lower(), self.http_method_not_allowed
)
response = self.call_handler(request, handler, *args, **kwargs)
else:
handler = self.http_method_not_allowed
response = handler(request, *args, **kwargs)
except Exception as exc:
response = self.handle_exception(exc)
self.response = self.finalize_response(request, response, *args, **kwargs)
return self.response
def call_handler(self, request, handler, *args, **kwargs):
# Adding attribute `access_req` to request.
self.get_approved_access_req(request)
# Adding `updated_by` to the request data.
request.data["updated_by"] = request.user.id
# Keeping this out of the atomic block as
# it needs to be set before starting the transaction.
self.access_req_handler.mark_status(request.access_req, STATUS_IN_USE)
try:
with transaction.atomic():
response = handler(request, *args, **kwargs)
# Reverting the status back to approved as the process failed.
self.access_req_handler.mark_status(request.access_req, STATUS_USED)
except Exception:
self.access_req_handler.mark_status(request.access_req, STATUS_APPROVED)
raise
return response
def get_approved_access_req(self, request):
"""
Check if the admin has proper access.
If yes, attach the access_req to the request.
"""
admin = request.user
user_id = request.data.get("user_id")
if not user_id:
raise ValidationError("`user_id` is required.")
self.access_req_handler = AccessRequestHandler()
access_req = self.access_req_handler.get_oldest_valid_approved_access_req(
admin, user_id
)
if not access_req:
raise PermissionDenied("No valid approved access request found.")
request.access_req = access_req
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,060
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/utils.py
|
from datetime import datetime, timedelta
from access.constants import ACCESS_REQUEST_VALID_DAYS
def get_last_valid_access_req_date():
"""Returns the last valid date for access request."""
return datetime.utcnow() - timedelta(days=ACCESS_REQUEST_VALID_DAYS)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,061
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/migrations/0001_initial.py
|
# Generated by Django 3.1.7 on 2021-03-20 10:32
import uuid
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='AccessRequest',
fields=[
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('request_reason', models.TextField(blank=True, null=True)),
('decision_reason', models.TextField(blank=True, null=True)),
('status', models.CharField(choices=[('pending', 'Pending'), ('approved', 'Approved'), ('declined', 'Declined'), ('in_use', 'In Use'), ('used', 'Used'), ('expired', 'Expired')], default='pending', max_length=10)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(blank=True, null=True)),
('used_at', models.DateTimeField(blank=True, null=True)),
('admin', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='admin_requests', to=settings.AUTH_USER_MODEL)),
('superadmin', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='superadmin_requests', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-created_at'],
},
),
]
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,062
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/api_response.py
|
from rest_framework import status as http_status
from rest_framework.response import Response
from social_connect.constants import (
CONTENT_TYPE_JSON,
RESPONSE_KEY_DATA,
RESPONSE_KEY_ERROR,
RESPONSE_KEY_IS_SUCCESS,
)
class APIResponse(Response):
"""Custom API Response class."""
def __init__(
self,
data=None,
status=http_status.HTTP_200_OK,
is_success=None,
content_type=CONTENT_TYPE_JSON,
**kwargs
):
"""Initialize API response."""
is_success = (
http_status.is_success(status) if is_success is None else is_success
)
key = RESPONSE_KEY_DATA if is_success else RESPONSE_KEY_ERROR
if not data and not isinstance(data, list):
data = {}
response_data = {RESPONSE_KEY_IS_SUCCESS: is_success, key: data}
super().__init__(
data=response_data, status=status, content_type=content_type, **kwargs
)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,063
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/constants.py
|
HTTP_HEADER_LIST = [
"REMOTE_ADDR",
"REMOTE_HOST",
"X_FORWARDED_FOR",
"TZ",
"QUERY_STRING",
"CONTENT_LENGTH",
"CONTENT_TYPE",
"LC_CTYPE",
"SERVER_PROTOCOL",
"SERVER_SOFTWARE",
]
MASKED_DATA = "XXXXXXXXX"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_METHOD_MAP = {CONTENT_TYPE_JSON: "_get_json_data"}
CLIENT_ERROR_SET = {
"AttributeError",
"IntegrityError",
"KeyError",
"ValidationError",
}
BUILTIN_ERROR_MESSAGE = {
"Http404": "Not found",
"PermissionDenied": "Permission denied.",
}
MODEL_VIEWSET_METHODNAMES = ["create", "retrieve", "list", "update", "destroy"]
RESPONSE_KEY_DATA = "data"
RESPONSE_KEY_ERROR = "error"
RESPONSE_KEY_IS_SUCCESS = "is_success"
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,064
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/models.py
|
import uuid
from django.contrib.auth.models import User
from django.db import models
from django.db.models import (
CASCADE,
PROTECT,
CharField,
DateTimeField,
ForeignKey,
TextField,
UUIDField,
)
from access.constants import (
ACCESS_REQUEST_STATUS_CHOICES,
STATUS_EXPIRED,
STATUS_PENDING,
)
from access.utils import get_last_valid_access_req_date
class AccessRequest(models.Model):
uuid = UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
admin = ForeignKey(User, on_delete=PROTECT, related_name="admin_requests")
superadmin = ForeignKey(
User,
on_delete=PROTECT,
related_name="superadmin_requests",
null=True,
blank=True,
)
user = ForeignKey(User, on_delete=CASCADE)
request_reason = TextField(null=True, blank=True)
decision_reason = TextField(null=True, blank=True)
status = CharField(
max_length=10, choices=ACCESS_REQUEST_STATUS_CHOICES, default="pending"
)
created_at = DateTimeField(auto_now_add=True)
updated_at = DateTimeField(null=True, blank=True)
used_at = DateTimeField(null=True, blank=True)
class Meta:
ordering = ["-created_at"]
def is_expired(self):
# Todo: Run a periodic task to mark the request expired.
return self.status == STATUS_EXPIRED or (
self.status == STATUS_PENDING
and self.created_at >= get_last_valid_access_req_date()
)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,065
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/urls.py
|
"""social_connect URL Configuration"""
from django.contrib import admin
from django.urls import include, path
from drf_spectacular.views import (
SpectacularAPIView,
SpectacularRedocView,
SpectacularSwaggerView,
)
from rest_framework.decorators import api_view
from rest_framework_simplejwt.views import (
TokenObtainPairView,
TokenRefreshView,
TokenVerifyView,
)
from social_connect.api_response import APIResponse
# Overriding default exception handlers for 404 & 403 errors.
handler404 = "social_connect.exception_handler.json_page_not_found"
handler403 = "social_connect.exception_handler.json_permission_denied"
@api_view(("GET",))
def health(request):
return APIResponse({"status": "healthy"})
auth_urls = [
path(
"auth/token/",
TokenObtainPairView.as_view(),
name="token_obtain_pair",
),
path(
"auth/token/refresh/",
TokenRefreshView.as_view(),
name="token_refresh",
),
path("auth/token/verify/", TokenVerifyView.as_view(), name="token_verify"),
]
schema_urls = [
path("schema/", SpectacularAPIView.as_view(), name="schema"),
path(
"schema/swaggerui/",
SpectacularSwaggerView.as_view(url_name="schema"),
name="swagger-ui",
),
path(
"schema/redoc/",
SpectacularRedocView.as_view(url_name="schema"),
name="redoc",
),
]
v1_urls = [
# Auth URLs
*auth_urls,
path("", include("post.v1.urls")),
path("access_req/", include("access.v1.urls")),
]
urlpatterns = [
# Admin URLs
path("admin/", admin.site.urls),
# Verion 1 URLs
path(
"api/",
include(
[
path("v1/", include(v1_urls)),
]
),
),
# Schema URLs
*schema_urls,
path("", health),
]
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,066
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/serializers.py
|
from rest_framework.serializers import ModelSerializer
from access.models import AccessRequest
from social_connect.serializers import MinimalUserSerializer
class AccessRequestSerializer(ModelSerializer):
admin = MinimalUserSerializer()
superadmin = MinimalUserSerializer()
user = MinimalUserSerializer()
class Meta:
model = AccessRequest
fields = "__all__"
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,067
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/serializers.py
|
from django.contrib.auth.models import User
from rest_framework.serializers import ModelSerializer
class UserSerializer(ModelSerializer):
"""DRF Serializer for User model"""
class Meta:
model = User
exclude = ["password"]
class MinimalUserSerializer(ModelSerializer):
"""DRF Serializer for User model when only a few public fields are needed."""
class Meta:
model = User
fields = ["id", "username", "first_name", "last_name", "email", "is_active"]
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,068
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/v1/urls.py
|
from django.urls import path
from access.v1.views import (
AdminAccessRequestView,
SuperAdminAccessRequestDecisionView,
SuperAdminAccessRequestListView,
)
app_name = "access"
urlpatterns = [
path("admin/", AdminAccessRequestView.as_view(), name="admin-access"),
path(
"superadmin/",
SuperAdminAccessRequestListView.as_view(),
name="superadmin-list",
),
path(
"decision/<uuid:access_req_id>",
SuperAdminAccessRequestDecisionView.as_view(),
name="superadmin-decision",
),
]
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,069
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/custom_views.py
|
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet
from social_connect.api_response import APIResponse
def get_status_code(response):
"""Get Status code from the response."""
for attr in ["status", "status_code"]:
code = getattr(response, attr, None)
if code:
return code
class CustomCreateModelMixin(mixins.CreateModelMixin):
"""Create a model instance."""
def create(self, request, *args, **kwargs):
"""Create an object."""
response = super(CustomCreateModelMixin, self).create(request, *args, **kwargs)
return APIResponse(
data=response.data,
status=get_status_code(response),
headers=response._headers,
)
class CustomListModelMixin(mixins.ListModelMixin):
"""List a queryset."""
def list(self, request, *args, **kwargs):
"""Retrieve a list of objects."""
response = super(CustomListModelMixin, self).list(request, *args, **kwargs)
return APIResponse(
data=response.data, status=response.status_code, headers=response._headers
)
class CustomRetrieveModelMixin(mixins.RetrieveModelMixin):
"""Retrieve a model instance."""
def retrieve(self, request, *args, **kwargs):
"""Retrieve an object."""
response = super(CustomRetrieveModelMixin, self).retrieve(
request, *args, **kwargs
)
return APIResponse(
data=response.data, status=response.status_code, headers=response._headers
)
class CustomUpdateModelMixin(mixins.UpdateModelMixin):
"""Update a model instance."""
def update(self, request, *args, **kwargs):
"""Update an object."""
response = super(CustomUpdateModelMixin, self).update(request, *args, **kwargs)
return APIResponse(data=response.data, status=get_status_code(response))
class CustomDestroyModelMixin(mixins.DestroyModelMixin):
"""Destroy a model instance."""
def destroy(self, request, *args, **kwargs):
"""Delete an object."""
response = super(CustomDestroyModelMixin, self).destroy(
request, *args, **kwargs
)
return APIResponse(data=response.data, status=get_status_code(response))
class CustomModelViewSet(
CustomCreateModelMixin,
CustomListModelMixin,
CustomRetrieveModelMixin,
CustomUpdateModelMixin,
CustomDestroyModelMixin,
GenericViewSet,
):
pass
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,070
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/v1/views.py
|
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.views import APIView
from access.access_request_handler import AccessRequestHandler
from social_connect.api_response import APIResponse
from social_connect.permissions import IsSuperAdminUser
class AdminAccessRequestView(APIView):
permission_classes = (IsAuthenticated, IsAdminUser)
def post(self, request, *args, **kwargs):
admin = request.user
data = request.data
req = AccessRequestHandler().create(admin, data)
return APIResponse(req)
def get(self, request, *args, **kwargs):
data = AccessRequestHandler().get_request_list({"admin_id": request.user})
return APIResponse(data)
class SuperAdminAccessRequestListView(APIView):
permission_classes = (IsAuthenticated, IsSuperAdminUser)
def get(self, request, *args, **kwargs):
data = AccessRequestHandler().get_request_list({"superadmin_id": request.user})
return APIResponse(data)
class SuperAdminAccessRequestDecisionView(APIView):
permission_classes = (IsAuthenticated, IsSuperAdminUser)
def patch(self, request, access_req_id, *args, **kwargs):
superadmin = request.user
data = request.data
req = AccessRequestHandler().take_decision(access_req_id, superadmin, data)
return APIResponse(req)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,071
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/middlewares.py
|
import json
import logging
from datetime import datetime
from urllib.parse import parse_qs
from django.core.serializers.json import DjangoJSONEncoder
from rest_framework.status import is_success
from social_connect.api_response import APIResponse
from social_connect.constants import (
CONTENT_TYPE_METHOD_MAP,
HTTP_HEADER_LIST,
MASKED_DATA,
)
from social_connect.exception_handler import ExceptionHandler
from social_connect.utils import get_ip, get_user_agent, is_api_request
logger = logging.getLogger("access_log")
class LogMiddleware:
"""Log all the requests that come to the app."""
def __init__(self, get_response):
"""Initialize."""
self.get_response = get_response
def _get_urlencoded_data(self, request_body, **kwargs):
"""Return the URL Encoded data from request body."""
return parse_qs(request_body)
def _get_json_data(self, request_body, **kwargs):
"""Return JSON data from the request body."""
return json.loads(request_body)
def _decode_unicode_data(self, request_body):
"""Decoding unicode data first else the following statement may fail."""
if isinstance(request_body, bytes):
try:
return request_body.decode("utf-8")
except UnicodeDecodeError:
pass
def get_request_data(self, request, request_body):
"""
Process request data.
Handling only JSON data, can be extended to get other formats.
"""
request_body = self._decode_unicode_data(request_body)
method_name = CONTENT_TYPE_METHOD_MAP.get(request.content_type, "")
method = getattr(self, method_name, None)
try:
return (
method(request=request, request_body=request_body) if method else None
)
except Exception: # noqa
return None
def get_headers(self, request):
"""Return the headers from the request."""
headers = {}
for header, value in request.META.items():
if header.startswith("HTTP_") or header in HTTP_HEADER_LIST:
headers[header] = value
return headers
def mask_auth_token(self, response_data):
"""
Mask token if present in response.
This can be extended to mask tokens sent for
reset password, email verification etc.
"""
if not isinstance(response_data, dict):
return
data = response_data
if "refresh" in response_data:
data["refresh"] = self._mask_token(data["refresh"])
if "access" in response_data:
data["access"] = self._mask_token(data["access"])
def _mask_token(self, token):
"""
Mask the bearer token.
This is done so that one has some idea about the token format.
"""
return f"{token[:15]}{MASKED_DATA}{token[-10:]}"
def mask_data(self, request_data, response_data, headers):
"""Mask sensitive data before logging."""
if (
request_data
and isinstance(request_data, dict)
and "password" in request_data
):
request_data["password"] = MASKED_DATA
if response_data:
self.mask_auth_token(response_data)
if headers and "HTTP_AUTHORIZATION" in headers:
auth_header = headers["HTTP_AUTHORIZATION"]
headers["HTTP_AUTHORIZATION"] = self._mask_token(auth_header)
def get_response_data(self, request, response):
"""Get response data, if there's an error get error data."""
error_data = getattr(request, "error_data", None)
if error_data:
return error_data
try:
return json.loads(response.content.decode("utf8"))
except json.decoder.JSONDecodeError:
return None
def get_log_message(self, status_code, request):
"""Return message to be logged by the logger."""
return (
"error_log"
if not is_success(status_code) or getattr(request, "error_data", None)
else "access_log"
)
def __call__(self, request):
"""Middleware call method."""
if not is_api_request(request):
return self.get_response(request)
request_body = request.body
requested_at = datetime.utcnow()
response = self.get_response(request)
path = request.get_full_path()
method = request.method
status_code = response.status_code
response_data = self.get_response_data(request, response)
request_body = self.get_request_data(request, request_body)
response_time = datetime.utcnow() - requested_at
response_time = round(response_time.total_seconds() * 1000)
response_data = json.loads(json.dumps(response_data, cls=DjangoJSONEncoder))
user = request.user if request.user.is_authenticated else None
headers = self.get_headers(request)
self.mask_data(request_body, response_data, headers)
raw_agent, pretty_agent = get_user_agent(headers)
try:
logger.info(
"access_log",
extra={
"user": user.username if user else None,
"path": path,
"method": method,
"request_data": request_body,
"requested_at": requested_at,
"response_time": int(response_time),
"status_code": status_code,
"response_data": response_data,
"ip": get_ip(headers),
"raw_user_agent": raw_agent,
"user_agent": pretty_agent,
"headers": headers,
},
)
except Exception as e: # noqa
logger.error(e, exc_info=True)
if getattr(request, "error_data", None):
return APIResponse(
request.error_data, is_success=False, status=response.status_code
)
return response
class JSONExceptionMiddleWare:
"""Return all API exceptions as JSON."""
def __init__(self, get_response):
"""Initialize."""
self.get_response = get_response
def __call__(self, request, *args, **kwargs):
return self.get_response(request)
def process_exception(self, request, exception):
if not is_api_request(request):
return
error_data = ExceptionHandler().handle_exception(request, exception)
request.error_data = error_data
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,072
|
ShipraShalini/social_connect
|
refs/heads/main
|
/social_connect/permissions.py
|
from rest_framework.permissions import BasePermission
class IsSuperAdminUser(BasePermission):
    """Grant access exclusively to superuser accounts."""

    def has_permission(self, request, view):
        """Return True only when the request carries a superuser."""
        user = request.user
        return bool(user and user.is_superuser)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,073
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/access_request_handler.py
|
from rest_framework.exceptions import ValidationError
from access.constants import (
STATUS_APPROVED,
STATUS_DECLINED,
STATUS_EXPIRED,
STATUS_IN_USE,
STATUS_PENDING,
STATUS_USED,
)
from access.models import AccessRequest
from access.serializers import AccessRequestSerializer
from access.utils import get_last_valid_access_req_date
class AccessRequestHandler:
    """Class for handling AccessRequests.

    Methods return serialized (dict) representations produced by
    AccessRequestSerializer; `mark_status`/`mark_expired` mutate state only.
    """
    def create(self, admin, data):
        """Create AccessRequest.

        admin: the requesting admin, stored on the new record.
        data: incoming payload; must contain "user_id" (KeyError otherwise),
            may contain "request_reason".  All other keys are dropped.
        Returns the serialized new request.
        """
        # Discarding all other keys provided in the data as
        # only the following fields should be updated.
        data = {
            "admin": admin,
            "request_reason": data.get("request_reason"),
            "user_id": data["user_id"],
        }
        req = AccessRequestSerializer().create(data)
        return AccessRequestSerializer(req).data
    def get_request_list(self, query):
        """Return the list of all access requests for an admin or a superadmin.

        query: ORM filter kwargs; when it contains "superadmin_id", all
            pending requests (from any admin) are merged into the result.
        Returns a dict bucketing serialized requests by status.
        """
        # Buckets keyed by status value; filled from the queryset below.
        # NOTE(review): there is no STATUS_EXPIRED bucket — this assumes no
        # request inside the validity window carries that status (mark_expired
        # only expires requests *older* than the window); confirm, otherwise
        # the append below raises KeyError.
        data = {
            STATUS_PENDING: [],
            STATUS_APPROVED: [],
            STATUS_DECLINED: [],
            STATUS_USED: [],
            STATUS_IN_USE: [],
        }
        # Get only valid requests.
        last_valid_date = get_last_valid_access_req_date()
        requests = AccessRequest.objects.filter(
            **query, created_at__gte=last_valid_date
        )
        if "superadmin_id" in query:
            # Superadmins also see every pending request, not just their own.
            requests |= AccessRequest.objects.filter(
                status=STATUS_PENDING, created_at__gte=last_valid_date
            )
        requests = AccessRequestSerializer(requests, many=True).data
        for req in requests:
            data[req["status"]].append(req)
        return data
    def take_decision(self, access_req_id, superadmin, data):
        """Approve or Decline an AccessRequest.

        access_req_id: uuid of the request being decided.
        superadmin: the deciding user, recorded on the request.
        data: payload; "status" must be approved/declined (ValidationError
            otherwise), "decision_reason" is optional.
        Returns the serialized, updated request.
        """
        status = data.get("status")
        if status not in [STATUS_APPROVED, STATUS_DECLINED]:
            raise ValidationError("Status is missing or is invalid.")
        # Discarding all other keys provided in the data as
        # only the following fields should be updated.
        data = {
            "superadmin": superadmin,
            "decision_reason": data.get("decision_reason"),
            "status": status,
        }
        # NOTE(review): update() then get() is two queries and not atomic —
        # a concurrent write between them would be reflected in the response.
        AccessRequest.objects.filter(uuid=access_req_id).update(**data)
        req = AccessRequest.objects.get(uuid=access_req_id)
        return AccessRequestSerializer(req).data
    def mark_status(self, access_req, status):
        """Set and persist `status` on the given AccessRequest instance."""
        access_req.status = status
        access_req.save()
    def mark_expired(self):
        """Mark a request expired."""
        # TODO: Run a periodic task to mark requests expired.
        # Only still-pending requests older than the validity window expire.
        last_valid_date = get_last_valid_access_req_date()
        AccessRequest.objects.filter(
            status=STATUS_PENDING, created_at__lt=last_valid_date
        ).update(status=STATUS_EXPIRED)
    def get_oldest_valid_approved_access_req(self, admin, user_id):
        """Return the oldest valid approved access req as it will be used first."""
        # NOTE(review): .last() relies on the model's default ordering to put
        # the oldest record last — verify against AccessRequest.Meta.ordering.
        return (
            AccessRequest.objects.select_related("user")
            .filter(
                admin=admin,
                user_id=user_id,
                status=STATUS_APPROVED,
                created_at__gte=get_last_valid_access_req_date(),
            )
            .last()
        )
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,074
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/admin.py
|
from django.contrib import admin
from access.models import AccessRequest
# Expose AccessRequest in the Django admin using the default ModelAdmin.
admin.site.register(AccessRequest)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,075
|
ShipraShalini/social_connect
|
refs/heads/main
|
/access/constants.py
|
# How long an access request stays valid; older requests are expired/ignored.
ACCESS_REQUEST_VALID_DAYS = 5 # In number of days
# Lifecycle states of an AccessRequest.
STATUS_PENDING = "pending"
STATUS_APPROVED = "approved"
STATUS_DECLINED = "declined"
STATUS_IN_USE = "in_use" # solely for acquiring lock.
STATUS_USED = "used"
STATUS_EXPIRED = "expired"
# (value, human-readable label) pairs for the model field's `choices` option.
ACCESS_REQUEST_STATUS_CHOICES = (
    (STATUS_PENDING, "Pending"),
    (STATUS_APPROVED, "Approved"),
    (STATUS_DECLINED, "Declined"),
    (STATUS_IN_USE, "In Use"),
    (STATUS_USED, "Used"),
    (STATUS_EXPIRED, "Expired"),
)
|
{"/post/v1/urls.py": ["/post/v1/views.py"], "/post/v1/views.py": ["/social_connect/admin_override_views.py", "/social_connect/custom_views.py"], "/social_connect/exception_handler.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/utils.py"], "/social_connect/admin_override_views.py": ["/access/access_request_handler.py", "/access/constants.py", "/social_connect/custom_views.py"], "/access/utils.py": ["/access/constants.py"], "/social_connect/api_response.py": ["/social_connect/constants.py"], "/access/models.py": ["/access/constants.py", "/access/utils.py"], "/social_connect/urls.py": ["/social_connect/api_response.py"], "/access/serializers.py": ["/access/models.py", "/social_connect/serializers.py"], "/access/v1/urls.py": ["/access/v1/views.py"], "/social_connect/custom_views.py": ["/social_connect/api_response.py"], "/access/v1/views.py": ["/access/access_request_handler.py", "/social_connect/api_response.py", "/social_connect/permissions.py"], "/social_connect/middlewares.py": ["/social_connect/api_response.py", "/social_connect/constants.py", "/social_connect/exception_handler.py", "/social_connect/utils.py"], "/access/access_request_handler.py": ["/access/constants.py", "/access/models.py", "/access/serializers.py", "/access/utils.py"], "/access/admin.py": ["/access/models.py"]}
|
21,103
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/games/prime.py
|
import random
import math
from brain_games.utils import is_even, convert_to_yes_no
# Fix: the two adjacent string literals concatenated with no separator,
# yielding '...is prime.Otherwise answer...' — a space is added.
GAME_DESCRIPTION = (
    'Answer "yes" if given number is prime. '
    'Otherwise answer "no".'
)
MAX_INT = 100
MIN_INT = 0


def create_question():
    """Generate one round: a random number and the "yes"/"no" answer."""
    number = random.randint(MIN_INT, MAX_INT)
    question = "{}".format(number)
    correct_answer = convert_to_yes_no(is_prime(number))
    return(question, correct_answer)
def is_prime(n):
    """Return True if *n* is prime.

    Fixes a bug: the previous version returned True for n == 1 (the
    trial-division loop never ran since sqrt(1) < 3).  With MIN_INT == 0
    the game could draw 1, so the bug was reachable.
    """
    if n < 2:
        return False  # 0, 1 and negatives are not prime
    if n == 2:
        return True
    if n % 2 == 0:  # parity test inlined (was is_even) — same behavior
        return False
    # Trial division by odd numbers up to the integer square root.
    bound = math.isqrt(n)
    divisor = 3
    while divisor <= bound:
        if n % divisor == 0:
            return False
        divisor += 2
    return True
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,104
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/games/progression.py
|
import random
GAME_DESCRIPTION = 'What number is missing in the progression?'
MAX_INT = 10
MIN_STEP = 1
MAX_STEP = 10
PROGRESSION_LENGTH = 10


def create_question():
    """Generate one round: an arithmetic progression with one hidden term.

    Returns (question, answer).  The question keeps its historical shape:
    every term is appended as " <term>", so it starts with a space.
    """
    # Draw order must stay: start, step, hidden position.
    start = random.randint(0, MAX_INT)
    step = random.randint(MIN_STEP, MAX_STEP)
    hidden = random.randint(0, PROGRESSION_LENGTH - 1)
    terms = []
    answer = ""
    value = start
    for index in range(PROGRESSION_LENGTH):
        if index == hidden:
            terms.append("..")
            answer = str(value)
        else:
            terms.append(str(value))
        value += step
    question = "".join(" " + term for term in terms)
    return (question, answer)
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,105
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/games/even.py
|
import random
from brain_games.utils import is_even, convert_to_yes_no
GAME_DESCRIPTION = 'Answer "yes" if number even otherwise answer "no"'
MAX_INT = 100
MIN_INT = 0


def create_question():
    """Generate one round: a random number and its even-ness as "yes"/"no"."""
    value = random.randint(MIN_INT, MAX_INT)
    return (str(value), convert_to_yes_no(is_even(value)))
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,106
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/games/gcd.py
|
import random
GAME_DESCRIPTION = 'Find the greatest common divisor of given numbers.'
MAX_INT = 100
MIN_INT = 0


def create_question():
    """Generate one round: two random numbers; the answer is their GCD."""
    left = random.randint(MIN_INT, MAX_INT)
    right = random.randint(MIN_INT, MAX_INT)
    return ("{} {}".format(left, right), str(find_gcd(left, right)))
def find_gcd(first, second):
    """Return the greatest common divisor of two non-negative ints (Euclid).

    The original's explicit swap is redundant: when first < second,
    first % second == first, so the first loop iteration performs the swap.
    """
    while second:
        first, second = second, first % second
    return first
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,107
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/games/calc.py
|
import random
import operator
GAME_DESCRIPTION = 'What is the result of the expression?'
MAX_INT = 100
MIN_INT = 0


def create_question():
    """Generate one round: a random +,-,* expression and its result."""
    left = random.randint(MIN_INT, MAX_INT)
    right = random.randint(MIN_INT, MAX_INT)
    # random.choice must remain the third draw so the RNG sequence is
    # unchanged for any given seed.
    operations = [
        (operator.add, "+"), (operator.sub, "-"), (operator.mul, "*")
    ]
    chosen_op, symbol = random.choice(operations)
    question = "{} {} {} =".format(left, symbol, right)
    return (question, str(chosen_op(left, right)))
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,108
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/engine.py
|
import brain_games.cli as cli
WIN_MESSAGE = "Congratulations, {}!"
LOSE_MESSAGE = "Let's try again, {}!"
WRONG_ANSWER_MESSAGE = (
    "'{}' is wrong answer ;(. Correct answer was '{}'"
)


def run(game, rounds=3):
    """Play *game* until the player wins or gives one wrong answer.

    game: module exposing GAME_DESCRIPTION and create_question().
    rounds: number of consecutive correct answers required to win.
        Generalized from the previous hard-coded 3; the default keeps
        existing callers' behavior unchanged.
    """
    cli.greet(game.GAME_DESCRIPTION)
    name = cli.ask_name()
    player_won = True
    for _ in range(rounds):
        (question, correct_answer) = game.create_question()
        answer = cli.ask_question(question)
        if answer == correct_answer:
            print("Correct!")
        else:
            print(WRONG_ANSWER_MESSAGE.format(answer, correct_answer))
            player_won = False
            break
    message = WIN_MESSAGE if player_won else LOSE_MESSAGE
    print(message.format(name))
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,109
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/games/__init__.py
|
from brain_games.games import calc, even, gcd, prime, progression # noqa
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
21,110
|
ShafigullinIK/python-project-lvl1
|
refs/heads/master
|
/brain_games/utils.py
|
def is_even(number):
    """Return True when *number* is divisible by two."""
    return number % 2 == 0
def convert_to_yes_no(val):
    """Map truthy values to "yes" and falsy values to "no"."""
    return "yes" if val else "no"
|
{"/brain_games/games/prime.py": ["/brain_games/utils.py"], "/brain_games/games/even.py": ["/brain_games/utils.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.