index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
37,790,934
|
metaphysicser/DP-Star
|
refs/heads/main
|
/utils/__init__.py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# Author: nomalocaris <nomalocaris.top>
""""""
from __future__ import (absolute_import, unicode_literals)
from ._progressbar import ProgressBar
from _vector_cal import to_vec_add, to_vec_sub, to_vec_times, to_vec_dot
from ._plot import plot_scatter, plot_traj
from ._vector_cal import to_vec_add, to_vec_sub, to_vec_times, to_vec_dot
import os
import datetime
import numpy as np
def signum(x):
    """Return the sign of ``x`` as a float.

    :param x: a comparable number
    :return: 1.0 if positive, -1.0 if negative, 0.0 otherwise
    """
    if x > 0:
        return 1.0
    if x < 0:
        return -1.0
    # Fix: the original returned the int 0 here (inconsistent with the float
    # branches) and fell through to an implicit None when x compared false to
    # everything (e.g. NaN).
    return 0.0
def vlen(pi1, pi2):
    """Euclidean distance between the 2-D points ``pi1`` and ``pi2``.

    :param pi1: first point as an (x, y) pair
    :param pi2: second point as an (x, y) pair
    :return: the straight-line distance
    """
    dx = pi1[0] - pi2[0]
    dy = pi1[1] - pi2[1]
    return (dx * dx + dy * dy) ** 0.5
def traj_range(dpath='dataset/old/raw_trajs_pure/Trajectory/', dtype='common'):
    """Compute the longitude/latitude range covered by a directory of trajectories.

    Each file under ``dpath`` is expected to hold one ``lon,lat`` pair per line.

    :param dpath: directory containing the trajectory files
    :param dtype: 'common' starts the scan from +/-inf; any other value starts
                  from the geographic extremes (180/90)
    :return: ([min_lon, max_lon], [min_lat, max_lat])
    """
    trajfs = os.listdir(dpath)
    traj_fnum = len(trajfs)
    print(traj_fnum)
    if dtype == 'common':
        min_lon, min_lat = np.inf, np.inf
        max_lon, max_lat = -np.inf, -np.inf
    else:
        min_lon, min_lat = 180, 90
        max_lon, max_lat = -180, -90
    p = ProgressBar(traj_fnum, '计算经纬度范围')
    for i, trajf in enumerate(trajfs):
        p.update(i)
        with open(os.path.join(dpath, trajf)) as fr:
            # Iterate the file lazily instead of readlines(): avoids loading
            # the whole file into memory at once.
            for line in fr:
                fields = line.replace('\n', '').split(',')
                lon, lat = float(fields[0]), float(fields[1])
                # min()/max() instead of the original conditional expressions
                min_lon = min(min_lon, lon)
                max_lon = max(max_lon, lon)
                min_lat = min(min_lat, lat)
                max_lat = max(max_lat, lat)
    return [min_lon, max_lon], [min_lat, max_lat]
def cal_time_interval(prev_time, next_time):
    """Compute the interval in seconds between two 'HH:MM:SS' timestamps.

    :param prev_time: earlier timestamp, e.g. '16:01:31'
    :param next_time: later timestamp
    :return: elapsed seconds (timedelta.seconds wraps across midnight)
    """
    def _parse(stamp):
        """Turn 'HH:MM:SS' into a datetime anchored at year 1."""
        hour, minute, second = (int(part) for part in stamp.split(':'))
        return datetime.datetime(1, 1, 1, hour, minute, second)

    return (_parse(next_time) - _parse(prev_time)).seconds
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,935
|
metaphysicser/DP-Star
|
refs/heads/main
|
/utils/get_plot.py
|
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif']=['SimHei'] # 用来正常显示中文标签
import matplotlib.font_manager as fm # 字体管理器
def run_data():
    """Return the measured metric values keyed by metric name.

    Each list holds the metric at privacy budgets 0.1, 0.5, 1 and 2.
    """
    return {
        "Query AvRE": [6.85e-04, 8.22e-03, 8.15e-02, 8.54],
        "Freq Patten AvRE": [0.13, 1.08, 1.24, 1.31],
        "Trip Error": [2.27e-03, 4.17e-02, 3.67e-01, 6.80e-01],
        "Diameter Error": [1.04e-05, 4.98e-04, 0.27, 0.41],
    }
def get_plot(metrics, data):
    """Plot one metric curve against the offset-range buckets and save it.

    :param metrics: metric name, used as figure title and output file name
    :param data: y-values for the four offset buckets
    """
    x_data = [0.1, 0.5, 1, 2]  # privacy budgets (used by the commented plots)
    bucket_labels = [
        str([0.0001, 0.00099]),
        str([0.001, 0.0099]),
        str([0.01, 0.099]),
        str([0.1, 0.99]),
    ]
    # reference values from the paper (used by the commented plots)
    paper_data = {
        "Query AvRE": [0.295, 0.203, 0.171, 0.159],
        "Freq Patten AvRE": [0.340, 0.329, 0.322, 0.329],
        "Trip Error": [0.071, 0.054, 0.034, 0.017],
        "Diameter Error": [0.103, 0.089, 0.078, 0.076],
    }
    # keep CJK glyphs and minus signs rendering correctly
    plt.rcParams["font.sans-serif"] = ["SimHei"]
    plt.rcParams["axes.unicode_minus"] = False
    plt.plot(bucket_labels, data, color="red", marker="*", label="offset")
    # plt.plot(x_data, data, color="red", marker="*", label="geolife")
    # plt.plot(x_data, paper_data[metrics], color="blue", marker="o", label="paper")
    plt.ylabel("指标结果")
    plt.title(metrics)
    plt.legend()
    plt.savefig('../data/Geolife Trajectories 1.3/' + metrics + ".jpg")
    plt.show()
if __name__ == '__main__':
    # Draw and save one figure per metric.  (The originals were named
    # `index`/`key`, but .items() actually yields (metric name, values).)
    for metric_name, metric_values in run_data().items():
        get_plot(metric_name, metric_values)
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,936
|
metaphysicser/DP-Star
|
refs/heads/main
|
/dpstar/mobility_model_construction.py
|
"""
-------------------------------------
# -*- coding: utf-8 -*-
# @Time : 2021/4/16 12:03:46
# @Author : Giyn
# @Email : giyn.jy@gmail.com
# @File : mobility_model_construction.py
# @Software: PyCharm
-------------------------------------
"""
import numpy as np
from utils import ProgressBar
def markov_model(trajs: list, n_grid: int, _epsilon: float) -> np.ndarray:
    """Build a differentially-private first-order Markov transition matrix.

    Args:
        trajs   : grid trajectories, one list of cell indices per trajectory
        n_grid  : number of second-level grid cells
        _epsilon: privacy budget driving the Laplace noise scale

    Returns:
        O_: n_grid x n_grid row-stochastic midpoint transition matrix
    """
    O_ = np.zeros((n_grid, n_grid))
    for t in trajs:
        # Fix: a trajectory with fewer than 2 points has no transitions and
        # previously caused a division by zero (len(t) - 1 == 0).
        if len(t) < 2:
            continue
        O_sub = np.zeros((n_grid, n_grid))
        for curr_point, next_point in zip(t, t[1:]):
            O_sub[curr_point][next_point] += 1
        O_sub /= (len(t) - 1)  # per-trajectory transition frequencies
        O_ += O_sub
    p = ProgressBar(n_grid, '生成中间点转移概率矩阵')
    for i in range(n_grid):
        p.update(i)
        for j in range(n_grid):
            noise = np.random.laplace(0, 1 / _epsilon)  # Laplace mechanism
            O_[i][j] = max(O_[i][j] + noise, 0)  # clip negatives to zero
    # Normalise each row into a probability distribution.
    for i in range(n_grid):
        row_sum = O_[i].sum()
        if row_sum > 0:
            O_[i] /= row_sum
        else:
            # Fix: an all-zero row (possible after clipping) previously
            # divided by zero and filled the row with NaNs; fall back to a
            # uniform distribution so sampling from the row stays valid.
            O_[i] = 1.0 / n_grid
    return O_
def mobility_model_main(n_grid: int, _epsilon: float, grid_trajs_path: str,
                        midpoint_movement_path: str):
    """Entry point: read grid trajectories, build the noisy transition matrix
    and write it out, one space-separated row per line.

    Args:
        n_grid                : number of grid cells
        _epsilon              : privacy budget
        grid_trajs_path       : input file, one Python-literal trajectory per line
        midpoint_movement_path: output file for the transition matrix
    Returns:
        None
    """
    import ast  # local import: only needed by this entry point

    with open(grid_trajs_path, 'r') as grid_trajs_file:
        # Fix: ast.literal_eval only parses literals, unlike the original
        # eval() which would execute arbitrary expressions from the file.
        T = [ast.literal_eval(traj) for traj in grid_trajs_file]
    midpoint_movement_matrix = markov_model(T, n_grid, _epsilon)
    with open(midpoint_movement_path, 'w') as midpoint_movement_file:
        for row in midpoint_movement_matrix:
            midpoint_movement_file.write(' '.join(str(v) for v in row) + '\n')
if __name__ == '__main__':
    # Demo run: 64 grid cells, spending 3/9 of the total budget on the
    # mobility model (matching epsilon_alloc['markov'] in main.py).
    epsilon = 0.1
    mobility_model_main(64, epsilon * 3 / 9,
                        f'../data/Geolife Trajectories 1.3/Middleware/grid_trajs_epsilon_{epsilon}.txt',
                        f'../data/Geolife Trajectories 1.3/Middleware/midpoint_movement_epsilon_{epsilon}.txt')
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,937
|
metaphysicser/DP-Star
|
refs/heads/main
|
/utils/_vector_cal.py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# Author: nomalocaris <nomalocaris.top>
""""""
from __future__ import (absolute_import, unicode_literals)
def to_vec_sub(p1, p2):
    """Component-wise difference of two 2-D vectors: p1 - p2."""
    x1, y1 = p1[0], p1[1]
    x2, y2 = p2[0], p2[1]
    return [x1 - x2, y1 - y2]
def to_vec_add(p1, p2):
    """Component-wise sum of two 2-D vectors: p1 + p2."""
    x1, y1 = p1[0], p1[1]
    x2, y2 = p2[0], p2[1]
    return [x1 + x2, y1 + y2]
def to_vec_times(c, p):
    """Scale the 2-D vector ``p`` by the scalar ``c``."""
    scaled_x = p[0] * c
    scaled_y = p[1] * c
    return [scaled_x, scaled_y]
def to_vec_dot(p1, p2):
    """Dot product of two 2-D vectors."""
    first = p1[0] * p2[0]
    second = p1[1] * p2[1]
    return first + second
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,938
|
metaphysicser/DP-Star
|
refs/heads/main
|
/metrics/__init__.py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# Author: nomalocaris <nomalocaris.top>
""""""
from __future__ import (absolute_import, unicode_literals)
from .DiameterE import SD_len, D_len, cal_diameter_e
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,939
|
metaphysicser/DP-Star
|
refs/heads/main
|
/config.py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# Author: nomalocaris <nomalocaris.top>
"""
the config of parm for DP-Star
"""
from __future__ import (absolute_import, unicode_literals)
# trajectory geo range
gps_range = {'lat': (0, 1320), 'lon': (0, 1760)}
# the number of the top grid
n_top_grid = 7
# a significant parm for adaptive grid: the smaller it is, the more bottom
# grids will be generated.
beta_factor = 80
# Fix: every f-string path below referenced `epsilon`, but it was never
# defined, so importing this module raised NameError.  Default to the
# smallest budget used by main.py's epsilon_list.
epsilon = 0.1
mdl_trajectories_input_dir = 'data/Geolife Trajectories 1.3/MDL1100'
# the ada grid construction
top_grid_path = f'data/Geolife Trajectories 1.3/middleware/top_grid_MDL1100_ep{epsilon}.txt'
# the grid trajectories
grid_traj_path = f'data/Geolife Trajectories 1.3/middleware/grid_traj_MDL1100_ep{epsilon}.txt'
# the top grid range
omega_path = f'data/Geolife Trajectories 1.3/middleware/grid_block_gps_range_MDL1100_ep{epsilon}.txt'
trip_distribution_path = f'data/Geolife Trajectories 1.3/middleware/trip_distribution_MDL1100_ep{epsilon}.txt'
midpoint_movement_path = f'data/Geolife Trajectories 1.3/middleware/midpoint_movement_MDL1100_ep{epsilon}.txt'
length_traj_path = f'data/Geolife Trajectories 1.3/middleware/length_traj_MDL1100_ep{epsilon}.txt'
# grid sd trajectories
sd_path = f'data/Geolife Trajectories 1.3/middleware/sd_MDL1100_ep{epsilon}.txt'
# true sd trajectories dir
sd_final_path = f'data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep{epsilon}.txt'
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,940
|
metaphysicser/DP-Star
|
refs/heads/main
|
/main.py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# Author: nomalocaris <nomalocaris.top>
""""""
from __future__ import (absolute_import, unicode_literals)
from dpstar import generate_adaptive_grid, read_mdl_data
from dpstar import generate_sd_grid_mapping_traj
from dpstar import trip_distribution_main
from dpstar import mobility_model_main
from dpstar import route_length_estimate_main
from dpstar import syn
from joblib import Parallel, delayed
from config import *
import numpy as np
import matplotlib.pyplot as plt
# privacy budget values to sweep
epsilon_list = [0.1, 0.5, 1.0, 2.0]
# how the total budget epsilon is split across the pipeline stages
epsilon_alloc = {
    'ag': 1 / 9,      # adaptive grid construction
    'td': 3 / 9,      # trip distribution extraction
    'markov': 3 / 9,  # mobility model construction
    'mle': 2 / 9,     # route length estimation (median length estimation)
}
def run(epsilon):
    """Run the full DP-Star pipeline for one total privacy budget.

    :param epsilon: total privacy budget, split across stages per epsilon_alloc
    """
    # 1. adaptive grid construction
    n_grid = generate_adaptive_grid(
        idir_traj=mdl_trajectories_input_dir,
        opath_top_grid=top_grid_path,
        opath_grid_traj=grid_traj_path,
        opath_grid_block_gps_range=omega_path,
        n_top_grid=n_top_grid,
        epsilon_alloc=epsilon_alloc['ag'] * epsilon,
        epsilon_tot=epsilon,
        gps_range=gps_range,
        beta_factor=beta_factor
    )
    # 2. trip distribution extraction
    # NOTE(review): keyword names assume trip_distribution_main accepts
    # (epsilon, src_file, out_file) -- confirm against its definition.
    trip_distribution_main(n_grid, epsilon=epsilon_alloc['td'] * epsilon,
                           src_file=grid_traj_path, out_file=trip_distribution_path)
    # 3. mobility model construction
    # Fix: mobility_model_main's parameters are (n_grid, _epsilon,
    # grid_trajs_path, midpoint_movement_path); the previous keywords
    # epsilon=/src_file=/out_file= raised TypeError.
    mobility_model_main(n_grid, epsilon_alloc['markov'] * epsilon,
                        grid_traj_path, midpoint_movement_path)
    # 4. route length estimation
    # NOTE(review): same keyword-name caveat as trip_distribution_main.
    maxT = route_length_estimate_main(n_grid, epsilon=epsilon_alloc['mle'] * epsilon,
                                      src_file=grid_traj_path, out_file=length_traj_path)
    # 5. synthesize 14650 grid trajectories
    syn(n_grid, maxT, trip_distribution_path, midpoint_movement_path, length_traj_path, sd_path, 14650)
    # 6. map the synthetic grid trajectories back to GPS space
    generate_sd_grid_mapping_traj(
        ipath_sd=sd_path,
        n_top_grid=n_top_grid,
        ipath_top_grid=top_grid_path,
        ipath_grid_block_gps_range=omega_path,
        odir_sd=sd_final_path,
        mapping_rate=1100,
        mapping_bais={'lat': 39.6, 'lon': 115.8}
    )
if __name__ == '__main__':
    # Run the pipeline once per privacy budget, four processes in parallel.
    Parallel(n_jobs=4)(delayed(run)(i) for i in epsilon_list)
# tot_traj = read_mdl_data(idir_mdl_traj)
# tot_points = []
# for traj in tot_traj:
# tot_points += traj
# tot_points = np.array(tot_points)
# print(tot_points.min(axis=0))
# print(tot_points.max(axis=0))
# tot_traj = read_mdl_data(idir_mdl_traj)
# plt.figure(figsize=(6, 5))
# for traj in tot_traj:
# plt.plot([x[0] for x in traj], [y[1] for y in traj], c='blue')
# plt.scatter([x[0] for x in traj], [y[1] for y in traj], c='blue')
# for traj in reverse_mapped_trajs:
# plt.plot([x[0] for x in traj], [y[1] for y in traj], c='red')
# plt.scatter([x[0] for x in traj], [y[1] for y in traj], c='red')
# # plot top gird lines
# top_gird_lines = cal_split(
# (gps_range['lat'][0], gps_range['lat'][1]),
# (gps_range['lon'][0], gps_range['lon'][1]),
# n_top_grid)
# for line in top_gird_lines:
# plt.plot([x[0] for x in line], [y[1] for y in line], c='black')
# # plot bottom grid lines
# for i in range(C):
# if M[i] > 1:
# bottom_grid_lines = cal_split(
# (top_grid_block_gps_range[i][0][0], top_grid_block_gps_range[i][1][0]),
# (top_grid_block_gps_range[i][0][1], top_grid_block_gps_range[i][1][1]),
# M[i]
# )
# for line in bottom_grid_lines:
# plt.plot([x[0] for x in line], [y[1] for y in line], c='black')
# print(M[i])
# plt.xlim(gps_range['lat'][0], gps_range['lat'][1])
# plt.ylim(gps_range['lon'][1], gps_range['lon'][0])
# plt.xlabel('Lat')
# plt.ylabel('Lon')
# ax = plt.gca()
# ax.xaxis.set_ticks_position('top')
# plt.savefig('grid_traj')
# plt.show()
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,941
|
metaphysicser/DP-Star
|
refs/heads/main
|
/utils/_plot.py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# Author: nomalocaris <nomalocaris.top>
"""
the functions for plot.
"""
from __future__ import (absolute_import, unicode_literals)
import numpy as np
import matplotlib.pyplot as plt
from utils import ProgressBar
def plot_scatter(points, fig_size=(6, 6), color='mediumpurple', size=5, title=''):
    """Scatter-plot a collection of (x, y) points.

    :param points: iterable of (x, y) pairs
    :param fig_size: matplotlib figure size
    :param color: marker colour
    :param size: marker size
    :param title: figure title
    """
    xs = [point[0] for point in points]
    ys = [point[1] for point in points]
    plt.figure(figsize=fig_size)
    plt.scatter(x=xs, y=ys, color=color, s=size)
    plt.title(title)
    plt.show()
def plot_traj(trajs, fig_size=(6, 6), color="mediumpurple", size=5,
              title='',
              is_plot_line=False, od_only=False, offset=None):
    """Plot a set of trajectories, optionally jittered and reduced to O/D.

    :param trajs: list of trajectories, each an iterable of (x, y) points
    :param fig_size: matplotlib figure size
    :param color: colour for points/lines (origin colour when od_only)
    :param size: scatter marker size
    :param title: figure title
    :param is_plot_line: also draw connecting lines, not just points
    :param od_only: plot only the origin and destination of each trajectory
    :param offset: [x_jitter, y_jitter] half-widths of uniform noise added to
                   every coordinate; defaults to no jitter
    """
    if offset is None:
        offset = [0, 0]
    p = ProgressBar(len(trajs), '绘制轨迹图')
    plt.figure(figsize=fig_size)
    for i in range(len(trajs)):
        p.update(i)
        traj = np.array(trajs[i])
        if od_only:
            # keep only the origin and destination points
            traj = [traj[0], traj[-1]]
        # jitter each coordinate uniformly within +/- offset
        x = [x[0] + np.random.uniform(-offset[0], offset[0]) for x in traj]
        y = [y[1] + np.random.uniform(-offset[1], offset[1]) for y in traj]
        if od_only:
            if is_plot_line:
                # NOTE(review): plt.plot on a single point draws nothing
                # visible; a line from origin to destination was probably
                # intended -- confirm.
                plt.plot(x[0], y[0], c=color)
                plt.plot(x[1], y[1], c="yellowgreen")
            plt.scatter(x[0], y[0], c=color, s=size)  # origin
            plt.scatter(x[1], y[1], c="yellowgreen", s=size)  # destination
        else:
            if is_plot_line:
                plt.plot(x, y, c=color)
            plt.scatter(x, y, c=color, s=size)
    plt.title(title)
    plt.show()
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,942
|
metaphysicser/DP-Star
|
refs/heads/main
|
/metrics/trip_test.py
|
# -*- encoding:utf-8 -*-
import os
from math import log
import numpy as np
def KLD(p, q):
    """Kullback-Leibler divergence D(p || q).

    A tiny epsilon (np.spacing(1)) is added to every entry so empty bins do
    not produce log(0) or a division by zero.
    """
    eps = np.spacing(1)
    p = p + eps
    q = q + eps
    # print(p/q)
    return sum([_p * log(_p / _q) for (_p, _q) in zip(p, q)])
def JSD_core(p, q):
    """Jensen-Shannon divergence between distributions p and q."""
    mixture = [0.5 * (_p + _q) for _p, _q in zip(p, q)]
    return 0.5 * KLD(p, mixture) + 0.5 * KLD(q, mixture)
def main():
    """Compute the 'trip error': the JS divergence between the
    origin-destination distributions of real and synthetic trajectories.

    NOTE(review): the spans are divided by 7 and A = 49 (a 7x7 grid), yet
    cell ids are built as w * 6 + j and synthetic cells are capped at 35 --
    this looks like a 6-vs-7 grid inconsistency; confirm against
    FP_KT.get_Frequent_Pattern, which divides the spans by 6.
    """
    min_latitude = 39.6
    min_longitude = 115.8
    len_latitude = 1.2
    len_longitude = 1.6
    wei = len_latitude / 7    # latitude span of one grid cell
    jing = len_longitude / 7  # longitude span of one grid cell
    A = 49  # number of grid cells (7 x 7)
    RD = np.zeros(A * A)   # real OD histogram: index = origin * A + destination
    RSD = np.zeros(A * A)  # synthetic OD histogram
    D = []   # real trajectories as cell-id sequences
    SD = []  # synthetic trajectories as cell-id sequences
    path_all = []
    base_path_list = os.listdir('../data/Geolife Trajectories 1.3/Trajectories7000/')
    for path in base_path_list:
        file_object = open('../data/Geolife Trajectories 1.3/Trajectories7000/' + path, 'r')
        T0 = []
        path_all.append(path)
        for line in file_object.readlines():
            jw = line.strip().split(',')
            w = jw[0].strip()
            w = float(w)
            w = int((w - min_latitude) / wei)  # latitude cell index
            j = jw[1].strip()
            j = float(j)
            j = int((j - min_longitude) / jing)  # longitude cell index
            T0.append(w * 6 + j)
        D.append(T0)
        # print(T0[0]*A+T0[-1])
        # count this trajectory's (origin, destination) pair
        RD[T0[0] * A + T0[-1]] += 1
        # print(T0)
    path_all = []
    base_path_list = os.listdir("../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep0.1")
    for path in base_path_list:
        file_object = open(r"../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep0.1/" + path, 'r')
        T0 = []
        path_all.append(path)
        for line in file_object.readlines():
            jw = line.strip().split(',')
            w = jw[0].strip()
            w = float(w)
            w = int((w - min_latitude) / wei)  # latitude cell index
            j = jw[1].strip()
            j = float(j)
            j = int((j - min_longitude) / jing)  # longitude cell index
            if w * 6 + j <= 35:  # keep only cells inside the capped range
                T0.append(w * 6 + j)
        if T0:
            SD.append(T0)
            try:
                RSD[T0[0] * A + T0[-1]] += 1
            except:  # NOTE(review): bare except silently drops out-of-range ODs
                continue
        # print(T0)
    # normalise both histograms into probability distributions
    RD = RD / np.sum(RD)
    RSD = RSD / np.sum(RSD)
    RD = RD.tolist()
    RSD = RSD.tolist()
    print(JSD_core(RD, RSD))
if __name__ == '__main__':
    # compute and print the trip error for the ep0.1 synthetic set
    main()
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,943
|
metaphysicser/DP-Star
|
refs/heads/main
|
/metrics/FP_KT.py
|
import numpy as np
import os
# output files for the mined frequent patterns (original vs synthetic)
pattern = ['Frequent_Pattern_init.txt', 'Frequent_Pattern_sd.txt']
# original data followed by the synthetic sets at budgets 0.1/0.5/1.0/2.0
path_list = [
    '../../data/Geolife Trajectories 1.3/Trajectories7000/',
    '../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep0.1/',
    '../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep0.5/',
    '../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep1.0/',
    '../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep2.0/',
]
# original data followed by the four test splits
path_test = [
    '../../data/Geolife Trajectories 1.3/Trajectories7000/',
    '../../data/Geolife Trajectories 1.3/test/0/',
    '../../data/Geolife Trajectories 1.3/test/1/',
    '../../data/Geolife Trajectories 1.3/test/2/',
    '../../data/Geolife Trajectories 1.3/test/3/',
]
def get_Frequent_Pattern(init_path='../../data/Geolife Trajectories 1.3/Trajectories/',
                         min_latitude=39.6,
                         min_longitude=115.8,
                         len_latitude=1.2,
                         len_longitude=1.6, para="init"):
    """Mine and persist the frequent grid patterns of a trajectory dataset.

    :param init_path: directory of trajectory files (one 'lat,lon' per line)
    :param min_latitude: minimum latitude of the covered area
    :param min_longitude: minimum longitude of the covered area
    :param len_latitude: latitude span of the covered area
    :param len_longitude: longitude span of the covered area
    :param para: "init" writes to pattern[0], anything else to pattern[1]
    :return: (pattern, count) pairs sorted by count, descending
    """
    lat_accu = len_latitude / 6  # latitude span of one grid cell
    lon_accu = len_longitude / 6  # longitude span of one grid cell
    Frequent_Pattern = {}
    for fname in os.listdir(init_path):
        # Fix: input file handles were never closed; use a context manager.
        with open(init_path + fname, 'r') as traj_file:
            T0 = []
            for line in traj_file:
                w = float(line.strip().split(',')[0].strip())
                w = int((w - min_latitude) / lat_accu)  # latitude cell index
                j = float(line.strip().split(',')[1].strip())
                j = int((j - min_longitude) / lon_accu)  # longitude cell index
                cell = w * 6 + j
                # skip consecutive repeats (subsumed by the membership test
                # below; kept for parity with the original logic)
                if len(T0) > 0 and cell == T0[-1]:
                    continue
                if cell in T0:  # each cell counted once per trajectory
                    continue
                T0.append(cell)
        P = tuple(T0)
        if len(P) >= 3:  # only patterns with at least 3 distinct cells count
            Frequent_Pattern[P] = Frequent_Pattern.get(P, 0) + 1
    out_name = pattern[0] if para == "init" else pattern[1]
    with open(out_name, 'w') as f:
        for record, count in Frequent_Pattern.items():
            f.writelines(str(record) + ':' + str(count) + '\n')
    return sorted(Frequent_Pattern.items(), key=lambda x: x[1], reverse=True)
def get_Fredata(para="init"):
    """Load a frequent-pattern file back into a dict sorted by count.

    :param para: "init" reads pattern[0], anything else pattern[1]
    :return: {tuple-of-strings pattern: count}, ordered by count descending
    """
    fname = pattern[0] if para == "init" else pattern[1]
    Fre_dict = {}
    # Fix: the file handle was never closed; use a context manager.
    with open(fname, 'r') as f:
        for line in f:
            # each line looks like "(1, 2, 3):7"
            key = tuple((line.split(':')[0].strip()[1:-1]).split(','))
            Fre_dict[key] = int(line.split(':')[1].strip())
    # rebuild in descending-count order (dicts preserve insertion order)
    return dict(sorted(Fre_dict.items(), key=lambda x: x[1], reverse=True))
def get_FP(init_dict, sd_dict):
    """Average relative count error over the top-50 frequent patterns.

    :param init_dict: pattern -> count for the original data
    :param sd_dict: pattern -> count for the synthetic data
    :return: mean relative error (patterns missing from sd contribute 0)
    """
    total_err = 0
    for pat in list(init_dict.keys())[:50]:
        if pat in sd_dict:
            total_err += abs(init_dict[pat] - sd_dict[pat]) / init_dict[pat]
    return total_err / 50
def extra_same_elem(list1, list2):
    """Return the elements common to both lists (order unspecified)."""
    return list(set(list1) & set(list2))
def get_KT(init_dict, sd_dict):
    """Kendall-tau-style rank agreement of the top frequent patterns.

    :param init_dict: pattern -> count for the original data
    :param sd_dict: pattern -> count for the synthetic data
    :return: (concordant - discordant) / C(50, 2)
    """
    # Perf fix: the original rebuilt list(init_dict.keys()) inside every loop
    # test, turning the O(n^2) pair scan into O(n^3); build the list once.
    init_keys = list(init_dict.keys())
    # k = how many leading init patterns it takes to find 50 that also occur
    # in the synthetic data (capped by the data itself).
    k = 0
    for key in init_keys:
        if k >= 50:
            break
        if key in sd_dict:
            k += 1
    top = init_keys[:k]
    concor_count = 0
    discor_count = 0
    for i in range(len(top)):
        if top[i] not in sd_dict:
            continue
        for j in range(i + 1, len(top)):
            if top[j] not in sd_dict:
                continue
            init_i, init_j = init_dict[top[i]], init_dict[top[j]]
            sd_i, sd_j = sd_dict[top[i]], sd_dict[top[j]]
            # a pair is concordant when both rankings order it the same way
            if (init_i >= init_j and sd_i > sd_j) or (init_i < init_j and sd_i < sd_j):
                concor_count += 1
            else:
                discor_count += 1
    return (concor_count - discor_count) / (50 * 49 / 2)
def get_check(init_path='../../data/Geolife Trajectories 1.3/Trajectories7000/',
              min_latitude=39.6001216362634,
              min_longitude=115.80024136052477,
              len_latitude=1.2006028726893376,
              len_longitude=1.6006907968450292):
    """Interactive debug helper: dump, pair by pair, the grid cells of a real
    trajectory next to its synthetic counterpart.

    Pairs the i-th file of ``init_path`` with the i-th file of the ep0.1
    synthetic directory, prints per-point cell coordinates, the deduplicated
    cell sequences and the raw offsets, then blocks on ``input()`` before
    moving to the next pair.

    :param init_path: directory with the real trajectory files
    :param min_latitude: minimum latitude of the covered area
    :param min_longitude: minimum longitude of the covered area
    :param len_latitude: latitude span of the covered area
    :param len_longitude: longitude span of the covered area
    """
    lat_accu = len_latitude / 6  # latitude span of one grid cell
    lon_accu = len_longitude / 6  # longitude span of one grid cell
    # NOTE(review): Frequent_Pattern is never used in this function.
    Frequent_Pattern = {}
    base_path_list = os.listdir(init_path)
    base_path_list2 = os.listdir('../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep0.1/')
    for i in range(len(base_path_list)):
        file_object = open(init_path + base_path_list[i], 'r')  # real trajectory
        file_object2 = open('../../data/Geolife Trajectories 1.3/sd/sd_final_MDL1100_ep0.1/' + base_path_list2[i], 'r')
        T0 = []  # deduplicated cell ids, real
        T1 = []  # deduplicated cell ids, synthetic
        l0 = []  # (lat_cell, lon_cell) per point, real
        l1 = []  # (lat_cell, lon_cell) per point, synthetic
        k0 = []  # raw offsets from the area origin, real
        k1 = []  # raw offsets from the area origin, synthetic
        for line in file_object.readlines():
            # NOTE(review): real coordinates are truncated to 5/6 characters
            # before parsing while synthetic ones below are not -- confirm
            # this asymmetry is intended.
            w = float(line.strip().split(',')[0].strip()[:5])
            w = int((w - min_latitude) / lat_accu)  # latitude cell index
            j = float(line.strip().split(',')[1].strip()[:6])
            j = int((j - min_longitude) / lon_accu)  # longitude cell index
            l0.append((w, j))
            k0.append((float(line.strip().split(',')[0].strip()[:5]) - min_latitude,
                       float(line.strip().split(',')[1].strip()[:6]) - min_longitude))
            if len(T0) > 0 and w * 6 + j == T0[-1]:  # skip consecutive repeats
                continue
            if w * 6 + j in T0:  # each cell recorded once
                continue
            T0.append(w * 6 + j)  # cell id
        for line1 in file_object2.readlines():
            w = float(line1.strip().split(',')[0].strip())
            w = int((w - min_latitude) / lat_accu)  # latitude cell index
            j = float(line1.strip().split(',')[1].strip())
            j = int((j - min_longitude) / lon_accu)  # longitude cell index
            l1.append((w, j))
            k1.append((float(line1.strip().split(',')[0].strip()) - min_latitude,
                       float(line1.strip().split(',')[1].strip()) - min_longitude))
            if len(T1) > 0 and w * 6 + j == T1[-1]:  # skip consecutive repeats
                continue
            if w * 6 + j in T1:  # each cell recorded once
                continue
            T1.append(w * 6 + j)  # cell id
        print(l0)
        print(l1)
        print(T0)
        print(T1)
        print(k0)
        input(k1)  # blocks until Enter; k1 doubles as the prompt text
def get_data2():
    """Print the raw counts column of the 'init' frequent-pattern file."""
    count = []
    # Fix: the file handle was never closed; use a context manager.
    with open("Frequent_Pattern_init.txt", "r") as f:
        for line in f:
            count.append(line.strip().split(":")[-1])
    print(count)
if __name__ == '__main__':
    # mine and cache the frequent patterns of the original data
    get_Frequent_Pattern(path_list[0],
                         min_latitude=39.6,
                         min_longitude=115.8,
                         len_latitude=1.2,
                         len_longitude=1.6,
                         para="init")
    dict_init = get_Fredata(para="init")
    # compare each synthetic dataset against the original
    for i in range(1, len(path_list)):
        get_Frequent_Pattern(path_list[i],
                             min_latitude=39.6,
                             min_longitude=115.8,
                             len_latitude=1.2,
                             len_longitude=1.6,
                             para="sd")
        dict_sd = get_Fredata(para="sd")
        FP = get_FP(dict_init, dict_sd)
        KT = get_KT(dict_init, dict_sd)
        print(path_list[i], FP, KT)
        input()  # pause until the user presses Enter before the next dataset
    get_data2()
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,944
|
metaphysicser/DP-Star
|
refs/heads/main
|
/metrics/diameter_error.py
|
"""
-------------------------------------
# -*- coding: utf-8 -*-
# @Author :
nomalocaris
Giyn
HZT
# @File : diameter_error.py
# @Software: PyCharm
-------------------------------------
"""
import os
from math import log
import numpy as np
from utils import ProgressBar
def KLD(p, q):
    """Kullback-Leibler divergence D(p || q).

    A tiny epsilon (np.spacing(1)) is added to every entry so empty bins do
    not produce log(0) or a division by zero.
    """
    # Fix: the original `p += np.spacing(1)` raises TypeError when p is a
    # plain Python list (list.__iadd__ needs an iterable), which is exactly
    # what the callers pass after .tolist(); convert to an array first.
    p = np.asarray(p, dtype=float) + np.spacing(1)
    q = np.asarray(q, dtype=float) + np.spacing(1)
    return sum([_p * log(_p/_q) for (_p, _q) in zip(p, q)])
def JSD_core(p, q):
    """Jensen-Shannon divergence between distributions p and q."""
    mixture = [0.5 * (_p + _q) for _p, _q in zip(p, q)]
    return 0.5 * KLD(p, mixture) + 0.5 * KLD(q, mixture)
def D_len(d_path):
    """Read all trajectories under a directory and measure their diameters.

    :param d_path: directory of trajectory files, one 'lat,lon' per line
    :return: (global max diameter, list of per-trajectory max diameters)
    """
    D = []
    for fname in os.listdir(d_path):
        # Fix: file handles were never closed; use a context manager.
        with open(d_path + fname, 'r') as traj_file:
            T0 = []
            for line in traj_file:
                jw = line.strip().split(',')
                T0.append((float(jw[0]), float(jw[1].strip())))
        D.append(T0)
    D_maxlen_arr = []
    D_maxlen = 0
    d_len = len(D)
    p = ProgressBar(d_len, '计算D轨迹长度')
    for idx in range(d_len):  # one trajectory at a time
        p.update(idx)
        T0 = D[idx]
        T_maxlen = 0
        # Pairwise diameter scan.  Fix: the inner loops previously shadowed
        # the outer loop variable `i`, which made the code fragile.
        for a in range(len(T0)):
            for b in range(a + 1, len(T0)):
                now_len = ((T0[a][0] - T0[b][0]) ** 2 +
                           (T0[a][1] - T0[b][1]) ** 2) ** 0.5
                if now_len > D_maxlen:
                    D_maxlen = now_len
                if now_len > T_maxlen:
                    T_maxlen = now_len
        D_maxlen_arr.append(T_maxlen)
    return D_maxlen, D_maxlen_arr
def _cal_diameter_e(D_maxlen, D_maxlen_arr, SD_maxlen, SD_maxlen_arr):
    """JS divergence between 20-bin diameter histograms of D and SD."""
    def _histogram(max_len, lengths):
        """Bucket lengths into 20 equal bins of width max_len / 20."""
        bins = [0 for _ in range(20)]
        for value in lengths:
            slot = int(value / (max_len / 20))
            # values equal to max_len (or beyond) fall into the last bin
            bins[slot if slot < 20 else 19] += 1
        arr = np.array(bins, dtype='float32')
        arr /= np.sum(arr)
        return arr.tolist()

    ep_D = _histogram(D_maxlen, D_maxlen_arr)
    ep_SD = _histogram(SD_maxlen, SD_maxlen_arr)
    return JSD_core(ep_D, ep_SD)
def cal_diameter_e(d_path, sd_path):
    """Compute the diameter error between real data (d_path) and synthetic
    data (sd_path).

    NOTE(review): the real-data diameters are NOT recomputed here — `dm`
    is a hard-coded cached value and `dm_arr` is loaded via eval() from
    dm_arr.txt (written by count_d_path). eval on a file is unsafe for
    untrusted input, and the hard-coded dm must stay in sync with the file.
    """
    # dm, dm_arr = D_len(d_path)  # original (expensive) computation, replaced by the cache below
    dm = 1.4979605451980367
    with open("dm_arr.txt", "r") as output:
        dm_arr = eval(output.read())
    sdm, sdm_arr = D_len(sd_path)
    return _cal_diameter_e(dm, dm_arr, sdm, sdm_arr)
def count_d_path(d_path):
    """Compute the real-data diameters and cache them to dm_arr.txt."""
    max_len, max_len_arr = D_len(d_path)
    print("dm:", max_len)
    with open("dm_arr.txt", "w") as output:
        output.write(str(max_len_arr))
    print(max_len_arr)
if __name__ == '__main__':
    # Ad-hoc driver: each call compares the real Geolife trajectories with
    # one synthetic dataset; only test/2 and test/3 are currently enabled.
    # print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
    #                      "1.3/sd/sd_final_MDL1100_ep0.1/"))
    # print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
    #                      "1.3/sd/sd_final_MDL1100_ep0.5/"))
    # print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
    #                      "1.3/sd/sd_final_MDL1100_ep1.0/"))
    # print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
    #                      "1.3/sd/sd_final_MDL1100_ep2.0/"))
    # print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
    #                      "1.3/test/0/"))
    # print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
    #                      "1.3/test/1/"))
    print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
                         "1.3/test/2/"))
    print(cal_diameter_e("../../data/Geolife Trajectories 1.3/Trajectories/", "../../data/Geolife Trajectories "
                         "1.3/test/3/"))
    # count_d_path("../../data/Geolife Trajectories 1.3/Trajectories7000/")
    # with open("dm_arr.txt", "r") as output:
    #     print(eval(output.read()))
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,790,945
|
metaphysicser/DP-Star
|
refs/heads/main
|
/dpstar/trip_distribution_extraction.py
|
"""
-------------------------------------
# -*- coding: utf-8 -*-
# @Time : 2021/4/16 11:57:44
# @Author : Giyn
# @Email : giyn.jy@gmail.com
# @File : trip_distribution_extraction.py
# @Software: PyCharm
-------------------------------------
"""
import numpy as np
from utils import ProgressBar
def trip_distribution(trajs: list, n_grid: int, _epsilon: float) -> np.ndarray:
    """Build the noisy trip (start cell -> end cell) distribution matrix.

    Args:
        trajs   : grid trajectories (each a sequence of cell indices)
        n_grid  : number of second-level grid cells
        _epsilon: privacy budget for the Laplace noise

    Returns:
        R: n_grid x n_grid matrix of start/end trip probabilities
    """
    R = np.zeros((n_grid, n_grid))
    # Count one trip per trajectory, keyed by its first and last cell.
    for traj in trajs:
        if len(traj) > 1:
            R[traj[0]][traj[-1]] += 1
    trip_count = int(np.sum(R))  # total trips, taken BEFORE adding noise
    bar = ProgressBar(n_grid, '生成转移概率矩阵')
    for row in range(n_grid):
        bar.update(row)
        for col in range(n_grid):
            # Laplace mechanism; negative noisy counts are clipped to zero.
            perturbed = R[row][col] + np.random.laplace(0, 1 / _epsilon)
            R[row][col] = perturbed if perturbed > 0 else 0
    R /= trip_count
    return R
def trip_distribution_main(n_grid: int, _epsilon: float, grid_trajs_path: str,
                           trip_distribution_path: str):
    """Write the noisy trip-distribution matrix to a file.

    Args:
        n_grid                : number of grid cells
        _epsilon              : privacy budget
        grid_trajs_path       : grid-trajectory input file path
        trip_distribution_path: output file path for the matrix

    Returns:
        None
    """
    with open(grid_trajs_path, 'r') as grid_trajs_file:
        # NOTE(review): eval() on each line assumes a trusted input file.
        T = [eval(grid_traj) for grid_traj in grid_trajs_file.readlines()]
    with open(trip_distribution_path, 'w') as trip_distribution_file:
        # Bug fix: pass the _epsilon parameter — the original referenced the
        # module-level global `epsilon`, silently ignoring the argument.
        trip_distribution_matrix = trip_distribution(T, n_grid, _epsilon)
        for item in trip_distribution_matrix:
            each_line = ' '.join([str(i) for i in item]) + '\n'
            trip_distribution_file.writelines(each_line)
if __name__ == '__main__':
    # `epsilon` selects the input/output file names; the value 0.1 * 3 / 9
    # is the privacy budget argument.
    epsilon = 0.1
    trip_distribution_main(64, 0.1 * 3 / 9,
                           f'../data/Geolife Trajectories 1.3/Middleware/grid_trajs_epsilon_{epsilon}.txt',
                           f'../data/Geolife Trajectories 1.3/Middleware/trip_distribution_epsilon_{epsilon}.txt')
|
{"/dpstar/synthetic_trajectory_generation.py": ["/utils/__init__.py"], "/dpstar/__init__.py": ["/dpstar/adaptive_grid_construction.py", "/dpstar/trip_distribution_extraction.py", "/dpstar/mobility_model_construction.py", "/dpstar/route_length_estimation.py", "/dpstar/synthetic_trajectory_generation.py"], "/dpstar/route_length_estimation.py": ["/utils/__init__.py"], "/dpstar/trip_distribution_extraction.py": ["/utils/__init__.py"], "/dpstar/adaptive_grid_construction.py": ["/utils/__init__.py"], "/dpstar/mobility_model_construction.py": ["/utils/__init__.py"], "/dpstar/mdl.py": ["/utils/__init__.py"], "/dpstar/extract_data.py": ["/utils/__init__.py"], "/utils/__init__.py": ["/utils/_plot.py", "/utils/_vector_cal.py"], "/main.py": ["/dpstar/__init__.py", "/config.py"], "/utils/_plot.py": ["/utils/__init__.py"], "/metrics/diameter_error.py": ["/utils/__init__.py"]}
|
37,841,615
|
marcosguilhermef/oddsing-python
|
refs/heads/master
|
/scrapingOdds.py
|
from warnings import catch_warnings, simplefilter
import connect
from bs4 import BeautifulSoup
import re
import json
import datetime
import database
from difflib import SequenceMatcher
class RaparOddsSa():
    """Scrape match odds from an "SA sports" betting page.

    Downloads the page at `link`, parses it with BeautifulSoup and
    collects all extracted data into self.oddsJSON.
    """

    def __init__(self, link):
        self.link = link
        self.Request()
        self.StartSoup()
        self.oddsJSON = {}
        self.oddsJSON['data_hora'] = datetime.datetime.now()
        self.oddsJSON['sistema'] = 'sa sports'

    def Request(self):
        # Download the raw page body.
        self.body = connect.ConectSA(self.link).getBody()

    def StartSoup(self):
        self.soup = BeautifulSoup(self.body, 'html.parser')

    def scrapCasaFora(self):
        """Extract the home ('tCasa') and away ('tFora') team names."""
        self.CasaFora = self.soup.find(id='conteudo_tituloCampeonato')
        self.CasaFora = re.sub('Apostas Disponíveis - ', '', self.CasaFora.text)
        splod = re.split('x', self.CasaFora)
        self.oddsJSON['tCasa'] = splod[0]
        self.oddsJSON['tFora'] = splod[1]
        return self.CasaFora

    def scrapOdds(self):
        """Collect every odds row; rows without two <td> cells act as
        group headers that set the current odds type."""
        tipoDeOdd = None
        # NOTE(review): limit=False is an unusual value for find_all's
        # `limit` (it behaves like "no limit"); confirm intent.
        self.oddsSoap = self.soup.find_all('tr', limit=False)
        self.oddsJSON['odds'] = []
        for odd in self.oddsSoap:
            td = odd.find_all('td')
            try:
                parametrizar = {"nome": td[0].get_text(), "Taxa": td[1].get_text(), "tipo": tipoDeOdd}
                self.oddsJSON['odds'].append(parametrizar)
            except IndexError:
                # A single-cell row is a group header, not an odds entry.
                if len(td) != 0:
                    tipoDeOdd = td[0].get_text()
        return self.oddsJSON

    def scrapNomeBanca(self):
        """Derive the bookmaker name from the host part of the URL."""
        # Fix: raw string avoids the invalid-escape SyntaxWarning for \w.
        self.oddsJSON['banca'] = re.search(r'(?<=https://)\w{1,}|(?<=http://)\w{1,}', self.link).group(0)

    def setStatus(self):
        self.oddsJSON['ativo'] = True

    def scrapCompleto(self):
        """Run the full scrape pipeline and return the result dict."""
        self.scrapNomeBanca()
        self.scrapCasaFora()
        self.scrapOdds()
        self.setStatus()
        return self.getOddsJSON()

    def getOddsJSON(self):
        return self.oddsJSON
class rasparDadosKbets():
    """Scrape odds from the kbets JSON API and normalise team names.

    Team names are fuzzily matched (SequenceMatcher ratio > 0.70) against
    names already stored in the database so both systems share naming.
    """
    listaDeJogos = None

    def __init__(self, link, casa='', fora=''):
        self.link = link
        self.casa = casa
        self.fora = fora
        self.Request()
        self.oddsJSON = {}
        self.oddsJSON['sistema'] = 'kbets'

    def Request(self):
        # Fetch the JSON payload for this link.
        self.body = connect.ConectKbets(self.link).getBody()

    def Start(self):
        """Run the full pipeline and return the odds dict."""
        self.setBanca()
        self.setHora()
        self.setStats()
        self.setCasaFora()
        self.setOdds()
        return self.oddsJSON

    def setBanca(self):
        """Derive the bookmaker name from the host part of the URL."""
        # Fix: raw string avoids the invalid-escape SyntaxWarning for \w.
        self.oddsJSON['banca'] = re.search(r'(?<=https://)\w{1,}|(?<=http://)\w{1,}', self.link).group(0)

    def setHora(self):
        self.oddsJSON['data_hora'] = datetime.datetime.now()

    def setStats(self):
        self.oddsJSON['ativo'] = True

    def setCasaFora(self):
        """Store both the matched and the original home/away team names."""
        self.gerarNome()
        NomesSubstituiveis = self.obterNomeCasaNomeFora(self.casa, self.fora)
        self.oddsJSON['tCasa'] = NomesSubstituiveis['casa']
        self.oddsJSON['tFora'] = NomesSubstituiveis['fora']
        self.oddsJSON['tCasaOriginal'] = self.casa
        # NOTE(review): 'tForaHoriginal' looks like a typo for
        # 'tForaOriginal', but consumers may depend on it — kept as-is.
        self.oddsJSON['tForaHoriginal'] = self.fora

    def gerarNome(self):
        # Load all known team-name pairs from Mongo.
        db = database.Database()
        lista = db.getAllTimes()
        self.listaSAgames = lista

    def obterNomeCasaNomeFora(self, casa, fora):
        """Fuzzy-match casa/fora against the known names; fall back to the
        originals when nothing scores above 0.70."""
        listaSAgames = self.listaSAgames
        casaResult = list(filter(lambda x: SequenceMatcher(None, x['tCasa'], casa).ratio() > 0.70, listaSAgames))
        if len(casaResult) == 0:
            casaResult = casa
        else:
            casaResult = casaResult[0]['tCasa']
            print('ratio: ', SequenceMatcher(None, casaResult, casa).ratio(), " casa: " + casaResult, " casa2: ", casa)
        foraResult = list(filter(lambda x: SequenceMatcher(None, x['tFora'], fora).ratio() > 0.70, listaSAgames))
        if len(foraResult) == 0:
            foraResult = fora
        else:
            foraResult = foraResult[0]['tFora']
            print('ratio: ', SequenceMatcher(None, foraResult, fora).ratio(), " fora: " + foraResult, " fora2: ", fora)
        return {"casa": casaResult, "fora": foraResult}

    def setOdds(self):
        self.oddsJSON['odds'] = list(map(lambda x: {"tipo": x["grupo"], "taxa": x["taxa"], "nome": x["odds"]}, self.body))
|
{"/scrapingOdds.py": ["/connect.py", "/database.py", "/OddParametrizacao.py"], "/scrapingLinks.py": ["/connect.py"], "/images.py": ["/database.py"], "/main.py": ["/scrapingLinks.py", "/datasets.py", "/lerArquivoDeLinks.py", "/connect.py", "/scrapingOdds.py", "/SalvarEmTexto.py", "/database.py"]}
|
37,841,616
|
marcosguilhermef/oddsing-python
|
refs/heads/master
|
/main.py
|
from pymongo import database
import datasets
from scrapingLinks import RaparLinksOddsSa as scrap
from scrapingLinks import RaparLinksOddsKbets as scrapK
from datasets import Link
from lerArquivoDeLinks import ArquivoSA as lerSa
from lerArquivoDeLinks import ArquivoKBETS as lerK
from connect import ConectSA
from scrapingOdds import RaparOddsSa as scrapsaodds
from scrapingOdds import rasparDadosKbets as scrapkbetsodds
from SalvarEmTexto import SalvarArquivoTexto as save
from threading import Thread
from database import Database
class CarregamentoDeLinks():
    """Load betting links from files, scrape odds and persist them."""

    def __init__(self):
        self.links = self.carregamentoSa()
        self.linksK = self.carregamentoLbets()
        self.bancaListLink = None
        self.listLinkOdds = None
        self.database = Database()

    def carregamentoSa(self):
        """Read the SA links file and wrap it in a Link list."""
        links = lerSa().ler()
        bancaListLink = Link(links)
        self.bancaListLink = bancaListLink.getLinkList()
        return self.bancaListLink

    def carregamentoLbets(self):
        """Read the kbets links file and wrap it in a Link list."""
        links = lerK().ler()
        bancaListLink = Link(links)
        self.bancaListLink = bancaListLink.getLinkList()
        return self.bancaListLink

    def ScrapingLinksSA(self):
        """Scrape every SA listing page and collect the odds-page links."""
        newList = []
        for link in self.links:
            try:
                responseBody = scrap(link)
                listLinksScraping = list(map(lambda x: link + '/simulador' + x, responseBody.Raspar()))
            except Exception:
                # Bug fix: the accumulation used to happen OUTSIDE the try,
                # so a failed link re-added the previous link's results (or
                # raised NameError on the first iteration).
                print('pula')
            else:
                newList = newList + listLinksScraping
        self.listLinkOdds = Link(newList)
        return self.listLinkOdds

    def ScrapingLinksKbets(self):
        """Scrape and persist the odds for every kbets link."""
        for k in self.linksK:
            self.scrapingOddsKbets(scrapK(k))

    def scrapingOddsKbets(self, instance):
        # One odds request per game id exposed by the kbets listing.
        for item in instance.getAllId():
            result = scrapkbetsodds(item['link'], casa=item['gameItem'][0]['tc'], fora=item['gameItem'][0]['tf']).Start()
            self.salve(result)

    def ScrapingOddSA(self):
        """Scrape and persist the odds for every SA odds page."""
        linkOdds = self.ScrapingLinksSA()
        linkOdds = list(linkOdds.getLinkList())
        for link in linkOdds:
            try:
                a = scrapsaodds(link).scrapCompleto()
                self.salve(a)
            except Exception:
                # Best-effort: skip pages that fail to scrape.
                print('pula')

    def salve(self, body):
        """Persist one odds document to Mongo."""
        print('salvando')
        self.database.insertMongo(body)
# Script entry point: load the link lists, deactivate previously-active
# odds documents in Mongo, then scrape both systems.
a = CarregamentoDeLinks()
data = Database().desativarAtivos()
a.ScrapingOddSA()
a.ScrapingLinksKbets()
|
{"/scrapingOdds.py": ["/connect.py", "/database.py", "/OddParametrizacao.py"], "/scrapingLinks.py": ["/connect.py"], "/images.py": ["/database.py"], "/main.py": ["/scrapingLinks.py", "/datasets.py", "/lerArquivoDeLinks.py", "/connect.py", "/scrapingOdds.py", "/SalvarEmTexto.py", "/database.py"]}
|
37,841,617
|
marcosguilhermef/oddsing-python
|
refs/heads/master
|
/scrapingLinks.py
|
from os import link, stat
from warnings import catch_warnings
import connect
from bs4 import BeautifulSoup
import re
class RaparLinksOddsSa():
    """Collect the 'apostas.aspx' odds links from an SA listing page."""

    def __init__(self, link):
        self.link = link
        self.body = self.getLinksMaisOdds()

    def getLinksMaisOdds(self):
        # Download the listing page body.
        return connect.ConectSA(self.link).getBody()

    def Raspar(self):
        """Return the hrefs that point to betting pages."""
        soup = BeautifulSoup(self.body, 'html.parser')
        a = soup.find_all('a')
        # Fix: the lambda parameter used to shadow the `re` module name.
        links = map(lambda anchor: anchor['href'], a)
        # NOTE(review): the pattern '(./)' matches ANY character followed
        # by '/', not a literal './' — confirm whether r'\./' was intended.
        linksTratados = map(lambda link: re.sub('(./)', '/', link), links)
        linksUteis = map(lambda link: link if re.search('apostas.aspx', link) != None else None, linksTratados)
        linksUteisSemNones = self.removeNone(linksUteis)
        return linksUteisSemNones

    def removeNone(self, link):
        """Drop None entries, preserving order."""
        return [item for item in link if item is not None]
#a = RaparLinksOddsSa("https://nbet91.com/simulador/jogos.aspx?idesporte=102&idcampeonato=575067")
class RaparLinksOddsKbets():
    """Read the kbets JSON feed and expose its odds groups and game list."""

    def __init__(self, link):
        self.link = link
        self.oddsJSON = {}
        self.Request()
        self.Start()

    def Request(self):
        # Fetch the JSON payload for this link.
        self.body = connect.ConectKbets(self.link).getBody()

    def Start(self):
        # Split the payload into its two sections.
        self.oddsGroups = self.body['odds']
        self.gameList = self.body['lista']

    def getAllId(self):
        """Build one odds-API entry per listed game."""
        entries = []
        for game in self.gameList:
            matching = list(filter(lambda x: x['id'] == game['id'], self.gameList))
            entries.append({
                "link": "http://bestgameonline.net/axios/oddsWithGroups/" + game['id'],
                "id": game['id'],
                "gameItem": matching,
            })
        return entries

    def getOddsGroups(self):
        return self.oddsGroups

    def getgameList(self):
        return self.gameList
|
{"/scrapingOdds.py": ["/connect.py", "/database.py", "/OddParametrizacao.py"], "/scrapingLinks.py": ["/connect.py"], "/images.py": ["/database.py"], "/main.py": ["/scrapingLinks.py", "/datasets.py", "/lerArquivoDeLinks.py", "/connect.py", "/scrapingOdds.py", "/SalvarEmTexto.py", "/database.py"]}
|
37,841,618
|
marcosguilhermef/oddsing-python
|
refs/heads/master
|
/connect.py
|
import requests
#essa class faz a primeira requisição ao site
class ConectSA():
    """Thin wrapper around a single GET request to an SA site."""

    def __init__(self, SALink):
        self.link = SALink
        self.response = self.setResponse()

    def setResponse(self):
        # The request is issued once, at construction time.
        return requests.get(self.link)

    def getBody(self):
        """Return the response body as text."""
        return self.response.text

    def getResponseCode(self):
        """Return the HTTP status code."""
        return self.response.status_code
class ConectKbets():
    """Thin wrapper around a single GET request to the kbets JSON API."""

    def __init__(self, link):
        self.link = link
        self.response = self.setResponse()

    def setResponse(self):
        # The request is issued once, at construction time.
        return requests.get(self.link)

    def getBody(self):
        """Return the response body parsed as JSON."""
        return self.response.json()

    def getResponseCode(self):
        """Return the HTTP status code."""
        return self.response.status_code
|
{"/scrapingOdds.py": ["/connect.py", "/database.py", "/OddParametrizacao.py"], "/scrapingLinks.py": ["/connect.py"], "/images.py": ["/database.py"], "/main.py": ["/scrapingLinks.py", "/datasets.py", "/lerArquivoDeLinks.py", "/connect.py", "/scrapingOdds.py", "/SalvarEmTexto.py", "/database.py"]}
|
37,841,619
|
marcosguilhermef/oddsing-python
|
refs/heads/master
|
/database.py
|
import sqlite3
import pymongo
from pymongo import aggregation
import json
class Database():
mongo = pymongo.MongoClient("mongodb://localhost:27017/")
databaseM = mongo["oddsing"]
collection = databaseM['odds']
listaDeJogos = None
def setCollection(self):
self.collection = self.databaseM['odds']
def insertDados(self, dados):
self.desativarAtivos()
self.dados = dados
self.cur.execute("insert into jogo (tCasa,tFora,banca,odds) values (?, ?, ?, ?)", (self.dados['tCasa'], self.dados['tFora'], self.dados['banca'], str(self.dados['odds'])))
self.con.commit()
def insertMongo(self,dados):
result = self.collection.insert_one(dados)
print('carregado: '+str(result.inserted_id))
def desativarAtivos(self):
self.collection.update_many({"ativo": True}, { "$set": {"ativo": False}})
def getAllTimes(self):
aggregate = [{"$match": { "ativo": True, "sistema": "sa sports" } },{"$group" : { "_id": { "tCasa": "$tCasa", "tFora" : "$tFora"}} }]
result = Database.mongo["oddsing"]["odds"].aggregate(aggregate)
result = list(result)
newResult = map( lambda x: x['_id'] ,result)
self.listaDeJogos = newResult
return list(newResult)
|
{"/scrapingOdds.py": ["/connect.py", "/database.py", "/OddParametrizacao.py"], "/scrapingLinks.py": ["/connect.py"], "/images.py": ["/database.py"], "/main.py": ["/scrapingLinks.py", "/datasets.py", "/lerArquivoDeLinks.py", "/connect.py", "/scrapingOdds.py", "/SalvarEmTexto.py", "/database.py"]}
|
37,855,304
|
krootca/App-Flask
|
refs/heads/main
|
/database.py
|
#!/bin/python3
import sqlite3 #sqlite3 como base de datos
import random, re, time
#random para generar numeros aleatorios
#re para usar regex, expresiones regulares
#time para proporcionar al algoritmo información del sistema, fecha, hora
#regex o expresiones regulares
nick_re = re.compile(r"^[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ð,.'-]{4,14}$")
age_re = re.compile(r'^((19|20)\d\d)-(0?[1-9]|1[012])-(0?[1-9]|[12][0-9]|3[01])$')
email_re = re.compile(r"\b[\w.%+-]+@[\w.-]+\.[a-zA-Z]{2,6}\b")
password_re = re.compile(r"^[a-zA-Z0-9\W\w]{7,30}$")
url_re = re.compile(r"^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$")
ip_re = re.compile(r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$")
profile_re = re.compile(r"^(images)/([a-zA-Z0-9\W]{1,60})+\.(webp|jpeg|png|jpg)$")
tel_peru_re = re.compile(r"^.+(51)([0-9\w]{9})$")
direction_re= re.compile(r"^[a-zA-Z0-9]{3,30}$")
#---------
#-------------CREATE ALL DB----------
def createDB(): # creates the first table (accounts)
    """Create the `accounts` table in database.db."""
    db_app = sqlite3.connect("database.db", check_same_thread=False) # creates the database.db file if missing
    cursor = db_app.cursor() # cursor used to run SQL statements
    cursor.execute("""CREATE TABLE "accounts" (
    "user_name" TEXT NOT NULL,
    "user_surname" TEXT NOT NULL,
    "email" TEXT NOT NULL UNIQUE,
    "password" TEXT NOT NULL,
    "phone" INT NOT NULL UNIQUE,
    "direction" TEXT NOT NULL,
    "age" TEXT NOT NULL,
    "user_id" INT NOT NULL UNIQUE);""")
    db_app.commit() # persist the new table
    db_app.close()
def computerDB():
    """Create the `computerDB` equipment-inventory table in database.db."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    connection.cursor().execute("""CREATE TABLE "computerDB" (
    "codigo" TEXT NOT NULL,
    "nombre_equipo" TEXT NOT NULL,
    "marca" TEXT NOT NULL,
    "modelo" TEXT NOT NULL,
    "estado" TEXT NOT NULL,
    "observacion" TEXT NOT NULL,
    "transferencia" TEXT NOT NULL,
    "stock" TEXT NOT NULL,
    "area" TEXT NOT NULL,
    "user_id" TEXT NOT NULL,
    "producto_id" TEXT NOt NULL);""")
    connection.commit()
    connection.close()
def moveDB():
    """Create the `moveDB` equipment-movement table in database.db."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    connection.cursor().execute("""CREATE TABLE "moveDB" (
    "codigo" TEXT NOT NULL,
    "marca" TEXT NOT NULL,
    "nombre_equipo" TEXT NOT NULL,
    "precio" TEXT NOT NULL,
    "estado" TEXT NOT NULL,
    "modelo" TEXT NOT NULL,
    "observacion" TEXT NOT NULL,
    "fecha_salida" TEXT NOT NULL,
    "fecha_ingreso" TEXT NOT NULL,
    "area" TEXT NOT NULL,
    "user_id" TEXT NOT NULL,
    "producto_id" TEXT NOt NULL);""")
    connection.commit()
    connection.close()
def incidentsDB():
    """Create the `incidentsDB` incident-tracking table in database.db."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    connection.cursor().execute("""CREATE TABLE "incidentsDB" (
    "codigo" TEXT NOT NULL,
    "marca" TEXT NOT NULL,
    "nombre_equipo" TEXT NOT NULL,
    "precio" TEXT NOT NULL,
    "estado" TEXT NOT NULL,
    "modelo" TEXT NOT NULL,
    "observacion" TEXT NOT NULL,
    "fecha_compra" TEXT NOT NULL,
    "tecnico" TEXT NOT NULL,
    "area" TEXT NOT NULL,
    "user_id" TEXT NOT NULL,
    "producto_id" TEXT NOt NULL);""")
    connection.commit()
    connection.close()
def imagesDB():
    """Create the `imagesDB` image-reference table in database.db."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    connection.cursor().execute("""CREATE TABLE "imagesDB" (
    "url" TEXT NOT NULL,
    "marca" TEXT NOT NULL,
    "nombre_equipo" TEXT NOT NULL,
    "user_id" TEXT NOT NULL,
    "producto_id" TEXT NOt NULL);""")
    connection.commit()
    connection.close()
def createAllDB():
    """Create every table the app uses, in declaration order."""
    for create_table in (createDB, computerDB, moveDB, incidentsDB, imagesDB):
        create_table()
#------------------------------------------------------------
# Try to create all tables at import time; if anything fails (typically
# because the tables already exist) the error is deliberately ignored so
# the app keeps starting with the existing database.
try:
    createAllDB()
except Exception as e:
    pass
def return_all(): # returns every row of the accounts table
    """Return all rows of `accounts`, or None on any error.

    NOTE(review): the broad except silently swallows failures (e.g. a
    missing table) and the function then implicitly returns None, which
    several callers iterate over directly — confirm callers guard for it.
    """
    db_app = sqlite3.connect("database.db", check_same_thread=False)
    cursor = db_app.cursor()
    try:
        datos=cursor.execute('SELECT * FROM accounts').fetchall()
        db_app.close()
        return datos # every account row
    except Exception as e: # on failure: fall through, returning None
        pass
def account_login(email, password):
    """Check credentials against the accounts table.

    Returns True when an account matches, False when the credentials have
    a valid format but no account matches, and None when email/password
    are malformed or the table could not be read.

    Bug fix: the original returned after inspecting only the FIRST row,
    so any account other than the first could never log in.
    """
    rows = return_all()
    if rows is None:
        # Table unreadable: keep the original "no account" (None) signal.
        return None
    if email_re.search(email) is None or password_re.search(password) is None:
        # Malformed input: the original also ended up returning None here.
        return None
    for row in rows:
        # Columns per createDB: row[2] is email, row[3] is password.
        if email == row[2] and password == row[3]:
            return True
    return False
def cityVerific(direction):
    """Map a numeric city code (1-5) to its name; default "Peru, Lima".

    Accepts ints or numeric strings. Any unknown or non-numeric value
    falls back to "Peru, Lima".

    Bug fix: the original compared `direction == 5` without int(), so
    "Peru, Piura" was unreachable for string input.
    """
    cities = {
        1: "Peru, Lima",
        2: "Peru, Cusco",
        3: "Peru, Puno",
        4: "Peru, Junin",
        5: "Peru, Piura",
    }
    try:
        return cities.get(int(direction), "Peru, Lima")
    except Exception as e:
        # Non-numeric input (e.g. arbitrary text) uses the default city.
        return "Peru, Lima"
def account_signup(user_name, user_surname, email, password, telephone, direction, age): # register a new account
    """Insert a new account; return True on success, False on bad input.

    The email must be unused (numerRETURN/nameRETURN return None for an
    unknown email) and every field must match its regex pattern.
    """
    db_app = sqlite3.connect("database.db", check_same_thread=False)
    cursor = db_app.cursor()
    # numerRETURN(email)/nameRETURN(email) being None means the email is not
    # registered yet, which is required because email and phone are UNIQUE.
    # The remaining regex checks validate each field's format; only if every
    # check passes is the row inserted.
    if numerRETURN(email)==None and nameRETURN(email)==None and str(nick_re.search(user_name))!="None" and str(nick_re.search(user_surname))!="None" and str(email_re.search(email))!="None" and str(password_re.search(password))!="None" and str(tel_peru_re.search(telephone))!="None" and direction!="None" and str(age_re.search(age))!="None":
        user_id=random.randint(99999,99999999) # random user id — NOTE(review): collisions are possible; combined with INSERT OR IGNORE below, a colliding id is silently dropped while True is still returned
        content=[(user_name), (user_surname), (email), (password), (telephone), (cityVerific(direction)), (age), (user_id)]
        cursor.execute("INSERT or IGNORE INTO accounts (user_name,user_surname, email, password, phone, direction, age, user_id) VALUES (?,?,?,?,?,?,?,?)",content)
        db_app.commit() # persist
        db_app.close() # close the connection
        return True
    else:
        db_app.close() # invalid data: close the connection and report failure
        return False
def nameRETURN(correo): # returns the user_name bound to an email
    """Return the user_name for `correo`, or None if no account has it."""
    db_app = sqlite3.connect("database.db", check_same_thread=False)
    cursor = db_app.cursor()
    account_data=cursor.execute('SELECT * FROM accounts ORDER BY _rowid_').fetchall()
    for i in account_data: # scan every account for a matching email
        if correo==i[2]: # match found
            db_app.close()
            return i[0] # the user_name column
        else:
            # NOTE(review): close() runs on every non-matching row; the rows
            # are already fetched so iteration still works, but the repeated
            # close is redundant.
            db_app.close()
            pass
def numerRETURN(correo): # same scan as nameRETURN, but returns the phone
    """Return the phone number for `correo`, or None if not found."""
    db_app = sqlite3.connect("database.db", check_same_thread=False)
    cursor = db_app.cursor()
    account_data=cursor.execute('SELECT * FROM accounts ORDER BY _rowid_').fetchall()
    for i in account_data:
        if correo==i[2]: # match the email column
            db_app.close()
            return i[4] # the phone column
        else:
            # No match for this row: close (redundantly) and keep scanning;
            # falls through to an implicit None when nothing matches.
            db_app.close()
            pass
def accountUPDATE(data_update, code_user, data_modific, password):
    """Update one accounts column for the user identified by code_user.

    Security fix: the original interpolated all three values straight into
    the SQL text (injection). The column name — which cannot be bound as a
    parameter — is now validated against a whitelist, and the values are
    bound as parameters.

    Bug fix: the row check compared code_user against column 6 (age);
    per the CREATE TABLE order, user_id is column 7.
    """
    allowed_columns = {"user_name", "user_surname", "email", "password",
                       "phone", "direction", "age", "user_id"}
    if data_modific not in allowed_columns:
        return None
    db_app = sqlite3.connect("database.db", check_same_thread=False)
    cursor = db_app.cursor()
    for i in return_all():
        # i[3] is the password column, i[7] the user_id column.
        if i[3] == str(password) and str(i[7]) == str(code_user):
            cursor.execute(
                "UPDATE accounts set {0}=? where user_id=?".format(data_modific),
                (data_update, code_user))
            db_app.commit()
            db_app.close()
            return True
    db_app.close()
def registerComputer():
    """Return every row of the `computerDB` table."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    rows = connection.cursor().execute('SELECT * FROM computerDB').fetchall()
    connection.close()
    return rows
def registerIncidents():
    """Return every row of the `incidentsDB` table."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    rows = connection.cursor().execute('SELECT * FROM incidentsDB').fetchall()
    connection.close()
    return rows
def showComputer():
    """Return every row of the `computerDB` table (same as registerComputer)."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    rows = connection.cursor().execute('SELECT * FROM computerDB').fetchall()
    connection.close()
    return rows
def showIncidents():
    """Return every row of the `incidentsDB` table (same as registerIncidents)."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    rows = connection.cursor().execute('SELECT * FROM incidentsDB').fetchall()
    connection.close()
    return rows
def showMove():
    """Return every row of the `moveDB` table."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    rows = connection.cursor().execute('SELECT * FROM moveDB').fetchall()
    connection.close()
    return rows
def showImage():
    """Return every row of the `imagesDB` table."""
    connection = sqlite3.connect("database.db", check_same_thread=False)
    rows = connection.cursor().execute('SELECT * FROM imagesDB').fetchall()
    connection.close()
    return rows
def deleteAccounts(email, password):
    """Delete the account matching email + password.

    Returns True on deletion, False when no account matches, None when the
    email/password format is invalid.

    Security fix: the DELETE statement now binds the email as a parameter
    instead of formatting it into the SQL text (injection).
    Bug fix: the original returned after inspecting only the first row.
    """
    if email_re.search(email) is None or password_re.search(password) is None:
        # Malformed input: the original also ended up returning None here.
        return None
    db_app = sqlite3.connect("database.db", check_same_thread=False)
    cursor = db_app.cursor()
    for i in return_all():
        # i[2] is the email column, i[3] the password column.
        if email == i[2] and password == i[3]:
            cursor.execute('DELETE FROM accounts WHERE email = ?;', (email,))
            db_app.commit()
            db_app.close()
            return True
    db_app.close()
    return False
|
{"/init.py": ["/database.py"]}
|
37,855,305
|
krootca/App-Flask
|
refs/heads/main
|
/init.py
|
from flask import Flask, flash, render_template, request, redirect, url_for, abort, session, escape, make_response, send_from_directory #el modulo de Flask
from flask_socketio import SocketIO, send, emit #Usaremos socket.io
import random,time,os #El modulo random, para obtener numeros aleatorios, time
# para obtener las horas, y os, para ejecutar código en el SO
from database import *
#-------------
#lo de arriba son módulos.
#-------------
#lo de abajo es el algoritmo en si.
#---------
ip="127.0.0.1" #la ip
port=2021 #el puerto
#---------
app=Flask(__name__) #la aplicación
socketio = SocketIO(app) #socketio
app.secret_key="secret_key" #un código secreto, para realizar peticiones GET, POST
@app.route("/", methods=["GET","POST"]) #Crea un link y admite los metodos get y post
def Index(): # Se ejecuta lo que esta adentro cuando entran al link
return render_template("index.html") #Se ejecuta para visualizar la página.
@app.route("/login", methods=["GET","POST"])#Crea un link y admite los metodos get y post
def login(): #si entran al link /login ejecuta el contenido de la función
if 'emailXZK' in session: #verifica si existe un coockie
usuario=nameRETURN(escape(session["emailXZK"])) #si existe, usuario toma valor del coockie que es un nombre de usuario
return render_template("home/home.html", user_name=usuario) #muestra la pagina html y envía una variable en el html
else:
if request.method == "POST": # si no existe el coockie, este condicional espera a que se envien metodos http post
email=request.form.get('email'); #si se envian entra al condicional, obteniendo datos del <form>
password=request.form.get('password'); #obtiene datos del email y password
if password != None or email != None: #este condicional verifica si password y email son diferentes de None, si lo son, entra al condicional
AL=account_login(email=email, password=password) #envía datos a la base de datos, la función account_login en database.py para logearse
if AL == None: # si la función retorna None simplemente renderiza nuevamente la pagina del login con un mensaje que dice la cuenta no existe
return render_template("login/login.html", style_warning="text-warning text-red", text_warning="La cuenta no existe")
elif AL == True and AL!=None: #si es correcto, crea un coockie que es como una variable que toma valor el email
session['emailXZK']=email
if "emailXZK" in session: #si el coockie existe, osea que si se ha creado, nos envía a la función home redireccionandonos al link
return redirect('/home')
else: #si no existe el coockie renderiza nuevamente el login con otro mensaje
return render_template("login/login.html", style_warning="text-warning text-red", text_warning="Ocurrió un error con coockies al logearse.")
elif AL!=True and AL!=None: #si al es diferente de True o None renderiza el login por que ha ocurrido un error inesperado
return render_template("login/login.html", style_warning="text-warning text-white", text_warning="Ocurrió un error al logearse.")
return render_template("login/login.html") #saliendo fuera del condicional de los metodos http, renderiza el login
@app.route("/signup", methods=["GET","POST"])#Crea un link y admite los metodos get y post
def signup():
if request.method == "POST": #espera a que se envíe el metodo http post
user_name=request.form.get('user_name'); #obtiene valor de los formularios, en este caso el name del input del form que tiene como valor user_name
user_surname=request.form.get('user_surname'); #lo mismo para los otros
email=request.form.get('email');
password=request.form.get('password');
telephone=request.form.get('telephone');
direction=request.form.get('direction');
age=request.form.get('age');
#este condicional solo verifica que los datos no estén vacios, si no lo están, retorna True y accede al condicional
#Si crees que esto solo se resuelve con js, estás equivocado, por que el js, se puede bloquear o simplemente cambiar valores
if user_name!=None or user_surname!=None or email!=None or password!=None or telephone!=None or direction!=None or age!=None:
signup=account_signup(user_name, user_surname, email, password, telephone, direction, age) #envía los datos a la función account_signup en database.py
if signup==True: #si signup la función retorna True, accede a este condicional
#renderiza el login con un mensaje
return render_template("login/login.html", style_warning="text-warning text-green", text_warning="¡Gracias por suscribirte!.")
else: #si signup retorna False o None, renderiza el signup
return render_template("signup/signup.html", style_warning="text-warning text-red", text_warning="La cuenta no se pudo crear")
return render_template("signup/signup.html", style_warning="text-warning text-white", text_warning="Registrarse")
#fuera del condicional del metodo http, GET POST, renderiza signup
@app.route("/home", methods=["GET","POST"])#Crea un link y admite los metodos get y post
def home():
if 'emailXZK' in session: #comprueba si existe un coockie
usuario=nameRETURN(escape(session["emailXZK"])) # obtiene el nombre del usuario utilizando la coockie y una función
return render_template("home/home.html", user_name=usuario) #renderiza home con el nombre de usuario
else:
return redirect("/login") #si no existe coockie, nos envía al login para logearnos
@app.route("/logout")
def logout():
    """Log the user out by discarding the session cookie, then redirect to /login."""
    # dict.pop with a default never raises, so the membership check was
    # redundant — both branches of the original returned the same redirect.
    session.pop("emailXZK", None)
    return redirect("/login")
if __name__ == "__main__":
    try:
        # Start the Flask-SocketIO server, e.g. http://127.0.0.1:2002
        # NOTE(review): debug=True should not be enabled in production — confirm.
        socketio.run(app, host=ip, port=port, debug=True)
    except Exception as e:  # On startup failure, report the address and error.
        print("------------------------------------------------------------")
        print("¡El servidor ha presentado un error!:\nhttp://"+ ip+ ":"+ str(port) )
        print("ERROR: ", e)
        print("------------------------------------------------------------")
|
{"/init.py": ["/database.py"]}
|
37,961,707
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/model/gdrive_config_model.py
|
class GDriveConfigModel:
    """Google Drive settings parsed from the [GDRIVE] section of a config file."""
    # Annotations only — every instance gets fresh values in __init__.
    SCOPES: list
    CREDENTIAL_PATH: str
    TOKEN_PICK_PATH: str
    MAX_BACKUPS: int

    def __init__(self, config):
        """Build the model from a configparser-like mapping.

        :param config: mapping whose 'GDRIVE' section provides SCOPE_PROTOCOL,
            SCOPE_HOST, SCOPE_PATHS (comma-separated), CREDENTIAL_PATH,
            TOKEN_PICK_PATH and MAX_BACKUPS.
        """
        section = config['GDRIVE']
        protocol = section['SCOPE_PROTOCOL']
        host = section['SCOPE_HOST']
        # BUG FIX: SCOPES used to be a *class-level* list shared by every
        # instance, so each construction appended duplicate scope URLs.
        # Build a fresh per-instance list instead.
        self.SCOPES = [f'{protocol}://{host}{path}'
                       for path in section['SCOPE_PATHS'].split(',')]
        self.CREDENTIAL_PATH = section['CREDENTIAL_PATH']
        self.TOKEN_PICK_PATH = section['TOKEN_PICK_PATH']
        self.MAX_BACKUPS = int(section['MAX_BACKUPS'])
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,708
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/main.py
|
# This is a sample Python script.
import configparser
import sys
import time
from datetime import datetime
import threading
import pycron
from gdrive.backup import Backup
from gdrive.gdrive_api import GDriveApi
from model.gdrive_config_model import GDriveConfigModel
from model.general_config_model import GeneralConfigModel
from model.mysql_config_model import MysqlConfigModel
import argparse
def print_hi(name):
    """Greet *name* on stdout (IDE template scaffolding)."""
    greeting = f'Hi, {name}'
    print(greeting)
def now():
    """Return the current local date and time."""
    return datetime.now()


def now_str():
    """Return the current time formatted as MM/DD/YYYY - HH:MM:SS."""
    timestamp_format = "%m/%d/%Y - %H:%M:%S"
    return now().strftime(timestamp_format)
def do_backup_async(gdrive_config, mysql_config, general_config):
    """Run one backup cycle: trim old Drive backups, then dump and upload.

    :param gdrive_config: GDriveConfigModel with Drive credentials and limits
    :param mysql_config: MysqlConfigModel describing the database to dump
    :param general_config: GeneralConfigModel with retention settings
    """
    # Reuse now_str() instead of shadowing the module-level now() function
    # with a local string (as the original did).
    print(f'{now_str()} Backuping....')
    gdrive_api = GDriveApi(gdrive_config)
    remote_backups = gdrive_api.list_files()  # renamed: avoid shadowing builtin `list`
    # Make room first so the upload never exceeds MAX_BACKUPS on Drive.
    if len(remote_backups) > gdrive_config.MAX_BACKUPS:
        gdrive_api.remove_oldest_file()
    gbackup = Backup(mysql_config, gdrive_config, general_config)
    gbackup.do_drive_upload()
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    # CLI: --profile selects config_<name>.ini; --cron keeps the process
    # alive and fires backups when the configured cron expression matches.
    parser = argparse.ArgumentParser(description='Optional app description')
    parser.add_argument('--profile', type=str,
                        help='Set the profile of config.ini')
    parser.add_argument('--cron', action='store_true',
                        help='Run on cron')
    args = parser.parse_args()
    now = datetime.now().strftime("%m/%d/%Y - %H:%M:%S")  # NOTE: shadows the now() helper above
    print(f'{now} Starting...')
    default_config = 'config.ini'
    if args.profile:
        default_config = 'config_' + args.profile + '.ini'
    config = configparser.ConfigParser()
    config.read(default_config)
    iscron = args.cron
    gdrive_config = GDriveConfigModel(config)
    mysql_config = MysqlConfigModel(config)
    general_config = GeneralConfigModel(config)
    if not iscron:
        # One-shot mode: back up immediately and exit.
        do_backup_async(gdrive_config, mysql_config, general_config)
        exit(0)
    CRON = general_config.CRON
    print(f'Starting cron... waiting for ( {CRON} )')
    # Poll once a minute; when the expression matches, run the backup in a
    # background thread so a slow upload does not delay the next poll.
    while iscron:
        if pycron.is_now(CRON):
            now = datetime.now().strftime("%m/%d/%Y - %H:%M:%S")
            print(f'{now} Cronando....')
            threading.Thread(target=do_backup_async, args=(gdrive_config, mysql_config, general_config), kwargs={}).start()
        time.sleep(60)
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,709
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/general/local_files.py
|
import tarfile
import os
import time
from model.file_model import FileModel
from model.general_config_model import GeneralConfigModel
from model.mysql_config_model import MysqlConfigModel
class LocalFiles:
    """Manages local MySQL dump files: creation, compression and retention."""

    MYSQL_CONFIG: MysqlConfigModel
    GENERAL_CONFIG: GeneralConfigModel

    def __init__(self, mysql_config: MysqlConfigModel, general_config: GeneralConfigModel):
        self.MYSQL_CONFIG = mysql_config
        self.GENERAL_CONFIG = general_config

    def _list_by_suffix(self, suffix):
        """Return paths in TARGET_DIR ending with *suffix* (dir created if missing)."""
        target_dir = self.MYSQL_CONFIG.TARGET_DIR
        self.mkdirs(target_dir)
        return [os.path.join(target_dir, fn)
                for fn in os.listdir(target_dir) if fn.endswith(suffix)]

    def list_tar_backups(self):
        """List compressed (.tar.gz) backups in the target directory."""
        return self._list_by_suffix(".tar.gz")

    def make_tarfile(self, source_file, output_filename):
        """Compress *source_file* into the gzip tarball *output_filename*."""
        print(f'Compressing {source_file} >>> {output_filename}')
        # Context manager guarantees the archive is closed even if add() raises
        # (the original leaked the handle on error).
        with tarfile.open(output_filename, "w:gz") as tar:
            tar.add(source_file)

    def mkdirs(self, directory):
        """Create *directory* (and parents) if it does not exist."""
        if not os.path.exists(directory):
            os.makedirs(directory)

    def list_sql_dumps(self):
        """List raw .sql dumps in the target directory."""
        return self._list_by_suffix(".sql")

    def print_sql_dumps(self):
        """Print each .sql dump path on its own line."""
        for dump_path in self.list_sql_dumps():
            print(dump_path)

    def prepare_backupfile(self):
        """Dump the database, compress the dump and return it as a FileModel.

        :return: FileModel pointing at the freshly created .tar.gz archive
        """
        target_dir = self.MYSQL_CONFIG.TARGET_DIR
        self.mkdirs(target_dir)
        filestamp = time.strftime('%Y%m%d%H%M')
        base_filename = self.MYSQL_CONFIG.DATABASE + "_" + filestamp
        sql_file_name = f'{base_filename}.sql'
        sql_file_path = os.path.join(target_dir, sql_file_name)
        self.create_dump(sql_file_path)
        compressed_file_name = f'{base_filename}.tar.gz'
        compressed_file_path = os.path.join(target_dir, compressed_file_name)
        self.make_tarfile(sql_file_path, compressed_file_path)
        return FileModel(compressed_file_path, compressed_file_name)

    def create_dump(self, target_path):
        """Run mysqldump for all databases and redirect the dump to *target_path*.

        SECURITY NOTE: the password is passed on the command line (-p<pass>),
        which is visible in the process list; a defaults file would be safer.
        """
        command = "mysqldump -h %s -P %s -u %s -p%s -A -R -E --triggers --column-statistics=0 --single-transaction > " \
                  "%s" % (
                      self.MYSQL_CONFIG.HOST,
                      self.MYSQL_CONFIG.PORT,
                      self.MYSQL_CONFIG.DB_USER,
                      self.MYSQL_CONFIG.DB_PASS,
                      target_path
                  )
        print("Running...\n" + command)
        self.run_command(command)
        print("\n|| Database dumped to " + target_path + " || ")

    def run_command(self, command):
        """Execute *command* through the shell."""
        os.system(command)

    def remove_oldest(self, list, max_files):
        """Delete the oldest files (by ctime) until at most *max_files* remain.

        :param list: file paths to prune (name kept for call compatibility,
            although it shadows the builtin)
        :param max_files: number of files allowed to survive
        """
        total_files = len(list)
        while total_files > max_files:
            oldest_file = min(list, key=os.path.getctime)
            target_file = os.path.abspath(oldest_file)
            print("Removing " + target_file)
            os.remove(target_file)
            # BUG FIX: drop the removed entry so the next min() cannot pick the
            # now-deleted path and crash in os.path.getctime (the original
            # failed whenever more than one file had to be removed).
            list.remove(oldest_file)
            total_files = total_files - 1

    def clean(self):
        """Apply the retention policy: keep a bounded number of local backups,
        or delete everything when KEEP_LOCAL_BACKUPS is disabled."""
        print("Cleaning...")
        if self.GENERAL_CONFIG.KEEP_LOCAL_BACKUPS:
            self.remove_oldest(self.list_tar_backups(), self.GENERAL_CONFIG.MAX_COMPRESSED_DUMPS)
            self.remove_oldest(self.list_sql_dumps(), self.GENERAL_CONFIG.MAX_SQL_DUMPS)
            return
        self.remove_oldest(self.list_tar_backups(), 0)
        self.remove_oldest(self.list_sql_dumps(), 0)
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,710
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/model/general_config_model.py
|
class GeneralConfigModel:
    """General settings parsed from the [GENERAL] section of a config file."""
    MAX_SQL_DUMPS: int          # how many raw .sql dumps to keep locally
    MAX_COMPRESSED_DUMPS: int   # how many .tar.gz archives to keep locally
    KEEP_LOCAL_BACKUPS: bool    # keep bounded local copies vs. delete all
    CRON: str                   # cron expression driving scheduled backups

    def __init__(self, config):
        """Build the model from a configparser-like mapping with a GENERAL section."""
        section = config['GENERAL']
        self.CRON = section['CRON']
        self.MAX_SQL_DUMPS = int(section['MAX_SQL_DUMPS'])
        self.MAX_COMPRESSED_DUMPS = int(section['MAX_COMPRESSED_DUMPS'])
        # Accept 'true' in any casing and with stray whitespace ('True', ' TRUE').
        # The original only matched the exact lowercase string 'true'.
        self.KEEP_LOCAL_BACKUPS = section['KEEP_LOCAL_BACKUPS'].strip().lower() == 'true'
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,711
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/model/file_model.py
|
class FileModel:
    """Lightweight value object pairing a file's name with its full path."""
    name: str
    full_path: str

    def __init__(self, full_path, name):
        """Store *full_path* and *name* as given (no validation performed)."""
        self.full_path = full_path
        self.name = name
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,712
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/gdrive/gdrive_api.py
|
from __future__ import print_function
import datetime
import pickle
import os.path
import os
import time
from googleapiclient.discovery import build, Resource
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
from googleapiclient.http import MediaFileUpload
from model.gdrive_config_model import GDriveConfigModel
from utils import dateutils
class GDriveApi:
    """Thin wrapper over the Drive v3 API restricted to the appDataFolder space."""

    gdrive_config: GDriveConfigModel
    service = Resource  # replaced with a real service object in __init__

    def __init__(self, config: GDriveConfigModel):
        self.gdrive_config = config
        self.service = self.init_api()

    def upload_file(self, filename, path):
        """Upload the file at *path* into the appDataFolder as *filename*."""
        # NOTE(review): __init__ already built the service; this extra call
        # re-runs the auth flow and discards its result — confirm it is needed.
        self.init_api()
        print("Trying upload " + filename + " at path " + path)
        file_metadata = {
            'name': filename,
            'parents': ['appDataFolder']
        }
        media = MediaFileUpload(self.get_file(path), mimetype='application/zip')
        file = self.service.files().create(body=file_metadata,
                                           media_body=media,
                                           fields='id'
                                           ).execute()
        print('File ID: %s' % file.get('id'))

    def list_files(self):
        """Return up to 30 backup entries (id, name, createdTime) from appDataFolder."""
        results = self.service.files().list(
            spaces='appDataFolder',
            q="name contains 'tar.gz'", pageSize=30,
            fields="nextPageToken, files(id, name, createdTime)").execute()
        return results.get('files', [])

    def print_list(self):
        """Print every remote backup, or a notice when none exist."""
        items = self.list_files()
        if not items:
            print('No files found.')
        else:
            print('Files:')
            for item in items:
                print(u'{0} ({1}) {2}'.format(item['name'], item['id'], item['createdTime']))

    def get_oldest_file(self):
        """Return the remote file dict with the earliest createdTime, or None
        when no backups exist."""
        files = self.list_files()
        if not files:
            return None
        # min() with a parsed-date key replaces the original manual scan.
        return min(files, key=lambda f: dateutils.str_to_date_object(f['createdTime']))

    def init_api(self):
        """Authenticate (loading/refreshing the pickled token as needed) and
        return a Drive v3 service object."""
        creds = None
        # token pickle caches the user's access/refresh tokens between runs
        if os.path.exists(self.gdrive_config.TOKEN_PICK_PATH):
            with open(self.gdrive_config.TOKEN_PICK_PATH, 'rb') as token:
                creds = pickle.load(token)
        # Without valid creds: refresh an expired token, else run the OAuth flow.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    self.gdrive_config.CREDENTIAL_PATH, self.gdrive_config.SCOPES)
                creds = flow.run_local_server(port=0)
            # Persist the credentials for the next run.
            with open(self.gdrive_config.TOKEN_PICK_PATH, 'wb') as token:
                pickle.dump(creds, token)
        service = build('drive', 'v3', credentials=creds)
        return service

    def delete_file(self, file_id):
        """Permanently delete the remote file with the given id."""
        self.service.files().delete(fileId=file_id).execute()

    def remove_oldest_file(self):
        """Delete the oldest remote backup."""
        print('Deleting old backup from drive ', self.get_oldest_file())
        self.delete_file(self.get_oldest_file()['id'])

    def get_file(self, path):
        """Validate that *path* is readable and return it.

        BUG FIX: the original opened the file and never closed it, leaking the
        handle; a context manager now closes it before returning the path.
        """
        with open(path) as file:
            print(os.path.basename(file.name))
            print(file.name)
            return file.name
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,713
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/model/mysql_config_model.py
|
class MysqlConfigModel:
    """MySQL connection settings parsed from the [MYSQL] section of a config file."""
    # BUG FIX: TARGET_DIR was *assigned* the type object str at class level
    # ('TARGET_DIR = str') instead of being annotated like the other fields.
    TARGET_DIR: str
    HOST: str
    PORT: int
    DB_USER: str
    DB_PASS: str
    DATABASE: str

    def __init__(self, config):
        """Build the model from a configparser-like mapping with a MYSQL section."""
        section = config['MYSQL']
        self.TARGET_DIR = section['TARGET_DIR']
        self.HOST = section['HOST']
        self.PORT = int(section['PORT'])
        self.DB_USER = section['DB_USER']
        self.DB_PASS = section['DB_PASS']
        self.DATABASE = section['DATABASE']
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,714
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/gdrive/backup.py
|
import os
from gdrive.gdrive_api import GDriveApi
from general.local_files import LocalFiles
from model.gdrive_config_model import GDriveConfigModel
from model.general_config_model import GeneralConfigModel
from model.mysql_config_model import MysqlConfigModel
class Backup:
    """Orchestrates one backup: local dump/compression, upload, remote pruning."""

    MYSQL_CONFIG: MysqlConfigModel
    GDRIVE_CONFIG: GDriveConfigModel
    LOCAL_FILES: LocalFiles

    def __init__(self, mysql_config: MysqlConfigModel, gdrive_config: GDriveConfigModel, general_config: GeneralConfigModel):
        self.MYSQL_CONFIG = mysql_config
        self.GDRIVE_CONFIG = gdrive_config
        self.LOCAL_FILES = LocalFiles(mysql_config, general_config)

    def do_drive_upload(self):
        """Create a fresh compressed dump, upload it to Drive, then prune local
        and remote copies. Upload errors are reported, not raised."""
        self.LOCAL_FILES.clean()
        compressed_file = self.LOCAL_FILES.prepare_backupfile()
        print("Starting upload... " + compressed_file.name)
        gdrive_api = GDriveApi(self.GDRIVE_CONFIG)
        try:
            gdrive_api.upload_file(compressed_file.name, compressed_file.full_path)
            self.LOCAL_FILES.clean()
            gdrive_api.remove_oldest_file()
        # BUG FIX: narrowed from a bare `except:` that also swallowed
        # SystemExit/KeyboardInterrupt and hid the reason for the failure.
        except Exception as exc:
            print('Error on upload', exc)
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,715
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/utils/dateutils.py
|
import datetime
#'2021-04-12T03:51:47.092Z'
def str_to_date_object(date_string):
    """Parse an RFC3339-style Drive timestamp (e.g. '2021-04-12T03:51:47.092Z')
    into a timezone-aware datetime."""
    timestamp_format = '%Y-%m-%dT%H:%M:%S.%f%z'
    return datetime.datetime.strptime(date_string, timestamp_format)
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,961,716
|
dofun12/gdrive-autobackup
|
refs/heads/master
|
/gdrive/test.py
|
from gdrive import gdrive_api
# Manual smoke test for the Drive wrapper.
if __name__=="__main__":
    # NOTE(review): gdrive_api now exposes the GDriveApi *class* (which takes a
    # GDriveConfigModel); these module-level calls look stale — confirm.
    service = gdrive_api.init_api()
    gdrive_api.list_files(service)
|
{"/gdrive/backup.py": ["/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/general/local_files.py", "/model/general_config_model.py"], "/main.py": ["/gdrive/backup.py", "/gdrive/gdrive_api.py", "/model/gdrive_config_model.py", "/model/mysql_config_model.py", "/model/general_config_model.py"], "/general/local_files.py": ["/model/file_model.py", "/model/general_config_model.py", "/model/mysql_config_model.py"], "/gdrive/gdrive_api.py": ["/model/gdrive_config_model.py"]}
|
37,985,857
|
jaderigby/quick-git
|
refs/heads/master
|
/New.py
|
import messages as msg
import subprocess, helpers
# settings = helpers.get_settings()
def execute():
    """Create a feature branch off an up-to-date master and push it upstream."""
    msg.new_branch_info()
    branch_name = helpers.user_input("Please select a name for your feature branch: ")
    # Refresh master first, then branch from it and publish the new branch.
    for command in (
        'git checkout master',
        'git pull',
        'git checkout -b {}'.format(branch_name),
        'git push -u origin {}'.format(branch_name),
    ):
        helpers.run_command(command)
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,858
|
jaderigby/quick-git
|
refs/heads/master
|
/Feature.py
|
import messages as msg
import subprocess, helpers
# settings = helpers.get_settings()
def execute():
    """Create a feature branch from the current branch and push it upstream."""
    branch_name = helpers.user_input("Please select a name for your feature branch: ")
    base_branch = helpers.run_command_output('git branch --show-current', False).replace('\n', '')
    helpers.run_command('git checkout -b {} {}'.format(branch_name, base_branch))
    helpers.run_command('git push -u origin {}'.format(branch_name))
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,859
|
jaderigby/quick-git
|
refs/heads/master
|
/Diff.py
|
import messages as msg
import helpers
settings = helpers.get_settings()
def execute():
    """Interactively run `git difftool` on changed files until the user exits.

    Requires a 'differ' entry in the profile settings; otherwise prints
    instructions for configuring one. Does nothing when no profile is loaded.
    """
    if settings:
        if 'differ' not in settings:
            msg.set_differ()
        else:
            while True:
                changed = helpers.run_command_output('git diff --name-only').splitlines()
                choice = helpers.user_selection('Select file to diff: ', changed)
                if choice == 'exit':
                    print('\nExiting ...\n')
                    break
                if isinstance(choice, int):
                    target = changed[choice - 1]
                    cwd = helpers.run_command_output('pwd', False).replace('\n', '')
                    helpers.run_command('git difftool {}/{} {}'.format(cwd, target, settings['differ']))
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,860
|
jaderigby/quick-git
|
refs/heads/master
|
/settings.py
|
import os

# Directory containing this tool's source; profiles live beside it.
toolDirectory = os.path.dirname(__file__)

# Static tool configuration: where user profiles are stored and the
# filename of the active profile.
settings = {
    "profile_url" : toolDirectory + "/profiles/",
    "profile" : "profile.py"
}
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,861
|
jaderigby/quick-git
|
refs/heads/master
|
/actions.py
|
import messages as msg
import sys, helpers, sizzle
import commitAll
import Feature
import Remove
import Branch
import Diff
import Restore
import RestoreBack
import DoStatus
import Back
import New
# new imports start here
# settings = helpers.get_settings()
# Dispatch the first CLI argument to the matching action module.
try:
    action = str(sys.argv[1])
# BUG FIX: narrowed from a bare `except:` — only a missing argument should
# fall back to the status message; other errors must propagate.
except IndexError:
    action = None
args = sys.argv[2:]
if action is None:  # `is None`, not `== None`
    msg.statusMessage()
elif action == '-action':
    sizzle.do_action(args)
elif action == '-profile':
    sizzle.profile()
elif action == '-helpers':
    sizzle.helpers()
elif action == "all":
    commitAll.execute()
elif action == "feature":
    Feature.execute()
elif action == "remove":
    Remove.execute(args)
elif action == "branch":
    Branch.execute(args)
elif action == "diff":
    Diff.execute()
elif action == "re":
    Restore.execute(args)
elif action == "reback":
    RestoreBack.execute()
elif action == "status":
    DoStatus.execute()
elif action == "back":
    Back.execute()
elif action == "new":
    New.execute()
# new actions start here
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,862
|
jaderigby/quick-git
|
refs/heads/master
|
/DoStatus.py
|
import messages as msg
import helpers
settings = helpers.get_settings()
def execute():
    """Interactive `git status` helper: loop showing status and offering
    stash / push-all / unstage-all / selective-push actions until done."""
    while True:
        # Modified tracked files plus untracked files form the work set.
        fileList = helpers.run_command_output('git diff --name-only', False).splitlines()
        untrackedListing = helpers.run_command_output('git ls-files --others --exclude-standard', False).splitlines()
        combinedList = fileList + untrackedListing
        helpers.run_command('git status')
        if len(combinedList) == 0:
            break
        selection = helpers.status_selection('Selection: ', ['stash', 'push all', 'unstage all', 'push/exclude'])
        if selection == 1:
            # Stash everything, then loop to re-show status.
            helpers.run_command('git stash')
        elif selection == 2:
            # Stage all, commit with a message, push, and finish.
            helpers.run_command('git add -A')
            helpers.run_command('git status', False)
            commitMessage = helpers.user_input("Commit Message: ")
            helpers.run_command('git commit -m "{}"'.format(commitMessage))
            helpers.run_command('git push')
            break
        elif selection == 3:
            helpers.run_command('git reset *')
        elif selection == 4:
            # Stage all, then unstage the user's excluded entries before committing.
            print("\n=====================")
            fileSelection = helpers.user_selection('push all, except: [eg: 1,3,4] ', combinedList, True)
            helpers.run_command('git add -A')
            if fileSelection != 'exit':
                for item in fileSelection:
                    helpers.run_command('git reset {}'.format(combinedList[item - 1]))
            helpers.run_command('git status')
            # Abort the commit when nothing remains staged after the exclusions.
            if len(helpers.run_command_output('git diff --name-only --cached', False)) == 0:
                print("\nNothing staged!\n")
                break
            commitMessage = helpers.user_input("Commit Message: ")
            helpers.run_command('git commit -m "{}"'.format(commitMessage))
            helpers.run_command('git push')
            break
        elif selection == 'exit':
            break
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,863
|
jaderigby/quick-git
|
refs/heads/master
|
/messages.py
|
import helpers, json
# Action catalog (alias plus per-action name/description) loaded from
# util/action-list.json next to this module.
actionList = json.loads(helpers.read_file('{}/{}'.format(helpers.path('util'), 'action-list.json')))
def statusMessage():
    """Print the available actions (alias, name, description), or a fallback
    notice when the action list is empty."""
    if len(actionList['actions']) > 0:
        print("")
        for item in actionList['actions']:
            print('''[ {} {} ]\t\t{}'''.format(actionList['alias'], item['name'], item['description']))
        print("")
    else:
        print('''
quick-git is working successfully!
''')
def done():
    """Print the standard end-of-process banner."""
    print('''
[ Process Completed ]
''')
def exit():
    """Print the exiting notice.

    NOTE: this shadows the builtin ``exit``; the name is kept for callers.
    """
    print("\nExiting ...\n")
def example():
    """Placeholder action used to verify the tooling runs."""
    status_text = "process working!"
    print(status_text)
def set_differ():
    """Explain how to add a 'differ' entry to the user's profile file."""
    statusMsg = '''
Please add the differ configuration to your profile file.
For example:
{
    "settings" : {
        "differ": "-y -t Kaleidoscope"
    }
}
'''
    print(statusMsg)
def new_branch_info():
    """Warn that creating a branch temporarily switches to and pulls master."""
    print('''
NOTE: This will temporarilly switch to - and then pull - the latest\nfrom master.
''')
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,864
|
jaderigby/quick-git
|
refs/heads/master
|
/RestoreBack.py
|
import messages as msg
import helpers
# settings = helpers.get_settings()
def execute():
    """Restore one file from the current branch's upstream (origin/<branch>)."""
    current_branch = helpers.run_command_output('git branch --show-current', False).replace('\n', '')
    upstream_ref = 'origin/{}'.format(current_branch)
    target_path = helpers.user_input('File name/path to restore from last commit: ')
    helpers.run_command(['git', 'restore', '--source', upstream_ref, '--', target_path])
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,865
|
jaderigby/quick-git
|
refs/heads/master
|
/helpers.py
|
import json
from settings import settings
# Absolute path of the active profile file (profiles dir + filename from settings).
profilePath = settings['profile_url'] + settings['profile']
def load_profile():
    """Return the parsed profile JSON, or an empty dict when no profile exists."""
    import os
    if not os.path.exists(profilePath):
        return json.loads("{}")
    return json.loads(read_file(profilePath))
def get_settings():
    """Return the 'settings' dict from the user's profile, or False when absent."""
    profile = load_profile()
    if 'settings' not in profile:
        return False
    return profile['settings']
def path(TYPE):
    """Resolve a well-known directory.

    'user' -> the user's home directory; 'util'/'utility' -> the directory
    containing this module; anything else -> False.
    """
    import os
    if TYPE == 'user':
        return os.path.expanduser('~/')
    if TYPE in ('util', 'utility'):
        return os.path.dirname(os.path.realpath(__file__))
    return False
def read_file(FILEPATH):
    """Return the entire text contents of FILEPATH.

    Uses a context manager so the handle is closed even when read() raises
    (the original left the file open on error).
    """
    with open(FILEPATH, 'r') as source:
        return source.read()
def write_file(FILEPATH, DATA):
    """Write DATA to FILEPATH, replacing any existing contents."""
    with open(FILEPATH, 'w') as sink:
        sink.write(DATA)
def run_command(CMD, option = True):
    """Run CMD via subprocess.call.

    :param CMD: shell string (run with shell=True) or argv list (shell=False)
    :param option: when True, echo the command before running it
    """
    import subprocess
    shellStatus = not isinstance(CMD, list)
    showCmd = CMD
    if isinstance(CMD, list):
        # Preserve the original display form: each argv item prefixed with a
        # space. (The original built this with quadratic `str +=` in a loop
        # and shadowed the builtin `str` while doing so.)
        showCmd = ''.join(' ' + part for part in CMD)
    if option:
        print('\n============== Running Command: {}\n'.format(showCmd))
    subprocess.call(CMD, shell=shellStatus)
def run_command_output(CMD, option = True):
    """Run CMD through the shell and return its combined stdout+stderr text.

    Returns False when CMD is None.

    :param CMD: shell command string
    :param option: when True, echo the command before running it
    """
    import subprocess
    if option:
        print('\n============== Outputting Command: {}\n'.format(CMD))
    result = False
    if CMD is not None:  # `is not None`, not `!= None`
        process = subprocess.Popen(CMD, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
        out, err = process.communicate()
        # NOTE: with stderr merged into stdout, `err` is always None here, so
        # this branch never fires; kept for behavioral parity.
        if err:
            print(err)
        else:
            result = out.decode('utf-8')
    return result
def decorate(COLOR, STRING):
    """Wrap STRING in the ANSI escape code named COLOR, appending the reset code."""
    bcolors = {
        'lilac': '\033[95m',
        'blue': '\033[94m',
        'cyan': '\033[96m',
        'green': '\033[92m',
        'yellow': '\033[93m',
        'red': '\033[91m',
        'bold': '\033[1m',
        'underline': '\033[4m',
        'endc': '\033[0m',
    }
    return '{}{}{}'.format(bcolors[COLOR], STRING, bcolors['endc'])
def user_input(STRING):
    """Prompt with STRING and return the user's reply.

    Falls back from Python 2's raw_input to Python 3's input(); only
    NameError is caught so real errors (e.g. KeyboardInterrupt, EOFError)
    propagate instead of being silently retried (the original used a bare
    except).
    """
    try:
        return raw_input(STRING)
    except NameError:
        return input(STRING)
def list_expander(LIST):
    """Expand a selection string like '1,3-5' into [1, 3, 4, 5].

    Spaces are ignored; 'a-b' tokens expand to the inclusive integer range.
    """
    expanded = []
    for token in LIST.replace(' ', '').split(','):
        if '-' in token:
            bounds = token.split('-')
            expanded.extend(range(int(bounds[0]), int(bounds[1]) + 1))
        else:
            expanded.append(int(token))
    return expanded
# generates a user selection session, where the passed in list is presented as numbered selections; selecting "x" or just hitting enter results in the string "exit" being returned. Any invaild selection is captured and presented with the message "Please select a valid entry"
def user_selection(DESCRIPTION, LIST, LIST_SELECT = False):
    """Present LIST as a numbered menu and return the user's choice.

    Returns a 1-based int, a list of ints when LIST_SELECT is true
    (comma/range syntax, e.g. "1,3-5"), or 'exit' for "x"/empty input.

    Fixes: no longer shadows the builtin ``str``; regexes are raw
    strings; fullmatch prevents inputs like "3abc" from reaching int()
    and crashing — they now get the invalid-entry message.
    """
    import re
    menu = ''
    for i, item in enumerate(LIST, start=1):
        menu += '\n[{index}] {item}'.format(index=i, item=item)
    menu += '\n\n[x] Exit\n'
    finalAnswer = False
    while True:
        print(menu)
        selection = user_input('{}'.format(DESCRIPTION))
        pat = re.compile(r"[0-9,\- ]+") if LIST_SELECT else re.compile(r"[0-9]+")
        if pat.fullmatch(selection):
            selection = list_expander(selection) if LIST_SELECT else int(selection)
            if isinstance(selection, (int, list)):
                finalAnswer = selection
                break
        elif selection in ('x', ''):
            finalAnswer = 'exit'
            break
        else:
            print("\nPlease select a valid entry...")
    return finalAnswer
def arguments(ARGS, DIVIDER=':'):
    """Turn an iterable of "key:value" strings into a dict, splitting on DIVIDER."""
    pairs = [item.split('{}'.format(DIVIDER)) for item in ARGS]
    return dict(pairs)
def kv_set(DICT, KEY, DEFAULT = False):
    """Return DICT[KEY], normalising 'true'/'false' to 't'/'f' in place.

    When KEY is absent, DEFAULT is returned and DICT is untouched.
    """
    if KEY not in DICT:
        return DEFAULT
    value = DICT[KEY]
    if value == 'true':
        value = 't'
    elif value == 'false':
        value = 'f'
    DICT[KEY] = value
    return value
# custom helpers start here
# =========================
def separation():
    """Spacer placed between inline menu entries (see status_selection)."""
    return ' '
def status_selection(DESCRIPTION, LIST):
    """Inline (single-row) variant of user_selection.

    Returns a 1-based int, or 'exit' for "x"/empty input.

    Fixes: no longer shadows the builtin ``str``; raw-string regex;
    fullmatch so junk such as "2x" reports an invalid entry instead of
    crashing int().
    """
    import re
    menu = ''
    print('')
    for i, item in enumerate(LIST, start=1):
        menu += '[{index}] {item}{separator}'.format(index=i, item=item, separator=separation())
    menu += '[x] Exit\n'
    finalAnswer = False
    while True:
        print(menu)
        selection = user_input('{}'.format(DESCRIPTION))
        if re.fullmatch(r"[0-9]+", selection):
            finalAnswer = int(selection)
            break
        elif selection in ('x', ''):
            finalAnswer = 'exit'
            break
        else:
            print("\nPlease select a valid entry...")
    return finalAnswer
def user_selection_with_highlight(DESCRIPTION, LIST, LIST_SELECT = False, CURRENT = '', EXCLUDE_CURRENT = False):
    """Like user_selection, but highlights CURRENT in green.

    When EXCLUDE_CURRENT is true, CURRENT is rendered as an unnumbered
    ' * item' marker; otherwise it keeps its number and is only coloured.
    Returns a 1-based int, a list of ints (LIST_SELECT), or 'exit'.

    Fixes: raw-string regexes; fullmatch so partial matches such as
    "3abc" report an invalid entry instead of crashing int().
    """
    import re
    menu = ''
    for i, item in enumerate(LIST, start=1):
        if item == CURRENT and EXCLUDE_CURRENT:
            menu += decorate('green', '\n * {item}'.format(index=i, item=item))
        elif item == CURRENT:
            menu += decorate('green', '\n[{index}] {item}'.format(index=i, item=item))
        else:
            menu += '\n[{index}] {item}'.format(index=i, item=item)
    menu += '\n\n[x] Exit\n'
    finalAnswer = False
    while True:
        print(menu)
        selection = user_input('{}'.format(DESCRIPTION))
        pat = re.compile(r"[0-9,\- ]+") if LIST_SELECT else re.compile(r"[0-9]+")
        if pat.fullmatch(selection):
            selection = list_expander(selection) if LIST_SELECT else int(selection)
            if isinstance(selection, (int, list)):
                finalAnswer = selection
                break
        elif selection in ('x', ''):
            finalAnswer = 'exit'
            break
        else:
            print("\nPlease select a valid entry...")
    return finalAnswer
def remove_local_branch(CURRENT_BRANCH, BRANCH_TO_REMOVE):
    """Force-delete a local branch, hopping to master first if it is checked out."""
    if CURRENT_BRANCH == BRANCH_TO_REMOVE:
        run_command('git checkout master')
    run_command('git branch -D {}'.format(BRANCH_TO_REMOVE))
def remove_remote_branch(BRANCH_TO_REMOVE):
    """Delete the given branch on the origin remote."""
    deletion = 'git push origin --delete {}'.format(BRANCH_TO_REMOVE)
    run_command(deletion)
def check_context(OBJ):
    """Map a parsed-arguments dict to a branch-removal context string.

    local/l = true  -> 'removeLocalOnly'
    remote/r = true -> 'removeRemoteOnly'
    otherwise       -> 'removeLocalAndRemote'

    Fix: the original returned None when OBJ was empty/falsy, or when a
    flag key was present with a non-true value, so `remove` with no
    flags silently did nothing in the caller; all such cases now fall
    through to the local-and-remote default.
    """
    if OBJ:
        for key in ('local', 'l'):
            if OBJ.get(key) in ('true', 't'):
                return 'removeLocalOnly'
        for key in ('remote', 'r'):
            if OBJ.get(key) in ('true', 't'):
                return 'removeRemoteOnly'
    return 'removeLocalAndRemote'
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,866
|
jaderigby/quick-git
|
refs/heads/master
|
/Branch.py
|
import messages as msg
import subprocess, helpers, os, re
# settings = helpers.get_settings()
def execute(ARGS):
    """`branch` action: list, check out, or create branches.

    ARGS are "key:value" strings: go:back, go:master, go:latest,
    go:<number>, or new:<name>.  With no args, an interactive picker
    highlighting the current branch is shown.
    """
    argDict = helpers.arguments(ARGS)
    out = subprocess.check_output(["git", "branch"]).decode('utf-8')
    outList = out.split()
    if '*' in outList: outList.remove('*')
    if argDict:
        if 'go' in argDict:
            if argDict['go'] == 'back':
                try:
                    helpers.run_command('git checkout -')
                except Exception:
                    print("\nNothing to go back to. Exiting ...")
            elif argDict['go'] == 'master':
                helpers.run_command('git checkout master')
            elif argDict['go'] == 'latest':
                # Most recently committed branch first.
                activity = helpers.run_command_output('git for-each-ref --sort=-committerdate refs/heads/', False)
                activityRefined = re.sub(r'[\w]* commit\t', '', activity)
                latestBranch = activityRefined.split('\n')[0].replace('refs/heads/', '')
                helpers.run_command('git checkout {}'.format(latestBranch))
            # BUG FIX: re.search('[0-9]*', ...) matched *every* string
            # (zero digits qualify), so "Not a valid selection" was
            # unreachable; fullmatch requires an all-digit value.
            elif re.fullmatch(r'[0-9]+', argDict['go']):
                branchName = outList[int(argDict['go']) - 1]
                helpers.run_command('git checkout {}'.format(branchName))
            else:
                print("\nNot a valid selection")
        if 'new' in argDict:
            if argDict['new']:
                nameSelect = argDict['new']
                helpers.run_command('git checkout master')
                helpers.run_command('git pull')
                helpers.run_command('git checkout -b {}'.format(nameSelect))
                helpers.run_command('git push -u origin {}'.format(nameSelect))
    else:
        currentBranch = helpers.run_command_output('git branch --show-current', False).replace('\n', '')
        print("")
        selection = helpers.user_selection_with_highlight("Please select branch to checkout: ", outList, False, currentBranch, True)
        if isinstance(selection, int):
            branchName = outList[int(selection) - 1]
            # BUG FIX: compared against currentBranch[0] (its first
            # character), so the already-checked-out guard never fired.
            if branchName != currentBranch:
                helpers.run_command('git checkout {}'.format(branchName))
            else:
                msg.exit()
        else:
            msg.exit()
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,867
|
jaderigby/quick-git
|
refs/heads/master
|
/commitAll.py
|
import messages as msg
import helpers
# settings = helpers.get_settings()
def execute():
    """Stage everything, show status, then commit and push with a prompted message."""
    for cmd, echo in (('git add -A', True), ('git status', False)):
        helpers.run_command(cmd, echo)
    message = helpers.user_input("Commit Message: ")
    helpers.run_command('git commit -m "{}"'.format(message))
    helpers.run_command('git push')
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,868
|
jaderigby/quick-git
|
refs/heads/master
|
/Back.py
|
import messages as msg
import helpers
# settings = helpers.get_settings()
def execute():
    """Jump back to the previously checked-out branch (git's '-' shorthand)."""
    checkout_previous = 'git checkout -'
    helpers.run_command(checkout_previous)
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,869
|
jaderigby/quick-git
|
refs/heads/master
|
/Restore.py
|
import messages as msg
import helpers
settings = helpers.get_settings()
def execute(ARGS):
    """`restore` action: restore files from origin/master.

    path:<p> / file:<p>  restore that file
    profile:true         restore every file listed in settings['restore']
    profile:open         open the profile in VS Code
    otherwise            prompt for a file to restore
    """
    def _restore_from_master(filepath):
        # Single home for the restore command (was copy-pasted four times).
        helpers.run_command('git restore --source origin/master -- {FILE}'.format(FILE= filepath))
    argDict = helpers.arguments(ARGS)
    if argDict:
        if 'path' in argDict:
            _restore_from_master(argDict['path'])
        elif 'file' in argDict:
            _restore_from_master(argDict['file'])
        elif 'profile' in argDict:
            if argDict['profile'] == 'true':
                for item in settings['restore']:
                    _restore_from_master(item)
            elif argDict['profile'] == 'open':
                helpers.run_command('code {}/profiles/profile.py'.format(helpers.path('util')))
            else:
                _restore_from_master(helpers.user_input('File name/path to restore from master: '))
    else:
        _restore_from_master(helpers.user_input('File name/path to restore from master: '))
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,870
|
jaderigby/quick-git
|
refs/heads/master
|
/sizzle.py
|
import messages as msg
import os, re, json
def do_action(ARGS):
    """Scaffold a new action module and wire it into actions.py and
    action-list.json.

    ARGS may contain "args:true" to generate an argument-taking template.

    Fix: whether the action takes arguments was decided by the same
    triple-nested dict check in three separate places; it is now computed
    once up front.

    NOTE(review): the indentation inside the template/splice string
    literals below reflects the source as recovered — verify the emitted
    snippets produce valid Python before relying on this scaffolder.
    """
    import helpers
    argDict = helpers.arguments(ARGS)
    name = helpers.path('util').split('/')[-1]
    fullPath = helpers.path('util')
    # Single source of truth for the "args:true" flag.
    wantsArgs = bool(argDict) and argDict.get('args') == 'true'
    titleString = "Action With Arguments" if wantsArgs else "Action"
    newFeature = helpers.user_input('''
-- New {} --
Please give your new action a name [Eg: OpenFile]: '''.format(titleString))
    newAction = helpers.user_input('''
What would you like to call the action? ''')
    basicSnippet = '''import messages as msg
# settings = helpers.get_settings()
def execute():
msg.example()
'''
    argSnippet = '''import messages as msg
import helpers
# settings = helpers.get_settings()
def execute(ARGS):
argDict = helpers.arguments(ARGS)
print(argDict)
'''
    finalPath = helpers.path('util') + '/' + newFeature + '.py'
    statusMsg = '''
NAME: {}
NEW MODULE: {}
NEW ACTION: {}
LOCATION: {}'''.format(name, newFeature, newAction, finalPath)
    print(statusMsg)
    template = argSnippet if wantsArgs else basicSnippet
    helpers.write_file(finalPath, template)
    data = helpers.read_file(fullPath + '/actions.py')
    data = data.replace('# new imports start here', '''import {}
# new imports start here'''.format(newFeature))
    basicContent = '''
elif action == "{newAction}":
{newFeature}.execute()
# new actions start here'''.format(newFeature=newFeature, newAction=newAction)
    argContent = '''
elif action == "{newAction}":
{newFeature}.execute(args)
# new actions start here'''.format(newFeature=newFeature, newAction=newAction)
    newContent = argContent if wantsArgs else basicContent
    data = data.replace("# new actions start here", newContent)
    helpers.write_file(fullPath + '/actions.py', data)
    actionData = json.loads(helpers.read_file(fullPath + '/action-list.json'))
    newItem = {}
    newItem['name'] = newAction
    newItem['description'] = ''
    actionData['actions'].append(newItem)
    helpers.write_file(fullPath + '/action-list.json', json.dumps(actionData, indent=4))
    msg.done()
def profile():
    """Create profiles/profile.py with an empty settings skeleton if missing."""
    import helpers
    import os
    utilDir = helpers.path('util')
    if not os.path.exists(utilDir + '/profiles/profile.py'):
        snippet = '''{\n\t"settings" : {\n\n\t\t}\n}'''
        helpers.run_command('mkdir {}/profiles'.format(utilDir), False)
        helpers.write_file(utilDir + '/profiles/profile.py', snippet)
        print("\nprofile added!\n")
    # BUG FIX: the original said `msg.done` (attribute access with no
    # call), so the done message never printed.
    msg.done()
def helpers():
    """Refresh this utility's helpers.py from the bacon template,
    preserving everything below the custom-helpers marker.

    (The function intentionally shadows the helpers module name at
    module scope; it re-imports it locally.)
    """
    import helpers
    # Locate the bacon template next to this utility's parent directory.
    baconHelpersFilepath = helpers.run_command_output('cd {} && cd ../ && pwd'.format(helpers.path('util'))).replace('\n', '') + '/bacon/template/helpers.py'
    utilityHelpersFilepath = '/{}/{}'.format(helpers.path('util'), 'helpers.py')
    # Keep only the custom section of the current file.
    currentContent = helpers.read_file(utilityHelpersFilepath)
    customSection = currentContent.split("# custom helpers start here\n# =========================")[1]
    # Prepend the fresh defaults and save.
    freshDefaults = helpers.read_file(baconHelpersFilepath)
    helpers.write_file(utilityHelpersFilepath, freshDefaults + customSection)
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
37,985,871
|
jaderigby/quick-git
|
refs/heads/master
|
/Remove.py
|
import messages as msg
import subprocess, helpers, re
# settings = helpers.get_settings()
def execute(ARGS):
    """`remove` action: delete selected branches locally and/or remotely.

    The context (local-only / remote-only / both) comes from
    helpers.check_context on the parsed ARGS.  The current branch is
    highlighted in the picker; removing it checks out master first.
    """
    def _confirm_and_remove(plural_msg, single_msg, names, remove_one):
        # Shared confirm-then-delete flow (was copy-pasted per context).
        print(plural_msg if len(names) > 1 else single_msg)
        if helpers.user_input("\nAre you sure? [y/n] ") == 'y':
            for branch in names:
                remove_one(branch)
    argDict = helpers.arguments(ARGS)
    # get list of local branches; filter out '*' and 'master'
    localBranchList = helpers.run_command_output('git branch', False).split()
    if '*' in localBranchList: localBranchList.remove('*')
    if 'master' in localBranchList: localBranchList.remove('master')
    # get current branch
    currentBranch = helpers.run_command_output('git branch --show-current', False).replace('\n', '')
    # Assign context
    context = helpers.check_context(argDict)
    # select branch/s to remove
    selections = helpers.user_selection_with_highlight("\nPlease select branch to remove/delete: ", localBranchList, True, currentBranch)
    if selections != 'exit':
        branchesString = '\n'
        branchesToRemoveList = []
        for branchItem in selections:
            branchName = localBranchList[branchItem - 1]
            branchesString += '- {}\n'.format(branchName)
            branchesToRemoveList.append(branchName)
        if context == 'removeLocalOnly':
            _confirm_and_remove(
                '\n\nRemoving local branches: \n{}'.format(helpers.decorate('yellow', branchesString)),
                '\n\nRemoving local branch: {}'.format(helpers.decorate('yellow', branchName)),
                branchesToRemoveList,
                lambda b: helpers.remove_local_branch(currentBranch, b))
        elif context == 'removeRemoteOnly':
            _confirm_and_remove(
                '\n\nRemoving remote branches: \n{}'.format(helpers.decorate('yellow', branchesString)),
                '\n\nRemoving remote branch: {}'.format(helpers.decorate('yellow', branchName)),
                branchesToRemoveList,
                helpers.remove_remote_branch)
        elif context == 'removeLocalAndRemote':
            def _remove_both(b):
                helpers.remove_local_branch(currentBranch, b)
                helpers.remove_remote_branch(b)
            _confirm_and_remove(
                '\n\nRemoving branches: \n{}'.format(helpers.decorate('yellow', branchesString)),
                '\n\nRemoving branch: {}'.format(helpers.decorate('yellow', branchName)),
                branchesToRemoveList,
                _remove_both)
    else:
        msg.exit()
    msg.done()
|
{"/Branch.py": ["/messages.py", "/helpers.py"], "/helpers.py": ["/settings.py"], "/Remove.py": ["/messages.py", "/helpers.py"], "/New.py": ["/messages.py", "/helpers.py"], "/Feature.py": ["/messages.py", "/helpers.py"], "/Diff.py": ["/messages.py", "/helpers.py"], "/actions.py": ["/messages.py", "/helpers.py", "/sizzle.py", "/commitAll.py", "/Feature.py", "/Remove.py", "/Branch.py", "/Diff.py", "/Restore.py", "/RestoreBack.py", "/DoStatus.py", "/Back.py", "/New.py"], "/DoStatus.py": ["/messages.py", "/helpers.py"], "/messages.py": ["/helpers.py"], "/RestoreBack.py": ["/messages.py", "/helpers.py"], "/commitAll.py": ["/messages.py", "/helpers.py"], "/Back.py": ["/messages.py", "/helpers.py"], "/Restore.py": ["/messages.py", "/helpers.py"], "/sizzle.py": ["/messages.py", "/helpers.py"]}
|
38,257,423
|
nahidhasan007/restapi
|
refs/heads/main
|
/blood_group/urls.py
|
from django.urls import path
from . import views
#from blood_group.views import AddDonerView
# URL namespace used by reverse(), e.g. reverse('blood_group:index').
app_name = 'blood_group'
urlpatterns = [
    path('',views.index,name='index'),  # landing page: list of locations
    path('<int:location_id>/home',views.home,name='home'),  # donors at one location
    path('search/',views.search,name='search'),  # donor search via ?give=<blood group>
    path('add_location/',views.add_location,name='add_location'),  # location creation form
    path('add_doners/',views.AddDonerView.as_view(),name='add_doners'),  # CreateView donor form
]
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,257,424
|
nahidhasan007/restapi
|
refs/heads/main
|
/api/serializers.py
|
from rest_framework import serializers
from blood_group.models import Location, Doner
class LocationSerializer(serializers.ModelSerializer):
    """Serialises every field of the Location model."""
    class Meta:
        model = Location
        fields = '__all__'
class DonerSerializer(serializers.ModelSerializer):
    """Serialises every field of the Doner model."""
    class Meta:
        model = Doner
        fields = '__all__'
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,257,425
|
nahidhasan007/restapi
|
refs/heads/main
|
/blood_group/views.py
|
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from django.urls import reverse_lazy
from . models import Location, Doner
from django.views.generic import CreateView
# Create your views here.
def index(request):
    """Landing page: render the list of every Location."""
    context = {'areas': Location.objects.all()}
    return render(request,'blood_group/index.html',context)
def add_location(request):
    """Location creation form.

    GET renders the form; POST saves the new Location and renders the
    index template.
    """
    if request.method=="POST":
        # objects.create builds and saves in one step (the original built
        # the instance, then called save() with a stray trailing semicolon).
        Location.objects.create(location_name=request.POST['location_name'])
        # NOTE(review): index.html is rendered here without the 'areas'
        # context that index() supplies, so the list may render empty
        # right after adding — a redirect to 'blood_group:index' is
        # probably intended.  Confirm before changing.
        return render(request,'blood_group/index.html')
    else:
        return render(request,'blood_group/add_location.html')
class AddDonerView(CreateView):
    """Form view for adding a Doner.

    Success redirect comes from Doner.get_absolute_url (the index page).
    """
    model = Doner
    template_name = 'blood_group/add_doners.html'
    fields = '__all__'
def home(request, location_id):
    """List every donor registered at one Location.

    Fix: uses get_object_or_404 (already imported at the top of this
    module) so an unknown location_id returns a 404 instead of an
    unhandled Location.DoesNotExist (HTTP 500).
    """
    loc = get_object_or_404(Location, pk=location_id)
    data = loc.doner_set.all()
    return render(request,'blood_group/home.html',{'data':data,'loc':loc})
def search(request):
    """Donor search by exact blood-group value taken from ?give=..."""
    wanted = request.GET.get('give')
    matches = Doner.objects.filter(donar_blood_group=wanted)
    return render(request,'blood_group/search.html',{'data':matches})
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,257,426
|
nahidhasan007/restapi
|
refs/heads/main
|
/api/urls.py
|
from django.contrib import admin
from django.urls import path
from .views import BloodAPIView
from .views import BloodCategoryAPIView
from .views import BloodDetailView
from .views import BloodNewView
urlpatterns = [
    path('',BloodCategoryAPIView.as_view()),  # list all donors
    path('blood/', BloodAPIView.as_view()),  # list all locations
    path('blood/<int:pk>/',BloodDetailView.as_view()),  # retrieve/update/delete one donor
    path('blood/new/',BloodNewView.as_view()),  # latest donor + create
]
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,257,427
|
nahidhasan007/restapi
|
refs/heads/main
|
/api/views.py
|
from rest_framework import generics,permissions
from blood_group.models import Location, Doner
# Create your views here.
from .serializers import LocationSerializer
from .serializers import DonerSerializer
class BloodAPIView(generics.ListAPIView):
    """Authenticated list of all Locations.

    NOTE(review): despite the name this serves Location records, not
    blood/donor records — confirm the intended naming.
    """
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Location.objects.all()
    serializer_class = LocationSerializer
class BloodCategoryAPIView(generics.ListAPIView):
    """Authenticated list of all Doners (mounted at the API root)."""
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Doner.objects.all()
    serializer_class = DonerSerializer
class BloodDetailView(generics.RetrieveUpdateDestroyAPIView):
    """Authenticated retrieve/update/delete for a single Doner by pk."""
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Doner.objects.all()
    serializer_class = DonerSerializer
class BloodNewView(generics.ListCreateAPIView):
    """Authenticated endpoint listing only the most recent Doner, plus create.

    NOTE(review): the queryset is sliced ([:1]); DRF's create() does not
    consult the queryset so POST still works, but any further filtering
    on a sliced queryset raises — confirm the slice is intentional.
    """
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Doner.objects.all().order_by('-id')[:1]
    serializer_class = DonerSerializer
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,257,428
|
nahidhasan007/restapi
|
refs/heads/main
|
/blood_group/models.py
|
import datetime
from django.db import models
from django.utils import timezone
from django.urls import reverse, reverse_lazy
# Create your models here.
class Location(models.Model):
    """A geographic area that donors are registered under."""
    # Display name, also used as the string representation.
    location_name = models.CharField(max_length=200)
    def __str__(self):
        return self.location_name
class Doner(models.Model):
    """A blood donor registered under one Location.

    NOTE(review): the 'Doner'/'donar' spellings are baked into query
    code (doner_set, donar_blood_group filters) and any applied
    migrations — renaming would require a migration, so they are
    documented rather than fixed here.
    """
    # Deleting a Location cascades to its donors.
    location = models.ForeignKey(Location,on_delete=models.CASCADE)
    donar_name = models.CharField(max_length=100)
    # Free-text blood group; the search view matches it exactly — confirm format.
    donar_blood_group = models.CharField(max_length=100)
    mobile_number = models.CharField(max_length=100)
    def __str__(self):
        return self.donar_name
    def get_absolute_url(self):
        # CreateView redirects here after a successful add.
        return reverse('blood_group:index')
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,257,429
|
nahidhasan007/restapi
|
refs/heads/main
|
/blood_group/admin.py
|
from django.contrib import admin
from .models import Location, Doner
# Register your models here.
# Expose both models in the Django admin with default ModelAdmin options.
admin.site.register(Location)
admin.site.register(Doner)
|
{"/todo/urls.py": ["/todo/views.py"], "/Article/api/urls.py": ["/Article/api/views.py"], "/api/serializers.py": ["/blood_group/models.py"], "/blood_group/views.py": ["/blood_group/models.py"], "/api/urls.py": ["/api/views.py"], "/api/views.py": ["/blood_group/models.py", "/api/serializers.py"], "/blood_group/admin.py": ["/blood_group/models.py"]}
|
38,278,629
|
ravi105362/projects
|
refs/heads/main
|
/project/upload_API/migrations/0001_initial.py
|
# Generated by Django 3.2.6 on 2021-08-24 10:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: creates the BlockItems table.

    Auto-generated by Django; do not edit an applied migration.
    NOTE(review): the model here is 'BlockItems' while models.py defines
    'BlockItem' — later migrations presumably reconcile this.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='BlockItems',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('total_blocks', models.PositiveIntegerField()),
                ('begin_row', models.PositiveIntegerField()),
                ('end_row', models.PositiveIntegerField()),
                ('begin_col', models.PositiveIntegerField()),
                ('end_col', models.PositiveIntegerField()),
            ],
        ),
    ]
|
{"/project/upload_API/views.py": ["/project/upload_API/models.py"], "/project/upload_API/admin.py": ["/project/upload_API/models.py"], "/project/upload_API/urls.py": ["/project/upload_API/views.py"]}
|
38,278,630
|
ravi105362/projects
|
refs/heads/main
|
/project/upload_API/models.py
|
from django.db import models
class BlockItem(models.Model):
    """One parsed text block extracted from an uploaded receipt.

    Row/column values are 1-based positions within the receipt text as
    computed by upload_API.views.project.post.
    """
    # 1-based index of the block within the receipt.
    current_block = models.PositiveIntegerField()
    begin_row = models.PositiveIntegerField()
    end_row = models.PositiveIntegerField()
    begin_col = models.PositiveIntegerField()
    end_col = models.PositiveIntegerField()
    # Raw block text; max_length=100 may truncate/reject longer blocks — confirm.
    data_string = models.CharField(max_length=100, blank=True)
|
{"/project/upload_API/views.py": ["/project/upload_API/models.py"], "/project/upload_API/admin.py": ["/project/upload_API/models.py"], "/project/upload_API/urls.py": ["/project/upload_API/views.py"]}
|
38,278,631
|
ravi105362/projects
|
refs/heads/main
|
/project/upload_API/views.py
|
from django.shortcuts import render
# Create your views here.
from django.views import View
from django.http import JsonResponse
from .models import BlockItem
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
import json
@method_decorator(csrf_exempt, name='dispatch')
class project(View):
    """CSRF-exempt CRUD endpoint for receipt blocks parsed from an upload."""

    def post(self, request):
        """
        POST method to parse a receipt and store the relevant data in DB
        """
        receipt_data = request.body.decode("utf-8")
        relevant_receipt_data = receipt_data.split("text/plain")
        # Data is now divided in blocks
        data_in_blocks = relevant_receipt_data[1].split("\r\n\r\n")
        block = 1
        row = 1
        col = 1
        last_row = 1
        # BUG FIX: cart_item was unbound (NameError -> 500) when no block
        # was stored; initialise it and report a 400 in that case.
        cart_item = None
        for each_block in data_in_blocks:
            # ignoring the irrelevant data
            if block > 8:
                break
            # Counting the number of new lines in each_block
            if each_block.count('\r\n') >= 1:
                row = row + each_block.count('\r\n')
            # checking if the block is empty then ignoring the further processing
            if len(each_block) == 0:
                continue
            # Removing the blocks created due to '-'
            if each_block.count('-') >= 2:
                each_block = each_block.split('--')
            # checking if the new block was created due to '-' or not
            if isinstance(each_block, list):
                if len(each_block[-1]) == 0:
                    continue
                to_be_pushed = each_block[-1]
                k = each_block[-1].split('\n')
                col_end = len(k[-1])
            elif isinstance(each_block, str):
                if len(each_block) == 0:
                    continue
                to_be_pushed = each_block
                k = each_block.split('\n')
                col_end = len(k[-1])
            # final block information to be added
            product_data = {
                'current_block': block,
                'begin_row': last_row,
                'end_row': row,
                'begin_col': col,
                'end_col': col_end,
                'data_string': to_be_pushed,
            }
            cart_item = BlockItem.objects.create(**product_data)
            # updating the block
            block = block + 1
            # updating row by 2 as each block was containing two new line items
            row = row + 2
            # keeping the last_row value to track the begin row for next block
            last_row = row
        if cart_item is None:
            return JsonResponse({"message": "No blocks found in upload"}, status=400)
        data = {
            "message": f"New item added to DB with id: {cart_item.id}"
        }
        return JsonResponse(data, status=201)

    def get(self, request):
        """
        GET method to get the relevant information from the DB
        """
        # count of the number of items in DB
        items_count = BlockItem.objects.count()
        # getting the actual items
        items = BlockItem.objects.all()
        items_data = []
        for item in items:
            items_data.append({
                'current_block': item.current_block,
                'begin_row': item.begin_row,
                'end_row': item.end_row,
                'begin_col': item.begin_col,
                'end_col': item.end_col,
                'data_string': item.data_string,
            })
        data = {
            'items': items_data,
            'count': items_count,
        }
        return JsonResponse(data)

    def delete(self, request):
        """
        Delete method to delete the items from DB
        """
        # BUG FIX: `item.delete() for item in items` was a SyntaxError
        # (a bare generator-comprehension fragment); a queryset bulk
        # delete removes everything in one statement.
        BlockItem.objects.all().delete()
        data = {
            'items': True,
        }
        return JsonResponse(data)
|
{"/project/upload_API/views.py": ["/project/upload_API/models.py"], "/project/upload_API/admin.py": ["/project/upload_API/models.py"], "/project/upload_API/urls.py": ["/project/upload_API/views.py"]}
|
38,278,632
|
ravi105362/projects
|
refs/heads/main
|
/project/upload_API/migrations/0003_blockitem_data_string.py
|
# Generated by Django 3.2.6 on 2021-08-27 09:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds BlockItem.data_string (raw block text, optional).

    Auto-generated by Django; do not edit an applied migration.
    """
    dependencies = [
        ('upload_API', '0002_auto_20210824_1100'),
    ]
    operations = [
        migrations.AddField(
            model_name='blockitem',
            name='data_string',
            field=models.CharField(blank=True, max_length=100),
        ),
    ]
|
{"/project/upload_API/views.py": ["/project/upload_API/models.py"], "/project/upload_API/admin.py": ["/project/upload_API/models.py"], "/project/upload_API/urls.py": ["/project/upload_API/views.py"]}
|
38,278,633
|
ravi105362/projects
|
refs/heads/main
|
/project/upload_API/admin.py
|
from django.contrib import admin
from .models import BlockItem
# Expose BlockItem in the Django admin with default ModelAdmin options.
admin.site.register(BlockItem)
|
{"/project/upload_API/views.py": ["/project/upload_API/models.py"], "/project/upload_API/admin.py": ["/project/upload_API/models.py"], "/project/upload_API/urls.py": ["/project/upload_API/views.py"]}
|
38,278,634
|
ravi105362/projects
|
refs/heads/main
|
/project/upload_API/urls.py
|
from django.urls import path
from .views import project
urlpatterns = [
    # Single endpoint: POST parses an upload, GET lists blocks, DELETE clears them.
    path('receipts/', project.as_view()),
]
|
{"/project/upload_API/views.py": ["/project/upload_API/models.py"], "/project/upload_API/admin.py": ["/project/upload_API/models.py"], "/project/upload_API/urls.py": ["/project/upload_API/views.py"]}
|
38,286,587
|
pchech/tgbot
|
refs/heads/dev
|
/filters.py
|
import telebot
import io
from utils import validate_stop,prepare_stop
from PIL import Image, ImageDraw
import random
class Filter():
    def __init__(self,bot):
        # Telebot instance used for all messaging in this filter flow.
        self.bot=bot
def choose_filter(self,message):
markup = telebot.types.ReplyKeyboardMarkup(one_time_keyboard = True,resize_keyboard=True)
itembtn1 = telebot.types.KeyboardButton('Черно-белое')
itembtn2 = telebot.types.KeyboardButton('Сепия')
itembtn3 = telebot.types.KeyboardButton('Негатив')
itembtn4 = telebot.types.KeyboardButton('Изменить яркость')
itembtn5 = telebot.types.KeyboardButton('Добавить шум')
itembtn6 = telebot.types.KeyboardButton('Остановить')
markup.row(itembtn1, itembtn2,itembtn3)
markup.row(itembtn4, itembtn5)
markup.row(itembtn6)
msg=self.bot.send_message(message.chat.id,'Выберите фильтр', reply_markup = markup)
self.bot.register_next_step_handler(msg, self.welcome)
def welcome(self,message):
if validate_stop(message,self.bot):
return
global fil
fil=message.text
markup_cancel = prepare_stop(message)
if fil == 'Сепия':
msg=self.bot.send_message(message.chat.id,'Укажите глубину', reply_markup=markup_cancel)
self.bot.register_next_step_handler(msg, self.add_parameters)
elif fil in ('Изменить яркость','Добавить шум'):
msg=self.bot.send_message(message.chat.id,'Укажите параметр', reply_markup=markup_cancel)
self.bot.register_next_step_handler(msg, self.add_parameters)
elif fil in ('Черно-белое','Негатив'):
msg=self.bot.send_message(message.chat.id,'Отправьте изображение', reply_markup=markup_cancel)
self.bot.register_next_step_handler(msg, self.make_filter)
else:
msg=self.bot.send_message(message.chat.id,'Неверный фильтр')
self.bot.register_next_step_handler(msg, self.welcome)
return
def add_parameters(self,message):
if validate_stop(message,self.bot):
return
global parameters
try:
parameters=int(message.text)
if fil == 'Добавить шум':
parameters=abs(parameters)
msg=self.bot.send_message(message.chat.id,'Отправьте изображение')
self.bot.register_next_step_handler(msg, self.make_filter)
except ValueError:
msg=self.bot.send_message(message.chat.id,'Параметр должен быть числовым')
self.bot.register_next_step_handler(msg, self.add_parameters)
return
def make_filter(self,message):
if validate_stop(message,self.bot):
return
markup = telebot.types.ReplyKeyboardRemove(selective=False)
if message.photo is None:
msg=self.bot.send_message(message.chat.id,'Не изображение')
self.bot.register_next_step_handler(msg, self.make_filter)
return
else:
photo = message.photo[-1].file_id
file = self.bot.get_file(photo)
if file.file_size > 10485760:
msg=self.bot.send_message(message.chat.id,'Файл не должен быть больше 10 МБ')
self.bot.register_next_step_handler(msg, self.make_filter)
else:
downloaded_file = self.bot.download_file(file.file_path)
image_file = io.BytesIO(downloaded_file)
if fil in ('Изменить яркость','Добавить шум','Сепия'):
img=self.filter_choice(image_file,parameters)
else:
img=self.filter_choice(image_file,None)
imgByteArr = io.BytesIO()
img.save(imgByteArr,format = 'PNG')
imgByteArr = imgByteArr.getvalue()
self.bot.send_photo(message.chat.id, imgByteArr,reply_markup = markup)
def filter_choice(self,image_file,parameters):
if fil == 'Черно-белое':
img=self.black_white_filter(image_file)
elif fil == 'Сепия':
img=self.sepia(image_file,parameters)
elif fil == 'Негатив':
img=self.negative(image_file)
elif fil == 'Изменить яркость':
img=self.brightnessChange(image_file,parameters)
elif fil == 'Добавить шум':
img=self.add_noise(image_file,parameters)
return img
######ФИЛЬТРЫ
def black_white_filter(self,dir):
image = Image.open(dir) #Открываем изображениеH.
draw = ImageDraw.Draw(image) #Создаем инструмент для рисования.
width = image.size[0] #Определяем ширину.
height = image.size[1] #Определяем высоту.
pix = image.load() #Выгружаем значения пикселей.
factor = 1
for i in range(width):
for j in range(height):
a = pix[i, j][0]
b = pix[i, j][1]
c = pix[i, j][2]
S = a + b + c
if (S > (((255 + factor) // 2) * 3)):
a, b, c = 255, 255, 255
else:
a, b, c = 0, 0, 0
draw.point((i, j), (a, b, c))
return image
def sepia(self,dir,parameters):
image = Image.open(dir) # Открываем изображениеH.
draw = ImageDraw.Draw(image) #Создаем инструмент для рисования.
width = image.size[0] #Определяем ширину.
height = image.size[1] #Определяем высоту.
pix = image.load() #Выгружаем значения пикселей.
depth = parameters
for i in range(width):
for j in range(height):
a = pix[i, j][0]
b = pix[i, j][1]
c = pix[i, j][2]
S = (a + b + c) // 3
a = S + depth * 2
b = S + depth
c = S
if (a > 255):
a = 255
if (b > 255):
b = 255
if (c > 255):
c = 255
draw.point((i, j), (a, b, c))
return image
def negative(self,dir):
image = Image.open(dir) # Открываем изображениеH.
draw = ImageDraw.Draw(image) #Создаем инструмент для рисования.
width = image.size[0] #Определяем ширину.
height = image.size[1] #Определяем высоту.
pix = image.load() #Выгружаем значения пикселей.
for i in range(width):
for j in range(height):
a = pix[i, j][0]
b = pix[i, j][1]
c = pix[i, j][2]
draw.point((i, j), (255 - a, 255 - b, 255 - c))
return image
def brightnessChange(self,dir,parameters):
image = Image.open(dir) # Открываем изображениеH.
draw = ImageDraw.Draw(image) #Создаем инструмент для рисования.
width = image.size[0] #Определяем ширину.
height = image.size[1] #Определяем высоту.
pix = image.load() #Выгружаем значения пикселей.
factor = parameters
for i in range(width):
for j in range(height):
a = pix[i, j][0] + factor
b = pix[i, j][1] + factor
c = pix[i, j][2] + factor
if (a < 0):
a = 0
if (b < 0):
b = 0
if (c < 0):
c = 0
if (a > 255):
a = 255
if (b > 255):
b = 255
if (c > 255):
c = 255
draw.point((i, j), (a, b, c))
return image
def add_noise(self,dir,parameters):
image = Image.open(dir) # Открываем изображениеH.
draw = ImageDraw.Draw(image) #Создаем инструмент для рисования.
width = image.size[0] #Определяем ширину.
height = image.size[1] #Определяем высоту.
pix = image.load() #Выгружаем значения пикселей.
factor = parameters
for i in range(width):
for j in range(height):
rand = random.randint(-factor, factor)
a = pix[i, j][0] + rand
b = pix[i, j][1] + rand
c = pix[i, j][2] + rand
if (a < 0):
a = 0
if (b < 0):
b = 0
if (c < 0):
c = 0
if (a > 255):
a = 255
if (b > 255):
b = 255
if (c > 255):
c = 255
draw.point((i, j), (a, b, c))
return image
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,588
|
pchech/tgbot
|
refs/heads/dev
|
/colorization.py
|
import telebot
import os
from utils import validate_stop,prepare_stop
import Algorithmia
class Colorizer:
    """Telebot conversation flow that colorizes a photo via the Algorithmia
    ColorfulImageColorization service."""

    def __init__(self, bot, api_key, collection_name):
        """Store the bot and create the Algorithmia client.

        :param bot:             shared telebot.TeleBot instance
        :param api_key:         Algorithmia API key
        :param collection_name: Algorithmia data-collection used as scratch space
        """
        self.bot = bot
        self.client = Algorithmia.client(api_key)
        self.collection_name = collection_name

    def ask_for_image(self, message):
        """Prompt the user for an image and register the next-step handler."""
        markup_cancel = prepare_stop(message)
        msg = self.bot.send_message(message.chat.id, 'Отправьте изображение', reply_markup=markup_cancel)
        self.bot.register_next_step_handler(msg, self.colorize)

    def colorize(self, message):
        """Validate the incoming photo, download it, and reply with the colorized result."""
        if validate_stop(message, self.bot):
            return
        if message.photo is None:
            msg = self.bot.send_message(message.chat.id, 'Не изображение')
            self.bot.register_next_step_handler(msg, self.colorize)
            return
        markup = telebot.types.ReplyKeyboardRemove(selective=False)
        photo = message.photo[-1].file_id  # highest-resolution variant
        file = self.bot.get_file(photo)
        if file.file_size > 10485760:  # 10 MB upload limit
            msg = self.bot.send_message(message.chat.id, 'Файл не должен быть больше 10 МБ')
            # BUG FIX: the original registered the bare name `colorize`, which is
            # undefined at module scope and raised NameError when a too-large
            # file was received; the bound method is `self.colorize`.
            self.bot.register_next_step_handler(msg, self.colorize)
        else:
            downloaded_file = self.bot.download_file(file.file_path)
            img = self.action(downloaded_file)
            self.bot.send_photo(message.chat.id, img, reply_markup=markup)

    def action(self, data):
        """Upload the image bytes to Algorithmia, run colorization, return result bytes."""
        self.client.file("data://.my/"+self.collection_name+"/testimg.png").put(data)
        input = {
            "image": "data://.my/"+self.collection_name+"/testimg.png"
        }
        algo = self.client.algo('deeplearning/ColorfulImageColorization/1.1.13')
        out = algo.pipe(input).result
        t800Bytes = self.client.file(out['output']).getBytes()
        return t800Bytes
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,589
|
pchech/tgbot
|
refs/heads/dev
|
/modules.py
|
from PIL import Image, ImageDraw #Подключим необходимые библиотеки.
import random
import Algorithmia
import posixpath
from skimage.io import imread
from skimage import img_as_float
from sklearn.cluster import KMeans
import io
import numpy as np
import scipy.misc
class Filter:
    """Pure per-pixel image filters built on PIL (no bot state involved)."""

    def black_white_filter(self, dir):
        """Binarize the image: pixels brighter than the mid threshold become white."""
        image = Image.open(dir)
        draw = ImageDraw.Draw(image)
        width, height = image.size
        pix = image.load()
        factor = 1
        threshold = ((255 + factor) // 2) * 3  # channel-sum cutoff
        for x in range(width):
            for y in range(height):
                total = pix[x, y][0] + pix[x, y][1] + pix[x, y][2]
                shade = 255 if total > threshold else 0
                draw.point((x, y), (shade, shade, shade))
        return image

    def sepia(self, dir, parameters):
        """Apply a sepia tone; `parameters` controls the depth of the effect."""
        image = Image.open(dir)
        draw = ImageDraw.Draw(image)
        width, height = image.size
        pix = image.load()
        depth = parameters
        for x in range(width):
            for y in range(height):
                gray = (pix[x, y][0] + pix[x, y][1] + pix[x, y][2]) // 3
                red = min(gray + depth * 2, 255)
                green = min(gray + depth, 255)
                blue = min(gray, 255)
                draw.point((x, y), (red, green, blue))
        return image

    def negative(self, dir):
        """Invert every channel of every pixel."""
        image = Image.open(dir)
        draw = ImageDraw.Draw(image)
        width, height = image.size
        pix = image.load()
        for x in range(width):
            for y in range(height):
                r, g, b = pix[x, y][0], pix[x, y][1], pix[x, y][2]
                draw.point((x, y), (255 - r, 255 - g, 255 - b))
        return image

    def brightnessChange(self, dir, parameters):
        """Shift all channels by `parameters`, clamping each to [0, 255]."""
        image = Image.open(dir)
        draw = ImageDraw.Draw(image)
        width, height = image.size
        pix = image.load()
        shift = parameters
        for x in range(width):
            for y in range(height):
                shifted = tuple(
                    max(0, min(255, pix[x, y][k] + shift)) for k in range(3)
                )
                draw.point((x, y), shifted)
        return image

    def add_noise(self, dir, parameters):
        """Add uniform random noise of amplitude `parameters`; one offset is
        drawn per pixel and applied to all three channels."""
        image = Image.open(dir)
        draw = ImageDraw.Draw(image)
        width, height = image.size
        pix = image.load()
        amplitude = parameters
        for x in range(width):
            for y in range(height):
                offset = random.randint(-amplitude, amplitude)
                noisy = tuple(
                    max(0, min(255, pix[x, y][k] + offset)) for k in range(3)
                )
                draw.point((x, y), noisy)
        return image
class Colorizer:
    """Thin wrapper around the Algorithmia image-colorization algorithm."""

    def __init__(self, api_key, collection_name):
        """Create the Algorithmia client for the given key and scratch collection."""
        self.client = Algorithmia.client(api_key)
        self.collection_name = collection_name

    def action(self, data):
        """Upload `data`, run the colorization algorithm, return the result bytes."""
        remote_path = "data://.my/" + self.collection_name + "/testimg.png"
        self.client.file(remote_path).put(data)
        algo = self.client.algo('deeplearning/ColorfulImageColorization/1.1.13')
        out = algo.pipe({"image": remote_path}).result
        return self.client.file(out['output']).getBytes()
def find_color(i, res, X):
    """Return the mean [R, G, B] of all points sharing point i's cluster label.

    BUG FIX: the original read `res` and `X` from names that were never defined
    at module scope (they are locals of change_color), so every call raised
    NameError. They are now explicit parameters.

    :param i:   index of the reference point
    :param res: per-point cluster labels (sequence or 1-D array)
    :param X:   sequence of (R, G, B) points, same length as `res`
    :return:    [mean_R, mean_G, mean_B] of the points in point i's cluster
    """
    cluster = res[i]
    members = [point for label, point in zip(res, X) if label == cluster]
    r_total = sum(p[0] for p in members)
    g_total = sum(p[1] for p in members)
    b_total = sum(p[2] for p in members)
    n = len(members)
    return [r_total / n, g_total / n, b_total / n]
def change_color(img, n_color):
    """Quantize an image down to `n_color` colors with KMeans; return JPEG bytes.

    :param img:     path or file object readable by skimage.io.imread
    :param n_color: number of clusters (colors) to keep
    :return:        JPEG-encoded bytes of the quantized image
    """
    image = img_as_float(imread(img))  # float pixels in [0, 1]
    X = image.reshape(image.shape[0] * image.shape[1], 3)
    clt = KMeans(random_state=241, init='k-means++', n_clusters=n_color)
    clt.fit(X)
    res = clt.predict(X)
    centre = clt.cluster_centers_
    # Replace every pixel by its cluster centre.
    new_X = [centre[label] for label in res]
    new_image = np.array(new_X).reshape(image.shape[0], image.shape[1], 3)
    # BUG FIX: scipy.misc.toimage was removed in SciPy >= 1.3 and this call
    # crashed on modern installs; build the PIL image directly from the float
    # array (img_as_float guarantees values in [0, 1], clip defensively).
    n_img = Image.fromarray((np.clip(new_image, 0.0, 1.0) * 255).astype(np.uint8))
    buf = io.BytesIO()
    n_img.save(buf, format='JPEG')
    return buf.getvalue()
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,590
|
pchech/tgbot
|
refs/heads/dev
|
/utils.py
|
import telebot
def prepare_stop(message):
    """Build a one-row reply keyboard containing only the stop button."""
    keyboard = telebot.types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
    keyboard.row(telebot.types.KeyboardButton('Остановить'))
    return keyboard
def validate_stop(message, bot):
    """Return True (and notify the user) if the message is the stop command.

    FIX: the original fell through to an implicit None on the non-stop path;
    an explicit False keeps the boolean contract obvious for callers that do
    `if validate_stop(...)`. Truthiness is unchanged.

    :param message: incoming telebot message (only `.text` / `.chat.id` are read)
    :param bot:     bot used to confirm the stop (only touched on the stop path)
    """
    if message.text == 'Остановить':
        markup = telebot.types.ReplyKeyboardRemove(selective=False)
        bot.send_message(message.chat.id, 'Процесс остановлен', reply_markup=markup)
        return True
    return False
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,591
|
pchech/tgbot
|
refs/heads/dev
|
/mbotF.py
|
import telebot
from flask import Flask, request
import os
from mtg import MtgFinder
from utils import validate_stop
from filters import Filter
from colorization import Colorizer
from clusterization import Cluster
# Bot token comes from the environment; Flask serves the Telegram webhook.
token = os.environ.get('TOKEN')
server = Flask(__name__)
#WEBHOOK_HOST = 'cryptic-citadel-53949.herokuapp.com'
WEBHOOK_HOST = 'testmtgbot.herokuapp.com'
WEBHOOK_PORT = 8443#8443 # 443, 80, 88 or 8443 (the port must be open!)
WEBHOOK_LISTEN = '0.0.0.0' # On some servers you may have to use the same IP as above
WEBHOOK_SSL_CERT = './webhook_cert.pem' # Path to the certificate
WEBHOOK_SSL_PRIV = './webhook_pkey.pem' # Path to the private key
WEBHOOK_URL_BASE = "https://%s:%s" % (WEBHOOK_HOST, WEBHOOK_PORT)
WEBHOOK_URL_PATH = "/%s/" % (token)
bot = telebot.TeleBot(token)
# Help text shown on /start and in normal mode (user-facing, kept in Russian).
welcome_message="""Боту можно отправить название карты ( на русском или английском)
Бот пришлет изображения, если совпадений не больше 2
В противном случае бот пришлет список карт
Для указания произвольных символов используйте %
Команда mtgadvancemode позволяет осуществлять поиск карт, используя выбранные фильтры
Доступные фильтры:
Color (R,G,B,U,W,N)
Type
Edition (rna,grn,m19,dom,rix,xln,emn,bfz и т.д.)
Rarity (common, uncommon, rare,mythic)"""
# Feature singletons, all wired to the single bot instance.
mtgfinder = MtgFinder(bot)
filt=Filter(bot)
coloriz=Colorizer(bot,os.environ.get('ALGO_KEY'),'MyCollection')
cluster=Cluster(bot)
# FIX: the original defined `change_mod_process` and `ask_for_image_clust`
# twice each. telebot registers the function object at decoration time, so the
# handlers still worked, but the duplicate names shadowed each other at module
# level; every handler now has a unique, descriptive name. Behavior unchanged.

@bot.message_handler(commands=['mtgmode'])
def switch_to_mtg_mode(message):
    """Switch the bot into plain MTG search mode."""
    mtgfinder.change_to_mtg(message)

@bot.message_handler(commands=['mtgadvancemode'])
def switch_to_advance_mode(message):
    """Switch the bot into filtered (advanced) MTG search mode."""
    mtgfinder.change_to_advance(message)

@bot.message_handler(commands=['filter'])
def apply_filter(message):
    """Start the image-filter conversation."""
    filt.choose_filter(message)

@bot.message_handler(commands=['colorize'])
def colorize_photo(message):
    """Start the photo-colorization conversation."""
    coloriz.ask_for_image(message)

@bot.message_handler(commands=['change_color'])
def change_color_photo(message):
    """Start the color-quantization (clustering) conversation."""
    cluster.ask_for_image_clust(message)

@bot.message_handler(func=mtgfinder.is_normal, content_types=["text"])
def show_welcome(message):
    """In normal mode, any text message shows the help text."""
    if validate_stop(message, bot):
        return
    bot.send_message(message.chat.id, welcome_message)

@bot.message_handler(commands=['start'])
def show_start_help(message):
    """/start shows the same help text."""
    if validate_stop(message, bot):
        return
    bot.send_message(message.chat.id, welcome_message)

@bot.message_handler(func=mtgfinder.is_mtg, content_types=["text"])
def mtg_search(message):
    """In MTG mode, treat any text as a card-name query."""
    mtgfinder.card_search(message)

@bot.callback_query_handler(func=lambda call: True)
def callback_inline(call):
    """Forward inline-keyboard callbacks (result paging) to the finder."""
    mtgfinder.callback_inline(call)

@server.route('/' + token, methods=['POST'])
def getMessage():
    """Telegram webhook endpoint: feed raw updates to the bot."""
    bot.process_new_updates([telebot.types.Update.de_json(request.stream.read().decode("utf-8"))])
    return "!", 200

@server.route("/")
def webhook():
    """Re-arm the Telegram webhook when the root URL is hit."""
    bot.remove_webhook()
    bot.set_webhook(url='https://testmtgbot.herokuapp.com/' + token)
    return "!", 200

if __name__ == "__main__":
    server.run(host=WEBHOOK_LISTEN, port=int(os.environ.get('PORT', 5000)))
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,592
|
pchech/tgbot
|
refs/heads/dev
|
/clusterization.py
|
import telebot
import io
import numpy as np
from utils import prepare_stop, validate_stop
from skimage.io import imread
from skimage import img_as_float
from sklearn.cluster import KMeans
import scipy.misc
class Cluster:
    """Telebot conversation flow that reduces a photo's palette with KMeans."""

    def __init__(self, bot):
        # bot: the shared telebot.TeleBot instance used for all replies.
        self.bot = bot

    def ask_for_image_clust(self, message):
        """Entry point: ask how many colors to keep."""
        markup_cancel = prepare_stop(message)
        msg = self.bot.send_message(message.chat.id, 'Выберите число цветов (не более 10)', reply_markup=markup_cancel)
        self.bot.register_next_step_handler(msg, self.ask_for_color)

    def ask_for_color(self, message):
        """Parse the color count (1..10), then ask for the image."""
        if validate_stop(message, self.bot):
            return
        try:
            # NOTE(review): module-global state is shared across chats — two
            # concurrent users overwrite each other's color count.
            global parameters
            parameters = int(message.text)
            if parameters > 10:
                msg = self.bot.send_message(message.chat.id, 'Не больше 10 и не менее 1 цветов')
                self.bot.register_next_step_handler(msg, self.ask_for_color)
                return
            msg = self.bot.send_message(message.chat.id, 'Отправьте изображение')
            self.bot.register_next_step_handler(msg, self.clusterization)
        except ValueError:
            msg = self.bot.send_message(message.chat.id, 'Параметр должен быть числовым')
            self.bot.register_next_step_handler(msg, self.ask_for_color)
            return

    def clusterization(self, message):
        """Validate the photo, quantize it, and send the result back."""
        if validate_stop(message, self.bot):
            return
        if message.photo is None:
            msg = self.bot.send_message(message.chat.id, 'Не изображение')
            # BUG FIX: the original re-registered `self.colorize`, a method that
            # does not exist on Cluster (copy-paste from Colorizer) and raised
            # AttributeError when the retry fired; retry this handler instead.
            self.bot.register_next_step_handler(msg, self.clusterization)
            return
        markup = telebot.types.ReplyKeyboardRemove(selective=False)
        photo = message.photo[-1].file_id  # highest-resolution variant
        file = self.bot.get_file(photo)
        if file.file_size > 10485760:  # 10 MB upload limit
            msg = self.bot.send_message(message.chat.id, 'Файл не должен быть больше 10 МБ')
            self.bot.register_next_step_handler(msg, self.clusterization)
        else:
            downloaded_file = self.bot.download_file(file.file_path)
            image_file = io.BytesIO(downloaded_file)
            img = self.change_color(image_file, parameters)
            self.bot.send_photo(message.chat.id, img, reply_markup=markup)

    def change_color(self, img, n_color):
        """Quantize the image to `n_color` colors; return JPEG bytes."""
        image = img_as_float(imread(img))
        X = image.reshape(image.shape[0] * image.shape[1], 3)
        clt = KMeans(random_state=241, init='k-means++', n_clusters=n_color)
        clt.fit(X)
        res = clt.predict(X)
        centre = clt.cluster_centers_
        new_X = [centre[label] for label in res]
        new_image = np.array(new_X).reshape(image.shape[0], image.shape[1], 3)
        # NOTE(review): scipy.misc.toimage was removed in SciPy >= 1.3; this
        # call fails on modern SciPy and should be ported to PIL.Image.fromarray
        # (would require importing PIL in this module).
        n_img = scipy.misc.toimage(new_image)
        imgByteArr = io.BytesIO()
        n_img.save(imgByteArr, format='JPEG')
        return imgByteArr.getvalue()
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,593
|
pchech/tgbot
|
refs/heads/dev
|
/mtg_price_collect.py
|
import json
import psycopg2
import requests
import os
from apscheduler.schedulers.blocking import BlockingScheduler
sched = BlockingScheduler()

@sched.scheduled_job('cron', day_of_week='mon-sun', hour=12, minute=55)
def scheduled_job():
    """Daily job: refresh the USD price of every English card via the Scryfall API."""
    DATABASE_URL = os.environ['DATABASE_URL']
    # FIX: initialize to None so the finally-block cannot raise NameError when
    # psycopg2.connect itself fails (the original connected before the try).
    conn = None
    cursor = None
    url = 'https://api.scryfall.com/cards/{}'
    usd_list = []
    try:
        conn = psycopg2.connect(DATABASE_URL, sslmode='require')
        cursor = conn.cursor()
        select = "select distinct id from mtg.card_export where lang = 'en'"
        cursor.execute(select)
        for (card_id,) in cursor.fetchall():
            rsp = json.loads(requests.get(url.format(card_id)).text)
            # Cards without a listed price default to 0.
            usd_list.append({'id': card_id, 'usd': rsp.get('usd', 0)})
        print('Start update')
        for item in usd_list:
            # FIX: parameterized query instead of string .format() — avoids SQL
            # injection / quoting bugs via externally-sourced values.
            cursor.execute(
                "update mtg.card_price set usd = %s where id = %s",
                (item['usd'], item['id']),
            )
            conn.commit()  # commit per row, as before
    finally:
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()

sched.start()
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,286,594
|
pchech/tgbot
|
refs/heads/dev
|
/mtg.py
|
import requests
import json
import psycopg2
import telebot
import os
from bs4 import BeautifulSoup
class MtgFinder:
session = {}
rez = {}
c_types=['c','t','o','m','cmc','mana','is','r','e','in','f',
'color','type','oracle','edition','format','cmc','rarity']
map={'color':'c1.color',
'type':'c1.printed_type',
'oracle':'o',
'edition':'s.set_id',
'format':'f',
'cmc':'cmc',
'rarity':'c1.rarity'}
change=1
set_list = []
select = """select string_agg(nat_name,'||' order by nat_name) card_name,color,set_id,usd
from
(select string_agg(c1.printed_name,' // ' order by c1.name) nat_name,c1.color,c3.image,string_agg(c1.name,' // ' order by c1.name) en_name, s.set_id,cp.usd
from mtg.card_export c1, mtg.card_export c3,mtg.card_price cp, mtg.set s
where 1=1
and c1.name = c3.name
and c3.lang = 'en'
and c1.set_id = c3.set_id
and c3.id=cp.id
and cast(c1.set_id as integer) = s.id
and c1.set_id = (select max(c4.set_id)
from mtg.card_export c4
where c4.name = c1.name)
{}
group by c1.id,c1.color,c3.image, s.set_id,cp.usd
order by c1.color,cp.usd) v
group by en_name, color, set_id,usd
order by color, card_name"""
params={'q':''}
mtg_records = {}
temp_flag = {}
type_flag = {}
def __init__(self,bot):
self.bot=bot
try:
conn=psycopg2.connect(user = os.environ.get('DB_USER'),
password = os.environ.get('DB_PASS'),
host = os.environ.get('DB_HOST'),
port = os.environ.get('DB_PORT'),
database=os.environ.get('DB_NAME'))
cursor = conn.cursor()
select_Query = """select set_name from mtg.set"""
cursor.execute(select_Query)
self.set_list = [set[0] for set in cursor.fetchall()]
print(self.set_list)
finally:
if (conn):
cursor.close()
conn.close()
def clear_param(self,chat_id):
#self.params={'q':''}
self.temp_flag[chat_id] = 0
self.session[chat_id] = self.select
self.bot.send_message(chat_id, 'Complete',reply_markup = self.prepare_cancel_keyboard())
def add_param(self,chat_id,param):
self.session[chat_id] = self.session[chat_id].format('and lower('+self.get_map(param)+') like {}')
#self.select = self.select.format('and lower('+self.get_map(param)+') like {}')
#self.params['q']=self.params['q']+self.get_map(param)+':'
def add_params_value(self,chat_id,value):
#if ' ' in value:
# value='"'+value+'"'
self.session[chat_id] = self.session[chat_id].format("'%"+value.lower()+"%' {}")
#self.select = self.select.format("'%"+value.lower()+"%' {}")
#self.params['q']=self.params['q']+value+' '
def get_map(self,type):
return self.map[type.lower()]
def prepare_keyboard(self):
markup = telebot.types.ReplyKeyboardMarkup(resize_keyboard=True)
itembtn1 = telebot.types.KeyboardButton('Color')
itembtn2 = telebot.types.KeyboardButton('Type')
#itembtn3 = telebot.types.KeyboardButton('Oracle')
itembtn4 = telebot.types.KeyboardButton('Edition')
itembtn5 = telebot.types.KeyboardButton('Rarity')
#itembtn6 = telebot.types.KeyboardButton('Cmc')
itembtn6 = telebot.types.KeyboardButton('Finish')
markup.row(itembtn1, itembtn2)
markup.row(itembtn4,itembtn5)
markup.row(itembtn6)
return markup
def prepare_color_keyboard(self):
markup = telebot.types.ReplyKeyboardMarkup(resize_keyboard=True)
itembtn1 = telebot.types.KeyboardButton('R')
itembtn2 = telebot.types.KeyboardButton('G')
itembtn3 = telebot.types.KeyboardButton('U')
itembtn4 = telebot.types.KeyboardButton('B')
itembtn5 = telebot.types.KeyboardButton('W')
itembtn6 = telebot.types.KeyboardButton('N')
markup.row(itembtn1, itembtn2, itembtn3)
markup.row(itembtn4, itembtn5, itembtn6)
return markup
def prepare_rarity_keyboard(self):
markup = telebot.types.ReplyKeyboardMarkup(resize_keyboard=True)
itembtn1 = telebot.types.KeyboardButton('Common')
itembtn2 = telebot.types.KeyboardButton('Uncommon')
itembtn3 = telebot.types.KeyboardButton('Rare')
itembtn4 = telebot.types.KeyboardButton('Mythic')
markup.row(itembtn1, itembtn2)
markup.row(itembtn3, itembtn4)
return markup
def prepare_cancel_keyboard(self):
markup = telebot.types.ReplyKeyboardRemove(selective=False)
return markup
#Смена режима работы бота
def change_to_mtg(self,message):
if self.change != 1:
self.change = 1
self.bot.send_message(message.chat.id, 'Включен MTG режим')
self.type_flag[message.chat.id] = False
def change_to_advance(self,message):
self.session[message.chat.id] = self.select
self.temp_flag[message.chat.id] = 0
msg=self.bot.send_message(message.chat.id, 'Включен MTG Advance режим')
msg=self.bot.send_message(message.chat.id, 'Выберите фильтр',reply_markup = self.prepare_keyboard())
self.bot.register_next_step_handler(msg, self.advance_search)
# def change_to_normal(self,message):
#if self.change != 0:
# self.change = 0
# self.bot.send_message(message.chat.id, 'Включен обычный режим')
#Проверка на валидность типа
def validate_type(self,type):
if type.lower() in self.c_types:
return True
else:
return False
#Бот в обычном режиме?
def is_normal(self,message):
return self.change == 0
#Бот в MTG режиме?
def is_mtg(self,message):
return self.change == 1
def is_mtg_advanced(self,message):
return self.change == 2
#Поиск карты по названию
def advance_search(self,message):
if message.text.lower() == 'finish':
if self.temp_flag[message.chat.id]!=0:
self.session[message.chat.id] = self.session[message.chat.id].format('')
self.card_search_advance(message)
self.clear_param(message.chat.id)
else:
msg=self.bot.send_message(message.chat.id, 'Запрос без фильтров временно запрещен',reply_markup = self.prepare_keyboard())
self.bot.register_next_step_handler(msg, self.advance_search)
else:
if self.validate_type(message.text) is False:
msg=self.bot.send_message(message.chat.id, 'Неправильный фильтр', reply_markup = self.prepare_keyboard())
self.bot.register_next_step_handler(msg, self.advance_search)
else:
self.temp_flag[message.chat.id]=1
self.add_param(message.chat.id,message.text)
if message.text.lower() == 'color':
msg=self.bot.send_message(message.chat.id, 'Введите значение', reply_markup = self.prepare_color_keyboard())
elif message.text.lower() == 'rarity':
msg=self.bot.send_message(message.chat.id, 'Введите значение', reply_markup = self.prepare_rarity_keyboard())
else:
msg=self.bot.send_message(message.chat.id, 'Введите значение', reply_markup = self.prepare_cancel_keyboard())
self.bot.register_next_step_handler(msg, self.cardd_search)
def cardd_search(self,message):
self.add_params_value(message.chat.id,message.text)
msg=self.bot.send_message(message.chat.id, 'Продолжим?', reply_markup = self.prepare_keyboard())
self.bot.register_next_step_handler(msg, self.advance_search)
def card_search_advance(self,message):
self.type_flag[message.chat.id] = True
try:
conn=psycopg2.connect(user = os.environ.get('DB_USER'),
password = os.environ.get('DB_PASS'),
host = os.environ.get('DB_HOST'),
port = os.environ.get('DB_PORT'),
database=os.environ.get('DB_NAME'))
cursor = conn.cursor()
cursor.execute(self.session[message.chat.id])
self.mtg_records[message.chat.id] = cursor.fetchall()
if cursor.rowcount == 0:
self.bot.send_message(message.chat.id,'Не найдено')
else:
self.print_card_list(message)
finally:
if (conn):
cursor.close()
conn.close()
# self.params['include_multilingual']=True
# url='https://api.scryfall.com/cards/search'
# rsp=requests.get(url=url,params=self.params)
#self.bot.send_message(message.chat.id,rsp.url)
# rsp=json.loads(rsp.text)
# rez=''
# try:
# card_list=rsp['data']
# for card in card_list:
# try:
# rez+=card['name']+'\t'+card['usd']+'\n'
# except KeyError:
# pass
# self.bot.send_message(message.chat.id,rez)
# except KeyError:
# self.bot.send_message(message.chat.id,'Неправильный запрос')
def card_search(self,message):
self.type_flag[message.chat.id] = False
try:
conn=psycopg2.connect(user = os.environ.get('DB_USER'),
password = os.environ.get('DB_PASS'),
host = os.environ.get('DB_HOST'),
port = os.environ.get('DB_PORT'),
database=os.environ.get('DB_NAME'))
cursor = conn.cursor()
select_Query = """select distinct string_agg(c1.printed_name,' // '),c1.color,(select image
from mtg.card_export c4
where c4.name = c3.name
and c4.set_id = c3.set_id
and c4.lang = 'en'
limit 1) image_en, string_agg(c1.name,' // '), s.set_id,cp.usd
from mtg.card_export c1, mtg.set s, mtg.card_export c3,mtg.card_price cp
where c1.id in
(select c2.id
from mtg.card_export c2
where replace(lower(c2.printed_name),',','') like lower(%(like)s) escape '='
)
and c1.name = c3.name
and c3.lang = 'en'
and c1.set_id = c3.set_id
and c3.id=cp.id
and s.id = cast(c1.set_id as integer)
and s.set_num = (select max(s1.set_num)
from mtg.card_export ce, mtg.set s1
where cast(ce.set_id as integer)=s1.id
and ce.name = c1.name
)
group by c1.id,c1.color, s.set_id,image_en,cp.usd
order by c1.color,cp.usd desc"""
cursor.execute(select_Query, dict(like= '%'+message.text.replace(',','')+'%'))
self.mtg_records[message.chat.id] = cursor.fetchall()
if cursor.rowcount == 0:
self.bot.send_message(message.chat.id,'Не найдено')
elif cursor.rowcount < 3:
for row in self.mtg_records[message.chat.id]:
self.bot.send_photo(message.chat.id,bytes(row[2]))
#self.bot.send_message(message.chat.id, 'TCG Price:' + str(row[5]))
loading = self.bot.send_message(message.chat.id, 'Price is loading ... ')
self.bot.edit_message_text(chat_id=loading.chat.id, message_id=loading.message_id, text=self.scg_search(row[3]))
else:
self.print_card_list(message)
finally:
if (conn):
cursor.close()
conn.close()
# url='https://api.scryfall.com/cards/named'
# params={'fuzzy':message.text}
# try:
# rsp=requests.get(url=url,params=params)
# rsp=json.loads(rsp.text)
# img_url=rsp['image_uris']['normal']
# img_rsp=requests.get(url=img_url)
# img=img_rsp.content
# bot.send_photo(message.chat.id,img)
# try:
# bot.send_message(message.chat.id,rsp['usd'])
# except KeyError:
# pass
# except KeyError:
# url='https://api.scryfall.com/cards/autocomplete'
# params = {'q': message.text}
# rsp = requests.get(url=url, params=params)
# rsp=json.loads(rsp.text)
# data=rsp['data']
# if data==[]:
# rez='Совпадений не найдено'
# else:
# rez='\n'.join(data)
# bot.send_message(message.chat.id,rez)
def scg_search(self,card_name):
url = "http://www.starcitygames.com/results?name={}&numpage=100".format(card_name)
rsp = requests.get(url)
bs = BeautifulSoup(rsp.content,'html.parser')
rs = bs.findAll(attrs={'class':['deckdbbody_row','deckdbbody2_row']})
if len(rs) != 0:
result = 'Edition | Price \n'
for row in rs:
try:
edition = row.findNext(class_='search_results_2').a.string
if edition not in self.set_list:
continue
price = row.findNext(class_='search_results_9').string
result += edition + ' | ' + price + '\n'
except AttributeError:
pass
else:
result = 'Not Found'
return result
def print_card_list(self,message):
keyboard = telebot.types.InlineKeyboardMarkup()
callback_button = telebot.types.InlineKeyboardButton(text="Показать следующую страницу", callback_data="next")
keyboard.add(callback_button)
self.rez[message.chat.id]=' '
for i in range (len(self.mtg_records[message.chat.id])):
if self.type_flag[message.chat.id] is False:
if self.mtg_records[message.chat.id][0][0] != self.mtg_records[message.chat.id][0][3]:
self.rez[message.chat.id] += self.mtg_records[message.chat.id][0][0] + '[' + self.mtg_records[message.chat.id][0][3] + ']' + '\n' + self.mtg_records[message.chat.id][0][1] + ' | ' + self.mtg_records[message.chat.id][0][4] + ' | ' + str(self.mtg_records[message.chat.id][0][5])+ '\n----------\n'
else:
self.rez[message.chat.id] += self.mtg_records[message.chat.id][0][0] + ' | ' + self.mtg_records[message.chat.id][0][1] + '\n' + self.mtg_records[message.chat.id][0][4] + ' | ' + str(self.mtg_records[message.chat.id][0][5])+ '\n----------\n'
else:
self.rez[message.chat.id] += self.mtg_records[message.chat.id][0][0] + '\n' + self.mtg_records[message.chat.id][0][1] + ' | ' + self.mtg_records[message.chat.id][0][2] + ' | ' + str(self.mtg_records[message.chat.id][0][3])+ '\n----------\n'
self.mtg_records[message.chat.id].pop(0)
if i == 10:
break
if len(self.mtg_records[message.chat.id]) == 0:
self.type_flag[message.chat.id] = False
self.bot.send_message(message.chat.id, self.rez[message.chat.id])
else:
self.bot.send_message(message.chat.id, self.rez[message.chat.id], reply_markup = keyboard)
def callback_inline(self,call):
    # Inline-button callback: renders the next page of results in-place by
    # editing the original message (mirrors print_card_list's rendering).
    keyboard = telebot.types.InlineKeyboardMarkup()
    callback_button = telebot.types.InlineKeyboardButton(text="Показать следующую страницу", callback_data="next")
    keyboard.add(callback_button)
    if call.message:
        if call.data == "next":
            self.rez[call.message.chat.id]=''
            for i in range (len(self.mtg_records[call.message.chat.id])):
                if self.type_flag[call.message.chat.id] is False:
                    # Two-name layout: "name[alt]" only when the names differ.
                    if self.mtg_records[call.message.chat.id][0][0] != self.mtg_records[call.message.chat.id][0][3]:
                        self.rez[call.message.chat.id] += self.mtg_records[call.message.chat.id][0][0] + '[' + self.mtg_records[call.message.chat.id][0][3] + ']' + '\n' + self.mtg_records[call.message.chat.id][0][1] + ' | ' + self.mtg_records[call.message.chat.id][0][4] + ' | ' + str(self.mtg_records[call.message.chat.id][0][5])+ '\n----------\n'
                    else:
                        self.rez[call.message.chat.id] += self.mtg_records[call.message.chat.id][0][0] + ' | ' + self.mtg_records[call.message.chat.id][0][1] + '\n' + self.mtg_records[call.message.chat.id][0][4] + ' | ' + str(self.mtg_records[call.message.chat.id][0][5])+ '\n----------\n'
                else:
                    self.rez[call.message.chat.id] += self.mtg_records[call.message.chat.id][0][0] + '\n' + self.mtg_records[call.message.chat.id][0][1] + ' | ' + self.mtg_records[call.message.chat.id][0][2] + ' | ' + str(self.mtg_records[call.message.chat.id][0][3])+ '\n----------\n'
                # Consume the record just rendered; stop after 11 per page.
                self.mtg_records[call.message.chat.id].pop(0)
                if i == 10:
                    break
            if len(self.mtg_records[call.message.chat.id]) == 0:
                # Last page: drop the layout flag and remove the button.
                self.type_flag[call.message.chat.id] = False
                self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id, text=self.rez[call.message.chat.id])
            else:
                self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id, text=self.rez[call.message.chat.id], reply_markup = keyboard)
|
{"/filters.py": ["/utils.py"], "/colorization.py": ["/utils.py"], "/mbotF.py": ["/mtg.py", "/utils.py", "/filters.py", "/colorization.py", "/clusterization.py"], "/clusterization.py": ["/utils.py"]}
|
38,345,465
|
LekamCharity/insta-IG
|
refs/heads/master
|
/instaphotos/apps.py
|
from django.apps import AppConfig
class InstaphotosConfig(AppConfig):
    """Django application configuration for the instaphotos app."""

    name = 'instaphotos'
|
{"/instaphotos/views.py": ["/instaphotos/models.py", "/instaphotos/forms.py"], "/instaphotos/forms.py": ["/instaphotos/models.py"]}
|
38,353,251
|
Kim368/Py
|
refs/heads/master
|
/factory.py
|
from abc import ABC, abstractmethod
class Creator(ABC):
    """Abstract factory: subclasses supply a Parser via factory_method()."""

    @abstractmethod
    def factory_method(self):
        """Return the Parser instance this creator builds."""
        pass

    def same_operation(self):
        """Build the parser and return the result of its operation()."""
        return self.factory_method().operation()
##
class ParserCreatorPickle(Creator):
    """Creator yielding a pickle serialiser for *obj*."""

    def __init__(self, obj):
        self.obj = obj

    def factory_method(self):
        return ParserPickle(self.obj)


class ParserCreatorDePickle(Creator):
    """Creator yielding a pickle deserialiser for *dump_obj*."""

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def factory_method(self):
        return ParserDePickle(self.dump_obj)
##
class ParserCreatorJson(Creator):
    """Creator yielding a JSON-style serialiser for *obj*."""

    def __init__(self, obj):
        self.obj = obj

    def factory_method(self):
        return ParserJson(self.obj)


class ParserCreatorDeJson(Creator):
    """Creator yielding a JSON-style deserialiser for *dump_obj*."""

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def factory_method(self):
        return ParserDeJson(self.dump_obj)
##
class ParserCreatorYaml(Creator):
    """Creator yielding a YAML-style serialiser for *obj*."""

    def __init__(self, obj):
        self.obj = obj

    def factory_method(self):
        return ParserYaml(self.obj)


class ParserCreatorDeYaml(Creator):
    """Creator yielding a YAML-style deserialiser for *dump_obj*."""

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def factory_method(self):
        return ParserDeYaml(self.dump_obj)
##
class ParserCreatorToml(Creator):
    """Creator yielding a TOML-style serialiser for *obj*."""

    def __init__(self, obj):
        self.obj = obj

    def factory_method(self):
        return ParserToml(self.obj)


class ParserCreatorDeToml(Creator):
    """Creator yielding a TOML-style deserialiser for *dump_obj*."""

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def factory_method(self):
        return ParserDeToml(self.dump_obj)
##
class Parser(ABC):
    """Abstract product: operation() performs the (de)serialisation."""

    @abstractmethod
    def operation(self):
        pass
##
import pickle
class ParserPickle(Parser):
    """Serialise an object to a base64-encoded pickle string."""

    def __init__(self, obj):
        self.obj = obj

    def pick(self):
        import codecs
        # base64 keeps the binary pickle payload printable for text files.
        return codecs.encode(pickle.dumps(self.obj), "base64").decode()

    def operation(self):
        return str(self.pick())
class ParserDePickle(Parser):
    """Restore an object from a base64-encoded pickle string."""

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def pick(self):
        import codecs
        # SECURITY: pickle.loads must never be fed untrusted input.
        return pickle.loads(codecs.decode(self.dump_obj.encode(), "base64"))

    def operation(self):
        return self.pick()
##
class ParserJson(Parser):
    """Render {'type': ..., 'lines': ...} in the project's JSON-like format."""

    def __init__(self, obj):
        self.obj = obj

    def json(self):
        # Note: not standard JSON (single quotes, no commas) -- it is exactly
        # the shape ParserDeJson expects back.
        return ("{\n"
                "'type': '" + str(self.obj['type']) + "'\n"
                "'lines': '" + str(self.obj['lines']) + "'\n"
                "}")

    def operation(self):
        return self.json()
class ParserDeJson(Parser):
    """Parse the project's JSON-like text back into a {'type', 'lines'} dict.

    Returns None when either field is missing from the input text.
    """

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def json(self):
        dump_obj_type = None
        dump_obj_lines = None
        for line in self.dump_obj.split('\n'):
            # Values sit after the marker, wrapped in single quotes.
            if line.find("'type': ") != -1:
                dump_obj_type = line.split("'type': ")[-1][1:-1]
            if line.find("'lines': ") != -1:
                dump_obj_lines = line.split("'lines': ")[-1][1:-1]
        # BUGFIX: removed a stray debug print() that polluted stdout on
        # every deserialisation.
        if (dump_obj_type is not None) and (dump_obj_lines is not None):
            return {'type': dump_obj_type, 'lines': dump_obj_lines}
        return None

    def operation(self):
        return self.json()
##
class ParserYaml(Parser):
    """Render {'type': ..., 'lines': ...} as two 'key: value' lines."""

    def __init__(self, obj):
        self.obj = obj

    def yaml(self):
        return 'type: %s\nlines: %s\n' % (self.obj['type'], self.obj['lines'])

    def operation(self):
        return self.yaml()
class ParserDeYaml(Parser):
    """Parse 'type: ...' / 'lines: ...' text back into a {'type', 'lines'} dict.

    Returns None when either field is missing from the input text.
    """

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def yaml(self):
        dump_obj_type = None
        dump_obj_lines = None
        for line in self.dump_obj.split('\n'):
            if line.find('type: ') != -1:
                dump_obj_type = line.split('type: ')[-1]
            if line.find('lines: ') != -1:
                dump_obj_lines = line.split('lines: ')[-1]
        # BUGFIX: removed two stray debug print() calls that polluted stdout
        # on every deserialisation.
        if (dump_obj_type is not None) and (dump_obj_lines is not None):
            return {'type': dump_obj_type, 'lines': dump_obj_lines}
        return None

    def operation(self):
        return self.yaml()
##
class ParserDeToml(Parser):
    """Parse 'key = value' TOML-like text back into a {'type', 'lines'} dict."""

    def __init__(self, dump_obj):
        self.dump_obj = dump_obj

    def toml(self):
        found = {'type': None, 'lines': None}
        for line in self.dump_obj.split('\n'):
            for key in ('type', 'lines'):
                marker = key + ' = '
                if line.find(marker) != -1:
                    found[key] = line.split(marker)[-1]
        # Only report a result when both fields were present.
        if found['type'] is not None and found['lines'] is not None:
            return found
        return None

    def operation(self):
        return self.toml()
class ParserToml(Parser):
    """Render {'type': ..., 'lines': ...} as two 'key = value' lines."""

    def __init__(self, obj):
        self.obj = obj

    def toml(self):
        return 'type = %s\nlines = %s\n' % (self.obj['type'], self.obj['lines'])

    def operation(self):
        return self.toml()
##
def client_code(creator: Creator):
    """Run the creator's build-and-parse pipeline and return its result."""
    return creator.same_operation()
def create_serializer(ftype, obj):
    """Serialise *obj* with the creator registered for *ftype*.

    *ftype* is one of 'pickle', 'json', 'yaml', 'toml' (KeyError otherwise).
    """
    creators = {
        'pickle': ParserCreatorPickle,
        'json': ParserCreatorJson,
        'yaml': ParserCreatorYaml,
        'toml': ParserCreatorToml,
    }
    return client_code(creators[ftype](obj))
def create_deserializer(ftype, dump_obj):
    """Deserialise *dump_obj* with the creator registered for *ftype*.

    *ftype* is one of 'pickle', 'json', 'yaml', 'toml' (KeyError otherwise).
    """
    creators = {
        'pickle': ParserCreatorDePickle,
        'json': ParserCreatorDeJson,
        'yaml': ParserCreatorDeYaml,
        'toml': ParserCreatorDeToml,
    }
    return client_code(creators[ftype](dump_obj))
##
# if __name__ == "__main__":
# create_serializer('pickle', '1231231231')
# client_code(ParserCreatorPickle('1234'))
# client_code(ParserCreatorYaml({'type': 'dict', 'lines': "{'asds': 1, 3: {'a': 4, 333: dump}}"}))
# client_code(ParserCreatorJson({'type': 'dict', 'lines': "{'asds': 1, 3: {'a': 4, 333: dump}}"}))
# client_code(ParserCreatorToml({'type': 'dict', 'lines': "{'asds': 1, 3: {'a': 4, 333: dump}}"}))
|
{"/test/clclclclc.py": ["/test/dfdf.py"], "/test/dfdf.py": ["/clean.py"], "/cleanUnitTest.py": ["/clean.py", "/diffTypesForTest.py"], "/console.py": ["/clean.py"], "/clean.py": ["/factory.py"]}
|
38,353,252
|
Kim368/Py
|
refs/heads/master
|
/cleanUnitTest.py
|
import unittest
from clean import *
from diffTypesForTest import *
class MyTestCase(unittest.TestCase):
    """Round-trip tests for clean's dumps/loads and file-based dump/load/convert."""
    # NOTE(review): the file tests use the hardcoded absolute path
    # '/home/jke/txt.json' and only pass on a matching machine -- consider
    # tempfile.gettempdir() before running elsewhere.
    def test_loads(self):
        # In-memory round trips: strings, collections, classes and methods.
        self.assertEqual('qwerty', loads(dumps('qwerty'), {}))
        c = Collections()
        self.assertEqual(c.list_list, loads(dumps(c.list_list), {}))
        self.assertEqual(c.list_int, loads(dumps(c.list_int), {}))
        self.assertEqual(c.list_str, loads(dumps(c.list_str), {}))
        self.assertEqual(c.list_bool, loads(dumps(c.list_bool), {}))
        self.assertEqual(c.list_float, loads(dumps(c.list_float), {}))
        self.assertEqual(c.set_bool, loads(dumps(c.set_bool), {}))
        self.assertEqual(c.dictionary, loads(dumps(c.dictionary), {'my_class': my_class}))
        self.assertEqual(c.tupl, loads(dumps(c.tupl), {'my_class': my_class}))
        # Classes/functions are compared by attribute sets, not identity.
        self.assertEqual(my_class.__dict__.keys(), loads(dumps(my_class),{}).__dict__.keys())
        self.assertEqual(my_class.my_class2.__dict__.keys(), loads(dumps(my_class.my_class2), {'my_class': my_class}).__dict__.keys())
        self.assertEqual(my_class.pr.__dict__.keys(), loads(dumps(my_class.pr), {}).__dict__.keys())
        self.assertEqual(my_class.pr(my_class('a')), loads(dumps(my_class.pr), {})(my_class('a')))
        self.assertEqual(my_class.aaa(my_class('a')), loads(dumps(my_class.aaa), {})(my_class('a')))
    def test_load(self):
        # Same round trips, but via files in every supported format.
        c = Collections()
        self.assertEqual(c.list_list, load(dump(c.list_list, '/home/jke/txt.json', 'pickle'), {}))
        self.assertEqual(c.list_int, load(dump(c.list_int, '/home/jke/txt.json', 'json'), {}))
        self.assertEqual(c.list_str, load(dump(c.list_str, '/home/jke/txt.json', 'toml'), {}))
        self.assertEqual(c.list_bool, load(dump(c.list_bool, '/home/jke/txt.json', 'pickle'), {}))
        self.assertEqual(c.list_float, load(dump(c.list_float, '/home/jke/txt.json', 'yaml'), {}))
        self.assertEqual(c.set_bool, load(dump(c.set_bool, '/home/jke/txt.json', 'toml'), {}))
        self.assertEqual(c.dictionary, load(dump(c.dictionary, '/home/jke/txt.json', 'json'), {'my_class': my_class}))
        self.assertEqual(c.tupl, load(dump(c.tupl, '/home/jke/txt.json', 'pickle'), {'my_class': my_class}))
        self.assertEqual(my_class.__dict__.keys(), load(dump(my_class, '/home/jke/txt.json', 'json'),{}).__dict__.keys())
        self.assertEqual(my_class.my_class2.__dict__.keys(), load(dump(my_class.my_class2, '/home/jke/txt.json', 'toml'), {'my_class': my_class}).__dict__.keys())
        self.assertEqual(my_class.pr.__dict__.keys(), load(dump(my_class.pr, '/home/jke/txt.json', 'yaml'), {}).__dict__.keys())
        self.assertEqual(my_class.pr(my_class('a')), load(dump(my_class.pr, '/home/jke/txt.json', 'toml'), {})(my_class('a')))
        self.assertEqual(my_class.aaa(my_class('a')), load(dump(my_class.aaa, '/home/jke/txt.json', 'json'), {})(my_class('a')))
    def test_conv(self):
        # Dump in one format, convert the file on disk, then reload.
        dump(my_class, '/home/jke/txt.json', 'yaml')
        convert('/home/jke/txt.yaml', 'json')
        self.assertEqual(my_class.__dict__.keys(), load(dump(my_class, '/home/jke/txt.json', 'json'),{}).__dict__.keys())
        dump(my_class, '/home/jke/txt.json', 'pickle')
        convert('/home/jke/txt.yaml', 'yaml')
        self.assertEqual(my_class.__dict__.keys(),
                         load(dump(my_class, '/home/jke/txt.json', 'yaml'), {}).__dict__.keys())
        dump(my_class, '/home/jke/txt.json', 'toml')
        convert('/home/jke/txt.yaml', 'pickle')
        self.assertEqual(my_class.__dict__.keys(),
                         load(dump(my_class, '/home/jke/txt.json', 'pickle'), {}).__dict__.keys())
if __name__ == '__main__':
    unittest.main()
|
{"/test/clclclclc.py": ["/test/dfdf.py"], "/test/dfdf.py": ["/clean.py"], "/cleanUnitTest.py": ["/clean.py", "/diffTypesForTest.py"], "/console.py": ["/clean.py"], "/clean.py": ["/factory.py"]}
|
38,353,253
|
Kim368/Py
|
refs/heads/master
|
/console.py
|
import argparse
import inspect
from abc import ABC, abstractmethod
# CLI entry point: convert the serialised file at *fpath* into *type_of_new*.
# NOTE(review): the `inspect` and `abc` imports appear unused here -- confirm
# before removing.
parser = argparse.ArgumentParser(description='Convert one file to another')
parser.add_argument('fpath', type=str, help='File path')
parser.add_argument('type_of_new', type=str, help='Converted file type')
args = parser.parse_args()
print(args.fpath)
print(args.type_of_new)
# Imported late so argparse errors surface before clean's module side effects.
import clean
clean.convert(args.fpath, args.type_of_new)
|
{"/test/clclclclc.py": ["/test/dfdf.py"], "/test/dfdf.py": ["/clean.py"], "/cleanUnitTest.py": ["/clean.py", "/diffTypesForTest.py"], "/console.py": ["/clean.py"], "/clean.py": ["/factory.py"]}
|
38,353,254
|
Kim368/Py
|
refs/heads/master
|
/diffTypesForTest.py
|
class my_class():
    # Test fixture exercised by the serialiser round-trip tests.
    def __init__(self, s):
        self.c = s
        self.s = s+s
    class my_class2():
        # Nested class fixture (dumped/reloaded separately in the tests).
        se = 23
        a = 1
        b = "mfksdkfs"
        d = 'hhh'
    a = 1
    b = "mfksdkfs"
    d = 'hhh'
    def aaa(self):
        return 'AZAZZAZAZAZA'
    def pr(self):
        # Shuffles attributes around for side-effect testing; relies on
        # __init__ having set self.c and self.s first.
        print('a = ', self.a, ' b = ', self.b, ' c = ', self.c)
        self.c = self.a
        self.a = self.s  # 'a' becomes an instance attribute, shadowing the class one
        print('a = ', self.a, ' b = ', self.b, ' c = ', self.c)
import random
from string import printable
class Collections():
    # Randomised collection fixtures for the serialiser round-trip tests.
    # NOTE(review): the get_* helpers append to these *class-level* lists, so
    # contents are shared and keep growing across instances -- confirm this
    # accumulation is intended.
    list_int = []
    list_str = []
    list_float = []
    list_bool = []
    set_bool = ()
    list_list = []
    # Mixed-key containers exercising class/function references.
    dictionary = {my_class: 123, 'asdas': 234, 6:1, my_class.pr: 'funk', 'abc': ['a', 'b', 'c']}
    tupl = (1, 2, 'asdd', my_class, my_class.aaa)
    def __init__(self):
        self.list_int = self.get_list_int()
        self.list_str = self.get_list_str()
        self.list_float = self.get_list_float()
        self.list_bool = self.get_list_bool()
        self.set_bool = self.get_set()
        self.list_list = self.get_list_list()
        #self.dictionary = self.get_dict()
    def get_list_int(self):
        # Appends 50 random ints in [-999999999, 999999999].
        for i in range(50):
            self.list_int.append(random.randint(-999999999, 999999999))
        return self.list_int
    def get_list_float(self):
        # Appends 50 random floats derived from ints scaled by 0.001.
        for i in range(50):
            num = random.randint(-999999999, 999999999)
            self.list_float.append(num * 0.001)
        return self.list_float
    def get_list_bool(self):
        # NOTE(review): random.random() returns a float in [0, 1), so
        # `num == 1` is effectively never true -- the list is all False.
        for i in range(50):
            num = random.random()
            if num == 1:
                bl = True
            else:
                bl = False
            self.list_bool.append(bl)
        return self.list_bool
    def get_list_str(self):
        # Appends 50 strings of cumulatively-growing random printable chars.
        s = ''
        for i in range(50):
            for j in range(20):
                s += random.choice(printable)
            self.list_str.append(s)
        return self.list_str
    def get_set(self):
        self.set_bool = set(self.get_list_bool())
        return self.set_bool
    def get_list_list(self):
        # A heterogeneous list: strings plus three nested lists.
        l = self.get_list_str()
        l.append(self.get_list_bool())
        l.append(self.get_list_int())
        l.append(self.get_list_float())
        return l
    def get_dict(self):
        # Currently unused (the call in __init__ is commented out).
        keys = self.get_list_int() + self.get_list_str()
        values = self.get_list_str() + self.get_list_float()
        d = dict(zip(keys, values))
        d[my_class] = my_class.my_class2
        d[my_class] = my_class.pr
        d[my_class.pr] = my_class.my_class2
        return d
# import clean
# Collections()
# clean.dump(Collections, '/home/jke/txt.json', 'pickle')
# clean.dump(Collections, '/home/jke/txt.json', 'json')
# clean.dump(Collections, '/home/jke/txt.json', 'yaml')
# clean.dump(Collections, '/home/jke/txt.json', 'toml')
#
# # a = clean.load('/home/jke/txt.json', {'my_class': my_class})
# clean.convert('/home/jke/txt.pickle', 'json')
# clean.convert('/home/jke/txt.pickle', 'json')
# a = clean.load('/home/jke/txt.json', {'my_class': my_class})
# clean.load('/home/jke/txt.yaml', {'my_class': my_class})
# print(a)
|
{"/test/clclclclc.py": ["/test/dfdf.py"], "/test/dfdf.py": ["/clean.py"], "/cleanUnitTest.py": ["/clean.py", "/diffTypesForTest.py"], "/console.py": ["/clean.py"], "/clean.py": ["/factory.py"]}
|
38,353,255
|
Kim368/Py
|
refs/heads/master
|
/clean.py
|
import inspect
def dump(obj, path, *args):
    """Serialise *obj* to *path* and return the path actually written.

    When a single extra argument names a format ('pickle'/'json'/'yaml'/
    'toml'), the text is converted with the factory serialiser and the file
    extension is replaced by that format name.
    """
    dumps_obj = dumps(obj)
    if len(args) == 1:
        import factory
        dumps_obj = factory.create_serializer(args[0], dumps_obj)
        if path.find('.') != -1:
            # NOTE(review): split('.')[0] truncates at the FIRST dot, so
            # paths whose directories contain dots get mangled -- confirm.
            path = str(path.split('.')[0]) + '.' + args[0]
    # Context manager guarantees the handle is closed even on write errors.
    with open(path, 'w') as f:
        f.write(str(dumps_obj))
    return path
def load(path, globl):
    """Deserialise the object stored at *path*.

    The format is taken from the file extension; *globl* supplies the globals
    used when rebuilding classes and functions. Returns None for unknown
    formats or extension-less paths.
    """
    if path.find('.') == -1:
        return None
    ftype = str(path.split('.')[-1])
    # NOTE(review): substring matching means e.g. 'son' or 'ml' also pass;
    # kept for compatibility, but tuple membership would be stricter.
    if 'picklejsonyamltoml'.find(ftype) != -1:
        with open(path, 'r') as f:
            line = f.read()
        import factory
        dump_obj = factory.create_deserializer(ftype, line)
        # The original's two branches were identical; one call suffices.
        return loads(dump_obj, globl)
# def convert(path, type, globl):
# find_in_types = 'picklejsonyamltoml'
# ftype = str(path.split('.')[-1])
# if find_in_types.find(ftype) != -1:
# if ftype != type:
# obj = load(path, globl)
# return dump(obj, path, type)
def convert(path, type):
    """Re-serialise the file at *path* into format *type*, written alongside.

    No-op when the extension is unrecognised or already matches *type*.
    """
    ftype = str(path.split('.')[-1])
    if 'picklejsonyamltoml'.find(ftype) != -1 and ftype != type:
        with open(path, 'r') as f:
            line = f.read()
        import factory
        # Decode from the old format, re-encode into the new one.
        dumps_obj = factory.create_deserializer(ftype, line)
        dumps_obj = factory.create_serializer(type, dumps_obj)
        path = str(path.split('.')[0]) + '.' + type
        with open(path, 'w') as f:
            f.write(str(dumps_obj))
def loads(dumps_obj, globl):
    """Rebuild an object from a dumps() dict using *globl* as the namespace."""
    kind = dumps_obj['type']
    if kind == 'type':
        return __loads_class(dumps_obj['lines'], globl)
    if kind == 'function':
        return __loads_func(dumps_obj['lines'], globl)
    return __loads_other(dumps_obj, globl)
def __loads_class(lines, globl):
    """exec() the class source in *globl* and return the rebuilt class.

    *lines* is a list of source lines, or its str() repr (eval'd back first).
    """
    if type(lines).__name__ == 'str':
        # NOTE(review): eval on stored text -- only safe for trusted dumps.
        lines = eval(lines)
    # Class name: token before '(' on the 'class Name(...)' header line.
    cl_name = lines[0]
    cl_name = cl_name.split('(')[0]
    cl_name = str(cl_name.split()[-1])
    line = ''.join(lines)
    loc = {}
    # Re-execute the class body, then pull the new class out of the locals.
    # (Removed dead locals `lllll`/`ww` from the original.)
    exec(line + '\ncl = ' + cl_name, globl, loc)
    return loc['cl']
def __loads_func(lines, globl):
    """exec() the function source in *globl* and return the rebuilt function."""
    if type(lines).__name__ == 'str':
        # NOTE(review): eval on stored text -- only safe for trusted dumps.
        lines = eval(lines)
    # Function name: token before '(' on the 'def name(...)' header line.
    def_name = lines[0]
    def_name = def_name.split('(')[0]
    def_name = str(def_name.split()[-1])
    line = ''.join(lines)
    loc = {}
    # BUGFIX: add an explicit newline before the trailing assignment so this
    # no longer depends on the captured source ending with '\n'.
    exec(line + '\nfunc = ' + def_name, globl, loc)
    return loc['func']
def __loads_other(dump_obj, globl):
    """Rebuild a plain value: strings pass through, everything else is eval'd."""
    lines = dump_obj['lines']
    # NOTE(review): eval on stored text -- only safe for trusted dumps.
    return lines if dump_obj['type'] == 'str' else eval(lines, globl)
def dumps(obj):
    """Dispatch on the object's kind and return its dumps dict."""
    kind = type(obj).__name__
    if kind == 'type':
        return __dumps_class(obj)
    if kind == 'function':
        return __dumps_func(obj)
    return __dumps_other(obj)
def __dumps_class(obj):
    """Capture a class's source as {'type': 'type', 'lines': [...]}.

    The 'class ...' header line is fully dedented so the captured source can
    later be exec'd at top level (nested classes come back indented from
    inspect.getsourcelines; the body lines may stay indented).
    """
    lines = inspect.getsourcelines(obj)
    if lines[0][0][0] == ' ':
        # BUGFIX: the old code stripped only a single leading space (its
        # counting loop had no effect), leaving 'class ...' indented and
        # breaking exec() in __loads_class for nested classes.
        lines[0][0] = lines[0][0].lstrip(' ')
    return {'type': type(obj).__name__, 'lines': lines[0]}
def __dumps_func(obj):
    """Capture a function's source as {'type': 'function', 'lines': [...]}.

    The 'def ...' header line is fully dedented so method sources can be
    exec'd at top level on reload.
    """
    lines = inspect.getsourcelines(obj)[0]
    if lines[0][0] == ' ':
        # BUGFIX: fully dedent the header (the old loop stripped just one
        # space, so indented method sources failed to exec on reload).
        lines[0] = lines[0].lstrip(' ')
    return {'type': type(obj).__name__, 'lines': lines}
def __dumps_other(value):
    """Dump a plain value; collections get qualname-corrected string forms."""
    kind = type(value).__name__
    if kind in ('tuple', 'list', 'dict', 'set'):
        return __make_lines_from_collection(value)
    return {'type': kind, 'lines': str(value)}
def __make_lines_from_collection(collection):
    """Dump a collection, rewriting repr()s of classes/functions to qualnames."""
    replacements = __make_correct_and_str_dict(collection)
    return __make_corrent_dumps(replacements, collection)
def __make_correct_and_str_dict(obj):
    """Collect (qualname, repr) replacement pairs for every class/function
    reachable in *obj*, recursing into nested collections.

    Returns {'correct_values': [...], 'str_values': [...]} as parallel lists.
    The original triplicated this logic for dict keys, dict values and
    sequence elements; it is factored into one visitor.
    """
    correct_and_str = {'correct_values': [], 'str_values': []}

    def visit(item):
        # One element: record classes/functions, recurse into collections.
        name = type(item).__name__
        if name in ('type', 'function'):
            correct_and_str['correct_values'].append(item.__qualname__)
            correct_and_str['str_values'].append(str(item))
        elif name in ('tuple', 'list', 'dict', 'set'):
            nested = __make_correct_and_str_dict(item)
            correct_and_str['correct_values'] += nested['correct_values']
            correct_and_str['str_values'] += nested['str_values']

    if type(obj).__name__ == 'dict':
        # Keys first, then values -- same order as the original produced.
        for key in obj.keys():
            visit(key)
        for value in obj.values():
            visit(value)
    else:
        for element in obj:
            visit(element)
    return correct_and_str
def __make_corrent_dumps(corrent_and_str, obj):
    """Replace the first occurrence of each raw repr in str(obj) with its
    qualified name, then wrap the result as a dumps dict."""
    lines = str(obj)
    pairs = zip(corrent_and_str['str_values'], corrent_and_str['correct_values'])
    for raw, qualified in pairs:
        # count=1 mirrors the original's split-once-and-rejoin behaviour.
        lines = lines.replace(raw, qualified, 1)
    return {'type': type(obj).__name__, 'lines': lines}
class m():
    """Tiny fixture class kept for manual experiments."""
    a = 1
if __name__ == "__main__":
    # Manual smoke test: round-trip the dump() function itself via pickle.
    # NOTE(review): Windows-specific absolute paths -- adjust before running.
    dump(dump, 'D:\\work\\lab2\\txt.txt', 'pickle')
    a = load('D:\\work\\lab2\\txt.pickle', {})
    print(a)
|
{"/test/clclclclc.py": ["/test/dfdf.py"], "/test/dfdf.py": ["/clean.py"], "/cleanUnitTest.py": ["/clean.py", "/diffTypesForTest.py"], "/console.py": ["/clean.py"], "/clean.py": ["/factory.py"]}
|
38,402,825
|
PedroLormendez/jcclass
|
refs/heads/master
|
/jc_class/__init__.py
|
from .jc_class import jc_class
|
{"/jcclass/__init__.py": ["/jcclass/jcclass.py"], "/jcclass/jcclass.py": ["/jcclass/__init__.py"], "/jcclass/JC_classification.py": ["/jcclass/__init__.py"], "/jcclass/CTs_plots.py": ["/jcclass/__init__.py"]}
|
38,417,404
|
Drashkar/openpilot
|
refs/heads/xps_pro
|
/selfdrive/car/chrysler/chryslerlonghelper.py
|
from selfdrive.config import Conversions as CV
from common.numpy_fast import clip
from numpy import interp
# Cruise set-speed clamp bounds (stored internally in m/s).
SET_SPEED_MIN = 5 * CV.MPH_TO_MS
SET_SPEED_MAX = 120 * CV.MPH_TO_MS
# Button handling: loop ticks before a press counts as "long", and the
# mph step applied per short/long press.
LONG_PRESS_TIME = 50  # 500msec
SHORT_PRESS_STEP = 1
LONG_PRESS_STEP = 5
# Accel Hard limits
ACCEL_HYST_GAP = 0.0  # don't change accel command for small oscillations within this value
ACCEL_MAX = 2.  # m/s2
ACCEL_MIN = -3.8  # m/s2
ACCEL_SCALE = 1.
DEFAULT_DECEL = 4.0  # m/s2
# Thresholds for switching between braking and gas actuation.
START_BRAKE_THRESHOLD = -0.25  # m/s2
STOP_BRAKE_THRESHOLD = 0.0  # m/s2
START_GAS_THRESHOLD = 0.0  # m/s2
STOP_GAS_THRESHOLD = -0.25  # m/s2
# Cluster chime durations, in loop ticks.
CHIME_TIME = 8
CHIME_GAP_TIME = 5
def setspeedlogic(set_speed, acc_enabled, acc_enabled_prev, setplus, setminus, resbut, timer, ressetspeed, short_press, vego, gas_set, gas, gas_timer):
    # Stock-like cruise set-speed handling: short presses step by 1 mph,
    # held presses step in 5 mph multiples; resume restores the speed saved
    # when ACC last disengaged. All math is done in whole mph, then the
    # result is converted back to m/s and clamped.
    set_speed = int(round((set_speed * CV.MS_TO_MPH), 0))
    vego = int(round((vego * CV.MS_TO_MPH), 0))
    if not acc_enabled and acc_enabled_prev:
        # Falling edge of ACC: remember the speed for the resume button.
        ressetspeed = set_speed
    if acc_enabled_prev and acc_enabled:
        if setplus:
            if not short_press:
                # First tick of a press. With the gas pressed, the first tap
                # syncs the set speed to the current vehicle speed.
                if gas and not gas_set:
                    if set_speed < vego:
                        set_speed = vego
                    else:
                        set_speed += SHORT_PRESS_STEP
                    gas_set = True
                else:
                    set_speed += SHORT_PRESS_STEP
                short_press = True
            elif timer % LONG_PRESS_TIME == 0:
                # Held press: round up to the next LONG_PRESS_STEP multiple.
                set_speed += (LONG_PRESS_STEP - set_speed % LONG_PRESS_STEP)
            timer += 1
        elif setminus:
            if not short_press:
                if gas and not gas_set:
                    set_speed = vego
                    gas_set = True
                else:
                    set_speed -= SHORT_PRESS_STEP
                short_press = True
            elif timer % LONG_PRESS_TIME == 0:
                # Held press: round down to the previous LONG_PRESS_STEP multiple.
                if set_speed % LONG_PRESS_STEP > 0:
                    set_speed += (LONG_PRESS_STEP - set_speed % LONG_PRESS_STEP)
                set_speed -= LONG_PRESS_STEP
            timer += 1
        else:
            # No button held: reset press tracking.
            timer = 0
            short_press = False
    elif acc_enabled and not short_press:
        # Rising edge of ACC: resume restores the saved speed, otherwise
        # engage at the current vehicle speed.
        if resbut:
            set_speed = ressetspeed
        else:
            set_speed = vego
        short_press = True
        timer += 1
    else:
        short_press = False
        timer = 0
    # Gas-override sync latch expires when gas is released or after ~2s.
    if not gas or gas_timer > 200:
        gas_set = False
        gas_timer = 0
    elif gas_set:
        gas_timer += 1
    set_speed = set_speed * CV.MPH_TO_MS
    set_speed = clip(set_speed, SET_SPEED_MIN, SET_SPEED_MAX)
    return set_speed, short_press, timer, gas_set, ressetspeed, gas_timer
def cruiseiconlogic(acc_enabled, acc_available, has_lead):
    """Map ACC state to the instrument cluster's (cruise_state, cruise_icon).

    4/15 or 4/11 while engaged (lead / no lead), 3/5 while available but not
    engaged, and 0/0 when cruise is off entirely.
    """
    if acc_enabled:
        # Engaged: green icon; variant depends on a visible lead car.
        return 4, 15 if has_lead else 11
    if acc_available:
        # On but not engaged: white icon with 4-bar distance.
        return 3, 5
    return 0, 0
def accel_hysteresis(accel, accel_steady):
    """Suppress accel-command changes smaller than ACCEL_HYST_GAP.

    Returns the clamped accel command and the updated steady value.
    """
    if accel > accel_steady + ACCEL_HYST_GAP:
        accel_steady = accel - ACCEL_HYST_GAP
    elif accel < accel_steady - ACCEL_HYST_GAP:
        accel_steady = accel + ACCEL_HYST_GAP
    # The command always tracks the steady value.
    accel = accel_steady
    return accel, accel_steady
def accel_rate_limit(accel_lim, prev_accel_lim):
    """Rate-limit accel command changes per step (jerk limiting).

    Positive commands rise by at most 0.01 and fall by at most 0.038 per
    call; negative (braking) commands change at a decel-dependent rate.
    """
    drBp = [0., -0.15, -0.50, -1.0, -1.5, -5.0]
    dra = [-0.005, -0.007, -0.01, -0.015, -0.02, -0.04]
    # NOTE(review): numpy.interp expects increasing x-points, but drBp is
    # decreasing -- this relies on numpy's unspecified behaviour; confirm.
    decel_rate = interp(accel_lim, drBp, dra)
    if accel_lim > 0:
        if accel_lim > prev_accel_lim:
            return min(accel_lim, prev_accel_lim + 0.01)
        return max(accel_lim, prev_accel_lim - 0.038)
    if accel_lim < prev_accel_lim:
        return max(accel_lim, prev_accel_lim - decel_rate)
    return min(accel_lim, prev_accel_lim + 0.01)
def cluster_chime(chime_val, enabled, enabled_prev, chime_timer, gap_timer):
    """Drive the cluster chime on ACC engage/disengage edges.

    Engage plays chime 4 once; disengage plays chime 7, then repeats it once
    after a short gap. Timers count down one tick per call.
    """
    if enabled and not enabled_prev:
        # Rising edge: single engage chime.
        chime_val = 4
        chime_timer = CHIME_TIME
        gap_timer = 0
    elif enabled_prev and not enabled:
        # Falling edge: disengage chime, repeated after the gap expires.
        chime_val = 7
        chime_timer = CHIME_TIME
        gap_timer = CHIME_GAP_TIME
    if chime_timer > 0:
        chime_timer -= 1
    elif gap_timer > 0:
        gap_timer -= 1
        if gap_timer == 0:
            chime_timer = CHIME_TIME
    return chime_val, chime_timer, gap_timer
|
{"/selfdrive/car/chrysler/carstate.py": ["/selfdrive/car/chrysler/chryslerlonghelper.py"], "/selfdrive/car/chrysler/carcontroller.py": ["/selfdrive/car/chrysler/chryslercan.py", "/selfdrive/car/chrysler/chryslerlonghelper.py"]}
|
38,420,845
|
yeboahd24/python202
|
refs/heads/main
|
/Event Driven/asyncioQueue.py
|
#!usr/bin/env/python3
"""Producer/consumer demo on an asyncio.Queue.

BUGFIX: the generator-based ``@asyncio.coroutine`` / ``yield from`` style was
removed in Python 3.11; rewritten as native coroutines with the same
behaviour (produce a random article id every second, consume and print).
"""
import asyncio
import random
import time


async def newsProducer(myQueue):
    while True:
        await myQueue.put(random.randint(1, 5))
        await asyncio.sleep(1)


async def newsConsumer(myQueue):
    while True:
        articleId = await myQueue.get()
        print("News Reader Consumed News Article {}", articleId)


async def _run_forever():
    # Schedule both sides, then block forever (as loop.run_forever() did).
    myQueue = asyncio.Queue()
    asyncio.create_task(newsProducer(myQueue))
    asyncio.create_task(newsConsumer(myQueue))
    await asyncio.Event().wait()

if __name__ == "__main__":
    try:
        asyncio.run(_run_forever())
    except KeyboardInterrupt:
        pass
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,846
|
yeboahd24/python202
|
refs/heads/main
|
/Event Driven/cancelTask.py
|
#!usr/bin/env/python3
"""Demo: scheduling coroutines as tasks and cancelling one before it runs."""
import asyncio
async def myCoroutine():
    print("My Coroutine")
async def main():
    # current_task() reports the task wrapping this coroutine (main itself).
    current = asyncio.current_task()
    print(current)
# NOTE(review): asyncio.get_event_loop() outside a running loop is deprecated
# in modern Python; asyncio.run() is the preferred entry point.
loop = asyncio.get_event_loop()
try:
    task1 = loop.create_task(myCoroutine())
    task2 = loop.create_task(myCoroutine())
    task3 = loop.create_task(myCoroutine()) # task3 is cancelled
    task3.cancel()
    # Running main() also drives the other pending tasks to completion.
    loop.run_until_complete(main())
finally:
    loop.close()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,847
|
yeboahd24/python202
|
refs/heads/main
|
/Design Pattern/document.py
|
#!usr/bin/env/python3
class Document:
    """A minimal text buffer with a movable cursor (command-pattern demo)."""

    def __init__(self):
        self.characters = []  # buffer contents, one character per slot
        self.cursor = 0       # insertion-point index into characters
        self.filename = ''    # target path for save()

    def insert(self, character):
        """Insert *character* at the cursor and advance past it."""
        self.characters.insert(self.cursor, character)
        self.cursor += 1

    def delete(self):
        """Remove the character at the cursor position."""
        del self.characters[self.cursor]

    def save(self):
        """Write the whole buffer to self.filename."""
        with open(self.filename, 'w') as f:
            f.write(''.join(self.characters))

    def forward(self):
        """Move the cursor one position right."""
        self.cursor += 1

    def back(self):
        """Move the cursor one position left."""
        self.cursor -= 1
# Demo: build "hello", then replace the final character with 'p'.
doc = Document()
doc.filename = "test_document"
doc.insert('h')
doc.insert('e')
doc.insert('l')
doc.insert('l')
doc.insert('o')
print("".join(doc.characters))
# Step back over 'o', delete it, and type 'p' in its place.
doc.back()
doc.delete()
doc.insert('p')
print("".join(doc.characters))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,848
|
yeboahd24/python202
|
refs/heads/main
|
/Design Pattern/scrape_test.py
|
from concurrent.futures import ThreadPoolExecutor
import threading
import time
import requests
state = threading.Lock()
def get_google(base_url):
    """Fetch *base_url* while holding the shared lock; return the response.

    BUGFIX: the original returned before sleeping and releasing, so the lock
    was never released (everything after `return` was unreachable) and every
    later acquire deadlocked. `with` releases the lock even on exceptions.
    """
    with state:
        response = requests.get(base_url)
        time.sleep(.2)
    return response
def get_python(base_url):
    """Fetch *base_url* while holding the shared lock; return the response.

    BUGFIX: mirrors get_google -- the original leaked the lock by returning
    before releasing it; the code after `return` never ran.
    """
    with state:
        response = requests.get(base_url)
        time.sleep(.2)
    return response
def main():
    """Fan the two fetches out on a small thread pool."""
    print('Testing...')
    with ThreadPoolExecutor(max_workers=3) as executor:
        task1 = executor.submit(get_google, 'https://www.google.com')
        # BUGFIX: the second future previously overwrote task1.
        task2 = executor.submit(get_python, 'https://www.google.org')
if __name__ == '__main__':
    main()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,849
|
yeboahd24/python202
|
refs/heads/main
|
/python etc_2/callableObject.py
|
#!usr/bin/env/python3
from typing import Callable
IntExp = Callable[[int, int], int] # this means two integer values that returns an integer value
class Power1:
    """Callable computing x**n by repeated multiplication."""

    def __call__(self, x: int, n: int) -> int:
        result = 1
        for _ in range(n):
            result = result * x
        return result
# Module-level instance annotated with the IntExp callable alias.
pow1: IntExp = Power1()
# print(pow1(2, 0))
class Power2:
    """Callable computing x**n by repeated multiplication (twin of Power1)."""

    def __call__(self, x: int, n: int) -> int:
        total = 1
        remaining = n
        while remaining > 0:
            total *= x
            remaining -= 1
        return total
p = Power2()
print(p(2, 0))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,850
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_3/factorial_with_operator.py
|
#!usr/bin/env/python3
from functools import reduce
from operator import mul
def fact(n):
return reduce(mul, range(1, n+1))
print(fact(5))
#without functional programming
def fact_1(n):
if n < 2:
return n
return n * fact(n-1)
print(fact_1(5))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,851
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_5/advance_email_demo.py
|
#!usr/bin/env/python3
import smtplib, ssl
import os
from email.message import EmailMessage
import credentials
# EMAIL_ADDRESS = os.environ.get('EMAIL_USER')
# EMAIL_PASSWORD = os.environ.get('EMAIL_PASS')
# HOST = 'smtp.gmail.com'
# PORT = 587
# context=ssl.create_default_context()
# msg = EmailMessage()
# msg['Subject'] = "Email Testing By Dominic"
# msg['From'] = EMAIL_ADDRESS
# msg['To'] = EMAIL_ADDRESS
# msg.set_content("Is all about tesing...")
# with smtplib.SMTP(HOST, PORT) as smtp:
# smtp.starttls(context=context)
# smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
# smtp.send_message(msg)
# data = os.getenv('EMAIL_USER')
# print(data)
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,852
|
yeboahd24/python202
|
refs/heads/main
|
/Design Pattern/using_function_as_attr.py
|
#!usr/bin/env/python3
#Bad way
class A:
CALLBACK = lambda x: x * 2
# print(A.CALLBACK(3)) # works
call = A()
# print(call.CALLBACK(4)) # WRONG
# God Way
class FunctionHolder:
def __init__(self, f):
self._f = f
def __get__(self, x, y):
return self._f
class A:
CALLBACK = FunctionHolder(lambda x: x * 2)
call = A()
print(call.CALLBACK(3))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,853
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_3/sorting_object.py
|
#!usr/bin/env/python3
#Sorting object of the same class
class User(object):
def __init__(self, user_id):
self.user_id = user_id
def __repr__(self):
return 'User({})'.format(self.user_id)
users = [User(23), User(3), User(99)]
print(users) #[User(23), User(3), User(99)]
print(sorted(users, key=lambda u: u.user_id)) #[User(3), User(23), User(99)]
#Instead of using lambda, an alternative approach is to use operator.attrgetter():
from operator import attrgetter
print(sorted(users, key=attrgetter('user_id')))
#NB: attregetter is a little bit faster than lambda
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,854
|
yeboahd24/python202
|
refs/heads/main
|
/python etc_2/getattrMethod.py
|
#!usr/bin/env/python3
class RTD_Solver:
def __init__(
self, *,
rate: float = None,
time: float = None,
distance: float = None) -> None:
if rate:
self.rate = rate
if time:
self.time = time
if distance:
self.distance = distance
def __getattr__(self, name: str) -> float:
if name == "rate":
return self.distance / self.time
elif name == "time":
return self.distance / self.rate
elif name == "distance":
return self.rate * self.time
else:
raise AttributeError(f"Can't compute {name}")
r1 = RTD_Solver(rate=6.25, distance=10.25)
print(r1.rate)
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,855
|
yeboahd24/python202
|
refs/heads/main
|
/Event Driven/future.py
|
#!usr/bin/env/python3
import asyncio
async def myFuture(future):
await asyncio.sleep(1)
future.set_result("My Future Has Completed")
async def main():
future = asyncio.Future()
await asyncio.ensure_future(myFuture(future))
print(future.result())
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(main())
finally:
loop.close()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,856
|
yeboahd24/python202
|
refs/heads/main
|
/Design Pattern/sub1/server.py
|
#!usr/bin/env/python3
#The decorator pattern allows us to "wrap" an object that provides core functionality
#with other objects that alter this functionality.
#There are two primary uses of the decorator pattern:
#• Enhancing the response of a component as it sends data to a second component
#• Supporting multiple optional behaviors
import socket
def respond(client):
response = input("Enter a value: ")
client.send(bytes(response, 'utf8'))
client.close()
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('localhost',2401))
server.listen(1)
try:
while True:
client, addr = server.accept()
respond(client)
finally:
server.close()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,857
|
yeboahd24/python202
|
refs/heads/main
|
/Event Driven/debugAsyncio.py
|
#!usr/bin/env/python3
import asyncio
import logging
import time
logging.basicConfig(level=logging.DEBUG)
async def myWorker():
logging.info("My Worker Coroutine Hit")
time.sleep(1)
async def main():
logging.debug("My Main Function Hit")
await asyncio.wait([myWorker()]) # without this the logging info will not execute
loop = asyncio.get_event_loop()
loop.set_debug(True)
try:
loop.run_until_complete(main())
finally:
loop.close()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,858
|
yeboahd24/python202
|
refs/heads/main
|
/Concurrent/terminatingProcess.py
|
#!usr/bin/env/python3
import multiprocessing
import time
def daemonProcess():
current_process = multiprocessing.current_process()
print("Child Process PID: {}".format(current_process.pid))
time.sleep(20)
current_process = multiprocessing.current_process()
print("Main process PID: {}".format(current_process.pid))
myProcess = multiprocessing.Process(target=daemonProcess)
myProcess.start()
print("My Process has terminated, terminating main thread")
print("Terminating Child Process")
myProcess.terminate()
print("Child Process Successfully terminated")
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,859
|
yeboahd24/python202
|
refs/heads/main
|
/python etc_2/PartialFunc.py
|
#!usr/bin/env/python3
# A prtial function is a generalization of a mathematical function in a way that isn't forced to map
# every possible input value (domain) to its results.
from functools import partial
from itertools import count
power_of_2 = partial(pow, 2)
print(power_of_2(4))
infinite_powers_of_2 = tuple(map(partial(pow, 2), count()))
# print(infinite_powers_of_2(1))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,860
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_4/test.py
|
#!usr/bin/env/python3
# with open('sample.txt') as f:
# fp = f.read()
# print(f.closed) # True, becuase the with close the file automatically
# print(f.read()) # try to read it again raises an error because the file is
# already closed
# data = list()
# for i in range(11):
# user = int(input('Integer: '))
# x = list()
# for _ in range(3):
# user = int(input("int: "))
# x.append(user)
# print(sum(x))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,861
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_5/credentials.py
|
#!usr/bin/env/python3
import os
os.environ["EMAIL_USER"] = "yeboahd24@gmail.com"
os.environ["EMAIL_PASS"] = "maryyeboah70"
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,862
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_4/coroutine.py
|
#!usr//bin/env/python3
def simple_coroutine(): #
print('-> coroutine started')
x = yield # to recive data from the client
print('-> coroutine received:', x)
my_coro = simple_coroutine()
print(my_coro) # coroutine/generator
print(next(my_coro)) # starting coroutine
print(my_coro.send(42)) # coroutine recieve: 42
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,863
|
yeboahd24/python202
|
refs/heads/main
|
/16-sqlite.py
|
# -*- coding: utf-8 -*-
"""
> sqlite3 phonebook.db
sqlite> create table contacts
(
id integer primary key autoincrement,
name string not null,
number string not null
);
"""
import sqlite3
from simpletk import *
def execSQL ( query, params = () ):
with sqlite3.connect( dbName ) as conn:
cursor = conn.cursor()
result = list(cursor.execute(query, params))
conn.commit()
return result
def loadTableFromBase ():
query = 'SELECT * FROM contacts ORDER BY name desc'
records = execSQL(query)
table.clear()
for row in records:
table.appendRow( row )
def addRecord( sender ):
query = 'INSERT INTO contacts VALUES(NULL,?, ?)'
parameters = ( nameEdit.text, phoneEdit.text )
execSQL( query, parameters )
loadTableFromBase()
nameEdit.text = ''
phoneEdit.text = ''
def execSQLCommand( event ):
result = execSQL(sqlMemo.text)
print( result )
loadTableFromBase()
dbName = "phonebook.db"
execSQL( """create table if not exists contacts
(
id integer primary key autoincrement,
name string not null,
number string not null
);""" )
app = TApplication("customer sqlite3")
app.size = (400, 350)
font = ('Arial', 11)
topPanel = TPanel(app, height = 52)
topPanel.align = TOP
nameLbl = TLabel(topPanel, text = "Name", font = font)
nameLbl.position = (5, 5)
phoneLbl = TLabel(topPanel, text = "Теlephone", font = font)
phoneLbl.position = (5, 25)
nameEdit = TEdit(topPanel, width = 160, font = font)
nameEdit.position = (75, 5)
phoneEdit = TEdit(topPanel, width = 160, font = font)
phoneEdit.position = (75, 25)
addImage = PhotoImage('save.gif')
addBtn = TButton(topPanel, text = " Add", width = 120, height = 30,
font = font, image = addImage )
addBtn.position = (250, 10)
addBtn.onClick = addRecord
sqlMemo = TMemo(app, height = 100)
sqlMemo.align = BOTTOM
sqlMemo.text = "select * from contacts"
sqlPanel = TPanel(app, height = 32)
sqlPanel.align = BOTTOM
lblSQL = TLabel(sqlPanel, text = 'SQL', font = font )
lblSQL.position = (5, 5)
execImage = PhotoImage('exec.gif')
sqlGoBtn = TButton(sqlPanel, text = "Go!", width = 60, height = 27,
font = font, image = execImage)
sqlGoBtn.position = (100, 2)
sqlGoBtn.onClick = execSQLCommand
table = TStringGrid( app, columns = ['A', 'B', 'C'],
oddColor = "#F0F0F0" )
table.align = CLIENT
table.font = font
# Заголовки столбцов все сразу
table.allHeadings( ['ID', 'Name', 'Теlephone'] )
# Настройка столбцов по номерам
table.column( "#0", minwidth = 0, width = 40 )
table.column( "#1", minwidth = 0, width = 100 )
table.column( "#2", minwidth = 0, width = 100 )
loadTableFromBase()
app.run()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,864
|
yeboahd24/python202
|
refs/heads/main
|
/python etc_2/user_dict.py
|
#!usr/bin/env/python3
from collections import UserDict
class DistinctError(ValueError):
"""Raised when duplicate value is added to a distinctdict."""
class distinctdict(UserDict):
"""Dictionary that does not accept duplicate values."""
def __setitem__(self, key, value):
if value in self.values():
if (
(key in self and self[key] != value) or
key not in self
):
raise DistinctError(
"This value already exists for different key"
)
super().__setitem__(key, value)
my = distinctdict()
my['key'] = 'value'
# my['other_key'] = 'value' #error because the same value
my['key1'] = 'value2'
# print(my)
# Python program to demonstrate
# userdict
# Creating a Dictionary where
# deletion is not allowed
class MyDict(UserDict):
# Function to stop deleltion
# from dictionary
def __del__(self):
raise RuntimeError("Deletion not allowed")
# Function to stop pop from
# dictionary
def pop(self, s = None):
raise RuntimeError("Deletion not allowed")
# Function to stop popitem
# from Dictionary
def popitem(self, s = None):
raise RuntimeError("Deletion not allowed")
# Driver's code
d = MyDict({'a':1,
'b': 2,
'c': 3})
print("Original Dictionary")
print(d)
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,865
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_5/super_function.py
|
#!usr/bin/env/python3
#This kind of method leads to error
class Base:
def __init__(self):
print('Base.__init__')
class A(Base):
def __init__(self):
Base.__init__(self)
print('A.__init__')
class B(Base):
def __init__(self):
Base.__init__(self)
print('B.__init__')
class C(A,B):
def __init__(self):
A.__init__(self)
B.__init__(self)
print('C.__init__')
# c = C()
# print(c) # duplications
class Base:
def __init__(self):
print('Base.__init__')
class A(Base):
def __init__(self):
super().__init__()
print('A.__init__')
class B(Base):
def __init__(self):
super().__init__()
print('B.__init__')
class C(A,B):
def __init__(self):
super().__init__() # Only one call to super() here
print('C.__init__')
b = C()
print(b)
print(C.__mro__)
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,866
|
yeboahd24/python202
|
refs/heads/main
|
/Concurrent/ThreadVsProcess.py
|
#!usr/bin/env/python3
# Threads Vs Process
import threading
from multiprocessing import Process
import time
import os
def MyThread():
time.sleep(2)
t0 = time.time()
threads = []
for i in range(10):
thread = threading.Thread(target=MyThread)
thread.start()
threads.append(thread)
t1 = time.time()
print("Total Time for Creating 10 Threads: {} seconds".format(t1-t0))
for thread in threads:
thread.join()
t2 = time.time()
procs = []
for i in range(10):
process = Process(target=MyThread)
process.start()
procs.append(process)
t3 = time.time()
print("Total Time for Creating 10 Processes: {} seconds".format(t3-t2))
for proc in procs:
proc.join()
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,867
|
yeboahd24/python202
|
refs/heads/main
|
/python_etc_3/tombolist.py
|
#!usr/bin/env/python3
from abstract import Tombola
@Tombola.register # virtual class of Tombola
class TomboList(list):
def pick(self):
if self: #
position = randrange(len(self))
return self.pop(position) #
else:
raise LookupError('pop from empty TomboList')
load = list.extend
def loaded(self):
return bool(self)
def inspect(self):
return tuple(sorted(self))
# Note that because of the registration, the functions issubclass and isinstance act as
# if TomboList is a subclass of Tombola:
x = TomboList
y = Tombola
print(issubclass(x, y)) # True
t = TomboList(range(100))
print(isinstance(t, Tombola)) # True
print(TomboList.__mro__)
# Tombola is not in Tombolist.__mro__, so Tombolist does not inherit any methods from
# Tombola.
print(issubclass(x, list))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,868
|
yeboahd24/python202
|
refs/heads/main
|
/concurrent1/IObottleneck.py
|
#!usr/bin/env/python3
import requests
""" I/O bottlenecks, or I/O bottlenecks for short, are bottlenecks where your computer spends
more time waiting on various inputs and outputs than it does on processing the
information."""
import urllib.request
import time
# t0 = time.time()
# req = urllib.request.urlopen('http://www.example.com')
# pageHtml = req.read()
# t1 = time.time()
# print("Total Time To Fetch Page: {} Seconds".format(t1-t0))
t0 = time.time()
req = requests.get('https://www.google.com')
pageHtml = req.content
print(pageHtml)
t1 = time.time()
print("Total Time To Fetch Page: {} Seconds".format(t1-t0))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,869
|
yeboahd24/python202
|
refs/heads/main
|
/Design Pattern/sub1/coroutine.py
|
#!usr/bin/env/python3
def tally():
score = 0
while True:
increment = yield score # coroutine
score+=increment
# first = tally()
# print(next(first)) # 0
# first.send(1)
# print(next(first))
def generator2():
for i in range(10):
yield i
def generator3():
for j in range(10, 20):
yield j
def generator():
for i in generator2():
yield i
for j in generator3():
yield j
# Update with yield from
# Before you use yield from the function must be iterated
def generator():
yield from generator2()
yield from generator3()
# n = generator()
# print(next(n))
# print(next(n))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,870
|
yeboahd24/python202
|
refs/heads/main
|
/python etc_2/namedTuple.py
|
#!usr/bin/env/python3
from typing import NamedTuple, List, Optional
class Monty(NamedTuple):
name: str
age: int
def get_name(self):
pass
class Python(Monty):
sch: str # this will not work because we can't add another attribute but we can overide methods
def get_name(self):
pass
p = Python('linux', 2)
# p = Python('linux', 2, 'python') # error
# print(p)
# def add(item: Optional[int]=None):
# return item
# print(add())
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
38,420,871
|
yeboahd24/python202
|
refs/heads/main
|
/python etc_2/user_list.py
|
#!usr/bin/env/python3
from collections import UserList
class Folder(UserList):
def __init__(self, name):
self.name = name
def dir(self, nesting=0):
offset = " " * nesting
print('%s%s/' % (offset, self.name))
for element in self:
if hasattr(element, 'dir'):
element.dir(nesting + 1)
else:
print("%s %s" % (offset, element))
# tree = Folder('project')
# tree.append('README.md')
# print(tree.dir())
class MyList(UserList):
def __init__(self, name):
self.name = name
def pops(self, s:int):
print(f'popping index: {s} from the orginal list {self.name}')
self.name[s]=None
if None in self.name:
self.name.remove(None)
return f'here is the updated list now {self.name}'
def appends(self, s:int):
print(f'appending {s} to the original list {self.name}')
self.name.append(s)
return f'updated list {self.name}'
L = MyList([1,2,3,4])
# print(L.pop(1))
print(L.appends(5))
|
{"/apps/product/admin.py": ["/apps/product/models.py"], "/apps/product/models.py": ["/apps/vendor/models.py"], "/apps/vendor/admin.py": ["/apps/vendor/models.py"], "/apps/order/models.py": ["/apps/product/models.py", "/apps/vendor/models.py"], "/apps/vendor/views.py": ["/apps/vendor/models.py", "/apps/product/models.py"], "/apps/product/views.py": ["/apps/product/forms.py", "/apps/product/models.py"], "/apps/cart/views.py": ["/apps/cart/forms.py", "/apps/order/utilities.py"], "/apps/core/views.py": ["/apps/product/models.py", "/apps/core/forms.py"], "/apps/order/utilities.py": ["/apps/order/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.