index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
30,621,488
|
josemariaaynat/CRYPTO
|
refs/heads/main
|
/movements/forms.py
|
from flask_wtf import FlaskForm
from wtforms import *
from wtforms.validators import DataRequired, Length
# Currency tickers offered in the "to" selector.
monedas = ('EUR', 'ETH', 'LTC', 'BNB', 'EOS', 'XLM', 'TRX', 'BTC', 'XRP', 'BCH', 'USDT', 'BSV', 'ADA')


class FormMovimientos(FlaskForm):
    """Exchange-movement form: source/target currency plus both amounts."""

    # "From" choices are populated per-request by the view (owned currencies).
    monedafrom = SelectField('From', validators=[DataRequired()])
    cantidadfrom = FloatField('Cantidad', validators=[DataRequired()])
    monedato = SelectField('To', validators=[DataRequired()], choices=monedas)
    cantidadto = FloatField('CantidadTO', validators=[DataRequired()])
    submit = SubmitField('Aceptar')
    calc = SubmitField('Calcular')
|
{"/movements/views.py": ["/movements/__init__.py", "/movements/forms.py"], "/movements/acciones.py": ["/movements/__init__.py"]}
|
30,621,489
|
josemariaaynat/CRYPTO
|
refs/heads/main
|
/movements/views.py
|
from movements import app, acciones
from movements.forms import FormMovimientos
from flask import render_template, request, url_for, redirect
DBFILE = app.config['DBFILE']


@app.route('/')
def listadoMovimientos():
    """Render every stored movement on the main listing page."""
    movimientos = acciones.busqueda(
        'SELECT fecha, hora, monedafrom, cantidadfrom, monedato, cantidadto, conversion, id FROM movimientos;'
    )
    return render_template("movimientos.html", datos=movimientos, title="Todos los movimientos")
@app.route('/nuevacompra', methods=['GET', 'POST'])
def transaccion():
    """Create a new movement.

    GET renders the empty form; a valid POST inserts a row and redirects
    to the listing. An invalid POST re-renders the form with errors.
    """
    fecha = acciones.fecha()
    hora = acciones.hora()
    # Currencies already bought, plus EUR, are the valid "from" choices.
    monedasDisponibles = acciones.busqueda('SELECT DISTINCT monedato FROM movimientos')
    resultado = ["EUR"]
    for d in monedasDisponibles:
        resultado.extend(list(d.values()))
    conversion = 10  # TODO(review): hard-coded rate — presumably should come from the price API
    form = FormMovimientos()
    form.monedafrom.choices = resultado
    if request.method == 'POST' and form.validate():
        acciones.busqueda(
            'INSERT INTO movimientos (fecha, hora, monedafrom, monedato, cantidadfrom, cantidadto, conversion) VALUES (?,?, ?, ?, ?,?,?);',
            (
                fecha,
                hora,
                form.monedafrom.data,
                form.monedato.data,
                form.cantidadfrom.data,
                form.cantidadto.data,
                conversion,
            )
        )
        return redirect(url_for('listadoMovimientos'))
    # GET, or POST with validation errors: the original duplicated this
    # render in two branches — collapsed into one fall-through return.
    return render_template("alta.html", form=form)
|
{"/movements/views.py": ["/movements/__init__.py", "/movements/forms.py"], "/movements/acciones.py": ["/movements/__init__.py"]}
|
30,621,490
|
josemariaaynat/CRYPTO
|
refs/heads/main
|
/config_template.py
|
# Template configuration: copy next to the app and fill in real values.
SECRET_KEY ='clavesecreta'  # Flask session-signing key — replace in production
API_KEY = 'claviapi'  # external (price) API key
DBFILE = 'ruta de basededatos'  # path to the SQLite database file
|
{"/movements/views.py": ["/movements/__init__.py", "/movements/forms.py"], "/movements/acciones.py": ["/movements/__init__.py"]}
|
30,639,359
|
symbatoss/python5-3month
|
refs/heads/master
|
/project/models.py
|
from django.db import models
# Create your models here.
class Category(models.Model):
    """Product category; referenced by Product through a nullable FK."""
    class Meta:
        verbose_name = 'Категория'
        verbose_name_plural = 'Категории'

    # Display name, also used as the string representation.
    name = models.CharField(max_length=100,
                            verbose_name='Название')

    def __str__(self):
        return self.name
class Product(models.Model):
    """Sellable item belonging to at most one Category."""
    class Meta:
        verbose_name = 'Продукт'
        verbose_name_plural = 'Продукты'

    title = models.CharField(max_length=100,
                             verbose_name='Название')
    description = models.TextField(verbose_name='Описание')
    # Whole-number price — no decimal/currency handling at the model level.
    price = models.IntegerField(verbose_name='Цена')
    # Category may be NULL; deleting a category cascades to its products.
    category = models.ForeignKey(Category, null=True,
                                 on_delete=models.CASCADE,
                                 verbose_name='Категория')

    def __str__(self):
        return self.title
class Review(models.Model):
    """Free-text review attached to a Product."""
    class Meta:
        verbose_name = 'Отзыв'
        verbose_name_plural = 'Отзывы'

    text = models.TextField(verbose_name='Текст')
    # Product may be NULL; deleting a product cascades to its reviews.
    product = models.ForeignKey(Product, null=True,
                                on_delete=models.CASCADE,
                                verbose_name='Продукт')

    def __str__(self):
        return self.text
|
{"/project/forms.py": ["/project/models.py"], "/views.py": ["/project/forms.py", "/project/models.py"], "/project/views.py": ["/project/forms.py", "/project/models.py"]}
|
30,639,360
|
symbatoss/python5-3month
|
refs/heads/master
|
/project/views.py
|
from django.shortcuts import render, redirect
from project.forms import CategoryForm, UserCreationForm
from project.models import Product, Review, Category
# Create your views here.
def get_all_products(request):
    """List products, optionally filtered by the `search` query parameter."""
    search_term = request.GET.get('search', '')
    matching = Product.objects.filter(title__contains=search_term)
    print(matching)
    return render(request, 'products.html', context={'all_products': matching})
def get_one_product(request, id):
    """Product detail page together with the product's reviews."""
    context = {
        'product': Product.objects.get(id=id),
        'review': Review.objects.filter(product_id=id),
    }
    return render(request, 'detail.html', context=context)
def add_category(request):
    """Create a category from a raw (unvalidated) POST field."""
    if request.method != 'POST':
        return render(request, 'add.html')
    name = request.POST.get('category_name', '')
    print(name)
    Category.objects.create(name=name)
    return redirect('/add/')
def add(request):
    """Create a category through CategoryForm with validation."""
    if request.method == 'POST':
        print(request.POST)
        form = CategoryForm(data=request.POST)
        if form.is_valid():
            # Use the validated value — the original read raw request.POST
            # after validation, bypassing the form's cleaning.
            Category.objects.create(name=form.cleaned_data['name'])
            return redirect('/add/')
        # Invalid: re-render with the bound form so errors are shown.
        return render(request, 'add1.html', context={'form': form})
    return render(request, 'add1.html', context={'form': CategoryForm()})
def main_page(request):
    """Render the static landing page."""
    return render(request, 'main.html')
def register(request):
    """Register a new user via the custom UserCreationForm."""
    if request.method == 'POST':
        form = UserCreationForm(data=request.POST)
        if not form.is_valid():
            print('POST запрос с ошибками')
            return render(request, 'register.html', context={'form': form})
        form.save()
        print('POST запрос без ошибок')
        return redirect('/admin/')
    print('Get запрос')
    return render(request, 'register.html', context={'form': UserCreationForm()})
|
{"/project/forms.py": ["/project/models.py"], "/views.py": ["/project/forms.py", "/project/models.py"], "/project/views.py": ["/project/forms.py", "/project/models.py"]}
|
30,639,361
|
symbatoss/python5-3month
|
refs/heads/master
|
/project/forms.py
|
from django import forms
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.forms import TextInput, PasswordInput
from project.models import Category
class CategoryForm(forms.Form):
    """Category creation form with a uniqueness check on the name."""
    name = forms.CharField(min_length=2, max_length=100,
                           required=True, label='Название',
                           widget=TextInput(
                               attrs={
                                   'placeholder': 'Название категории'
                               }
                           ))

    def clean_name(self):
        """Reject names that already exist as a Category."""
        name = self.cleaned_data['name']
        print(name)
        categories = Category.objects.filter(name=name)
        print(categories.count())
        if categories.count() > 0:
            raise ValidationError('Такая категория уже существует')
        return name

    def save(self, commit=True):
        """Create and return the Category.

        With commit=False an unsaved instance is returned. The original
        ignored `commit` and also called .save() after objects.create(),
        issuing a redundant second database write.
        """
        if commit:
            return Category.objects.create(name=self.cleaned_data['name'])
        return Category(name=self.cleaned_data['name'])
class UserCreationForm(forms.Form):
    """Minimal sign-up form: username plus password with confirmation."""
    username = forms.CharField(max_length=100,
                               widget=TextInput(attrs={
                                   'placeholder': 'UserName',
                                   'class': 'form-control'
                               }))
    password = forms.CharField(max_length=100,
                               widget=PasswordInput(attrs={
                                   'placeholder': 'Password',
                                   'class': 'form-control'
                               }))
    password1 = forms.CharField(max_length=100,
                                widget=PasswordInput(attrs={
                                    'placeholder': 'Repeat Password',
                                    'class': 'form-control'
                                }))

    def clean_username(self):
        """Reject usernames that are already taken."""
        username = self.cleaned_data['username']
        if User.objects.filter(username=username).count() > 0:
            raise ValidationError('Такой пользователь уже существует!')
        return username

    def clean_password1(self):
        """Ensure both password fields match."""
        if self.cleaned_data['password'] != self.cleaned_data['password1']:
            # Typo fixed: the original message read 'совпададают'.
            raise ValidationError('Пароли не совпадают!')
        return self.cleaned_data['password1']

    def save(self, email='osymbat1@gmail.com'):
        """Create and return the new user.

        `email` keeps the originally hard-coded address as its default for
        backward compatibility; callers can now pass a real address.
        create_user() already persists, so the original's extra save()
        (a redundant second write) is dropped.
        """
        user = User.objects.create_user(username=self.cleaned_data['username'],
                                        email=email,
                                        password=self.cleaned_data['password1'])
        return user
|
{"/project/forms.py": ["/project/models.py"], "/views.py": ["/project/forms.py", "/project/models.py"], "/project/views.py": ["/project/forms.py", "/project/models.py"]}
|
30,656,139
|
wyadmins/hotrolling_monitor
|
refs/heads/master
|
/__init__baowu.py
|
import sys
import traceback
import time
from datetime import datetime
from graph import Graph
from alg001_MG2250 import Alg001
from alg003_MG2250 import Alg003
from alg004_MG2250 import Alg004
def main():
    """Read graph JSON messages from stdin, run the matching algorithm,
    and write the (possibly exception-annotated) graph JSON to stdout.
    """
    t1 = time.time()  # NOTE(review): shared start time — consumetime accumulates
                      # across the whole stream; confirm that is intended
    for x in sys.stdin:
        try:
            graph = Graph.graph_from_json(x)
            algcode = graph.algcode
            # Dispatch to the registered algorithm implementation.
            if algcode == 'alg001_MG2250':    # motor jam
                alg = Alg001(graph)
            elif algcode == 'alg003_MG2250':  # servo-valve pressure-difference index
                alg = Alg003(graph)
            elif algcode == 'alg004_MG2250':  # servo-valve steady-state opening
                alg = Alg004(graph)
            else:
                raise Exception("Algorithm not registered !")
            # The original re-checked `algcode is not None` here, but that
            # branch was unreachable: every unmatched code raised above.
            alg.execute()
            graph.consumetime = time.time() - t1
            sys.stdout.write(str(graph.to_json()) + '\n')
        except Exception as e:
            # Top-level boundary: annotate the graph (when one exists) with
            # the failure and still emit it, so the pipeline keeps flowing.
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errorinfo = repr(traceback.format_exception(exc_type, exc_value, exc_traceback))
            error = dict(exceptiontype=str(exc_value), time=str(datetime.now()), exception=errorinfo,
                         runningtime=str(time.time() - t1))
            # graph may not exist if the JSON line itself failed to parse.
            if 'graph' in locals():
                graph.exceptions.append(error)
                graph.inputstr = [x]
                graph.consumetime = time.time() - t1
                sys.stdout.write(str(graph.to_json()) + '\n')


if __name__ == '__main__':
    main()
|
{"/alg022_MG2250.py": ["/graph.py"], "/graph.py": ["/com_util.py"], "/alg004_s8_MG2250.py": ["/graph.py", "/com_util.py"], "/alg017_MG2250.py": ["/graph.py"], "/Alarm_motor_temp_MG2250.py": ["/graph.py"], "/alg023_MG2250.py": ["/graph.py"], "/alg004_s5_MG2250.py": ["/graph.py", "/com_util.py"], "/alg001_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s3_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s4_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s6_MG2250.py": ["/graph.py", "/com_util.py"], "/alg010_MG2250.py": ["/graph.py", "/com_util.py"], "/com_util.py": ["/graph.py"], "/alg005_MG2250.py": ["/com_util.py", "/graph.py"], "/alg004_MG2250.py": ["/graph.py", "/com_util.py"], "/__init__alarm.py": ["/graph.py"], "/func_test.py": ["/graph.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg016_MG2250.py", "/alg015_MG2250.py"], "/alg021_MG2250.py": ["/graph.py"], "/alg014_MG2250.py": ["/graph.py"], "/__init__baowu.py": ["/com_util.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg004_s2_MG2250.py", "/alg004_s3_MG2250.py", "/alg004_s4_MG2250.py", "/alg004_s5_MG2250.py", "/alg004_s6_MG2250.py", "/alg004_s7_MG2250.py", "/alg004_s8_MG2250.py", "/alg005_MG2250.py", "/alg010_MG2250.py", "/alg011_MG2250.py", "/alg014_MG2250.py", "/alg015_MG2250.py", "/alg016_MG2250.py", "/alg017_MG2250.py", "/alg018_MG2250.py", "/alg019_MG2250.py", "/alg021_MG2250.py", "/graph.py"], "/alg003_MG2250.py": ["/graph.py", "/com_util.py"], "/alg015_MG2250.py": ["/com_util.py", "/graph.py"], "/alg011_MG2250.py": ["/graph.py"], "/alg004_s7_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s2_MG2250.py": ["/graph.py", "/com_util.py"], "/alg018_MG2250.py": ["/graph.py"], "/alg016_MG2250.py": ["/graph.py"]}
|
30,656,140
|
wyadmins/hotrolling_monitor
|
refs/heads/master
|
/alg004_MG2250.py
|
"""
Provides:
伺服阀泄漏指标,计算稳态伺服阀开口度
==============
Input Signals (4):
* 实际辊缝值:gap_act
* 辊缝设定值:gap_ref
* 伺服阀开口度:sv_out
* 速度给定值: speed_ref
Parameter Configs (2):
* 辊缝稳态最大值
* 稳态持续时间下限(秒)
==============
Outputs:
指标 | 指标id
---------------------
* 伺服阀开口度均值: 10400
* 伺服阀开口度标准差:10401
* 伺服阀开口度最大值:10402
* 伺服阀开口度最小值:10403
"""
import numpy as np
from graph import Index
import com_util
class Alg004:
    """Servo-valve leakage indicator: steady-state valve-opening statistics.

    Per detected steady-state window, emits four first-level indices:
    mean (10400), std (10401), max (10402) and min (10403) of sv_out.
    """
    def __init__(self, graph):
        # Message/context object providing data access and output lists.
        self.graph = graph

    @staticmethod
    def get_fe(df, algparas):
        """Extract steady-state windows and sv_out statistics.

        df: frame with gap_ref/gap_act/sv_out/speed_ref columns, plus
            num_per_sec (samples per second) attached by get_data_from_api.
        algparas: [steady-gap lower bound, minimum steady duration (s)].
        Returns parallel lists (measdate, avg, std, max, min); all empty
        when df is empty or no window qualifies.
        """
        avg_sv_out = []
        std_sv_out = []
        max_sv_out = []
        min_sv_out = []
        measdate = []
        if not df.empty:
            max_gap = np.max(df.gap_ref)
            # Steady state: actual gap within ±1% of the maximal reference,
            # reference at its max and above the configured floor, mill stopped.
            upper_limit = max_gap * 1.01
            lower_limit = max_gap * 0.99
            idx = (df.gap_act < upper_limit) & (df.gap_act > lower_limit) & (df.gap_ref == max_gap) \
                & (max_gap > algparas[0]) & (df.speed_ref == 0)  # standstill steady-state valve opening
            # Minimum window length in samples.
            n = algparas[1] * df.num_per_sec
            re_iter = com_util.Reg.finditer(idx, n)
            for i in re_iter:
                [stidx, edidx] = i.span()
                if edidx - stidx > n:
                    # Skip the first n samples of each window (settling time),
                    # then compute statistics over the remainder.
                    measdate.append(df.index[stidx + n])
                    avg_sv_out.append(np.mean(df.sv_out[stidx+n:edidx]))
                    std_sv_out.append(np.std(df.sv_out[stidx+n:edidx]))
                    max_sv_out.append(np.max(df.sv_out[stidx+n:edidx]))
                    min_sv_out.append(np.min(df.sv_out[stidx+n:edidx]))
        return measdate, avg_sv_out, std_sv_out, max_sv_out, min_sv_out

    def execute(self):
        """Fetch signals, compute statistics, append Index records to the graph."""
        df = self.graph.get_data_from_api(['gap_ref', 'gap_act', 'sv_out', 'speed_ref'])
        measdate, avg_sv_out, std_sv_out, max_sv_out, min_sv_out = self.get_fe(df, self.graph.parameter)
        for i, meastime in enumerate(measdate):
            # One Index per statistic, all sharing the same measurement time.
            index = Index({'assetid': self.graph.deviceid, 'meastime1st': meastime, 'feid1st': "10400",
                           'value1st': avg_sv_out[i], 'indices2nd': []})
            self.graph.indices.append(index)
            index = Index({'assetid': self.graph.deviceid, 'meastime1st': meastime, 'feid1st': "10401",
                           'value1st': std_sv_out[i], 'indices2nd': []})
            self.graph.indices.append(index)
            index = Index({'assetid': self.graph.deviceid, 'meastime1st': meastime, 'feid1st': "10402",
                           'value1st': max_sv_out[i], 'indices2nd': []})
            self.graph.indices.append(index)
            index = Index({'assetid': self.graph.deviceid, 'meastime1st': meastime, 'feid1st': "10403",
                           'value1st': min_sv_out[i], 'indices2nd': []})
            self.graph.indices.append(index)
|
{"/alg022_MG2250.py": ["/graph.py"], "/graph.py": ["/com_util.py"], "/alg004_s8_MG2250.py": ["/graph.py", "/com_util.py"], "/alg017_MG2250.py": ["/graph.py"], "/Alarm_motor_temp_MG2250.py": ["/graph.py"], "/alg023_MG2250.py": ["/graph.py"], "/alg004_s5_MG2250.py": ["/graph.py", "/com_util.py"], "/alg001_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s3_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s4_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s6_MG2250.py": ["/graph.py", "/com_util.py"], "/alg010_MG2250.py": ["/graph.py", "/com_util.py"], "/com_util.py": ["/graph.py"], "/alg005_MG2250.py": ["/com_util.py", "/graph.py"], "/alg004_MG2250.py": ["/graph.py", "/com_util.py"], "/__init__alarm.py": ["/graph.py"], "/func_test.py": ["/graph.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg016_MG2250.py", "/alg015_MG2250.py"], "/alg021_MG2250.py": ["/graph.py"], "/alg014_MG2250.py": ["/graph.py"], "/__init__baowu.py": ["/com_util.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg004_s2_MG2250.py", "/alg004_s3_MG2250.py", "/alg004_s4_MG2250.py", "/alg004_s5_MG2250.py", "/alg004_s6_MG2250.py", "/alg004_s7_MG2250.py", "/alg004_s8_MG2250.py", "/alg005_MG2250.py", "/alg010_MG2250.py", "/alg011_MG2250.py", "/alg014_MG2250.py", "/alg015_MG2250.py", "/alg016_MG2250.py", "/alg017_MG2250.py", "/alg018_MG2250.py", "/alg019_MG2250.py", "/alg021_MG2250.py", "/graph.py"], "/alg003_MG2250.py": ["/graph.py", "/com_util.py"], "/alg015_MG2250.py": ["/com_util.py", "/graph.py"], "/alg011_MG2250.py": ["/graph.py"], "/alg004_s7_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s2_MG2250.py": ["/graph.py", "/com_util.py"], "/alg018_MG2250.py": ["/graph.py"], "/alg016_MG2250.py": ["/graph.py"]}
|
30,656,141
|
wyadmins/hotrolling_monitor
|
refs/heads/master
|
/alg015_MG2250.py
|
"""
Provides:
马钢双仪表检测
==============
Input Signals (2):
* 仪表1信号:signal1
* 仪表2信号:signal2
Parameter Configs (5):
* 信号类型:模拟量 -> 1 , 0/1型量 -> 2
* 模拟量聚合时间(s)
* 模拟量偏差门限(%)
* 0-1量报警点数要求
* 0-1量报警时长要求(s)
==============
Outputs:
指标 | 指标id
---------------------
"""
import numpy as np
from graph import Event
class Alg015:
    """Dual-instrument consistency check.

    Emits a level-1 event when two redundant instrument signals disagree
    beyond the configured thresholds.
    """
    def __init__(self, graph):
        self.graph = graph

    def get_alarm_analogs(self, p2, p3):
        """Analog signals: aggregate over p2 seconds, alarm when the relative
        deviation exceeds p3 percent anywhere in the packet."""
        df = self.graph.get_data_from_api(['signal1', 'signal2'])
        if not df.empty:
            df = df.resample(f'{p2}S').mean()
            # Relative deviation (%) between the two instruments.
            r = (np.abs(df['signal1'] - df['signal2']) / np.mean(df['signal1'] + df['signal2'])) * 100
            # BUG FIX: the original wrote `np.any(r) > p3`, comparing a
            # boolean against the threshold; the intended test is any(r > p3).
            if np.any(r > p3):
                event = Event({'assetid': self.graph.deviceid, 'meastime': df.index[0], 'level': 1, 'info': '双仪表数值不匹配'})
                self.graph.events.append(event)

    def get_alarm_logical(self, p4, p5):
        """0/1 signals: alarm when the mismatch sample count exceeds p4 or
        the mismatch duration exceeds p5 seconds."""
        df = self.graph.get_data_from_api(['signal1', 'signal2'])
        if not df.empty:
            n = np.sum(np.abs(df['signal1'] - df['signal2']))  # mismatching samples
            t = n * df.dt                                      # mismatch duration (s)
            # BUG FIX: the original compared t against p4 and n against p5,
            # swapping the documented meanings (p4 = count, p5 = seconds).
            if n > p4 or t > p5:
                event = Event({'assetid': self.graph.deviceid, 'meastime': df.index[0], 'level': 1, 'info': '双仪表数值不匹配'})
                self.graph.events.append(event)

    def execute(self):
        """Dispatch on configured signal type: 1 = analog, 2 = logical (0/1)."""
        [p1, p2, p3, p4, p5] = self.graph.parameter
        if 1 == p1:
            self.get_alarm_analogs(p2, p3)
        elif 2 == p1:
            self.get_alarm_logical(p4, p5)
|
{"/alg022_MG2250.py": ["/graph.py"], "/graph.py": ["/com_util.py"], "/alg004_s8_MG2250.py": ["/graph.py", "/com_util.py"], "/alg017_MG2250.py": ["/graph.py"], "/Alarm_motor_temp_MG2250.py": ["/graph.py"], "/alg023_MG2250.py": ["/graph.py"], "/alg004_s5_MG2250.py": ["/graph.py", "/com_util.py"], "/alg001_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s3_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s4_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s6_MG2250.py": ["/graph.py", "/com_util.py"], "/alg010_MG2250.py": ["/graph.py", "/com_util.py"], "/com_util.py": ["/graph.py"], "/alg005_MG2250.py": ["/com_util.py", "/graph.py"], "/alg004_MG2250.py": ["/graph.py", "/com_util.py"], "/__init__alarm.py": ["/graph.py"], "/func_test.py": ["/graph.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg016_MG2250.py", "/alg015_MG2250.py"], "/alg021_MG2250.py": ["/graph.py"], "/alg014_MG2250.py": ["/graph.py"], "/__init__baowu.py": ["/com_util.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg004_s2_MG2250.py", "/alg004_s3_MG2250.py", "/alg004_s4_MG2250.py", "/alg004_s5_MG2250.py", "/alg004_s6_MG2250.py", "/alg004_s7_MG2250.py", "/alg004_s8_MG2250.py", "/alg005_MG2250.py", "/alg010_MG2250.py", "/alg011_MG2250.py", "/alg014_MG2250.py", "/alg015_MG2250.py", "/alg016_MG2250.py", "/alg017_MG2250.py", "/alg018_MG2250.py", "/alg019_MG2250.py", "/alg021_MG2250.py", "/graph.py"], "/alg003_MG2250.py": ["/graph.py", "/com_util.py"], "/alg015_MG2250.py": ["/com_util.py", "/graph.py"], "/alg011_MG2250.py": ["/graph.py"], "/alg004_s7_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s2_MG2250.py": ["/graph.py", "/com_util.py"], "/alg018_MG2250.py": ["/graph.py"], "/alg016_MG2250.py": ["/graph.py"]}
|
30,656,142
|
wyadmins/hotrolling_monitor
|
refs/heads/master
|
/alg017_MG2250.py
|
"""
Provides:
减压阀漂移,计算稳态工况减压阀压力均值
==============
Input Signals (4):
* 减压阀压力:pressure
* 参考信号位:s1
* 参考信号位:s2
* 参考信号位:s3
Parameter Configs (4):
* 参考信号位s1稳态窗口时长(s)
* 参考信号s2稳态选取标志:[0, 1]
* 参考信号s3稳态选取标志:[0, 1]
* 稳态窗口变异系数门限:范围(0-0.5),用于参考信号位s1,大于该值则数据包不包含稳态工况
==============
Outputs:
指标 | 指标id
---------------------
减压阀压力均值 17000
"""
import numpy as np
import pandas as pd
from graph import Index
class Alg016:
    """Pressure-reducing valve drift: mean valve pressure over the calmest
    steady-state window of reference signal s1 (index id 17000).

    NOTE(review): class is named Alg016 inside alg017_MG2250.py, as in the
    original — kept because callers import it under this name.
    """
    def __init__(self, graph):
        self.graph = graph

    def get_alarm(self):
        """Locate the steady-state window and append the mean-pressure index.

        Steady state = rolling CV of s1 below algparas[1] while s2/s3 equal
        their configured flags (algparas[2]/algparas[3]) across the window.
        """
        df = self.graph.get_data_from_api(['pressure', 's1', 's2', 's3'])
        if df.empty:
            return
        algparas = self.graph.parameter
        # NOTE(review): window kept as algparas[0] * df.dt like the original;
        # if algparas[0] is seconds, samples would be algparas[0] / df.dt — confirm.
        win = int(algparas[0] * df.dt)
        rolling = df['s1'].rolling(win)
        roll_cv = np.abs(rolling.std() / rolling.mean())  # windowed coefficient of variation
        idx1 = roll_cv < algparas[1]
        # BUG FIX: `if not idx1:` on a Series raises "truth value is
        # ambiguous"; the intended test is "no window passes the threshold".
        if not idx1.any():
            return
        # Windows where both flag signals hold across the whole (centered)
        # window. BUG FIX: compare the rolling min to 1 so the mask is
        # boolean — ANDing the original float series with idx1 fails.
        idx2 = pd.Series((df['s2'] == algparas[2]) & (df['s3'] == algparas[3]))\
            .rolling(win, center=True).min() == 1
        candidates = roll_cv[idx1 & idx2]
        if candidates.empty or np.isnan(candidates.min()):  # no steady window in this packet
            return
        # BUG FIX: argmin() on the filtered series is positional *within the
        # filter*; map the label of the minimum back to a position in df.
        center = df.index.get_loc(candidates.idxmin())
        stidx = int(center - np.floor(win / 2))
        edidx = int(center + np.ceil(win / 2))
        n = (edidx - stidx) // 5  # trim head and tail of the steady segment
        avg_pressure = np.mean(df['pressure'][stidx + n:edidx - n])
        index = Index({'assetid': self.graph.deviceid, 'meastime1st': df.index[0], 'feid1st': "17000",
                       'value1st': avg_pressure, 'indices2nd': []})
        self.graph.indices.append(index)

    def execute(self):
        """Entry point used by the dispatcher."""
        self.get_alarm()
|
{"/alg022_MG2250.py": ["/graph.py"], "/graph.py": ["/com_util.py"], "/alg004_s8_MG2250.py": ["/graph.py", "/com_util.py"], "/alg017_MG2250.py": ["/graph.py"], "/Alarm_motor_temp_MG2250.py": ["/graph.py"], "/alg023_MG2250.py": ["/graph.py"], "/alg004_s5_MG2250.py": ["/graph.py", "/com_util.py"], "/alg001_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s3_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s4_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s6_MG2250.py": ["/graph.py", "/com_util.py"], "/alg010_MG2250.py": ["/graph.py", "/com_util.py"], "/com_util.py": ["/graph.py"], "/alg005_MG2250.py": ["/com_util.py", "/graph.py"], "/alg004_MG2250.py": ["/graph.py", "/com_util.py"], "/__init__alarm.py": ["/graph.py"], "/func_test.py": ["/graph.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg016_MG2250.py", "/alg015_MG2250.py"], "/alg021_MG2250.py": ["/graph.py"], "/alg014_MG2250.py": ["/graph.py"], "/__init__baowu.py": ["/com_util.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg004_s2_MG2250.py", "/alg004_s3_MG2250.py", "/alg004_s4_MG2250.py", "/alg004_s5_MG2250.py", "/alg004_s6_MG2250.py", "/alg004_s7_MG2250.py", "/alg004_s8_MG2250.py", "/alg005_MG2250.py", "/alg010_MG2250.py", "/alg011_MG2250.py", "/alg014_MG2250.py", "/alg015_MG2250.py", "/alg016_MG2250.py", "/alg017_MG2250.py", "/alg018_MG2250.py", "/alg019_MG2250.py", "/alg021_MG2250.py", "/graph.py"], "/alg003_MG2250.py": ["/graph.py", "/com_util.py"], "/alg015_MG2250.py": ["/com_util.py", "/graph.py"], "/alg011_MG2250.py": ["/graph.py"], "/alg004_s7_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s2_MG2250.py": ["/graph.py", "/com_util.py"], "/alg018_MG2250.py": ["/graph.py"], "/alg016_MG2250.py": ["/graph.py"]}
|
30,656,143
|
wyadmins/hotrolling_monitor
|
refs/heads/master
|
/com_util.py
|
import requests
import json
import pandas as pd
import numpy as np
import re
def get_data_from_api(guid, st, ed):
    """Fetch one tag's values from the data service as a pandas Series.

    Accepts a tag-id string or a one-element list of tag ids; anything
    else is rejected with the same exception as before.
    """
    if isinstance(guid, str):
        tagid = guid
    elif isinstance(guid, list) and len(guid) == 1:
        tagid = guid[0]
    else:
        raise Exception("guid numbers > 1 !")
    url = f'http://192.168.1.15:8131/api/Values?tagid={tagid}&Start={st}&End={ed}'
    response = requests.get(url)
    return pd.Series(json.loads(response.json()))
def get_data_from_api_v2(assetid, aiid, st, ed, tags):
    """Fetch several tags for one asset and return them as a DataFrame
    whose columns are named after `tags`."""
    url = f'http://192.168.1.87:8132/api/Values/GetTagDataGet?AssetId={assetid}&AiId={aiid}&Start={st}&End={ed}'
    payload = pd.Series(json.loads(requests.get(url).json()))
    frame = pd.DataFrame(payload['Result']['Detail']).T
    frame.columns = tags
    return frame
def get_dt(index):
    """Return (sample interval in seconds, whole samples per second),
    derived from the first two timestamps of a datetime-like index."""
    first = pd.to_datetime(index[0])
    second = pd.to_datetime(index[1])
    dt = (second - first).total_seconds()
    return dt, int(np.floor(1 / dt))
def find_0_to_1(d):
    """Positions i where a 0/1 signal rises (d[i] -> d[i+1] goes 0 to 1)."""
    return np.where(np.diff(d) == 1)
def find_1_to_0(d):
    """Positions i where a 0/1 signal falls (d[i] -> d[i+1] goes 1 to 0)."""
    return np.where(np.diff(d) == -1)
def down_sample(df):
    """Down-sample a time-indexed frame to one-minute means."""
    return df.resample('T').mean()
class Reg:
    """Regex helpers that locate runs of a value in a 0/1 index array by
    rendering the array as a digit string and matching `{n,}` quantifiers."""

    @staticmethod
    def finditer(idx, n, flag=1):
        """Iterate over runs of `flag` of length >= n in the 0/1 array `idx`.

        Returns re match objects; span() gives the [start, end) positions.
        """
        digits = ''.join(str(v) for v in idx.astype(int))
        # Direct quantifier instead of the original tuple-formatting trick
        # (f'{flag}{n,}' followed by '()' -> '{}' replacement) — same pattern.
        return re.finditer(f'{flag}{{{n},}}', digits)

    @staticmethod
    def search(idx, n, sec):
        """Find the first run of 1s of length >= sec in `idx`, or None.

        NOTE(review): `n` is accepted but unused, exactly as in the
        original — kept for caller compatibility.
        """
        digits = ''.join(str(v) for v in idx.astype(int))
        return re.search(f'1{{{sec},}}', digits)
|
{"/alg022_MG2250.py": ["/graph.py"], "/graph.py": ["/com_util.py"], "/alg004_s8_MG2250.py": ["/graph.py", "/com_util.py"], "/alg017_MG2250.py": ["/graph.py"], "/Alarm_motor_temp_MG2250.py": ["/graph.py"], "/alg023_MG2250.py": ["/graph.py"], "/alg004_s5_MG2250.py": ["/graph.py", "/com_util.py"], "/alg001_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s3_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s4_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s6_MG2250.py": ["/graph.py", "/com_util.py"], "/alg010_MG2250.py": ["/graph.py", "/com_util.py"], "/com_util.py": ["/graph.py"], "/alg005_MG2250.py": ["/com_util.py", "/graph.py"], "/alg004_MG2250.py": ["/graph.py", "/com_util.py"], "/__init__alarm.py": ["/graph.py"], "/func_test.py": ["/graph.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg016_MG2250.py", "/alg015_MG2250.py"], "/alg021_MG2250.py": ["/graph.py"], "/alg014_MG2250.py": ["/graph.py"], "/__init__baowu.py": ["/com_util.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg004_s2_MG2250.py", "/alg004_s3_MG2250.py", "/alg004_s4_MG2250.py", "/alg004_s5_MG2250.py", "/alg004_s6_MG2250.py", "/alg004_s7_MG2250.py", "/alg004_s8_MG2250.py", "/alg005_MG2250.py", "/alg010_MG2250.py", "/alg011_MG2250.py", "/alg014_MG2250.py", "/alg015_MG2250.py", "/alg016_MG2250.py", "/alg017_MG2250.py", "/alg018_MG2250.py", "/alg019_MG2250.py", "/alg021_MG2250.py", "/graph.py"], "/alg003_MG2250.py": ["/graph.py", "/com_util.py"], "/alg015_MG2250.py": ["/com_util.py", "/graph.py"], "/alg011_MG2250.py": ["/graph.py"], "/alg004_s7_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s2_MG2250.py": ["/graph.py", "/com_util.py"], "/alg018_MG2250.py": ["/graph.py"], "/alg016_MG2250.py": ["/graph.py"]}
|
30,656,144
|
wyadmins/hotrolling_monitor
|
refs/heads/master
|
/graph.py
|
import json
import requests
import pandas as pd
import com_util
import time
class Graph:
    """Message envelope exchanged with the algorithm pipeline over stdin/stdout.

    Wraps one data-source description plus computed indices, events and
    exceptions, and provides the HTTP accessors used by the algorithms.
    """
    def __init__(self, nodes, algcode, datasource, indices, events, datasourcetimes, exceptions,
                 starttime, endtime, channelid, deviceid, devicename, aiid):
        self.nodes = nodes
        self.algcode = algcode
        self.datasource = datasource
        self.indices = indices
        self.events = events
        self.datasourcetimes = datasourcetimes
        self.exceptions = exceptions
        self.starttime = starttime
        self.endtime = endtime
        self.channelid = channelid
        self.deviceid = deviceid
        self.devicename = devicename
        self.aiid = aiid

    @staticmethod
    def graph_from_json(data):
        """Alternate constructor: build a Graph from one JSON message line."""
        line = json.loads(data)
        nodes = line.get('nodes')
        events = line.get('events', [])
        indices = line.get('indices', [])
        if line.get('inputstr'):
            # Replayed message: the original input is embedded as a string.
            # SECURITY(review): eval() executes message content as Python —
            # safe only if upstream is fully trusted; consider
            # json.loads / ast.literal_eval instead.
            datasource = eval(line.get('inputstr')[0])['datasource'][0]
        else:
            datasource = line.get('datasource')[0]
        exceptions = line.get('exceptions', [])
        channelid = datasource.get('channelid')
        algcode = datasource.get('algcode')
        deviceid = datasource.get('deviceid')
        devicename = datasource.get('devicename')
        starttime = datasource.get('starttime')
        endtime = datasource.get('endtime')
        datasourcetimes = datasource.get('datasourcetimes')
        aiid = datasource.get('aiid')
        return Graph(nodes, algcode, datasource, indices, events, datasourcetimes, exceptions,
                     starttime, endtime, channelid, deviceid, devicename, aiid)

    def to_json(self):
        """Serialize self (and nested Index/Event objects) to a JSON string."""
        def to_dict(value):
            # Serialize arbitrary objects via their public instance attributes.
            return {k: v for k, v in value.__dict__.items() if
                    not str(k).startswith('_')}
        return json.dumps(self, default=to_dict, ensure_ascii=False)

    def get_data_from_api(self, tags):
        """Fetch this graph's signal window as a DataFrame with `tags` columns.

        Attaches df.dt (sample interval, seconds) and df.num_per_sec as
        ad-hoc attributes used by the algorithm classes.
        """
        url = f'http://192.168.1.15:8132/api/Values/GetTagDataGet?AssetId={self.deviceid}&AiId={self.aiid}&Start={self.starttime}&End={self.endtime}'
        r = requests.get(url)
        data = json.loads(r.json())
        df = pd.DataFrame(data['Detail']).T
        df.columns = tags
        df.set_index(pd.to_datetime(df.index), inplace=True)
        df.dt, df.num_per_sec = com_util.get_dt(df.index)
        return df

    def read_cache(self):
        """Read this device/algorithm's persisted cache from the service."""
        url = f"http://192.168.1.15:8130/api/services/app/V1_Ai/GetAiCache?DeviceId={self.deviceid}&AlgCode={self.algcode}"
        data = requests.get(url).json()
        cache = data['data']['cache']
        return cache

    def set_cache(self, cache):
        """Persist `cache` for this device/algorithm; True on HTTP success."""
        url = 'http://192.168.1.15:8130/api/services/app/V1_Ai/CreateOrEditAiCache'
        data = '{"deviceId": "%s","algCode": "%s","cache": "%s"}' % (self.deviceid, self.algcode, cache)
        rep = requests.post(url=url, data=data, headers={'Content-Type': 'application/json'})
        return rep.ok
class Index:
    """First-level indicator record appended to Graph.indices."""

    def __init__(self, value):
        # Mandatory keys — a missing one raises KeyError, as before.
        for key in ('assetid', 'meastime1st', 'feid1st', 'value1st'):
            setattr(self, key, value[key])
        # Second-level indices are optional and default to an empty list.
        self.indices2nd = value.get('indices2nd', [])
class Event:
    """Alarm event record; `send_time` is stamped at construction."""

    # Maps constructor-dict keys to the attribute names used downstream.
    _FIELD_MAP = (('assetid', 'assetid'), ('meastime', 'alarm_time'),
                  ('level', 'alarm_level'), ('info', 'alarm_info'))

    def __init__(self, value):
        for src, dest in self._FIELD_MAP:
            setattr(self, dest, value[src])
        # Wall-clock creation time, formatted as in the original.
        self.send_time = time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime())
|
{"/alg022_MG2250.py": ["/graph.py"], "/graph.py": ["/com_util.py"], "/alg004_s8_MG2250.py": ["/graph.py", "/com_util.py"], "/alg017_MG2250.py": ["/graph.py"], "/Alarm_motor_temp_MG2250.py": ["/graph.py"], "/alg023_MG2250.py": ["/graph.py"], "/alg004_s5_MG2250.py": ["/graph.py", "/com_util.py"], "/alg001_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s3_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s4_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s6_MG2250.py": ["/graph.py", "/com_util.py"], "/alg010_MG2250.py": ["/graph.py", "/com_util.py"], "/com_util.py": ["/graph.py"], "/alg005_MG2250.py": ["/com_util.py", "/graph.py"], "/alg004_MG2250.py": ["/graph.py", "/com_util.py"], "/__init__alarm.py": ["/graph.py"], "/func_test.py": ["/graph.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg016_MG2250.py", "/alg015_MG2250.py"], "/alg021_MG2250.py": ["/graph.py"], "/alg014_MG2250.py": ["/graph.py"], "/__init__baowu.py": ["/com_util.py", "/alg001_MG2250.py", "/alg003_MG2250.py", "/alg004_MG2250.py", "/alg004_s2_MG2250.py", "/alg004_s3_MG2250.py", "/alg004_s4_MG2250.py", "/alg004_s5_MG2250.py", "/alg004_s6_MG2250.py", "/alg004_s7_MG2250.py", "/alg004_s8_MG2250.py", "/alg005_MG2250.py", "/alg010_MG2250.py", "/alg011_MG2250.py", "/alg014_MG2250.py", "/alg015_MG2250.py", "/alg016_MG2250.py", "/alg017_MG2250.py", "/alg018_MG2250.py", "/alg019_MG2250.py", "/alg021_MG2250.py", "/graph.py"], "/alg003_MG2250.py": ["/graph.py", "/com_util.py"], "/alg015_MG2250.py": ["/com_util.py", "/graph.py"], "/alg011_MG2250.py": ["/graph.py"], "/alg004_s7_MG2250.py": ["/graph.py", "/com_util.py"], "/alg004_s2_MG2250.py": ["/graph.py", "/com_util.py"], "/alg018_MG2250.py": ["/graph.py"], "/alg016_MG2250.py": ["/graph.py"]}
|
30,658,727
|
jrerwandi/Differential_Evolution_Inverse_Kinematics_2D
|
refs/heads/master
|
/IKDE.py
|
import numpy as np
import matplotlib.pyplot as plt
import math
from pid import *
# Robot link lengths (same units as the plot axes).
link = [20, 30, 40, 40]
# Robot initial joint values (degree)
angle = [0, 0, 0, 0]
# Target end-effector position; updated by mouse clicks.
target = [0, 0, 0]
# Main figure: the arm configuration plot.
fig = plt.figure(1)
ax = fig.add_subplot(1,1,1)
#ax2 = fig2.add_subplot(1,1,1)
ax.set_xlim(-150, 150)
ax.set_ylim(-150, 150)
# Secondary 2x2 figure used for the PID joint-response plots.
fig2, ax2 = plt.subplots(2, 2)
# Draw Axis
def draw_axis(ax, scale=1.0, A=np.eye(4), style='-', draw_2d = False):
    """Draw the coordinate frame of homogeneous transform A on `ax`.

    Plots the x (red), y (green) and, in 3-D mode, z (blue) axes scaled by
    `scale`. With draw_2d=True only x/y are drawn on a 2-D axis.
    """
    # Endpoints of each unit axis in homogeneous coordinates (4 x 2),
    # keyed by plot color and listed in draw order.
    segments = (
        ('r', np.array([[0, 0, 0, 1], [scale, 0, 0, 1]]).T),
        ('g', np.array([[0, 0, 0, 1], [0, scale, 0, 1]]).T),
        ('b', np.array([[0, 0, 0, 1], [0, 0, scale, 1]]).T),
    )
    for color, pts in segments:
        if draw_2d and color == 'b':
            continue  # 2-D mode draws only x and y
        moved = A.dot(pts)
        if draw_2d:
            ax.plot(moved[0, :], moved[1, :], color + style)
        else:
            ax.plot(moved[0, :], moved[1, :], moved[2, :], color + style)
def rotateZ(theta):
    """Homogeneous 4x4 rotation about the z-axis by `theta` radians."""
    c, s = math.cos(theta), math.sin(theta)
    return np.array([[c, -s, 0, 0],
                     [s,  c, 0, 0],
                     [0,  0, 1, 0],
                     [0,  0, 0, 1]])
def translate(dx, dy, dz):
    """Homogeneous 4x4 translation by (dx, dy, dz)."""
    return np.array([[1, 0, 0, dx],
                     [0, 1, 0, dy],
                     [0, 0, 1, dz],
                     [0, 0, 0, 1]])
def FK(angle, link):
    """Forward kinematics of a planar serial chain.

    Returns len(link)+1 homogeneous transforms: frames[0] is the base and
    frames[i+1] follows joint i (rotate about z by angle[i], then translate
    along x by link[i]).
    """
    frames = [np.eye(4)]
    for i in range(len(link)):
        step = rotateZ(angle[i]).dot(translate(link[i], 0, 0))
        frames.append(frames[-1].dot(step))
    return frames
def objective_function(target, thetas, link):
    """Planar distance from the end-effector (FK of `thetas`) to `target`.

    Returns (fitness, thetas) so callers can keep the evaluated angles.
    Only x/y enter the distance — the arm is planar.
    """
    P = FK(thetas, link)
    end_to_target = target - P[-1][:3, 3]
    # math.hypot replaces the hand-rolled sqrt-of-squares (same value,
    # clearer and numerically safer); dead commented-out plotting removed.
    fitness = math.hypot(end_to_target[0], end_to_target[1])
    return fitness, thetas
def DE(target, angle, link, n_params, Cr=0.5, F=0.5, NP=10, max_gen=300):
    """Differential-evolution inverse-kinematics solver.

    target: desired end-effector position; angle: fallback joint angles;
    link: link lengths; n_params: number of joint angles;
    Cr: crossover rate; F: mutation rate; NP: population size;
    max_gen: number of generations.
    Returns (best_fitness, best_angles) from the final evaluations.
    """
    # BUG FIX: the original passed the [0, 1) random population through
    # np.interp((-pi, pi) -> (-pi, pi)) — an identity no-op. Scale the
    # initial population to the intended [-pi, pi) joint range.
    target_vectors = np.interp(np.random.rand(NP, n_params), (0, 1), (-np.pi, np.pi))
    best_fitness = np.inf
    for gen in range(max_gen):
        print("Generation :", gen)
        for pop in range(NP):
            # Mutation: donor from three distinct other members.
            # BUG FIX: sample without replacement so a, b, c are distinct.
            index_choice = [i for i in range(NP) if i != pop]
            a, b, c = np.random.choice(index_choice, 3, replace=False)
            donor_vector = target_vectors[a] + F * (target_vectors[b] - target_vectors[c])
            # Binomial crossover between donor and current member.
            cross_points = np.random.rand(n_params) < Cr
            trial_vector = np.where(cross_points, donor_vector, target_vectors[pop])
            target_fitness, d = objective_function(target, target_vectors[pop], link)
            trial_fitness, e = objective_function(target, trial_vector, link)
            # Selection. BUG FIX: the original paired each fitness with the
            # *other* candidate's angles (trial fitness with target angles
            # and vice versa); keep fitness and angles consistent.
            if trial_fitness < target_fitness:
                target_vectors[pop] = trial_vector.copy()
                best_fitness = trial_fitness
                angle = e
            else:
                best_fitness = target_fitness
                angle = d
        print("Best fitness :", best_fitness)
    return best_fitness, angle
def onclick(event):
    """Mouse-click handler: solve IK for the clicked point, then animate the
    joints settling onto the solution through per-joint PID filters.

    :param event: matplotlib button_press_event; xdata/ydata are in axes coords.
    """
    fig2.suptitle("PID", fontsize=12)
    global target, link, angle, ax
    # The clicked point becomes the new IK target (z is left untouched).
    target[0] = event.xdata
    target[1] = event.ydata
    print("Target Position : ", target)
    # plt.cla()
    limits = 4  # number of joints optimized by DE (matches len(angle))
    # Inverse Kinematics
    err, angle = DE(target, angle, link, limits, max_gen= 200)
    #### PID smoothing of the DE joint solution ####
    kp = 0.4
    ki = 0.8
    kd = 0.05
    x = []  # time axis for the response plots
    # y1..y8 alternate set point / PID output for joints 1..4.
    # Started as [0] then popped empty -- equivalent to starting with [].
    y1 = [0]
    y2 = [0]
    y3 = [0]
    y4 = [0]
    y5 = [0]
    y6 = [0]
    y7 = [0]
    y8 = [0]
    y1.pop()
    y2.pop()
    y3.pop()
    y4.pop()
    y5.pop()
    y6.pop()
    y7.pop()
    y8.pop()
    # One independent controller per joint.
    pid1 = PID(kp, ki, kd)  # default sample time : 10ms
    pid2 = PID(kp, ki, kd)  # default sample time : 10ms
    pid3 = PID(kp, ki, kd)  # default sample time : 10ms
    pid4 = PID(kp, ki, kd)  # default sample time : 10ms
    for point_num in range(30):  # originally "baru 1 joint" (Indonesian: "only 1 joint so far")
        t = point_num * pid1.sample_time
        # Constant set points: the DE solution for each joint.
        set_line = angle[0]
        set_line2 = angle[1]
        set_line3 = angle[2]
        set_line4 = angle[3]
        # One PID step per joint toward its set point.
        output_line = pid1.update(set_line)
        output_line2 = pid2.update(set_line2)
        output_line3 = pid3.update(set_line3)
        output_line4 = pid4.update(set_line4)
        x.append(t)
        y1.append(set_line)
        y2.append(output_line)
        y3.append(set_line2)
        y4.append(output_line2)
        y5.append(set_line3)
        y6.append(output_line3)
        y7.append(set_line4)
        y8.append(output_line4)
        # print("a",output_line)
        # Set point (dashed blue) vs PID output (red) for each joint.
        ax2[0, 0].plot(x, y1, 'b--', x, y2, 'r')
        ax2[0, 0].set_title('joint 1')
        ax2[0, 1].plot(x, y3, 'b--', x, y4, 'r')
        ax2[0, 1].set_title('joint 2')
        ax2[1, 0].plot(x, y5, 'b--', x, y6, 'r')
        ax2[1, 0].set_title('joint 3')
        ax2[1, 1].plot(x, y7, 'b--', x, y8, 'r')
        ax2[1, 1].set_title('joint 4')
        # Draw the arm at the PID-filtered (intermediate) joint angles.
        angle2 = [output_line,output_line2,output_line3,output_line4]
        P = FK(angle2, link)
        for i in range(len(link)):
            start_point = P[i]
            end_point = P[i+1]
            ax.plot([start_point[0,3], end_point[0,3]], [start_point[1,3], end_point[1,3]], linewidth=5)
        ax.set_xlim(-150, 150)
        ax.set_ylim(-150, 150)
        ax.scatter(target[0], target[1], marker='x', color = 'black')
        for axs in ax2.flat:
            axs.set(xlabel='x-label', ylabel='y-label')
        plt.pause(0.01)#plt.ion()
        plt.cla()
    # Final frame: draw the arm at the exact DE solution.
    P = FK(angle, link)
    for i in range(len(link)):
        start_point = P[i]
        end_point = P[i+1]
        ax.plot([start_point[0,3], end_point[0,3]], [start_point[1,3], end_point[1,3]], linewidth=5)
    ax.set_xlim(-150, 150)
    ax.set_ylim(-150, 150)
    ax.scatter(target[0], target[1], marker='x', color = 'black')
    # NOTE(review): "solved" threshold of 1 distance unit -- confirm this
    # tolerance is intended.
    if (err > 1):
        print("IK Error")
    else:
        print("IK Solved")
    # print("Angle :", angle)
    # print("Target :", target)
    # print("End Effector :", P[-1][:3, 3])
    # print("Error :", err)
    fig.show()
    fig2.show()
def main():
    """Wire up the click handler and show the arm in its initial pose."""
    fig.canvas.mpl_connect('button_press_event', onclick)
    fig.suptitle("Differential Evolution - Inverse Kinematics", fontsize=12)
    fig2.suptitle("PID", fontsize=12)
    ax.set_xlim(-150, 150)
    ax.set_ylim(-150, 150)
    # Forward kinematics of the starting joint angles, one segment per link.
    frames = FK(angle, link)
    for base, tip in zip(frames, frames[1:]):
        ax.plot([base[0, 3], tip[0, 3]], [base[1, 3], tip[1, 3]], linewidth=5)
    plt.show()
# Launch the interactive demo only when executed as a script.
if __name__ == "__main__":
    main()
|
{"/IKDE.py": ["/pid.py"]}
|
30,658,728
|
jrerwandi/Differential_Evolution_Inverse_Kinematics_2D
|
refs/heads/master
|
/pid.py
|
import matplotlib.pyplot as plt
import numpy as np
class PID:
    """Discrete PID controller whose feedback is its own previous output.

    Because `feedback` is set to the last output, repeatedly calling
    `update` with a constant set point makes the output chase that value.
    """

    def __init__(self, kp, ki, kd, sample_time=0.01):
        # Gains
        self.kp = kp
        self.ki = ki
        self.kd = kd
        # Default sample time: 10 ms
        self.sample_time = sample_time
        # Controller state
        self.first_flag = True
        self.last_error = 0
        self.feedback = 0
        self.integral = 0
        self.output = 0

    def update(self, set_point):
        """Advance the controller one step toward *set_point*; return the new output."""
        error = set_point - self.feedback
        if not self.first_flag:
            self.integral += error
            derivative = error - self.last_error
        else:
            # First sample: no history yet, so no I accumulation and no D term.
            derivative = 0
            self.first_flag = False
        self.output = self.kp * error + self.ki * self.integral + self.kd * derivative
        # Roll state forward for the next step.
        self.last_error = error
        self.feedback = self.output
        return self.output
|
{"/IKDE.py": ["/pid.py"]}
|
30,673,549
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/tests/test_utils.py
|
import math
from datetime import datetime
import pytest
from flexget.utils import json
from flexget.utils.tools import parse_filesize, split_title_year, merge_dict_from_to
def compare_floats(float1, float2):
    """True when *float1* and *float2* differ by at most 1e-4 (absolute)."""
    tolerance = 0.0001
    return math.fabs(float1 - float2) <= tolerance
class TestJson:
    """Datetimes survive a round trip through flexget's json wrapper."""

    fmt = '%Y-%m-%dT%H:%M:%SZ'
    quoted_fmt = '"%Y-%m-%dT%H:%M:%SZ"'

    def test_json_encode_dt(self):
        raw = '2016-03-11T17:12:17Z'
        stamp = datetime.strptime(raw, self.fmt)
        assert json.dumps(stamp, encode_datetime=True) == '"%s"' % raw

    def test_json_encode_dt_dict(self):
        raw = '2016-03-11T17:12:17Z'
        stamp = datetime.strptime(raw, self.fmt)
        assert json.dumps({'date': stamp}, encode_datetime=True) == '{"date": "%s"}' % raw

    def test_json_decode_dt(self):
        quoted = '"2016-03-11T17:12:17Z"'
        expected = datetime.strptime(quoted, self.quoted_fmt)
        assert json.loads(quoted, decode_datetime=True) == expected

    def test_json_decode_dt_obj(self):
        quoted = '"2016-03-11T17:12:17Z"'
        expected = datetime.strptime(quoted, self.quoted_fmt)
        assert json.loads('{"date": %s}' % quoted, decode_datetime=True) == {'date': expected}
class TestParseFilesize:
    """parse_filesize returns mebibytes; SI suffixes (KB/GB) use powers of 1000."""

    def test_parse_filesize_no_space(self):
        assert compare_floats(parse_filesize('200KB'), 200 * 1000 / 1024 ** 2)

    def test_parse_filesize_space(self):
        assert compare_floats(parse_filesize('200.0 KB'), 200 * 1000 / 1024 ** 2)

    def test_parse_filesize_non_si(self):
        assert compare_floats(parse_filesize('1234 GB'), 1234 * 1000 ** 3 / 1024 ** 2)

    def test_parse_filesize_auto(self):
        assert compare_floats(parse_filesize('1234 GiB'), 1234 * 1024 ** 3 / 1024 ** 2)

    def test_parse_filesize_auto_mib(self):
        assert compare_floats(parse_filesize('1234 MiB'), 1234)

    def test_parse_filesize_ib_not_valid(self):
        # a bare "ib" suffix is not a recognized unit
        with pytest.raises(ValueError):
            parse_filesize('100 ib')

    def test_parse_filesize_single_digit(self):
        assert compare_floats(parse_filesize('1 GiB'), 1024)

    def test_parse_filesize_separators(self):
        # comma and space thousands separators are both accepted
        assert parse_filesize('1,234 GiB') == 1263616
        assert parse_filesize('1 234 567 MiB') == 1234567
class TestSplitYearTitle:
    """split_title_year strips a trailing year but leaves years embedded in titles."""

    @pytest.mark.parametrize(
        'raw, name, year',
        [
            ('The Matrix', 'The Matrix', None),
            ('The Matrix 1999', 'The Matrix', 1999),
            ('The Matrix (1999)', 'The Matrix', 1999),
            ('The Matrix - 1999', 'The Matrix -', 1999),
            ('The.Matrix.1999', 'The.Matrix.', 1999),
            (
                'The Human Centipede III (Final Sequence)',
                'The Human Centipede III (Final Sequence)',
                None,
            ),
            (
                'The Human Centipede III (Final Sequence) (2015)',
                'The Human Centipede III (Final Sequence)',
                2015,
            ),
            # a title that is nothing but a year stays intact
            ('2020', '2020', None),
        ],
    )
    def test_split_year_title(self, raw, name, year):
        assert split_title_year(raw) == (name, year)
class TestDictMerge:
    """merge_dict_from_to merges d1 into d2 in place: d2's scalars win
    conflicts, lists are concatenated with d2's items first."""

    @staticmethod
    def _merge(src, dst):
        # merge_dict_from_to mutates dst; return it for one-line asserts.
        merge_dict_from_to(src, dst)
        return dst

    def test_merge_dict_to_dict_list(self):
        out = self._merge({'setting': {'parameter': ['item_1']}},
                          {'setting': {'parameter': ['item_2']}})
        assert out == {'setting': {'parameter': ['item_2', 'item_1']}}

    def test_merge_dict_to_dict_override(self):
        out = self._merge({'setting': {'parameter': ['item_1']}},
                          {'setting': {'parameter': 2}})
        assert out == {'setting': {'parameter': 2}}

    def test_merge_dict_to_dict_add(self):
        out = self._merge({'setting': {'parameter_1': ['item_1']}},
                          {'setting': {'parameter_2': 'item_2'}})
        assert out == {'setting': {'parameter_1': ['item_1'], 'parameter_2': 'item_2'}}

    def test_merge_dict_to_dict_str(self):
        out = self._merge({'setting': {'parameter': 'item_1'}},
                          {'setting': {'parameter': 'item_2'}})
        assert out == {'setting': {'parameter': 'item_2'}}

    def test_merge_list_to_list(self):
        out = self._merge({'setting': ['item_1']}, {'setting': ['item_2']})
        assert out == {'setting': ['item_2', 'item_1']}

    def test_merge_list_to_str(self):
        out = self._merge({'setting': ['list']}, {'setting': 'string'})
        assert out == {'setting': 'string'}

    def test_merge_str_to_list(self):
        out = self._merge({'setting': 'string'}, {'setting': ['list']})
        assert out == {'setting': ['list']}

    def test_merge_str_to_str(self):
        out = self._merge({'setting': 'string_1'}, {'setting': 'string_2'})
        assert out == {'setting': 'string_2'}
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,550
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/operate/domain_delay.py
|
import logging
from flexget import plugin
from flexget.event import event
from flexget.utils.requests import TimedLimiter
log = logging.getLogger('domain_delay')
class DomainDelay:
    """
    Sets a minimum interval between requests to specific domains.

    Example::

      domain_delay:
        mysite.com: 5 seconds
    """

    schema = {'type': 'object', 'additionalProperties': {'type': 'string', 'format': 'interval'}}

    def on_task_start(self, task, config):
        # Register one rate limiter per configured domain before the task runs;
        # `delay` is an interval string (e.g. "5 seconds") handled by TimedLimiter.
        for domain in config:
            delay = config[domain]
            log.debug('Adding minimum interval of %s between requests to %s' % (delay, domain))
            task.requests.add_domain_limiter(TimedLimiter(domain, delay))
@event('plugin.register')
def register_plugin():
    """Register the domain_delay plugin (api_ver=2 config-passing API)."""
    plugin.register(DomainDelay, 'domain_delay', api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,551
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/generic/db_analyze.py
|
import logging
from flexget.event import event
log = logging.getLogger('db_analyze')
# Priority 0: run after the cleanup is actually finished
@event('manager.db_cleanup', 0)
def on_cleanup(manager, session):
    """Refresh database planner statistics (ANALYZE) once DB cleanup completes."""
    log.info('Running ANALYZE on database to improve performance.')
    session.execute('ANALYZE')
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,552
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/generic/log_start.py
|
import logging
import os
from argparse import SUPPRESS
from flexget import options
from flexget.event import event
log = logging.getLogger('log_start')
@event('manager.startup')
def log_on_start(manager):
    """Log a startup marker (with PID) when --log-start was given."""
    if manager.options.log_start:
        log.info('FlexGet started (PID: %s)' % os.getpid())
@event('manager.shutdown')
def log_on_shutdown(manager):
    """Log a shutdown marker (with PID) when --log-start was given."""
    if manager.options.log_start:
        log.info('FlexGet stopped (PID: %s)' % os.getpid())
@event('options.register')
def register_options():
    # help=SUPPRESS keeps --log-start out of the --help output.
    options.get_parser().add_argument('--log-start', action='store_true', help=SUPPRESS)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,553
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/utils/soup.py
|
# Hack: hide DataLossWarnings.
# Based on html5lib code, namespaceHTMLElements=False should do it, but it does not.
# It also does not seem to be available in older html5lib versions, so it was removed.
import warnings

from bs4 import BeautifulSoup
from html5lib.constants import DataLossWarning

warnings.simplefilter('ignore', DataLossWarning)


def get_soup(obj, parser='html5lib'):
    """Return a BeautifulSoup for *obj*, parsed with *parser* (html5lib by default)."""
    return BeautifulSoup(obj, parser)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,554
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/components/bittorrent/torrent_size.py
|
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('torrent_size')
class TorrentSize:
    """
    Provides file size information when dealing with torrents
    """

    @plugin.priority(200)
    def on_task_modify(self, task, config):
        # Populate content_size (MB) from the parsed torrent metadata.
        # `.size` is divided by 1024^2 and logged as MB, so it is
        # presumably in bytes -- defined by the torrent plugin.
        for entry in task.entries:
            if 'torrent' not in entry:
                continue
            size = entry['torrent'].size / 1024 / 1024
            log.debug('%s size: %s MB' % (entry['title'], size))
            entry['content_size'] = size
@event('plugin.register')
def register_plugin():
    """Register torrent_size as a builtin (always-on) plugin."""
    plugin.register(TorrentSize, 'torrent_size', builtin=True, api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,555
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/utils/database.py
|
import functools
from collections.abc import Mapping
from datetime import datetime
from sqlalchemy import extract, func
from sqlalchemy.ext.hybrid import Comparator, hybrid_property
from sqlalchemy.orm import synonym
from flexget.entry import Entry
from flexget.manager import Session
from flexget.utils import json, qualities
def with_session(*args, **kwargs):
    """
    A decorator which creates a new session if one was not passed via keyword argument to the function.

    Automatically commits and closes the session if one was created; the caller is responsible
    for commit if one was passed in.

    If arguments are given when used as a decorator, they will automatically be passed to the
    created Session when one is not supplied.
    """

    # NOTE(review): wrapper does not apply functools.wraps, so decorated
    # functions lose their __name__/__doc__ -- confirm whether intended.
    def decorator(func):
        def wrapper(*args, **kwargs):
            # An explicit session was supplied: use it, leave lifecycle to the caller.
            if kwargs.get('session'):
                return func(*args, **kwargs)
            # Otherwise open our own session for the duration of the call.
            # _Session is bound below, before decorator() is ever invoked.
            with _Session() as session:
                kwargs['session'] = session
                return func(*args, **kwargs)

        return wrapper

    if len(args) == 1 and not kwargs and callable(args[0]):
        # Used without arguments, e.g. @with_session
        # We default to expire_on_commit being false, in case the decorated function returns db instances
        _Session = functools.partial(Session, expire_on_commit=False)
        return decorator(args[0])
    else:
        # Arguments were specified, turn them into arguments for Session creation e.g. @with_session(autocommit=True)
        _Session = functools.partial(Session, *args, **kwargs)
        return decorator
def pipe_list_synonym(name):
    """Synonym exposing a pipe-separated text column *name* as a list.

    Reading an empty/None column yields None.
    """

    def getter(self):
        stored = getattr(self, name)
        if not stored:
            return None
        return stored.strip('|').split('|')

    def setter(self, value):
        # Strings are stored verbatim; iterables are joined with '|'.
        text = value if isinstance(value, str) else '|'.join(value)
        setattr(self, name, text)

    return synonym(name, descriptor=property(getter, setter))
def text_date_synonym(name):
    """Synonym that converts 'Y-M-D' strings assigned to column *name*
    into datetime objects (invalid strings become None)."""

    def getter(self):
        return getattr(self, name)

    def setter(self, value):
        if not isinstance(value, str):
            # Already a datetime (or None): store as-is.
            setattr(self, name, value)
            return
        try:
            parsed = datetime.strptime(value, '%Y-%m-%d')
        except ValueError:
            # Invalid date string given, store None.
            parsed = None
        setattr(self, name, parsed)

    return synonym(name, descriptor=property(getter, setter))
def entry_synonym(name):
    """Use json to serialize python objects for db storage.

    The stored column *name* holds JSON; reading builds an Entry, writing
    accepts an Entry or dict and serializes only builtin-representable values.
    """

    def only_builtins(item):
        """Recursively reduce *item* to JSON/DB-serializable builtin values.

        Unsupported values nested inside containers are silently dropped;
        an unsupported top-level value raises TypeError.
        """
        supported_types = str, int, float, bool, datetime
        # dict, list, tuple and set are also supported, but handled separately
        if isinstance(item, supported_types):
            return item
        if isinstance(item, Mapping):
            result = {}
            for key, value in item.items():
                try:
                    result[key] = only_builtins(value)
                except TypeError:
                    continue
            return result
        if isinstance(item, (list, tuple, set)):
            result = []
            for value in item:
                try:
                    result.append(only_builtins(value))
                # The recursive call raises TypeError (not ValueError) for
                # unsupported values; catching TypeError actually skips them.
                # (The original caught ValueError here, so unsupported items
                # inside sequences escaped instead of being dropped.)
                except TypeError:
                    continue
            if isinstance(item, list):
                return result
            if isinstance(item, tuple):
                return tuple(result)
            return set(result)
        if isinstance(item, qualities.Quality):
            return item.name
        # Unsupported type. (The isinstance checks above already cover
        # subclasses of the builtins, so no base-type coercion is needed.)
        raise TypeError('%r is not of type Entry.' % type(item))

    def getter(self):
        return Entry(json.loads(getattr(self, name), decode_datetime=True))

    def setter(self, entry):
        if isinstance(entry, Entry) or isinstance(entry, dict):
            setattr(self, name, json.dumps(only_builtins(dict(entry)), encode_datetime=True))
        else:
            raise TypeError('%r is not of type Entry or dict.' % type(entry))

    return synonym(name, descriptor=property(getter, setter))
def json_synonym(name):
    """Synonym storing arbitrary python objects in column *name* as JSON
    (datetimes encoded/decoded transparently)."""

    def getter(self):
        return json.loads(getattr(self, name), decode_datetime=True)

    def setter(self, value):
        setattr(self, name, json.dumps(value, encode_datetime=True))

    return synonym(name, descriptor=property(getter, setter))
class CaseInsensitiveWord(Comparator):
    """Hybrid value representing a string that compares case insensitively."""

    def __init__(self, word):
        # Unwrap a nested CaseInsensitiveWord so `word` is always the raw value.
        self.word = word.word if isinstance(word, CaseInsensitiveWord) else word

    def lower(self):
        # Python-side lowercase for plain strings, SQL LOWER() for expressions.
        if isinstance(self.word, str):
            return self.word.lower()
        return func.lower(self.word)

    def operate(self, op, other):
        if not isinstance(other, CaseInsensitiveWord):
            other = CaseInsensitiveWord(other)
        # Both sides are lowercased before the comparison is applied.
        return op(self.lower(), other.lower())

    def __clause_element__(self):
        return self.lower()

    def __str__(self):
        return self.word

    def __getattr__(self, item):
        """Expose string methods to be called directly on this object."""
        return getattr(self.word, item)
def quality_property(text_attr):
    """Hybrid property exposing text column *text_attr* as a qualities.Quality.

    Python-side reads build a Quality from the stored text; writes accept a
    string or a Quality-like object (stored via its .name). SQL-side
    comparisons go through QualComparator so Quality operands compare by name.
    """

    def getter(self):
        return qualities.Quality(getattr(self, text_attr))

    def setter(self, value):
        if isinstance(value, str):
            setattr(self, text_attr, value)
        else:
            # assumes a qualities.Quality-like object -- stored by name
            setattr(self, text_attr, value.name)

    class QualComparator(Comparator):
        def operate(self, op, other):
            # Normalize Quality operands to their name before comparing.
            if isinstance(other, qualities.Quality):
                other = other.name
            return op(self.__clause_element__(), other)

    def comparator(self):
        return QualComparator(getattr(self, text_attr))

    prop = hybrid_property(getter, setter)
    # Attach the SQL-side comparator to the hybrid property.
    prop = prop.comparator(comparator)
    return prop
def quality_requirement_property(text_attr):
    """Hybrid property exposing text column *text_attr* as qualities.Requirements.

    Writes accept a string or a Requirements-like object (stored via .text).
    """

    def getter(self):
        return qualities.Requirements(getattr(self, text_attr))

    def setter(self, value):
        text = value if isinstance(value, str) else value.text
        setattr(self, text_attr, text)

    return hybrid_property(getter, setter)
def ignore_case_property(text_attr):
    """Hybrid property wrapping text column *text_attr* in CaseInsensitiveWord."""

    def fget(self):
        return CaseInsensitiveWord(getattr(self, text_attr))

    def fset(self, value):
        setattr(self, text_attr, value)

    return hybrid_property(fget, fset)
def year_property(date_attr):
    """Hybrid property giving the year of date column *date_attr*.

    Python side is None-safe; SQL side uses EXTRACT(year ...).
    """

    def fget(self):
        value = getattr(self, date_attr)
        return value.year if value else None

    def expr(cls):
        return extract('year', getattr(cls, date_attr))

    return hybrid_property(fget, expr=expr)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,556
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/operate/free_space.py
|
import logging
import os
from flexget import plugin
from flexget.event import event
log = logging.getLogger('free_space')
def get_free_space(folder):
    """Return folder/drive free space (in megabytes)."""
    mb = 1024 * 1024
    if os.name != 'nt':
        # POSIX: available blocks * fragment size.
        stats = os.statvfs(folder)
        return (stats.f_bavail * stats.f_frsize) / mb
    # Windows has no statvfs; ask the Win32 API instead.
    import ctypes

    free_bytes = ctypes.c_ulonglong(0)
    ctypes.windll.kernel32.GetDiskFreeSpaceExW(
        ctypes.c_wchar_p(folder), None, None, ctypes.pointer(free_bytes)
    )
    return free_bytes.value / mb
class PluginFreeSpace:
    """Aborts a task if an entry is accepted and there is less than a certain amount of space free on a drive."""

    # Either a bare number (MB) or {space: <MB>, path: <dir>}.
    schema = {
        'oneOf': [
            {'type': 'number'},
            {
                'type': 'object',
                'properties': {
                    'space': {'type': 'number'},
                    'path': {'type': 'string', 'format': 'path'},
                },
                'required': ['space'],
                'additionalProperties': False,
            },
        ]
    }

    def prepare_config(self, config, task):
        # A bare number is shorthand for {'space': <number>}.
        if isinstance(config, (float, int)):
            config = {'space': config}
        # Default the checked path to the config base directory.
        if not config.get('path'):
            config['path'] = task.manager.config_base
        return config

    @plugin.priority(plugin.PRIORITY_FIRST)
    def on_task_download(self, task, config):
        config = self.prepare_config(config, task)
        # Only bother aborting if there were accepted entries this run.
        if not task.accepted:
            return
        if get_free_space(config['path']) >= config['space']:
            return
        log.error(
            'Less than %d MB of free space in %s aborting task.'
            % (config['space'], config['path'])
        )
        # backlog plugin will save and restore the task content, if available
        task.abort(
            'Less than %d MB of free space in %s' % (config['space'], config['path'])
        )
@event('plugin.register')
def register_plugin():
    """Register the free_space plugin (api_ver=2 config-passing API)."""
    plugin.register(PluginFreeSpace, 'free_space', api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,557
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/modify/path_by_ext.py
|
import logging
import mimetypes
from flexget import plugin
from flexget.event import event
log = logging.getLogger('path_by_ext')
class PluginPathByExt:
    """
    Allows specifying path based on content-type

    Example:

    path_by_ext:
      torrent: ~/watch/torrent/
      nzb: ~/watch/nzb/
    """

    schema = {'type': 'object'}

    def on_task_modify(self, task, config):
        self.ext(task, config, self.set_path)

    def set_path(self, entry, path):
        """Default callback: assign *path* to the entry."""
        log.debug('Setting %s path to %s' % (entry['title'], path))
        entry['path'] = path

    def ext(self, task, config, callback):
        """Invoke callback(entry, path) for entries matching *config*.

        Matching is by exact mime-type key, by the extension mapped from the
        mime-type (with or without the leading dot), or -- when no mime-type
        is present -- by the URL suffix.
        """
        for entry in task.entries:
            if 'mime-type' in entry:
                # check if configuration has mimetype that entry has
                if entry['mime-type'] in config:
                    callback(entry, config[entry['mime-type']])
                # Map mimetype -> extension. types_map goes the other way
                # (extension -> mimetype), so the reverse lookup must use
                # guess_extension; it returns e.g. '.torrent', or None for
                # unknown types (which previously crashed on ext[1:]).
                ext = mimetypes.guess_extension(entry['mime-type'])
                path = None
                if ext:
                    # Accept config keys both with and without the dot.
                    path = config.get(ext) or config.get(ext[1:])
                if path:
                    callback(entry, path)
                else:
                    log.debug('Unknown mimetype %s' % entry['mime-type'])
            else:
                # try to find from url
                for ext, path in config.items():
                    if entry['url'].endswith('.' + ext):
                        callback(entry, path)
@event('plugin.register')
def register_plugin():
    """Register the path_by_ext plugin (api_ver=2 config-passing API)."""
    plugin.register(PluginPathByExt, 'path_by_ext', api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,558
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/entry.py
|
import copy
import functools
import logging
from flexget.plugin import PluginError
from flexget.utils.lazy_dict import LazyDict, LazyLookup
from flexget.utils.template import FlexGetTemplate, render_from_entry
log = logging.getLogger('entry')
class EntryUnicodeError(Exception):
    """This exception is thrown when trying to set non-unicode compatible field value to entry."""

    def __init__(self, key, value):
        # Keep both pieces so handlers can inspect which field failed.
        self.key = key
        self.value = value

    def __str__(self):
        return f'Entry strings must be unicode: {self.key} ({self.value!r})'
class Entry(LazyDict):
    """
    Represents one item in task. Must have `url` and *title* fields.

    Stores automatically *original_url* key, which is necessary because
    plugins (eg. urlrewriters) may change *url* into something else
    and otherwise that information would be lost.

    Entry will also transparently convert all ascii strings into unicode
    and raises :class:`EntryUnicodeError` if conversion fails on any value
    being set. Such failures are caught by :class:`~flexget.task.Task`
    and trigger :meth:`~flexget.task.Task.abort`.
    """

    def __init__(self, *args, **kwargs):
        super().__init__()
        self.traces = []  # list of (plugin, operation, message) tuples
        self.snapshots = {}  # name -> deep-copied field dict, see take_snapshot()
        self._state = 'undecided'  # one of: undecided, accepted, rejected, failed
        self._hooks = {'accept': [], 'reject': [], 'fail': [], 'complete': []}
        self.task = None
        # Two positional arguments are shorthand for (title, url).
        if len(args) == 2:
            kwargs['title'] = args[0]
            kwargs['url'] = args[1]
            args = []
        # Make sure constructor does not escape our __setitem__ enforcement
        self.update(*args, **kwargs)

    def trace(self, message, operation=None, plugin=None):
        """
        Adds trace message to the entry which should contain useful information about why
        plugin did not operate on entry. Accept and Reject messages are added to trace automatically.

        :param string message: Message to add into entry trace.
        :param string operation: None, reject, accept or fail
        :param plugin: Uses task.current_plugin by default, pass value to override
        """
        if operation not in (None, 'accept', 'reject', 'fail'):
            raise ValueError('Unknown operation %s' % operation)
        item = (plugin, operation, message)
        # Avoid recording the same trace tuple twice.
        if item not in self.traces:
            self.traces.append(item)

    def run_hooks(self, action, **kwargs):
        """
        Run hooks that have been registered for given ``action``.

        :param action: Name of action to run hooks for
        :param kwargs: Keyword arguments that should be passed to the registered functions
        """
        for func in self._hooks[action]:
            func(self, **kwargs)

    def add_hook(self, action, func, **kwargs):
        """
        Add a hook for ``action`` to this entry.

        :param string action: One of: 'accept', 'reject', 'fail', 'complete'
        :param func: Function to execute when event occurs
        :param kwargs: Keyword arguments that should be passed to ``func``
        :raises: ValueError when given an invalid ``action``
        """
        try:
            self._hooks[action].append(functools.partial(func, **kwargs))
        except KeyError:
            raise ValueError('`%s` is not a valid entry action' % action)

    def on_accept(self, func, **kwargs):
        """
        Register a function to be called when this entry is accepted.

        :param func: The function to call
        :param kwargs: Keyword arguments that should be passed to the registered function
        """
        self.add_hook('accept', func, **kwargs)

    def on_reject(self, func, **kwargs):
        """
        Register a function to be called when this entry is rejected.

        :param func: The function to call
        :param kwargs: Keyword arguments that should be passed to the registered function
        """
        self.add_hook('reject', func, **kwargs)

    def on_fail(self, func, **kwargs):
        """
        Register a function to be called when this entry is failed.

        :param func: The function to call
        :param kwargs: Keyword arguments that should be passed to the registered function
        """
        self.add_hook('fail', func, **kwargs)

    def on_complete(self, func, **kwargs):
        """
        Register a function to be called when a :class:`Task` has finished processing this entry.

        :param func: The function to call
        :param kwargs: Keyword arguments that should be passed to the registered function
        """
        self.add_hook('complete', func, **kwargs)

    def accept(self, reason=None, **kwargs):
        """Mark this entry accepted (ignored if already rejected) and fire accept hooks."""
        if self.rejected:
            log.debug('tried to accept rejected %r' % self)
        elif not self.accepted:
            self._state = 'accepted'
            self.trace(reason, operation='accept')
            # Run entry on_accept hooks
            self.run_hooks('accept', reason=reason, **kwargs)

    def reject(self, reason=None, **kwargs):
        """Mark this entry rejected (unless immortal) and fire reject hooks."""
        # ignore rejections on immortal entries
        if self.get('immortal'):
            reason_str = '(%s)' % reason if reason else ''
            log.info('Tried to reject immortal %s %s' % (self['title'], reason_str))
            self.trace('Tried to reject immortal %s' % reason_str)
            return
        if not self.rejected:
            self._state = 'rejected'
            self.trace(reason, operation='reject')
            # Run entry on_reject hooks
            self.run_hooks('reject', reason=reason, **kwargs)

    def fail(self, reason=None, **kwargs):
        """Mark this entry failed and fire fail hooks."""
        log.debug('Marking entry \'%s\' as failed' % self['title'])
        if not self.failed:
            self._state = 'failed'
            self.trace(reason, operation='fail')
            log.error('Failed %s (%s)' % (self['title'], reason))
            # Run entry on_fail hooks
            self.run_hooks('fail', reason=reason, **kwargs)

    def complete(self, **kwargs):
        """Fire complete hooks; called when the task has finished processing this entry."""
        # Run entry on_complete hooks
        self.run_hooks('complete', **kwargs)

    @property
    def state(self):
        # Current state string: undecided, accepted, rejected or failed.
        return self._state

    @property
    def accepted(self):
        return self._state == 'accepted'

    @property
    def rejected(self):
        return self._state == 'rejected'

    @property
    def failed(self):
        return self._state == 'failed'

    @property
    def undecided(self):
        return self._state == 'undecided'

    def __setitem__(self, key, value):
        """Set a field, enforcing unicode values and preserving original url/title."""
        # Enforce unicode compatibility.
        if isinstance(value, bytes):
            raise EntryUnicodeError(key, value)
        # Coerce any enriched strings (such as those returned by BeautifulSoup) to plain strings to avoid serialization
        # troubles.
        elif (
            isinstance(value, str) and type(value) != str
        ):  # pylint: disable=unidiomatic-typecheck
            value = str(value)
        # url and original_url handling
        if key == 'url':
            if not isinstance(value, (str, LazyLookup)):
                raise PluginError('Tried to set %r url to %r' % (self.get('title'), value))
            # Only the first url assignment is remembered as original_url.
            self.setdefault('original_url', value)
        # title handling
        if key == 'title':
            if not isinstance(value, (str, LazyLookup)):
                raise PluginError('Tried to set title to %r' % value)
            self.setdefault('original_title', value)
        try:
            log.trace('ENTRY SET: %s = %r' % (key, value))
        except Exception as e:
            log.debug('trying to debug key `%s` value threw exception: %s' % (key, e))
        super().__setitem__(key, value)

    def safe_str(self):
        """Return a short 'title | url' representation of this entry."""
        return '%s | %s' % (self['title'], self['url'])

    # TODO: this is too manual, maybe we should somehow check this internally and throw some exception if
    # application is trying to operate on invalid entry
    def isvalid(self):
        """
        :return: True if entry is valid. Return False if this cannot be used.
        :rtype: bool
        """
        if 'title' not in self:
            return False
        if 'url' not in self:
            return False
        if not isinstance(self['url'], str):
            return False
        if not isinstance(self['title'], str):
            return False
        return True

    def take_snapshot(self, name):
        """
        Takes a snapshot of the entry under *name*. Snapshots can be accessed via :attr:`.snapshots`.

        :param string name: Snapshot name
        """
        snapshot = {}
        for field, value in self.items():
            try:
                snapshot[field] = copy.deepcopy(value)
            except TypeError:
                # Some values (e.g. lazy lookups) cannot be deep-copied; skip them.
                log.warning(
                    'Unable to take `%s` snapshot for field `%s` in `%s`'
                    % (name, field, self['title'])
                )
        if snapshot:
            if name in self.snapshots:
                log.warning('Snapshot `%s` is being overwritten for `%s`' % (name, self['title']))
            self.snapshots[name] = snapshot

    def update_using_map(self, field_map, source_item, ignore_none=False):
        """
        Populates entry fields from a source object using a dictionary that maps from entry field names to
        attributes (or keys) in the source object.

        :param dict field_map:
            A dictionary mapping entry field names to the attribute in source_item (or keys,
            if source_item is a dict)(nested attributes/dicts are also supported, separated by a dot,)
            or a function that takes source_item as an argument
        :param source_item:
            Source of information to be used by the map
        :param ignore_none:
            Ignore any None values, do not record it to the Entry
        """
        func = dict.get if isinstance(source_item, dict) else getattr
        for field, value in field_map.items():
            if isinstance(value, str):
                # Dotted path: walk nested attributes/keys one segment at a time.
                v = functools.reduce(func, value.split('.'), source_item)
            else:
                v = value(source_item)
            if ignore_none and v is None:
                continue
            self[field] = v

    def render(self, template, native=False):
        """
        Renders a template string based on fields in the entry.

        :param template: A template string or FlexGetTemplate that uses jinja2 or python string replacement format.
        :param native: If True, and the rendering result can be all native python types, not just strings.
        :return: The result of the rendering.
        :rtype: string
        :raises RenderError: If there is a problem.
        """
        if not isinstance(template, (str, FlexGetTemplate)):
            raise ValueError(
                'Trying to render non string template or unrecognized template format, got %s'
                % repr(template)
            )
        log.trace('rendering: %s', template)
        return render_from_entry(template, self, native=native)

    def __eq__(self, other):
        # Entries are considered equal when their original title and url match.
        return self.get('original_title') == other.get('original_title') and self.get(
            'original_url'
        ) == other.get('original_url')

    def __hash__(self):
        return hash(self.get('original_title', '') + self.get('original_url', ''))

    def __repr__(self):
        return '<Entry(title=%s,state=%s)>' % (self['title'], self._state)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,559
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/components/trakt/trakt_lookup.py
|
# -*- coding: utf-8 -*-
import logging
from flexget import plugin
from flexget.event import event
from flexget.manager import Session
from . import api_trakt as plugin_api_trakt
from . import db
# TODO: not very nice .. module-level aliases for the ApiTrakt lookup helpers.
lookup_series = plugin_api_trakt.ApiTrakt.lookup_series
lookup_movie = plugin_api_trakt.ApiTrakt.lookup_movie
log = logging.getLogger('trakt_lookup')
def is_show(entry):
    """Return a truthy value when the entry looks like a TV show."""
    name = entry.get('series_name')
    return name or entry.get('tvdb_id', eval_lazy=False)
def is_episode(entry):
    """Return truthy when the entry carries both a season and an episode number."""
    season = entry.get('series_season')
    episode = entry.get('series_episode')
    return season and episode
def is_season(entry):
    """Return truthy when the entry has a season number but no episode number."""
    season = entry.get('series_season')
    return season and not is_episode(entry)
def is_movie(entry):
    """Return True when the entry has a movie name set."""
    return True if entry.get('movie_name') else False
class TraktLazyLookup:
    """Lazy field getter: performs a trakt lookup and maps the result onto the entry."""

    def __init__(self, field_map, lookup_function):
        # field_map: entry field name -> source attribute path or callable
        self.lookup_function = lookup_function
        self.field_map = field_map

    def __call__(self, entry):
        with Session() as session:
            try:
                media = self.lookup_function(entry, session)
            except LookupError as err:
                # Lookup misses are expected; just log at debug level.
                log.debug(err)
            else:
                entry.update_using_map(self.field_map, media)
        return entry
class TraktUserDataLookup:
    """Lazy field getter that stores one user-specific trakt value on the entry."""

    def __init__(self, field_name, data_type, media_type, lookup_function):
        self.lookup_function = lookup_function
        self.field_name = field_name
        self.media_type = media_type
        self.data_type = data_type

    def __call__(self, entry):
        try:
            value = self.lookup_function(
                data_type=self.data_type, media_type=self.media_type, entry=entry
            )
        except LookupError as err:
            # A failed lookup simply leaves the field unset.
            log.debug(err)
        else:
            entry[self.field_name] = value
        return entry
class PluginTraktLookup:
    """Retrieves trakt information for entries. Uses series_name,
    series_season, series_episode from series plugin.

    Example:
        trakt_lookup: yes

    Primarily used for passing trakt information to other plugins.
    Among these is the IMDB url for the series.

    This information is provided (via entry):
    series info:
    trakt_series_name
    trakt_series_runtime
    trakt_series_first_aired_epoch
    trakt_series_first_aired_iso
    trakt_series_air_time
    trakt_series_content_ratingi
    trakt_series_genres
    trakt_series_imdb_url
    trakt_series_trakt_url
    imdb_id
    tvdb_id
    trakt_series_actors
    trakt_series_country
    trakt_series_year
    trakt_series_tvrage_id
    trakt_series_status
    trakt_series_overview
    trakt_ep_name
    trakt_ep_season
    trakt_ep_number
    trakt_ep_overview
    trakt_ep_first_aired_epoch
    trakt_ep_first_aired_iso
    trakt_ep_id
    trakt_ep_tvdb_id
    """

    # Series info
    # Maps entry field name -> attribute path (or callable) on the trakt show object.
    series_map = {
        'trakt_series_name': 'title',
        'trakt_series_year': 'year',
        'imdb_id': 'imdb_id',
        'tvdb_id': 'tvdb_id',
        'tmdb_id': 'tmdb_id',
        'trakt_show_id': 'id',
        'trakt_show_slug': 'slug',
        'tvrage_id': 'tvrage_id',
        'trakt_trailer': 'trailer',
        'trakt_homepage': 'homepage',
        'trakt_series_runtime': 'runtime',
        'trakt_series_first_aired': 'first_aired',
        'trakt_series_air_time': 'air_time',
        'trakt_series_air_day': 'air_day',
        'trakt_series_content_rating': 'certification',
        'trakt_genres': lambda i: [db_genre.name for db_genre in i.genres],
        'trakt_series_network': 'network',
        'imdb_url': lambda series: series.imdb_id
        and 'http://www.imdb.com/title/%s' % series.imdb_id,
        'trakt_series_url': lambda series: series.slug
        and 'https://trakt.tv/shows/%s' % series.slug,
        'trakt_series_country': 'country',
        'trakt_series_status': 'status',
        'trakt_series_overview': 'overview',
        'trakt_series_rating': 'rating',
        'trakt_series_votes': 'votes',
        'trakt_series_language': 'language',
        'trakt_series_aired_episodes': 'aired_episodes',
        'trakt_series_episodes': lambda show: [episodes.title for episodes in show.episodes],
        'trakt_languages': 'translation_languages',
    }
    # Actors are looked up lazily and separately since they require extra work.
    series_actor_map = {'trakt_actors': lambda show: db.list_actors(show.actors)}
    show_translate_map = {
        'trakt_translations': lambda show: db.get_translations_dict(show.translations, 'show')
    }

    # Episode info
    episode_map = {
        'trakt_ep_name': 'title',
        'trakt_ep_imdb_id': 'imdb_id',
        'trakt_ep_tvdb_id': 'tvdb_id',
        'trakt_ep_tmdb_id': 'tmdb_id',
        'trakt_ep_tvrage': 'tvrage_id',
        'trakt_episode_id': 'id',
        'trakt_ep_first_aired': 'first_aired',
        'trakt_ep_overview': 'overview',
        'trakt_ep_abs_number': 'number_abs',
        'trakt_season': 'season',
        'trakt_episode': 'number',
        'trakt_ep_id': lambda ep: 'S%02dE%02d' % (ep.season, ep.number),
    }

    # Season info
    season_map = {
        'trakt_season_name': 'title',
        'trakt_season_tvdb_id': 'tvdb_id',
        'trakt_season_tmdb_id': 'tmdb_id',
        'trakt_season_tvrage': 'tvrage_id',
        'trakt_season_id': 'id',
        'trakt_season_first_aired': 'first_aired',
        'trakt_season_overview': 'overview',
        'trakt_season_episode_count': 'episode_count',
        'trakt_season': 'number',
        'trakt_season_aired_episodes': 'aired_episodes',
    }

    # Movie info
    movie_map = {
        'movie_name': 'title',
        'movie_year': 'year',
        'trakt_movie_name': 'title',
        'trakt_movie_year': 'year',
        'trakt_movie_id': 'id',
        'trakt_movie_slug': 'slug',
        'imdb_id': 'imdb_id',
        'tmdb_id': 'tmdb_id',
        'trakt_tagline': 'tagline',
        'trakt_overview': 'overview',
        'trakt_released': 'released',
        'trakt_runtime': 'runtime',
        'trakt_rating': 'rating',
        'trakt_votes': 'votes',
        'trakt_homepage': 'homepage',
        'trakt_trailer': 'trailer',
        'trakt_language': 'language',
        'trakt_genres': lambda i: [db_genre.name for db_genre in i.genres],
        'trakt_languages': 'translation_languages',
    }
    movie_translate_map = {
        'trakt_translations': lambda movie: db.get_translations_dict(movie.translations, 'movie')
    }
    movie_actor_map = {'trakt_actors': lambda movie: db.list_actors(movie.actors)}

    # Maps user-data type -> entry field name (per media type for ratings).
    user_data_map = {
        'collected': 'trakt_collected',
        'watched': 'trakt_watched',
        'ratings': {
            'show': 'trakt_series_user_rating',
            'season': 'trakt_season_user_rating',
            'episode': 'trakt_ep_user_rating',
            'movie': 'trakt_movie_user_rating',
        },
    }

    schema = {
        'oneOf': [
            {
                'type': 'object',
                'properties': {'account': {'type': 'string'}, 'username': {'type': 'string'}},
                'anyOf': [{'required': ['username']}, {'required': ['account']}],
                'error_anyOf': 'At least one of `username` or `account` options are needed.',
                'additionalProperties': False,
            },
            {'type': 'boolean'},
        ]
    }

    def __init__(self):
        # Dispatch table: media type -> lookup helper.
        self.getter_map = {
            'show': self._get_series,
            'season': self._get_season,
            'episode': self._get_episode,
            'movie': self._get_movie,
        }

    def on_task_start(self, task, config):
        """Create the ApiTrakt client for this task using configured credentials (if any)."""
        if not isinstance(config, dict):
            config = {}
        self.trakt = plugin_api_trakt.ApiTrakt(
            username=config.get('username'), account=config.get('account')
        )

    def _get_user_data_field_name(self, data_type, media_type):
        """Resolve the entry field name for a user-data type (per media type for ratings)."""
        if data_type not in self.user_data_map:
            raise plugin.PluginError('Unknown user data type "%s"' % data_type)
        if isinstance(self.user_data_map[data_type], dict):
            return self.user_data_map[data_type][media_type]
        return self.user_data_map[data_type]

    def _get_lookup_args(self, entry):
        """Collect all known ids/names from the entry for a trakt lookup, without forcing lazy fields."""
        args = {
            'title': entry.get('series_name', eval_lazy=False)
            or entry.get('title', eval_lazy=False),
            'year': entry.get('year', eval_lazy=False),
            'trakt_slug': (
                entry.get('trakt_show_slug', eval_lazy=False)
                or entry.get('trakt_movie_slug', eval_lazy=False)
            ),
            'tmdb_id': entry.get('tmdb_id', eval_lazy=False),
            'tvdb_id': entry.get('tvdb_id', eval_lazy=False),
            'imdb_id': entry.get('imdb_id', eval_lazy=False),
            'tvrage_id': entry.get('tvrage_id', eval_lazy=False),
        }
        # Prefer an already-evaluated trakt id; fall back to evaluating lazy ones.
        if entry.get('trakt_movie_id', eval_lazy=False):
            args['trakt_id'] = entry['trakt_movie_id']
        elif entry.get('trakt_show_id', eval_lazy=False):
            args['trakt_id'] = entry['trakt_show_id']
        elif is_movie(entry) and entry.get('trakt_movie_id', eval_lazy=True):
            args['trakt_id'] = entry['trakt_movie_id']
        elif entry.get('trakt_show_id', eval_lazy=True):
            args['trakt_id'] = entry['trakt_show_id']
        return args

    def _get_series(self, entry, session):
        """Look up the trakt show object for this entry."""
        series_lookup_args = self._get_lookup_args(entry)
        return lookup_series(session=session, **series_lookup_args)

    def _get_season(self, entry, session):
        """Look up the trakt season object for this entry's series_season."""
        series_lookup_args = self._get_lookup_args(entry)
        show = lookup_series(session=session, **series_lookup_args)
        return show.get_season(entry['series_season'], session)

    def _get_episode(self, entry, session):
        """Look up the trakt episode object for this entry's season/episode."""
        series_lookup_args = self._get_lookup_args(entry)
        show = lookup_series(session=session, **series_lookup_args)
        return show.get_episode(entry['series_season'], entry['series_episode'], session)

    def _get_movie(self, entry, session):
        """Look up the trakt movie object for this entry."""
        movie_lookup_args = self._get_lookup_args(entry)
        return lookup_movie(session=session, **movie_lookup_args)

    def lazy_lookup(self, entry, media_type, mapping):
        """Does the lookup for this entry and populates the entry fields."""
        with Session() as session:
            try:
                trakt_media = self.getter_map[media_type](entry, session)
            except LookupError as e:
                log.debug(e)
            else:
                entry.update_using_map(mapping, trakt_media)
        return entry

    def _lazy_user_data_lookup(self, data_type, media_type, entry):
        """Fetch a user-specific value (collected/watched/rating) for the entry's media object."""
        try:
            lookup = self.getter_map[media_type]
            user_data_lookup = self.trakt.lookup_map[data_type][media_type]
        except KeyError:
            raise plugin.PluginError(
                'Unknown data type="%s" or media type="%s"' % (data_type, media_type)
            )
        with Session() as session:
            try:
                return user_data_lookup(lookup(entry, session), entry['title'])
            except LookupError as e:
                log.debug(e)

    # Run after series and metainfo series
    @plugin.priority(110)
    def on_task_metainfo(self, task, config):
        """Register lazy trakt lookups on every entry of the task."""
        if not config:
            return
        if isinstance(config, bool):
            config = dict()
        for entry in task.entries:
            if is_show(entry):
                entry.register_lazy_func(
                    TraktLazyLookup(self.series_map, self._get_series), self.series_map
                )
                # TODO cleaner way to do this?
                entry.register_lazy_func(
                    TraktLazyLookup(self.series_actor_map, self._get_series), self.series_actor_map
                )
                entry.register_lazy_func(
                    TraktLazyLookup(self.show_translate_map, self._get_series),
                    self.show_translate_map,
                )
                if is_episode(entry):
                    entry.register_lazy_func(
                        TraktLazyLookup(self.episode_map, self._get_episode), self.episode_map
                    )
                elif is_season(entry):
                    entry.register_lazy_func(
                        TraktLazyLookup(self.season_map, self._get_season), self.season_map
                    )
            else:
                entry.register_lazy_func(
                    TraktLazyLookup(self.movie_map, self._get_movie), self.movie_map
                )
                # TODO cleaner way to do this?
                entry.register_lazy_func(
                    TraktLazyLookup(self.movie_actor_map, self._get_movie), self.movie_actor_map
                )
                entry.register_lazy_func(
                    TraktLazyLookup(self.movie_translate_map, self._get_movie),
                    self.movie_translate_map,
                )
            # User-specific data needs credentials.
            if config.get('username') or config.get('account'):
                self._register_lazy_user_data_lookup(entry, 'collected')
                self._register_lazy_user_data_lookup(entry, 'watched')
                self._register_lazy_user_ratings_lookup(entry)

    def _get_media_type_from_entry(self, entry):
        """Classify the entry as episode, season, show or movie (None if unknown)."""
        media_type = None
        if is_episode(entry):
            media_type = 'episode'
        elif is_season(entry):
            media_type = 'season'
        elif is_show(entry):
            media_type = 'show'
        elif is_movie(entry):
            media_type = 'movie'
        return media_type

    def _register_lazy_user_data_lookup(self, entry, data_type, media_type=None):
        """Attach a lazy user-data lookup for the given data type to the entry."""
        media_type = media_type or self._get_media_type_from_entry(entry)
        if not media_type:
            return
        field_name = self._get_user_data_field_name(data_type=data_type, media_type=media_type)
        entry.register_lazy_func(
            TraktUserDataLookup(field_name, data_type, media_type, self._lazy_user_data_lookup),
            [field_name],
        )

    def _register_lazy_user_ratings_lookup(self, entry):
        """Attach lazy ratings lookups: show/season/episode for shows, otherwise movie."""
        data_type = 'ratings'
        if is_show(entry):
            self._register_lazy_user_data_lookup(
                entry=entry, data_type=data_type, media_type='show'
            )
            self._register_lazy_user_data_lookup(
                entry=entry, data_type=data_type, media_type='season'
            )
            self._register_lazy_user_data_lookup(
                entry=entry, data_type=data_type, media_type='episode'
            )
        else:
            self._register_lazy_user_data_lookup(
                entry=entry, data_type=data_type, media_type='movie'
            )

    @property
    def series_identifier(self):
        """Returns the plugin main identifier type"""
        return 'trakt_show_id'

    @property
    def movie_identifier(self):
        """Returns the plugin main identifier type"""
        return 'trakt_movie_id'
@event('plugin.register')
def register_plugin():
    """Register the trakt_lookup plugin with FlexGet's plugin system."""
    plugin.register(
        PluginTraktLookup,
        'trakt_lookup',
        api_ver=2,
        interfaces=['task', 'series_metainfo', 'movie_metainfo'],
    )
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,560
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/logger.py
|
import codecs
import collections
import contextlib
import logging
import logging.handlers
import os
import sys
import threading
import uuid
import warnings
from flexget import __version__
from flexget.utils.tools import io_encoding
# A level more detailed than DEBUG
TRACE = 5
# A level more detailed than INFO
VERBOSE = 15
# environment variables to modify rotating log parameters from defaults of 1 MB and 9 files
ENV_MAXBYTES = 'FLEXGET_LOG_MAXBYTES'
ENV_MAXCOUNT = 'FLEXGET_LOG_MAXCOUNT'
# Stores `task`, logging `session_id`, and redirected `output` stream in a thread local context
local_context = threading.local()
def get_level_no(level):
    """Translate a level name or number into a numeric logging level.

    Accepts an int (returned unchanged) or a case-insensitive level name,
    including the custom TRACE and VERBOSE levels.
    """
    if isinstance(level, int):
        return level
    # Cannot use getLevelName here as in 3.4.0 it returns a string.
    name = level.upper()
    if name == 'TRACE':
        return TRACE
    if name == 'VERBOSE':
        return VERBOSE
    return getattr(logging, name)
@contextlib.contextmanager
def task_logging(task):
    """Context manager which adds task information to log messages."""
    previous = getattr(local_context, 'task', '')
    local_context.task = task
    try:
        yield
    finally:
        # Restore whatever task name was active before, even on error.
        local_context.task = previous
class SessionFilter(logging.Filter):
    """Log filter that only passes records tagged with a specific capture session id."""

    def __init__(self, session_id):
        # Fix: initialize the base logging.Filter so its attributes (`name`, `nlen`)
        # exist; the original skipped this, leaving the instance half-constructed.
        super().__init__()
        self.session_id = session_id

    def filter(self, record):
        """Return True only for records emitted within the matching capture session."""
        return getattr(record, 'session_id', None) == self.session_id
@contextlib.contextmanager
def capture_output(stream, loglevel=None):
    """Context manager which captures all log and console output to given `stream` while in scope."""
    root_logger = logging.getLogger()
    # Save current state so it can be fully restored on exit.
    old_level = root_logger.getEffectiveLevel()
    old_id = getattr(local_context, 'session_id', None)
    # Keep using current, or create one if none already set
    local_context.session_id = old_id or uuid.uuid4()
    old_output = getattr(local_context, 'output', None)
    old_loglevel = getattr(local_context, 'loglevel', None)
    streamhandler = logging.StreamHandler(stream)
    streamhandler.setFormatter(FlexGetFormatter())
    # Only records emitted within this capture session pass the filter.
    streamhandler.addFilter(SessionFilter(local_context.session_id))
    if loglevel is not None:
        loglevel = get_level_no(loglevel)
        streamhandler.setLevel(loglevel)
        # If requested loglevel is lower than the root logger is filtering for, we need to turn it down.
        # All existing handlers should have their desired level set and not be affected.
        if not root_logger.isEnabledFor(loglevel):
            root_logger.setLevel(loglevel)
    local_context.output = stream
    local_context.loglevel = loglevel
    root_logger.addHandler(streamhandler)
    try:
        yield
    finally:
        # Restore logger level and thread-local context even if the body raised.
        root_logger.removeHandler(streamhandler)
        root_logger.setLevel(old_level)
        local_context.session_id = old_id
        local_context.output = old_output
        local_context.loglevel = old_loglevel
def get_capture_stream():
    """If output is currently being redirected to a stream, returns that stream."""
    stream = getattr(local_context, 'output', None)
    return stream
def get_capture_loglevel():
    """If output is currently being redirected to a stream, returns declared loglevel for that stream."""
    level = getattr(local_context, 'loglevel', None)
    return level
class RollingBuffer(collections.deque):
    """File-like object keeping only the most recent lines of text in memory."""

    def write(self, line):
        """Append `line`; the oldest entry is discarded once `maxlen` is reached."""
        self.append(line)
class FlexGetLogger(logging.Logger):
    """Custom logger that adds trace and verbose logging methods, and contextual information to log records."""

    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func, extra, *exargs):
        """Attach the current task name and capture session id to every record."""
        extra = extra or {}
        extra.update(
            task=getattr(local_context, 'task', ''),
            session_id=getattr(local_context, 'session_id', ''),
        )
        # Replace newlines in log messages with \n
        if isinstance(msg, str):
            msg = msg.replace('\n', '\\n')
        return logging.Logger.makeRecord(
            self, name, level, fn, lno, msg, args, exc_info, func, extra, *exargs
        )

    def trace(self, msg, *args, **kwargs):
        """Log at TRACE level (more detailed than DEBUG)."""
        self.log(TRACE, msg, *args, **kwargs)

    def verbose(self, msg, *args, **kwargs):
        """Log at VERBOSE level (displayed when FlexGet is run interactively.)"""
        self.log(VERBOSE, msg, *args, **kwargs)
class FlexGetFormatter(logging.Formatter):
    """Formatter that handles both plain log records and FlexGetLogger records."""

    flexget_fmt = '%(asctime)-15s %(levelname)-8s %(name)-13s %(task)-15s %(message)s'

    def __init__(self):
        logging.Formatter.__init__(self, self.flexget_fmt, '%Y-%m-%d %H:%M')

    def format(self, record):
        """Ensure a `task` attribute exists before delegating to the base formatter."""
        if not hasattr(record, 'task'):
            record.task = ''
        return logging.Formatter.format(self, record)
# Module-level logging setup state.
_logging_configured = False  # True once initialize() has run
_buff_handler = None  # BufferingHandler holding records until start() flushes them
_logging_started = False  # True once start() has attached the real handlers
# Stores the last 50 debug messages
debug_buffer = RollingBuffer(maxlen=50)
def initialize(unit_test=False):
    """Prepare logging.

    Registers the custom TRACE/VERBOSE levels and buffers all log records in
    memory until :func:`start` attaches the real handlers.
    """
    global _logging_configured, _logging_started, _buff_handler
    if _logging_configured:
        return
    if 'dev' in __version__:
        # Surface deprecation warnings from our own code during development builds.
        warnings.filterwarnings('always', category=DeprecationWarning, module='flexget.*')
        warnings.simplefilter('once', append=True)
    logging.addLevelName(TRACE, 'TRACE')
    logging.addLevelName(VERBOSE, 'VERBOSE')
    _logging_configured = True
    # with unit test we want pytest to add the handlers
    if unit_test:
        _logging_started = True
        return
    # Store any log messages in a buffer until the `start` function is run
    logger = logging.getLogger()
    _buff_handler = logging.handlers.BufferingHandler(1000 * 1000)
    logger.addHandler(_buff_handler)
    logger.setLevel(logging.NOTSET)
    # Add a handler that stores the last 50 debug lines to `debug_buffer` for use in crash reports
    crash_handler = logging.StreamHandler(debug_buffer)
    crash_handler.setLevel(logging.DEBUG)
    crash_handler.setFormatter(FlexGetFormatter())
    logger.addHandler(crash_handler)
def start(filename=None, level=logging.INFO, to_console=True, to_file=True):
    """After initialization, start file logging.

    :param filename: Path of the rotating log file (used when ``to_file`` is True)
    :param level: Log level name or number applied to both handlers
    :param to_console: Also log to stdout
    :param to_file: Attach the rotating file handler
    """
    global _logging_started
    assert _logging_configured
    if _logging_started:
        return
    # root logger
    logger = logging.getLogger()
    level = get_level_no(level)
    logger.setLevel(level)
    formatter = FlexGetFormatter()
    if to_file:
        # Rotation size/count are tunable via environment variables.
        file_handler = logging.handlers.RotatingFileHandler(
            filename,
            maxBytes=int(os.environ.get(ENV_MAXBYTES, 1000 * 1024)),
            backupCount=int(os.environ.get(ENV_MAXCOUNT, 9)),
            encoding='utf-8'
        )
        file_handler.setFormatter(formatter)
        file_handler.setLevel(level)
        logger.addHandler(file_handler)
    # without --cron we log to console
    if to_console:
        # Make sure we don't send any characters that the current terminal doesn't support printing
        stdout = sys.stdout
        if hasattr(stdout, 'buffer'):
            # On python 3, we need to get the buffer directly to support writing bytes
            stdout = stdout.buffer
        safe_stdout = codecs.getwriter(io_encoding)(stdout, 'replace')
        console_handler = logging.StreamHandler(safe_stdout)
        console_handler.setFormatter(formatter)
        console_handler.setLevel(level)
        logger.addHandler(console_handler)
    # flush what we have stored from the plugin initialization
    logger.removeHandler(_buff_handler)
    if _buff_handler:
        for record in _buff_handler.buffer:
            if logger.isEnabledFor(record.levelno):
                logger.handle(record)
        _buff_handler.flush()
    _logging_started = True
# Set our custom logger class as default so every logger created via
# logging.getLogger gains trace()/verbose() and contextual record fields.
logging.setLoggerClass(FlexGetLogger)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,561
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/operate/cfscraper.py
|
import logging
from collections import OrderedDict
from flexget import plugin
from flexget.event import event
from flexget.utils.requests import Session
log = logging.getLogger('cfscraper')
class CFScraper:
    """
    Plugin that enables scraping of cloudflare protected sites.

    Example::

        cfscraper: yes
    """

    schema = {'type': 'boolean'}

    @plugin.priority(253)
    def on_task_start(self, task, config):
        """Wrap the task's requests session with cloudscraper so Cloudflare challenges can be handled."""
        try:
            # Imported lazily so the plugin loads even when cloudscraper is absent.
            import cloudscraper
        except ImportError as e:
            log.debug('Error importing cloudscraper: %s' % e)
            raise plugin.DependencyError(
                'cfscraper', 'cloudscraper', 'cloudscraper module required. ImportError: %s' % e
            )

        class CFScrapeWrapper(Session, cloudscraper.CloudScraper):
            """
            This class allows the FlexGet session to inherit from CloudScraper instead of the requests.Session directly.
            """

        if config is True:
            # NOTE(review): headers are rebuilt as an OrderedDict in a fixed order —
            # presumably to mimic a real browser's header ordering; confirm.
            task.requests.headers = OrderedDict(
                [
                    ('User-Agent', task.requests.headers['User-Agent']),
                    ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
                    ('Accept-Language', 'en-US,en;q=0.5'),
                    ('Accept-Encoding', 'gzip, deflate'),
                    ('Connection', 'close'),
                    ('Upgrade-Insecure-Requests', '1'),
                ]
            )
            task.requests = CFScrapeWrapper.create_scraper(task.requests)
@event('plugin.register')
def register_plugin():
    """Register the cfscraper plugin with FlexGet's plugin system."""
    plugin.register(CFScraper, 'cfscraper', api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,562
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/components/series/utils.py
|
# Characters deleted outright vs. replaced by a space during normalization.
_DELETE_CHARS = '\'\\'
_SPACE_CHARS = u'_./-,[]():'

TRANSLATE_MAP = {ord(u'&'): u' and '}
TRANSLATE_MAP.update((ord(ch), u'') for ch in _DELETE_CHARS)
TRANSLATE_MAP.update((ord(ch), u' ') for ch in _SPACE_CHARS)


def normalize_series_name(name):
    """Return a normalized (lower-case, punctuation-stripped) series name."""
    lowered = name.lower().replace('&', ' and ')
    translated = lowered.translate(TRANSLATE_MAP)
    # Collapse any runs of whitespace produced by the translation.
    return u' '.join(translated.split())
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,563
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/components/ftp/sftp.py
|
import logging
import os
import posixpath
import time
from collections import namedtuple
from functools import partial
from itertools import groupby
from urllib.parse import quote, unquote, urljoin, urlparse
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.template import RenderError, render_from_entry
log = logging.getLogger('sftp')
# Immutable bundle of everything needed to open an SFTP connection.
ConnectionConfig = namedtuple(
    'ConnectionConfig', ['host', 'port', 'username', 'password', 'private_key', 'private_key_pass']
)

# retry configuration constants (used by sftp_connect)
CONNECT_TRIES = 3  # retry budget after the first failed attempt
RETRY_INTERVAL = 15  # seconds before the first retry
RETRY_STEP = 5  # seconds added to the wait after each failed retry
SOCKET_TIMEOUT = 15  # per-operation socket timeout, in seconds

# make separate path instances for local vs remote path styles
localpath = os.path
remotepath = posixpath  # pysftp uses POSIX style paths

# pysftp is optional; plugins raise DependencyError at runtime when missing.
try:
    import pysftp

    logging.getLogger("paramiko").setLevel(logging.ERROR)
except ImportError:
    pysftp = None
def sftp_connect(conf):
    """
    Helper function to connect to an sftp server.

    Retries failed connections with a growing back-off; re-raises the last
    error once the retry budget is exhausted.
    """
    attempts_left = CONNECT_TRIES
    wait_seconds = RETRY_INTERVAL
    connection = None
    while connection is None:
        try:
            connection = pysftp.Connection(
                host=conf.host,
                username=conf.username,
                private_key=conf.private_key,
                password=conf.password,
                port=conf.port,
                private_key_pass=conf.private_key_pass,
            )
            connection.timeout = SOCKET_TIMEOUT
            log.verbose('Connected to %s' % conf.host)
        except Exception as e:
            if not attempts_left:
                raise e
            log.debug('Caught exception: %s' % e)
            log.warning(
                'Failed to connect to %s; waiting %d seconds before retrying.'
                % (conf.host, wait_seconds)
            )
            time.sleep(wait_seconds)
            attempts_left -= 1
            wait_seconds += RETRY_STEP
    return connection
def sftp_from_config(config):
    """
    Creates an SFTP connection from a Flexget config object
    """
    connection_settings = ConnectionConfig(
        config['host'],
        config['port'],
        config['username'],
        config['password'],
        config['private_key'],
        config['private_key_pass'],
    )
    try:
        return sftp_connect(connection_settings)
    except Exception as e:
        # Wrap any connection failure in a PluginError for FlexGet to report.
        raise plugin.PluginError('Failed to connect to %s (%s)' % (config['host'], e))
def sftp_prefix(config):
    """Build the ``sftp://[user[:pass]@]host[:port]/`` URL prefix for *config*."""
    username = config['username']
    password = config['password']
    if username and password:
        credentials = '%s:%s@' % (username, password)
    elif username:
        credentials = '%s@' % username
    else:
        credentials = ''
    port = config['port']
    # Port 22 is the SSH default and is omitted from the URL.
    port_part = ':%d' % port if port and port != 22 else ''
    return 'sftp://%s%s%s/' % (credentials, config['host'], port_part)
def dependency_check():
    """
    Check if pysftp module is present
    """
    # pysftp is bound to None at import time when the module is unavailable.
    if not pysftp:
        raise plugin.DependencyError(
            issued_by='sftp',
            missing='pysftp',
            message='sftp plugin requires the pysftp Python module.',
        )
class SftpList:
    """
    Generate entries from SFTP. This plugin requires the pysftp Python module and its dependencies.
    Configuration:
    host: Host to connect to
    port: Port the remote SSH server is listening on. Defaults to port 22.
    username: Username to log in as
    password: The password to use. Optional if a private key is provided.
    private_key: Path to the private key (if any) to log into the SSH server
    private_key_pass: Password for the private key (if needed)
    recursive: Indicates whether the listing should be recursive
    get_size: Indicates whether to calculate the size of the remote file/directory.
        WARNING: This can be very slow when computing the size of directories!
    files_only: Indicates whether to omit directories from the results.
    dirs: List of directories to download
    Example:
      sftp_list:
          host: example.com
          username: Username
          private_key: /Users/username/.ssh/id_rsa
          recursive: False
          get_size: True
          files_only: False
          dirs:
              - '/path/to/list/'
              - '/another/path/'
    """

    schema = {
        'type': 'object',
        'properties': {
            'host': {'type': 'string'},
            'username': {'type': 'string'},
            'password': {'type': 'string'},
            'port': {'type': 'integer', 'default': 22},
            'files_only': {'type': 'boolean', 'default': True},
            'recursive': {'type': 'boolean', 'default': False},
            'get_size': {'type': 'boolean', 'default': True},
            'private_key': {'type': 'string'},
            'private_key_pass': {'type': 'string'},
            'dirs': one_or_more({'type': 'string'}),
        },
        # NOTE(review): 'additionProperties' is a typo for 'additionalProperties',
        # so unknown config keys are not actually rejected by this schema.
        'additionProperties': False,
        'required': ['host', 'username'],
    }

    def prepare_config(self, config):
        """
        Sets defaults for the provided configuration
        """
        config.setdefault('port', 22)
        config.setdefault('password', None)
        config.setdefault('private_key', None)
        config.setdefault('private_key_pass', None)
        config.setdefault('dirs', ['.'])
        return config

    def on_task_input(self, task, config):
        """
        Input task handler: connects, walks the configured directories, and
        returns one Entry per remote node (files only, unless configured).
        """
        dependency_check()
        config = self.prepare_config(config)
        files_only = config['files_only']
        recursive = config['recursive']
        get_size = config['get_size']
        private_key = config['private_key']
        private_key_pass = config['private_key_pass']
        dirs = config['dirs']
        if not isinstance(dirs, list):
            dirs = [dirs]
        log.debug('Connecting to %s' % config['host'])
        sftp = sftp_from_config(config)
        url_prefix = sftp_prefix(config)
        entries = []

        def file_size(path):
            """
            Helper function to get the size of a node
            """
            return sftp.lstat(path).st_size

        def dir_size(path):
            """
            Walk a directory to get its size
            """
            sizes = []

            def node_size(f):
                sizes.append(file_size(f))

            # Walk files, dirs, and unknown nodes alike (always recursive here),
            # accumulating every node's size.
            sftp.walktree(path, node_size, node_size, node_size, True)
            size = sum(sizes)
            return size

        def handle_node(path, size_handler, is_dir):
            """
            Generic helper function for handling a remote file system node
            """
            if is_dir and files_only:
                return
            url = urljoin(url_prefix, quote(sftp.normalize(path)))
            title = remotepath.basename(path)
            entry = Entry(title, url)
            if get_size:
                try:
                    size = size_handler(path)
                except Exception as e:
                    log.error('Failed to get size for %s (%s)' % (path, e))
                    size = -1
                entry['content_size'] = size
            # Propagate key details so sftp_download can authenticate later.
            if private_key:
                entry['private_key'] = private_key
                if private_key_pass:
                    entry['private_key_pass'] = private_key_pass
            entries.append(entry)

        # create helper functions to handle files and directories
        handle_file = partial(handle_node, size_handler=file_size, is_dir=False)
        handle_dir = partial(handle_node, size_handler=dir_size, is_dir=True)

        def handle_unknown(path):
            """
            Skip unknown files
            """
            log.warning('Skipping unknown file: %s' % path)

        # the business end
        for dir in dirs:  # NOTE(review): 'dir' shadows the builtin
            try:
                sftp.walktree(dir, handle_file, handle_dir, handle_unknown, recursive)
            except IOError as e:
                log.error('Failed to open %s (%s)' % (dir, e))
                continue
        sftp.close()
        return entries
class SftpDownload:
    """
    Download files from a SFTP server. This plugin requires the pysftp Python module and its
    dependencies.
    Configuration:
    to: Destination path; supports Jinja2 templating on the input entry. Fields such
        as series_name must be populated prior to input into this plugin using
        metainfo_series or similar.
    recursive: Indicates whether to download directory contents recursively.
    delete_origin: Indicates whether to delete the remote files(s) once they've been downloaded.
    Example:
      sftp_download:
          to: '/Volumes/External/Drobo/downloads'
          delete_origin: False
    """

    schema = {
        'type': 'object',
        'properties': {
            'to': {'type': 'string', 'format': 'path'},
            'recursive': {'type': 'boolean', 'default': True},
            'delete_origin': {'type': 'boolean', 'default': False},
        },
        'required': ['to'],
        'additionalProperties': False,
    }

    def get_sftp_config(self, entry):
        """
        Parses the entry's url and returns a ConnectionConfig, or None when
        the url scheme is not sftp.
        """
        # parse url
        parsed = urlparse(entry['url'])
        host = parsed.hostname
        username = parsed.username or None
        password = parsed.password or None
        port = parsed.port or 22
        # get private key info if it exists
        private_key = entry.get('private_key')
        private_key_pass = entry.get('private_key_pass')
        if parsed.scheme == 'sftp':
            config = ConnectionConfig(
                host, port, username, password, private_key, private_key_pass
            )
        else:
            log.warning('Scheme does not match SFTP: %s' % entry['url'])
            config = None
        return config

    def download_file(self, path, dest, sftp, delete_origin):
        """
        Download a file from path to dest
        """
        dir_name = remotepath.dirname(path)
        dest_relpath = localpath.join(
            *remotepath.split(path)
        )  # convert remote path style to local style
        destination = localpath.join(dest, dest_relpath)
        dest_dir = localpath.dirname(destination)
        if localpath.exists(destination):
            log.verbose('Destination file already exists. Skipping %s' % path)
            return
        if not localpath.exists(dest_dir):
            os.makedirs(dest_dir)
        log.verbose('Downloading file %s to %s' % (path, destination))
        try:
            sftp.get(path, destination)
        except Exception as e:
            log.error('Failed to download %s (%s)' % (path, e))
            if localpath.exists(destination):
                # don't leave a partially written file behind
                log.debug('Removing partially downloaded file %s' % destination)
                os.remove(destination)
            raise e
        if delete_origin:
            log.debug('Deleting remote file %s' % path)
            try:
                sftp.remove(path)
            except Exception as e:
                log.error('Failed to delete file %s (%s)' % (path, e))
                return
            # also clean up the containing directory if it is now empty
            self.remove_dir(sftp, dir_name)

    def handle_dir(self, path):
        """
        Dummy directory handler. Does nothing.
        """
        pass

    def handle_unknown(self, path):
        """
        Dummy unknown file handler. Warns about unknown files.
        """
        log.warning('Skipping unknown file %s' % path)

    def remove_dir(self, sftp, path):
        """
        Remove a directory if it's empty
        """
        if sftp.exists(path) and not sftp.listdir(path):
            log.debug('Attempting to delete directory %s' % path)
            try:
                sftp.rmdir(path)
            except Exception as e:
                log.error('Failed to delete directory %s (%s)' % (path, e))

    def download_entry(self, entry, config, sftp):
        """
        Downloads the file(s) described in entry
        """
        path = unquote(urlparse(entry['url']).path) or '.'
        delete_origin = config['delete_origin']
        recursive = config['recursive']
        to = config['to']
        if to:
            try:
                # 'to' supports Jinja2 templating against the entry's fields
                to = render_from_entry(to, entry)
            except RenderError as e:
                log.error('Could not render path: %s' % to)
                entry.fail(e)
                return
        if not sftp.lexists(path):
            log.error('Remote path does not exist: %s' % path)
            return
        if sftp.isfile(path):
            source_file = remotepath.basename(path)
            source_dir = remotepath.dirname(path)
            try:
                sftp.cwd(source_dir)
                self.download_file(source_file, to, sftp, delete_origin)
            except Exception as e:
                error = 'Failed to download file %s (%s)' % (path, e)
                log.error(error)
                entry.fail(error)
        elif sftp.isdir(path):
            # walk from the parent so the directory itself is part of the tree
            base_path = remotepath.normpath(remotepath.join(path, '..'))
            dir_name = remotepath.basename(path)
            handle_file = partial(
                self.download_file, dest=to, sftp=sftp, delete_origin=delete_origin
            )
            try:
                sftp.cwd(base_path)
                sftp.walktree(
                    dir_name, handle_file, self.handle_dir, self.handle_unknown, recursive
                )
            except Exception as e:
                error = 'Failed to download directory %s (%s)' % (path, e)
                log.error(error)
                entry.fail(error)
                return
            if delete_origin:
                self.remove_dir(sftp, path)
        else:
            log.warning('Skipping unknown file %s' % path)

    def on_task_download(self, task, config):
        """
        Task handler for sftp_download plugin
        """
        dependency_check()
        # Download entries by host so we can reuse the connection
        # NOTE(review): itertools.groupby only groups *consecutive* items, so a
        # connection is reused only when same-config entries are adjacent.
        for sftp_config, entries in groupby(task.accepted, self.get_sftp_config):
            if not sftp_config:
                continue
            error_message = None
            sftp = None
            try:
                sftp = sftp_connect(sftp_config)
            except Exception as e:
                error_message = 'Failed to connect to %s (%s)' % (sftp_config.host, e)
                log.error(error_message)
            for entry in entries:
                if sftp:
                    self.download_entry(entry, config, sftp)
                else:
                    entry.fail(error_message)
            if sftp:
                sftp.close()
class SftpUpload:
    """
    Upload files to a SFTP server. This plugin requires the pysftp Python module and its
    dependencies.
    host: Host to connect to
    port: Port the remote SSH server is listening on. Defaults to port 22.
    username: Username to log in as
    password: The password to use. Optional if a private key is provided.
    private_key: Path to the private key (if any) to log into the SSH server
    private_key_pass: Password for the private key (if needed)
    to: Path to upload the file to; supports Jinja2 templating on the input entry. Fields such
        as series_name must be populated prior to input into this plugin using
        metainfo_series or similar.
    delete_origin: Indicates whether to delete the original file after a successful
        upload.
    Example:
      sftp_list:
          host: example.com
          username: Username
          private_key: /Users/username/.ssh/id_rsa
          to: /TV/{{series_name}}/Series {{series_season}}
          delete_origin: False
    """

    schema = {
        'type': 'object',
        'properties': {
            'host': {'type': 'string'},
            'username': {'type': 'string'},
            'password': {'type': 'string'},
            'port': {'type': 'integer', 'default': 22},
            'private_key': {'type': 'string'},
            'private_key_pass': {'type': 'string'},
            'to': {'type': 'string'},
            'delete_origin': {'type': 'boolean', 'default': False},
        },
        # NOTE(review): 'additionProperties' is a typo for 'additionalProperties'
        # (compare SftpDownload), so unknown keys are not rejected. Left as-is
        # because fixing it would start rejecting previously-accepted configs.
        'additionProperties': False,
        'required': ['host', 'username'],
    }

    def prepare_config(self, config):
        """
        Sets defaults for the provided configuration
        """
        config.setdefault('password', None)
        config.setdefault('private_key', None)
        config.setdefault('private_key_pass', None)
        config.setdefault('to', None)
        return config

    def handle_entry(self, entry, sftp, config, url_prefix):
        """
        Upload a single entry's local file to the remote 'to' directory.

        Fails the entry (without raising) on template render errors, remote
        directory problems, or upload errors.
        """
        location = entry['location']
        filename = localpath.basename(location)
        to = config['to']
        if to:
            try:
                # 'to' may contain Jinja2 templating referencing entry fields.
                to = render_from_entry(to, entry)
            except RenderError as e:
                log.error('Could not render path: %s', to)
                entry.fail(e)
                return
        destination = remotepath.join(to, filename)
        destination_url = urljoin(url_prefix, destination)
        if not os.path.exists(location):
            log.warning('File no longer exists: %s', location)
            return
        if not sftp.lexists(to):
            try:
                sftp.makedirs(to)
            except Exception as e:
                log.error('Failed to create remote directory %s (%s)' % (to, e))
                entry.fail(e)
                return
        if not sftp.isdir(to):
            log.error('Not a directory: %s' % to)
            entry.fail('Not a directory: %s' % to)
            return
        try:
            sftp.put(localpath=location, remotepath=destination)
            log.verbose('Successfully uploaded %s to %s' % (location, destination_url))
        except IOError as e:
            # BUG FIX: the original wrote '% to', feeding a single value to two
            # placeholders — that raised TypeError and masked the real error.
            log.error('Remote directory does not exist: %s (%s)' % (to, e))
            entry.fail('Remote directory does not exist: %s (%s)' % (to, e))
            return
        except Exception as e:
            log.error('Failed to upload %s (%s)' % (location, e))
            entry.fail('Failed to upload %s (%s)' % (location, e))
            return
        if config['delete_origin']:
            try:
                os.remove(location)
            except Exception as e:
                # BUG FIX: the original had placeholders but passed no
                # arguments, logging the literal format string.
                log.error('Failed to delete file %s (%s)' % (location, e))

    def on_task_output(self, task, config):
        """Uploads accepted entries to the specified SFTP server."""
        config = self.prepare_config(config)
        sftp = sftp_from_config(config)
        url_prefix = sftp_prefix(config)
        for entry in task.accepted:
            if sftp:
                log.debug('Uploading file: %s' % entry)
                self.handle_entry(entry, sftp, config, url_prefix)
            else:
                log.debug('SFTP connection failed; failing entry: %s' % entry)
                entry.fail('SFTP connection failed; failing entry: %s' % entry)
@event('plugin.register')
def register_plugin():
    # One config key per plugin class; all use plugin API version 2.
    plugin.register(SftpList, 'sftp_list', api_ver=2)
    plugin.register(SftpDownload, 'sftp_download', api_ver=2)
    plugin.register(SftpUpload, 'sftp_upload', api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,564
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/metainfo/nzb_size.py
|
import logging
import mimetypes
from flexget import plugin
from flexget.event import event
# Module-level logger for this plugin.
log = logging.getLogger('nzb_size')

# a bit hacky, add nzb as a known mimetype (registered process-wide)
mimetypes.add_type('application/x-nzb', '.nzb')
class NzbSize:
    """
    Provides entry size information when dealing with nzb files
    """

    @plugin.priority(200)
    def on_task_modify(self, task, config):
        """
        The downloaded file is accessible in modify phase; parse each accepted
        nzb entry and store its total size (in MB) as 'content_size'.
        """
        try:
            from pynzb import nzb_parser
        except ImportError:
            # TODO: remove builtin status so this won't get repeated on every task execution
            # TODO: this will get loaded even without any need for nzb
            raise plugin.DependencyError(issued_by='nzb_size', missing='lib pynzb')
        for entry in task.accepted:
            # Treat the entry as an nzb when either the mime-type or the
            # filename extension says so.
            if (
                entry.get('mime-type') in ['text/nzb', 'application/x-nzb']
                or entry.get('filename')
                and entry['filename'].endswith('.nzb')
            ):
                if 'file' not in entry:
                    log.warning(
                        '`%s` does not have a `file` that could be used to get size information'
                        % entry['title']
                    )
                    continue
                filename = entry['file']
                log.debug('reading %s' % filename)
                # BUG FIX: use a context manager so the file handle is closed
                # deterministically instead of leaking until GC.
                with open(filename) as nzb_file:
                    xmldata = nzb_file.read()
                try:
                    nzbfiles = nzb_parser.parse(xmldata)
                except Exception:
                    log.debug('%s is not a valid nzb' % entry['title'])
                    continue
                # Total size is the sum of every segment across every file.
                size = 0
                for nzbfile in nzbfiles:
                    for segment in nzbfile.segments:
                        size += segment.bytes
                size_mb = size / 1024 / 1024
                log.debug('%s content size: %s MB' % (entry['title'], size_mb))
                entry['content_size'] = size_mb
            else:
                log.trace('%s does not seem to be nzb' % entry['title'])
@event('plugin.register')
def register_plugin():
    # builtin=True: this plugin runs for every task without explicit config.
    plugin.register(NzbSize, 'nzb_size', api_ver=2, builtin=True)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,673,565
|
mfonville/Flexget
|
refs/heads/develop
|
/flexget/plugins/operate/disable.py
|
import logging
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.event import event
log = logging.getLogger('disable')
def all_builtins():
    """Yield every registered plugin whose ``builtin`` flag is set."""
    for candidate in plugin.plugins.values():
        if candidate.builtin:
            yield candidate
class DisablePlugin:
    """
    Allows disabling built-ins, or plugins referenced by template/include plugin.
    Example::
      templates:
        movies:
          download: ~/torrents/movies/
          .
          .
      tasks:
        nzbs:
          template: movies
          disable:
            - download
          sabnzbd:
            .
            .
    # Task nzbs uses all other configuration from template movies but removes the download plugin
    """

    schema = one_or_more({'type': 'string'})
    # Names of the builtin plugins disabled during this task run; restored in
    # on_task_exit / on_task_abort.
    disabled_builtins = None

    @plugin.priority(254)
    def on_task_start(self, task, config):
        self.disabled_builtins = []
        disabled = []
        if isinstance(config, str):
            config = [config]
        for p in config:
            # Disable plugins explicitly included in config.
            if p in task.config:
                disabled.append(p)
                del task.config[p]
            # Disable built-in plugins.
            # NOTE(review): this mutates the *global* plugin registry; the
            # exit/abort handlers below are relied on to restore the flag.
            if p in plugin.plugins and plugin.plugins[p].builtin:
                plugin.plugins[p].builtin = False
                self.disabled_builtins.append(p)
        # Disable all builtins mode.
        if 'builtins' in config:
            for p in all_builtins():
                p.builtin = False
                self.disabled_builtins.append(p.name)
        if self.disabled_builtins:
            log.debug('Disabled built-in plugin(s): %s' % ', '.join(self.disabled_builtins))
        if disabled:
            log.debug('Disabled plugin(s): %s' % ', '.join(disabled))

    @plugin.priority(plugin.PRIORITY_LAST)
    def on_task_exit(self, task, config):
        # Re-enable everything we turned off so later tasks see the builtins.
        if not self.disabled_builtins:
            return
        for name in self.disabled_builtins:
            plugin.plugins[name].builtin = True
        log.debug('Re-enabled builtin plugin(s): %s' % ', '.join(self.disabled_builtins))
        self.disabled_builtins = []

    # Aborted tasks must restore the registry exactly like a clean exit.
    on_task_abort = on_task_exit
@event('plugin.register')
def register_plugin():
    # Register under config key 'disable' using plugin API version 2.
    plugin.register(DisablePlugin, 'disable', api_ver=2)
|
{"/flexget/plugins/list/regexp_list.py": ["/flexget/entry.py"], "/flexget/plugins/filter/pending_approval.py": ["/flexget/utils/database.py"], "/flexget/plugins/list/pending_list.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/parsers/__init__.py": ["/flexget/plugins/parsers/parser_common.py"], "/flexget/plugins/operate/status.py": ["/flexget/utils/database.py"], "/flexget/plugins/sites/nyaa.py": ["/flexget/entry.py"], "/flexget/plugins/metainfo/imdb_lookup.py": ["/flexget/entry.py", "/flexget/utils/database.py"], "/flexget/plugins/generic/archive.py": ["/flexget/entry.py"], "/flexget/plugins/internal/api_trakt.py": ["/flexget/utils/database.py"], "/flexget/api/__init__.py": ["/flexget/api/plugins/__init__.py"], "/flexget/plugins/filter/series.py": ["/flexget/plugins/parsers/__init__.py", "/flexget/utils/database.py"], "/flexget/plugins/input/backlog.py": ["/flexget/utils/database.py"], "/flexget/utils/database.py": ["/flexget/entry.py"], "/flexget/components/ftp/sftp.py": ["/flexget/entry.py"]}
|
30,724,385
|
Mohmn/social-media-kinda-app
|
refs/heads/main
|
/network/migrations/0003_auto_20200930_1946.py
|
# Generated by Django 3.0.6 on 2020-09-30 19:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: alter Posts.uploaded_by (FK to the configured user model,
    CASCADE delete, related_name='posts')."""

    dependencies = [
        ('network', '0002_auto_20200929_2002'),
    ]
    operations = [
        migrations.AlterField(
            model_name='posts',
            name='uploaded_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
{"/network/admin.py": ["/network/models.py"], "/network/views.py": ["/network/models.py"], "/network/consumers.py": ["/network/models.py"]}
|
30,724,386
|
Mohmn/social-media-kinda-app
|
refs/heads/main
|
/network/migrations/0005_auto_20201122_1249.py
|
# Generated by Django 3.0.6 on 2020-11-22 12:49
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename the misspelled 'Messeages' model to 'Messages'."""

    dependencies = [
        ('network', '0004_messeages_preferences'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Messeages',
            new_name='Messages',
        ),
    ]
|
{"/network/admin.py": ["/network/models.py"], "/network/views.py": ["/network/models.py"], "/network/consumers.py": ["/network/models.py"]}
|
30,724,387
|
Mohmn/social-media-kinda-app
|
refs/heads/main
|
/network/migrations/0004_messeages_preferences.py
|
# Generated by Django 3.0.6 on 2020-11-15 11:11
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: create the Preferences (user/post like records) and
    Messeages (renamed to Messages in 0005) models."""

    dependencies = [
        ('network', '0003_auto_20200930_1946'),
    ]
    operations = [
        migrations.CreateModel(
            name='Preferences',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='p_likes', to='network.Posts')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='my_likes', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Messeages',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=1500)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('reciever', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reciever_messages', to=settings.AUTH_USER_MODEL)),
                ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sended_messages', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
{"/network/admin.py": ["/network/models.py"], "/network/views.py": ["/network/models.py"], "/network/consumers.py": ["/network/models.py"]}
|
30,724,388
|
Mohmn/social-media-kinda-app
|
refs/heads/main
|
/network/migrations/0002_auto_20200929_2002.py
|
# Generated by Django 3.0.6 on 2020-09-29 20:02
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: move the follower/following many-to-many fields off
    Posts and onto the User model."""

    dependencies = [
        ('network', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='posts',
            name='followers',
        ),
        migrations.RemoveField(
            model_name='posts',
            name='following',
        ),
        migrations.AddField(
            model_name='user',
            name='followers',
            field=models.ManyToManyField(blank=True, null=True, related_name='followees', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='user',
            name='following',
            field=models.ManyToManyField(blank=True, null=True, related_name='followieng', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
{"/network/admin.py": ["/network/models.py"], "/network/views.py": ["/network/models.py"], "/network/consumers.py": ["/network/models.py"]}
|
30,724,389
|
Mohmn/social-media-kinda-app
|
refs/heads/main
|
/network/models.py
|
from django.contrib.auth.models import AbstractUser
from django.db import models
class User(AbstractUser):
    # Self-referential follow graph.
    # NOTE(review): null=True has no effect on ManyToManyField (Django fields.W340);
    # 'followieng' looks like a typo but is a load-bearing related_name (see migration 0002).
    following = models.ManyToManyField('User',blank=True,null=True,related_name="followieng")
    followers = models.ManyToManyField('User',blank=True,null=True,related_name="followees")
class Posts(models.Model):
    # A single text post.
    post = models.TextField()
    uploaded_by = models.ForeignKey('User',on_delete=models.CASCADE,related_name="posts")
    # likes: presumably a cached like count maintained alongside Preferences — confirm in views.
    likes = models.IntegerField(default=0)
    created_on = models.DateTimeField(auto_now_add=True)
class Preferences(models.Model):
    # Join table: one row per (user, post) — related_names suggest a "like" record.
    user = models.ForeignKey("User",on_delete=models.CASCADE,related_name="my_likes")
    post = models.ForeignKey("Posts",on_delete=models.CASCADE,related_name="p_likes")
class Messages(models.Model):
    # Direct message between two users. ('reciever' spelling is load-bearing —
    # it matches the related_name created in migration 0004.)
    sender = models.ForeignKey("User", on_delete=models.CASCADE,related_name="sended_messages")
    reciever = models.ForeignKey("User", on_delete=models.CASCADE,related_name="reciever_messages")
    text = models.CharField( max_length=1500)
    # read: whether the receiver has seen the message yet.
    read = models.BooleanField(default=False)
    timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.text
|
{"/network/admin.py": ["/network/models.py"], "/network/views.py": ["/network/models.py"], "/network/consumers.py": ["/network/models.py"]}
|
30,724,390
|
Mohmn/social-media-kinda-app
|
refs/heads/main
|
/network/admin.py
|
from django.contrib import admin
from .models import Posts,User,Messages
# Register your models here.
# Expose the core models in the Django admin with default ModelAdmin options.
admin.site.register(Posts)
admin.site.register(User)
admin.site.register(Messages)
|
{"/network/admin.py": ["/network/models.py"], "/network/views.py": ["/network/models.py"], "/network/consumers.py": ["/network/models.py"]}
|
30,743,272
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/bfs.py
|
# Time Complexity:
'''
while loop: at most one time for each node
for loop: at most one for rach edge?
Combining: O(V+E)
Searching:
O(V); V for vertices
'''
from adjacencylist import g
from queue import q
#g.printGraph(g)
def bfs(start):
    """Breadth-first search from *start*, setting colour, distance and
    predecessor on every reachable vertex.

    Colours: White = unvisited, Grey = queued, Black = fully explored.
    NOTE(review): setPrececessor/getDistance etc. are the (typo'd) vertex API
    from adjacencylist.py — the names must match that module exactly.
    """
    # Set Starting Node to Grey, 0 distance and No Prececessor
    start.setColor("Grey")
    start.setDistance(0)
    start.setPrececessor(None)
    # Enqueue starting node to queue
    q.enqueue(start)
    while q.isEmpty() is False:
        currentNode = q.dequeue()
        # For all adjacent Node in currentNode
        for node in currentNode.connectedTo:
            #print("NB:", node.getId(), ", Color", node.getColor())
            if node.getColor() == "White":
                node.setColor("Grey")
                # Current Precessor as Parent Node
                node.setPrececessor(currentNode.getId())
                node.setDistance(currentNode.getDistance() + 1)
                q.enqueue(node)
        currentNode.setColor("Black")
def printContact():
    """Print the id of every vertex in the global contact graph ``g``.

    The original kept two unused accumulator lists (first_degree,
    second_degree); they were dead code and have been removed.
    """
    for vertex in g:
        print(vertex.getId())
# Run a BFS from vertex 10, then dump every vertex id in the graph.
bfs(g.getVertex(10))
print()
printContact()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,273
|
jzxr/TraceTogether
|
refs/heads/main
|
/dataPrep_HeatMap.py
|
import pathlib
import csv
from DataStructuresandAlgorithms.linked_list import LinkedList
# Insert Location Details (longitude and latitude) onto Linked List
def insertLocation():
    """Build a linked list of (name, longitude, latitude) mall locations."""
    locations = LinkedList()
    # Each tuple is (name, longitude, latitude); insertion order matches the
    # original hard-coded sequence of insertAtHead calls.
    for name, longitude, latitude in (
        ("BALESTIER PLAZA", 103.850723, 1.325601),
        ("CITY SQUARE MALL", 103.850949, 1.280095),
        ("NORTHPOINT", 103.836127, 1.42952),
        ("SIT-NYP", 103.848787, 1.377433),
        ("ZHONGSHAN MALL", 103.8464, 1.3271),
        ("APERIA MALL", 103.8644, 1.3101),
        ("JCUBE", 103.7402, 1.3333),
        ("JURONG POINT", 103.7090, 1.3404),
        ("PIONEER MALL", 103.6974, 1.3421),
    ):
        locations.insertAtHead(name, longitude, latitude)
    return locations
# Create a CSV suitable for plotting onto HeatMap
# Create a CSV suitable for plotting onto HeatMap
def processing(newList):
    """Join WriteToHtml.csv rows with coordinates from *newList*.

    Writes potential.csv, one row per contact: the original columns plus
    the location's longitude/latitude and constant Interval/Weight fields.
    `newList` is the linked list built by insertLocation().
    """
    root = pathlib.Path("Data Sets/Results/")
    readfile = "WriteToHtml.csv"
    readdirectory = root / readfile
    writefile = "potential.csv"
    writedirectory = root / writefile
    # BUGFIX: csv files must be opened with newline='' (per the csv module
    # docs); without it the writer emits blank rows on Windows.
    with open(readdirectory, mode='r', newline='') as read_file, open(writedirectory, mode='w', newline='') as write_file:
        headers = ['Date', 'Phone-Number', 'Location', 'Degree Contact', 'Longtitute', 'Latitute', 'Interval', 'Weight']
        csv_read = csv.DictReader(read_file)
        csv_write = csv.writer(write_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
        csv_write.writerow(headers)
        for row in csv_read:
            # Look up the row's location in the linked list for coordinates.
            location_node = newList.search(row["Location"])
            csv_write.writerow([row["Date"], row["Phone-Number"], row["Location"], row["Degree Contact"], location_node.longtitute, location_node.latitute, 0, 1])
# Driver Function
# Driver Function
def data_prep():
    """Build the location list and generate the heat-map CSV from it."""
    processing(insertLocation())
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,274
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/testgraph.py
|
import networkx as nx
import matplotlib.pyplot as plt
#graph will be based on 20/01/2021
# Weighted adjacency data: each mall maps to {phone number: weight}.
# NOTE(review): phone numbers appear only as neighbours, never as top-level
# keys, so dijikstra() below can only be rooted at a mall name.
graph = {
    "313 Somerset" : {"99053469" : 27, "98741636" : 27},
    "321 Clementi" : {"82863770" : 4},
    "Aperia Mall" : {"93213933" : 1, "99995219": 50, "98439577": 32, "85991110": 4},
    "Bugis Cube" : {"91700369" : 40, "99995219": 50, "98439577": 32, "85991110": 4},
    "Bugis Plus" : {"93213933" : 24},
    "Balestier Hill Shopping Centre" : {"98173219" : 6},
    "Balestier Plaza" : {"91700369" : 158, "97604430" : 2, "99995219" : 2, "97601182" : 2, "86148198" : 2, "88877112" : 2, "91654703" : 44},
    "Cathay Cineleisure Orchard" : {"91164088" : 11},
    "Capitol Singapore" : {"98114818" : 27, "92561060" : 21},
    "City Square Mall" : {"90175180" : 24, "94459175" : 4, "97604430" : 4, "88877112" : 1, "95749866" : 1},
    "Duo" : {"94185021" : 3},
    "Far East Plaza" : {"92245161" : 9, "90782389" : 21},
    "Funan" : {"86806044" : 11, "96625428" : 4, "98114818" : 1, "92561060" : 1},
    "Gek Poh Shopping Centre" : {"90782389" : 11},
    "Great World City" : {"94185021" : 27},
    "HDB Hub" : {"98173219" : 4, "87686851" : 5},
    "ION Orchard" : {"83858992" : 19},
    "IMM" : {"97936255" : 6, "98173219" : 6, "90515712" : 6, "88539489" : 11},
    "Jcube" : {"99002814" : 1, "90782389" : 1},
    "Jem" : {"98741632" : 8, "96301245" : 3},
    "Jewel Changi" : {"91806790" : 27},
    "Junction 8" : {"87686851" : 3},
    "Jurong Point" : {"98741631" : 1, "98741632" : 4, "96301245" : 1, "85571447" : 1},
    "Liang Court" : {"82116909" : 1},
    "Lucky Plaza" : {"97936255" : 10, "98173219" : 19},
    "Kallang Wave Mall" : {"96493305" : 40, "87083988" : 10},
    "Millenia Walk" : {"92245161" : 31},
    "Orchard Central" : {"96625428" : 6, "91164088" : 3},
    "Orchard Gateway" : {"99995219" : 31, "99002814" : 5},
    "People's Park Centre" : {"95066161" : 49},
    "People's Park Complex" : {"95066161" : 31},
    "Mustafa Shopping Centre" : {"95066161" : 3, "93861925" : 3, "95939567" : 3, "82204942" : 3},
    "Nex" : {"91654703" : 2},
    "Ngee Ann City" : {"99053469": 5, "99002814" : 5},
    "Northpoint" : {"99053469" : 1, "98741636" : 1, "86148198" : 1, "98439574" : 8, "98439575" : 1},
    "OUE Downtown" : {"94185021" : 31},
    "Paragon" : {"83858992" : 7, "91164088" : 7},
    "Paya Lebar Quarter" : {"89083988" : 4, "99124255" : 5},
    "Pioneer Mall" : {"92245161" : 1, "93993463" : 1},
    "Plaza Singapura" : {"92294434" : 1, "91806792" : 2},
    "PoMo" : {"90515712" : 7, "89083988" : 27, "87083988" : 6, "83858992" : 10},
    "Serangoon Plaza" : {"99124255" : 2},
    "Shaw House and Centre" : {"93861925" : 1},
    "Sim Lim Square" : {"96760458" : 11, "90515712" : 9},
    "Sim Lim Tower" : {"96760458" : 8, "90515712" : 6},
    "SIT@DOVER" : {"93993463" : 1},
    "SIT@NP" : {"96008055" : 10, "91164088" : 10},
    "SIT@NYP" : {"91806792" : 26, "93120759" : 1, "93690508" : 1},
    "SIT@RP" : {"93993463" : 3},
    "SIT@SP" : {"96008055" : 6, "91164088" : 6},
    "SIT@TP" : {"96008055" : 5, "91164088" : 5},
    "Sunshine Plaza" : {"82204942" : 1},
    "Suntec City" : {"83858992" : 40, "96625428" : 31, "86806044" : 11},
    "Square 2" : {"95939567" : 1},
    "Tampines 1" : {"96493305" : 31},
    "Tanglin Mall" : {"96625428" : 10, "88539489" : 8},
    "The Shoppes at Marina Bay Sands" : {"87686851" : 40},
    "The Star Vista" : {"85571447" : 7, "83638020" : 1},
    "Velocity@Novena Square" : {"88539489" : 31},
    "Vivo City" : {"82863770" : 27, "94459175" : 5},
    "Westgate Mall" : {"97936255" : 3,"96493305" : 3, "88539489" : 26, "96760458": 8},
    "Westmall" : {"82863770" : 1, "87309912" : 1, "97601182" : 4, "89652292" : 40, "91806790" : 4},
    "Wheelock Place" : {"87686851" : 3, "95066161" : 7, "94185021" : 49},
    "Zhongshan Mall" : {"91700369": 5, "82863770" : 5, "92294434" : 40, "97601182" : 4, "92245160" : 27, "83638020" : 27},
}
def dijikstra(graph, start, end, visited=None, distance=None, predecessors=None):
    """Recursive Dijkstra: print and return the shortest path to *end*.

    graph maps each node to a {neighbour: weight} dict.  Returns the path
    as a list ordered from `end` back to `start`.  Raises TypeError when
    either endpoint is not a key of `graph`.
    """
    # BUGFIX: mutable default arguments leaked state between top-level
    # calls; create fresh containers per call instead.
    if visited is None:
        visited = []
    if distance is None:
        distance = {}
    if predecessors is None:
        predecessors = {}
    if start not in graph:
        raise TypeError('The root of the shortest path tree cannot be found')
    if end not in graph:
        raise TypeError('The target of the shortest path tree cannot be found')
    # Root of the search always has distance 0 (also covers start == end).
    distance.setdefault(start, 0)
    if start == end:
        # Walk the predecessor chain back from the target.
        network = []
        pred = end
        while pred != None:
            network.append(pred)
            pred = predecessors.get(pred, None)
        temp = network[0]
        for i in range(1, len(network)):
            temp = network[i] + '--->' + temp
        print('Shortest social network (in arr): ' + str(network))
        print('Network: ' + temp + ", cost=" + str(distance[end]))
        return network
    # Relax every edge leaving `start`.
    # BUGFIX: the original line `newDistance < distance.get(...)` compared
    # an undefined name instead of computing and assigning the relaxation.
    for neighbour in graph[start]:
        if neighbour not in visited:
            newDistance = distance[start] + graph[start][neighbour]
            if newDistance < distance.get(neighbour, float('inf')):
                distance[neighbour] = newDistance
                predecessors[neighbour] = start
    visited.append(start)
    unvisited = {}
    for k in graph:
        if k not in visited:
            unvisited[k] = distance.get(k, float('inf'))
    x = min(unvisited, key=unvisited.get)
    # BUGFIX: recurse toward the original `end` (the old code passed
    # `start` as the new target, so the search could never terminate
    # at the requested node).
    return dijikstra(graph, x, end, visited, distance, predecessors)
# NOTE(review): '97604430' is a phone number, not a top-level key of `graph`,
# so this call raises TypeError in dijikstra's end-node check — confirm the
# intended target node.
dijikstra(graph,'Balestier Plaza', '97604430')
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,275
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/infectionGraph.py
|
import matplotlib.pyplot as plt
import networkx as nx
import csv
from pathlib import Path
from itertools import chain
#Read related CSV for graphing and return as a 1D list.
#Read related CSV for graphing and return as a 1D list.
def readCSV_SE(infectedNumber):
    """Read <number>_FirstDegree_SE.csv and return its cells as a flat list.

    The leading date column of each row is dropped.  Returns False when
    the file cannot be read.
    """
    cells = []
    # Resolve the result file for this infected number.
    directory = Path("Data Sets/HashMapResult/") / (str(infectedNumber) + "_FirstDegree_SE.csv")
    print(directory)
    try:
        with open(directory, 'r') as file:
            for row in csv.reader(file):
                row.pop(0)          # discard the date column
                cells.append(row)
            # Flatten the per-row lists into one list of phone numbers.
            cells = list(chain.from_iterable(cells))
            print(cells)
            return cells
    except Exception as e:
        print(e)
        return False
#Create Graph Nodes and Edges
#Create Graph Nodes and Edges
def createNodes_Edge_First(graph, infectedNumber, data, limit=25):
    """Add up to *limit* first-degree contacts to *graph*.

    Each contact becomes an orange node with edges to the infected number
    and to the 99999999 sink node.  `limit` defaults to 25 (the previous
    hard-coded cap); shorter `data` no longer raises IndexError.
    """
    for contact in data[:limit]:
        graph.add_node(contact, color="orange")
        graph.add_edge(infectedNumber, contact)
        graph.add_edge(contact, 99999999)
def createColorNodes(graph, infectedNumber):
    """Return one color per node, in graph iteration order.

    red = the infected number, yellow = the 99999999 sink node,
    orange = everything else (first-degree contacts).
    """
    return [
        'red' if node == infectedNumber
        else 'yellow' if node == 99999999
        else 'orange'
        for node in graph
    ]
# Script driver: plot one infected number's first-degree SafeEntry contacts
# as a networkx graph and save it to graph.png.
infectedNumber = 86148198
firstDegreeData_SE = readCSV_SE(infectedNumber)
graph = nx.Graph()
graph.add_node(infectedNumber, color= "red")
createNodes_Edge_First(graph, infectedNumber, firstDegreeData_SE)
color_map = createColorNodes(graph, infectedNumber)
nx.draw(graph, node_color=color_map, with_labels=True)
plt.savefig("graph.png", format= "png")
plt.show()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,276
|
jzxr/TraceTogether
|
refs/heads/main
|
/DataStructuresandAlgorithms/linked_list.py
|
class LocationNode:
    """Singly-linked node holding a location name and its coordinates."""
    def __init__(self, location, longtitute, latitute):
        self.location = location      # location name, used as the search key
        self.longtitute = longtitute  # longitude (original spelling kept)
        self.latitute = latitute      # latitude (original spelling kept)
        self.next = None              # next node in the list
class LinkedList:
    """Singly-linked list of LocationNode entries, newest at the head."""

    def __init__(self):
        self.head = None

    def insertAtHead(self, location, longtitute, latitute):
        """Prepend a new LocationNode carrying the given coordinates."""
        node = LocationNode(location, longtitute, latitute)
        if self.head is None:
            self.head = self.tail = node
        else:
            # Back-link left over from a doubly-linked version; harmless.
            self.head.prev = node
            node.next = self.head
            self.head = node

    def search(self, location):
        """Return the node whose name equals *location*, else print a notice."""
        cursor = self.head
        while cursor is not None:
            if cursor.location == location:
                return cursor
            cursor = cursor.next
        print("Location Not Found")

    def printList(self):
        """Print every node's name and coordinates, head first."""
        if self.head is None:
            print("Linked List is empty")
            return
        cursor = self.head
        while cursor is not None:
            print(cursor.location, cursor.longtitute, cursor.latitute)
            cursor = cursor.next
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,277
|
jzxr/TraceTogether
|
refs/heads/main
|
/DataStructuresandAlgorithms/SeperateChaining.py
|
import csv
import os
import datetime
class Node:
    """Chain node for the HashMap: `key` is a date, `value` a phone number."""
    # Class-level defaults; __init__ overwrites key/value per instance and
    # HashMap.put assigns `next` when linking nodes into a chain.
    # Key is Date
    key = 0
    # Value is Phone Number
    value = 0
    next = None
    def __init__(self, key, value):
        # key is the date
        self.key = key
        # value is the phone number
        self.value = value
class HashMap:
    """Hash table keyed by date (25-day window ending at dateBase).

    Every date in the window gets a unique bucket assigned up front by
    linear probing; each bucket holds a chain of phone-number Nodes.
    """
    # Date Range: 20/1/2021 to 13/2/21 (dateBase is the newest date)
    dateBase = datetime.datetime(2021, 2, 13)

    def __init__(self):
        # Size is 25; cause storing contact tracing for 25 days
        self.size = 25
        # Flag for Probing of Date
        self.flag = [False for x in range(self.size)]
        self.key = dict()
        # BUGFIX: dateRange used to be a class attribute, so building a
        # second HashMap appended 25 more (duplicate) dates to the shared
        # list and setKey() then probed a full table forever.  Per-instance
        # now.
        self.dateRange = list()
        # Array for storing the chains, one per date slot
        self.st = [None for x in range(self.size)]
        self.collectionofDates()
        # Set keys for HashMap for the 25 days
        self.setKey()

    # Collect the 25 dates ending at dateBase as ddmmyyyy integers
    def collectionofDates(self):
        for i in range(self.size):
            newDate = HashMap.dateBase - datetime.timedelta(days=i)
            self.dateRange.append(int(newDate.strftime('%d%m%Y')))

    # Linear probing: assign each date a unique slot index
    def setKey(self):
        for i in range(len(self.dateRange)):
            counter = 0
            while self.flag[(self.dateRange[i] % self.size) + counter] is True:
                if ((self.dateRange[i] % self.size) + counter) < self.size - 1:
                    counter += 1
                else:
                    # wrap the probe back to the start of the table
                    counter -= self.size - 1
            self.key[self.dateRange[i]] = (self.dateRange[i] % self.size) + counter
            self.flag[(self.dateRange[i] % self.size) + counter] = True

    # Get the whole date -> slot mapping
    def getkeys(self):
        return self.key

    # Get the slot index for a "dd/mm/YYYY" date string (None when the
    # date falls outside the tracked window)
    def getKey(self, date):
        date = datetime.datetime.strptime(date, "%d/%m/%Y")
        date = (int(date.strftime('%d%m%Y')))
        key = self.key.get(date)
        if key is not None:
            return key
        else:
            print("Key", date, "does not exist")

    # Insert value (phone number) under key (date string); duplicates in
    # the same chain are silently ignored
    def put(self, key, value):
        k = self.getKey(key)
        if k is not None:
            node = self.st[k]
            while node is not None:
                # Check for duplicates
                if node.value == value:
                    return
                node = node.next
            # Insert new Node at the front of the chain
            node = Node(key, value)
            node.next = self.st[k]
            self.st[k] = node
        else:
            return

    # Debug print: every slot's date followed by its chained values
    def printHashMap(self):
        # list out keys and values separately
        # Reference: https://www.geeksforgeeks.org/python-get-key-from-value-in-dictionary/
        key_list = list(self.key.keys())
        val_list = list(self.key.values())
        for i in range(len(self.st)):
            temp = self.st[i]
            print(key_list[val_list.index(i)], end=": ")
            if temp == None:
                print("None", end="")
            while temp is not None:
                print(temp.value, end=", ")
                temp = temp.next
            print()

    # Write HashMap to Csv: one row per non-empty slot, date first
    def writeToCsv(self, newHash, filename):
        with open(filename, mode='w') as data_file:
            data_writer = csv.writer(data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL, lineterminator = "\n")
            for i in range(len(newHash.st)):
                newList = list()
                temp = newHash.st[i]
                if temp == None:
                    continue
                else:
                    while temp is not None:
                        newList.append(temp.value)
                        temp = temp.next
                    # Get Date of the Hash Map slot
                    newList.insert(0, str(newHash.st[i].key).split(" ")[0])
                    data_writer.writerow(newList)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,278
|
jzxr/TraceTogether
|
refs/heads/main
|
/FindContacted_SE.py
|
import csv
import os
import datetime
from DataStructuresandAlgorithms.HashMapwAVL import HashMap
import pathlib
#################### First Degree Contact Code ############################
# Curate the Infected Person location, check-out and checkin information
# Curate the Infected Person location, check-out and checkin information
def selectInfected(infectedperson, flag):
    """Read <infectedperson>_SE.csv into parallel arrays.

    flag=True (first-degree pass): returns
        [dates, locations, check-in times, check-out times, infected ids].
    flag=False (second-degree pass, reading SecondDegreeRange): the last
        arrays instead hold the first-degree check-in / check-out numbers.
    """
    check_in_date_arr = list()
    check_in_location_arr = list()
    check_in_time_arr = list()
    check_out_time_arr = list()
    infectedperson_arr = list()
    first_degree_person1 = list()
    first_degree_person2 = list()
    root = pathlib.Path("Data Sets/Safe Entry/")
    infectedperson_file = infectedperson + "_SE.csv"
    directory = root / infectedperson_file
    with open(directory, mode='r') as csv_file:
        csv_read = csv.DictReader(csv_file)
        for row in csv_read:
            # Reformat and store all infected person information in arrays
            infected_checkin_date = datetime.datetime.strptime(row["Check-in date"], "%d/%m/%Y")
            check_in_date_arr.append(infected_checkin_date)
            check_in_location_arr.append(row["Check-in location"])
            infected_checkin_time = datetime.datetime.strptime(row["Check-in time"], "%H%M")
            check_in_time_arr.append(infected_checkin_time)
            # BUGFIX: previously parsed "Check-in time" again here, so every
            # check-out time silently equalled the check-in time.
            infected_checkout_time = datetime.datetime.strptime(row["Check-out time"], "%H%M")
            check_out_time_arr.append(infected_checkout_time)
            if flag is True:
                infectedperson_arr.append(infectedperson)
            else:
                first_degree_person1.append(row["Phone Number Check in"])
                first_degree_person2.append(row["Phone Number Check Out"])
    if flag is True:
        arr = [check_in_date_arr, check_in_location_arr, check_in_time_arr, check_out_time_arr, infectedperson_arr]
        return arr
    else:
        arr = [check_in_date_arr, check_in_location_arr, check_in_time_arr, check_out_time_arr, first_degree_person1, first_degree_person2]
        return arr
# Search for 1st degree contact and 2nd degree contact
def SearchContacted(parentNode, arr, newHashAVL, flag):
root_dir = str(pathlib.Path().absolute()) + "/Data Sets/Location SE Data/"
for root, dirs, files in os.walk(root_dir, onerror=None):
# Get filename within the root_dir
for filename in files:
# Get Filename without the .csv file type
location = filename.split('.')[0]
# arr[1]: check-in locations of infected
for i in range(len(arr[1])):
# Search for the following condition (location.csv AND infected person location entry)
if location == arr[1][i]:
# Open the location.csv file
file_path = os.path.join(root_dir, filename)
# Get information to prep for second degree contact tracing
firstdegree_phone_number_check_in = None
firstdegree_phone_number_check_out = None
seconddegree_checkin_date = None
seconddegree_checkin_time = None
seconddegree_checkout_time = None
with open(file_path, "r") as csv_file:
csv_read = csv.DictReader(csv_file)
# arr[0][i]: Get Check in date of infected
# arr[2][i]: Get Check in timing of infected
# arr[3][i]: Get Check out timing of infected
# arr[4][i]: Get infected person
# Reformat check in date as datetime libraray
line_count = 0
for row in csv_read:
if line_count < 1:
line_count += 1
else:
# Get Normal Person's Check in/out information
normal_checkin_date = datetime.datetime.strptime(row["Check-in date"], "%d/%m/%Y")
normal_checkin_time = datetime.datetime.strptime(row["Check-in time"], "%H%M")
normal_checkout_time = datetime.datetime.strptime(row["Check-out time"], "%H%M")
# Check Condition: Infected person date == Normal person date
if arr[0][i] == normal_checkin_date:
# Check Condition: Normal person time range in within the infected person time range
if (normal_checkout_time < arr[2][i]) or (normal_checkin_time > arr[3][i]):
continue
else:
# Check whether its first degree contact or 2nd degree
# Needs editing here
if flag is True:
newHashAVL.put(normal_checkin_date, row["Phone No."], location, arr[4][i], "Orange")
else:
# Determine the 1st degree person
check_in_value = abs(arr[2][i] - normal_checkin_time)
check_out_value = abs(arr[3][i] - normal_checkout_time)
if check_in_value < check_out_value:
newHashAVL.put(normal_checkin_date, row["Phone No."], location, arr[4][i] ,"Yellow")
else:
newHashAVL.put(normal_checkin_date, row["Phone No."], location, arr[5][i] ,"Yellow")
# Logic to get checkindate, general range of 2nd degree contact
if seconddegree_checkin_date is None:
seconddegree_checkin_date = normal_checkin_date
if seconddegree_checkin_time is None:
seconddegree_checkin_time = normal_checkin_time
firstdegree_phone_number_check_in = row["Phone No."]
elif seconddegree_checkin_date > normal_checkin_date:
seconddegree_checkin_time = normal_checkin_date
firstdegree_phone_number_check_in = row["Phone No."]
if seconddegree_checkout_time is None:
seconddegree_checkout_time = normal_checkout_time
firstdegree_phone_number_check_out = row["Phone No."]
elif seconddegree_checkout_time < normal_checkout_time:
seconddegree_checkout_time = normal_checkout_time
firstdegree_phone_number_check_out = row["Phone No."]
# Append second degree contact date and time range to file
# Check Condition: True is the non-recursive call
if flag is True:
root = pathlib.Path("Data Sets/Safe Entry/")
second_filename = "SecondDegreeRange_SE.csv"
directory = root / second_filename
file_exists = os.path.isfile(directory)
with open(directory, "a") as csv_file:
headers = ['Check-in date', 'Check-in location', 'Check-in time', 'Check-out time', 'Phone Number Check in', 'Phone Number Check Out']
data_writer = csv.writer(csv_file, delimiter=',', quoting=csv.QUOTE_MINIMAL, lineterminator = "\n")
if not file_exists:
data_writer.writerow(headers)
# Reformat Date & Time
seconddegree_checkin_date = datetime.date.strftime(seconddegree_checkin_date, "%d/%m/%Y")
seconddegree_checkin_time = datetime.datetime.strftime(seconddegree_checkin_time, "%H%M")
seconddegree_checkout_time = datetime.datetime.strftime(seconddegree_checkout_time, "%H%M")
newList = [seconddegree_checkin_date, location, seconddegree_checkin_time, seconddegree_checkout_time, firstdegree_phone_number_check_in, firstdegree_phone_number_check_out]
data_writer.writerow(newList)
# Write HashMap to Csv using Linked List
def writeToCsvLinkedList(newHashAVL, filename, color):
root = pathlib.Path("Data Sets/Results/")
directory = root / filename
with open(directory, mode='w') as data_file:
data_writer = csv.writer(data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL, lineterminator = "\n")
for i in range(len(newHashAVL.st)):
newList = list()
temp = newHashAVL.st[i]
if temp == None:
continue
else:
while temp is not None:
if temp.color == color:
newList.append(temp.value)
temp = temp.next
# Get Date of the Hash Map
newList.insert(0, str(newHashAVL.st[i].key).split(" ")[0])
data_writer.writerow(newList)
# Helper function to write Hashmap to CSV using AVL Tree
def writeToCsvAVL(newHashAVL, filename, color):
root = pathlib.Path("Data Sets/Results/")
directory = root / filename
with open(directory, mode='w') as data_file:
data_writer = csv.writer(data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL, lineterminator = "\n")
for i in range(len(newHashAVL.st)):
if newHashAVL.st[i] is None:
continue
else:
contactedlist = writeToCsvAVL2(newHashAVL.st[i], color)
contactedlist.insert(0, str(newHashAVL.st[i].key).split(" ")[0])
data_writer.writerow(contactedlist)
# Write HashMap to Csv using AVL Tree
def writeToCsvAVL2(node, color):
arr = []
if node:
arr += writeToCsvAVL2(node.left, color)
if node.color == color:
arr.append(node.value)
arr += writeToCsvAVL2(node.right, color)
return arr
# Helper function to write to CSV to be used for HeatMap and Website Table using AVL Tree
def CsvForHtmlAVL(newHashAVL, infectedperson):
root = pathlib.Path("Data Sets/Results/")
filename = "WriteToHtml.csv"
directory = root / filename
file_exists = os.path.isfile(directory)
with open(directory, mode='w') as data_file:
headers = ['Date', 'Phone-Number', 'Location', 'Degree Contact']
data_writer = csv.writer(data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL, lineterminator = "\n")
data_writer.writerow(headers)
for i in range(len(newHashAVL.st)):
if newHashAVL.st[i] is None:
continue
else:
CsvForHtmlAVL2(newHashAVL.st[i], newHashAVL, infectedperson, data_writer)
# Wwrite to CSV to be used for HeatMap and Website Table using AVL Tree
def CsvForHtmlAVL2(node, newHashAVL, infectedperson, data_writer):
if node:
CsvForHtmlAVL2(node.left, newHashAVL, infectedperson, data_writer)
if node.parentNode is infectedperson:
data_writer.writerow([str(node.key).split(' ')[0], node.value, node.location, "First Degree"])
else:
data_writer.writerow([str(node.key).split(' ')[0], node.value, node.location, "Second Degree"])
CsvForHtmlAVL2(node.right, newHashAVL, infectedperson, data_writer)
# Write to CSV to be used for HeatMap and Website Table using Linked List
def CsvForHtmlLinkedList(newHashAVL, infectedperson):
filename = "WriteToHtml.csv"
file_exists = os.path.isfile(filename)
with open(filename, mode='w') as data_file:
headers = ['Date', 'Phone-Number', 'Location', 'Degree Contact']
data_writer = csv.writer(data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
if not file_exists:
data_writer.writerow(headers)
for i in range(len(newHashAVL.st)):
newList = list()
temp = newHashAVL.st[i]
if temp == None:
continue
else:
while temp is not None:
if temp.parentNode is infectedperson:
data_writer.writerow([str(temp.key).split(' ')[0], temp.value, temp.location, "First Degree"])
else:
data_writer.writerow([str(temp.key).split(' ')[0], temp.value, temp.location, "Second Degree"])
temp = temp.next
#################### Second Degree Contact Code ############################
# Curate and collect 1st degree location, check-in and check-out location
def SearchSecondDegree(newHashAVL):
arr = selectInfected("SecondDegreeRange", False)
SearchContacted(None, arr, newHashAVL, False)
# Search for and 2nd degree contact and write to CSV
def WriteSecondDegreeToCsv(newHashAVL, infectedperson):
#Linked List Implemenetation
writeToCsvAVL(newHashAVL, infectedperson + "_SecondDegree_SE.csv", "Yellow")
# Reset the 1st degree contact
def reset():
os.remove("./Data Sets/Safe Entry/SecondDegreeRange_SE.csv")
# Driver function
def findContactSE(infectedperson, infectionDate, daterange):
newHashAVL = HashMap(infectionDate, daterange)
if len(infectedperson) != 8:
print("Invalid phone number")
else:
# Step2. Get Check in and Check out from Infected person
arr = selectInfected(infectedperson, True)
# Step 3. Search for people who enter mall within the time range of infected person
SearchContacted(infectedperson, arr, newHashAVL, True)
# Step 4. Write Result to csv
#Linked List Implemenetation
writeToCsvAVL(newHashAVL, infectedperson + "_FirstDegree_SE.csv", "Orange")
# Indirect Contact
SearchSecondDegree(newHashAVL)
WriteSecondDegreeToCsv(newHashAVL, infectedperson)
CsvForHtmlAVL(newHashAVL, infectedperson)
reset()
return newHashAVL
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,279
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/quickSort.py
|
# Quicksort demo. Faster than merge sort in practice, but this partition
# scheme does not handle duplicate pivot values — a possible extension.
newlist = [10,5,8,12,15,6,3,9,16]
low = 0  # unused at module level: quickSort() recomputes its own bounds
high = len(newlist)-1  # unused at module level as well
def quickSort(newlist):
    """Sort *newlist* in place via recursive quicksort."""
    quickSortHelper(0, len(newlist) - 1, newlist)
def quickSortHelper(low, high, newlist):
    """Recursively sort newlist[low:high+1] around a partition point."""
    if high <= low:
        return
    split = partition(low, high, newlist)
    quickSortHelper(low, split - 1, newlist)
    quickSortHelper(split + 1, high, newlist)
def partition(low, high, newlist):
    """Hoare-style partition around newlist[low]; return the pivot's final
    index.  Elements left of it are smaller, right of it are >= pivot."""
    pivot = newlist[low]
    left = low + 1
    right = high
    while True:
        while pivot > newlist[left] and left < right:
            left += 1
        while pivot <= newlist[right] and right >= left:
            right -= 1
        if right <= left:
            break
        newlist[left], newlist[right] = newlist[right], newlist[left]
    # Drop the pivot into its final slot.
    newlist[low], newlist[right] = newlist[right], newlist[low]
    return right
# Demo run: sort the module-level sample list in place and show the result.
quickSort(newlist)
print(newlist)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,280
|
jzxr/TraceTogether
|
refs/heads/main
|
/contactCT.py
|
import datetime as DT
import csv, os
from pathlib import Path
from itertools import chain
from DataStructuresandAlgorithms.AVL import AVL_Tree
from DataStructuresandAlgorithms.SeperateChaining import HashMap
#Create a CSV of first Degree Contact.
def firstDegreeCT(infected_phoneNo, infectionDate, days):
data = []
#Read CSV
root = Path("Data Sets/Contact Tracing/")
fileName = str(infected_phoneNo) + "_CT.csv"
directory = root / fileName
try:
with open(directory,'r') as file:
reader = csv.reader(file)
for row in reader:
data.append(row)
except Exception as e:
print(e)
return False
#Get Index of infection date.
for i in range(25):
if data[i][0] == infectionDate:
index = i
#Get Past x days contacts
popcount = index - days + 1
for i in range(0,popcount):
data.pop(0)
#Create Comfirm Close Contacts
comfirmList = []
dateList = []
for i in range(0,len(data)):
dateList = []
dateList.append(data[i][0])
for r in range(1,len(data[i])):
string = data[i][r]
x = string.split(":")
#if distance less than 5 and contact duration more than 30 mins
if int(x[1]) <= 2 and int(x[2]) >= 30:
dateList.append(int(x[0]))
comfirmList.append(dateList)
#Write to CSV
root = Path("Data Sets/Results/")
fileName = str(infected_phoneNo) + "_firstDegreeContact.csv"
directory = root / fileName
try:
writer =csv.writer(open(directory, "w"), delimiter=",", lineterminator = "\n")
writer.writerows(comfirmList)
except Exception as e:
print(e)
return False
#Create and return a list of first Degree contact with CT data and without.
def secondDegreeCTExist(infected_phoneNo):
data = []
#Read CSV file and create 1D list
root = Path("Data Sets/Results/")
fileName = str(infected_phoneNo) + "_firstDegreeContact.csv"
directory = root / fileName
try:
with open(directory,'r') as file:
reader = csv.reader(file)
for row in reader:
row.pop(0)
data.append(row)
except Exception as e:
print(e)
return False
data = list(chain.from_iterable(data))
#Check if CT file Exist
data_CT_True = []
data_CT_False = []
root = Path("Data Sets/Contact Tracing/")
for i in range(0,len(data)):
fileName = data[i] + "_CT.csv"
directory = root / fileName
if os.path.isfile(directory):
data_CT_True.append(data[i])
else:
data_CT_False.append(data[i])
return data_CT_True, data_CT_False
#Create a CSV of second Degree Contact
def secondDegreeCT(infected_phoneNo, infectionDate, data_CT_True):
for i in range(0,len(data_CT_True)):
firstDegreeCT(data_CT_True[i], infectionDate, 7)
#Rename file
root = Path("Data Sets/Results/")
fileName = data_CT_True[i] + "_firstDegreeContact.csv"
src = root / fileName
fileName = str(infected_phoneNo) + "_" + data_CT_True[i] + "_SecondDegreeContact.csv"
dst = root / fileName
os.replace(src, dst)
#Create Second Degree CSV using HashMap
def mergeSecondDegreeCT(infected_phoneNo, data_CT_True):
newHash = HashMap()
for i in data_CT_True:
data = []
root = Path("Data Sets/Results/")
fileName = str(infected_phoneNo) + "_" + str(i) + "_SecondDegreeContact.csv"
directory = root / fileName
try:
with open(directory,'r') as file:
reader = csv.reader(file)
for row in reader:
data.append(row)
except Exception as e:
print(e)
for i in range(0,len(data)):
#print(str(data[i][0]))
for r in range(1,len(data[i])):
newHash.put(str(data[i][0]),str(data[i][r]))
root = Path("Data Sets/Results/")
fileName = str(infected_phoneNo) + "_secondDegreeContact.csv"
directory = root / fileName
newHash.writeToCsv(newHash, directory)
#Create a csv with numbers where TT data needs to be taken from users.
def getTTdata(infected_phoneNo, data_CT_False):
root = Path("Data Sets/Results/")
fileName = "getTTofCT.csv"
directory = root / fileName
try:
writer =csv.writer(open(directory, "w"), delimiter = ",", lineterminator = "\n")
writer.writerow(["Missing TT and SE data."])
for i in data_CT_False:
writer.writerow([i])
except Exception as e:
print(e)
#Format CSV to Ui requriments
def uiFormating(infected_phoneNo, deg):
data = []
root = Path("Data Sets/Results/")
if deg == 1:
fileName = str(infected_phoneNo) + "_firstDegreeContact.csv"
elif deg ==2:
fileName = str(infected_phoneNo) + "_secondDegreeContact.csv"
directory = root / fileName
try:
with open(directory,'r') as file:
reader = csv.reader(file)
for row in reader:
data.append(row)
except Exception as e:
print(e)
return False
if deg ==1:
fileName = "UiFirstDegreeContact.csv"
elif deg ==2:
fileName = "UisecondDegreeContact.csv"
directory = root / fileName
#format to Ui requriments.
try:
writer =csv.writer(open(directory, "w"), delimiter = ",", lineterminator = "\n")
Header = ['Date','Phone No.']
writer.writerow(Header)
for i in range(0,len(data)):
date = data[i][0]
for r in range(1,len(data[i])):
temp2 = []
temp2.append(date)
temp2.append(data[i][r])
writer.writerow(temp2)
except Exception as e:
print(e)
#Just call this for results of first degree and second degree.
def contactCT(infected_phoneNo,infectionDate, days):
firstDegreeCT(infected_phoneNo,infectionDate,days)
data_CT_True, data_CT_False = secondDegreeCTExist(infected_phoneNo)
#Remove Duplicated numbers and sort using BST.
newTree1 = AVL_Tree()
for i in data_CT_True:
newTree1.put(str(i))
data_CT_True = newTree1.inOrder()
newTree2 = AVL_Tree()
for i in data_CT_False:
newTree2.put(str(i))
data_CT_False = newTree2.inOrder()
secondDegreeCT(infected_phoneNo, infectionDate, data_CT_True)
mergeSecondDegreeCT(infected_phoneNo, data_CT_True)
uiFormating(infected_phoneNo,1)
uiFormating(infected_phoneNo,2)
getTTdata(infected_phoneNo,data_CT_False)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,281
|
jzxr/TraceTogether
|
refs/heads/main
|
/DataStructuresandAlgorithms/stack.py
|
class Stack:
    """LIFO stack backed by a Python list.

    ``pop`` and ``peek`` return ``None`` on an empty stack instead of
    raising, matching the contract the original callers rely on.  The bare
    ``except`` clauses are narrowed to ``IndexError`` so real bugs are no
    longer swallowed, and ``push`` appends directly instead of growing the
    list with a placeholder first.
    """

    def __init__(self):
        # Index of the top element; -1 means the stack is empty.
        self.top = -1
        self.data = []

    def push(self, value):
        """Place *value* on top of the stack."""
        self.data.append(value)
        self.top += 1

    def pop(self):
        """Remove and return the top value, or None when empty."""
        try:
            value = self.data[self.top]
        except IndexError:
            return None
        del self.data[self.top]
        self.top -= 1
        return value

    def isEmpty(self):
        """Return True when no elements are stored."""
        return self.top == -1

    def peek(self):
        """Return the top value without removing it, or None when empty."""
        try:
            return self.data[self.top]
        except IndexError:
            return None
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,282
|
jzxr/TraceTogether
|
refs/heads/main
|
/clusterTable.py
|
import csv
from pathlib import Path
from DataStructuresandAlgorithms.stack import Stack
#Location Asbtract data type to keep track of location first and second degree count.
class LocationADT():
    """Tracks, per location, how many first/second degree contacts occurred.

    The location table is now created in ``__init__``: the original kept a
    class-level dict, so every ``LocationADT`` instance shared — and
    polluted — the same counts.
    """

    def __init__(self):
        # Location name -> LocationADTNode holding the per-degree counters.
        self.LocationDict = {}

    # Add location and increment its contact-type counter.
    def put(self, locationData):
        # locationData: [location name, contact type string]
        if locationData[0] in self.LocationDict:
            self.getObj(locationData)
        else:
            self.createObj(locationData)

    # Fetch the existing node and increment based on contact type.
    def getObj(self, locationData):
        temp = self.LocationDict[locationData[0]]
        temp.increase(locationData[1])

    # Create the node for a new location, then increment its contact type.
    def createObj(self, locationData):
        self.LocationDict[locationData[0]] = LocationADTNode()
        temp = self.LocationDict[locationData[0]]
        temp.increase(locationData[1])

    def getObjDict(self):
        """Return the underlying location -> node mapping."""
        return self.LocationDict
#Location ADT Node.
class LocationADTNode():
    """Per-location counters for first and second degree contacts."""

    def __init__(self):
        self.firstDeg = 0
        self.secondDeg = 0

    def increase(self, ContactType):
        """Bump the counter named by *ContactType*; other values are ignored."""
        if ContactType == "First Degree":
            self.firstDeg += 1
        elif ContactType == "Second Degree":
            self.secondDeg += 1

    def getData(self):
        """Return (first degree count, second degree count)."""
        return self.firstDeg, self.secondDeg
def createClusterTable():
    """Aggregate WriteToHtml.csv into ClusterTable.csv: one row per location
    with its first and second degree contact counts.

    Both CSV handles are now managed with ``with`` blocks (the original
    never closed the writer's file), and all writer usage stays inside the
    guarded block, so a failed open can no longer raise a NameError later.
    """
    # Stack makes sure every (location, contact type) row is accounted for.
    locationStack = Stack()
    # Abstract data type tracking per-location contact-type counts.
    locationAbstract = LocationADT()

    root = Path("Data Sets/Results/")
    try:
        with open(root / "WriteToHtml.csv", 'r') as file:
            for row in csv.reader(file):
                # Drop the two leading columns; row becomes
                # [Location, Contact Type, ...].
                row.pop(0)
                row.pop(0)
                if row[0] == "Location":
                    pass  # header row
                else:
                    locationStack.push(row)
    except Exception as e:
        print(e)

    try:
        with open(root / "ClusterTable.csv", "w") as out:
            writer = csv.writer(out, delimiter=",", lineterminator="\n")
            writer.writerow(["Location Name:", "First Degree Count.", "Second Degree Count."])
            while locationStack.isEmpty() is False:
                locationAbstract.put(locationStack.pop())
            tempDict = locationAbstract.getObjDict()
            for name in tempDict:
                firstDeg, secondDeg = tempDict[name].getData()
                writer.writerow([name, firstDeg, secondDeg])
    except Exception as e:
        print(e)


createClusterTable()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,283
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/timeIntervals.py
|
import os
import csv
import sys  # fix: sys.exit is used in the csv.Error handler below but was never imported

# ask the user for infected, use raw_input() on Python 2.x
infected = input("Enter the infected person's number: ")

# path to the root directory to search (machine-specific; adjust as needed)
root_dir = "/Users/jasminezheng/Desktop/SIT/CSC1008 Data Structure and Algorithm /Group2_TraceTogether/TraceTogether/Data Sets/Location SE Data"

for root, dirs, files in os.walk(root_dir, onerror=None):  # walk the root dir
    for filename in files:  # iterate over the files in the current dir
        file_path = os.path.join(root, filename)  # build the file path
        try:
            with open(file_path, "rb") as f:  # open the file for reading
                # read the file line by line
                # use: for i, line in enumerate(f) if you need line numbers
                for line in f:
                    try:
                        # try to decode the contents to utf-8
                        line = line.decode("utf-8")
                    except ValueError:  # decoding failed, skip the line
                        continue
                    if infected in line:  # if the infected exists on the current line...
                        # re-open the file as text CSV to inspect the rows
                        with open(file_path, newline='') as f:
                            reader = csv.reader(f)
                            try:
                                for row in reader:
                                    # NOTE(review): these are refreshed from every row,
                                    # so the date comparison below always matches the
                                    # current row — confirm the intended logic.
                                    infectedDate = row[1]
                                    infectedCheckIn = row[2]
                                    infectedCheckOut = row[3]
                                    if infected in row:
                                        print(row)
                                    if row[1] == infectedDate:
                                        # overlapping check-in/check-out windows
                                        if infectedCheckIn < row[3] and infectedCheckOut > row[2]:
                                            potential = list()
                                            potential.append(row[0])
                                            print(potential)
                                            count = 0
                                            with open('potential.csv','w') as csvOutput:
                                                writer = csv.writer(csvOutput)
                                                writer.writerow(potential)
                            except csv.Error as e:
                                sys.exit('file {}, line {}: {}'.format(
                                    filename, reader.line_num, e))
                        break  # no need to iterate over the rest of the file
        except (IOError, OSError):  # ignore read and permission errors
            pass
# with open('Data Sets/infected.csv', 'wb', newline='') as f2:
# writer = csv.writer(f2)
# writer.writerow(potential)
# aperiaFile = csv.reader(open('Data Sets/Location SE Data/APERIAMALL.csv'))
# # aperiaReader = csv.reader(aperiaFile)
# # balestierplazaFile = file('BALESTIER PLAZA.csv', 'r')
# # balestierplazaReader = csv.reader(balestierplazaFile)
# # csmFile = file('CITY SQUARE MALL.csv', 'r')
# # csmReader = csv.reader(csmFile)
# # jcubeFile = file('JCUBE.csv','r')
# # jcubeReader = csv.reader(jcubeFile)
# # jpFile = file('JURONG POINT.csv','r')
# # jpReader = csv.reader(jpFile)
# # npFile = file('NORTHPOINT.csv','r')
# # npReader = csv.reader(npFile)
# # pioneermallFile = file('PIONEER MALL.csv','r')
# # pioneermallReader = csv.reader(pioneermallFile)
# # sitnypFile = file('SIT-NYP.csv','r')
# # sitnypReader = csv.reader(sitnypFile)
# # zhongshanFile = file('ZHONGSHAN MALL.csv','r')
# # zhongshanReader = csv.reader(zhongshanFile)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,284
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/BST.py
|
# Normal Binary Tree Complexity
'''
Worst Case for Search and Insert: O(N)
Average Case for Search and Insert: O(logn)
'''
class Node:
    """A binary-search-tree node keyed by *key*."""

    def __init__(self, key):
        self.key = key
        self.left = None      # left child (smaller keys)
        self.right = None     # right child (larger keys)
        self.count = 0        # number of nodes below this node
        self.counter = 0      # how many times this key was re-inserted
class BST:
    """Plain (unbalanced) binary search tree.

    Search/insert are O(log n) on average and O(n) in the worst case.
    Duplicate keys are not stored twice; the node's ``counter`` is bumped.
    """

    def __init__(self):
        self.root = None

    def get(self, key):
        """Return the node holding *key*, or None when absent."""
        current = self.root
        while current is not None:
            if key == current.key:
                return current
            # Larger keys live to the right, smaller to the left.
            current = current.right if key > current.key else current.left
        return None

    def put(self, key):
        """Insert *key* into the tree."""
        self.root = self.put2(self.root, key)

    def put2(self, node, key):
        """Recursive insert helper; returns the (possibly new) subtree root."""
        if node is None:
            return Node(key)
        if key < node.key:
            node.left = self.put2(node.left, key)
        elif key > node.key:
            node.right = self.put2(node.right, key)
        else:
            # Repeated key: count the repetition instead of adding a node.
            node.counter += 1
        return node

    def inOrder(self):
        """Return all keys in ascending order."""
        return self.inOrder2(self.root)

    def inOrder2(self, node):
        if node is None:
            return []
        return self.inOrder2(node.left) + [node.key] + self.inOrder2(node.right)

    def preOrder(self):
        """Return all keys in root-left-right order."""
        return self.preOrder2(self.root)

    def preOrder2(self, node):
        if node is None:
            return []
        return [node.key] + self.preOrder2(node.left) + self.preOrder2(node.right)
# newTree = BST()
# newTree.put(5)
# newTree.put(7)
# newTree.put(1)
# newTree.put(2)
# newTree.put(2)
# newTree.put(10)
# newTree.put(8)
# print(newTree.get(10).key)
# print(newTree.inOrder())
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,285
|
jzxr/TraceTogether
|
refs/heads/main
|
/sms.py
|
from DataStructuresandAlgorithms.queue import Queue
from DataStructuresandAlgorithms.AVL import AVL_Tree
import csv
from pathlib import Path
from itertools import chain
from whatsapp import send_whatsapp_msg
def avlQueueFirst(infected_phoneNo, deg):
    """
    @Param phone no. and 1st or 2nd degree contact.
    @Return Queue Object of unique "+65"-prefixed numbers, or False when
    either source CSV cannot be read.
    """
    results = Path("Data Sets/Results/")

    def load_rows(path):
        # Read a results CSV, dropping the first (date) column of every row.
        rows = []
        with open(path, 'r') as handle:
            for row in csv.reader(handle):
                row.pop(0)
                rows.append(row)
        return rows

    # TT (token) contact data for the requested degree.
    if deg == 1:
        tt_name = str(infected_phoneNo) + "_firstDegreeContact.csv"
    elif deg == 2:
        tt_name = str(infected_phoneNo) + "_secondDegreeContact.csv"
    tt_path = results / tt_name
    try:
        tt_rows = load_rows(tt_path)
    except Exception as e:
        print(e)
        return False

    # SE (SafeEntry) contact data for the requested degree.
    if deg == 1:
        se_name = str(infected_phoneNo) + "_FirstDegree_SE.csv"
    elif deg == 2:
        se_name = str(infected_phoneNo) + "_SecondDegree_SE.csv"
    se_path = results / se_name
    try:
        se_rows = load_rows(se_path)
    except Exception as e:
        print(e)
        return False

    # Flatten both sources, then merge through an AVL tree so duplicate
    # numbers collapse and the remainder comes out sorted.
    dedup = AVL_Tree()
    for number in chain(chain.from_iterable(tt_rows), chain.from_iterable(se_rows)):
        dedup.put(str(number))

    # Queue the unique numbers in "+65" format.
    contact_queue = Queue()
    for number in dedup.inOrder():
        contact_queue.enqueue("+65" + number)
    return contact_queue
#Dun use this one cause will really send LOL and spam people.
def __sendSMS(firstContactQueue, secondContactQueue):
    """Drain both contact queues; the actual WhatsApp send stays disabled so
    strangers are not spammed during testing."""
    msgList = ["Hi you been in contact with an infected person. Please Quratine from today.",
    "Hi you been contact with someone that has close contact with an infected, please monitor your health."]
    # Same handling for both queues: pop every number, sending suppressed.
    for queue in (firstContactQueue, secondContactQueue):
        try:
            while queue.isEmpty() is False:
                # send_whatsapp_msg(queue.dequeue(), ...) intentionally left out.
                queue.dequeue()  # for testing
        except Exception as e:
            print(e)
    print("All SHN and Notice Sent.")
def actuallySendSMS(phoneNo):
    """Send the quarantine notice to the fixed demo numbers.

    NOTE(review): the *phoneNo* argument is not used — the recipient list is
    hard-coded, as in the original; confirm whether that is intended.
    """
    phoneNoList = ["+6597604430", "+6597601182","+6588877112"]
    for recipient in phoneNoList:
        try:
            send_whatsapp_msg(recipient, "Hi, you come to contact with a confirmed case. You are expected to quarantine at your home for 14 days till the MOH will contact you for more details. For more details, please visit MOH website.")
        except Exception as e:
            print(e)
def sendSHN_Notice(infected_phoneNo, ActualSend_No):
    """Build first/second degree contact queues and dispatch their notices."""
    shn_queue = avlQueueFirst(infected_phoneNo, 1)
    monitor_queue = avlQueueFirst(infected_phoneNo, 2)
    __sendSMS(shn_queue, monitor_queue)
    try:
        actuallySendSMS(ActualSend_No)
    except:
        print("No Actual Phone Number")
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,286
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/mergeSort.py
|
def mergeSort(n):
    """Sort the list *n* in place with a stable, recursive merge sort."""
    if len(n) <= 1:
        return
    mid = len(n) // 2
    left = n[:mid]
    right = n[mid:]
    mergeSort(left)
    mergeSort(right)
    li = ri = out = 0
    # Merge the two sorted halves back into n.
    while li < len(left) and ri < len(right):
        if left[li] <= right[ri]:
            n[out] = left[li]
            li += 1
        else:
            n[out] = right[ri]
            ri += 1
        out += 1
    # Copy whichever half still has elements left.
    while li < len(left):
        n[out] = left[li]
        li += 1
        out += 1
    while ri < len(right):
        n[out] = right[ri]
        ri += 1
        out += 1
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,287
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/interpolationSearch.py
|
#Ref:
#https://github.com/williamfiset/Algorithms/blob/master/src/main/java/com/williamfiset/algorithms/search/InterpolationSearch.java
#https://www.geeksforgeeks.org/interpolation-search/
#For uniformly distributed lists.
#Worst Case O(n)
#Average Case O(log(log(n)))
#When the element to be searched is closer to data[hi],
# the mid value is ideally higher; the opposite if the element is closer to data[lo]
#Formula for mid:
# Let's assume that the elements of the array are linearly distributed.
# General equation of line : y = m*x + c.
# y is the value in the array and x is its index.
# Now putting value of lo,hi and x in the equation
# arr[hi] = m*hi+c ----(1)
# arr[lo] = m*lo+c ----(2)
# x = m*pos + c ----(3)
# m = (arr[hi] - arr[lo] )/ (hi - lo)
# subtracting eqxn (2) from (3)
# x - arr[lo] = m * (pos - lo)
# lo + (x - arr[lo])/m = pos
# pos = lo + (x - arr[lo]) *(hi - lo)/(arr[hi] - arr[lo])
class interpolationSearch:
    """Interpolation search over a sorted, uniformly distributed sequence.

    Values may be numbers or numeric strings: they are coerced to float
    once, so the interpolation arithmetic works either way.  The original
    compared raw strings, which made the shipped demo below return None.
    A lo<=hi bound and an equal-endpoint guard also prevent index errors
    on empty input and division by zero on constant runs.
    """

    def __init__(self, sortedData, value):
        self.data = sortedData
        self.value = value

    def search(self):
        """Return the index of ``value`` in ``data``, or None if absent."""
        # Coerce once so the subtraction/division below are valid even for
        # numeric strings such as "25".
        data = [float(x) for x in self.data]
        value = float(self.value)
        lo = 0
        hi = len(data) - 1
        while lo <= hi and data[lo] <= value <= data[hi]:
            if data[hi] == data[lo]:
                # Remaining candidates are all equal; avoid dividing by zero.
                return lo if data[lo] == value else None
            # pos = lo + (x - arr[lo]) * (hi - lo) / (arr[hi] - arr[lo])
            pos = lo + int((value - data[lo]) * (hi - lo) / (data[hi] - data[lo]))
            if data[pos] < value:
                lo = pos + 1
            elif data[pos] > value:
                hi = pos - 1
            else:
                return pos
        return None

data = ["10", "20", "25", "35", '50', '70', '85', '100', '110', '120', '125']
test = interpolationSearch(data,'25')
print(test.search())  # now prints 2 (index of "25"); previously None
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,288
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/graph.py
|
class Node(object):
    """A graph vertex: key/value pair plus a list of adjacent Node objects."""

    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value
        self.adj = []  # adjacent node objects
class Graph(object):
    """Directed graph stored as an adjacency list of Node objects."""

    def __init__(self):
        # Maps key -> Node.
        self.nodes = {}

    def add(self, f, t):
        """Add edge f -> t, creating either endpoint on demand.

        Fixes two bugs in the original: membership tests referenced the
        bare name ``nodes`` (NameError — should be ``self.nodes``), and the
        duplicate-edge check compared the key *t* against Node objects, so
        it never matched and duplicates were always appended.
        """
        if f not in self.nodes:
            self.nodes[f] = Node(key=f)
        if t not in self.nodes:
            self.nodes[t] = Node(key=t)
        if self.nodes[t] not in self.nodes[f].adj:
            self.nodes[f].adj.append(self.nodes[t])
# Create graph object
graph = Graph()
# Loop the elements in the row
# ...
# NOTE(review): dead fragment — ``row`` and ``phonenumbers`` are never
# defined in this file, so this loop raises NameError if executed;
# presumably it was pasted from the CSV-reading caller. Confirm before use.
for number in range(len(row)):
    if number == 0:
        phone_number = row[number]
    else:
        if row[number] == -1:
            graph.add(phone_number,phonenumbers[number])
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,289
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/genDataCT.py
|
import csv,random

# Generates 25 days of fake contact-tracing data: one row per date, each
# holding ten random 8-digit phone numbers.
with open('99995219_CT.csv', mode='w') as csv_file:
    csv_write = csv.writer(csv_file, delimiter=',',quotechar='"', quoting = csv.QUOTE_MINIMAL,lineterminator = "\n")
    # Hoisted out of the loop: the original rebuilt this list on every pass.
    datelist = ['20/1/2021','21/1/2021','22/1/2021','23/1/2021','24/1/2021',
                '25/1/2021','26/1/2021','27/1/2021','28/1/2021','29/1/2021',
                '30/1/2021','31/1/2021','1/2/2021','2/2/2021','3/2/2021',
                '4/2/2021','5/2/2021','6/2/2021','7/2/2021','8/2/2021',
                '9/2/2021','10/2/2021','11/2/2021','12/2/2021','13/2/2021']
    for day in range(25):
        # The original reused ``i`` for both loops, shadowing the day index.
        data = [datelist[day]]
        for _ in range(10):
            data.append(random.randrange(80000000,99999999))
        csv_write.writerow(data)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,290
|
jzxr/TraceTogether
|
refs/heads/main
|
/infectedGraph_SE.py
|
import matplotlib.pyplot as plt
import networkx as nx
import csv
from pathlib import Path
from itertools import chain
# Helper Function to adds 1st degree and 2nd degree node to graph
def createNodes_Edge_First(graph, infectedNumber, newHashAVL):
    """Walk every AVL bucket of *newHashAVL* and add its contacts to *graph*.

    The original built ``key_list``/``val_list`` from ``newHashAVL.key`` and
    never used them; those dead locals are removed.
    """
    for bucket in newHashAVL.st:
        createNodes_Edge_First2(bucket, graph, infectedNumber)
# Add 1st and 2nd degree node to graph
def createNodes_Edge_First2(node, graph, infectedNumber):
    """In-order walk of an AVL subtree, adding each contact and its edges.

    Uses ``!=`` instead of the original identity test ``is not``: phone
    numbers are plain ints/strings, and two equal numbers are not
    guaranteed to be the same object, so the identity check could wrongly
    treat the infected person as a first-degree contact.
    """
    if node:
        createNodes_Edge_First2(node.left, graph, infectedNumber)
        if node.parentNode != infectedNumber:
            graph.add_node(node.parentNode)
            graph.add_edge(node.parentNode, node.value)
            graph.add_edge(infectedNumber, node.parentNode)
        createNodes_Edge_First2(node.right, graph, infectedNumber)
# Categories the infected person, 1st and 2nd degree with different colors
# Red: Infected Person
# Orange: 1st Degree Contact
# Yellow: 2nd Degree Contact
def createColorNodes(graph, infectedNumber):
    """Return a per-node color list for drawing.

    Red: infected person; Orange: direct (1st degree) neighbor of the
    infected node; Yellow: everything else (2nd degree).
    """
    def pick(vertex):
        if vertex == infectedNumber:
            return 'red'
        if infectedNumber in nx.all_neighbors(graph, vertex):
            return 'orange'
        return 'yellow'

    return [pick(vertex) for vertex in graph]
# Driver Function
# Driver Function
def infectedPlot(infectedNumber, newHashAVL):
    """Render the contact graph for one infected number to graph.png."""
    contact_graph = nx.Graph()
    createNodes_Edge_First(contact_graph, infectedNumber, newHashAVL)
    contact_graph.add_node(infectedNumber, color= "red")
    palette = createColorNodes(contact_graph, infectedNumber)
    nx.draw(contact_graph, node_color=palette, with_labels=False)
    plt.savefig("graph.png", format= "png")
    #plt.show()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,291
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/adjacencylist.py
|
class Vertex:
    """A graph vertex: id, weighted neighbor map, and BFS bookkeeping
    (color, distance, predecessor)."""

    def __init__(self, key):
        self.id = key
        self.connectedTo = {}     # neighbor Vertex -> edge weight
        # Traversal color: White = not visited, Grey = partially visited,
        # Black = fully visited.
        self.color = None
        self.distance = None      # number of nodes from the origin node
        self.predecessor = None   # parent vertex in the traversal tree

    def addNeighbor(self, nbr, weight=0):
        """Connect this vertex to *nbr* with the given edge weight."""
        self.connectedTo[nbr] = weight

    def getConnections(self):
        """Return the neighboring Vertex objects."""
        return self.connectedTo.keys()

    def getId(self):
        return self.id

    def getWeight(self, nbr):
        """Edge weight to *nbr*; raises KeyError if not connected."""
        return self.connectedTo[nbr]

    def getColor(self):
        return self.color

    def setColor(self, color):
        self.color = color

    def getDistance(self):
        return self.distance

    def setDistance(self, distance):
        self.distance = distance

    def getPrececessor(self):
        return self.predecessor

    def setPrececessor(self, predecessor):
        self.predecessor = predecessor
class Graph:
    """Directed graph of Vertex objects keyed by caller-supplied ids."""

    def __init__(self):
        self.vertList = {}
        self.numVertices = 0

    def addVertex(self, key):
        """Create, register and return a new white-colored Vertex."""
        self.numVertices = self.numVertices + 1
        newVertex = Vertex(key)
        self.vertList[key] = newVertex
        # New vertices start unvisited.
        newVertex.setColor("White")
        return newVertex

    def getVertex(self, n):
        """Return the vertex for *n*; raises KeyError if absent."""
        return self.vertList[n]

    def getVertexNB(self, n):
        """Non-raising lookup: vertex for *n*, or None if absent."""
        return self.vertList.get(n)

    def __contains__(self, n):
        return n in self.vertList

    def addEdge(self, f, t, weight=0):
        """Add edge f -> t, creating either endpoint on demand.

        The original stored addVertex's return in an unused local ``nv``;
        those dead assignments are removed.
        """
        if f not in self.vertList:
            self.addVertex(f)
        if t not in self.vertList:
            self.addVertex(t)
        self.vertList[f].addNeighbor(self.vertList[t], weight)

    def getVertices(self):
        return self.vertList.keys()

    def __iter__(self):
        return iter(self.vertList.values())

    def printGraph(self, g):
        """Print 'Id: <id> NB: <neighbor ids>' for every vertex of *g*."""
        for v in g:
            print("Id:", v.getId(), end=" NB: ")
            for w in v.getConnections():
                print(w.getId(), end= " ")
            print()
g = Graph()

# Vertices 1..14 of the sample network.
for key in range(1, 15):
    g.addVertex(key)

# Directed edges of the sample network, added in the original order.
for src, dst in [
    (1, 5), (1, 2), (1, 3),
    (2, 1), (2, 5), (2, 3),
    (3, 1), (3, 2), (3, 4), (3, 6),
    (4, 5), (4, 3), (4, 9), (4, 10), (4, 7),
    (5, 1), (5, 2), (5, 4),
    (6, 3), (6, 8),
    (7, 4), (7, 10), (7, 8),
    (8, 7), (8, 6), (8, 11),
    (9, 4), (9, 14), (9, 10),
    (10, 9), (10, 4), (10, 7), (10, 12), (10, 11),
    (11, 8), (11, 10), (11, 13),
    (12, 14), (12, 13), (12, 10),
    (13, 12), (13, 11),
    (14, 9), (14, 12),
]:
    g.addEdge(src, dst)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,292
|
jzxr/TraceTogether
|
refs/heads/main
|
/DataStructuresandAlgorithms/HashMapwAVL.py
|
import datetime
class Node:
    """AVL tree node: ``key`` is the contact date, ``value`` the phone number."""

    # Class-level defaults preserved from the original interface.
    key = 0
    value = 0
    next = None

    def __init__(self, key, value, location, parentNode, color):
        self.key = key                # date of contact
        self.value = value            # contacted phone number
        self.left = None
        self.right = None
        self.height = 1               # AVL height of the subtree rooted here
        self.location = location
        self.parentNode = parentNode
        # Red: infected, Orange: 1st degree, Yellow: 2nd degree
        self.color = color
class HashMap:
# Date Range: 20/1/2021 to 13/2/21
dateRange = list()
def __init__(self, infectionDate, daterange):
# Size is 25; cause storing contact tracing for 25 days
self.size = daterange
# Flag for Probing of Date
self.flag = [False for x in range(self.size)]
self.key = dict()
# Array for storing Dates
self.st = [None for x in range(self.size)]
self.dateBase = datetime.datetime.strptime(infectionDate, '%d/%m/%Y').date()
self.collectionofDates()
# Set keys for HashMap for the 25 days
self.setKey()
# Get Date Range: 20/1/2021 to 13/2/21
def collectionofDates(self):
for i in range(self.size):
newDate = self.dateBase - datetime.timedelta(days=i)
HashMap.dateRange.append(int(newDate.strftime('%d%m%Y')))
# Linear Probing for Date
def setKey(self):
for i in range(len(HashMap.dateRange)):
counter = 0
# Collision Detection
while self.flag[(HashMap.dateRange[i] % self.size) + counter] is True:
if ((HashMap.dateRange[i] % self.size) + counter) < self.size - 1:
counter += 1
else:
counter -= self.size - 1
self.key[HashMap.dateRange[i]] = (HashMap.dateRange[i] % self.size) + counter
self.flag[(HashMap.dateRange[i] % self.size) + counter] = True
# Get HashMap Keys
def getkeys(self):
for date, value in self.key.items():
print(value,' : ', date)
# Get Key of Date
def getKey(self, date):
date = int(date.strftime('%d%m%Y'))
key = self.key.get(date)
if key is not None:
return key
def put(self, key, value, location, parentNode, color):
k = self.getKey(key)
if k is not None:
node = self.st[k]
self.st[k] = self.put2(self.st[k], key, value, location, parentNode, color)
else:
return
def put2(self, node, key, value, location, parentNode, color):
if node is None:
return Node(key, value, location, parentNode, color)
elif value < node.value:
node.left = self.put2(node.left, key, value, location, parentNode, color)
elif value > node.value:
node.right = self.put2(node.right, key, value, location, parentNode, color)
else:
return node
node.height = 1 + max(self.getHeight(node.left), self.getHeight(node.right))
balance = self.getBalance(node)
# LL Rotation
if balance > 1 and value < node.left.value:
return self.rightRotate(node)
# RR Right
if balance < -1 and value > node.right.value:
return self.leftRotate(node)
# Left Right
if balance > 1 and value > node.left.value:
node.left = self.leftRotate(node.left)
return self.rightRotate(node)
# Right Left
if balance < -1 and value < node.right.value:
node.right = self.rightRotate(node.right)
return self.leftRotate(node)
return node
def getHeight(self, root):
if root is None:
return 0
return root.height
def getBalance(self, root):
if root is None:
return 0
return self.getHeight(root.left) - self.getHeight(root.right)
def leftRotate(self, z):
y = z.right
T2 = y.left
y.left = z
z.right = T2
z.height = 1 + max(self.getHeight(z.left), self.getHeight(z.right))
y.height = 1 + max(self.getHeight(y.left), self.getHeight(y.right))
return y
def rightRotate(self, z):
y = z.left
T3 = y.right
y.right = z
z.left = T3
z.height = 1 + max(self.getHeight(z.left), self.getHeight(z.right))
y.height = 1 + max(self.getHeight(y.left), self.getHeight(y.right))
return y
def inOrder(self, node):
arr = []
if node:
arr += self.inOrder(node.left)
arr.append(node.value)
arr += self.inOrder(node.right)
return arr
def preOrder(self, node):
if node:
print(node.key, node.value)
self.preOrder(node.left)
self.preOrder(node.right)
def printAVLTree(self):
key_list = list(self.key.keys())
val_list = list(self.key.values())
for i in range(len(self.st)):
print(key_list[val_list.index(i)], end=": ")
if self.st[i] is None:
print("None")
else:
contactedlist = self.inOrder(self.st[i])
print(contactedlist)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,293
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/samplenodes.py
|
import networkx as nx
import matplotlib.pyplot as plt
# load the graph (the Les Miserables co-appearance network bundled with networkx)
G = nx.les_miserables_graph()
# visualize the graph
# NOTE(review): no plt.show()/figure reset here — this draw shares the
# current figure with the community plot at the bottom of the file.
nx.draw(G, with_labels = True)
def edge_to_remove(graph):
    """Return the edge with the highest edge-betweenness centrality,
    or an empty tuple when the graph has no edges."""
    G_dict = nx.edge_betweenness_centrality(graph)
    ranked = sorted(G_dict.items(), key=lambda item: item[1], reverse = True)
    # First entry after the descending sort is the top-scoring edge.
    return ranked[0][0] if ranked else ()
def girvan_newman(graph):
    """Split *graph* into communities by removing the highest-betweenness
    edge until it is no longer connected; returns the component generator.

    The original called ``edge_to_remove`` twice per iteration, doubling
    the dominant betweenness-centrality cost; it is now computed once.
    """
    # find number of connected components
    sg = nx.connected_components(graph)
    sg_count = nx.number_connected_components(graph)
    while(sg_count == 1):
        u, v = edge_to_remove(graph)
        graph.remove_edge(u, v)
        sg = nx.connected_components(graph)
        sg_count = nx.number_connected_components(graph)
    return sg
# find communities in the graph (girvan_newman mutates the copy, not G)
c = girvan_newman(G.copy())
# find the nodes forming the communities
node_groups = []
for i in c:
    node_groups.append(list(i))
# plot the communities: blue = first community, green = everything else
color_map = []
for node in G:
    if node in node_groups[0]:
        color_map.append('blue')
    else:
        color_map.append('green')
nx.draw(G, node_color=color_map, with_labels=False)
plt.show()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,294
|
jzxr/TraceTogether
|
refs/heads/main
|
/DataStructuresandAlgorithms/queue.py
|
class Queue:
    """FIFO queue backed by a Python list.

    ``dequeue`` and ``peek`` return ``None`` on an empty queue instead of
    raising, matching the contract callers (e.g. the SMS sender) rely on.
    The bare ``except`` clauses are narrowed to ``IndexError`` so real bugs
    are no longer swallowed, and ``peek`` now returns explicitly.
    """

    def __init__(self):
        # Index of the last element; -1 means the queue is empty.
        self.rear = -1
        self.data = []

    def enqueue(self, value):
        """Append *value* to the back of the queue."""
        self.data.append(value)
        self.rear += 1

    def dequeue(self):
        """Remove and return the front value, or None when empty."""
        try:
            value = self.data[0]
        except IndexError:
            return None
        del self.data[0]
        self.rear -= 1
        return value

    def isEmpty(self):
        """Return True when the queue holds no elements."""
        return self.rear == -1

    def peek(self):
        """Return the front value without removing it, or None when empty."""
        try:
            return self.data[0]
        except IndexError:
            return None
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,295
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/dijikstra.py
|
def dijkstra(graph, src, dest, visited=None, distances=None, predecessors=None):
    """Calculates a shortest path tree rooted in src and prints the
    src -> dest path together with its total distance.

    *graph* is a dict of dicts: ``graph[u][v]`` is the edge weight u -> v.
    Raises TypeError when src or dest is not a graph node.

    Fix: the original declared mutable default arguments (``[]``/``{}``),
    so state leaked between top-level calls and a second call crashed on
    ``min()`` of an empty dict; fresh containers are now created per call.
    """
    if visited is None:
        visited = []
    if distances is None:
        distances = {}
    if predecessors is None:
        predecessors = {}
    # a few sanity checks
    if src not in graph:
        raise TypeError('The root of the shortest path tree cannot be found')
    if dest not in graph:
        raise TypeError('The target of the shortest path cannot be found')
    # ending condition
    if src == dest:
        # We build the shortest path and display it
        path = []
        pred = dest
        while pred != None:
            path.append(pred)
            pred = predecessors.get(pred, None)
        # reverses the array, to display the path nicely
        readable = path[0]
        for index in range(1, len(path)):
            readable = path[index] + '--->' + readable
        # prints it
        print('shortest path - array: ' + str(path))
        print("path: " + readable + ", distance=" + str(distances[dest]))
    else:
        # if it is the initial run, initializes the cost
        if not visited:
            distances[src] = 0
        # visit the neighbors
        for neighbor in graph[src]:
            if neighbor not in visited:
                new_distance = distances[src] + graph[src][neighbor]
                if new_distance < distances.get(neighbor, float('inf')):
                    distances[neighbor] = new_distance
                    predecessors[neighbor] = src
        # mark as visited
        visited.append(src)
        # now that all neighbors have been visited: recurse
        # select the non visited node with lowest distance 'x'
        # run Dijkstra with src='x'
        unvisited = {}
        for k in graph:
            if k not in visited:
                unvisited[k] = distances.get(k, float('inf'))
        x = min(unvisited, key=unvisited.get)
        dijkstra(graph, x, dest, visited, distances, predecessors)
if __name__ == "__main__":
    # Demo run: weighted adjacency map, phone number -> {neighbour: cost}.
    demo_graph = {
        '82863770': {'99053469': 2, '98741636': 1},
        '99053469': {'82863770': 3, '98741636': 4, '99995219': 8},
        '98741636': {'82863770': 4, '99053469': 2, '98173219': 2},
        '99995219': {'99053469': 2, '85991110': 7, '97604430': 4},
        '85991110': {'98741636': 1, '99995219': 11, '97604430': 5},
        '97604430': {'85991110': 3, '85991110': 5},
    }
    dijkstra(demo_graph, '82863770', '97604430')
# import networkx as nx
# import matplotlib.pyplot as plt
#graph will be based on 20/01/2021
# '99053469' : {'98741636' : 27},
# '82863770' : {},
# '93213933' : {'99995219': 50, '98439577': 32, '85991110': 4},
# '91700369' : {'99995219': 50, '98439577': 32, '85991110': 4},
# '93213933' : {},
# '98173219' : {},
# '91700369' : {'97604430' : 2, '99995219' : 2, '97601182' : 2, '86148198' : 2, '88877112' : 2, '91654703' : 44},
# # "Cathay Cineleisure Orchard" : {"91164088" : 11},
# "Capitol Singapore" : {"98114818" : 27, "92561060" : 21},
# "City Square Mall" : {"90175180" : 24, "94459175" : 4, "97604430" : 4, "88877112" : 1, "95749866" : 1},
# "Duo" : {"94185021" : 3},
# "Far East Plaza" : {"92245161" : 9, "90782389" : 21},
# "Funan" : {"86806044" : 11, "96625428" : 4, "98114818" : 1, "92561060" : 1},
# "Gek Poh Shopping Centre" : {"90782389" : 11},
# "Great World City" : {"94185021" : 27},
# "HDB Hub" : {"98173219" : 4, "87686851" : 5},
# "ION Orchard" : {"83858992" : 19},
# "IMM" : {"97936255" : 6, "98173219" : 6, "90515712" : 6, "88539489" : 11},
# "Jcube" : {"99002814" : 1, "90782389" : 1},
# "Jem" : {"98741632" : 8, "96301245" : 3},
# "Jewel Changi" : {"91806790" : 27},
# "Junction 8" : {"87686851" : 3},
# "Jurong Point" : {"98741631" : 1, "98741632" : 4, "96301245" : 1, "85571447" : 1},
# "Liang Court" : {"82116909" : 1},
# "Lucky Plaza" : {"97936255" : 10, "98173219" : 19},
# "Kallang Wave Mall" : {"96493305" : 40, "87083988" : 10},
# "Millenia Walk" : {"92245161" : 31},
# "Orchard Central" : {"96625428" : 6, "91164088" : 3},
# "Orchard Gateway" : {"99995219" : 31, "99002814" : 5},
# "People's Park Centre" : {"95066161" : 49},
# "People's Park Complex" : {"95066161" : 31},
# "Mustafa Shopping Centre" : {"95066161" : 3, "93861925" : 3, "95939567" : 3, "82204942" : 3},
# "Nex" : {"91654703" : 2},
# "Ngee Ann City" : {"99053469": 5, "99002814" : 5},
# "Northpoint" : {"99053469" : 1, "98741636" : 1, "86148198" : 1, "98439574" : 8, "98439575" : 1},
# "OUE Downtown" : {"94185021" : 31},
# "Paragon" : {"83858992" : 7, "91164088" : 7},
# "Paya Lebar Quarter" : {"89083988" : 4, "99124255" : 5},
# "Pioneer Mall" : {"92245161" : 1, "93993463" : 1},
# "Plaza Singapura" : {"92294434" : 1, "91806792" : 2},
# "PoMo" : {"90515712" : 7, "89083988" : 27, "87083988" : 6, "83858992" : 10},
# "Serangoon Plaza" : {"99124255" : 2},
# "Shaw House and Centre" : {"93861925" : 1},
# "Sim Lim Square" : {"96760458" : 11, "90515712" : 9},
# "Sim Lim Tower" : {"96760458" : 8, "90515712" : 6},
# "SIT@DOVER" : {"93993463" : 1},
# "SIT@NP" : {"96008055" : 10, "91164088" : 10},
# "SIT@NYP" : {"91806792" : 26, "93120759" : 1, "93690508" : 1},
# "SIT@RP" : {"93993463" : 3},
# "SIT@SP" : {"96008055" : 6, "91164088" : 6},
# "SIT@TP" : {"96008055" : 5, "91164088" : 5},
# "Sunshine Plaza" : {"82204942" : 1},
# "Suntec City" : {"83858992" : 40, "96625428" : 31, "86806044" : 11},
# "Square 2" : {"95939567" : 1},
# "Tampines 1" : {"96493305" : 31},
# "Tanglin Mall" : {"96625428" : 10, "88539489" : 8},
# "The Shoppes at Marina Bay Sands" : {"87686851" : 40},
# "The Star Vista" : {"85571447" : 7, "83638020" : 1},
# "Velocity@Novena Square" : {"88539489" : 31},
# "Vivo City" : {"82863770" : 27, "94459175" : 5},
# "Westgate Mall" : {"97936255" : 3,"96493305" : 3, "88539489" : 26, "96760458": 8},
# "Westmall" : {"82863770" : 1, "87309912" : 1, "97601182" : 4, "89652292" : 40, "91806790" : 4},
# "Wheelock Place" : {"87686851" : 3, "95066161" : 7, "94185021" : 49},
# "Zhongshan Mall" : {"91700369": 5, "82863770" : 5, "92294434" : 40, "97601182" : 4, "92245160" : 27, "83638020" : 27},
# def dijikstra(graph,start,end,visited=[], distance = {}, predecessors = {}):
# if start not in graph:
# raise TypeError ('The root of the shortest path tree cannot be found')
# if end not in graph:
# raise TypeError ('The target of the shortest path tree cannot be found')
# if start == end:
# network = []
# pred = end
# while pred != None:
# network.append(pred)
# pred = predecessors.get(pred,None)
# temp = network[0]
# for i in range(1,len(network)):
# temp = network[i] + '--->' + temp
# print('Shortest social network (in arr): ' + str(network))
# print('Network: ' + temp + ", cost=" + str(distance[end]))
# else:
# if not visited:
# distance[start] = 0
# for next in graph[start]:
# if next not in visited:
# newDistance < distance.get(next,float('inf'))
# distance[next] = newDistance
# predecessors[next] = start
# visited.append(start)
# unvisited = {}
# for k in graph:
# if k not in visited:
# unvisited[k] = distance.get(k,float('inf'))
# x = min(unvisited, key = unvisited.get)
# dijikstra(graph,x,start,visited, distance,predecessors)
# dijikstra(graph,'s', 't')
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,296
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/whatsapp.py
|
from time import sleep
from selenium.webdriver.common.by import By
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
import socket
import csv
from pathlib import Path
# --- Configuration loaded at import time (this file is a script) ---
# The outgoing message body is message.txt verbatim, newlines included.
message_text = ""
with open('message.txt') as msg_file:
    for text in msg_file:
        message_text+=text
no_of_message = 1
# no. of time you want the message to be send
moblie_no_list = []
# list of phone number can be of any length
# Numbers come from the first ';'-separated column of test.csv.
with open('test.csv', 'r') as csvfile:
    moblie_no_list = [int(row[0])
                      for row in csv.reader(csvfile, delimiter=';')]
# get mobile no from csv file
def element_presence(by, xpath, time):
    '''
    @author Rishit Dagli
    Block for up to *time* seconds until the element addressed by *xpath*
    is present in the DOM of the module-level ``driver``.
    '''
    locator = (By.XPATH, xpath)
    WebDriverWait(driver, time).until(EC.presence_of_element_located(locator))
def is_connected():
    '''
    @author Rishit Dagli
    Return True once www.google.com:80 is reachable, blocking (and
    retrying) while the network is down.

    Fixes in this revision: the probe socket is now closed instead of
    leaked; failure retries in a loop instead of unbounded recursion;
    the success value is actually returned to the caller (the original
    recursed without ``return``, so retried calls yielded None); only
    OSError is caught rather than BaseException, which also swallowed
    KeyboardInterrupt.
    '''
    while True:
        try:
            # connect to the host -- tells us if the host is actually reachable
            socket.create_connection(("www.google.com", 80)).close()
            return True
        except OSError:
            sleep(1)  # brief pause before re-probing
# Locate chromedriver.exe next to this script and start a Chrome session
# pointed at WhatsApp Web; runs at import because this file is a script.
path = Path().parent.absolute()
fileName = "chromedriver.exe"
path = path / fileName
print(path)
driver = webdriver.Chrome(executable_path=path)
driver.get("http://web.whatsapp.com")
sleep(10)
# wait time to scan the code in second
def send_whatsapp_msg(phone_no, text):
    '''
    @author Rishit Dagli
    Send *text* to *phone_no* via the already-open WhatsApp Web session
    (module-level ``driver``), repeating it ``no_of_message`` times.
    Invalid numbers are reported to stdout rather than raised.
    Call the is_connected function before this one.
    '''
    driver.get(
        "https://web.whatsapp.com/send?phone={}&source=&data=#".format(phone_no)
    )
    try:
        # NOTE(review): switch_to_alert() is deprecated in current Selenium
        # (switch_to.alert is the replacement) -- confirm installed version.
        driver.switch_to_alert().accept()
    except Exception as e:
        pass
    try:
        # Wait up to 30 s for the message text box, then type the message.
        element_presence(
            By.XPATH,
            '//*[@id="main"]/footer/div[1]/div[2]/div/div[2]',
            30)
        txt_box = driver.find_element(
            By.XPATH, '//*[@id="main"]/footer/div[1]/div[2]/div/div[2]')
        global no_of_message
        for x in range(no_of_message):
            txt_box.send_keys(text)
            txt_box.send_keys("\n")
    except Exception as e:
        print("Invailid phone no :" + str(phone_no))
def main():
    '''
    @author Rishit Dagli
    Fan the prepared message out to every number loaded from test.csv.
    A failed send triggers a 10-second pause and a connectivity re-check
    before continuing with the remaining numbers.
    '''
    for number in moblie_no_list:
        try:
            send_whatsapp_msg(phone_no=number, text=message_text)
        except Exception:
            sleep(10)
            is_connected()
'''
print("functions- main, element_presence, is_connected, send_whatsapp_msg")
print("Docs")
print(main.__doc__)
print(element_presence.__doc__)
print(is_connected.__doc__)
print(send_whatsapp_msg.__doc__)
'''
# if __name__ == '__main__':
# main()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,297
|
jzxr/TraceTogether
|
refs/heads/main
|
/main.py
|
from contactCT import contactCT
from sms import sendSHN_Notice
from FindContacted_SE import findContactSE
from infectedGraph_SE import infectedPlot
from clusterTable import createClusterTable
from dataPrep_HeatMap import data_prep
#CLI interface
def main():
    """CLI driver: validate the inputs, then run the contact-tracing pipeline.

    Fixes in this revision: the original stripped leading zeros only for
    days/months listed in a hard-coded '01'..'09' table, and left ``x1``
    undefined (NameError at the date-rebuild line) whenever the day was
    10 or greater; a dead pre-assignment of infectionDate is removed.
    """
    # Inputs
    infected_phoneNo = input("Please Enter the infected Phone No.: 86148198\n")
    if len(infected_phoneNo) != 8 or int(infected_phoneNo) < 80000000 or int(infected_phoneNo) > 99999999:
        print("Invalid Phone Number")
        return
    infectionDate = input("Please Enter the date of infection: Eg. 13/2/2021 or 9/2/2021\n")
    day, month, year = infectionDate.split("/", 2)
    # Normalise zero-padded components ('09' -> '9'); the `or "0"` guard
    # keeps a literal "0" from collapsing to the empty string.
    day = day.lstrip("0") or "0"
    month = month.lstrip("0") or "0"
    infectionDate = day + "/" + month + "/" + year
    # Create CSV of close contact and second contact.
    contactCT(infected_phoneNo, infectionDate, 14)
    newHashAVL = findContactSE(infected_phoneNo, infectionDate, 14)
    # Create Cluster Table
    createClusterTable()
    # Send SHN Notice and infection vector graph.
    sendSHN_Notice(infected_phoneNo, "+6586148198")
    infectedPlot(infected_phoneNo, newHashAVL)
    data_prep()
if __name__ == "__main__":
main()
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,743,298
|
jzxr/TraceTogether
|
refs/heads/main
|
/depreciated/CT_HashMap.py
|
import datetime, csv, os
#import pathlib import Path
class Node:
    """Singly linked hash-bucket node.

    ``key`` is a date, ``value`` a phone number, ``next`` the next node
    in the chain.  The original declared all three as shared class
    attributes and relied on the class-level ``next = None`` for fresh
    instances; all state is now per-instance.
    """

    def __init__(self, key, value):
        self.key = key       # Key is Date
        self.value = value   # Value is Phone Number
        self.next = None     # next node in this bucket's chain
class HashMap:
    """Date-keyed, separate-chaining hash map of contact phone numbers.

    Keys are the 25 days ending at ``dateBase`` (2021-02-13), mapped to
    bucket slots via modulo + linear probing computed once in setKey().
    Buckets (``st``) chain Node objects whose value is a phone number.

    NOTE(review): several methods call sys.exit(), but this module only
    imports datetime, csv and os -- the csv.Error paths would raise
    NameError.  ``dateRange`` is a class attribute, so constructing a
    second HashMap appends another 25 dates to the shared list.
    """
    size = 25
    dateBase = datetime.datetime(2021, 2, 13)
    dateRange = list()
    def __init__(self):
        # Size is 25; cause 25 days
        self.size = HashMap.size
        # Flag for Linear Probing of Date
        self.flag = [False for x in range(self.size)]
        self.key = dict()
        self.st = [None for x in range(self.size)]
        # Get 25 Days
        self.putDate()
        # Set keys for the 25 days
        self.setKey()
    # Store today-constant to today date in array
    def putDate(self):
        """Append the 25 dates (as YYYYMMDD ints) to the shared dateRange."""
        for i in range(self.size):
            newDate = HashMap.dateBase - datetime.timedelta(days=i)
            HashMap.dateRange.append(int(newDate.strftime('%Y%m%d')))
    # Linear Probing: Map Date to Key
    def setKey(self):
        """Assign each date a unique slot index, probing past collisions."""
        for i in range(len(HashMap.dateRange)):
            counter = 0
            while self.flag[(HashMap.dateRange[i] % self.size) + counter] is True:
                # If Flag is within array bound
                if ((HashMap.dateRange[i] % self.size) + counter) < self.size - 1:
                    counter += 1
                # Else reset to first index of flag
                else:
                    counter -= self.size - 1
            self.key[HashMap.dateRange[i]] = (
                HashMap.dateRange[i] % self.size) + counter
            self.flag[(HashMap.dateRange[i] % self.size) + counter] = True
    # Get Key of Date
    def getKey(self, date):
        """Return the slot index for *date* (a datetime), or None (printing a notice)."""
        date = int(date.strftime('%Y%m%d'))
        key = self.key.get(date)
        if key is not None:
            return key
        else:
            print("Key", date, "does not exist")
    # Print List of Keys
    def printKey(self):
        """Dump the date -> slot mapping to stdout."""
        print(self.key)
    # Map phone number to HashMap
    def put(self, key, value):
        """Prepend *value* to the chain for date *key*, skipping duplicates."""
        k = self.getKey(key)
        if k is not None:
            node = self.st[k]
            while node is not None:
                # Key exist
                if node.value == value:
                    return
                node = node.next
            node = Node(key, value)
            node.next = self.st[k]
            self.st[k] = node
        else:
            return
    # Searching Algo: Using a Simple Linear search using 3 list
    def selectFromCsv(self, phone_number):
        """Walk the location CSVs for *phone_number*'s check-ins.

        Returns [phone numbers, file paths, start times, end times]
        (parallel lists) from the first file that mentions the number.
        NOTE(review): the inner ``with open(...) as f`` rebinds ``f``
        while the outer loop is still iterating the binary handle, and
        ``potential`` is assigned but never used.
        """
        phonenumberlist = list()
        locationlist = list()
        starttimelist = list()
        endtimelist = list()
        # path to the root directory to search
        root_dir = "/Data Sets/Location SE Data"
        # walk the root dir
        for root, dirs, files in os.walk(root_dir, onerror=None):
            for filename in files: # iterate over the files in the current dir
                file_path = os.path.join(root, filename) # build the file path
                try:
                    with open(file_path, "rb") as f: # open the file for reading
                        # read the file line by line
                        # use: for i, line in enumerate(f) if you need line numbers
                        for line in f:
                            try:
                                # try to decode the contents to utf-8
                                line = line.decode("utf-8")
                            except ValueError: # decoding failed, skip the line
                                continue
                            if phone_number in line: # if the infected exists on the current line...
                                # print(file_path) # print the file path
                                # break # no need to iterate over the rest of the file
                                with open(file_path, mode='r') as f:
                                    reader = csv.DictReader(f)
                                    line_count = 0
                                    potential = list()
                                    try:
                                        for row in reader:
                                            if line_count < 1:
                                                line_count += 1
                                            # Read data from each row
                                            if row["Phone No."] == phone_number:
                                                starttime = datetime.datetime.strptime(
                                                    row["Check-in date"] + " " + row["Check-in time"], "%d/%m/%Y %H%M")
                                                endtime = datetime.datetime.strptime(
                                                    row["Check-in date"] + " " + row["Check-out time"], "%d/%m/%Y %H%M")
                                                print(starttime, " ", endtime)
                                                phonenumberlist.append(phone_number)
                                                locationlist.append(file_path)
                                                starttimelist.append(starttime)
                                                endtimelist.append(endtime)
                                        potentialList = [phonenumberlist, locationlist, starttimelist, endtimelist]
                                        return potentialList
                                    except csv.Error as e:
                                        # NOTE(review): sys is never imported in this module
                                        sys.exit('file {}, line {}: {}'.format(filename, reader.line_num, e))
                                break
                            else:
                                print("Phone not found in path.")
                except (IOError, OSError): # ignore read and permission errors
                    pass
    # "Qualify" the potentials and filter them with check-in date, check-in time, check-out time
    def SearchContacted(self, arr):
        """Filter *arr* (selectFromCsv output) against every CSV row whose
        visit window overlaps the infected person's, put()-ing each hit.

        NOTE(review): reads the module-global ``phone_number`` set by the
        driver script at the bottom of this file -- it is not a parameter.
        """
        # path to the root directory to search
        root_dir = "/Data Sets/Location SE Data"
        # walk the root dir
        for root, dirs, files in os.walk(root_dir, onerror=None):
            for filename in files: # iterate over the files in the current dir
                file_path = os.path.join(root, filename) # build the file path
                try:
                    with open(file_path, "rb") as f: # open the file for reading
                        # read the file line by line
                        # use: for i, line in enumerate(f) if you need line numbers
                        for line in f:
                            try:
                                # try to decode the contents to utf-8
                                line = line.decode("utf-8")
                            except ValueError: # decoding failed, skip the line
                                continue
                            if phone_number in line: # if the infected exists on the current line...
                                # print(file_path) # print the file path
                                # break # no need to iterate over the rest of the file
                                for i in range(len(arr[0])):
                                    with open(file_path, mode='r') as f:
                                        reader = csv.DictReader(f)
                                        line_count = 0
                                        try:
                                            for row in reader:
                                                if line_count < 1:
                                                    line_count += 1
                                                if arr[0][i] != row["Phone No."]:
                                                    checkintime = datetime.datetime.strptime(
                                                        row["Check-in date"] + " " + row["Check-in time"], "%d/%m/%Y %H%M")
                                                    checkoutime = datetime.datetime.strptime(
                                                        row["Check-in date"] + " " + row["Check-out time"], "%d/%m/%Y %H%M")
                                                    # print(checkintime, " ", checkoutime)
                                                    # Check for infected
                                                    if (checkoutime > arr[3][i] or checkintime < arr[2][i]):
                                                        continue
                                                    else:
                                                        try:
                                                            self.put(datetime.datetime.strptime(
                                                                row["Check-in date"], '%d/%m/%Y').date(), row["Phone No."])
                                                        except ValueError:
                                                            print(
                                                                row["Check-in date"], 'Date does not exist')
                                        except csv.Error as e:
                                            sys.exit('file {}, line {}: {}'.format(
                                                filename, reader.line_num, e))
                                break
                            else:
                                print("Phone not found in path.")
                except (IOError, OSError): # ignore read and permission errors
                    pass
    # Print HasMap to console
    def print(self):
        """Pretty-print every bucket as 'date: value, value, ...'."""
        # list out keys and values separately
        # Reference: https://www.geeksforgeeks.org/python-get-key-from-value-in-dictionary/
        key_list = list(self.key.keys())
        val_list = list(self.key.values())
        for i in range(len(self.st)):
            temp = self.st[i]
            # print key with val i
            print(key_list[val_list.index(i)], end=': ')
            if temp == None:
                print("None", end="")
            while temp is not None:
                print(temp.value, end=", ")
                temp = temp.next
            print()
    # Write HashMap to csv
    def writeToTTCsv(self, phone_number):
        """Write each non-empty bucket as one CSV row: [date, value, ...]."""
        # file name needs to be replaced with phone number
        with open('Data Sets/TraceTogether/'+phone_number+'_TT.csv', mode='w') as data_file:
            data_writer = csv.writer(
                data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
            for i in range(len(self.st)):
                potentialList = list()
                temp = self.st[i]
                if temp == None:
                    continue
                else:
                    while temp is not None:
                        potentialList.append(temp.value)
                        temp = temp.next
                    # Get key from first element in the Hash Map
                    potentialList.insert(0, self.st[i].key)
                    data_writer.writerow(potentialList)
    # Find the direct contact from the potentials and compare the "potentials"_CT.csv
    def findDirectContact(self, phone_number):
        """Scan the contact-tracing CSVs for *phone_number* and write the
        direct-contact CSV from the map's buckets.

        NOTE(review): the csv.Error handler references ``reader`` but the
        local is ``CTreader`` (NameError if it fires), and
        ``2 >= dist < 5`` chains to ``dist <= 2 and dist < 5``, i.e.
        effectively dist <= 2 -- likely ``2 <= dist < 5`` was intended.
        """
        # path to the root directory to search
        root_dir = "/Data Sets/Contact Tracing"
        storeAllList = list()
        dateList = list()
        personList = list()
        TTList = list()
        directContactList = list()
        # walk the root dir
        for root, dirs, files in os.walk(root_dir, onerror=None):
            for filename in files: # iterate over the files in the current dir
                file_path = os.path.join(root, filename) # build the file path
                try:
                    with open(file_path, "rb") as f: # open the file for reading
                        # read the file line by line
                        # use: for i, line in enumerate(f) if you need line numbers
                        for line in f:
                            try:
                                # try to decode the contents to utf-8
                                line = line.decode("utf-8")
                            except ValueError: # decoding failed, skip the line
                                continue
                            if phone_number in file_path: # if the infected exists on the current line...
                                with open(file_path, mode='r') as f:
                                    CTreader = csv.DictReader(f)
                                    try:
                                        for row in CTreader:
                                            personList.append(row)
                                        personList = line.split(',')
                                        for i in range(1, 9):
                                            storeAllList.append(
                                                personList[i].split(':'))
                                        #dateList.append(personList[0])
                                    except csv.Error as e:
                                        sys.exit('file {}, line {}: {}'.format(
                                            filename, reader.line_num, e))
                except (IOError, OSError): # ignore read and permission errors
                    pass
        for i in range(0, len(storeAllList)):
            # determine the potential list by their distance.
            # if distance at least 2m, and up to 5m.
            phone_number = storeAllList[i][0]
            dist = int(storeAllList[i][1])
            timeWindow = int(storeAllList[i][2])
            if 2 >= dist < 5:
                if timeWindow < 30:
                    directContactList = phone_number
        with open('Data Sets/Contact Tracing'+ str(phone_number) +'_DirectContact_infected.csv', mode='w') as data_file:
            data_writer = csv.writer(
                data_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
            for i in range(len(self.st)):
                directContactList = list()
                temp = self.st[i]
                if temp == None:
                    continue
                else:
                    while temp is not None:
                        directContactList.append(temp.value)
                        temp = temp.next
                    # # Get key from first element in the Hash Map
                    # infectedList.insert(0, self.st[i].key)
                    data_writer.writerow(directContactList)
        print("end of CT data")
# Module-level driver: prompts for the infected number, gathers their
# check-ins, finds overlapping contacts, and writes the TraceTogether CSV.
# NOTE(review): runs on import -- there is no __main__ guard.
newHash = HashMap()
phone_number = input("Enter the infected person's number: ")
arr = newHash.selectFromCsv(phone_number)
newHash.SearchContacted(arr)
newHash.writeToTTCsv(phone_number)
|
{"/dataPrep_HeatMap.py": ["/DataStructuresandAlgorithms/linked_list.py"], "/FindContacted_SE.py": ["/DataStructuresandAlgorithms/HashMapwAVL.py"], "/contactCT.py": ["/DataStructuresandAlgorithms/SeperateChaining.py"], "/clusterTable.py": ["/DataStructuresandAlgorithms/stack.py"], "/sms.py": ["/DataStructuresandAlgorithms/queue.py"], "/main.py": ["/contactCT.py", "/sms.py", "/FindContacted_SE.py", "/infectedGraph_SE.py", "/clusterTable.py", "/dataPrep_HeatMap.py"]}
|
30,792,669
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0007_follow.py
|
# Generated by Django 3.0.1 on 2020-02-07 17:11
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0.1): creates the Follow model for the
    post app -- do not edit by hand."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('post', '0006_auto_20200206_2143'),
    ]
    operations = [
        migrations.CreateModel(
            name='Follow',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subscribed_by', models.TextField()),
                ('time', models.DateTimeField()),
                ('subscribed_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,670
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0008_react_time.py
|
# Generated by Django 3.0.1 on 2020-02-07 18:39
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0.1): adds React.time.

    NOTE(review): the default is a frozen datetime literal because the
    model passed ``datetime.datetime.now()`` (called, not the callable)
    when this migration was generated.
    """
    dependencies = [
        ('post', '0007_follow'),
    ]
    operations = [
        migrations.AddField(
            model_name='react',
            name='time',
            field=models.DateTimeField(default=datetime.datetime(2020, 2, 8, 0, 24, 46, 685070)),
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,671
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0004_auto_20200125_1027.py
|
# Generated by Django 3.0.1 on 2020-01-25 04:42
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0.1): alters Posts.post_date; the default
    is a datetime literal captured at generation time -- do not edit."""
    dependencies = [
        ('post', '0003_auto_20200125_1027'),
    ]
    operations = [
        migrations.AlterField(
            model_name='posts',
            name='post_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 1, 25, 10, 27, 45, 501985)),
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,672
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/models.py
|
from django.db import models
import datetime
from django.contrib.auth.models import User
# Create your models here.
'''
This is our main database Posts.
This stores all the data base of our posts.
This table has an attribute username that is the foreign key and links with the use tabe
This shows a one to many relation between user and post table as one user can create mutiple posts and also one post can have only one user as its creater.
This has on_delete cascade as once the user deletes his or her profile all the posts created by the user is also automatically deleted
'''
class Posts(models.Model):
    """A journal post; one author (``username``) per post, CASCADE delete."""
    post_title = models.CharField(max_length=200)
    post_content = models.TextField()
    # default image served when the author uploads none
    post_images = models.ImageField(default='post_images/default.jpg', upload_to='post_images')
    post_date = models.DateTimeField()
    # author; deleting the user deletes their posts
    username = models.ForeignKey(User, on_delete=models.CASCADE)
    def __str__(self):
        return self.post_title
'''
This is a data base to store all the ratings given to post and comments along with the post and comments along with the time the comments were done
Table as one to many relation with post table as one post can have multiple reactions but one reaction is associated with only one post.
Table also has one to many relation with user table as one user can make multiple reaction but only one user for any given reaction
This has on delete cascade as if user or post is deleted all the reaction associated with this is also deleted
'''
class React(models.Model):
    """A rating + comment by a user on a post.

    CASCADE on both foreign keys: deleting the post or the user removes
    the reaction.
    """
    post_id = models.ForeignKey(Posts, on_delete=models.CASCADE)
    username = models.ForeignKey(User, on_delete=models.CASCADE)
    rating = models.IntegerField()
    comment = models.TextField()
    # Pass the callable, NOT its result: ``datetime.datetime.now()`` was
    # evaluated once at import, stamping every reaction with the server's
    # start-up time instead of the creation time.
    time = models.DateTimeField(default=datetime.datetime.now)
    def __str__(self):
        return str(self.username)
'''
This table is to keep track of the other users that any given user follows.
'''
class Follow(models.Model):
    """Follow relationship: ``subscribed_by`` follows ``subscribed_to``."""
    subscribed_to = models.ForeignKey(User, on_delete=models.CASCADE)
    # Stored as free text rather than a ForeignKey; the views filter it
    # with request.user.username.
    subscribed_by = models.TextField()
    time = models.DateTimeField()
    def __str__(self):
        return str(self.subscribed_to)
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,673
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/tests.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from post.models import Posts, React
from datetime import datetime
from django.db.models import Avg
# Create your tests here.
class OrmTest(TestCase):
    """ORM tests for Posts/React: 3 users, 3 posts, 5 reactions fixture.

    P1 gets ratings 4 and 5, P2 gets 1, 5, 1, P3 gets none -- the rating
    averages and counts asserted below follow from this setUp data.
    """
    def setUp(self):
        # Credentials double as create_user kwargs.
        self.credentials = {
            'username': 'travelersjournal',
            'password': 'aurora'}
        self.credentials2 = {
            'username': 'user2',
            'password': 'aurora'}
        self.credentials3 = {
            'username': 'user3',
            'password': 'aurora'}
        U1 = User.objects.create_user(**self.credentials)
        U2 = User.objects.create_user(**self.credentials2)
        U3 = User.objects.create_user(**self.credentials3)
        # All three posts belong to U1.
        P1 = Posts.objects.create(post_title="Test Title", post_content="Test Content", post_date=datetime.now(), username=U1)
        P2 = Posts.objects.create(post_title="Test Title1", post_content="Test Content1",post_date=datetime.now(), username=U1)
        P3 = Posts.objects.create(post_title="Test Title2", post_content="Test Content2", post_date=datetime.now(), username=U1)
        React.objects.create(id=101, post_id=P1, username=U2, rating=4, comment='Comment By U2 on P1')
        React.objects.create(id=102, post_id=P1, username=U3, rating=5, comment='Comment By U3 on P1')
        React.objects.create(id=103, post_id=P2, username=U2, rating=1, comment='Comment By U2 on P2')
        React.objects.create(id=104, post_id=P2, username=U3, rating=5, comment='Comment By U3 on P2')
        React.objects.create(id=105, post_id=P2, username=U1, rating=1, comment='Comment By U1 on P2')
    '''
    Basisc ORM test of post class
    '''
    def testORM(self):
        """Fetch by title; distinct rows have distinct ids."""
        testPostObj = Posts.objects.get(post_title="Test Title")
        testPostObj1 = Posts.objects.get(post_title="Test Title1")
        self.assertEqual(testPostObj.post_content, "Test Content")
        self.assertEqual(testPostObj.post_title, "Test Title")
        self.assertIsNot(testPostObj.id, testPostObj1.id)
        self.assertIsNotNone(testPostObj)
    '''
    ORM test to check update
    '''
    def test_update(self):
        """save() persists in-place field edits."""
        testPostObj = Posts.objects.get(post_title="Test Title")
        testPostObj.post_title = "Updated Title"
        testPostObj.post_content = "Updated Content"
        testPostObj.save()
        self.assertEqual(testPostObj.post_title, "Updated Title")
        self.assertEqual(testPostObj.post_content, "Updated Content")
    '''
    Test to check weather the username stored in the data base is same as the username who created the post
    '''
    def test_user(self):
        testPostObj2 = Posts.objects.get(post_title="Test Title2")
        user = User.objects.get(username__in=['travelersjournal'])
        self.assertEqual(testPostObj2.username, user)
    '''
    Basic ORM test of React Class/Table
    '''
    def test_react_basics(self):
        testReactObj = React.objects.get(id=101)
        testUserObj = User.objects.get(username__in=['user2'])
        self.assertEqual(testReactObj.rating, 4)
        self.assertEqual(testReactObj.comment, 'Comment By U2 on P1')
        self.assertEqual(testReactObj.username, testUserObj)
        self.assertIsInstance(testReactObj, React)
        self.assertNotIsInstance(testReactObj, User)
    '''
    Test to check the ratings of he posts.
    Gets average ratings and checks it
    Checks if no rating is given to a post, its average is None data type and vice versa
    '''
    def test_react_rating(self):
        testPostObj = Posts.objects.get(post_title="Test Title")
        testPostObj2 = Posts.objects.get(post_title="Test Title1")
        testPostObj3 = Posts.objects.get(post_title="Test Title2")
        # aggregate(Avg(...)) returns {'rating__avg': value-or-None}
        ratingObj = React.objects.filter(post_id=testPostObj).aggregate((Avg('rating')))
        ratingObj2 = React.objects.filter(post_id=testPostObj2).aggregate((Avg('rating')))
        ratingObj3 = React.objects.filter(post_id=testPostObj3).aggregate((Avg('rating')))
        rating = round(ratingObj['rating__avg'], 2)
        rating2 = round(ratingObj2['rating__avg'], 2)
        self.assertEqual(rating, 4.5 )
        self.assertEqual(rating2, 2.33 )
        self.assertIsNone(ratingObj3['rating__avg'])
        self.assertIsNotNone(ratingObj2['rating__avg'])
    '''
    Checks the number of rating in a post.
    '''
    def test_react_cont(self):
        testPostObj = Posts.objects.get(post_title="Test Title")
        testPostObj2 = Posts.objects.get(post_title="Test Title1")
        testPostObj3 = Posts.objects.get(post_title="Test Title2")
        ratingObj = React.objects.filter(post_id=testPostObj).count()
        ratingObj2 = React.objects.filter(post_id=testPostObj2).count()
        ratingObj3 = React.objects.filter(post_id=testPostObj3).count()
        self.assertEqual(ratingObj, 2)
        self.assertEqual(ratingObj2, 3)
        self.assertEqual(ratingObj3, 0)
        self.assertIsNotNone(ratingObj3)
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,674
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/forms.py
|
from django import forms
from .models import Posts
"""Form class for creating and editing the posts"""
class PostForm(forms.ModelForm):
    """ModelForm over Posts exposing only the user-editable fields;
    author and date are filled in elsewhere."""
    class Meta:
        model = Posts
        fields = ('post_title', 'post_content', 'post_images')
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,675
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/views.py
|
from django.shortcuts import render,redirect,get_object_or_404
from .models import Posts, React, Follow
import datetime
from .forms import PostForm
from django.http import HttpResponse
from django.contrib.auth import get_user_model as user_data
from django.db.models import Q
from django.db.models import Avg
from math import ceil
from django.utils.datastructures import MultiValueDictKeyError
from django.contrib import messages
# Create your views here.
'''Logic for view funciton'''
def search(query=None):
    """Return the (deduplicated) Posts whose title or content contains any
    whitespace-separated term of *query*.

    The original crashed with a TypeError when called without an argument
    because the ``None`` default was split immediately; an empty/None
    query now returns [].  Note the set() dedup does not preserve order,
    matching the original behaviour.
    """
    if not query:
        return []
    queryset = []
    for q in query.split(" "):
        posts = Posts.objects.filter(
            Q(post_title__icontains=q) |
            Q(post_content__icontains=q)
        )
        queryset.extend(posts)
    return list(set(queryset))
'''
redirects request to dispay data by implemending pagination
'''
def base(request):
    """Redirect the bare post URL to page 1 with a page size of 5."""
    return redirect('/post/5/1')
'''
View function that renders the main post page.this function initialy queries the database and returns all the posts stored in the database
Data is returned in a paged format depending upon the size and page given.
it also calculates the maximum number of page required to display all the post and html files only displays the appropriate number of pages
Dynamically increases the number of pages as the number of posts increases
if the user is searches a post, it quereies tha database and returns only the post matched with the search querry
'''
def homePage(request,SIZE,PAGENO):
    """Render the paginated main post page.

    Args:
        request: HttpRequest; a GET param 'searchKey' switches to search mode.
        SIZE: posts per page.
        PAGENO: 1-based page number.

    Returns search results (no pagination controls) when 'searchKey' is
    present, otherwise the requested page plus a page-number range for
    the template's pagination links.
    """
    # Slice bounds for the requested page (newest posts first).
    skip = SIZE * (PAGENO - 1)
    post = Posts.objects.all().order_by('-post_date')[skip: (PAGENO * SIZE)]
    recentPost = Posts.objects.all().order_by('-post_date')[0:4]
    noOfPages = int(ceil(Posts.objects.all().count()/SIZE))
    userFirstLetter = str(request.user)[0].upper()
    if request.user.is_authenticated:
        followObj = Follow.objects.filter(subscribed_by=request.user.username)
    else:
        followObj = None
    if request.user.is_authenticated:
        # Notifications = reactions on any of the current user's posts.
        postObj = Posts.objects.filter(username=request.user)
        postList = []
        for p in postObj:
            postList.append(p.id)
        notification = React.objects.filter(post_id__in=postList)
    else:
        postObj = None
        notification = None
    query = ""
    if request.GET:
        # Search mode: replace the page slice with the search results.
        query = request.GET['searchKey']
        post = search(str(query))
        return render(request, 'post/index.html', {'posts': post, 'followObj': followObj, 'recentPost': recentPost, 'userFLetter': userFirstLetter, 'notification': notification})
    return render(request, 'post/index.html', {'posts': post, 'noOfPages': range(1,noOfPages+1), 'followObj':followObj, 'recentPost':recentPost, 'userFLetter':userFirstLetter,'notification':notification})
'''
View function for creating a new fucntion.
We use forms.py to create a form.
This function requires user to be logged in before creating any post.
We have used instance property of form class to instanciate username and the current date and time.
This is done as we want our app to auto insert username and current time at the time of the creation of post and not allow user to edit these values
'''
def create(request):
    """Create a new post (login required).

    The author and creation time are set server-side through
    ``form.instance`` so the client cannot forge them. Invalid form data
    redirects back to the create page with a warning; anonymous users
    are sent to the login page.
    """
    form = PostForm()
    if request.method == "POST" and request.user.is_authenticated:
        form = PostForm(request.POST, request.FILES)
        # Auto-filled fields, deliberately excluded from the form itself.
        form.instance.username = request.user
        form.instance.post_date = datetime.datetime.now()
        if form.is_valid():
            form.save()
            messages.success(request, 'Post Created!!')
            return redirect('post:base')
        else:
            messages.warning(request, 'Post Data not valid')
            return redirect('post:create')
    elif request.user.is_authenticated:
        return render(request, 'post/create.html', {'form': form})
    else:
        messages.warning(request, 'Please Login to Continue')
        return redirect('user:login')
'''
Simple view function to display any particular post.
This function users slug to determine which post the user asked to display and only displays the post asked by the user.
This view function also displays all the comments posted on this post and also dispays the average rating of the post.
If the user submits blank rating and comment ir relods the same page by expecting multi value dictionary error
Also if the user is not authenticated when submitting a rating and comment, it redirects to the login page.
'''
def viewPost(request, ID):
    """Display a single post with its comments and average rating.

    GET renders the post. An authenticated POST stores a new
    rating/comment; a POST with missing fields (MultiValueDictKeyError)
    re-renders unchanged; an unauthenticated POST redirects to login.

    Fix: removed a leftover debug ``print(ratingObj)``.
    """
    postObj = Posts.objects.get(id=ID)
    recentPost = Posts.objects.all().order_by('-post_date')[0:4]
    ratingObj = React.objects.filter(post_id=ID).aggregate((Avg('rating')))
    commentObj = React.objects.filter(post_id=ID)
    # The aggregate is None when the post has no ratings yet.
    if ratingObj['rating__avg'] is None:
        ratingObj = 'None'
    else:
        ratingObj = round(ratingObj['rating__avg'],2)
    if request.method == 'POST' and request.user.is_authenticated:
        try:
            get_rating = request.POST['rating']
            get_comment = request.POST['comment']
            react_obj = React(post_id=postObj, username=request.user,rating=get_rating, comment=get_comment, time=datetime.datetime.now())
            react_obj.save()
            # Recompute the average including the rating just saved.
            ratingObj = React.objects.filter(post_id=ID).aggregate((Avg('rating')))
            ratingObj = round(ratingObj['rating__avg'], 2)
            return render(request, 'post/view.html', {'posts': postObj, 'rating': ratingObj, 'comments': commentObj,'recentPost':recentPost})
        except MultiValueDictKeyError:
            # Blank submission: fall back to the unmodified page.
            return render(request, 'post/view.html', {'posts': postObj, 'rating': ratingObj, 'comments': commentObj,'recentPost':recentPost})
    elif not request.user.is_authenticated and request.method == 'POST':
        messages.warning(request, 'Please Login to Continue')
        return redirect('user:login')
    return render(request, 'post/view.html', {'posts': postObj, 'rating': ratingObj, 'comments': commentObj,'recentPost':recentPost})
'''
This method opens a page that shows the post from the users that the logged in user has followed.
If the user is not authenticated, it redirects to a login page with appropriate message
'''
def followed(request, USER):
    """Show the feed of posts from users that *USER* follows.

    Requires an authenticated session; anonymous requests are redirected
    to the login page with a warning.
    """
    if not request.user.is_authenticated:
        messages.warning(request, 'Please Login to Continue')
        return redirect('user:login')
    subscriptions = Follow.objects.filter(subscribed_by=USER)
    first_letter = str(request.user)[0].upper()
    latest = Posts.objects.all().order_by('-post_date')[0:4]
    # Collect the usernames this user is subscribed to.
    followed_users = [entry.subscribed_to for entry in subscriptions]
    feed = Posts.objects.filter(username__in=followed_users)
    return render(request, 'post/follow.html', {'posts': feed, 'recentPost': latest, 'userFLetter': first_letter})
'''
View funtion to edit any post.
This function users slug to determine which post the user the user asked to edit and displays the post asked by the user.
This function also checks weather the user is logged in or not.
Only allows the user to edit any post if the user sending the request to edit this post is the orgninal creator of the post
'''
def editPostUpdateForm(request, ID):
    """Edit post *ID* (only allowed for its original creator).

    GET shows the pre-filled edit form; POST saves the changes and
    refreshes the modification timestamp. Other authenticated users are
    bounced back to the post view; anonymous users go to login.
    """
    inst = get_object_or_404(Posts, id=ID)
    form = PostForm(instance=inst)
    # Refresh the timestamp server-side; it is not user-editable.
    form.instance.post_date = datetime.datetime.now()
    if request.method == "POST" and request.user == inst.username:
        form = PostForm(request.POST, request.FILES, instance=inst)
        if form.is_valid():
            form.save()
            messages.success(request, 'Post Updated!!!')
            return redirect(f'/post/view/{ID}')
    elif request.user == inst.username:
        return render(request, "post/edit.html", {"form": form, "posts": inst})
    elif request.user.is_authenticated:
        messages.warning(request, 'You cannot edit this post as You are not the orginal creator of this post!!')
        return redirect(f'/post/view/{ID}')
    else:
        messages.warning(request, 'Please Login to Continue')
        return redirect('user:login')
'''
View funtion to delete.
This function users slug to determine which post the user the user asked to delete and displays the post asked by the user.
This function also checks weather the user is logged in or not.
Only deletes the post if the user sending the request to delete the post is the orginal creator of the post
'''
def postDelete(request, ID):
    """Delete post *ID*, but only when requested by its creator.

    Other authenticated users are redirected back to the post; anonymous
    users are sent to the login page.
    """
    target = Posts.objects.get(id=ID)
    if request.user == target.username:
        target.delete()
        messages.warning(request, 'Your Post has been Deleted!!')
        return redirect('user:profile')
    if request.user.is_authenticated:
        messages.warning(request, 'You cannot delete this post as you are not the orginal creator of this post!!')
        return redirect(f'/post/view/{ID}')
    messages.warning(request, 'Please Login to Continue')
    return redirect('user:login')
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,676
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Posts
from .models import React
from .models import Follow
'''
Registerd all our classes from models to be accessed by admin
'''
# Register every post-app model with the Django admin site.
for _model in (Posts, React, Follow):
    admin.site.register(_model)
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,677
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0003_auto_20200125_1027.py
|
# Generated by Django 3.0.1 on 2020-01-25 04:42
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds the post author FK (to the swappable
    # auth user model) and refreshes the post_date default, which was
    # frozen at generation time. Do not edit the operations by hand.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('post', '0002_auto_20200110_1308'),
    ]
    operations = [
        migrations.AddField(
            model_name='posts',
            name='username',
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='posts',
            name='post_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 1, 25, 10, 26, 54, 913422)),
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,678
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/user/views.py
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from post.models import Posts, React, Follow
from django.contrib import messages
import datetime
from django.db import IntegrityError
# Create your views here
'''#post that renders base template
#this function redirets to the page where user can view all the post'''
def base(request):
    """Redirect the site root to the post listing."""
    return redirect('/post')
'''
View function that registers any new user
Checks if all the userinput data are valid then only creates a user
'''
def register(request):
    """Register a new user account and log it in.

    Validates that both passwords match before creating the user, then
    authenticates and logs in. Duplicate usernames/emails
    (IntegrityError) re-render the form with a warning.

    Fix: the original returned no response (a server error) when
    ``authenticate`` came back None after account creation; that path
    now redirects to the login page.
    """
    if request.method == 'POST':
        try:
            username = request.POST['username']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            email = request.POST['email']
            if password1 == password2:
                user = User.objects.create_user(username=username, password=password1, email=email)
                user.save()
            else:
                messages.warning( request, 'Password Did Not match')
                return render(request, 'user/register.html')
            user = authenticate(username=username, password=password1)
            if user is not None:
                login(request, user)
                messages.success(request, f'Account has been sucessfully created for {username}!')
                return redirect('user:profile')
            # Unlikely: account created but auto-login failed.
            messages.warning(request, 'Account created, please log in')
            return redirect('user:login')
        except IntegrityError:
            messages.warning(request, 'Username or Email already exists')
            return render(request, 'user/register.html')
    else:
        return render(request, 'user/register.html')
'''
View function that logs in already exesting user after checking if all the data entered are valid
'''
def signin(request):
    """Log an existing user in.

    Already-authenticated users go straight to their profile; a valid
    POST logs in and redirects home; bad credentials bounce back to the
    login form with a warning.

    Fix: removed two leftover debug ``print`` calls.
    """
    if request.user.is_authenticated:
        return redirect('user:profile')
    elif request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(username=username, password=password)
        if user is not None:
            login(request, user)
            messages.success(request, 'Login sucessful')
            return redirect('post:base')
        else:
            messages.warning(request, 'Username or Password did not match!!!')
            return redirect('user:login')
    return render(request, 'user/login.html')
'''view funstion to signout and kill the curent session'''
def signout(request):
    """End the current session and return to the post listing."""
    logout(request)
    messages.warning(request, 'You have been Logged Out!!')
    return redirect('post:base')
'''
view funstion that shows the current user profile along with all the post created by the user
This function also creates a notification object from react table.
To create notification object, gets all the post id of the posts created by the user.
Then, using all the post id, gets all the entries made for the post id in the React table.
This is displayed as nottification in the html page
'''
def profile(request):
    """Render the logged-in user's profile page.

    Shows the user's own posts, reactions on them (displayed as
    notifications), and their follow list. Anonymous users are
    redirected to login.
    """
    if not request.user.is_authenticated:
        return redirect('user:login')
    own_posts = Posts.objects.filter(username=request.user)
    # Reactions on any of this user's posts act as notifications.
    post_ids = [p.id for p in own_posts]
    notification = React.objects.filter(post_id__in=post_ids)
    followObj = Follow.objects.filter(subscribed_by=request.user.username)
    userFirstLetter = str(request.user)[0].upper()
    return render(request, 'user/profile.html', {'posts': own_posts, 'notification': notification, 'followObj': followObj, 'userFLetter': userFirstLetter})
'''
Function to view profile of any other user.
Displays all the posts uploaded by the user.
User must be logged in to view all the posts
Provides option to follow the user if not already followed.
If already followed does not provide an option to follow again
'''
def viewProfile(request, USER):
    """Show another user's public profile and posts (login required).

    POST subscribes the requester to *USER*. Viewing your own profile
    redirects to the private profile page. Responds 404 when the user
    has no posts (profiles are only resolvable through their posts).
    """
    if request.user.is_authenticated:
        recentPost = Posts.objects.all().order_by('-post_date')[0:4]
        inst = Posts.objects.filter(username=USER)
        if not inst:
            return HttpResponse(status=404)
        # Use any one post to resolve the profile owner's User object.
        for i in inst:
            inst = i
            break
        if request.user == inst.username:
            return redirect('user:profile')
        elif request.method == 'POST':
            subscribed_by = request.user
            subscribed_to = inst.username
            follow_obj = Follow(subscribed_to=subscribed_to,subscribed_by=subscribed_by, time=datetime.datetime.now())
            follow_obj.save()
            messages.success(request, 'Subscribed')
            return redirect(f'/profile/{USER}')
        else:
            # Offer the follow button only when not already following.
            follow = False
            if not Follow.objects.filter(subscribed_by=request.user, subscribed_to=USER):
                follow = True
            postObj = Posts.objects.filter(username=USER).order_by('-post_date')
            return render(request, 'user/viewProfile.html', {'posts': postObj, 'follow': follow,'recentPost':recentPost})
    else:
        messages.warning(request, 'Please Login to Continue')
        return redirect('user:login')
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,679
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/urls.py
|
from django.contrib import admin
from django.urls import path
from . import views
# Namespace for reverse() lookups, e.g. 'post:viewPost'.
app_name = 'post'
# Route table: pagination listing, CRUD views, and the followed feed.
urlpatterns = [
    path('', views.base, name='base'),
    path('<int:SIZE>/<int:PAGENO>', views.homePage, name='homePage'),
    path('create/', views.create, name="create"),
    path('view/<int:ID>', views.viewPost, name='viewPost'),
    path('edit/<int:ID>', views.editPostUpdateForm, name='update'),
    path('delete/<int:ID>', views.postDelete, name='delete'),
    path('followed/<str:USER>', views.followed, name='followed' )
]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,680
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0001_initial.py
|
# Generated by Django 3.0.1 on 2020-01-10 03:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Posts table with
    # title, content, image (with a default), and a date field.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Posts',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('post_title', models.CharField(max_length=200)),
                ('post_content', models.TextField()),
                ('post_images', models.ImageField(default='default.jpg', upload_to='post_images')),
                ('date', models.DateTimeField()),
            ],
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,681
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/api/urls.py
|
from django.contrib import admin
from django.urls import path
from . import views
# Namespace for reverse() lookups, e.g. 'api:read_api'.
app_name = 'api'
# REST-style routes; path converters capture page size/number, post id,
# or user id for each endpoint.
urlpatterns = [
    path('posts/read/', views.read_api, name="read_api"),
    path('posts/read/pagination/<int:SIZE>/<int:PAGENO>/', views.read_api_pagination, name="read_api_pagination"),
    path('posts/read/<int:ID>/', views.read_api_data, name="read_api_data"),
    path('posts/read/user/<int:ID>/', views.read_api_user, name="read_api_user"),
    path('posts/create/', views.create_api, name='create_api'),
    path('posts/update/<int:ID>', views.update_api, name='update_api'),
    path('posts/delete/<int:ID>', views.delete_api, name='delete_api'),
]
'''
The API's URL patterns use path converters (slugs) to capture the
identifiers each endpoint needs from the request URL.
'''
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,682
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0005_auto_20200125_1032.py
|
# Generated by Django 3.0.1 on 2020-01-25 04:47
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: refreshes the post_date default, which was frozen
    # at migration-generation time. Do not edit by hand.
    dependencies = [
        ('post', '0004_auto_20200125_1027'),
    ]
    operations = [
        migrations.AlterField(
            model_name='posts',
            name='post_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 1, 25, 10, 32, 38, 926502)),
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,683
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/api/views.py
|
from django.shortcuts import render, redirect
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from post.models import Posts
from django.contrib.auth.models import User
import json
import datetime
from math import ceil
# Create your views here.
'''
This is the view function of rest API app.
'''
'''
This view function is used to return all the posts stored in our database in JSON format.
This function returns title of post, its content, its last modifided date along with the userid of the user that created this post.
'''
def read_api(request):
    """Return every stored post as JSON.

    Each entry carries the title, content, last-modified date and the
    author's user id.
    """
    all_posts = Posts.objects.all()
    payload = {"posts": list(all_posts.values('post_title', 'post_content', 'post_date', 'username'))}
    return JsonResponse(payload)
'''
This view function is used to return posts stored in our database by implemending pagination.
This view function uses slug in its url to got size and page number.
Depending upon the size and page no given it only returns required number of posts in JSON format.
'''
def read_api_pagination(request, SIZE, PAGENO):
    """Return a page of posts (newest first) as JSON.

    Args:
        SIZE: posts per page.
        PAGENO: 1-based page number.

    Out-of-range pages return an error message with the largest valid
    page number. Improvement: the page is validated before the payload
    query is built, instead of after.
    """
    noOfPages = int(ceil(Posts.objects.all().count() / SIZE))
    if PAGENO > noOfPages:
        return JsonResponse({"message": "This Page Contains no Data", "max_valid_page": noOfPages})
    skip = SIZE * (PAGENO - 1)
    post = Posts.objects.all().order_by('-post_date')[skip: (PAGENO * SIZE)]
    return JsonResponse({"posts": list(post.values('post_title', 'post_content', 'post_date', 'username'))})
'''
This view function returns only one post.
This view function uses slug to get post id and using the post id it gets the post and returns it in JSON format
'''
def read_api_data(request, ID):
    """Return the title and content of the single post *ID* as JSON."""
    record = Posts.objects.get(id=ID)
    payload = {"post_title": record.post_title, "post_content": record.post_content}
    return JsonResponse(payload)
'''
This view functions returns all the posts created by a particular user.
This view function uses slug to get user id and using the user id it filters all the post created by the user and returns it in JSON format.
'''
def read_api_user(request, ID):
    """Return all posts authored by user id *ID* as JSON."""
    authored = Posts.objects.filter(username=ID)
    payload = {"posts": list(authored.values('post_title', 'post_content', 'post_date', 'username'))}
    return JsonResponse(payload)
'''
This view function is for creating a new post through api.
If url of this view function is opended in browser, it is redirected to create post page.
The api takes post request and body has title, content and username.
Username must be a valid username else it returns an error.
if username is valid it creats a new post with the given username, title and content.
Returns a suscessfull message after post is sucessfully created in json format
'''
@csrf_exempt
def create_api(request):
    """Create a post through the REST API.

    GET redirects to the HTML create page. POST expects a JSON body with
    'title', 'content' and an existing 'username'; an unknown username
    raises User.DoesNotExist (a 500, as before).

    Improvements: direct ``username=`` lookup instead of the needless
    single-element ``username__in`` list, and an explicit 405 response
    for other methods (previously returned None, a server error).
    """
    if request.method == 'GET':
        return redirect('post:create')
    if request.method == 'POST':
        decoded_data = request.body.decode('utf-8')
        post_data = json.loads(decoded_data)
        user = User.objects.get(username=post_data['username'])
        post = Posts(post_title=post_data['title'], post_content=post_data['content'], post_date=datetime.datetime.now(), username=user)
        post.save()
        return JsonResponse({"message": "Post Sucessfully Created!!!"})
    return JsonResponse({"message": "Unsupported method"}, status=405)
'''
This view fuction is to update post stored in database.
This function uses slug to get the post id.
The api takes post request with title and content.
This function gets the post and changes the title and content to the title and content in the request.
This function also updates the time on the post
Returns a sucessful message after update is completed
'''
@csrf_exempt
def update_api(request, ID):
    """Read (GET) or update (POST) post *ID* through the REST API.

    POST expects a JSON body with 'title' and 'content'; the post's
    modification time is refreshed on update.

    Bug fix: the original wrote ``post_date = datetime.datetime.now()``,
    assigning a discarded local — the timestamp was never saved despite
    the documented behavior. It is now set on the model instance.
    """
    post = Posts.objects.get(id=ID)
    if request.method == 'GET':
        return JsonResponse({"post_title": post.post_title, "post_content": post.post_content})
    if request.method == 'POST':
        decoded_data = request.body.decode('utf-8')
        post_data = json.loads(decoded_data)
        post.post_title = post_data['title']
        post.post_content = post_data['content']
        post.post_date = datetime.datetime.now()
        post.save()
        return JsonResponse({"message": "Successfully updated!!"})
'''
This view function used to delete the post stored in data base.
This function uses slug to get post id.
Delets the post associated with the id and returns a message after the post has been deleted
'''
@csrf_exempt
def delete_api(request, ID):
    """Delete post *ID* through the REST API (DELETE method only).

    Improvement: non-DELETE methods now get an explicit 405 response
    instead of the implicit None (a server error).
    """
    if request.method == 'DELETE':
        post = Posts.objects.get(id=ID)
        post.delete()
        return JsonResponse({"message": "Post Deleted"})
    return JsonResponse({"message": "Unsupported method"}, status=405)
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,792,684
|
sthasubin429/Aurora_ADC5
|
refs/heads/master
|
/Travelers_Journal/post/migrations/0009_auto_20200210_1323.py
|
# Generated by Django 3.0.1 on 2020-02-10 07:38
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: moves the default post image under post_images/
    # and refreshes the React.time default (frozen at generation time).
    dependencies = [
        ('post', '0008_react_time'),
    ]
    operations = [
        migrations.AlterField(
            model_name='posts',
            name='post_images',
            field=models.ImageField(default='post_images/default.jpg', upload_to='post_images'),
        ),
        migrations.AlterField(
            model_name='react',
            name='time',
            field=models.DateTimeField(default=datetime.datetime(2020, 2, 10, 13, 23, 20, 721655)),
        ),
    ]
|
{"/Travelers_Journal/post/views.py": ["/Travelers_Journal/post/models.py", "/Travelers_Journal/post/forms.py"], "/Travelers_Journal/post/admin.py": ["/Travelers_Journal/post/models.py"], "/Travelers_Journal/post/forms.py": ["/Travelers_Journal/post/models.py"]}
|
30,810,176
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/QITE/hamiltonian.py
|
import numpy as np
import itertools
from scipy import optimize as opt
from pauli import pauli_action
from binary_functions import Bas2Int, Int2Bas, Opp2Str
from numpy import linalg as LA
from scipy import linalg as SciLA
from tools import print_state, fidelity, dgr, dpbc, dobc
from pauli import sigma_matrices
def Hpsi(H_, psi_):
    """Apply the Hamiltonian to a state vector and return H|psi>.

    Args:
        H_: list of (active_qubits, coeffs, index_map, phase_map) terms;
            index_map[m] permutes basis indices, phase_map[m] carries the
            per-basis-state phases for Pauli term m.
        psi_: complex state vector.

    Only terms with |coeff| > 1e-8 contribute.
    """
    result = np.zeros(psi_.shape, dtype=complex)
    for active, coeffs, index_map, phase_map in H_:
        nonzero = np.where(np.abs(coeffs) > 1e-8)[0]
        for term in nonzero:
            permuted = index_map[term, :]
            result += coeffs[term] * phase_map[term, permuted] * psi_[permuted]
    return result.copy()
def Hmat(H_):
    """Build the dense matrix of H_ by applying Hpsi to each basis vector."""
    dim = H_[0][2].shape[1]
    matrix = np.zeros((dim, dim), dtype=complex)
    for col in range(dim):
        basis_vec = np.zeros(dim, dtype=complex)
        basis_vec[col] = 1.0
        # Column col of H is H applied to the col-th basis state.
        matrix[:, col] = Hpsi(H_, basis_vec).copy()
    return matrix
def Hmoms(H_, psi_):
    """Return (<H>, <H^2> - <H>^2) for state psi_, as real numbers."""
    h_psi = Hpsi(H_, psi_)
    mean = np.vdot(psi_, h_psi)
    second_moment = np.vdot(h_psi, h_psi)
    return np.real(mean), np.real(second_moment - mean**2)
def print_Hamiltonian(H_):
    """Pretty-print each Hamiltonian term: its active qubits and the
    Pauli strings (with coefficients) whose magnitude exceeds 1e-8.

    Fix: removed a dead ``mu`` counter that was incremented but never
    read, plus commented-out debug code.
    """
    for (A, h, imp, gmp) in H_:
        nact = len(A)
        print("active qubits ", A)
        print("operators: ")
        for m in np.where(np.abs(h) > 1e-8)[0]:
            print(Opp2Str(Int2Bas(m, 4, nact)), h[m])
def Hii(H_, i):
    """Return the real diagonal matrix element <i|H|i>.

    *i* indexes a computational-basis state. Each Pauli string
    contributes the product of its single-qubit diagonal sigma-matrix
    entries on the active qubits.
    """
    N = H_[0][2].shape[1]
    nbit = int(np.log2(N))
    hii = 0.0
    # Bit string of the basis state.
    xi = Int2Bas(i, 2, nbit)
    for (A, h, imp, gmp) in H_:
        nact = len(A)
        for m in np.where(np.abs(h) > 1e-8)[0]:
            # Base-4 digits of m select I/X/Y/Z per active qubit.
            sm = Int2Bas(m, 4, nact)
            smx = [sigma_matrices[xi[A[w]], xi[A[w]], sm[w]]
                   for w in range(nact)]
            hii += np.real(h[m]*np.prod(smx))
    return hii
def TransverseIsing(nspin, R, J, h):
    """Build a 1-D transverse-field Ising Hamiltonian term list.

    Args:
        nspin: number of spins.
        R: interaction radius used to select the active qubits per bond.
        J: ZZ coupling strength (applied on nearest-neighbor, non-wrapping
           bonds only — see the |i-j|==1 and j!=0 condition).
        h: transverse (X) field strength.

    Returns:
        List of (active, h_alpha, imap, gmap) terms consumable by Hpsi.

    Fixes: removed per-site debug prints and an unused dense
    ``Hm = Hmat(H)`` whose exponential-cost result was discarded.
    """
    H = []
    for i in range(nspin):
        j = (i+1) % nspin
        # Qubits within periodic distance R of either end of bond (i, j).
        active = [k for k in range(nspin) if dpbc(
            i, k, nspin) < R or dpbc(j, k, nspin) < R]
        active = np.asarray(active)
        nact = len(active)
        h_alpha = np.zeros(4**nact)
        ii = np.where(active == i)[0][0]
        jj = np.where(active == j)[0][0]
        # Transverse-field term: X on site i.
        idx = [0]*nact
        idx[ii] = 1
        h_alpha[Bas2Int(idx, 4)] = h
        # Coupling term: Z on site i and Z on site j.
        idx = [0]*nact
        idx[ii] = 3
        idx[jj] = 3
        if np.abs(i-j) == 1 and j != 0:
            h_alpha[Bas2Int(idx, 4)] = J
        imap, gmap = pauli_action(active, nspin)
        H.append((active, h_alpha, imap, gmap))
    print_Hamiltonian(H)
    return H
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,177
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/cluster_test.py
|
from wick import wick as wick_class
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm as tqdm
from wick import wick as wick_class
# System size (qubits) and variational-ansatz depth.
n = 4
depth = 2
wick = wick_class(n, depth=depth)
print("Optimizing the initial angles")
# Fit the ansatz angles to the initial state via classical optimization.
wick.get_initial_angles(maxiter=2000, method="COBYLA")
print("Initial closeness : ", wick.initial_closeness)
# Time-evolve the variational parameters: N steps of size dt.
wick.evolve_system(dt=.01, N=50, verbose=False)
# Reconstruct and persist the statevector at every recorded time step.
states = []
for j, i in enumerate(wick.angles):
    states.append(wick.get_final_state(i))
np.save("states.npy", states)
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,178
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/wick_option.py
|
from qiskit.circuit import Parameter
from qiskit import (QuantumRegister, ClassicalRegister,
QuantumCircuit, Aer, execute)
from qiskit.circuit.library import (
YGate, CYGate, CRYGate, RYGate, XGate, CXGate, CRXGate, RXGate,
ZGate, CZGate, CRZGate, RZGate)
import numpy as np
from itertools import combinations
from qiskit.quantum_info import state_fidelity as distance
from scipy.optimize import minimize
from qiskit.quantum_info import Pauli
from qiskit.extensions import UnitaryGate as ug
import itertools
import functools
import numba
from scipy.sparse import diags
from tqdm.auto import tqdm
def get_paulis(H, hide_identity=True):
    """Calculate the Pauli decomposition of the given matrix.

    Args:
        H (2D array): Hamiltonian matrix of size 2**n x 2**n.
        hide_identity (bool, optional): Drop the pure-identity term.
            Defaults to True.

    Returns:
        dict: Coefficients keyed by Pauli string, e.g. {'II': 2, 'XY': 1.2}.
    """
    n = int(np.log2(len(H)))
    N = 2 ** n
    paulis = [Pauli.from_label('I'), Pauli.from_label(
        'X'), Pauli.from_label('Y'), Pauli.from_label('Z')]
    obs = []
    coeffs = []
    final = {}
    for term in itertools.product(paulis, repeat=n):
        matrices = [i.to_matrix() for i in term]
        # Projection: tr(P @ H) / 2**n is the coefficient of Pauli string P.
        coeff = np.trace(functools.reduce(np.kron, matrices) @ H) / N
        coeff = np.real_if_close(coeff).item()
        if not np.allclose(coeff, 0):
            coeffs.append(coeff)
            # Identity-only strings are included only when hide_identity=False.
            if not all([t == Pauli.from_label('I') for t in term]) and hide_identity:
                final["".join([i.to_label() for i in term])] = coeff
            else:
                if hide_identity == False:
                    final["".join([i.to_label() for i in term])] = coeff
    return final
def get_diff_mat(N, a=1, b=1, dx=1, boundary=1):
    """Return the N x N implicit finite-difference (tridiagonal) matrix
    used for the diffusion step of the Black-Scholes solver.

    Note: a, b, dx and boundary are currently unused; diffusion D=1,
    drift C=0, dt=0.01 and grid step h=6.1875 are fixed constants.
    """
    diffusion = 1  # In Black-Scholes: 0.5 * sigma**2 * s**2
    drift = 0      # In Black-Scholes: r * s
    time_step = .01
    grid_step = 6.1875
    lam = (diffusion * time_step) / (grid_step ** 2)
    adv = (drift * time_step) / (2 * grid_step)
    size = N - 1
    lower = adv - lam
    main = 1 + 2 * lam
    upper = -lam - adv
    mat = np.zeros((size + 1, size + 1), dtype=float)
    mat[0, 0] = main
    mat[0, 1] = upper
    # Fill the three diagonals starting from the second row.
    for offset, value in enumerate((lower, main, upper)):
        np.fill_diagonal(mat[1:, offset:], value)
    mat[size, size - 1] = lower
    mat[size, size] = main
    return mat
# print(A)
class wick:
    def __init__(self, n, terminal_time=10, num_time_steps=30,
                 interest_rate=0, strike_price=25, volatility=0.18,
                 stock_start=0.5, stock_end=100, seed=0, depth=2, verbose=False):
        """Set up the Black-Scholes grid, the random variational ansatz,
        and precompute the A_ij and C_ik measurement circuits.

        Args:
            n: number of qubits (state dimension 2**n grid points).
            terminal_time: option expiry T.
            num_time_steps: number of time-discretization steps M.
            interest_rate, strike_price, volatility: market parameters.
            stock_start, stock_end: spatial grid bounds (end is rounded
                to a multiple of the strike price).
            seed: RNG seed for the random gate ansatz.
            depth: ansatz depth.
            verbose: enable extra output.
        """
        # ** General declarations
        self.verbose = verbose
        # ** Declarations involving system
        self.n = int(n)
        self.N = 2**self.n
        self.T = terminal_time
        self.M = num_time_steps
        self.r = interest_rate
        self.sigma = volatility
        self.k = strike_price
        self.s_init = stock_start
        self.s_end = int(stock_end/self.k)*self.k
        self.t_init = 0
        self.t_end = self.T
        self.h = float(self.s_end - self.s_init) / self.N  # step-size
        # arrange grid in s, with N equally spaced nodes
        self.s = np.zeros(self.N)
        self.s = np.linspace(self.s_init, self.s_end,
                             num=self.N, endpoint=True)
        self.s[0] = self.s_init
        # time discretization
        self.dt = float(self.t_end - self.t_init) / self.M
        # arrange grid in time with step size dt
        self.t = np.arange(self.t_init, self.t_end, self.dt)
        self.t[0] = self.t_init
        # Define Diffusion and Drift coefficients/constants we can change it to better represent later on
        self.D = 1  # In Black-Scholes we have:
        # D = ( 0.5 * (sigma**2) * (s**2) ),
        self.C = 0  # C = ( r * s )
        # Define ratios for matrix algorithm
        self.Lambda = ((self.D*self.dt) / (self.h**2))
        self.mu = ((self.C*self.dt) / (2*self.h))
        # mesh storing time and space wavefunctions
        self.u = np.zeros((self.N, self.M+1))
        # ** Declarations involving calcualtion of circuit
        self.circuit = None
        self.main_circuit = None
        self.seed = seed
        self.depth = depth
        self.anc = QuantumRegister(1, "ancilla")
        self.basis = QuantumRegister(n, "basis")
        self.meassure = ClassicalRegister(1, "meassure")
        self.combi = None
        self.theta = None
        self.gates = None
        self.set_gates()
        self.parameter_two_qubit = 0  # As of now no two qubit gates
        self.meassure_at_end = False
        self.make_random_gate_anzats()
        self.initial = None
        self.set_initial()
        self.initial_closeness = None
        self.num_parameters = len(self.circuit.parameters)
        # One row of ansatz angles per time step.
        self.angles = np.zeros((self.M+1, self.num_parameters))
        self.circuit_aij = [
            [0 for i in range(self.num_parameters)] for j in range(self.num_parameters)]
        print("Calculating Aij circuits")
        self.calculate_aij()
        print("Done")
        self.ham_pauli = None
        self.make_pauli_ham()
        self.num_ham_terms = len(self.ham_pauli)
        # number of terms in ham * number of parameters
        self.circuit_cik = [
            [0 for i in range(len(self.ham_pauli))] for j in range(self.num_parameters)]
        print("Calculating Cik circuits")
        self.circuit_cik_a_values = [
            [0j for i in range(len(self.ham_pauli))] for j in range(self.num_parameters)]
        self.calculate_cik()
        print("Done")
def get_diff_mat(self):
"""Calculates the differential^2 operator for given BM system
Returns:
2d Array: D^2 operator
"""
N = self.N-1
A = np.zeros((N+1, N+1), dtype=float)
# need to eventually implement this form: (1 + 2*self.Lambda + dt * r**(m+1)), since we have r = 0
A[0, 0] = (1 + 2*self.Lambda)
A[0, 1] = (- self.Lambda - self.mu)
for n, l in enumerate(((self.mu - self.Lambda), (1 + 2*self.Lambda), (- self.Lambda - self.mu))):
np.fill_diagonal(A[1:, n:], l)
A[N, N-1] = (self.mu - self.Lambda)
A[N, N] = (1 + 2*self.Lambda)
return A
    def set_initial(self):
        """Initialize the t=0 boundary condition (call-option payoff),
        then take the square root and normalize each column so the
        price profile can be encoded as quantum amplitudes."""
        for j in range(0, self.N):
            # ** s[j] - k, for a Call-Option
            # ** OR: (k - s[j]) for a PUT-Option
            if (self.s[j] - self.k) <= 0:
                self.u[j, 0] = 0
            elif (self.s[j] - self.k) > 0:
                self.u[j, 0] = self.s[j] - self.k
        # Amplitude encoding: sqrt of payoff, then column-normalize;
        # nan_to_num guards the all-zero columns (0/0).
        self.u = np.sqrt(self.u)
        self.u = np.nan_to_num(self.u/np.linalg.norm(self.u, axis=0))
        self.initial = self.u[:, 0]
def get_random_gates(self, num_gates):
"""Generate random gates from X,Y,Z combination according to seed
Args:
num_gates (int): Number of gates needed
Returns:
Array(gates): list of randomg gates
"""
import random
random.seed(self.seed)
gates = [[YGate, CYGate, CRYGate, RYGate], [XGate, CXGate,
CRXGate, RXGate], [ZGate, CZGate, CRZGate, RZGate]]
# ? Seems like only Y gates alone does it much better ?
gates = [[YGate, CYGate, CRYGate, RYGate]]
return random.choices(gates, k=num_gates)
def set_gates(self):
"""set gates generated by get_random_gates function
"""
self.combi = list(combinations(range(self.n), 2))
self.theta = [Parameter(f't-{i}')
for i in range(len(self.combi)+self.n*self.depth)]
self.gates = self.get_random_gates(len(self.theta))
def get_final_state(self, angles):
circ_params_wo_meassure = self.main_circuit.remove_final_measurements(
inplace=False)
values = {i: angles[j] for j, i in enumerate(
circ_params_wo_meassure.parameters)}
circ_params_wo_meassure.assign_parameters(values, inplace=True)
simulator = Aer.get_backend('statevector_simulator')
result = execute(circ_params_wo_meassure, simulator).result()
statevector = result.get_statevector(circ_params_wo_meassure)
return statevector
    # ! Check if final state calculation, i.e. Im{e^{it} <0|U|0>}, is correct
    def get_final_state_lm(self, angles, ij):
        """Evaluate the A_ij Hadamard-test circuit at the given angles.

        Args:
            angles (array of float): ansatz parameter values.
            ij (sequence of int): [i, j] index of the A-matrix element.

        Returns:
            (Statevector, dict): the full statevector and the ancilla
            probabilities {'1': p1, '0': 1 - p1}.
        """
        circ_params_wo_meassure = self.circuit_aij[ij[0]][ij[1]].remove_final_measurements(
            inplace=False)
        values = {i: angles[j] for j, i in enumerate(
            circ_params_wo_meassure.parameters)}
        circ_params_wo_meassure.assign_parameters(values, inplace=True)
        simulator = Aer.get_backend('statevector_simulator')
        result = execute(circ_params_wo_meassure, simulator).result()
        statevector = result.get_statevector(circ_params_wo_meassure)
        # The ancilla is the last register added, hence the most-significant
        # qubit: the top half of the amplitudes corresponds to ancilla == 1.
        temp = statevector*statevector.conj()
        p_1 = np.real(temp[int(len(temp)/2):].sum())
        results = {'1': p_1, '0': 1-p_1}
        return statevector, results
    # ! Check if final state calculation, i.e. Im{e^{it} <0|U|0>}, is correct
    def get_final_state_ik(self, angles, ij):
        """Evaluate the C_ik Hadamard-test circuit at the given angles.

        Args:
            angles (array of float): ansatz parameter values.
            ij (sequence of int): [i, k] index (parameter, Hamiltonian term).

        Returns:
            (Statevector, dict): the full statevector and the ancilla
            probabilities {'1': p1, '0': 1 - p1}.
        """
        circ_params_wo_meassure = self.circuit_cik[ij[0]][ij[1]].remove_final_measurements(
            inplace=False)
        values = {i: angles[j] for j, i in enumerate(
            circ_params_wo_meassure.parameters)}
        circ_params_wo_meassure.assign_parameters(values, inplace=True)
        simulator = Aer.get_backend('statevector_simulator')
        result = execute(circ_params_wo_meassure, simulator).result()
        statevector = result.get_statevector(circ_params_wo_meassure)
        # Ancilla is the most-significant qubit (last register added):
        # amplitudes in the top half have ancilla == 1.
        temp = statevector*statevector.conj()
        p_1 = np.real(temp[int(len(temp)/2):].sum())
        results = {'1': p_1, '0': 1-p_1}
        return statevector, results
def make_pauli_ham(self):
"""Make the hamiltonian part interms of pauli decomposition
"""
# As of now only \partial^2 k included in hamiltonian
# ham_mat = get_diff_mat(2**self.n, self.a, self.b,
# self.dx, self.boundary)
ham_mat = self.get_diff_mat()
self.ham_pauli = get_paulis(ham_mat)
def get_cost(self, angles, compare=None):
a = self.get_final_state(angles)
compare = self.initial
# return np.arccos(distance(a,compare))
return 2*(1-distance(a, compare))
def get_initial_angles(self, method="SLSQP", maxiter=1000, tol=1e-7):
angle0 = np.random.uniform(0, 2*np.pi, len(self.circuit.parameters))
bnds = [(0, 2*np.pi)] * len(angle0)
result = minimize(self.get_cost, angle0, method=method,
tol=tol, bounds=bnds, options={"maxiter": maxiter})
if result.success == False:
print("Warning: Angles not converged within given iterations")
self.initial_closeness = result.fun
self.angles[0] = result.x
    def make_random_gate_anzats(self, lm=None):
        """Build the ansatz circuit; optionally insert derivative gates for A_lm.

        Args:
            lm (list, optional): [l, m] parameter indices for which controlled
                derivative gates are inserted (Hadamard-test circuit for the
                A_lm matrix element). With the default None the plain ansatz
                (no ancilla) is built and stored in ``self.main_circuit``;
                otherwise the circuit is stored in ``self.circuit_aij[l][m]``.
        """
        # Initialize local variables and functions
        cnt = 0  # running parameter index; check_lm reads it via closure
        set_ancila = True
        def check_lm(lm, qubit):
            '''
            Insert the controlled derivative gate when the running parameter
            index matches l (control on ancilla==0, via the X sandwich) or
            m (control on ancilla==1).
            Need to check if the two qubit derivative calculatin is correct, I am sure it is not
            eq.33/34 in 1804.03023
            '''
            if cnt == lm[0]:
                self.circuit.x(self.anc)
                self.circuit.append(self.gates[cnt][1](), [self.anc, qubit])
                self.circuit.x(self.anc)
            if cnt == lm[1]:
                self.circuit.append(self.gates[cnt][1](), [self.anc, qubit])
        # Start constructiong the circuit
        if lm == None:
            set_ancila = 0
            lm = [-1, -1]
        if set_ancila:
            # Hadamard-test layout: ancilla in |+>, measured at the end.
            self.circuit = QuantumCircuit(self.basis, self.anc, self.meassure)
            self.circuit.h(self.anc)
        else:
            self.circuit = QuantumCircuit(self.basis)
        self.circuit.h(self.basis)
        self.circuit.barrier()
        if self.parameter_two_qubit:
            for i in self.combi:
                qubits = [self.basis[i[0]], self.basis[i[1]]]
                check_lm(lm, qubits[1])
                self.circuit.append(
                    self.gates[cnt][2](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # single qubit gates
        for iter_depth in range(self.depth):
            if self.parameter_two_qubit == 0:
                # Two qubit entangling gate series before single qubit parameterized gates
                if iter_depth > 0:
                    for i in self.combi:
                        self.circuit.cx(self.basis[i[0]], self.basis[i[1]])
                    self.circuit.barrier()
            for i in range(self.n):
                qubits = [self.basis[i]]
                check_lm(lm, qubits[0])
                self.circuit.append(
                    self.gates[cnt][3](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # Final H on ancila
        if set_ancila:
            self.circuit.h(self.anc)
        if self.meassure_at_end:
            self.circuit.measure(self.anc, self.meassure)
        if lm == [-1, -1]:
            self.main_circuit = self.circuit.copy()
        else:
            self.circuit_aij[int(lm[0])][int(lm[1])] = self.circuit.copy()
    def make_cik_circ(self, ik, h_pauli, coeff):
        """Build the Hadamard-test circuit for the C_ik vector element.

        Args:
            ik (sequence of int): [i, k] — parameter index and Hamiltonian
                term index.
            h_pauli (list): controlled Pauli-term gates, one per term.
            coeff (list): the corresponding Hamiltonian coefficients.

        Stores the result in ``self.circuit_cik[i][k]``.
        """
        # Initialize local variables and functions
        cnt = 0  # running parameter index; check_ik reads it via closure
        def check_ik(ik, qubit):
            '''
            Insert the controlled derivative gate (control on ancilla==0 via
            the X sandwich) when the running parameter index matches i.
            Need to check if the two qubit derivative calculatin is correct, I am sure it is not
            eq.33/34 in 1804.03023
            '''
            if cnt == ik[0]:
                self.circuit.x(self.anc)
                self.circuit.append(self.gates[cnt][1](), [self.anc, qubit])
                self.circuit.x(self.anc)
        # Start constructiong the circuit
        self.circuit = QuantumCircuit(self.basis, self.anc, self.meassure)
        self.circuit.h(self.anc)
        # ! Check If the angle rotation is correct.
        # ! we want to have |0> + e^{ik[1]} |1>
        # ! Damn seems to be working without it. So where does the hamiltonian parts go ?
        # ! otherwise there seems to be no place in putting the Hamiltonian coeefcients !
        # NOTE(review): theta_ham is computed but the rz applying it is
        # commented out — the coefficient phase currently enters only through
        # circuit_cik_a_values; confirm this is intended.
        theta_ham = np.angle(coeff[ik[1]]*(-1j/2))
        # theta_ham = np.angle(coeff[ik[1]])
        # self.circuit.rz(theta_ham, self.anc)
        self.circuit.h(self.basis)
        self.circuit.barrier()
        if self.parameter_two_qubit:
            for i in self.combi:
                qubits = [self.basis[i[0]], self.basis[i[1]]]
                check_ik(ik, qubits[1])
                self.circuit.append(
                    self.gates[cnt][2](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # single qubit gates
        for iter_depth in range(self.depth):
            if self.parameter_two_qubit == 0:
                # Two qubit entangling gate series before single qubit parameterized gates
                if iter_depth > 0:
                    for i in self.combi:
                        self.circuit.cx(self.basis[i[0]], self.basis[i[1]])
                    self.circuit.barrier()
            for i in range(self.n):
                qubits = [self.basis[i]]
                check_ik(ik, qubits[0])
                self.circuit.append(
                    self.gates[cnt][3](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # Controlled application of the k-th Hamiltonian Pauli term.
        qs = [self.anc]
        for i in self.basis:
            qs.append(i)
        self.circuit.append(h_pauli[ik[1]], qs)
        self.circuit.h(self.anc)
        if self.meassure_at_end:
            self.circuit.measure(self.anc, self.meassure)
        self.circuit_cik[int(ik[0])][int(ik[1])] = self.circuit.copy()
def calculate_aij(self):
if self.verbose:
iterations = tqdm(range(self.num_parameters),
desc="Calculating A_ij parameterized circuits\n")
else:
iterations = range(self.num_parameters)
for i in iterations:
for j in range(self.num_parameters):
self.make_random_gate_anzats(lm=[i, j])
    def calculate_cik(self):
        """Build the C_ik circuits and cache the |(-i/2) c_k| prefactors.

        One circuit per (parameter i, Hamiltonian term k) pair; the controlled
        Pauli-term gates are built once up front.
        """
        # ! ----------------------------------
        # ! Check if this needs to be refersed ?
        # ! ----------------------------------
        ham_terms_circ = [ug(Pauli.from_label(i).to_operator(), label=" ("+i+")").control(
            1) for i in list(self.ham_pauli.keys())]
        coeff = list(self.ham_pauli.values())
        if self.verbose:
            iterations = tqdm(range(self.num_parameters),
                              desc="Calculating C_ij parameterized circuits\n")
        else:
            iterations = range(self.num_parameters)
        for i in iterations:
            for j in range(len(ham_terms_circ)):
                self.make_cik_circ(
                    ik=[i, j], h_pauli=ham_terms_circ, coeff=coeff)
                # Magnitude of the coefficient used when assembling C later.
                self.circuit_cik_a_values[i][j] = np.abs(-1j/2 * coeff[j])
def evolve_system(self, verbose=True):
"""Evolve the system with time gap dt up to steps N
Args:
verbose (bool, optional): Want to show progreess bar ?. Defaults to True.
"""
for ntime in tqdm(range(self.M)):
angles = self.angles[ntime]
if verbose:
iter_range = tqdm(range(self.num_parameters))
else:
iter_range = range(self.num_parameters)
A = np.zeros((self.num_parameters, self.num_parameters))
for i in iter_range:
for j in range(self.num_parameters):
# if j <= i:
state, p = self.get_final_state_lm(angles, [j, i])
A[i, j] = (p['0']-p['1']) * \
np.abs(-1j/2 * 1j/2) # 2*p['1']-1
# A[j, i] = p['0']-p['1']
C = np.zeros((self.num_parameters, self.num_ham_terms))
for i in iter_range:
for j in range(self.num_ham_terms):
state, p = self.get_final_state_ik(angles, [i, j])
# 2*p['1']-1 # p['0']-p['1'] # 2*p['0']-1
C[i, j] = (p['0']-p['1']) * \
np.abs(self.circuit_cik_a_values[i][j] * -1j/2)
try:
theta_dot = np.linalg.solve(A, C.sum(axis=1))
except:
print("diag did not work, going with lstq")
theta_dot, residuals, rank, s = np.linalg.lstsq(
A, C.sum(axis=1))
self.angles[ntime+1] = (angles+self.dt*theta_dot)
state = self.get_final_state(self.angles[ntime+1])
self.u[:, ntime+1] = state
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,179
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/QITE/tools.py
|
import numpy as np
from binary_functions import Int2Bas
def fidelity(psi_, phi_):
    """Return |<psi|phi>| for two state vectors."""
    overlap = np.vdot(psi_, phi_)
    return np.abs(overlap)
def print_state(psi_, nbit, outf):
    """Write each non-negligible amplitude of psi_ to outf as bitstring + re/im."""
    for idx in range(psi_.shape[0]):
        amp = psi_[idx]
        if np.abs(amp) > 1e-4:
            # Basis index rendered as an nbit-long bitstring.
            for bit in Int2Bas(idx, 2, nbit):
                outf.write(str(bit))
            outf.write(" %.12f %.12f I \n" %
                       (np.real(amp), np.imag(amp)))
def dump_state(psi_, nbit, fname):
    """Dump non-negligible amplitudes of psi_ to fname as 'index re im' lines.

    Args:
        psi_ (ndarray): complex state vector.
        nbit (int): number of qubits (unused; kept for interface parity).
        fname (str): output path.
    """
    # ``with`` guarantees the file is closed even if a write fails.
    with open(fname, 'w') as outf:
        for i in np.where(np.abs(psi_) > 1e-4)[0]:
            outf.write("%d %.12f %.12f \n" %
                       (i, np.real(psi_[i]), np.imag(psi_[i])))
def read_state(nbit, fname):
    """Read a state vector previously written by dump_state.

    Args:
        nbit (int): number of qubits; the result has 2**nbit entries.
        fname (str): path of the 'index re im' file.

    Returns:
        ndarray: complex state vector with the stored amplitudes filled in.
    """
    psi = np.zeros(2**nbit, dtype=complex)
    # ndmin=2: a file with a single row would otherwise load as a 1-D array
    # and break the column indexing below.
    V = np.loadtxt(fname, ndmin=2)
    idx = V[:, 0].astype(int)
    psi[idx] = V[:, 1]+1j*V[:, 2]
    return psi
def dump_lanz_vecs(hv, sv, fname):
    """Write paired Lanczos vectors as 'index hv[i] sv[i]' lines to fname."""
    nv = len(hv)
    # ``with`` closes the file even if a write raises.
    with open(fname, 'w') as outf:
        for i in range(nv):
            outf.write("%d %.12f %.12f \n" % (i, hv[i], sv[i]))
def dump_lanz_rte(hv, sv, fname):
    """Write complex Lanczos vectors as 'index Re(hv) Im(hv) Re(sv) Im(sv)' lines."""
    nv = len(hv)
    # ``with`` closes the file even if a write raises.
    with open(fname, 'w') as outf:
        for i in range(nv):
            outf.write("%d %.8f %.8f %.8f %.8f\n" % (i, np.real(
                hv[i]), np.imag(hv[i]), np.real(sv[i]), np.imag(sv[i])))
# ------------------------------------------------- #
def dpbc(a, b, n):
    """Distance between sites a and b on a ring of n sites (periodic BC)."""
    forward = (a - b) % n
    backward = (b - a) % n
    return np.min([forward, backward])
def dobc(a, b, n):
    """Distance between sites a and b on an open chain (n kept for parity with dpbc)."""
    separation = a - b
    return np.abs(separation)
def dgr(graph, i, j):
    """Length of the shortest path between vertices i and j of an undirected graph.

    Args:
        graph: (VV, EE) tuple — vertex list and list of undirected edges.
        i, j: endpoint vertices.

    Returns:
        int: number of edges on a shortest i-j path; 0 if i == j;
        -1 if j is unreachable (the original path-enumeration version
        looped forever in that case, and grew paths exponentially).
    """
    VV, EE = graph
    if i == j:
        return 0
    # Build an adjacency map once instead of rescanning EE per extension.
    adjacency = {}
    for (a, b) in EE:
        adjacency.setdefault(a, []).append(b)
        adjacency.setdefault(b, []).append(a)
    # Standard BFS with a visited set; yields the same distances as the
    # original level-by-level path growth, but terminates.
    from collections import deque
    visited = {i}
    queue = deque([(i, 0)])
    while queue:
        node, dist = queue.popleft()
        for neighbor in adjacency.get(node, ()):
            if neighbor == j:
                return dist + 1
            if neighbor not in visited:
                visited.add(neighbor)
                queue.append((neighbor, dist + 1))
    return -1  # j unreachable from i
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,180
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/QITE/binary_functions.py
|
import numpy as np
import time
def Int2Bas(n, b, nbit):
    """Return n written in base b as a list of nbit digits, most significant first."""
    digits = []
    while n:
        n, remainder = divmod(n, b)
        digits.append(int(remainder))
    # Left-pad with zeros to exactly nbit digits (n == 0 gives all zeros).
    return [0] * (nbit - len(digits)) + digits[::-1]
def Bas2Int(x, b):
    """Interpret digit list x (most significant first) as an integer in base b."""
    nbit = len(x)
    weights = [b**(nbit - 1 - k) for k in range(nbit)]
    return np.dot(weights, x)
def Psi2Str(x):
    """Render a basis-state digit list as a ket string, e.g. [1, 0] -> '|10>'."""
    return '|' + ''.join(str(digit) for digit in x) + '>'
def str_op(i):
    """Map a Pauli index (0..3) to its letter I/X/Y/Z; None for anything else."""
    letters = {0: 'I', 1: 'X', 2: 'Y', 3: 'Z'}
    return letters.get(i)
def Opp2Str(x):
    """Render a Pauli index list as its letter string, e.g. [1, 3] -> 'XZ'."""
    return ''.join(str_op(i) for i in x)
def Lst2Str(x):
    """Concatenate the str() of each element of x into one string."""
    return ''.join(str(item) for item in x)
if __name__ == "__main__":
    # Self-test and micro-benchmark: verify Int2Bas/Bas2Int round-trip for
    # every base-4 integer up to nbmax-1 digits, timing each digit count,
    # then plot time vs. digit count.
    nbmax = 10
    b = 4
    v = []  # per-nbit elapsed times in seconds
    for nbit in range(1, nbmax):
        t0 = time.time()
        for n0 in range(b**nbit):
            assert(n0 == Bas2Int(Int2Bas(n0, b, nbit), b))
            if(nbit == 4):
                print(n0, Int2Bas(n0, b, nbit))
        t1 = time.time()
        dt = t1-t0
        v.append(dt)
    import matplotlib.pyplot as plt
    plt.plot(range(1, nbmax), v, 'bo-')
    plt.xlabel("bits")
    plt.ylabel("t [s]")
    plt.show()
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,181
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/QITE/qite.py
|
import numpy as np
from numpy import linalg as LA
from scipy import linalg as SciLA
from tools import print_state, fidelity, dump_state, read_state, dump_lanz_vecs
from hamiltonian import Hmat, Hmoms, print_Hamiltonian
def H_alpha_psi(H_, psi_, alpha_):
    """Apply the alpha-th Hamiltonian group to psi_.

    Each group is (A, h, imp, gmp): coefficients h, index maps imp and phase
    maps gmp per Pauli term; terms with |h| <= 1e-8 are skipped.
    """
    (A, h, imp, gmp) = H_[alpha_]
    phi = np.zeros(psi_.shape, dtype=complex)
    active_terms = np.where(np.abs(h) > 1e-8)[0]
    for m in active_terms:
        phi += h[m] * gmp[m, imp[m, :]] * psi_[imp[m, :]]
    return phi.copy()
def ExpmbH_alpha(H_, psi_, alpha_, db):
    """Apply exp(-db * H_alpha) to psi_ via a Taylor series.

    Returns:
        (ndarray, float): the normalized result and its pre-normalization norm.
    """
    phi = psi_.copy()
    term = psi_.copy()
    order = 0
    # Accumulate Taylor terms until they become negligible.
    while LA.norm(term) > 1e-8:
        term = (-db / float(order + 1)) * H_alpha_psi(H_, term, alpha_)
        phi += term.copy()
        order += 1
    nu = LA.norm(phi)
    return phi.copy() / nu, nu
# ----- unitary evolution
def xP_psi(x_, psi_, imp_, gmp_):
    """Apply the Pauli sum with real coefficients x_ to psi_ (terms with |x| <= 1e-8 skipped)."""
    phi = np.zeros(psi_.shape, dtype=complex)
    for m in np.where(np.abs(x_) > 1e-8)[0]:
        contribution = x_[m] * gmp_[m, imp_[m, :]] * psi_[imp_[m, :]]
        phi += contribution
    return phi.copy()
def Exp_ixP(x_, psi_, imp_, gmp_):
    """Apply the unitary exp(i x.P) to psi_ via a Taylor series and renormalize.

    Args:
        x_ (ndarray): real coefficients of the Pauli sum P.
        psi_ (ndarray): input state.
        imp_, gmp_: index/phase maps describing the Pauli-term actions.

    Returns:
        ndarray: exp(i x.P) psi_, normalized to unit norm.
    """
    phi = psi_.copy()
    chi = psi_.copy()
    i = 0
    # Taylor expansion of the exponential; stop once terms are negligible.
    while(LA.norm(chi) > 1e-8):
        chi = (1j/float(i+1))*xP_psi(x_, chi, imp_, gmp_)
        phi += chi.copy()
        i += 1
    nu = LA.norm(phi)
    # Fix: removed debug prints of psi/x/phi that flooded stdout on every call.
    return phi.copy()/nu
def QITE_step(H_, psi_, db, xv, check):
    """One imaginary-time step: for each Hamiltonian group, fit the unitary
    exp(i x.P) that best reproduces exp(-db H_alpha)|psi> and apply it.

    Args:
        H_: list of (A, h, imp, gmp) Hamiltonian groups.
        psi_ (ndarray): current state.
        db (float): imaginary-time step.
        xv: previous per-group solutions used as warm starts; None on first call.
        check (bool): solve the linear system by least squares instead of the
            warm-started Newton-CG minimization.

    Returns:
        (psi, dn, xv, Xop): evolved state, accumulated norm factor, updated
        solutions, and the Pauli operator of the LAST group only (Xop is
        reset inside the alpha loop).
    """
    import time
    nalpha = len(H_)
    dn_ = 1.0
    if(xv is None):
        xv = []
        for alpha in range(nalpha):
            (A, h, imp, gmp) = H_[alpha]
            nact = imp.shape[0]
            xv.append(np.zeros(nact))
    for alpha in range(nalpha):
        # ----- target state: Delta = exp(-db H_alpha)|psi>/norm - |psi>
        t0 = time.time()
        delta_alpha, dnalpha_ = ExpmbH_alpha(H_, psi_, alpha, db)
        delta_alpha -= psi_.copy()
        dn_ *= dnalpha_
        Xop = []
        # ----- pauli action
        (A, h, imp, gmp) = H_[alpha]
        nact = imp.shape[0]
        # print('active:',imp)
        Pmu_psi = np.zeros(imp.shape, dtype=complex)
        for m in range(nact):
            Pmu_psi[m, :] = gmp[m, imp[m, :]]*psi_[imp[m, :]]
        t1 = time.time()
        # ----- set linear system  S x = b  (S = 2 Re <P_mu psi|P_nu psi>)
        Amat = np.dot(np.conj(Pmu_psi), Pmu_psi.T)
        # print('Amat:\n',Amat)
        Amat = 2.0*np.real(Amat)
        t2 = time.time()
        bvec = np.dot(Pmu_psi, np.conj(delta_alpha))
        bvec = -2.0*np.imag(bvec)
        t3 = time.time()
        if(check):
            x = SciLA.lstsq(Amat, bvec)[0]
        else:
            # Warm-started quadratic minimization of ||A x - b||^2.
            zct = np.dot(bvec, Amat)
            def cost_fun(vct):
                return LA.norm(np.dot(Amat, vct)-bvec)**2
            def J_cost_fun(vct):
                wct = np.dot(Amat, vct)
                wct = np.dot(Amat.T, wct)
                return 2.0*(wct-zct)
            import scipy
            x = scipy.optimize.minimize(
                cost_fun, x0=xv[alpha], method='Newton-CG', jac=J_cost_fun, tol=1e-8).x
        xv[alpha] = x.copy()
        print('Pauli Operator')
        Xop.append((A, x, imp, gmp))
        print_Hamiltonian(Xop)
        # print_Hamiltonian(xv)
        #print('\n wavefunction before\n',Pmu_psi)
        t4 = time.time()
        # Apply the fitted unitary for this group.
        psi_ = Exp_ixP(x, psi_, imp, gmp)
        #print('\n wavefunction after\n', psi_,'\n')
        t5 = time.time()
        # print alpha,t5-t4,t4-t3,t3-t2,t2-t1,t1-t0
        import sys
        sys.stdout.flush()
    # print('op:\n',xv)
    return psi_, dn_, xv, Xop
def Lanczos_QITE(hv, sv, db):
    """QLanczos energy estimate built from QITE energies and log-norms.

    Args:
        hv: energy expectation value at each imaginary-time step.
        sv: accumulated log-norms at each step.
        db (float): imaginary-time step, used to regularize the diagonals.

    Returns:
        float: the lowest generalized eigenvalue of (hm, sm).
    """
    nv = len(range(0, len(hv), 2))
    hm = np.zeros((nv, nv), dtype=complex)
    sm = np.zeros((nv, nv), dtype=complex)
    # Overlap and Hamiltonian matrices assembled from the EVEN time steps.
    for jr in range(0, len(hv), 2):
        for js in range(0, len(hv), 2):
            jk = (jr+js)//2
            sm[jr//2, js//2] = np.exp(2*sv[jk]-sv[jr]-sv[js])
            hm[jr//2, js//2] = hv[jk]*sm[jr//2, js//2]
    # rarefied sampling: keep roughly geometrically spaced rows/columns,
    # always including the last one.
    idx = []
    for l in range(nv):
        if(int(np.sqrt(2.0)**l) < nv+1):
            idx.append(l)
    if(nv-1 not in idx):
        idx.append(nv-1)
    sm = sm[idx, :]
    sm = sm[:, idx]
    hm = hm[idx, :]
    hm = hm[:, idx]
    nv = sm.shape[0]
    # regularization of the diagonals to stabilize the generalized eigensolve
    for jk in range(nv):
        sm[jk, jk] *= 1.0+2*db
        hm[jk, jk] *= 1.0+2*db
    eps, U = SciLA.eigh(hm, sm)
    eps = np.real(eps)
    return np.min(eps)
def QITE(H_, db, bmax, lanczos=False, psi0=None, omega=None, ncheck=1):
    """Run quantum imaginary-time evolution from beta = 0 to bmax in steps of db.

    Args:
        H_: Hamiltonian as a list of (A, h, imp, gmp) groups.
        db (float): imaginary-time step.
        bmax (float): final beta.
        lanczos (bool): also compute the QLanczos energy estimate each step.
        psi0 (ndarray): initial state (required — indexed with psi0[:]).
        omega: optional index set; fidelity then becomes the weight on omega.
        ncheck (int): use the exact least-squares solve every ncheck steps.

    Returns:
        (ndarray, dict): (beta, energy) trajectory and, per beta, the Pauli
        operators returned by QITE_step. Also writes 'QITE.out', 'qite.psi'
        and 'qlanz.vecs' in the working directory.
    """
    Hm = Hmat(H_)
    N = Hm.shape[0]
    nbit = int(np.log2(N))
    # Exact diagonalization for the reference energy / fidelity.
    eps, U = SciLA.eigh(Hm)
    m0 = np.argmin(eps)
    epsm0 = eps[m0]
    Um0 = U[:, m0]
    zeta = np.exp(-db*(eps-eps[m0]))  # NOTE(review): currently unused
    fide = 1.0
    fout = open('QITE.out', 'w')
    fout.write("FCI gs energy %.6f \n" % epsm0)
    fout.write("FCI gs wfn \n")
    print_state(Um0, nbit, fout)
    psi_QITE = psi0[:]
    nbeta = int(bmax/db)+1
    hvect_LANZ = np.zeros(nbeta+1)
    svect_LANZ = np.zeros(nbeta+1)
    xv = None
    hpauli = {}
    fout.write("QITE\n")
    data = np.zeros((nbeta, 2))
    for ib in range(nbeta):
        print('B: ', ib*db)
        ea, ev = Hmoms(H_, psi_QITE)
        print('Energy: ', ea)
        hvect_LANZ[ib] = ea
        if(omega is None):
            fide = fidelity(psi_QITE, Um0)
        else:
            fide = LA.norm(psi_QITE[omega])**2
        if(lanczos):
            ea_ = Lanczos_QITE(hvect_LANZ[:ib+1], svect_LANZ[:ib+1], db)
            fout.write("%.6f %.6f %.6f %.6f %.6f \n" %
                       (ib*db, ea, ev, fide, ea_))
        else:
            fout.write("%.6f %.6f %.6f %.6f \n" % (ib*db, ea, ev, fide))
        #print("%.6f %.6f %.6f %.6f \n" % (ib*db,ea,ev,fide))
        fout.flush()
        b = ib*db
        data[ib, 0] = b
        data[ib, 1] = ea
        if(ncheck > 0):
            check = (ib % ncheck == 0)
        else:
            check = False
        psi_QITE, dnorm, xv, Xop = QITE_step(H_, psi_QITE, db, xv, check)
        hpauli[b] = Xop
        # Feedback portion
        # Number of applications of the unitary to get ground state
        svect_LANZ[ib+1] = svect_LANZ[ib]+np.log(dnorm)
    fout.write("QITE gs wfn \n")
    print_state(psi_QITE, nbit, fout)
    dump_state(psi_QITE, nbit, 'qite.psi')
    dump_lanz_vecs(hvect_LANZ[:nbeta], svect_LANZ[:nbeta], 'qlanz.vecs')
    fout.close()
    return data, hpauli
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,182
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/Hardware-efficient/hardware_efficient.py
|
from qiskit.circuit import Parameter
from qiskit import (QuantumRegister, ClassicalRegister,
QuantumCircuit, Aer, execute)
from qiskit.circuit.library import (
YGate, CYGate, CRYGate, RYGate, XGate, CXGate, CRXGate, RXGate,
ZGate, CZGate, CRZGate, RZGate)
from qiskit.aqua.operators import PauliExpectation, CircuitSampler, StateFn, CircuitStateFn, AerPauliExpectation, ExpectationFactory
from qiskit.aqua.operators import X, Y, Z, I
from qiskit.quantum_info import state_fidelity as distance
from qiskit.quantum_info import Pauli
from qiskit.aqua.operators.primitive_ops import MatrixOp
from qiskit.aqua.operators.primitive_ops import PauliOp
from scipy.optimize import minimize
import functools
import numba
import numpy as np
from scipy.sparse import diags
def get_u(p1, p2, label=None):
    """Two-qubit ansatz block: controlled-RY from the ancilla, RY, then CNOT."""
    pair = QuantumRegister(2)
    ancilla = QuantumRegister(1)
    circ = QuantumCircuit(pair, ancilla)
    circ.cry(p2, ancilla, pair[0])
    circ.ry(p1, pair[0])
    circ.cnot(pair[0], pair[1])
    return circ.to_gate(label=label)
def get_two_qubit_pqc(n, depth=1, label="2-qubit-PQC"):
    """Brick-wall parameterized layer over n qubits plus one ancilla.

    Even pairs (0,1),(2,3),... then odd pairs (1,2),(3,4),... each receive a
    get_u block with fresh Parameters named after the pair and layer index.

    Args:
        n (int): number of data qubits.
        depth (int): layer index, used only in the parameter names.
        label (str): label for the composed gate.

    Returns:
        Gate: the composed (n+1)-qubit gate.
    """
    indices = np.arange(n)
    even_pairs = [list(p) for p in (indices[i:i + 2]
                                    for i in range(0, n, 2)) if len(p) == 2]
    odd_pairs = [list(p) for p in (indices[i:i + 2]
                                   for i in range(1, n, 2)) if len(p) == 2]
    q = QuantumRegister(n, 'q')
    anc = QuantumRegister(1, "anc")
    c = QuantumCircuit(anc, q)
    # Even layer first, then odd — same order as the two original loops.
    for pair in even_pairs + odd_pairs:
        p1 = Parameter("t_{}{}_{}".format(pair[0], pair[1], depth))
        p2 = Parameter("opt_{}{}_{}".format(pair[0], pair[1], depth))
        # Fix: stray `` after this list literal was a syntax error that
        # prevented the module from importing at all.
        qubits = [q[pair[0]], q[pair[1]]]
        qubits.append(anc)
        c.append(get_u(p1, p2, label="U"), qubits)
    return c.to_gate(label=label)
def get_u_withoutmeassure(p1, label=None):
    """Ancilla-free ansatz block: RY on the first qubit followed by CNOT."""
    pair = QuantumRegister(2)
    circ = QuantumCircuit(pair)
    circ.ry(p1, pair[0])
    circ.cnot(pair[0], pair[1])
    return circ.to_gate(label=label)
def get_two_qubit_pqc_withoutmeassure(n, depth=1, label="2-qubit-PQC"):
    """Ancilla-free brick-wall parameterized layer over n qubits (cf. get_two_qubit_pqc)."""
    indices = np.arange(n)
    even_pairs = [list(p) for p in (indices[i:i + 2]
                                    for i in range(0, n, 2)) if len(p) == 2]
    odd_pairs = [list(p) for p in (indices[i:i + 2]
                                   for i in range(1, n, 2)) if len(p) == 2]
    q = QuantumRegister(n, 'q')
    c = QuantumCircuit(q)
    # Even layer first, then odd — same order as the original two loops.
    for pair in even_pairs + odd_pairs:
        theta = Parameter("t_{}{}_{}".format(pair[0], pair[1], depth))
        c.append(get_u_withoutmeassure(theta, label="U"),
                 [q[pair[0]], q[pair[1]]])
    return c.to_gate(label=label)
def get_circuit(n=4, depth=2, Imaginary=True, without_measure=False):
    """Assemble the full hardware-efficient PQC.

    With without_measure=False an ancilla is prepared in |+> (plus an S gate
    when Imaginary, selecting the imaginary part of the overlap) and a final
    Hadamard closes the Hadamard test; otherwise a plain n-qubit PQC is built.
    """
    if without_measure:
        q = QuantumRegister(n, 'q')
        c = QuantumCircuit(q)
        c.barrier()
        for layer in range(depth):
            block = get_two_qubit_pqc_withoutmeassure(
                n, depth=layer, label=f"2-qubit-PQC-{layer+1}")
            c.append(block, range(n))
            c.barrier()
        return c
    q = QuantumRegister(n, 'q')
    anc = QuantumRegister(1, "anc")
    c = QuantumCircuit(anc, q)
    c.h(anc)
    if Imaginary:
        c.s(anc)
    c.barrier()
    for layer in range(depth):
        block = get_two_qubit_pqc(n, depth=layer, label=f"2-qubit-PQC-{layer+1}")
        c.append(block, range(n+1))
        c.barrier()
    c.h(anc)
    return c
def get_exp(H, circ):
    """Expectation value <circ|H|circ> via the Aer statevector Pauli sampler."""
    state = CircuitStateFn(circ)
    backend = Aer.get_backend('statevector_simulator')
    measurable = StateFn(H, is_measurement=True).compose(state)
    expectation = AerPauliExpectation().convert(measurable)
    sampled = CircuitSampler(backend).convert(expectation)
    return sampled.eval().real
def get_random_param_dict(circ, seed=4):
values = {}
np.random.seed(seed)
for i in sorted(circ.parameters, key=lambda x: x.name):
values[i] = np.random.uniform(0, np.pi)
return values
def set_angle(circ, angle):
values = {}
for j, i in enumerate(sorted(circ.parameters, key=lambda x: x.name)):
values[i] = angle[j]
return circ.assign_parameters(values).copy()
def get_exp_angle(H, circ, angle):
    """Expectation of H on circ evaluated at the given angle vector."""
    bound = set_angle(circ, angle)
    return get_exp(H, bound)
def get_diff_mat(N, a=1, b=1, dx=10, boundary=1):
    '''
    Returns the differential operator matrix for a(d/dx)+b(d/dx)^2 on log2(N)
    qubits: a * tridiag(1, -2, 1) * dx, with the two corner entries forced
    to -boundary.

    NOTE(review): this multiplies by dx while wick.py's get_diff_mat divides
    by dx — confirm which scaling is intended; b is currently unused.
    '''
    op = a * diags([1, -2, 1], [-1, 0, 1], shape=(N, N)) * dx
    op = op.toarray()
    op[0][0] = op[-1][-1] = -boundary
    return op
# NOTE(review): exact duplicate of set_angle defined earlier in this file —
# this redefinition shadows the first copy; consider deleting one of them.
def set_angle(circ, angle):
    """Bind angle[j] to the j-th parameter (sorted by name); return a bound copy."""
    values = {}
    for j, i in enumerate(sorted(circ.parameters, key=lambda x: x.name)):
        values[i] = angle[j]
    return circ.assign_parameters(values).copy()
# NOTE(review): exact duplicate of get_exp_angle defined earlier in this file —
# this redefinition shadows the first copy; consider deleting one of them.
def get_exp_angle(H, circ, angle):
    """Expectation of H on circ evaluated at the given angle vector."""
    calc_circ = set_angle(circ, angle)
    return get_exp(H, calc_circ)
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,183
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/wick.py
|
from qiskit.circuit import Parameter
from qiskit import (QuantumRegister, ClassicalRegister,
QuantumCircuit, Aer, execute)
from qiskit.circuit.library import (
YGate, CYGate, CRYGate, RYGate, XGate, CXGate, CRXGate, RXGate,
ZGate, CZGate, CRZGate, RZGate)
import numpy as np
from itertools import combinations
from qiskit.quantum_info import state_fidelity as distance
from scipy.optimize import minimize
from qiskit.quantum_info import Pauli
from qiskit.extensions import UnitaryGate as ug
import itertools
import functools
import numba
from scipy.sparse import diags
def get_paulis(H, hide_identity=False):
    """Decompose a 2^n x 2^n Hermitian matrix into Pauli strings.

    Args:
        H (2D array): Hamiltonian matrix whose side is a power of two.
        hide_identity (bool, optional): drop the pure-identity term.
            Defaults to False.

    Returns:
        dict: {pauli_string: coefficient}, e.g. {'II': 2.0, 'XY': 1.2},
        containing only terms with non-negligible coefficient.
    """
    n = int(np.log2(len(H)))
    N = 2 ** n
    paulis = [Pauli.from_label('I'), Pauli.from_label(
        'X'), Pauli.from_label('Y'), Pauli.from_label('Z')]
    final = {}
    for term in itertools.product(paulis, repeat=n):
        matrices = [p.to_matrix() for p in term]
        # Tr(P H) / 2^n is the coefficient of the Pauli string P.
        coeff = np.trace(functools.reduce(np.kron, matrices) @ H) / N
        coeff = np.real_if_close(coeff).item()
        if np.allclose(coeff, 0):
            continue
        # (Fix: removed the unused ``obs``/``coeffs`` accumulators and
        # flattened the equivalent nested condition.)
        is_identity = all(p == Pauli.from_label('I') for p in term)
        if hide_identity and is_identity:
            continue
        final["".join(p.to_label() for p in term)] = coeff
    return final
# @numba.njit()
def get_diff_mat(N, a=1, b=1, dx=1, boundary=1):
    '''
    Returns the differential operator matrix for a(d/dx)+b(d/dx)^2 on log2(N)
    qubits discretized by dx: a * tridiag(1, -2, 1) / dx, with the two
    corner entries forced to -boundary.

    Args:
        N (int): matrix dimension (number of grid points).
        a (float): overall scale of the second-difference stencil.
        b (float): unused; kept for interface compatibility.
        dx (float): grid-spacing divisor.
        boundary (float): magnitude written (negated) at the corners.

    Returns:
        2D ndarray: the dense N x N operator.

    (Fix: removed a commented-out alternative implementation that was dead
    code shadowing the real one.)
    '''
    D = a * diags([1, -2, 1], [-1, 0, 1], shape=(N, N)) / dx
    D = D.toarray()
    D[0][0] = D[-1][-1] = -boundary
    return D
class wick:
    def __init__(self, n, a=1, b=0, dt=1, dx=1, seed=0, depth=2, boundary=1, verbose=False):
        """Variational wick-rotated (imaginary-time) PDE solver on n qubits.

        Args:
            n (int): number of basis qubits (grid has 2**n points).
            a, b (float): coefficients of the finite-difference operator.
            dt (float): time step used by the evolution.
            dx (float): grid spacing of the operator.
            seed (int): RNG seed for the random-gate ansatz.
            depth (int): number of single-qubit layers in the ansatz.
            boundary (float): corner value of the difference operator.
            verbose (bool): show progress bars while precomputing circuits.
        """
        self.n = n
        self.verbose = verbose
        self.a = a
        self.b = b
        self.dt = dt
        self.dx = dx
        self.circuit = None
        self.main_circuit = None
        self.seed = seed
        self.depth = depth
        # One ancilla for the Hadamard tests, n basis qubits, one classical bit.
        self.anc = QuantumRegister(1, "ancilla")
        self.basis = QuantumRegister(n, "basis")
        self.meassure = ClassicalRegister(1, "meassure")
        self.combi = None
        self.theta = None
        self.boundary = boundary
        self.gates = None
        self.set_gates()
        self.parameter_two_qubit = 0  # As of now no two qubit gates
        self.meassure_at_end = False
        # Build the plain ansatz (stored in self.main_circuit) and the target state.
        self.make_random_gate_anzats()
        self.initial = None
        self.set_initial()
        self.angles = []
        self.initial_angle = None
        self.initial_closeness = None
        self.num_parameters = len(self.circuit.parameters)
        # Precompute one Hadamard-test circuit per (i, j) parameter pair.
        self.circuit_aij = [
            [0 for i in range(self.num_parameters)] for j in range(self.num_parameters)]
        self.calculate_aij()
        self.ham_pauli = None
        self.make_pauli_ham()
        self.num_ham_terms = len(self.ham_pauli)
        # number of terms in ham * number of parameters
        self.circuit_cik = [
            [0 for i in range(len(self.ham_pauli))] for j in range(self.num_parameters)]
        self.calculate_cik()
def set_initial(self, mu=0.5, sigma=0.001):
"""Initalize the starting value with gaussian
Args:
mu (float, optional): Center of gaussian. Defaults to 0.5.
sigma (float, optional): Var of gaussian. Defaults to 0.001.
"""
def gaussian(x, mu, sig):
return np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.)))
compare = gaussian(np.linspace(0, 1, 2**self.n), mu, sigma)
compare /= np.linalg.norm(compare)
compare = np.zeros(2**self.n)
compare[int(len(compare)/2)] = 1
# ! boundary for BS
N = 2**self.n
r = 0 # riskless interest rate
sigma = 0.18 # annual stock volatility
k = 25 # strike price in dollars
# uniform mesh in s
s_init = 0.5
s_end = 2*k
h = float(s_end - s_init) / N # step-size
# arrange grid in s, with N+1 equally spaced nodes
s = np.zeros(N)
s = np.linspace(s_init, s_end, num=N+1, endpoint=True)
s[0] = s_init
u = np.zeros(N)
for j in range(0, N):
if (s[j] - k) <= 0: # s[j] - k, for a Call-Option // OR: (k - s[j]) for a PUT-Option
u[j] = 0
elif (s[j] - k) > 0:
u[j] = s[j] - k
compare = u/np.linalg.norm(u)
self.initial = compare
    def get_random_gates(self, num_gates):
        """Generate random gate families from the X/Y/Z pool according to self.seed.

        Args:
            num_gates (int): number of gate families needed.

        Returns:
            list: one [gate, controlled, controlled-rotation, rotation]
            family per slot.
        """
        import random
        random.seed(self.seed)
        gates = [[YGate, CYGate, CRYGate, RYGate], [XGate, CXGate,
                                                    CRXGate, RXGate], [ZGate, CZGate, CRZGate, RZGate]]
        # ? Seems like only Y gates alone does it much better ?
        # NOTE(review): the line below deliberately overrides the X/Y/Z pool
        # with the Y family only, making the assignment above dead code.
        gates = [[YGate, CYGate, CRYGate, RYGate]]
        return random.choices(gates, k=num_gates)
    def set_gates(self):
        """Populate self.combi, self.theta and self.gates for the ansatz.

        Creates one Parameter per two-qubit pair plus n per depth layer, and
        draws a matching list of gate families.
        """
        self.combi = list(combinations(range(self.n), 2))
        self.theta = [Parameter(f't-{i}')
                      for i in range(len(self.combi)+self.n*self.depth)]
        self.gates = self.get_random_gates(len(self.theta))
    def get_final_state(self, angles):
        """Bind ``angles`` to the main ansatz circuit and return its statevector."""
        circ_params_wo_meassure = self.main_circuit.remove_final_measurements(
            inplace=False)
        # Parameters are bound in their circuit iteration order.
        values = {i: angles[j] for j, i in enumerate(
            circ_params_wo_meassure.parameters)}
        circ_params_wo_meassure.assign_parameters(values, inplace=True)
        simulator = Aer.get_backend('statevector_simulator')
        result = execute(circ_params_wo_meassure, simulator).result()
        statevector = result.get_statevector(circ_params_wo_meassure)
        return statevector
    # ! Check if final state calculation, i.e. Im{e^{it} <0|U|0>}, is correct
    def get_final_state_lm(self, angles, ij):
        """Evaluate the A_ij Hadamard-test circuit at the given angles.

        Args:
            angles (array of float): ansatz parameter values.
            ij (sequence of int): [i, j] index of the A-matrix element.

        Returns:
            (Statevector, dict): the full statevector and the ancilla
            probabilities {'1': p1, '0': 1 - p1}.
        """
        circ_params_wo_meassure = self.circuit_aij[ij[0]][ij[1]].remove_final_measurements(
            inplace=False)
        values = {i: angles[j] for j, i in enumerate(
            circ_params_wo_meassure.parameters)}
        circ_params_wo_meassure.assign_parameters(values, inplace=True)
        simulator = Aer.get_backend('statevector_simulator')
        result = execute(circ_params_wo_meassure, simulator).result()
        statevector = result.get_statevector(circ_params_wo_meassure)
        # Ancilla is the last register added, hence the most-significant
        # qubit: the top half of the amplitudes has ancilla == 1.
        temp = statevector*statevector.conj()
        p_1 = np.real(temp[int(len(temp)/2):].sum())
        results = {'1': p_1, '0': 1-p_1}
        return statevector, results
    # ! Check if final state calculation, i.e. Im{e^{it} <0|U|0>}, is correct
    def get_final_state_ik(self, angles, ij):
        """Evaluate the C_ik Hadamard-test circuit at the given angles.

        Args:
            angles (array of float): ansatz parameter values.
            ij (sequence of int): [i, k] index (parameter, Hamiltonian term).

        Returns:
            (Statevector, dict): the full statevector and the ancilla
            probabilities {'1': p1, '0': 1 - p1}.
        """
        circ_params_wo_meassure = self.circuit_cik[ij[0]][ij[1]].remove_final_measurements(
            inplace=False)
        values = {i: angles[j] for j, i in enumerate(
            circ_params_wo_meassure.parameters)}
        circ_params_wo_meassure.assign_parameters(values, inplace=True)
        simulator = Aer.get_backend('statevector_simulator')
        result = execute(circ_params_wo_meassure, simulator).result()
        statevector = result.get_statevector(circ_params_wo_meassure)
        # Ancilla is the most-significant qubit (last register added).
        temp = statevector*statevector.conj()
        p_1 = np.real(temp[int(len(temp)/2):].sum())
        results = {'1': p_1, '0': 1-p_1}
        return statevector, results
    def make_pauli_ham(self):
        """Decompose the finite-difference operator into Pauli strings.

        Only the second-derivative term is included in the Hamiltonian so far.
        """
        # As of now only \partial^2 k included in hamiltonian
        ham_mat = get_diff_mat(2**self.n, self.a, self.b,
                               self.dx, self.boundary)
        self.ham_pauli = get_paulis(ham_mat)
def get_cost(self, angles, compare=None):
a = self.get_final_state(angles)
if compare == None:
compare = self.initial
# return np.arccos(distance(a,compare))
return 2*(1-distance(a, compare))
    def get_initial_angles(self, method="SLSQP", maxiter=1000, tol=1e-7):
        """Fit the ansatz angles to reproduce self.initial and record the result.

        Args:
            method (str): scipy.optimize.minimize method name.
            maxiter (int): iteration budget for the optimizer.
            tol (float): optimizer tolerance.

        Side effects: appends the solution to self.angles and stores it in
        self.initial_angle; the final cost goes to self.initial_closeness.
        """
        angle0 = np.random.uniform(0, 2*np.pi, len(self.circuit.parameters))
        bnds = [(0, 2*np.pi)] * len(angle0)
        result = minimize(self.get_cost, angle0, method=method,
                          tol=tol, bounds=bnds, options={"maxiter": maxiter})
        if result.success == False:
            print("Warning: Angles not converged within given iterations")
        self.angles.append(result.x)
        self.initial_closeness = result.fun
        self.initial_angle = result.x
    def make_random_gate_anzats(self, lm=None):
        """Build the ansatz circuit; optionally insert derivative gates for A_lm.

        Args:
            lm (list, optional): [l, m] parameter indices for which controlled
                derivative gates are inserted (Hadamard-test circuit for the
                A_lm element). With the default None the plain ansatz (no
                ancilla) is built and stored in ``self.main_circuit``;
                otherwise the circuit goes to ``self.circuit_aij[l][m]``.
        """
        # Initialize local variables and functions
        cnt = 0  # running parameter index; check_lm reads it via closure
        set_ancila = True
        def check_lm(lm, qubit):
            '''
            Insert the controlled derivative gate when the running parameter
            index matches l (control on ancilla==0, via the X sandwich) or
            m (control on ancilla==1).
            Need to check if the two qubit derivative calculatin is correct, I am sure it is not
            eq.33/34 in 1804.03023
            '''
            if cnt == lm[0]:
                self.circuit.x(self.anc)
                self.circuit.append(self.gates[cnt][1](), [self.anc, qubit])
                self.circuit.x(self.anc)
            if cnt == lm[1]:
                self.circuit.append(self.gates[cnt][1](), [self.anc, qubit])
        # Start constructiong the circuit
        if lm == None:
            set_ancila = 0
            lm = [-1, -1]
        if set_ancila:
            # Hadamard-test layout: ancilla in |+>, measured at the end.
            self.circuit = QuantumCircuit(self.basis, self.anc, self.meassure)
            self.circuit.h(self.anc)
        else:
            self.circuit = QuantumCircuit(self.basis)
        self.circuit.h(self.basis)
        self.circuit.barrier()
        if self.parameter_two_qubit:
            for i in self.combi:
                qubits = [self.basis[i[0]], self.basis[i[1]]]
                check_lm(lm, qubits[1])
                self.circuit.append(
                    self.gates[cnt][2](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # single qubit gates
        for iter_depth in range(self.depth):
            if self.parameter_two_qubit == 0:
                # Two qubit entangling gate series before single qubit parameterized gates
                if iter_depth > 0:
                    for i in self.combi:
                        self.circuit.cx(self.basis[i[0]], self.basis[i[1]])
                    self.circuit.barrier()
            for i in range(self.n):
                qubits = [self.basis[i]]
                check_lm(lm, qubits[0])
                self.circuit.append(
                    self.gates[cnt][3](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # Final H on ancila
        if set_ancila:
            self.circuit.h(self.anc)
        if self.meassure_at_end:
            self.circuit.measure(self.anc, self.meassure)
        if lm == [-1, -1]:
            self.main_circuit = self.circuit.copy()
        else:
            self.circuit_aij[int(lm[0])][int(lm[1])] = self.circuit.copy()
    def make_cik_circ(self, ik, h_pauli, coeff):
        """Build the C_ik measurement circuit and store it in
        ``self.circuit_cik[i][k]``.

        The ancilla is put in superposition, rotated by Rz(coeff[k]) to carry
        the Hamiltonian coefficient's phase, a controlled derivative gate is
        inserted just before parameterized gate ``i``, and the controlled
        Pauli term ``h_pauli[k]`` is applied across ancilla + register before
        the final H on the ancilla.

        Args:
            ik (list): [i, k] — parameter index and Hamiltonian-term index.
            h_pauli (list): controlled-Pauli gates, one per Hamiltonian term.
            coeff (list): coefficient of each Hamiltonian term.
        """
        # Initialize local variables and functions
        cnt = 0  # running index of the parameterized gate being placed

        def check_ik(ik, qubit):
            '''
            Insert the 0-controlled derivative gate when the running gate
            index matches ik[0].
            NOTE(author): need to check if the two-qubit derivative
            calculation is correct — eq. 33/34 in arXiv:1804.03023.
            '''
            if cnt == ik[0]:
                self.circuit.x(self.anc)
                self.circuit.append(self.gates[cnt][1](), [self.anc, qubit])
                self.circuit.x(self.anc)
        # Start constructing the circuit
        self.circuit = QuantumCircuit(self.basis, self.anc, self.meassure)
        self.circuit.h(self.anc)
        # ! Check If the angle rotation is correct.
        # ! we want to have |0> + e^{ik[1]} |1>
        self.circuit.rz(coeff[ik[1]], self.anc)
        self.circuit.h(self.basis)
        self.circuit.barrier()
        if self.parameter_two_qubit:
            for i in self.combi:
                qubits = [self.basis[i[0]], self.basis[i[1]]]
                check_ik(ik, qubits[1])
                self.circuit.append(
                    self.gates[cnt][2](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # single qubit gates
        for iter_depth in range(self.depth):
            if self.parameter_two_qubit == 0:
                # Two qubit entangling gate series before single qubit parameterized gates
                if iter_depth > 0:
                    for i in self.combi:
                        self.circuit.cx(self.basis[i[0]], self.basis[i[1]])
                    self.circuit.barrier()
            for i in range(self.n):
                qubits = [self.basis[i]]
                check_ik(ik, qubits[0])
                self.circuit.append(
                    self.gates[cnt][3](self.theta[cnt]), qubits)
                cnt += 1
            self.circuit.barrier()
        # Apply the controlled Hamiltonian term on [ancilla, basis...].
        qs = [self.anc]
        for i in self.basis:
            qs.append(i)
        self.circuit.append(h_pauli[ik[1]], qs)
        self.circuit.h(self.anc)
        if self.meassure_at_end:
            self.circuit.measure(self.anc, self.meassure)
        self.circuit_cik[int(ik[0])][int(ik[1])] = self.circuit.copy()
def calculate_aij(self):
if self.verbose:
from tqdm import tqdm as tqdm
iterations = tqdm(range(self.num_parameters),
desc="Calculating A_ij parameterized circuits\n")
else:
iterations = range(self.num_parameters)
for i in iterations:
for j in range(self.num_parameters):
self.make_random_gate_anzats(lm=[i, j])
    def calculate_cik(self):
        """Build every C_ik circuit: one per (parameter index, Hamiltonian
        term) pair, using a controlled version of each Pauli term of the
        Hamiltonian together with its coefficient."""
        # ! ----------------------------------
        # ! NOTE(author): check if this needs to be reversed ?
        # ! ----------------------------------
        # One controlled-unitary gate per Pauli string in the Hamiltonian.
        ham_terms_circ = [ug(Pauli.from_label(i).to_operator(), label=" ("+i+")").control(
            1) for i in list(self.ham_pauli.keys())]
        coeff = list(self.ham_pauli.values())
        if self.verbose:
            from tqdm import tqdm as tqdm
            iterations = tqdm(range(self.num_parameters),
                              desc="Calculating C_ij parameterized circuits\n")
        else:
            iterations = range(self.num_parameters)
        for i in iterations:
            for j in range(len(ham_terms_circ)):
                self.make_cik_circ(
                    ik=[i, j], h_pauli=ham_terms_circ, coeff=coeff)
def evolve_system(self, dt, N, verbose=True):
"""Evolve the system with time gap dt up to steps N
Args:
dt (float): dt
N (int): Number of times steps to evolve
verbose (bool, optional): Want to show progreess bar ?. Defaults to True.
"""
from tqdm import tqdm
for ntime in tqdm(range(N)):
if ntime == 0:
angles = self.initial_angle
self.angles.append(angles)
else:
angles = self.angles[ntime]
if verbose:
iter_range = tqdm(range(self.num_parameters))
else:
iter_range = range(self.num_parameters)
A = np.zeros((self.num_parameters, self.num_parameters))
for i in iter_range:
for j in range(self.num_parameters):
state, p = self.get_final_state_lm(angles, [j, i])
A[i, j] = p['0']-p['1'] # 2*p['1']-1
C = np.zeros((self.num_parameters, self.num_ham_terms))
for i in iter_range:
for j in range(self.num_ham_terms):
state, p = self.get_final_state_ik(angles, [i, j])
C[i, j] = p['0']-p['1'] # 2*p['0']-1
try:
theta_dot = np.linalg.solve(A, C.sum(axis=1))
except:
theta_dot, residuals, rank, s = np.linalg.lstsq(
A, C.sum(axis=1))
self.angles.append(angles+dt*theta_dot)
|
{"/cluster_test.py": ["/wick.py"]}
|
30,810,184
|
santoshkumarradha/stochastic-qc
|
refs/heads/master
|
/QITE/pauli.py
|
import numpy as np
import scipy
from binary_functions import Int2Bas, Bas2Int, Opp2Str, Psi2Str, Lst2Str
# ---------------------------------------------------------- #
# Multiplication table of the single-qubit Pauli group {I, X, Y, Z}:
# pauli_product[0][a, b] is the index of sigma_a . sigma_b and
# pauli_product[1][a, b] the accompanying phase (+/-1, +/-i).
pauli_product = [
    np.array([[0, 1, 2, 3],
              [1, 0, 3, 2],
              [2, 3, 0, 1],
              [3, 2, 1, 0]], dtype=int),
    np.array([[1, 1, 1, 1],
              [1, 1, 1j, -1j],
              [1, -1j, 1, 1j],
              [1, 1j, -1j, 1]], dtype=complex),
]
# sigma_matrices[:, :, k] stacks the 2x2 Pauli matrices along the last axis
# in the order I, X, Y, Z.
sigma_matrices = np.zeros((2, 2, 4), dtype=complex)
for i in range(2):
    j = 1 - i  # the off-diagonal column for row i
    sigma_matrices[i, i, 0] = 1.0                                # identity
    sigma_matrices[i, j, 1] = 1.0                                # sigma_x
    sigma_matrices[i, j, 2] = 1.0j * (-1.0) ** (i + 1.0)         # sigma_y
    sigma_matrices[i, i, 3] = (-1.0) ** i                        # sigma_z
def d12(t):
    """Indicator: 1 for Pauli indices 1 and 2 (X, Y), 0 for 0 and 3 (I, Z)."""
    return 0 if t % 3 == 0 else 1


d12f = np.vectorize(d12)


def d2(t):
    """Indicator: 1 only for Pauli index 2 (Y)."""
    return int(t == 2)


d2f = np.vectorize(d2)


def d23(t):
    """Indicator: 1 for Pauli indices 2 and 3 (Y, Z)."""
    return int(t > 1)


d23f = np.vectorize(d23)
# ---------------------------------------------------------- #
def computational_basis(nbit_):
    """Print every computational-basis state of an nbit_-qubit register,
    one index/label pair per line."""
    for idx in range(2 ** nbit_):
        print(idx, Psi2Str(Int2Bas(idx, 2, nbit_)))
def pauli_basis(nbit_):
    """Print all 4**nbit_ Pauli strings on nbit_ qubits, one index/label
    pair per line."""
    for idx in range(4 ** nbit_):
        print(idx, Opp2Str(Int2Bas(idx, 4, nbit_)))
# ---------------------------------------------------------- #
def pauli_action(active_, nbit_, verbose=False):
    """Tabulate the action of every Pauli string on the computational basis.

    For each of the M = 4**len(active_) Pauli strings mu acting on the qubit
    positions in ``active_`` of an nbit_-qubit register, and each of the
    N = 2**nbit_ basis states x:  sigma_mu |x> = gmm_sx[mu, x] * |ind_sx[mu, x]>.

    Args:
        active_ (sequence[int]): qubit positions the Pauli strings act on.
        nbit_ (int): total number of qubits.
        verbose (bool): unused; kept for interface compatibility.

    Returns:
        tuple: (ind_sx, gmm_sx) — (M, N) int array of image basis-state
        indices and (M, N) complex array of the corresponding phases.
    """
    nact = len(active_)
    N = 2**nbit_
    M = 4**nact
    # Binary place values used to turn a bit vector back into an integer.
    dot = [2**(nbit_-1-i) for i in range(nbit_)]
    ind_sx = np.zeros((M, N), dtype=int)
    gmm_sx = np.zeros((M, N), dtype=complex)+1  # overwritten below
    # svec[mu] holds the base-4 digits (Pauli indices) of string mu,
    # placed at the active qubit positions.
    svec = np.zeros((M, nbit_), dtype=int)
    for mu in range(M):
        svec[mu, active_] = Int2Bas(mu, 4, nact)
    sxyvec = d12f(svec)   # 1 where the Pauli is X or Y (bit flip)
    nyvec = d2f(svec)     # 1 where the Pauli is Y
    syzvec = d23f(svec)   # 1 where the Pauli is Y or Z (sign from the bit)
    nyvec = np.einsum('ab->a', nyvec)  # number of Y's per string
    # xvec[x] is the bit pattern of basis state x.
    xvec = np.zeros((N, nbit_), dtype=int)
    for xi in range(N):
        xvec[xi, :] = np.asarray(Int2Bas(xi, 2, nbit_))
    # Phase: (-1)^(x . syz) for every (string, state) pair ...
    gmm_sx = np.einsum('am,bm->ba', xvec, syzvec)+0j
    gmm_sx[:, :] = (-1)**gmm_sx[:, :]
    for mu in range(M):
        # ... times i^(#Y) per string; image state flips the X/Y bits.
        gmm_sx[mu, :] *= 1j**nyvec[mu]
        yvec = (xvec[:, :]+sxyvec[mu, :]) % 2
        ind_sx[mu, :] = np.einsum('a,ba->b', dot, yvec)
    return ind_sx, gmm_sx
|
{"/cluster_test.py": ["/wick.py"]}
|
30,872,954
|
prabaldeshar/erp-project
|
refs/heads/main
|
/inventory/models.py
|
from django.db import models
# Create your models here.
class ProductCategory(models.Model):
    """A product category; categories form a tree via ``parent``."""
    name = models.CharField(max_length=50, unique=True)
    costing_method = models.CharField(max_length=20)
    # NOTE(review): stores a raw category id rather than a ForeignKey('self'),
    # so referential integrity is not enforced; null/blank marks a top-level
    # category (views resolve the name manually).
    parent = models.IntegerField(null=True, blank=True)

    def __str__(self):
        return self.name
class Product(models.Model):
    """An inventory item with cost/sales pricing and an optional category."""
    name = models.CharField(max_length=50)
    # Prices are whole-number amounts (IntegerField — no decimal places).
    cost_price = models.IntegerField()
    sales_price = models.IntegerField()
    date_added = models.DateTimeField(auto_now_add=True)
    # PROTECT: a category with products cannot be deleted.
    category = models.ForeignKey(ProductCategory, on_delete=models.PROTECT, blank=True, null=True)
    quantity = models.IntegerField(blank=True, null=True)

    def __str__(self):
        return self.name
|
{"/inventory/forms.py": ["/inventory/models.py"], "/inventory/views.py": ["/inventory/forms.py", "/inventory/models.py"]}
|
30,872,955
|
prabaldeshar/erp-project
|
refs/heads/main
|
/inventory/urls.py
|
from django.urls import path
from . import views
# URL routes for the inventory app: HTML views first, then the JSON API.
urlpatterns = [
    path('', views.index, name='index'),
    path('product_form/', views.AddProdcutView.as_view(), name='add_product'),
    path('products/', views.get_all_products, name='get_products'),
    path('categories/', views.get_all_catgories, name='get_categories'),
    path('categories/<int:category_id>/', views.get_category_products, name='get_category_products'),
    path('products/<int:product_id>/', views.product_detail, name='product_detail'),
    path('category_form', views.AddProductCategoryView.as_view(), name='add_category'),
    # REST endpoints (DRF function views).
    path('api/', views.apiOverView, name='api'),
    path('api/product-list', views.productList, name='productList'),
    path('api/detail-view/<int:product_id>', views.detailView, name='detailView'),
    path('api/create-view/', views.createView, name='createView'),
]
|
{"/inventory/forms.py": ["/inventory/models.py"], "/inventory/views.py": ["/inventory/forms.py", "/inventory/models.py"]}
|
30,872,956
|
prabaldeshar/erp-project
|
refs/heads/main
|
/inventory/migrations/0004_auto_20210327_0821.py
|
# Generated by Django 3.1.7 on 2021-03-27 08:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make ``ProductCategory.parent`` optional (null/blank)."""

    dependencies = [
        ('inventory', '0003_auto_20210310_1630'),
    ]
    operations = [
        migrations.AlterField(
            model_name='productcategory',
            name='parent',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
|
{"/inventory/forms.py": ["/inventory/models.py"], "/inventory/views.py": ["/inventory/forms.py", "/inventory/models.py"]}
|
30,872,957
|
prabaldeshar/erp-project
|
refs/heads/main
|
/inventory/test.py
|
# Scratch demo: print each key/value pair of a dict.
student = {'name': 'John', 'age': 25, 'courses': ['Math', 'Physics']}
for item in student.items():
    print(*item)
|
{"/inventory/forms.py": ["/inventory/models.py"], "/inventory/views.py": ["/inventory/forms.py", "/inventory/models.py"]}
|
30,872,958
|
prabaldeshar/erp-project
|
refs/heads/main
|
/inventory/views.py
|
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from .forms import ProductForm, ProductCategoryForm
from .models import Product,ProductCategory
from django.views.generic import View
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .serializers import ProductSerializer
# Create your views here.
def index(request):
    """Render the inventory landing page."""
    template = 'inventory/index.html'
    return render(request, template)
def add_product(request):
    """Function-based product form: save on a valid POST and redirect,
    otherwise (GET or invalid POST) render the form."""
    if request.method != "POST":
        return render(request, 'inventory/product_form.html',
                      {'form': ProductForm()})
    form = ProductForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        form.save()
        return HttpResponseRedirect('/inventory/')
    # Invalid submission: re-render with the bound form so errors display.
    return render(request, 'inventory/product_form.html', {'form': form})
def get_all_products(request):
    """List every product.

    Removed a leftover debug ``print(1)`` that wrote to stdout on every
    request.
    """
    products = Product.objects.all()
    return render(request, 'inventory/all_products.html', {'products': products})
def get_all_catgories(request):
    """List all categories alongside the resolved name of each parent.

    ``ProductCategory.parent`` stores a raw category id (or None for a
    top-level category).  Previously each row triggered its own
    ``objects.get`` query (N+1); names are now resolved from one in-memory
    map built from the single queryset.
    """
    categories = ProductCategory.objects.all()
    names_by_id = {c.id: c.name for c in categories}
    parent_list = [
        # Dangling parent ids fall back to 'None' instead of raising.
        'None' if c.parent is None else names_by_id.get(c.parent, 'None')
        for c in categories
    ]
    context = {
        'zipped': zip(categories, parent_list)
    }
    return render(request, 'inventory/all_categories.html', context)
def get_category_products(request, category_id):
    """Show one category and the products assigned to it.

    Removed a leftover debug ``print(products)`` (evaluated the queryset
    just to write it to stdout) and dead commented-out code.
    """
    category = ProductCategory.objects.get(id=category_id)
    products = Product.objects.filter(category=category)
    context = {
        'category': category,
        'products': products
    }
    return render(request, 'inventory/category_products.html', context)
def product_detail(request, product_id):
    """Render the detail page for a single product."""
    item = Product.objects.get(id=product_id)
    context = {'product': item}
    return render(request, 'inventory/product_detail.html', context)
@api_view(['GET'])
def detailView(request, product_id):
    """API endpoint: serialized representation of one product."""
    item = Product.objects.get(id=product_id)
    return Response(ProductSerializer(item, many=False).data)
@api_view(['POST'])
def createView(request):
    """API endpoint: create a product from the posted payload.

    Returns the saved representation on success.  Previously the view fell
    through and returned the *unsaved* input data even when validation
    failed; invalid payloads now get the validation errors with HTTP 400.
    """
    serializer = ProductSerializer(data=request.data)
    if serializer.is_valid():
        serializer.save()
        return Response(serializer.data)
    return Response(serializer.errors, status=400)
class AddProductCategoryView(View):
    """Class-based view for creating a ProductCategory."""

    def get(self, request):
        """Show an empty category form."""
        return render(request, 'inventory/category_form.html',
                      {'form': ProductCategoryForm()})

    def post(self, request):
        """Save a valid submission and redirect; otherwise re-render
        with the bound form so errors display."""
        form = ProductCategoryForm(request.POST or None)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/inventory/')
        return render(request, 'inventory/category_form.html', {'form': form})
class AddProdcutView(View):
    """Class-based view for creating a Product.

    NOTE(review): the class-name typo ("Prodcut") is kept deliberately —
    urls.py references it by this name.
    """

    def get(self, request):
        """Show an empty product form."""
        form = ProductForm()
        context = {'form': form}
        return render(request, 'inventory/product_form.html', context)

    def post(self, request):
        """Save a valid submission and redirect; otherwise re-render the form.

        Fixed: the invalid-submission path rendered the misspelled template
        'inventory/prodcut_fomr.html', which does not exist, so any invalid
        POST raised TemplateDoesNotExist.
        """
        form = ProductForm(request.POST or None)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/inventory/')
        context = {'form': form}
        return render(request, 'inventory/product_form.html', context)
@api_view(['GET'])
def apiOverView(request):
    """API root: a map of the available endpoint paths."""
    endpoints = {
        'List': '/product-list',
        'Create': '/product-create/',
        'Detail View': '/task-detail/<str:pk>/',
    }
    return Response(endpoints)
@api_view(['GET'])
def productList(request):
    """API endpoint: serialized list of every product."""
    queryset = Product.objects.all()
    serialized = ProductSerializer(queryset, many=True)
    return Response(serialized.data)
|
{"/inventory/forms.py": ["/inventory/models.py"], "/inventory/views.py": ["/inventory/forms.py", "/inventory/models.py"]}
|
30,872,959
|
prabaldeshar/erp-project
|
refs/heads/main
|
/inventory/forms.py
|
from django import forms
from django.forms import ModelForm
from .models import ProductCategory, Product
class ProductForm(ModelForm):
    """ModelForm over Product exposing all fields; ``category`` is overridden
    so it is optional on the form (the model FK is already null/blank)."""
    category = forms.ModelChoiceField(
        required = False,
        queryset = ProductCategory.objects.all(),
    )

    class Meta:
        model = Product
        fields = '__all__'
class ProductCategoryForm(ModelForm):
    """ModelForm over ProductCategory exposing all fields."""
    class Meta:
        model = ProductCategory
        fields = '__all__'
|
{"/inventory/forms.py": ["/inventory/models.py"], "/inventory/views.py": ["/inventory/forms.py", "/inventory/models.py"]}
|
30,916,909
|
vinosa75/script
|
refs/heads/main
|
/myapp/migrations/0008_gainloss.py
|
# Generated by Django 3.1.6 on 2021-09-29 21:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the ``gainloss`` model
    (symbol, segment, time)."""

    dependencies = [
        ('myapp', '0007_livesegment'),
    ]
    operations = [
        migrations.CreateModel(
            name='gainloss',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('symbol', models.CharField(max_length=20)),
                ('segment', models.CharField(max_length=20)),
                ('time', models.DateTimeField()),
            ],
        ),
    ]
|
{"/myapp/views.py": ["/myapp/models.py"], "/myapp/tasks.py": ["/myapp/models.py", "/myproject/celery.py"]}
|
30,916,910
|
vinosa75/script
|
refs/heads/main
|
/myapp/migrations/0011_livesegment_change_perc.py
|
# Generated by Django 3.1.6 on 2021-10-01 04:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add ``change_perc`` (float, default 0) to
    ``livesegment``."""

    dependencies = [
        ('myapp', '0010_auto_20211001_0909'),
    ]
    operations = [
        migrations.AddField(
            model_name='livesegment',
            name='change_perc',
            field=models.FloatField(default=0),
        ),
    ]
|
{"/myapp/views.py": ["/myapp/models.py"], "/myapp/tasks.py": ["/myapp/models.py", "/myproject/celery.py"]}
|
30,916,911
|
vinosa75/script
|
refs/heads/main
|
/myapp/migrations/0007_livesegment.py
|
# Generated by Django 3.1.6 on 2021-09-27 15:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the ``LiveSegment`` model
    (symbol, segment, auto-set time and date)."""

    dependencies = [
        ('myapp', '0006_historyoipercentchange'),
    ]
    operations = [
        migrations.CreateModel(
            name='LiveSegment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('symbol', models.CharField(max_length=20)),
                ('segment', models.CharField(max_length=20)),
                ('time', models.TimeField(auto_now_add=True)),
                ('date', models.DateField(auto_now_add=True)),
            ],
        ),
    ]
|
{"/myapp/views.py": ["/myapp/models.py"], "/myapp/tasks.py": ["/myapp/models.py", "/myproject/celery.py"]}
|
30,916,912
|
vinosa75/script
|
refs/heads/main
|
/myapp/migrations/0006_historyoipercentchange.py
|
# Generated by Django 3.1.6 on 2021-09-23 00:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the ``HistoryOIPercentChange`` model
    (call/put change snapshots keyed by symbol, strikes and expiry)."""

    dependencies = [
        ('myapp', '0005_liveoipercentchange'),
    ]
    operations = [
        migrations.CreateModel(
            name='HistoryOIPercentChange',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.DateTimeField()),
                ('call1', models.CharField(default='', max_length=20)),
                ('call2', models.CharField(default='', max_length=20)),
                ('put1', models.CharField(default='', max_length=20)),
                ('put2', models.CharField(default='', max_length=20)),
                ('callstrike', models.CharField(max_length=20)),
                ('putstrike', models.CharField(max_length=20)),
                ('symbol', models.CharField(max_length=20)),
                ('expiry', models.DateField()),
            ],
        ),
    ]
|
{"/myapp/views.py": ["/myapp/models.py"], "/myapp/tasks.py": ["/myapp/models.py", "/myproject/celery.py"]}
|
30,916,913
|
vinosa75/script
|
refs/heads/main
|
/myapp/migrations/0004_liveequityresult_opencrossed.py
|
# Generated by Django 3.1.6 on 2021-09-22 19:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add ``opencrossed`` (CharField, default '') to
    ``liveequityresult``."""

    dependencies = [
        ('myapp', '0003_liveoitotal_strikegap'),
    ]
    operations = [
        migrations.AddField(
            model_name='liveequityresult',
            name='opencrossed',
            field=models.CharField(default='', max_length=20),
        ),
    ]
|
{"/myapp/views.py": ["/myapp/models.py"], "/myapp/tasks.py": ["/myapp/models.py", "/myproject/celery.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.