index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
36,571,693
|
teamkosy/infoweb
|
refs/heads/main
|
/album/urls.py
|
from django.urls import path
from . import views
# URL routes for the album app: list, create, detail, edit, update, delete.
# NOTE(review): routes have no trailing slashes, unlike common Django style —
# confirm this matches the templates' form actions.
urlpatterns = [
    path('', views.album, name="album"),
    path('album_write', views.album_write, name="album_write"),
    path('album_insert', views.album_insert, name="album_insert"),
    path('album_view', views.album_view, name="album_view"),
    path('album_edit', views.album_edit, name="album_edit"),
    path('album_update', views.album_update, name="album_update"),
    path('album_delete', views.album_delete, name="album_delete"),
]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,694
|
teamkosy/infoweb
|
refs/heads/main
|
/infoweb/views.py
|
from django.shortcuts import render
import requests
def index(request):
    """Render the home page with COVID statistics fetched from the MBC feed.

    Downloads a JSON document and passes its values to the template as
    ``covid_list``.
    """
    url = 'https://imnews.imbc.com/js/coviddata.json'
    # A timeout prevents this view from hanging forever when the remote
    # host stalls (requests has no default timeout).
    response = requests.get(url, timeout=10)
    covid = response.json()
    covid_list = list(covid.values())
    return render(request, 'infoweb/index.html', {'covid_list': covid_list})
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,695
|
teamkosy/infoweb
|
refs/heads/main
|
/chet/apps.py
|
from django.apps import AppConfig
class ChetConfig(AppConfig):
    """Django application configuration for the ``chet`` app."""
    name = 'chet'
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,696
|
teamkosy/infoweb
|
refs/heads/main
|
/mytube/views.py
|
from django.shortcuts import render
from .models import Video
def mytube(request):
    """Render the video list page, most recently updated first."""
    context = {'video_list': Video.objects.order_by('-update')}
    return render(request, 'infoweb/video_list.html', context)
def video_new(request):
    """Create a new video from POSTed form data, or show the form on GET.

    A Video row is created when at least one of ``title`` / ``video_key``
    holds a non-empty value; otherwise the empty form is re-rendered.
    """
    if request.method == 'POST':
        title = request.POST.get('title')
        video_key = request.POST.get('video_key')
        # Bug fix: the original `title or video_key != ''` only compared
        # video_key against '' — and POST.get returns None for a missing
        # field, for which `None != ''` is True, so a completely absent
        # video_key still created a row.  Test both values for truthiness.
        if title or video_key:
            Video.objects.create(title=title, video_key=video_key)
            video_list = Video.objects.order_by('-update')
            return render(request, 'infoweb/video_list.html', {'video_list': video_list})
        return render(request, 'infoweb/video_new.html')
    elif request.method == 'GET':
        return render(request, 'infoweb/video_new.html')
def video_detail(request, video_id):
    """Render the detail page for a single video.

    NOTE(review): ``Video.objects.get`` raises ``Video.DoesNotExist``
    (HTTP 500) for an unknown id — consider ``get_object_or_404``.
    """
    video = Video.objects.get(id=video_id)
    return render(request, 'infoweb/video_detail.html', {'video': video})
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,697
|
teamkosy/infoweb
|
refs/heads/main
|
/weather/views.py
|
from django.shortcuts import render
import requests
from bs4 import BeautifulSoup
def weather(request):
    """Scrape today's weather for one district from Naver and render it."""
    url = 'https://weather.naver.com/today/02150101'
    # Timeout keeps the view from hanging on a stalled remote host.
    response = requests.get(url, timeout=10)
    text = response.text
    # Name the parser explicitly: without it bs4 emits a warning and picks
    # whichever parser happens to be installed, so results can vary.
    soup = BeautifulSoup(text, 'html.parser')
    blind = soup.select_one('div.weather_area')
    # select_one returns None when the markup changes; fall back to an
    # empty string instead of raising AttributeError.
    weather = blind.text.replace('\n', '').replace(' ', '') if blind else ''
    return render(request, 'infoweb/weather.html', context={'weather': weather})
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,698
|
teamkosy/infoweb
|
refs/heads/main
|
/album/migrations/0001_initial.py
|
# Generated by Django 3.1.5 on 2021-03-18 10:59
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration declaring the unmanaged Album model.

    ``managed=False`` means Django never creates or alters the ``album``
    table; the model only maps onto an existing table.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Album',
            fields=[
                ('a_no', models.AutoField(db_column='a_no', primary_key=True, serialize=False)),
                ('a_type', models.CharField(db_column='a_type', max_length=50)),
                ('a_title', models.CharField(db_column='a_title', max_length=255)),
                ('a_note', models.CharField(db_column='a_note', max_length=4096)),
                ('a_image', models.CharField(db_column='a_image', max_length=1000)),
                ('a_count', models.IntegerField(db_column='a_count', default=0)),
                ('a_datetime', models.DateTimeField(db_column='a_datetime')),
                ('a_usage', models.CharField(db_column='a_usage', max_length=10)),
            ],
            options={
                'db_table': 'album',
                'managed': False,
            },
        ),
    ]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,699
|
teamkosy/infoweb
|
refs/heads/main
|
/board/models.py
|
from django.db import models
class Message(models.Model):
    """A simple board/guestbook entry."""
    # help_text strings are user-facing (Korean) and must stay as-is.
    title = models.CharField(max_length=50, help_text='최대 50자 내로 입력가능합니다.')
    name = models.CharField(max_length=10, help_text='최대 10자 내로 입력가능합니다.')
    contents = models.TextField(default='', help_text='내용을 입력해주세요~')
    # auto_now refreshes the timestamp on every save, not just creation.
    regdate = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.title
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,700
|
teamkosy/infoweb
|
refs/heads/main
|
/users/migrations/0004_delete_photo.py
|
# Generated by Django 3.1.5 on 2021-03-18 11:24
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drops the Photo model added in 0003."""
    dependencies = [
        ('users', '0003_photo'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Photo',
        ),
    ]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,701
|
teamkosy/infoweb
|
refs/heads/main
|
/lotto/urls.py
|
from django.urls import path
from . import views
# URL routes for the lotto app: landing page and number generator.
urlpatterns = [
    path('', views.lotto, name='lotto'),
    path('lottonum', views.lottonum, name='lottonum'),
]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,702
|
teamkosy/infoweb
|
refs/heads/main
|
/bmi/views.py
|
from django.shortcuts import render
def bmi(request):
    """BMI calculator view.

    GET renders the input form; POST reads weight ``w`` (kg) and height
    ``h`` (cm), computes the BMI and renders a category message plus the
    raw value (``rs``).
    """
    if request.method == 'GET':
        return render(request, 'infoweb/bmi.html')
    elif request.method == 'POST':
        w1 = request.POST.get('w')
        h1 = request.POST.get('h')
        # Re-show the form on missing/empty input.  The original called
        # len() on the raw value, which raises TypeError when the key is
        # absent (POST.get returns None).
        if not w1 or not h1:
            return render(request, 'infoweb/bmi.html')
        try:
            # float() also accepts decimal input such as "70.5"; the
            # original int() rejected it (and any garbage input) with an
            # unhandled ValueError -> HTTP 500.
            w = float(w1)
            h = float(h1)
        except ValueError:
            return render(request, 'infoweb/bmi.html')
        bmi = w / (h / 100) ** 2
        if bmi < 18.5:
            return render(request, 'infoweb/bmi.html', context={'bmi': "저체중 : 마른편입니다. 조금 더 먹어도 괜찮아요^^", 'rs': bmi})
        if bmi < 25:
            return render(request, 'infoweb/bmi.html', context={'bmi': "정상 : 적당하네요^^ 잘 유지하세요~", 'rs': bmi})
        return render(request, 'infoweb/bmi.html', context={'bmi': "비만 ㅠㅠ : 다이어트가 필요합니다~", 'rs': bmi})
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,703
|
teamkosy/infoweb
|
refs/heads/main
|
/board/migrations/0001_initial.py
|
# Generated by Django 3.1.5 on 2021-03-07 16:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the board Message table."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(help_text='최대 50자 내로 입력가능합니다.', max_length=50)),
                ('name', models.CharField(help_text='최대 10자 내로 입력가능합니다.', max_length=10)),
                ('contents', models.TextField(default='', help_text='내용을 입력해주세요~')),
                ('regdate', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,704
|
teamkosy/infoweb
|
refs/heads/main
|
/movie/views.py
|
from django.shortcuts import render
import requests
def movie(request):
    """Render the daily box-office top 10 fetched from the KOBIS endpoint."""
    url = 'https://www.kobis.or.kr/kobis/business/main/searchMainDailyBoxOffice.do'
    movie = requests.get(url).json()
    date = movie[0]['startDate']
    # Keep rank, title, open date, sales, audience count and rank change
    # for the first ten entries.
    movie_list = [
        (movie[i]['rank'], movie[i]['movieNm'], movie[i]['openDt'],
         movie[i]['salesAmt'], movie[i]['audiCnt'], movie[i]['rankInten'])
        for i in range(10)
    ]
    return render(request, 'infoweb/movie.html', {'movie_list': movie_list, 'date': date})
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,705
|
teamkosy/infoweb
|
refs/heads/main
|
/mytube/urls.py
|
from django.urls import path
from . import views
# URL routes for the mytube app: list, upload form, and per-video detail.
urlpatterns = [
    path('', views.mytube, name='mytube'),
    path('new', views.video_new, name='new'),
    path('<int:video_id>', views.video_detail, name='detail'),
]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,571,706
|
teamkosy/infoweb
|
refs/heads/main
|
/bmi/urls.py
|
from django.urls import path
from . import views
# URL routes for the bmi app; a single view handles both GET and POST.
urlpatterns = [
    path('', views.bmi, name='bmi'),
    # path('bmiOk', views.bmiOk),
]
|
{"/member/views.py": ["/member/models.py", "/member/forms.py"], "/board/views.py": ["/board/models.py"], "/member/forms.py": ["/member/models.py"], "/users/views.py": ["/users/models.py"], "/mytube/views.py": ["/mytube/models.py"]}
|
36,580,418
|
NikaRossi/TelegramBot
|
refs/heads/main
|
/bot.py
|
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, ConversationHandler
from settings import TG_TOKEN
from telegram import ReplyKeyboardRemove, ReplyKeyboardMarkup
def start(bot, update):
    """Handle /start: greet the user by name and show the main keyboard.

    NOTE(review): with ``Updater(..., use_context=True)`` the first handler
    argument is the telegram ``Update`` object (which is why
    ``bot.message`` works); the parameter name ``bot`` is misleading —
    confirm and consider renaming to ``update``/``context``.
    """
    print('Кто-то отправил команду /start')
    bot.message.reply_text('Здравствуйте, {}! Я бот стоматологической клиники "Бриллиант"!'
                           ' \nВы можете ознакомиться с нашим местоположением и часами работы, '
                           'а также заполнить анкету и записаться на приём.'
                           .format(bot.message.chat.first_name), reply_markup=get_keyboard())
def parrot(bot, update):
    """Echo the incoming message text back to the chat (handler is
    currently commented out in main())."""
    print(bot.message.text)
    bot.message.reply_text(bot.message.text)
def close_keyboard(bot, update):
    """Handle /close: remove the custom reply keyboard from the chat."""
    bot.message.reply_text('Ok', reply_markup=ReplyKeyboardRemove())
def get_keyboard():
    """Build the persistent two-row reply keyboard with the bot commands."""
    return ReplyKeyboardMarkup([['/adress', '/work_time'], ['/anketa']], resize_keyboard=True)
def adress(bot, update):
    """Handle /adress: send the clinic's street address."""
    bot.message.reply_text('Адрес: Россия, Санкт-Петербург, Вознесенский проспект, д. 46')
def time(bot, update):
    """Handle /work_time: send the clinic's opening hours.

    NOTE(review): the name would shadow the stdlib ``time`` module if it
    were ever imported here — consider renaming to ``work_time``.
    """
    bot.message.reply_text('Время работы: пн-пт, 9:00 - 18:00')
def anketa(bot, update):
    """Handle /anketa: appointment form is not implemented yet."""
    bot.message.reply_text('Услуга записи пока недоступна')
def main():
    """Register the command handlers and run the bot until interrupted."""
    my_bot = Updater(TG_TOKEN, use_context=True)
    my_bot.dispatcher.add_handler(CommandHandler('start', start))
    my_bot.dispatcher.add_handler(CommandHandler('close', close_keyboard))
    my_bot.dispatcher.add_handler(CommandHandler('adress', adress))
    my_bot.dispatcher.add_handler(CommandHandler('work_time', time))
    my_bot.dispatcher.add_handler(CommandHandler('anketa', anketa))
    # my_bot.dispatcher.add_handler(MessageHandler(Filters.text, parrot))
    my_bot.start_polling()
    my_bot.idle()
if __name__ == '__main__':
    # Guard the entry point so importing this module does not start polling.
    main()
|
{"/bot.py": ["/settings.py", "/database.py"]}
|
36,584,810
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/Pi/SendData.py
|
import create_keys
import Encrypt_data
import Decrypt_data
import Compression
import Decompression
import os
import sys
import time
def sendData(text):
    """Compress ``text`` and then encrypt the compressed result, timing both."""
    #create_keys.createkeys()
    started = time.time()
    # Stage 1: compress the input, Stage 2: encrypt the compressed payload.
    payload = Compression.compress(text)
    Encrypt_data.encrypt(payload)
    print("The time to compress and encrypt together took: ")
    print("--- %s seconds ---" % (time.time() - started))
if __name__ == "__main__":
    # Parse the input file argument once and reuse it (the original bound
    # `inputfile` and then re-read sys.argv[1] anyway).
    inputfile = sys.argv[1]
    sendData(inputfile)
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,811
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/EncryptionAlgo/AsymetricEncryption.py
|
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
import binascii
global keyPair
global pubKey
# Key pair used for the encryption.
# Bug fix: PyCryptodome's RSA.generate requires the key size in bits to be
# a multiple of 256 (and >= 1024); the original value 5012 raises
# ValueError at import time.  4096 is the nearest standard size.
keyPair = RSA.generate(4096)
pubKey = keyPair.publickey()
#print(f"Public key: (n={hex(pubKey.n)}, e={hex(pubKey.e)})")
pubKeyPEM = pubKey.exportKey()
print(pubKeyPEM.decode('ascii'))
#print(f"Private key: (n={hex(pubKey.n)}, d={hex(keyPair.d)})")
privKeyPEM = keyPair.exportKey()
print(privKeyPEM.decode('ascii'))
#encryption
msg = 'A message for encryption'
encryptor = PKCS1_OAEP.new(pubKey)
#Allows someone to enter filename for encryption
def encrypt(filename, key):
    """Encrypt the contents of ``filename`` with RSA-OAEP under ``key``.

    Writes the ciphertext to ``encryptedTxt.txt`` and also stores it in the
    module-global ``encrypted_data`` (preserving the original contract).
    """
    with open(filename, "rb") as file_out:
        # Bug fix: the original assigned the bound method (file_out.read)
        # instead of calling it, so no data was ever read from the file.
        filedata = file_out.read()
    global encrypted_data
    # Bug fix: RSA key objects have no .encrypt() in PyCryptodome; OAEP is
    # the supported primitive.  Note OAEP limits the plaintext to the
    # modulus size minus padding overhead.
    encrypted_data = PKCS1_OAEP.new(key).encrypt(filedata)
    # Persist the ciphertext; the with-block closes the handle reliably.
    with open("encryptedTxt.txt", "wb") as fp:
        fp.write(encrypted_data)
def decrypt(filename, key):
    """Decrypt ``filename`` (an OAEP ciphertext) into global ``decrypted_data``.

    NOTE(review): decryption requires the *private* key, so the
    module-global ``keyPair`` is used; the ``key`` parameter is kept for
    interface compatibility (callers pass the public key).
    """
    with open(filename, "rb") as ciphertext:
        cipherdata = ciphertext.read()
    global decrypted_data
    # Bug fix: the RSA module has no decrypt(); build the OAEP cipher from
    # the private key instead.
    decrypted_data = PKCS1_OAEP.new(keyPair).decrypt(cipherdata)
if __name__ == '__main__':
    # Read a filename from stdin, encrypt it, and show the ciphertext.
    fileIn = input()
    encrypt(fileIn, pubKey)
    print(encrypted_data)
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,812
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/All source files/CompareFile.py
|
from difflib import Differ
# Print a line-by-line diff between the original IMU capture and the
# decompressed round-trip output, to verify lossless recovery.
# NOTE(review): filenames are hard-coded and contain ':' (not portable to
# Windows) — confirm they match the files actually produced by the pipeline.
with open('IMU-data-2021-10-25-19:35:45.txt') as file_1, open('decompressedData_of_encrypted_fileofIMU.txt.txt') as file_2:
    differ = Differ()
    for line in differ.compare(file_1.readlines(), file_2.readlines()):
        print(line)
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,813
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Compression.py
|
# Zlib compression algorithm for csv data files.
# Script written by Michael Altshuler (ALTMIC003) and Taine de Buys (DBYTAI001)
import zlib, sys, time, base64;
import binascii
# Input corpus: the VN100 IMU capture files compressed by main().
data = ['2018-09-19-03_57_11_VN100.csv', '2018-09-19-04_22_21_VN100.csv', '2018-09-19-06_28_11_VN100.csv', '2018-09-19-06_53_21_VN100.csv', '2018-09-19-08_59_11_VN100.csv', '2018-09-19-09_24_21_VN100.csv', '2018-09-19-09_49_31_VN100.csv', '2018-09-19-11_55_21_VN100.csv', '2018-09-19-12_20_31_VN100.csv']
#main function executes compress() and then in turn appends the result of each compressed file to a new text file to then be encrypted.
def main():
    """Compress every file in ``data`` and append each result to its own
    ``compressedData_of_<name>.txt`` file (input for the encryption stage)."""
    for i in data:
        print(i)
        # Context manager guarantees the handle is closed even if
        # compress() raises (the original left the file open on error).
        with open('compressedData' + '_of_' + i + '.txt', 'ab') as output:
            output.write(compress(i))
#function serves the purpose of compressing any incoming .csv files.
def compress(file):
    """Read ``file`` and return its zlib-compressed, base64-encoded bytes.

    The base64 step makes the binary zlib stream safe to embed in text
    files and to hand to the encryption stage.
    """
    with open(file, 'rb') as fileobj:
        raw = fileobj.read()  # renamed: the original shadowed the `file` parameter
    # Size comparison (in bytes) of original vs. compressed, for benchmarking.
    print("Raw data size: ", sys.getsizeof(raw))
    # Level 9 = maximum zlib compression.
    compressed = base64.b64encode(zlib.compress(raw, 9))
    print("Compressed data file size: ", sys.getsizeof(compressed))
    return compressed
if __name__ == "__main__":
    # Batch-compress the whole corpus when run as a script.
    main()
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,814
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/Laptop/ICM20948.py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import time
import smbus
import math
from datetime import datetime
#!/usr/bin/python
# -*- coding:utf-8 -*-
import ctypes
class SHTC3:
    """ctypes wrapper around the SHTC3 temperature/humidity sensor library.

    Loads ``./SHTC3.so`` from the current working directory, so scripts
    must be started from the directory containing the shared object.
    """
    def __init__(self):
        self.dll = ctypes.CDLL("./SHTC3.so")
        init = self.dll.init
        init.restype = ctypes.c_int
        init.argtypes = [ctypes.c_void_p]
        init(None)
    def SHTC3_Read_Temperature(self):
        # Returns the temperature as a C float (presumably degrees Celsius,
        # per the caller's print format — confirm against the C library).
        temperature = self.dll.SHTC3_Read_TH
        temperature.restype = ctypes.c_float
        temperature.argtypes = [ctypes.c_void_p]
        return temperature(None)
    def SHTC3_Read_Humidity(self):
        # Returns relative humidity as a C float (percent, per caller format).
        humidity = self.dll.SHTC3_Read_RH
        humidity.restype = ctypes.c_float
        humidity.argtypes = [ctypes.c_void_p]
        return humidity(None)
# Shared module-level state: updated in place by the ICM20948 methods below
# (raw sensor readings, magnetometer averaging buffers, AHRS quaternion).
Gyro = [0,0,0]
Accel = [0,0,0]
Mag = [0,0,0]
pitch = 0.0
roll = 0.0
yaw = 0.0
pu8data=[0,0,0,0,0,0,0,0]
U8tempX=[0,0,0,0,0,0,0,0,0]
U8tempY=[0,0,0,0,0,0,0,0,0]
U8tempZ=[0,0,0,0,0,0,0,0,0]
GyroOffset=[0,0,0]
# Mahony AHRS filter gains (Ki integral, Kp proportional) and quaternion.
Ki = 1.0
Kp = 4.50
q0 = 1.0
q1=q2=q3=0.0
angles=[0.0,0.0,0.0]
# C-style booleans used by the check routines.
true =0x01
false =0x00
# define ICM-20948 Device I2C address
I2C_ADD_ICM20948 = 0x68
I2C_ADD_ICM20948_AK09916 = 0x0C
I2C_ADD_ICM20948_AK09916_READ = 0x80
I2C_ADD_ICM20948_AK09916_WRITE = 0x00
# define ICM-20948 Register
# user bank 0 register
REG_ADD_WIA = 0x00
REG_VAL_WIA = 0xEA
REG_ADD_USER_CTRL = 0x03
REG_VAL_BIT_DMP_EN = 0x80
REG_VAL_BIT_FIFO_EN = 0x40
REG_VAL_BIT_I2C_MST_EN = 0x20
REG_VAL_BIT_I2C_IF_DIS = 0x10
REG_VAL_BIT_DMP_RST = 0x08
REG_VAL_BIT_DIAMOND_DMP_RST = 0x04
REG_ADD_PWR_MIGMT_1 = 0x06
REG_VAL_ALL_RGE_RESET = 0x80
REG_VAL_RUN_MODE = 0x01  # Non low-power mode
REG_ADD_LP_CONFIG = 0x05
REG_ADD_PWR_MGMT_1 = 0x06
REG_ADD_PWR_MGMT_2 = 0x07
REG_ADD_ACCEL_XOUT_H = 0x2D
REG_ADD_ACCEL_XOUT_L = 0x2E
REG_ADD_ACCEL_YOUT_H = 0x2F
REG_ADD_ACCEL_YOUT_L = 0x30
REG_ADD_ACCEL_ZOUT_H = 0x31
REG_ADD_ACCEL_ZOUT_L = 0x32
REG_ADD_GYRO_XOUT_H = 0x33
REG_ADD_GYRO_XOUT_L = 0x34
REG_ADD_GYRO_YOUT_H = 0x35
REG_ADD_GYRO_YOUT_L = 0x36
REG_ADD_GYRO_ZOUT_H = 0x37
REG_ADD_GYRO_ZOUT_L = 0x38
REG_ADD_EXT_SENS_DATA_00 = 0x3B
REG_ADD_REG_BANK_SEL = 0x7F
REG_VAL_REG_BANK_0 = 0x00
REG_VAL_REG_BANK_1 = 0x10
REG_VAL_REG_BANK_2 = 0x20
REG_VAL_REG_BANK_3 = 0x30
# user bank 1 register
# user bank 2 register
REG_ADD_GYRO_SMPLRT_DIV = 0x00
REG_ADD_GYRO_CONFIG_1 = 0x01
REG_VAL_BIT_GYRO_DLPCFG_2 = 0x10  # bit[5:3]
REG_VAL_BIT_GYRO_DLPCFG_4 = 0x20  # bit[5:3]
REG_VAL_BIT_GYRO_DLPCFG_6 = 0x30  # bit[5:3]
REG_VAL_BIT_GYRO_FS_250DPS = 0x00  # bit[2:1]
REG_VAL_BIT_GYRO_FS_500DPS = 0x02  # bit[2:1]
REG_VAL_BIT_GYRO_FS_1000DPS = 0x04  # bit[2:1]
REG_VAL_BIT_GYRO_FS_2000DPS = 0x06  # bit[2:1]
REG_VAL_BIT_GYRO_DLPF = 0x01  # bit[0]
REG_ADD_ACCEL_SMPLRT_DIV_2 = 0x11
REG_ADD_ACCEL_CONFIG = 0x14
REG_VAL_BIT_ACCEL_DLPCFG_2 = 0x10  # bit[5:3]
REG_VAL_BIT_ACCEL_DLPCFG_4 = 0x20  # bit[5:3]
REG_VAL_BIT_ACCEL_DLPCFG_6 = 0x30  # bit[5:3]
REG_VAL_BIT_ACCEL_FS_2g = 0x00  # bit[2:1]
REG_VAL_BIT_ACCEL_FS_4g = 0x02  # bit[2:1]
REG_VAL_BIT_ACCEL_FS_8g = 0x04  # bit[2:1]
REG_VAL_BIT_ACCEL_FS_16g = 0x06  # bit[2:1]
REG_VAL_BIT_ACCEL_DLPF = 0x01  # bit[0]
# user bank 3 register
REG_ADD_I2C_SLV0_ADDR = 0x03
REG_ADD_I2C_SLV0_REG = 0x04
REG_ADD_I2C_SLV0_CTRL = 0x05
REG_VAL_BIT_SLV0_EN = 0x80
REG_VAL_BIT_MASK_LEN = 0x07
REG_ADD_I2C_SLV0_DO = 0x06
REG_ADD_I2C_SLV1_ADDR = 0x07
REG_ADD_I2C_SLV1_REG = 0x08
REG_ADD_I2C_SLV1_CTRL = 0x09
REG_ADD_I2C_SLV1_DO = 0x0A
# define ICM-20948 Register end
# define ICM-20948 MAG Register
REG_ADD_MAG_WIA1 = 0x00
REG_VAL_MAG_WIA1 = 0x48
REG_ADD_MAG_WIA2 = 0x01
REG_VAL_MAG_WIA2 = 0x09
REG_ADD_MAG_ST2 = 0x10
REG_ADD_MAG_DATA = 0x11
REG_ADD_MAG_CNTL2 = 0x31
REG_VAL_MAG_MODE_PD = 0x00
REG_VAL_MAG_MODE_SM = 0x01
REG_VAL_MAG_MODE_10HZ = 0x02
REG_VAL_MAG_MODE_20HZ = 0x04
REG_VAL_MAG_MODE_50HZ = 0x05
REG_VAL_MAG_MODE_100HZ = 0x08
REG_VAL_MAG_MODE_ST = 0x10
# define ICM-20948 MAG Register end
MAG_DATA_LEN =6
class ICM20948(object):
    """Driver for the ICM-20948 9-axis IMU over SMBus/I2C.

    Reads accelerometer/gyro registers directly and talks to the AK09916
    magnetometer through the ICM-20948's auxiliary I2C master.  Results are
    written into the module-level lists Gyro/Accel/Mag (raw counts).
    """
    def __init__(self, address=I2C_ADD_ICM20948):
        self._address = address
        self._bus = smbus.SMBus(1)
        bRet = self.icm20948Check()  # Initialization of the device multiple times after power on will result in a return error
        # while true != bRet:
        #     print("ICM-20948 Error\n")
        #     time.sleep(0.5)
        # print("ICM-20948 OK\n")
        time.sleep(0.5)  # We can skip this detection by delaying it by 500 milliseconds
        # user bank 0 register: reset, then leave low-power mode.
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)
        self._write_byte(REG_ADD_PWR_MIGMT_1, REG_VAL_ALL_RGE_RESET)
        time.sleep(0.1)
        self._write_byte(REG_ADD_PWR_MIGMT_1, REG_VAL_RUN_MODE)
        # user bank 2 register: sample-rate dividers and full-scale/DLPF config.
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_2)
        self._write_byte(REG_ADD_GYRO_SMPLRT_DIV, 0x07)
        self._write_byte(REG_ADD_GYRO_CONFIG_1, REG_VAL_BIT_GYRO_DLPCFG_6 | REG_VAL_BIT_GYRO_FS_1000DPS | REG_VAL_BIT_GYRO_DLPF)
        self._write_byte(REG_ADD_ACCEL_SMPLRT_DIV_2, 0x07)
        self._write_byte(REG_ADD_ACCEL_CONFIG, REG_VAL_BIT_ACCEL_DLPCFG_6 | REG_VAL_BIT_ACCEL_FS_2g | REG_VAL_BIT_ACCEL_DLPF)
        # back to user bank 0
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)
        time.sleep(0.1)
        self.icm20948GyroOffset()
        self.icm20948MagCheck()
        self.icm20948WriteSecondary(I2C_ADD_ICM20948_AK09916 | I2C_ADD_ICM20948_AK09916_WRITE, REG_ADD_MAG_CNTL2, REG_VAL_MAG_MODE_20HZ)
    def icm20948_Gyro_Accel_Read(self):
        # Burst-read 12 bytes (accel XYZ + gyro XYZ, big-endian 16-bit) and
        # fold them into the module-level Accel/Gyro lists.
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)
        data = self._read_block(REG_ADD_ACCEL_XOUT_H, 12)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_2)
        Accel[0] = (data[0] << 8) | data[1]
        Accel[1] = (data[2] << 8) | data[3]
        Accel[2] = (data[4] << 8) | data[5]
        Gyro[0] = ((data[6] << 8) | data[7]) - GyroOffset[0]
        Gyro[1] = ((data[8] << 8) | data[9]) - GyroOffset[1]
        Gyro[2] = ((data[10] << 8) | data[11]) - GyroOffset[2]
        # Manual two's-complement wrap: Python ints don't overflow at 16 bits.
        if Accel[0] >= 32767:  # Solve the problem that Python shift will not overflow
            Accel[0] = Accel[0] - 65535
        elif Accel[0] <= -32767:
            Accel[0] = Accel[0] + 65535
        if Accel[1] >= 32767:
            Accel[1] = Accel[1] - 65535
        elif Accel[1] <= -32767:
            Accel[1] = Accel[1] + 65535
        if Accel[2] >= 32767:
            Accel[2] = Accel[2] - 65535
        elif Accel[2] <= -32767:
            Accel[2] = Accel[2] + 65535
        if Gyro[0] >= 32767:
            Gyro[0] = Gyro[0] - 65535
        elif Gyro[0] <= -32767:
            Gyro[0] = Gyro[0] + 65535
        if Gyro[1] >= 32767:
            Gyro[1] = Gyro[1] - 65535
        elif Gyro[1] <= -32767:
            Gyro[1] = Gyro[1] + 65535
        if Gyro[2] >= 32767:
            Gyro[2] = Gyro[2] - 65535
        elif Gyro[2] <= -32767:
            Gyro[2] = Gyro[2] + 65535
    def icm20948MagRead(self):
        # Poll the magnetometer's status register (up to 20 tries) for data
        # ready, then average 8 samples per axis into Mag.
        counter = 20
        while (counter > 0):
            time.sleep(0.01)
            self.icm20948ReadSecondary(I2C_ADD_ICM20948_AK09916 | I2C_ADD_ICM20948_AK09916_READ, REG_ADD_MAG_ST2, 1)
            if ((pu8data[0] & 0x01) != 0):
                break
            counter -= 1
        if counter != 0:
            for i in range(0, 8):
                self.icm20948ReadSecondary(I2C_ADD_ICM20948_AK09916 | I2C_ADD_ICM20948_AK09916_READ, REG_ADD_MAG_DATA, MAG_DATA_LEN)
                U8tempX[i] = (pu8data[1] << 8) | pu8data[0]
                U8tempY[i] = (pu8data[3] << 8) | pu8data[2]
                U8tempZ[i] = (pu8data[5] << 8) | pu8data[4]
            # Y and Z are negated to align the magnetometer axes with the
            # accel/gyro frame.
            Mag[0] = (U8tempX[0] + U8tempX[1] + U8tempX[2] + U8tempX[3] + U8tempX[4] + U8tempX[5] + U8tempX[6] + U8tempX[7]) / 8
            Mag[1] = -(U8tempY[0] + U8tempY[1] + U8tempY[2] + U8tempY[3] + U8tempY[4] + U8tempY[5] + U8tempY[6] + U8tempY[7]) / 8
            Mag[2] = -(U8tempZ[0] + U8tempZ[1] + U8tempZ[2] + U8tempZ[3] + U8tempZ[4] + U8tempZ[5] + U8tempZ[6] + U8tempZ[7]) / 8
        if Mag[0] >= 32767:  # Solve the problem that Python shift will not overflow
            Mag[0] = Mag[0] - 65535
        elif Mag[0] <= -32767:
            Mag[0] = Mag[0] + 65535
        if Mag[1] >= 32767:
            Mag[1] = Mag[1] - 65535
        elif Mag[1] <= -32767:
            Mag[1] = Mag[1] + 65535
        if Mag[2] >= 32767:
            Mag[2] = Mag[2] - 65535
        elif Mag[2] <= -32767:
            Mag[2] = Mag[2] + 65535
    def icm20948ReadSecondary(self, u8I2CAddr, u8RegAddr, u8Len):
        # Read u8Len bytes from the magnetometer via the aux I2C master:
        # program SLV0, pulse the I2C master enable, then fetch the data
        # from EXT_SENS_DATA into pu8data.
        u8Temp = 0
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_3)  # swtich bank3
        self._write_byte(REG_ADD_I2C_SLV0_ADDR, u8I2CAddr)
        self._write_byte(REG_ADD_I2C_SLV0_REG, u8RegAddr)
        self._write_byte(REG_ADD_I2C_SLV0_CTRL, REG_VAL_BIT_SLV0_EN | u8Len)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)  # swtich bank0
        u8Temp = self._read_byte(REG_ADD_USER_CTRL)
        u8Temp |= REG_VAL_BIT_I2C_MST_EN
        self._write_byte(REG_ADD_USER_CTRL, u8Temp)
        time.sleep(0.01)
        u8Temp &= ~REG_VAL_BIT_I2C_MST_EN
        self._write_byte(REG_ADD_USER_CTRL, u8Temp)
        for i in range(0, u8Len):
            pu8data[i] = self._read_byte(REG_ADD_EXT_SENS_DATA_00 + i)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_3)  # swtich bank3
        u8Temp = self._read_byte(REG_ADD_I2C_SLV0_CTRL)
        u8Temp &= ~((REG_VAL_BIT_I2C_MST_EN) & (REG_VAL_BIT_MASK_LEN))
        self._write_byte(REG_ADD_I2C_SLV0_CTRL, u8Temp)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)  # swtich bank0
    def icm20948WriteSecondary(self, u8I2CAddr, u8RegAddr, u8data):
        # Write one byte to the magnetometer via aux I2C master slot SLV1.
        u8Temp = 0
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_3)  # swtich bank3
        self._write_byte(REG_ADD_I2C_SLV1_ADDR, u8I2CAddr)
        self._write_byte(REG_ADD_I2C_SLV1_REG, u8RegAddr)
        self._write_byte(REG_ADD_I2C_SLV1_DO, u8data)
        self._write_byte(REG_ADD_I2C_SLV1_CTRL, REG_VAL_BIT_SLV0_EN | 1)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)  # swtich bank0
        u8Temp = self._read_byte(REG_ADD_USER_CTRL)
        u8Temp |= REG_VAL_BIT_I2C_MST_EN
        self._write_byte(REG_ADD_USER_CTRL, u8Temp)
        time.sleep(0.01)
        u8Temp &= ~REG_VAL_BIT_I2C_MST_EN
        self._write_byte(REG_ADD_USER_CTRL, u8Temp)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_3)  # swtich bank3
        u8Temp = self._read_byte(REG_ADD_I2C_SLV0_CTRL)
        u8Temp &= ~((REG_VAL_BIT_I2C_MST_EN) & (REG_VAL_BIT_MASK_LEN))
        self._write_byte(REG_ADD_I2C_SLV0_CTRL, u8Temp)
        self._write_byte(REG_ADD_REG_BANK_SEL, REG_VAL_REG_BANK_0)  # swtich bank0
    def icm20948GyroOffset(self):
        # Average 32 gyro samples at rest; >>5 divides by 32 to produce the
        # per-axis zero-rate offset subtracted in icm20948_Gyro_Accel_Read.
        s32TempGx = 0
        s32TempGy = 0
        s32TempGz = 0
        for i in range(0, 32):
            self.icm20948_Gyro_Accel_Read()
            s32TempGx += Gyro[0]
            s32TempGy += Gyro[1]
            s32TempGz += Gyro[2]
            time.sleep(0.01)
        GyroOffset[0] = s32TempGx >> 5
        GyroOffset[1] = s32TempGy >> 5
        GyroOffset[2] = s32TempGz >> 5
    def _read_byte(self, cmd):
        # Read one byte from device register `cmd`.
        return self._bus.read_byte_data(self._address, cmd)
    def _read_block(self, reg, length=1):
        # Read `length` consecutive bytes starting at register `reg`.
        return self._bus.read_i2c_block_data(self._address, reg, length)
    def _read_u16(self, cmd):
        # Read a little-endian 16-bit value from two consecutive registers.
        LSB = self._bus.read_byte_data(self._address, cmd)
        MSB = self._bus.read_byte_data(self._address, cmd + 1)
        return (MSB << 8) + LSB
    def _write_byte(self, cmd, val):
        self._bus.write_byte_data(self._address, cmd, val)
        # Short settle delay between register writes.
        time.sleep(0.0001)
    def imuAHRSupdate(self, gx, gy, gz, ax, ay, az, mx, my, mz):
        # Mahony AHRS fusion step: updates the global quaternion q0..q3
        # from gyro (rad/s), accel, and magnetometer readings.
        # halfT is half the assumed sample period.
        norm = 0.0
        hx = hy = hz = bx = bz = 0.0
        vx = vy = vz = wx = wy = wz = 0.0
        exInt = eyInt = ezInt = 0.0
        ex = ey = ez = 0.0
        halfT = 0.024
        global q0
        global q1
        global q2
        global q3
        q0q0 = q0 * q0
        q0q1 = q0 * q1
        q0q2 = q0 * q2
        q0q3 = q0 * q3
        q1q1 = q1 * q1
        q1q2 = q1 * q2
        q1q3 = q1 * q3
        q2q2 = q2 * q2
        q2q3 = q2 * q3
        q3q3 = q3 * q3
        # Normalise the accelerometer and magnetometer vectors.
        norm = float(1 / math.sqrt(ax * ax + ay * ay + az * az))
        ax = ax * norm
        ay = ay * norm
        az = az * norm
        norm = float(1 / math.sqrt(mx * mx + my * my + mz * mz))
        mx = mx * norm
        my = my * norm
        mz = mz * norm
        # compute reference direction of flux
        hx = 2 * mx * (0.5 - q2q2 - q3q3) + 2 * my * (q1q2 - q0q3) + 2 * mz * (q1q3 + q0q2)
        hy = 2 * mx * (q1q2 + q0q3) + 2 * my * (0.5 - q1q1 - q3q3) + 2 * mz * (q2q3 - q0q1)
        hz = 2 * mx * (q1q3 - q0q2) + 2 * my * (q2q3 + q0q1) + 2 * mz * (0.5 - q1q1 - q2q2)
        bx = math.sqrt((hx * hx) + (hy * hy))
        bz = hz
        # estimated direction of gravity and flux (v and w)
        vx = 2 * (q1q3 - q0q2)
        vy = 2 * (q0q1 + q2q3)
        vz = q0q0 - q1q1 - q2q2 + q3q3
        wx = 2 * bx * (0.5 - q2q2 - q3q3) + 2 * bz * (q1q3 - q0q2)
        wy = 2 * bx * (q1q2 - q0q3) + 2 * bz * (q0q1 + q2q3)
        wz = 2 * bx * (q0q2 + q1q3) + 2 * bz * (0.5 - q1q1 - q2q2)
        # error is sum of cross product between reference direction of fields and direction measured by sensors
        ex = (ay * vz - az * vy) + (my * wz - mz * wy)
        ey = (az * vx - ax * vz) + (mz * wx - mx * wz)
        ez = (ax * vy - ay * vx) + (mx * wy - my * wx)
        if (ex != 0.0 and ey != 0.0 and ez != 0.0):
            # PI feedback on the measured error (integral terms are local,
            # so the integral effectively resets each call).
            exInt = exInt + ex * Ki * halfT
            eyInt = eyInt + ey * Ki * halfT
            ezInt = ezInt + ez * Ki * halfT
            gx = gx + Kp * ex + exInt
            gy = gy + Kp * ey + eyInt
            gz = gz + Kp * ez + ezInt
        # Integrate the quaternion rate of change, then renormalise.
        q0 = q0 + (-q1 * gx - q2 * gy - q3 * gz) * halfT
        q1 = q1 + (q0 * gx + q2 * gz - q3 * gy) * halfT
        q2 = q2 + (q0 * gy - q1 * gz + q3 * gx) * halfT
        q3 = q3 + (q0 * gz + q1 * gy - q2 * gx) * halfT
        norm = float(1 / math.sqrt(q0 * q0 + q1 * q1 + q2 * q2 + q3 * q3))
        q0 = q0 * norm
        q1 = q1 * norm
        q2 = q2 * norm
        q3 = q3 * norm
    def icm20948Check(self):
        # True when the WHO_AM_I register returns the expected 0xEA.
        bRet = false
        if REG_VAL_WIA == self._read_byte(REG_ADD_WIA):
            bRet = true
        return bRet
    def icm20948MagCheck(self):
        # Verify the AK09916 magnetometer's two WHO_AM_I bytes.
        # NOTE(review): bRet is only assigned inside the if — a failed check
        # raises NameError instead of returning false; confirm and fix.
        self.icm20948ReadSecondary(I2C_ADD_ICM20948_AK09916 | I2C_ADD_ICM20948_AK09916_READ, REG_ADD_MAG_WIA1, 2)
        if (pu8data[0] == REG_VAL_MAG_WIA1) and (pu8data[1] == REG_VAL_MAG_WIA2):
            bRet = true
        return bRet
    def icm20948CalAvgValue(self):
        # Pack scaled gyro (/32.8 -> dps at the configured full scale),
        # raw accel and averaged mag into the caller-provided MotionVal
        # global (defined by the __main__ driver script).
        MotionVal[0] = Gyro[0] / 32.8
        MotionVal[1] = Gyro[1] / 32.8
        MotionVal[2] = Gyro[2] / 32.8
        MotionVal[3] = Accel[0]
        MotionVal[4] = Accel[1]
        MotionVal[5] = Accel[2]
        MotionVal[6] = Mag[0]
        MotionVal[7] = Mag[1]
        MotionVal[8] = Mag[2]
if __name__ == '__main__':
    # Driver script: sample the IMU + SHTC3 at ~10 Hz, print readings and
    # log them to IMU-data.csv until 100000 rows have been written.
    import csv
    import time
    shtc3 = SHTC3()
    print("\nSense HAT Test Program ...\n")
    MotionVal = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    icm20948 = ICM20948()
    dateTime = datetime.now()
    dateTimeString = str(dateTime)
    filename = "IMU-data.csv"
    file = open(filename, 'w')
    writer = csv.writer(file)
    # NOTE(review): the header has no timestamp column but each data row
    # below starts with `now` — columns are shifted by one; confirm intent.
    header = ["AccX", "AccY", "AccZ", "roll", "pitch", "yaw", "Temp", "Humidity", "MagX", "MagY", "MagZ", "GyroX", "GyroY", "GyroZ"]
    writer.writerow(header)
    numoflines = 1
    while True:
        icm20948.icm20948_Gyro_Accel_Read()
        icm20948.icm20948MagRead()
        icm20948.icm20948CalAvgValue()
        time.sleep(0.1)
        # Gyro is converted to rad/s (0.0175 = pi/180 * 1/res) for the AHRS.
        icm20948.imuAHRSupdate(MotionVal[0] * 0.0175, MotionVal[1] * 0.0175, MotionVal[2] * 0.0175,
                               MotionVal[3], MotionVal[4], MotionVal[5],
                               MotionVal[6], MotionVal[7], MotionVal[8])
        # Quaternion -> Euler angles in degrees (57.3 ~= 180/pi).
        pitch = math.asin(-2 * q1 * q3 + 2 * q0 * q2) * 57.3
        roll = math.atan2(2 * q2 * q3 + 2 * q0 * q1, -2 * q1 * q1 - 2 * q2 * q2 + 1) * 57.3
        yaw = math.atan2(-2 * q1 * q2 - 2 * q0 * q3, 2 * q2 * q2 + 2 * q3 * q3 - 1) * 57.3
        print("\r\n /-------------------------------------------------------------/ \r\n")
        now = datetime.now()
        print(now)
        print('\r\n Roll = %.2f , Pitch = %.2f , Yaw = %.2f\r\n' % (roll, pitch, yaw))
        print('\r\nAcceleration: X = %d , Y = %d , Z = %d\r\n' % (Accel[0] / 16384, Accel[1] / 16384, Accel[2] / 16384))
        print('\r\nGyroscope: X = %d , Y = %d , Z = %d\r\n' % (Gyro[0] / 32.8, Gyro[1] / 32.8, Gyro[2] / 32.8))
        print('\r\nMagnetic: X = %d , Y = %d , Z = %d' % ((Mag[0]), Mag[1], Mag[2]))
        print('Temperature = %6.2f°C , Humidity = %6.2f%%' % (shtc3.SHTC3_Read_Temperature(), shtc3.SHTC3_Read_Humidity()))
        data = [now, Accel[0] / 16384, Accel[1] / 16384, Accel[2] / 16384, roll, pitch, yaw, shtc3.SHTC3_Read_Temperature(), shtc3.SHTC3_Read_Humidity(), Mag[0], Mag[1], Mag[2], Gyro[0] / 32.8, Gyro[1] / 32.8, Gyro[2] / 32.8]
        writer.writerow(data)
        numoflines = numoflines + 1
        if numoflines == 100000:
            # Bug fix: the original closed the file but kept looping, so the
            # next writer.writerow raised ValueError on a closed file.
            # Close and leave the loop instead.
            file.close()
            break
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,815
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/ProgressReport2/Testing.py
|
import glob
import os
#os.system('python3 SendData.py ' + "IMU-data-2021-10-25-20:38:19.csv" )
# Smoke test: run the receive/decrypt stage on a previously produced
# ciphertext file.
# NOTE(review): os.system with a concatenated command string — prefer
# subprocess.run([...]) if arguments could ever contain spaces; also the
# `glob` import above appears unused.
os.system('python3 RecieveData.py' + " encrypted_fileofIMU.txt")
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,816
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/ProgressReport2/SHTC3.py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import ctypes
class SHTC3:
    """ctypes wrapper around the SHTC3 temperature/humidity sensor library.

    Loads ``./SHTC3.so`` from the current working directory, so the script
    must be started from the directory containing the shared object.
    """
    def __init__(self):
        self.dll = ctypes.CDLL("./SHTC3.so")
        init = self.dll.init
        init.restype = ctypes.c_int
        init.argtypes = [ctypes.c_void_p]
        init(None)
    def SHTC3_Read_Temperature(self):
        # Returns the temperature as a C float (presumably degrees Celsius,
        # per the caller's print format — confirm against the C library).
        temperature = self.dll.SHTC3_Read_TH
        temperature.restype = ctypes.c_float
        temperature.argtypes = [ctypes.c_void_p]
        return temperature(None)
    def SHTC3_Read_Humidity(self):
        # Returns relative humidity as a C float (percent, per caller format).
        humidity = self.dll.SHTC3_Read_RH
        humidity.restype = ctypes.c_float
        humidity.argtypes = [ctypes.c_void_p]
        return humidity(None)
if __name__ == "__main__":
    shtc3 = SHTC3()
    # Poll the sensor forever; stop with Ctrl-C.
    while True:
        print('Temperature = %6.2f°C , Humidity = %6.2f%%' % (shtc3.SHTC3_Read_Temperature(), shtc3.SHTC3_Read_Humidity()))
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,817
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/All source files/Encrypt_data.py
|
from cryptography.fernet import Fernet
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
import time
import sys
compressed_files = ['compressedData_of_2018-09-19-03_57_11_VN100.csv', 'compressedData_of_2018-09-19-04_22_21_VN100.csv', 'compressedData_of_2018-09-19-06_28_11_VN100.csv', 'compressedData_of_2018-09-19-06_53_21_VN100.csv', 'compressedData_of_2018-09-19-08_59_11_VN100.csv', 'compressedData_of_2018-09-19-09_24_21_VN100.csv', 'compressedData_of_2018-09-19-09_49_31_VN100.csv', 'compressedData_of_2018-09-19-11_55_21_VN100.csv', 'compressedData_of_2018-09-19-12_20_31_VN100.csv']
def main():
    """Encrypt each entry of compressed_files and append the ciphertext to its own output file."""
    for name in compressed_files:
        print(name)
        # Append mode preserves output across repeated runs, as before.
        with open('eTest.txt_of_' + name + '.txt', 'ab') as sink:
            sink.write(encrypt(name))
def encrypt(text):
    """Fernet-encrypt *text* and RSA-wrap the symmetric key.

    Side effects (same files as the original):
      - symetricKey.key         : freshly generated Fernet key
      - encrypted_fileofIMU.txt : the Fernet ciphertext
      - encryptedkey            : the Fernet key, RSA-OAEP-encrypted with RSAkey.key

    Returns the Fernet ciphertext (bytes).

    NOTE(review): callers pass a file *name*, so the name string is what gets
    encrypted — the commented-out open() in the original suggests the file
    contents were intended; confirm upstream.
    """
    # Generate the symmetric key and persist it. Context managers close every
    # handle — the original leaked all five file objects it opened.
    key = Fernet.generate_key()
    with open("symetricKey.key", "wb") as k:
        k.write(key)
    start_time = time.time()
    # Re-read the key from disk, as the original did.
    with open("symetricKey.key", "rb") as skey:
        key = skey.read()
    cipher = Fernet(key)
    # Bug fix: Fernet.encrypt() requires bytes; the original passed the str
    # straight through and raised TypeError. Encode str input.
    myfiledata = text.encode() if isinstance(text, str) else text
    encrypted_data = cipher.encrypt(myfiledata)
    # Write the ciphertext, truncating any previous run's output.
    with open("encrypted_fileofIMU.txt", "wb") as edata:
        edata.write(encrypted_data)
    # Wrap the symmetric key with the RSA public key received from the peer.
    with open("RSAkey.key") as public_keydata:
        public_key = RSA.importKey(public_keydata.read())
    encryptor = PKCS1_OAEP.new(public_key)
    encrypted_key = encryptor.encrypt(key)
    with open("encryptedkey", "wb") as ekey:
        ekey.write(encrypted_key)
    # Benchmark output, kept identical to the original.
    print("Encryption took: " )
    print("--- %s seconds ---" % (time.time() - start_time))
    return encrypted_data
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,818
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/All source files/Testing.py
|
import glob
import os

# End-to-end driver: encrypt/send the IMU capture, then receive/decrypt it.
os.system("python3 SendData.py IMU-data.csv")
os.system("python3 RecieveData.py encrypted_fileofIMU.txt")
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,819
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/EncryptionAlgo/Testing.py
|
import glob
import os

# Batch driver: feed each compressed data set to main.py as a CLI argument.
#path = "CSV_Files"
data = ['2018-09-19-03_57_11_VN100.csv', '2018-09-19-04_22_21_VN100.csv', '2018-09-19-06_28_11_VN100.csv', '2018-09-19-06_53_21_VN100.csv', '2018-09-19-08_59_11_VN100.csv', '2018-09-19-09_24_21_VN100.csv', '2018-09-19-09_49_31_VN100.csv', '2018-09-19-11_55_21_VN100.csv', '2018-09-19-12_20_31_VN100.csv']
# The compressed names follow a fixed pattern, so derive them from `data`
# instead of repeating the whole list by hand.
compressed_files = ['compressedData_of_' + name + '.txt' for name in data]
# csv files in the path
#files = glob.glob(path + "/*.csv")
for name in compressed_files:
    os.system('python3 main.py ' + name)
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,820
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/EncryptionAlgo/Decrypt_data.py
|
from cryptography.fernet import Fernet
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
import time
import sys
def decrypt(text):
    """Decrypt encrypted_fileof<text> and write the plaintext to decrypted_data.txt.

    Steps: load the RSA private key, unwrap the Fernet key stored in
    'encryptedkey', then Fernet-decrypt the payload. Prints elapsed time.
    """
    start_time = time.time()
    # Context managers throughout — the original leaked every file handle.
    with open("Privatekey.key", "rb") as prkey:
        private_key = RSA.import_key(prkey.read())
    # Unwrap the symmetric (Fernet) key with the RSA private key.
    with open("encryptedkey", "rb") as e:
        encrypted_key = e.read()
    decryptor = PKCS1_OAEP.new(private_key)
    decrypted = decryptor.decrypt(encrypted_key)
    # Build the Fernet cipher from the recovered key and decrypt the payload.
    cipher = Fernet(decrypted)
    with open("encrypted_fileof" + str(text), "rb") as encrypted_data:
        edata = encrypted_data.read()
    decrypted_data = cipher.decrypt(edata)
    # Persist the plaintext, replacing any previous run's output.
    with open("decrypted_data.txt", "wb") as out:
        out.write(decrypted_data)
    print("Decryption took: ")
    print("--- %s seconds ---" % (time.time() - start_time))
if __name__ == '__main__':
    # CLI entry point: the first argument is the payload-name suffix,
    # e.g. "IMU.txt" makes decrypt() read "encrypted_fileofIMU.txt".
    inputfile = sys.argv[1]
    decrypt(inputfile)
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,821
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/create_keys.py
|
from Crypto.PublicKey import RSA
from cryptography.fernet import Fernet
import time
def createkeys():
    """Generate and persist a Fernet key plus a 1024-bit RSA key pair.

    Writes symetricKey.key, Publickey.key and Privatekey.key, and prints
    how long key generation took.

    NOTE(review): RSA-1024 is weak by current standards; consider 2048+
    bits if interoperability allows.
    """
    start_time = time.time()
    # Symmetric (Fernet) key first; context managers close every handle
    # (the original left the two RSA key files without explicit close
    # semantics on error paths).
    key = Fernet.generate_key()
    with open("symetricKey.key", "wb") as k:
        k.write(key)
    # RSA public/private key pair.
    keyPair = RSA.generate(1024)
    pubKey = keyPair.publickey()
    with open("Publickey.key", "wb") as p:
        p.write(pubKey.exportKey())
    # Bug fix: the private key was written as "Privatekey.Key" (capital K)
    # while the decryption side opens "Privatekey.key" — a mismatch that
    # fails on case-sensitive file systems. Use the lowercase name.
    with open("Privatekey.key", "wb") as pr:
        pr.write(keyPair.exportKey())
    print("The time to create keys took: ")
    print("--- %s seconds ---" % (time.time() - start_time))
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,822
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/Pi/md5sum.py
|
import hashlib
def file_as_bytes(file):
    """Read *file* to the end and return its bytes; the handle is always closed."""
    try:
        return file.read()
    finally:
        file.close()
# Print MD5 digests of both key files — presumably so the two machines can
# compare checksums and confirm the keys transferred intact; verify usage.
print (hashlib.md5(file_as_bytes(open("RSAkey.key", 'rb'))).hexdigest())
print (hashlib.md5(file_as_bytes(open("Publickey.key", 'rb'))).hexdigest())
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,823
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/Laptop/Server.py
|
#import necessary keys
import socket
from Crypto.PublicKey import RSA
import os
import sys
import io
import time
print(socket.gethostname())
def make_key():
    """Generate a 1024-bit RSA key pair and write both halves to disk.

    Writes Publickey.key (sent to the client) and Privatekey.key (kept for
    local decryption), printing how long generation took.

    NOTE(review): RSA-1024 is weak by current standards; prefer 2048+ bits.
    """
    start_time = time.time()
    keyPair = RSA.generate(1024)
    pubKey = keyPair.publickey()
    # Context managers close the handles the original left open.
    with open("Publickey.key", "wb") as p:
        p.write(pubKey.exportKey())
    # Bug fix: the private key was saved as "Privatekey.Key" (capital K)
    # while the decryption code opens "Privatekey.key" — FileNotFoundError
    # on case-sensitive file systems. Use the lowercase name consistently.
    with open("Privatekey.key", "wb") as pr:
        pr.write(keyPair.exportKey())
    print("The time to create keys took: ")
    print("--- %s seconds ---" % (time.time() - start_time))
HOST = '127.0.0.1' # Standard loopback interface address (localhost)
PORT = 12345 # Port to listen on (non-privileged ports are > 1023)
# Protocol (one client per run): accept -> send fresh RSA public key ->
# receive the RSA-wrapped Fernet key -> receive the ciphertext -> decrypt
# via RecieveData.py.
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: #this creates a socket of type IPV4 and Sock_STream means TCP connection
    s.bind((HOST, PORT))
    s.listen() #allows for server to wait for connections
    conn, addr = s.accept() #blocks the rest and waitts for a client to connect, once this happens it populates the values on the left
    make_key()
    print(len(open("Publickey.key", "rb").read()))
    with conn:
        # Ship the RSA public key to the client (at most 1024 bytes).
        p = open("Publickey.key" , "rb")
        keydata = p.read(1024)
        conn.sendall(keydata)
        print('Connected by', addr)
        # Receive the RSA-wrapped Fernet key (128 bytes for RSA-1024).
        encryptedkey = conn.recv(128)
        with open ("encryptedkey" , "wb") as key:
            key.truncate(0)
            key.write(encryptedkey)
        print("key recieved")
        print (len(open("encryptedkey", "rb").read()))
        # NOTE(review): a single recv() is assumed to deliver the whole
        # payload — TCP gives no such guarantee for large messages; confirm.
        data = conn.recv(1500000) #this waits to recieve the data sent by the client
        with open ("encrypted_fileofIMU.txt" , "wb") as r:
            r.write(data)
        print("data recieved")
        os.system('python3 RecieveData.py' + " encrypted_fileofIMU.txt")
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,824
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/Final Submission/Pi/Client.py
|
import socket
import SendData
import os
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
HOST = "127.0.0.1"  # The server's hostname or IP address
PORT = 12345  # The port used by the server
# Protocol mirror of Server.py: connect -> receive RSA public key ->
# compress+encrypt the IMU capture (SendData) -> send the wrapped Fernet
# key, then the ciphertext.
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.connect((HOST, PORT))
    #s.sendall(b'Hello, world')
    # Receive the server's RSA public key and persist it locally.
    keydata = RSA.importKey(s.recv( 1024 ), passphrase=None)
    with open('Publickey.key','wb') as f:
        f.write(keydata.export_key('PEM'))
    print('Received RSA key')
    print(len(open("Publickey.key", "rb").read()))
    # Compress and encrypt the capture; SendData writes "encryptedkey"
    # and "encrypted_fileofIMU.txt" as side effects.
    SendData.sendData("IMU-data.csv")
    # Send the RSA-wrapped symmetric key first.
    q = open("encryptedkey", "rb")
    print ("key length=" , (len(open("encryptedkey", "rb").read())))
    key = q.read()
    print(len(key))
    print("sending encrypted key")
    s.send(key)
    # Then send the ciphertext itself.
    p = open("encrypted_fileofIMU.txt" , "rb")
    data = p.read()
    print("Sending data to laptop")
    s.sendall(data)
    s.close()
#
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,584,825
|
TainedeBlaze/EEE3097S-Project
|
refs/heads/main
|
/ProgressReport2/Decompression.py
|
# Zlib decompression algorithm for csv data files.
# Script written by Michael Altshuler (ALTMIC003) and Taine de Buys (DBYTAI001)
import zlib, sys, time, base64;
import binascii
import Compression
import os
#main function executes decompress() function, and further appends the decompressed data to new text files.
def main():
    """Decompress every file produced by the compression stage.

    Bug fix: the original called decompress() with one argument although its
    signature requires (compressed_data, outputfileName), so every call
    raised TypeError; it then appended decompress()'s None return value to a
    second output file. Pass both arguments and let decompress() do the
    writing itself.
    """
    for name in Compression.data:
        # decompress() appends ".txt" to its first argument and writes
        # 'decompressedData_of_<name>.txt' on its own.
        decompress('compressedData_of_' + name, name)
        print("Successful decompression. Output file: " + name)
#function that serves the purpose of decompressing incoming decrypted compressed files.
def decompress(compressed_data, outputfileName):
    """Decompress <compressed_data>.txt (base64-wrapped zlib) to a text file.

    Reads compressed_data + ".txt" and appends the decompressed bytes to
    'decompressedData_of_' + outputfileName + '.txt'.

    NOTE(review): output stays in append mode ('ab') as in the original, so
    repeated runs accumulate — confirm that is intended.
    """
    with open(compressed_data + ".txt", 'rb') as fileobj:
        payload = fileobj.read()
    decompressed = zlib.decompress(base64.b64decode(payload))
    with open('decompressedData_of_' + outputfileName + '.txt', 'ab') as out:
        out.write(decompressed)
    # Bug fix: the original printed sys.getsizeof() of the *file object*,
    # which is meaningless; report the actual decompressed byte count.
    print('Size of decompressed file: ', len(decompressed))
    print("Successful decompression.")
|
{"/Final Submission/Pi/SendData.py": ["/create_keys.py", "/Compression.py"], "/ProgressReport2/Decompression.py": ["/Compression.py"]}
|
36,648,325
|
moh-IA/POOLeJardinier
|
refs/heads/main
|
/pimiento.py
|
from vegetable import Vegetable
class Pimiento(Vegetable):
    """A pimiento plant tracking how many seeds it carries."""

    def __init__(self, seed=0):
        # Seed count held by this plant; defaults to none planted.
        self.seed = seed

    def grow(self, seed=0):
        """Record *seed* seeds on this plant and return the new count."""
        self.seed = seed
        return self.seed
|
{"/garden.py": ["/vegetable.py"], "/pimiento.py": ["/vegetable.py"], "/vegetable_factory.py": ["/pimiento.py"], "/main.py": ["/garden.py", "/jardiner.py"], "/jardiner.py": ["/vegetable_factory.py", "/garden.py"]}
|
36,648,326
|
moh-IA/POOLeJardinier
|
refs/heads/main
|
/vegetable_factory.py
|
from tomato import Tomato
from potato import Potato
from pimiento import Pimiento
class VegetableFactory():
    """Factory mapping a vegetable name to a freshly constructed instance."""

    def get_vegetable(self, name):
        """Return a new vegetable for *name*, or None when the name is unknown."""
        constructors = {
            'Tomato': Tomato,
            'Potato': Potato,
            'Pimiento': Pimiento,
        }
        ctor = constructors.get(name)
        return ctor() if ctor is not None else None
|
{"/garden.py": ["/vegetable.py"], "/pimiento.py": ["/vegetable.py"], "/vegetable_factory.py": ["/pimiento.py"], "/main.py": ["/garden.py", "/jardiner.py"], "/jardiner.py": ["/vegetable_factory.py", "/garden.py"]}
|
36,648,327
|
moh-IA/POOLeJardinier
|
refs/heads/main
|
/main.py
|
# from tomato import Tomato
from potato import Potato
from garden import Garden
from jardiner import Jardinier
if __name__ == "__main__":
    # Interactive demo: the gardener prompts for a vegetable name, grows it,
    # and plants it in the garden alongside a potato grown with 27 seeds.
    j = Jardinier()
    veg = j.planter()
    veg.grow(3)
    g = Garden()
    g.add(veg)
    p = Potato()
    p.grow(27)
    g.add(p)
    # Report the total seed count and how many vegetables were accepted.
    print(g.seeds)
    print(len(g.vegetables))
|
{"/garden.py": ["/vegetable.py"], "/pimiento.py": ["/vegetable.py"], "/vegetable_factory.py": ["/pimiento.py"], "/main.py": ["/garden.py", "/jardiner.py"], "/jardiner.py": ["/vegetable_factory.py", "/garden.py"]}
|
36,648,328
|
moh-IA/POOLeJardinier
|
refs/heads/main
|
/garden.py
|
from vegetable import Vegetable
class Garden():
    """A plot holding vegetables, capped at thirty seeds in total."""

    def __init__(self):
        # Running seed total and the vegetables currently planted.
        self.seeds = 0
        self.vegetables = []

    def _plant(self, vegetable):
        # Accept the vegetable only when its seeds fit under the 30-seed cap.
        if self.seeds + vegetable.seed <= 30:
            self.seeds += vegetable.seed
            self.vegetables.append(vegetable)
        else:
            print('The garden can not grow more than thirty seeds')

    def add(self, vegetable):
        """Try to plant *vegetable*; rejected when the seed cap would be exceeded."""
        return self._plant(vegetable)
|
{"/garden.py": ["/vegetable.py"], "/pimiento.py": ["/vegetable.py"], "/vegetable_factory.py": ["/pimiento.py"], "/main.py": ["/garden.py", "/jardiner.py"], "/jardiner.py": ["/vegetable_factory.py", "/garden.py"]}
|
36,648,329
|
moh-IA/POOLeJardinier
|
refs/heads/main
|
/vegetable.py
|
from abc import ABC, abstractmethod
class Vegetable(ABC):
    """
    Abstract base class for all garden vegetables.

    Concrete subclasses must implement grow().
    """
    @abstractmethod
    def grow(self, seed = 0):
        # Subclasses record *seed* seeds on the plant and return the count.
        pass
|
{"/garden.py": ["/vegetable.py"], "/pimiento.py": ["/vegetable.py"], "/vegetable_factory.py": ["/pimiento.py"], "/main.py": ["/garden.py", "/jardiner.py"], "/jardiner.py": ["/vegetable_factory.py", "/garden.py"]}
|
36,648,330
|
moh-IA/POOLeJardinier
|
refs/heads/main
|
/jardiner.py
|
from vegetable_factory import VegetableFactory
from garden import Garden
# from vegetable import Vegetable
class Jardinier():
    """A gardener who asks which vegetable to plant and creates it."""

    def __init__(self):
        # Factory used to turn a vegetable name into an instance.
        self.vf = VegetableFactory()

    def planter(self):
        """Prompt for a vegetable name and return the matching instance (or None)."""
        wanted = input("set the vegetable that want to plant :")
        return self.vf.get_vegetable(wanted)
|
{"/garden.py": ["/vegetable.py"], "/pimiento.py": ["/vegetable.py"], "/vegetable_factory.py": ["/pimiento.py"], "/main.py": ["/garden.py", "/jardiner.py"], "/jardiner.py": ["/vegetable_factory.py", "/garden.py"]}
|
36,684,484
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/users/signups.py
|
from django.contrib import messages
from django.shortcuts import redirect,render
from django.contrib.auth import login, logout, authenticate
from django.urls import reverse
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django import forms
from django.views.decorators.csrf import csrf_exempt
from shop.forms.users.signups import SignupForm
from django.conf import settings
import requests
@csrf_exempt
def signupView(request):
    """Render the sign-up page; on POST, verify reCAPTCHA, create the user and log them in.

    NOTE(review): @csrf_exempt disables Django's CSRF protection on this
    view — the reCAPTCHA check does not replace CSRF; confirm intentional.
    """
    form_signup = SignupForm()
    # Submit Signup
    if request.method == "POST":
        # Verify the reCAPTCHA token with Google before trusting the form data.
        payload = {'secret': settings.RECAPTCHA_SECRET_KEY,'response':request.POST['recaptcha_token_signUp']}
        respone = requests.post('https://www.google.com/recaptcha/api/siteverify',payload)
        if respone.json()['success']:
            form_signup = SignupForm(request.POST)
            # Check validation
            if form_signup.is_valid() :
                # Persist the new user account.
                form_signup.save()
                username = form_signup.cleaned_data.get('username')
                raw_password = form_signup.cleaned_data.get('password1')
                # image_form.user = username
                # image_form.save()
                # NOTE(review): if authenticate() returns None (e.g. a custom
                # backend rejects the fresh account), login() will fail — confirm.
                user = authenticate(username=username, password=raw_password)
                # auto login
                login(request, user)
                messages.add_message(request, messages.SUCCESS,'Sign Up Success!!',"success")
                response = redirect('home')
                return response
        else:
            messages.add_message(request, messages.ERROR, 'Recapcha timeout',"danger")
    # Contexts
    context = {
        'form_signup' : form_signup,
    }
    return render(request, 'page/auth/signup.html',context)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,485
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/workshop1/urls.py
|
"""workshop1 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import url
from shop.views.homepage import homepage
from shop.views.category import category
from shop.views.category import categoryFilter
from shop.views.product import product
from shop.views.contact import contact
from shop.views.about import about
from shop.views.users.signouts import signouts
from shop.views.users.signins import signInView
from shop.views.users.signups import signupView
from shop.views.users.profile import profileViews
# URL routing for the shop app: public pages, then authentication views.
urlpatterns = [
    path('admin/', admin.site.urls,name='admin'),
    # page
    path('',homepage,name = 'home'),
    path('category',category,name='category-list'),
    path('category/<int:pk>',categoryFilter,name='category-filter'),
    # path('category',search_category,name='category_search'),
    path('product/<int:cat_id>/<int:pk>',product,name='product'),
    path('contact',contact,name='contact'),
    path('about',about,name='about'),
    #authentication
    path('signout', signouts, name="signout"),
    path('signup', signupView, name='signup'),
    path('signIn', signInView, name='signin'),
    path('profile',profileViews,name='profile')
    # path('profile',profile, name="profile"),
]
# Serve user-uploaded media files (e.g. product/category images) via MEDIA_URL.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,486
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/models/Category.py
|
from django.db import models
from versatileimagefield.fields import VersatileImageField
class Category(models.Model):
    """Product category with an optional thumbnail and a soft-delete flag."""
    # Display name of the category.
    name = models.CharField(max_length=50)
    # Optional category image stored under media/image/categorys.
    image = VersatileImageField(max_length=255,upload_to='image/categorys',default='',blank=True, null=True)
    # image = models.ImageField(upload_to='image/categorys', max_length=900,default=None)
    # Short description text for the category.
    detail = models.CharField(max_length=225)
    # Soft-delete flag: the views only show categories with is_activate=True.
    is_activate = models.BooleanField(default=True)
    def __str__(self):
        return self.name
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,487
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/users/signouts.py
|
from django.contrib import messages
from django.shortcuts import redirect,render
from django.contrib.auth import login, logout, authenticate
from django.urls import reverse
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
def signouts(request):
    """Log the current user out, flash a confirmation, and redirect to sign-in."""
    logout(request)
    messages.info(request, "Logged out Successfully!")
    return redirect("signin")
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,488
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/users/signins.py
|
from django.contrib import messages
from django.shortcuts import redirect,render
from django.contrib.auth import login, logout, authenticate
from django.urls import reverse
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django import forms
from shop.forms.users.signins import SignInForm
def signInView(request):
    """Render the sign-in page; on POST, authenticate and start a session.

    On success redirects to 'home'; on failure flashes an error and
    re-renders the form.
    """
    form_signIn = SignInForm()
    if request.method == "POST":
        form_signIn = SignInForm(request.POST)
        # Check validation
        if form_signIn.is_valid():
            username = form_signIn.cleaned_data['username']
            password = form_signIn.cleaned_data['password']
            user = authenticate(request, username=username, password=password)
            # login success
            if user is not None:
                login(request, user)
                return redirect('home')
            else:
                # NOTE(review): shown for any bad credentials (wrong username
                # too), not only a wrong password — text kept for compatibility.
                messages.add_message(request, messages.ERROR, 'Wrong password!', "danger")
    # Bug fix: the original built this `context` dict but then passed a
    # second, inline dict to render(); use the one context consistently.
    context = {
        'form_signIn': form_signIn
    }
    return render(request, 'page/auth/signin.html', context)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,489
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/forms/users/signins.py
|
from django import forms
class SignInForm(forms.Form):
    """Plain (non-ModelForm) credential form for the sign-in page."""
    # Bootstrap-styled text input for the account name.
    username = forms.CharField(required=True,widget=forms.TextInput(attrs={'class': 'form-control'}))
    # NOTE(review): rendered as a TextInput with type=password; forms.PasswordInput
    # is the conventional widget and additionally avoids re-rendering the value.
    password = forms.CharField(required=True,widget=forms.TextInput(attrs={'class': 'form-control','type': 'password'}))
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,490
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/category.py
|
from django.http import request
from django.shortcuts import redirect, render
from shop.models.Category import Category
from shop.models.Product import Product
from django.db.models import Q
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
def category(request):
    """List all active products with name search, price sort and pagination."""
    list_product = Product.objects.filter(is_activate = True )
    # Result count is only shown when a search term was given.
    counters = None
    # search
    search_post = request.GET.get('search','')
    if search_post:
        list_product = list_product.filter(Q(name__icontains=search_post))
        counters = list_product.count()
    # sort
    # NOTE(review): sort='desc' (the default) orders by *ascending* price
    # (order_by('price')) — the parameter name and behavior look swapped;
    # confirm against the template before renaming anything.
    sort = request.GET.get('sort','desc')
    if sort == 'desc':
        list_product = list_product.order_by('price')
        text_sort = 'น้อยไปมาก'
    else:
        list_product = list_product.order_by('-price')
        text_sort = 'มากไปน้อย'
    # pagination
    paginator = Paginator(list_product, 8)
    page = request.GET.get('page',1)
    try:
        list_product = paginator.page(page)
    except PageNotAnInteger:
        list_product = paginator.page(1)
    except EmptyPage:
        list_product = paginator.page(paginator.num_pages)
    context = {
        'list_product':list_product,
        'search_post':search_post,
        'sort':sort,
        'page':page,
        'text_sort':text_sort,
        'counters':counters,
    }
    return render(request, 'page/category.html',context)
def categoryFilter(request, pk):
    """Category detail page: active products of category ``pk`` with search,
    price sort and pagination (mirrors ``category``).
    """
    category = Category.objects.filter(is_activate=True)
    products = Product.objects.filter(is_activate=True).filter(category=pk)
    title = category.get(pk=pk)
    counters = None

    # Optional case-insensitive name search within this category.
    search_post = request.GET.get('search', '')
    if search_post:
        products = products.filter(Q(name__icontains=search_post))
        counters = products.count()

    # Price ordering. NOTE(review): 'desc' maps to ascending price here,
    # matching the behaviour of ``category`` — kept as-is.
    sort = request.GET.get('sort', 'desc')
    if sort == 'desc':
        products = products.order_by('price')
        text_sort = 'น้อยไปมาก'
    else:
        products = products.order_by('-price')
        text_sort = 'มากไปน้อย'

    # Paginate (8 per page), clamping invalid page values.
    paginator = Paginator(products, 8)
    page = request.GET.get('page', '1')
    try:
        products = paginator.page(page)
    except PageNotAnInteger:
        products = paginator.page(1)
    except EmptyPage:
        products = paginator.page(paginator.num_pages)

    return render(request, 'page/category.html', {
        'list_product': products,
        'title': title,
        'page': page,
        'search_post': search_post,
        'text_sort': text_sort,
        'sort': sort,
        'pk': pk,
        'counters': counters,
    })
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,491
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/product.py
|
from django.http import request
from django.shortcuts import redirect, render
from shop.models.Product import Product
from shop.models.Category import Category
from shop.models.ImageProduct import ImageProduct
from shop.forms.comment import CommentForm
def product(request, pk, cat_id):
    """Product detail page: images, category title, top-8 recommendations
    and the comment form.

    ``cat_id`` is accepted for URL compatibility; the category is resolved
    from the product itself.
    """
    products = Product.objects.get(pk=pk, is_activate=True)
    images = ImageProduct.objects.filter(product=pk)
    category = Category.objects.filter(is_activate=True)
    title = category.get(name=products.category)
    recomment = (Product.objects.filter(is_activate=True)
                 .filter(is_recomment=True)
                 .order_by('price')[:8])

    # Empty comment form for the initial render.
    form_comment = CommentForm()
    if request.method == "POST":
        form_comment = CommentForm(request.POST)
        if form_comment.is_valid():
            comment = form_comment.save(commit=False)
            # Attach the author when logged in; anonymous comments are saved
            # with ``user`` unset (the form carries a display name).
            if request.user.is_authenticated:
                print(request.user.username)
                comment.user = request.user
            comment.product = products
            comment.save()
            # Present a fresh form after a successful post.
            form_comment = CommentForm()

    return render(request, 'page/product.html', {
        'products': products,
        'category': category,
        'images': images,
        'title': title,
        'list_product': recomment,
        'form_comment': form_comment,
    })
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,492
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/models/ImageUser.py
|
from django.db import models
from django.contrib.auth.models import User
from versatileimagefield.fields import VersatileImageField
class ImageUser(models.Model):
    """Avatar image attached one-to-one to a Django auth User."""
    # Optional image; empty-string default keeps pre-existing rows valid.
    images = VersatileImageField(max_length=255, upload_to='images/user',
                                 default='', blank=True, null=True)
    # Nullable so an ImageUser row can exist before being claimed.
    user = models.OneToOneField(User, related_name='images',
                                on_delete=models.CASCADE, default=None, null=True)

    def __str__(self):
        # Bug fix: ``user`` is nullable, so the original ``self.user.username``
        # raised AttributeError in admin lists for unclaimed rows.
        return self.user.username if self.user else 'ImageUser (no user)'
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,493
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/workshop1/context_processors.py
|
from django.conf import settings
from shop.models.ImageUser import ImageUser
from shop.models.Category import Category
def template_context(request):
    """Context processor exposing active categories and the current user's
    avatar to every template under the ``base`` key.

    Returns ``{'base': {'category': <QuerySet>, 'imageview': <ImageUser|None>}}``.
    """
    category = Category.objects.filter(is_activate=True)
    imageview = None
    if request.user.is_authenticated:
        try:
            imageview = ImageUser.objects.get(user=request.user)
        except ImageUser.DoesNotExist:
            # Bug fix: the original bare ``except: pass`` swallowed *every*
            # error (including programming and DB errors). Only "no avatar
            # yet" is an expected condition here.
            pass
    return {
        'base': {
            'category': category,
            'imageview': imageview,
        }
    }
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,494
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/forms/users/signups.py
|
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from shop.models.ImageUser import ImageUser
from django import forms
class SignupForm(UserCreationForm):
    """Registration form: username/passwords plus required email and names,
    all styled with Bootstrap's ``form-control`` class."""
    # Bug fix: the original used TextInput(attrs={'type': 'password'}), which
    # re-echoes the submitted password into the HTML ``value`` attribute when
    # the form re-renders with validation errors. PasswordInput renders
    # type="password" and never echoes the value back.
    password1 = forms.CharField(required=True, widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    password2 = forms.CharField(required=True, widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    email = forms.EmailField(max_length=254, required=True, widget=forms.TextInput(attrs={'class': 'form-control', 'type': 'email'}))
    first_name = forms.CharField(max_length=30, required=True, widget=forms.TextInput(attrs={'class': 'form-control'}))
    last_name = forms.CharField(max_length=30, required=True, widget=forms.TextInput(attrs={'class': 'form-control'}))

    class Meta:
        model = User
        fields = ['username', 'password1', 'password2', 'email', 'first_name', 'last_name']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The username field is inherited from UserCreationForm; style it here.
        self.fields['username'].widget.attrs.update({'class': 'form-control'})
class ImageUserForm(forms.ModelForm):
    """Upload or replace the avatar image of an ``ImageUser`` row."""
    images = forms.ImageField()

    class Meta:
        model = ImageUser
        fields = ['images']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Bootstrap styling for the file input.
        widget_attrs = self.fields['images'].widget.attrs
        widget_attrs.update({'class': 'form-control form-control-sm'})
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,495
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/homepage.py
|
from django.http import request
from django.shortcuts import redirect, render
from shop.models.Category import Category
from shop.models.Product import Product
def homepage(request):
    """Landing page: active categories (A-Z), all active products, and the
    eight cheapest recommended products."""
    category = Category.objects.filter(is_activate=True).order_by('name')
    list_products = Product.objects.filter(is_activate=True)
    recomment = (Product.objects.filter(is_activate=True)
                 .filter(is_recomment=True)
                 .order_by('price')[:8])
    return render(request, 'page/index.html', {
        'list_products': list_products,
        'category': category,
        'list_product': recomment,
    })
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,496
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/models/ImageProduct.py
|
from django.db import models
from shop.models.Product import Product
from versatileimagefield.fields import VersatileImageField
class ImageProduct(models.Model):
    """One image belonging to a product (a product may have several)."""
    # Optional image file; empty-string default keeps legacy rows valid.
    images = VersatileImageField(max_length=255, upload_to='images/products',
                                 default='', blank=True, null=True)
    # Nullable so images can be staged before being attached to a product.
    product = models.ForeignKey(Product, related_name='image',
                                on_delete=models.CASCADE, default=None, null=True)

    def __str__(self):
        # Bug fix: ``product`` is nullable, so the original ``self.product.name``
        # raised AttributeError in admin lists for orphaned rows.
        return self.product.name if self.product else 'ImageProduct (no product)'
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,497
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/contact.py
|
from django.http import request
from django.shortcuts import redirect, render
from shop.models.Contact import Contact
from shop.forms.contact import ContactForm
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
from django.contrib import messages
import requests
@csrf_exempt
def contact(request):
    """Contact page: shows enabled contact entries and accepts a
    reCAPTCHA-verified message submission."""
    contacts = Contact.objects.filter(is_enabled=True)
    form_contact = ContactForm()
    if request.method == 'POST':
        payload = {
            'secret': settings.RECAPTCHA_SECRET_KEY,
            # Bug fix: ``request.POST['recaptcha_token']`` raised KeyError
            # (HTTP 500) when the token was absent; an empty token simply
            # fails verification instead.
            'response': request.POST.get('recaptcha_token', ''),
        }
        # Server-side verification with Google. The timeout keeps a slow
        # reCAPTCHA endpoint from hanging the request worker indefinitely.
        response = requests.post(
            'https://www.google.com/recaptcha/api/siteverify', payload, timeout=10)
        if response.json().get('success'):
            form_contact = ContactForm(request.POST)
            if form_contact.is_valid():
                print('validated')
                contact = form_contact.save(commit=False)
                contact.save()
                # Present a fresh form after a successful send.
                form_contact = ContactForm()
                messages.add_message(request, messages.SUCCESS, 'Message sent', "success")
        else:
            messages.add_message(request, messages.ERROR, 'Recapcha timeout', "danger")
    context = {
        'contacts': contacts,
        'form_contact': form_contact,
    }
    return render(request, 'page/contact.html', context)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,498
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/about.py
|
from django.http import request
from django.shortcuts import redirect, render
from shop.forms.users.signups import ImageUserForm, SignupForm
from django.views.decorators.csrf import csrf_exempt
from shop.models.ImageUser import ImageUser
@csrf_exempt
def about(request):
    """About page; also handles the avatar-upload form for the current user.

    NOTE(review): a POST from an anonymous user reaches the ``user=request.user``
    filter with an AnonymousUser — presumably this view is only posted to by
    logged-in users; confirm the template gates the form.
    """
    form_image = ImageUserForm()
    if request.method == 'POST':
        # Update the existing avatar row when there is one, otherwise create
        # a new one. ``.first()`` replaces the original len()>0 + [0] pattern
        # (single query, no risk of IndexError).
        existing = ImageUser.objects.filter(user=request.user).first()
        if existing is not None:
            form_image = ImageUserForm(request.POST, request.FILES, instance=existing)
        else:
            form_image = ImageUserForm(request.POST, request.FILES)
        if form_image.is_valid():
            images = form_image.save(commit=False)
            if request.user.is_authenticated:
                images.user = request.user
            images.save()
            img_obj = form_image.instance
            return render(request, 'page/about.html',
                          {'form_image': form_image, 'img_obj': img_obj})
        else:
            # Original behaviour kept: invalid uploads reset to a blank form
            # (validation errors are discarded).
            form_image = ImageUserForm()
    context = {
        'form_image': form_image,
    }
    return render(request, 'page/about.html', context)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,499
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/forms/comment.py
|
from django import forms
from shop.models.Comment import Comment
class CommentForm(forms.ModelForm):
message = forms.CharField(widget=forms.Textarea)
class Meta:
model = Comment
fields = ['message', 'name']
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Customer class css
self.fields['name'].widget.attrs.update({'class': 'form-control'})
self.fields['message'].widget.attrs.update({'class': 'form-control'})
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,500
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/models/__init__.py
|
from shop.models.Category import *
from shop.models.Product import *
from shop.models.Contact import *
from shop.models.ImageProduct import *
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,501
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/views/users/profile.py
|
from django.http import request
from django.shortcuts import redirect, render
from shop.models.Category import Category
from shop.forms.users.signups import ImageUserForm
from django.views.decorators.csrf import csrf_exempt
from shop.models.ImageUser import ImageUser
from shop.forms.users.editprofile import UpdateProfileForm
from django.contrib.auth.models import User
def profileViews(request):
    """Profile page: edit the user's basic fields and avatar image."""
    form_image = ImageUserForm()
    edit_form = UpdateProfileForm()
    if request.method == 'POST':
        edit_form = UpdateProfileForm(request.POST, instance=request.user)
        # Reuse the existing avatar row when present. ``.first()`` replaces
        # the original len()>0 + [0] pattern (single query, no IndexError).
        existing = ImageUser.objects.filter(user=request.user).first()
        if existing is not None:
            form_image = ImageUserForm(request.POST, request.FILES, instance=existing)
        else:
            form_image = ImageUserForm(request.POST, request.FILES)
        if form_image.is_valid() and edit_form.is_valid():
            edit_form.save()
            images = form_image.save(commit=False)
            if request.user.is_authenticated:
                images.user = request.user
            images.save()
            img_obj = form_image.instance
            return render(request, 'page/auth/profile.html',
                          {'edit_form': edit_form, 'form_image': form_image,
                           'img_obj': img_obj})
    # GET (or invalid POST): rebind the form to a fresh copy of the user.
    # Bug fix: look up by primary key instead of coercing the User object to
    # a username string (``username=request.user`` relied on implicit str()).
    instances = User.objects.get(pk=request.user.pk)
    edit_form = UpdateProfileForm(instance=instances)
    context = {
        'edit_form': edit_form,
        'form_image': form_image,
    }
    return render(request, 'page/auth/profile.html', context)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,502
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/admin.py
|
from django.contrib import admin
from django.db import models
from shop.models.Product import Product
from shop.models.Category import Category
from shop.models.Contact import Contact
from shop.models.ImageProduct import ImageProduct
from shop.models.Comment import Comment
from shop.models.ImageUser import ImageUser
# Register your models here.
# Inline editor so product images can be managed directly on the Product
# admin change page.
class ImageProductAdmins(admin.StackedInline):
    model = ImageProduct
class ProductAdmin(admin.ModelAdmin):
    """Admin for products: inline image editing, rich list columns,
    in-list editing, filtering and name search."""
    # Bug fix: the original nested a ``class Meta: model = Product`` inside
    # this ModelAdmin. ModelAdmin does not use an inner Meta, so it was dead
    # code and has been removed.
    inlines = [ImageProductAdmins]
    list_display = (
        'name',
        'category',
        'detail',
        'price',
        'created_datetime',
        'updated_datetime',
        'is_recomment',
        'is_activate',
        'image_pro',
    )
    # Editable directly from the change list.
    list_editable = (
        'is_activate',
        'is_recomment',
        'detail',
        'price',
    )
    list_filter = (
        'category',
        'is_activate',
        'is_recomment',
    )
    search_fields = (
        'name',
    )
    list_per_page = 6


admin.site.register(Product, ProductAdmin)
class ContactAdmin(admin.ModelAdmin):
    """Admin changelist configuration for Contact messages."""
    list_display = [
        'email', 'first_name', 'last_name', 'messages', 'datetime', 'is_enabled',
    ]
    list_editable = ['is_enabled']
    list_filter = ['user', 'is_enabled']
    list_per_page = 10
admin.site.register(Contact, ContactAdmin)
class CategoryAdmin(admin.ModelAdmin):
    """Admin changelist configuration for Category."""
    list_display = ['name', 'detail', 'image', 'is_activate']
    list_editable = ['is_activate', 'detail', 'image']
    list_filter = ['name', 'is_activate']
admin.site.register(Category, CategoryAdmin)
# class ImageProductAdmin(admin.StackedInline):
# model:ImageProduct
# @admin.register(ImageProductAdmin)
class ImageProductAdmin(admin.ModelAdmin):
    """Standalone admin for product images (also editable inline on Product)."""
    list_display = ['product', 'images']
    list_filter = ['product']
    list_per_page = 10
admin.site.register(ImageProduct, ImageProductAdmin)
class CommentAdmin(admin.ModelAdmin):
    """Admin changelist configuration for product comments."""
    list_display = ['product', 'datetime', 'is_activate']
    list_editable = ['is_activate']
    list_filter = ['product', 'is_activate']
admin.site.register(Comment, CommentAdmin)
class ImageUserAdmin(admin.ModelAdmin):
    # No customisation yet — default ModelAdmin behaviour.
    pass
admin.site.register(ImageUser,ImageUserAdmin)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,503
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/models/Product.py
|
from django.db import models
from shop.models.Category import Category
import datetime
from versatileimagefield.fields import VersatileImageField
class Product(models.Model):
    """A sellable item belonging to a Category."""
    name = models.CharField(max_length=50)
    detail = models.CharField(max_length=225)
    category = models.ForeignKey(Category, on_delete=models.CASCADE, related_name='products', default=None, null=True)
    price = models.IntegerField()
    is_recomment = models.BooleanField(default=True)   # "recommended" flag shown on the storefront
    is_activate = models.BooleanField(default=True)    # soft-delete / visibility switch
    image_pro = VersatileImageField(max_length=255, upload_to='images/product', default='', blank=True, null=True)
    # BUG FIX: the original used auto_now=True here, which rewrites the value on
    # EVERY save — so "created" silently tracked the last update instead.
    # auto_now_add stamps it once at creation, matching the field's name.
    # NOTE(review): this change requires a migration.
    created_datetime = models.DateTimeField(auto_now_add=True)
    updated_datetime = models.DateTimeField(default=None, null=True)

    def save(self, *args, **kwargs):
        # Refresh the update stamp on every save.
        self.updated_datetime = datetime.datetime.now()
        super().save(*args, **kwargs)

    def __str__(self):
        return self.name
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,504
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/models/Contact.py
|
from django.db import models
from shop.models.Product import Product
from django.contrib.auth.models import User
class Contact(models.Model):
    """A user-submitted message, optionally tied to a product and a user."""
    # NOTE: help_text / error_messages are Thai user-facing strings — left untouched.
    first_name = models.CharField(max_length=50,help_text='กรุณากรอกชื่อ',error_messages={'required': 'กรุณากรอกชื่อ'})
    last_name = models.CharField(max_length=50,help_text='กรุณากรอกนามสกุล',error_messages={'required': 'กรุณากรอกนามสกุล'})
    email = models.EmailField(max_length=55,help_text='กรุณากรอกอีเมลล์',error_messages={'required': 'กรุณากรอกอีเมลล์'})
    messages = models.CharField(max_length=255,help_text='กรุณากรอกความคิดเห็น ไม่เกิน255ตัวอักษร',error_messages={'required': 'กรุณากรอกข้อความ'})
    # NOTE(review): related_name='products' means product.products yields this
    # product's *contacts* — a misleading reverse name; confirm before renaming.
    product = models.ForeignKey(Product, related_name='products', on_delete=models.CASCADE,default=None,null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE,default=None,null=True)
    datetime = models.DateTimeField(auto_now=True)  # timestamp of last save
    is_enabled = models.BooleanField(default=True)  # moderation on/off switch
    class Meta:
        # newest messages first
        ordering = ["-datetime"]
    def __str__(self):
        return self.messages
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,684,505
|
WeerachonYN/Workshop1
|
refs/heads/main
|
/shop/forms/contact.py
|
from django.forms import ModelForm
from django import forms
from shop.models.Contact import Contact
class ContactForm(ModelForm):
    """ModelForm over Contact with Bootstrap-styled widgets."""
    email = forms.EmailField(max_length=254, required=True)
    first_name = forms.CharField(max_length=30, required=True)
    last_name = forms.CharField(max_length=30, required=True)
    messages = forms.CharField(max_length=255, required=True)
    # recaptcha_token

    class Meta:
        model = Contact
        fields = ['email', 'first_name', 'last_name', 'messages']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Attach Bootstrap's form-control class to every widget; email also
        # gets the HTML5 email input type, messages becomes a textarea.
        widget_specs = (
            ('email', forms.TextInput, {'type': 'email'}),
            ('first_name', forms.TextInput, {}),
            ('last_name', forms.TextInput, {}),
            ('messages', forms.Textarea, {}),
        )
        for field_name, widget_cls, extra in widget_specs:
            attrs = {'class': 'form-control', **extra}
            self.fields[field_name].widget = widget_cls(attrs=attrs)
|
{"/shop/models/ImageProduct.py": ["/shop/models/Product.py"], "/shop/admin.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py", "/shop/models/ImageUser.py"], "/shop/views/users/signups.py": ["/shop/forms/users/signups.py"], "/workshop1/urls.py": ["/shop/views/homepage.py", "/shop/views/category.py", "/shop/views/product.py", "/shop/views/contact.py", "/shop/views/about.py", "/shop/views/users/signouts.py", "/shop/views/users/signins.py", "/shop/views/users/signups.py", "/shop/views/users/profile.py"], "/shop/views/users/signins.py": ["/shop/forms/users/signins.py"], "/shop/views/category.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/product.py": ["/shop/models/Product.py", "/shop/models/Category.py", "/shop/models/ImageProduct.py", "/shop/forms/comment.py"], "/workshop1/context_processors.py": ["/shop/models/ImageUser.py", "/shop/models/Category.py"], "/shop/forms/users/signups.py": ["/shop/models/ImageUser.py"], "/shop/views/homepage.py": ["/shop/models/Category.py", "/shop/models/Product.py"], "/shop/views/contact.py": ["/shop/models/Contact.py", "/shop/forms/contact.py"], "/shop/views/about.py": ["/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/__init__.py": ["/shop/models/Category.py", "/shop/models/Product.py", "/shop/models/Contact.py", "/shop/models/ImageProduct.py"], "/shop/views/users/profile.py": ["/shop/models/Category.py", "/shop/forms/users/signups.py", "/shop/models/ImageUser.py"], "/shop/models/Product.py": ["/shop/models/Category.py"], "/shop/models/Contact.py": ["/shop/models/Product.py"], "/shop/forms/contact.py": ["/shop/models/Contact.py"]}
|
36,832,994
|
Kenzhebek-Taniyev/kingfisher
|
refs/heads/master
|
/Parser/parser.py
|
import re
from bs4 import BeautifulSoup
from Locators.types import types
from Locators.locators import locators
class Parser:
    """Wraps one scraped product node and exposes its fields as properties.

    `parent` is a BeautifulSoup node for one listing; `link` identifies which
    category page the node came from (keys types.link_to_product).
    """
    def __init__(self, parent, link):
        self.parent = parent
        self.link = link

    def __repr__(self):
        return f'<{self.category}: {self.name} ({self.price} T ) ({self.city})>'

    @property
    def name(self):
        # BUG FIX: the original called select_one twice (two DOM queries);
        # look the node up once and reuse it.
        node = self.parent.select_one(locators.NAME)
        if node is None:
            return ''
        return node.string

    @property
    def price(self):
        node = self.parent.select_one(locators.PRICE)
        if node is None:
            return ''
        # BUG FIX: re.search can return None when the price text lacks the
        # expected "NNN Т" suffix; the original then crashed on .group().
        matcher = re.search('([0-9 ]+) Т', node.string)
        if matcher is None:
            return ''
        return int(matcher.group(1).replace(" ", ""))

    @property
    def category(self):
        return types.link_to_product[self.link]

    @property
    def city(self):
        # City extraction is disabled; callers always get this placeholder.
        return 'No city'
|
{"/Parser/parser.py": ["/Locators/types.py", "/Locators/locators.py"], "/Pages/allPages.py": ["/Locators/locators.py", "/Parser/parser.py"], "/app.py": ["/Locators/types.py", "/Pages/allPages.py"]}
|
36,849,490
|
Ethea2/Lola-Helper
|
refs/heads/master
|
/backenddb.py
|
import sqlite3
from xlsxwriter.workbook import Workbook
from datetime import date
# Date stamp used for new rows and the export filename, e.g. "May 04, 2021".
today = date.today()
display_date = today.strftime("%B %d, %Y")
def connect():
    """Create the `store` table in store.db if it does not exist yet."""
    connection = sqlite3.connect("store.db")
    try:
        connection.execute(
            "CREATE TABLE IF NOT EXISTS store "
            "(id INTEGER PRIMARY KEY, product TEXT, price INTEGER, date TEXT)")
        connection.commit()
    finally:
        # BUG FIX: close even when execute raises (the original leaked the
        # connection on any exception).
        connection.close()
def add_product(product, price):
    """Insert a new (product, price) row stamped with today's display date."""
    connection = sqlite3.connect("store.db")
    try:
        connection.execute("INSERT INTO store VALUES (NULL,?,?,?)",
                           (product, price, display_date))
        connection.commit()
    finally:
        # BUG FIX: close even when the INSERT raises.
        connection.close()
def view_all():
    """Return every row of the store table as a list of (id, product, price, date)."""
    connection = sqlite3.connect("store.db")
    try:
        return connection.execute("SELECT * FROM store").fetchall()
    finally:
        # BUG FIX: close even when the query raises.
        connection.close()
def get_price():
    """Return the price column as a plain list of values."""
    connection = sqlite3.connect("store.db")
    try:
        rows = connection.execute("SELECT price FROM store").fetchall()
    finally:
        # BUG FIX: close even when the query raises.
        connection.close()
    # The original comprehension shadowed its own name (`price` for both the
    # tuple and the list); use a distinct loop variable.
    return [row[0] for row in rows]
def get_profit():
    """Return the sum of every sale price currently stored."""
    # The original re-implemented sum() with a manual accumulator loop.
    return sum(get_price())
def delete_product(id):
    """Delete the row with the given primary key (no-op if absent)."""
    connection = sqlite3.connect("store.db")
    try:
        connection.execute("DELETE FROM store WHERE id=?", (id,))
        connection.commit()
    finally:
        # BUG FIX: close even when the DELETE raises.
        connection.close()
def search(product="", price=""):
    """Return rows whose product name OR price matches the given value."""
    connection = sqlite3.connect("store.db")
    try:
        return connection.execute(
            "SELECT * FROM store WHERE product=? OR price=?",
            (product, price)).fetchall()
    finally:
        # BUG FIX: close even when the query raises.
        connection.close()
def update_product(id, product, price):
    """Overwrite the product name and price of the row with the given id."""
    connection = sqlite3.connect("store.db")
    try:
        connection.execute("UPDATE store SET product=?, price=? WHERE id=?",
                           (product, price, id))
        connection.commit()
    finally:
        # BUG FIX: close even when the UPDATE raises.
        connection.close()
def delete_all():
    """Delete every row from the store table."""
    connection = sqlite3.connect("store.db")
    try:
        connection.execute("DELETE FROM store;")
        connection.commit()
    finally:
        # BUG FIX: close even when the DELETE raises.
        connection.close()
def export_excel():
    """Dump every row of the store table into a date-stamped .xlsx file."""
    workbook = Workbook(f'store {display_date}.xlsx')
    sheet = workbook.add_worksheet()
    # write_row emits the whole record at once, starting at column 0
    for row_idx, record in enumerate(view_all()):
        sheet.write_row(row_idx, 0, record)
    workbook.close()
# Ensure the table exists as soon as this module is imported.
connect()
# The calls below are leftover manual tests kept for reference.
#insert_product("Lucky Me", 30)
#delete_product()
#update_product(1, 'Lays', 40)
#print(search(price=20))
#export_excel()
#print(get_profit())
|
{"/LolaHelper.py": ["/frontend.py", "/backenddb.py"], "/frontend.py": ["/backenddb.py"]}
|
36,849,491
|
Ethea2/Lola-Helper
|
refs/heads/master
|
/LolaHelper.py
|
from tkinter import *
from tkinter import messagebox
from datetime import date
import backenddb as backend
# Date stamp shown in the UI header, e.g. "May 04, 2021".
today = date.today()
display_date = today.strftime("%B %d, %Y")
def clear_boxes():
    """Empty both entry widgets."""
    for box in (product_box, price_box):
        box.delete(0, END)
def get_selected_row(event):
    """Listbox click handler: remember the clicked row and mirror it into the
    entry boxes. Returns the row tuple, or None when the click landed on no row."""
    global selected_row
    try:
        index = list_box.curselection()[0]
        selected_row = list_box.get(index)
        clear_boxes()
        # Row layout: (id, product, price, date).
        product_box.insert(END, selected_row[1])
        price_box.insert(END, selected_row[2])
        return selected_row
    except IndexError:
        # Nothing selected — ignore the event.
        pass
def view_command():
    """Reload the listbox with every stored record."""
    list_box.delete(0, END)
    for record in backend.view_all():
        list_box.insert(END, record)
def add_command():
    """Persist the entry-box values as a new record, then refresh the list."""
    name, cost = product_box.get(), price_box.get()
    backend.add_product(name, cost)
    view_command()
def delete_command():
    """Delete the currently selected record after a yes/no confirmation."""
    product_name_delete = product_box.get()
    answer = messagebox.askquestion(
        "Delete Product",
        f"Are you sure you want to delete {product_name_delete}?")
    if answer == 'yes':
        backend.delete_product(selected_row[0])
        clear_boxes()
        view_command()
def update_command():
    """Overwrite the selected record with the entry-box values, then refresh."""
    record_id = selected_row[0]
    backend.update_product(record_id, product_box.get(), price_box.get())
    view_command()
def search_command():
    """Show only the records matching the entry-box name or price."""
    matches = backend.search(product_box.get(), price_box.get())
    list_box.delete(0, END)
    for record in matches:
        list_box.insert(END, record)
def export_excel():
    # Thin wrapper so the button can trigger the backend export.
    # NOTE(review): the name export_excel is later rebound to the Button widget;
    # the Button's command= captured this function first, so it still works.
    backend.export_excel()
def clear_command():
    """Wipe every record after a yes/no confirmation."""
    answer = messagebox.askquestion("Delete Product", f"Are you sure you want to clear all?")
    if answer == 'yes':
        backend.delete_all()
        view_command()
def profit_command():
    """Show the summed price of all records in the profit box."""
    profit_box.delete(0, END)
    profit_box.insert(END, backend.get_profit())
# --- root window and static labels ---------------------------------------
window = Tk()
name_product = StringVar()
name_product.set("Product Name")
product_name = Label(window, textvariable=name_product, height=2, width=20)
product_name.grid(row=0, column=0)
price_name = StringVar()
price_name.set("Price")
price = Label(window, textvariable=price_name, height=2, width=20)
price.grid(row=0, column=2)
date_name = StringVar()
date_name.set(f"Date: {display_date}")
date_label = Label(window, textvariable=date_name, height=2, width=20)
date_label.grid(row=0, column=4)
profit_name = StringVar()
profit_name.set("Profit")
profit_label = Label(window, textvariable=profit_name)
profit_label.grid(row=15, column=1)
# --- entry boxes ----------------------------------------------------------
product_box = Entry(window)
product_box.grid(row=0, column=1)
price_box = Entry(window)
price_box.grid(row=0, column=3)
profit_box = Entry(window)
profit_box.grid(row=15, column=2)
# --- listbox and scroll bar, wired to each other --------------------------
list_box = Listbox(window, height=12, width=70)
list_box.grid(row=2,column=0,rowspan=12,columnspan=3)
scroll_bar = Scrollbar(window)
scroll_bar.grid(row=4, column=3, rowspan=7)
list_box.configure(yscrollcommand=scroll_bar.set)
scroll_bar.configure(command=list_box.yview)
list_box.bind('<<ListboxSelect>>', get_selected_row)
# --- command buttons ------------------------------------------------------
view_all = Button(window, text="View All", height=1, width=20, command=view_command)
view_all.grid(row=2, column=4)
search_button = Button(window, text="Search", height=1, width=20, command=search_command)
search_button.grid(row=3, column=4)
add_button = Button(window, text="Add Entry", height=1, width=20, command=add_command)
add_button.grid(row=4, column=4)
update_button = Button(window, text="Update Selected", height=1, width=20, command=update_command)
update_button.grid(row=5, column=4)
delete_button = Button(window, text="Delete Selected", height=1, width=20, command=delete_command)
delete_button.grid(row=6, column=4)
sum_button = Button(window, text="Calculate Profit", height=1, width=20, command=profit_command)
sum_button.grid(row=7, column=4)
# NOTE(review): this rebinds the name export_excel (a function above) to the
# Button; command= captured the function before the rebinding, so it works,
# but a distinct button name would be clearer.
export_excel = Button(window, text="Export to Excel", height=1, width=20, command=export_excel)
export_excel.grid(row=8, column=4)
clear_all_button = Button(window, text="Clear All", height=1, width=20, command=clear_command)
clear_all_button.grid(row=9, column=4)
window.mainloop()
|
{"/LolaHelper.py": ["/frontend.py", "/backenddb.py"], "/frontend.py": ["/backenddb.py"]}
|
36,882,328
|
Silvana7211/Local-ebay
|
refs/heads/master
|
/mall/urls.py
|
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter
from django.urls import path
from mall import views
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'product', views.ProductViewSet)
router.register(r'users', views.UserViewSet)
router.register(r'bids', views.BidsViewSet)
# The API URLs are now determined automatically by the router.
urlpatterns = [
    url(r'^', include(router.urls))
]
# Session login/logout views for DRF's browsable API.
urlpatterns += [
    path('api-auth/', include('rest_framework.urls')),
]
|
{"/mall/views.py": ["/mall/serializers.py", "/mall/models.py"], "/mall/serializers.py": ["/mall/models.py"]}
|
36,906,019
|
jinga-lala/gnr602-programming-assignment
|
refs/heads/main
|
/main.py
|
import numpy as np
import argparse
from sklearn.model_selection import train_test_split
from model import Perceptron
from data_loader import load_sonar_data
from PIL import Image
from numpy import asarray
import os
# Fixed seed so train/test splits and weight init are reproducible across runs.
np.random.seed(0)
def _load_image_pixels(image_path, label_path):
    # Flatten an image and its label mask into per-pixel feature/label arrays,
    # scaled to [0, 1]; labels are cast to int. Returns (X, Y, label_shape).
    data = asarray(Image.open(image_path))
    ldata = asarray(Image.open(label_path))
    feats, labels = [], []
    for i in range(data.shape[0]):
        for j in range(data.shape[1]):
            feats.append(data[i][j] / 255)
            labels.append(ldata[i][j] / 255)
    return np.array(feats), np.array(labels).astype('int'), ldata.shape

def main(args):
    """Train (and, except for 'or', test) a Perceptron on the selected dataset.

    args.data selects among 'or', 'sonar', 'toy' and 'image'; args.input_dim is
    overwritten to match the chosen dataset.
    """
    if args.data == 'or':
        X_train = np.array([[0,0],[0,1], [1,0],[1,1]])
        y_train = np.array([0,1,1,1]) #OR
        # Y = np.array([0,1,1,0]) #XOR
    elif args.data == 'sonar':
        X, Y = load_sonar_data()
        X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.4, random_state=42)
        args.input_dim = 60
    elif args.data == 'toy':
        X_train = np.array([[-6],[-3],[-2],[-1],[0],[1],[2],[3],[4],[5],[6]])
        y_train = np.array([0,0,1,1,1,1,1,0,0,0,0])
        X_test = np.array([[-0.1], [0.1], [1.2],[2.3],[-2.2],[-1.8],[1.9],[3.5], [3], [4], [-3], [-4]])
        y_test = np.array([1,1,1,0,0,1,1,0,0,0,0,0])
        args.input_dim = 1
    elif args.data == 'image':
        # The original duplicated the load-and-flatten logic verbatim for the
        # train and test pairs; it is factored into _load_image_pixels.
        X_train, y_train, _ = _load_image_pixels(args.train_image, args.train_label)
        X_test, y_test, shp = _load_image_pixels(args.test_image, args.test_label)
        args.input_dim = 3  # 3 channel input
    model = Perceptron(args)
    # Training loop: one batch forward/backward per epoch.
    print('='*20+'Training'+'='*20)
    for epoch in range(args.epoch):
        model.forward(X_train, y_train)
        print(f'Epoch {epoch+1}: No. of misclassifications = {abs(model.loss())}')
        model.backward()
    # Testing: the image branch also writes the predicted mask to disk.
    if args.data == 'image':
        print('='*20+'Testing'+'='*20)
        t = model.forward(X_test, y_test)
        out = t.reshape(shp)
        im = Image.fromarray(np.uint8(out*255))
        im.save(os.path.join(args.out_image, 'output.png'))
        print(f'Test accuracy: {100-100.00*abs(model.loss())/ len(y_test)}%')
    elif args.data != 'or':
        print('='*20+'Testing'+'='*20)
        t = model.forward(X_test, y_test)
        print(f'Test accuracy: {100-100.00*abs(model.loss())/ len(y_test)}%')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Inputs to model run script')
    parser.add_argument('--input_dim', default=2, type=int, help='input dimension of features')
    parser.add_argument('--epoch', default=None, type=int, help='number of epochs')
    parser.add_argument('--transform', default=None, type=int, help='do non linear transformation on input or not')
    parser.add_argument('--learning_rate', default=None, type=float, help='learning rate')
    parser.add_argument('--data', default=None, type=str, help='training and eval data')
    parser.add_argument('--train_image', default=None, type=str, help='Training image path')
    parser.add_argument('--train_label', default=None, type=str, help='Label image path')
    parser.add_argument('--test_image', default=None, type=str, help='Test image path')
    parser.add_argument('--test_label', default=None, type=str, help='Test label path')
    parser.add_argument('--out_image', default=None, type=str, help='Predicted image store path')
    args = parser.parse_args()
    # Any option not given on the command line is asked for via tkinter dialogs.
    import tkinter as tk
    from tkinter import simpledialog
    ROOT = tk.Tk()
    ROOT.withdraw()
    ent1 = tk.Entry(ROOT, font=40)
    ent1.grid(row=2, column=2)
    from tkinter.filedialog import askopenfilename
    from tkinter import filedialog
    # BUG FIX (applies to every askstring below): askstring returns None when
    # the dialog is cancelled; the original `== ''` test then crashed inside
    # int()/float(). Any falsy answer now falls back to the default.
    if args.transform is None:
        args.transform = simpledialog.askstring(title="Transformation",
                                    prompt="Do you want non-linear transformation on input? Enter 0 for No, 1 for Yes \n Press OK for default value")
        args.transform = 0 if not args.transform else int(args.transform)
    if args.data is None:
        args.data = simpledialog.askstring(title="Data",
                                    prompt="Which dataset do you want to use? Possible choices [or, toy, sonar, image] \n Press OK for default value")
        args.data = 'or' if not args.data else args.data
        if args.data == 'image':
            # Image runs need the four image/label paths plus an output folder.
            currdir = os.getcwd()
            args.train_image = filedialog.askopenfilename(parent=ROOT, initialdir=currdir, title='Please select train image')
            args.train_label = filedialog.askopenfilename(parent=ROOT, initialdir=currdir, title='Please select train label')
            args.test_image = filedialog.askopenfilename(parent=ROOT, initialdir=currdir, title='Please select test image')
            args.test_label = filedialog.askopenfilename(parent=ROOT, initialdir=currdir, title='Please select test label')
            args.out_image = filedialog.askdirectory(parent=ROOT, initialdir=currdir, title='Please select a directory to store test output')
            print(args)
    if args.learning_rate is None:
        args.learning_rate = simpledialog.askstring(title="Learning rate",
                                    prompt="Set the learning rate for this run \n Press OK for default value")
        args.learning_rate = 0.1 if not args.learning_rate else float(args.learning_rate)
    if args.epoch is None:
        args.epoch = simpledialog.askstring(title="Epochs",
                                    prompt="Set the number of epochs for this run \n Press OK for default value")
        args.epoch = 100 if not args.epoch else int(args.epoch)
    main(args)
|
{"/main.py": ["/model.py", "/data_loader.py"]}
|
36,906,020
|
jinga-lala/gnr602-programming-assignment
|
refs/heads/main
|
/model.py
|
import numpy as np
# Fixed seed so weight initialisation is reproducible across runs.
np.random.seed(0)
class Perceptron(object):
    """Single-layer perceptron classifier (input layer + output layer, no
    hidden layer) with an optional polynomial (square) transformation of the
    input, so the transformed and untransformed results can be compared."""

    def __init__(self, arg):
        super(Perceptron, self).__init__()
        self.arg = arg
        # Weights for d inputs plus a bias term, shape (d+1, 1).
        self.weights = np.random.randn(self.arg.input_dim + 1).reshape(-1, 1)

        def square(x):
            return x ** 2
        # arg.transform == 1 enables the nonlinear (square) input transform.
        if self.arg.transform == 1:
            self.transformation = square
        else:
            self.transformation = lambda x: x
        self.learning_rate = arg.learning_rate if self.arg.learning_rate is not None else 0.1

    def forward(self, x, y):
        """Predict 0/1 labels for x; stash inputs/targets for backward()/loss()."""
        # Append a bias column of ones -> (?, d+1).
        self.x = np.concatenate((self.transformation(x), np.ones((x.shape[0], 1))), axis=-1)
        out = self.x @ self.weights          # (?, 1)
        self.t = (out >= 0) + 0              # threshold activation, 0/1 predictions
        self.y = y.reshape(-1, 1)
        return self.t

    def backward(self):
        """Batch perceptron weight update from the last forward pass."""
        self.weights += np.sum(self.learning_rate * (self.y - self.t) * self.x, axis=0).reshape(-1, 1)

    def loss(self):
        """Number of misclassified samples from the last forward pass."""
        # BUG FIX: the original returned np.sum(self.y - self.t), where +1 and
        # -1 errors cancel, under-reporting the misclassification count even
        # under the callers' abs(). Count actual disagreements instead.
        return int(np.sum(self.y != self.t))
|
{"/main.py": ["/model.py", "/data_loader.py"]}
|
36,906,021
|
jinga-lala/gnr602-programming-assignment
|
refs/heads/main
|
/data_loader.py
|
import pandas as pd
def load_sonar_data():
    """Read the sonar CSV and return (features, labels) as numpy arrays.

    Labels come from column 60 with 'R' mapped to 0 and 'M' to 1; features are
    columns 0..59.
    """
    frame = pd.read_csv('data/sonar.all-data', index_col=False, header=None)
    frame = frame.replace(to_replace='R', value=0)
    frame = frame.replace(to_replace='M', value=1)
    labels = frame[60].to_numpy()
    features = frame.to_numpy()[:, :60]
    return features, labels
# NOTE(review): this module-level call loads the CSV on import and discards the
# result — looks like leftover debugging; confirm before removing.
load_sonar_data()
|
{"/main.py": ["/model.py", "/data_loader.py"]}
|
36,938,878
|
SpaceDEVofficial/cov19py
|
refs/heads/main
|
/corona/corona.py
|
from .util import request_api
class root_api:
    # Base URL of the corona statistics API.
    # NOTE(review): empty string — presumably meant to be filled in before use;
    # client.getTodayCount() would request "" as-is. Confirm the intended URL.
    main_api = ""
class client():
    """Async client for the corona statistics API."""
    def __init__(self):
        # Snapshot the base URL at construction time.
        self.main_api = root_api.main_api
    async def getTodayCount(self):
        """Fetch and return today's counts as parsed JSON."""
        res = await request_api(url=self.main_api)
        return res
|
{"/corona/corona.py": ["/corona/util.py"]}
|
36,938,879
|
SpaceDEVofficial/cov19py
|
refs/heads/main
|
/corona/util.py
|
import json
import aiohttp
async def request_api(url):
    """GET *url* and return the response body parsed as JSON."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url=url) as resp:
            raw = await resp.read()
    decoded = raw.decode('utf-8')
    return json.loads(decoded)
|
{"/corona/corona.py": ["/corona/util.py"]}
|
36,941,913
|
lizwait/githugbot
|
refs/heads/master
|
/bot.py
|
import os # for importing env vars for the bot to use
from twitchio.ext import commands
bot = commands.Bot(
    # All connection settings come from environment variables.
    irc_token=os.environ['TMI_TOKEN'],
    client_id=os.environ['CLIENT_ID'],
    nick=os.environ['BOT_NICK'],
    prefix=os.environ['BOT_PREFIX'],
    initial_channels=[os.environ['CHANNEL']]
)
@bot.event
async def event_ready():
    'Called once when the bot goes online.'
    print(f"{os.environ['BOT_NICK']} is online!")
    # NOTE(review): bot._ws is private twitchio API, used because no ctx is
    # available inside event_ready — may break across twitchio versions.
    ws = bot._ws
    await ws.send_privmsg(os.environ['CHANNEL'], f"/me has landed!")
@bot.event
async def event_message(ctx):
    'Runs every time a message is sent in chat.'
    # Ignore the bot's own messages. (The original comment also claimed the
    # streamer is ignored, but only BOT_NICK is checked here.)
    if ctx.author.name.lower() == os.environ['BOT_NICK'].lower():
        return
    await bot.handle_commands(ctx)
    # Substring checks: a message containing several triggers gets one reply
    # per matching `if` (e.g. "hello hi" is greeted twice).
    if 'hello' in ctx.content.lower():
        await ctx.channel.send(f"Oh hai, @{ctx.author.name}! <3 ")
    if 'hi' in ctx.content.lower():
        await ctx.channel.send(f"Oh hai, @{ctx.author.name}! <3 ")
    if 'yaas' in ctx.content.lower():
        await ctx.channel.send(f"YAAAAS! VirtualHug")
@bot.command(name='twitter')
async def twitter(ctx):
    """Post the streamer's Twitter link in chat."""
    message = ('Follow AuraSwap on Twitter for memes, cat pics, and to find out '
               'when she goes live! https://twitter.com/auraswap')
    await ctx.send(message)
@bot.command(name='caster')
async def caster(ctx):
    """Shout out another caster: !caster <channel_name>."""
    caster_name = ctx.content.split()
    # BUG FIX: "!caster" with no argument crashed with IndexError on [1];
    # reply with usage instead.
    if len(caster_name) < 2:
        await ctx.send('Usage: !caster <channel_name>')
        return
    await ctx.send(f'Show some love to this amazing caster! https://www.twitch.tv/{caster_name[1]}')
if __name__ == "__main__":
    # Start the bot's event loop (blocks until shutdown).
    bot.run()
|
{"/bot.py": ["/tarot_deck.py"]}
|
36,947,658
|
abdulmalik500/abdulmalik500.github.io
|
refs/heads/main
|
/buk/views.py
|
from django.shortcuts import render, get_object_or_404
from django.shortcuts import render
from buk.models import places
# Create your views here.
def index(request):
    """Render the home page with every place."""
    context = {'place': places.objects.all()}
    return render(request, 'buk/index.html', context)
def detail(request, slug=None):
    """Render the detail page for one place looked up by slug.

    BUG FIX: the original referenced the undefined name ``Places``; the model
    imported from buk.models is lowercase ``places``.
    """
    # NOTE(review): this filters on slug=... — confirm the places model
    # actually defines a slug field.
    place = get_object_or_404(places, slug=slug)
    return render(request, 'buk/detail.html', {'place': place})
#class HomePageView(TemplateView):
# template_view = 'buk/index.html'
|
{"/buk/admin.py": ["/buk/models.py"], "/buk/urls.py": ["/buk/views.py"], "/buk/views.py": ["/buk/models.py"], "/buk/roadlayer.py": ["/buk/models.py"], "/buk/placeslayer.py": ["/buk/models.py"]}
|
36,947,659
|
abdulmalik500/abdulmalik500.github.io
|
refs/heads/main
|
/buk/models.py
|
from django.db import models
from django.contrib.gis.db import models  # intentionally shadows the plain import; provides Geo fields
from django.db import transaction
from django.urls import reverse
from django.utils.text import slugify
# Create your models here.
class road(models.Model):
    """A road segment with OSM-derived attributes and MultiLineString geometry."""
    # BUG FIX: the original passed null='False' — a truthy *string*, which
    # Django treats as null=True; the intended boolean null=False is now used.
    # NOTE(review): this change requires a migration.
    state_code = models.CharField(max_length=254, null=False)
    source = models.CharField(max_length=254, null=False)
    name = models.CharField(max_length=254, null=False)
    surface_ty = models.CharField(max_length=254, null=False)
    smoothness = models.CharField(max_length=254, null=False)
    one_way = models.IntegerField()
    osm_class = models.CharField(max_length=254, null=False)
    road_class = models.CharField(max_length=254, null=False)
    global_id = models.CharField(max_length=254, null=False)
    shape_leng = models.FloatField()
    geom = models.MultiLineStringField(srid=4326)

    def __str__(self):
        return 'Name: %s' % self.name
class places(models.Model):
    """A named place with MultiPoint geometry and a URL slug."""
    objectid = models.BigIntegerField()
    # BUG FIX: null='False' is a truthy string (== null=True); use the boolean.
    # NOTE(review): this change requires a migration.
    state_code = models.CharField(max_length=254, null=False)
    source = models.CharField(max_length=254, null=False)
    name = models.CharField(max_length=254, null=False)
    alt_name = models.CharField(max_length=254, null=False)
    ward_code = models.CharField(max_length=254, null=False)
    is_primary = models.IntegerField()
    global_id = models.CharField(max_length=254, null=False)
    geom = models.MultiPointField(srid=4326)
    # BUG FIX: save() assigned self.slug but the model had no slug column, so
    # the value was never persisted — yet the detail view filters on slug=...
    slug = models.SlugField(max_length=254, null=True, blank=True)

    def __str__(self):
        return 'Name: %s' % self.name

    def save(self, *args, **kwargs):
        # BUG FIX: the original called the unimported name slugify, referenced
        # the undefined class name `Places`, and dropped *args/**kwargs on the
        # way to super().save().
        self.slug = slugify(self.name)
        super(places, self).save(*args, **kwargs)

    def get_absolute_url(self):
        # NOTE(review): reverses 'detail' with the raw name, while the detail
        # view looks the object up by slug — these likely should agree.
        return reverse('detail', args=[str(self.name)])
class boundary(models.Model):
    # Administrative boundary polygon. The primary key comes from the data
    # source rather than being auto-generated.
    id = models.IntegerField(primary_key=True)
    geom = models.MultiPolygonField(srid=4326)  # WGS84 (EPSG:4326)
    def __str__(self):
        return 'Name: %s' % self.id
#def __str__(self):
#return self.surface_ty
#def __unicode__(self):
#return self.source
#class Meta:
#verbose_name = "road"
|
{"/buk/admin.py": ["/buk/models.py"], "/buk/urls.py": ["/buk/views.py"], "/buk/views.py": ["/buk/models.py"], "/buk/roadlayer.py": ["/buk/models.py"], "/buk/placeslayer.py": ["/buk/models.py"]}
|
37,146,437
|
a2subedi/kalimati_market
|
refs/heads/master
|
/scraper.py
|
import time
import requests
from bs4 import BeautifulSoup as bs

# Scrape the daily commodity price table from the Kalimati market site.
url = "https://kalimatimarket.gov.np/index.php/lang/en"
then = time.time()  # start timestamp (kept for timing the scrape)
try:
    # requests.get blocks until the full response body is downloaded, so the
    # former time.sleep(10) added nothing but a 10-second delay — removed.
    res = requests.get(url, timeout=30)
    res.raise_for_status()
    soup = bs(res.text, "html.parser")
    prices_table = soup.find('table', {'id': 'commodityDailyPrice'})
    # Bug fixed: `findAll` was referenced but never called, so `rows` was a
    # bound method object instead of the table rows.
    rows = prices_table.find('tbody').findAll('tr')
    print(rows)
except Exception as exc:
    # Report failures instead of silently swallowing them.
    print('scrape failed: %s' % exc)
|
{"/scraper.py": ["/config.py", "/helpers.py"]}
|
37,181,279
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/figshare/views/widget.py
|
import httplib as http
from framework.exceptions import HTTPError
from website.project.decorators import must_be_contributor_or_public
from website.project.decorators import must_have_addon
from ..api import Figshare # noqa
@must_be_contributor_or_public
@must_have_addon('figshare', 'node')
def figshare_widget(*args, **kwargs):
    """Return the serialized widget configuration for the node's figshare
    addon; 404 when the addon record is missing.
    """
    # Bug fixed: `figshare_settings` was referenced but never imported — the
    # old `# noqa` comments merely suppressed the undefined-name warnings,
    # leaving a latent NameError. Import path matches the sibling crud module.
    from website.addons.figshare import settings as figshare_settings
    figshare = kwargs['node_addon']
    #TODO throw error
    # if not figshare.figshare_id:
    if figshare:
        rv = {
            'complete': True,
            'figshare_id': figshare.figshare_id,
            'src': figshare.embed_url,
            'width': figshare_settings.IFRAME_WIDTH,
            'height': figshare_settings.IFRAME_HEIGHT,
        }
        rv.update(figshare.config.to_json())
        return rv
    raise HTTPError(http.NOT_FOUND)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,280
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/citations/views.py
|
import httplib as http
from flask import request
from modularodm import Q
from framework.exceptions import HTTPError
from website import citations
from website.models import CitationStyle
from website.project.decorators import must_be_contributor_or_public
def list_citation_styles():
    """List all citation styles, optionally filtered by the ?q= search term
    against title, short title, or id (case-insensitive substring match).
    """
    term = request.args.get('q')
    if term:
        query = (
            Q('title', 'icontains', term) |
            Q('short_title', 'icontains', term) |
            Q('_id', 'icontains', term)
        )
    else:
        query = None
    matches = CitationStyle.find(query)
    return {'styles': [each.to_json() for each in matches]}
@must_be_contributor_or_public
def node_citation(**kwargs):
    """Render a node's citation in the requested style; 404 on unknown style."""
    node = kwargs.get('node') or kwargs.get('project')
    style = kwargs.get('style')
    try:
        rendered = citations.render(node, style=style)
    except ValueError:
        raise HTTPError(
            http.NOT_FOUND,
            data={'message_short': 'Invalid citation style'},
        )
    return {'citation': rendered}
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,281
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/s3/routes.py
|
from framework.routing import Rule, json_renderer
from website.routes import OsfWebRenderer
from website.addons.s3 import views
# JSON API routes for S3 credential, bucket, and node configuration.
settings_routes = {
    'rules': [
        Rule(
            [
                '/project/<pid>/s3/newbucket/',
                '/project/<pid>/node/<nid>/s3/newbucket/',
            ],
            'post',
            views.crud.create_new_bucket,
            json_renderer
        ),
        Rule(
            '/settings/s3/',
            'post',
            views.config.s3_authorize_user,
            json_renderer
        ),
        Rule(
            [
                '/project/<pid>/s3/settings/',
                '/project/<pid>/node/<nid>/s3/settings/',
            ],
            'post',
            views.config.s3_node_settings,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/s3/settings/',
                '/project/<pid>/node/<nid>/s3/settings/',
                '/project/<pid>/s3/config/',
                '/project/<pid>/node/<nid>/s3/config/',
            ],
            'delete',
            views.config.s3_remove_node_settings,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/s3/import-auth/',
                '/project/<pid>/node/<nid>/s3/import-auth/',
            ],
            'post',
            views.config.s3_node_import_auth,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/s3/authorize/',
                '/project/<pid>/node/<nid>/s3/authorize/',
            ],
            'post',
            views.config.s3_authorize_node,
            json_renderer,
        ),
        Rule(
            '/settings/s3/',
            'delete',
            views.config.s3_remove_user_settings,
            json_renderer,
        ),
    ],
    'prefix': '/api/v1',
}
# JSON API routes for S3 file CRUD, rendering, and file-grid listings.
api_routes = {
    'rules': [
        Rule(
            [
                '/project/<pid>/s3/',
                '/project/<pid>/node/<nid>/s3/'
            ],
            'post',
            views.crud.s3_upload,
            json_renderer
        ),
        Rule(
            [
                '/project/<pid>/s3/<path:path>/',
                '/project/<pid>/node/<nid>/s3/<path:path>/',
            ],
            'delete',
            views.crud.s3_delete,
            json_renderer
        ),
        Rule(
            [
                '/project/<pid>/s3/<path:path>/render/',
                '/project/<pid>/node/<nid>/s3/<path:path>/render/',
            ],
            'get',
            views.crud.ping_render,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/s3/hgrid/',
                '/project/<pid>/node/<nid>/s3/hgrid/',
                '/project/<pid>/s3/hgrid/<path:path>/',
                '/project/<pid>/node/<nid>/s3/hgrid/<path:path>/',
            ],
            'get',
            views.hgrid.s3_hgrid_data_contents,
            json_renderer
        ),
        Rule(
            [
                '/project/<pid>/s3/hgrid/dummy/',
                '/project/<pid>/node/<nid>/s3/hgrid/dummy/',
            ],
            'get',
            views.hgrid.s3_dummy_folder,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/s3/<path:path>/info/',
                '/project/<pid>/node/<nid>/s3/<path:path>/info/',
            ],
            'get',
            views.crud.file_delete_info,
            json_renderer,
        ),
    ],
    'prefix': '/api/v1',
}
# HTML-page routes (no /api/v1 prefix): file view and download.
nonapi_routes = {
    'rules': [
        Rule(
            [
                '/project/<pid>/s3/<path:path>/',
                '/project/<pid>/node/<nid>/s3/<path:path>/'
            ],
            'get',
            views.crud.s3_view,
            OsfWebRenderer('../addons/s3/templates/s3_view_file.mako'),
        ),
        Rule(
            [
                '/project/<pid>/s3/<path:path>/download/',
                '/project/<pid>/node/<nid>/s3/<path:path>/download/'
            ],
            'get',
            views.crud.s3_download,
            json_renderer,
        ),
    ]
}
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,282
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/twofactor/routes.py
|
from framework.routing import Rule, json_renderer
from . import views, SHORT_NAME
# The two-factor addon exposes a single JSON endpoint for user settings.
settings_routes = {
    'rules': [
        # POST /api/v1/settings/twofactor/ — update the user's 2FA settings.
        Rule([
            '/settings/{}/'.format(SHORT_NAME),
        ], 'post', views.user_settings, json_renderer),
    ],
    'prefix': '/api/v1',
}
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,283
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/dropbox/routes.py
|
# -*- coding: utf-8 -*-
"""Dropbox addon routes."""
from framework.routing import Rule, json_renderer
from website.addons.dropbox import views
from website.routes import OsfWebRenderer, notemplate
# JSON API routes for Dropbox OAuth and user-level settings.
auth_routes = {
    'rules': [
        Rule(
            '/settings/dropbox/',
            'get',
            views.auth.dropbox_user_config_get,
            json_renderer,
        ),
        ##### OAuth #####
        Rule(
            '/settings/dropbox/oauth/',
            'get',
            views.auth.dropbox_oauth_start,  # Use same view func as node oauth start
            json_renderer,
            endpoint_suffix='_user'  # but add a suffix for url_for
        ),
        Rule(
            '/addons/dropbox/oauth/finish/',
            'get',
            views.auth.dropbox_oauth_finish,
            json_renderer,
        ),
        Rule(
            '/settings/dropbox/oauth/',
            'delete',
            views.auth.dropbox_oauth_delete_user,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/dropbox/oauth/',
                '/project/<pid>/node/<nid>/dropbox/oauth/',
            ],
            'get',
            views.auth.dropbox_oauth_start,
            json_renderer,
        ),
    ],
    'prefix': '/api/v1'
}
# HTML-page routes (no prefix): file view and download pages.
web_routes = {
    'rules': [
        ##### View file #####
        Rule(
            [
                '/project/<pid>/dropbox/files/<path:path>',
                '/project/<pid>/node/<nid>/dropbox/files/<path:path>',
            ],
            'get',
            views.crud.dropbox_view_file,
            OsfWebRenderer('../addons/dropbox/templates/dropbox_view_file.mako'),
        ),
        ##### Download file #####
        Rule(
            [
                '/project/<pid>/dropbox/files/<path:path>/download/',
                '/project/<pid>/node/<nid>/dropbox/files/<path:path>/download/',
            ],
            'get',
            views.crud.dropbox_download,
            notemplate,
        ),
    ],
}
# JSON API routes for node settings, file CRUD, rendering, and file grid.
api_routes = {
    'rules': [
        ##### Node settings #####
        Rule(
            ['/project/<pid>/dropbox/config/',
            '/project/<pid>/node/<nid>/dropbox/config/'],
            'get',
            views.config.dropbox_config_get,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/dropbox/config/',
            '/project/<pid>/node/<nid>/dropbox/config/'],
            'put',
            views.config.dropbox_config_put,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/dropbox/config/',
            '/project/<pid>/node/<nid>/dropbox/config/'],
            'delete',
            views.config.dropbox_deauthorize,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/dropbox/config/import-auth/',
            '/project/<pid>/node/<nid>/dropbox/config/import-auth/'],
            'put',
            views.config.dropbox_import_user_auth,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/dropbox/config/share/',
            '/project/<pid>/node/<nid>/dropbox/config/share/'],
            'get',
            views.config.dropbox_get_share_emails,
            json_renderer
        ),
        ##### CRUD #####
        # Delete
        Rule(
            [
                '/project/<pid>/dropbox/files/<path:path>',
                '/project/<pid>/node/<nid>/dropbox/files/<path:path>',
            ],
            'delete',
            views.crud.dropbox_delete_file,
            json_renderer
        ),
        # Upload
        Rule(
            [
                '/project/<pid>/dropbox/files/',
                '/project/<pid>/dropbox/files/<path:path>',
                '/project/<pid>/node/<nid>/dropbox/files/',
                '/project/<pid>/node/<nid>/dropbox/files/<path:path>',
            ],
            'post',
            views.crud.dropbox_upload,
            json_renderer
        ),
        ##### File rendering #####
        Rule(
            [
                '/project/<pid>/dropbox/files/<path:path>/render/',
                '/project/<pid>/node/<nid>/dropbox/files/<path:path>/render/',
            ],
            'get',
            views.crud.dropbox_render_file,
            json_renderer
        ),
        ##### Revisions #####
        Rule(
            [
                '/project/<pid>/dropbox/files/<path:path>/revisions/',
                '/project/<pid>/node/<nid>/dropbox/files/<path:path>/revisions/',
            ],
            'get',
            views.crud.dropbox_get_revisions,
            json_renderer
        ),
        ##### HGrid #####
        Rule(
            [
                '/project/<pid>/dropbox/hgrid/',
                '/project/<pid>/node/<nid>/dropbox/hgrid/',
                '/project/<pid>/dropbox/hgrid/<path:path>',
                '/project/<pid>/node/<nid>/dropbox/hgrid/<path:path>',
            ],
            'get',
            views.hgrid.dropbox_hgrid_data_contents,
            json_renderer
        ),
    ],
    'prefix': '/api/v1'
}
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,284
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/framework/render/core.py
|
# -*- coding: utf-8 -*-
import os
import mfr
from mfr.ext import ALL_HANDLERS
def init_mfr(app):
    """Wire mfr into *app*: register every available file handler and copy
    each plugin's static assets into the app's static folder.
    """
    mfr.register_filehandlers(ALL_HANDLERS)
    # Tell mfr where its static assets are served from (URL) and stored (disk).
    assets_url = os.path.join(app.static_url_path, 'mfr')
    assets_folder = os.path.join(app.static_folder, 'mfr')
    mfr.config.update({
        'ASSETS_URL': assets_url,
        'ASSETS_FOLDER': assets_folder,
    })
    mfr.collect_static(dest=mfr.config['ASSETS_FOLDER'])
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,285
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/figshare/views/crud.py
|
# -*- coding: utf-8 -*-
import os
import datetime
import httplib as http
from urllib2 import urlopen
from flask import request, make_response
from modularodm import Q
from framework.exceptions import HTTPError
from framework.flask import redirect
from framework.auth.utils import privacy_info_handle
from framework.utils import secure_filename
from website.addons.base.views import check_file_guid
from website.project import decorators # noqa
from website.project.decorators import must_be_contributor_or_public, must_be_contributor # noqa
from website.project.decorators import must_have_addon
from website.project.views.node import _view_project
from website.project.views.file import get_cache_content
from website.project.model import has_anonymous_link
from website.addons.figshare import settings as figshare_settings
from website.addons.figshare.model import FigShareGuidFile
from ..api import Figshare
from website.addons.figshare import messages
# Helpers
# ----------------- PROJECTS ---------------
# PROJECTS: U
@decorators.must_have_permission('write')
@decorators.must_have_addon('figshare', 'node')
@decorators.must_not_be_registration
def figshare_add_article_to_project(**kwargs):
    """Attach a figshare article to the figshare project linked to this node.

    400 when no project_id is supplied.
    """
    node = kwargs['node'] or kwargs['project']
    figshare = node.get_addon('figshare')
    project_id = kwargs.get('project_id')
    if project_id is None:
        raise HTTPError(http.BAD_REQUEST)
    article_id = kwargs.get('aid')
    article = None
    connect = Figshare.from_settings(figshare.user_settings)
    if not article_id:
        # NOTE(review): file_as_article returns {'title', 'files'} with no
        # 'article_id' key, so the subscript below would raise KeyError; and
        # when `aid` IS given, `article` stays None, so the subscript raises
        # TypeError. This flow looks broken either way — confirm intent.
        article = file_as_article(figshare)
    connect.add_article_to_project(figshare, article['article_id'], project_id)
# PROJECTS: D
@decorators.must_have_permission('write')
@decorators.must_have_addon('figshare', 'node')
@decorators.must_not_be_registration
def figshare_remove_article_from_project(*args, **kwargs):
    """Detach article `aid` from figshare project `project_id`; 400 when
    either identifier is missing.
    """
    node = kwargs['node'] or kwargs['project']
    addon = node.get_addon('figshare')
    project_id = kwargs.get('project_id') or None
    article_id = kwargs.get('aid') or None
    if not (project_id and article_id):
        raise HTTPError(http.BAD_REQUEST)
    api = Figshare.from_settings(addon.user_settings)
    api.remove_article_from_project(addon, article_id, project_id)
# ---------------- ARTICLES -------------------
# ARTICLES: C
def file_as_article(figshare):
    # Build a minimal figshare article payload (title + files) from the file
    # uploaded in the current request.
    # NOTE(review): the `figshare` parameter is never used, and some callers
    # pass the upload object itself rather than the addon — confirm intent.
    upload = request.files['file']
    filename = secure_filename(upload.filename)
    article = {
        'title': filename,
        'files': [upload]
    }
    return article
@decorators.must_be_contributor_or_public
@decorators.must_have_addon('figshare', 'node')
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def figshare_upload(*args, **kwargs):
    """Upload the request's file to the node's figshare container.

    For a project-type addon with no `aid`, a new article is created and
    attached to the project; otherwise the file is added to the existing
    article. Logs the upload on the node and returns the API response.
    """
    node = kwargs['node'] or kwargs['project']
    figshare = node.get_addon('figshare')
    upload = request.files['file']
    connect = Figshare.from_settings(figshare.user_settings)
    fs_id = kwargs.get('aid', figshare.figshare_id)
    if fs_id is None:
        raise HTTPError(http.BAD_REQUEST)
    if figshare.figshare_type == 'project' and not kwargs.get('aid', None):
        # NOTE(review): file_as_article takes the addon but is handed the
        # upload object here (it ignores its argument anyway) — confirm.
        item = connect.create_article(figshare, file_as_article(upload))
    else:
        item = connect.article(figshare, fs_id)
    if not item:
        raise HTTPError(http.BAD_REQUEST)
    resp = connect.upload_file(node, figshare, item['items'][0], upload)
    #TODO Clean me up
    added = True
    if figshare.figshare_type == 'project' and not kwargs.get('aid', None):
        added = connect.add_article_to_project(figshare, figshare.figshare_id, str(item['items'][0]['article_id']))
    if resp and added:
        node.add_log(
            action='figshare_file_added',
            params={
                'project': node.parent_id,
                'node': node._primary_key,
                'path': upload.filename,  # TODO Path?
                'urls': {
                    'view': resp['urls']['view'],
                    'download': resp['urls']['download'],
                },
                'figshare': {
                    'id': figshare.figshare_id,
                    'type': figshare.figshare_type
                }
            },
            auth=kwargs['auth'],
            log_date=datetime.datetime.utcnow(),
        )
        return resp
    else:
        raise HTTPError(http.INTERNAL_SERVER_ERROR)  # TODO better error?
@decorators.must_be_contributor_or_public
@decorators.must_have_addon('figshare', 'node')
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def figshare_upload_file_as_article(*args, **kwargs):
    """Create a new figshare article from the request's uploaded file and
    upload the file into it, logging the event on the node.
    """
    node = kwargs['node'] or kwargs['project']
    figshare = node.get_addon('figshare')
    upload = request.files['file']
    project_id = kwargs.get('project_id') or figshare.figshare_id
    if project_id is None:
        raise HTTPError(http.BAD_REQUEST)
    connect = Figshare.from_settings(figshare.user_settings)
    # NOTE(review): file_as_article is handed the upload object though its
    # parameter is named `figshare` (it ignores the argument) — confirm.
    article = connect.create_article(figshare, file_as_article(upload))
    rv = connect.upload_file(node, figshare, article['items'][0], upload)
    if rv:
        node.add_log(
            action='figshare_file_added',
            params={
                'project': node.parent_id,
                'node': node._primary_key,
                'path': upload.filename,  # TODO Path?
                'urls': {
                    'view': rv['urls']['view'],
                    'download': rv['urls']['download'],
                },
                'figshare': {
                    'id': figshare.figshare_id,
                    'type': figshare.figshare_type
                }
            },
            auth=kwargs['auth'],
            log_date=datetime.datetime.utcnow(),
        )
        return rv
    else:
        raise HTTPError(http.INTERNAL_SERVER_ERROR)  # TODO better error?
@decorators.must_have_permission('write')
@decorators.must_have_addon('figshare', 'node')
@decorators.must_not_be_registration
def figshare_publish_article(*args, **kwargs):
    """Set the category on article `aid` and publish it.

    400 when `aid` or the request's `category` is missing.
    """
    node = kwargs['node'] or kwargs['project']
    addon = node.get_addon('figshare')
    article_id = kwargs.get('aid')
    if article_id is None:
        raise HTTPError(http.BAD_REQUEST)
    category = request.json.get('category', '')
    tags = request.json.get('tags', '')  # noqa -- read but unused, as before
    if not category:
        raise HTTPError(http.BAD_REQUEST)
    api = Figshare.from_settings(addon.user_settings)
    api.update_article(addon, article_id, {'category_id': category})
    api.publish_article(addon, article_id)
    return {"published": True}
# ARTICLES: D
def figshare_delete_article(*args, **kwargs):
    # TODO implement me?
    # Intentional no-op: article deletion is not supported yet.
    pass
# ----------------- FILES --------------------
# FILES: C
@decorators.must_be_contributor_or_public
@decorators.must_have_addon('figshare', 'node')
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def figshare_upload_file_to_article(*args, **kwargs):
    """Upload the request's file into article `aid` (creating an article
    when no `aid` is given) and log the upload on the node.
    """
    node = kwargs['node'] or kwargs['project']
    figshare = node.get_addon('figshare')
    article = kwargs.get('aid') or None
    connect = Figshare.from_settings(figshare.user_settings)
    if not article:
        # NOTE(review): create_article is called with no arguments here,
        # unlike every other call site which passes (addon, payload) —
        # this branch likely raises TypeError; confirm.
        article = connect.create_article()
    article = connect.article(figshare, article)['items'][0]
    upload = request.files['file']
    rv = connect.upload_file(
        node,
        figshare,
        article,
        upload
    )
    node.add_log(
        action='figshare_file_added',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'path': upload.filename,  # TODO Path?
            'urls': {
                'view': rv['urls']['view'],
                'download': rv['urls']['download'],
            },
            'figshare': {
                'id': figshare.figshare_id,
                'type': figshare.figshare_type
            }
        },
        auth=kwargs['auth'],
        log_date=datetime.datetime.utcnow(),
    )
    return rv
# FILES: R
@must_be_contributor_or_public
@must_have_addon('figshare', 'node')
def figshare_view_file(*args, **kwargs):
    """Build the template context for viewing a single figshare file.

    Resolves the article/file pair (404 when either is missing or does not
    belong to this node's container), creates a GUID for the file on first
    visit, renders and caches the file content, and returns the view-page
    context merged with the standard project context.

    Bug fixed: the GUID lookup used a bare ``except:``, which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to ``except Exception``.
    """
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    article_id = kwargs.get('aid') or None
    file_id = kwargs.get('fid') or None
    anonymous = has_anonymous_link(node, auth)
    if not article_id or not file_id:
        raise HTTPError(http.NOT_FOUND)
    connect = Figshare.from_settings(node_settings.user_settings)
    if node_settings.figshare_type == 'project':
        item = connect.project(node_settings, node_settings.figshare_id)
    else:
        item = connect.article(node_settings, node_settings.figshare_id)
    if article_id not in str(item):
        raise HTTPError(http.NOT_FOUND)
    article = connect.article(node_settings, article_id)
    found = False
    for f in article['items'][0]['files']:
        if f['id'] == int(file_id):
            found = f
            break
    if not found:
        raise HTTPError(http.NOT_FOUND)
    try:
        # If GUID has already been created, we won't redirect, and can check
        # whether the file exists below
        guid = FigShareGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('article_id', 'eq', article_id) &
            Q('file_id', 'eq', file_id)
        )
    except Exception:
        # "Not found" is the expected failure here: create the GUID lazily.
        guid = FigShareGuidFile(node=node, article_id=article_id, file_id=file_id)
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    private = not(article['items'][0]['status'] == 'Public')
    figshare_url = 'http://figshare.com/'
    if private:
        figshare_url += 'preview/_preview/{0}'.format(article['items'][0]['article_id'])
    else:
        figshare_url += 'articles/{0}/{1}'.format(article['items'][0]['title'].replace(' ', '_'), article['items'][0]['article_id'])
    # NOTE(review): the literal "(unknown)" below looks like a lost
    # {filename} placeholder — the `filename=` kwarg is otherwise unused.
    version_url = "http://figshare.com/articles/(unknown)/{file_id}".format(
        filename=article['items'][0]['title'], file_id=article['items'][0]['article_id'])
    download_url = node.api_url + 'figshare/download/article/{aid}/file/{fid}'.format(aid=article_id, fid=file_id)
    render_url = node.api_url + \
        'figshare/render/article/{aid}/file/{fid}'.format(aid=article_id, fid=file_id)
    delete_url = node.api_url + 'figshare/article/{aid}/file/{fid}/'.format(aid=article_id, fid=file_id)
    filename = found['name']
    cache_file_name = get_cache_file(
        article_id, file_id
    )
    rendered = get_cache_content(node_settings, cache_file_name)
    if private:
        rendered = messages.FIGSHARE_VIEW_FILE_PRIVATE.format(url='http://figshare.com/')
    elif rendered is None:
        filename, size, filedata = connect.get_file(node_settings, found)
        if figshare_settings.MAX_RENDER_SIZE is not None and size > figshare_settings.MAX_RENDER_SIZE:
            rendered = messages.FIGSHARE_VIEW_FILE_OVERSIZED.format(
                url=found.get('download_url'))
        else:
            rendered = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                remote_path=filename,
                file_content=filedata,
                download_url=download_url,
            )
    # categories = connect.categories()['items'] # TODO Cache this
    # categories = ''.join(
    # ["<option value='{val}'>{label}</option>".format(val=i['id'], label=i['name']) for i in categories])
    rv = {
        'node': {
            'id': node._id,
            'title': node.title
        },
        'file_name': filename,
        'rendered': rendered,
        'file_status': article['items'][0]['status'],
        'file_version': article['items'][0]['version'],
        'doi': 'http://dx.doi.org/10.6084/m9.figshare.{0}'.format(article['items'][0]['article_id']),
        'parent_type': 'fileset' if article['items'][0]['defined_type'] == 'fileset' else 'singlefile',
        'parent_id': article['items'][0]['article_id'],
        # 'figshare_categories': categories,
        'figshare_title': article['items'][0]['title'],
        'figshare_desc': article['items'][0]['description'],
        'render_url': render_url,
        'urls': {
            'render': render_url,
            'download': found.get('download_url'),
            'version': version_url,
            'figshare': privacy_info_handle(figshare_url, anonymous),
            'delete': delete_url,
            'files': node.web_url_for('collect_file_trees')
        }
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def get_cache_file(article_id, file_id):
    """Cache filename for a rendered figshare file: '<file_id>_<article_id>.html'."""
    return '{0}_{1}.html'.format(file_id, article_id)
# FILES: D
@decorators.must_be_contributor_or_public
@decorators.must_have_addon('figshare', 'node')
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def figshare_delete_file(*args, **kwargs):
    """Delete file `fid` from article `aid`; 400 when either id is missing."""
    node = kwargs['node'] or kwargs['project']
    figshare = node.get_addon('figshare')
    file_id = kwargs.get('fid', '')
    article_id = kwargs.get('aid', '')
    # Bug fixed: the ids default to '' (never None), so the previous
    # `is None` comparison could not detect missing values.
    if not file_id or not article_id:
        raise HTTPError(http.BAD_REQUEST)
    connect = Figshare.from_settings(figshare.user_settings)
    #connect.remove_article_from_project(figshare, figshare.figshare_id, article_id)
    return connect.delete_file(node, figshare, article_id, file_id)
@must_be_contributor_or_public
@must_have_addon('figshare', 'node')
def figshare_get_rendered_file(*args, **kwargs):
    """Serve the cached rendered HTML for an article/file pair."""
    node_settings = kwargs['node_addon']
    cache_name = get_cache_file(kwargs['aid'], kwargs['fid'])
    return get_cache_content(node_settings, cache_name)
@must_be_contributor_or_public
@must_have_addon('figshare', 'node')
def figshare_download_file(*args, **kwargs):
    """Stream a figshare file to the client as an HTTP attachment.

    404 when the file id is not present in the article.
    """
    node_settings = kwargs['node_addon']
    article_id = kwargs['aid']
    file_id = kwargs['fid']
    connect = Figshare.from_settings(node_settings.user_settings)
    article = connect.article(node_settings, article_id)
    found = None
    for f in article['items'][0]['files']:
        if str(f['id']) == file_id:
            found = f
    if not found:
        # Bug fixed: the function previously fell through and implicitly
        # returned None, producing an opaque framework error instead of 404.
        raise HTTPError(http.NOT_FOUND)
    f = urlopen(found['download_url'])
    name = found['name']
    filedata = f.read()
    resp = make_response(filedata)
    resp.headers['Content-Disposition'] = 'attachment; filename={0}'.format(name)
    # Add binary MIME type if extension missing
    _, ext = os.path.splitext(name)
    if not ext:
        resp.headers['Content-Type'] = 'application/octet-stream'
    return resp
@decorators.must_be_contributor_or_public
@decorators.must_have_addon('figshare', 'node')
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def figshare_create_project(*args, **kwargs):
    """Create a figshare project named by the request body; 400 on failure
    or when the addon is absent/unauthorized or no name is given.
    """
    node_settings = kwargs['node_addon']
    project_name = request.json.get('project')
    if not (node_settings and node_settings.has_auth and project_name):
        raise HTTPError(http.BAD_REQUEST)
    api = Figshare.from_settings(node_settings.user_settings)
    created = api.create_project(node_settings, project_name)
    if not created:
        raise HTTPError(http.BAD_REQUEST)
    return created
@decorators.must_be_contributor_or_public
@decorators.must_have_addon('figshare', 'node')
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def figshare_create_fileset(*args, **kwargs):
    """Create a fileset-type figshare article named by the request body;
    400 on failure or when the addon is absent/unauthorized or unnamed.
    """
    node_settings = kwargs['node_addon']
    name = request.json.get('name')
    if not (node_settings and node_settings.has_auth and name):
        raise HTTPError(http.BAD_REQUEST)
    api = Figshare.from_settings(node_settings.user_settings)
    created = api.create_article(node_settings, {'title': name}, d_type='fileset')
    if not created:
        raise HTTPError(http.BAD_REQUEST)
    return created
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,286
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/github/views/crud.py
|
# -*- coding: utf-8 -*-
import os
import hashlib
import logging
import datetime
import httplib as http
from modularodm import Q
from modularodm.exceptions import ModularOdmException
from flask import request, make_response
from framework.exceptions import HTTPError
from framework.utils import secure_filename
from framework.flask import redirect # VOL-aware redirect
from website import models
from website.project.decorators import (
must_be_contributor_or_public, must_have_permission, must_have_addon,
must_not_be_registration
)
from website.project.views.node import _view_project
from website.project.views.file import get_cache_content
from website.project.model import has_anonymous_link
from website.addons.base.views import check_file_guid
from website.util import rubeus, permissions
from website.util.mimetype import get_mimetype
from website.addons.github import settings as github_settings
from website.addons.github.exceptions import (
NotFoundError, EmptyRepoError, TooBigError
)
from website.addons.github.api import GitHub, ref_to_params, build_github_urls
from website.addons.github.model import GithubGuidFile
from website.addons.github.utils import MESSAGES, get_path
logger = logging.getLogger(__name__)
@must_be_contributor_or_public
@must_have_addon('github', 'node')
def github_download_file(**kwargs):
    """Download a file from the node's GitHub repo as an HTTP attachment.

    400 when the file exceeds the GitHub API size limit; 404 when absent.
    """
    node_settings = kwargs['node_addon']
    path = get_path(kwargs)
    ref = request.args.get('sha')
    client = GitHub.from_settings(node_settings.user_settings)
    try:
        name, data, _ = client.file(
            node_settings.user, node_settings.repo, path, ref=ref
        )
    except TooBigError:
        raise HTTPError(
            http.BAD_REQUEST,
            data={
                'message_short': 'File too large',
                'message_long': 'This file is too large to download through '
                                'the GitHub API.',
            },
        )
    if data is None:
        raise HTTPError(http.NOT_FOUND)
    # Build the response; fall back to a generic binary content type when
    # detection fails.
    resp = make_response(data)
    mimetype = get_mimetype(path, data)
    resp.headers['Content-Type'] = (
        'application/octet-stream' if mimetype is None else mimetype
    )
    resp.headers['Content-Disposition'] = 'attachment; filename={0}'.format(
        name)
    return resp
def get_cache_file(path, sha):
    """Cache filename for a rendered GitHub file: 'md5(path)_<sha>.html'."""
    digest = hashlib.md5(path.encode('utf-8', 'ignore')).hexdigest()
    return '{0}_{1}.html'.format(digest, sha)
@must_be_contributor_or_public
@must_have_addon('github', 'node')
def github_view_file(auth, **kwargs):
    """Build the template context for viewing a file in the node's GitHub
    repo: resolves branch/SHA, creates a GUID on first visit, collects the
    commit history, renders (and caches) the file, and merges in the
    standard project context.
    """
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    path = get_path(kwargs)
    file_name = os.path.split(path)[1]
    # Get branch / commit
    branch = request.args.get('branch')
    sha = request.args.get('sha', branch)
    ref = sha or branch
    connection = GitHub.from_settings(node_settings.user_settings)
    # Get current file for delete url
    current_file = connection.contents(
        user=node_settings.user, repo=node_settings.repo, path=path,
        ref=sha or branch)
    anonymous = has_anonymous_link(node, auth)
    try:
        # If GUID has already been created, we won't redirect, and can check
        # whether the file exists below
        guid = GithubGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path)
        )
    except ModularOdmException:
        # If GUID doesn't exist, check whether file exists before creating
        commits = connection.history(
            node_settings.user, node_settings.repo, path, ref,
        )
        if not commits:
            raise HTTPError(http.NOT_FOUND)
        guid = GithubGuidFile(
            node=node,
            path=path,
        )
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    # Get default branch if neither SHA nor branch is provided
    if ref is None:
        repo = connection.repo(node_settings.user, node_settings.repo)
        ref = branch = repo.default_branch
    # Get file history; use SHA or branch if registered, else branch
    start_sha = ref if node.is_registration else branch
    commits = connection.history(
        node_settings.user, node_settings.repo, path, sha=start_sha
    )
    # Get current commit
    shas = [
        commit['sha']
        for commit in commits
    ]
    if not shas:
        raise HTTPError(http.NOT_FOUND)
    current_sha = sha if sha in shas else shas[0]
    # Get file URL
    download_url = '/' + guid._id + '/download/' + ref_to_params(branch, current_sha)
    render_url = os.path.join(
        node.api_url, 'github', 'file', path, 'render'
    ) + '/' + ref_to_params(branch, current_sha)
    delete_url = None
    if current_file:
        delete_url = node.api_url_for('github_delete_file', path=path) + ref_to_params(branch, current_file.sha)
    # Attach per-commit view/download URLs; scrub author identity for
    # anonymous (view-only-link) visitors.
    for commit in commits:
        commit['download'] = (
            '/' + guid._id + '/download/' + ref_to_params(sha=commit['sha'])
        )
        commit['view'] = (
            '/' + guid._id + '/' + ref_to_params(branch, sha=commit['sha'])
        )
        if anonymous:
            commit['name'] = 'A user'
            commit['email'] = ''
    # Get or create rendered file
    cache_file_name = get_cache_file(
        path, current_sha,
    )
    rendered = get_cache_content(node_settings, cache_file_name)
    if rendered is None:
        try:
            _, data, size = connection.file(
                node_settings.user, node_settings.repo, path, ref=sha,
            )
            if data is None:
                # The file was deleted in this commit
                rendered = 'This file does not exist at this commit.'
        except TooBigError:
            rendered = 'File too large to download.'
        if rendered is None:
            # Skip if too large to be rendered.
            if github_settings.MAX_RENDER_SIZE is not None and size > github_settings.MAX_RENDER_SIZE:
                rendered = 'File too large to render; download file to view it.'
            else:
                rendered = get_cache_content(
                    node_settings,
                    cache_file_name,
                    start_render=True,
                    remote_path=guid.path,
                    file_content=data,
                    download_url=download_url,
                )
    rv = {
        'node': {
            'id': node._id,
            'title': node.title
        },
        'file_name': file_name,
        'files_page_url': node.web_url_for('collect_file_trees'),
        'current_sha': current_sha,
        'render_url': render_url,
        'rendered': rendered,
        'download_url': download_url,
        'delete_url': delete_url,
        'commits': commits,
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
@must_have_permission(permissions.WRITE)
@must_not_be_registration
@must_have_addon('github', 'node')
def github_upload_file(auth, node_addon, **kwargs):
    """Upload (create or update) a file in the node's linked GitHub repo.

    Reads the file from the multipart ``file`` field and ``branch``/``sha``
    from the query string. Returns an HGrid-style file dict and 201 on
    success; raises 400 on any failure.
    """
    node = kwargs['node'] or kwargs['project']
    user = auth.user
    # Captured once so the node log timestamp matches the upload time
    now = datetime.datetime.utcnow()
    path = get_path(kwargs, required=False) or ''
    branch = request.args.get('branch')
    sha = request.args.get('sha')
    # A target branch is mandatory for commits
    if branch is None:
        raise HTTPError(http.BAD_REQUEST)
    connection = GitHub.from_settings(node_addon.user_settings)
    upload = request.files.get('file')
    filename = secure_filename(upload.filename)
    content = upload.read()
    # Check max file size
    upload.seek(0, os.SEEK_END)
    size = upload.tell()
    if size > node_addon.config.max_file_size * 1024 * 1024:
        raise HTTPError(http.BAD_REQUEST)
    # Get SHA of existing file if present; requires an additional call to the
    # GitHub API
    try:
        tree = connection.tree(
            node_addon.user, node_addon.repo, sha=sha or branch
        ).tree
    except EmptyRepoError:
        # Repo has no commits yet; treat as "no existing files"
        tree = []
    except NotFoundError:
        raise HTTPError(http.BAD_REQUEST)
    existing = [
        thing
        for thing in tree
        if thing.path == os.path.join(path, filename)
    ]
    # Rebind `sha` to the blob SHA of the existing file (None if new);
    # also used below to choose between FILE_UPDATED and FILE_ADDED
    sha = existing[0].sha if existing else None
    author = {
        'name': user.fullname,
        'email': '{0}@osf.io'.format(user._id),
    }
    # Update in place when the path already exists, otherwise create
    if existing:
        data = connection.update_file(
            node_addon.user, node_addon.repo, os.path.join(path, filename),
            MESSAGES['update'], content, sha=sha, branch=branch, author=author
        )
    else:
        data = connection.create_file(
            node_addon.user, node_addon.repo, os.path.join(path, filename),
            MESSAGES['add'], content, branch=branch, author=author
        )
    if data is not None:
        # Pin view/download URLs to the new commit's SHA
        ref = ref_to_params(sha=data['commit'].sha)
        view_url = os.path.join(
            node.url, 'github', 'file', path, filename
        ) + '/' + ref
        download_url = os.path.join(
            node.url, 'github', 'file', path, filename, 'download'
        ) + '/' + ref
        node.add_log(
            action=(
                'github_' + (
                    models.NodeLog.FILE_UPDATED
                    if sha
                    else models.NodeLog.FILE_ADDED
                )
            ),
            params={
                'project': node.parent_id,
                'node': node._primary_key,
                'path': os.path.join(path, filename),
                'urls': {
                    'view': view_url,
                    'download': download_url,
                },
                'github': {
                    'user': node_addon.user,
                    'repo': node_addon.repo,
                    'sha': data['commit'].sha,
                },
            },
            auth=auth,
            log_date=now,
        )
        # Fail if file size is not provided; this happens when the file was
        # too large to upload to GitHub
        if data['content'].size is None:
            logger.error(
                'Could not upload file {0} to GitHub: No size provided'.format(
                    filename
                )
            )
            raise HTTPError(http.BAD_REQUEST)
        info = {
            'addon': 'github',
            'name': filename,
            'size': [
                data['content'].size,
                rubeus.format_filesize(data['content'].size),
            ],
            'kind': 'file',
            'urls': build_github_urls(
                data['content'], node.url, node.api_url, branch, sha,
            ),
            'permissions': {
                'view': True,
                'edit': True,
            },
        }
        return info, 201
    # GitHub returned nothing: the commit did not happen
    raise HTTPError(http.BAD_REQUEST)
@must_have_permission(permissions.WRITE)
@must_not_be_registration
@must_have_addon('github', 'node')
def github_delete_file(auth, node_addon, **kwargs):
    """Delete a file from the node's linked GitHub repo and log the removal.

    Requires ``sha`` (blob SHA of the file to delete) in the query string;
    ``branch`` is optional. Raises 400 when ``sha`` is missing or the
    GitHub delete fails; returns an empty dict on success.
    """
    node = kwargs['node'] or kwargs['project']
    now = datetime.datetime.utcnow()
    # Must remove trailing slash, else GitHub fails silently on delete
    path = get_path(kwargs).rstrip('/')
    sha = request.args.get('sha')
    if sha is None:
        raise HTTPError(http.BAD_REQUEST)
    branch = request.args.get('branch')
    # Commit author is the acting OSF user, with a synthetic osf.io email
    author = {
        'name': auth.user.fullname,
        'email': '{0}@osf.io'.format(auth.user._id),
    }
    connection = GitHub.from_settings(node_addon.user_settings)
    data = connection.delete_file(
        node_addon.user, node_addon.repo, path, MESSAGES['delete'],
        sha=sha, branch=branch, author=author,
    )
    if data is None:
        raise HTTPError(http.BAD_REQUEST)
    node.add_log(
        action='github_' + models.NodeLog.FILE_REMOVED,
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'path': path,
            'github': {
                'user': node_addon.user,
                'repo': node_addon.repo,
            },
        },
        auth=auth,
        log_date=now,
    )
    return {}
# TODO Add me Test me
@must_be_contributor_or_public
@must_have_addon('github', 'node')
def github_download_starball(node_addon, **kwargs):
    """Stream a tarball/zipball of the linked repo at the requested ref.

    ``archive`` comes from the route (defaults to 'tar'); ``sha`` from the
    query string (defaults to 'master'). GitHub's response headers are
    copied onto the outgoing response verbatim.
    """
    archive = kwargs.get('archive', 'tar')
    ref = request.args.get('sha', 'master')
    connection = GitHub.from_settings(node_addon.user_settings)
    headers, data = connection.starball(
        node_addon.user, node_addon.repo, archive, ref
    )
    resp = make_response(data)
    # Forward GitHub's headers (content type, disposition, etc.)
    for key, value in headers.iteritems():
        resp.headers[key] = value
    return resp
# File rendering #
@must_be_contributor_or_public
@must_have_addon('github', 'node')
def github_get_rendered_file(**kwargs):
    """Return the cached MFR-rendered HTML for the requested file/sha."""
    addon = kwargs['node_addon']
    file_path = get_path(kwargs)
    revision = request.args.get('sha')
    return get_cache_content(addon, get_cache_file(file_path, revision))
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,287
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/scripts/osfstorage/utils.py
|
#!/usr/bin/env python
# encoding: utf-8
def ensure_osf_files(settings):
    """Enable the legacy `osffiles` addon on a settings module.

    Sets ``COPY_GIT_REPOS`` and appends ``'osffiles'`` to
    ``ADDONS_REQUESTED`` if it is not already requested. Idempotent.
    """
    settings.COPY_GIT_REPOS = True
    requested = settings.ADDONS_REQUESTED
    if 'osffiles' not in requested:
        requested.append('osffiles')
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,288
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/citations/__init__.py
|
import os
from citeproc import CitationStylesStyle, CitationStylesBibliography
from citeproc import Citation, CitationItem
from citeproc import formatter
from citeproc.source.json import CiteProcJSON
from website.settings import CITATION_STYLES_PATH
def render(node, style='apa'):
    """Return the formatted citation string for a node.

    :param node: Node whose ``csl`` dict provides the citation data.
    :param str style: CSL style filename (without extension) under
        ``CITATION_STYLES_PATH``; defaults to APA.
    :return: The plain-text citation as a unicode string.
    """
    data = [node.csl, ]
    bib_source = CiteProcJSON(data)
    # validate=False: styles in the bundled repo are trusted as-is
    bib_style = CitationStylesStyle(os.path.join(CITATION_STYLES_PATH, style), validate=False)
    bibliography = CitationStylesBibliography(bib_style, bib_source, formatter.plain)
    citation = Citation([CitationItem(node._id)])
    bibliography.register(citation)

    # Deliberately swallow citeproc warnings (e.g. missing fields)
    def warn(citation_item):
        pass

    bibliography.cite(citation, warn)
    # Only one item was registered, so the bibliography has a single entry
    return unicode(bibliography.bibliography()[0])
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,289
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/dropbox/utils.py
|
# -*- coding: utf-8 -*-
import os
import logging
import httplib as http
from flask import make_response
from dropbox.rest import ErrorResponse
from framework.exceptions import HTTPError
from website.project.utils import get_cache_content
from website.util import rubeus
from website.addons.dropbox.client import get_node_addon_client
logger = logging.getLogger(__name__)
# TODO: Generalize this for other addons?
class DropboxNodeLogger(object):
    """Helper class for adding correctly-formatted Dropbox logs to nodes.

    Usage: ::

        from website.project.model import NodeLog

        file_obj = DropboxFile(path='foo/bar.txt')
        file_obj.save()
        node = ...
        auth = ...
        nodelogger = DropboxNodeLogger(node, auth, file_obj)
        nodelogger.log(NodeLog.FILE_REMOVED, save=True)

    :param Node node: The node to add logs to
    :param Auth auth: Authorization of the person who did the action.
    :param DropboxFile file_obj: File object for file-related logs.
    :param str path: Raw file path, used when no ``file_obj`` is available.
    """
    def __init__(self, node, auth, file_obj=None, path=None):
        self.node = node
        self.auth = auth
        self.file_obj = file_obj
        self.path = path

    def log(self, action, extra=None, save=False):
        """Log an event. Wraps the Node#add_log method, automatically adding
        relevant parameters and prefixing log events with `"dropbox_"`.

        :param str action: Log action. Should be a class constant from NodeLog.
        :param dict extra: Extra parameters to add to the ``params`` dict of the
            new NodeLog.
        :param bool save: Whether to save the node after adding the log.
        """
        params = {
            'project': self.node.parent_id,
            'node': self.node._primary_key,
            # deleted=True so the folder still appears in logs after the
            # addon is removed from the node
            'folder': self.node.get_addon('dropbox', deleted=True).folder
        }
        # If logging a file-related action, add the file's view and download URLs
        if self.file_obj or self.path:
            # Prefer the GUID record's path; fall back to the raw path
            path = self.file_obj.path if self.file_obj else self.path
            cleaned_path = clean_path(path)
            params.update({
                'urls': {
                    'view': self.node.web_url_for('dropbox_view_file', path=cleaned_path),
                    'download': self.node.web_url_for(
                        'dropbox_download', path=cleaned_path)
                },
                'path': cleaned_path,
            })
        if extra:
            params.update(extra)
        # Prefix the action with dropbox_
        self.node.add_log(
            action="dropbox_{0}".format(action),
            params=params,
            auth=self.auth
        )
        if save:
            self.node.save()
def is_subdir(path, directory):
    """Return whether `path` falls under `directory` (case-insensitive).

    Empty/None arguments are never subdirectories; everything is under '/'.
    NOTE: uses `os.path.commonprefix`, which compares character-wise, so
    '/a/bc' is considered under '/a/b' — preserved historical behavior.
    """
    if not path or not directory:
        return False
    # Root contains everything
    if directory == '/':
        return True
    # Normalize both to absolute, lowercase form before comparing
    abs_dir = os.path.abspath(directory).lower()
    abs_target = os.path.abspath(path).lower()
    return os.path.commonprefix([abs_target, abs_dir]) == abs_dir
def is_authorizer(auth, node_addon):
    """Return whether `auth.user` is the user who authorized this node addon."""
    authorizer = node_addon.user_settings.owner
    return auth.user == authorizer
def abort_if_not_subdir(path, directory):
    """Abort the current request with 403 unless `path` lies inside `directory`.

    Both arguments are normalized with `clean_path` before the check.
    Returns True when access is allowed.
    """
    allowed = is_subdir(clean_path(path), clean_path(directory))
    if not allowed:
        raise HTTPError(http.FORBIDDEN)
    return True
def get_file_name(path):
    """Return just the base filename of `path`.

    Trailing/leading slashes are ignored, so "/foo/bar/baz.txt/" -> "baz.txt".
    """
    trimmed = path.strip('/')
    return os.path.basename(trimmed)
def clean_path(path):
    """Normalize a Dropbox path for use with url_for.

    None becomes '', the root '/' is kept as-is, and any other path has
    its leading/trailing slashes stripped.
    """
    if path is None:
        return ''
    return path if path == '/' else path.strip('/')
def make_file_response(fileobject, metadata):
    """Builds a response from a file-like object and metadata returned by
    a Dropbox client.

    :param fileobject: File-like object whose contents become the body.
    :param dict metadata: Dropbox metadata; 'path' is required, 'rev' and
        'mime_type' are optional.
    :return: A Flask response with attachment Content-Disposition.
    """
    resp = make_response(fileobject.read())
    filename = get_file_name(metadata['path'])
    rev = metadata.get('rev')
    if rev:
        # add revision to filename
        # foo.mp3 -> foo-abc123.mp3
        filename = '-{rev}'.format(rev=rev).join(os.path.splitext(filename))
    # Force download rather than inline display
    disposition = 'attachment; filename={0}'.format(filename)
    resp.headers['Content-Disposition'] = disposition
    resp.headers['Content-Type'] = metadata.get('mime_type', 'application/octet-stream')
    return resp
def render_dropbox_file(file_obj, client=None, rev=None):
    """Render a DropboxFile with the MFR.

    :param DropboxFile file_obj: The file's GUID record.
    :param DropboxClient client: Optional client; one is built from the
        node's addon settings when not provided.
    :param str rev: Revision ID.
    :return: The HTML for the rendered file, or an error paragraph when
        Dropbox refuses to serve the file.
    """
    # Filename for the cached MFR HTML file
    cache_file_name = file_obj.get_cache_filename(client=client, rev=rev)
    node_settings = file_obj.node.get_addon('dropbox')
    rendered = get_cache_content(node_settings, cache_file_name)
    if rendered is None:  # not in MFR cache
        dropbox_client = client or get_node_addon_client(node_settings)
        try:
            file_response, metadata = dropbox_client.get_file_and_metadata(
                file_obj.path, rev=rev)
        except ErrorResponse as err:
            logger.error(err.body['error'])
            # Dropbox uses 461 for DMCA takedowns
            if err.status == 461:
                message = ('This file is no longer available due to a takedown request '
                           'under the Digital Millennium Copyright Act.')
            else:
                message = 'This Dropbox file cannot be rendered.'
            return ''.join(['<p class="text-danger">', message, '</p>'])
        # Kick off a render and cache the result for subsequent requests
        rendered = get_cache_content(
            node_settings=node_settings,
            cache_file_name=cache_file_name,
            start_render=True,
            remote_path=file_obj.path,
            file_content=file_response.read(),
            download_url=file_obj.download_url(guid=True, rev=rev),
        )
    return rendered
def ensure_leading_slash(path):
    """Return `path` guaranteed to start with a single leading '/'."""
    return path if path.startswith('/') else '/' + path
def build_dropbox_urls(item, node):
    """Build the endpoint URLs HGrid needs for one Dropbox file or folder."""
    path = clean_path(item['path'])  # Strip trailing and leading slashes
    if not item['is_dir']:
        # File: web views plus the delete API endpoint
        return {
            'download': node.web_url_for('dropbox_download', path=path),
            'view': node.web_url_for('dropbox_view_file', path=path),
            'delete': node.api_url_for('dropbox_delete_file', path=path)
        }
    # Folder: upload target plus contents-listing endpoints
    return {
        'upload': node.api_url_for('dropbox_upload', path=path),
        # Endpoint for fetching all of a folder's contents
        'fetch': node.api_url_for('dropbox_hgrid_data_contents', path=path),
        # Folders-only listing used by the node settings page
        # NOTE: querystring params in camel-case
        'folders': node.api_url_for('dropbox_hgrid_data_contents',
                                    path=path, foldersOnly=1)
    }
def metadata_to_hgrid(item, node, permissions):
    """Serializes a dictionary of metadata (returned from the DropboxClient)
    to the format expected by Rubeus/HGrid.

    :param dict item: Dropbox metadata with 'path' and 'is_dir' keys.
    :param Node node: Node used to build the item's URLs.
    :param dict permissions: 'view'/'edit' flags passed through verbatim.
    :return: Dict in HGrid serialization format.
    """
    filename = get_file_name(item['path'])
    serialized = {
        'addon': 'dropbox',
        'permissions': permissions,
        # Reuse the already-computed filename instead of calling
        # get_file_name(item['path']) a second time
        'name': filename,
        'ext': os.path.splitext(filename)[1],
        rubeus.KIND: rubeus.FOLDER if item['is_dir'] else rubeus.FILE,
        'urls': build_dropbox_urls(item, node),
        'path': item['path'],
    }
    return serialized
def get_share_folder_uri(path):
    """Return the URI for sharing a folder through the dropbox interface.

    This is not exposed through Dropbox's REST API, so the URI is built
    "manually" against the Dropbox web interface.
    """
    cleaned = clean_path(path)
    template = ('https://dropbox.com/home/{cleaned}'
                '?shareoptions=1&share_subfolder=0&share=1')
    return template.format(cleaned=cleaned)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,290
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/dropbox/views/crud.py
|
# -*- coding: utf-8 -*-
import os
import httplib as http
from flask import request
from dropbox.rest import ErrorResponse
from modularodm import Q
from modularodm.exceptions import ModularOdmException
from website.project.model import NodeLog
from framework.flask import redirect # VOL-aware redirect
from website.project.utils import serialize_node
from website.project.decorators import must_have_permission
from website.project.decorators import must_not_be_registration
from website.project.decorators import must_have_addon
from website.project.decorators import must_be_contributor_or_public
from website.addons.base.views import check_file_guid
from framework.exceptions import HTTPError
from website.addons.dropbox.model import DropboxFile
from website.addons.dropbox.client import get_node_addon_client
from website.addons.dropbox.utils import (
render_dropbox_file,
get_file_name,
metadata_to_hgrid,
clean_path,
DropboxNodeLogger,
make_file_response,
abort_if_not_subdir,
is_authorizer,
)
@must_have_permission('write')
@must_not_be_registration
@must_have_addon('dropbox', 'node')
def dropbox_delete_file(path, auth, node_addon, **kwargs):
    """Delete a file from the node's linked Dropbox folder and log it.

    Raises 400 when `path` or `auth` is missing; non-authorizers may only
    delete inside the node's linked folder (403 otherwise).
    """
    node = node_addon.owner
    if path and auth:
        # Check that user has access to the folder of the file to be deleted
        if not is_authorizer(auth, node_addon):
            abort_if_not_subdir(path, node_addon.folder)
        client = get_node_addon_client(node_addon)
        client.file_delete(path)
        # log the event
        nodelogger = DropboxNodeLogger(node=node, auth=auth, path=path)
        nodelogger.log(NodeLog.FILE_REMOVED, save=True)
        return None
    raise HTTPError(http.BAD_REQUEST)
@must_have_permission('write')
@must_not_be_registration
@must_have_addon('dropbox', 'node')
def dropbox_upload(node_addon, auth, **kwargs):
    """View for uploading a file from the filebrowser interface. Must return
    the Rubeus/HGrid representation of the newly added file.

    Raises 401 when the Dropbox token is invalid, 403 when a non-authorizer
    uploads outside the linked folder, 400 on any other failure.
    """
    # Single assignment — the original assigned `node` twice
    node = node_addon.owner
    # Route may or may not have a path
    path = kwargs.get('path', node_addon.folder)
    client = get_node_addon_client(node_addon)
    file_obj = request.files.get('file', None)
    if path and file_obj and client:
        filepath = os.path.join(path, file_obj.filename)
        # Check that user has access to the folder being uploaded to
        if not is_authorizer(auth, node_addon):
            abort_if_not_subdir(path, node_addon.folder)
        try:
            metadata = client.put_file(filepath, file_obj)
        except ErrorResponse as error:
            if error.status == 401:
                raise HTTPError(http.UNAUTHORIZED, data=dict(message_short='Invalid Access Token',
                    message_long='Your Dropbox token is no longer valid. '
                    + 'Check your project settings page for details.'))
            else:
                raise HTTPError(http.BAD_REQUEST)
        permissions = {
            'edit': node.can_edit(auth),
            'view': node.can_view(auth)
        }
        # Log the event
        nodelogger = DropboxNodeLogger(node=node, auth=auth, path=filepath)
        nodelogger.log(NodeLog.FILE_ADDED, save=True)
        # Return the HGrid-formatted JSON response
        return metadata_to_hgrid(metadata,
            node=node, permissions=permissions), http.CREATED
    raise HTTPError(http.BAD_REQUEST)
@must_be_contributor_or_public
@must_have_addon('dropbox', 'node')
def dropbox_download(path, node_addon, auth, **kwargs):
    """Stream a Dropbox file (optionally at revision `rev`) as a download."""
    if not path:
        raise HTTPError(http.BAD_REQUEST)
    # Check if current user has access to the path
    if not is_authorizer(auth, node_addon):
        abort_if_not_subdir(path, node_addon.folder)
    revision = request.args.get('rev') or ''
    dropbox_client = get_node_addon_client(node_addon)
    fileobject, metadata = dropbox_client.get_file_and_metadata(path, rev=revision)
    return make_file_response(fileobject, metadata)
@must_be_contributor_or_public
@must_have_addon('dropbox', 'node')
def dropbox_get_revisions(path, node_addon, auth, **kwargs):
    """API view that gets a list of revisions for a file.

    Deleted revisions are filtered out. Each revision gains 'download' and
    'view' URLs (GUID short URLs when a DropboxFile record exists).
    """
    # Check if current user has access to the path
    if not is_authorizer(auth, node_addon):
        abort_if_not_subdir(path, node_addon.folder)
    node = node_addon.owner
    client = get_node_addon_client(node_addon)
    # Get metadata for each revision of the file
    # Don't show deleted revisions
    revisions = [rev for rev in client.revisions(path) if not rev.get('is_deleted')]
    # Return GUID short urls if a GUID record exists
    try:
        file_obj = DropboxFile.find_one(Q('node', 'eq', node) & Q('path', 'eq', path))
    except ModularOdmException:
        # No GUID record yet for this path
        file_obj = None
    for revision in revisions:
        # Add download and view links
        rev = revision.get('rev') or ''
        if file_obj:
            download_url = file_obj.download_url(guid=True, rev=rev)
            view_url = file_obj.url(guid=True, rev=rev)
        else:  # No GUID, use long URLs
            download_url = node.web_url_for('dropbox_download',
                path=path, rev=rev)
            view_url = node.web_url_for('dropbox_view_file', path=path, rev=rev)
        revision['download'] = download_url
        revision['view'] = view_url
    return {
        'result': revisions,
        # Hyperlinks sans revision ID
        'urls': {
            'download': node.web_url_for('dropbox_download', path=path),
            'delete': node.api_url_for('dropbox_delete_file', path=path),
            'view': node.web_url_for('dropbox_view_file', path=path),
            'files': node.web_url_for('collect_file_trees'),
        },
        'node': {
            'id': node._id,
            'title': node.title,
        },
        'path': path,
        'registered': node.registered_date.isoformat() if node.registered_date else None,
    }, http.OK
@must_be_contributor_or_public
@must_have_addon('dropbox', 'node')
def dropbox_view_file(path, node_addon, auth, **kwargs):
    """Web view for the file detail page.

    Lazily creates a GUID record for the file and redirects to the short
    GUID URL when appropriate; otherwise serializes the file detail page.
    """
    if not path:
        raise HTTPError(http.NOT_FOUND)
    # check that current user has access to the path
    if not is_authorizer(auth, node_addon):
        abort_if_not_subdir(path, node_addon.folder)
    node = node_addon.owner
    client = get_node_addon_client(node_addon)
    # Lazily create a file GUID record
    file_obj, created = DropboxFile.get_or_create(node=node, path=path)
    # Redirect to the canonical GUID URL if we're not already on it
    redirect_url = check_file_guid(file_obj)
    if redirect_url:
        return redirect(redirect_url)
    rev = request.args.get('rev') or ''
    rendered = render_dropbox_file(file_obj, client=client, rev=rev)
    cleaned_path = clean_path(path)
    response = {
        'revisions_url': node.api_url_for('dropbox_get_revisions',
            path=cleaned_path, rev=rev),  # Append current revision as a query param
        'file_name': get_file_name(path),
        'render_url': node.api_url_for('dropbox_render_file', path=cleaned_path),
        'download_url': file_obj.download_url(guid=True, rev=rev),
        'rendered': rendered,
    }
    response.update(serialize_node(node, auth, primary=True))
    return response, http.OK
##### MFR Rendering #####
@must_be_contributor_or_public
@must_have_addon('dropbox', 'node')
def dropbox_render_file(path, node_addon, auth, **kwargs):
    """View polled by the FileRenderer; returns the rendered HTML for the
    requested file.
    """
    # check that current user has access to the path
    if not is_authorizer(auth, node_addon):
        abort_if_not_subdir(path, node_addon.folder)
    node = node_addon.owner
    record = DropboxFile.find_one(Q('node', 'eq', node) & Q('path', 'eq', path))
    revision = request.args.get('rev', '')
    dropbox_client = get_node_addon_client(node_addon)
    return render_dropbox_file(record, client=dropbox_client, rev=revision)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,291
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/tests/test_features.py
|
import os
import unittest
from website import settings
# Skip decorators for tests that depend on optional services/features.
# Each evaluates its condition at import time against the loaded settings.

# Requires a configured search backend
requires_search = unittest.skipIf(
    not settings.SEARCH_ENGINE,
    'search disabled'
)

# Requires a Piwik analytics host
requires_piwik = unittest.skipIf(
    settings.PIWIK_HOST is None,
    'no PIWIK_HOST specified in settings'
)

# Requires GnuPG support to be enabled
requires_gnupg = unittest.skipIf(
    not settings.USE_GNUPG,
    'gnupg disabled'
)

# Requires the CSL styles git submodule to be checked out
requires_csl_styles = unittest.skipIf(
    not os.path.exists(os.path.join(settings.CITATION_STYLES_PATH, '.git')),
    'CSL styles not detected'
)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,292
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/figshare/views/__init__.py
|
from . import auth, config, crud, hgrid, widget # noqa
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,293
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/tests/test_addons.py
|
# -*- coding: utf-8 -*-
import webtest
import unittest
from nose.tools import *
from tests.base import OsfTestCase
from tests.factories import AuthUserFactory, ProjectFactory
import time
import furl
import itsdangerous
from framework.auth import signing
from framework.auth.core import Auth
from framework.exceptions import HTTPError
from framework.sessions.model import Session
from website import settings
from website.util import api_url_for
from website.project import new_private_link
from website.addons.base import AddonConfig, AddonNodeSettingsBase, views
from website.addons.github.model import AddonGitHubOauthSettings
class TestAddonConfig(unittest.TestCase):
    """Unit tests for AddonConfig static-URL building and settings defaults."""

    def setUp(self):
        # Minimal config; only fields required by the constructor
        self.addon_config = AddonConfig(
            short_name='test', full_name='test', owners=['node'],
            added_to={'node': False}, categories=[],
            settings_model=AddonNodeSettingsBase,
        )

    def test_static_url_relative(self):
        # Relative paths are resolved under the addon's static directory
        url = self.addon_config._static_url('foo')
        assert_equal(
            url,
            '/static/addons/test/foo'
        )

    def test_deleted_defaults_to_false(self):
        class MyAddonSettings(AddonNodeSettingsBase):
            pass

        config = MyAddonSettings()
        assert_is(config.deleted, False)

    def test_static_url_absolute(self):
        # Absolute paths are returned unchanged
        url = self.addon_config._static_url('/foo')
        assert_equal(
            url,
            '/foo'
        )
class SetEnvironMiddleware(object):
    """WSGI middleware that injects fixed key/value pairs into each
    request's environ before delegating to the wrapped app.
    """

    def __init__(self, app, **kwargs):
        self.app = app
        self.kwargs = kwargs

    def __call__(self, environ, start_response):
        for key, value in self.kwargs.items():
            environ[key] = value
        return self.app(environ, start_response)
class TestAddonAuth(OsfTestCase):
    """Functional tests for the waterbutler `get_auth` endpoint."""

    def setUp(self):
        super(TestAddonAuth, self).setUp()
        # Fix the client IP so IP-based checks are deterministic
        self.flask_app = SetEnvironMiddleware(self.app.app, REMOTE_ADDR='127.0.0.1')
        self.test_app = webtest.TestApp(self.flask_app)
        self.user = AuthUserFactory()
        self.auth_obj = Auth(user=self.user)
        self.node = ProjectFactory(creator=self.user)
        # Build a signed session cookie for the test user
        self.session = Session(data={'auth_user_id': self.user._id})
        self.session.save()
        self.cookie = itsdangerous.Signer(settings.SECRET_KEY).sign(self.session._id)
        self.configure_addon()

    def configure_addon(self):
        # Wire up a github addon on both the user and the node
        self.user.add_addon('github')
        self.user_addon = self.user.get_addon('github')
        self.oauth_settings = AddonGitHubOauthSettings(github_user_id='john')
        self.oauth_settings.save()
        self.user_addon.oauth_settings = self.oauth_settings
        self.user_addon.oauth_access_token = 'secret'
        self.user_addon.save()
        self.node.add_addon('github', self.auth_obj)
        self.node_addon = self.node.get_addon('github')
        self.node_addon.user = 'john'
        self.node_addon.repo = 'youre-my-best-friend'
        self.node_addon.user_settings = self.user_addon
        self.node_addon.save()

    def build_url(self, **kwargs):
        # Defaults form a valid request; tests override single params
        options = dict(
            action='download',
            cookie=self.cookie,
            nid=self.node._id,
            provider=self.node_addon.config.short_name,
        )
        options.update(kwargs)
        return api_url_for('get_auth', **options)

    def test_auth_download(self):
        url = self.build_url()
        res = self.test_app.get(url)
        assert_equal(res.json['auth'], views.make_auth(self.user))
        assert_equal(res.json['credentials'], self.node_addon.serialize_waterbutler_credentials())
        assert_equal(res.json['settings'], self.node_addon.serialize_waterbutler_settings())
        expected_url = furl.furl(self.node.api_url_for('create_waterbutler_log', _absolute=True))
        observed_url = furl.furl(res.json['callback_url'])
        # Ports may differ between the test server and the generated URL
        observed_url.port = expected_url.port
        assert_equal(expected_url, observed_url)

    def test_auth_missing_args(self):
        url = self.build_url(cookie=None)
        res = self.test_app.get(url, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_auth_bad_cookie(self):
        # Reversed cookie fails signature verification
        url = self.build_url(cookie=self.cookie[::-1])
        res = self.test_app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_auth_missing_addon(self):
        url = self.build_url(provider='queenhub')
        res = self.test_app.get(url, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_auth_bad_ip(self):
        # A non-whitelisted client IP is rejected
        flask_app = SetEnvironMiddleware(self.app.app, REMOTE_ADDR='192.168.1.1')
        test_app = webtest.TestApp(flask_app)
        url = self.build_url()
        res = test_app.get(url, expect_errors=True)
        assert_equal(res.status_code, 403)
class TestAddonLogs(OsfTestCase):
    """Functional tests for the waterbutler `create_waterbutler_log` endpoint."""

    def setUp(self):
        super(TestAddonLogs, self).setUp()
        # Fix the client IP so IP-based checks are deterministic
        self.flask_app = SetEnvironMiddleware(self.app.app, REMOTE_ADDR='127.0.0.1')
        self.test_app = webtest.TestApp(self.flask_app)
        self.user = AuthUserFactory()
        self.auth_obj = Auth(user=self.user)
        self.node = ProjectFactory(creator=self.user)
        self.session = Session(data={'auth_user_id': self.user._id})
        self.session.save()
        self.cookie = itsdangerous.Signer(settings.SECRET_KEY).sign(self.session._id)
        self.configure_addon()

    def configure_addon(self):
        # Wire up a github addon on both the user and the node
        self.user.add_addon('github')
        self.user_addon = self.user.get_addon('github')
        self.oauth_settings = AddonGitHubOauthSettings(github_user_id='john')
        self.oauth_settings.save()
        self.user_addon.oauth_settings = self.oauth_settings
        self.user_addon.oauth_access_token = 'secret'
        self.user_addon.save()
        self.node.add_addon('github', self.auth_obj)
        self.node_addon = self.node.get_addon('github')
        self.node_addon.user = 'john'
        self.node_addon.repo = 'youre-my-best-friend'
        self.node_addon.user_settings = self.user_addon
        self.node_addon.save()

    def build_payload(self, metadata, **kwargs):
        """Build a signed waterbutler payload; pass `key=None` to drop a field."""
        options = dict(
            auth={'id': self.user._id},
            action='create',
            provider=self.node_addon.config.short_name,
            metadata=metadata,
            # Future timestamp so the payload is not considered expired
            time=time.time() + 1000,
        )
        options.update(kwargs)
        # Drop None-valued keys so tests can simulate missing fields
        options = {
            key: value
            for key, value in options.iteritems()
            if value is not None
        }
        message, signature = signing.default_signer.sign_payload(options)
        return {
            'payload': message,
            'signature': signature,
        }

    def test_add_log(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path})
        nlogs = len(self.node.logs)
        self.test_app.put_json(url, payload, headers={'Content-Type': 'application/json'})
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs + 1)

    def test_add_log_missing_args(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path}, auth=None)
        nlogs = len(self.node.logs)
        res = self.test_app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs)

    def test_add_log_no_user(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path}, auth={'id': None})
        nlogs = len(self.node.logs)
        res = self.test_app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs)

    def test_add_log_no_addon(self):
        path = 'pizza'
        node = ProjectFactory(creator=self.user)
        url = node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path})
        nlogs = len(node.logs)
        res = self.test_app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        # BUG FIX: previously reloaded self.node, but the assertion below
        # checks the fresh `node` — reload the object actually under test
        node.reload()
        assert_equal(len(node.logs), nlogs)

    def test_add_log_bad_action(self):
        path = 'pizza'
        url = self.node.api_url_for('create_waterbutler_log')
        payload = self.build_payload(metadata={'path': path}, action='dance')
        nlogs = len(self.node.logs)
        res = self.test_app.put_json(
            url,
            payload,
            headers={'Content-Type': 'application/json'},
            expect_errors=True,
        )
        assert_equal(res.status_code, 400)
        self.node.reload()
        assert_equal(len(self.node.logs), nlogs)
class TestCheckAuth(OsfTestCase):
    """Unit tests for `views.check_access` permission checks."""

    def setUp(self):
        super(TestCheckAuth, self).setUp()
        self.user = AuthUserFactory()
        self.node = ProjectFactory(creator=self.user)

    def test_has_permission(self):
        res = views.check_access(self.node, self.user, 'upload')
        assert_true(res)

    def test_not_has_permission_read_public(self):
        self.node.is_public = True
        self.node.save()
        res = views.check_access(self.node, None, 'download')
        # BUG FIX: the result was computed but never asserted
        assert_true(res)

    def test_not_has_permission_read_has_link(self):
        link = new_private_link('red-special', self.user, [self.node], anonymous=False)
        res = views.check_access(self.node, None, 'download', key=link.key)
        # BUG FIX: the result was computed but never asserted
        assert_true(res)

    def test_not_has_permission_logged_in(self):
        user2 = AuthUserFactory()
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.node, user2, 'download')
        assert_equal(exc_info.exception.code, 403)

    def test_not_has_permission_not_logged_in(self):
        with assert_raises(HTTPError) as exc_info:
            views.check_access(self.node, None, 'download')
        assert_equal(exc_info.exception.code, 401)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,294
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/github/tests/webtest_tests.py
|
import mock
from nose.tools import * # PEP8 asserts
from tests.base import OsfTestCase
from tests.factories import ProjectFactory, AuthUserFactory, PrivateLinkFactory
from framework.auth import Auth
from website.addons.github.tests.utils import create_mock_github
from github3.repos import Repository
from github3.repos.commit import RepoCommit as Commit
class TestGitHubFileView(OsfTestCase):
    """Webtest tests for the GitHub add-on file views.

    The original duplicated ~40 lines of identical mock configuration in
    each of the four tests; that shared setup now lives in
    ``_configure_mocks`` so each test states only what differs.
    """

    def setUp(self):
        super(TestGitHubFileView, self).setUp()
        self.user = AuthUserFactory()
        self.consolidated_auth = Auth(user=self.user)
        self.project = ProjectFactory(creator=self.user)
        self.project.add_addon('github', auth=self.consolidated_auth)
        self.project.creator.add_addon('github')
        self.github = create_mock_github(user='fred', private=False)
        self.node_settings = self.project.get_addon('github')
        self.node_settings.user_settings = self.project.creator.get_addon('github')
        # Set the node addon settings to correspond to the values of the mock repo
        self.node_settings.user = self.github.repo.return_value.owner.login
        self.node_settings.repo = self.github.repo.return_value.name
        self.node_settings.save()

    def _configure_mocks(self, mock_repo, mock_file, mock_commits):
        """Point the patched GitHub API mocks at a canonical fixture:
        one commit, a repo whose tree holds a single 'coveragerc' blob,
        and that file's base64 contents.
        """
        mock_commits.return_value = [Commit.from_json({
            "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
            "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
            "commit": {
                "url": "https://api.github.com/repos/octocat/Hello-World/git/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "author": {
                    "name": "Monalisa Octocat",
                    "email": "support@github.com",
                    "date": "2011-04-14T16:00:49Z"
                }
            }
        })]
        mock_repo.return_value = Repository.from_json({
            "default_branch": "dev",
            'url': u'https://api.github.com/repos/{user}/mock-repo/git/trees/dev'.format(user=self.user),
            'sha': 'dev',
            'private': False,
            'tree': [
                {u'mode': u'100644',
                 u'path': u'coveragerc',
                 u'sha': u'92029ff5ce192425d346b598d7e7dd25f5f05185',
                 u'size': 245,
                 u'type': u'file',
                 u'url': u'https://api.github.com/repos/{user}/mock-repo/git/blobs/92029ff5ce192425d346b598d7e7dd25f5f05185'.format(user=self.user)}]
        })
        mock_file.return_value = {
            u'name': u'coveragerc',
            u'content': u'ClRleHRCbG9iOiBTaW1wbGlmaWVkIFRleHQgUHJvY2Vzc2luZwo9PT09PT09',
            u'size': 245
        }

    @mock.patch('website.addons.github.api.GitHub.commits')
    @mock.patch('website.addons.github.api.GitHub.file')
    @mock.patch('website.addons.github.api.GitHub.repo')
    def test_can_see_files_tab(self, mock_repo, mock_file, mock_commits):
        self._configure_mocks(mock_repo, mock_file, mock_commits)
        res = self.app.get(self.project.url, auth=self.user.auth)
        assert_in('a href="/{0}/files/"'.format(self.project._id), res)

    @mock.patch('website.addons.github.api.GitHub.commits')
    @mock.patch('website.addons.github.api.GitHub.file')
    @mock.patch('website.addons.github.api.GitHub.repo')
    @mock.patch('website.addons.github.api.GitHub.contents')
    def test_file_view(self, mock_contents, mock_repo, mock_file, mock_commits):
        mock_contents.return_value = None
        self._configure_mocks(mock_repo, mock_file, mock_commits)
        url = "/project/{0}/github/file/{1}/".format(
            self.project._id,
            "coveragerc"
        )
        self.app.auth = self.user.auth
        res = self.app.get(url).maybe_follow()
        assert_in("6dcb09b5b57875f334f61aebed695e2e4193db5e", res)
        assert_in("Thu Apr 14 16:00:49 2011", res)
        assert_in("file-version-history", res)
        assert_in("icon-download-alt", res)

    @mock.patch('website.addons.github.api.GitHub.commits')
    @mock.patch('website.addons.github.api.GitHub.file')
    @mock.patch('website.addons.github.api.GitHub.repo')
    @mock.patch('website.addons.github.api.GitHub.contents')
    def test_file_view_deleted(self, mock_contents, mock_repo, mock_file, mock_commits):
        mock_contents.return_value = None
        self._configure_mocks(mock_repo, mock_file, mock_commits)
        # File is absent at this commit: the API returns an empty triple.
        mock_file.return_value = (None, None, None)
        url = "/project/{0}/github/file/{1}/".format(
            self.project._id,
            "coveragerc"
        )
        self.app.auth = self.user.auth
        res = self.app.get(url).maybe_follow()
        assert_in("icon-download-alt", res)
        assert_in("Thu Apr 14 16:00:49 2011", res)
        assert_in("This file does not exist at this commit", res)
        assert_in("6dcb09b5b57875f334f61aebed695e2e4193db5e", res)

    @mock.patch('website.addons.github.api.GitHub.commits')
    @mock.patch('website.addons.github.api.GitHub.file')
    @mock.patch('website.addons.github.api.GitHub.repo')
    @mock.patch('website.addons.github.api.GitHub.contents')
    def test_file_view_with_anonymous_link(self, mock_contents, mock_repo, mock_file, mock_commits):
        mock_contents.return_value = None
        self._configure_mocks(mock_repo, mock_file, mock_commits)
        link = PrivateLinkFactory(anonymous=True)
        link.nodes.append(self.project)
        link.save()
        url = self.project.web_url_for('github_view_file', path="coveragerc")
        res = self.app.get(url, {'view_only': link.key}).maybe_follow()
        assert_in("6dcb09b5b57875f334f61aebed695e2e4193db5e", res)
        assert_in("Thu Apr 14 16:00:49 2011", res)
        assert_in("file-version-history", res)
        assert_in("icon-download-alt", res)
        # Anonymized view-only links must not leak contributor identity.
        assert_not_in("Monalisa Octocat", res)
        assert_not_in("support@github.com", res)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,295
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/figshare/routes.py
|
"""
"""
from framework.routing import Rule, json_renderer
from website.routes import OsfWebRenderer
from . import views
# Routes for widgets, CRUD and OAuth flows of the figshare add-on.
settings_routes = {
    'rules': [

        # Widget
        Rule([
            '/project/<pid>/figshare/widget/',
            '/project/<pid>/node/<nid>/figshare/widget/',
        ], 'get', views.widget.figshare_widget, json_renderer),

        # CRUD: Projects
        Rule([
            '/project/<pid>/figshare/project/<project_id>/article/<aid>',
            '/project/<pid>/node/<nid>/figshare/project/<project_id>/article/<aid>'
        ], 'delete', views.crud.figshare_remove_article_from_project, json_renderer),
        Rule([
            '/project/<pid>/figshare/article/<aid>/file/<fid>/',
            '/project/<pid>/node/<nid>/figshare/article/<aid>/file/<fid>/',
        ], 'delete', views.crud.figshare_delete_file, json_renderer),
        Rule([
            '/project/<pid>/figshare/',
            '/project/<pid>/figshare/<aid>/',
            # BUG FIX: this pattern was 'project/<pid>/node/<nid>/figshare/'
            # (missing the leading slash), so the component-level upload
            # route never matched.
            '/project/<pid>/node/<nid>/figshare/',
            '/project/<pid>/node/<nid>/figshare/<aid>/',
        ], 'post', views.crud.figshare_upload, json_renderer),

        # OAuth: Node
        Rule([
            '/project/<pid>/figshare/oauth/',
            '/project/<pid>/node/<nid>/figshare/oauth',
        ], 'get', views.auth.figshare_oauth_start, json_renderer),
        Rule([
            '/project/<pid>/figshare/user_auth/',
            '/project/<pid>/node/<nid>/figshare/user_auth/',
        ], 'post', views.auth.figshare_add_user_auth, json_renderer),
        Rule([
            '/project/<pid>/figshare/oauth/',
            '/project/<pid>/node/<nid>/figshare/oauth/',
        ], 'delete', views.auth.figshare_oauth_delete_node, json_renderer),

        # OAuth: User
        Rule(
            '/settings/figshare/oauth/',
            'get', views.auth.figshare_oauth_start, json_renderer,
            endpoint_suffix='__user'
        ),
        Rule(
            '/settings/figshare/oauth/',
            'delete', views.auth.figshare_oauth_delete_user, json_renderer
        ),

        # OAuth: General
        Rule([
            '/addons/figshare/callback/<uid>/',
            '/addons/figshare/callback/<uid>/<nid>/',
        ], 'get', views.auth.figshare_oauth_callback, json_renderer),

        # Creation of new figshare projects / filesets
        Rule([
            '/project/<pid>/figshare/new/project/',
            '/project/<pid>/node/<nid>/figshare/new/project/',
        ], 'post', views.crud.figshare_create_project, json_renderer),
        Rule([
            '/project/<pid>/figshare/new/fileset/',
            '/project/<pid>/node/<nid>/figshare/new/fileset/',
        ], 'post', views.crud.figshare_create_fileset, json_renderer)
    ],
    'prefix': '/api/v1',
}
# JSON API routes for the figshare add-on (node config, file grid, render/download).
api_routes = {
    'rules': [

        ##### Node settings #####
        Rule(
            ['/project/<pid>/figshare/config/',
             '/project/<pid>/node/<nid>/figshare/config/'],
            'get',
            views.config.figshare_config_get,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/figshare/config/',
             '/project/<pid>/node/<nid>/figshare/config/'],
            'put',
            views.config.figshare_config_put,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/figshare/hgrid/options/',
             '/project/<pid>/node/<nid>/figshare/hgrid/options/'],
            'get',
            views.config.figshare_get_options,
            json_renderer
        ),
        Rule(
            ['/project/<pid>/figshare/config/import-auth/',
             '/project/<pid>/node/<nid>/figshare/config/import-auth/'],
            'put',
            views.config.figshare_import_user_auth,
            json_renderer
        ),
        # DELETE on the config endpoint removes the node's authorization.
        Rule(
            ['/project/<pid>/figshare/config/',
             '/project/<pid>/node/<nid>/figshare/config/'],
            'delete',
            views.config.figshare_deauthorize,
            json_renderer
        ),

        ##### File grid, render and download #####
        Rule([
            '/project/<pid>/figshare/hgrid/',
            '/project/<pid>/node/<nid>/figshare/hgrid/',
            '/project/<pid>/figshare/hgrid/<type>/<id>/',
            '/project/<pid>/node/<nid>/figshare/hgrid/<type>/<id>/',
        ], 'get', views.hgrid.figshare_hgrid_data_contents, json_renderer),
        Rule([
            '/project/<pid>/figshare/render/article/<aid>/file/<fid>/',
            '/project/<pid>/node/<nid>/figshare/render/article/<aid>/file/<fid>/'
        ], 'get', views.crud.figshare_get_rendered_file, json_renderer,),
        Rule([
            '/project/<pid>/figshare/download/article/<aid>/file/<fid>/',
            '/project/<pid>/node/<nid>/figshare/download/article/<aid>/file/<fid>/'
        ], 'get', views.crud.figshare_download_file, json_renderer,),
    ],
    'prefix': '/api/v1',
}
# Full-page (non-API) routes, rendered via the figshare file-view mako template.
page_routes = {
    'rules': [
        Rule([
            '/project/<pid>/figshare/article/<aid>/file/<fid>/',
            '/project/<pid>/node/<nid>/figshare/article/<aid>/file/<fid>/',
        ], 'get', views.crud.figshare_view_file, OsfWebRenderer('../addons/figshare/templates/figshare_view_file.mako')),
    ],
}
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,296
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/s3/views/crud.py
|
# -*- coding: utf-8 -*-
import urllib
import datetime
import httplib as http
from boto.exception import S3ResponseError, BotoClientError
from flask import request
from modularodm import Q
from framework.exceptions import HTTPError
from framework.flask import redirect # VOL-aware redirect
from website.models import NodeLog
from website.addons.base.views import check_file_guid
from website.project.views.node import _view_project
from website.project.views.file import get_cache_content
from website.project.decorators import (
must_have_permission, must_be_contributor_or_public,
must_not_be_registration, must_have_addon
)
from website.addons.s3.model import S3GuidFile
from website.addons.s3.settings import MAX_RENDER_SIZE
from website.addons.s3.api import S3Wrapper, create_bucket
from website.addons.s3.utils import (
create_version_list, build_urls, get_cache_file_name, generate_signed_url,
validate_bucket_name
)
@must_be_contributor_or_public
@must_have_addon('s3', 'node')
def s3_download(**kwargs):
    """Redirect to a signed S3 download URL for the requested key.

    Raises 404 when no path is supplied or the key does not exist in the
    node's configured bucket.
    """
    node_settings = kwargs['node_addon']
    path = kwargs.get('path')
    # BUG FIX: the original ran urllib.unquote(kwargs['path']) *before*
    # checking for None, so a missing path raised KeyError/AttributeError
    # instead of returning the intended 404.
    if path is None:
        raise HTTPError(http.NOT_FOUND)
    key_name = urllib.unquote(path)
    vid = request.args.get('vid')
    connect = S3Wrapper.from_addon(node_settings)
    if not connect.does_key_exist(key_name):
        raise HTTPError(http.NOT_FOUND)
    return redirect(connect.download_file_URL(key_name, vid))
@must_have_permission('write')
@must_not_be_registration
@must_have_addon('s3', 'node')
def s3_delete(**kwargs):
    """Delete a key from the node's S3 bucket and record a file-removed log.

    Returns an empty dict on success.
    """
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    # Path arrives URL-encoded from the route.
    dfile = urllib.unquote(kwargs['path'])
    connect = S3Wrapper.from_addon(node_settings)
    connect.delete_file(dfile)
    node.add_log(
        action='s3_' + NodeLog.FILE_REMOVED,
        params={
            'project': node.parent_id,
            'node': node._id,
            'bucket': node_settings.bucket,
            'path': dfile,
        },
        auth=kwargs['auth'],
        log_date=datetime.datetime.utcnow(),
    )
    return {}
@must_be_contributor_or_public
@must_have_addon('s3', 'node')
def s3_view(**kwargs):
    """Render the file-view page for an S3 key, creating a GUID record for
    the file if one does not exist yet.

    Redirects to the canonical GUID URL when the request used a raw path.
    Raises 404 for a missing path or unknown key.
    """
    path = kwargs.get('path')
    vid = request.args.get('vid')
    if not path:
        raise HTTPError(http.NOT_FOUND)
    if vid == 'Pre-versioning':
        # Keys uploaded before bucket versioning was enabled carry a
        # literal 'null' version id on S3.
        vid = 'null'
    node_settings = kwargs['node_addon']
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    wrapper = S3Wrapper.from_addon(node_settings)
    key = wrapper.get_wrapped_key(urllib.unquote(path), vid=vid)
    if key is None:
        raise HTTPError(http.NOT_FOUND)
    try:
        guid = S3GuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path)
        )
    except Exception:
        # BUG FIX: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. A failed lookup means no GUID
        # record exists yet, so create one.
        guid = S3GuidFile(
            node=node,
            path=path,
        )
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    cache_file_name = get_cache_file_name(path, key.etag)
    urls = build_urls(node, path, etag=key.etag)
    if key.s3Key.size > MAX_RENDER_SIZE:
        render = 'File too large to render; download file to view it'
    else:
        # Check to see if the file has already been rendered.
        render = get_cache_content(node_settings, cache_file_name)
        if render is None:
            # Not cached yet: fetch contents and kick off a render.
            file_contents = key.s3Key.get_contents_as_string()
            render = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                remote_path=path,
                file_content=file_contents,
                download_url=urls['download'],
            )
    versions = create_version_list(wrapper, urllib.unquote(path), node)
    rv = {
        'file_name': key.name,
        'rendered': render,
        'download_url': urls['download'],
        'render_url': urls['render'],
        'versions': versions,
        'current': key.version_id,
        'info_url': urls['info'],
        'delete_url': urls['delete'],
        'files_page_url': node.web_url_for('collect_file_trees')
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
@must_be_contributor_or_public
@must_have_addon('s3', 'node')
def ping_render(**kwargs):
    """Return the cached rendered HTML for a file, if a render has completed."""
    addon = kwargs['node_addon']
    cache_name = get_cache_file_name(
        kwargs.get('path'),
        request.args.get('etag'),
    )
    return get_cache_content(addon, cache_name)
@must_have_permission('write')
@must_not_be_registration
@must_have_addon('s3', 'node')
def s3_upload(**kwargs):
    """Return a signed S3 upload URL for the client to PUT the file to.

    Logs the operation as an update when the key already exists, otherwise
    as an add. Raises 400 when no file name is supplied.
    """
    node = kwargs['node'] or kwargs['project']
    s3 = kwargs['node_addon']
    file_name = request.json.get('name')
    if file_name is None:
        raise HTTPError(http.BAD_REQUEST)
    # S3 key names must be URL-safe; encode to UTF-8 before quoting.
    file_name = urllib.quote_plus(file_name.encode('utf-8'))
    mime = request.json.get('type') or 'application/octet-stream'
    # Existence check decides which log action to record below.
    update = S3Wrapper.from_addon(s3).does_key_exist(file_name)
    signed_url = generate_signed_url(mime, file_name, s3)
    node.add_log(
        action='s3_' +
        (NodeLog.FILE_UPDATED if update else NodeLog.FILE_ADDED),
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'bucket': s3.bucket,
            'path': file_name,
            'urls': build_urls(node, file_name),
        },
        auth=kwargs['auth'],
        log_date=datetime.datetime.utcnow(),
    )
    return signed_url
@must_be_contributor_or_public
@must_have_addon('s3', 'node')
def create_new_bucket(**kwargs):
    """Create a new S3 bucket on the authenticated user's account.

    Returns an empty dict on success, or a message with 406 Not Acceptable
    when the name is invalid or boto rejects the request.
    """
    user = kwargs['auth'].user
    user_settings = user.get_addon('s3')
    bucket_name = request.json.get('bucket_name')
    if not validate_bucket_name(bucket_name):
        return {'message': 'That bucket name is not valid.'}, http.NOT_ACCEPTABLE
    try:
        # Reuse the already-validated name; the original re-read
        # request.json.get('bucket_name') here.
        create_bucket(user_settings, bucket_name)
        return {}
    except (BotoClientError, S3ResponseError) as e:
        # Both boto error types were handled identically; merged clauses.
        return {'message': e.message}, http.NOT_ACCEPTABLE
@must_be_contributor_or_public  # returns user, project
@must_have_addon('s3', 'node')
def file_delete_info(**kwargs):
    """Return the API and files-page URLs needed by the file-delete dialog."""
    node = kwargs['node'] or kwargs['project']
    info = {
        'api_url': node.api_url,
        'files_page_url': node.web_url_for('collect_file_trees'),
    }
    # Both URLs must resolve for the dialog to work.
    if info['api_url'] is None or info['files_page_url'] is None:
        raise HTTPError(http.NOT_FOUND)
    return info
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,297
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/base/views.py
|
# -*- coding: utf-8 -*-
import httplib
import functools
import itsdangerous
from flask import request
from flask import redirect
from framework.auth import Auth
from framework.sessions import Session
from framework.exceptions import HTTPError
from framework.auth.decorators import must_be_logged_in, must_be_signed
from website import settings
from website.models import User, Node, NodeLog
from website.project import decorators
from website.project.decorators import must_be_valid_project
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def disable_addon(auth, **kwargs):
    """Remove the named addon from a node.

    Returns {'deleted': bool} indicating whether anything was removed.
    Raises 400 when no addon name is supplied in the route.
    """
    node = kwargs['node'] or kwargs['project']
    addon_name = kwargs.get('addon')
    if addon_name is None:
        raise HTTPError(httplib.BAD_REQUEST)
    deleted = node.delete_addon(addon_name, auth)
    return {'deleted': deleted}
@must_be_logged_in
def get_addon_user_config(**kwargs):
    """Return the current user's settings for the named addon as JSON.

    Raises 400 when no addon name is supplied or the user has not
    enabled that addon.
    """
    user = kwargs['auth'].user
    addon_name = kwargs.get('addon')
    if addon_name is None:
        raise HTTPError(httplib.BAD_REQUEST)
    addon = user.get_addon(addon_name)
    if addon is None:
        # User has not enabled this addon.
        raise HTTPError(httplib.BAD_REQUEST)
    return addon.to_json(user)
def check_file_guid(guid):
    """Return the canonical GUID URL to redirect to, or None when the
    current request already uses it.

    Any path suffix after the file URL (e.g. a sub-resource) is carried
    over onto the canonical URL.
    """
    canonical = '/{0}/'.format(guid._id)
    if request.path.startswith(canonical):
        return None
    pieces = request.url.split(guid.file_url)
    if len(pieces) > 1:
        canonical += pieces[1].lstrip('/')
    return canonical
def get_user_from_cookie(cookie):
    """Resolve a signed session cookie to a User.

    Returns None when no cookie is supplied (anonymous request).
    Raises 401 when the signature is invalid or the session is gone.
    """
    if not cookie:
        return None
    try:
        token = itsdangerous.Signer(settings.SECRET_KEY).unsign(cookie)
    except itsdangerous.BadSignature:
        # Tampered or stale cookie.
        raise HTTPError(httplib.UNAUTHORIZED)
    session = Session.load(token)
    if session is None:
        raise HTTPError(httplib.UNAUTHORIZED)
    return User.load(session.data['auth_user_id'])
# Map each waterbutler file action to the node permission it requires.
permission_map = {
    'metadata': 'read',
    'download': 'read',
    'upload': 'write',
    'delete': 'write',
    'copy': 'write',
    'move': 'write',
}


def check_access(node, user, action, key=None):
    """Verify that user can perform requested action on resource. Raise appropriate
    error code if action cannot proceed.
    """
    required = permission_map.get(action)
    if required is None:
        # Unknown action name.
        raise HTTPError(httplib.BAD_REQUEST)
    if node.has_permission(user, required):
        return True
    # Read access is also granted on public nodes and via active private links.
    if required == 'read' and (node.is_public or key in node.private_link_keys_active):
        return True
    # 403 for an authenticated user, 401 to prompt anonymous users to log in.
    raise HTTPError(httplib.FORBIDDEN if user else httplib.UNAUTHORIZED)
def make_auth(user):
    """Serialize a user into the auth dict waterbutler expects.

    Returns an empty dict for an anonymous (None) user.
    """
    if user is None:
        return {}
    return {
        'id': user._id,
        'email': '{}@osf.io'.format(user._id),
        'name': user.fullname,
    }
def restrict_addrs(*addrs):
    """Decorator factory: reject requests whose remote address is not in
    *addrs* with 403 Forbidden.
    """
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            remote = request.remote_addr
            if remote not in addrs:
                raise HTTPError(httplib.FORBIDDEN)
            return func(*args, **kwargs)
        return wrapped
    return wrapper


# Only configured waterbutler hosts may call views guarded by this decorator.
restrict_waterbutler = restrict_addrs(*settings.WATERBUTLER_ADDRS)
@restrict_waterbutler
def get_auth(**kwargs):
    """Waterbutler auth endpoint.

    Validates the caller's signed cookie and permissions for the requested
    action, then returns the provider credentials/settings for the node
    plus the callback URL waterbutler should hit afterwards.

    Raises 400 for missing query args or an unconfigured provider,
    404 for an unknown node; check_access raises 401/403 as appropriate.
    """
    try:
        action = request.args['action']
        cookie = request.args['cookie']
        node_id = request.args['nid']
        provider_name = request.args['provider']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)
    view_only = request.args.get('viewOnly')
    user = get_user_from_cookie(cookie)
    node = Node.load(node_id)
    if not node:
        raise HTTPError(httplib.NOT_FOUND)
    # Raises 401/403 when the action is not permitted.
    check_access(node, user, action, key=view_only)
    provider_settings = node.get_addon(provider_name)
    if not provider_settings:
        raise HTTPError(httplib.BAD_REQUEST)
    credentials = provider_settings.serialize_waterbutler_credentials()
    # NOTE(review): this local name shadows the module-level `settings`
    # import for the rest of this function.
    settings = provider_settings.serialize_waterbutler_settings()
    return {
        'auth': make_auth(user),
        'credentials': credentials,
        'settings': settings,
        'callback_url': node.api_url_for(
            'create_waterbutler_log',
            _absolute=True,
        ),
    }
# Map waterbutler action names to OSF node-log actions.
LOG_ACTION_MAP = {
    'create': NodeLog.FILE_ADDED,
    'update': NodeLog.FILE_UPDATED,
    'delete': NodeLog.FILE_REMOVED,
}


@must_be_signed
@restrict_waterbutler
@must_be_valid_project
def create_waterbutler_log(payload, **kwargs):
    """Callback hit by waterbutler after a file operation; records a node log.

    Raises 400 for a malformed payload, an unknown user, an unconfigured
    provider, or an unrecognized action.
    """
    try:
        auth = payload['auth']
        action = payload['action']
        provider = payload['provider']
        metadata = payload['metadata']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)
    # Normalize to a relative path before logging.
    metadata['path'] = metadata['path'].lstrip('/')
    user = User.load(auth['id'])
    if user is None:
        raise HTTPError(httplib.BAD_REQUEST)
    node = kwargs['node'] or kwargs['project']
    node_addon = node.get_addon(provider)
    if node_addon is None:
        raise HTTPError(httplib.BAD_REQUEST)
    try:
        osf_action = LOG_ACTION_MAP[action]
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)
    auth = Auth(user=user)
    node_addon.create_waterbutler_log(auth, osf_action, metadata)
    return {'status': 'success'}
@must_be_valid_project
def get_waterbutler_render_url(**kwargs):
    """Redirect to the provider addon's render URL for the requested file."""
    node = kwargs.get('node') or kwargs['project']
    addon = node.get_addon(request.args.get('provider'))
    if not addon:
        raise HTTPError(httplib.BAD_REQUEST)
    try:
        target = addon.get_waterbutler_render_url(**request.args.to_dict())
    except TypeError:
        # Query parameters did not match this provider's signature.
        raise HTTPError(httplib.BAD_REQUEST)
    return redirect(target)
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,298
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/dropbox/tests/test_models.py
|
# -*- coding: utf-8 -*-
import os
import hashlib
from nose.tools import * # noqa (PEP8 asserts)
from framework.auth import Auth
from website.addons.dropbox.model import (
DropboxUserSettings, DropboxNodeSettings, DropboxFile
)
from tests.base import OsfTestCase
from tests.factories import UserFactory, ProjectFactory
from website.addons.dropbox.tests.utils import MockDropbox
from website.addons.dropbox.tests.factories import (
DropboxUserSettingsFactory, DropboxNodeSettingsFactory,
DropboxFileFactory
)
from website.util import web_url_for
from website.addons.base import exceptions
class TestUserSettingsModel(OsfTestCase):
    """Unit tests for DropboxUserSettings (per-user addon credentials)."""

    def setUp(self):
        super(TestUserSettingsModel, self).setUp()
        self.user = UserFactory()

    def test_fields(self):
        user_settings = DropboxUserSettings(
            access_token='12345',
            dropbox_id='abc',
            owner=self.user)
        user_settings.save()
        retrieved = DropboxUserSettings.load(user_settings._primary_key)
        assert_true(retrieved.access_token)
        assert_true(retrieved.dropbox_id)
        assert_true(retrieved.owner)

    def test_has_auth(self):
        # has_auth tracks presence of an access token
        user_settings = DropboxUserSettingsFactory(access_token=None)
        assert_false(user_settings.has_auth)
        user_settings.access_token = '12345'
        user_settings.save()
        assert_true(user_settings.has_auth)

    def test_clear_clears_associated_node_settings(self):
        node_settings = DropboxNodeSettingsFactory.build()
        user_settings = DropboxUserSettingsFactory()
        node_settings.user_settings = user_settings
        node_settings.save()
        user_settings.clear()
        user_settings.save()
        # Node settings no longer associated with user settings
        assert_is(node_settings.user_settings, None)
        assert_is(node_settings.folder, None)

    def test_clear(self):
        node_settings = DropboxNodeSettingsFactory.build()
        user_settings = DropboxUserSettingsFactory(access_token='abcde',
                                                   dropbox_id='abc')
        node_settings.user_settings = user_settings
        node_settings.save()
        assert_true(user_settings.access_token)
        user_settings.clear()
        user_settings.save()
        # clear() wipes credentials but does not delete the record
        assert_false(user_settings.access_token)
        assert_false(user_settings.dropbox_id)

    def test_delete(self):
        user_settings = DropboxUserSettingsFactory()
        assert_true(user_settings.has_auth)
        user_settings.delete()
        user_settings.save()
        # delete() clears credentials AND marks the record deleted
        assert_false(user_settings.access_token)
        assert_false(user_settings.dropbox_id)
        assert_true(user_settings.deleted)

    def test_delete_clears_associated_node_settings(self):
        node_settings = DropboxNodeSettingsFactory.build()
        user_settings = DropboxUserSettingsFactory()
        node_settings.user_settings = user_settings
        node_settings.save()
        user_settings.delete()
        user_settings.save()
        # Node settings no longer associated with user settings,
        # but the node settings record itself is not deleted
        assert_is(node_settings.user_settings, None)
        assert_is(node_settings.folder, None)
        assert_false(node_settings.deleted)

    def test_to_json(self):
        user_settings = DropboxUserSettingsFactory()
        result = user_settings.to_json()
        assert_equal(result['has_auth'], user_settings.has_auth)
class TestDropboxNodeSettingsModel(OsfTestCase):
    """Unit tests for DropboxNodeSettings (per-node addon configuration)."""

    def setUp(self):
        super(TestDropboxNodeSettingsModel, self).setUp()
        self.user = UserFactory()
        self.user.add_addon('dropbox')
        self.user.save()
        self.user_settings = self.user.get_addon('dropbox')
        self.project = ProjectFactory()
        self.node_settings = DropboxNodeSettingsFactory(
            user_settings=self.user_settings,
            owner=self.project
        )

    def test_fields(self):
        node_settings = DropboxNodeSettings(user_settings=self.user_settings)
        node_settings.save()
        assert_true(node_settings.user_settings)
        assert_equal(node_settings.user_settings.owner, self.user)
        assert_true(hasattr(node_settings, 'folder'))
        assert_true(hasattr(node_settings, 'registration_data'))

    def test_folder_defaults_to_none(self):
        node_settings = DropboxNodeSettings(user_settings=self.user_settings)
        node_settings.save()
        assert_is_none(node_settings.folder)

    def test_has_auth(self):
        # has_auth is delegated to the linked user settings' token
        settings = DropboxNodeSettings(user_settings=self.user_settings)
        settings.save()
        assert_false(settings.has_auth)
        settings.user_settings.access_token = '123abc'
        settings.user_settings.save()
        assert_true(settings.has_auth)

    def test_to_json(self):
        settings = self.node_settings
        user = UserFactory()
        result = settings.to_json(user)
        assert_equal(result['addon_short_name'], 'dropbox')

    def test_delete(self):
        assert_true(self.node_settings.user_settings)
        assert_true(self.node_settings.folder)
        old_logs = self.project.logs
        self.node_settings.delete()
        self.node_settings.save()
        assert_is(self.node_settings.user_settings, None)
        assert_is(self.node_settings.folder, None)
        assert_true(self.node_settings.deleted)
        # Deleting node settings must not add a log entry
        assert_equal(self.project.logs, old_logs)

    def test_deauthorize(self):
        # Deauthorizing detaches user settings/folder and logs the event
        assert_true(self.node_settings.user_settings)
        assert_true(self.node_settings.folder)
        self.node_settings.deauthorize(auth=Auth(self.user))
        self.node_settings.save()
        assert_is(self.node_settings.user_settings, None)
        assert_is(self.node_settings.folder, None)
        last_log = self.project.logs[-1]
        assert_equal(last_log.action, 'dropbox_node_deauthorized')
        params = last_log.params
        assert_in('node', params)
        assert_in('project', params)
        assert_in('folder', params)

    def test_set_folder(self):
        folder_name = 'queen/freddie'
        self.node_settings.set_folder(folder_name, auth=Auth(self.user))
        self.node_settings.save()
        # Folder was set
        assert_equal(self.node_settings.folder, folder_name)
        # Log was saved
        last_log = self.project.logs[-1]
        assert_equal(last_log.action, 'dropbox_folder_selected')

    def test_set_user_auth(self):
        node_settings = DropboxNodeSettingsFactory()
        user_settings = DropboxUserSettingsFactory()
        node_settings.set_user_auth(user_settings)
        node_settings.save()
        assert_true(node_settings.has_auth)
        assert_equal(node_settings.user_settings, user_settings)
        # A log was saved
        last_log = node_settings.owner.logs[-1]
        assert_equal(last_log.action, 'dropbox_node_authorized')
        log_params = last_log.params
        assert_equal(log_params['folder'], node_settings.folder)
        assert_equal(log_params['node'], node_settings.owner._primary_key)
        assert_equal(last_log.user, user_settings.owner)

    def test_serialize_credentials(self):
        self.user_settings.access_token = 'secret'
        self.user_settings.save()
        credentials = self.node_settings.serialize_waterbutler_credentials()
        expected = {'token': self.node_settings.user_settings.access_token}
        assert_equal(credentials, expected)

    def test_serialize_credentials_not_authorized(self):
        self.node_settings.user_settings = None
        self.node_settings.save()
        with assert_raises(exceptions.AddonError):
            self.node_settings.serialize_waterbutler_credentials()

    def test_serialize_settings(self):
        settings = self.node_settings.serialize_waterbutler_settings()
        expected = {'folder': self.node_settings.folder}
        assert_equal(settings, expected)

    def test_serialize_settings_not_configured(self):
        self.node_settings.folder = None
        self.node_settings.save()
        with assert_raises(exceptions.AddonError):
            self.node_settings.serialize_waterbutler_settings()

    def test_create_log(self):
        action = 'file_added'
        path = 'pizza.nii'
        nlog = len(self.project.logs)
        self.node_settings.create_waterbutler_log(
            auth=Auth(user=self.user),
            action=action,
            metadata={'path': path},
        )
        self.project.reload()
        # Exactly one log entry is added, namespaced under 'dropbox_'
        assert_equal(len(self.project.logs), nlog + 1)
        assert_equal(
            self.project.logs[-1].action,
            'dropbox_{0}'.format(action),
        )
        # Logged path is joined onto the configured folder
        assert_equal(
            self.project.logs[-1].params['path'],
            os.path.join(self.node_settings.folder, path),
        )
class TestNodeSettingsCallbacks(OsfTestCase):
    """Tests for node lifecycle callbacks (fork, contributor removal, delete)."""

    def setUp(self):
        super(TestNodeSettingsCallbacks, self).setUp()
        # Create node settings with auth
        self.user_settings = DropboxUserSettingsFactory(access_token='123abc')
        self.node_settings = DropboxNodeSettingsFactory(
            user_settings=self.user_settings,
            folder='',
        )
        self.project = self.node_settings.owner
        self.user = self.user_settings.owner

    def test_after_fork_by_authorized_dropbox_user(self):
        fork = ProjectFactory()
        clone, message = self.node_settings.after_fork(
            node=self.project, fork=fork, user=self.user_settings.owner
        )
        # Forker's own authorization carries over to the fork
        assert_equal(clone.user_settings, self.user_settings)

    def test_after_fork_by_unauthorized_dropbox_user(self):
        fork = ProjectFactory()
        user = UserFactory()
        clone, message = self.node_settings.after_fork(
            node=self.project, fork=fork, user=user,
            save=True
        )
        # need request context for url_for
        # An unauthorized forker gets a clone without credentials
        assert_is(clone.user_settings, None)

    def test_before_fork(self):
        node = ProjectFactory()
        message = self.node_settings.before_fork(node, self.user)
        assert_true(message)

    def test_before_remove_contributor_message(self):
        message = self.node_settings.before_remove_contributor(
            self.project, self.user)
        # Warning must name both the user and the node type
        assert_true(message)
        assert_in(self.user.fullname, message)
        assert_in(self.project.project_or_component, message)

    def test_after_remove_authorized_dropbox_user(self):
        message = self.node_settings.after_remove_contributor(
            self.project, self.user_settings.owner)
        self.node_settings.save()
        # Removing the authorizing contributor revokes node auth
        assert_is_none(self.node_settings.user_settings)
        assert_true(message)

    def test_after_delete(self):
        self.project.remove_node(Auth(user=self.project.creator))
        # Ensure that changes to node settings have been saved
        self.node_settings.reload()
        assert_true(self.node_settings.user_settings is None)
        assert_true(self.node_settings.folder is None)
class TestDropboxGuidFile(OsfTestCase):
    """URL construction and render-cache naming for DropboxFile objects."""
    def test_verbose_url(self):
        """url(guid=False) resolves to the verbose dropbox_view_file route."""
        project = ProjectFactory()
        file_obj = DropboxFile(node=project, path='foo.txt')
        file_obj.save()
        file_url = file_obj.url(guid=False)
        url = web_url_for('dropbox_view_file',
                          pid=project._primary_key, path=file_obj.path, rev='')
        assert_equal(url, file_url)
    def test_guid_url(self):
        """url(guid=True) is the short /<guid>/ form with rev as a query param."""
        file_obj = DropboxFileFactory()
        result = file_obj.url(guid=True, rev='123')
        assert_equal(result, '/{guid}/?rev=123'.format(guid=file_obj._primary_key))
    def test_cache_file_name(self):
        """Cache filename is md5(path)_rev.html.

        NOTE(review): hashlib.md5 over a str only works on Python 2; Python 3
        would require encoding the path first — confirm target runtime.
        """
        project = ProjectFactory()
        path = 'My Project/foo.txt'
        file_obj = DropboxFile(node=project, path=path)
        mock_client = MockDropbox()
        file_obj.update_metadata(client=mock_client)
        file_obj.save()
        result = file_obj.get_cache_filename(client=mock_client)
        assert_equal(
            result,
            '{0}_{1}.html'.format(
                hashlib.md5(file_obj.path).hexdigest(),
                file_obj.metadata['rev'],
            )
        )
    def test_cache_file_name_encode(self):
        """Non-ASCII paths are hashed the same way as ASCII ones."""
        project = ProjectFactory()
        path = 'à/ é éà'
        file_obj = DropboxFile(node=project, path=path)
        mock_client = MockDropbox()
        file_obj.update_metadata(client=mock_client)
        file_obj.save()
        result = file_obj.get_cache_filename(client=mock_client)
        assert_equal(
            result,
            '{0}_{1}.html'.format(
                hashlib.md5(path).hexdigest(),
                file_obj.metadata['rev'],
            )
        )
    def test_download_url(self):
        """download_url(guid=False) matches the node-scoped dropbox_download route."""
        file_obj = DropboxFileFactory()
        dl_url = file_obj.download_url(guid=False)
        expected = file_obj.node.web_url_for('dropbox_download', path=file_obj.path,
                                             rev='', _absolute=True)
        assert_equal(dl_url, expected)
    def test_download_url_guid(self):
        """download_url(guid=True) is /<guid>/download/?rev=<rev>."""
        file_obj = DropboxFileFactory()
        dl_url = file_obj.download_url(guid=True, rev='123')
        expected = os.path.join('/', file_obj._primary_key, 'download/') + "?rev=123"
        assert_equal(dl_url, expected)
    def test_update_metadata(self):
        """update_metadata fetches metadata from the client and persists it."""
        client = MockDropbox()
        file_obj = DropboxFileFactory(metadata=None)
        file_obj.update_metadata(client=client)
        file_obj.save()
        assert_equal(file_obj.metadata, client.metadata(file_obj.path, list=False))
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,181,299
|
WenTingZhu/osf.io
|
refs/heads/develop
|
/website/addons/osfstorage/routes.py
|
# encoding: utf-8
from framework.routing import Rule, json_renderer
from website.routes import OsfWebRenderer, notemplate
from website.addons.osfstorage import views
# HTML-rendering routes for the OSF Storage addon. Each Rule lists both the
# project-scoped and component-scoped (<nid>) URL variants.
web_routes = {
    'rules': [
        # Canonical file-view page.
        Rule(
            [
                '/project/<pid>/osfstorage/files/<path:path>/',
                '/project/<pid>/node/<nid>/osfstorage/files/<path:path>/',
            ],
            'get',
            views.osf_storage_view_file,
            OsfWebRenderer('../addons/osfstorage/templates/osfstorage_view_file.mako'),
        ),
        Rule(
            [
                # Legacy routes for `view_file`
                '/project/<pid>/osffiles/<fid>/',
                '/project/<pid>/node/<nid>/osffiles/<fid>/',
            ],
            'get',
            views.osf_storage_view_file_legacy,
            OsfWebRenderer('../addons/osfstorage/templates/osfstorage_view_file.mako'),
        ),
        # Legacy download routes redirect through a single handler; rendered
        # with `notemplate` because they return a redirect, not a page.
        Rule(
            [
                # Legacy routes for `download_file`
                '/project/<pid>/osffiles/<fid>/download/',
                '/project/<pid>/node/<nid>/osffiles/<fid>/download/',
                # Note: Added these old URLs for backwards compatibility with
                # hard-coded links.
                '/project/<pid>/osffiles/download/<fid>/',
                '/project/<pid>/node/<nid>/osffiles/download/<fid>/',
                '/project/<pid>/files/<fid>/',
                '/project/<pid>/node/<nid>/files/<fid>/',
                '/project/<pid>/files/download/<fid>/',
                '/project/<pid>/node/<nid>/files/download/<fid>/',
                # Legacy routes for `download_file_by_version`
                '/project/<pid>/osffiles/<fid>/version/<vid>/download/',
                '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/download/',
                # Note: Added these old URLs for backwards compatibility with
                # hard-coded links.
                '/project/<pid>/osffiles/<fid>/version/<vid>/',
                '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/',
                '/project/<pid>/osffiles/download/<fid>/version/<vid>/',
                '/project/<pid>/node/<nid>/osffiles/download/<fid>/version/<vid>/',
                '/project/<pid>/files/<fid>/version/<vid>/',
                '/project/<pid>/node/<nid>/files/<fid>/version/<vid>/',
                '/project/<pid>/files/download/<fid>/version/<vid>/',
                '/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/',
            ],
            'get',
            views.osf_storage_download_file_legacy,
            notemplate,
        ),
    ],
}
# JSON API routes for the OSF Storage addon, all mounted under /api/v1.
# The four hooks/crud/ rules share one URL and are distinguished only by
# HTTP verb (get=download, put=update metadata, delete=delete, post=upload).
api_routes = {
    'prefix': '/api/v1',
    'rules': [
        # Folder/file metadata listing.
        Rule(
            [
                '/project/<pid>/osfstorage/files/',
                '/project/<pid>/node/<nid>/osfstorage/files/',
                '/project/<pid>/osfstorage/files/<path:path>/',
                '/project/<pid>/node/<nid>/osfstorage/files/<path:path>/',
            ],
            'get',
            views.osf_storage_get_metadata_hook,
            json_renderer,
        ),
        # Revision history for a single file.
        Rule(
            [
                '/project/<pid>/osfstorage/revisions/<path:path>',
                '/project/<pid>/node/<nid>/osfstorage/revisions/<path:path>',
            ],
            'get',
            views.osf_storage_get_revisions,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/osfstorage/hooks/crud/',
                '/project/<pid>/node/<nid>/osfstorage/hooks/crud/',
            ],
            'get',
            views.osf_storage_download_file_hook,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/osfstorage/hooks/crud/',
                '/project/<pid>/node/<nid>/osfstorage/hooks/crud/',
            ],
            'put',
            views.osf_storage_update_metadata_hook,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/osfstorage/hooks/crud/',
                '/project/<pid>/node/<nid>/osfstorage/hooks/crud/',
            ],
            'delete',
            views.osf_storage_crud_hook_delete,
            json_renderer,
        ),
        Rule(
            [
                '/project/<pid>/osfstorage/hooks/crud/',
                '/project/<pid>/node/<nid>/osfstorage/hooks/crud/',
            ],
            'post',
            views.osf_storage_upload_file_hook,
            json_renderer,
        ),
        # Server-side file rendering.
        Rule(
            [
                '/project/<pid>/osfstorage/render/<path:path>/',
                '/project/<pid>/node/<nid>/osfstorage/render/<path:path>/',
            ],
            'get',
            views.osf_storage_render_file,
            json_renderer,
        ),
        # JSON variants of the legacy download URLs.
        Rule(
            [
                # Legacy routes for `download_file`
                '/project/<pid>/osffiles/<fid>/',
                '/project/<pid>/node/<nid>/osffiles/<fid>/',
                '/project/<pid>/files/download/<fid>/',
                '/project/<pid>/node/<nid>/files/download/<fid>/',
                # Legacy routes for `download_file_by_version`
                '/project/<pid>/osffiles/<fid>/version/<vid>/',
                '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/',
                '/project/<pid>/files/download/<fid>/version/<vid>/',
                '/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/',
            ],
            'get',
            views.osf_storage_download_file_legacy,
            json_renderer,
        ),
    ],
}
|
{"/website/addons/figshare/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/github/views/crud.py": ["/website/addons/base/views.py"], "/website/addons/dropbox/views/crud.py": ["/website/addons/base/views.py", "/website/addons/dropbox/utils.py"], "/website/addons/s3/views/crud.py": ["/website/addons/base/views.py"]}
|
37,193,782
|
pharnoux/client_python
|
refs/heads/master
|
/arize/model.py
|
import math
from abc import ABC, abstractmethod
from google.protobuf.timestamp_pb2 import Timestamp
import pandas as pd
from arize import public_pb2 as public__pb2
class BaseRecord(ABC):
    """Base for single-record payload builders.

    Holds the (organization_key, model_id) routing pair and the shared
    conversion/validation helpers used by Prediction and Actual.
    """
    def __init__(self, organization_key, model_id):
        self.organization_key = organization_key
        self.model_id = model_id
    @abstractmethod
    def validate_inputs(self):
        """Raise TypeError/ValueError when the record's inputs are malformed."""
        pass
    @abstractmethod
    def _build_proto(self):
        """Return the wire-format protobuf message(s) for this record."""
        pass
    def _base_validation(self):
        # Both routing fields must be plain strings; fail fast otherwise.
        if not isinstance(self.organization_key, str):
            raise TypeError(
                f'organization_key {self.organization_key} is type {type(self.organization_key)}, but must be a str'
            )
        if not isinstance(self.model_id, str):
            raise TypeError(
                f'model_id {self.model_id} is type {type(self.model_id)}, but must be a str'
            )
    def _get_timestamp(self, time_overwrite=None):
        """Build a protobuf Timestamp: now, or the given Unix epoch seconds."""
        ts = Timestamp()
        if time_overwrite is not None:
            time = self._convert_element(time_overwrite)
            # Bug fix: validate the *converted* value. The original checked the
            # raw `time_overwrite`, which wrongly rejected numpy integer scalars
            # even though _convert_element had already normalized them to int.
            if not isinstance(time, int):
                raise TypeError(
                    f'time_overwrite {time_overwrite} is type {type(time_overwrite)}, but expects int. (Unix epoch time in seconds)'
                )
            ts.FromSeconds(time)
        else:
            ts.GetCurrentTime()
        return ts
    def _convert_element(self, value):
        """Convert a numpy scalar/array to its Python-native equivalent (no-op otherwise)."""
        return getattr(value, "tolist", lambda: value)()
    def _get_value(self, name: str, value):
        """Wrap a feature value in a public__pb2.Value.

        bool is tested before int because bool subclasses int; bools are
        intentionally serialized as string values.
        """
        if isinstance(value, public__pb2.Value):
            return value
        val = self._convert_element(value)
        if isinstance(val, (str, bool)):
            return public__pb2.Value(string=str(val))
        if isinstance(val, int):
            return public__pb2.Value(int=val)
        if isinstance(val, float):
            return public__pb2.Value(double=val)
        else:
            raise TypeError(
                f'feature "{name}" = {value} is type {type(value)}, but must be one of: bool, str, float, int.'
            )
    def _get_label(self, name: str, value):
        """Wrap a label in a public__pb2.Label (binary/categorical/numeric).

        bool is tested before int/float (bool subclasses int) so it maps to
        the binary label field.
        """
        if isinstance(value, public__pb2.Label):
            return value
        val = self._convert_element(value)
        if isinstance(val, bool):
            return public__pb2.Label(binary=val)
        if isinstance(val, str):
            return public__pb2.Label(categorical=val)
        if isinstance(val, (int, float)):
            return public__pb2.Label(numeric=val)
        else:
            raise TypeError(
                f'{name}_label = {value} of type {type(value)}. Must be one of bool, str, float/int'
            )
class Prediction(BaseRecord):
    """A single prediction record: label, optional features, optional epoch overwrite."""
    def __init__(self, organization_key, model_id, model_version,
                 prediction_id, prediction_label, features, time_overwrite):
        super().__init__(organization_key=organization_key, model_id=model_id)
        self.model_version = model_version
        self.prediction_id = prediction_id
        self.prediction_label = prediction_label
        self.features = features
        self.time_overwrite = time_overwrite
    def validate_inputs(self):
        """Check routing fields, label type, feature value types and time overwrite."""
        self._base_validation()
        native_label = self._convert_element(self.prediction_label)
        if not isinstance(native_label, (str, bool, float, int)):
            raise TypeError(
                f'prediction_label {self.prediction_label} has type {type(self.prediction_label)}, but must be one of: str, bool, float, int'
            )
        if self.features:
            # Each feature value must normalize to a supported scalar type.
            for feat_name, feat_value in self.features.items():
                native = self._convert_element(feat_value)
                if not isinstance(native, (str, bool, float, int)):
                    raise TypeError(
                        f'feature {feat_name} with value {feat_value} is type {type(feat_value)}, but expected one of: str, bool, float, int'
                    )
        if self.time_overwrite is not None:
            if not isinstance(self.time_overwrite, int):
                raise TypeError(
                    f'time_overwrite {self.time_overwrite} is type {type(self.time_overwrite)} but expected int'
                )
    def _build_proto(self):
        """Assemble and return the Record protobuf for this prediction."""
        prediction = public__pb2.Prediction(label=self._get_label(
            value=self.prediction_label, name='prediction'))
        if self.features is not None:
            wrapped = {
                feat_name: self._get_value(value=feat_value, name=feat_name)
                for feat_name, feat_value in self.features.items()
            }
            prediction.MergeFrom(public__pb2.Prediction(features=wrapped))
        if self.model_version is not None:
            prediction.model_version = self.model_version
        prediction.timestamp.MergeFrom(self._get_timestamp(self.time_overwrite))
        return public__pb2.Record(organization_key=self.organization_key,
                                  model_id=self.model_id,
                                  prediction_id=self.prediction_id,
                                  prediction=prediction)
class Actual(BaseRecord):
    """A single actual-label record keyed by prediction id."""
    def __init__(self, organization_key, model_id, prediction_id,
                 actual_label):
        super().__init__(organization_key=organization_key, model_id=model_id)
        self.prediction_id = prediction_id
        self.actual_label = actual_label
    def validate_inputs(self):
        """Check routing fields and that the label normalizes to a supported scalar."""
        self._base_validation()
        native_label = self._convert_element(self.actual_label)
        if not isinstance(native_label, (str, bool, float, int)):
            raise TypeError(
                f'actual_label {self.actual_label} has type {type(self._convert_element(self.actual_label))}, but must be one of: str, bool, float, int'
            )
    def _build_proto(self):
        """Assemble and return the Record protobuf for this actual."""
        actual = public__pb2.Actual(
            label=self._get_label(value=self.actual_label, name='actual'))
        # Actuals are always stamped with the current time.
        actual.timestamp.MergeFrom(self._get_timestamp())
        return public__pb2.Record(organization_key=self.organization_key,
                                  model_id=self.model_id,
                                  prediction_id=self.prediction_id,
                                  actual=actual)
class BaseBulkRecord(BaseRecord):
    """Shared machinery for bulk record builders: chunking and input normalization."""
    # Soft cap on the serialized size of one BulkRecord message, in bytes.
    MAX_BYTES_PER_BULK_RECORD = 100000
    # Class-level defaults so _normalize_inputs can probe attributes that a
    # given subclass never sets.
    prediction_labels, actual_labels, time_overwrite, features = None, None, None, None
    def __init__(self, organization_key, model_id, prediction_ids):
        super().__init__(organization_key, model_id)
        self.organization_key = organization_key
        self.model_id = model_id
        self.prediction_ids = prediction_ids
    def _base_bulk_validation(self):
        """Validate routing fields and that prediction_ids is a pandas container."""
        self._base_validation()
        if not isinstance(self.prediction_ids, (pd.DataFrame, pd.Series)):
            raise TypeError(
                f'prediction_ids is type {type(self.prediction_ids)}, but expect one of: pd.DataFrame, pd.Series'
            )
    def _bundle_records(self, records, model_version):
        """Split *records* into BulkRecord envelopes below the size cap."""
        if not records:
            # Bug fix: an empty batch previously caused a ZeroDivisionError in
            # _num_chuncks; an empty bundle list is the sensible result.
            return []
        recs_per_msg = self._num_chuncks(records)
        recs = [
            records[i:i + recs_per_msg]
            for i in range(0, len(records), recs_per_msg)
        ]
        return [
            public__pb2.BulkRecord(records=r,
                                   organization_key=self.organization_key,
                                   model_id=self.model_id,
                                   model_version=model_version,
                                   timestamp=self._get_timestamp())
            for r in recs
        ]
    def _num_chuncks(self, records):
        """Return how many records fit per bulk message given the byte cap."""
        total_bytes = 0
        for r in records:
            total_bytes += r.ByteSize()
        # Bug fix: max(..., 1) keeps the division well-defined when the batch
        # serializes to zero bytes (e.g. all-default protos).
        num_of_bulk = max(
            math.ceil(total_bytes / self.MAX_BYTES_PER_BULK_RECORD), 1)
        recs_per_msg = math.ceil(len(records) / num_of_bulk)
        return recs_per_msg
    def _normalize_inputs(self):
        """Convert DataFrame/Series/list inputs to numpy arrays or lists for consistent iteration downstream."""
        self.prediction_ids = self.prediction_ids.to_numpy()
        if isinstance(self.prediction_labels, (pd.DataFrame, pd.Series)):
            self.prediction_labels = self.prediction_labels.to_numpy()
        if isinstance(self.actual_labels, (pd.DataFrame, pd.Series)):
            self.actual_labels = self.actual_labels.to_numpy()
        if isinstance(self.time_overwrite, pd.Series):
            self.time_overwrite = self.time_overwrite.tolist()
        if isinstance(self.features, pd.DataFrame):
            self.feature_names = self.feature_names_overwrite or self.features.columns
            self.features = self.features.to_numpy()
class BulkPrediction(BaseBulkRecord):
    """Batch of prediction records built from aligned DataFrame/Series inputs."""
    def __init__(self, organization_key, model_id, model_version,
                 prediction_ids, prediction_labels, features,
                 feature_names_overwrite, time_overwrite):
        super().__init__(organization_key, model_id, prediction_ids)
        self.model_version = model_version
        self.prediction_labels = prediction_labels
        self.features = features
        self.feature_names_overwrite = feature_names_overwrite
        self.time_overwrite = time_overwrite
    def validate_inputs(self):
        """Validate types and row counts of labels, features and time overwrites."""
        self._base_bulk_validation()
        if not isinstance(self.prediction_labels, (pd.DataFrame, pd.Series)):
            raise TypeError(
                f'prediction_labels is type: {type(self.prediction_labels)}, but expects one of: pd.DataFrame, pd.Series'
            )
        if self.prediction_labels.shape[0] != self.prediction_ids.shape[0]:
            raise ValueError(
                f'prediction_labels contains {self.prediction_labels.shape[0]} elements, but must have the same as predictions_ids: {self.prediction_ids.shape[0]}.'
            )
        self._validate_features()
        self._validate_time_overwrite()
    def _build_proto(self):
        """Build one Record proto per row and bundle into size-capped BulkRecords."""
        self._normalize_inputs()
        records = []
        for row, v in enumerate(self.prediction_ids):
            # v[0]: prediction_ids was normalized via to_numpy(); indexing
            # assumes a 2-D (DataFrame-shaped) array — one id per row.
            pred_id = v[0]
            if not isinstance(pred_id, (str, bytes)):
                raise TypeError(
                    f'prediction_id {pred_id} is type {type(pred_id)}, but expected one of: str, bytes'
                )
            p = public__pb2.Prediction(label=self._get_label(
                value=self.prediction_labels[row][0], name='prediction'))
            if self.features is not None:
                # self.feature_names is set by _normalize_inputs when features
                # is a DataFrame.
                converted_feats = {}
                for column, name in enumerate(self.feature_names):
                    converted_feats[name] = self._get_value(
                        value=self.features[row][column], name=name)
                feats = public__pb2.Prediction(features=converted_feats)
                p.MergeFrom(feats)
            if self.time_overwrite is not None:
                p.timestamp.MergeFrom(
                    self._get_timestamp(self.time_overwrite[row]))
            records.append(
                public__pb2.Record(prediction_id=pred_id, prediction=p))
        return self._bundle_records(records, self.model_version)
    def _validate_features(self):
        """Check the features frame shape and the feature-name configuration."""
        if self.features is None:
            return
        if not isinstance(self.features, pd.DataFrame):
            raise TypeError(
                f'features is type {type(self.features)}, but expect type pd.DataFrame.'
            )
        if self.features.shape[0] != self.prediction_ids.shape[0]:
            raise ValueError(
                f'features has {self.features.shape[0]} sets of features, but must match size of predictions_ids: {self.prediction_ids.shape[0]}.'
            )
        if self.feature_names_overwrite is not None:
            if len(self.features.columns) != len(self.feature_names_overwrite):
                raise ValueError(
                    f'feature_names_overwrite has len:{len(self.feature_names_overwrite)}, but expects the same number of columns in features dataframe: {len(self.features.columns)}.'
                )
        else:
            # NOTE(review): pd.core.indexes.numeric is a private pandas API,
            # removed in pandas 2.x — confirm the pinned pandas version.
            if isinstance(self.features.columns,
                          pd.core.indexes.numeric.NumericIndex):
                # NOTE(review): message typo 'fatures' kept as-is (runtime string).
                raise TypeError(
                    f'fatures.columns is of type {type(self.features.columns)}, but expect elements to be str. Alternatively, feature_names_overwrite must be present.'
                )
            for name in self.features.columns:
                if not isinstance(name, str):
                    raise TypeError(
                        f'features.column {name} is type {type(name)}, but expect str'
                    )
    def _validate_time_overwrite(self):
        """Check time_overwrite is a Series/list whose length matches prediction_ids."""
        if self.time_overwrite is None:
            return
        expected_count = self.prediction_ids.shape[0]
        if isinstance(self.time_overwrite, pd.Series):
            if self.time_overwrite.shape[0] != expected_count:
                raise ValueError(
                    f'time_overwrite has {self.time_overwrite.shape[0]} elements, but must have same number of elements as prediction_ids: {expected_count}.'
                )
        elif isinstance(self.time_overwrite, list):
            if len(self.time_overwrite) != expected_count:
                raise ValueError(
                    f'time_overwrite has length {len(self.time_overwrite)} but must have same number of elements as prediction_ids: {expected_count}.'
                )
        else:
            raise TypeError(
                f'time_overwrite is type {type(self.time_overwrite)}, but expected one of: pd.Series, list<int>'
            )
class BulkActual(BaseBulkRecord):
    """Batch of actual-label records keyed by prediction id."""
    def __init__(self, organization_key, model_id, prediction_ids,
                 actual_labels):
        super().__init__(organization_key, model_id, prediction_ids)
        self.actual_labels = actual_labels
    def validate_inputs(self):
        """Validate the label container type and its row count against prediction_ids."""
        self._base_bulk_validation()
        if not isinstance(self.actual_labels, (pd.DataFrame, pd.Series)):
            raise TypeError(
                f'actual_labels is type: {type(self.actual_labels)}, but expects one of: pd.DataFrame, pd.Series'
            )
        if self.actual_labels.shape[0] != self.prediction_ids.shape[0]:
            raise ValueError(
                f'actual_labels contains {self.actual_labels.shape[0]} elements, but must have the same as predictions_ids: {self.prediction_ids.shape[0]}.'
            )
    def _build_proto(self):
        """Build one actual Record per row and bundle into BulkRecords."""
        self._normalize_inputs()
        records = []
        for row_idx, id_row in enumerate(self.prediction_ids):
            pred_id = id_row[0]
            if not isinstance(pred_id, (str, bytes)):
                raise TypeError(
                    f'prediction_id {pred_id} is type {type(pred_id)}, but expected one of: str, bytes'
                )
            label = self._get_label(value=self.actual_labels[row_idx][0],
                                    name='actual')
            records.append(
                public__pb2.Record(prediction_id=pred_id,
                                   actual=public__pb2.Actual(label=label)))
        # Bulk actuals carry no model_version.
        return self._bundle_records(records, None)
|
{"/arize/api.py": ["/arize/utils/types.py", "/arize/model.py", "/arize/utils/utils.py"], "/tests/test_api.py": ["/arize/model.py", "/arize/utils/types.py", "/arize/api.py"], "/tests/extension_test.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"], "/arize/model.py": ["/arize/utils/types.py", "/arize/utils/utils.py"], "/arize/examples/preproduction_client.py": ["/arize/api.py"], "/arize/examples/client.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/PipeLinePlusLambda/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/utils/utils.py": ["/arize/utils/types.py"], "/arize/pandas/logger.py": ["/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/BatchSageMaker/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/bulk_client.py": ["/arize/api.py"], "/arize/examples/tutorials/Partnerships/rayserve.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/log_pandas_dataframe.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"]}
|
37,193,783
|
pharnoux/client_python
|
refs/heads/master
|
/tests/test_api.py
|
import pandas as pd
import numpy as np
import uuid
from pathlib import Path
from google.protobuf.timestamp_pb2 import Timestamp
import arize.public_pb2 as public__pb2
from arize.model import Prediction, Actual, BulkPrediction, BulkActual
# Shared scalar fixtures used throughout the assertions below.
NUM_VAL = 20.20
STR_VAL = 'arize'
BOOL_VAL = True
INT_VAL = 0
# Sample feature CSV shipped alongside the test suite.
file_to_open = Path(__file__).parent / "fixtures/mpg.csv"
# Canonical record fields every test builds from and asserts against.
expected = {
    'model': 'model_v0',
    'model_version': 'v1.2.3.4',
    'api_key': 'API_KEY',
    'prediction_id': 'prediction_0',
    'value_binary': BOOL_VAL,
    'value_categorical': STR_VAL,
    'value_numeric': NUM_VAL,
    'organization_key': 'test_org',
    'features': {
        'feature_str': STR_VAL,
        'feature_double': NUM_VAL,
        'feature_int': INT_VAL,
        'feature_bool': BOOL_VAL
    }
}
def mock_dataframes(file):
    """Load *file* as features and fabricate aligned labels and prediction ids.

    Returns a (features, labels, ids) triple of DataFrames with equal row counts;
    labels are random ints in [1, 100), ids are fresh UUID strings.
    """
    feature_frame = pd.read_csv(file)
    row_count = feature_frame.shape[0]
    label_frame = pd.DataFrame(
        np.random.randint(1, 100, size=(row_count, 1)))
    id_frame = pd.DataFrame([str(uuid.uuid4()) for _ in range(row_count)])
    return feature_frame, label_frame, id_frame
def mock_series(file):
    """Load *file* as features and fabricate aligned label/id Series.

    Returns (features DataFrame, labels Series of random ints in [1, 100),
    ids Series of fresh UUID strings), all with the same row count.
    """
    feature_frame = pd.read_csv(file)
    row_count = feature_frame.shape[0]
    label_series = pd.Series(np.random.randint(1, 100, size=row_count))
    id_series = pd.Series([str(uuid.uuid4()) for _ in range(row_count)])
    return feature_frame, label_series, id_series
def test_build_binary_prediction_features():
    """Binary prediction: label maps to binary; each feature gets its typed Value.

    Note the bool feature serializes as a *string* Value by design.
    """
    pred = Prediction(organization_key=expected['organization_key'],
                      model_id=expected['model'],
                      model_version=expected['model_version'],
                      prediction_id=expected['prediction_id'],
                      prediction_label=expected['value_binary'],
                      features=expected['features'],
                      time_overwrite=None)
    record = pred._build_proto()
    assert isinstance(record, public__pb2.Record)
    assert isinstance(record.prediction, public__pb2.Prediction)
    assert isinstance(record.prediction.label, public__pb2.Label)
    for feature in record.prediction.features:
        assert isinstance(record.prediction.features[feature],
                          public__pb2.Value)
    assert record.organization_key == expected['organization_key']
    assert record.model_id == expected['model']
    assert record.prediction_id == expected['prediction_id']
    assert record.prediction.model_version == expected['model_version']
    assert record.prediction.label.binary == expected['value_binary']
    assert record.prediction.features['feature_str'].WhichOneof(
        'data') == 'string'
    assert record.prediction.features['feature_double'].WhichOneof(
        'data') == 'double'
    assert record.prediction.features['feature_int'].WhichOneof(
        'data') == 'int'
    assert record.prediction.features['feature_bool'].WhichOneof(
        'data') == 'string'
def test_build_categorical_prediction():
    """A str label produces a categorical Label in the built Record."""
    pred = Prediction(organization_key=expected['organization_key'],
                      model_id=expected['model'],
                      model_version=expected['model_version'],
                      prediction_id=expected['prediction_id'],
                      prediction_label=expected['value_categorical'],
                      features=expected['features'],
                      time_overwrite=None)
    record = pred._build_proto()
    assert isinstance(record, public__pb2.Record)
    assert isinstance(record.prediction, public__pb2.Prediction)
    assert isinstance(record.prediction.label, public__pb2.Label)
    assert record.organization_key == expected['organization_key']
    assert record.model_id == expected['model']
    assert record.prediction_id == expected['prediction_id']
    assert record.prediction.model_version == expected['model_version']
    assert bool(record.prediction.features)
    assert record.prediction.label.categorical == expected['value_categorical']
def test_build_numeric_prediction():
    """A float label produces a numeric Label in the built Record."""
    pred = Prediction(organization_key=expected['organization_key'],
                      model_id=expected['model'],
                      model_version=expected['model_version'],
                      prediction_id=expected['prediction_id'],
                      prediction_label=expected['value_numeric'],
                      features=expected['features'],
                      time_overwrite=None)
    record = pred._build_proto()
    assert isinstance(record, public__pb2.Record)
    assert isinstance(record.prediction, public__pb2.Prediction)
    assert isinstance(record.prediction.label, public__pb2.Label)
    assert record.organization_key == expected['organization_key']
    assert record.model_id == expected['model']
    assert record.prediction_id == expected['prediction_id']
    assert record.prediction.model_version == expected['model_version']
    assert bool(record.prediction.features)
    assert record.prediction.label.numeric == expected['value_numeric']
def test_build_prediction_no_features():
    """features=None yields a Prediction proto with an empty features map."""
    pred = Prediction(organization_key=expected['organization_key'],
                      model_id=expected['model'],
                      model_version=expected['model_version'],
                      prediction_id=expected['prediction_id'],
                      prediction_label=expected['value_numeric'],
                      features=None,
                      time_overwrite=None)
    record = pred._build_proto()
    assert isinstance(record.prediction, public__pb2.Prediction)
    assert not bool(record.prediction.features)
def test_build_numeric_actual():
    """A float actual label produces a numeric Label on the Actual record."""
    actual = Actual(organization_key=expected['organization_key'],
                    model_id=expected['model'],
                    prediction_id=expected['prediction_id'],
                    actual_label=expected['value_numeric'])
    record = actual._build_proto()
    assert isinstance(record, public__pb2.Record)
    assert isinstance(record.actual, public__pb2.Actual)
    assert isinstance(record.actual.label, public__pb2.Label)
    assert record.organization_key == expected['organization_key']
    assert record.model_id == expected['model']
    assert record.prediction_id == expected['prediction_id']
    assert record.actual.label.numeric == expected['value_numeric']
def test_build_categorical_actual():
    """A str actual label produces a categorical Label on the Actual record."""
    actual = Actual(organization_key=expected['organization_key'],
                    model_id=expected['model'],
                    prediction_id=expected['prediction_id'],
                    actual_label=expected['value_categorical'])
    record = actual._build_proto()
    assert isinstance(record, public__pb2.Record)
    assert isinstance(record.actual, public__pb2.Actual)
    assert isinstance(record.actual.label, public__pb2.Label)
    assert record.organization_key == expected['organization_key']
    assert record.model_id == expected['model']
    assert record.prediction_id == expected['prediction_id']
    assert record.actual.label.categorical == expected['value_categorical']
def test_build_binary_actual():
    """A bool actual label produces a binary Label on the Actual record."""
    actual = Actual(organization_key=expected['organization_key'],
                    model_id=expected['model'],
                    prediction_id=expected['prediction_id'],
                    actual_label=expected['value_binary'])
    record = actual._build_proto()
    assert isinstance(record, public__pb2.Record)
    assert isinstance(record.actual, public__pb2.Actual)
    assert isinstance(record.actual.label, public__pb2.Label)
    assert record.organization_key == expected['organization_key']
    assert record.model_id == expected['model']
    assert record.prediction_id == expected['prediction_id']
    assert record.actual.label.binary == expected['value_binary']
def test_build_bulk_predictions_dataframes():
    """BulkPrediction bundles every input row; routing fields live on the envelope."""
    features, labels, ids = mock_dataframes(file_to_open)
    preds = BulkPrediction(organization_key=expected['organization_key'],
                           model_id=expected['model'],
                           model_version=expected['model_version'],
                           prediction_ids=ids,
                           prediction_labels=labels,
                           features=features,
                           feature_names_overwrite=None,
                           time_overwrite=None)
    bulk_records = preds._build_proto()
    record_count = 0
    for bulk in bulk_records:
        assert bulk.organization_key == expected['organization_key']
        assert bulk.model_id == expected['model']
        assert bulk.model_version == expected['model_version']
        assert isinstance(bulk.timestamp, Timestamp)
        for i in range(len(bulk.records)):
            record = bulk.records[i]
            assert isinstance(record, public__pb2.Record)
            assert isinstance(record.prediction.label, public__pb2.Label)
            assert len(record.prediction.features) == features.shape[1]
            assert record.prediction.label.WhichOneof('data') == 'numeric'
            record_count += 1
    # Every input row must appear in exactly one bundle.
    assert record_count == len(ids)
def test_build_bulk_predictions_no_features():
    """With features=None, per-record protos omit features and routing keys."""
    features, labels, ids = mock_dataframes(file_to_open)
    preds = BulkPrediction(organization_key=expected['organization_key'],
                           model_id=expected['model'],
                           model_version=expected['model_version'],
                           prediction_ids=ids,
                           prediction_labels=labels,
                           features=None,
                           feature_names_overwrite=None,
                           time_overwrite=None)
    records = preds._build_proto()
    for bulk in records:
        assert isinstance(bulk, public__pb2.BulkRecord)
        for r in bulk.records:
            assert isinstance(r, public__pb2.Record)
            # Routing keys live on the BulkRecord envelope, not each record.
            assert not bool(r.organization_key)
            assert not bool(r.model_id)
            assert not bool(r.prediction.features)
def test_build_bulk_prediction_with_feature_names_overwrites():
    """feature_names_overwrite replaces the DataFrame column names in the protos."""
    features, labels, ids = mock_dataframes(file_to_open)
    feature_names_overwrite = [
        'mask_' + str(i) for i in range(len(features.columns))
    ]
    preds = BulkPrediction(organization_key=expected['organization_key'],
                           model_id=expected['model'],
                           model_version=expected['model_version'],
                           prediction_ids=ids,
                           prediction_labels=labels,
                           features=features,
                           feature_names_overwrite=feature_names_overwrite,
                           time_overwrite=None)
    records = preds._build_proto()
    for bulk in records:
        assert isinstance(bulk, public__pb2.BulkRecord)
        for r in bulk.records:
            assert isinstance(r, public__pb2.Record)
            assert not bool(r.organization_key)
            assert not bool(r.model_id)
            assert bool(r.prediction.features)
            for feature in r.prediction.features:
                assert feature in feature_names_overwrite
def test_build_bulk_actuals_dataframes():
    """BulkActual preserves input order and bundles every row."""
    _, labels, ids = mock_dataframes(file_to_open)
    actuals = BulkActual(organization_key=expected['organization_key'],
                         model_id=expected['model'],
                         prediction_ids=ids,
                         actual_labels=labels)
    bulk_records = actuals._build_proto()
    record_count = 0
    for bulk in bulk_records:
        assert bulk.organization_key == expected['organization_key']
        assert bulk.model_id == expected['model']
        assert isinstance(bulk.timestamp, Timestamp)
        for i in range(len(bulk.records)):
            record = bulk.records[i]
            assert isinstance(record, public__pb2.Record)
            assert isinstance(record.actual.label, public__pb2.Label)
            # Records come out in the same order as the input ids.
            assert record.prediction_id == ids[0][record_count]
            assert record.actual.label.WhichOneof('data') == 'numeric'
            record_count += 1
    assert record_count == len(ids)
def test_validate_bulk_predictions_missmatched_shapes():
    """Each misaligned input (ids, features, labels, name overwrite) raises ValueError."""
    features, labels, ids = mock_dataframes(file_to_open)
    feature_names_overwrite = [
        'mask_' + str(i) for i in range(len(features.columns))
    ]
    id_ex, feature_ex, label_ex, overwrite_ex = None, None, None, None
    # Truncated prediction_ids vs labels/features.
    try:
        preds = BulkPrediction(organization_key=expected['organization_key'],
                               model_id=expected['model'],
                               model_version=expected['model_version'],
                               prediction_ids=ids[3:],
                               prediction_labels=labels,
                               features=features,
                               feature_names_overwrite=feature_names_overwrite,
                               time_overwrite=None)
        preds.validate_inputs()
    except Exception as err:
        id_ex = err
    # Truncated features vs prediction_ids.
    try:
        preds = BulkPrediction(organization_key=expected['organization_key'],
                               model_id=expected['model'],
                               model_version=expected['model_version'],
                               prediction_ids=ids,
                               prediction_labels=labels,
                               features=features[3:],
                               feature_names_overwrite=None,
                               time_overwrite=None)
        preds.validate_inputs()
    except Exception as err:
        feature_ex = err
    # Truncated labels vs prediction_ids.
    try:
        preds = BulkPrediction(organization_key=expected['organization_key'],
                               model_id=expected['model'],
                               model_version=expected['model_version'],
                               prediction_ids=ids,
                               prediction_labels=labels[3:],
                               features=None,
                               feature_names_overwrite=None,
                               time_overwrite=None)
        preds.validate_inputs()
    except Exception as err:
        label_ex = err
    # Truncated feature-name overwrite vs feature columns.
    try:
        preds = BulkPrediction(
            organization_key=expected['organization_key'],
            model_id=expected['model'],
            model_version=expected['model_version'],
            prediction_ids=ids,
            prediction_labels=labels,
            features=features,
            feature_names_overwrite=feature_names_overwrite[3:],
            time_overwrite=None)
        preds.validate_inputs()
    except Exception as err:
        overwrite_ex = err
    assert isinstance(id_ex, ValueError)
    assert isinstance(feature_ex, ValueError)
    assert isinstance(label_ex, ValueError)
    assert isinstance(overwrite_ex, ValueError)
def test_validate_bulk_predictions_default_columns_int():
    """Numeric (default) feature column names without an overwrite raise TypeError."""
    features, labels, ids = mock_dataframes(file_to_open)
    # Rebuilding from .values drops the header, giving integer column labels.
    features_default_columns = pd.DataFrame(features[:].values)
    ex = None
    try:
        preds = BulkPrediction(organization_key=expected['organization_key'],
                               model_id=expected['model'],
                               model_version=expected['model_version'],
                               prediction_ids=ids,
                               prediction_labels=labels,
                               features=features_default_columns,
                               feature_names_overwrite=None,
                               time_overwrite=None)
        preds.validate_inputs()
    except Exception as err:
        ex = err
    assert isinstance(ex, TypeError)
def test_build_bulk_prediction_with_time_overwrites():
    """Bulk prediction protos must carry the per-record timestamp overwrites."""
    features, labels, ids = mock_dataframes(file_to_open)
    base_epoch = 1593626247
    timestamps = [base_epoch + offset for offset in range(features.shape[0])]
    bulk_preds = BulkPrediction(organization_key=expected['organization_key'],
                                model_id=expected['model'],
                                model_version=expected['model_version'],
                                prediction_ids=ids,
                                prediction_labels=labels,
                                features=features,
                                feature_names_overwrite=None,
                                time_overwrite=timestamps)
    for bulk in bulk_preds._build_proto():
        assert isinstance(bulk, public__pb2.BulkRecord)
        for record in bulk.records:
            assert isinstance(record, public__pb2.Record)
            # Organization key and model id live on the bulk envelope,
            # so the individual records must leave them unset.
            assert not bool(record.organization_key)
            assert not bool(record.model_id)
            assert bool(record.prediction.features)
            assert record.prediction.timestamp is not None
def test_handle_log_prediction_with_time_overwrites():
    """A single prediction proto must honor an explicit epoch-seconds overwrite."""
    overwrite_epoch = 1593626247
    single = Prediction(organization_key=expected['organization_key'],
                        model_id=expected['model'],
                        model_version=expected['model_version'],
                        prediction_id=expected['prediction_id'],
                        prediction_label=expected['value_binary'],
                        features=expected['features'],
                        time_overwrite=overwrite_epoch)
    proto = single._build_proto()
    assert isinstance(proto.prediction, public__pb2.Prediction)
    assert bool(proto.prediction.features)
    assert proto.prediction.timestamp.seconds == overwrite_epoch
|
{"/arize/api.py": ["/arize/utils/types.py", "/arize/model.py", "/arize/utils/utils.py"], "/tests/test_api.py": ["/arize/model.py", "/arize/utils/types.py", "/arize/api.py"], "/tests/extension_test.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"], "/arize/model.py": ["/arize/utils/types.py", "/arize/utils/utils.py"], "/arize/examples/preproduction_client.py": ["/arize/api.py"], "/arize/examples/client.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/PipeLinePlusLambda/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/utils/utils.py": ["/arize/utils/types.py"], "/arize/pandas/logger.py": ["/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/BatchSageMaker/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/bulk_client.py": ["/arize/api.py"], "/arize/examples/tutorials/Partnerships/rayserve.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/log_pandas_dataframe.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"]}
|
37,193,784
|
pharnoux/client_python
|
refs/heads/master
|
/arize/api.py
|
from requests_futures.sessions import FuturesSession
from google.protobuf.json_format import MessageToDict
from arize.bounded_executor import BoundedExecutor
from arize.model import Prediction, Actual, BulkPrediction, BulkActual
class Client:
    """
    Arize API client used to report model predictions and actuals to the
    Arize AI platform over HTTP.

    Requests are sent asynchronously through a FuturesSession backed by a
    BoundedExecutor, so each log_* method returns a Future (or a list of
    Futures for the bulk variants) rather than blocking on the response.
    """
    def __init__(self,
                 api_key: str,
                 organization_key: str,
                 model_id=None,
                 model_version=None,
                 uri='https://api.arize.com/v1',
                 max_workers=8,
                 max_queue_bound=5000,
                 retry_attempts=3,
                 timeout=200):
        """
        :param api_key: (str) api key associated with your account with Arize AI
        :param organization_key: (str) organization key in Arize AI
        :param model_id: (str) optional default model id, used when a log call does not supply one
        :param model_version: (str) optional default model version, used when a log call does not supply one
        :param uri: (str) base URI of the Arize API; '/log' and '/bulk' are appended for the two endpoints
        :param max_workers: (int) number of max concurrent requests to Arize. Default: 8
        :param max_queue_bound: (int) maximum number of concurrent future objects queued for publishing to Arize. Default: 5000
        :param retry_attempts: (int) stored retry configuration. NOTE(review): not referenced elsewhere in this class -- confirm it is consumed by the session layer.
        :param timeout: (int) per-request timeout in seconds passed to the HTTP session. Default: 200
        """
        self._retry_attempts = retry_attempts
        # Single-record and bulk endpoints share the same host, different paths.
        self._uri = uri + '/log'
        self._bulk_url = uri + '/bulk'
        self._api_key = api_key
        self._organization_key = organization_key
        self._model_id = model_id
        self._model_version = model_version
        self._timeout = timeout
        # BoundedExecutor caps the number of in-flight futures so callers
        # block instead of letting the publish queue grow without bound.
        self._session = FuturesSession(
            executor=BoundedExecutor(max_queue_bound, max_workers))
    def log_prediction(self,
                       prediction_id: str,
                       prediction_label,
                       features=None,
                       model_id=None,
                       model_version=None,
                       time_overwrite=None):
        """ Logs a prediction to Arize via a POST request. Returns :class:`Future` object.
        :param prediction_id: (str) Unique string identifier for a specific prediction. This value is used to match a prediction to an actual label in the Arize platform.
        :param prediction_label: (one of bool, str, float, int) The predicted value for a given model input.
        :param features: (str, <value>) Optional dictionary containing human readable and debuggable model features. Keys must be str, values one of str, bool, float, long.
        :param model_id: (str) Optional unique identifier for a given model. If not supplied, defaults to value used during instantiation.
        :param model_version: (str) Optional field used to group together a subset of predictions and actuals for a given model_id. If not supplied, defaults to value used during instantiation.
        :param time_overwrite: (int) Optional unix epoch time in seconds to overwrite the prediction timestamp. If None, the prediction uses the current timestamp.
        :rtype : concurrent.futures.Future
        """
        pred = Prediction(organization_key=self._organization_key,
                          prediction_id=prediction_id,
                          model_id=(model_id or self._model_id),
                          model_version=(model_version or self._model_version),
                          prediction_label=prediction_label,
                          features=features,
                          time_overwrite=time_overwrite)
        # Validate before building the proto so bad input fails fast,
        # synchronously, instead of inside the async POST.
        pred.validate_inputs()
        return self._post(record=pred._build_proto(), uri=self._uri)
    def log_actual(self, prediction_id: str, actual_label, model_id=None):
        """ Logs an actual to Arize via a POST request. Returns :class:`Future` object.
        :param prediction_id: (str) Unique string identifier for a specific prediction. This value is used to match a prediction to an actual label in the Arize platform.
        :param actual_label: (one of bool, str, float, int) The actual true value for a given model input. This actual will be matched to the prediction with the same prediction_id as the one in this call.
        :param model_id: (str) Optional unique identifier for a given model. If not supplied, defaults to value used during instantiation.
        :rtype : concurrent.futures.Future
        """
        actual = Actual(organization_key=self._organization_key,
                        model_id=(model_id or self._model_id),
                        prediction_id=prediction_id,
                        actual_label=actual_label)
        actual.validate_inputs()
        return self._post(record=actual._build_proto(), uri=self._uri)
    def log_bulk_predictions(self,
                             prediction_ids,
                             prediction_labels,
                             features=None,
                             feature_names_overwrite=None,
                             model_id=None,
                             model_version=None,
                             time_overwrite=None):
        """ Logs a collection of predictions with Arize via a POST request. Returns list<:class:`Future`> object.
        :param prediction_ids: 1-D Pandas Dataframe or Series with string elements. Each element corresponds to a unique string identifier for a specific prediction. These values are needed to match latent actual labels to their original prediction labels. Each element corresponds to feature values of the same index.
        :param prediction_labels: 1-D Pandas Dataframe or Series. The predicted values for a given model input. Values are associated to the labels at the same index.
        :param features: Optional 2-D Pandas Dataframe containing human readable and debuggable model features. Dataframe columns (df.columns) should contain feature names and must have the same number of rows as prediction_ids and prediction_labels.
        :param feature_names_overwrite: Optional list<str> that if present will overwrite features.columns values. Must contain the same number of elements as features.columns.
        :param model_id: (str) Optional unique identifier for a given model. If not supplied, defaults to value used during instantiation.
        :param model_version: (str) Optional field used to group together a subset of predictions and actuals for a given model_id. If not supplied, defaults to value used during instantiation.
        :param time_overwrite: (list<int>) Optional list with the same number of elements as prediction_labels, with unix epoch time in seconds to overwrite the timestamp for each prediction. If None, predictions use the current timestamp.
        :rtype : list<concurrent.futures.Future>
        """
        preds = BulkPrediction(organization_key=self._organization_key,
                               model_id=(model_id or self._model_id),
                               model_version=(model_version
                                              or self._model_version),
                               prediction_ids=prediction_ids,
                               prediction_labels=prediction_labels,
                               features=features,
                               feature_names_overwrite=feature_names_overwrite,
                               time_overwrite=time_overwrite)
        preds.validate_inputs()
        # _build_proto() yields multiple bulk records; each is posted separately.
        return self._post_list(records=preds._build_proto(), uri=self._bulk_url)
    def log_bulk_actuals(self, prediction_ids, actual_labels, model_id=None):
        """ Logs a collection of actuals with Arize via a POST request. Returns list<:class:`Future`> object.
        :param prediction_ids: 1-D Pandas Dataframe or Series with string elements. Each element corresponds to a unique string identifier for a specific prediction. These values are needed to match latent actual labels to their original prediction labels. Each element corresponds to feature values of the same index.
        :param actual_labels: 1-D Pandas Dataframe or Series. The actual true values for a given model input. Values are associated to the labels at the same index.
        :param model_id: (str) Optional unique identifier for a given model. If not supplied, defaults to value used during instantiation.
        :rtype : list<concurrent.futures.Future>
        """
        actuals = BulkActual(organization_key=self._organization_key,
                             model_id=(model_id or self._model_id),
                             prediction_ids=prediction_ids,
                             actual_labels=actual_labels)
        actuals.validate_inputs()
        return self._post_list(records=actuals._build_proto(),
                               uri=self._bulk_url)
    def _post_list(self, records, uri):
        """POST each record to *uri*; return the list of Futures in order."""
        responses = []
        for record in records:
            responses.append(self._post(record, uri))
        return responses
    def _post(self, record, uri):
        """Serialize one protobuf record to JSON and POST it asynchronously.

        Returns the Future produced by the FuturesSession.
        """
        payload = MessageToDict(message=record,
                                preserving_proto_field_name=True)
        return self._session.post(uri,
                                  headers={'Authorization': self._api_key},
                                  timeout=self._timeout,
                                  json=payload)
|
{"/arize/api.py": ["/arize/utils/types.py", "/arize/model.py", "/arize/utils/utils.py"], "/tests/test_api.py": ["/arize/model.py", "/arize/utils/types.py", "/arize/api.py"], "/tests/extension_test.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"], "/arize/model.py": ["/arize/utils/types.py", "/arize/utils/utils.py"], "/arize/examples/preproduction_client.py": ["/arize/api.py"], "/arize/examples/client.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/PipeLinePlusLambda/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/utils/utils.py": ["/arize/utils/types.py"], "/arize/pandas/logger.py": ["/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/BatchSageMaker/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/bulk_client.py": ["/arize/api.py"], "/arize/examples/tutorials/Partnerships/rayserve.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/log_pandas_dataframe.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"]}
|
37,193,785
|
pharnoux/client_python
|
refs/heads/master
|
/arize/examples/bulk_client.py
|
import os
import uuid
import pandas as pd
import numpy as np
import concurrent.futures as cf
from arize.api import Client
ITERATIONS = 1  # NOTE(review): appears unused in this script -- confirm before removing.
NUM_RECORDS = 2
# Client credentials come from the environment; the model id/version tag
# this benchmark run in the Arize platform.
arize = Client(organization_key=os.environ.get('ARIZE_ORG_KEY'),
               api_key=os.environ.get('ARIZE_API_KEY'),
               model_id='benchmark_bulk_client',
               model_version='v0.1')
# Random feature matrix: NUM_RECORDS rows x 12 named columns.
features = pd.DataFrame(np.random.randint(0, 100000000,
                                          size=(NUM_RECORDS, 12)),
                        columns=list('ABCDEFGHIJKL'))
pred_labels = pd.DataFrame(
    np.random.randint(0, 100000000, size=(NUM_RECORDS, 1)))
# One UUID per record so the actuals below can be joined to the predictions.
ids = pd.DataFrame([str(uuid.uuid4()) for _ in range(NUM_RECORDS)])
# Lower-case names overwrite the upper-case feature column names on ingest.
column_overwrite = list('abcdefghijkl')
preds = arize.log_bulk_predictions(prediction_ids=ids,
                                   prediction_labels=pred_labels,
                                   features=features,
                                   feature_names_overwrite=column_overwrite)
actuals = arize.log_bulk_actuals(prediction_ids=ids, actual_labels=pred_labels)
# Both calls return lists of futures; drain them together as they complete.
preds.extend(actuals)
for future in cf.as_completed(preds):
    res = future.result()
    print(f'future completed with response code {res.status_code}')
    if res.status_code != 200:
        print(
            f'future failed with response code {res.status_code}, {res.text}')
|
{"/arize/api.py": ["/arize/utils/types.py", "/arize/model.py", "/arize/utils/utils.py"], "/tests/test_api.py": ["/arize/model.py", "/arize/utils/types.py", "/arize/api.py"], "/tests/extension_test.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"], "/arize/model.py": ["/arize/utils/types.py", "/arize/utils/utils.py"], "/arize/examples/preproduction_client.py": ["/arize/api.py"], "/arize/examples/client.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/PipeLinePlusLambda/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/utils/utils.py": ["/arize/utils/types.py"], "/arize/pandas/logger.py": ["/arize/utils/types.py"], "/arize/examples/tutorials/Partnerships/SageMaker/BatchSageMaker/lambda_function.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/bulk_client.py": ["/arize/api.py"], "/arize/examples/tutorials/Partnerships/rayserve.py": ["/arize/api.py", "/arize/utils/types.py"], "/arize/examples/log_pandas_dataframe.py": ["/arize/utils/types.py", "/arize/pandas/logger.py"]}
|
37,241,273
|
Heila-Almogren/FSND_Capstone
|
refs/heads/main
|
/starter_code/backend/src/api.py
|
import os
from flask import Flask, request, jsonify, abort, redirect
from sqlalchemy import exc
import json
from flask_cors import CORS
from .database.models import db_drop_and_create_all, setup_db, Movie, Actor
from .auth.auth import AuthError, requires_auth
from functools import wraps
import json
from os import environ as env
from werkzeug.exceptions import HTTPException
from dotenv import load_dotenv, find_dotenv
from flask import render_template
from flask import session
from flask import url_for
from authlib.integrations.flask_client import OAuth
from six.moves.urllib.parse import urlencode
# Application wiring: create the Flask app, bind the database, enable CORS.
app = Flask(__name__)
setup_db(app)
CORS(app)
oauth = OAuth(app)
# SECURITY NOTE(review): the Auth0 client_id and client_secret are
# hard-coded and committed to source control; they should be loaded from
# environment variables and the committed secret rotated.
auth0 = oauth.register(
    'auth0',
    client_id='6GgkbfV45KVhO5kc6jq3Ot7itvpXW98j',
    client_secret='LX04ZbMM61z27IakrzoilR6Co5qpezk17iuYkBYZ_2et8Dx4iGKS1dCCZbEMWUyZ',
    api_base_url='https://heilafsnd.us.auth0.com',
    access_token_url='https://heilafsnd.us.auth0.com/oauth/token',
    authorize_url='https://heilafsnd.us.auth0.com/authorize',
    client_kwargs={
        'scope': 'openid profile email',
    },
)
# NOTE(review): this drops and recreates every table each time the module
# is imported, destroying existing data -- confirm this is intentional
# outside of a fresh-start/demo deployment.
db_drop_and_create_all()
@app.route('/movies', methods=['GET'])
@requires_auth('get:movies')
def get_movies(jwt):
    """
    List all movies.

    Returns:
        JSON payload with a success flag and the ids of every movie,
        or {'success': False} if the query fails.
    """
    try:
        all_movies = Movie.query.all()
        payload = {
            'success': True,
            "movies": [m.id for m in all_movies],
        }
        return jsonify(payload)
    except Exception as e:
        print(e)
        return jsonify({'success': False})
@app.route('/actors', methods=['GET'])
@requires_auth('get:actors')
def get_actors(jwt):
    """
    List all actors.

    Returns:
        JSON payload with a success flag and the ids of every actor,
        or {'success': False} if the query fails.
    """
    try:
        all_actors = Actor.query.all()
        payload = {
            'success': True,
            "actors": [a.id for a in all_actors],
        }
        return jsonify(payload)
    except Exception as e:
        print(e)
        return jsonify({'success': False})
@app.route('/movies', methods=['POST'])
@requires_auth('post:movies')
def post_movie(jwt):
    """
    Create a new movie from the JSON request body.

    Expects JSON keys 'title' and 'release_date'.

    Returns:
        JSON with a success flag and the newly created movie's fields,
        or {'success': False} on error.
    """
    try:
        title = request.json['title']
        release_date = request.json['release_date']
        nMovie = Movie(title=title, release_date=release_date)
        nMovie.insert()
        # Serialize explicitly: jsonify cannot encode a Movie model
        # instance directly (the model defines no serializer), so the
        # previous `'movie': nMovie` raised and always reported failure.
        return jsonify({
            'success': True,
            'movie': {
                'id': nMovie.id,
                'title': nMovie.title,
                'release_date': str(nMovie.release_date),
            }
        })
    except Exception as e:
        print(e)
        return jsonify({
            'success': False,
        })
@app.route('/actors', methods=['POST'])
@requires_auth('post:actors')
def post_actor(jwt):
    """
    Create a new actor from the JSON request body.

    Expects JSON keys 'name', 'age' and 'gender'.

    Returns:
        JSON with a success flag and the newly created actor's fields,
        or {'success': False} on error.
    """
    try:
        name = request.json['name']
        age = request.json['age']
        gender = request.json['gender']
        nActor = Actor(name=name, age=age, gender=gender)
        nActor.insert()
        # Serialize explicitly: jsonify cannot encode an Actor model
        # instance directly (the model defines no serializer), so the
        # previous `'actor': nActor` raised and always reported failure.
        return jsonify({
            'success': True,
            'actor': {
                'id': nActor.id,
                'name': nActor.name,
                'age': nActor.age,
                'gender': nActor.gender,
            }
        })
    except Exception as e:
        print(e)
        return jsonify({
            'success': False,
        })
@app.route('/movies/<int:id>', methods=['PATCH'])
@requires_auth('patch:movies')
def patch_movie(jwt, id):
    """
    Edit an existing movie.

    Args:
        id: The id of the movie to be edited.

    Returns:
        JSON with a success flag and the edited movie's fields;
        a 404 payload when no movie has that id.
    """
    try:
        body = request.get_json()
        ntitle = body.get('title', None)
        nrelease = body.get('release_date', None)
        movie = Movie.query.filter(Movie.id == id).one_or_none()
        if movie is None:
            return jsonify({
                'success': False,
                'error': 404,
                'message': 'Not found'
            }), 404
        # Only overwrite fields that were actually supplied. The previous
        # check compared against the string 'null', so an omitted field
        # (body.get(...) -> None) wiped the stored value with None.
        if ntitle is not None:
            movie.title = ntitle
        if nrelease is not None:
            movie.release_date = nrelease
        movie.update()
        # jsonify cannot encode a Movie model instance; return plain fields.
        return jsonify({
            'success': True,
            'movie': {
                'id': movie.id,
                'title': movie.title,
                'release_date': str(movie.release_date),
            }
        })
    except Exception as e:
        print(e)
        return jsonify({
            'success': False,
        })
@app.route('/actors/<int:id>', methods=['PATCH'])
@requires_auth('patch:actors')
def patch_actor(jwt, id):
    """
    Edit an existing actor.

    Args:
        id: The id of the actor to be edited.

    Returns:
        JSON with a success flag and the edited actor's fields;
        a 404 payload when no actor has that id.
    """
    try:
        body = request.get_json()
        nname = body.get('name', None)
        nage = body.get('age', None)
        ngender = body.get('gender', None)
        actor = Actor.query.filter(Actor.id == id).one_or_none()
        if actor is None:
            return jsonify({
                'success': False,
                'error': 404,
                'message': 'Not found'
            }), 404
        # Only overwrite fields that were actually supplied. The previous
        # check compared against the string 'null', so an omitted field
        # (body.get(...) -> None) wiped the stored value with None.
        if nname is not None:
            actor.name = nname
        if nage is not None:
            actor.age = nage
        if ngender is not None:
            actor.gender = ngender
        actor.update()
        # jsonify cannot encode an Actor model instance; return plain fields.
        return jsonify({
            'success': True,
            'actor': {
                'id': actor.id,
                'name': actor.name,
                'age': actor.age,
                'gender': actor.gender,
            }
        })
    except Exception as e:
        print(e)
        return jsonify({
            'success': False,
        })
@app.route('/movies/<int:id>', methods=['DELETE'])
@requires_auth('delete:movies')
def delete_movie(jwt, id):
    """
    Delete a movie.

    Args:
        id: The id of the movie to be deleted.

    Returns:
        JSON with a success flag and the id of the deleted movie;
        a 404 payload when no movie has that id.
    """
    try:
        movie = Movie.query.filter(Movie.id == id).one_or_none()
        # Previously a missing id raised AttributeError on .delete() and
        # was swallowed as a generic failure; answer with an explicit 404.
        if movie is None:
            return jsonify({
                'success': False,
                'error': 404,
                'message': 'Not found'
            }), 404
        movie.delete()
        return jsonify({
            'success': True,
            "delete": id
        })
    except Exception as e:
        print(e)
        return jsonify({
            'success': False,
        })
@app.route('/actors/<int:id>', methods=['DELETE'])
@requires_auth('delete:actors')
def delete_actor(jwt, id):
    """
    Delete an actor.

    Args:
        id: The id of the actor to be deleted.

    Returns:
        JSON with a success flag and the id of the deleted actor;
        a 404 payload when no actor has that id.
    """
    try:
        actor = Actor.query.filter(Actor.id == id).one_or_none()
        # Previously a missing id raised AttributeError on .delete() and
        # was swallowed as a generic failure; answer with an explicit 404.
        if actor is None:
            return jsonify({
                'success': False,
                'error': 404,
                'message': 'Not found'
            }), 404
        actor.delete()
        return jsonify({
            'success': True,
            "delete": id
        })
    except Exception as e:
        print(e)
        return jsonify({
            'success': False,
        })
# Error Handling
@app.errorhandler(422)
def unprocessable(error):
    """Return the JSON body for a 422 (unprocessable) error."""
    payload = {
        "success": False,
        "error": 422,
        "message": "unprocessable",
    }
    return jsonify(payload), 422
@app.errorhandler(404)
def not_found(error):
    """Return the JSON body for a 404 (Not found) error."""
    payload = {
        "success": False,
        "error": 404,
        "message": "Not found",
    }
    return jsonify(payload), 404
@app.errorhandler(400)
def Bad_request(error):
    """Return the JSON body for a 400 (Bad request) error."""
    payload = {
        "success": False,
        "error": 400,
        "message": "Bad request",
    }
    return jsonify(payload), 400
@app.errorhandler(405)
def method_not_allowed(error):
    """Return the JSON body for a 405 (method not allowed) error."""
    payload = {
        "success": False,
        "error": 405,
        "message": "method not allowed",
    }
    return jsonify(payload), 405
@app.errorhandler(500)
def Server_error(error):
    """Return the JSON body for a 500 (Server error) error."""
    payload = {
        "success": False,
        "error": 500,
        "message": "Server error",
    }
    return jsonify(payload), 500
@app.errorhandler(403)
def forbidden(error):
    """Return the JSON body for a 403 (forbidden) error."""
    payload = {
        "success": False,
        "error": 403,
        "message": "forbidden",
    }
    return jsonify(payload), 403
@app.errorhandler(409)
def Duplicate_resource(error):
    """Return the JSON body for a 409 (Duplicate resource) error."""
    payload = {
        "success": False,
        "error": 409,
        "message": "Duplicate resource",
    }
    return jsonify(payload), 409
'''
@TODO implement error handlers using the @app.errorhandler(error) decorator
each error handler should return (with approprate messages):
jsonify({
"success": False,
"error": 404,
"message": "resource not found"
}), 404
'''
'''
@TODO implement error handler for 404
error handler should conform to general task above
'''
'''
@TODO implement error handler for AuthError
error handler should conform to general task above
'''
@app.errorhandler(AuthError)
def Auth_Error(error):
    """Return the JSON body for a 401 (Authentication error)."""
    payload = {
        "success": False,
        "error": 401,
        "message": "Authentication error",
    }
    return jsonify(payload), 401
|
{"/tests.py": ["/app.py", "/models.py"], "/app.py": ["/models.py"], "/starter_code/backend/src/api.py": ["/starter_code/backend/src/database/models.py"]}
|
37,241,274
|
Heila-Almogren/FSND_Capstone
|
refs/heads/main
|
/starter_code/backend/src/database/models.py
|
from sqlalchemy.sql import case
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy import func
from enum import Enum
from flask_migrate import Migrate
import dateutil.parser
import babel
from flask import Flask, render_template, request, Response, flash, redirect, url_for
from flask_moment import Moment
import logging
from logging import Formatter, FileHandler
from flask_wtf import Form
import os
from sqlalchemy import Column, String, Integer
from flask_sqlalchemy import SQLAlchemy
import json
import datetime
import os
# Random secret for session signing; regenerated on every process start.
SECRET_KEY = os.urandom(32)
basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = True  # NOTE(review): not referenced in this module -- confirm use.
# Shared SQLAlchemy handle; bound to the Flask app in setup_db().
db = SQLAlchemy()
def setup_db(app):
    """Bind the module-level SQLAlchemy instance to *app*.

    The connection string can be overridden via the DATABASE_URL
    environment variable; it falls back to the original hard-coded local
    default, so existing deployments keep working unchanged.
    """
    app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
        'DATABASE_URL', 'postgresql://postgres@localhost:5432/agency')
    # Disable the event system Flask-SQLAlchemy warns about.
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    db.app = app
    db.init_app(app)
def db_drop_and_create_all():
    """Drop every table and recreate the schema.

    WARNING: destroys all existing data; intended for a fresh start.
    """
    db.drop_all()
    db.create_all()
class Movie(db.Model):
    """A movie row in the casting-agency database."""
    __tablename__ = 'Movie'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String)
    # Defaults to the insertion time when no release date is supplied.
    release_date = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    def insert(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()
    def delete(self):
        """Remove this row from the session and commit."""
        db.session.delete(self)
        db.session.commit()
    def update(self):
        """Commit any pending attribute changes on this row."""
        db.session.commit()
class Actor(db.Model):
    """An actor row in the casting-agency database."""
    __tablename__ = 'Actor'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    age = db.Column(db.Integer)
    gender = db.Column(db.String)
    def insert(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()
    def delete(self):
        """Remove this row from the session and commit."""
        db.session.delete(self)
        db.session.commit()
    def update(self):
        """Commit any pending attribute changes on this row."""
        db.session.commit()
|
{"/tests.py": ["/app.py", "/models.py"], "/app.py": ["/models.py"], "/starter_code/backend/src/api.py": ["/starter_code/backend/src/database/models.py"]}
|
37,249,342
|
0xDECAFC0FFEE/edge_meta_learning
|
refs/heads/master
|
/prune_0_iter_exps.py
|
import os
from pathlib import Path
from pprint import pprint
import shutil
import json
log_dir = Path("logs")
tensorboard_dir = Path("tensorboard")
# Experiment logs are directories named "expr..." inside log_dir.
# (The redundant second sort and list() wrapper from the original are gone:
# sorted() already returns a sorted list.)
files = sorted(f for f in os.listdir(log_dir) if str(f).startswith("expr"))
expr_iterations = {}
expr_params_iterations = {}
for expr_log in files:
    # prune_iterations.txt stores how many prune iterations completed;
    # a missing file means the experiment never ran a single iteration.
    iter_file = log_dir / expr_log / "prune_iterations.txt"
    if iter_file.exists():
        with open(iter_file, "r") as handle:
            expr_iterations[expr_log] = int(handle.readline().strip())
    else:
        expr_iterations[expr_log] = 0
    params_file = log_dir / expr_log / "expr_params.json"
    if params_file.exists():
        with open(params_file, "r") as handle:
            expr_params_iterations[expr_log] = json.load(handle)
    else:
        expr_params_iterations[expr_log] = {}
files_to_delete = []
for expr_id in files:
    # expr_iterations was built from this same list, so every id is present;
    # the original's membership check was redundant.
    if expr_iterations[expr_id] == 0:
        print("\u001b[31m", end="")  # red: candidate for deletion
        files_to_delete.append(expr_id)
    else:
        print("\u001b[0m", end="")  # back to the default colour
    print(expr_id, expr_iterations[expr_id])
# Reset the colour so the prompt is not left red when the last entry
# was a deletion candidate (the original leaked the escape state).
print("\u001b[0m", end="")
delete = input(f"found {len(files_to_delete)} 0 iteration logs. delete? (y/n)")
if delete == "y":
    for expr_id in files_to_delete:
        # Remove both the experiment log and its tensorboard run, if present.
        for base in (log_dir, tensorboard_dir):
            target = base / expr_id
            if target.exists():
                print(f"deleting {target}")
                shutil.rmtree(target)
|
{"/src/models/lth_maml.py": ["/src/models/mask_ops.py", "/src/models/lth.py", "/src/models/meta.py"], "/src/models/lth.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/vgg.py", "/src/models/lenet.py"], "/src/models/meta.py": ["/src/models/mask_ops.py"], "/src/models/rigl_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"], "/src/load_data.py": ["/src/utils.py"], "/src/models/sparse_maml.py": ["/src/utils.py", "/src/models/mask_ops.py", "/src/models/lth.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.