index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
29,421
|
nliaoTW/simple_rest_service
|
refs/heads/main
|
/profile_api/views/ProfileViewSet.py
|
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from profile_api import serializers
from profile_api import models
from profile_api import permissions
class ProfileViewSet(viewsets.ModelViewSet):
    """Handle creating and updating profiles.

    Standard DRF ModelViewSet over all UserProfile rows: token
    authentication, per-object write access gated by UpdateOwnProfile,
    and ?search= filtering on name/email.
    """
    serializer_class = serializers.SimpleServiceSerializer
    queryset = models.UserProfile.objects.all()
    # Token auth only; the permission class restricts edits to the owner.
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permissions.UpdateOwnProfile,)
    # Enables ?search=<term> filtering on the fields below.
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email',)
|
{"/profile_api/views/__init__.py": ["/profile_api/views/ProfileViewSet.py", "/profile_api/views/UserLoginApiView.py", "/profile_api/views/UserProfileFeedViewSet.py"], "/profile_api/models/__init__.py": ["/profile_api/models/ProfileFeedItem.py"]}
|
29,422
|
nliaoTW/simple_rest_service
|
refs/heads/main
|
/profile_api/views/UserProfileFeedViewSet.py
|
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from profile_api import serializers
from profile_api import models
from profile_api import permissions
class UserProfileFeedViewSet(viewsets.ModelViewSet):
    """Handle creating, reading and updating profile feed items.

    All access requires an authenticated, token-bearing user; updates to
    an item are additionally gated by UpdateOwnStatus.
    """
    authentication_classes = (TokenAuthentication,)
    serializer_class = serializers.ProfileFeedItemSerializer
    queryset = models.ProfileFeedItem.objects.all()
    permission_classes = (permissions.UpdateOwnStatus, IsAuthenticated)

    def perform_create(self, serializer):
        """Sets the user profile to the logged in user"""
        # Stamp the item with the requesting user so clients cannot create
        # feed items on behalf of someone else.
        serializer.save(user_profile=self.request.user)
|
{"/profile_api/views/__init__.py": ["/profile_api/views/ProfileViewSet.py", "/profile_api/views/UserLoginApiView.py", "/profile_api/views/UserProfileFeedViewSet.py"], "/profile_api/models/__init__.py": ["/profile_api/models/ProfileFeedItem.py"]}
|
29,423
|
nliaoTW/simple_rest_service
|
refs/heads/main
|
/profile_api/tests/integration/tests_HelloView.py
|
from django.test import TestCase
from rest_framework.test import APIClient
class HelloViewIntegrationTests(TestCase):
    """Integration tests for the /api/hello-view/ APIView endpoint."""

    def setUp(self):
        # Shared fixture: every test previously rebuilt the client and URI
        # inline (with stray semicolons); hoist into setUp.
        self.client = APIClient()
        self.uri = '/api/hello-view/'

    def test_get(self):
        """GET returns the greeting and the APIView feature list."""
        resp = self.client.get(self.uri)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data['message'], 'Hello!')
        self.assertEqual(resp.data['an_apiview'], [
            'Users HTTP method as function (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over your application logic',
            'Is mapped manually to URLs'
        ])

    def test_post(self):
        """POST with a name echoes a personalized greeting."""
        post_data = {'name': 'test'}
        resp = self.client.post(self.uri, post_data)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data['message'], 'Hello test')

    def test_put(self):
        """PUT is accepted and reports its method."""
        resp = self.client.put(self.uri)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data['method'], 'PUT')

    def test_patch(self):
        """PATCH is accepted and reports its method."""
        resp = self.client.patch(self.uri)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data['method'], 'PATCH')

    def test_delete(self):
        """DELETE is accepted and reports its method."""
        resp = self.client.delete(self.uri)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data['method'], 'DELETE')
|
{"/profile_api/views/__init__.py": ["/profile_api/views/ProfileViewSet.py", "/profile_api/views/UserLoginApiView.py", "/profile_api/views/UserProfileFeedViewSet.py"], "/profile_api/models/__init__.py": ["/profile_api/models/ProfileFeedItem.py"]}
|
29,424
|
nliaoTW/simple_rest_service
|
refs/heads/main
|
/intergration_tests/feed_tests.py
|
import requests
import json
import os

# NOTE(review): the API token was previously hard-coded here — a committed
# credential. Prefer the API_TOKEN environment variable; the literal is kept
# only as a fallback so existing local runs keep working, and should be
# rotated.
token = os.environ.get('API_TOKEN', '21728c56cfa66d7ef9990f7aa1c1b1d0d69595d9')
url = 'http://localhost:8000/api/feed/'


def _auth_header():
    """Build the Authorization header shared by every request."""
    return {
        'Authorization': 'Token ' + token
    }


def test_get_feed():
    """An authenticated GET on the feed endpoint returns 200."""
    resp = requests.get(url, headers=_auth_header())
    assert resp.status_code == 200


def test_post_feed():
    """Posting a status item returns 201 Created."""
    payload = {
        'status_text': 'test_status'
    }
    resp = requests.post(url, headers=_auth_header(), data=payload)
    assert resp.status_code == 201
|
{"/profile_api/views/__init__.py": ["/profile_api/views/ProfileViewSet.py", "/profile_api/views/UserLoginApiView.py", "/profile_api/views/UserProfileFeedViewSet.py"], "/profile_api/models/__init__.py": ["/profile_api/models/ProfileFeedItem.py"]}
|
29,425
|
nliaoTW/simple_rest_service
|
refs/heads/main
|
/profile_api/models/__init__.py
|
# Package facade: re-export every model class so callers can simply do
# `from profile_api import models` and reach UserProfile, UserProfileManager
# and ProfileFeedItem as attributes.
from .UserProfileManager import *
from .UserProfile import *
from .ProfileFeedItem import *
|
{"/profile_api/views/__init__.py": ["/profile_api/views/ProfileViewSet.py", "/profile_api/views/UserLoginApiView.py", "/profile_api/views/UserProfileFeedViewSet.py"], "/profile_api/models/__init__.py": ["/profile_api/models/ProfileFeedItem.py"]}
|
29,426
|
nliaoTW/simple_rest_service
|
refs/heads/main
|
/profile_api/tests/integration/tests_Profile.py
|
from django.test import TestCase
from rest_framework.test import APIClient
class ProfileIntegrationTest(TestCase):
    """Integration tests for the /api/profile/ ModelViewSet endpoint."""

    def setUp(self):
        # Shared fixture: both tests previously rebuilt the client and URI
        # inline (with stray semicolons); hoist into setUp.
        self.client = APIClient()
        self.uri = '/api/profile/'

    def test_get(self):
        """Listing profiles on a fresh test database returns an empty list."""
        resp = self.client.get(self.uri)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data, [])

    def test_post(self):
        """Creating a profile echoes id, email and name (never the password)."""
        post_data = {'email': 'test_user@test.com', 'name': 'test_user', 'password': 'test_password'}
        resp = self.client.post(self.uri, post_data)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.data['id'], 1)
        self.assertEqual(resp.data['email'], 'test_user@test.com')
        self.assertEqual(resp.data['name'], 'test_user')
|
{"/profile_api/views/__init__.py": ["/profile_api/views/ProfileViewSet.py", "/profile_api/views/UserLoginApiView.py", "/profile_api/views/UserProfileFeedViewSet.py"], "/profile_api/models/__init__.py": ["/profile_api/models/ProfileFeedItem.py"]}
|
29,467
|
lepisma/lol
|
refs/heads/master
|
/lol/types.py
|
from typing import Tuple
import numpy as np

# An in-memory audio clip as produced by librosa.load: (samples, sample_rate).
# NOTE(review): samples are presumably a 1-D mono float array — confirm
# against callers, which load with mono=True.
Audio = Tuple[np.ndarray, int]
|
{"/lol/cli.py": ["/lol/model.py"], "/lol/model.py": ["/lol/types.py"]}
|
29,468
|
lepisma/lol
|
refs/heads/master
|
/lol/cli.py
|
"""
lol
Usage:
lol train --audio-dir=<audio-dir> --transforms-file=<transforms-file> --output-model=<output-model>
lol --audio-dir=<audio-dir> --model=<model> --output-csv=<output-csv>
Options:
--transforms-file=<transforms-file> Plain text file with lines mapping to ffmpeg lossy transforms.
"""
import csv
import os
import pickle
import random
import shlex
import subprocess as sp
from glob import glob
from typing import List
import librosa
from docopt import docopt
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.svm import SVC
from tqdm import tqdm
from lol import __version__
from lol.model import Featurizer
def list_audios(directory: str) -> List[str]:
    """Return the paths of all top-level .wav files inside *directory*."""
    pattern = os.path.join(directory, "*.wav")
    return glob(pattern)
def prepare_lossy_examples(files: List[str], output_dir: str, transforms: List[str]):
    """Create lossy copies of *files* in *output_dir*.

    Each file is compressed with a randomly chosen ffmpeg transform via an
    intermediate mp3, then normalized back to an 8 kHz wav of the same
    basename. The random choice is seeded for reproducibility.
    """
    random.seed(1234)
    # HACK: fixed scratch file; acceptable for a single-process CLI run.
    tmp_file = "/tmp/lol.mp3"
    base_transform = "-f wav -ar 8k"
    for f in tqdm(files):
        transform = random.choice(transforms)
        # Pass argument lists with shell=False (the default) instead of
        # interpolated shell strings: filenames and transform text can no
        # longer be interpreted by a shell.
        encode_cmd = ["ffmpeg", "-i", f, *shlex.split(transform), tmp_file,
                      "-y", "-hide_banner", "-loglevel", "warning"]
        sp.run(encode_cmd)
        output_file = os.path.join(output_dir, os.path.basename(f))
        decode_cmd = ["ffmpeg", "-i", tmp_file, *shlex.split(base_transform),
                      output_file, "-y", "-hide_banner", "-loglevel", "warning"]
        sp.run(decode_cmd)
def main():
    """CLI entry point: train a codec-detection model or run predictions.

    In `train` mode: generate lossy variants of the input audios, label
    originals 1 and lossy copies 0, fit a Featurizer + linear-SVC pipeline
    and pickle it. Otherwise: load a pickled pipeline and write per-file
    predictions to a CSV.
    """
    args = docopt(__doc__, version=__version__)
    if args["train"]:
        # One ffmpeg transform description per line.
        with open(args["--transforms-file"]) as fp:
            transforms = [line.strip() for line in fp.readlines()]
        files = list_audios(args["--audio-dir"])
        staging_dir = os.path.join(args["--audio-dir"], "lol-staging")
        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)
        prepare_lossy_examples(files, staging_dir, transforms)
        # TODO: Chunk audios
        # Originals are the positive class, staged lossy copies the negative.
        audios = [librosa.load(f, sr=8000, mono=True, duration=10) for f in tqdm(files)]
        lossy_audios = [librosa.load(f, sr=8000, mono=True, duration=10) for f in tqdm(list_audios(staging_dir))]
        X = audios + lossy_audios
        y = [1] * len(audios) + [0] * len(lossy_audios)
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1234)
        f = Featurizer()
        clf = SVC(kernel="linear")
        pipeline = make_pipeline(f, clf)
        pipeline.fit(X_train, y_train)
        # Report held-out performance before persisting the model.
        y_test_pred = pipeline.predict(X_test)
        print(classification_report(y_test, y_test_pred))
        with open(args["--output-model"], "wb") as fp:
            pickle.dump(pipeline, fp)
    else:
        # Prediction mode: load the pickled pipeline and score each file.
        files = list_audios(args["--audio-dir"])
        with open(args["--model"], "rb") as fp:
            pipeline = pickle.load(fp)
        audios = [librosa.load(f, sr=8000, mono=True, duration=10) for f in tqdm(files)]
        preds = pipeline.predict(audios)
        with open(args["--output-csv"], "w") as fp:
            writer = csv.writer(fp)
            writer.writerow(["filepath", "pred"])
            for f, p in zip(files, preds):
                writer.writerow([f, p])
|
{"/lol/cli.py": ["/lol/model.py"], "/lol/model.py": ["/lol/types.py"]}
|
29,469
|
lepisma/lol
|
refs/heads/master
|
/lol/model.py
|
"""
Models and features stuff
"""
from typing import List
import librosa
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from lol.types import Audio
def melspectrogram(
    y=None,
    sr=22050,
    S=None,
    n_fft=2048,
    hop_length=512,
    win_length=None,
    window="hann",
    center=True,
    pad_mode="reflect",
    power=2.0,
    **kwargs,
):
    """
    Patched melspectrogram function.

    Identical to librosa's melspectrogram except that the mel filter bank
    is flipped along axis 1 (the fft-bin axis) before being applied — the
    "inverted mel filters" mentioned in Featurizer's docstring.
    NOTE(review): relies on librosa's private `core.spectrum._spectrogram`
    and the positional `filters.mel(sr, n_fft)` signature; both can break
    across librosa versions.
    """
    S, n_fft = librosa.core.spectrum._spectrogram(
        y=y,
        S=S,
        n_fft=n_fft,
        hop_length=hop_length,
        power=power,
        win_length=win_length,
        window=window,
        center=center,
        pad_mode=pad_mode,
    )
    # Build a Mel filter
    mel_basis = librosa.filters.mel(sr, n_fft, **kwargs)
    # Reverse each filter across the fft bins ("inverting" the bank).
    mel_basis = np.flip(mel_basis, axis=1)
    return np.dot(mel_basis, S)
class Featurizer(BaseEstimator, TransformerMixin):
    """
    Default featurizer for audios. Uses inverted mel filters for MFCCs.

    transform() maps a list of (samples, sample_rate) pairs to a
    (len(X), 20) matrix of time-averaged MFCCs computed from the patched
    (inverted-mel) spectrogram defined above.
    """

    def fit(self, X: List[Audio], y=None):
        """No-op fit, present for sklearn pipeline compatibility."""
        return self

    def transform(self, X: List[Audio], y=None):
        """Return an (len(X), n_mfcc) feature matrix for the audios in X."""
        n_mfcc = 20
        output = np.zeros((len(X), n_mfcc))
        # Fix: the loop previously unpacked into `y`, shadowing the sklearn
        # `y` parameter; use distinct local names for clarity and safety.
        for i, (samples, sr) in enumerate(X):
            S = librosa.power_to_db(melspectrogram(samples, sr))
            output[i, :] = librosa.feature.mfcc(sr=sr, S=S, n_mfcc=n_mfcc).mean(axis=1)
        return output
|
{"/lol/cli.py": ["/lol/model.py"], "/lol/model.py": ["/lol/types.py"]}
|
29,473
|
nathan-yan/opurtbot-real
|
refs/heads/main
|
/server.py
|
import subprocess
from aiohttp import web
import asyncio
import socketio
import os
import re
import sys
# Matches the "<name>" prefix of minecraft chat log lines.
chat_reg = re.compile("<[^ ]+>")
from queue import Queue
# NOTE(review): inputQ is never referenced in this file — likely dead code.
inputQ = Queue()
async def minecraft_handler(process, sock):
    """Pump the minecraft server's stdout and forward events over socketio.

    Emits 'joinleave' for player join/leave log lines and 'minecraft-chat'
    for chat lines ("<user> message"), until the process output ends.
    """
    print('bruh')
    while True:
        line = await process.stdout.readline()
        print('test ' + line.decode()[:-1])
        # NOTE(review): readline() keeps the trailing newline, so this only
        # matches a final, unterminated "quit" — confirm intent.
        if line == b'quit':
            print("quit the minecraft thread")
            break;
        if line == b'':
            # EOF: the subprocess has exited.
            print("process has stopped")
            break;
        line = line.decode()
        if "joined the game" in line:
            # The player name is the word immediately before the phrase.
            end_idx = line.index(" joined the game")
            start_idx = line.rindex(' ', 0, end_idx)
            name = line[start_idx + 1: end_idx]
            await sock.emit('joinleave', {
                "task" : "message-discord-joinleave",
                "user" : name,
                "message" : "%s joined the game 💎" % name,
                "joining" : True
            })
        elif "left the game" in line:
            end_idx = line.index(" left the game")
            start_idx = line.rindex(' ', 0, end_idx)
            name = line[start_idx + 1: end_idx]
            await sock.emit('joinleave', {
                "task" : "message-discord-joinleave",
                "user" : name,
                "message" : "%s left the game 🏃" % name,
                "joining" : False
            })
        # Chat lines contain "<user>"; forward user + trailing message text.
        match = chat_reg.search(line)
        if match:
            print("found match!")
            span = match.span()
            user = line[span[0] + 1 : span[1] - 1]
            message = line[span[1] + 1 : -1]
            await sock.emit('minecraft-chat', {
                "task" : "message-discord",
                "user" : user,
                "message" : message
            })
async def run_server(runner):
    """Set up the aiohttp app runner and start serving on 0.0.0.0:5000."""
    print("running server")
    await runner.setup()
    host, port = "0.0.0.0", 5000
    site = web.TCPSite(runner, host, port)
    print("run")
    await site.start()
    print("ran")
async def main():
    """Launch the minecraft subprocess and the socketio bridge, then run
    the web server and the stdout pump concurrently."""
    # The minecraft launch command line is passed verbatim as argv[1].
    p = await asyncio.create_subprocess_shell(sys.argv[1],
        stdout = asyncio.subprocess.PIPE,
        stdin = asyncio.subprocess.PIPE,
        stderr = asyncio.subprocess.STDOUT)
    sock = socketio.AsyncServer()
    app = web.Application()
    sock.attach(app)

    @sock.event
    async def connect(sid, environ):
        print("connection:", sid, environ)
        await sock.emit('ack')

    @sock.on('discord-chat')
    async def recv_message(sid, data):
        # Relay a discord message into minecraft chat via tellraw.
        print("message", data)
        # do stuff here
        command = 'tellraw @a {"text": "[%s] %s", "color" : "green"}' % (data['user'], data['message'].replace('\n', ' | '))
        p.stdin.write((command + '\n').encode())
        await sock.emit('ack')

    @sock.event
    async def disconnect(sid):
        print("disconnect", sid)

    @sock.on('quit')
    async def quit_minecraft(sid):
        # 'stop' makes the server save and exit gracefully.
        p.stdin.write(b'stop\n') #await p.kill()

    runner = web.AppRunner(app)
    await asyncio.gather(
        run_server(runner),
        minecraft_handler(p, sock),
    )

if __name__ == "__main__":
    asyncio.run(main())
|
{"/opurtbot.py": ["/utils.py"]}
|
29,474
|
nathan-yan/opurtbot-real
|
refs/heads/main
|
/opurtbot.py
|
import discord
from discord.ext import tasks, commands
import asyncio
import socketio
import threading
import subprocess
import time
from queue import Queue, Empty
from threading import Thread
from requests import get
import os
from dotenv import load_dotenv
import re
import boto3
import utils
load_dotenv()
# Shared AWS client used to start/stop the minecraft EC2 instance.
ec2 = boto3.client('ec2')
# Matches the "[INFO] <name>" prefix of minecraft chat log lines.
chat_reg = re.compile("\[INFO\] <[^ ]+>")
# Names of players currently on the server (maintained via socketio events).
active_players = set()
class SpinupThread (threading.Thread):
    """Thread that constructs a Spinup discord client and runs it."""

    def __init__(self, ):
        super().__init__()

    def run(self):
        bot = Spinup()
        bot.run(os.getenv('DISCORD_TOKEN'))
class ServerThread (threading.Thread):
    """Thread intended to run the minecraft server locally.

    NOTE(review): `run_minecraft` is not defined anywhere in this file, so
    starting this thread would raise NameError — confirm whether this class
    is dead code or the helper lives elsewhere.
    """
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        run_minecraft([])
class Spinup(discord.Client):
    """Discord client that manages the minecraft EC2 instance.

    Bridges chat to/from minecraft, runs a 5-vote spinup process, handles
    spin-down, lock/unlock and status queries, and plays audio clips.
    """

    # Chat command -> clip file (implements the old TODO: "Put these in a
    # dictionary or smth" — replaces nine copy-pasted handlers).
    _CLIPS = {
        "!clipthat": "./audio/wardell_clipthat.mp3",
        "!yessir": "./audio/wardell_yessir.mp3",
        "!yooo": "./audio/csgo_niceknife.mp3",
        "!bwaaa": "./audio/victory.mp3",
        "!bwaa": "./audio/imposter_victory.mp3",
        "!delib": "./audio/naruto_deliberation.mp3",
        "!ez4ence": "./audio/ez4ence.mp3",
        "!windows95": "./audio/windows95.mp3",
        "!universal": "./audio/universal.mp3",
    }

    def __init__(self):
        super().__init__()
        self.voting = False           # a spinup vote is in progress
        self.voted = set()            # members who have voted !yes
        self.running = False          # EC2 instance believed to be up
        self.upsince = 0              # timestamp of the last spinup
        self.voteStarted = 0          # timestamp the current vote began
        self.voteChannel = None       # channel the vote was started in
        self.locked = False           # admin lock preventing spinup
        self.dimensional_rift = None  # chat-bridge channel
        self.ip = None                # cached public IP of the instance
        self.vc = None                # active voice client, if any
        self.sock = None              # socketio client to the bridge server
        self.sock_connected = False

    async def on_ready(self):
        """Resolve the channels used for chat bridging and status display."""
        print('Logged on as {0}!'.format(self.user))
        self.dimensional_rift = discord.utils.get(self.get_all_channels(), name = "dimensional-rift")
        self.server_status = discord.utils.get(self.get_all_channels(), name = "server-status")

    async def _play_clip(self, message, path):
        """Join the author's voice channel, play *path*, then disconnect."""
        try:
            self.vc = await message.author.voice.channel.connect()
            self.vc.play(discord.FFmpegPCMAudio(path))
            while self.vc and self.vc.is_playing():
                await asyncio.sleep(.1)
            await self.vc.disconnect()
        except discord.errors.ClientException:
            await message.channel.send("opurtbot is already playing a clip")

    async def on_message(self, message):
        """Dispatch on the message's channel and its `!`/`#` command."""
        # Bug fix: `active_players` is assigned in the !spindown branch below;
        # without this declaration Python treats the name as local for the
        # whole method, so the !whois branch raised UnboundLocalError and
        # !spindown silently failed to clear the module-level set.
        global active_players
        print(message.author.id, message.channel.name, message.channel.id)
        if message.channel.name == 'dimensional-rift':
            # Ignore our own bridge messages (```-fenced) to avoid echo loops.
            # (Was `client.user`, reaching for the module global; `self` is
            # the same running client.)
            if (message.author == self.user) and message.content.startswith("```"):
                return
            await self.sock.emit('discord-chat', {
                "task" : 'message-minecraft',
                "message" : message.content,
                "user" : message.author.nick
            })
        if message.content.startswith('#purge'):
            summary = {}
            num = int(message.content.split(" ")[1])
            if num > 10:
                num = 10
            num += 1  # include the #purge message itself
            if 'admin' in [r.name for r in message.author.roles]:
                history = await message.channel.history(limit = 100).flatten()
                for m in history[:num]:
                    if m.author.display_name not in summary:
                        summary[m.author.display_name] = 1
                    else:
                        summary[m.author.display_name] += 1
                summary_msg = ">>> "
                for n in summary:
                    summary_msg += n + ": " + str(summary[n]) + "\n"
                await message.channel.delete_messages(history[:num])
                await message.channel.send(summary_msg)
        # Audio clip commands: exact-match dispatch through the table above.
        clip_path = self._CLIPS.get(message.content)
        if clip_path is not None:
            await self._play_clip(message, clip_path)
        if message.content == '!!delib':
            # Hard-stop for a playing clip.
            if self.vc:
                await self.vc.disconnect()
                self.vc = None
        if message.content.startswith("!spinup"):
            return # disable spinup so people can't spend my money -Kavel
            # NOTE(review): the voting logic below is unreachable while the
            # early return above is in place; kept so it can be re-enabled.
            if self.voting:
                await message.channel.send("you mf clown there's already an active vote")
            elif self.running:
                await message.channel.send("the server is already up u fool")
            elif self.locked:
                await message.channel.send("the server is locked! nathan's probably playing valorant...")
            else:
                # The owner may bypass voting unless opting out with "nosudo".
                if (message.author.id == 279456734773510145) and not message.content.endswith("nosudo"):
                    await self.spinup(message)
                else:
                    await message.channel.send("starting vote, need 5 people to confirm. you have 3 MINUTES to vote [type `!yes` to vote, `!no` to cancel your existing vote]")
                    self.voteChannel = message.channel
                    self.voteStarted = time.time()
                    self.voting = True
                    self.voted = set()
        elif message.content.startswith("!whois"):
            if len(active_players):
                res = "players currently on: \n```"
                for p in active_players:
                    res += " - " + p + "\n"
                await message.channel.send(res + "```")
            else:
                await message.channel.send("no one is on, hop in!")
        elif message.content.startswith("!lock"):
            if (message.author.id == 279456734773510145):
                await message.channel.send("the server is locked and cannot be spun up")
                self.locked = True
                if self.voting:
                    await message.channel.send("the active vote has been cancelled")
                    self.voting = False
                    self.voted = set()
        elif message.content.startswith("!unlock"):
            if (message.author.id == 279456734773510145):
                # ("can can" typo fixed in the user-facing message.)
                await message.channel.send("the server is unlocked and can be spun up")
                self.locked = False
        elif message.content.startswith("!help"):
            await message.channel.send("""
`!spinup` - starts a vote to spin up the minecraft server, type `!yes` to vote, `!no` to cancel
`!spindown` - spins down the minecraft server, there is NO voting process
`!ip` - returns the IP address of the server
`!isup` - checks if the server is currently up/starting up
`!uptime` - returns the uptime of the server in seconds
""")
        elif message.content.startswith("!yes"):
            if message.author not in self.voted and self.voting:
                self.voted.add(message.author)
                await message.channel.send("%s out of 5 votes recorded" % len(self.voted))
                if len(self.voted) == 5:
                    # spin up the minecraft server
                    await self.spinup(message)
        elif message.content.startswith("!no"):
            if message.author in self.voted and self.voting:
                self.voted.remove(message.author)
                await message.channel.send("%s out of 5 votes recorded" % len(self.voted))
        elif message.content.startswith("!spindown"):
            await message.channel.send("spinning down the minecraft server")
            try:
                # tell the minecraft server to gracefully shut down
                await self.sock.emit("quit")
                # dc from the websocket connection
                await self.sock.disconnect()
            except:
                # Best effort: the socket may already be gone.
                pass
            # then spin down the server
            utils.alter_instance(ec2, os.environ['EC2_INSTANCE_ID'], state = 'OFF')
            active_players = set()
            self.running = False
        elif message.content.startswith("!isup"):
            if self.running:
                await message.channel.send("the server IS up")
            else:
                await message.channel.send("the server is NOT up")
        elif message.content.startswith("!uptime"):
            if self.running:
                await message.channel.send("the server has been up for %s seconds" % ((time.time() - self.upsince)))
            else:
                await message.channel.send("the server is not currently up")
        elif message.content.startswith("!ip"):
            # NOTE(review): assumes the minecraft host is the second
            # reservation returned by describe_instances() — fragile.
            self.ip = ec2.describe_instances()['Reservations'][1]['Instances'][0]['NetworkInterfaces'][0]['Association']['PublicIp']
            await message.channel.send("`%s:25565`" % (self.ip))

    async def spinup(self, message):
        """Start the EC2 instance (if not running) and reset voting state."""
        #self.ip = ec2.describe_instances()['Reservations'][0]['Instances'][0]['NetworkInterfaces'][0]['Association']['PublicIp']
        await message.channel.send("vote succeeded, spinning up minecraft (IP will be sent soon!)")
        self.voting = False
        self.voted = set()
        if (not self.running):
            # spin up the server
            utils.alter_instance(ec2, os.environ['EC2_INSTANCE_ID'], state = 'ON')
            self.running = True
            self.upsince = time.time()
# Module-level singleton client; check_messages() receives it as `ctx`.
client = Spinup()
# NOTE(review): this module-level `c` is shadowed by the local `c` inside
# check_messages and appears unused.
c = 0
async def check_messages(ctx):
    """Background task: bridge socketio events into discord and keep the
    server-status message and vote timers up to date.

    *ctx* is the running Spinup client. Loops forever on a 0.1 s tick.
    """
    await ctx.wait_until_ready()
    sock = socketio.AsyncClient(logger = True, reconnection_attempts=1)

    @sock.event
    def connect():
        ctx.sock_connected = True
        print("I'm connected!")

    @sock.event
    async def connect_error():
        print("The connection failed!")

    @sock.event
    def disconnect():
        ctx.sock_connected = False
        print("I'm disconnected!")

    @sock.on("joinleave")
    async def joinleave(data):
        # Maintain the active player set and mirror join/leave into discord.
        if data['task'] == 'message-discord-joinleave':
            user = data['user']
            message = data['message']
            if data['joining']:
                active_players.add(user)
            else:
                active_players.remove(user)
            await ctx.dimensional_rift.send(message)

    @sock.on('minecraft-chat')
    async def chat(data):
        if data['task'] == 'message-discord':
            #channel = discord.utils.get(ctx.get_all_channels(), name = "dimensional-rift")
            #print(channel)
            if not data['message'].endswith("Disconnected"):
                await ctx.dimensional_rift.send("```diff\n+ <%s> %s```" % (data['user'], data['message']))

    last_message = None
    prev_topic = ""
    c = 0
    while True:
        c += 1
        # establish connection to the aws instance
        # we're going to run this every 2 seconds
        if ctx.running and (time.time() - ctx.upsince) > 1 and not ctx.sock_connected and c % 20 == 0:
            try:
                instances = ec2.describe_instances()
                ip_addr = instances['Reservations'][1]['Instances'][0]['NetworkInterfaces'][0]['Association']['PublicIp']
                await sock.connect(url = 'http://{}:5000'.format(os.environ['PRIVATE_IP']))
            except:
                print("attempted to connect and failed.")
            else:
                # Only announce + hand the socket to the client on success.
                await ctx.voteChannel.send("minecraft is up @ {}:25565. Hop in!".format(ip_addr))
                ctx.sock = sock
        if ctx.dimensional_rift and ctx.server_status:
            if not last_message:
                last_message = ctx.server_status.last_message_id
            # set the topic of the chat
            statuses = []
            statuses.append("ON @ %s" % ctx.ip if ctx.running else "OFF")
            statuses.append("LOCKED" if ctx.locked else "UNLOCKED")
            if ctx.voting:
                statuses.append("VOTING")
            topic = "SERVER: "
            for status in statuses:
                topic += status + ", "
            topic = topic[:-2]
            if len(active_players) and ctx.running:
                topic += " | "
                for player in active_players:
                    topic += player + ", "
                topic = topic[:-2]
            elif len(active_players) == 0 and ctx.running:
                topic += " | no one is on, hop on!"
            if topic != prev_topic:
                print("EDITING TOPIC: %s, %s" % (prev_topic, topic))
                # delete the last message
                if last_message:
                    try:
                        # First pass holds a message id; afterwards a Message.
                        if type(last_message) == int:
                            msg = await ctx.server_status.fetch_message(last_message)
                            await msg.delete()
                        else:
                            await last_message.delete()
                    except Exception as e:
                        print(e)
                last_message = await ctx.server_status.send(topic)
                prev_topic = topic
        if (time.time() - ctx.voteStarted) > 180 and ctx.voting:
            ctx.voting = False
            ctx.voted = set()
            await ctx.voteChannel.send("sorry! the vote has ended, type `!spinup` to start another vote")
        elif int(time.time() - ctx.voteStarted) == 120 and ctx.voting:
            ctx.voteStarted -= 1 # this is so fucking janky. we only want this message sent once, so we rely on the 0.1 second resolution of the check_messages function. we subtract one from voteStarted to simulate a second of time passing, ensuring this message is only sent once.
            await ctx.voteChannel.send("the vote will end in 1 MINUTE")
        elif int(time.time() - ctx.voteStarted) == 60 and ctx.voting:
            ctx.voteStarted -= 1
            await ctx.voteChannel.send("the vote will end in 2 MINUTES")
        """
        while not outq.empty():
        item = outq.get()
        if item['task'] == 'message-discord':
        #channel = discord.utils.get(ctx.get_all_channels(), name = "dimensional-rift")
        #print(channel)
        if not item['message'].endswith("Disconnected"):
        await ctx.dimensional_rift.send("```diff\n+ <%s> %s```" % (item['user'], item['message']))
        elif item['task'] == 'message-discord-joinleave':
        user = item['user']
        message = item['message']
        await ctx.dimensional_rift.send(message)
        """
        await asyncio.sleep(0.1)
async def main():
    """Unused placeholder entry point; intentionally does nothing."""
    return None
if __name__ == '__main__':
    # Schedule the bridge/status background task on the client's loop,
    # then block running the discord client.
    client.loop.create_task(check_messages(client))
    client.run(os.environ['DISCORD_TOKEN'])
    #loop = asyncio.get_event_loop()
    #loop.run_until_complete(client.start(os.environ['DISCORD_TOKEN']))
    #loop.close()
    #print("closed")
    #asyncio.run(main())
|
{"/opurtbot.py": ["/utils.py"]}
|
29,475
|
nathan-yan/opurtbot-real
|
refs/heads/main
|
/utils.py
|
import boto3
from botocore.exceptions import ClientError
def alter_instance(client, instance_id, state):
    """Start ('ON') or stop (any other *state*) the given EC2 instance.

    The first call uses DryRun=True as a permissions probe — per the EC2
    API it always raises (DryRunOperation on success, otherwise e.g.
    UnauthorizedOperation); the error is printed and ignored. The real
    call then runs with DryRun=False. Errors are printed, never raised.
    NOTE(review): the real call proceeds even if the dry run showed the
    caller lacks permission — confirm this best-effort behavior is wanted.
    """
    method = client.start_instances if state == 'ON' else client.stop_instances
    try:
        method(InstanceIds = [instance_id], DryRun = True)
    except ClientError as e:
        print(e)
    try:
        response = method(InstanceIds = [instance_id], DryRun = False)
        print("Successfully {} the instance. Here is the response: {}".format("started" if state == 'ON' else "stopped",
            response))
    except ClientError as e:
        print(e)
|
{"/opurtbot.py": ["/utils.py"]}
|
29,497
|
GitBl/analysisTB
|
refs/heads/master
|
/utils/engine.py
|
"""
This is designed to embed the major computation of algorithms.
"""
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import copy
import matplotlib.pyplot as plt
import utils.plotting_tools as plt_tools
import utils.messing as messing
import torchvision
from matplotlib import cm
def compute_grad_list_diff_score(list_1, list_2):
    """
    Compute the L1 value of the difference between two lists of parameter
    vectors (list of Tensors): the sum over tensors of the mean absolute
    difference.
    """
    if len(list_1) != len(list_2):
        raise ValueError("Entry list should be of the same size")
    score = 0
    for first, second in zip(list_1, list_2):
        diff = (first - second).flatten()
        score += np.sum(np.abs(diff)) / diff.shape[0]
    return score
def get_grad_norm(net):
    """
    Return the norm of the gradient of a specific network.

    The norm is the mean absolute value over all gradient entries (a
    length-weighted average of per-tensor means). Parameters whose .grad
    is still None (no backward pass yet) are skipped; returns 0.0 when no
    parameter has a gradient.
    """
    # Bug fix: `hasattr(elem, 'grad')` is True for every Parameter even
    # before backward(), so torch.flatten(None) used to crash; test the
    # gradient for None instead.
    grad_list = [torch.flatten(elem.grad) for elem in net.parameters()
                 if elem.grad is not None]
    norm = 0
    total_length = 0
    for elem in grad_list:
        norm += torch.mean(torch.abs(elem)).item() * elem.shape[0]
        total_length += elem.shape[0]
    if total_length == 0:
        # No gradients at all — avoid a ZeroDivisionError.
        return 0.0
    return norm / total_length
def compute_loss(net, train_loader, criterion=nn.CrossEntropyLoss()):
    """
    Return the value of the loss for the network over the given loader.

    Averages the per-batch loss (each batch contributes loss/len(loader)).
    Requires CUDA: inputs and labels are moved to the GPU.
    NOTE(review): the default `criterion` object is created once at import
    and shared across calls — harmless for stateless losses, but be aware.
    """
    loss_value = 0
    for image, label in train_loader:
        pred_label = net(image.cuda())
        loss = criterion(pred_label, label.cuda())
        loss_value += loss.data.item() / len(train_loader)
        # Drop the batch's activations/graph before the next iteration to
        # keep GPU memory bounded.
        del pred_label
        torch.cuda.empty_cache()
    return loss_value
def get_score(net, dataset):
    """
    Return the score of a given network over a given dataset.

    Computes top-1 accuracy: the fraction of samples whose argmax
    prediction equals the label. Requires CUDA.
    NOTE(review): the denominator len(dataset) * batch_size overcounts
    when the final batch is short — confirm loaders use drop_last or
    divisible sizes.
    """
    test_score = 0.
    for data, label in dataset:
        pred = net(data.cuda())
        # GoogLeNet returns a namedtuple in training mode; unwrap logits.
        if(type(net) == torchvision.models.GoogLeNet):
            pred = pred.logits
        test_score = test_score + \
            torch.sum((torch.max(pred.cpu(), 1)[1] == label))
    return float(test_score) / (float(len(dataset)) * dataset.batch_size)
def weight_projection(weight_1, weight_2):
    """
    For two vectors of weights of the same size, compute their naïve scalar
    product and return it as a list of one dot product per parameter tensor.
    """
    # Bug fix: `weight_1.shape[0]` only worked when the argument happened to
    # be a numpy object array; len() also accepts plain lists of tensors.
    return [
        torch.dot(
            weight_1[i].flatten(),
            weight_2[i].flatten()).item() for i in range(
            len(weight_1))]
def complex_norm(vector):
    """
    Computes the norm of a weight vector using the scalar product defined earlier.
    """
    self_products = weight_projection(vector, vector)
    return np.mean(self_products)
def GSplot(net, train_loader, learning_rate=None, GSrange=2, GSratio=1):
    """
    Stand for Grid Search Plot.
    When called, take two gradient step of the given train_loader.
    /!\ -> Not final and unstable

    Builds a 2-D grid of loss values around the current parameters along
    the (orthogonalized) directions of the first two batch gradients.
    NOTE(review): the inner `param = goal_param[...]` loop rebinds the loop
    variable only — it does NOT write the offsets into the network, so each
    grid point appears to evaluate unchanged weights. Confirm before
    trusting the output (consistent with the author's "unstable" warning).
    """
    criterion = nn.CrossEntropyLoss()
    optim = torch.optim.SGD(net.parameters(), learning_rate)
    iteration = 0
    gradient_list = []
    # Take two SGD steps, recording the gradients of the first two batches.
    for image, label in train_loader:
        if (iteration < 2):
            pred_label = net(image.cuda())
            optim.zero_grad()
            loss = criterion(pred_label, label.cuda())
            loss.backward()
            optim.step()
            parameters = list(elem.grad for elem in net.parameters())
            gradient_list.append(parameters)
            iteration += 1
    loss_list = np.zeros(
        (GSrange * GSratio * 2 + 1,
         GSrange * GSratio * 2 + 1))
    parameter_state = np.array(list(net.parameters()))
    gradient_list = np.array(gradient_list)
    for i in range(gradient_list[0].shape[0]): # Global shape checker
        if (gradient_list[0][i].shape != gradient_list[1][i].shape):
            print(gradient_list[0][i].shape)
    #gradient_list[0] = weight_projection(gradient_list[0])
    # gradient_list[1] =
    # Gram-Schmidt-like step: remove the component of gradient 1 along
    # gradient 0, rescaled to keep gradient 1's original norm.
    scalar_value = np.mean(
        weight_projection(
            gradient_list[0],
            gradient_list[1]))
    print("Scalar value : {}".format(scalar_value))
    scalar_vector = scalar_value * gradient_list[0]
    remaining_vector = gradient_list[1] - scalar_vector
    gradient_list[1] = (remaining_vector) * \
        complex_norm(gradient_list[1]) / complex_norm(remaining_vector)
    boundary = int(GSrange * GSratio)
    # Sweep the 2-D grid of offsets and record the loss at each point.
    for x in range(-boundary, boundary + 1):
        for y in range(-boundary, boundary + 1):
            goal_param = parameter_state + x / GSratio * \
                gradient_list[0] + y / GSratio * gradient_list[1]
            goal_iteration = 0
            for param in net.parameters():
                param = goal_param[goal_iteration]
                goal_iteration += 1
            loss_list[x][y] = compute_loss(net, train_loader)
            del goal_param
            print('Done : {},{}, ratio = {}'.format(x, y, GSratio))
    iteration = 0
    for element in net.parameters():
        element = parameter_state[iteration]
        iteration += 1
    # Do not forget to reset network weight - do this on the server
    return loss_list
def sweet_spot(net_value, net_target, mess_generator, verbose=False):
    """
    Return the according value linked to a mess generator, with a comparable loss to the net_target one.

    Doubles / halves the messing ``value`` until the mean loss of
    ``net_value`` (after applying ``mess_generator(value)``) falls inside
    the band [0.9 * loss_target, 4 * loss_target], or the loss starts
    increasing between two candidates.

    NOTE(review): relies on the module-level globals ``train_loader`` and
    ``CRITERION`` and requires CUDA; ``net_value`` is re-messed in place on
    every iteration without undoing previous messes — confirm intended.
    """
    value = 1
    done = False
    nb_iter = 0  # counted but never used to bound the search
    previous_loss = float('inf')
    while(not done):
        # Apply the candidate messing value to the probed network.
        net_value.apply(mess_generator(value))
        loss_value_list = []
        loss_target_list = []
        for i, data in enumerate(train_loader):
            image = data[0].type(torch.FloatTensor).cuda()
            label = data[1].type(torch.LongTensor).cuda()
            pred_label = net_value(image)
            pred_non_res = net_target(image)
            loss = CRITERION(pred_label, label)
            loss_non_res = CRITERION(pred_non_res, label)
            loss_value_list.append(loss.data.item())
            loss_target_list.append(loss_non_res.item())
        loss_value = np.mean(loss_value_list)
        loss_target = np.mean(loss_target_list)
        if(verbose):
            print(
                "Loss_value : {:.2f}, Loss_target : {:.2f}, messing value {}".format(
                    loss_value, loss_target, value))
        if (previous_loss < loss_value):
            # Loss went up between two candidates: stop early.
            print("Unstable loss evolution, cutting short")
            return value
        else:
            previous_loss = loss_value
        if(loss_value < loss_target * 4 and loss_value > loss_target * 0.9):
            # Comparable loss reached.
            done = True
            return value
        # Binary-style search: loss too low -> mess harder, else soften.
        if(loss_value < loss_target):
            value = value * 2
        else:
            value = value / 2
        nb_iter = nb_iter + 1
    return value
def shooting_star(net, train_loader, ratio=1, gradient_step_range=10, LEARNING_RATE=0.01, CRITERION=nn.CrossEntropyLoss()):
    """
    Project the gradient over several steps, allowing to "see" the loss space over several iterations.

    Computes ONE gradient (from the first batch) on a deep copy of ``net``,
    then repeatedly re-applies that same stored gradient (step size
    LEARNING_RATE / ratio) and records the mean training loss after each
    application.

    Returns: list of mean losses, one per extension step; the first entry is
    the loss before any step is applied.

    NOTE(review): the batches are moved with ``.cuda()`` unconditionally,
    while the copied net is only moved when CUDA is available — confirm
    behavior on CPU-only hosts.
    """
    new_net = copy.deepcopy(net)
    if(torch.cuda.is_available()):
        new_net.cuda()
    # Scaled-down step so that `ratio` sub-steps make up one nominal step.
    new_optim = torch.optim.SGD(new_net.parameters(), LEARNING_RATE / ratio)
    # Single backward pass: this gradient is reused for every later step.
    i, data = next(enumerate(train_loader))
    image = data[0].type(torch.FloatTensor).cuda()
    label = data[1].type(torch.LongTensor).cuda()
    pred_label = new_net(image)
    loss = CRITERION(pred_label, label)
    loss.backward()
    loss_list = []
    for i in range(int(ratio * gradient_step_range) + 1):
        print("Entering gradient extension step: {}/{}".format(i+1, ratio * gradient_step_range))
        loop_loss_list = []
        for iter_nb, data in enumerate(train_loader):
            # Coarse progress display (about 5 updates per pass).
            if(int(5*iter_nb/len(train_loader)) != int(5*(iter_nb+1)/len(train_loader)) or iter_nb == len(train_loader)-1):
                print("Gradient step: {}, current progression {:.2f}".format(i+1, (iter_nb+1)/len(train_loader)), end = "\r")
            image = data[0].type(torch.FloatTensor).cuda()
            label = data[1].type(torch.LongTensor).cuda()
            pred_label = new_net(image)
            loss = CRITERION(pred_label, label)
            loop_loss_list.append(loss.data.item())
        loss_list.append(np.mean(loop_loss_list))
        # Re-apply the same stored gradient (no new backward pass).
        new_optim.step()
    print(" ",end = "\r")
    return loss_list
def resnet_compare(
        test_net,
        model_2,
        nb_epochs=2,
        points_per_epochs=10,
        gradient_extension=False,
        gradient_ratio=1,
        gradient_range=10,
        GSgradient=False,
        GSrange=2,
        GSratio=1,
        LEARNING_RATE=0.1,
        CRITERION=nn.CrossEntropyLoss(),
        train_loader=None,
        test_loader=None,
        gradient_scale_plot=False,
        bias_inspector_report=None,
        eigenvalues_inspector=False,
        eigenvalues=None,
        eigenvectors=None,
        shift_try=False,
        shift_range=5):
    """
    Compare the training of two different networks.
    It is a toolbox set for personal research, with many options, which are referenced here.

    Arguments:
    -------------------------------------------
    test_net: The baseline network. It should be the "control point" over your experiment.
    model_2: The model which undergoes change.
    nb_epochs: Number of training epochs
    points_per_epochs: The algorithm periodicaly stops to "report" its state. Its stops this number of time per epochs.
    gradient_extension: Extend the current gradient at the current training point, and then plot the extended loss space.
    gradient_ratio: The distance between each point when the gradient_extension is enabled
    gradient_range: The maximum distance when the gradient_extension is enabled
    GSgradient: Enable the grid-search loss computation. Takes the gradient at time t and t+1, then orthonormalize them and compute the loss value over several iteration of the normalized gradient.
    GSrange: The maximum distance when the GSgradient is enabled.
    GSratio: The distance between each point when the GSgradient is enabled
    LEARNING_RATE: The usual learning rate to be taken by the SGD
    CRITERION: The loss pattern to be taken
    train_loader: The baseline train_loader to train on.
    test_loader: The baseline test_loader to test on.
    gradient_scale_plot: Plot the scale of the gradient over the course of training.
    bias_inspector_report: Plot the bias "evolution" over the course of training.
    eigenvalues_inspector: Track the distance between ``with_eigen`` and
        ``without_eigen`` outputs of both nets (needs ``eigenvectors``).
    shift_try: At epoch 0, re-initialize model_2's square non-1x1 convs
        uniformly in +/- 10**-shift_range.

    Returns a list whose first two entries are [scores, losses] pairs for
    the two models; one extra entry is appended per enabled option, in the
    order: gradient scale, bias evolution, overall evolution, eigenvalue
    diffs, gradient extensions.
    """
    optimizer = torch.optim.SGD(test_net.parameters(), LEARNING_RATE)
    optimizer_2 = torch.optim.SGD(model_2.parameters(), LEARNING_RATE)
    score_list = []
    score_list_2 = []
    loss_list = []
    loss_list_2 = []
    if(gradient_scale_plot):
        gradient_scale_list_1 = []
        gradient_scale_list_2 = []
    if(bias_inspector_report):
        # NOTE(review): ``bias_inspector(...)[0]`` is the FIRST bias array
        # only, while the later diffs use the full list — confirm against
        # compute_grad_list_diff_score's expectations.
        bias_diff_1 = plt_tools.bias_inspector(test_net)[0]
        bias_diff_2 = plt_tools.bias_inspector(model_2)[0]
        overall_diff_1 = [elem.cpu().detach().numpy()
                          for elem in test_net.parameters()]
        overall_diff_2 = [elem.cpu().detach().numpy()
                          for elem in model_2.parameters()]
        bias_score_evolution_1 = []
        bias_score_evolution_2 = []
        overall_score_evolution_1 = []
        overall_score_evolution_2 = []
    if(eigenvalues_inspector):
        eigenvalue_diff_list_1 = []
        eigenvalue_diff_list_2 = []
    if(gradient_extension):
        gradient_list_1 = []
        gradient_list_2 = []
    for j in range(nb_epochs):
        print("Starting epoch n°{}".format(j))
        # Optional: extend the current gradient and plot the loss profile.
        if(gradient_extension):
            shs_range = np.linspace(
                0, gradient_range, gradient_ratio * gradient_range + 1)
            plt.figure(figsize=(10, 10))
            shooting_value_1 = shooting_star(
                test_net,
                train_loader,
                ratio=gradient_ratio,
                gradient_step_range=gradient_range,
                LEARNING_RATE=LEARNING_RATE)
            shooting_value_2 = shooting_star(
                model_2,
                train_loader,
                ratio=gradient_ratio,
                gradient_step_range=gradient_range,
                LEARNING_RATE=LEARNING_RATE)
            gradient_list_1.append(shooting_value_1)
            gradient_list_2.append(shooting_value_2)
            plt.plot(
                shs_range,
                shooting_value_1,
                label='Model1')
            plt.plot(
                shs_range,
                shooting_value_2,
                label='Model2')
            plt.xlabel(r'$\alpha$')
            plt.title("Gradient extension at the start of epoch {}".format(j))
            plt.ylabel("loss")
            plt.legend()
            plt.show()
        # Optional: 2-D grid-search loss surfaces via GSplot.
        if(GSgradient):
            GSmap_1 = GSplot(
                test_net,
                train_loader,
                learning_rate=LEARNING_RATE,
                GSrange=GSrange,
                GSratio=GSratio)
            GSmap_2 = GSplot(
                model_2,
                train_loader,
                learning_rate=LEARNING_RATE,
                GSrange=GSrange,
                GSratio=GSratio)
            X = np.arange(-GSrange, GSrange + 1 / GSratio, 1 / GSratio)
            Y = np.arange(-GSrange, GSrange + 1 / GSratio, 1 / GSratio)
            X, Y = np.meshgrid(X, Y)
            print("Sizes: {}, {}, {}".format(X.shape, Y.shape, GSmap_1.shape))
            fig_1 = plt.figure()
            plt.title("First network")
            ax_1 = fig_1.add_subplot(111, projection='3d')
            surf_1 = ax_1.plot_surface(
                X, Y, GSmap_1, cmap=cm.get_cmap("bwr"), alpha=0.5)
            fig_1.show()
            fig_2 = plt.figure()
            plt.title("Second network")
            ax_2 = fig_2.add_subplot(111, projection='3d')
            surf_2 = ax_2.plot_surface(
                X, Y, GSmap_2, cmap=cm.get_cmap("bwr"), alpha=0.5)
            fig_2.show()
        # Optional: tiny uniform re-init of model_2's inner convs (once).
        if (j == 0 and shift_try):
            for module in model_2.modules():
                if(hasattr(module, "in_channels") and hasattr(module, "out_channels")):
                    if(module.in_channels == module.out_channels):
                        if(module.kernel_size[0] != 1):
                            torch.nn.init.uniform_(
                                module.weight, -10**-shift_range, 10**-shift_range)
        # Joint training pass: one SGD step per batch for each model.
        for i, data in enumerate(train_loader):
            # test_net.train()
            # model_2.train()
            image = data[0].type(torch.FloatTensor).cuda()
            label = data[1].type(torch.LongTensor).cuda()
            pred_label = test_net(image)
            pred_2 = model_2(image)
            loss = CRITERION(pred_label, label)
            loss_2 = CRITERION(pred_2, label)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            optimizer_2.zero_grad()
            loss_2.backward()
            optimizer_2.step()
            # Periodic report: points_per_epochs times per epoch + last batch.
            if(int((i - 1) * points_per_epochs / len(train_loader)) != int(i * points_per_epochs / len(train_loader)) or i == len(train_loader) - 1):
                # test_net.eval()
                # model_2.eval()
                res_score = get_score(test_net, test_loader)
                unres_score = get_score(model_2, test_loader)
                score_list.append(res_score)
                score_list_2.append(unres_score)
                loss_list.append(loss.data)
                loss_list_2.append(loss_2.data)
                if(gradient_scale_plot):
                    gradient_scale_list_1.append(get_grad_norm(test_net))
                    gradient_scale_list_2.append(get_grad_norm(model_2))
                if(eigenvalues_inspector):
                    eigenvalue_diff_list_1.append(eigenvalue_observer(
                        test_net, eigenvalues, eigenvectors))
                    eigenvalue_diff_list_2.append(
                        eigenvalue_observer(model_2, eigenvalues, eigenvectors))
                if(bias_inspector_report):  # This part causes huge GPU memory leaks
                    #init_mem_state = torch.cuda.memory_allocated()
                    #init_cached_mem_state = torch.cuda.memory_cached()
                    bias_temp_1_inspector = plt_tools.bias_inspector(test_net)
                    bias_temp_1 = copy.deepcopy(
                        plt_tools.bias_inspector(test_net))
                    bias_temp_2_inspector = plt_tools.bias_inspector(model_2)
                    bias_temp_2 = copy.deepcopy(bias_temp_2_inspector)
                    # print("")
                    #print("Inspect: {:.2e},{:.2e}".format(float(torch.cuda.memory_allocated() - init_mem_state), float(torch.cuda.memory_cached() - init_cached_mem_state)))
                    overall_temp_1 = [elem.clone().cpu().detach().numpy()
                                      for elem in test_net.parameters()]
                    overall_temp_2 = [elem.clone().cpu().detach().numpy()
                                      for elem in model_2.parameters()]
                    #print("Overall: {:.2e},{:.2e}".format(float(torch.cuda.memory_allocated() - init_mem_state), float(torch.cuda.memory_cached() - init_cached_mem_state)))
                    # Score the movement of biases / all weights since the
                    # previous report, then roll the snapshots forward.
                    bias_score_evolution_1.append(
                        compute_grad_list_diff_score(
                            bias_temp_1, bias_diff_1))
                    bias_score_evolution_2.append(
                        compute_grad_list_diff_score(
                            bias_temp_2, bias_diff_2))
                    overall_score_evolution_1.append(
                        compute_grad_list_diff_score(
                            overall_temp_1, overall_diff_1))
                    overall_score_evolution_2.append(
                        compute_grad_list_diff_score(
                            overall_temp_2, overall_diff_2))
                    #print("Evol: {:.2e},{:.2e}".format(float(torch.cuda.memory_allocated() - init_mem_state), float(torch.cuda.memory_cached() - init_cached_mem_state)))
                    bias_diff_1 = copy.deepcopy(bias_temp_1)
                    bias_diff_2 = copy.deepcopy(bias_temp_2)
                    overall_diff_1 = overall_temp_1
                    overall_diff_2 = overall_temp_2
                    #print("Copyt: {:.2e},{:.2e}".format(float(torch.cuda.memory_allocated() - init_mem_state), float(torch.cuda.memory_cached() - init_cached_mem_state)))
                    del(bias_temp_1_inspector)
                    del(bias_temp_2_inspector)
                    del(bias_temp_1)
                    del(bias_temp_2)
                    del(overall_temp_1)
                    del(overall_temp_2)
                    #print("After del before free: {:.2e},{:.2e}".format(float(torch.cuda.memory_allocated() - init_mem_state), float(torch.cuda.memory_cached() - init_cached_mem_state)))
                    torch.cuda.empty_cache()
                    #print("After free: {:.2e},{:.2e}".format(float(torch.cuda.memory_allocated() - init_mem_state), float(torch.cuda.memory_cached() - init_cached_mem_state)))
                print("Done: {}%, model1: {:.2f}%, model2: {:.2f}%, loss1: {:.4f}, loss2: {:.4f}   ".format(str(int(
                    i * 100 / len(train_loader))), 100 * res_score, 100 * unres_score, loss.data, loss_2.data), end='\r')
        print("")
        torch.cuda.empty_cache()
        print("Ending epoch n°{}".format(j))
    # Assemble the return value: optional series are appended in order.
    return_list = [[score_list, score_list_2], [loss_list, loss_list_2]]
    if(gradient_scale_plot):
        return_list.append([gradient_scale_list_1, gradient_scale_list_2])
    if(bias_inspector_report):
        return_list.append(
            [bias_score_evolution_1, bias_score_evolution_2])
        return_list.append(
            [overall_score_evolution_1, overall_score_evolution_2])
    if(eigenvalues_inspector):
        return_list.append(
            [eigenvalue_diff_list_1, eigenvalue_diff_list_2])
    if(gradient_extension):
        return_list.append(
            [gradient_list_1, gradient_list_2])
    return return_list
def train_and_test(
        depth,
        width,
        nb_epochs,
        points_per_epochs=10,
        messed_up_init_generator=None,
        gradient_extension=False,
        gradient_ratio=1,
        gradient_range=10,
        statistics=False,
        detector_plot=False,
        biased_start=False,
        zero_start=False):
    """Train a residual and a non-residual GenResNet side by side.

    Relies on the module-level globals ``train_loader``, ``test_loader``,
    ``LEARNING_RATE`` and ``CRITERION``; requires CUDA.

    Options:
      messed_up_init_generator: factory applied to the residual net with the
          messing value found by ``sweet_spot``.
      gradient_extension / gradient_ratio / gradient_range: plot the
          ``shooting_star`` loss extension at the start of each epoch.
      statistics: also record per-parameter gradient (mean, var) snapshots.
      detector_plot: call ``weight_reporter`` once the non-residual loss
          first drops below 2.2.
      biased_start: re-init the center tap of every non-residual
          ``.conv.weight`` kernel around 1/in_c.
      zero_start: zero both nets via ``zero_mess`` before training.

    Returns ((scores, losses), (scores_nonres, losses_nonres)); when
    ``statistics`` is set, a third (stat, stat_unres) tuple is appended.
    """
    net = GenResNet(depth, width).cuda()
    non_resNet = GenResNet(depth, width, residual=False).cuda()
    if(zero_start):
        net.apply(zero_mess)
        non_resNet.apply(zero_mess)
    if not(messed_up_init_generator is None):
        # Find a messing value giving the residual net a loss comparable to
        # the non-residual one, then apply it.
        optimal_mess = sweet_spot(
            net,
            non_resNet,
            messed_up_init_generator,
            verbose=True)
        print("Final mess : {:2f}".format(optimal_mess))
        # replace with optimal_mess
        net.apply(messed_up_init_generator(optimal_mess))
    if(biased_start):
        for name, param in non_resNet.named_parameters():
            if('.conv.weight' in name):
                # NOTE(review): Conv2d weights are (out, in, kH, kW); the
                # names here suggest (in, out, ...) — confirm intent.
                in_c, out_c, kernel, kernel = param.size()
                for i in range(in_c):
                    for j in range(out_c):
                        torch.nn.init.normal_(
                            param[i][j][1][1], 1 / in_c, 1 / (2 * in_c))
    optimizer = torch.optim.SGD(net.parameters(), LEARNING_RATE)
    optimizer_non_res = torch.optim.SGD(non_resNet.parameters(), LEARNING_RATE)
    score_list = []
    score_list_non_res = []
    loss_list = []
    loss_list_non_res = []
    if(statistics):
        stat = []
        stat_unres = []
    if(detector_plot):
        detector_done = False
    for j in range(nb_epochs):
        print("Starting epoch n°{}".format(j))
        if(gradient_extension):
            shs_range = np.linspace(
                0, gradient_range, gradient_ratio * gradient_range + 1)
            plt.figure(figsize=(10, 10))
            plt.plot(
                shs_range,
                shooting_star(
                    net,
                    train_loader,
                    ratio=gradient_ratio,
                    gradient_step_range=gradient_range),
                label='Residual')
            plt.plot(
                shs_range,
                shooting_star(
                    non_resNet,
                    train_loader,
                    ratio=gradient_ratio,
                    gradient_step_range=gradient_range),
                label='NonRes')
            plt.xlabel(r'$\alpha$')
            plt.title("Gradient extension at the start of epoch {}".format(j))
            plt.yscale('log')
            plt.ylabel("log(loss)")
            plt.legend()
            plt.show()
        for i, data in enumerate(train_loader):
            image = data[0].type(torch.FloatTensor).cuda()
            label = data[1].type(torch.LongTensor).cuda()
            pred_label = net(image)
            pred_non_res = non_resNet(image)
            loss = CRITERION(pred_label, label)
            loss_non_res = CRITERION(pred_non_res, label)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            optimizer_non_res.zero_grad()
            loss_non_res.backward()
            optimizer_non_res.step()
            # Periodic report: points_per_epochs times per epoch + last batch.
            if(int((i - 1) * points_per_epochs / len(train_loader)) != int(i * points_per_epochs / len(train_loader)) or i == len(train_loader) - 1):
                res_score = get_score(net, test_loader)
                unres_score = get_score(non_resNet, test_loader)
                score_list.append(res_score)
                score_list_non_res.append(unres_score)
                loss_list.append(loss.data)
                loss_list_non_res.append(loss_non_res.data)
                if(statistics):
                    stat.append(get_gradient_statistics(net))
                    stat_unres.append(get_gradient_statistics(non_resNet))
                print("Done: {}%, residual: {:.2f}%, non_res: {:.2f}%, loss: {:.4f}, loss nonres: {:.4f}   ".format(str(int(
                    i * 100 / len(train_loader))), 100 * res_score, 100 * unres_score, loss.data, loss_non_res.data), end='\r')
                if(detector_plot and not detector_done):
                    # One-shot weight histogram once learning has started.
                    if(loss_non_res.data.item() < 2.2):
                        weight_reporter(non_resNet)
                        detector_done = True
        print("")
        print("Ending epoch n°{}".format(j))
    if (statistics):
        return((score_list, loss_list), (score_list_non_res, loss_list_non_res), (stat, stat_unres))
    return (score_list, loss_list), (score_list_non_res, loss_list_non_res)
def plot_and_compare(
        max_depth,
        width,
        nb_epochs=5,
        points_per_epochs=10,
        messed_up_init_generator=None,
        skip_zero_depth=True):
    """Run ``train_and_test`` for every depth and plot the resulting curves.

    For each depth in [init_depth, max_depth) two figures are drawn: the
    test scores and the losses of the residual vs. non-residual networks.
    Returns ((scores_res, losses_res), (scores_nonres, losses_nonres)),
    each entry a list with one series per depth.
    """
    plt.style.use("ggplot")
    score_range = []
    score_range_non_res = []
    loss_range = []
    loss_range_non_res = []
    epoch_range = np.linspace(0, nb_epochs, points_per_epochs * nb_epochs)
    init_depth = 1 if skip_zero_depth else 0
    for depth in range(init_depth, max_depth):
        res_results, non_res_results = train_and_test(
            depth, width, nb_epochs, points_per_epochs, messed_up_init_generator=messed_up_init_generator)
        depth_score, depth_loss = res_results
        depth_score_unres, depth_loss_unres = non_res_results
        # One figure per metric: Score first, then Loss.
        for metric, res_curve, unres_curve in (
                ("Score", depth_score, depth_score_unres),
                ("Loss", depth_loss, depth_loss_unres)):
            plt.figure(figsize=(10, 10))
            plt.plot(epoch_range, res_curve, label="Residual")
            plt.plot(epoch_range, unres_curve, label="NonRes")
            plt.title("{} with width :{} and depth: {}".format(metric, width, depth))
            plt.xlabel("Epochs")
            plt.ylabel(metric)
            plt.legend()
            plt.show()
        score_range.append(depth_score)
        score_range_non_res.append(depth_score_unres)
        loss_range.append(depth_loss)
        loss_range_non_res.append(depth_loss_unres)
    return (score_range, loss_range), (score_range_non_res, loss_range_non_res)
def get_gradient_statistics(net):
    """Return a per-parameter list of (mean, variance) of the current gradients.

    Assumes every parameter of ``net`` already carries a populated ``.grad``.
    """
    stats = []
    for param in net.parameters():
        grad = param.grad
        stats.append((torch.mean(grad).item(), torch.var(grad).item()))
    return stats
def net_training(
        net,
        train_loader,
        test_loader,
        learning_rate,
        variance_modifier,
        nb_epoch,
        CRITERION=nn.CrossEntropyLoss()):
    """Train a variance-rescaled copy of ``net`` and return its test score.

    A deep copy of ``net`` is moved to CUDA, the weights of every square
    (in_channels == out_channels) non-1x1 conv are rescaled with
    ``messing.var_modifier(weight, variance_modifier)``, and the copy is
    trained for ``nb_epoch`` epochs with plain SGD. The original ``net``
    is left untouched; the score on ``test_loader`` (via ``get_score``)
    is returned. Requires CUDA.
    """
    curr_net = copy.deepcopy(net).cuda()
    optimizer = torch.optim.SGD(curr_net.parameters(), learning_rate)
    # Rescale only the "inner" convolutions: square channel count, k > 1.
    for module in curr_net.modules():
        if(hasattr(module, "in_channels") and hasattr(module, "out_channels")):
            if(module.in_channels == module.out_channels):
                if(module.kernel_size[0] != 1):
                    messing.var_modifier(module.weight, variance_modifier)
    for epoch in range(nb_epoch):
        for i, data in enumerate(train_loader):
            image = data[0].type(torch.FloatTensor).cuda()
            label = data[1].type(torch.LongTensor).cuda()
            pred = curr_net(image)
            loss = CRITERION(pred, label)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
    return get_score(curr_net, test_loader)
def lr_variance_gradient_norm(
        net,
        train_loader,
        test_loader,
        learning_rate,
        variance_modifier,
        nb_epoch,
        CRITERION=nn.CrossEntropyLoss()):
    """Gradient norm of a variance-rescaled copy of ``net`` after one step.

    Same setup as ``net_training`` (deep copy, CUDA, variance rescaling of
    square non-1x1 convs) but only the FIRST batch is processed (note the
    ``break``), and the gradient norm — not the test score — is returned
    via ``get_grad_norm``.

    NOTE(review): ``test_loader`` and ``nb_epoch`` are accepted for
    signature symmetry with ``net_training`` but are unused here.
    """
    curr_net = copy.deepcopy(net).cuda()
    optimizer = torch.optim.SGD(curr_net.parameters(), learning_rate)
    for module in curr_net.modules():
        if(hasattr(module, "in_channels") and hasattr(module, "out_channels")):
            if(module.in_channels == module.out_channels):
                if(module.kernel_size[0] != 1):
                    messing.var_modifier(module.weight, variance_modifier)
    for i, data in enumerate(train_loader):
        image = data[0].type(torch.FloatTensor).cuda()
        label = data[1].type(torch.LongTensor).cuda()
        pred = curr_net(image)
        loss = CRITERION(pred, label)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        break  # a single step is enough to measure the gradient scale
    return get_grad_norm(curr_net)
def lr_variance_GD_strategy(
        base_net,
        var_range,
        lr_range,
        train_loader,
        test_loader,
        epoch_limit=1,
        gradient_norm_computation=False):
    """Grid-search over (variance modifier, learning rate) combinations.

    For every pair a fresh copy of ``base_net`` is trained via
    ``net_training`` and its test score recorded; when
    ``gradient_norm_computation`` is set, the one-step gradient norm is
    recorded as well. Returns ``[score_matrix]`` or
    ``[score_matrix, gradient_matrix]``.
    """
    return_matrix = np.zeros((len(var_range), len(lr_range)))
    gradient_matrix = np.zeros_like(return_matrix) if gradient_norm_computation else None
    for var_idx, var_value in enumerate(var_range):
        for lr_idx, lr_value in enumerate(lr_range):
            test_score = net_training(
                base_net,
                train_loader=train_loader,
                test_loader=test_loader,
                learning_rate=lr_value,
                variance_modifier=var_value,
                nb_epoch=epoch_limit)
            return_matrix[var_idx][lr_idx] = test_score
            if gradient_norm_computation:
                gradient_matrix[var_idx][lr_idx] = lr_variance_gradient_norm(
                    base_net,
                    train_loader=train_loader,
                    test_loader=test_loader,
                    learning_rate=lr_value,
                    variance_modifier=var_value,
                    nb_epoch=epoch_limit)
                print('Done: var:{}, lr{} - Score = {}, gradient norm: {}'.format(
                    var_value, lr_value, test_score, gradient_matrix[var_idx][lr_idx]))
            else:
                print('Done: var:{}, lr{} - Score = {}'.format(var_value,
                                                               lr_value, test_score))
    return_list = [return_matrix]
    if gradient_norm_computation:
        return_list.append(gradient_matrix)
    return return_list
def eigenvalue_observer(network, eigenvalues, eigenvectors):
    """Norms of ``without_eigen`` minus ``with_eigen`` network outputs.

    Feeds the eigenvectors in batches of 20 (a trailing partial batch is
    dropped) and returns one L2 norm per batch. Requires CUDA.
    ``eigenvalues`` is accepted for interface compatibility but unused.
    """
    diffs = []
    total = eigenvectors.shape[0]
    for start in range(0, (total // 20) * 20, 20):
        batch = torch.Tensor(
            eigenvectors[start: min(start + 20, total)]).cuda()
        without_out = network.without_eigen(batch).detach().cpu().numpy()
        with_out = network.with_eigen(batch).detach().cpu().numpy()
        diffs.append(np.linalg.norm(without_out - with_out))
    return diffs
def deactivate_batchnorm(m):
    """Reset a BatchNorm2d module and freeze it into an identity-like state.

    Non-BatchNorm2d modules are left untouched, which makes this usable
    with ``net.apply(deactivate_batchnorm)``.
    """
    if not isinstance(m, nn.BatchNorm2d):
        return
    m.reset_parameters()
    m.eval()  # stop running-stat updates
    with torch.no_grad():
        m.weight.fill_(1.0)
        m.bias.zero_()
|
{"/utils/engine.py": ["/utils/plotting_tools.py", "/utils/messing.py"], "/utils/testingpattern.py": ["/utils/plotting_tools.py", "/utils/messing.py", "/utils/engine.py"]}
|
29,498
|
GitBl/analysisTB
|
refs/heads/master
|
/utils/testingpattern.py
|
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import copy
import matplotlib.pyplot as plt
import utils.plotting_tools as plt_tools
import utils.messing as messing
import utils.engine as engine
import copy
import torchvision
def testing_pattern(network, train_loader, test_loader, LR_list, points_per_epochs=10, nb_epochs=5, naive=True):
    """Benchmark several initialization schemes across learning rates.

    For each (initialization, LR) pair a deep copy of ``network`` is
    (re-)initialized, trained for ``nb_epochs`` epochs with SGD and
    cross-entropy, and its loss/score curves are stored in the returned
    dict under keys ``"<init>/<LR>/loss"`` and ``"<init>/<LR>/score"``
    (plus the scalar "points_per_epochs" / "nb_epochs" entries).

    Initialization schemes:
      - "Smart": square non-1x1 convs re-initialized uniformly in +/- 1e-5.
      - "Test":  the network's own untouched initialization.
      - "Naïve" (only when ``naive``): every conv weight zeroed.

    Bug fix: batches are now moved to CUDA only when CUDA is available —
    previously ``.cuda()`` was called unconditionally and crashed on
    CPU-only hosts even though ``cuda_state`` was already computed.
    """
    cuda_state = torch.cuda.is_available()
    initialization_list = ["Smart", "Test"]
    if(naive):
        initialization_list.append("Naïve")
    print(initialization_list)
    return_dict = {}
    return_dict["points_per_epochs"] = points_per_epochs
    return_dict["nb_epochs"] = nb_epochs
    for initialization_pattern in initialization_list:
        for LR in LR_list:
            current_network = copy.deepcopy(network)
            if(cuda_state):
                current_network = current_network.cuda()
            if(initialization_pattern == "Naïve"):
                for module in current_network.modules():
                    if(type(module) == nn.Conv2d):
                        torch.nn.init.zeros_(module.weight)
            elif(initialization_pattern == "Smart"):
                # Tiny uniform init on the "inner" square convolutions only.
                for module in current_network.modules():
                    if(type(module) == nn.Conv2d):
                        if(module.in_channels == module.out_channels):
                            if(module.kernel_size[0] != 1):
                                torch.nn.init.uniform_(
                                    module.weight, -10**-5, 10**-5)
            curr_loss_list = []
            curr_score_list = []
            current_optim = torch.optim.SGD(current_network.parameters(), LR)
            CRITERION = nn.CrossEntropyLoss()
            for epoch_num in range(nb_epochs):
                # Training pass
                for i, data in enumerate(train_loader):
                    image = data[0].type(torch.FloatTensor)
                    label = data[1].type(torch.LongTensor)
                    if(cuda_state):
                        # Fix: previously unconditional .cuda() calls.
                        image = image.cuda()
                        label = label.cuda()
                    pred_label = current_network(image)
                    if(type(network) == torchvision.models.GoogLeNet):
                        # GoogLeNet wraps its outputs; unwrap the main logits.
                        pred_label = pred_label.logits
                    loss = CRITERION(pred_label, label)
                    current_optim.zero_grad()
                    loss.backward()
                    current_optim.step()
                    # Periodic report: points_per_epochs times per epoch.
                    if(int((i - 1) * points_per_epochs / len(train_loader)) != int(i * points_per_epochs / len(train_loader)) or i == len(train_loader) - 1):
                        curr_loss_list.append(loss.data.item())
                        curr_score_list.append(engine.get_score(
                            current_network, test_loader))
                        print("Technique: {}, LR: {}, Done: {}%, epoch:{}/{}, score = {:.2f}, loss = {:.2f}   ".format(initialization_pattern,
                                                                                                                       LR, int(100.*(i - 1) / len(train_loader)), epoch_num+1, nb_epochs, curr_score_list[-1], curr_loss_list[-1]), end="\r")
            curr_key = initialization_pattern + "/"+str(LR)
            return_dict[curr_key + "/loss"] = curr_loss_list
            return_dict[curr_key + "/score"] = curr_score_list
            print("")
            del current_network
    return return_dict
|
{"/utils/engine.py": ["/utils/plotting_tools.py", "/utils/messing.py"], "/utils/testingpattern.py": ["/utils/plotting_tools.py", "/utils/messing.py", "/utils/engine.py"]}
|
29,499
|
GitBl/analysisTB
|
refs/heads/master
|
/utils/CKA.py
|
import torch.nn as nn
import math
from torch import cuda
import numpy as np
import torch
# According to paper - Include parts from https://github.com/google-research/google-research/tree/master/representation_similarity
def kernalize(X, cbf=True, sigma=1):
    """Return the Gram matrix of ``X`` (rows are samples).

    Parameters
    ----------
    X : 2-D numpy array of shape (n_samples, n_features).
    cbf : if True return the RBF (Gaussian) kernel matrix, otherwise the
        plain linear Gram matrix ``X @ X.T``.
    sigma : bandwidth of the RBF kernel.

    Fix: the leftover debug ``print`` of the full exponent matrix was
    removed; the computed values are unchanged.
    """
    if not cbf:
        return X.dot(X.T)
    proj_mat = X.dot(X.T)
    # half[i][j] = ||x_j||^2 - <x_i, x_j>; half + half.T gives the
    # squared Euclidean distances ||x_i - x_j||^2.
    half = np.diag(proj_mat) - proj_mat
    sq_distances = half + half.T
    return np.exp(-sq_distances / (2 * (sigma ** 2)))
def gram_centering(gram):
    """Double-center a Gram matrix IN PLACE and return it.

    For a symmetric input this equals ``H @ gram @ H`` with
    ``H = I - 1/n``. Accepts either a ``torch.Tensor`` or a numpy array;
    note that the input is mutated.
    """
    if(type(gram) == torch.Tensor):
        col_means = torch.mean(gram, 0, dtype=torch.float32)
        col_means -= torch.mean(col_means) / 2
    else:
        col_means = np.mean(gram, 0, dtype=np.float64)
        col_means -= np.mean(col_means) / 2
    # Subtract the adjusted column means along both axes.
    gram -= col_means[:, None]
    gram -= col_means[None, :]
    return gram
def CKA(X, Y, cbf, sigma=1, verbose=False):
    """Centered Kernel Alignment between two activation matrices.

    NOTE: despite the signature, ``cbf`` and ``sigma`` are currently
    ignored for the kernel itself — the kernel is always
    ``googlegram_rbf`` with its default threshold (the
    ``kernalize(..., cbf, sigma)`` calls are commented out below); they
    are only used in the degenerate-case debug prints.

    Returns 0 when either normalization term (or the numerator) is zero.

    Fix: the unreachable ``del X`` / ``del Y`` statements that followed
    the torch-branch ``return`` were removed (dead code, no behavior
    change).
    """
    K = gram_centering(googlegram_rbf(X))  # ,cbf,sigma))
    L = gram_centering(googlegram_rbf(Y))  # ,cbf,sigma))
    numerator = HSIC(K, L)
    first_h = HSIC(K, K)
    second_h = HSIC(L, L)
    if verbose:
        print("{}, {}, {}".format(numerator, first_h, second_h))
    # Degenerate kernels: dump diagnostics and return 0 instead of dividing.
    if(first_h == 0):
        print("K : {}".format(K))
        print("pure K : {}".format(kernalize(X, cbf, sigma)))
        print(X)
    if(second_h == 0):
        print("L : {}".format(L))
        print(Y)
    if(first_h == 0 or second_h == 0 or numerator == 0):
        return 0
    if(type(first_h) == torch.Tensor):
        return numerator/(torch.sqrt(first_h * second_h))
    return numerator/(np.sqrt(first_h * second_h))
def HSIC(K, L):
    """(Unnormalized) Hilbert-Schmidt Independence Criterion of two Gram matrices.

    Computes ``trace((K H)(L H))`` with the centering matrix
    ``H = I - 1/n``; the ``1/(n-1)**2`` normalization is intentionally
    omitted (it cancels in the CKA ratio). Accepts torch or numpy inputs.
    """
    n = K.shape[0]
    if(type(K) == torch.Tensor):
        centering = torch.from_numpy(np.eye(n) - np.ones((n, n))/n).float()
        product = K.mm(centering).mm(L.mm(centering))
        return torch.trace(product)  # /(n-1)**2 omitted on purpose
    centering = np.eye(n) - np.ones((n, n))/n
    product = K.dot(centering).dot(L.dot(centering))
    return np.trace(product)  # /(n-1)**2 omitted on purpose
def CKA_net_computation(network, dataset, cbf=True, sigma=1, verbose=False, fast_computation = False, iteration_limit = 10):
    """
    Returns the CKA matrix for the input networks, thanks to the Google algorithms.
    Expect a matrix of size (nn.Conv layers size*nn.Conv layers size)
    cbf: Whether or not to use RBF kernel
    sigma: which sigma to use for the RBF kernel
    fast_computation: take only "iteration_limit" batchs for early results

    NOTE(review): the forward hooks registered below are never removed, so
    ``network`` keeps them after this call; the fast_computation branch also
    keeps iterating the whole dataset (no ``break``) once the limit is
    reached, doing nothing for the remaining batches.
    """
    if (next(network.parameters()).is_cuda): #CUDA Trick
        network = network.cpu()
    # Collect the Conv2d layers in traversal order; hook_value[k] holds the
    # flattened input of conv layer k from the most recent forward pass.
    linking_list = []
    for module in network.modules():
        if type(module) == nn.Conv2d:
            linking_list.append(module)
    hook_value = [-1]*len(linking_list)
    n = len(linking_list)

    def registering_hook(self, in_val, out_val):
        # Flatten each conv input to (batch, features) before storing.
        to_store = in_val[0]
        to_store = to_store.view(
            to_store.shape[0], np.product(to_store.shape[1:]))
        hook_value[linking_list.index(self)] = to_store
    for module in network.modules():
        if type(module) == nn.Conv2d:
            module.register_forward_hook(registering_hook)
    return_matrix = torch.zeros((n, n))
    # Dataset pass
    if(fast_computation):
        iteration = 0
        for batch, _ in dataset:
            if(iteration<iteration_limit):
                iteration += 1
                network(batch)
                # Accumulate the averaged lower triangle (j <= i).
                for i in range(n):
                    for j in range(i+1):
                        # print(hook_value[i])
                        temp = CKA(hook_value[i], hook_value[j],
                                   cbf, sigma, verbose)/iteration_limit
                        return_matrix[i][j] += temp
                        del temp
                print("Done: {:.2f}".format(100*(iteration/(iteration_limit))), end='\r')
        # Mirror the lower triangle into the upper triangle.
        for i in range(n):
            for j in range(i, n):
                return_matrix[i, j] = return_matrix[j, i]
    else:
        iteration = 0
        for batch, _ in dataset:
            iteration += 1
            network(batch)
            # Accumulate the averaged lower triangle (j <= i).
            for i in range(n):
                for j in range(i+1):
                    # print(hook_value[i])
                    temp = CKA(hook_value[i], hook_value[j],
                               cbf, sigma, verbose)/len(dataset)
                    return_matrix[i][j] += temp
                    del temp
            print("Done: {:.2f}".format(100*(iteration/(len(dataset)+1))), end='\r')
        # Mirror the lower triangle into the upper triangle.
        for i in range(n):
            for j in range(i, n):
                return_matrix[i, j] = return_matrix[j, i]
    return return_matrix
def googlegram_rbf(x, threshold=1.0):
    """RBF Gram matrix with bandwidth set from the median squared distance.

    Mirrors the Google representation-similarity reference implementation.
    Accepts either a ``torch.Tensor`` or a numpy array (rows are samples);
    ``threshold`` scales the median-based bandwidth.
    """
    if (type(x) == torch.Tensor):
        products = x.mm(torch.transpose(x, 0, 1))
        norms = torch.diag(products)
        # ||x_i||^2 + ||x_j||^2 - 2 <x_i, x_j>
        distances = norms[:, None] + norms[None, :] - 2 * products
        median = torch.median(distances)
        return torch.exp(-distances / (2 * threshold ** 2 * median))
    products = x.dot(x.T)
    norms = np.diag(products)
    distances = norms[:, None] + norms[None, :] - 2 * products
    median = np.median(distances)
    return np.exp(-distances / (2 * threshold ** 2 * median))
|
{"/utils/engine.py": ["/utils/plotting_tools.py", "/utils/messing.py"], "/utils/testingpattern.py": ["/utils/plotting_tools.py", "/utils/messing.py", "/utils/engine.py"]}
|
29,500
|
GitBl/analysisTB
|
refs/heads/master
|
/utils/plotting_tools.py
|
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import torch
def plot_specific_weight(curr_list, value):
    """Histogram one kernel coefficient's distribution across all kernels.

    ``value`` in [0, 9) selects a position inside a 3x3 kernel
    (row = value // 3, column = value % 3); ``curr_list`` is a list of
    3x3 weight tensors (see ``weight_reporter``).

    Fix: title typo "Weigth" -> "Weight".
    """
    plot_list = []
    for element in curr_list:
        plot_list.append(element[value // 3][value % 3])
    plt.figure()
    plt.title("Weight n°{}".format(value))
    plt.hist(plot_list, bins=50)
    plt.show()
def weight_reporter(net):
    """Plot the distribution of each 3x3-kernel coefficient over the net.

    Collects every kernel of every ``.conv.weight`` parameter, then draws
    one histogram per kernel position (9 positions, assumes 3x3 kernels)
    via ``plot_specific_weight``.
    """
    kernels = []
    for name, param in net.named_parameters():
        if ".conv.weight" not in name:
            continue
        dim_a, dim_b = param.size()[0], param.size()[1]
        for a in range(dim_a):
            for b in range(dim_b):
                kernels.append(param[a][b].cpu().detach())
    for position in range(9):
        plot_specific_weight(kernels, position)
def comparaison_plot(out, label_list=None, name_list=None, NB_EPOCH=1, plot_text=""):
    """
    Plot the result of the comparaison of two networks.

    ``out[i]`` holds a pair ``(series_model1, series_model2)``; when each
    series is itself a list of lists, one figure is drawn per sub-series
    (plotted against the raw index), otherwise one figure is drawn against
    an epoch axis. Currently flawed as it expects the user to know the
    specifics of the input to plot.

    Fix: in the list-of-lists branch the second curve was labelled with
    ``name_list[0]`` instead of ``name_list[1]``.
    """
    plt.style.use("ggplot")
    for i in range(len(out)):
        if(type(out[i][0][0]) == list):
            for j in range(len(out[i][0])):
                plt.figure()
                plt.plot(out[i][0][j], label = name_list[0])
                plt.plot(out[i][1][j], label = name_list[1])
                plt.title(label_list[i]+" " + plot_text)
                plt.ylabel(label_list[i])
                plt.legend()
                plt.show()
        else:
            plt.figure()
            if(label_list):
                # NOTE(review): epoch_list is only defined when label_list
                # is truthy but is used below unconditionally — confirm
                # callers always pass label_list.
                epoch_list = np.linspace(0, NB_EPOCH, len(out[i][0]))
            plt.plot(epoch_list, out[i][0], label=name_list[0])
            plt.plot(epoch_list, out[i][1], label=name_list[1])
            plt.title(label_list[i]+" " + plot_text)
            plt.xlabel("Epochs")
            plt.ylabel(label_list[i])
            plt.legend()
            plt.show()
def bias_inspector(net, return_name=False):
    """Collect the bias vectors of ``net`` as numpy arrays.

    Biases whose parameter name contains 'fc' or 'lin' (the classifier
    heads) are skipped. Returns the list of biases, or
    ``[biases, names]`` when ``return_name`` is set.
    """
    biases = []
    names = []
    for name, param in net.named_parameters():
        # De Morgan'd form of: 'bias' in name and 'fc'/'lin' not in name.
        if 'bias' not in name or 'fc' in name or 'lin' in name:
            continue
        names.append(name)
        biases.append(param.cpu().detach().numpy())
    if return_name:
        return [biases, names]
    return biases
def LRVarshow(var_range, lr_range, out_val_list, epoch_number=None, model_name=None):
    """Draw one 3-D surface (variance x LR -> test score) per matrix.

    ``out_val_list`` holds matrices shaped (len(var_range), len(lr_range));
    the tick labels show the actual variance / LR values, and the title is
    assembled from ``epoch_number`` and ``model_name`` when given.
    """
    n_var = len(var_range)
    n_lr = len(lr_range)
    # Integer index grids: X[i][j] = i, Y[i][j] = j (hoisted out of the loop).
    X, Y = np.meshgrid(np.arange(n_var, dtype=float),
                       np.arange(n_lr, dtype=float), indexing='ij')
    for out_val in out_val_list:
        fig = plt.figure()
        ax = Axes3D(fig)
        ax.set_xlabel('Variance Coefficient')
        ax.set_ylabel('LR')
        ax.set_zlabel('Test Score')
        ax.plot_surface(X, Y, out_val, cmap='seismic', alpha=0.7)
        plt.yticks(range(n_lr), lr_range)
        plt.xticks(range(n_var), var_range)
        title_parts = []
        if(epoch_number):
            title_parts.append("Epoch(s): " + str(epoch_number) + " ")
        if(model_name):
            title_parts.append(str(model_name))
        if title_parts:
            plt.title("".join(title_parts))
        plt.show()
def CKAmatshow(matrix):
    """Display a CKA layer-similarity matrix with a labelled colorbar.

    Accepts a ``torch.Tensor`` (converted to numpy first) or a numpy array.
    """
    if (type(matrix) == torch.Tensor):
        matrix = matrix.detach().numpy()
    fig, ax = plt.subplots()
    image = ax.matshow(matrix)
    ax.set_xlabel("Layer n°")
    ax.xaxis.set_ticks_position('bottom')
    ax.set_ylabel("Layer n°")
    colorbar = fig.colorbar(image)
    colorbar.set_label("CKA value between layer")
    plt.show()
def dictionnary_show(dictionnary, key_list, LR_list, plotting=False, epoch_analysis=False, return_param=False):
    """Summarize (and optionally plot) the curves stored by ``testing_pattern``.

    dictionnary: dict with "nb_epochs" / "points_per_epochs" entries plus
        "<technique>/<LR>/<metric>" series.
    key_list: metrics to report ("loss" and/or "score").
    LR_list: learning rates to report.
    plotting: draw one figure per (LR, metric) pair instead of printing.
    epoch_analysis: also print the running best value per epoch.
    return_param: NOTE(review) — dead option: ``return_mat`` is allocated
        below but never filled and never returned.
    """
    nb_epochs = dictionnary["nb_epochs"]
    ppe = dictionnary["points_per_epochs"]
    if return_param:
        return_mat = np.zeros((len(LR_list), len(key_list)))  # never used (see docstring)
    for LR in LR_list:
        for plot in key_list:
            if(plotting):
                plt.figure()
            for element in dictionnary:
                param = element.split("/")
                # Skip the two scalar metadata keys (no "/" in them).
                if(len(param) != 1):
                    plt.style.use("ggplot")
                    if(param[1] == str(LR) and param[-1] == plot):
                        if(plotting):
                            plt.title(param[1])
                            plt.ylabel(param[-1])
                            epoch_list = np.arange(
                                nb_epochs, step=1/(ppe))
                            plt.xlabel("Epochs")
                        if(plot == "loss"):
                            # Best loss = minimum over the run.
                            interesting_value = min(dictionnary[element])
                            if(epoch_analysis):
                                for i in range(nb_epochs):
                                    print("\t \t \t \t Epochmax :{}, value: {:.2f}".format(
                                        i+1, min(dictionnary[element][:int((i+1)*ppe)])))
                        if(plot == "score"):
                            # Best score = maximum over the run.
                            interesting_value = max(dictionnary[element])
                            if(epoch_analysis):
                                for i in range(nb_epochs):
                                    print("\t \t \t \t Epochmax :{}, value: {:.2f}".format(
                                        i+1, max(dictionnary[element][:int((i+1)*ppe)])))
                        if(plotting):
                            plt.plot(
                                epoch_list, dictionnary[element], label=param[0] + " ,{:.2f}".format(interesting_value))
                        else:
                            print("{} - : {:.2f}".format(element, interesting_value))
                            print(
                                "--------------------------------------------------------")
                            print("")
            if(plotting):
                plt.legend()
            if(plotting):
                plt.show()
|
{"/utils/engine.py": ["/utils/plotting_tools.py", "/utils/messing.py"], "/utils/testingpattern.py": ["/utils/plotting_tools.py", "/utils/messing.py", "/utils/engine.py"]}
|
29,501
|
GitBl/analysisTB
|
refs/heads/master
|
/utils/networks.py
|
"""
Network and module definition
"""
import numpy as np
import torch
import torch.nn as nn
import torchvision.datasets
from torchvision import transforms
import matplotlib.pyplot as plt
import torch.nn.functional as F
class ResConv(nn.Module):
    """3x3 same-padding conv + ReLU, with optional batch-norm and an
    optional residual (identity) connection.

    Input and output both have ``channel`` channels and identical spatial
    size, so the residual addition is always shape-compatible.
    """

    def __init__(self, channel, residual=True, batch_norm=False):
        super(ResConv, self).__init__()
        self.residual = residual
        self.conv = nn.Conv2d(channel, channel, 3, padding=1)
        self.batch_norm_enabled = batch_norm
        if self.batch_norm_enabled:
            self.bn = nn.BatchNorm2d(channel)

    def forward(self, x):
        out = F.relu(self.conv(x))
        if self.batch_norm_enabled:
            out = self.bn(out)
        # Identity skip connection, applied after the (optional) batch norm.
        return out + x if self.residual else out
class GenResNet(nn.Module):
    """Stack of ``depth`` ResConv units between a 3->width stem conv and
    a linear classifier over the flattened 32x32 feature map (10 classes).
    """

    def __init__(self, depth, width, residual=True, batch_norm=False):
        super(GenResNet, self).__init__()
        self.width = width
        self.first_conv = nn.Conv2d(3, width, 3, padding=1)
        # Tag the stem conv so the weight-messing utilities skip it.
        self.first_conv.first = True
        self.multi_conv = nn.Sequential(
            *(ResConv(width, residual, batch_norm) for _ in range(depth)))
        self.lin = nn.Linear(width * 32 * 32, 10)

    def forward(self, x):
        features = self.multi_conv(self.first_conv(x))
        # Flatten the (width, 32, 32) map for the classifier head.
        flat = features.view(-1, self.width * 32 * 32)
        return self.lin(flat)
class SimpleResNet(nn.Module):  # Taken from HW3
    """Small CIFAR-style CNN with two hand-wired residual additions.

    Expects 3x32x32 input and produces 10 logits; conv4/conv5 and
    conv7/conv8 form the two residual pairs.
    """

    def __init__(self):
        super(SimpleResNet, self).__init__()
        # Stage 1: 3 -> 32 channels at 32x32, then downsample to 16x16.
        self.conv1 = nn.Conv2d(3, 32, 3, padding=1)
        self.conv2 = nn.Conv2d(32, 32, 3, padding=1)
        self.pool1 = nn.MaxPool2d(2, stride=2)
        # Stage 2: 32 -> 64 channels with a residual pair, then 8x8.
        self.conv3 = nn.Conv2d(32, 64, 3, padding=1)
        self.conv4 = nn.Conv2d(64, 64, 3, padding=1)  # tagged as end-like
        self.conv5 = nn.Conv2d(64, 64, 3, padding=1)  # tagged as end-like
        self.pool2 = nn.MaxPool2d(2, stride=2)
        # Stage 3: 64 -> 128 channels with a residual pair.
        self.conv6 = nn.Conv2d(64, 128, 3, padding=1)
        self.conv7 = nn.Conv2d(128, 128, 3, padding=1)  # tagged as end-like
        self.conv8 = nn.Conv2d(128, 128, 3, padding=1)  # tagged as end-like
        # Global 8x8 average pool collapses the map to 128x1x1.
        self.avgpool = nn.AvgPool2d(kernel_size=8, stride=8)
        self.lin = nn.Linear(128 * 1 * 1, 10)

    def forward(self, x):
        out = self.pool1(F.relu(self.conv2(F.relu(self.conv1(x)))))
        out = F.relu(self.conv3(out))
        skip = out
        out = F.relu(self.conv4(out))
        out = F.relu(self.conv5(out) + skip)  # first residual addition
        out = F.relu(self.conv6(self.pool2(out)))
        skip = out
        out = F.relu(self.conv7(out))
        out = F.relu(self.conv8(out) + skip)  # second residual addition
        out = self.avgpool(out)
        return self.lin(out.view(-1, 128))
|
{"/utils/engine.py": ["/utils/plotting_tools.py", "/utils/messing.py"], "/utils/testingpattern.py": ["/utils/plotting_tools.py", "/utils/messing.py", "/utils/engine.py"]}
|
29,502
|
GitBl/analysisTB
|
refs/heads/master
|
/utils/messing.py
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
def messing_up(module):
    """Re-draw a Conv2d's weights with Kaiming-normal noise, then offset
    every entry by +0.1.

    Layers carrying a ``first`` attribute (the stem conv) are skipped so
    every network is perturbed from the same starting point.
    """
    if type(module) is not nn.Conv2d:
        return
    if hasattr(module, 'first'):  # keep the first Conv2d intact, else it's unfair
        return
    torch.nn.init.kaiming_normal_(module.weight)
    module.weight = nn.Parameter(module.weight + 0.1)
def zero_mess(module):
    """Zero the weights of every Conv2d that is not tagged ``first``.

    Used to wipe residual-style layers while leaving the stem conv with
    its normal initialization.
    """
    is_untagged_conv = type(module) == nn.Conv2d and not hasattr(module, 'first')
    if is_untagged_conv:  # never touch the first Conv2d, else it's unfair
        torch.nn.init.zeros_(module.weight)
def var_modifier(tensor, val):
    """Scale ``tensor`` in place so its variance is multiplied by ``val``.

    Multiplying every entry by sqrt(val) scales the variance by val; the
    (modified) tensor is also returned for convenience.
    """
    scale = np.sqrt(val)
    with torch.no_grad():
        return tensor.mul_(scale)
def non_res_reslike_initialization(scaling):
    """Build an initializer that makes a plain conv net start out like a
    residual net.

    Each non-``first`` Conv2d weight becomes a near-identity kernel:
    zeros everywhere except the kernel center, which holds
    (1 + tiny uniform noise) / in_channels when ``scaling`` is True
    (plain 1 + noise otherwise).
    """
    def init_pattern(module):
        if type(module) != nn.Conv2d:
            return
        # Leave the first Conv2d alone, else it's unfair.
        if hasattr(module, 'first'):
            return
        weight_shape = module.weight.size()
        k_size = weight_shape[2]
        divisor = module.in_channels if scaling else 1
        # One shared k x k kernel: zero except a near-1 center tap.
        kernel = torch.zeros(k_size, k_size)
        center = (k_size - 1) // 2
        kernel[center][center] = (1. + np.random.uniform(-10**-9, 10**-9)) / divisor
        # Copy the same kernel into every (out, in) channel slot.
        init_matrix = torch.zeros(weight_shape)
        for out_ch in range(weight_shape[0]):
            for in_ch in range(weight_shape[1]):
                init_matrix[out_ch][in_ch] = kernel
        module.weight = nn.Parameter(init_matrix)
    return init_pattern
# def variance_modifier(value):
# if type(module) == nn.Conv2d:
# if hasattr(module, 'tagged'):
# var_modifier(module.weight)
# return variance_modifier
def mess_generator(value):
    """Return a function offsetting every non-``first`` Conv2d by ``value``.

    The returned closure re-initializes each matching layer with
    Kaiming-normal weights and then adds ``value`` to every entry;
    apply it with ``net.apply(mess_generator(value))``.
    """
    def mess(module):
        untouched = type(module) != nn.Conv2d or hasattr(module, 'first')
        if untouched:  # the first Conv2d stays as-is, else it's unfair
            return
        torch.nn.init.kaiming_normal_(module.weight)
        module.weight = nn.Parameter(module.weight + value)
    return mess
def zero_init(net, verbose=False, iteration_max=float('inf')):
    """Zero the weights of the 'residual-like' convolutions of ``net``.

    Convs that preserve their shape (in_channels == out_channels,
    kernel > 1, stride 1) and whose order index is below
    ``iteration_max`` are candidates; every other module is tagged
    ``first`` so that zero_mess() leaves it alone, then zero_mess is
    applied over the whole network.

    Args:
        net: torch module, modified in place.
        verbose: print the kernel sizes of tagged convs and the names of
            the conv weights that ended up all-zero.
        iteration_max: only the first ``iteration_max`` conv-like modules
            (in ``net.modules()`` order) may be zeroed.
    """
    iteration = 0
    for module in net.modules():
        tagged = False
        # Duck-typed conv detection: Conv2d exposes both attributes.
        if(hasattr(module, "in_channels") and hasattr(module, "out_channels")):
            iteration += 1
            if(module.in_channels == module.out_channels and iteration < iteration_max):
                # Shape-preserving conv with a real kernel: zero candidate.
                if(module.kernel_size[0] != 1 and module.stride == (1, 1)):
                    if(verbose):
                        print(module.kernel_size)
                    tagged = True
        if(not tagged):
            # Everything else is protected from zero_mess via the tag.
            module.first = True
    net.apply(zero_mess)
    if(verbose):
        # Report which conv weights are now entirely zero.
        for name, weight in net.named_parameters():
            if(torch.sum(weight) == 0 and "conv" in name and not 'bias' in name):
                print(name)
|
{"/utils/engine.py": ["/utils/plotting_tools.py", "/utils/messing.py"], "/utils/testingpattern.py": ["/utils/plotting_tools.py", "/utils/messing.py", "/utils/engine.py"]}
|
29,510
|
jb3dahmen/CatchTheFruitComplete
|
refs/heads/master
|
/Item.py
|
'''This is the "parent" class for both Fruit and Player. It defines important functionality that they both share
such as location, size, image and collision detection functionality'''
class Item:
    """Base class shared by Player and Fruit.

    Stores a sprite plus an axis-aligned bounding box (x, y, width,
    height) and provides drawing and rectangle-overlap detection.
    """

    def __init__(self, animage, xlocation, ylocation, mywidth, myheight):
        # Remember the sprite and its bounding box.
        self.myimage = animage
        self.xlocation = xlocation
        self.ylocation = ylocation
        self.mywidth = mywidth
        self.myheight = myheight

    def draw(self):
        """Render the sprite at its current x,y (Processing's image())."""
        image(self.myimage, self.xlocation, self.ylocation)

    def Intersects(self, anitem):
        """Return True when this item's rectangle overlaps anitem's.

        Used to detect whether the player has "caught" a fruit.
        """
        # Standard AABB overlap test: the boxes overlap exactly when
        # each one starts before the other ends on both axes.
        overlap_x = (anitem.xlocation < self.xlocation + self.mywidth
                     and anitem.xlocation + anitem.mywidth > self.xlocation)
        overlap_y = (anitem.ylocation < self.ylocation + self.myheight
                     and anitem.ylocation + anitem.myheight > self.ylocation)
        return overlap_x and overlap_y
|
{"/Fruit.py": ["/Item.py"], "/GameManager.py": ["/Fruit.py", "/Player.py"], "/Player.py": ["/Item.py"]}
|
29,511
|
jb3dahmen/CatchTheFruitComplete
|
refs/heads/master
|
/Fruit.py
|
from Item import *
'''This class will define how our fruits behave. It "inherits" from its parent class Item so it can do
everything Item can do but we can also add more behavior without having to duplicate code. Pretty neat!'''
class Fruit(Item):
    """A falling fruit; inherits location/size/collision from Item."""

    def __init__(self, animage, xlocation, aspeed, value):
        # Fruits always spawn at the top of the screen (y = 0); width
        # and height come straight from the sprite dimensions.
        Item.__init__(self, animage, xlocation, 0, animage.width, animage.height)
        # Pixels the fruit falls per move() call.
        self.speed = aspeed
        # Points awarded for catching this fruit.
        self.value = value

    def move(self):
        """Advance the fruit downward by its speed (simulates falling)."""
        self.ylocation += self.speed
|
{"/Fruit.py": ["/Item.py"], "/GameManager.py": ["/Fruit.py", "/Player.py"], "/Player.py": ["/Item.py"]}
|
29,512
|
jb3dahmen/CatchTheFruitComplete
|
refs/heads/master
|
/GameManager.py
|
add_library('sound')
from Fruit import *
from Player import *
'''The GameManager class controls and stores important aspects of the game. It controls how the player and fruits move
as well as keeping track of time and score'''
class GameManager:
    """Central game controller: owns the player, the falling fruits, the
    score/points display and the end-of-game logic.

    NOTE(review): loadFont, text, fill, image, random, width, height and
    frameCount are Processing builtins supplied by the sketch runtime.
    """
    #Here we set up the GameManager
    def __init__(self, bgimage, framerate, playerimage, timer, fruitimage, itemCatchMusicPlayer, backgroundMusicPlayer):
        """Store assets and state, and create the player near the bottom center."""
        #The game background image
        self.backgroundimage = bgimage
        #The player image
        self.playerimage = playerimage
        #The game framerate in frames/second
        self.framerate = framerate
        #The game timer that will help control how long the game lasts
        self.timer = timer
        #This boolean will help us check if the game is done (the timer ran out)
        self.done = False
        #We can load custom fonts to display the fruit points and total score
        self.scorefont = loadFont("Sansation-Bold-35.vlw");
        self.pointsfont = loadFont("Sansation-Bold-35.vlw");
        #The total score so far
        self.score = 0
        #How "fast" the fruits will fall
        self.fruitspeed = 7
        #The time that will be used to spawn fruits
        self.time = 0
        #This is a list that will store the fruits
        self.items = []
        #How many points the fruits are worth
        self.pointsValue = 0
        #Where the fruit points will appear once a fruit is caught
        self.pointsx = 0
        self.pointsy = 0
        #Checks whether the fruit points should be displayed
        self.pointsOn = False
        #How many frames the fruit points should be displayed for
        self.pointsFrameCount = 0
        #How long the game will last in seconds
        self.gameDurationSeconds = 30
        #The fruit image
        self.fruitimage = fruitimage
        #The background and caught fruit sounds
        self.itemCatchMusicPlayer = itemCatchMusicPlayer
        self.backgroundMusicPlayer = backgroundMusicPlayer
        #Set up the game's player
        self.player = Player(self.playerimage, width/2, height - 100)
    #This calls all the methods that will be needed to make the game run
    def playGame(self):
        """Run one frame of the game loop; called once per draw() tick."""
        #Increase the difficulty (speed of fruit) over time
        self.difficultyChange()
        #Spawn the fruits
        self.spawn()
        #Move the fruits
        self.moveFruit()
        #Draw the player, fruits, and points
        self.drawWorld()
        #Check if the game has ended
        self.checkEnd()
        #Check if the player has caught a fruit
        self.checkCollision()
        #Show the score and current fruit speed
        self.displayScore()
        self.displaySpeed()
        #Every time this is used add 1 second to the game time
        self.time = self.time + 1
    #Here we draw all the fruits and player on our game screen
    def drawWorld(self):
        """Draw every spawned fruit and then the player."""
        #draw all the fruits that have been spawned by looping through the items list
        for i in range(len(self.items)):
            aFruit = self.items[i]
            aFruit.draw()
        #draw the player on the screen
        self.player.draw()
    #Here we create fruits and add them to the items list
    def spawn(self):
        """Create a new fruit at a random x every framerate/2 frames."""
        #Control how often the fruits will spawn
        if(self.time == self.framerate / 2):
            #reset the fruit spawn time
            self.time = 0
            #Create a fruit with a random x location
            aFruit = Fruit(self.fruitimage, int(random(30,470)), self.fruitspeed, 3)
            #CHALLENGE1: Add a new type of fruit
            #CHALLENGE2: Add a powerup
            #Add the fruit to the list
            self.items.append(aFruit)
    #Here we move the fruits by using their move() method
    def moveFruit(self):
        """Advance every fruit downward by its speed."""
        for i in range(len(self.items)):
            #inside the loop
            aFruit = self.items[i]
            aFruit.move()
    #Here we check if the game has ended by checking the time
    #How could we change this to end the game by reaching a certain score?
    def checkEnd(self):
        """Mark the game done and pause the timer once the duration is up."""
        #If the elapsed time has reached the game duration time
        if(self.timer.currentTime() == self.gameDurationSeconds):
            self.done = True
            self.timer.pause()
    #Here we increase the speed of the fruits falling every 5 seconds that pass
    #What other ways could difficulty increase?
    def difficultyChange(self):
        """Bump the fruit speed once per 5-second mark (one frame wide)."""
        #Increase the difficulty every 5 seconds
        if(self.timer.currentTime() % 5 == 0 and self.timer.currentTime() != 0 and frameCount % self.framerate == 0 and frameCount != 0):
            self.fruitspeed = self.fruitspeed + 1
    #Here we check if the player has collide with or caught a fruit
    def checkCollision(self):
        """Score and remove every fruit the player currently overlaps."""
        #store items you want removed in this list, never a good idea to modify a list you are iterating over in python
        itemsToRemove = []
        #loop through the fruits
        #We need to check every fruit to see if it is touching the player
        for i in range(len(self.items)):
            aFruit = self.items[i]
            #If the player caught a fruit
            #Display the fruit points
            if(self.player.Intersects(aFruit)):
                self.score = self.score + aFruit.value
                self.pointsValue = aFruit.value
                self.pointsx = aFruit.xlocation
                self.pointsy = aFruit.ylocation
                self.pointsOn = True
                #remove the fruit from the items list
                itemsToRemove.append(aFruit)
                #print("CAUGHT!")
                self.itemCatchMusicPlayer.play()
        self.displayPoints(self.pointsValue, self.pointsx, self.pointsy)
        #remove caught fruits from the list
        for i in range(len(itemsToRemove)):
            itemToRemove = itemsToRemove[i]
            self.items.remove(itemToRemove)
    #Here we show the fruit points
    def displayPoints(self, value, x, y):
        """Flash "+value" at (x, y) for up to 90 frames after a catch."""
        if(self.pointsOn and self.pointsFrameCount < 90):
            textFont(self.pointsfont, 30)
            fill(255)
            text("+" + str(value), x, y)
            self.pointsFrameCount = self.pointsFrameCount + 1
        else:
            self.pointsOn = False
            self.pointsFrameCount = 0
    #Here we show the fruit speed
    def displaySpeed(self):
        """Draw the current fruit speed in the top-left corner."""
        fill(255)
        textFont(self.scorefont, 20)
        text("Speed: " + str(self.fruitspeed), 10,31)
    #Here we show the total score
    def displayScore(self):
        """Draw the running score in the top-left corner."""
        fill(255)
        textFont(self.scorefont, 20)
        text("Score: " + str(self.score), 10,15)
    #Here we show the game over message
    def displayGameOverMessage(self):
        """Draw the end-of-game banner with the final score."""
        textFont(self.scorefont, 40)
        text("Good Job!",10,40)
        textFont(self.scorefont,20)
        text("SCORE: " + str(self.score),10,70)
|
{"/Fruit.py": ["/Item.py"], "/GameManager.py": ["/Fruit.py", "/Player.py"], "/Player.py": ["/Item.py"]}
|
29,513
|
jb3dahmen/CatchTheFruitComplete
|
refs/heads/master
|
/Timer.py
|
#Copyright 2009, Leutenegger
#Credit to this Timer Class: From http://www.cs.du.edu/~leut/1671/09_Fall/ProcessingNotes7.pdf
#rewritten using Python syntax
#This class is used to define the game timer
#It will control how long the game lasts
class Timer:
    """Stopwatch used to bound the game length.

    Tracks elapsed time with Processing's millis() and can be started,
    paused, resumed and drawn on screen at a fixed (x, y) position.
    """

    def __init__(self, inX, inY):
        # Screen position where DisplayTime draws the countdown.
        self.x = inX
        self.y = inY
        self.running = False
        # Milliseconds accumulated before the last pause.
        self.timeSoFar = 0
        # millis() value captured when the timer was (re)started.
        self.startTime = 0
        # Cached display font (loaded lazily in DisplayTime).
        self.displayFont = None

    def currentTime(self):
        """Return the elapsed time in whole seconds."""
        if(self.running):
            return int((millis() - self.startTime) / 1000.0)
        else:
            return int(self.timeSoFar / 1000.0)

    def start(self):
        self.running = True
        self.startTime = millis()

    def restart(self):
        # Clear any time accumulated before a pause, then start fresh.
        # (Stale timeSoFar was harmless before — start() switches
        # currentTime() to the running branch — but resetting it keeps
        # the state consistent.)
        self.timeSoFar = 0
        self.start()

    def pause(self):
        if(self.running):
            self.timeSoFar = millis() - self.startTime
            self.running = False

    def continueRunning(self):
        """Resume after pause(), preserving the already-elapsed time."""
        if(not self.running):
            self.startTime = millis() - self.timeSoFar
            self.running = True

    def DisplayTime(self, maxtime):
        """Draw the remaining time (maxtime - elapsed) at the timer's position."""
        theTime = self.currentTime()
        output = "Time: " + str(maxtime - theTime)
        fill(255)
        # Load the font once and reuse it; reloading it from disk on
        # every frame (the previous behavior) was needlessly expensive.
        if self.displayFont is None:
            self.displayFont = loadFont("Sansation-Bold-35.vlw")
        textFont(self.displayFont)
        text(output, self.x, self.y)
|
{"/Fruit.py": ["/Item.py"], "/GameManager.py": ["/Fruit.py", "/Player.py"], "/Player.py": ["/Item.py"]}
|
29,514
|
jb3dahmen/CatchTheFruitComplete
|
refs/heads/master
|
/Player.py
|
from Item import *
'''This class defines how out player will behave. It "inherits" from its parent class Item so it can do
everything Item can do but we can also add more behavior without having to duplicate code. Pretty neat!'''
class Player(Item):
    """The controllable catcher; inherits drawing/collision from Item."""

    def __init__(self, animage, xlocation, ylocation):
        # Size comes straight from the sprite dimensions.
        Item.__init__(self, animage, xlocation, ylocation, animage.width, animage.height)

    def moveLeft(self):
        """Step 10 pixels left, stopping at the left screen edge."""
        if self.xlocation - 10 > 0:
            self.xlocation -= 10

    def moveRight(self):
        """Step 10 pixels right, stopping at the right screen edge."""
        if self.xlocation + 10 < width:
            self.xlocation += 10
|
{"/Fruit.py": ["/Item.py"], "/GameManager.py": ["/Fruit.py", "/Player.py"], "/Player.py": ["/Item.py"]}
|
29,515
|
jb3dahmen/CatchTheFruitComplete
|
refs/heads/master
|
/Credits.py
|
'''Music Credit:
Peppy and the Firing Squad: http://sampleswap.org/mp3/artist/5101/Peppy--The-Firing-Squad_YMXB-160.mp3
Artist xnoybis: http://sampleswap.org/artist/xnoybis
licensed under CC BY 2.0: http://creativecommons.org/licenses/by/2.0/
Falls: http://sampleswap.org/mp3/artist/24468/Blackjwell_Falls-160.mp3
Artist blackjwell: http://sampleswap.org/artist/blackjwell
licensed under CC BY 2.0: http://creativecommons.org/licenses/by/2.0/
Wrath Demo: http://sampleswap.org/mp3/artist/46669/joevirus_wrath-demo-160.mp3
Artist JOEVIRUS: http://sampleswap.org/artist/joevirus
licensed under CC BY 2.0: http://creativecommons.org/licenses/by/2.0/
The Great River: http://sampleswap.org/mp3/artist/2/Canton_The-Great-River-160.mp3
Artist CANTON: http://sampleswap.org/artist/canton
licensed under CC BY 2.0: http://creativecommons.org/licenses/by/2.0/
Hyperbola: http://sampleswap.org/mp3/artist/26971/Tejaswi_Hyperbola-160.mp3
Artist TEJASWI: http://sampleswap.org/artist/TranceAddict
licensed under CC BY 2.0: http://creativecommons.org/licenses/by/2.0/
Escape: http://sampleswap.org/mp3/artist/31511/soLid-Xciter_Escape-160.mp3
Artist SOLID XCITER: http://sampleswap.org/artist/xciter
licensed under CC BY 2.0: http://creativecommons.org/licenses/by/2.0/
Wiggle: http://sampleswap.org/samples-ghost/SFX%20and%20UNUSUAL%20SOUNDS/VIDEO%20GAMES/dkong/7[kb]effect01.wav.mp3
Boip: http://sampleswap.org/samples-ghost/SFX%20and%20UNUSUAL%20SOUNDS/bleeps%20blips%20blonks%20blarts%20and%20zaps/11[kb]boip.aif.mp3
Checkpoint: http://sampleswap.org/samples-ghost/SFX%20and%20UNUSUAL%20SOUNDS/bleeps%20blips%20blonks%20blarts%20and%20zaps/92[kb]checkpoint-hit.aif.mp3
Moo: http://sampleswap.org/samples-ghost/SFX%20and%20UNUSUAL%20SOUNDS/cow%20construction%20kit/121[kb]moo3.aif.mp3
Bulldog: http://sampleswap.org/samples-ghost/SFX%20and%20UNUSUAL%20SOUNDS/SOUND%20FX%20ZOO%20AND%20NATURE/175[kb]bulldog.wav.mp3
Image and Art Credit:
Daisy: http://fantendo.wikia.com/wiki/File:Sunshine_daisy_.png
Luma: http://videogames-fanon.wikia.com/wiki/File:Co-Star_Luma.png
Mario: http://www.playbuzz.com/gamergirlxox10/what-mario-character-are-you
PrincessPeach: http://mariokart.wikia.com/wiki/Princess_Peach
Toad: http://ilvg.wikia.com/wiki/Toad
Apple: http://pngimg.com/img/fruits/apple
Banana: http://www.fancyicons.com/free-icon/138/fruits-icon-set/free-banana-icon-png/
Cherry: http://pngimg.com/download/614
Peach: https://www.google.com/search?q=peach+png&espv=2&biw=1093&bih=665&site=webhp&source=lnms&tbm=isch&sa=X&ved=0CAYQ_AUoAWoVChMIud_CgL3wxgIVyTOICh23KgGM#imgrc=lslaLw476YQqWM%3A
Starfruit: http://www.iconarchive.com/show/fruitsalad-icons-by-fi3ur/starfruit-icon.html
spacebg: http://blogs.msdn.com/b/cdnstudents/archive/2013/09/12/free-space-art-assets-to-help-you-build-your-game.aspx
fieldbg: http://opengameart.org/content/large-nature-background
underwaterbg:http://opengameart.org/content/underwater-scene-loopable
moonbg: http://opengameart.org/content/background-night
desertbg: http://www.indiedb.com/games/paper-cowboys/images/new-background-concept
'''
|
{"/Fruit.py": ["/Item.py"], "/GameManager.py": ["/Fruit.py", "/Player.py"], "/Player.py": ["/Item.py"]}
|
29,527
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/spiders/gsm_arena.py
|
import re
import scrapy
from bs4 import BeautifulSoup
from article_scraper.utils import writer
from article_scraper.constants import *
class GSMArena(scrapy.Spider):
    """Spider that collects GSMArena review pages for ``self.product``.

    ``product`` is injected by the crawler process (see run_crawler.py).
    Flow: site search -> review/product listings -> review pages, with
    multi-page reviews followed via their "next page" link.
    """
    name = "GSM"
    base_url = "https://www.gsmarena.com/"
    flen = 0
    # Single source of truth for the ScraperAPI proxy. Previously this
    # dict was duplicated in every request, and one copy still contained
    # the "your_key" placeholder, so those requests could not
    # authenticate. NOTE(review): the credential should live in settings,
    # not in source.
    proxy_meta = {'proxy': 'http://scraperapi:82814b162327ecde5d84e2a712ff85bb@proxy-server.scraperapi.com:8001'}

    def start_requests(self):
        """Kick off with a site search for the product name."""
        urls = [f"https://www.gsmarena.com/res.php3?sSearch={self.product}"]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.get_review_page,
                                 meta=dict(self.proxy_meta))

    def get_review_page(self, response):
        """Follow review links and product links found on a search page."""
        soup = BeautifulSoup(response.body, 'html.parser')
        review_links = soup.find(id="reviews")
        if review_links is not None:
            review_links = review_links.find_all("div", {"class": "review-item-content"})
            for link in review_links:
                # The publication date only appears on the listing, not
                # on the review page itself, so carry it via cb_kwargs.
                pub_date = link.find("span", {"class": "meta-item-time"}).text
                yield scrapy.Request(url=self.base_url + link.a["href"], callback=self.get_review,
                                     cb_kwargs={PUB_DATE: pub_date},
                                     meta=dict(self.proxy_meta))
        product_links = soup.find(id="review-body")
        if product_links is not None:
            product_links = product_links.find_all("div", {"class": "makers"})
            if len(product_links):
                product_links = product_links[0].find_all("a")
                for link in product_links:
                    yield scrapy.Request(url=self.base_url + link['href'], callback=self.get_review_from_product,
                                         meta=dict(self.proxy_meta))

    def get_review_from_product(self, response):
        """From a product page, follow its review link if one exists."""
        soup = BeautifulSoup(response.body, 'html.parser')
        review_link = soup.find_all("li", {"class": "article-info-meta-link article-info-meta-link-review light large "
                                                    "help help-review"})
        if len(review_link):
            review_link = review_link[0]
            link = review_link.a['href']
            yield scrapy.Request(url=self.base_url + link, callback=self.get_review,
                                 meta=dict(self.proxy_meta))

    def get_review(self, response, **kwargs):
        """Persist one review page and chase its next-page link, if any.

        kwargs accumulate metadata (pub date, rating, comment count) that
        is stored together with the review text via writer.dump_data.
        """
        soup = BeautifulSoup(response.body, 'html.parser')
        review = soup.find(id="review-body")
        rating = soup.find("span", {"class": "score"})
        if rating is not None:
            kwargs.update({RATING: rating.text})
        url = response.url
        kwargs.update({DOMAIN: self.name})
        comments = soup.find("li", {"class": "article-info-meta-link meta-link-opinions"})
        if comments is not None:
            # The listing shows the count in parentheses, e.g. "(123)".
            comments = re.search(r"\(([0-9_]+)\)", comments.text)
            comments = comments.group(1)
            kwargs.update({COMMENT_COUNT: comments})
        kwargs.update({"queries": [self.product], "domain": self.name})
        writer.dump_data(review.text, url, **kwargs)
        next_page_url = soup.find("div", {"class": "article-pages col"})
        if next_page_url is not None:
            next_page_url = next_page_url.a['href']
            yield scrapy.Request(url=self.base_url + next_page_url, callback=self.get_review, cb_kwargs=kwargs,
                                 meta=dict(self.proxy_meta))
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,528
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/utils/writer.py
|
from article_scraper.constant import DB_CONFIG
def dump_data(article, url, **kwargs):
    """Insert or update a scraped review keyed by its URL.

    New URLs are inserted; existing ones are updated in place, with the
    'queries' lists of the old and new records merged (deduplicated).
    Returns the pymongo insert/update result object.
    """
    data = {"url": url, "text": article, "processed": False}
    data.update(kwargs)
    # Single round-trip: the previous version looked the document up
    # twice (once for the existence check, once for the merge).
    ex_data = DB_CONFIG.review_col.find_one({"url": url})
    if ex_data is None:
        return DB_CONFIG.review_col.insert_one(data)
    if 'queries' in ex_data:
        # Merge query lists; data may lack 'queries' entirely, which
        # previously raised a KeyError here.
        data['queries'] = list(set(data.get('queries', []) + ex_data['queries']))
    return DB_CONFIG.review_col.update_many({"url": url}, {"$set": data})
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,529
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/runner.py
|
import json
# from run_crawler import add_queries
def read_models(filename):
    """Parse the query/intent dump at ``filename``.

    The file alternates lines: a raw query, then a tab-separated intent
    line of the form "store-path<TAB>facets-json". Returns a list of
    {"query", "store", "facets"} dicts.
    """
    # Context manager guarantees the handle is closed (it never was before).
    with open(filename, 'r') as f:
        data = f.read()
    data = data.split('\n')
    # A trailing newline yields a dangling empty line; drop it so the
    # pairwise walk below cannot index past the end of the list.
    if data and data[-1] == '':
        data.pop()
    clubbed_d = []
    print(len(data))
    # Walk complete (query, intent) pairs only.
    for i in range(0, len(data) - 1, 2):
        intent = data[i + 1].strip('\t').split('\t')
        store_path = intent[0]
        facets = json.loads(intent[1])
        clubbed_d.append({"query": data[i], "store": store_path, "facets": facets})
    return clubbed_d
def get_queries_model_name(queries):
    """Extract (query, model[, brand]) records from parsed intents.

    Only intents stored under "tyy/4io" are considered; a record is
    emitted only when a model_name facet is present, with the brand
    facet (if any) attached to that same record.
    """
    qmn = []
    for query in queries:
        if query['store'] != "tyy/4io":
            continue
        model_name = False
        brand_name = False
        for facet in query['facets']:
            if facet['fk'] == 'model_name':
                model_name = facet['fv']
            if facet['fk'] == 'brand':
                brand_name = facet['fv']
        if model_name:
            entry = {"query": query['query'], "model": model_name}
            # Attach the brand to THIS entry only. Previously a brand
            # facet without a model_name updated qmn[-1], corrupting the
            # record produced by an earlier query (or raising IndexError
            # when the list was still empty).
            if brand_name:
                entry["brand"] = brand_name
            qmn.append(entry)
    return qmn
def get_queries(qmn):
    """Group search queries by product string.

    The product key is "<brand> <model>" when a brand is present, else
    the model alone. Returns (products, qdict) where qdict maps each
    product to the set of queries to run for it (always including the
    product string itself).

    The product list is sorted so callers that slice it (e.g.
    products[0:10] in runner.py) get a deterministic selection;
    previously it came from iterating a set, whose order changes between
    runs because of string-hash randomization.
    """
    queries = []
    for q in qmn:
        if 'brand' in q:
            product = " ".join([q['brand'], q['model']])
        else:
            product = q['model']
        queries.append([q['query'], product])
    products = sorted({pair[1] for pair in queries})
    qdict = {p: set() for p in products}
    for original_query, product in queries:
        qdict[product].add(original_query)
        qdict[product].add(product)
    return products, qdict
# NOTE(review): these statements run at import time — importing `runner`
# reads "mobile_intent_queries.txt" from the working directory and
# crashes if it is absent. Consider guarding with
# `if __name__ == "__main__":` — confirm first that no other module
# relies on importing `products`/`qdict` from here.
queries_intent = read_models("mobile_intent_queries.txt")
processed_queries = get_queries_model_name(queries_intent)
products, qdict = get_queries(processed_queries)
# add_queries(products[0:10], qdict)
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,530
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/constant/custom_search.py
|
# Google Custom Search credentials used by the Verge spider.
API_KEY = "CUSTOM SEARCH API KEY"  # placeholder — supply a real key before running
SEARCH_ID = "bbc7234e25df1a10d"  # custom search engine (cx) identifier
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,531
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/spiders/ndtv_gadget.py
|
import scrapy
from bs4 import BeautifulSoup
from article_scraper.utils import writer
from article_scraper.constants import *
class NDTVGadgets(scrapy.Spider):
    """Spider that scrapes NDTV Gadgets reviews for ``self.product``.

    ``product`` is injected by the crawler process. Flow: site search ->
    product pages -> review page -> persisted via writer.dump_data.
    """
    name = "ndtv"
    def start_requests(self):
        # Seed the crawl with the site search for the product name.
        url = f"https://gadgets.ndtv.com/search?searchtext={self.product}"
        yield scrapy.Request(url=url, callback=self.get_product)
    def get_product(self, response):
        """Follow every product link on the search-results page."""
        soup = BeautifulSoup(response.body, 'html.parser')
        products = soup.find(id="productSearch")
        for a in products.find_all('a'):
            yield scrapy.Request(url=a['href'], callback=self.get_review_page)
    def get_review_page(self, response):
        """From a product page, follow its review link when one exists."""
        soup = BeautifulSoup(response.body, 'html.parser')
        review = soup.find(id='review')
        if review is not None:
            # The last anchor in the review box points at the full review.
            review_link = review.find_all('a')[-1]['href']
            yield scrapy.Request(url=review_link, callback=self.get_review)
    def get_review(self, response):
        """Extract date/rating/comment-count and persist the review text."""
        soup = BeautifulSoup(response.body, 'html.parser')
        date = soup.find("div", {"class": "dateline"})
        kwargs = {}
        if date is not None:
            date = date.text.split("Updated:")[1].strip()
            kwargs.update({PUB_DATE: date})
        rating = soup.find("div", {"class": "avg_rating"})
        if rating is not None:
            # The star rating is encoded in the icon's CSS class list;
            # the last character of the second class appears to be the
            # score digit — TODO confirm against a live page.
            rating = rating.i.get("class")
            if isinstance(rating, list):
                if len(rating) > 0:
                    kwargs.update({RATING: rating[1][-1]})
        comments = soup.find(id="btncc").text
        if len(comments) == 0:
            comments = 0
        kwargs.update({COMMENT_COUNT: comments})
        review = soup.find(id="center_content_div").find("div", {"class": "content_text row description"})
        review_text = review.text
        url = response.url
        kwargs.update({DOMAIN: self.name, "queries": [self.product]})
        writer.dump_data(review_text, url, **kwargs)
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,532
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/spiders/verge.py
|
import scrapy
from bs4 import BeautifulSoup
from article_scraper.constant.custom_search import API_KEY, SEARCH_ID
from article_scraper.constants import *
from article_scraper.utils import writer
class Verge(scrapy.Spider):
    """Spider that finds reviews for ``self.product`` via Google Custom
    Search and scrapes the ones hosted on theverge.com.
    """
    name = "verge"

    def start_requests(self):
        url = f"https://www.googleapis.com/customsearch/v1?key={API_KEY}&cx={SEARCH_ID}&q={self.product}"
        yield scrapy.Request(url=url, callback=self.get_links)

    def get_links(self, response):
        """Follow the top search hits that we know how to parse."""
        res = response.json()
        res = res['items']
        for r in res[:3]:
            callback = self.find_spider(r['link'], r['displayLink'])
            # find_spider returns None for domains without a parser.
            # Previously such links were requested with callback=None,
            # which falls back to the spider's (undefined) default
            # parse() and errored when the response arrived.
            if callback is not None:
                yield scrapy.Request(url=r['link'], callback=callback)

    def find_spider(self, url, display_link):
        """Map a result's domain to its parsing callback (None if unsupported)."""
        if display_link == "www.theverge.com":
            return self.verge_get_review
        return None

    def verge_get_review(self, response):
        """Extract text, score and publication date from a Verge article."""
        kwargs = {}
        soup = BeautifulSoup(response.body, "html.parser")
        text = soup.find("div", {"class": "c-entry-content"})
        if text is None:
            # Fall back to the whole main column for non-standard layouts.
            text = soup.find("div", {"class": "l-col__main"}).text
        else:
            text = text.text
        score = soup.find("span", {"class": "c-scorecard__score-number"})
        if score is not None:
            score = score.text.split()[0]
            kwargs.update({RATING: score})
        pub_date = soup.time
        if soup.time is not None:
            pub_date = pub_date['datetime']
            kwargs.update({PUB_DATE: pub_date})
        kwargs.update({DOMAIN: "verge", "queries": [self.product]})
        writer.dump_data(text, response.url, **kwargs)
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,533
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/constant/DB_CONFIG.py
|
import pymongo
# Shared MongoDB handles; note the non-default port 27018.
review_client = pymongo.MongoClient("mongodb://localhost:27018/")
# Database and collection where writer.dump_data stores scraped reviews.
review_db = review_client["review_db"]
review_col = review_db["reviews"]
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,534
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/middleware/ProxyMiddleware.py
|
import time
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from stem import Signal
from stem.control import Controller
def new_tor_identity():
    """Ask the local Tor control port (9051) for a fresh exit identity.

    Sends NEWNYM so subsequent requests routed through the Tor proxy
    appear to come from a new IP. NOTE(review): the controller password
    is hard-coded; consider moving it to configuration.
    """
    with Controller.from_port(port=9051) as controller:
        controller.authenticate(password='articles')
        controller.signal(Signal.NEWNYM)
class ProxyMiddleware(HttpProxyMiddleware):
    """Routes GSM-spider traffic through a local Tor proxy, rotating the
    Tor identity per request and on failed responses.
    """
    def process_response(self, request, response, spider):
        # Get a new identity depending on the response
        if response.status != 200 and spider.name == "GSM":
            # Rotate identity and return the request so Scrapy
            # re-schedules it through the new Tor exit.
            new_tor_identity()
            return request
        elif response.status == 429:
            # NOTE(review): this sleeps but still passes the 429 through
            # instead of retrying — confirm whether returning `request`
            # was intended here.
            time.sleep(60)  # If the rate limit is renewed in a minute, put 60 seconds, and so on.
            return response
        return response
    def process_request(self, request, spider):
        # Set the Proxy
        # A new identity for each request
        # Comment out if you want to get a new Identity only through process_response
        if spider.name == "GSM":
            new_tor_identity()
            request.meta['proxy'] = 'http://127.0.0.1:8118'
            spider.log('Proxy : %s' % request.meta['proxy'])
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,535
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/article_scraper/constants.py
|
# Shared metadata keys used by every spider when persisting a review.
PUB_DATE = "pub_date"  # article publication/update date
COMMENT_COUNT = "comments"  # number of user comments on the article
RATING = "rating"  # reviewer's score, when present
DOMAIN = "domain"  # spider/site the review came from
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,536
|
utkarshparkhi/article_scraper
|
refs/heads/main
|
/run_crawler.py
|
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from article_scraper.spiders import ndtv_gadget, gsm_arena, CustomSearch
def get_process():
    """Build a CrawlerProcess configured from the Scrapy project settings."""
    settings = get_project_settings()
    return CrawlerProcess(settings)
def scrape(process, product, queries):
    """Schedule the GSMArena spider for one product on ``process``.

    The NDTV and custom-search spiders are currently disabled below.
    """
    process.crawl(gsm_arena.GSMArena, product=product, queries=queries)
    # process.crawl(ndtv_gadget.NDTVGadgets, product=product, queries=queries)
    # process.crawl(CustomSearch.CustomSearch, product=product, queries=queries)
def add_queries(products, qdict):
    """Queue one crawl per product, then run them all (blocking).

    *qdict* maps each product to an iterable of search queries.
    """
    crawler_process = get_process()
    for item in products:
        scrape(crawler_process, item, list(qdict[item]))
    crawler_process.start()
|
{"/article_scraper/spiders/gsm_arena.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/ndtv_gadget.py": ["/article_scraper/constants.py"], "/article_scraper/spiders/verge.py": ["/article_scraper/constant/custom_search.py", "/article_scraper/constants.py"]}
|
29,537
|
thoth-ky/oba-django
|
refs/heads/master
|
/businesses/urls.py
|
from django.urls import path
from datetime import datetime, timedelta
from businesses.views import BusinessList, BusinessDetails, BusinessTransactions
# Default reporting window for the dashboard endpoint: the last 30 days.
date_format = '%Y-%m-%d'
end = datetime.strftime(datetime.now(), date_format)
start = datetime.strftime(datetime.now() - timedelta(30), date_format)

urlpatterns = [
    path('', BusinessList.as_view(), name='business'),
    path('<int:pk>', BusinessDetails.as_view(), name='business_details'),
    # 'from'/'to' kwargs give BusinessTransactions a default date range when
    # the client sends no query parameters.
    # NOTE(review): start/end are computed once at import time, so the
    # default window is frozen until the process restarts — confirm intended.
    path('<int:pk>/dashboard', BusinessTransactions.as_view(), kwargs={'from': start, 'to': end }, name='business_transactions'),
]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,538
|
thoth-ky/oba-django
|
refs/heads/master
|
/user/views.py
|
from rest_framework import generics, mixins
from rest_framework.authtoken.models import Token
from rest_framework.permissions import AllowAny, IsAdminUser
from rest_framework.views import APIView
from rest_framework.response import Response
from user.serializers import UserSerializer, AuthSerializer
from user.models import User
class UserList(generics.ListCreateAPIView):
    """List all users (admins only) or register a new account (anyone)."""

    queryset = User.objects.all()
    serializer_class = UserSerializer

    def get_permissions(self):
        """Admins may list users; account creation is open to everybody."""
        if self.request.method == 'GET':
            self.permission_classes = [IsAdminUser]
        else:
            self.permission_classes = [AllowAny]
        return super(UserList, self).get_permissions()
class AuthenticateUser(APIView):
    """Exchange username/password credentials for a DRF auth token."""

    serializer_class = AuthSerializer

    def post(self, request, format=None):
        """Validate credentials and return the user's token details."""
        serializer = AuthSerializer(data=request.data)
        if not serializer.is_valid():
            # Invalid credentials: echo the serializer errors back.
            return Response({
                'errors': serializer.errors,
            })
        user = serializer.validated_data['user']
        token, created = Token.objects.get_or_create(user=user)
        payload = {
            'username': user.username,
            'email': user.email,
            'token': token.key,
            'user_id': user.id,
            'created': created,
        }
        return Response(payload)
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,539
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/models.py
|
from django.db import models
from businesses.models import Business
# Transaction type choices: (stored value, human-readable label).
TRANSACTIONS = (
    ('Order', 'Order'),
    ('Order payment', 'Order payment'),
    ('Bill', 'Bill'),
    # Fixed stray ')' in the display label ('Bill Payment)' -> 'Bill Payment');
    # the stored value is unchanged, so no data migration is required.
    ('Bill Payment', 'Bill Payment'),
)

# Lifecycle states a transaction can be in.
STATUSES = (
    ('Pending', 'Pending'),
    ('Accepted', 'Accepted'),
    ('Rejected', 'Rejected'),
    ('Completed', 'Completed'),
    ('Open', 'Open'),
    ('Closed', 'Closed'),
)
class Transaction(models.Model):
    """A single financial record (order/bill or a payment against one)
    belonging to a business; rows are created from CSV uploads via
    FileSerializer."""

    business = models.ForeignKey(Business, on_delete=models.CASCADE, related_name='transactions')
    transaction_type = models.CharField(max_length=15, choices=TRANSACTIONS)
    # Source-system identifier; not unique on its own (pk is separate).
    transaction_id = models.IntegerField()
    transaction_status = models.CharField(max_length=10, choices=STATUSES)
    transaction_date = models.DateField()
    # Optional: only set for transactions that can fall due.
    due_date= models.DateField(blank=True, null=True)
    customer_or_supplier = models.CharField(max_length=50)
    item = models.CharField(max_length=50)
    quantity = models.IntegerField()
    unit_amount = models.FloatField()
    total_transaction_amount = models.FloatField()

    def __str__(self):
        """Label combining the source transaction id and the primary key."""
        return f'Transaction {self.transaction_id} ID: {self.id}'
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,540
|
thoth-ky/oba-django
|
refs/heads/master
|
/businesses/views.py
|
from django.shortcuts import get_object_or_404
from django.core.exceptions import PermissionDenied
from django.db.models import Sum
from rest_framework import generics, status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from businesses.models import Business
from businesses.serializers import BusinessSerializer
class BusinessList(generics.ListCreateAPIView):
    """List the caller's businesses, or register a new one."""

    serializer_class = BusinessSerializer
    queryset = Business.objects.all()
    permission_classes = [IsAuthenticated, ]

    def get_queryset(self):
        """Superusers see every business; everyone else only their own."""
        user = self.request.user
        if not user.is_superuser:
            return Business.objects.filter(owner=user)
        return Business.objects.all()
class BusinessDetails(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single business."""

    serializer_class = BusinessSerializer
    permission_classes =(IsAuthenticated,)

    def get_object(self):
        """Fetch the business by pk; non-superusers must also own it."""
        lookup = {'id': self.kwargs.get('pk')}
        if not self.request.user.is_superuser:
            lookup['owner'] = self.request.user
        return get_object_or_404(Business, **lookup)
class BusinessTransactions(generics.RetrieveAPIView):
    """Dashboard endpoint: summarises one business's transactions over a
    date range (cash flow plus top-five item breakdowns)."""

    def get_permissions(self):
        # Reject outright unless the caller owns the business or is a
        # superuser; otherwise fall back to the authentication check.
        business = get_object_or_404(Business, id=self.kwargs.get('pk'))
        if business not in self.request.user.businesses.all() and not self.request.user.is_superuser:
            raise PermissionDenied
        else:
            self.permission_classes = [IsAuthenticated,]
        return super(BusinessTransactions, self).get_permissions()

    def get(self, *args, **kwargs):
        # get business
        business = get_object_or_404(Business, id=self.kwargs.get('pk'))
        # get params: query-string values win; the URLconf supplies default
        # 'from'/'to' kwargs (last 30 days, frozen at import time) otherwise.
        start_date = self.request.GET.get('from') or self.kwargs['from']
        end_date = self.request.GET.get('to') or self.kwargs['to']
        date_range = [start_date, end_date]
        # get summaries computed by the Business model helpers
        cash_flow = business.cash_flow(date_range)
        top_five_by_quantity = business.top_five_items_by_quantity(date_range)
        top_five_by_value = business.top_five_items_by_value(date_range)
        return Response({
            'cash_flow': cash_flow,
            'top_five_by_quantity': top_five_by_quantity,
            'top_five_by_value': top_five_by_value,
        })
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,541
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/admin.py
|
from django.contrib import admin
from transactions.models import Transaction
# Register your models here.
class TransactionAdmin(admin.ModelAdmin):
    """Admin list view for transactions with the commonly inspected
    columns and filters."""
    list_display = ('transaction_id','business', 'item', 'transaction_type', 'transaction_status', 'transaction_date', 'quantity', 'total_transaction_amount')
    list_filter = ('business','transaction_status', 'transaction_type', 'item', 'transaction_date')

admin.site.register(Transaction, TransactionAdmin)
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,542
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/serializers.py
|
import csv, io
from datetime import datetime
from django.core.exceptions import ValidationError
from rest_framework import serializers
from businesses.serializers import BusinessSerializer
from businesses.models import Business
from transactions.validators import (
validate_file_extension,
validate_required_fields,
validate_dates,
CSV_HEADERS)
from transactions.models import Transaction
class TransactionSerializer(serializers.ModelSerializer):
    """Serialises a Transaction; the owning business comes from serializer
    context rather than from the request payload."""

    business = BusinessSerializer(required=False)

    class Meta:
        model = Transaction
        fields = '__all__'

    def create(self, validated_data):
        """Attach the business id supplied via context and persist."""
        business_id = self.context['business_id']
        return Transaction.objects.create(business_id=business_id, **validated_data)
class FileSerializer(serializers.Serializer):
    """Accepts a CSV upload of transactions, validates every row, and
    bulk-creates Transaction records through TransactionSerializer."""

    csv_file = serializers.FileField(
        help_text='CSV file containing transaction details',
        validators=[validate_file_extension,]
    )

    def validate(self, data):
        """Parse the CSV, enforce the exact header order, validate each row,
        and return the rows as model-field dicts under 'csv_data'."""
        # The whole upload is read (and the stream consumed) here.
        transactions = csv.reader(io.StringIO(data['csv_file'].read().decode('utf-8')))
        headers = next(transactions)
        if headers != CSV_HEADERS:
            raise ValidationError(f'Headers do not match expected. Order matters {CSV_HEADERS}')
        csv_data = []
        # business_id = self.context['business_id']
        DATE_INPUT_FORMAT = '%m/%d/%Y'

        def convert_date(cell):
            # Dates arrive as MM/DD/YYYY; DateFields expect YYYY-MM-DD.
            # Empty cells pass through as "" (due_date may be blank).
            if cell in (None, ""):
                return ""
            return datetime.strptime(cell, DATE_INPUT_FORMAT).strftime('%Y-%m-%d')

        for row in transactions:
            validate_required_fields(row)
            validate_dates(row)
            # Column order is fixed by CSV_HEADERS.
            csv_data.append({
                'transaction_type': row[0],
                'transaction_id': row[1],
                'transaction_status': row[2],
                'transaction_date': convert_date(row[3]),
                'due_date': convert_date(row[4]),
                'customer_or_supplier': row[5],
                'item': row[6],
                'quantity': row[7],
                'unit_amount': row[8],
                'total_transaction_amount': row[9],
            })
        validated_data = {'csv_data': csv_data}
        return validated_data

    def create(self, validated_data):
        """Persist all parsed rows via TransactionSerializer(many=True)."""
        trans_serializer = TransactionSerializer(data=validated_data['csv_data'], many=True, context=self.context)
        trans_serializer.is_valid(raise_exception=True)
        return trans_serializer.save()
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,543
|
thoth-ky/oba-django
|
refs/heads/master
|
/businesses/admin.py
|
from django.contrib import admin
from businesses.models import Business
class BusinessAdmin(admin.ModelAdmin):
    """Admin list view for businesses."""
    list_display = ('business_abbreviation', 'name', 'owner', 'entity')
    list_filter = ('entity', 'annual_sales_revenue', 'owner')

admin.site.register(Business, BusinessAdmin)
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,544
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/views.py
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from rest_framework import generics
from rest_framework.status import HTTP_201_CREATED
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from businesses.models import Business
from transactions.serializers import FileSerializer, TransactionSerializer
class FileUploadView(generics.GenericAPIView):
    """POST endpoint that ingests a CSV of transactions for one business."""

    serializer_class = FileSerializer
    parser_classes = (MultiPartParser,)

    def get_permissions(self):
        """Only the business owner or a superuser may upload."""
        business = get_object_or_404(Business, id=self.kwargs.get('business_id'))
        user = self.request.user
        if business not in user.businesses.all() and not user.is_superuser:
            raise PermissionDenied
        self.permission_classes = [IsAuthenticated]
        return super(FileUploadView, self).get_permissions()

    def post(self, *args, **kwargs):
        """Validate the uploaded CSV and persist its rows."""
        serializer = FileSerializer(
            data=self.request.data,
            context={
                'request': self.request,
                'business_id': self.kwargs.get('business_id'),
            },
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        body = {'message': 'CSV uploaded and saved into database successfully'}
        return Response(body, status=HTTP_201_CREATED)
class TransactionsList(generics.ListAPIView):
    """List every transaction belonging to one business."""

    serializer_class = TransactionSerializer
    permission_classes = (IsAuthenticated,)

    def get_queryset(self):
        """All transactions for the business in the URL (404 if absent)."""
        business_id = self.kwargs.get('business_id')
        business = get_object_or_404(Business, id=business_id)
        return business.transactions.all()
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,545
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/urls.py
|
from django.urls import path
from transactions.views import FileUploadView, TransactionsList
urlpatterns = [
    # Upload a CSV of transactions for one business (owner/superuser only).
    path('business/<int:business_id>/csv_upload', FileUploadView.as_view(), name='file_upload'),
    # List all transactions recorded against one business.
    path('business/<int:business_id>/transactions', TransactionsList.as_view(), name='business_transactions'),
]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,546
|
thoth-ky/oba-django
|
refs/heads/master
|
/businesses/migrations/0001_initial.py
|
# Generated by Django 3.0.7 on 2020-06-16 08:36
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Business table."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Business',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Name of business', max_length=25)),
                ('business_abbreviation', models.CharField(help_text='Business name abbreviation', max_length=5)),
                ('company_address', models.CharField(help_text='Address of business', max_length=50)),
                ('country', django_countries.fields.CountryField(default='KE', max_length=2)),
                ('annual_sales_revenue', models.IntegerField(choices=[(1, 'Below KeS 50,000'), (2, 'KeS 50,000 - KeS 150,000'), (3, 'KeS 150,000 - KeS 300,000'), (4, 'KeS 300,000 - KeS 500, 000'), (5, 'Above KeS 500,000')])),
                ('entity', models.CharField(choices=[('R', 'Retailer'), ('S', 'Supplier')], max_length=1)),
                ('accounting_software', models.CharField(choices=[('QB', 'Quickbooks'), ('EX', 'Excel SpreadSheets')], max_length=2)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='businesses', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,547
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/validators.py
|
import os, csv, io
from datetime import datetime
from django.core.exceptions import ValidationError
# Exact header row (and column order) required of uploaded transaction CSVs;
# FileSerializer.validate rejects files whose header row differs.
CSV_HEADERS = [
    'Transaction',
    'ID',
    'Status',
    'Transaction Date',
    'Due Date',
    'Customer or Supplier',
    'Item',
    'Quantity',
    'Unit Amount',
    'Total Transaction Amount',
]
def validate_file_extension(value):
    """Reject an uploaded file whose extension (case-insensitive) is not .csv."""
    _, extension = os.path.splitext(value.name)
    if extension.lower() == '.csv':
        return
    raise ValidationError(
        f'Unsupported file extension: {extension}. Only CSV files supported')
def validate_dates(row):
    """Ensure the transaction date (col 3) and the optional due date (col 4)
    parse as MM/DD/YYYY; raise ValidationError otherwise."""
    try:
        datetime.strptime(row[3], '%m/%d/%Y')
        if row[4] not in (None, ""):
            datetime.strptime(row[4], '%m/%d/%Y')
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # propagate; short rows (IndexError) and bad values still fail cleanly.
    except (TypeError, ValueError, IndexError):
        raise ValidationError('Ensure all dates are in the format "MM/DD/YYYY"')
def validate_required_fields(row):
    """Check that all mandatory CSV columns in *row* are non-empty.

    Required columns: type(0), id(1), transaction date(3),
    customer/supplier(5), item(6), quantity(7), unit amount(8),
    total amount(9).  Status (col 2) is required only for Bill/Order rows.

    Raises ValidationError when a required column is missing.
    """
    # Bug fix: the old check `(row[0] or row[1] or ...) in (None, "")`
    # evaluated the or-chain to a single value, so it only fired when
    # *every* field was empty.  Test each required field individually.
    required_indices = (0, 1, 3, 5, 6, 7, 8, 9)
    if any(row[i] in (None, "") for i in required_indices):
        raise ValidationError("Some rows are missing required fields")
    if (row[0] in ("Bill", "Order")) and (row[2] in (None, "")):
        raise ValidationError("Bill or Order transactions require a status")
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,548
|
thoth-ky/oba-django
|
refs/heads/master
|
/businesses/serializers.py
|
from rest_framework import serializers
from businesses.models import Business
from user.serializers import UserSerializer
class BusinessSerializer(serializers.ModelSerializer):
    """Serialises a Business; the owner is rendered read-only and taken
    from the authenticated request on creation."""

    owner = UserSerializer(read_only=True)

    class Meta:
        model = Business
        fields = '__all__'
        depth=1

    def create(self, validated_data):
        """Assign the requesting user as the owner of the new business."""
        requesting_user = self.context['request'].user
        return Business.objects.create(owner=requesting_user, **validated_data)
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,549
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/migrations/0001_initial.py
|
# Generated by Django 3.0.7 on 2020-06-16 09:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Transaction table.

    Note: later migrations (0002-0005) rework the choice fields and the
    business foreign key to match the current model."""

    initial = True

    dependencies = [
        ('businesses', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Transaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('transaction_type', models.CharField(choices=[('O', 'Orders,'), ('OP', 'Order payments'), ('B', 'Bills'), ('BP', 'Bills Payment)')], max_length=2)),
                ('transaction_id', models.IntegerField()),
                ('transaction_status', models.IntegerField(choices=[(1, 'Pending'), (2, 'Accepted'), (3, 'Rejected'), (4, 'Completed'), (5, 'Open'), (6, 'Closed')])),
                ('transaction_date', models.DateField()),
                ('due_date', models.DateField(blank=True, null=True)),
                ('customer_or_supplier', models.CharField(max_length=50)),
                ('item', models.CharField(max_length=50)),
                ('quantity', models.IntegerField()),
                ('unit_amount', models.FloatField()),
                ('total_transaction', models.FloatField()),
                ('amount', models.FloatField()),
                ('business', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='transactions', to='businesses.Business')),
            ],
        ),
    ]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,550
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/migrations/0003_auto_20200616_1344.py
|
# Generated by Django 3.0.7 on 2020-06-16 13:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: converts transaction status/type to string choices."""

    dependencies = [
        ('transactions', '0002_auto_20200616_1250'),
    ]

    operations = [
        migrations.AlterField(
            model_name='transaction',
            name='transaction_status',
            field=models.CharField(choices=[('Pending', 'Pending'), ('Accepted', 'Accepted'), ('Rejected', 'Rejected'), ('Completed', 'Completed'), ('Open', 'Open'), ('Closed', 'Closed')], max_length=10),
        ),
        migrations.AlterField(
            model_name='transaction',
            name='transaction_type',
            field=models.CharField(choices=[('Orders', 'Orders'), ('Order payment', 'Order payments'), ('Bill', 'Bills'), ('Bills Payment', 'Bills Payment)')], max_length=15),
        ),
    ]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,551
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/migrations/0004_auto_20200616_1348.py
|
# Generated by Django 3.0.7 on 2020-06-16 13:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: normalises transaction_type choice values to the
    singular forms used by the model ('Order', 'Bill', ...)."""

    dependencies = [
        ('transactions', '0003_auto_20200616_1344'),
    ]

    operations = [
        migrations.AlterField(
            model_name='transaction',
            name='transaction_type',
            field=models.CharField(choices=[('Order', 'Order'), ('Order payment', 'Order payment'), ('Bill', 'Bill'), ('Bill Payment', 'Bill Payment)')], max_length=15),
        ),
    ]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,552
|
thoth-ky/oba-django
|
refs/heads/master
|
/user/urls.py
|
from django.urls import path
from user.views import AuthenticateUser, UserList
urlpatterns = [
    # List users (admins) or register a new account (open to anyone).
    path('', UserList.as_view(), name='users'),
    # Exchange credentials for an auth token.
    path('login', AuthenticateUser.as_view(), name='login'),
]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,553
|
thoth-ky/oba-django
|
refs/heads/master
|
/transactions/migrations/0005_auto_20200617_2343.py
|
# Generated by Django 3.0.7 on 2020-06-17 23:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: business FK deletion changed from PROTECT to CASCADE."""

    dependencies = [
        ('businesses', '0002_auto_20200617_2343'),
        ('transactions', '0004_auto_20200616_1348'),
    ]

    operations = [
        migrations.AlterField(
            model_name='transaction',
            name='business',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='transactions', to='businesses.Business'),
        ),
    ]
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,554
|
thoth-ky/oba-django
|
refs/heads/master
|
/businesses/models.py
|
from django.db import models
from django.db.models import Sum
from django.db.models.functions import Coalesce
from django_countries.fields import CountryField
from user.models import User
# Annual sales revenue brackets; the integer key is what gets stored.
REVENUE_RANGE = (
    (1, 'Below KeS 50,000'),
    (2, 'KeS 50,000 - KeS 150,000'),
    (3, 'KeS 150,000 - KeS 300,000'),
    (4, 'KeS 300,000 - KeS 500, 000'),
    (5, 'Above KeS 500,000')
)
# Business entity type.
ENTITY_CHOICES = (('R', 'Retailer'), ('S', 'Supplier'))
# Accounting system the business currently uses.
ACCOUNTS_SOFTWARE = (('QB', 'Quickbooks'), ('EX', 'Excel SpreadSheets'))
class Business(models.Model):
    """A business owned by a user, with the reporting helpers used by the
    dashboard endpoint (cash flow and top-five item summaries)."""

    owner = models.ForeignKey(User, related_name='businesses', on_delete=models.CASCADE)
    name = models.CharField(max_length=25, help_text='Name of business')
    business_abbreviation = models.CharField(
        max_length=5, help_text='Business name abbreviation')
    company_address = models.CharField(max_length=50, help_text='Address of business')
    country = CountryField(default='KE')
    annual_sales_revenue = models.IntegerField(choices=REVENUE_RANGE)
    entity = models.CharField(max_length=1, choices=ENTITY_CHOICES)
    accounting_software = models.CharField(max_length=2, choices=ACCOUNTS_SOFTWARE)

    def __str__(self):
        return 'Business {}'.format(self.business_abbreviation)

    def aggregate_values_by_type(self, date_range, type, aggregate_over):
        """Sum the *aggregate_over* field over this business's transactions
        of the given *type* within *date_range* ([start, end], inclusive).

        Returns {'total': <sum>}, with 0 substituted when no rows match.
        """
        return self.transactions.filter(
            transaction_date__range=date_range, transaction_type=type
        ).aggregate(total=Coalesce(Sum(aggregate_over), 0))

    def cash_flow(self, date_range):
        """Net receivables ('amount_in') and payables ('bills_due') over
        *date_range*."""
        total_orders = self.aggregate_values_by_type(
            date_range, 'Order', 'total_transaction_amount')
        # Bug fix: the stored choice value is 'Order payment' (lowercase p,
        # see the Transaction model's TRANSACTIONS choices); the previous
        # filter on 'Order Payment' never matched any rows.
        total_order_payments = self.aggregate_values_by_type(
            date_range, 'Order payment', 'total_transaction_amount')
        total_bills = self.aggregate_values_by_type(
            date_range, 'Bill', 'total_transaction_amount')
        total_bill_payments = self.aggregate_values_by_type(
            date_range, 'Bill Payment', 'total_transaction_amount')
        return {
            'amount_in': total_orders['total'] - total_order_payments['total'],
            'bills_due': total_bills['total'] - total_bill_payments['total']
        }

    def _top_five(self, date_range, transaction_type, aggregate_over):
        # Shared query behind the two top-five views: group transactions of
        # one type by item, sum the chosen field, take the five largest.
        return self.transactions.filter(
            transaction_type=transaction_type, transaction_date__range=date_range
        ).values('item', 'transaction_type').annotate(
            total=Sum(aggregate_over)).order_by('-total')[:5]

    def top_five_items_by_quantity(self, date_range):
        """Top five ordered and billed items ranked by total quantity."""
        return {
            'items_ordered_by_quantity': self._top_five(date_range, 'Order', 'quantity'),
            'items_billed_by_quantity': self._top_five(date_range, 'Bill', 'quantity'),
        }

    def top_five_items_by_value(self, date_range):
        """Top five ordered and billed items ranked by total amount."""
        return {
            'items_ordered_by_value': self._top_five(date_range, 'Order', 'total_transaction_amount'),
            'items_billed_by_value': self._top_five(date_range, 'Bill', 'total_transaction_amount'),
        }
|
{"/businesses/urls.py": ["/businesses/views.py"], "/transactions/models.py": ["/businesses/models.py"], "/businesses/views.py": ["/businesses/models.py", "/businesses/serializers.py"], "/transactions/admin.py": ["/transactions/models.py"], "/transactions/serializers.py": ["/businesses/serializers.py", "/businesses/models.py", "/transactions/validators.py", "/transactions/models.py"], "/businesses/admin.py": ["/businesses/models.py"], "/transactions/views.py": ["/businesses/models.py", "/transactions/serializers.py"], "/transactions/urls.py": ["/transactions/views.py"], "/businesses/serializers.py": ["/businesses/models.py"], "/user/urls.py": ["/user/views.py"]}
|
29,577
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/geoscrap_project/spiders/GeocachingExtractorSpider.py
|
import scrapy
from scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor
from scrapy.spiders import Rule
from geoscrap_project.items.Items import *
from bs4 import BeautifulSoup
import cfscrape
import json
import jmespath
import pendulum
import random
from urllib.parse import urlparse
from urllib import parse
from pyproj import Proj
import re
# TODO : récupération de l'inventaire des travel bugs sur la droite
class GeocachingExtractorSpider(scrapy.Spider):
    """Sign in to geocaching.com, then visit each cache detail URL supplied at
    construction time and yield one fully populated GeoCacheItem per page."""
    name = "GeocachingExtractorSpider"
    start_urls = ['http://www.geocaching.com/account/signin']
    custom_settings = {
        'CONCURRENT_REQUESTS': '1',
        'DOWNLOAD_DELAY': '2',
        'COOKIES_ENABLED': True,
        'ITEM_PIPELINES': {
            'geoscrap_project.pipelines.FullInfoJsonPipeline': 200,
        },
        'HTTPERROR_ALLOWED_CODES': [301,302,404],
        'HTTPPROXY_ENABLED': False,
        'REDIRECT_ENABLED': True
    }

    def __init__(self, urls):
        """Args: urls(list[str]): cache detail-page URLs to scrape after login."""
        super(GeocachingExtractorSpider, self).__init__()
        print(urls)
        self.urls = urls

    def parse(self, response):
        """Fill and submit the sign-in form, forwarding the anti-CSRF token."""
        meta = response.meta
        self.logger.debug('Parse function called on %s', response.url)
        # https://stackoverflow.com/questions/34076989/python-scrapy-login-authentication-spider-issue
        token = response.css('input[name=__RequestVerificationToken]::attr(value)').extract()[0]
        # NOTE(review): credentials are 'xxx' placeholders — inject real ones via settings.
        return scrapy.FormRequest.from_response(
            response,
            meta=meta,
            formxpath="//form[@action='/account/signin']",
            formdata={'__RequestVerificationToken':token,'UsernameOrEmail': 'xxx', 'Password': 'xxx'},
            callback=self.after_login
        )

    def after_login(self, response):
        """Once authenticated, schedule every requested cache detail page."""
        meta = response.meta
        for url in self.urls:
            yield scrapy.Request(url=url,
                                 meta=meta,
                                 callback=self.parse_cacheInfo,
                                 dont_filter=True)

    def parse_cacheInfo(self, response):
        """Scrape one cache detail page into a GeoCacheItem and yield it."""
        cache = GeoCacheItem()
        cache["content"] = response.xpath('//div[@class="UserSuppliedContent"]').extract()
        cache["code"] = response.xpath('//span[@class="CoordInfoCode"]/text()').extract_first()
        cache["location"] = response.xpath('//span[@id="uxLatLon"]/text()').extract_first()
        cache["nom"] = response.xpath('//span[@id="ctl00_ContentBody_CacheName"]/text()').extract_first()
        cache["searchLocation"] = response.xpath('//span[@id="ctl00_ContentBody_Location"]/text()').extract_first()
        cache["auteur"] = response.xpath('//div[@id="ctl00_ContentBody_mcd1"]/a/text()').extract_first()
        auteurUID = response.xpath('//div[@id="ctl00_ContentBody_mcd1"]/a/@href').extract_first()
        # Parse the page's UTM block (e.g. "UTM: 31U E 123456 N 5432100") and
        # convert it to WGS84 lon/lat with pyproj.
        UTMLocation = response.xpath('//span[@id="ctl00_ContentBody_LocationSubPanel"]/text()').extract_first()
        UTMLocation = " ".join(UTMLocation.split())
        UTMsplitted = UTMLocation.split(" ")
        zone = UTMsplitted[1][:-1]
        UTMx = UTMsplitted[3]
        UTMy = UTMsplitted[5]
        myProj = Proj("+proj=utm +zone=" + \
                      zone + ", +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        Lon, Lat = myProj(UTMx, UTMy, inverse=True)
        print("**********")
        print("ZONE = ",zone )
        print("UTMx = ",Lon )
        print("UTMy = ",Lat )
        print("**********")
        cache["locationWGSLon"] = Lon
        cache["locationWGSLat"] = Lat
        # The owner's GUID lives in the profile link's query string.
        p = urlparse(auteurUID)
        cache["auteurUID"] = parse.parse_qs(p.query)['guid'][0]
        cache["type"] = response.xpath('//div[@id="cacheDetails"]/p/a/img/@title').extract_first()
        date = response.xpath('//div[@id="ctl00_ContentBody_mcd2"]/text()').extract_first()
        # BUGFIX: the pattern used "\/" inside a non-raw string — an invalid
        # escape sequence (DeprecationWarning, SyntaxError in future Pythons).
        # A plain "/" in a raw string matches identically.
        cache["cachedate"] = re.search(r"([0-9]{1,2}/[0-9]{1,2}/[0-9]{4})", date).group(0)
        difficulte = response.xpath('//span[@id="ctl00_ContentBody_uxLegendScale"]/img/@alt').extract_first()
        cache["difficulte"] = re.search(r'\b\d+([\.,]\d+)?',difficulte).group(0)
        terrain = response.xpath('//span[@id="ctl00_ContentBody_Localize12"]/img/@alt').extract_first()
        cache["terrain"] = re.search(r'\b\d+([\.,]\d+)?',terrain).group(0)
        taille = response.xpath('//span[@class="minorCacheDetails"]/img/@alt').extract_first()
        cache["taille"] = taille.split(" ")[1]
        cache["urlGallerie"] = response.xpath('//*[contains(concat(" ", normalize-space(@class), " "), "CacheDetailNavigation NoPrint")]/ul/li/a/@href').extract_first()
        # Attribute codes are encoded in the icon file names (.../<code>.gif).
        cacheAttributesList = response.xpath('//div[@class="WidgetBody"]/img/@src').extract()
        cacheAttribute = []
        for attributes in cacheAttributesList:
            p = urlparse(attributes)
            codeAttribute = p.path.split("/")[3].split(".")[0]
            cacheAttribute.append(codeAttribute)
        cache["cacheAttributs"] = cacheAttribute
        # Per-log-type counts: numbers in the text, icons carry the type name.
        logsAttributesList = response.xpath('//span[@id="ctl00_ContentBody_lblFindCounts"]/p')
        logsList = []
        data = logsAttributesList.extract_first()
        logsNumber = re.sub("(</?p[^>]*>|<img.*?>)", "", str(data), 0, re.IGNORECASE | re.DOTALL | re.MULTILINE | re.UNICODE)
        logsNumber = " ".join(logsNumber.split()).split(" ")
        attributes = logsAttributesList.xpath("./img/@alt")
        for number,attribute in zip(logsNumber,attributes):
            logsList.append({attribute.extract().replace(" ","_"):number})
        cache["logsAttributs"] = logsList
        numberOfLogs = response.xpath('//*[contains(concat(" ", normalize-space(@class), " "), "InformationWidget Clear")]/h3').extract()
        cache["logsNombre"] = re.findall(r'\b\d+\b',str(numberOfLogs))[0]
        yield cache
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,578
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/main.py
|
from twisted.internet import reactor, defer
from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
import logging
from scrapy.utils.log import configure_logging
from geoscrap_project.spiders import GeocachingSpider
from geoscrap_project.spiders import GeocachingExtractorSpider
import pandas as pd
import pendulum
from pathlib import Path
from twisted.internet import task
# Route Scrapy's log records through the root logging config into geocache.log.
configure_logging(install_root_handler=True)
logging.basicConfig(filename='geocache.log',level=logging.DEBUG)
logger = logging.getLogger()
def get_timestamp():
    """Return today's date (pendulum) as a POSIX timestamp, used to tag files."""
    today = pendulum.today()
    return today.timestamp()
def getUrl():
    """Load today's geocaches<timestamp>.json (written by JsonPipeline) and
    return the list of cache detail-page URLs it contains."""
    p = Path('.').resolve()
    name = 'geocaches'+str(get_timestamp())+'.json'
    geocacheFile = p / 'data' / name
    def resolve_problem_panda(jsonFile):
        # Workaround for feeding the JSON-lines dump into pandas.
        # NOTE(review): the return sits inside the loop, so only the FIRST
        # line of the file is parsed and re-serialized — confirm this is the
        # intended behavior and not a truncation bug.
        with jsonFile.open() as json_file:
            json_list = list(json_file)
        import json
        for json_str in json_list:
            result = json.loads(json_str)
            return json.dumps(result)
    result = resolve_problem_panda(geocacheFile)
    # NOTE(review): this local deliberately shadows the stdlib name `json`
    # with a DataFrame; consider renaming for clarity.
    json = pd.read_json(result, orient='index')
    logger.debug(json)
    df = list(json['url'])
    return df
def crawl(reactor):
    """Stage one: run GeocachingSpider, then chain URL extraction and the
    detail-page crawl onto its completion deferred."""
    runner = CrawlerRunner()
    deferred = runner.crawl(GeocachingSpider.GeocachingSpider)
    deferred.addCallback(getResult)
    deferred.addCallback(crawl2, runner)
    return deferred
def getResult(result):
    """Ignore the finished-crawl result and load the scraped URL list."""
    return getUrl()
def crawl2(result, runner):
    """Stage two: scrape every cache detail page discovered in stage one."""
    return runner.crawl(GeocachingExtractorSpider.GeocachingExtractorSpider, urls=result)
task.react(crawl)
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,579
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/geoscrap_project/parse_yield.py
|
airport_list = ["airport1", "airport2", "airport3", "airport4"]


def parse_page_departure(airport, next_url, page_urls):
    """Generator that logs every departure page of one airport, consuming
    page_urls from the end; it never yields an item."""
    print(airport, " / ", next_url)
    while page_urls:
        print(airport, " / ", page_urls.pop())
    if False:
        yield  # unreachable: keeps this a generator, matching the original


###################################
# PARSE EACH AIRPORT OF COUNTRY
###################################
def parse_schedule(next_airport, airport_list):
    """Yield one (empty) departure-page generator per airport, draining
    airport_list from the end."""
    while True:
        ## GET EACH DEPARTURE PAGE
        departures_list = ["p1", "p2", "p3", "p4"]
        yield parse_page_departure(next_airport, departures_list.pop(), departures_list)
        if not airport_list:
            print("no new airport")
            return
        next_airport = airport_list.pop()


next_airport_url = airport_list.pop()
result = parse_schedule(next_airport_url, airport_list)
for i in result:
    print(i)
    for d in i:
        print(d)
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,580
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/dataUtils.py
|
import pandas as pd
from pathlib import Path
import psycopg2
from psycopg2 import sql
#conn = psycopg2.connect("dbname=reyman64_trenum user=reyman64_trenum")
# Exploratory script: load the merged geocache dump and inspect it with pandas.
p = Path('.').resolve()
geocacheFile = p / 'data' / 'fullGeochache.json'
# Widen pandas' console output so whole rows stay readable.
pd.set_option('display.width', 4000)
pd.set_option('display.max_columns', 50)
# NOTE(review): pandas' canonical option name is 'display.max_rows' (plural);
# this spelling relies on pandas' prefix matching — confirm it still resolves.
pd.set_option('display.max_row', 1000)
df = pd.read_json(geocacheFile.as_uri(), orient='index', lines=True)
dfT = df.transpose()
# https://stackoverflow.com/questions/38895856/python-pandas-how-to-compile-all-lists-in-a-column-into-one-unique-list
# All distinct cache attribute codes observed across every cache.
uniqueAttributes = (list(set([a for b in dfT.cacheAttributs.tolist() for a in b])))
dfTExtracted = dfT[['code','logsAttributs']]
print(dfT)
#def generate_input_attributes(row):
#    print(row['code'])
#    query = sql.SQL("insert into {} values (%s, %s)").format(sql.Identifier('my_table'))
#    print(query.as_string(conn))
#dfTExtracted.apply(generate_input_attributes , axis=1)
#df = df.apply(pd.Series, index=df[0].keys())
#df1 = pd.DataFrame(df, columns=['code','logsAttributs'])
#print(df1)
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,581
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/geoscrap_project/pipelines.py
|
# -*- coding: utf-8 -*-
import pymongo
from scrapy.exceptions import DropItem
from scrapy.exporters import JsonLinesItemExporter
import os
from pathlib import Path
import logging
import pendulum
def get_timestamp():
    """POSIX timestamp of today's date (pendulum), used to name output files."""
    return pendulum.today().timestamp()
class FullInfoJsonPipeline(object):
    """Export every scraped item, one JSON line each, into
    data/fullgeocaches<timestamp>.json."""

    def __init__(self):
        print("FullInfoJsonPipeline")
        out_path = Path('.') / 'data' / ('fullgeocaches' + str(get_timestamp()) + '.json')
        self.file = out_path.open('wb')
        self.exporter = JsonLinesItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
        self.exporter.start_exporting()

    def close_spider(self, spider):
        """Flush the exporter and close the output file."""
        self.exporter.finish_exporting()
        self.file.close()

    def process_item(self, item, spider):
        """Write the item unconditionally and pass it along the pipeline."""
        self.exporter.export_item(item)
        return item
class JsonPipeline(object):
    """Export non-empty items as JSON lines into data/geocaches<timestamp>.json;
    drop empty items."""
    logger = logging.getLogger()

    def __init__(self):
        print("JsonPipeline")
        out_path = Path('.') / 'data' / ('geocaches' + str(get_timestamp()) + '.json')
        self.file = out_path.open('wb')
        self.exporter = JsonLinesItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
        self.exporter.start_exporting()

    def close_spider(self, spider):
        """Flush the exporter and close the output file."""
        self.exporter.finish_exporting()
        self.file.close()

    def process_item(self, item, spider):
        """Drop empty items; export and forward everything else."""
        self.logger.debug(" ** PROCESS ** ")
        if len(item) == 0:
            self.logger.debug("EMPTY")
            raise DropItem()
        self.logger.debug("NOT EMPTY")
        self.exporter.export_item(item)
        return item
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,582
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/geoscrap_project/items/Items.py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class GeoCacheItem(scrapy.Item):
    """One scraped geocache: identity, location, author, rating and log stats."""
    nom = scrapy.Field()              # cache name/title
    content = scrapy.Field()          # raw user-supplied HTML description
    location = scrapy.Field()         # lat/lon string as printed on the page
    locationWGSLon = scrapy.Field()   # WGS84 longitude converted from UTM
    locationWGSLat = scrapy.Field()   # WGS84 latitude converted from UTM
    searchLocation = scrapy.Field()   # region/locality label from the page
    auteur = scrapy.Field()           # owner display name
    auteurUID = scrapy.Field()        # owner GUID from the profile link
    code = scrapy.Field()             # GC code (e.g. GCxxxxx)
    type = scrapy.Field()             # cache type (icon title)
    cachedate = scrapy.Field()        # hidden date, d/m/yyyy as scraped
    difficulte = scrapy.Field()       # difficulty rating (string)
    terrain = scrapy.Field()          # terrain rating (string)
    taille = scrapy.Field()           # container size label
    urlGallerie = scrapy.Field()      # link to the cache's image gallery
    cacheAttributs = scrapy.Field()   # list of attribute icon codes
    logsAttributs = scrapy.Field()    # list of {log_type: count} dicts
    logsNombre = scrapy.Field()       # total number of logs (string)
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,583
|
IDEES-Rouen/Geocache-Scrapping
|
refs/heads/master
|
/geoscrap_project/spiders/GeocachingSpider.py
|
import scrapy
from scrapy.linkextractors.lxmlhtml import LxmlLinkExtractor
from scrapy.spiders import Rule
from geoscrap_project.items.Items import *
from bs4 import BeautifulSoup
import cfscrape
import json
import jmespath
import pendulum
from pathlib import Path
import random
from urllib.parse import urlparse
class GeocachingSpider(scrapy.Spider):
    """Sign in to geocaching.com, drive the ASP.NET 'nearest' search form
    (search type -> country -> state) and page through the result list,
    yielding one {code: {url, name, page}} dict per result page."""
    name = "GeocachingSpider"
    start_urls = ['https://www.geocaching.com/account/signin']
    custom_settings = {
        'CONCURRENT_REQUESTS': '1',
        'DOWNLOAD_DELAY': '2',
        'COOKIES_ENABLED': True,
        'ITEM_PIPELINES': {
            'geoscrap_project.pipelines.JsonPipeline': 200,
        },
        'HTTPERROR_ALLOWED_CODES': [301,302,404],
        'HTTPPROXY_ENABLED': False,
        'REDIRECT_ENABLED': True
    }
    allowed_domains = ['geocaching.com']

    def parse(self, response):
        """Fill and submit the sign-in form, forwarding the anti-CSRF token."""
        meta = response.meta
        self.logger.debug('Parse function called on %s', response.url)
        # https://stackoverflow.com/questions/34076989/python-scrapy-login-authentication-spider-issue
        token = response.css('input[name=__RequestVerificationToken]::attr(value)').extract()[0]
        # NOTE(review): credentials are 'xxx' placeholders — inject real ones via settings.
        return scrapy.FormRequest.from_response(
            response,
            meta = meta,
            formxpath="//form[@action='/account/signin']",
            formdata={'__RequestVerificationToken':token,'UsernameOrEmail': 'xxx', 'Password': 'xxx'},
            callback=self.after_login
        )

    def after_login(self, response):
        """After authentication, open the 'nearest' search page."""
        print(response)
        meta = response.meta
        # go to nearest page
        return scrapy.Request(url="https://www.geocaching.com/seek/nearest.aspx",
                              meta=meta,
                              callback=self.parse_cacheSearch,
                              dont_filter=True)

    ## SIMULATE THE THREE STEPS TO POPULATE THE FORM: search type, country, state.
    ## NEEDED TO POPULATE THE ASP __VIEWSTATE hidden value.
    ## STEP 1: SEARCH TYPE = SC
    def parse_cacheSearch(self,response):
        """Step 1: select the 'SC' (state/country) search type."""
        print("TYPE OF SEARCH")
        return scrapy.FormRequest.from_response(
            response,
            #meta={'proxy': 'http://localhost:8888'},
            formxpath="//form[@id='aspnetForm']",
            formdata={
                'ctl00$ContentBody$uxTaxonomies':'9a79e6ce-3344-409c-bbe9-496530baf758',
                'ctl00$ContentBody$LocationPanel1$ddSearchType':'SC'},
            callback=self.parse_cacheCountry
        )

    ## STEP 2: SELECT COUNTRY
    def parse_cacheCountry(self, response):
        """Step 2: select the country (73 = France)."""
        print("COUNTRY SELECT")
        return scrapy.FormRequest.from_response(
            response,
            #meta={'proxy': 'http://localhost:8888'},
            formxpath="//form[@id='aspnetForm']",
            formdata={
                'ctl00$ContentBody$uxTaxonomies': '9a79e6ce-3344-409c-bbe9-496530baf758',
                'ctl00$ContentBody$LocationPanel1$ddSearchType': 'SC',
                'ctl00$ContentBody$LocationPanel1$CountryStateSelector1$selectCountry': '73'},
            callback=self.parse_cacheState
        )

    ## STEP 3: SELECT STATE AND SEND THE FINAL QUERY by submit
    ## 421 Haute-Normandie
    ## 414 Basse-Normandie
    def parse_cacheState(self, response):
        """Step 3: select the state and submit the search."""
        print ("SELECT STATE NORMANDY")
        return scrapy.FormRequest.from_response(
            response,
            #meta={'proxy': 'http://localhost:8888'},
            formxpath="//form[@id='aspnetForm']",
            formdata={
                'ctl00$ContentBody$uxTaxonomies': '9a79e6ce-3344-409c-bbe9-496530baf758',
                'ctl00$ContentBody$LocationPanel1$ddSearchType': 'SC',
                'ctl00$ContentBody$LocationPanel1$CountryStateSelector1$selectCountry': '73',
                'ctl00$ContentBody$LocationPanel1$CountryStateSelector1$selectState': '487',
                'ctl00$ContentBody$LocationPanel1$btnLocale': 'Recherche+de+géocaches'},
            callback=self.parse_pages
        )

    def display_hidden_tag(self,response):
        """Debug helper: print every hidden input of the current page."""
        soup = BeautifulSoup(response.body)
        hidden_tags = soup.find_all("input", type="hidden")
        for tag in hidden_tags:
            print(tag)

    def parse_cachesList(self, response):
        """Extract {code: {url, name, page}} for every cache row of a result page."""
        geocaches = {}
        #response.meta['viewstate'] = self.get_viewstate(response)
        tdList = response.xpath('(//td[@class="Merge"][2])')
        for td in tdList:
            geocache={}
            link = td.xpath('a//@href')
            name = td.xpath('a/span/text()')
            print("links = ", link.extract())
            geocache["url"] = link.extract_first()
            geocache["name"] = name.extract_first()
            # The GC code is the first path segment component of the detail URL.
            p = urlparse(geocache["url"])
            code = p.path.split("/")[2].split("_")[0]
            if "page" not in response.meta.keys():
                print("PAGE NOT IN RESPONSE")
                geocache["page"] = 1
            else:
                print("PAGE IN RESPONSE")
                geocache["page"] = response.meta['page'][0]
            geocaches[code] = geocache
        return geocaches

    def get_viewstate(self,response):
        """Debug helper: return the ASP.NET __VIEWSTATE hidden values."""
        state = response.xpath('//input[@id="__VIEWSTATE"]/@value').extract()
        state1 = response.xpath('//input[@id="__VIEWSTATE1"]/@value').extract()
        print('xxx STATE = ', state)
        print('xxx STATE1 = ', state1)
        return [state, state1]

    def parse_pages(self,response):
        """Scrape the current result page, then post back the ASP.NET form to
        reach the next page (meta['page'] carries [current, last])."""
        print("META KEY = ", response.meta.keys())
        viewstate = self.get_viewstate(response)
        geocaches = self.parse_cachesList(response)
        if 'page' not in response.meta.keys():
            # First visit: read the total page count, then re-request page 1
            # through the form so paging state is initialized.
            infoPage = response.xpath('//td[@class="PageBuilderWidget"]/span/b[3]//text()')
            print("PAGE NOT IN RESPONSE META KEY")
            numberOfPage = int(infoPage.extract_first())
            response.meta['page'] = [1, 3]#numberOfPage
            yield scrapy.FormRequest.from_response(
                response,
                meta={ 'page': response.meta['page']},
                formname="aspnetForm",
                formxpath="//form[@id='aspnetForm']",
                formdata={'recaptcha_challenge_field': None,
                          'recaptcha_response_field': None,
                          'ctl00$ContentBody$chkHighlightBeginnerCaches': None,
                          'ctl00$ContentBody$chkAll': None,
                          '__EVENTTARGET': None,
                          '__EVENTARGUMENT': None},
                dont_click=True,
                callback=self.parse_pages,
                dont_filter=True
            )
        else:
            if response.meta['page'][0] > response.meta['page'][1]:
                return
            print("NEXT Page : ", response.meta['page'])
            response.meta['page'][0] += 1
            # The pager only shows ten direct page links; every tenth page we
            # must click the 'next block' control instead of a numbered link.
            if (response.meta['page'][0] - 1) % 10 == 0:
                yield scrapy.FormRequest.from_response(
                    response,
                    meta={ 'page': response.meta['page']},
                    formname="aspnetForm",
                    formxpath="//form[@id='aspnetForm']",
                    formdata={'recaptcha_challenge_field': None,
                              'recaptcha_response_field': None,
                              'ctl00$ContentBody$chkHighlightBeginnerCaches': None,
                              'ctl00$ContentBody$chkAll': None,
                              '__EVENTTARGET': 'ctl00$ContentBody$pgrBottom$ctl06', },
                    dont_click=True,
                    callback=self.parse_pages,
                    dont_filter=True
                )
            else:
                print("ctl00$ContentBody$pgrTop$lbGoToPage_"+ str(response.meta['page'][0]))
                yield scrapy.FormRequest.from_response(
                    response,
                    meta={'page': response.meta['page']},
                    formname="aspnetForm",
                    formxpath="//form[@id='aspnetForm']",
                    formdata={'recaptcha_challenge_field': None,
                              'recaptcha_response_field': None,
                              'ctl00$ContentBody$chkHighlightBeginnerCaches': None,
                              'ctl00$ContentBody$chkAll': None,
                              '__EVENTTARGET': 'ctl00$ContentBody$pgrTop$lbGoToPage_' + str(response.meta['page'][0]), },
                    dont_click=True,
                    callback=self.parse_pages,
                    dont_filter=True
                )
            print("GEOCACHES = ", geocaches)
            yield geocaches

    def __init__(self, aDate=None):
        """Args: aDate: reference date (pendulum date); defaults to today.

        BUGFIX: the default used to be ``aDate=pendulum.today()``, which is
        evaluated ONCE at class-definition time, so spiders created later in a
        long-running process received a stale date. Resolve it at call time.
        """
        super(GeocachingSpider, self).__init__()
        self.aDate = aDate if aDate is not None else pendulum.today()
        self.timestamp = self.aDate.timestamp()
        print("PENDULUM UTC TODAY ", self.aDate.isoformat())
        print("PENDULUM TO TIMESTAMP ", self.timestamp)
|
{"/geoscrap_project/spiders/GeocachingExtractorSpider.py": ["/geoscrap_project/items/Items.py"], "/geoscrap_project/spiders/GeocachingSpider.py": ["/geoscrap_project/items/Items.py"]}
|
29,586
|
bakasui/python_final
|
refs/heads/master
|
/app.py
|
from flask import Flask,render_template,request
import pandas as pd
import os
import plotly.graph_objs as go
from plotly.offline import init_notebook_mode, iplot, plot
from pyecharts.faker import Faker
from pyecharts import options as opts
from pyecharts.charts import Bar
from pyecharts.charts import Pie
from collections import Counter
import minding
# NOTE(review): Mapbox access token hard-coded in source — move to env/config.
mapbox_access_token = "pk.eyJ1IjoiY3VwYmVpIiwiYSI6ImNrNG1iMDJrZDI4NngzZXF3MHY1ZTB2aXUifQ.AlCV3ory0DrxiKwJqlFZiQ"
# Data files live next to this script.
dir_path=os.path.dirname(os.path.abspath(__file__))
# English names of the ten Southeast-Asian countries covered by the dataset.
sea_ename=['philippines','cambodia','thailand','brunei','vitetnam','laos','malaysia','myanmar','east-timor','indonesia']
qyer_position=pd.read_csv(dir_path+'/qiongyou_position.csv',index_col=['country'],encoding='utf_8')
df=pd.read_csv(dir_path+'/all_destination.csv',index_col=['country'],encoding='utf_8')
qp=qyer_position
# Static Chinese HTML snippets for the overview page.
sea_text=minding.sea
def visited_sum():
    """Total 'visitedNumber' per SEA country (as strings), in sea_ename order.

    Iterates sea_ename itself instead of the old hard-coded ``range(10)``,
    so the function keeps working if the country list changes length.
    """
    return [str(qp.loc[country]['visitedNumber'].sum()) for country in sea_ename]
def place_sum():
    """Total 'placeNumber' per SEA country (as strings), in sea_ename order.

    Iterates sea_ename itself instead of the old hard-coded ``range(10)``,
    so the function keeps working if the country list changes length.
    """
    return [str(qp.loc[country]['placeNumber'].sum()) for country in sea_ename]
# Rebind the name to the computed list; the function is not needed afterwards.
visited_sum=visited_sum()
def bar_sea_visited() -> Bar:
    """Bar chart of per-country visited totals across SEA; returns embeddable HTML."""
    chart = Bar()
    chart.add_xaxis(sea_ename)
    chart.add_yaxis("东南亚", visited_sum, color=Faker.rand_color())
    chart.set_global_opts(
        title_opts=opts.TitleOpts(title="Bar-东南亚各国参观(visited)数"),
        datazoom_opts=[opts.DataZoomOpts()],
    )
    return chart.render_embed()
# Rebind the name to the computed list; the function is not needed afterwards.
place_sum=place_sum()
def bar_sea_place() -> Bar:
    """Bar chart of per-country sight/destination counts; returns embeddable HTML."""
    chart = Bar()
    chart.add_xaxis(sea_ename)
    chart.add_yaxis("东南亚", place_sum, color=Faker.rand_color())
    chart.set_global_opts(
        title_opts=opts.TitleOpts(title="Bar-东南亚各国景观/目的地数"),
        datazoom_opts=[opts.DataZoomOpts()],
    )
    return chart.render_embed()
# Pre-render the overview charts and bake them into a Jinja child template
# (templates/sea_all.html) at import time, before the Flask app starts.
html1=bar_sea_visited()
html2=bar_sea_place()
html='''{% extends 'base.html' %}
{% block body %}
'''+'''
<div class='each_country'>
{html1}
</div>
<div class='each_country'>
{html2}
</div>
<div>
{sea_text}
</div>
'''.format(html1=html1,html2=html2,sea_text=sea_text)+'''
{% endblock %}
'''
with open(dir_path+'/templates/sea_all.html','w',encoding='utf-8')as f:
    f.write(html)
app = Flask(__name__)
@app.route('/',methods=["GET"])  # serves the pre-rendered SEA visited/sights overview
def all_sea():
    """Index page: the overview template generated at import time."""
    return render_template('sea_all.html')
@app.route('/anyone',methods=("GET","POST"))
def map():
    """Per-country page: heat map, scatter map, two bar charts and a pie chart
    for the country selected in the posted form; renders a baked template.

    NOTE(review): the view name shadows the builtin ``map``; also
    ``request.form['country']`` will fail on a plain GET (no form data) even
    though GET is in ``methods`` — confirm intended usage is POST-only.
    """
    # Per-city series for the selected country, read from the qp DataFrame.
    def each_country_city_visited(country_name):
        list1=[str(i) for i in qp.loc[country_name]['visitedNumber']]
        return list1
    def each_country_city_place(country_name):
        list1=[str(i) for i in qp.loc[country_name]['placeNumber']]
        return list1
    def each_country_city_name(country_name):
        list1=[i for i in qp.loc[country_name]['city_cn']]
        return list1
    country=request.form['country']
    # NOTE(review): each helper name is rebound to its own result below.
    each_country_city_visited=each_country_city_visited(country)
    each_country_city_place=each_country_city_place(country)
    each_country_city_name=each_country_city_name(country)
    # Bar chart: per-city visited counts.
    def bar_each_visited() -> Bar:
        c = (
            Bar()
            .add_xaxis(each_country_city_name)
            .add_yaxis(str(country),each_country_city_visited, color=Faker.rand_color())
            .set_global_opts(
                title_opts=opts.TitleOpts(title="Bar-"+str(country)+"各地参观(visited)数"),
                datazoom_opts=[opts.DataZoomOpts()],
            )
        )
        return c.render_embed()
    # Bar chart: per-city sight/destination counts.
    def bar_each_place() -> Bar:
        c = (
            Bar()
            .add_xaxis(each_country_city_name)
            .add_yaxis(str(country),each_country_city_place,color=Faker.rand_color())
            .set_global_opts(
                title_opts=opts.TitleOpts(title="Bar-"+str(country)+"各地景观/目的地数"),
                datazoom_opts=[opts.DataZoomOpts()],
            )
        )
        return c.render_embed()
    th=df.loc[country]
    # Pie chart: destination category breakdown (NaN categories labelled '未分类').
    def pie_each_country_lei() -> Pie:
        ss=Counter([i for i in th['catename'].fillna('未分类')])
        lei=[k for k,v in ss.items()]
        count=[v for k,v in ss.items()]
        c = (
            Pie()
            .add(
                "",
                [list(z) for z in zip(lei, count)],
                radius=["35%", "75%"],
                center=["50%", "50%"],
                rosetype="area",
            )
            .set_global_opts(title_opts=opts.TitleOpts(title="Pie-"+str(country)+"景观/目的地类别示例"))
        )
        return c.render_embed()
    # Coordinates, names and counts for the two Plotly maps.
    px = th['position_x']
    py = th['position_y']
    pt = [i for i in th['en_destination']]
    pc = th['count']
    x=[i for i in px]
    y=[i for i in py]
    ptc=[i for i in th['cn_destination']]
    ptt=[str(ptc[i])+str(pt[i]) for i in range(len(ptc))]
    # Density (heat) map centred on the country's first destination.
    fig = go.Figure(go.Densitymapbox(
        name=str(country)+'热力图',
        lat=px,
        lon=py,
        z=pc,
        radius=25,
        text=ptt))
    fig.update_layout(
        title=str(country)+'热力图',
        paper_bgcolor='rgba(170,95,134,1)',
        plot_bgcolor='rgba(170,95,134,1)',
        mapbox=dict(
            style='outdoors',
            accesstoken=mapbox_access_token,
            bearing=0,
            center=dict(
                lat=float(x[0]),
                lon=float(y[0]),
            ),
            pitch=0,
            zoom=4
        ),
    )
    # Scatter map with the same centre and styling.
    data = [
        go.Scattermapbox(
            name=str(country)+'散点图',
            lat=px,
            lon=py,
            mode='markers',
            marker=dict(
                size=9
            ),
            text=ptt,
        )]
    layout1 = go.Layout(
        title=str(country)+'散点图',
        autosize=True,
        hovermode='closest',
        paper_bgcolor='rgba(170,95,134,1)',
        plot_bgcolor='rgba(170,95,134,1)',
        mapbox=dict(
            style='outdoors',
            accesstoken=mapbox_access_token,
            bearing=0,
            center=dict(
                lat=float(x[0]),
                lon=float(y[0])
            ),
            pitch=0,
            zoom=4
        ),
    )
    fig1 = dict(data=data, layout=layout1)
    div1=plot(fig,output_type="div")
    div2=plot(fig1,output_type="div")
    html1=bar_each_visited()
    html2=bar_each_place()
    html3=pie_each_country_lei()
    each_sea_text=minding.each_sea[country]
    # Bake everything into a Jinja child template on disk, then render it.
    html='''{% extends 'base.html' %}
{% block body %}
'''+'''
<div class='daxiao'>
{div1}
</div>
<div class='daxiao'>
{div2}
</div>
<div class='each_country'>
{html1}
</div>
<div class='each_country'>
{html2}
</div>
<div class='each_country'>
{html3}
</div>
<div class='font'>
{each_sea_text}
</div>
'''.format(div1=div1,div2=div2,html1=html1,html2=html2,html3=html3,each_sea_text=each_sea_text)+'''
{% endblock %}
'''
    with open(dir_path+'/templates/'+str(country)+'.html','w',encoding='utf-8') as f:
        f.write(html)
    return render_template(str(country)+'.html')
if __name__ == '__main__':
app.run(debug=True)
|
{"/app.py": ["/minding.py"]}
|
29,587
|
bakasui/python_final
|
refs/heads/master
|
/minding.py
|
# Static Chinese-language HTML snippets rendered into the Flask templates.
# These are runtime strings consumed by app.py — content must not be altered.
# Overview text for the Southeast-Asia landing page.
sea='''
<div>
<title>东南亚</title>
<p>东南亚(SEA)位于亚洲东南部,包括中南半岛和马来群岛两大部分。东南亚地区共有11个国家:越南、老挝、柬埔寨、泰国、缅甸、马来西亚、新加坡、印度
尼西亚、文莱、菲律宾、东帝汶,面积约457万平方千米。在此,我们分析东南亚十个国家的旅游景点情况。</p>
<p>由上图可知:东南亚参观数最多的是泰国,其次是马来西亚,第三是越南,第四是印度尼西亚,第五是菲律宾,第六是柬埔寨,第七是缅甸,第八是老挝,第九是
文莱,最少的东帝汶。</p>
<p>景观数最多的依旧是泰国,其次是越南,第三是马来西亚,第四是印度尼西亚,第五是柬埔寨,第六是菲律宾,第七是缅甸,第八是老挝,第九
是文莱,最少的还是东帝汶。</p>
</div>
'''
# Per-country description blocks, keyed by the English country names used in
# app.py's sea_ename list (note: 'vitetnam' spelling must match that key).
each_sea={'philippines':'''
<div>
<title>菲律宾</title>
<p>菲律宾位于亚洲东南部。</p>
<p>北隔巴士海峡与中国台湾省遥遥相对,南和西南隔苏拉威西海、巴拉巴克海峡与印度尼西亚、马来西亚相望,西濒南中国海,东临太平洋。</p>
<p>以热力图表示来菲律宾的参观人数,颜色越深代表参观人数越多。</p>
<p>以地图形式展示菲律宾景点的位置,下面两张分别就是以条形图参观人数。</p>
<p>由高到低展示菲律宾的参观人数和景点数的具体概况,由图可得,参观人数最多得就是菲律宾的马尼拉,达到22822,其次为长滩岛,人数为17049,最少的是帕拉
纳克,仅有16人。</p>
<p>景点数最多的是加莱拉港,为20个,其次为干米银,为17个。</p>
</div>
''',
'cambodia':'''
<div>
<title>柬埔寨</title>
<p>柬埔寨位于中南半岛,占地181,035平方公里,20%为农业用地。西部及西北部与泰国接壤,东北部与老挝交界,东部及东南部与越南毗邻,南部则面向暹罗湾。</p>
<p>旅游资源丰富,曾经被环球旅行杂志评为全世界第一的旅行圣地。</p>
<p>以热力地图的形式展示柬埔寨的参观人数,颜色越深代表参观人数越多。</p>
<p>再以位置地图展示柬埔寨的各个景点位置,以条形图的形式展示柬埔寨参观人数由高到低排列。</p>
<p>参观人数最多的为波贝,为819,其次为贡布519,最少的为班龙,仅为18。</p>
<p>以条形图的形式展示柬埔寨景点数,景点数最多的是贡布,为14个。</p>
<p>其次为桔井,为6个。再以饼图形式展示柬埔寨类别情况,占比最大的就是美食,其次为景点观光。</p>
</div>
''',
'thailand':'''
<div>
<title>泰国</title>
<p>泰国位于亚洲中南半岛中南部,与柬埔寨、老挝、缅甸、马来西亚接壤。</p>
<p>泰国被称为度假天堂,景色非常优美。</p>
<p>连续多年进入全世界最受欢迎前三名的国家。 </p>
<p>泰国美食口味偏酸、甜、辣,各个岛上海鲜众多,物美价廉,无论是街边小摊还是海边大排档,都能够品尝到美味。</p>
<p>泰国是个佛教圣地,95%以上的人都信奉佛教,民风非常淳朴,泰国人民的友好给每个游客留下了深刻的印象。</p>
<p>泰国的物价非常便宜,而且作为一个旅游国度,各种旅游设施非常完善。特产丰富,例如泰国燕窝。</p>
<p>泰国珠宝,泰国独有的蛇药。独有的人妖文化。</p>
<p>以热力地图的形式展示泰国的参观人数,颜色越深代表参观人数越多。</p>
<p>再以位置地图展示泰国的各个景点位置,以条形图的形式展示泰国参观人数由高到低排列,参观人数最多的为春篷,为1766,其次为夜功,为1551,最少的为农磨和匹莱海滩,都仅为7。</p>
<p>以条形图的形式展示泰国景点数,景点数最多的是巴蜀府,一枝独秀,景点数达到567个,其次也就是北碧也就28个。</p>
<p>再以饼图形式展示泰国类别情况,占比最大的就是美食,其次为景点观光和休闲娱乐。</p>
</div>
''',
'brunei':'''
<div>
<title>文莱</title>
<p>文莱达鲁萨兰国位于加里曼丹岛北部,北濒南中国海,东南西三面与马来西亚的沙捞越州接壤,并被沙捞越州的林梦分隔为不相连的东西两部分,总面积为5765
平方公里。</p>
<p>文莱的旅游景点并不多,但处处显示着它的豪华与富有。</p>
<p>文莱是禁酒国家,在文莱是找不到卖酒的商店,游客只能携带275毫升酒入境自用。在文莱河上,层层叠叠地矗立着一排排人字形屋顶的高脚小木屋,组成了文莱最有特色的景观—水村。</p>
<p>以热力地图的形式展示文莱的参观人数,颜色越深代表参观人数越多。</p>
<p>再以位置地图展示文莱的各个景点位置。</p>
<p>以条形图的形式展示文莱参观人数由高到低排列,参观人数最多的为斯里巴家湾市,为1766,其次为白拉奕,为331,最少的为甘榜杰鲁登,都仅为124。</p>
<p>以条形图的形式展示泰国景点数,景点数最多的是斯里巴家湾市,景点数达到53个,其次也就是白拉奕也就3个。</p>
<P>再以饼图形式展示泰国类别情况,占比最大的就是美食,其次为景点观光和购物三足鼎立。</p>
</div>
''',
'vitetnam':'''
<div>
<title>越南</title>
<p>越南位于中南半岛东部,地理坐标为北纬8°10'~23°24'、东经102°09'~109°30'之间,北与中华人民共和国接壤,西与老挝人民民主共和国、柬埔寨王国交界,
东面和南面临南海。</p>
<p>越南女子的日常装扮是长裙、拖鞋与帽子。</p>
<p>越南是世界第二大稻米出口国,米粉是越南必吃的主食。</p>
<p>越南的四处都充斥着中国气息。摩托车在越南的普及程度就如同中国的自行车,那是家家必备、人人必用的交通工具。</p>
<p>以热力地图的形式展示越南的参观人数,颜色越深代表参观人数越多。</p>
<p>再以位置地图展示越南的各个景点位置。</p>
<p>以条形图的形式展示越南参观人数由高到低排列,参观人数最多的为海防市,为1809,其次为潘切,为331,最少的为甘榜杰鲁登,都仅为1676。</p>
<p>以条形图的形式展示越南景点数,景点数最多的是富国岛,景点数达到48个,其次也就是芹苴,为22个。</p>
<p>再以饼图形式展示泰国类别情况,占比最大的就是美食,其次为景点观光和购物。</p>
</div>
''',
'laos':'''
<div>
<title>老挝</title>
<p>老挝是一个位于中南半岛北部的内陆国家,北邻中国,南接柬埔寨,东临越南,西北毗邻缅甸,西南毗邻泰国。曾是法国殖民地,随处可见东西方文化的鲜明
冲撞。</p>
<p>东南亚冷门旅游地,看见的只有美景,没有人山人海。人与自然和谐相处,景观没有过分的商业化改造。 85%以上民众信奉佛教,拥有逛不完的古寺庙。
拥有世界文化古迹与古城,能够游览王国遗址。适合坐大巴游玩、适合穷游。</p>
<p>以热力地图的形式展示老挝的参观人数,颜色越深代表参观人数越多。</p>
<p>再以位置地图展示老挝的各个景点位置。</p>
<p>以条形图的形式展示老挝参观人数由高到低排列,参观人数最多的为琅勃拉邦,为7899,其次为万象,为6580,最少的桑怒,仅为2。以条形图的形式展示老挝景点数,景点数最多的是万象,景点数达到92个,其次也就是琅勃拉邦,为68个。</p>
<p>再以饼图形式展示泰国类别情况,占比最大的就是景点观光,其次为美食。</p>
</div>
''',
'malaysia':'''
<div>
<title>马来西亚</title>
<p>马来西亚国土面积330345平方公里,位于太平洋和印度洋之间,北与泰国接壤,西濒马六甲海峡,东临南中国海,南濒柔佛海峡与新加坡毗邻。</p>
<p>马来西亚位赤道附近,属于热带雨林气候和热带季风气候,无明显四季之分。</p >
<p>以热力图表示来马来西亚的参观人数,颜色越亮代表参观人数越多。</p>
<p>以地图形式展示马来西亚景点的位置。</p>
<p>下面两张分别是以柱状图由高到低展示马来西亚的参观人数和景点数的具体概况,由图可得,马来西亚参观人数最多的地区是吉隆坡,达到108220,其次为马六甲,人数为33007,最少的是马廖盆地,仅有1人。</p>
<p>景点数最多的是吉隆坡,为1500个,其次为马六甲,为578个。</p>
<p>再以饼图形式展示马来西亚旅游类别,占比最大的是美食,其次是景点观光和休闲娱乐。</p>
</div>
''',
'myanmar':'''
<div>
<title>缅甸</title>
<p>缅甸位于亚洲东南部、中南半岛西部,面积约67.85万平方公里,其北部和东北部同中国西藏和云南接界,东部与老挝和泰国毗邻,西部与印度
、孟加拉国接壤。</p >
<p>以热力图表示来缅甸的参观人数,颜色越亮代表参观人数越多。</p >
<p>以地图形式展示缅甸景点的位置。</p >
<p>下面两张分别是以柱状图由高到低展示缅甸的参观人数和景点数的具体概况,由图可得,缅甸参观人数最多的地区是仰光,达到7877,其次为蒲甘,人数为6904,最少的是新平洋,仅有1人。</p >
<p>景点数最多的是仰光,为109个,其次为蒲甘,为97个。</p >
<p>再以饼图形式展示缅甸旅游类别,占比最大的是景点观光,其次是美食和购物。</p >
</div>
''',
'east-timor':'''
<div>
<title>东帝汶</title>
<p>东帝汶位于东南亚努沙登加拉群岛最东端,岛国。包括帝汶岛东部和西部北海岸的欧库西地区以及附近的阿陶罗岛和东端的雅库岛。西部与印尼
西帝汶相接,南隔帝汶海与澳大利亚相望。国土面积14919平方公里。</p >
<p>以热力图表示来东帝汶的参观人数,颜色越亮代表参观人数越多。</p >
<p>以地图形式展示东帝汶景点的位置。</p >
<p>下面两张分别是以柱状图由高到低展示东帝汶的参观人数和景点数的具体概况,由图可得,东帝汶参观人数最多的地区是帝力,为146,其次为包考,人数为31;景点数最多的帝力,为5个。</p >
<p>再以饼图形式展示东帝汶旅游类别,景点观光占了所有比例。</p >
</div>
''',
'indonesia':'''
<div>
<title>印度尼西亚</title>
<p>印度尼西亚位于亚洲东南部,地跨赤道,与巴布亚新几内亚、东帝汶、马来西亚接壤,与泰国、新加坡、菲律宾、澳大利亚等国隔海相望。
印度尼西亚是典型的热带雨林气候,年平均温度25-27℃,无四季分别。</p >
<p>以热力图表示来印度尼西亚的参观人数,颜色越亮代表参观人数越多。</p >
<p>以地图形式展示印度尼西亚景点的位置,下面两张分别是以柱状图由高到低展示印度尼西亚的参观人数和景点数的具体概况,由图可得,印度尼西亚参观人数最多的地区是巴厘岛,达到44372,其次为库塔,人数为17653,最少的是博拉科特,
仅有1人。</p >
<p>景点数最多的是巴厘岛,为1500个,其次为库塔,为502个。</p >
<p>再以饼图形式展示印度尼西亚旅游类别,占比最大的是美食,其次是景点观光和休闲娱乐。</p >
</div>
'''
}
|
{"/app.py": ["/minding.py"]}
|
29,628
|
miyax0227/quizScatterer
|
refs/heads/main
|
/quizScatterer/classes/qs.py
|
# -*- coding: utf-8 -*-
import os
import re
import math
from pprint import pprint
import MeCab
import numpy as np
import gensim
import itertools
import pandas as pd
import scipy.spatial.distance as distance
from scipy.cluster.hierarchy import dendrogram, linkage
# Directory containing this file (used to locate the bundled model).
execPath = os.path.dirname(__file__)
# Load the pre-trained word2vec model and keep only its keyed vectors.
wv = gensim.models.Word2Vec.load(execPath + "/gensimModel/word2vec.gensim.model").wv
# MeCab tokenizer configured with the neologd dictionary.
mt = MeCab.Tagger('-d /usr/local/lib/mecab/dic/mecab-ipadic-neologd')
# Question-text normalization
def regulateQuestion(q):
    """Normalize a quiz question: ASCII-fy fullwidth parentheses, strip
    hiragana reading glosses like "(よみ)", and drop question marks."""
    normalized = q.translate(str.maketrans({'(':'(',')':')'}))
    normalized = re.sub('\([\u3041-\u309F・]+\)','',normalized)
    return re.sub('[??]','',normalized)
# Cosine similarity
def cosSim(v1, v2):
    """Cosine similarity of two equal-length vectors.
    Args: v1(np.Array), v2(np.Array): vectors of the same dimension
    Returns: float: similarity in [-1, 1]
    """
    denom = np.linalg.norm(v1) * np.linalg.norm(v2)
    return np.dot(v1, v2) / denom
# Word-pair similarity list (most similar first) from two question vectors
def getDirectProduct(l1, l2):
    """All word pairs between two question vectors with their cosine
    similarity, sorted most-similar first.
    Args: l1(dict), l2(dict): question vectors
    Returns: list[dic]: word-pair similarity records
    """
    pairs = [
        {
            'word1': a['surface'],
            'word2': b['surface'],
            'cosSim': cosSim(a['vector'], b['vector']),
        }
        for a, b in itertools.product(l1, l2)
    ]
    pairs.sort(key=lambda rec: rec['cosSim'], reverse=True)
    return pairs
def getWakachigaki(text):
    """Tokenize text with MeCab, returning surface/feature records in order."""
    tokens = []
    node = mt.parseToNode(text)
    while node is not None:
        tokens.append({
            '_surface': node.surface,
            'feature': node.feature
        })
        node = node.next
    return tokens
# Build a question vector (content words + embeddings) from the question text
def getVector(text):
    """Build the question vector from the question text.
    Args: text(str): question sentence
    Returns: list[dict]: one record per distinct content word, each holding
        its surface form, POS info, word2vec vector and occurrence count
    """
    node = mt.parseToNode(text)
    nounList = []
    elements = []
    while node:
        fields = node.feature.split(",")
        # Keep nouns/verbs/adjectives, excluding pronouns, non-independent
        # nouns and numerals, verb suffixes, light verbs (する/いう/ある),
        # the literal '年', and words absent from the word2vec vocabulary.
        if fields[0] in ['名詞','動詞','形容詞'] \
            and not (fields[0] == '名詞' and fields[1] in ['代名詞','非自立','数']) \
            and not (fields[0] == '動詞' and fields[1] in ['接尾']) \
            and not (fields[0] == '動詞' and fields[6] in ['する','いう','ある']) \
            and node.surface not in ['年'] \
            and node.surface in wv:
            if node.surface not in elements:
                elements.append(node.surface)
                nounList.append({
                    'surface': node.surface,
                    'type': fields[0] + "." + fields[1],
                    'fields':fields,
                    'vector': wv[node.surface],
                    'count': 1
                })
            else:
                # Word seen before in this question: bump its first record's count.
                nounList[min(i for i in range(len(nounList)) if nounList[i]['surface'] == node.surface)]['count'] += 1
        node = node.next
    return nounList
# TF-IDF-weighted summary vector for each question vector
def getSummaryVector(questionVectors):
    """Compute one TF-IDF-weighted summary vector per question.

    Args: questionVectors(list): question vectors as produced by getVector()
    Returns: list[np.ndarray]: one 50-dimensional summary vector per question

    FIXES: removed the leftover ``pprint`` debug output that flooded stdout
    on every word, and renamed the accumulator that shadowed builtin ``sum``.
    """
    questionCount = len(questionVectors)
    # Document frequency of each surface form across all questions.
    nounCount = {}
    for qv in questionVectors:
        for v in qv:
            nounCount[v['surface']] = nounCount.get(v['surface'], 0) + 1
    summaries = []
    for qv in questionVectors:
        # Dimensionality fixed at 50 to match the loaded word2vec model.
        acc = np.zeros([50])
        for v in qv:
            # TF (in-question count) times IDF (log inverse document frequency).
            acc += v['vector'] * v['count'] * math.log(questionCount / nounCount[v['surface']])
        summaries.append(acc)
    return summaries
# Build a surface-form -> occurrence-count dict
def getNounCountDict(questionVectors):
    """Count, per surface form, how many word records mention it."""
    counts = {}
    for question in questionVectors:
        for record in question:
            surface = record['surface']
            counts[surface] = counts.get(surface, 0) + 1
    return counts
# Distance between two question vectors
def getDistance(l1, l2):
    """Distance between two question vectors.

    Sums (1 - similarity) over the nine best word pairs; when fewer
    than nine pairs exist the shortfall is subtracted as a bonus.

    Args: l1(dict), l2(dict): question vectors
    Returns: float: distance
    """
    TOP_PAIRS = 9
    ranked = getDirectProduct(l1, l2)
    total = 0
    for pair in ranked[:TOP_PAIRS]:
        total += 1 - pair['cosSim']  # * (1 / (i+1) ** 0.5)
    if len(ranked) < TOP_PAIRS:
        total += len(ranked) - TOP_PAIRS
    return total
# Render the clustering result as a text dendrogram
def getTextDendrogram(num, indent, Z, questions, n):
    """Render the clustering result as text, one list item per line.

    Args: num(float): node id (leaf if < n, merged cluster otherwise)
          indent: tree-drawing prefix accumulated so far
          Z: clustering (linkage) result
          questions(list): question texts
          n(int): number of questions
    Returns: list: dendrogram lines, top to bottom
    """
    nodeId = int(num)
    if nodeId < n:
        # Leaf: emit "index.question" behind the current tree prefix.
        return [indent + str(nodeId) + "." + questions[nodeId]]
    branchChars = "①②③④⑤⑥⑦⑧⑨"
    branchRank = int(n*2-num-1)
    # The first nine branches get numbered marks; deeper ones a plain tee.
    branchChar = branchChars[branchRank-1] if branchRank <= len(branchChars) else "┬"
    row = int(num-n)
    upper = getTextDendrogram(Z[row, 0], indent+branchChar, Z, questions, n)
    # For the lower child, earlier branch marks turn into vertical rules.
    lowerIndent = re.sub("[┬"+branchChars+"]", "│", indent).replace("└", " ") + "└"
    lower = getTextDendrogram(Z[row, 1], lowerIndent, Z, questions, n)
    return upper + lower
# Build the maximally-scattered question ordering
def scatterQuestion(num, Z, dMatrix, n):
    """Build a question ordering that keeps similar questions apart.

    Recursively interleaves the two child clusters of each linkage node,
    rotating them so their two closest members do not end up adjacent.

    Args: num(float): node id (leaf if < n, merged cluster otherwise)
          Z: clustering (linkage) result
          dMatrix: distance matrix
          n(int): number of questions
    Returns: list: scattered question indices
    """
    if(num < n):
        return [int(num)]
    else:
        v1 = scatterQuestion(Z[int(num-n), 0], Z, dMatrix, n)
        v2 = scatterQuestion(Z[int(num-n), 1], Z, dMatrix, n)
        i1 = 1
        i2 = 1
        # Small offset biasing the merge toward v1 on near-equal ratios
        # -- presumably a tie-break; confirm against intended behavior.
        d = 1.0 / (2.0 * (len(v1) + 1) * (len(v2) + 1))
        # Locate the index pair of the closest elements between the lists.
        dMatrixv1v2 = dMatrix[np.ix_(v1, v2)]
        # print(dMatrixv1v2)
        minIndex = np.unravel_index(np.argmin(dMatrixv1v2), dMatrixv1v2.shape)
        minIndexv1 = minIndex[0]
        minIndexv2 = minIndex[1]
        # Rotate v1 so its closest element comes first.
        v1 = v1[minIndexv1:] + v1[0:minIndexv1]
        # Rotate v2 so its closest element lands in the middle.
        v2LenHalf = int((len(v2)+1)/2)
        v2 = v2[minIndexv2:] + v2[0:minIndexv2]
        v2 = v2[v2LenHalf:] + v2[0:v2LenHalf]
        returnList = []
        # Merge the two lists proportionally so both stay evenly spread.
        while i1 <= len(v1) or i2 <= len(v2):
            if (i1 / (len(v1)+1)) > (i2 / (len(v2)+1) + d):
                returnList.append(v2[i2-1])
                i2 += 1
            else:
                returnList.append(v1[i1-1])
                i1 += 1
        return returnList
|
{"/quizScatterer/__main__.py": ["/quizScatterer/classes/qs.py"]}
|
29,629
|
miyax0227/quizScatterer
|
refs/heads/main
|
/quizScatterer/__main__.py
|
# -*- coding: utf-8 -*-
import os
from pprint import pprint
import sys
import numpy as np
from .classes.qs import *
# Take the input file name from the command line
filename = sys.argv[1]
# Read the file: one question per line
with open(filename) as f:
    questions = f.read().splitlines()
# Drop empty lines
questions = [q for q in questions if not q == ""]
# Normalize question text
questionsForVectors = [regulateQuestion(q) for q in questions]
# Build question vectors
vectors = [getVector(v) for v in questionsForVectors]
# Sample
# pprint(vectors[41])
# Alternative: TF-IDF summary vectors (currently unused)
# summaryVectors = getSummaryVector(vectors)
# pprint(summaryVectors)
# Alternative: word occurrence counts (currently unused)
# nounCountDict = getNounCountDict(vectors)
# Build the symmetric distance matrix
n = len(vectors)
dMatrix = np.zeros([n,n])
for i in range(n):
    for j in range(n):
        if i == j:
            dMatrix[i,j] = 0
        elif i > j:
            dMatrix[i,j] = getDistance(vectors[i],vectors[j])
            #dMatrix[i,j] = cos_sim(summaryVectors[i],summaryVectors[j])
        else:
            dMatrix[i,j] = getDistance(vectors[j],vectors[i])
# NOTE(review): `distance` and `linkage` come in through the star
# import above (presumably scipy); confirm in classes/qs.py.
dArray = distance.squareform(dMatrix)
# Hierarchical clustering (Ward linkage)
Z = linkage(dArray, method="ward")
#pprint(Z)
# Print the text dendrogram
for i in getTextDendrogram(n*2-2, "", Z, questions, n):
    print(i)
# Print the maximally-scattered ordering
for i in scatterQuestion(n*2-2, Z, dMatrix, n):
    print(str(i) + "." + questions[i])
|
{"/quizScatterer/__main__.py": ["/quizScatterer/classes/qs.py"]}
|
29,630
|
miyax0227/quizScatterer
|
refs/heads/main
|
/setup.py
|
from setuptools import setup

# Read runtime dependencies, closing the file promptly (the original
# open() call leaked the file handle).
with open('requirements.txt') as req_file:
    requirements = req_file.read().splitlines()

setup(
    name="quizScatterer",
    version='1.0',
    description='word2vecとクラスタリングでクイズの出題順を最適化したい',
    author='Miyax',
    url='https://github.com/miyax0227/quizScatterer',
    install_requires=requirements,
)
|
{"/quizScatterer/__main__.py": ["/quizScatterer/classes/qs.py"]}
|
29,670
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/samapp/models.py
|
from django.db import models
from django.core.validators import RegexValidator
from django import forms
from django.forms import ModelForm
from datetime import date
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.db import models
from django.utils import timezone
from django.utils.http import urlquote
from django.utils.translation import ugettext_lazy as _
from django.core.mail import send_mail
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.contrib.auth.models import BaseUserManager
from django.contrib.auth.models import User
# Create your models here.
class Author(models.Model):
    """Profile for a paper author, linked 1:1 to a Django auth User."""
    user = models.OneToOneField(User)  # pre-Django-2.0 style: no on_delete
    fname = models.CharField(max_length=255)  # first name
    lname = models.CharField(max_length=255)  # last name
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __unicode__(self):
        # Python 2 display name; __str__ below covers Python 3.
        return self.user.username
    def __str__(self):
        return str(self.fname) + ' ' + str(self.lname)
class PCM(models.Model):
    """Program Committee Member profile, linked 1:1 to an auth User."""
    user = models.OneToOneField(User)  # pre-Django-2.0 style: no on_delete
    fname = models.CharField(max_length=255)  # first name
    lname = models.CharField(max_length=255)  # last name
    created_at = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # Python 2 display name; __str__ below covers Python 3.
        return self.user.username
    def __str__(self):
        return str(self.fname) + ' ' + str(self.lname)
class PCC(models.Model):
    """Program Committee Chair profile, linked 1:1 to an auth User."""
    user = models.OneToOneField(User)  # pre-Django-2.0 style: no on_delete
    fname = models.CharField(max_length=255)  # first name
    lname = models.CharField(max_length=255)  # last name
    created_at = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # Python 2 display name; __str__ below covers Python 3.
        return self.user.username
    def __str__(self):
        return str(self.fname) + ' ' + str(self.lname)
class Samadmin(models.Model):
    """SAM administrator profile, linked 1:1 to an auth User."""
    user = models.OneToOneField(User)  # pre-Django-2.0 style: no on_delete
    fname = models.CharField(max_length=255)  # first name
    lname = models.CharField(max_length=255)  # last name
    created_at = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        # Python 2 display name; __str__ below covers Python 3.
        return self.user.username
    def __str__(self):
        return str(self.fname) + ' ' + str(self.lname)
class Paper(models.Model):
    """A submitted paper: file, metadata, reviewer assignment, rating."""
    formatChoices = (
        ('PDF', 'PDF'),
        ('Word', 'Word'),
    )
    contact_author = models.ForeignKey(Author)  # owning author profile
    submitter = models.CharField(max_length=255)
    title = models.CharField(max_length=255)
    version = models.FloatField()
    formats = models.CharField(max_length=5, choices=formatChoices) # find the enumerate field for word and PDF
    document = models.FileField()
    rate = models.FloatField(default=None, null=True)  # null until rated
    sub_date = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # Up to three assigned PCM reviewers; null until assignment happens.
    pcm1 = models.ForeignKey(PCM, null=True, related_name="pcm1")
    pcm2 = models.ForeignKey(PCM, null=True, related_name="pcm2")
    pcm3 = models.ForeignKey(PCM, null=True, related_name="pcm3")
    assigned = models.NullBooleanField(default=False)
    class Meta:
        # Default query ordering: descending title.
        ordering = ["-title"]
    def ratePaper(self, rate):
        """Persist *rate* as this paper's rating."""
        self.rate = rate
        self.save()
    def __str__(self):
        return self.title
class NotificationTemp(models.Model):
    """Editable message template, one per notification event type."""
    messageTypes = (
        ('paperSubmitted', 'paperSubmitted'),
        ('selectpaper', 'selectpaper'),
        ('assigntoReview', 'assigntoReview'),
        ('startReview', 'startReview'),
        ('reviewComplete', 'reviewComplete'),
        ('paperRate', 'paperRate'),
    )
    # Which event this template is for; choices restrict form input.
    title = models.CharField(max_length=500, choices=messageTypes)
    message = models.CharField(max_length=500)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        # Fixed: the original concatenated an empty string (''), gluing
        # title and message together with no separator.
        return self.title + ' ' + self.message
class Deadline(models.Model):
    """A per-phase deadline (submission, selection, assignment, ...)."""
    deadlineTypes = (
        ('paperSubmission', 'Paper Submission'),
        ('paperSelection', 'Paper Selection'),
        ('paperAssign', 'Paper Assign'),
        ('paperReview', 'Paper Review'),
        ('paperRate', 'Paper Rate'),
    )
    deadlineType = models.CharField(max_length=500, choices=deadlineTypes)
    deadline = models.DateTimeField(blank=True, null=True)  # null = unset
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        return str(self.deadline)
class Notification(models.Model):
    """A delivered notification, fanned out to one or more recipients."""
    title = models.CharField(max_length=500, verbose_name=u"Title")
    message = models.CharField(max_length=500)
    # NOTE(review): "Viewd?" looks like a typo for "Viewed?" -- left
    # unchanged because it is user-facing admin text.
    viewed = models.BooleanField(default=False, verbose_name=u"Viewd?")
    recipient = models.ManyToManyField(User)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        # Fixed: the original concatenated an empty string ('') -- no
        # separator between title and message.
        return self.title + ' ' + self.message

    def sendNotification(self, type, recipients):
        """Populate this notification from the template named *type* and
        deliver it to *recipients* (iterable of User).

        Raises NotificationTemp.DoesNotExist when no template exists
        for *type* (unchanged from the original behavior).
        """
        template = NotificationTemp.objects.get(title=type)
        self.title = type
        self.message = template.message
        # Save first so the m2m table has a primary key to reference.
        self.save()
        self.recipient.set(recipients)
        self.save()
class Review(models.Model):
    '''
    Model for Review: one PCM's graded review of one paper.
    author: smruthi
    '''
    paperId=models.ForeignKey(Paper)  # paper under review
    reviewer=models.ForeignKey(PCM)  # PCM who wrote the review
    grade=models.IntegerField(null=True)  # null until graded
    comments=models.TextField()
    submissiondate=models.DateTimeField(auto_now_add=True)
    submissionDeadline=models.DateTimeField(auto_now_add=True)# change this after deadlines are set
    def __str__(self):
        return str(self.pk)
    @classmethod
    def create(cls,paperId,grade,comments,reviewer):
        # Convenience constructor: build and persist in one call.
        reviewPaper=cls(paperId=paperId,grade=grade,comments=comments,reviewer=reviewer)
        reviewPaper.save()
        return reviewPaper
class Selection(models.Model):
    """Records that a PCM selected a paper for review."""
    PCM = models.ForeignKey(PCM)  # selecting committee member
    selected_papers = models.ForeignKey(Paper)  # the selected paper
    def __str__(self):
        return self.PCM.fname + " " + self.PCM.lname + " " + self.selected_papers.title
    @classmethod
    def create(cls, PCM, selected_papers):
        # Convenience constructor: build and persist in one call.
        selection = cls(PCM=PCM, selected_papers=selected_papers)
        selection.save()
        return selection
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,671
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/samapp/forms.py
|
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from .models import Author, Deadline,Paper,Review, NotificationTemp
class AuthorForm(forms.Form):
    """Author registration form: username, email, password pair, name."""
    username = forms.RegexField(regex=r'^\w+$', widget=forms.TextInput(attrs=dict(required=True, max_length=30)),
                                label=_("Username"), error_messages={
            'invalid': _("This value must contain only letters, numbers and underscores.")})
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(required=True, max_length=30)), label=_("Email address"))
    password1 = forms.CharField(
        widget=forms.PasswordInput(attrs=dict(required=True, max_length=30, render_value=False)), label=_("Password"))
    password2 = forms.CharField(
        widget=forms.PasswordInput(attrs=dict(required=True, max_length=30, render_value=False)),
        label=_("Password (again)"))
    fname = forms.CharField(max_length=25, label=_("First Name"))
    lname = forms.CharField(max_length=25, label=_("Last Name"))

    class Meta:
        # NOTE(review): Meta has no effect on a plain forms.Form (only
        # ModelForm reads it). Fixed: exclude was the string ('user'),
        # not a one-element tuple.
        model = Author
        exclude = ('user',)

    def clean_username(self):
        """Reject usernames that already exist (case-insensitive)."""
        try:
            User.objects.get(username__iexact=self.cleaned_data['username'])
        except User.DoesNotExist:
            return self.cleaned_data['username']
        raise forms.ValidationError(_("The username already exists. Please try another one."))

    def clean(self):
        """Ensure the two password fields match."""
        if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError(_("The two password fields did not match."))
        return self.cleaned_data

    def getUsername(self):
        # NOTE(review): returns the class-level Field object, not the
        # submitted value; use cleaned_data['username'] for the value.
        return self.username

    def getEmail(self):
        return self.email

    def getPassword(self):
        return self.password1
class AdminForm(forms.Form):
    """Account-creation form reused for admin/PCC/PCM registration.

    NOTE(review): identical to AuthorForm field-for-field; consider
    sharing a base class.
    """
    username = forms.RegexField(regex=r'^\w+$', widget=forms.TextInput(attrs=dict(required=True, max_length=30)),
                                label=_("Username"), error_messages={
            'invalid': _("This value must contain only letters, numbers and underscores.")})
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(required=True, max_length=30)), label=_("Email address"))
    password1 = forms.CharField(
        widget=forms.PasswordInput(attrs=dict(required=True, max_length=30, render_value=False)), label=_("Password"))
    password2 = forms.CharField(
        widget=forms.PasswordInput(attrs=dict(required=True, max_length=30, render_value=False)),
        label=_("Password (again)"))
    fname = forms.CharField(max_length=25, label=_("First Name"))
    lname = forms.CharField(max_length=25, label=_("Last Name"))

    class Meta:
        # NOTE(review): Meta has no effect on a plain forms.Form; also
        # references Author, not an admin model. Fixed: exclude was the
        # string ('user'), not a one-element tuple.
        model = Author
        exclude = ('user',)

    def clean_username(self):
        """Reject usernames that already exist (case-insensitive)."""
        try:
            User.objects.get(username__iexact=self.cleaned_data['username'])
        except User.DoesNotExist:
            return self.cleaned_data['username']
        raise forms.ValidationError(_("The username already exists. Please try another one."))

    def clean(self):
        """Ensure the two password fields match."""
        if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError(_("The two password fields did not match."))
        return self.cleaned_data

    def getUsername(self):
        # NOTE(review): returns the class-level Field object, not the
        # submitted value; use cleaned_data['username'] for the value.
        return self.username

    def getEmail(self):
        return self.email

    def getPassword(self):
        return self.password1
class UserProfileForm(forms.Form):
    """Edit form for an existing account; username is display-only."""
    username = forms.CharField(widget=forms.TextInput(attrs={'readonly':'readonly'}))
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(max_length=30)), label=_("Email address"))
    fname = forms.CharField(max_length=25)  # first name
    lname = forms.CharField(max_length=25)  # last name
class PaperForm(forms.Form):
    """Paper-submission form; validates the upload's extension against
    the declared format."""
    formatChoices = (
        ('PDF', 'PDF'),
        ('Word', 'Word'),
    )
    submitter = forms.CharField(max_length=255)
    title = forms.CharField(max_length=255)
    version = forms.FloatField()
    formats = forms.ChoiceField(choices=formatChoices, required=True)
    document = forms.FileField()

    def clean(self):
        """Cross-validate the chosen format against the file name."""
        try:
            document = self.cleaned_data['document']
        except KeyError:
            raise forms.ValidationError(_('Please upload a paper.'), code='invalid')
        name = document.name
        if self.cleaned_data['formats'] == 'PDF':
            # Fixed: was a substring test ('.pdf' in name) that accepted
            # names like "x.pdf.exe"; endswith matches the Word branch.
            if not name.endswith('.pdf'):
                raise forms.ValidationError(_("You have selected the PDF format. Please upload a PDF document or change the format"))
        elif self.cleaned_data['formats'] == 'Word':
            if not name.endswith('.docx') and not name.endswith('.doc'):
                raise forms.ValidationError(_("You have selected the Word format. Please upload a Word document or change the format."))
        else:
            raise forms.ValidationError(_("Please upload a pdf or word document."))
        return self.cleaned_data
class PccForm(forms.Form):
    """Single-field form for a PCC to enter a paper rating."""
    rate = forms.CharField(max_length=25)  # rating value as text
class NotifTemForm(forms.ModelForm):
    """ModelForm for creating/updating a notification template.

    (A block of commented-out hand-rolled fields/validation was removed;
    the ModelForm derives both from NotificationTemp.)
    """
    class Meta:
        model = NotificationTemp
        fields = ('title', 'message')
class DeadlineForm(forms.ModelForm):
    """ModelForm for creating/updating a phase deadline.

    (A block of commented-out hand-rolled fields/validation was removed;
    the ModelForm derives both from Deadline.)
    """
    class Meta:
        model = Deadline
        fields = ('deadlineType', 'deadline')
        # widgets = {'deadline': forms.DateInput(attrs={'class':'datepicker'})}
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,672
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/sam2017/urls.py
|
"""sam2017 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import patterns, include, url
from samapp.views import *
from django.contrib import admin
# NOTE(review): uses the pre-Django-1.8 `patterns()` helper and string
# view paths -- both removed in later Django versions.
urlpatterns = patterns('',
    url(r'^admin/',admin.site.urls),
    # Login serves the site root and /accounts/login/.
    url(r'^$', 'django.contrib.auth.views.login'),
    url(r'^logout/$', logout_page),
    url(r'^accounts/login/$', 'django.contrib.auth.views.login'),
    # If user is not login it will redirect to login page
    url(r'^register/$', register),
    url(r'^registeradmin/$', create_admin),
    url(r'^register/success/$', register_success),
    url(r'^home/$', home),
    url(r'^submitpaper/$', SubmitPaper),
    url(r'^notiftemp/$', NotifTemp),
    url(r'^deadline/$', Deadlines),
    url(r'^successpaper/$', successpaper),
    url(r'^SubmittedPapers/$', submittedpapers),
    url(r'^(?P<paper_id>[0-9]+)/downloadPDF/$', downloadPDF, name='downloadPDF'),
    url(r'^pcmpapers/$', pcmpapers),
    url(r'^pccpapers/$', pccpapers),
    # NOTE(review): show_notification is not defined in the visible
    # views module -- confirm it exists.
    url(r'^notifications/$', show_notification),
    url(r'^createpcc/$', createpcc),
    url(r'^createpcm/$', createpcm),
    url(r'^manageaccounts/$', manageaccounts),
    # url(r'^(?P<user_id>[0-9]+)/UpdateUser/$', UpdateUser, name='UpdateUser'),
    url(r'^(?P<user_id>[0-9]+)/UpdatePCC/$', UpdatePCC, name='UpdatePCC'),
    url(r'^(?P<user_id>[0-9]+)/UpdatePCM/$', UpdatePCM, name='UpdatePCM'),
    url(r'^(?P<paper_id>[0-9]+)/PCM_review/$', review_Rate_PCM, name='ReviewPCM'),
    url(r'^(?P<paper_id>[0-9]+)/PCCreview/$', review_PCC, name='ReviewPCC'),
    url(r'^Deadline_Error/$', Deadline_Error),
    url(r'^(?P<paper_id>[0-9]+)/assignpapers/$', assignpapers, name='assignpapers'),
    url(r'^successassignment/$', successassignment),
    url(r'^failassignment/$', failassignment),
    url(r'^assignments/$', assignments),
    url(r'^selections/$', selections),
    url(r'^paperselected/$', paperselected),
)
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,673
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/samapp/views.py
|
# views.py
from django.conf.global_settings import MEDIA_ROOT
from .forms import *
from django.contrib.auth.decorators import login_required, user_passes_test
from django.contrib.auth import logout
from django.views.decorators.csrf import csrf_protect
from django.shortcuts import render_to_response, get_object_or_404, render
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from .models import *
# from sam2017.settings import MEDIA_ROOT
from django.contrib.auth.models import User, Group
from datetime import datetime
import pytz
from django.utils import timezone
from django.db.models import Q
@csrf_protect
def register(request):
    """Self-service author registration.

    GET renders an empty AuthorForm; a valid POST creates the auth User
    and an Author profile, adds the user to the 'author' group, and
    redirects to the success page. An invalid POST re-renders the bound
    form with errors.
    """
    if request.method == 'POST':
        form = AuthorForm(request.POST)
        if form.is_valid():
            user = User.objects.create_user(
                username=form.cleaned_data['username'],
                password=form.cleaned_data['password1'],
                email=form.cleaned_data['email']
            )
            author = Author(
                user = user,
                fname = form.cleaned_data['fname'],
                lname = form.cleaned_data['lname']
            )
            # NOTE(review): create_user already saved the user; this
            # second save is redundant but harmless.
            user.save()
            author.save()
            # Grant author-group permissions (group assumed to exist).
            g = Group.objects.get(name='author')
            g.user_set.add(user)
            return HttpResponseRedirect('/register/success/')
    else:
        form = AuthorForm()
    variables = RequestContext(request, {
        'form': form
    })
    return render_to_response(
        'registration/register.html',
        variables,
    )
@csrf_protect
def create_admin(request):
    """Register a SAM admin account.

    Mirrors register(): valid POST creates the auth User and a Samadmin
    profile, adds the user to the 'admin' group, and redirects to the
    success page.
    """
    if request.method == 'POST':
        form = AdminForm(request.POST)
        if form.is_valid():
            user = User.objects.create_user(
                username=form.cleaned_data['username'],
                password=form.cleaned_data['password1'],
                email=form.cleaned_data['email']
            )
            samadmin = Samadmin(
                user=user,
                fname=form.cleaned_data['fname'],
                lname=form.cleaned_data['lname']
            )
            # NOTE(review): create_user already saved the user; this
            # second save is redundant but harmless.
            user.save()
            samadmin.save()
            # Grant admin-group permissions (group assumed to exist).
            g = Group.objects.get(name='admin')
            g.user_set.add(user)
            return HttpResponseRedirect('/register/success/')
    else:
        form = AdminForm()
    variables = RequestContext(request, {
        'form': form
    })
    return render_to_response(
        'registration/registeradmin.html',
        variables,
    )
def register_success(request):
    """Render the static registration-success page."""
    return render_to_response('registration/success.html')
def logout_page(request):
    """End the current session and send the user back to the login root."""
    logout(request)
    return HttpResponseRedirect('/')
def is_member2(user):
    """True when *user* belongs to the 'admin' group."""
    admin_groups = user.groups.filter(name='admin')
    return admin_groups.exists()
@user_passes_test(is_member2)
@login_required
def createpcc(request):
    """Admin-only creation of a PCC account.

    Valid POST creates the auth User and PCC profile, adds the user to
    the 'PCC' group, and redirects home.
    """
    if request.method == 'POST':
        form = AdminForm(request.POST)
        if form.is_valid():
            user = User.objects.create_user(
                username=form.cleaned_data['username'],
                password=form.cleaned_data['password1'],
                email=form.cleaned_data['email']
            )
            pcc = PCC(
                user=user,
                fname=form.cleaned_data['fname'],
                lname=form.cleaned_data['lname']
            )
            # NOTE(review): create_user already saved the user; this
            # second save is redundant but harmless.
            user.save()
            pcc.save()
            # Grant PCC-group permissions (group assumed to exist).
            g = Group.objects.get(name='PCC')
            g.user_set.add(user)
            return HttpResponseRedirect('/home/')
    else:
        form = AdminForm()
    variables = RequestContext(request, {
        'form': form
    })
    return render_to_response(
        'createpcc.html',
        variables,
    )
@user_passes_test(is_member2)
@login_required
def createpcm(request):
    """Admin-only creation of a PCM account.

    Valid POST creates the auth User and PCM profile, adds the user to
    the 'PCM' group, and redirects home.
    """
    if request.method == 'POST':
        form = AdminForm(request.POST)
        if form.is_valid():
            user = User.objects.create_user(
                username=form.cleaned_data['username'],
                password=form.cleaned_data['password1'],
                email=form.cleaned_data['email']
            )
            pcm = PCM(
                user=user,
                fname=form.cleaned_data['fname'],
                lname=form.cleaned_data['lname']
            )
            # NOTE(review): create_user already saved the user; this
            # second save is redundant but harmless.
            user.save()
            pcm.save()
            # Grant PCM-group permissions (group assumed to exist).
            g = Group.objects.get(name='PCM')
            g.user_set.add(user)
            return HttpResponseRedirect('/home/')
    else:
        form = AdminForm()
    variables = RequestContext(request, {
        'form': form
    })
    return render_to_response(
        'createpcm.html',
        variables,
    )
@user_passes_test(is_member2)
@login_required
def manageaccounts(request):
    """Admin page listing PCC/PCM accounts with (de)activation controls.

    POST buttons {Deactivate,Activate}{PCC,PCM} toggle User.is_active
    for the user pk submitted as RequestID (PCC) / RequestID1 (PCM).
    The four near-identical branches were consolidated into a table.
    """
    context = {
        'PCC': PCC.objects.all(),
        'PCM': PCM.objects.all(),
    }
    if request.method == 'POST':
        # (button name, form field carrying the target user pk, new state)
        actions = (
            ('DeactivatePCC', 'RequestID', False),
            ('ActivatePCC', 'RequestID', True),
            ('DeactivatePCM', 'RequestID1', False),
            ('ActivatePCM', 'RequestID1', True),
        )
        for button, field, active in actions:
            if button in request.POST:
                target = User.objects.get(pk=request.POST.get(field))
                target.is_active = active
                target.save()
                return HttpResponseRedirect('/manageaccounts/')
    return render_to_response('manageaccounts.html', context_instance=RequestContext(request, context))
@user_passes_test(is_member2)
@login_required
def UpdatePCC(request, user_id):
    """Admin edit page for a PCC account's email and name.

    Args: user_id: pk of the auth User backing the PCC profile.
    """
    user = User.objects.get(pk=user_id)
    userProfile = PCC.objects.get(user=user)
    # Raises Samadmin.DoesNotExist unless the requester has an admin
    # profile -- an implicit guard on top of the group check; kept.
    Samadmin.objects.get(user=request.user)
    if request.method == 'POST' and 'Save' in request.POST:
        form = UserProfileForm(request.POST)
        if form.is_valid():
            user.email = form.cleaned_data['email']
            userProfile.fname = form.cleaned_data['fname']
            userProfile.lname = form.cleaned_data['lname']
            user.save()
            userProfile.save()
            # (Removed an unused RequestContext local from the original.)
            return HttpResponseRedirect('/manageaccounts/')
    else:
        # Pre-fill the form with the account's current values.
        form = UserProfileForm()
        form.fields['username'].initial = user.username
        form.fields['email'].initial = user.email
        form.fields['fname'].initial = userProfile.fname
        form.fields['lname'].initial = userProfile.lname
    return render_to_response('UpdateUser.html', context_instance=RequestContext(request,
                                                                                 {'form': form}))
@user_passes_test(is_member2)
@login_required
def UpdatePCM(request, user_id):
    """Admin edit page for a PCM account's email and name.

    Args: user_id: pk of the auth User backing the PCM profile.
    """
    user = User.objects.get(pk=user_id)
    userProfile = PCM.objects.get(user=user)
    # Raises Samadmin.DoesNotExist unless the requester has an admin
    # profile -- an implicit guard on top of the group check; kept.
    Samadmin.objects.get(user=request.user)
    if request.method == 'POST' and 'Save' in request.POST:
        form = UserProfileForm(request.POST)
        if form.is_valid():
            user.email = form.cleaned_data['email']
            userProfile.fname = form.cleaned_data['fname']
            userProfile.lname = form.cleaned_data['lname']
            user.save()
            userProfile.save()
            # (Removed an unused RequestContext local from the original.)
            return HttpResponseRedirect('/manageaccounts/')
    else:
        # Pre-fill the form with the account's current values.
        form = UserProfileForm()
        form.fields['username'].initial = user.username
        form.fields['email'].initial = user.email
        form.fields['fname'].initial = userProfile.fname
        form.fields['lname'].initial = userProfile.lname
    return render_to_response('UpdateUser.html', context_instance=RequestContext(request,
                                                                                 {'form': form}))
@login_required
def assignments(request):
    """List papers assigned (as reviewer 1-3) to the logged-in PCM."""
    reviewer = PCM.objects.get(user=request.user)
    assigned_q = Q(pcm1=reviewer) | Q(pcm2=reviewer) | Q(pcm3=reviewer)
    papers = Paper.objects.filter(assigned_q)
    return render_to_response('assignments.html', {'paper': papers})
@login_required
def selections(request):
    """Show every recorded PCM paper selection."""
    ctx = {'paper': Selection.objects.all()}
    return render_to_response('selections.html', ctx)
@login_required
def home(request):
    """Render the landing page for the logged-in user."""
    return render_to_response('home.html', {'user': request.user})
@login_required
def Deadline_Error(request):
    """Inform the user that a deadline has already passed."""
    return render_to_response('Deadline_Error.html', {'user': request.user})
@login_required
def NotifTemp(request):
    """Create or replace the notification template for a given title.

    A valid POST with 'submittemp' deletes any existing template rows
    with the same title, saves the new one, and redirects back; an
    invalid POST re-renders the bound form with errors.
    """
    if request.method == 'POST' and 'submittemp' in request.POST:
        form = NotifTemForm(request.POST)
        if form.is_valid():
            # At most one template per title: drop any previous rows.
            # (Replaces the original's duplicated create/replace branches;
            # delete() on an empty queryset is a no-op.)
            NotificationTemp.objects.filter(title=form.cleaned_data['title']).delete()
            form.save()
            return HttpResponseRedirect('/notiftemp/')
    else:
        form = NotifTemForm()
    return render_to_response('notiftemp.html', context_instance=RequestContext(request, {'form': form}))
@login_required
def Deadlines(request):
    """Create or replace the deadline for a given deadline type.

    A valid POST with 'submitdeadline' deletes any existing Deadline
    rows of the same type, saves the new one, and redirects back; an
    invalid POST re-renders the bound form with errors.
    """
    if request.method == 'POST' and 'submitdeadline' in request.POST:
        form = DeadlineForm(request.POST)
        if form.is_valid():
            # At most one deadline per type: drop any previous rows.
            # (Replaces the original's duplicated create/replace branches;
            # delete() on an empty queryset is a no-op.)
            Deadline.objects.filter(deadlineType=form.cleaned_data['deadlineType']).delete()
            form.save()
            return HttpResponseRedirect('/deadline/')
    else:
        form = DeadlineForm()
    return render_to_response('deadline.html', context_instance=RequestContext(request, {'form': form}))
@login_required
def SubmitPaper(request):
    """Author paper-submission view.

    A valid POST before the 'paperSubmission' deadline saves the Paper,
    notifies the submitter, and redirects to the submitted-papers list;
    after the deadline it redirects to the deadline-error page.
    """
    user = request.user
    author = Author.objects.get(user=user)
    utc=pytz.UTC
    # current_pcc = User.objects.filter(groups__name='PCC')
    if request.method == 'POST' and 'submitpaper' in request.POST:
        form = PaperForm(request.POST, request.FILES)
        if form.is_valid():
            paper = Paper(contact_author = author,
                          title=form.cleaned_data['title'],
                          submitter=form.cleaned_data['submitter'],
                          version=form.cleaned_data['version'],
                          formats=form.cleaned_data['formats'],
                          document=form.cleaned_data['document']
                          )
            # check if the deadline for papersubmission--To-Do
            # 404s when no 'paperSubmission' deadline is configured.
            deadlines =get_object_or_404(Deadline, deadlineType='paperSubmission')
            #deadline_val=deadlines[0]
            # deadline_val=deadlines[0]
            # NOTE(review): localize() stamps the server's local wall
            # time as UTC -- correct only if the server clock is UTC;
            # timezone.now() would be safer. Confirm before changing.
            submissiondate=utc.localize(datetime.now())
            # print('if ',str(submissiondate) > str(deadline_val))
            if submissiondate < deadlines.deadline:
                paper.save()
                notification = Notification()
                recipients = [user]
                notification.sendNotification("paperSubmitted", recipients)
                return HttpResponseRedirect('/SubmittedPapers/')
            else:
                return HttpResponseRedirect('/Deadline_Error/')
    else:
        form = PaperForm()
    variables = RequestContext(request, {'form': form })
    return render_to_response('submitpaper.html', context_instance=RequestContext(request,{'form': form}))
def successpaper(request):
    """Render the static paper-submitted confirmation page."""
    return render_to_response('successpaper.html')
@login_required
def submittedpapers(request):
    """List all papers; the template filters by the logged-in author.

    Fixes: `context` is now initialized before the try block (the
    original raised NameError at render time if the exception fired),
    and the leftover per-paper debug prints and unused locals were
    removed.
    """
    author = Author.objects.get(user=request.user)
    context = {'authorId': author.id}
    try:
        context['papers'] = Paper.objects.all()
    except ObjectDoesNotExist:
        # Best-effort: render with no paper list rather than crash.
        pass
    return render_to_response('SubmittedPapers.html', context)
def is_member(user):
    """True when *user* belongs to the 'PCM' group."""
    pcm_groups = user.groups.filter(name='PCM')
    return pcm_groups.exists()
def is_member1(user):
    """True when *user* belongs to the 'PCC' group."""
    pcc_groups = user.groups.filter(name='PCC')
    return pcc_groups.exists()
def paperselected(request):
    """Render the static already-selected info page."""
    return render_to_response('paperselected.html')
@user_passes_test(is_member)
@login_required
def pcmpapers(request):
    """PCM paper list with a per-paper 'Selected' action.

    POST with a paper pk in RequestID: if this PCM already selected
    the paper, redirect to an info page; otherwise, if the 'Selected'
    button was pressed, record the Selection and reload the list.
    """
    user = request.user
    pcm = PCM.objects.get(user=user)
    paper_info=Paper.objects.all()
    # NOTE(review): paper_data is built but never used.
    paper_data={
        'paper_detail':paper_info
    }
    context = {
        'pcm': pcm,
        'paper': paper_info,
    }
    if request.method=='POST':#request.POST.get('Rate'):
        paper = request.POST.get('RequestID')
        paper1 = Paper.objects.get(pk=paper)
        # Existing selections by this PCM for this paper (0 or 1 rows).
        selectionlist = Selection.objects.all().filter(PCM=pcm, selected_papers=paper1)
        context['slist'] = selectionlist
        if selectionlist:
            return HttpResponseRedirect('/paperselected/')
        elif not selectionlist and 'Selected' in request.POST:
            selection = Selection.create(pcm, paper1)
            return HttpResponseRedirect('/pcmpapers/')
        else:
            variables = RequestContext(request)
            return render_to_response('pcmpapers.html', context, variables)
    return render_to_response('pcmpapers.html', context)
@user_passes_test(is_member1)
@login_required
def pccpapers(request):
    """Show every submitted paper to the programme committee chair (PCC).

    Renders 'pccpapers.html' with the full paper queryset under 'paper'.
    """
    # The original built (and immediately discarded) a RequestContext and a
    # 'paper_data' dict; only the 'paper' entry is ever consumed, so the
    # dead locals are dropped here.
    paper_info = Paper.objects.all()
    context = {
        'paper': paper_info,
    }
    return render_to_response('pccpapers.html', context)
@user_passes_test(is_member1)
@csrf_protect
@login_required
def assignpapers(request, paper_id):
    """Assign three distinct PCM reviewers to a paper (PCC-only view).

    GET renders the assignment form; a POST with 'Assigned' stores the three
    reviewers on the paper, marks it assigned, and notifies the reviewers.
    Redirects to the failure page when the reviewers are not all distinct.
    """
    doc = Paper.objects.get(pk=paper_id)
    pcms = PCM.objects.all()
    selection = Selection.objects.all()
    context = {
        'selection': selection,
        'pcm': pcms,
        'paper': doc
    }
    if request.method == 'POST' and 'Assigned' in request.POST:
        pcm1 = request.POST.get('PCMa')
        pcm1_a = PCM.objects.get(pk=pcm1)
        pcm2 = request.POST.get('PCMb')
        pcm2_a = PCM.objects.get(pk=pcm2)
        # BUG FIX: the original read 'PCMa' again here, so pcm3 always
        # equalled pcm1 and every assignment ended at the failure page.
        pcm3 = request.POST.get('PCMc')
        pcm3_a = PCM.objects.get(pk=pcm3)
        # BUG FIX: ``pcm1 != pcm2 != pcm3`` never compares pcm1 with pcm3;
        # require all three reviewer ids to be pairwise distinct.
        if len({pcm1, pcm2, pcm3}) == 3:
            doc.pcm1 = pcm1_a
            doc.pcm2 = pcm2_a
            doc.pcm3 = pcm3_a
            doc.assigned = True
            doc.save()
            # Notify the three assigned reviewers (objects already fetched
            # above — the original re-fetched each one a second time).
            notification = Notification()
            recipients = [pcm1_a.user, pcm2_a.user, pcm3_a.user]
            notification.sendNotification("assigntoReview", recipients)
            return HttpResponseRedirect('../../pccpapers/')
        else:
            return HttpResponseRedirect('../../failassignment/')
    return render_to_response('assignpapers.html', context_instance=RequestContext(request, context))
def successassignment(request):
    """Render the page confirming a successful reviewer assignment."""
    template_name = 'successassignment.html'
    return render_to_response(template_name)
def failassignment(request):
    """Render the page shown when a reviewer assignment fails."""
    template_name = 'failassignment.html'
    return render_to_response(template_name)
@login_required
def downloadPDF(request, paper_id):
    """Stream a paper's document to the client with the right content type.

    :param paper_id: primary key of the Paper whose file is downloaded
    :return: HttpResponse with 'application/pdf' for PDF papers, or the
        docx MIME type for Word papers.
    """
    doc = Paper.objects.get(pk=paper_id)
    if doc.formats == 'PDF':
        content_type = 'application/pdf'
    else:
        content_type = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
    # BUG FIX: the original opened the file without ever closing it,
    # leaking a file handle per download; 'with' guarantees the close.
    with open(MEDIA_ROOT + '/' + doc.document.name, 'rb') as document_file:
        file_data = document_file.read()
    return HttpResponse(file_data, content_type=content_type)
@login_required
def show_notification(request):
    """Render every notification addressed to the logged-in user."""
    current_user = get_object_or_404(User, pk=request.user.pk)
    user_notifications = Notification.objects.filter(recipient=request.user.pk)
    return render(request, 'view-notifications.html', {'notifications': user_notifications, 'user': current_user})
@login_required
def review_Rate_PCM(request, paper_id):
    """PCM review view: submit a grade and comments for a paper.

    Before the 'paperReview' deadline a POST creates a Review row for the
    logged-in PCM member; past the deadline the user is redirected to the
    deadline-error page. A GET renders the review form.

    :param paper_id: primary key of the Paper being reviewed
    """
    doc = Paper.objects.get(pk=paper_id)
    context = RequestContext(request)
    paper_info = Paper.objects.get(pk=doc.id)
    utc = pytz.UTC
    # BUG FIX: Deadline.objects.filter(...) returns a QuerySet, and the
    # original accessed ``deadlines.deadline`` on it — an AttributeError.
    # Fetch the first matching Deadline row instead.
    deadline_obj = Deadline.objects.filter(deadlineType='paperReview').first()
    currentDate = utc.localize(datetime.now())
    if request.method == 'POST' or '/PCM_review/' in request.POST:
        if deadline_obj is not None and currentDate < deadline_obj.deadline:
            grade = request.POST.get('rating')
            comments = request.POST.get('comments')
            pcm = PCM.objects.get(user=request.user)
            review1 = Review.create(paper_info, grade, comments, pcm)
            return render_to_response('Home.html', context)
        else:
            # Deadline passed (or no deadline configured).
            return HttpResponseRedirect('/Deadline_Error/')
    else:
        context['title'] = paper_info
        return render_to_response('PCM_review.html', context)
@user_passes_test(is_member1)
@login_required
def review_PCC(request, paper_id):
    """PCC view over a reviewed paper.

    A POST with 'Rate' stores the chair's rating on the paper; a POST with
    'Conflict' voids the PCM review by zeroing its grade. Both actions are
    only allowed before the 'paperRate' deadline; otherwise the user is
    redirected to the deadline-error page. A GET renders the form.
    """
    utc = pytz.UTC
    doc = Paper.objects.get(pk=paper_id)
    context = RequestContext(request)
    # NOTE(review): this looks up a Review by the *paper's* primary key —
    # presumably review ids happen to line up with paper ids; verify.
    review = Review.objects.get(id=doc.id)
    # BUG FIX: the original read ``deadlines.deadline`` on a QuerySet inside
    # an unconditional debug print, raising AttributeError on every request
    # (including plain GETs). Fetch the first Deadline row instead.
    deadline_obj = Deadline.objects.filter(deadlineType='paperRate').first()
    currentDate = utc.localize(datetime.now())
    if request.method == 'POST' and 'Rate' in request.POST:
        if deadline_obj is not None and currentDate < deadline_obj.deadline:
            form = PccForm(request.POST)
            if form.is_valid():
                rate = form.cleaned_data['rate']
                doc.rate = rate
                doc.save()
                return HttpResponseRedirect('/pccpapers')
        else:
            return HttpResponseRedirect('/Deadline_Error/')
    elif request.method == 'POST' and 'Conflict' in request.POST:
        if deadline_obj is not None and currentDate < deadline_obj.deadline:
            # A conflict of interest voids the review by zeroing its grade.
            review1 = Review.objects.get(id=review.id)
            review1.grade = 0
            review1.save()
            return HttpResponseRedirect('/pccpapers/')
        else:
            return HttpResponseRedirect('/Deadline_Error/')
    else:
        form = PccForm()
    return render_to_response('PCCreview.html', context_instance=RequestContext(request, {'form': form}))
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,674
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/samapp/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-12-05 17:16
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the samapp conference app."""
    # First migration of this app.
    initial = True
    dependencies = [
        # Author/PCC/PCM/Samadmin profiles each link one-to-one to the
        # project's (possibly swapped) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Author: per-user author profile.
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fname', models.CharField(max_length=255)),
                ('lname', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Deadline: one row per workflow deadline type (submission,
        # selection, assignment, review, rating).
        migrations.CreateModel(
            name='Deadline',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deadlineType', models.CharField(choices=[('paperSubmission', 'Paper Submission'), ('paperSelection', 'Paper Selection'), ('paperAssign', 'Paper Assign'), ('paperReview', 'Paper Review'), ('paperRate', 'Paper Rate')], max_length=500)),
                ('deadline', models.DateTimeField(blank=True, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        # Notification: a message fanned out to many recipient users.
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=500, verbose_name='Title')),
                ('message', models.CharField(max_length=500)),
                ('viewed', models.BooleanField(default=False, verbose_name='Viewd?')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('recipient', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # NotificationTemp: message templates keyed by workflow event.
        migrations.CreateModel(
            name='NotificationTemp',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(choices=[('paperSubmitted', 'paperSubmitted'), ('selectpaper', 'selectpaper'), ('assigntoReview', 'assigntoReview'), ('startReview', 'startReview'), ('reviewComplete', 'reviewComplete'), ('paperRate', 'paperRate')], max_length=500)),
                ('message', models.CharField(max_length=500)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        # Paper: a submitted manuscript; pcm1..pcm3 reviewers are added below.
        migrations.CreateModel(
            name='Paper',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('submitter', models.CharField(max_length=255)),
                ('title', models.CharField(max_length=255)),
                ('version', models.FloatField()),
                ('formats', models.CharField(choices=[('PDF', 'PDF'), ('Word', 'Word')], max_length=5)),
                ('document', models.FileField(upload_to='')),
                ('rate', models.FloatField(default=None, null=True)),
                ('sub_date', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('assigned', models.NullBooleanField(default=False)),
                ('contact_author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='samapp.Author')),
            ],
            options={
                'ordering': ['-title'],
            },
        ),
        # PCC: programme committee chair profile.
        migrations.CreateModel(
            name='PCC',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fname', models.CharField(max_length=255)),
                ('lname', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # PCM: programme committee member (reviewer) profile.
        migrations.CreateModel(
            name='PCM',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fname', models.CharField(max_length=255)),
                ('lname', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Review: a PCM member's graded review of a paper.
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('grade', models.IntegerField(null=True)),
                ('comments', models.TextField()),
                ('submissiondate', models.DateTimeField(auto_now_add=True)),
                ('submissionDeadline', models.DateTimeField(auto_now_add=True)),
                ('paperId', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='samapp.Paper')),
                ('reviewer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='samapp.PCM')),
            ],
        ),
        # Samadmin: administrator profile.
        migrations.CreateModel(
            name='Samadmin',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fname', models.CharField(max_length=255)),
                ('lname', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Selection: a PCM member's expressed interest in reviewing a paper.
        migrations.CreateModel(
            name='Selection',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('PCM', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='samapp.PCM')),
                ('selected_papers', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='samapp.Paper')),
            ],
        ),
        # The three assigned reviewers, added after both Paper and PCM exist.
        migrations.AddField(
            model_name='paper',
            name='pcm1',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pcm1', to='samapp.PCM'),
        ),
        migrations.AddField(
            model_name='paper',
            name='pcm2',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pcm2', to='samapp.PCM'),
        ),
        migrations.AddField(
            model_name='paper',
            name='pcm3',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pcm3', to='samapp.PCM'),
        ),
    ]
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,675
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/samapp/migrations/0002_auto_20161205_1216.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-10-27 00:43
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import User, Group
from samapp.models import Paper
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
def add_group_permissions(apps, schema_editor):
    """Seed the auth groups (PCC, PCM, admin, author) with their permissions.

    Runs as a RunPython data migration. Permissions are only created the
    first time each group is created, so re-running is a no-op.

    NOTE(review): this uses the real imported models (Group, Permission,
    ContentType, Paper) rather than the historical models from
    ``apps.get_model(...)``; that works today but can break if these models
    change in later migrations.
    """
    # CONSISTENCY FIX: parameters renamed to Django's conventional
    # ``apps, schema_editor`` (they are passed positionally by RunPython,
    # so this is interface-safe).
    # PCC may read paper submissions.
    PCC, created = Group.objects.get_or_create(name='PCC')
    if created:
        content_type = ContentType.objects.get_for_model(Paper)
        permission = Permission.objects.create(
            codename='can_read',
            name='Can see paper submissions',
            content_type=content_type,
        )
        PCC.permissions.add(permission)
    # PCM may view paper submissions (to review them).
    PCM, created = Group.objects.get_or_create(name='PCM')
    if created:
        content_type1 = ContentType.objects.get_for_model(Paper)
        permission = Permission.objects.create(
            codename="can_view",
            name="Can View Paper Submissions",
            content_type=content_type1,
        )
        PCM.permissions.add(permission)
    # admin may assign papers and update users.
    admin, created = Group.objects.get_or_create(name='admin')
    if created:
        content_type2 = ContentType.objects.get_for_model(Paper)
        permission = Permission.objects.create(
            codename="can_assign",
            name="Can assign Paper Submissions",
            content_type=content_type2,
        )
        content_type3 = ContentType.objects.get_for_model(User)
        permission1 = Permission.objects.create(
            codename="can_change",
            name="Can update users",
            content_type=content_type3,
        )
        admin.permissions.add(permission, permission1)
    # Plain author group with no extra permissions.
    author, created = Group.objects.get_or_create(name='author')
class Migration(migrations.Migration):
    """Data migration that seeds the auth groups and their permissions."""
    dependencies = [
        ('samapp', '0001_initial'),
    ]
    operations = [
        # NOTE(review): no reverse function is supplied, so this migration
        # cannot be unapplied; passing migrations.RunPython.noop as the
        # second argument would make it reversible.
        migrations.RunPython(add_group_permissions),
    ]
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,676
|
nxs5899/SAM2017_G4
|
refs/heads/master
|
/SAM2017_G4/sam2017/samapp/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
# Register your models here.
from .models import *
from .forms import AuthorForm, PaperForm
class AuthorAdmin(admin.ModelAdmin):
    # NOTE(review): an inner ``Meta`` class has no effect on a ModelAdmin —
    # the model is bound when passed to ``admin.site.register`` — so this
    # admin currently behaves like a default ModelAdmin.
    class Meta:
        model = Author
class PaperAdmin(admin.ModelAdmin):
    # NOTE(review): an inner ``Meta`` class has no effect on a ModelAdmin,
    # and this class is never passed to ``admin.site.register`` below —
    # Paper is registered with the default ModelAdmin instead.
    class Meta:
        model = Paper
# Register every samapp model with the Django admin site.
admin.site.register(Author, AuthorAdmin)
# BUG FIX: Paper was registered without the PaperAdmin declared above,
# leaving that ModelAdmin class unused; bind it here.
admin.site.register(Paper, PaperAdmin)
admin.site.register(Review)
admin.site.register(Samadmin)
admin.site.register(PCC)
admin.site.register(PCM)
admin.site.register(Deadline)
admin.site.register(Notification)
admin.site.register(NotificationTemp)
admin.site.register(Selection)
|
{"/SAM2017_G4/sam2017/samapp/forms.py": ["/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/views.py": ["/SAM2017_G4/sam2017/samapp/forms.py", "/SAM2017_G4/sam2017/samapp/models.py"], "/SAM2017_G4/sam2017/samapp/admin.py": ["/SAM2017_G4/sam2017/samapp/models.py", "/SAM2017_G4/sam2017/samapp/forms.py"]}
|
29,691
|
SungjiCho/ipsi
|
refs/heads/master
|
/suneung/apps.py
|
from django.apps import AppConfig
class SuneungConfig(AppConfig):
    """Django app configuration for the 'suneung' app."""
    name = 'suneung'
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,692
|
SungjiCho/ipsi
|
refs/heads/master
|
/susi/migrations/0001_initial.py
|
# Generated by Django 3.0.5 on 2020-06-05 06:34
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the susi (early-admission) app."""
    initial = True
    dependencies = [
        # Susi rows reference University and SusiMajorBlock.
        ('university', '0001_initial'),
    ]
    operations = [
        # Susi: one early-admission track per university and year.
        migrations.CreateModel(
            name='Susi',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=31, verbose_name='전형명')),
                ('year', models.IntegerField(choices=[(2021, 2021)], verbose_name='학년도')),
                ('university', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='susis', to='university.University', verbose_name='대학')),
            ],
            options={
                'verbose_name': '수시전형',
                'verbose_name_plural': '수시전형',
            },
        ),
        # SusiSchedule: a dated event within a Susi track for a major block.
        migrations.CreateModel(
            name='SusiSchedule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=255, verbose_name='설명')),
                ('start_date', models.DateTimeField(verbose_name='시작시간')),
                ('end_date', models.DateTimeField(verbose_name='종료시간')),
                ('major_block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='susi_schedules', to='university.SusiMajorBlock', verbose_name='학과 블록')),
                ('susi', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='susi_schedules', to='susi.Susi', verbose_name='수시전형 종류')),
            ],
            options={
                'verbose_name': '수시전형 일정',
                'verbose_name_plural': '수시전형 일정',
            },
        ),
    ]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,693
|
SungjiCho/ipsi
|
refs/heads/master
|
/review/views.py
|
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from university.models import University
from review.serializers import ReviewSerializer
class ReviewList(APIView):
    '''
    대학 리뷰 URL 목록을 반환하는 API
    ---
    ## `/review/`
    ## OUTPUT
    - 'name': 대학 이름
    - 'review_url': 대학 리뷰 URL
    '''
    def get(self, request):
        # Serialize the full university table and hand it back.
        queryset = University.objects.all()
        payload = ReviewSerializer(queryset, many=True).data
        return Response(payload)
class ReviewDetail(APIView):
    '''
    특정 대학의 리뷰 URL을 반환하는 API
    ---
    ## `/review/<univ>`
    ## OUTPUT
    - 'name': 대학 이름
    - 'review_url': 대학 리뷰 URL
    '''
    def get_object(self, univ):
        # Resolve a university by its exact name; 404 when not found.
        try:
            return University.objects.get(name=univ)
        except University.DoesNotExist:
            raise Http404
    def get(self, request, univ):
        # BUG FIX: the original called the bare name ``get_object(univ)``,
        # which raised NameError at request time; it is a method and must
        # be invoked on ``self``.
        university = self.get_object(univ)
        serializer = ReviewSerializer(university)
        return Response(serializer.data)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,694
|
SungjiCho/ipsi
|
refs/heads/master
|
/review/migrations/0002_auto_20200605_0302.py
|
# Generated by Django 3.0.5 on 2020-06-04 18:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: widen/alter the Review.url field to URLField(50)."""
    dependencies = [
        ('review', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='review',
            name='url',
            field=models.URLField(max_length=50),
        ),
    ]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,695
|
SungjiCho/ipsi
|
refs/heads/master
|
/jeongsi/serializers.py
|
from rest_framework import serializers
from jeongsi.models import *
class JeongsiSerializer(serializers.ModelSerializer):
    """Serialize a Jeongsi (regular admission) row: year and gun only."""
    class Meta:
        model = Jeongsi
        fields = ('year', 'gun')
class JeongsiScheduleSerializer(serializers.ModelSerializer):
    """Serialize a JeongsiSchedule: description plus start/end datetimes."""
    class Meta:
        model = JeongsiSchedule
        fields = ('description', 'start_date', 'end_date')
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,696
|
SungjiCho/ipsi
|
refs/heads/master
|
/susi/apps.py
|
from django.apps import AppConfig
class SusiConfig(AppConfig):
    """Django app configuration for the 'susi' app."""
    name = 'susi'
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,697
|
SungjiCho/ipsi
|
refs/heads/master
|
/university/views.py
|
from django.db.models import Q
from rest_framework.views import APIView
from rest_framework.response import Response
from university.models import *
from university.serializers import *
from susi.models import *
from susi.serializers import *
from jeongsi.models import *
from jeongsi.serializers import *
class UniversityList(APIView):
    '''
    대학 목록을 반환하는 API
    ---
    ## `/`
    ## OUTPUT
    - 'name': 대학 이름
    - 'logo': 대학 로고 파일의 이름
    - 'susis':
        - 'year': 학년도
        - 'name': 전형 이름
        - 'susi_major_blocks':
            - 'name': 수시 학과 블록 이름
    - 'jeongsis':
        - 'year': 학년도
        - 'gun': 군
        - 'jeongsi_major_blocks':
            - 'name': 정시 학과 블록 이름
    '''
    def get(self, request):
        # Serialize every university together with its nested admissions data.
        all_universities = University.objects.all()
        payload = UniversitySerializer(all_universities, many=True).data
        return Response(payload)
class UniversitySelect(APIView):
    '''
    입시 일정을 반환하는 API
    ---
    ## `/select/?univ={대학}&sj={수시/정시}&jh={(수시)전형명}&gun={(정시)군}&block={학과블록}`
    ## INPUT
    - &로 구분되는 query parameter (ex: /select/?univ=서울대학교&sj=수시&jh=일반전형&block=의과대학,수의과대학,치의과대학)
    - 'univ': 대학 이름 (ex: 서울대학교)
    - 'sj': 수시 or 정시
    - 'jh': (수시) 전형 이름 (ex: 일반전형)
    - 'gun': (정시) 군 (ex: 가군)
    - 'block': 학과 블록 이름 (ex: 의과대학,수의과대학,치의과대학)
    ## OUTPUT
    - 'description': 일정 이름
    - 'start_date' : 일정 시작 시간
    - 'end_date' : 일정 종료 시간
    '''
    def get(self, request):
        # Small helper producing DRF-style error payloads.
        def error_msg(message):
            return {'detail': message}
        univ = sj = jh = gun = block = None
        # Read the query parameters (each optional at this point).
        if 'univ' in request.GET:
            univ = request.GET['univ']
        if 'sj' in request.GET:
            sj = request.GET['sj']
        if 'jh' in request.GET:
            jh = request.GET['jh']
        if 'gun' in request.GET:
            gun = request.GET['gun']
        if 'block' in request.GET:
            block = request.GET['block']
        # Validate: emit an error payload when a required parameter is
        # missing or inconsistent with the chosen admission type.
        if not univ:
            return Response(error_msg("requires 'univ' parameter"))
        if not sj:
            return Response(error_msg("requires 'sj' parameter"))
        if sj != '수시' and sj != '정시':
            return Response(error_msg("wrong 'sj' parameter: it must be '수시' or '정시'"))
        if sj == '수시' and not jh:
            return Response(error_msg("sj='수시' requires 'jh' parameter"))
        if sj == '정시' and not gun:
            return Response(error_msg("sj='정시' requires 'gun' parameter"))
        if not block:
            return Response(error_msg("requires 'block' parameter"))
        # Look up the matching schedules for the validated parameters;
        # an empty result simply serializes to an empty list.
        if sj == '수시':
            schedules = SusiSchedule.objects.filter(Q(susi__university__name=univ) &
                                                    Q(susi__name=jh) &
                                                    Q(major_block__name=block))
            serializer = SusiScheduleSerializer(schedules, many=True)
            return Response(serializer.data)
        elif sj == '정시':
            schedules = JeongsiSchedule.objects.filter(Q(jeongsi__university__name=univ) &
                                                       Q(jeongsi__gun=gun) &
                                                       Q(major_block__name=block))
            serializer = JeongsiScheduleSerializer(schedules, many=True)
            return Response(serializer.data)
class UniversitySelectAll(APIView):
    '''
    여러 입시 일정을 한 번에 반환하는 API
    ---
    ## `/select/?num={대학개수}&univ0={대학0}&sj0={수시/정시0}&jh0={(수시)전형명0}&gun0={(정시)군0}&block0={학과블록0}&univ1={대학1}&sj1={수시/정시1}&jh0={(수시)전형명1}&gun0={(정시)군1}&block0={학과블록1}&...`
    ## INPUT
    - &로 구분되는 query parameter (ex: /selectall/?num=2&univ0=서울대학교&sj0=수시&jh0=일반전형&block0=의과대학,수의과대학,치의과대학&univ1=서울대학교&sj1=정시&gun1=가군&block1=전 학과)
    - 'num': 입력하는 대학의 개수 (ex: 2)
    - 'univ0': 0번째 대학 이름
    - 'sj0': 0번째 수시 or 정시
    - 'jh0': 0번째 (수시) 전형 이름
    - 'gun0': 0번째 (정시) 군
    - 'block0': 0번째 학과 블록 이름
    - ... (num 개수만큼 입력)
    ## OUTPUT
    - 'num': 대학 인덱스
    - 'univ': 대학 이름
    - 'sj': 수시 or 정시
    - 'jh': (수시) 전형 이름
    - 'gun': (정시) 군
    - 'block': 학과 블록 이름
    - 'schedules': 일정 목록
        - 'description': 일정 이름
        - 'start_date' : 일정 시작 시간
        - 'end_date' : 일정 종료 시간
    '''
    def get(self, request):
        # Small helper producing DRF-style error payloads.
        def error_msg(message):
            return {'detail': message}
        # Number of universities requested.
        num = None
        if 'num' in request.GET:
            # ROBUSTNESS FIX: the original crashed with an unhandled
            # ValueError (HTTP 500) on a non-integer 'num'.
            try:
                num = int(request.GET['num'])
            except ValueError:
                return Response(error_msg("wrong 'num' parameter: it must be an integer"))
        if not num:
            return Response(error_msg("requires 'num' parameter"))
        # Pre-size the per-university parameter slots so they can be
        # filled by index.
        univs = [None] * num
        sjs = [None] * num
        jhs = [None] * num
        guns = [None] * num
        blocks = [None] * num
        # Read the indexed query parameters for each university.
        for i in range(num):
            suffix = str(i)
            if ('univ' + suffix) in request.GET:
                univs[i] = request.GET['univ' + suffix]
            if ('sj' + suffix) in request.GET:
                sjs[i] = request.GET['sj' + suffix]
            if ('jh' + suffix) in request.GET:
                jhs[i] = request.GET['jh' + suffix]
            if ('gun' + suffix) in request.GET:
                guns[i] = request.GET['gun' + suffix]
            if ('block' + suffix) in request.GET:
                blocks[i] = request.GET['block' + suffix]
        # Validate every entry; emit an error payload on the first problem.
        for i in range(num):
            if not univs[i]:
                return Response(error_msg(f"univ {i}: requires 'univ' parameter"))
            if not sjs[i]:
                return Response(error_msg(f"univ {i}: requires 'sj' parameter"))
            if sjs[i] != '수시' and sjs[i] != '정시':
                return Response(error_msg(f"univ {i}: wrong 'sj' parameter: it must be '수시' or '정시'"))
            if sjs[i] == '수시' and not jhs[i]:
                return Response(error_msg(f"univ {i}: sj='수시' requires 'jh' parameter"))
            if sjs[i] == '정시' and not guns[i]:
                return Response(error_msg(f"univ {i}: sj='정시' requires 'gun' parameter"))
            if not blocks[i]:
                return Response(error_msg(f"univ {i}: requires 'block' parameter"))
        # Look up the matching schedules for each entry; empty matches
        # simply serialize to an empty 'schedules' list.
        responses = [None] * num
        for i in range(num):
            if sjs[i] == '수시':
                schedules = SusiSchedule.objects.filter(Q(susi__university__name=univs[i]) &
                                                        Q(susi__name=jhs[i]) &
                                                        Q(major_block__name=blocks[i]))
                responses[i] = {
                    'num': i,
                    'univ': univs[i],
                    'sj': sjs[i],
                    'jh': jhs[i],
                    # BUG FIX: the documented 'gun' output key was missing
                    # entirely; always include it (None for 수시 entries).
                    'gun': guns[i],
                    'block': blocks[i],
                    'schedules': SusiScheduleSerializer(schedules, many=True).data,
                }
            elif sjs[i] == '정시':
                schedules = JeongsiSchedule.objects.filter(Q(jeongsi__university__name=univs[i]) &
                                                           Q(jeongsi__gun=guns[i]) &
                                                           Q(major_block__name=blocks[i]))
                responses[i] = {
                    'num': i,
                    'univ': univs[i],
                    'sj': sjs[i],
                    # Legacy behavior kept for backward compatibility: the
                    # original shipped the gun value under the 'jh' key.
                    'jh': guns[i],
                    # BUG FIX: also expose it under the documented 'gun' key.
                    'gun': guns[i],
                    'block': blocks[i],
                    'schedules': JeongsiScheduleSerializer(schedules, many=True).data,
                }
        return Response(responses)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,698
|
SungjiCho/ipsi
|
refs/heads/master
|
/config/urls.py
|
"""ipsi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import url
from drf_yasg.views import get_schema_view
from rest_framework.permissions import AllowAny
from drf_yasg import openapi
# OpenAPI metadata shown on the generated documentation pages (drf_yasg).
_api_info = openapi.Info(
    title="IPSI API",
    default_version='v1',
    description="입시 애플리케이션을 위한 API입니다.",
    contact=openapi.Contact(email="tjddn8770@naver.com"),
)

# Schema endpoint is public: docs are browsable without authentication.
schema_view = get_schema_view(
    _api_info,
    public=True,
    permission_classes=(AllowAny,),
    validators=['flex'],
)
# Root URL table for the project.
urlpatterns = [
    path('admin/', admin.site.urls),
    # Application routers are all mounted at the site root; each app's
    # urls module defines its own sub-paths.
    path('', include('university.urls')),
    path('', include('review.urls')),
    path('', include('suneung.urls')),
    # API document generation with drf_yasg
    # 'swagger<str:format>' serves the raw schema (e.g. /swagger.json).
    path('swagger<str:format>', schema_view.without_ui(cache_timeout=0), name='schema-json'),
    path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
    path('docs/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc-v1')
]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,699
|
SungjiCho/ipsi
|
refs/heads/master
|
/jeongsi/migrations/0001_initial.py
|
# Generated by Django 3.0.5 on 2020-06-05 06:34
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial jeongsi schema: creates the Jeongsi and JeongsiSchedule tables.

    Auto-generated by Django's makemigrations; change the schema through new
    migrations rather than editing this file.
    """

    initial = True
    dependencies = [
        ('university', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Jeongsi',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.IntegerField(choices=[(2021, 2021)], verbose_name='학년도')),
                ('gun', models.CharField(choices=[('GA', '가군'), ('NA', '나군'), ('DA', '다군'), ('ETC', '군외')], max_length=7, verbose_name='군')),
                ('university', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='jeongsis', to='university.University', verbose_name='대학')),
            ],
            options={
                'verbose_name': '정시전형',
                'verbose_name_plural': '정시전형',
            },
        ),
        migrations.CreateModel(
            name='JeongsiSchedule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=255, verbose_name='설명')),
                ('start_date', models.DateTimeField(verbose_name='시작시간')),
                ('end_date', models.DateTimeField(verbose_name='종료시간')),
                ('jeongsi', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='jeongsi_schedules', to='jeongsi.Jeongsi', verbose_name='정시전형 종류')),
                ('major_block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='jeongsi_schedules', to='university.JeongsiMajorBlock', verbose_name='학과 블록')),
            ],
            options={
                'verbose_name': '정시전형 일정',
                'verbose_name_plural': '정시전형 일정',
            },
        ),
    ]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,700
|
SungjiCho/ipsi
|
refs/heads/master
|
/jeongsi/migrations/0002_auto_20200606_0030.py
|
# Generated by Django 3.0.5 on 2020-06-05 15:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change Jeongsi.gun stored values from codes (GA/NA/...) to the Korean labels.

    Auto-generated by Django's makemigrations; do not edit by hand.
    """

    dependencies = [
        ('jeongsi', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='jeongsi',
            name='gun',
            field=models.CharField(choices=[('가군', '가군'), ('나군', '나군'), ('다군', '다군'), ('군외', '군외')], max_length=7, verbose_name='군'),
        ),
    ]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,701
|
SungjiCho/ipsi
|
refs/heads/master
|
/model_to_csv.py
|
import os
import csv
import django
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
django.setup()
from university.models import *
from susi.models import *
from jeongsi.models import *
from suneung.models import *
# university
# Export every University row to CSV.
# Fix: 'review_url' was declared in the header but never written, so the
# exported column was always empty — write it alongside name and logo.
with open('csv/university.csv', 'w', newline='') as csvfile:
    fieldnames = ['name', 'logo', 'review_url']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for univ in University.objects.all():
        writer.writerow({'name': univ.name, 'logo': univ.logo, 'review_url': univ.review_url})
# Export susi major blocks. The 'university' column holds str(FK), which for
# SusiMajorBlock's related University is its name.
with open('csv/susi_major_block.csv', 'w', newline='') as csvfile:
    fieldnames = ['university', 'name']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for major_block in SusiMajorBlock.objects.all():
        writer.writerow({'university': major_block.university, 'name': major_block.name})
# Export jeongsi major blocks (same shape as the susi block export).
with open('csv/jeongsi_major_block.csv', 'w', newline='') as csvfile:
    fieldnames = ['university', 'name']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for major_block in JeongsiMajorBlock.objects.all():
        writer.writerow({'university': major_block.university, 'name': major_block.name})
# susi
# One row per Susi track; 'university' is serialized via the model's __str__.
with open('csv/susi.csv', 'w', newline='') as csvfile:
    fieldnames = ['university', 'name', 'year']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for susi in Susi.objects.all():
        writer.writerow({'university': susi.university, 'name': susi.name, 'year': susi.year})
# Susi schedules: 'susi' and 'major_block' columns are the models' str() forms,
# which csv_to_model.py later splits on '/' to resolve back to objects.
with open('csv/susi_schedule.csv', 'w', newline='') as csvfile:
    fieldnames = ['susi', 'major_block', 'description', 'start_date', 'end_date']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for schedule in SusiSchedule.objects.all():
        writer.writerow({'susi': schedule.susi, 'major_block': schedule.major_block, 'description': schedule.description, 'start_date': schedule.start_date, 'end_date': schedule.end_date})
# jeongsi
with open('csv/jeongsi.csv', 'w', newline='') as csvfile:
    fieldnames = ['university', 'gun', 'year']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for jeongsi in Jeongsi.objects.all():
        writer.writerow({'university': jeongsi.university, 'year': jeongsi.year, 'gun': jeongsi.gun})
# Jeongsi schedules, mirroring the susi schedule export.
with open('csv/jeongsi_schedule.csv', 'w', newline='') as csvfile:
    fieldnames = ['jeongsi', 'major_block', 'description', 'start_date', 'end_date']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for schedule in JeongsiSchedule.objects.all():
        writer.writerow({'jeongsi': schedule.jeongsi, 'major_block': schedule.major_block, 'description': schedule.description, 'start_date': schedule.start_date, 'end_date': schedule.end_date})
# suneung
# CSAT / mock-exam entries carry no FK, so only the three plain fields.
with open('csv/suneung.csv', 'w', newline='') as csvfile:
    fieldnames = ['description', 'start_date', 'end_date']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for suneung in Suneung.objects.all():
        writer.writerow({'description': suneung.description, 'start_date': suneung.start_date, 'end_date': suneung.end_date})
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,702
|
SungjiCho/ipsi
|
refs/heads/master
|
/review/serializers.py
|
from rest_framework import serializers
from university.models import University
class ReviewSerializer(serializers.ModelSerializer):
    """Expose a university's name together with its external review URL."""

    class Meta:
        model = University
        fields = ['name', 'review_url']
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,703
|
SungjiCho/ipsi
|
refs/heads/master
|
/university/admin.py
|
from django.contrib import admin
from university.models import *
# Expose the university catalogue models in the Django admin.
for _model in (University, SusiMajorBlock, JeongsiMajorBlock):
    admin.site.register(_model)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,704
|
SungjiCho/ipsi
|
refs/heads/master
|
/suneung/models.py
|
from django.db import models
class Suneung(models.Model):
    """A single CSAT (suneung) or mock-exam schedule entry."""

    class Meta:
        verbose_name = '수능 및 모의고사 일정'
        verbose_name_plural = '수능 및 모의고사 일정'

    # Human-readable label for the exam event.
    description = models.CharField(verbose_name='설명', max_length=255)
    # Start / end of the schedule window.
    start_date = models.DateTimeField(verbose_name='시작시간')
    end_date = models.DateTimeField(verbose_name='종료시간')

    def __str__(self):
        return self.description
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,705
|
SungjiCho/ipsi
|
refs/heads/master
|
/university/models.py
|
from django.db import models
class University(models.Model):
    """A university tracked by the admissions calendar."""

    class Meta:
        verbose_name = '대학'
        verbose_name_plural = '대학'

    # Unique display name, used as the natural key throughout the project.
    name = models.CharField(verbose_name='대학명', unique=True, max_length=31)
    # Optional logo identifier/path.
    logo = models.CharField(verbose_name='대학 로고', blank=True, max_length=31)
    # Optional link to an external review page.
    review_url = models.CharField(verbose_name='리뷰 url', blank=True, max_length=63)

    def __str__(self):
        return self.name
class SusiMajorBlock(models.Model):
    """A group of majors sharing one susi (early-admission) timetable."""

    class Meta:
        verbose_name = '수시 학과 블록'
        verbose_name_plural = '수시 학과 블록'

    university = models.ForeignKey(
        'University',
        verbose_name='소속 대학',
        related_name='susi_major_blocks',
        on_delete=models.CASCADE,
    )
    name = models.CharField(verbose_name='분류명', max_length=255)

    def __str__(self):
        return f'{self.university.name}/{self.name}'
class JeongsiMajorBlock(models.Model):
    """A group of majors sharing one jeongsi (regular-admission) timetable."""

    class Meta:
        verbose_name = '정시 학과 블록'
        verbose_name_plural = '정시 학과 블록'

    university = models.ForeignKey(
        'University',
        verbose_name='소속 대학',
        related_name='jeongsi_major_blocks',
        on_delete=models.CASCADE,
    )
    name = models.CharField(verbose_name='분류명', max_length=255)

    def __str__(self):
        return f'{self.university.name}/{self.name}'
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,706
|
SungjiCho/ipsi
|
refs/heads/master
|
/suneung/admin.py
|
from django.contrib import admin
from suneung.models import *
# Make CSAT/mock-exam schedules editable in the Django admin.
admin.site.register(Suneung)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,707
|
SungjiCho/ipsi
|
refs/heads/master
|
/susi/serializers.py
|
from rest_framework import serializers
from susi.models import *
class SusiSerializer(serializers.ModelSerializer):
    """Serialize a susi track's year and track name (no nested schedules)."""

    class Meta:
        model = Susi
        fields = ['year', 'name']
class SusiScheduleSerializer(serializers.ModelSerializer):
    """Serialize one susi schedule entry: description plus its time window."""

    class Meta:
        model = SusiSchedule
        fields = ['description', 'start_date', 'end_date']
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,708
|
SungjiCho/ipsi
|
refs/heads/master
|
/suneung/migrations/0001_initial.py
|
# Generated by Django 3.0.5 on 2020-06-05 06:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial suneung schema: creates the Suneung table.

    Auto-generated by Django's makemigrations; do not edit by hand.
    """

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Suneung',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=255, verbose_name='설명')),
                ('start_date', models.DateTimeField(verbose_name='시작시간')),
                ('end_date', models.DateTimeField(verbose_name='종료시간')),
            ],
            options={
                'verbose_name': '수능 및 모의고사 일정',
                'verbose_name_plural': '수능 및 모의고사 일정',
            },
        ),
    ]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,709
|
SungjiCho/ipsi
|
refs/heads/master
|
/susi/models.py
|
import datetime
from django.db import models
from university.models import *
# Admission years offered as model choices: (2021, 2021) .. (next_year, next_year).
# The upper bound tracks the wall clock so new years appear without a code change.
YEARS = [(year, year) for year in range(2021, datetime.datetime.now().year + 2)]
class Susi(models.Model):
    """A susi (early-admission) track a university runs in a given year."""

    class Meta:
        verbose_name = '수시전형'
        verbose_name_plural = '수시전형'

    university = models.ForeignKey(
        'university.University',
        verbose_name='대학',
        related_name='susis',
        on_delete=models.CASCADE,
    )
    name = models.CharField(verbose_name='전형명', max_length=31)
    year = models.IntegerField(verbose_name='학년도', choices=YEARS)

    def __str__(self):
        # ex) 2021/서울대학교/수시전형/일반전형
        return f'{self.year}/{self.university.name}/수시전형/{self.name}'
class SusiSchedule(models.Model):
    """One dated step (e.g. application window) of a susi track for a major block."""

    class Meta:
        verbose_name = '수시전형 일정'
        verbose_name_plural = '수시전형 일정'

    susi = models.ForeignKey(
        'Susi',
        verbose_name='수시전형 종류',
        related_name='susi_schedules',
        on_delete=models.CASCADE,
    )
    major_block = models.ForeignKey(
        'university.SusiMajorBlock',
        verbose_name='학과 블록',
        related_name='susi_schedules',
        on_delete=models.CASCADE,
    )
    description = models.CharField(verbose_name='설명', max_length=255)
    start_date = models.DateTimeField(verbose_name='시작시간')
    end_date = models.DateTimeField(verbose_name='종료시간')

    def __str__(self):
        # ex) 2021/서울대학교/수시전형/일반전형/지원서 접수/의과대학, 수의과대학, 치의과대학
        return (f'{self.susi.year}/{self.susi.university.name}/수시전형/'
                f'{self.susi.name}/{self.description}/{self.major_block.name}')
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,710
|
SungjiCho/ipsi
|
refs/heads/master
|
/university/urls.py
|
from django.urls import path
from university import views
# Routes for the university browsing/selection endpoints.
urlpatterns = [
    path('', views.UniversityList.as_view()),
    path('select/', views.UniversitySelect.as_view()),
    path('selectall/', views.UniversitySelectAll.as_view()),
]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,711
|
SungjiCho/ipsi
|
refs/heads/master
|
/university/serializers.py
|
from rest_framework import serializers
from university.models import *
from susi.serializers import SusiSerializer
from jeongsi.serializers import JeongsiSerializer
class SusiMajorBlockSerializer(serializers.ModelSerializer):
    """Serialize a susi major block as its name only."""

    class Meta:
        model = SusiMajorBlock
        fields = ['name']
class JeongsiMajorBlockSerializer(serializers.ModelSerializer):
    """Serialize a jeongsi major block as its name only."""

    class Meta:
        model = JeongsiMajorBlock
        fields = ['name']
class UniversitySerializer(serializers.ModelSerializer):
    """Full university payload with nested admission tracks and major blocks."""

    # Reverse relations (the related_name values on the FK models); output only.
    susis = SusiSerializer(read_only=True, many=True)
    susi_major_blocks = SusiMajorBlockSerializer(read_only=True, many=True)
    jeongsis = JeongsiSerializer(read_only=True, many=True)
    jeongsi_major_blocks = JeongsiMajorBlockSerializer(read_only=True, many=True)

    class Meta:
        model = University
        fields = [
            'name',
            'logo',
            'susis',
            'susi_major_blocks',
            'jeongsis',
            'jeongsi_major_blocks',
        ]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,712
|
SungjiCho/ipsi
|
refs/heads/master
|
/jeongsi/models.py
|
from django.db import models
from university.models import *
from susi.models import YEARS
# Jeongsi application groups ("gun"): 가/나/다 plus "outside any group".
# Django choices pairs where the stored value and display label coincide.
GUNS = tuple((gun, gun) for gun in ('가군', '나군', '다군', '군외'))
class Jeongsi(models.Model):
    """A jeongsi (regular-admission) track of a university for one year and gun."""

    class Meta:
        verbose_name = '정시전형'
        verbose_name_plural = '정시전형'

    university = models.ForeignKey(
        'university.University',
        verbose_name='대학',
        related_name='jeongsis',
        on_delete=models.CASCADE,
    )
    year = models.IntegerField(verbose_name='학년도', choices=YEARS)
    # Stored value equals the Korean label (see GUNS).
    gun = models.CharField(verbose_name='군', choices=GUNS, max_length=7)

    def __str__(self):
        # ex) 2021/서울대학교/정시전형/가군
        return f'{self.year}/{self.university.name}/정시전형/{self.gun}'
class JeongsiSchedule(models.Model):
    """One dated step of a jeongsi track, scoped to a major block."""

    class Meta:
        verbose_name = '정시전형 일정'
        verbose_name_plural = '정시전형 일정'

    jeongsi = models.ForeignKey(
        'Jeongsi',
        verbose_name='정시전형 종류',
        related_name='jeongsi_schedules',
        on_delete=models.CASCADE,
    )
    major_block = models.ForeignKey(
        'university.JeongsiMajorBlock',
        verbose_name='학과 블록',
        related_name='jeongsi_schedules',
        on_delete=models.CASCADE,
    )
    description = models.CharField(verbose_name='설명', max_length=255)
    start_date = models.DateTimeField(verbose_name='시작시간')
    end_date = models.DateTimeField(verbose_name='종료시간')

    def __str__(self):
        # ex) 2021/서울대학교/정시전형/가군/지원서 접수/의과대학, 수의과대학, 치의과대학
        return (f'{self.jeongsi.year}/{self.jeongsi.university.name}/정시전형/'
                f'{self.jeongsi.gun}/{self.description}/{self.major_block.name}')
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,713
|
SungjiCho/ipsi
|
refs/heads/master
|
/csv_to_model.py
|
import os
import csv
import django
from django.db.models import Q
from django.utils.dateparse import parse_datetime
from django.utils.timezone import make_aware
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
django.setup()
from university.models import *
from susi.models import *
from jeongsi.models import *
from suneung.models import *
# NOTE(review): every open() below uses the platform default encoding;
# for Korean CSV content this should probably be encoding='utf-8' — confirm
# against how the CSV files were produced before changing.

# add universities
# university.csv rows: [name, logo, review_url]; skip names already stored.
with open('csv/university.csv', 'r') as f:
    reader = csv.reader(f)
    next(reader)  # skip the header row
    for row in reader:
        # .exists() issues a cheap SQL EXISTS query instead of materializing
        # the whole queryset just to test its truthiness.
        if not University.objects.filter(name=row[0]).exists():
            University(name=row[0], logo=row[1], review_url=row[2]).save()

# for each university, add susis / susi major blocks / susi schedules
for univ in University.objects.all():
    susi_file = 'csv/' + univ.name + '/susi.csv'
    susi_major_block_file = 'csv/' + univ.name + '/susi_major_block.csv'
    susi_schedule_file = 'csv/' + univ.name + '/susi_schedule.csv'

    if os.path.isfile(susi_file):
        with open(susi_file, 'r') as f:
            reader = csv.reader(f)
            next(reader)  # skip header
            for row in reader:
                # row: [university name, susi name, year]
                if not Susi.objects.filter(Q(university__name=row[0]) & Q(name=row[1]) & Q(year=row[2])).exists():
                    university = University.objects.get(name=row[0])
                    Susi(university=university, name=row[1], year=row[2]).save()

    if os.path.isfile(susi_major_block_file):
        with open(susi_major_block_file, 'r') as f:
            reader = csv.reader(f)
            next(reader)  # skip header
            for row in reader:
                # row: [university name, major-block name]
                if not SusiMajorBlock.objects.filter(Q(university__name=row[0]) & Q(name=row[1])).exists():
                    university = University.objects.get(name=row[0])
                    SusiMajorBlock(university=university, name=row[1]).save()

    if os.path.isfile(susi_schedule_file):
        with open(susi_schedule_file, 'r') as f:
            reader = csv.reader(f)
            next(reader)  # skip header
            for row in reader:
                # susi_infos[0]: year, [1]: univ_name, [2]: susi, [3]: susi_name
                susi_infos = row[0].split('/')
                susi = Susi.objects.get(Q(university__name=susi_infos[1]) & Q(name=susi_infos[3]) & Q(year=susi_infos[0]))
                # major_block_infos[0]: univ, [1]: major_block_name
                major_block_infos = row[1].split('/')
                major_block = SusiMajorBlock.objects.get(Q(university__name=major_block_infos[0]) & Q(name=major_block_infos[1]))
                # save schedule only if an identical one is not already stored
                if not SusiSchedule.objects.filter(susi=susi, major_block=major_block, description=row[2]).exists():
                    # CSV stores naive datetimes; make_aware attaches the
                    # project's default timezone before saving.
                    start_date = make_aware(parse_datetime(row[3]))
                    end_date = make_aware(parse_datetime(row[4]))
                    SusiSchedule(susi=susi, major_block=major_block, description=row[2], start_date=start_date, end_date=end_date).save()
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,714
|
SungjiCho/ipsi
|
refs/heads/master
|
/jeongsi/apps.py
|
from django.apps import AppConfig
class JeongsiConfig(AppConfig):
    """Application configuration for the jeongsi (regular-admission) app."""

    name = 'jeongsi'
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,715
|
SungjiCho/ipsi
|
refs/heads/master
|
/jeongsi/admin.py
|
from django.contrib import admin
from jeongsi.models import *
# Expose the jeongsi models in the Django admin site.
for jeongsi_model in (Jeongsi, JeongsiSchedule):
    admin.site.register(jeongsi_model)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,716
|
SungjiCho/ipsi
|
refs/heads/master
|
/suneung/views.py
|
from rest_framework.views import APIView
from rest_framework.response import Response
from suneung.models import *
from suneung.serializers import *
class SuneungList(APIView):
    '''
    API returning CSAT (suneung) and mock-exam schedules
    ---
    ## `/suneung/`
    ## OUTPUT
    - 'description': schedule name
    - 'start_date' : schedule start time
    - 'end_date'   : schedule end time
    '''

    def get(self, request):
        """Serialize every stored Suneung schedule and return it."""
        queryset = Suneung.objects.all()
        payload = SuneungSerializer(queryset, many=True).data
        return Response(payload)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,717
|
SungjiCho/ipsi
|
refs/heads/master
|
/suneung/serializers.py
|
from rest_framework import serializers
from suneung.models import *
class SuneungSerializer(serializers.ModelSerializer):
    """Serializer exposing the public fields of a Suneung schedule."""

    class Meta:
        model = Suneung
        # Only the schedule label and its time window are exposed to clients.
        fields = ('description', 'start_date', 'end_date')
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,718
|
SungjiCho/ipsi
|
refs/heads/master
|
/review/urls.py
|
from django.urls import path
from review import views
# URL routes for the review app.
urlpatterns = [
    path('review/', views.ReviewList.as_view()),        # all reviews
    path('review/<univ>', views.ReviewDetail.as_view()),  # reviews for one university
]
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
29,719
|
SungjiCho/ipsi
|
refs/heads/master
|
/susi/admin.py
|
from django.contrib import admin
from susi.models import *
# Make the susi models manageable through the Django admin site.
for susi_model in (Susi, SusiSchedule):
    admin.site.register(susi_model)
|
{"/review/views.py": ["/university/models.py", "/review/serializers.py"], "/jeongsi/serializers.py": ["/jeongsi/models.py"], "/university/views.py": ["/university/models.py", "/university/serializers.py", "/susi/models.py", "/susi/serializers.py", "/jeongsi/models.py", "/jeongsi/serializers.py"], "/model_to_csv.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/review/serializers.py": ["/university/models.py"], "/university/admin.py": ["/university/models.py"], "/suneung/admin.py": ["/suneung/models.py"], "/susi/serializers.py": ["/susi/models.py"], "/susi/models.py": ["/university/models.py"], "/university/serializers.py": ["/university/models.py", "/susi/serializers.py", "/jeongsi/serializers.py"], "/jeongsi/models.py": ["/university/models.py", "/susi/models.py"], "/csv_to_model.py": ["/university/models.py", "/susi/models.py", "/jeongsi/models.py", "/suneung/models.py"], "/jeongsi/admin.py": ["/jeongsi/models.py"], "/suneung/views.py": ["/suneung/models.py", "/suneung/serializers.py"], "/suneung/serializers.py": ["/suneung/models.py"], "/susi/admin.py": ["/susi/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.