index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
25,599
|
fgassert/eeUtil
|
refs/heads/master
|
/eeUtil/__init__.py
|
'''
Python wrapper for easier data management on Google Earth Engine.
Files are staged via Google Cloud Storage for upload.
A service account with access to GEE and Storage is required.
See: https://developers.google.com/earth-engine/service_account
```
import eeUtil
# initialize from environment variables
eeUtil.init()
# create image collection
eeUtil.createFolder('mycollection', imageCollection=True)
# upload image to collection
eeUtil.uploadAsset('image.tif', 'mycollection/myasset')
eeUtil.setAcl('mycollection', 'public')
eeUtil.ls('mycollection')
# export from earthengine to storage and download
eeUtil.downloadAsset('mycollection/myasset', 'image.tif')
```
'''
from .eeutil import *
|
{"/eeUtil/eeutil.py": ["/eeUtil/__init__.py"], "/eeUtil/__init__.py": ["/eeUtil/eeutil.py"], "/eeUtil/gsbucket.py": ["/eeUtil/__init__.py"]}
|
25,600
|
fgassert/eeUtil
|
refs/heads/master
|
/eeUtil/gsbucket.py
|
import os
import re
from google.cloud import storage
import logging
from . import eeutil
# Silence warnings from googleapiclient.discovery_cache
# see https://github.com/googleapis/google-api-python-client/issues/299
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
logger = logging.getLogger(__name__)
# Unary client object
_gsClient = None
_gsBucket = None
def init(bucket=None, project=None, credentials=None):
    '''Initialize Google Cloud Storage client and default bucket.

    Sets the module-level client (and default bucket, if given). The default
    bucket is created when it does not already exist.

    Args:
        bucket (str): Default bucket to use
        project (str): Authenticate to this GCP project
        credentials (google.auth.credentials.Credentials): OAuth credentials
    '''
    global _gsClient
    global _gsBucket
    _gsClient = storage.Client(project, credentials=credentials) if project else storage.Client(credentials=credentials)
    if bucket:
        _gsBucket = _gsClient.bucket(bucket)
        if not _gsBucket.exists():
            # BUG FIX: the original message was a plain string, so the literal
            # text '{bucket}' was logged; use an f-string to interpolate.
            logger.warning(f'Bucket gs://{bucket} does not exist, creating')
            _gsBucket.create()
def Client():
    '''Return the shared Google Cloud Storage client, initializing lazily.'''
    if not _gsClient:
        # No explicit init() yet: fall back to default credentials.
        init()
    return _gsClient
def Bucket(bucket=None):
    '''Return an authenticated Bucket handle (default bucket when None).'''
    name = _defaultBucketName(bucket)
    return Client().bucket(name)
def _defaultBucketName(bucket=None):
    '''Resolve a bucket name, falling back to the default set by init().'''
    if bucket is None:
        if _gsBucket is None:
            raise Exception('No default bucket, run eeUtil.init() to set a default bucket')
        return _gsBucket.name
    return bucket
def asURI(path, bucket=None):
    '''Return the gs:// URI for *path* (in the default bucket when None).'''
    bucket_name = _defaultBucketName(bucket)
    return f"gs://{os.path.join(bucket_name, path)}"
def isURI(path, bucket=''):
    '''Return True if *path* looks like a gs://<bucket>/<blob> URI.

    With the default empty *bucket* any bucket name is accepted; a non-empty
    *bucket* restricts the match to URIs under that prefix.
    '''
    head = f'gs://{bucket}'
    if len(path) <= len(head) + 2:
        return False
    # Must start with the prefix and have a '/' separating bucket and blob.
    return path.startswith(head) and '/' in path[len(head) + 1:]
def pathFromURI(uri):
    '''Return only the blob-path component of a gs:// URI.'''
    _bucket, blob_path = fromURI(uri)
    return blob_path
def fromURI(uri):
    '''Split a gs:// URI into (bucket name, blob path).

    Raises:
        Exception: if *uri* is not of the form gs://<bucket>/<blob>.
    '''
    if isURI(uri):
        # Drop the 'gs://' scheme, then split bucket from blob path once.
        return uri[5:].split('/', 1)
    raise Exception(f'Path {uri} does not match gs://<bucket>/<blob>')
def exists(uri):
    '''Return True if the blob at *uri* exists in GCS.'''
    bucket_name, blob_path = fromURI(uri)
    return Bucket(bucket_name).blob(blob_path).exists()
def stage(files, prefix='', bucket=None):
    '''Upload files to GCS.

    Uploads files to gs://<bucket>/<prefix>/<filename>

    Args:
        files (list, str): Filenames of local files to upload
        prefix (str): Folder to upload to (prepended to file name)
        bucket (str): GCS bucket to upload to

    Returns:
        list: URIs of uploaded files
    '''
    if isinstance(files, str):
        files = (files,)
    uploaded = []
    for local_file in files:
        blob_path = os.path.join(prefix, os.path.basename(local_file))
        uri = asURI(blob_path, bucket)
        logger.info(f'Uploading {local_file} to {uri}')
        Bucket(bucket).blob(blob_path).upload_from_filename(local_file)
        uploaded.append(uri)
    return uploaded
def remove(gs_uris):
    '''
    Remove blobs from GCS.

    Args:
        gs_uris (list, str): Full paths to blob(s) to remove `gs://<bucket>/<blob>`
    '''
    gs_uris = (gs_uris,) if isinstance(gs_uris, str) else gs_uris
    # Group blob paths by bucket so each bucket gets one batch delete.
    paths_by_bucket = {}
    for uri in gs_uris:
        bucket, path = fromURI(uri)
        paths_by_bucket.setdefault(bucket, []).append(path)
    # BUG FIX: the original wrote `for bucket, paths in paths:`, which iterates
    # dict KEYS only (raising on unpacking) and shadowed the dict variable;
    # iterate .items() to get (bucket, [paths]) pairs.
    for bucket, paths in paths_by_bucket.items():
        logger.info(f"Deleting {paths} from gs://{bucket}")
        # on_error no-op keeps this best-effort: missing blobs are ignored.
        Bucket(bucket).delete_blobs(paths, on_error=lambda x: x)
def download(gs_uri, filename=None, directory=None):
    '''
    Download a blob from GCS to a local file.

    Args:
        gs_uri (string): full path to blob `gs://<bucket>/<blob>`
        filename (string): name of local file to save (default: blob name)
        directory (string): local directory to save files to
    '''
    target = filename if filename is not None else os.path.basename(gs_uri)
    if directory is not None:
        target = os.path.join(directory, target)
    bucket_name, blob_path = fromURI(gs_uri)
    logger.info(f"Downloading {gs_uri}")
    Bucket(bucket_name).blob(blob_path).download_to_filename(target)
def getTileBlobs(uri):
    '''Check the existence of an exported image or image tiles.

    Matches either <blob>.tif or <blob>0000000000-0000000000.tif following
    the EE image export tiling naming scheme.

    Returns:
        list: URIs of matching blobs
    '''
    bucket, path = fromURI(uri)
    prefix = f'{os.path.dirname(path)}/'
    basename, ext = os.path.splitext(os.path.basename(path))
    blobs = Bucket(bucket).list_blobs(prefix=prefix, delimiter='/')
    # BUG FIX: escape the path components so regex metacharacters in names
    # (e.g. '.', '+', '(') match literally instead of as regex syntax.
    pattern = re.compile(
        rf'{re.escape(prefix)}{re.escape(basename)}(\d{{10}}-\d{{10}})?{re.escape(ext)}$')
    return [asURI(blob.name, bucket) for blob in blobs if pattern.match(blob.name)]
|
{"/eeUtil/eeutil.py": ["/eeUtil/__init__.py"], "/eeUtil/__init__.py": ["/eeUtil/eeutil.py"], "/eeUtil/gsbucket.py": ["/eeUtil/__init__.py"]}
|
25,622
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/migrations/0004_auto_20191129_0751.py
|
# Generated by Django 2.2.6 on 2019-11-29 07:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make BaseOption.color and .sub_color nullable with a (None, None) choice."""

    dependencies = [
        ('base', '0003_auto_20191116_1921'),
    ]

    operations = [
        migrations.AlterField(
            model_name='baseoption',
            name='color',
            # NOTE(review): choice labels read like age ranges ('under 21',
            # '23 and over') on a color field — looks inconsistent; confirm.
            field=models.CharField(choices=[('LT', 'under 21'), ('MD', '21'), ('DK', '23 and over'), (None, None)], default=None, max_length=2, null=True),
        ),
        migrations.AlterField(
            model_name='baseoption',
            name='sub_color',
            field=models.CharField(choices=[('WM', 'Warm Tone'), ('NT', 'Neutral Tone'), ('CL', 'Cool Tone'), (None, None)], default=None, max_length=2, null=True),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,623
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/test_views_base.py
|
from django.test import TestCase, Client
import json
from products.base.models import Base, BaseOption
from brand.models import Brand
from products.base.serializers import BaseSerializer, BaseOptionSerializer
class BaseTestCase(TestCase):
    """Tests for the /api/base/ search endpoints."""

    def setUp(self):
        # One brand with two base products; three color options across them.
        self.client = Client()
        self.brand = Brand.objects.create(name="brand1")
        self.product1 = Base.objects.create(
            name="base1", price=100, category="P", img_url="tmp_url1", brand=self.brand)
        self.product2 = Base.objects.create(
            name="base2", price=200, category="CU", img_url="tmp_url2", brand=self.brand)
        self.color1 = BaseOption.objects.create(
            color="LT", sub_color="WM", color_hex="hex1", optionName="option1", product=self.product1)
        self.color2 = BaseOption.objects.create(
            color="MD", color_hex="hex2", optionName="option2", product=self.product1)
        self.color3 = BaseOption.objects.create(
            color="LT", sub_color="MD", color_hex="hex1", optionName="option1", product=self.product2)

    def _search(self, query):
        """GET /api/base/<query>, assert HTTP 200, and return the parsed JSON."""
        response = self.client.get('/api/base/' + query)
        self.assertEqual(response.status_code, 200)
        return json.loads(response.content)

    def test_bad_request(self):
        # PUT is not an allowed method on the tag endpoint.
        response = self.client.put('/api/base/tag')
        self.assertEqual(response.status_code, 405)

    def test_base_search(self):
        res = self._search('category=P')
        self.assertEqual(res[0]["name"], self.product1.name)
        self.assertEqual(res[0]["category"], 'P')

        res = self._search('category=CU')
        self.assertEqual(res[0]["name"], self.product2.name)
        self.assertEqual(res[0]["category"], 'CU')

        res = self._search('color=MD')
        self.assertEqual(res[0]["color"][0]["color"], self.color2.color)
        self.assertEqual(res[0]["category"], 'P')

        res = self._search('color=LT&subcolor=WM')
        self.assertEqual(res[0]["color"][0]["color"], self.color1.color)
        self.assertEqual(res[0]["category"], 'P')

        res = self._search('brand=brand1')
        self.assertEqual(len(res), 2)

        res = self._search('subcolor=WM')
        self.assertEqual(len(res), 1)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,624
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0003_auto_20191123_1835.py
|
# Generated by Django 2.2.7 on 2019-11-23 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give ML.result a non-null default of 'NONE'."""

    dependencies = [
        ('ml', '0002_ml_user_id'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ml',
            name='result',
            field=models.CharField(default='NONE', max_length=30),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,625
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/face_color_ml.py
|
""" Face Color Machine Learning code """
from collections import Counter
import pprint
import base64
from io import BytesIO
from facenet_pytorch import MTCNN, InceptionResnetV1, extract_face
from PIL import Image, ImageDraw
import numpy as np
import cv2
from sklearn.cluster import KMeans
import imutils
from matplotlib import pyplot as plt
#from matplotlib.figure import Figure
#path = 'backend/ml/image/anne-marie'
plt.switch_backend('Agg')
resnet = InceptionResnetV1(pretrained='vggface2').eval()
def img_to_face(image_path):
    """Detect and crop a face from *image_path* using MTCNN.

    Side effects: writes the cropped face to 'media/output/face.png' and
    switches the module-level `resnet` into classification mode.
    *image_path*: a str (resolved relative to 'media/') or a path-like/file
    object passed straight to PIL.
    Returns None; see the note below about the unused outputs.
    """
    if not isinstance(image_path, str):
        final_path = image_path
    else:
        final_path = 'media/' + str(image_path)
    img = Image.open(final_path).convert('RGB')
    mtcnn = MTCNN(image_size=100)
    # Get cropped and prewhitened image tensor
    img_cropped = mtcnn(img, save_path='media/output/face.png')
    # Calculate embedding (unsqueeze to add batch dimension)
    img_embedding = resnet(img_cropped.unsqueeze(0))
    # Or, if using for VGGFace2 classification
    # NOTE(review): img_embedding and img_probs are computed but never used or
    # returned — presumably only the saved crop matters downstream; confirm.
    resnet.classify = True
    img_probs = resnet(img_cropped.unsqueeze(0))
#img_to_face(path)
def extractSkin(image):
    """Return *image* with non-skin pixels masked to black.

    Works in HSV space: keeps pixels inside a fixed skin-tone window,
    smooths the mask with a Gaussian blur, then converts back to BGR.
    """
    hsv = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2HSV)
    # Fixed HSV skin-tone window (empirically tuned thresholds).
    low = np.array([0, 48, 80], dtype=np.uint8)
    high = np.array([20, 255, 255], dtype=np.uint8)
    # Single-channel mask of pixels inside the window.
    mask = cv2.inRange(hsv, low, high)
    # Smooth mask edges before applying it.
    mask = cv2.GaussianBlur(mask, (3, 3), 0)
    skin = cv2.bitwise_and(hsv, hsv, mask=mask)
    return cv2.cvtColor(skin, cv2.COLOR_HSV2BGR)
def removeBlack(estimator_labels, estimator_cluster):
    """Drop the black ([0, 0, 0]) cluster, if present, from KMeans output.

    Args:
        estimator_labels: per-pixel cluster labels (KMeans .labels_)
        estimator_cluster: cluster centers (KMeans .cluster_centers_)

    Returns:
        tuple: (occurrence Counter without the black label,
                cluster array without the black row,
                bool — whether a black cluster was found)
    """
    occurance_counter = Counter(estimator_labels)
    hasBlack = False
    # Scan clusters from most to least common, removing the first black one.
    for label, _count in occurance_counter.most_common(len(estimator_cluster)):
        center = [int(channel) for channel in estimator_cluster[label].tolist()]
        if Counter(center) == Counter([0, 0, 0]):
            del occurance_counter[label]
            estimator_cluster = np.delete(estimator_cluster, label, 0)
            hasBlack = True
            break
    return (occurance_counter, estimator_cluster, hasBlack)
def getColorInformation(estimator_labels, estimator_cluster, hasThresholding=False):
    """Summarize KMeans clusters as a list of color/percentage dicts.

    Args:
        estimator_labels: per-pixel cluster labels (KMeans .labels_)
        estimator_cluster: cluster centers (KMeans .cluster_centers_)
        hasThresholding: True when a black-masked image was clustered; the
            black cluster is then removed before summarizing.

    Returns:
        list: dicts {"cluster_index", "color", "color_percentage"},
        ordered by decreasing occurrence.
    """
    hasBlack = False
    if hasThresholding:
        occurance_counter, estimator_cluster, hasBlack = removeBlack(
            estimator_labels, estimator_cluster)
    else:
        occurance_counter = Counter(estimator_labels)
    totalOccurance = int(sum(occurance_counter.values()))
    colorInformation = []
    for label, count in occurance_counter.most_common(len(estimator_cluster)):
        index = int(label)
        # After removeBlack() dropped a cluster row, labels above the removed
        # row are off by one; shift them down (quick fix kept from original).
        if hasThresholding and hasBlack and int(index) != 0:
            index = index - 1
        colorInformation.append({
            "cluster_index": index,
            "color": estimator_cluster[index].tolist(),
            "color_percentage": int(count) / totalOccurance,
        })
    return colorInformation
def extractDominantColor(image, number_of_colors=20, hasThresholding=False):
    """Cluster pixel colors with KMeans and return dominant-color info.

    Args:
        image: BGR image array
        number_of_colors: number of KMeans clusters to fit
        hasThresholding: True when *image* was skin-masked; one extra
            cluster is added to absorb the black background, which is
            removed again by getColorInformation().

    Returns:
        list: output of getColorInformation(), ordered by occurrence.
    """
    if hasThresholding:
        # Extra cluster soaks up the masked (black) pixels.
        number_of_colors += 1
    pixels = cv2.cvtColor(image.copy(), cv2.COLOR_BGR2RGB)
    # Flatten H x W x 3 into a (H*W, 3) sample matrix for KMeans.
    pixels = pixels.reshape((pixels.shape[0] * pixels.shape[1]), 3)
    estimator = KMeans(n_clusters=number_of_colors, random_state=0)
    estimator.fit(pixels)
    return getColorInformation(
        estimator.labels_, estimator.cluster_centers_, hasThresholding)
def savepic(user_id):
    """Save a 3-panel figure (face crop, skin mask, color bar) for *user_id*.

    Reads 'media/output/face.png' (written earlier by img_to_face) and writes
    'media/output/colorbar<user_id>.png'. *user_id* is concatenated into the
    file name, so it must be a str. Returns None.
    """
    image = cv2.imread("media/output/face.png")
    plt.subplot(3, 1, 1)
    plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
    skin = extractSkin(image)
    plt.subplot(3, 1, 2)
    plt.imshow(cv2.cvtColor(skin, cv2.COLOR_BGR2RGB))
    dominantcolors = extractDominantColor(skin, hasThresholding=True)
    colour_bar = plotColorBar(dominantcolors)
    plt.subplot(3, 1, 3)
    plt.axis("off")
    plt.imshow(colour_bar)
    plt.savefig('media/output/colorbar' + user_id + '.png')
    plt.close()
    #fig = Figure()
    #ax = fig.subplots()
    #ax.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB), cv2.cvtColor(skin, cv2.COLOR_BGR2RGB),
    #          colour_bar)
    #buf = BytesIO()
    #fig.savefig('color_bar', format="png")
    return
def plotColorBar(colorInformation):
    """Render dominant colors as a 100x500 horizontal bar image.

    Each entry fills a width proportional to its "color_percentage".
    """
    bar = np.zeros((100, 500, 3), dtype="uint8")
    left_edge = 0
    for entry in colorInformation:
        right_edge = left_edge + entry["color_percentage"] * bar.shape[1]
        fill = tuple(map(int, entry['color']))
        cv2.rectangle(bar, (int(left_edge), 0),
                      (int(right_edge), bar.shape[0]), fill, -1)
        left_edge = right_edge
    return bar
def tone_analysis(img_path, user_id):
    """Return the second-brightest dominant skin color (an [R, G, B] list).

    Crops the face from *img_path*, saves diagnostic images for *user_id*
    under media/output/, extracts the skin's dominant colors, and selects by
    luminance (Rec. 709 weights 0.2126/0.7152/0.0722). The brightest cluster
    is skipped (presumably glare/highlight) and the runner-up returned.
    """
    # pylint: disable=line-too-long,no-member,too-many-locals,no-self-use
    img_to_face(img_path)
    image = cv2.imread("media/output/face.png")
    savepic(user_id)
    image = imutils.resize(image, width=250)
    skin = extractSkin(image)
    dominantColors = extractDominantColor(skin, hasThresholding=True)
    # max_color: brightest color seen so far; sel_color: second brightest.
    max_color = [0,0,0]
    sel_color = [0,0,0]
    # site1/site2 track the cluster indices of max/sel but are never returned.
    site1 = 0
    site2 = 1
    # NOTE(review): hard-coded 20 assumes exactly 20 clusters remain after the
    # black cluster is removed; IndexError if fewer are returned — confirm.
    for i in range(20):
        lum = dominantColors[i].get('color')[0]*0.2126 + dominantColors[i].get('color')[1]*0.7152 + dominantColors[i].get('color')[2]*0.0722
        max_lum = max_color[0]*0.2126 + max_color[1]*0.7152 + max_color[2]*0.0722
        sel_lum = sel_color[0]*0.2126 + sel_color[1]*0.7152 + sel_color[2]*0.0722
        if max_lum < lum:
            # New brightest: previous brightest becomes the runner-up.
            sel_color[0] = max_color[0]
            sel_color[1] = max_color[1]
            sel_color[2] = max_color[2]
            max_color[0] = dominantColors[i].get('color')[0]
            max_color[1] = dominantColors[i].get('color')[1]
            max_color[2] = dominantColors[i].get('color')[2]
            site2 = site1
            site1 = i
        elif sel_lum < lum:
            # Brighter than the current runner-up but not the brightest.
            sel_color[0] = dominantColors[i].get('color')[0]
            sel_color[1] = dominantColors[i].get('color')[1]
            sel_color[2] = dominantColors[i].get('color')[2]
            site2 = i
    return sel_color
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,626
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/test_views_ml.py
|
from django.test import TestCase, Client
from urllib.request import urlopen
from django.core.files import File
from django.core import files
from django.core.files.temp import NamedTemporaryFile
from ml.models import ML
from brand.models import Brand
import json
from products.base.models import Base, BaseOption
class MLTestCase(TestCase):
    """Tests for the /api/ml/ endpoints (list and create/update)."""

    def setUp(self):
        # Fixtures: one brand, one base product with a color option, and one
        # ML record whose image is downloaded at test time.
        # NOTE(review): the urlopen() fetch makes this test depend on an
        # external URL being reachable — consider a bundled fixture file.
        self.client=Client()
        self.brand = Brand.objects.create(name="brand1")
        self.product1 = Base.objects.create(
            name="base1", price=100, category="BAS_F", img_url="tmp_url1", brand=self.brand)
        self.color1 = BaseOption.objects.create(
            color="LT", sub_color="color",
            color_hex="111111", optionName="option", product=self.product1
        )
        img_url = 'http://ph.spotvnews.co.kr/news/photo/201905/285546_351974_2458.jpg'
        self.img_temp = NamedTemporaryFile(delete=True, dir='media', suffix='.jpg')
        self.img_temp.write(urlopen(img_url).read())
        self.img_temp.flush()
        self.ml_object = ML.objects.create(
            user_id='1', result='result', image=files.File(self.img_temp), base='base', product = self.product1
        )

    def test_get(self):
        # Listing ML records returns the fixture's fields verbatim.
        response = self.client.get('/api/ml/')
        res = json.loads(response.content)
        self.assertEqual(res[0]['user_id'], self.ml_object.user_id)
        self.assertEqual(res[0]['result'], self.ml_object.result)
        self.assertEqual(res[0]['base'], self.ml_object.base)

    def test_put(self):
        # POST with an empty body is rejected as a bad request.
        response = self.client.post('/api/ml/', json.dumps({}),
                                    content_type='application/json')
        self.assertEqual(response.status_code, 400)
        # NOTE(review): the PUT below has no assertion on its response —
        # looks unfinished; confirm the expected status code.
        response = self.client.put('/api/ml/', json.dumps({'userID': '1'}),
                                   content_type='application/json')
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,627
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/migrations/0005_auto_20191130_1643.py
|
# Generated by Django 2.2.6 on 2019-11-30 16:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add Base.product_url and expose BaseOption.product as related_name 'color'."""

    dependencies = [
        ('base', '0004_auto_20191129_0751'),
    ]

    operations = [
        migrations.AddField(
            model_name='base',
            name='product_url',
            # '//:0' is a placeholder URL default for pre-existing rows.
            field=models.CharField(default='//:0', max_length=255),
        ),
        migrations.AlterField(
            model_name='baseoption',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='color', to='base.Base'),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,628
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0004_auto_20191123_1840.py
|
# Generated by Django 2.2.7 on 2019-11-23 18:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change ML.file to an ImageField uploading under 'images'."""

    dependencies = [
        ('ml', '0003_auto_20191123_1835'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ml',
            name='file',
            field=models.ImageField(upload_to='images'),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,629
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0004_lipoption_optionname.py
|
# Generated by Django 2.2.6 on 2019-11-01 13:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add LipOption.optionName, backfilling existing rows with 'tmpoption'."""

    dependencies = [
        ('lip', '0003_lipoption'),
    ]

    operations = [
        migrations.AddField(
            model_name='lipoption',
            name='optionName',
            # preserve_default=False: 'tmpoption' is only a one-off backfill
            # value, not a permanent field default.
            field=models.CharField(default='tmpoption', max_length=30),
            preserve_default=False,
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,630
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0011_auto_20191126_1335.py
|
# Generated by Django 2.2.6 on 2019-11-26 13:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change the defaults of ML.result and ML.user_id to None."""

    dependencies = [
        ('ml', '0010_auto_20191126_1333'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ml',
            name='result',
            field=models.CharField(default=None, max_length=30),
        ),
        migrations.AlterField(
            model_name='ml',
            name='user_id',
            field=models.CharField(default=None, max_length=30),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,631
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/models.py
|
""" ML model """
from django.db import models
# from products.base import models as product_models
# Create your models here.
from products.base import models as base_model
class ML(models.Model):
""" model for ML results """
user_id = models.CharField(max_length=30, default="NONE")
result = models.CharField(max_length=30, default="NONE")
image = models.ImageField(upload_to='images')
base = models.CharField(max_length=100, default="NONE")
output_image = models.ImageField(null=True)
product = models.ForeignKey(
base_model.Base,
on_delete=models.CASCADE,
null=True,
blank=True,
)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,632
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/spiders/mac_lip.py
|
# -*- coding: utf-8 -*-
""" scrapy spider for aritaum webpage """
import random
import scrapy
from crawl.items import LipProduct, LipColor
from brand.models import Brand as Brand_db
from products.lip.models import Lip as Lip_db
from .color_tag import cal_color_tag
class MacLipSpider(scrapy.Spider):
""" scrapy spider for mac lip product """
name = "mac-lip"
def __init__(self):
scrapy.Spider.__init__(self)
self.brand = Brand_db.objects.filter(name="MAC")[0]
def start_requests(self):
urls = [{'category': 'LIP_S',
'link': 'https://www.maccosmetics.co.kr/products/13854/Products/Makeup/Lips/Lipstick'},
{'category': 'LIP_G',
'link': 'https://www.maccosmetics.co.kr/products/13853/Products/Makeup/Lips/Lip-Gloss'},
{'category': 'LIP_S',
'link': 'https://www.maccosmetics.co.kr/products/13852/Products/Makeup/Lips/Lip-Pencil'}]
for url in urls:
yield scrapy.Request(
url=url["link"],
meta={"category": url["category"]},
callback=self.parse
)
@staticmethod
def parse_price(price):
""" return int -price """
return int(price[2:].replace(',', ''))
def parse(self, response):
product_name = response.css('h3.product__subline::text').getall()
price = response.css(
'header > div > div > span.product__price--standard::text').getall()
product_url = response.css(
'div.product__image-medium > a.product__image-medium-link::attr(href)').getall()
thumb_url = response.css(
'div.product__image-medium > a.product__image-medium-link > img.product__sku-image--rendered--medium::attr(src)').getall()
host = 'https://www.maccosmetics.co.kr'
category = response.meta['category']
for i, name in enumerate(product_name):
url = host + product_url[i]
int_price = self.parse_price(price[i])
index = random.randint(0, 2)
forms = ['LIP_M', 'LIP_G', 'LIP_N']
yield LipProduct(
name=product_name[i],
price=int_price,
brand=self.brand,
category=category,
img_url=host + thumb_url[i],
crawled="lip",
product_url=host + product_url[i],
form=forms[index]
)
yield scrapy.Request(
url=url,
meta={'product': name},
callback=self.parse_color
)
@staticmethod
def parse_hex(hexa):
''' return parsed hex value '''
return hexa[11:18]
def parse_color(self, response):
''' yield scrapy color object '''
rgb = response.css(
'div.product-full__shade > div.product-full__shade-swatch::attr(style)').getall()
name = response.css(
'div.product-full__shade > div.product-full__shade-name::text').getall()
product_name = response.meta['product']
print(product_name)
product = Lip_db.objects.filter(name=product_name)[0]
for i, hexa in enumerate(rgb):
color_hex = self.parse_hex(hexa)
color_name = name[i]
color_tuple = cal_color_tag("lip", color_hex)
yield LipColor(
color_hex=color_hex,
color=color_tuple[0],
sub_color=color_tuple[1],
optionName=color_name,
product=product,
crawled="lip_option"
)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,633
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/test_models.py
|
from products.lip.models import Lip, LipOption
from products.base.models import Base, BaseOption
from products.cheek.models import Cheek, CheekOption
from brand.models import Brand
import unittest
from ml.models import ML
from user.models import Profile
from django.contrib.auth.models import User
class ItemTest(unittest.TestCase):
def test_lip_class(self):
i = Lip(name="tmp_name")
self.assertEqual(str(i), i.name)
def test_brand_class(self):
i = Brand(name_ko="tmp_name")
self.assertEqual(str(i), i.name_ko)
def test_lipoption_class(self):
i = Lip(name="tmp_lip")
o = LipOption(product=i, optionName="tmp_option")
self.assertEqual(str(o), str(i) + " " + o.optionName)
def test_base_class(self):
i = Base(name="tmp_name")
self.assertEqual(str(i), i.name)
def test_baseoption_class(self):
i = Base(name="tmp_base")
o = BaseOption(product=i, optionName="tmp_option")
self.assertEqual(str(o), str(i) + " " + o.optionName)
def test_cheek_class(self):
i = Cheek(name="tmp_name")
self.assertEqual(str(i), i.name)
def test_cheek_option_class(self):
i = Cheek(name="tmp_cheek")
o = CheekOption(product=i, optionName="tmp_option")
self.assertEqual(str(o), str(i) + " " + o.optionName)
def test_ml_class(self):
i = ML(result='0,0,0')
i.save()
self.assertEqual('0,0,0', i.result)
def test_user_model(self):
new_user = User.objects.create_user(
username='test_name', email='test_mail', password='test_pw')
i = Profile(user=new_user)
self.assertEqual(str(i), new_user.username)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,634
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/views.py
|
"""VIEW MODEL"""
import json
from json import JSONDecodeError
from django.http import JsonResponse, HttpResponseBadRequest
from rest_framework.parsers import FileUploadParser
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from products.base import models as Base_models
from .face_color_ml import tone_analysis
from .serializers import FileSerializer
from .models import ML
from .find_base import best_match
class FileUploadView(APIView):
""" ML Model View"""
parser_class = (FileUploadParser,)
def get(self, request):
# pylint: disable=no-member
""" GET """
mls = ML.objects.all()
serializer = FileSerializer(mls, many=True)
return Response(serializer.data)
def post(self, request):
""" POST """
file = FileSerializer(data=request.data)
if file.is_valid():
file.save()
return Response(file.data, status=status.HTTP_201_CREATED)
return Response(file.errors, status=status.HTTP_400_BAD_REQUEST)
def put(self, request):
# pylint: disable=line-too-long,no-member,too-many-locals,no-self-use
""" PUT """
try:
body = request.body.decode()
u_id = json.loads(body)['userID']
except(KeyError, JSONDecodeError):
return HttpResponseBadRequest()
ml_object = ML.objects.filter(user_id=u_id).latest('id')
ml_object.result = tone_analysis(ml_object.image, u_id)
ml_object.save()
base_products = Base_models.Base.objects.filter(category='BAS_F')
base_products_info = []
total_base_products_info = Base_models.BaseOption.objects.all()
for product1 in base_products.all():
for product2 in total_base_products_info:
if product2.product == product1:
base_products_info.append(product2)
#base_products_info = Base_models.BaseOption.objects.filter(product=base_products.all())
base_products_hexa = [i.color_hex for i in base_products_info]
best_product = best_match(base_products_hexa, ml_object.result)
final = base_products_info[best_product]
ml_object.base = str(final.product) + " " + final.optionName
ml_object.product = Base_models.Base.objects.get(name=final.product)
ml_object.save()
product_info = {
'price': ml_object.product.price,
'brand': ml_object.product.brand.name,
'img_url': ml_object.product.img_url,
'product_url': ml_object.product.product_url}
response_dict = {
'id': ml_object.id,
'user_id': ml_object.user_id,
'r': ml_object.result[0],
'g': ml_object.result[1],
'b': ml_object.result[2],
'base': ml_object.base,
'product': product_info}
return JsonResponse(response_dict, status=201)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,635
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/cheek/migrations/0001_initial.py
|
# Generated by Django 2.2.6 on 2019-12-02 11:34
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('brand', '0003_brand_name_ko'),
]
operations = [
migrations.CreateModel(
name='Cheek',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('price', models.IntegerField()),
('category', models.CharField(choices=[('B', 'Blusher'), ('C', 'Contouring'), ('H', 'Highlighter')], max_length=1)),
('product_url', models.CharField(default='//:0', max_length=255)),
('img_url', models.CharField(default='//:0', max_length=255)),
('brand', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='brand.Brand')),
],
),
migrations.CreateModel(
name='CheekOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('color', models.CharField(choices=[('RD', 'Red'), ('PK', 'Pink'), ('OR', 'Orange')], max_length=2)),
('sub_color', models.CharField(max_length=30)),
('color_hex', models.CharField(max_length=10)),
('optionName', models.CharField(max_length=30)),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='color', to='cheek.Cheek')),
],
),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,636
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/urls.py
|
""" ML model urls """
from django.urls import path
from .views import *
urlpatterns = [
path('ml/', FileUploadView.as_view(), name="ML"),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,637
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0011_auto_20191126_1807.py
|
# Generated by Django 2.2.6 on 2019-11-26 18:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lip', '0010_auto_20191121_0959'),
]
operations = [
migrations.AlterField(
model_name='lipoption',
name='color',
field=models.CharField(choices=[('RD', 'Red'), ('PK', 'Pink'), ('OR', 'Orange'), ('PU', 'Purple')], max_length=2),
),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,638
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/admin.py
|
""" django admin page setting """
from django.contrib import admin
from .lip.models import Lip, LipOption
from .base.models import Base, BaseOption
from .cheek.models import Cheek, CheekOption
admin.site.register(Lip)
admin.site.register(LipOption)
admin.site.register(Base)
admin.site.register(BaseOption)
admin.site.register(Cheek)
admin.site.register(CheekOption)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,639
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0010_auto_20191121_0959.py
|
# Generated by Django 2.2.6 on 2019-11-21 09:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lip', '0009_auto_20191117_2206'),
]
operations = [
migrations.AlterField(
model_name='lipoption',
name='sub_color',
field=models.CharField(default='#ffffff', max_length=30),
preserve_default=False,
),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,640
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/spiders/spider_helper.py
|
""" Help Scrapy Aritaum Spider for multiple product category """
def translate_category(raw_text, category):
""" get eng value """
trim_size = {'lip': 12, 'base': 14, 'cheek': 13}
eng_name = {'lip': {
"립스틱": "LIP_S",
"립글로즈": "LIP_G",
"립케어/립밤": "LIP_B",
"립틴트": "LIP_T",
"립글로스": "LIP_S"
}, 'base': {
"쿠션": "BAS_CU",
"파운데이션": "BAS_F",
"파우더/팩트": "BAS_P",
"프라이머/베이스": "BAS_PR",
"비비/씨씨크림": "BAS_B",
"컨실러": "BAS_C"
}, 'cheek': {
"블러셔": "CHK_B",
"브론져": "CHK_C",
"하이라이터": "CHK_H"
}}
size = trim_size[category]
category_ko = raw_text[size:]
try:
res = eng_name[category][category_ko]
return res
except KeyError:
return -1
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,641
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/models.py
|
""" Django Base Product Model """
from django.db import models
from brand import models as brand_models
# Create your models here.
class Base(models.Model):
""" django Base model """
POWDER = 'BAS_P'
CUSHION = 'BAS_CU'
FOUNDATION = 'BAS_F'
PRIMER = 'BAS_PR'
BB = 'BAS_B'
CONCEALER = 'BAS_C'
CATEGORY = (
(POWDER, 'Powder'),
(CUSHION, 'Cushion'),
(CONCEALER, 'Concealer'),
(FOUNDATION, 'Foundation'),
(PRIMER, 'Primer'),
(BB, 'BB & CC')
)
name = models.CharField(max_length=20)
price = models.IntegerField()
brand = models.ForeignKey(
brand_models.Brand,
on_delete=models.CASCADE
)
category = models.CharField(
max_length=6,
choices=CATEGORY,
)
product_url = models.CharField(
max_length=255,
default="//:0",
)
img_url = models.TextField(default="//:0")
def __str__(self):
return self.name
class BaseOption(models.Model):
""" option of django base model """
LIGHT = "BAS_LT"
MIDDLE = "BAS_MD"
DARK = "BAS_DK"
COLOR = (
(LIGHT, "under 21"),
(MIDDLE, "21"),
(DARK, "23 and over"),
(None, None)
)
color = models.CharField(
default=None,
max_length=6,
choices=COLOR,
null=True
)
WARM = "BAS_WM"
NEUTRAL = "BAS_NT"
COOL = "BAS_CL"
SUBCOLOR = (
(WARM, "Warm Tone"),
(NEUTRAL, "Neutral Tone"),
(COOL, "Cool Tone"),
(None, None)
)
sub_color = models.CharField(
default=None,
max_length=6,
choices=SUBCOLOR,
null=True
)
color_hex = models.CharField(max_length=10)
optionName = models.CharField(max_length=30)
product = models.ForeignKey(
Base,
related_name='color',
on_delete=models.CASCADE
)
def __str__(self):
return str(self.product) + " " + self.optionName
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,642
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/serializers.py
|
""" TODO : DOCSTRING"""
from rest_framework import serializers
from .models import Lip, LipOption
class LipOptionSerializer(serializers.ModelSerializer):
""" TODO : DOCSTRING"""
class Meta:
""" TODO : DOCSTRING"""
model = LipOption
fields = ['color', 'sub_color', 'color_hex', 'optionName']
class LipSerializer(serializers.ModelSerializer):
""" TODO : DOCSTRING"""
brand = serializers.StringRelatedField()
color = serializers.SerializerMethodField('colors')
def colors(self, lip):
""" TODO : DOCSTRING"""
if self.context['sub'] is not None:
subcolor = self.context['sub']
lip_colors = []
for color in self.context['color']:
if len(subcolor[color]) != 0:
lip_colors += LipOption.objects.filter(product=lip).filter(
color=color
).filter(
sub_color__in=subcolor[color]
)
else:
lip_colors += LipOption.objects.filter(product=lip).filter(
color=color
)
elif self.context['color'] is not None:
lip_colors = LipOption.objects.filter(product=lip).filter(
color__in=self.context['color'])
else:
lip_colors = LipOption.objects.filter(product=lip)
serializer = LipOptionSerializer(instance=lip_colors, many=True)
return serializer.data
class Meta:
model = Lip
fields = [
'name',
'price',
'form',
'category',
'product_url',
'img_url',
'brand',
'color',
'id'
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,643
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/cheek/serializers.py
|
""" TODO : DOCSTRING"""
from rest_framework import serializers
from .models import Cheek, CheekOption
class CheekOptionSerializer(serializers.ModelSerializer):
""" TODO : DOCSTRING"""
class Meta:
""" TODO : DOCSTRING"""
model = CheekOption
fields = ['color', 'sub_color', 'color_hex', 'optionName']
class CheekSerializer(serializers.ModelSerializer):
""" TODO : DOCSTRING"""
brand = serializers.StringRelatedField()
color = serializers.SerializerMethodField('colors')
def colors(self, cheek):
""" TODO : DOCSTRING"""
if self.context is not None:
cheek_colors = CheekOption.objects.filter(product=cheek).filter(
color__in=self.context
)
serializer = CheekOptionSerializer(
instance=cheek_colors, many=True)
return serializer.data
cheek_colors = CheekOption.objects.filter(product=cheek)
if len(cheek_colors) == 0:
return []
serializer = CheekOptionSerializer(
instance=cheek_colors, many=True)
return serializer.data
class Meta:
model = Cheek
fields = [
'name',
'price',
'category',
'product_url',
'img_url',
'brand',
'color',
'id'
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,644
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/migrations/0006_auto_20191202_1618.py
|
# Generated by Django 2.2.6 on 2019-12-02 16:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0005_auto_20191130_1643'),
]
operations = [
migrations.AlterField(
model_name='base',
name='category',
field=models.CharField(choices=[('BAS_P', 'Powder'), ('BAS_CU', 'Cushion'), ('BAS_C', 'Concealer'), ('BAS_F', 'Foundation'), ('BAS_PR', 'Primer'), ('BAS_B', 'BB & CC')], max_length=6),
),
migrations.AlterField(
model_name='baseoption',
name='color',
field=models.CharField(choices=[('BAS_LT', 'under 21'), ('BAS_MD', '21'), ('BAS_DK', '23 and over'), (None, None)], default=None, max_length=6, null=True),
),
migrations.AlterField(
model_name='baseoption',
name='sub_color',
field=models.CharField(choices=[('BAS_WM', 'Warm Tone'), ('BAS_NT', 'Neutral Tone'), ('BAS_CL', 'Cool Tone'), (None, None)], default=None, max_length=6, null=True),
),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,645
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0002_auto_20191101_1303.py
|
# Generated by Django 2.2.6 on 2019-11-01 13:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('brand', '0001_initial'),
('lip', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='lip',
name='brand',
field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='brand.Brand'),
preserve_default=False,
),
migrations.AddField(
model_name='lip',
name='category',
field=models.CharField(choices=[('S', 'Stick'), ('G', 'Gloss'), ('B', 'Balm'), ('T', 'Tint')], default='T', max_length=1),
preserve_default=False,
),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,646
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/models.py
|
""" model for lip cosmetic information """
from django.db import models
from brand import models as brand_models
# Create your models here.
class Lip(models.Model):
""" django lip model """
STICK = 'LIP_S'
GLOSS = 'LIP_G'
BALM = 'LIP_B'
TINT = 'LIP_T'
CATEGORY = (
(STICK, 'Stick'),
(GLOSS, 'Gloss'),
(BALM, 'Balm'),
(TINT, 'Tint'),
)
MATTE = 'LIP_M'
GLOSSY = 'LIP_G'
NONE = 'LIP_N'
FORM = (
(MATTE, 'Matte'),
(GLOSSY, 'Glossy'),
(NONE, 'None')
)
name = models.CharField(max_length=20)
price = models.IntegerField()
brand = models.ForeignKey(
brand_models.Brand,
on_delete=models.CASCADE
)
form = models.CharField(
max_length=5,
choices=FORM,
default=NONE
)
category = models.CharField(
max_length=5,
choices=CATEGORY,
)
product_url = models.CharField(
max_length=255,
default="//:0",
)
img_url = models.CharField(
max_length=255,
default="//:0",
)
def __str__(self):
return self.name
class LipOption(models.Model):
""" option of django lip model """
RED = "LIP_RD"
PINK = "LIP_PK"
ORANGE = "LIP_OR"
PURPLE = "LIP_PU"
COLOR = (
(RED, "Red"),
(PINK, "Pink"),
(ORANGE, "Orange"),
(PURPLE, "Purple"),
)
color = models.CharField(
max_length=6,
choices=COLOR
)
sub_color = models.CharField(
max_length=30
)
color_hex = models.CharField(max_length=10)
optionName = models.CharField(max_length=30)
product = models.ForeignKey(
Lip,
related_name='color',
on_delete=models.CASCADE
)
def __str__(self):
return str(self.product) + " " + self.optionName
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,647
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/test_views_lip.py
|
""" test views """
from django.test import TestCase, Client
import json
from products.lip.models import Lip, LipOption
from brand.models import Brand
from products.lip.serializers import LipSerializer, LipOptionSerializer
class LipTestCase(TestCase):
def setUp(self):
self.client=Client()
self.brand = Brand.objects.create(name="brand1")
self.product1 = Lip.objects.create(
name="lip1", price=100, form="LIP_M", category="LIP_S", img_url="tmp_url1", brand=self.brand)
self.product2 = Lip.objects.create(
name="lip2", price=200, form="LIP_G", category="LIP_B", img_url="tmp_url2", brand=self.brand)
self.color1 = LipOption.objects.create(
color="LIP_PK", sub_color="color1", color_hex="hex1", optionName="option1", product=self.product1)
self.color3 = LipOption.objects.create(
color="LIP_OR", sub_color="color3", color_hex="hex3", optionName="option3", product=self.product2)
def test_bad_request(self):
# not-allowed request
response = self.client.put('/api/lip/tag')
self.assertEqual(response.status_code, 405)
def test_lip_search(self):
response = self.client.get('/api/lip/category=LIP_S')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(res[0]["name"], self.product1.name)
self.assertEqual(res[0]["category"], 'LIP_S')
response = self.client.get('/api/lip/form=LIP_M')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(res[0]["name"], self.product1.name)
self.assertEqual(res[0]["form"], 'LIP_M')
response = self.client.get('/api/lip/form=LIP_M&category=LIP_S')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(res[0]["name"], self.product1.name)
self.assertEqual(res[0]["category"], 'LIP_S')
response = self.client.get('/api/lip/form=LIP_M&color=LIP_PK')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(res[0]["color"][0]["color"], self.color1.color)
self.assertEqual(res[0]["form"], 'LIP_M')
response = self.client.get('/api/lip/color=LIP_RD&color=LIP_PK')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(len(res), 1)
response = self.client.get('/api/lip/color=LIP_PK&sub_color=color1_LIP_PK')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(len(res), 0)
response = self.client.get('/api/lip/color=LIP_RD&color=LIP_PK&sub_color=color3_LIP_OR')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(len(res), 1)
response = self.client.get('/api/lip/brand=brand1')
self.assertEqual(response.status_code, 200)
res = json.loads(response.content)
self.assertEqual(len(res), 2)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,648
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0008_lipoption_sub_color.py
|
# Generated by Django 2.2.6 on 2019-11-11 17:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lip', '0007_auto_20191111_1712'),
]
operations = [
migrations.AddField(
model_name='lipoption',
name='sub_color',
field=models.CharField(default=None, max_length=30,null=True),
),
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,649
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/spiders/mac_base.py
|
# -*- coding: utf-8 -*-
""" scrapy spider for aritaum webpage """
import random
import scrapy
from crawl.items import BaseProduct, BaseColor
from brand.models import Brand as Brand_db
from products.base.models import Base as Base_db
from .color_tag import cal_color_tag
class MacLipSpider(scrapy.Spider):
""" scrapy spider for mac lip product """
name = "mac-base"
def __init__(self):
scrapy.Spider.__init__(self)
self.brand = Brand_db.objects.filter(name="MAC")[0]
def start_requests(self):
urls = [{'category': 'BAS_P',
'link': 'https://www.maccosmetics.co.kr/products/13849/Products/Makeup/Face/Powder'},
{'category': 'BAS_F',
'link': 'https://www.maccosmetics.co.kr/products/13847/Products/Makeup/Face/Foundation'},
{'category': 'BAS_PR',
'link': 'https://www.maccosmetics.co.kr/products/14764/Products/Makeup/Face/Face-Primer'},
{'category': 'BAS_C',
'link': 'https://www.maccosmetics.co.kr/products/13844/Products/Makeup/Face/Concealer'}]
for url in urls:
yield scrapy.Request(
url=url["link"],
meta={"category": url["category"]},
callback=self.parse
)
@staticmethod
def parse_price(price):
""" return int -price """
return int(price[2:].replace(',', ''))
def parse(self, response):
product_name = response.css('h3.product__subline::text').getall()
price = response.css(
'header > div > div > span.product__price--standard::text').getall()
product_url = response.css(
'div.product__image-medium > a.product__image-medium-link::attr(href)').getall()
thumb_url = response.css(
'div.product__image-medium > a.product__image-medium-link > img.product__sku-image--rendered--medium::attr(src)').getall()
host = 'https://www.maccosmetics.co.kr'
category = response.meta['category']
for i, name in enumerate(product_name):
url = host + product_url[i]
int_price = self.parse_price(price[i])
yield BaseProduct(
name=product_name[i],
price=int_price,
brand=self.brand,
category=category,
img_url=host + thumb_url[i],
crawled="base",
product_url=host + product_url[i]
)
yield scrapy.Request(
url=url,
meta={'product': name},
callback=self.parse_color
)
@staticmethod
def parse_hex(hexa):
''' return parsed hex value '''
return hexa[11:18]
@staticmethod
def parse_subcolor(option):
''' return parsed option value '''
if option == 'num':
index = random.randint(0, 2)
color = ['BAS_LT', 'BAS_MD', 'BAS_DK']
res = color[index]
return res
if option <= 19:
return 'BAS_LT'
if option <= 22:
return 'BAS_MD'
if option > 22:
return 'BAS_DK'
return None
def parse_option(self, option, hexa):
''' return parsed option value '''
try:
option_num = int(option[2:4])
except ValueError:
option_num = 'num'
if option.startswith('NC'):
sub_color = self.parse_subcolor(option_num)
return ('BAS_CL', sub_color)
if option.startswith('NW'):
sub_color = self.parse_subcolor(option_num)
return ('BAS_WM', sub_color)
if option.startswith('N'):
sub_color = self.parse_subcolor(option_num)
return ('BAS_NT', sub_color)
sub_color = self.parse_subcolor(option_num)
color_tone = cal_color_tag("base", hexa)
return (color_tone, sub_color)
def parse_color(self, response):
''' yield scrapy color object '''
rgb = response.css(
'div.product-full__shade > div.product-full__shade-swatch::attr(style)').getall()
name = response.css(
'div.product-full__shade > div.product-full__shade-name::text').getall()
product_name = response.meta['product']
product = Base_db.objects.filter(name=product_name)[0]
for i, hexa in enumerate(rgb):
color_hex = self.parse_hex(hexa)
color_name = name[i]
color = self.parse_option(color_name, color_hex)
yield BaseColor(
color_hex=color_hex,
optionName=color_name,
color=color[1], # text under 19, 21, over23
sub_color=color[0],
product=product,
crawled="base_option"
)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,650
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/user/tests.py
|
""" TODO : DOCSTRING"""
# pylint: disable=unused-variable
# pylint: disable=unused-import
import json
from django.test import TestCase, Client
from django.contrib.auth.models import User
from .models import Profile
class ModelTestCase(TestCase):
""" TODO : DOCSTRING"""
def test_csrf(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.post('/api/signup/',
{'username': 'chris',
'password': 'chris'},
content_type='application/json')
# Request without csrf token returns 403 response
self.assertEqual(response.status_code, 400)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post('/api/signup/',
{'username': 'chris',
'password': 'chris'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 400) # Pass csrf protection
def test_csrf_notget(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post(
'/api/token/',
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 405) # Pass csrf protection
def test_signup(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.delete('/api/signup/',
{'username': 'chris',
'password': 'chris'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 405) # Pass csrf protection
def test_signup_get(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user_info = User.objects.create_user(
username='a', email='a@a.com', password='123')
client.login(username='a', password='123')
profile = Profile.objects.create(
user=user_info,
prefer_color='12345',
prefer_base='123',
prefer_brand='123')
response = client.get('/api/signup/')
self.assertEqual(response.status_code, 200) # Pass csrf protection
def test_signin_put(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user_info = User.objects.create_user(
username='a', email='a@a.com', password='123')
profile = Profile.objects.create(
user=user_info,
prefer_color='12345',
prefer_base='123',
prefer_brand='123')
client.login(username='a', password='123')
response = client.put('/api/signin/',
{'preferColor': '1',
'preferBase': '2',
'preferBrand': '3'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 200) # Pass csrf protection
def test_signin_put_wrongdata(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user_info = User.objects.create_user(
username='a', email='a@a.com', password='123')
profile = Profile.objects.create(
user=user_info,
prefer_color='12345',
prefer_base='123',
prefer_brand='123')
client.login(username='a', password='123')
response = client.put('/api/signin/',
{'nickName': 'a',
'preferBase': '2',
'preferBrand': '3'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 400) # Pass csrf protection
def test_signin_get(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user_info = User.objects.create_user(
username='a', email='a@a.com', password='123')
client.login(username='a', password='123')
response = client.get('/api/signin/')
self.assertEqual(response.status_code, 200) # Pass csrf protection
def test_signup_(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post('/api/signup/',
{'username': 'chris',
'email': '1',
'password': '2',
'nickname': '3'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 201) # Pass csrf protection
def test_signup_wrongrequest(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.delete('/api/signup/',
{'username': 'chris',
'email': '1',
'password': '2',
'nickname': '3'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 405) # Pass csrf protection
def test_signup_wrongdata(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post('/api/signup/',
{'nickname': '1',
'password': '2'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 400) # Pass csrf protection
def test_signup_wronguser(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user_info = User.objects.create_user(
username='1', email='1', password='2')
response = client.post('/api/signup/',
{'username': '1',
'email': '1',
'password': '2',
'nickname': '2'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 400) # Pass csrf protection
def test_signin1(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post('/api/signin/',
{'username': 'test',
'password': '12345'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 401) # Pass csrf protection
def test_signin_wrongdata(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post('/api/signin/',
{'password': '12345'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 400) # Pass csrf protection
def test_login(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user = User.objects.create_user(username='test', password='12345')
response = client.post('/api/signin/',
{'username': 'test',
'password': '12345'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 204)
def test_logout(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
user = User.objects.create_user(username='test', password='12345')
client.login(username='test', password='12345') # 가상 로그인
response = client.get('/api/signout/',
{'username': 'test',
'password': '12345'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 204)
def test_signin2(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.delete('/api/signin/',
{'username': 'chris',
'password': 'chris'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 405) # Pass csrf protection
def test_signout1(self):
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.get('/api/signout/',
{'username': 'chris',
'password': 'chris'},
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 401) # Pass csrf protection
def test_signout2(self): # FIXME : 요거 해결하야함
""" TODO : DOCSTRING"""
client = Client(enforce_csrf_checks=True)
response1 = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response1.cookies['csrftoken'].value
response2 = client.get(
'/api/signout/',
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response2.status_code, 401) # Pass csrf protection
def test_signout3(self):
""" TODO : DOCSTRING"""
client = Client(enㅌforce_csrf_checks=True)
response = client.get('/api/token/')
# Get csrf token from cookie
csrftoken = response.cookies['csrftoken'].value
response = client.post(
'/api/signout/',
content_type='application/json',
HTTP_X_CSRFTOKEN=csrftoken)
self.assertEqual(response.status_code, 405) # Pass csrf protection
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,651
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/user/views.py
|
"""VIEW MODEL"""
import json
from json import JSONDecodeError
from django.http import HttpResponse, HttpResponseNotAllowed, JsonResponse, HttpResponseBadRequest
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.models import User
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from django.db import IntegrityError
from .models import Profile
@csrf_exempt
def signup(request):
    """Create a new user account.

    GET  -> JSON list of Profile rows for the current user
            (empty list when the requester is anonymous).
    POST -> expects JSON {"username", "email", "password"}; creates the
            User and an empty Profile, logs the user in, returns 201.
            400 on malformed JSON, missing keys, or duplicate username.
    Other methods -> 405.
    """
    if request.method == 'GET':
        # NOTE(review): request.user may be AnonymousUser here; the filter
        # then matches nothing and [] is returned — confirm that contract.
        user_info = list(
            Profile.objects.filter(
                user=request.user).values())
        return JsonResponse(user_info, safe=False)
    if request.method == 'POST':
        try:
            req_data = json.loads(request.body.decode())
            username = req_data['username']
            email = req_data['email']
            password = req_data['password']
        except (KeyError, JSONDecodeError):
            return HttpResponseBadRequest()
        try:
            # IntegrityError signals a duplicate username.
            User.objects.create_user(
                username=username, email=email, password=password)
        except IntegrityError:
            return HttpResponseBadRequest()
        user = authenticate(request, username=username, password=password)
        login(request, user)
        Profile.objects.create(user=request.user)
        return HttpResponse(status=201)
    return HttpResponseNotAllowed(['GET', 'POST'])
def signin(request):  # Signin function
    # pylint: disable=too-many-return-statements
    """Authenticate a user and read or update their preferences.

    GET  -> JSON list with the current User row.
            NOTE(review): .values() serializes every column, including the
            password hash — consider restricting the returned fields.
    POST -> JSON {"username", "password"}; on success logs in and returns
            the user's email with status 204 (401 on bad credentials).
            NOTE(review): 204 responses are defined to carry no body, so
            clients may never see the email payload — confirm intent.
    PUT  -> JSON {"preferColor", "preferBase", "preferBrand"}; updates the
            requester's Profile and returns the new color with status 200.
    Other methods -> 405.
    """
    if request.method == 'GET':
        user_info = list(
            User.objects.filter(
                username=request.user.username).values())
        return JsonResponse(user_info, safe=False)
    if request.method == 'POST':
        try:
            req_data = json.loads(request.body.decode())
            username = req_data['username']
            password = req_data['password']
        except (KeyError, JSONDecodeError):
            return HttpResponseBadRequest()
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            response = user.email
            return HttpResponse(response, status=204)
        return HttpResponse(status=401)
    if request.method == 'PUT':
        # Assumes the requester is authenticated; an anonymous user would
        # raise Profile.DoesNotExist here — TODO confirm callers guard this.
        user_info = Profile.objects.get(user=request.user)
        try:
            req_data = json.loads(request.body.decode())
            edit_prefer_color = req_data['preferColor']
            edit_prefer_base = req_data['preferBase']
            edit_prefer_brand = req_data['preferBrand']
        except (KeyError, JSONDecodeError):
            return HttpResponseBadRequest()
        user_info.prefer_color = edit_prefer_color
        user_info.prefer_base = edit_prefer_base
        user_info.prefer_brand = edit_prefer_brand
        user_info.save()
        response = user_info.prefer_color
        return HttpResponse(response, status=200)
    return HttpResponseNotAllowed(['GET', 'POST', 'PUT'])
def signout(request):
    """Log the current user out.

    GET -> 204 when a session existed, 401 when nobody was logged in.
    Any other method -> 405.
    """
    if request.method != 'GET':
        return HttpResponseNotAllowed(['GET'])
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    logout(request)
    return HttpResponse(status=204)
@ensure_csrf_cookie
def token(request):
    """Issue a CSRF cookie.

    GET -> 204 (the ensure_csrf_cookie decorator sets the cookie).
    Any other method -> 405.
    """
    if request.method != 'GET':
        return HttpResponseNotAllowed(['GET'])
    return HttpResponse(status=204)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,652
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0006_auto_20191108_1920.py
|
# Generated by Django 2.2.6 on 2019-11-08 19:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add LipOption.color_hex and add 'Coral' to the color choices."""

    dependencies = [
        ('lip', '0005_lip_img_url'),
    ]
    operations = [
        migrations.AddField(
            model_name='lipoption',
            name='color_hex',
            field=models.CharField(default='#ffffff', max_length=10),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='lipoption',
            name='color',
            field=models.CharField(choices=[('RD', 'Red'), ('CR', 'Coral'), ('PK', 'Pink'), ('OR', 'Orange')], default='RD', max_length=2),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,653
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0019_ml_output_image.py
|
# Generated by Django 2.2.6 on 2019-12-04 11:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable output_image ImageField to the Ml model."""

    dependencies = [
        ('ml', '0018_ml_product'),
    ]
    operations = [
        migrations.AddField(
            model_name='ml',
            name='output_image',
            field=models.ImageField(null=True, upload_to=''),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,654
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/migrations/0002_auto_20191116_1735.py
|
# Generated by Django 2.2.6 on 2019-11-16 17:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relabel BaseOption.color choices to shade-range descriptions."""

    dependencies = [
        ('base', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='baseoption',
            name='color',
            field=models.CharField(choices=[('LT', 'under 21'), ('MD', '21 ~ 23'), ('DK', 'over 23'), ('NO', 'Other type')], max_length=2),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,655
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/apps.py
|
""" ML apps.py """
from django.apps import AppConfig
class MlConfig(AppConfig):
    """Django app configuration for the ml (machine-learning) app."""
    name = 'ml'
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,656
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0017_auto_20191126_1355.py
|
# Generated by Django 2.2.6 on 2019-11-26 13:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change Ml.base to a CharField with default 'NONE'."""

    dependencies = [
        ('ml', '0016_auto_20191126_1350'),
    ]
    operations = [
        migrations.AlterField(
            model_name='ml',
            name='base',
            field=models.CharField(default='NONE', max_length=100),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,657
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/views.py
|
""" TODO : DOCSTRING"""
from urllib.parse import urlparse, parse_qs
from django.http import JsonResponse, HttpResponseNotAllowed
from .models import Base, BaseOption
from .serializers import BaseSerializer
def search(request, option):
    """Filter Base products by querystring options.

    GET /base/<option> where *option* is an urlencoded querystring with
    optional, repeatable keys: category, color, subcolor, brand.
    Returns the matching products serialized as JSON; 405 for non-GET.
    """
    if request.method != "GET":
        return HttpResponseNotAllowed(['GET'])
    url = urlparse('?' + option)
    query = parse_qs(url.query)
    if 'category' in query:
        base = Base.objects.filter(category__in=query['category'])
    else:
        base = Base.objects.all()
    # parse_qs never produces empty lists, so .get() is None iff absent.
    color_option = query.get('color')
    subcolor_option = query.get('subcolor')
    option_filters = {}
    if color_option is not None:
        option_filters['color__in'] = color_option
    if subcolor_option is not None:
        option_filters['sub_color__in'] = subcolor_option
    if option_filters:
        # Previously three near-identical branches (color+subcolor,
        # color only, subcolor only); unified into one helper call.
        base = _exclude_without_option(base, option_filters)
    if 'brand' in query:
        base = base.filter(brand__name__in=query['brand'])
    baseserializer = BaseSerializer(
        base, many=True, context={
            'color': color_option, 'sub_color': subcolor_option})
    return JsonResponse(baseserializer.data, safe=False)


def _exclude_without_option(base, option_filters):
    """Drop products lacking a BaseOption row matching *option_filters*.

    Mirrors the original behaviour: products with NULL color are excluded
    first, then each remaining product must have at least one matching
    option row (exclusion is by name, as before). Uses .exists() instead
    of len() to avoid materialising the option querysets.
    """
    base = base.exclude(
        color__isnull=True
    )
    for product in base:
        if not BaseOption.objects.filter(
                product=product, **option_filters).exists():
            base = base.exclude(name=product.name)
    return base
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,658
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0002_ml_user_id.py
|
# Generated by Django 2.2.7 on 2019-11-23 17:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a user_id CharField (default 'NONE') to the Ml model."""

    dependencies = [
        ('ml', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='ml',
            name='user_id',
            field=models.CharField(default='NONE', max_length=30),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,659
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0012_auto_20191126_1917.py
|
# Generated by Django 2.2.6 on 2019-11-26 19:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Lip.product_url and give Lip.img_url the '//:0' placeholder default."""

    dependencies = [
        ('lip', '0011_auto_20191126_1807'),
    ]
    operations = [
        migrations.AddField(
            model_name='lip',
            name='product_url',
            field=models.CharField(default='//:0', max_length=255),
        ),
        migrations.AlterField(
            model_name='lip',
            name='img_url',
            field=models.CharField(default='//:0', max_length=255),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,660
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0007_auto_20191111_1712.py
|
# Generated by Django 2.2.6 on 2019-11-11 17:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Lip.form (Matte/Glossy/None) and drop 'Coral' from LipOption.color choices."""

    dependencies = [
        ('lip', '0006_auto_20191108_1920'),
    ]
    operations = [
        migrations.AddField(
            model_name='lip',
            name='form',
            field=models.CharField(choices=[('M', 'Matte'), ('G', 'Glossy'), ('N', 'None')], default='N', max_length=1),
        ),
        migrations.AlterField(
            model_name='lipoption',
            name='color',
            field=models.CharField(choices=[('RD', 'Red'), ('PK', 'Pink'), ('OR', 'Orange')], max_length=2),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,661
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/user/models.py
|
"""MODELS"""
from django.db import models
from django.db.models.signals import post_save
from django.conf import settings
from django.dispatch import receiver
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
class Profile(models.Model):
    """Per-user cosmetics preferences, linked one-to-one to the auth User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Preferred color code — presumably values like 'RD'/'PK'; TODO confirm.
    prefer_color = models.TextField(max_length=10, null=True, blank=True)
    prefer_base = models.TextField(null=True, blank=True)
    prefer_brand = models.TextField(null=True)

    def __str__(self):
        return self.user.username
# Receive the post_save signal and create an auth token for new users.
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(instance=None, created=False, **kwargs):
    # pylint: disable=unused-argument
    """Create a DRF Token for every newly created user."""
    if not created:
        return
    Token.objects.create(user=instance)
# Create your models here.
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,662
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/brand/models.py
|
""" model for brands information """
from django.db import models
class Brand(models.Model):
    """ django brand model store name, url for sale calender """
    name = models.CharField(max_length=20)
    # '//:0' is the project-wide placeholder for "no URL".
    url = models.TextField(default="//:0")
    # Korean display name, used as the string representation below.
    name_ko = models.CharField(max_length=20)

    def __str__(self):
        return self.name_ko
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,663
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/cheek/migrations/0002_auto_20191202_1618.py
|
# Generated by Django 2.2.6 on 2019-12-02 16:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Prefix cheek category/color choice codes with 'CHK_'."""

    dependencies = [
        ('cheek', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='cheek',
            name='category',
            field=models.CharField(choices=[('CHK_B', 'Blusher'), ('CHK_C', 'Contouring'), ('CHK_H', 'Highlighter')], max_length=5),
        ),
        migrations.AlterField(
            model_name='cheekoption',
            name='color',
            field=models.CharField(choices=[('CHK_RD', 'Red'), ('CHK_PK', 'Pink'), ('CHK_OR', 'Orange')], max_length=6),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,664
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/cheek/views.py
|
""" CHEEK VIEWS """
from urllib.parse import urlparse, parse_qs
from django.http import JsonResponse, HttpResponseNotAllowed
from .models import Cheek, CheekOption
from .serializers import CheekSerializer
def search(request, option):
    """Filter Cheek products by querystring options.

    GET /cheek/<option> with optional, repeatable keys category, color,
    brand. A color filter applies only to blushers (category CHK_B);
    any other requested categories are appended unfiltered. 405 otherwise.

    Fix: the original called query['category'].remove('CHK_B')
    unconditionally, raising an unhandled ValueError (HTTP 500) whenever
    a color filter was supplied without CHK_B among the categories.
    """
    if request.method != "GET":
        return HttpResponseNotAllowed(['GET'])
    url = urlparse('?' + option)
    query = parse_qs(url.query)
    if 'category' not in query:
        cheek = Cheek.objects.all()
        if 'brand' in query:
            cheek = cheek.filter(brand__name__in=query['brand'])
        cheekserializer = CheekSerializer(cheek, many=True, context=None)
        return JsonResponse(cheekserializer.data, safe=False)
    if 'color' in query and 'CHK_B' in query['category']:
        # Blushers are the only cheek products with color options:
        # filter them by color, then append the other categories as-is.
        color_option = query['color']
        cheek = Cheek.objects.exclude(
            color__isnull=True
        )  # only Blusher
        for product in cheek:
            has_option = CheekOption.objects.filter(
                product=product, color__in=color_option
            ).exists()
            if not has_option:
                cheek = cheek.exclude(name=product.name)
        data1 = CheekSerializer(cheek, many=True, context=color_option)
        remaining = [c for c in query['category'] if c != 'CHK_B']
        cheek = Cheek.objects.filter(
            category__in=remaining
        )
        data2 = CheekSerializer(cheek, many=True, context=None)
        return JsonResponse(data1.data + data2.data, safe=False)
    cheek = Cheek.objects.filter(
        category__in=query['category']
    )
    cheekserializer = CheekSerializer(cheek, many=True, context=None)
    return JsonResponse(cheekserializer.data, safe=False)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,665
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/urls.py
|
"""products URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from .lip import views as lip
from .base import views as base
from .cheek import views as cheek
urlpatterns = [
    # Each route forwards the free-form <option> query-string segment to
    # the matching product-category search view.
    path('lip/<option>', lip.search, name='lip'),
    path('base/<option>', base.search, name='base'),
    path('cheek/<option>', cheek.search, name='cheek')
]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,666
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/test_items.py
|
import unittest
import os
import django
from crawl.items import LipProduct, Brand
from scrapy_djangoitem import DjangoItem
# Configure Django settings before any model-backed item is touched;
# DjangoItem classes resolve their models at import time.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")
django.setup()
class ItemTest(unittest.TestCase):
    """Verify that each scrapy DjangoItem exposes the expected field set."""

    def assert_sorted_equal(self, first, second, msg=None):
        """Assert two iterables hold the same elements, ignoring order."""
        return self.assertEqual(sorted(first), sorted(second), msg)

    def test_lip_product(self):
        """LipProduct must carry every Lip model field plus 'crawled'."""
        item = LipProduct()
        expected = ['name', 'price', 'brand', 'category',
                    'product_url', 'img_url', 'crawled', 'form']
        self.assert_sorted_equal(item.fields.keys(), expected)

    def test_brand_product(self):
        """Brand must carry every Brand model field plus 'crawled'."""
        item = Brand()
        expected = ['crawled', 'name', 'url', 'name_ko']
        self.assert_sorted_equal(item.fields.keys(), expected)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,667
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/spiders/aritaum_cheek.py
|
# -*- coding: utf-8 -*-
""" scrapy spider for aritaum webpage """
import scrapy
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import requests
import webcolors
from PIL import Image
from crawl.items import CheekProduct, CheekColor, Brand
from brand.models import Brand as Brand_db
from products.cheek.models import Cheek as Cheek_db
from .spider_helper import translate_category
from .color_tag import cal_color_tag
class AritaumSpider(scrapy.Spider):
    """Selenium-backed spider that crawls cheek products from aritaum.com.

    The listing page is JavaScript-rendered, so a headless Chrome driver
    performs the navigation/pagination while scrapy Requests shuttle the
    scraped elements between parse stages via ``meta``.
    """
    name = 'aritaum-cheek'

    def __init__(self):
        scrapy.Spider.__init__(self)
        options = webdriver.ChromeOptions()
        options.add_argument('headless')
        # NOTE(review): `chrome_options=` is deprecated in newer selenium
        # releases (use `options=`); kept as-is for the pinned version.
        self.browser = webdriver.Chrome('chromedriver', chrome_options=options)
        print('init')

    def start_requests(self):
        """Open the cheek listing page in selenium, then kick off parsing."""
        link = 'link'
        idx = 'category'
        urls = [
            {idx: "cheek",
             link: "https://www.aritaum.com/shop/pr/shop_pr_product_list.do?i_sCategorycd1=CTGA2000&i_sCategorycd2=CTGA2400"
             },
        ]
        for url in urls:
            self.browser.get(url[link])
            yield scrapy.Request(url=url[link], meta={idx: url[idx]}, callback=self.parse)

    def parse(self, response):
        """Collect brand and product elements from the rendered page."""
        driver = self.browser
        brand_list = driver.find_elements_by_name("tagging_brandNm")
        product_list = driver.find_elements_by_xpath(
            "//*[@id='ul_prod_list']/li")
        brand_name = driver.find_elements_by_css_selector("span.goods-brand")
        yield scrapy.Request(
            url=response.url,
            meta={"brand": brand_list,
                  "brand_name": brand_name,
                  "product": product_list,
                  "category": response.meta["category"],
                  },
            callback=self.parse_brand,
            dont_filter=True)

    def parse_brand(self, response):
        """Yield a Brand item per listed brand, then hand off to products."""
        brand_list = response.meta["brand"]
        brand_name_ko = response.meta["brand_name"]
        # pylint: disable=unused-variable
        for i, item in enumerate(brand_list):
            # Korean display name and English value attribute are scraped
            # from two parallel element lists; positions align.
            name_ko = brand_name_ko[i].text
            name_en = brand_list[i].get_property("value")
            yield Brand(name=name_en, name_ko=name_ko, crawled="brand")
        yield scrapy.Request(
            url=response.url,
            meta={
                "product": response.meta["product"],
                "category": response.meta["category"]
            },
            callback=self.parse_product,
            dont_filter=True)

    def parse_product(self, response):
        """Yield a CheekProduct per listing entry, plus shade colors for blush."""
        # pylint: disable=too-many-locals
        for item in response.meta["product"]:
            product_name = item.find_element_by_name(
                "tagging_productNm").get_property("value")
            print(product_name)
            brand_name = item.find_element_by_name(
                "tagging_brandNm").get_property("value")
            price = item.find_element_by_name(
                "tagging_price").get_property("value")
            product_id = item.find_element_by_name(
                "i_sProductcd").get_property("value")
            product_url = "https://www.aritaum.com/shop/pr/shop_pr_product_view.do?i_sProductcd=" + product_id
            category_raw = item.find_element_by_name(
                "tagging_category").get_property("value")
            category = translate_category(category_raw, "cheek")
            if category == -1:
                # Bug fix: this guard was a no-op `pass`, so products with
                # an untranslatable category were stored with category=-1.
                # Skip them instead.
                continue
            brand = Brand_db.objects.filter(name=brand_name)
            thumb_url = item.find_element_by_css_selector(
                "div.product-thumb img").get_property("src")
            yield CheekProduct(
                name=product_name,
                price=price,
                brand=brand[0],
                category=category,
                img_url=thumb_url,
                crawled="cheek",
                product_url=product_url
            )
            if category == "CHK_B":
                # Blush entries carry per-shade swatches: either an <img>
                # swatch (download and average its pixels) or an inline
                # CSS background-color.
                color_range = item.find_elements_by_class_name(
                    "product-unit__scroller-item")
                for color in color_range:
                    color_name = color.find_element_by_tag_name(
                        "label").get_attribute("data-tooltip")
                    if color_name is None:
                        continue
                    try:
                        color_url = color.find_element_by_tag_name(
                            "img").get_property("src")
                        yield scrapy.Request(
                            url=color_url,
                            meta={
                                "product": product_name,
                                "color": color_name
                            },
                            callback=self.save_color_by_url,
                            dont_filter=True
                        )
                    except NoSuchElementException:
                        # No swatch image: read the rendered background color.
                        color_rgb = color.find_element_by_xpath(
                            "./span/label/span").value_of_css_property("background-color")
                        product = Cheek_db.objects.filter(name=product_name)[0]
                        color_hex = self.save_color_by_rgb(color_rgb)
                        color_tuple = cal_color_tag("cheek", color_hex)
                        yield CheekColor(
                            color_hex=color_hex,
                            color=color_tuple[0],
                            sub_color=color_tuple[1],
                            optionName=color_name,
                            product=product,
                            crawled="cheek_option"
                        )
        # All products on this page handled: advance pagination.
        yield scrapy.Request(
            url=response.url,
            meta={"category": response.meta["category"]},
            callback=self.go_next,
            dont_filter=True
        )

    def save_color_by_url(self, response):
        """Average a swatch image's pixels and yield a CheekColor item."""
        url = response.url
        name = response.meta["product"]
        color_name = response.meta["color"]
        img = Image.open(requests.get(url, stream=True).raw)
        img = img.resize((30, 30))
        color_hex = self.getcolors(img, url)
        color_tuple = cal_color_tag("cheek", color_hex)
        product = Cheek_db.objects.filter(name=name)[0]
        yield CheekColor(
            color_hex=color_hex,
            color=color_tuple[0],
            sub_color=color_tuple[1],
            optionName=color_name,
            product=product,
            crawled="cheek_option"
        )

    @staticmethod
    def save_color_by_rgb(rgba):
        """Convert a CSS 'rgb(r, g, b)' / 'rgba(...)' string to a hex color."""
        r_1 = rgba.split("(")[1]
        r_2 = r_1.split(")")[0]
        colors = r_2.split(", ")
        rgb = (int(colors[0]), int(colors[1]), int(colors[2]))
        color_hex = webcolors.rgb_to_hex(rgb)
        return color_hex

    @staticmethod
    def getcolors(img, url):
        """Return the hex of the average non-background color of *img*.

        Pixels whose channel mean is >= 250 are treated as white
        background and excluded from the average.
        """
        # pylint: disable=too-many-locals
        width, height = img.size
        colors = img.getcolors(width * height)
        r_total = 0
        g_total = 0
        b_total = 0
        count = 0
        for pixel in colors:
            red, green, blue = pixel[1]
            if (red + green + blue) / 3 >= 250:
                count += pixel[0]
                continue
            r_total += pixel[0] * red
            g_total += pixel[0] * green
            b_total += pixel[0] * blue
        try:
            r_sum = int(r_total / (width * height - count))
            g_sum = int(g_total / (width * height - count))
            b_sum = int(b_total / (width * height - count))
            return webcolors.rgb_to_hex((r_sum, g_sum, b_sum))
        except ZeroDivisionError:
            # NOTE(review): an all-background image falls through and
            # returns None, which later crashes cal_color_tag — consider
            # a sentinel hex here. Preserved as best-effort behavior.
            print(url)

    def go_next(self, response):
        """Click the pagination control and re-parse the next page."""
        driver = self.browser
        try:
            # Next page link inside the current pagination block.
            click_next = driver.find_element_by_css_selector(
                "a.page-nav__link.is-current + a.page-nav__link")
            click_next.click()
            driver.implicitly_wait(10)
            yield scrapy.Request(
                url=response.url,
                meta={"category": response.meta["category"]},
                callback=self.parse,
                dont_filter=True
            )
        except NoSuchElementException:
            try:
                # End of the block: jump via the "next" arrow instead.
                click_next_page = driver.find_element_by_css_selector(
                    "a.page-nav__link.page-nav__link--next")
                click_next_page.click()
                driver.implicitly_wait(5)
                yield scrapy.Request(
                    url=response.url,
                    meta={"category": response.meta["category"]},
                    callback=self.parse,
                    dont_filter=True
                )
            except NoSuchElementException:
                # Last page: stop pagination.
                pass

    def spider_closed(self):
        """Close the selenium browser window once the spider finishes."""
        # NOTE(review): close() only closes the window; quit() would also
        # terminate the driver process — confirm intended.
        self.browser.close()
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,668
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/spiders/mac_cheek.py
|
# -*- coding: utf-8 -*-
""" scrapy spider for the MAC cosmetics webpage """
import scrapy
from crawl.items import CheekProduct, CheekColor
from brand.models import Brand as Brand_db
from products.cheek.models import Cheek as Cheek_db
from .color_tag import cal_color_tag
class MacLipSpider(scrapy.Spider):
    """ scrapy spider for mac lip product """
    # NOTE(review): despite the class name, this spider crawls *cheek*
    # products (see `name` below) — presumably copied from the lip spider.
    name = "mac-cheek"

    def __init__(self):
        scrapy.Spider.__init__(self)
        # Every MAC product shares one Brand row; fetch it once up front.
        self.brand = Brand_db.objects.filter(name="MAC")[0]

    def start_requests(self):
        # Two cheek sub-categories: blush (CHK_B) and
        # highlighting/contouring (CHK_H).
        urls = [{
            'category': 'CHK_B',
            'link': 'https://www.maccosmetics.co.kr/products/13842/Products/Makeup/Face/Blush'
        }, {
            'category': 'CHK_H',
            'link': 'https://www.maccosmetics.co.kr/products/20158/products/makeup/face/highlighting-contouring'
        }]
        for url in urls:
            yield scrapy.Request(
                url=url["link"],
                meta={"category": url["category"]},
                callback=self.parse
            )

    @staticmethod
    def parse_price(price):
        """ return int -price """
        # Drops the leading currency marker ("₩ ") and thousands commas.
        return int(price[2:].replace(',', ''))

    def parse(self, response):
        # Scrape name/price/url/image as four parallel lists; entries at
        # the same index describe the same product.
        product_name = response.css('h3.product__subline::text').getall()
        price = response.css(
            'header > div > div > span.product__price--standard::text').getall()
        product_url = response.css(
            'div.product__image-medium > a.product__image-medium-link::attr(href)').getall()
        thumb_url = response.css(
            'div.product__image-medium > a.product__image-medium-link > img.product__sku-image--rendered--medium::attr(src)').getall()
        host = 'https://www.maccosmetics.co.kr'
        category = response.meta['category']
        # HACK: pad/patch the scraped lists with hard-coded values —
        # presumably the first entries are missing from the rendered page
        # and one price is wrong. Position-dependent and brittle; breaks
        # if the site reorders its listing — TODO confirm still needed.
        thumb_url = [
            '//:0',
            '/media/export/cms/products/280x320/mac_sku_SGY801_280x320_1.jpg'] + thumb_url
        price = ['₩ 40,000'] + price
        price[4] = '₩ 24,000'
        for i, name in enumerate(product_name):
            url = host + product_url[i]
            int_price = self.parse_price(price[i])
            yield CheekProduct(
                name=product_name[i],
                price=int_price,
                brand=self.brand,
                category=category,
                img_url=host + thumb_url[i],
                crawled="cheek",
                product_url=host + product_url[i]
            )
            if category == "CHK_B":
                # Only blush pages list per-shade swatches worth parsing.
                yield scrapy.Request(
                    url=url,
                    meta={'product': name},
                    callback=self.parse_color
                )

    @staticmethod
    def parse_hex(hexa):
        ''' return parsed hex value '''
        # The style attribute embeds the color as e.g.
        # "background: #rrggbb;" — slice out the 7-char hex token.
        return hexa[11:18]

    def parse_color(self, response):
        ''' yield scrapy color object '''
        rgb = response.css(
            'div.product-full__shade > div.product-full__shade-swatch::attr(style)').getall()
        name = response.css(
            'div.product-full__shade > div.product-full__shade-name::text').getall()
        product_name = response.meta['product']
        product = Cheek_db.objects.filter(name=product_name)[0]
        for i, hexa in enumerate(rgb):
            color_hex = self.parse_hex(hexa)
            color_name = name[i]
            # Map the raw hex to the project's (color, sub_color) tags.
            color_tuple = cal_color_tag("cheek", color_hex)
            yield CheekColor(
                color_hex=color_hex,
                color=color_tuple[0],
                sub_color=color_tuple[1],
                optionName=color_name,
                product=product,
                crawled="cheek_option"
            )
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,669
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/items.py
|
"""Define Scrapy Items"""
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy_djangoitem import DjangoItem
from brand.models import Brand as BrandModel
from products.lip.models import Lip, LipOption
from products.cheek.models import Cheek, CheekOption
from products.base.models import Base, BaseOption
#pylint: disable=ungrouped-imports
class CheekProduct(DjangoItem):
    """ Cheek Scrapy Item"""
    django_model = Cheek
    # Extra non-model field; presumably used by the pipeline to route
    # items to the right handler — verify against pipelines.py.
    crawled = scrapy.Field()
class CheekColor(DjangoItem):
    """CheekColor Scrapy Item"""
    django_model = CheekOption
    crawled = scrapy.Field()  # non-model routing tag, as in the other items
class BaseProduct(DjangoItem):
    """BaseProduct Scrapy Item"""
    django_model = Base
    crawled = scrapy.Field()  # non-model routing tag, as in the other items
class BaseColor(DjangoItem):
    """BaseColor Scrapy Item"""
    django_model = BaseOption
    crawled = scrapy.Field()  # non-model routing tag, as in the other items
class LipColor(DjangoItem):
    """LipColor Scrapy Item"""
    django_model = LipOption
    crawled = scrapy.Field()  # non-model routing tag, as in the other items
class LipProduct(DjangoItem):
    """LipProduct Scrapy Item"""
    django_model = Lip
    crawled = scrapy.Field()  # non-model routing tag, as in the other items
class Brand(DjangoItem):
    """Brand Scrapy Item"""
    django_model = BrandModel
    crawled = scrapy.Field()  # non-model routing tag, as in the other items
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,670
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0009_auto_20191117_2206.py
|
# Generated by Django 2.2.6 on 2019-11-17 22:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: give LipOption.product a reverse name of 'color'."""
    dependencies = [
        ('lip', '0008_lipoption_sub_color'),
    ]
    operations = [
        migrations.AlterField(
            model_name='lipoption',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='color', to='lip.Lip'),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,671
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/admin.py
|
""" admin """
from django.contrib import admin
# Register your models here.
from .models import ML
admin.site.register(ML)  # expose ML records in the Django admin site
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,672
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/serializers.py
|
""" file serializer """
from rest_framework import serializers
from .models import ML
class FileSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing every field of an ML record."""
    class Meta:
        """Bind the serializer to the ML model with all fields included."""
        model = ML
        fields = "__all__"
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,673
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/spiders/color_tag.py
|
""" finds closest sub_color tag """
import webcolors
from colormath.color_objects import sRGBColor, LabColor
from colormath.color_conversions import convert_color
from colormath.color_diff import delta_e_cie2000
# Reference palettes of RGB anchors. Ordering matters: cal_similar_color
# returns an index into a concatenation of these lists, and the min_*_len
# helpers map index groups (blocks of 10 for lip/cheek hues, blocks of 4
# for skin tones) back to a color tag.
# PINK COLOR
PINK_LIST = [(255, 202, 226), (236, 199, 205), (252, 174, 187), (255, 141, 161), (251, 132, 186),
             (241, 106, 183), (224, 98, 135), (247, 74, 131), (225, 0, 152), (236, 0, 95)]
# RED COLOR
RED_LIST = [(255, 120, 117), (255, 99, 113), (255, 88, 93), (224, 62, 82), (239, 51, 64),
            (184, 58, 75), (172, 20, 90), (153, 0, 0), (138, 21, 56), (111, 38, 61)]
# ORANGE COLOR
ORANGE_LIST = [(255, 189, 167), (255, 186, 179), (255, 141, 109), (255, 126, 113), (255, 139, 83),
               (255, 153, 85), (255, 135, 15), (248, 102, 39), (254, 80, 0), (225, 71, 26)]
# PURPLE COLOR
PURPLE_LIST = [(218, 207, 221), (197, 180, 227), (139, 132, 215), (215, 159, 191), (185, 87, 143),
               (187, 41, 187), (100, 48, 122), (129, 54, 97), (101, 49, 101), (63, 42, 86)]
# LIGHT_WARM_SKIN
LW_LIST = [(252, 237, 218), (252, 233, 200), (243, 212, 185), (233, 192, 155)]
# LIGHT_COOL_SKIN
LC_LIST = [(251, 214, 184), (251, 210, 192), (251, 189, 168), (239, 161, 149)]
# MEDIUM_SKIN
LM_LIST = [(231, 208, 145), (215, 179, 125), (201, 149, 99), (179, 120, 85)]
def two_point_length_by_cie(tuple1, tuple2):
    """Return the CIEDE2000 color difference between two RGB triples.

    Args:
        tuple1, tuple2: (r, g, b) tuples.

    Returns:
        float: delta-E (CIEDE2000) between the two colors.
    """
    # Bug fix: the blue channel was previously built from index 1
    # (green) for both colors, so the distance ignored blue entirely.
    color1_rgb = sRGBColor(tuple1[0], tuple1[1], tuple1[2])
    color2_rgb = sRGBColor(tuple2[0], tuple2[1], tuple2[2])
    color1_lab = convert_color(color1_rgb, LabColor)
    color2_lab = convert_color(color2_rgb, LabColor)
    delta_e = delta_e_cie2000(color1_lab, color2_lab)
    return delta_e
def two_point_length_by_euc(tuple1, tuple2):
    """Return the Euclidean distance between two RGB triples."""
    squared_sum = sum(pow(a - b, 2) for a, b in zip(tuple1, tuple2))
    return pow(squared_sum, 0.5)
def cal_similar_color(color_list, hexa):
    """Return the index in *color_list* of the color closest to *hexa*.

    Candidates are ranked by the sum of their CIEDE2000 rank and their
    Euclidean rank; ties are broken by the smaller raw CIEDE2000 distance.
    *hexa* is an (r, g, b) tuple despite the name.
    """
    len_cie = []
    len_euc = []
    tar_color = -1
    for idx, color in enumerate(color_list):
        len_cie.append((idx, two_point_length_by_cie(hexa, color)))
        len_euc.append((idx, two_point_length_by_euc(hexa, color)))
    # Sort both metric lists by ascending distance.
    len_cie.sort(key=lambda x: x[1])
    len_euc.sort(key=lambda x: x[1])
    len_euc = [x[0] for x in len_euc]  # Euclidean ranking of indices
    rank = [0] * len(color_list)
    for idx, result in enumerate(len_cie):
        # Combined rank = CIE rank (idx) + Euclidean rank per candidate.
        rank[result[0]] = idx
        rank[result[0]] += len_euc.index(result[0])
    if rank.count(min(rank)) == 1:
        tar_color = rank.index(min(rank))
    else:
        # Tie on combined rank: fall back to raw CIEDE2000 distance.
        mins = [i for i, x in enumerate(rank) if x == min(rank)]
        mins_len = [two_point_length_by_cie(hexa, color_list[i]) for i in mins]
        tar_color = mins[mins_len.index(min(mins_len))]
    return tar_color
def min_lip_len(hexa):
    """Return the (color tag, hex) pair closest to *hexa* for lip shades."""
    # Hue sub-color palettes, concatenated in tag order: blocks of 10.
    color_total_list = PINK_LIST + RED_LIST + ORANGE_LIST + PURPLE_LIST
    tar_color = cal_similar_color(color_total_list, hexa)
    tags = ('LIP_PK', 'LIP_RD', 'LIP_OR', 'LIP_PU')
    group = int(tar_color / 10)
    if 0 <= group < len(tags):
        return (tags[group], webcolors.rgb_to_hex(color_total_list[tar_color]))
    return (None, None)
def min_cheek_len(hexa):
    """Return the (color tag, hex) pair closest to *hexa* for cheek shades."""
    color_total_list = PINK_LIST + RED_LIST + ORANGE_LIST
    tar_color = cal_similar_color(color_total_list, hexa)
    tags = ('CHK_PK', 'CHK_RD', 'CHK_OR')
    group = int(tar_color / 10)
    if 0 <= group < len(tags):
        return (tags[group], webcolors.rgb_to_hex(color_total_list[tar_color]))
    return (None, None)
def min_base_len(hexa):
    """Return the (tone tag, hex) pair closest to *hexa* for base products.

    Bug fixes (was flagged `TODO : FIX`):
    - The old code compared the *true* division ``tar_color / 4`` against
      0/1/2, which only ever matched indices 0, 4 and 8; every other
      index fell through to the failure return. Integer-divide instead.
    - The success path returned a bare tag string while the sibling
      min_lip_len / min_cheek_len return ``(tag, hex)`` tuples, which is
      what callers index as ``color_tuple[0]`` / ``color_tuple[1]``.
    """
    face_total_list = LW_LIST + LC_LIST + LM_LIST
    tar_color = cal_similar_color(face_total_list, hexa)
    tags = ('BAS_WM', 'BAS_CL', 'BAS_NT')
    group = tar_color // 4  # each skin-tone list contributes 4 anchors
    if 0 <= group < len(tags):
        return (tags[group], webcolors.rgb_to_hex(face_total_list[tar_color]))
    return (None, None)
def cal_color_tag(title, hexa):
    """Return the closest sub_color match for *hexa* in category *title*.

    *hexa* is a '#rrggbb' string; unknown titles yield -1.
    """
    item_rgb = webcolors.hex_to_rgb(hexa)
    finders = {
        'lip': min_lip_len,
        'cheek': min_cheek_len,
        'base': min_base_len,
    }
    finder = finders.get(title)
    if finder is None:
        return -1
    return finder(item_rgb)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,674
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/views.py
|
""" TODO : DOCSTRING"""
from urllib.parse import urlparse, parse_qs
from django.http import JsonResponse, HttpResponseNotAllowed
from .models import Lip, LipOption
from .serializers import LipSerializer
def search(request, option):
    """GET handler filtering Lip products by a query-string *option*.

    *option* is the raw query string from the URL path (e.g.
    "category=LIP_S&color=LIP_RD"); supported keys are category, form,
    brand, color and sub_color. Returns the filtered products as JSON.
    """
    # pylint: disable=too-many-branches, too-many-nested-blocks
    if request.method == "GET":
        # Re-parse the path segment as a query string.
        url = urlparse('?' + option)
        query = parse_qs(url.query)
        if 'category' not in query and 'form' not in query:
            lip = Lip.objects.all()
        else:
            if 'category' in query and 'form' in query:
                lip = Lip.objects.filter(
                    category__in=query['category']).filter(
                        form__in=query['form'])
            elif 'category' in query:
                lip = Lip.objects.filter(category__in=query['category'])
            else:
                lip = Lip.objects.filter(form__in=query['form'])
        if 'brand' in query:
            lip = lip.filter(brand__name__in=query['brand'])
        subcolor = None
        try:
            color_option = query['color']  # raises KeyError if absent
            # Drop products that have no color options at all.
            lip = lip.exclude(
                color__isnull=True
            )
            if 'sub_color' in query:
                # sub_color values look like "<hex>_<color tag>", e.g.
                # "ff0000_LIP_RD"; bucket the '#hex' strings per tag.
                subcolor = {
                    'LIP_RD': [],
                    'LIP_PU': [],
                    'LIP_OR': [],
                    'LIP_PK': []}
                for color in query['sub_color']:
                    color = color.split('_', 1)
                    color_str = ''.join(['#', color[0]])
                    subcolor[color[1]].append(color_str)
                for i in lip:
                    lipss = []
                    for color in query['color']:
                        tmp = LipOption.objects.filter(product=i).filter(
                            color=color
                        )
                        if len(subcolor[color]) != 0:
                            tmp = tmp.filter(
                                sub_color__in=subcolor[color]
                            )
                        lipss += tmp
                    # No option survived the color filters: drop product.
                    if len(lipss) == 0:
                        lip = lip.exclude(name=i.name)
            else:
                for i in lip:
                    lipss = LipOption.objects.filter(product=i).filter(
                        color__in=color_option
                    )
                    if len(lipss) == 0:
                        lip = lip.exclude(name=i.name)
        except KeyError:
            # No 'color' key: skip option-level filtering entirely.
            color_option = None
        lipserializer = LipSerializer(
            lip, many=True, context={
                'color': color_option, 'sub': subcolor})
        return JsonResponse(lipserializer.data, safe=False)
    return HttpResponseNotAllowed(['GET'])
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,675
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0013_auto_20191202_1618.py
|
# Generated by Django 2.2.6 on 2019-12-02 16:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration for the ``lip`` app (2019-12-02).

    Re-declares the ``choices`` lists on ``Lip.category``, ``Lip.form``
    and ``LipOption.color`` with ``LIP_``-prefixed codes; the underlying
    columns themselves are not added or removed.
    """

    dependencies = [
        ('lip', '0012_auto_20191126_1917'),
    ]
    operations = [
        # Lip product category: stick / gloss / balm / tint.
        migrations.AlterField(
            model_name='lip',
            name='category',
            field=models.CharField(choices=[('LIP_S', 'Stick'), ('LIP_G', 'Gloss'), ('LIP_B', 'Balm'), ('LIP_T', 'Tint')], max_length=5),
        ),
        # Finish of the product, defaulting to "none".
        migrations.AlterField(
            model_name='lip',
            name='form',
            field=models.CharField(choices=[('LIP_M', 'Matte'), ('LIP_G', 'Glossy'), ('LIP_N', 'None')], default='LIP_N', max_length=5),
        ),
        # Color bucket for each lip option.
        migrations.AlterField(
            model_name='lipoption',
            name='color',
            field=models.CharField(choices=[('LIP_RD', 'Red'), ('LIP_PK', 'Pink'), ('LIP_OR', 'Orange'), ('LIP_PU', 'Purple')], max_length=6),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,676
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/cheek/models.py
|
""" model for lip cosmetic information """
from django.db import models
from brand import models as brand_models
class Cheek(models.Model):
    """Django model for a cheek product (blusher / contouring / highlighter)."""
    # Category codes stored in ``category``.
    BLUSHER = 'CHK_B'
    CONTOURING = 'CHK_C'
    HIGHLIGHT = 'CHK_H'
    CATEGORY = (
        (BLUSHER, 'Blusher'),
        (CONTOURING, 'Contouring'),
        (HIGHLIGHT, 'Highlighter'),
    )
    name = models.CharField(max_length=20)
    # Price as an integer (currency unit not stated here — presumably KRW; confirm).
    price = models.IntegerField()
    # Owning brand; deleting the brand cascades to its products.
    brand = models.ForeignKey(
        brand_models.Brand,
        on_delete=models.CASCADE
    )
    category = models.CharField(
        max_length=5,
        choices=CATEGORY,
    )
    # "//:0" is a harmless placeholder URL used when nothing was crawled.
    product_url = models.CharField(
        max_length=255,
        default="//:0",
    )
    img_url = models.CharField(
        max_length=255,
        default="//:0",
    )
    def __str__(self):
        """Human-readable representation: the product name."""
        return self.name
class CheekOption(models.Model):
    """Color option (one shade) belonging to a Cheek product."""
    # Broad color-group codes stored in ``color``.
    RED = "CHK_RD"
    PINK = "CHK_PK"
    ORANGE = "CHK_OR"
    COLOR = (
        (RED, "Red"),
        (PINK, "Pink"),
        (ORANGE, "Orange"),
    )
    color = models.CharField(
        max_length=6,
        choices=COLOR
    )
    # Finer-grained color label (free text).
    sub_color = models.CharField(
        max_length=30
    )
    # Swatch color code — presumably "#rrggbb"; confirm against the crawler.
    color_hex = models.CharField(max_length=10)
    optionName = models.CharField(max_length=30)
    # Parent product; reachable from Cheek via the related name "color".
    product = models.ForeignKey(
        Cheek,
        related_name='color',
        on_delete=models.CASCADE
    )
    def __str__(self):
        """Product name plus option name, e.g. "cheek1 option1"."""
        return str(self.product) + " " + self.optionName
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,677
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/serializers.py
|
""" TODO : DOCSTRING"""
from rest_framework import serializers
from .models import Base, BaseOption
class BaseOptionSerializer(serializers.ModelSerializer):
    """Serializes a single BaseOption (one shade of a base product)."""
    class Meta:
        """Expose only the option's color fields; the parent product is implied."""
        model = BaseOption
        fields = ['color', 'sub_color', 'color_hex', 'optionName']
class BaseSerializer(serializers.ModelSerializer):
    """Serializes a Base product together with its (optionally filtered) options.

    The serializer context must carry ``color`` and ``sub_color`` keys;
    when a value is not None it restricts which options are embedded.
    """
    brand = serializers.StringRelatedField()
    color = serializers.SerializerMethodField('colors')

    def colors(self, base):
        """Return the serialized options of ``base``, honoring context filters."""
        options = BaseOption.objects.filter(product=base)
        wanted_colors = self.context['color']
        wanted_subs = self.context['sub_color']
        if wanted_colors is not None:
            options = options.filter(color__in=wanted_colors)
        if wanted_subs is not None:
            options = options.filter(sub_color__in=wanted_subs)
        return BaseOptionSerializer(instance=options, many=True).data

    class Meta:
        model = Base
        fields = [
            'name',
            'price',
            'category',
            'product_url',
            'img_url',
            'brand',
            'color',
            'id'
        ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,678
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/brand/migrations/0002_auto_20191102_1642.py
|
# Generated by Django 2.2.6 on 2019-11-02 16:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: give ``Brand.url`` a placeholder default.

    Alters ``url`` to a TextField defaulting to "//:0" so rows can be
    created before a real URL is crawled.
    """

    dependencies = [
        ('brand', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='brand',
            name='url',
            field=models.TextField(default='//:0'),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,679
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/crawl/pipelines.py
|
""" spider pipelines """
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
from products.lip.models import Lip, LipOption
from products.base.models import Base, BaseOption
from products.cheek.models import Cheek, CheekOption
from brand.models import Brand
class CrawlPipeline(object):
    """Scrapy pipeline that persists crawled items, skipping duplicates.

    Each item carries a ``crawled`` tag naming its kind.  Products are
    deduplicated by ``name``, product options by ``(optionName, product)``
    and brands by ``name`` plus an in-memory cache for the current run.
    """

    # item["crawled"] value -> product model, deduplicated by "name".
    PRODUCT_MODELS = {
        "lip": Lip,
        "base": Base,
        "cheek": Cheek,
    }
    # item["crawled"] value -> option model, deduplicated by
    # ("optionName", "product").
    OPTION_MODELS = {
        "lip_option": LipOption,
        "base_option": BaseOption,
        "cheek_option": CheekOption,
    }

    def __init__(self):
        # Brand names already handled during this crawl run.
        self.ids_seen = set()

    def process_item(self, item, spider):
        """Save ``item`` unless an equivalent row already exists.

        Always returns ``item`` so later pipeline stages keep running.
        """
        crawled = item["crawled"]
        if crawled == "brand":
            self.process_brand(item, spider)
        elif crawled in self.PRODUCT_MODELS:
            model = self.PRODUCT_MODELS[crawled]
            # .exists() avoids fetching every duplicate row just to count them.
            if not model.objects.filter(name=item["name"]).exists():
                item.save()
        elif crawled in self.OPTION_MODELS:
            model = self.OPTION_MODELS[crawled]
            if not model.objects.filter(
                    optionName=item["optionName"],
                    product=item["product"]).exists():
                item.save()
        return item

    def process_brand(self, item, spider):
        # pylint: disable=unused-argument
        """Save a brand item the first time its name is seen this run."""
        name = item["name"]
        if name not in self.ids_seen and not Brand.objects.filter(
                name=name).exists():
            self.ids_seen.add(name)
            item.save()
        return item
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,680
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/migrations/0001_initial.py
|
# Generated by Django 2.2.6 on 2019-11-16 17:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the ``base`` app: Base products and their options."""

    initial = True

    # Requires the Brand model (FK target) to exist first.
    dependencies = [
        ('brand', '0003_brand_name_ko'),
    ]
    operations = [
        # Base cosmetic product (powder/cushion/concealer/...), FK to Brand.
        # NOTE(review): category has max_length=1 but includes two-character
        # codes ('CU', 'PR', ...) — confirm a later migration widens it.
        migrations.CreateModel(
            name='Base',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('price', models.IntegerField()),
                ('category', models.CharField(choices=[('P', 'Powder'), ('CU', 'Cushion'), ('C', 'Concealer'), ('F', 'Foundation'), ('PR', 'Primer'), ('B', 'BB & CC')], max_length=1)),
                ('img_url', models.TextField(default='//:0')),
                ('brand', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='brand.Brand')),
            ],
        ),
        # Shade option of a Base product; ``color`` buckets by skin tone.
        migrations.CreateModel(
            name='BaseOption',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('color', models.CharField(choices=[('LT', 'under 21'), ('MD', '21 ~ 23'), ('DK', 'over 23')], max_length=2)),
                ('sub_color', models.CharField(default=None, max_length=30, null=True)),
                ('color_hex', models.CharField(max_length=10)),
                ('optionName', models.CharField(max_length=30)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.Base')),
            ],
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,681
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/lip/migrations/0003_lipoption.py
|
# Generated by Django 2.2.6 on 2019-11-01 13:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: add the LipOption model (a color per Lip)."""

    dependencies = [
        ('lip', '0002_auto_20191101_1303'),
    ]
    operations = [
        migrations.CreateModel(
            name='LipOption',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('color', models.CharField(choices=[('RD', 'Red'), ('CR', 'Coral'), ('PK', 'Pink'), ('OR', 'Orange')], max_length=2)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lip.Lip')),
            ],
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,682
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/test_views_cheek.py
|
from django.test import TestCase, Client
import json
from products.cheek.models import Cheek, CheekOption
from brand.models import Brand
from products.cheek.serializers import CheekSerializer, CheekOptionSerializer
class BaseTestCase(TestCase):
    """Integration tests for the cheek search API."""

    def setUp(self):
        """Create one brand, three cheek products and a single red option."""
        self.client = Client()
        self.brand = Brand.objects.create(name="brand1")
        self.product1 = Cheek.objects.create(
            name="cheek1", price=100, category="CHK_B",
            img_url="tmp_url1", brand=self.brand)
        self.product2 = Cheek.objects.create(
            name="cheek2", price=200, category="CHK_H",
            img_url="tmp_url2", brand=self.brand)
        self.product3 = Cheek.objects.create(
            name="cheek3", price=300, category="CHK_B",
            img_url="tmp_url1", brand=self.brand)
        self.color1 = CheekOption.objects.create(
            color="CHK_RD", sub_color="hex1", color_hex="hex2",
            optionName="option1", product=self.product1)

    def _get_json(self, url):
        """GET ``url``, assert a 200 response and return the decoded body."""
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        return json.loads(response.content)

    def test_bad_request(self):
        """PUT is not an allowed method on the tag endpoint."""
        response = self.client.put('/api/cheek/tag')
        self.assertEqual(response.status_code, 405)

    def test_cheek_search(self):
        """Filtering by category, color and brand returns matching products."""
        payload = self._get_json('/api/cheek/category=CHK_H')
        self.assertEqual(payload[0]["name"], self.product2.name)
        self.assertEqual(payload[0]["category"], 'CHK_H')
        payload = self._get_json('/api/cheek/category=CHK_B')
        self.assertEqual(payload[0]["name"], self.product1.name)
        self.assertEqual(payload[0]["category"], 'CHK_B')
        payload = self._get_json('/api/cheek/color=CHK_RD&category=CHK_B')
        self.assertEqual(payload[0]["color"][0]["color"], self.color1.color)
        self.assertEqual(payload[0]["category"], 'CHK_B')
        payload = self._get_json('/api/cheek/color=CHK_PK&category=CHK_B')
        self.assertEqual(len(payload), 0)
        payload = self._get_json('/api/cheek/all')
        self.assertEqual(len(payload), 3)
        payload = self._get_json('/api/cheek/brand=brand1')
        self.assertEqual(len(payload), 3)
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,683
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/migrations/0018_ml_product.py
|
# Generated by Django 2.2.6 on 2019-11-29 04:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: link an ML result to a Base product.

    Adds a nullable ``product`` FK on the ``ml`` model pointing at
    ``base.Base``.
    """

    dependencies = [
        ('base', '0003_auto_20191116_1921'),
        ('ml', '0017_auto_20191126_1355'),
    ]
    operations = [
        migrations.AddField(
            model_name='ml',
            name='product',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base.Base'),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,684
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/products/base/migrations/0003_auto_20191116_1921.py
|
# Generated by Django 2.2.6 on 2019-11-16 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add an 'NO' (other type) skin-tone bucket.

    Extends ``BaseOption.color`` choices with ('NO', 'Other type') and
    makes it the default.
    """

    dependencies = [
        ('base', '0002_auto_20191116_1735'),
    ]
    operations = [
        migrations.AlterField(
            model_name='baseoption',
            name='color',
            field=models.CharField(choices=[('LT', 'under 21'), ('MD', '21 ~ 23'), ('DK', 'over 23'), ('NO', 'Other type')], default='NO', max_length=2),
        ),
    ]
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,685
|
swsnu/swpp2019-team2
|
refs/heads/dev/master
|
/backend/ml/find_base.py
|
""" finds closest base product """
def hex_into_rgb(hexa):
    """Convert a 6-digit hex color string into an ``(r, g, b)`` tuple.

    Args:
        hexa: color string such as "#ffcc00" or "ffcc00" (leading '#'
            is optional).

    Returns:
        Tuple of three ints, each in 0-255.

    Raises:
        ValueError: if a two-character channel is not valid hexadecimal.
    """
    # startswith handles the empty string safely (the old hexa[0] check
    # raised IndexError on "").
    if hexa.startswith('#'):
        hexa = hexa[1:]
    # Parse each two-character channel directly in base 16 instead of
    # building "0x.." strings by hand.
    return tuple(int(hexa[i:i + 2], 16) for i in range(0, 6, 2))
def two_point_length(tuple1, tuple2):
    """Return the Euclidean distance between two RGB triples."""
    # Sum the squared per-channel differences, then take the square root
    # (x ** 0.5 is exactly pow(x, 1/2)).
    squared = sum((a - b) ** 2 for a, b in zip(tuple1[:3], tuple2[:3]))
    return squared ** 0.5
def best_match(base_list, ml_result):
    """Return the index in ``base_list`` of the hex color closest to ``ml_result``.

    Args:
        base_list: list of hex color strings (e.g. "#aabbcc").
        ml_result: iterable of three numbers — the target RGB value.

    Returns:
        Index of the closest color; ties keep the earliest index, and an
        empty ``base_list`` yields -1 (preserving the original sentinel).
    """
    if not base_list:
        # Legacy "no candidates" sentinel kept for callers.
        return -1
    target = tuple(ml_result)
    distances = [
        two_point_length(hex_into_rgb(product), target)
        for product in base_list
    ]
    # min() keeps the first occurrence on ties, matching the original
    # strict "temp < min_len" scan.
    return distances.index(min(distances))
|
{"/backend/crawl/spiders/mac_lip.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/views.py": ["/backend/ml/face_color_ml.py", "/backend/ml/serializers.py", "/backend/ml/models.py", "/backend/ml/find_base.py"], "/backend/ml/urls.py": ["/backend/ml/views.py"], "/backend/products/admin.py": ["/backend/products/lip/models.py", "/backend/products/base/models.py", "/backend/products/cheek/models.py"], "/backend/products/lip/serializers.py": ["/backend/products/lip/models.py"], "/backend/products/cheek/serializers.py": ["/backend/products/cheek/models.py"], "/backend/crawl/spiders/mac_base.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/user/tests.py": ["/backend/user/models.py"], "/backend/user/views.py": ["/backend/user/models.py"], "/backend/products/base/views.py": ["/backend/products/base/models.py", "/backend/products/base/serializers.py"], "/backend/products/cheek/views.py": ["/backend/products/cheek/models.py", "/backend/products/cheek/serializers.py"], "/backend/crawl/spiders/aritaum_cheek.py": ["/backend/crawl/spiders/spider_helper.py", "/backend/crawl/spiders/color_tag.py"], "/backend/crawl/spiders/mac_cheek.py": ["/backend/crawl/spiders/color_tag.py"], "/backend/ml/admin.py": ["/backend/ml/models.py"], "/backend/ml/serializers.py": ["/backend/ml/models.py"], "/backend/products/lip/views.py": ["/backend/products/lip/models.py", "/backend/products/lip/serializers.py"], "/backend/products/base/serializers.py": ["/backend/products/base/models.py"]}
|
25,689
|
waynekwon/finalproject
|
refs/heads/master
|
/clean_data.py
|
import pandas as pd
def get_clean_data(file_name):
    """Load the IMDB movie CSV and return a cleaned, model-ready DataFrame.

    Cleaning steps:
      * sort by year and keep only titles from 2000 onwards (bug fix: the
        original called sort_values without assigning the result, so the
        sort was silently discarded);
      * bin ``imdb_score`` into a 3-level ``movie_response`` label;
      * keep only USA titles and one-hot encode ``genres``;
      * numerically encode ``color``, ``language`` and ``content_rating``;
      * drop rows with missing values in the predictor columns.

    Args:
        file_name: path to a CSV with the ``movie_metadata.csv`` schema.

    Returns:
        pandas.DataFrame with encoded categoricals and genre dummy columns.
    """
    moviedata = pd.read_csv(file_name)

    # Sort by year, then drop observations before year 2000.
    moviedata = moviedata.sort_values(by='title_year')
    moviedata = moviedata.drop(moviedata[moviedata.title_year < 2000].index)

    # Bin the response variable (imdb_score) into 3 ordered categories.
    bins_response = [0, 5, 7, 10]
    movie_response_group = [0, 1, 2]
    moviedata['movie_response'] = pd.cut(
        moviedata['imdb_score'], bins_response, labels=movie_response_group)

    # Keep only USA titles.
    moviedata = moviedata[moviedata['country'] == 'USA']

    # One-hot encode genres and drop the pipe-separated source column.
    dummy_genre = moviedata['genres'].str.get_dummies(sep='|')
    moviedata.drop(['genres'], axis=1, inplace=True)
    moviedata = pd.concat([moviedata, dummy_genre], axis=1)

    # COLOR: 1 if color, 0 if black-and-white; missing is assumed color.
    # " Black and White" (leading space) matches the raw data exactly.
    moviedata['color'] = moviedata['color'].replace(
        {" Black and White": 0, "Color": 1}).fillna(1)

    # LANGUAGE: 0 if English, 1 otherwise; missing is assumed English.
    language = moviedata['language'].fillna('English')
    moviedata['language'] = (language != 'English').astype(int)

    # CONTENT RATING: collapse legacy ratings into the 6 modern buckets,
    # then encode Unrated, G, PG, PG-13, R, NC-17 -> 0..5.
    rating_aliases = {
        "X": "NC-17", "GP": "PG", "TV-G": "G", "TV-PG": "PG", "M": "PG",
        "TV-14": "PG-13",
        "Not Rated": "Unrated", "Passed": "Unrated", "Approved": "Unrated",
    }
    rating_codes = {
        "Unrated": 0, "G": 1, "PG": 2, "PG-13": 3, "R": 4, "NC-17": 5,
    }
    moviedata['content_rating'] = (
        moviedata['content_rating']
        .replace(rating_aliases)
        .fillna('Unrated')
        .replace(rating_codes)
    )

    # Drop rows missing any predictor used downstream.
    moviedata = moviedata.dropna(subset=['title_year'])
    moviedata = moviedata.dropna(subset=[
        'actor_1_facebook_likes', 'actor_2_facebook_likes',
        'actor_3_facebook_likes'])
    moviedata = moviedata.dropna(subset=[
        'duration', 'num_critic_for_reviews', 'facenumber_in_poster',
        'num_user_for_reviews', 'budget'])
    return moviedata
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,690
|
waynekwon/finalproject
|
refs/heads/master
|
/linreg.py
|
"""Fit and evaluate a baseline linear regression of IMDB score.

Expects ``rough_cleandata.csv`` (the output of the cleaning step) in the
working directory; prints the fitted intercept, coefficients and test MSE.
"""
from sklearn.linear_model import LinearRegression
# Bug fix: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
# Bug fix: ``metrics`` was used below but never imported.
from sklearn import metrics
import pandas as pd
import numpy as np

# Must run the data-cleaning step first to produce rough_cleandata.csv.
mydata = pd.read_csv('rough_cleandata.csv', index_col=0)

# Predictor columns and response variable.
predictors = ['color', 'num_critic_for_reviews', 'duration', 'director_facebook_likes', 'actor_3_facebook_likes', 'actor_1_facebook_likes', 'num_voted_users', 'cast_total_facebook_likes', 'facenumber_in_poster', 'num_user_for_reviews', 'language', 'content_rating', 'budget', 'actor_2_facebook_likes']
X = mydata[predictors]
y = mydata['imdb_score']

# Hold out a test split (fixed seed for reproducibility) and fit OLS.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1)
linreg = LinearRegression()
linreg.fit(X_train, y_train)
# Bug fix: Python 2 print statements converted to Python 3 print calls.
print(linreg.intercept_)
print(list(zip(predictors, linreg.coef_)))

# Evaluate on the held-out set.
y_pred = linreg.predict(X_test)
print('The MSE is')
print(metrics.mean_squared_error(y_test, y_pred))
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,691
|
waynekwon/finalproject
|
refs/heads/master
|
/website.py
|
from flask import Flask, render_template, redirect, url_for, request
from clean_data import get_clean_data
from final_model import predict_movie_rating
from movie_recommend import recommend_movie
import pandas

app = Flask(__name__)

# importing rawdata and cleaning it so that it can be used in the model
file_n = 'movie_metadata.csv'
moviedata = get_clean_data(file_n)

# list of predictors used for calculating/predicting movie response
# (order must match what predict_movie_rating expects)
list_of_predictors = ['title_year', 'director_facebook_likes', 'actor_1_facebook_likes', 'actor_2_facebook_likes', 'actor_3_facebook_likes', 'facenumber_in_poster', 'content_rating', 'budget',
'Action','Adventure', 'Animation', 'Biography', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Family', 'Fantasy', 'History', 'Horror', 'Music', 'Musical', 'Mystery', 'News',
'Romance', 'Sci-Fi', 'Short', 'Sport', 'Thriller','War', 'Western' ]

# predictors and dataset for movie recommendation
predictors_movie_recommend = ['Action','Adventure', 'Animation', 'Biography', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Family', 'Fantasy', 'History', 'Horror', 'Music', 'Musical', 'Mystery', 'News',
'Romance', 'Sci-Fi', 'Short', 'Sport', 'Thriller','War', 'Western', 'imdb_score', 'movie_title']
moviedata_recommend = moviedata[predictors_movie_recommend]

# genre list: 0/1 flag columns; order matters for the submission handler
genrelist = ['Action','Adventure', 'Animation', 'Biography', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Family', 'Fantasy', 'History', 'Horror', 'Music', 'Musical', 'Mystery', 'News',
'Romance', 'Sci-Fi', 'Short', 'Sport', 'Thriller','War', 'Western']
@app.route('/')
def hello_world():
    """Serve the landing page of the site."""
    landing_page = 'index.html'
    return render_template(landing_page)
@app.route('/rating/<movie>')
def rating(movie):
    """Show the prediction/recommendation result encoded in the URL."""
    context = {'movie': movie}
    return render_template('score_page.html', **context)
@app.route('/submission', methods=['POST'])
def login():
    """Handle the prediction-form submission.

    Reads one integer per predictor from the POSTed form, predicts the
    movie-response class, builds a genre-based recommendation list, and
    redirects to the rating page with the combined result text.

    Returns the error message with HTTP 400 when the request is not a
    POST (the original fell through and returned None, which made Flask
    raise a 500).
    """
    error = None
    if request.method == 'POST':
        # Collect the numeric form fields in the order the model expects.
        user_input = [int(request.form[predictor])
                      for predictor in list_of_predictors]
        # Genre checkboxes arrive as 0/1 flags, in genrelist order.
        user_input_genre = [int(request.form[genre]) for genre in genrelist]
        movie_rating_index = predict_movie_rating(user_input, moviedata)
        # Map the predicted class index to a human-readable verdict.
        dic = {0: "Not Recommended", 1: "Hmm..I'd consider it", 2: "Go Watch It!"}
        movie_rating = [dic[n] if n in dic.keys() else n
                        for n in movie_rating_index]
        # Movie recommendation: only the genres the user ticked.
        genre_selected_by_user = [genre for genre, flag
                                  in zip(genrelist, user_input_genre)
                                  if flag == 1]
        movie_recommendation = recommend_movie(genre_selected_by_user,
                                               moviedata_recommend)
        result = movie_rating[0] + ' | ' + 'Based off your genre preferences, we recommend the following movies: ' + movie_recommendation[0] + ", " + movie_recommendation[1] + ", " + movie_recommendation[2]
        return redirect(url_for('rating', movie = result))
    else:
        # BUG FIX: return the error instead of implicitly returning None.
        error = 'Error: Please fill out all fields'
        return error, 400
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,692
|
waynekwon/finalproject
|
refs/heads/master
|
/final_model.py
|
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression
def predict_movie_rating(list_of_user_input, data_for_prediction):
    """Predict the movie-response class for one user-supplied feature row.

    Trains a logistic-regression classifier on ``data_for_prediction``
    (must contain a 'movie_response' column plus the predictor columns
    below) and predicts the class of ``list_of_user_input``.

    :param list_of_user_input: flat list of feature values, one per
        predictor in ``predictor_list`` (same order).
    :param data_for_prediction: cleaned movie DataFrame.
    :return: array containing the single predicted class label.
    """
    # create a list of predictors to include in the classification model
    predictor_list = ['title_year', 'director_facebook_likes', 'actor_1_facebook_likes', 'actor_2_facebook_likes', 'actor_3_facebook_likes', 'facenumber_in_poster', 'content_rating', 'budget',
                      'Action', 'Adventure', 'Animation', 'Biography', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Family', 'Fantasy', 'History', 'Horror', 'Music', 'Musical', 'Mystery', 'News',
                      'Romance', 'Sci-Fi', 'Short', 'Sport', 'Thriller', 'War', 'Western']
    movie_features = data_for_prediction[predictor_list]
    # FIX: .values works on both old and new pandas; DataFrame.as_matrix()
    # was removed in pandas 1.0.
    X = movie_features.values
    y = list(data_for_prediction['movie_response'].values)
    # --------------- CARRY OUT CLASSIFICATION MODEL ---------------
    logreg = LogisticRegression()
    logreg.fit(X, y)
    # FIX: scikit-learn expects a 2-D (n_samples, n_features) array; the
    # original passed a flat list, which newer versions reject.
    sample = np.asarray(list_of_user_input).reshape(1, -1)
    log_y_pred = logreg.predict(sample)
    return log_y_pred
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,693
|
waynekwon/finalproject
|
refs/heads/master
|
/correlation.py
|
import pandas as pd
import matplotlib.pyplot as plt
# NOTE(review): pandas.tools.plotting was removed in pandas 0.23+; on a
# modern pandas use pd.plotting.scatter_matrix instead.
from pandas.tools import plotting

# Exploratory correlation analysis of the cleaned movie dataset.
mydata = pd.read_csv('rough_cleandata.csv')
mydata.drop(['Unnamed: 0'], axis=1, inplace=True)

# Encode the categorical response so it can enter the correlation matrix.
mydata.loc[mydata["movie_response"]=="Bad", "movie_response"] = 0
mydata.loc[mydata["movie_response"]=="Average", "movie_response"] = 1
mydata.loc[mydata["movie_response"]=="Good", "movie_response"] = 2
response = mydata['movie_response']

# Pairwise correlation of every numeric column with the IMDb score.
corr_coef = mydata.corr()
print(corr_coef['imdb_score'])

plt.scatter(mydata['num_user_for_reviews'], mydata['imdb_score'])

# Scatter-matrix of the main numeric predictors.
# BUG FIX: the original referenced the undefined name `data` here
# (NameError); the DataFrame is called `mydata`.
plotting.scatter_matrix(mydata[['num_critic_for_reviews', 'duration', 'director_facebook_likes', 'actor_1_facebook_likes', 'num_voted_users', 'cast_total_facebook_likes', 'num_user_for_reviews', 'budget']])
plt.show()
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,694
|
waynekwon/finalproject
|
refs/heads/master
|
/movie_recommend.py
|
import pandas
def recommend_movie(genre_from_user, data_used_for_recommendation):
    """Return up to three highly rated movie titles matching the genres.

    :param genre_from_user: list of genre column names the user selected;
        a movie must belong to every listed genre to qualify.
    :param data_used_for_recommendation: DataFrame with one 0/1 column per
        genre plus 'imdb_score' and 'movie_title' columns.
    :return: list of at most three movie titles, best rated first.
    """
    # Keep only movies that match every selected genre.
    for genre in genre_from_user:
        data_used_for_recommendation = data_used_for_recommendation[data_used_for_recommendation[genre] == 1]
    # BUG FIX: sort_values returns a new frame; the original discarded the
    # result, so the recommendations were never actually sorted. Sort best
    # first so the top-rated matches are recommended.
    data_used_for_recommendation = data_used_for_recommendation.sort_values(by='imdb_score', ascending=False)
    # keep only movies with high scores (IMDb >= 7)
    data_used_for_recommendation = data_used_for_recommendation.drop(data_used_for_recommendation[data_used_for_recommendation.imdb_score < 7].index)
    # Titles in the source data carry two trailing junk characters.
    # NOTE(review): the [:-2] trim assumes that suffix — confirm against
    # the raw movie_metadata.csv.
    movie_for_recommendation = data_used_for_recommendation['movie_title'].map(lambda x: str(x)[:-2])
    return movie_for_recommendation.tolist()[:3]
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,695
|
waynekwon/finalproject
|
refs/heads/master
|
/classification_models.py
|
import numpy as np
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.cross_validation import train_test_split
from sklearn import metrics
from sklearn.tree import DecisionTreeClassifier
from clean_data import get_clean_data
file_n = 'movie_metadata.csv'
mydata=get_clean_data(file_n)
#--------------- PREPARING FOR CLASSIFICATION MODELS ---------------
#storing the resopnse variable in 'response' for easier code writing
response = mydata['movie_response']
#create a list of predictors to include in classification model
predictor_list = ['title_year', 'director_facebook_likes', 'actor_3_facebook_likes', 'actor_1_facebook_likes', 'facenumber_in_poster', 'content_rating', 'budget', 'actor_2_facebook_likes',
'Action','Adventure', 'Animation', 'Biography', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Family', 'Fantasy', 'History', 'Horror', 'Music', 'Musical', 'Mystery', 'News', 'Romance', 'Sci-Fi', 'Short','Sport', 'Thriller','War', 'Western' ]
movie_features = mydata[predictor_list]
#transform X from pandas dataframe to numpy arrays
X = movie_features.as_matrix()
#transform y from pandas to 1D list
y = list(mydata['movie_response'].values)
#--------------- SPLITTING INTO TRAINING & TESTING SETS ---------------
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2)#, random_state = 2) #to keep splitting constant, set 'random_state=1'
#print X_train.shape, X_test.shape
#print len(y_train), len(y_test)
#--------------- CARRY OUT CLASSIFICATION MODELS ---------------
#call in the models
logreg = LogisticRegression()
knn2 = KNeighborsClassifier(n_neighbors=15) # K=15
knn3 = KNeighborsClassifier(n_neighbors=20) # K=20
dec_tree = DecisionTreeClassifier()
#fit the model with training set
logreg.fit(X_train, y_train)
knn2.fit(X_train, y_train)
knn3.fit(X_train, y_train)
dec_tree.fit(X_train, y_train)
#make predictions on the testing set
log_y_pred = logreg.predict(X_test)
knn2_y_pred = knn2.predict(X_test)
knn3_y_pred = knn3.predict(X_test)
dec_tree_y_pred = dec_tree.predict(X_test)
#compare actual response values with predicted values
log_accuracy = metrics.accuracy_score(y_test, log_y_pred)
knn2_accuracy = metrics.accuracy_score(y_test, knn2_y_pred)
knn3_accuracy = metrics.accuracy_score(y_test, knn3_y_pred)
dec_tree_accuracy = metrics.accuracy_score(y_test, dec_tree_y_pred)
print "Logitstic Accuracy: ", log_accuracy
print "KNN (K=15) Accuracy: ", knn2_accuracy
print "KNN (K=20) Accuracy: ", knn3_accuracy
print "Decision Tree Accuracy: ", dec_tree_accuracy
#FOR K-NN: find the best value of k by iterating its value
k_range = range(1,25)
scores = []
for k in k_range:
knn = KNeighborsClassifier(n_neighbors = k)
knn.fit(X_train, y_train)
y_pred = knn.predict(X_test)
scores.append(metrics.accuracy_score(y_test, y_pred))
#if you want to predict using out-of-sample observation
#knn.predict(["put in the value for each column, or predictors, as a list" ])
#--------------- GRAPH K-NEAREST NEIGHBORS ---------------
import matplotlib.pyplot as plt
plt.plot(k_range, scores)
plt.xlabel('Value of K for KNN')
plt.ylabel('Testing Accuracy')
plt.show()
|
{"/website.py": ["/clean_data.py", "/final_model.py", "/movie_recommend.py"]}
|
25,728
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/utils.py
|
# -*- coding: utf-8 -*-
from django.shortcuts import render
def mp_render(request, template, context=None):
    """Thin wrapper around ``django.shortcuts.render``.

    Exists as a single hook point for injecting site-wide context later.

    :param request: the current HttpRequest.
    :param template: template name to render.
    :param context: optional dict of template variables.
    """
    # FIX: avoid the mutable-default-argument pitfall of the original
    # (context={} is shared across calls); also drop the dead `pass`.
    if context is None:
        context = {}
    return render(request, template, context)
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,729
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/theme/forms.py
|
# -*- coding: utf-8 -*-
from django import forms
class AddThemeForm(forms.Form):
    """Form for creating a questionnaire theme (title + description)."""
    # Short display title of the theme (mirrors Theme.title max_length).
    title = forms.CharField(max_length=250)
    # Free-text description shown on the theme page.
    description = forms.CharField(max_length=500)
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,730
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/question/models.py
|
from django.db import models
from theme.models import Theme
from django.utils import timezone
# Create your models here.
class Question(models.Model):
    """A single question belonging to a questionnaire Theme."""
    title = models.CharField(max_length=250,null=False,default='')
    description = models.CharField(max_length=250,null=False,default='')
    # NOTE(review): ForeignKey without on_delete only works on Django < 2.0;
    # newer Django requires an explicit on_delete argument.
    theme = models.ForeignKey(Theme)
    create_at = models.DateTimeField(auto_now_add=True)
    update_at = models.DateTimeField(auto_now=True)
    def __unicode__(self):
        # Python 2 string representation (used by the admin/list views).
        return self.title
    class Meta:
        # NOTE(review): ('id') is just the string 'id' in parentheses,
        # not a tuple — it works, but the parentheses are misleading.
        get_latest_by = ('id')
        ordering = ('id','create_at')
class Answer(models.Model):
    """One selectable answer option for a Question, with a vote counter."""
    content = models.CharField(max_length=250,null=False,default='')
    # Label used to order the options; presumably 'A', 'B', ... — confirm.
    sign = models.CharField(max_length=100,null=False,default='')
    # Number of times this option has been selected (see views.addAnswer).
    select_num = models.IntegerField()
    # NOTE(review): ForeignKey without on_delete requires Django < 2.0.
    question = models.ForeignKey(Question)
    create_at = models.DateTimeField(auto_now_add=True)
    update_at = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('sign',)
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,731
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/theme/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the Theme table.

    Generated by Django's ``makemigrations``; do not edit by hand.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Theme',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(default=b'', max_length=250)),
                ('description', models.TextField(default=b'', max_length=500)),
                ('question_num', models.IntegerField(default=0)),
                ('create_at', models.DateTimeField(auto_now_add=True)),
                ('update_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'ordering': ['-id'],
                'get_latest_by': ['id'],
            },
        ),
    ]
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,732
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/question/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import Question,Answer
from theme.models import Theme
from utils import mp_render
import json
# Create your views here.
def index(request,question_id):
    """Render a single question with its answer options."""
    try:
        question = Question.objects.get(id=question_id)
        answers = Answer.objects.filter(question_id=question_id)
        context = {}
        context['answers'] = answers
        context['question'] = question
    # NOTE(review): bare except hides real errors; catching
    # Question.DoesNotExist explicitly would be safer.
    except:
        return render(request,'404.html')
    else:
        return mp_render(request,'question.html',context)
def question(request,theme_id,question_num=0):
    """Show question number ``question_num`` of a theme, or the finish page."""
    try:
        theme = Theme.objects.get(id=theme_id)
        question_num = int(question_num)
        if(theme.question_num<=question_num):
            # All questions answered: show the completion page.
            return mp_render(request,'finish.html')
        else:
            # Slice of length 1: the question at position question_num.
            questions = Question.objects.filter(theme_id=theme_id)[question_num:question_num+1]
            # Loop-and-pass trick to bind the single row to `question`;
            # NOTE(review): leaves `question` undefined if the slice is
            # empty (masked by the bare except below).
            for question in questions:pass
            answers = Answer.objects.filter(question_id=question.id)
            context = {}
            context['answers'] = answers
            context['question'] = question
            context['theme_id'] = theme_id
            # Link target for the next question (1-based for the template).
            context['question_num'] = question_num+1
            return mp_render(request,'question.html',context)
    # NOTE(review): bare except — hides errors other than a missing theme.
    except:
        return render(request,'404.html')
def addAnswer(request):
    """Increment an answer's vote counter; respond with a JSON status."""
    result = {}
    # status: 1 = success, 0 = failure (overwritten on error below).
    result['status'] = 1
    try:
        # NOTE(review): request.REQUEST is old Django (< 1.9); modern code
        # would read request.POST/GET explicitly.
        answer_id = request.REQUEST.get('answer_id')
        answer = Answer.objects.get(pk=answer_id)
        # NOTE(review): read-modify-write is not atomic — concurrent votes
        # can be lost; an F('select_num') + 1 update would be safe.
        answer.select_num = answer.select_num + 1
        answer.save()
    except Exception,e:
        result['status'] = 0
        result['info'] = 'error'
        return HttpResponse(json.dumps(result),content_type="application/json")
    else:
        result['info'] = 'success'
        return HttpResponse(json.dumps(result),content_type="application/json")
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,733
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/theme/views.py
|
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http import HttpResponse,HttpResponseRedirect
from .models import Theme
from .forms import AddThemeForm
from utils import mp_render
# Create your views here.
# Home page.
def index(request):
    """Render the home page showing the most recently created theme."""
    try:
        theme = Theme.objects.latest()
        context = {}
        context['theme'] = theme
    # NOTE(review): bare except — Theme.objects.latest() raises
    # Theme.DoesNotExist on an empty table; catching that explicitly
    # would be safer.
    except:
        return mp_render(request,'404.html')
    else:
        #return HttpResponse(utils.a())
        return mp_render(request,'index.html',context)
def addTheme(request):
    """Create a new Theme from the submitted form (background/admin page)."""
    if request.method == 'POST':
        form = AddThemeForm(request.POST)
        if form.is_valid():
            theme = Theme()
            theme.title = form.cleaned_data['title']
            theme.description = form.cleaned_data['description']
            theme.save()
            # NOTE(review): no redirect after a successful save — the bound
            # form is re-rendered, so refreshing re-submits (no POST/redirect
            # /GET pattern).
    else:
        form = AddThemeForm()
    return render(request,'background/addTheme.html',{'form':form})
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,734
|
dongxiangcat/questionnaire
|
refs/heads/master
|
/theme/models.py
|
from django.db import models
from django.utils import timezone
# Create your models here.
class Theme(models.Model):
    """A questionnaire theme: a titled set of questions."""
    title = models.CharField(max_length=250,null=False,default='')
    description = models.TextField(max_length=500,null=False,default='')
    # Number of questions in this theme (denormalized counter used by
    # question.views.question to detect the finish page).
    question_num = models.IntegerField(null=False,default=0)
    create_at = models.DateTimeField(auto_now_add=True)
    update_at = models.DateTimeField(auto_now=True)
    class Meta:
        # Theme.objects.latest() returns the newest row by id.
        get_latest_by = 'id'
        ordering = ['-id']
    def __unicode__(self):
        # Python 2 string representation.
        return self.title
|
{"/question/models.py": ["/theme/models.py"], "/theme/views.py": ["/theme/models.py", "/theme/forms.py", "/utils.py"]}
|
25,758
|
gitter-badger/ethically
|
refs/heads/master
|
/ethically/we/__init__.py
|
# Public API of the words-embedding (we) subpackage.
__all__ = ['GenderBiasWE']
from .bias import GenderBiasWE
|
{"/ethically/we/core.py": ["/ethically/we/utils.py"], "/ethically/tests/test_we.py": ["/ethically/we/__init__.py", "/ethically/we/utils.py"]}
|
25,759
|
gitter-badger/ethically
|
refs/heads/master
|
/ethically/we/utils.py
|
import numpy as np
def normalize(v):
    """Return *v* scaled to unit length (1-D vectors only).

    A zero vector is returned unchanged to avoid division by zero.
    """
    if v.ndim != 1:
        raise ValueError('v should be 1-D, {}-D was given'.format(
            v.ndim))
    length = np.linalg.norm(v)
    return v if length == 0 else v / length
def cosine_similarity(v, u):
    """Return the cosine of the angle between vectors *v* and *u*."""
    denominator = np.linalg.norm(v) * np.linalg.norm(u)
    return (v @ u) / denominator
def project_vector(v, u):
    """Projecting the vector v onto direction u."""
    direction = normalize(u)
    scale = v @ direction
    return scale * direction
def reject_vector(v, u):
    """Rejecting the vector v onto direction u (component orthogonal to u)."""
    projection = project_vector(v, u)
    return v - projection
def project_reject_vector(v, u):
    """Decompose *v* into (projection onto u, rejection from u)."""
    parallel_part = project_vector(v, u)
    orthogonal_part = v - parallel_part
    return parallel_part, orthogonal_part
def update_word_vector(model, word, new_vector):
    """Overwrite *word*'s vector in a gensim KeyedVectors model in place.

    Keeps the pre-computed normalized matrix (``syn0norm``) consistent
    when it exists.
    """
    model.syn0[model.vocab[word].index] = new_vector
    if model.syn0norm is not None:
        model.syn0norm[model.vocab[word].index] = normalize(new_vector)
def generate_one_word_forms(word):
    """Return the lower-, upper- and title-case variants of *word*."""
    transforms = (str.lower, str.upper, str.title)
    return [transform(word) for transform in transforms]
def generate_words_forms(words):
    """Return the case variants of every word in *words*, flattened."""
    all_forms = []
    for word in words:
        all_forms.extend(generate_one_word_forms(word))
    return all_forms
|
{"/ethically/we/core.py": ["/ethically/we/utils.py"], "/ethically/tests/test_we.py": ["/ethically/we/__init__.py", "/ethically/we/utils.py"]}
|
25,760
|
gitter-badger/ethically
|
refs/heads/master
|
/ethically/we/core.py
|
import copy
import os
import warnings
import matplotlib.pylab as plt
import numpy as np
import pandas as pd
import seaborn as sns
from gensim.models.keyedvectors import KeyedVectors
from pkg_resources import resource_filename
from sklearn.decomposition import PCA
from sklearn.svm import LinearSVC
from tqdm import tqdm
from tabulate import tabulate
from ..consts import RANDOM_STATE
from .utils import (
cosine_similarity, normalize, project_reject_vector, project_vector,
reject_vector, update_word_vector,
)
# Supported strategies for identifying the bias direction.
DIRECTION_METHODS = ['single', 'sum', 'pca']
# Supported debiasing strategies (see BiasWordsEmbedding.debias).
DEBIAS_METHODS = ['neutralize', 'hard', 'soft']
# Minimal explained-variance ratio required of the first principal
# component when identifying the direction by PCA.
FIRST_PC_THRESHOLD = 0.5
# Cap on non-specific examples sampled in learn_full_specific_words.
MAX_NON_SPECIFIC_EXAMPLES = 1000
class BiasWordsEmbedding:
    """Audit and Adjust a Bias in English Words Embedding.

    :param model: Words embedding model of ``gensim.model.KeyedVectors``
    :param bool only_lower: Whether the words embedding contains
                            only lower case words
    :param bool verbose: Set verbosity
    """

    def __init__(self, model, only_lower=False, verbose=False):
        if not isinstance(model, KeyedVectors):
            raise TypeError('model should be of type KeyedVectors, not {}'
                            .format(type(model)))
        self.model = model
        # TODO: write unitest for when it is False
        self.only_lower = only_lower
        self._verbose = verbose
        # Bias-direction state; populated by _identify_direction().
        self.direction = None
        self.positive_end = None
        self.negative_end = None

    def __copy__(self):
        # Shallow copy: shares the underlying gensim model.
        bias_words_embedding = self.__class__(self.model)
        bias_words_embedding.direction = copy.deepcopy(self.direction)
        bias_words_embedding.positive_end = copy.deepcopy(self.positive_end)
        bias_words_embedding.negative_end = copy.deepcopy(self.negative_end)
        return bias_words_embedding

    def __deepcopy__(self, memo):
        # Deep copy: also clones the (large) model object.
        bias_words_embedding = copy.copy(self)
        bias_words_embedding.model = copy.deepcopy(bias_words_embedding.model)
        return bias_words_embedding

    def __getitem__(self, key):
        # Delegate vector lookup to the gensim model.
        return self.model[key]

    def __contains__(self, item):
        return item in self.model

    def _filter_words_by_model(self, words):
        # Drop words that are out of the model's vocabulary.
        return [word for word in words if word in self]

    def _is_direction_identified(self):
        # Guard used by every method that needs the bias direction.
        if self.direction is None:
            raise RuntimeError('The direction was not identified'
                               ' for this {} instance'
                               .format(self.__class__.__name__))

    # There is a mistake in the article
    # it is written (section 5.1):
    # "To identify the gender subspace, we took the ten gender pair difference
    # vectors and computed its principal components (PCs)"
    # however in the source code:
    # https://github.com/tolga-b/debiaswe/blob/10277b23e187ee4bd2b6872b507163ef4198686b/debiaswe/we.py#L235-L245
    def _identify_subspace_by_pca(self, definitional_pairs, n_components):
        """Fit a PCA on the centered vectors of the definitional pairs."""
        matrix = []
        for word1, word2 in definitional_pairs:
            vector1 = normalize(self[word1])
            vector2 = normalize(self[word2])
            center = (vector1 + vector2) / 2
            matrix.append(vector1 - center)
            matrix.append(vector2 - center)
        pca = PCA(n_components=n_components)
        pca.fit(matrix)
        if self._verbose:
            table = enumerate(pca.explained_variance_ratio_, start=1)
            headers = ['Principal Component',
                       'Explained Variance Ratio']
            print(tabulate(table, headers=headers))
        return pca
    # TODO: add the SVD method from section 6 step 1
    # It seems there is a mistake there, I think it is the same as PCA
    # just with repleacing it with SVD
    def _identify_direction(self, positive_end, negative_end,
                            definitional, method='pca'):
        """Compute and store the bias direction between the two ends."""
        if method not in DIRECTION_METHODS:
            raise ValueError('method should be one of {}, {} was given'.format(
                DIRECTION_METHODS, method))
        if positive_end == negative_end:
            raise ValueError('positive_end and negative_end'
                             'should be different, and not the same "{}"'
                             .format(positive_end))
        if self._verbose:
            print('Identify direction using {} method...'.format(method))
        direction = None
        if method == 'single':
            # Difference of a single definitional word pair.
            direction = normalize(normalize(self[definitional[0]])
                                  - normalize(self[definitional[1]]))
        elif method == 'sum':
            # Difference of the normalized per-group sum vectors.
            groups = list(zip(*definitional))
            group1_sum_vector = np.sum([self[word]
                                        for word in groups[0]], axis=0)
            group2_sum_vector = np.sum([self[word]
                                        for word in groups[1]], axis=0)
            diff_vector = (normalize(group1_sum_vector)
                           - normalize(group2_sum_vector))
            direction = normalize(diff_vector)
        elif method == 'pca':
            # First principal component of the definitional-pair subspace.
            pca = self._identify_subspace_by_pca(definitional, 10)
            if pca.explained_variance_ratio_[0] < FIRST_PC_THRESHOLD:
                raise RuntimeError('The Explained variance'
                                   'of the first principal component should be'
                                   'at least {}, but it is {}'
                                   .format(FIRST_PC_THRESHOLD,
                                           pca.explained_variance_ratio_[0]))
            direction = pca.components_[0]
        # if direction is oposite (e.g. we cannot control
        # what the PCA will return)
        ends_diff_projection = cosine_similarity((self[positive_end]
                                                  - self[negative_end]),
                                                 direction)
        if ends_diff_projection < 0:
            direction = -direction  # pylint: disable=invalid-unary-operand-type
        self.direction = direction
        self.positive_end = positive_end
        self.negative_end = negative_end

    def project_on_direction(self, word):
        """Project the normalized vector of the word on the direction.

        :param str word: The word to project
        :return float: The projection scalar
        """
        self._is_direction_identified()
        vector = self[word]
        projection_score = self.model.cosine_similarities(self.direction,
                                                          [vector])[0]
        return projection_score

    def _calc_projection_scores(self, words):
        """Return a DataFrame of words sorted by projection score (desc)."""
        self._is_direction_identified()
        df = pd.DataFrame({'word': words})
        # TODO: maybe using cosine_similarities on all the vectors?
        # it might be faster
        df['projection'] = df['word'].apply(self.project_on_direction)
        df = df.sort_values('projection', ascending=False)
        return df
    def plot_projection_scores(self, words,
                               ax=None, axis_projection_step=None):
        """Plot the projection scalar of words on the direction.

        :param list words: The words to project
        :return: The ax object of the plot
        """
        self._is_direction_identified()
        projections_df = self._calc_projection_scores(words)
        projections_df['projection'] = projections_df['projection'].round(2)
        if ax is None:
            _, ax = plt.subplots(1)
        if axis_projection_step is None:
            axis_projection_step = 0.1
        # Diverging colormap so the sign of the projection is visible.
        cmap = plt.get_cmap('RdBu')
        projections_df['color'] = ((projections_df['projection'] + 0.5)
                                   .apply(cmap))
        most_extream_projection = (projections_df['projection']
                                   .abs()
                                   .max()
                                   .round(1))
        sns.barplot(x='projection', y='word', data=projections_df,
                    palette=projections_df['color'])
        plt.xticks(np.arange(-most_extream_projection, most_extream_projection,
                             axis_projection_step))
        plt.title('← {} {} {} →'.format(self.negative_end,
                                        ' ' * 20,
                                        self.positive_end))
        plt.xlabel('Direction Projection')
        plt.ylabel('Words')
        return ax

    def plot_dist_projections_on_direction(self, word_groups, ax=None):
        """Plot the projection scalars distribution on the direction.

        :param dict word_groups: The groups to project
        :return float: The ax object of the plot
        """
        if ax is None:
            _, ax = plt.subplots(1)
        names = sorted(word_groups.keys())
        for name in names:
            words = word_groups[name]
            label = '{} (#{})'.format(name, len(words))
            vectors = [self[word] for word in words]
            projections = self.model.cosine_similarities(self.direction,
                                                         vectors)
            sns.distplot(projections, hist=False, label=label, ax=ax)
        plt.axvline(0, color='k', linestyle='--')
        plt.title('← {} {} {} →'.format(self.negative_end,
                                        ' ' * 20,
                                        self.positive_end))
        plt.xlabel('Direction Projection')
        plt.ylabel('Density')
        ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        return ax
    def calc_direct_bias(self, neutral_words, c=None):
        """Calculate the direct bias.

        Based on the projection of neutral words on the direction.

        :param list neutral_words: Words to measure
        :param c: Strictness of bias measuring
        :type c: float or None
        :return: The direct bias
        """
        if c is None:
            c = 1
        projections = self._calc_projection_scores(neutral_words)['projection']
        direct_bias_terms = np.abs(projections) ** c
        direct_bias = direct_bias_terms.sum() / len(neutral_words)
        return direct_bias

    def calc_indirect_bias(self, word1, word2):
        """Calculate the indirect bias between two words.

        Based on the amount of shared projection of the words on the direction.

        Also called PairBias.

        :param str word1: First word
        :param str word2: Second word
        :type c: float or None
        :return: The indirect bias between the two words
        """
        self._is_direction_identified()
        vector1 = normalize(self[word1])
        vector2 = normalize(self[word2])
        perpendicular_vector1 = reject_vector(vector1, self.direction)
        perpendicular_vector2 = reject_vector(vector2, self.direction)
        inner_product = vector1 @ vector2
        perpendicular_similarity = cosine_similarity(perpendicular_vector1,
                                                     perpendicular_vector2)
        indirect_bias = ((inner_product - perpendicular_similarity)
                         / inner_product)
        return indirect_bias

    def _extract_neutral_words(self, specific_words):
        """Return vocabulary words that are not a case form of a specific word."""
        extended_specific_words = set()
        # because or specific_full data was trained on partial words embedding
        for word in specific_words:
            extended_specific_words.add(word)
            extended_specific_words.add(word.lower())
            extended_specific_words.add(word.upper())
            extended_specific_words.add(word.title())
        neutral_words = [word for word in self.model.vocab
                         if word not in extended_specific_words]
        return neutral_words

    def _neutralize(self, neutral_words):
        """Remove the direction component from every neutral word vector."""
        self._is_direction_identified()
        if self._verbose:
            neutral_words_iter = tqdm(neutral_words)
        else:
            neutral_words_iter = iter(neutral_words)
        for word in neutral_words_iter:
            neutralized_vector = reject_vector(self[word],
                                               self.direction)
            update_word_vector(self.model, word, neutralized_vector)
        self.model.init_sims(replace=True)

    def _equalize(self, equality_sets):
        """Make each equality set symmetric around the bias direction."""
        for equality_set_words in equality_sets:
            equality_set_vectors = [normalize(self[word])
                                    for word in equality_set_words]
            center = np.mean(equality_set_vectors, axis=0)
            (projected_center,
             rejected_center) = project_reject_vector(center,
                                                      self.direction)
            for word, vector in zip(equality_set_words, equality_set_vectors):
                projected_vector = project_vector(vector, self.direction)
                projected_part = normalize(projected_vector - projected_center)
                scaling = np.sqrt(1 - np.linalg.norm(rejected_center)**2)
                # TODO - in the code it is different - why?
                # equalized_vector = rejected_center + scaling * self.direction
                # https://github.com/tolga-b/debiaswe/blob/10277b23e187ee4bd2b6872b507163ef4198686b/debiaswe/debias.py#L36-L37
                equalized_vector = rejected_center + scaling * projected_part
                update_word_vector(self.model, word, equalized_vector)
        self.model.init_sims(replace=True)
def debias(self, method='hard', neutral_words=None, equality_sets=None,
inplace=True):
"""Debias the words embedding.
:param str method: The method of debiasing.
:param list neutral_words: List of neutral words
for the neutralize step
:param list equality_sets: List of equality sets
for the equalize step
:param bool inplace: Whether to debias the object inplace
or return a new one
.. warning::
After calling `debias`,
all the vectors of the words embedding
will be normalized to unit length.
"""
# pylint: disable=W0212
if inplace:
bias_words_embedding = self
else:
bias_words_embedding = copy.deepcopy(self)
if method not in DEBIAS_METHODS:
raise ValueError('method should be one of {}, {} was given'.format(
DEBIAS_METHODS, method))
if method in ['hard', 'neutralize']:
if self._verbose:
print('Neutralize...')
bias_words_embedding._neutralize(neutral_words)
if method == 'hard':
if self._verbose:
print('Equalize...')
bias_words_embedding._equalize(equality_sets)
if inplace:
return None
else:
return bias_words_embedding
def evaluate_words_embedding(self):
"""Print a words embedding evaluation based on standard protocols.
1. Word pairs task
2. Analogies task
"""
with warnings.catch_warnings():
warnings.simplefilter('ignore', category=FutureWarning)
if self._verbose:
print('Evaluate word pairs...')
word_pairs_path = resource_filename(__name__,
os.path.join('data',
'evaluation',
'wordsim353.tsv'))
word_paris_result = self.model.evaluate_word_pairs(word_pairs_path)
if self._verbose:
print('Evaluate analogies...')
analogies_path = resource_filename(__name__,
os.path.join('data',
'evaluation',
'questions-words.txt')) # pylint: disable=C0301
analogies_result = self.model.evaluate_word_analogies(analogies_path) # pylint: disable=C0301
if self._verbose:
print()
print('From Gensim')
print()
print('-' * 30)
print()
print('Word Pairs Result - WordSimilarity-353:')
print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
print('Pearson correlation coefficient:', word_paris_result[0])
print('Spearman rank-order correlation coefficient'
'between the similarities from the dataset'
'and the similarities produced by the model itself:',
word_paris_result[1])
print('Ratio of pairs with unknown words:', word_paris_result[2])
print()
print('-' * 30)
print()
print('Analogies Result')
print('~~~~~~~~~~~~~~~~')
print('Overall evaluation score:', analogies_result[0])
def learn_full_specific_words(self, seed_specific_words,
                              max_non_specific_examples=None, debug=None):
    """Learn specific words given a list of seed specific words.

    Using Linear SVM.

    :param list seed_specific_words: List of seed specific words
    :param int max_non_specific_examples: The number of non-specific words
                                          to sample for training
    :param bool debug: When True, also return the training matrix and labels
    :return: List of learned specific words and the classifier object
             (plus ``X`` and ``y`` when ``debug`` is True)
    """
    if debug is None:
        debug = False
    if max_non_specific_examples is None:
        max_non_specific_examples = MAX_NON_SPECIFIC_EXAMPLES
    data = []
    non_specific_example_count = 0
    # Build the training set: every seed word is a positive example; every
    # other vocabulary word is a negative example, capped at
    # max_non_specific_examples to keep the classes roughly balanced.
    for word in self.model.vocab:
        is_specific = word in seed_specific_words
        if not is_specific:
            non_specific_example_count += 1
            if non_specific_example_count <= max_non_specific_examples:
                data.append((self[word], is_specific))
        else:
            data.append((self[word], is_specific))
    # Seeded shuffle makes training reproducible across runs.
    np.random.seed(RANDOM_STATE)
    np.random.shuffle(data)
    X, y = zip(*data)
    X = np.array(X)
    # Row-normalize the embedding vectors to unit length before fitting.
    X /= np.linalg.norm(X, axis=1)[:, None]
    y = np.array(y).astype('int')
    clf = LinearSVC(C=1, class_weight='balanced',
                    random_state=RANDOM_STATE)
    clf.fit(X, y)
    full_specific_words = []
    # Classify the whole vocabulary with the trained SVM; words predicted
    # positive form the expanded specific-word list.
    for word in self.model.vocab:
        vector = [normalize(self[word])]
        if clf.predict(vector):
            full_specific_words.append(word)
    if not debug:
        return full_specific_words, clf
    return full_specific_words, clf, X, y
|
{"/ethically/we/core.py": ["/ethically/we/utils.py"], "/ethically/tests/test_we.py": ["/ethically/we/__init__.py", "/ethically/we/utils.py"]}
|
25,761
|
gitter-badger/ethically
|
refs/heads/master
|
/ethically/tests/test_we.py
|
"Unit test module for ethically.we.core "
# pylint: disable=redefined-outer-name,unused-variable,expression-not-assigned,singleton-comparison,protected-access
import copy
import os
from math import isclose
import numpy as np
import pytest
from gensim.models.keyedvectors import KeyedVectors
from pkg_resources import resource_filename
from ethically.we import GenderBiasWE
from ethically.we.utils import project_reject_vector, project_vector
from ..consts import RANDOM_STATE
ATOL = 1e-6
N_RANDOM_NEUTRAL_WORDS_DEBIAS_TO_TEST = 1000
@pytest.fixture
def gender_biased_we():
    """Load the reduced Bolukbasi word2vec model wrapped in GenderBiasWE."""
    # pylint: disable=C0301
    model = KeyedVectors.load_word2vec_format(
        resource_filename(__name__, os.path.join('data',
                                                 'GoogleNews-vectors-negative300-bolukbasi.bin')),
        binary=True)
    return GenderBiasWE(model, only_lower=True, verbose=True)
def test_words_embbeding_loading(gender_biased_we):
    """The reduced model must contain exactly 26423 words."""
    vocab_size = len(gender_biased_we.model.vocab)
    assert vocab_size == 26423
def test_contains(gender_biased_we):
    """Membership is case-sensitive: only lower-case words are present."""
    present, absent = 'home', 'HOME'
    assert present in gender_biased_we
    assert absent not in gender_biased_we
def test_calc_direct_bias(gender_biased_we):
    """
    Test calc_direct_bias method in GenderBiasWE
    Based on section 5.2
    """
    # TODO: it seems that in the article it was checked on
    # all the profession names, including gender specific ones
    # (e.g. businesswomen)
    default_bias = gender_biased_we.calc_direct_bias()
    assert isclose(default_bias, 0.07, abs_tol=1e-2)
    professions = gender_biased_we._data['profession_names']
    profession_bias = gender_biased_we.calc_direct_bias(professions)
    assert isclose(profession_bias, 0.08, abs_tol=1e-2)
def test_calc_indirect_bias(gender_biased_we, all_zero=False):
    """
    Test calc_indirect_bias method in GenderBiasWE
    Based on figure 3 & section 3.5

    When ``all_zero`` is True (i.e. after debiasing), every pair whose
    second word has no legitimate gender projection is expected to have
    zero indirect bias; pairs with a legitimate projection are skipped.
    """
    # (word1, word2, expected indirect bias, legit gender projection?)
    cases = [
        ('softball', 'pitcher', -0.01, False),
        ('softball', 'bookkeeper', 0.20, False),
        ('softball', 'receptionist', 0.67, False),
        # TODO: in the article it is 0.35 - why?
        ('softball', 'registered_nurse', 0.29, True),
        ('softball', 'waitress', 0.31, True),
        ('softball', 'homemaker', 0.38, True),
        ('football', 'footballer', 0.02, False),
        # TODO: in the article it is 0.31 - why?
        ('football', 'businessman', 0.17, True),
        ('football', 'pundit', 0.10, False),
        ('football', 'maestro', 0.41, False),
        ('football', 'cleric', 0.02, False),
    ]
    for word1, word2, expected, legit in cases:
        if all_zero:
            if legit:
                # these words have a legitimate gender-direction
                # projection, so they are not expected to be zeroed out
                continue
            expected = 0
        assert isclose(gender_biased_we.calc_indirect_bias(word1, word2),
                       expected, abs_tol=1e-2)
def check_all_vectors_unit_length(bias_we):
    """Assert every embedding vector has (squared) norm ~= 1."""
    for token in bias_we.model.vocab:
        squared_norm = (bias_we[token] ** 2).sum()
        np.testing.assert_allclose(squared_norm, 1, atol=ATOL)
def test_neutralize(gender_biased_we, is_preforming=True):
    """
    Test _neutralize method in GenderBiasWE
    """
    neutral_words = gender_biased_we._data['neutral_words']
    if is_preforming:
        gender_biased_we._neutralize(neutral_words)
    # After neutralization every neutral word must have no component
    # along the gender direction.
    projections = [project_vector(gender_biased_we[word],
                                  gender_biased_we.direction)
                   for word in neutral_words]
    np.testing.assert_allclose(projections, 0, atol=ATOL)
    np.testing.assert_allclose(gender_biased_we.calc_direct_bias(), 0,
                               atol=ATOL)
    check_all_vectors_unit_length(gender_biased_we)
    test_calc_indirect_bias(gender_biased_we, all_zero=True)
def test_equalize(gender_biased_we, is_preforming=True):
    """
    Test _equalize method in GenderBiasWE
    """
    equality_sets = gender_biased_we._data['definitional_pairs']
    if is_preforming:
        gender_biased_we._equalize(equality_sets)
    for equality_set in equality_sets:
        rejections = []
        for equality_word in equality_set:
            vector = gender_biased_we[equality_word]
            np.testing.assert_allclose(np.linalg.norm(vector), 1, atol=ATOL)
            # pylint: disable=C0301
            _, rejection_vector = project_reject_vector(
                vector, gender_biased_we.direction)
            rejections.append(rejection_vector)
        # All words of an equality set must share the same rejection
        # (the component orthogonal to the gender direction).
        first_rejection = rejections[0]
        for other_rejection in rejections[1:]:
            np.testing.assert_allclose(first_rejection,
                                       other_rejection,
                                       atol=ATOL)
    check_all_vectors_unit_length(gender_biased_we)
def test_hard_debias_inplace(gender_biased_we, is_preforming=True):
    """
    Test hard_debias method in GenderBiasWE
    """
    # pylint: disable=C0301
    if is_preforming:
        test_calc_direct_bias(gender_biased_we)
        gender_biased_we.debias(method='hard')
    test_neutralize(gender_biased_we, is_preforming=False)
    test_equalize(gender_biased_we, is_preforming=False)
    equality_sets = gender_biased_we._data['definitional_pairs']
    np.random.seed(RANDOM_STATE)
    neutral_words = np.random.choice(gender_biased_we._data['neutral_words'],
                                     N_RANDOM_NEUTRAL_WORDS_DEBIAS_TO_TEST,
                                     replace=False)
    for neutral_word in neutral_words:
        neutral_vec = gender_biased_we[neutral_word]
        for equality_word1, equality_word2 in equality_sets:
            vec1 = gender_biased_we[equality_word1]
            vec2 = gender_biased_we[equality_word2]
            # A neutral word must be equally similar...
            np.testing.assert_allclose(neutral_vec @ vec1,
                                       neutral_vec @ vec2,
                                       atol=ATOL)
            # ...and equally distant to both words of an equality pair.
            np.testing.assert_allclose(np.linalg.norm(neutral_vec - vec1),
                                       np.linalg.norm(neutral_vec - vec2),
                                       atol=ATOL)
def test_hard_debias_not_inplace(gender_biased_we):
    """debias(inplace=False) must leave the original embedding untouched."""
    test_calc_direct_bias(gender_biased_we)
    debiased = gender_biased_we.debias(method='hard',
                                       inplace=False)
    # The original is still biased...
    test_calc_direct_bias(gender_biased_we)
    # ...while the returned copy passes all hard-debias checks.
    test_hard_debias_inplace(debiased, is_preforming=False)
def test_copy(gender_biased_we):
    """A shallow copy clones the direction but shares the gensim model."""
    clone = copy.copy(gender_biased_we)
    assert gender_biased_we.direction is not clone.direction
    assert gender_biased_we.model is clone.model
def test_deepcopy(gender_biased_we):
    """A deep copy clones both the direction and the gensim model."""
    clone = copy.deepcopy(gender_biased_we)
    assert gender_biased_we.direction is not clone.direction
    assert gender_biased_we.model is not clone.model
# TODO deeper testing
def test_evaluate_words_embedding(gender_biased_we):
    """Smoke test: the evaluation report runs without raising."""
    gender_biased_we.evaluate_words_embedding()
# TODO deeper testing, not sure that the number is true
def test_learn_full_specific_words(gender_biased_we):
    """The SVM expansion of the seed set should find 5753 specific words."""
    full_specific_words, clf, X, y = \
        gender_biased_we.learn_full_specific_words(debug=True)
    assert len(full_specific_words) == 5753
|
{"/ethically/we/core.py": ["/ethically/we/utils.py"], "/ethically/tests/test_we.py": ["/ethically/we/__init__.py", "/ethically/we/utils.py"]}
|
25,763
|
DM8tyProgrammer/ml-movie-trailer
|
refs/heads/master
|
/util.py
|
"""
This modules contains the utilities code.
"""
import unicodecsv
def read_csv(file):
    """Load the CSV at *file* and return its rows as a list of dicts."""
    with open(file, "rb") as handle:
        reader = unicodecsv.DictReader(handle)
        return list(reader)
|
{"/movie_center.py": ["/util.py", "/movie.py"]}
|
25,764
|
DM8tyProgrammer/ml-movie-trailer
|
refs/heads/master
|
/movie_center.py
|
"""
This module integrates all the pieces together to wield the website
"""
import util
from fresh_tomatoes import open_movies_page
from movie import Movie
def main():
    """Load movie metadata from movie.csv, build Movie objects and
    open the generated trailer website."""
    raw_movies = util.read_csv('movie.csv')
    movies = []
    for raw_movie in raw_movies:
        # 'genres' is optional in the CSV; it is a '|'-separated list.
        if 'genres' in raw_movie:
            movie_genres = raw_movie['genres'].split('|')
        else:
            movie_genres = []
        movies.append(Movie(raw_movie['title'],
                            raw_movie['poster'],
                            raw_movie['trailer'],
                            movie_genres))
    # Render and open the web page with the movies.
    open_movies_page(movies)


main()
|
{"/movie_center.py": ["/util.py", "/movie.py"]}
|
25,765
|
DM8tyProgrammer/ml-movie-trailer
|
refs/heads/master
|
/movie.py
|
"""
This module holds the Movie class which represents meta-data of a movie.
"""
import re
class Movie():
    """Holds the metadata of a single movie."""

    def __init__(self, title, poster_image_url, trailer_youtube_url, genres):
        self.title = title
        self.poster_image_url = poster_image_url
        self.trailer_youtube_url = trailer_youtube_url
        self.genres = genres

    def get_youtube_id(self):
        """Extract the YouTube video id from the trailer URL.

        Supports both ``watch?v=<id>`` and ``youtu.be/<id>`` style URLs;
        returns None when no id can be found.
        """
        url = self.trailer_youtube_url
        for pattern in (r'(?<=v=)[^&#]+', r'(?<=be/)[^&#]+'):
            match = re.search(pattern, url)
            if match:
                return match.group(0)
        return None
|
{"/movie_center.py": ["/util.py", "/movie.py"]}
|
25,791
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/util/log.py
|
import logging
from datetime import datetime
from locale import LC_ALL, setlocale
setlocale(LC_ALL, 'pt_BR.utf-8')
def log(nome_do_arquivo, mensagem, tipo, titulo=False, console=True, modo="a"):
    """Write *mensagem* to "<nome_do_arquivo>.log" (and optionally stdout).

    :param nome_do_arquivo: log file base name (".log" is appended)
    :param mensagem: message text; it is also used as the logger name
    :param tipo: "info" or "warn" - selects the logging level
    :param titulo: when True, emit the bare message (a title line) instead
                   of the timestamped format
    :param console: when False, suppress the stream (stdout/stderr) handler
    :param modo: open mode for the log file (default "a", append)
    """
    logger = logging.getLogger(f"{mensagem}")
    logger.setLevel(logging.DEBUG)

    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    fl = logging.FileHandler(
        filename=f"{nome_do_arquivo}.log", encoding='utf-8', mode=modo)
    fl.setLevel(logging.DEBUG)

    if titulo:
        # Title lines carry only the logger name (== the message text).
        formatter = logging.Formatter(fmt='%(name)s\n')
    else:
        # BUGFIX: the original interpolated datetime.now() into datefmt at
        # formatter-creation time, freezing one fixed time into the format
        # string; use the record's own timestamp directives instead.
        formatter = logging.Formatter(
            fmt='%(asctime)s - %(levelname)s - %(message)s',
            datefmt='%d/%m/%Y %H:%M:%S')

    if console:
        ch.setFormatter(formatter)
        logger.addHandler(ch)
    fl.setFormatter(formatter)
    logger.addHandler(fl)

    if tipo == "info":
        logger.info(f"{mensagem}")
    elif tipo == "warn":
        logger.warning(f"{mensagem}")

    # Detach the handlers so repeated calls do not duplicate output, and
    # close the file handler to avoid leaking file descriptors (BUGFIX:
    # the original never closed it).
    logger.removeHandler(fl)
    logger.removeHandler(ch)
    fl.close()
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,792
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/modules/conteudo.py
|
# importando a classe Inserir_Conteudo
from classes.Conteudo import Inserir_Conteudo
from util.funcoes import * # funcao de criacao de menu e pasta
from util.log import log # funcao de criacao de logs.
from locale import LC_ALL, setlocale
import os
import sys
import eel
from datetime import datetime, timedelta
# Inserts the week's video content of every course ("disciplina") into the
# SEI system via Selenium, reporting progress to the eel GUI.
def iniciar_insercao(disciplinas, configuracoes, titulosemana, opcao):
    """Insert the week's videos for every course in *disciplinas*.

    :param disciplinas: list of dicts with professor, course name, content
                        code and the week's videos
    :param configuracoes: dict with site URL, credentials and content URL
    :param titulosemana: title of the week to create in each course
    :param opcao: the run is only performed when this string contains "S"

    NOTE(review): structure reconstructed from a whitespace-mangled source;
    the placement of the total_videos / total_d_inseridas counters is
    inferred - confirm against the original repository.
    """
    agora = datetime.now()
    hora_inicial = agora.strftime("%d/%m/%Y %H:%M:%S")
    total_videos = 0
    total_d_inseridas = 0
    d_faltam = 0
    setlocale(LC_ALL, 'pt_BR.utf-8')
    # (disabled) menu option 2 used to look a single course up by its
    # content code and restrict the run to it.
    titulo_semana = titulosemana
    pasta_atual = os.getcwd()  # current working directory
    if "S" in opcao:
        agora = datetime.now()
        hora_inicial = agora.strftime("%d/%m/%Y %H:%M:%S")
        # criar_pasta(name, path) creates the folder when missing and
        # returns its path.
        pasta_log = criar_pasta("logs", pasta_atual)
        pasta_semanas = criar_pasta(titulo_semana, pasta_log)
        arquivo = f"{pasta_semanas}\{titulo_semana}"
        Disciplinas_que_faltam = f"{pasta_semanas}\Disciplinas_que_faltam"
        arquivo_videos_sem_titulos = f"{pasta_semanas}\Videos_sem_temas"
        # Open the browser session and sign in.
        Processar = Inserir_Conteudo()
        Processar.abrir(configuracoes["site"])
        Processar.logar(configuracoes["usuario"], configuracoes["senha"])
        for disciplina in disciplinas:  # iterate over the courses
            # Write the course header line to the week's log file.
            log(arquivo,
                f"\n\n{disciplina['professor']} - {disciplina['nome_disciplina']}", "info", True)
            videos = True if len(disciplina["videos"]) > 0 else False
            eel.timeline(disciplina['professor'], disciplina['nome_disciplina'], videos, disciplina['codigo_conteudo'])
            # Only proceed when there are videos to publish.
            if len(disciplina["videos"]) > 0:
                # Look the course up by content URL + code.
                Processar.pesquisar_conteudo(
                    configuracoes["url_conteudo"], disciplina["codigo_conteudo"])
                # Wait for the system to finish loading.
                Processar.aguardar_processo()
                # Select the course from the result list.
                Processar.selecionar_disciplina()
                # Check whether the week already exists for this course.
                semana_existe = Processar.verificar_conteudo(
                    titulosemana)
                # Only insert when the week does not exist yet.
                if semana_existe == 0:
                    eel.logsemana(False, disciplina['codigo_conteudo'])
                    cont_titulo_video = 0
                    # Create the week entry.
                    Processar.inserir_semana(titulosemana)
                    Processar.aguardar_processo()
                    for video in disciplina["videos"]:
                        cont_titulo_video += 1
                        numero_video = f"{cont_titulo_video:02d}"
                        if video["titulo"] != '':
                            titulo = video["titulo"]
                        else:
                            # Untitled video: fall back to a sequence number
                            # and record the gap in two log files.
                            titulo = f'VIDEO {numero_video}'
                            log(arquivo_videos_sem_titulos,
                                f"*{disciplina['professor']}* - {disciplina['nome_disciplina']} - {titulo} sem tema", "warn", True, False)
                            log(arquivo,
                                f"{disciplina['professor']} - {disciplina['nome_disciplina']} - {titulo} sem tema", "warn")
                        # Add the video entry and fill in title + frame.
                        Processar.adicionar_video()
                        Processar.aguardar_processo()
                        Processar.inserir_video(titulo, video["frame"])
                        Processar.aguardar_processo()
                        log(arquivo, f"Video: {titulo} inserido", "info")
                        eel.logvideos(titulo, disciplina['codigo_conteudo'])
                        total_videos += 1
                    total_d_inseridas += 1
                else:
                    # The week is already present for this course.
                    log(arquivo, f"{titulo_semana} já inserida !", "info")
                    eel.logsemana(True, disciplina['codigo_conteudo'])
            else:
                # No videos available: record the course in both log files.
                log(Disciplinas_que_faltam,
                    f"*{disciplina['professor']}* - {disciplina['nome_disciplina']}", "warn", True, False)
                log(arquivo, "Videos indisponiveis !", "warn")
                d_faltam += 1
    else:
        # User declined the run: nothing to do.
        # sys.exit()
        pass
    agora = datetime.now()
    hora_final = agora.strftime("%d/%m/%Y %H:%M:%S")
    diferenca = tempo(hora_inicial, hora_final)
    print("\n")
    print("-"*50)
    log("app", f"\nTempo de execução - {diferenca}", "info", True)
    print("-"*50)
    diferenca = str(diferenca)
    eel.relatoriofinal(diferenca, len(disciplinas), total_videos, total_d_inseridas, d_faltam, hora_final)
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,793
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/modules/prova.py
|
from classes.Conteudo import Inserir_Prova
from util.funcoes import *
from config.prova import configuracao_prova
import os
from util.log import log
from locale import LC_ALL, setlocale
from datetime import datetime, timedelta
def iniciar_insercao_prova(disciplinas, configuracoes, iniciar, configuracao_prova=configuracao_prova):
    """Create and configure the on-line exam ("prova") week for each course.

    :param disciplinas: list of course dicts (professor, name, content code)
    :param configuracoes: site URL, credentials and content URL
    :param iniciar: the run is only performed when this string contains "S"
    :param configuracao_prova: exam settings (title, duration, questions...)

    NOTE(review): structure reconstructed from a whitespace-mangled source -
    confirm the nesting against the original repository.
    """
    setlocale(LC_ALL, 'pt_BR.utf-8')
    agora = datetime.now()
    hora_inicial = agora.strftime("%d/%m/%Y %H:%M:%S")
    titulo_semana = configuracao_prova["titulo_semana"]
    pasta_atual = os.getcwd()  # current working directory
    pasta_log = criar_pasta("logs", pasta_atual)
    pasta_semanas = criar_pasta(titulo_semana, pasta_log)
    arquivo = f"{pasta_semanas}\{titulo_semana}"
    if "S" in iniciar:
        # Open the browser session and sign in.
        Prova = Inserir_Prova()
        Prova.abrir(configuracoes["site"])
        Prova.logar(configuracoes["usuario"], configuracoes["senha"])
        for disciplina in disciplinas:
            log(arquivo, f"\n\n{disciplina['professor']} - {disciplina['nome_disciplina']}", "info", True)
            Prova.pesquisar_conteudo(configuracoes["url_conteudo"], disciplina['codigo_conteudo'])
            Prova.aguardar_processo()
            Prova.selecionar_disciplina()
            # Skip courses where the exam week already exists.
            semana_existe = Prova.verificar_conteudo(configuracao_prova["titulo_semana"])
            if semana_existe == 0:
                Prova.inserir_semana(configuracao_prova["titulo_semana"])
                Prova.aguardar_processo()
                # (disabled) subject pre-selection
                # Prova.selecionar_assunto("85")
                # Prova.salvar()
                # Prova.aguardar_processo()
                Prova.adicionar_prova()
                Prova.aguardar_processo()
                Prova.inserir_informaçoes(configuracao_prova["titulo_da_prova"], configuracao_prova["conteudo_apresentacao"])
                Prova.aguardar_processo()
                Prova.editar_conteudo()
                Prova.adicionar_avaliacao_online()
                Prova.aguardar_processo()
                Prova.adicionar_valor_por_id("formAddRecursoEducacional:tituloRE", configuracao_prova["titulo_da_prova"])
                # Question selection policy.
                Prova.selecionar_opcao("formAddRecursoEducacional:politicaSelecaoQuestao", "QUESTOES_TODOS_ASSUNTOS_CONTEUDO")
                Prova.aguardar_processo()
                # Question distribution rule.
                Prova.selecionar_opcao("formAddRecursoEducacional:regraDistruicaoQuestaoAvaliacaoOnline", "QUANTIDADE_DISTRUIBUIDA_ENTRE_ASSUNTOS")
                Prova.aguardar_processo()
                # Exam monitoring parameters (selected by full XPath).
                Prova.selecionar_opcao_por_path("/html/body/div[1]/div[2]/table/tbody/tr/td/table/tbody/tr[17]/td/div/div[2]/div[4]/div/form/div[2]/div[1]/table/tbody/tr[4]/td[2]/select", "1")
                Prova.aguardar_processo()
                # Minimum number of correct answers to pass.
                Prova.adicionar_valor_por_path('//*[@id="formAddRecursoEducacional:panelAvaliacaoOnline"]/table/tbody/tr[5]/td[2]/input', configuracao_prova["acertos"])
                # Exam time limit (in minutes).
                Prova.adicionar_valor_por_id("formAddRecursoEducacional:tempoLimiteRealizacaoAvaliacaoOnline", configuracao_prova["tempo"])
                # Release-period rule for the on-line exam.
                Prova.selecionar_opcao("formAddRecursoEducacional:regraDefinicaoPeriodoAvaliacaoOnline", "CALENDARIO_LANCAMENTO_NOTA")
                Prova.aguardar_processo()
                # Default grade variable.
                Prova.selecionar_opcao("formAddRecursoEducacional:variavelNotaPadraoAvaliacaoOnline", "SR")
                # Allow repeated questions from the second attempt on.
                Prova.click_opcao_id("formAddRecursoEducacional:permiteRepeticoesDeQuestoesAPartirSegundaAvaliacaoOnlineAluno")
                # Let the student advance without the exam grade posted.
                Prova.click_opcao_id("formAddRecursoEducacional:permiteAlunoAvancarConteudoSemLancarNotaReaAvaliacaoOnline")
                # Description / guidance text.
                Prova.inserir_descricao(configuracao_prova["descricao"])
                # Easy questions: count and value per question.
                Prova.questoes(configuracao_prova["dificuldade"], configuracao_prova["qnt_de_questoes"], configuracao_prova["valor_por_questao"])
                Prova.aguardar_processo()
                # Medium questions: count and value per question.
                Prova.questoes(configuracao_prova["dificuldade2"], configuracao_prova["qnt_de_questoes"], configuracao_prova["valor_por_questao"])
                Prova.aguardar_processo()
                # Simulate the exam to validate the question pool:
                # click "simulate evaluation"...
                Prova.xpath_click_script("/html/body/div[1]/div[2]/table/tbody/tr/td/table/tbody/tr[17]/td/div/div[2]/div[4]/div/form/table[2]/tbody/tr[3]/td/table/tbody/tr/td/input[2]")
                Prova.aguardar_processo()
                # ...then confirm on the second simulation screen.
                Prova.xpath_click_script("/html/body/div[6]/div[4]/div/form/div/input")
                Prova.aguardar_processo()
                # Inspect the simulated questions; only save on success.
                questoes = Prova.verificar_questoes()
                log(arquivo, f"{questoes['texto']}", "info")
                if questoes["tipo"] == "info":
                    Prova.salvar_prova()
                    log(arquivo, f"Prova inserida com sucesso!", "info")
                else:
                    log(arquivo, f"Prova não inserida!", "warn")
            else:
                log(arquivo, f"{titulo_semana} já inserida !", "info")
    agora = datetime.now()
    hora_final = agora.strftime("%d/%m/%Y %H:%M:%S")
    diferenca = tempo(hora_inicial, hora_final)
    print("\n")
    print("-"*50)
    log(arquivo, f"\nTempo de execução - {diferenca}", "info", True)
    log("app", f"\nTempo de execução - {diferenca}", "info", True, False)
    print("-"*50)
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,794
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/util/funcoes.py
|
import os
import pathlib
from util.log import log
from datetime import datetime
import json
def menu(*args):
    """Clear the screen, print a numbered menu of *args* plus a final
    "Sair" (exit) entry, and return the number of that exit entry."""
    os.system("cls")
    print(f"\nAUTOSEI\n\n")
    for position, label in enumerate(args, start=1):
        print(f"{position}. {label}")
    exit_option = len(args) + 1
    print(f"\n{exit_option}. Sair\n\n\n")
    return exit_option
def criar_pasta(nome_da_pasta, pasta_atual):
    """Ensure sub-folder *nome_da_pasta* exists under *pasta_atual*.

    Returns the folder path; on a creation error the error is printed
    and None is returned.
    """
    caminho_pasta = os.path.join(pasta_atual, nome_da_pasta)
    if pathlib.Path(caminho_pasta).is_dir():
        log("app", f"A pasta {nome_da_pasta} já existe", "info")
        return caminho_pasta
    try:
        os.mkdir(caminho_pasta)
    except OSError as error:
        print(error)
    else:
        log("app", f"Pasta {nome_da_pasta} criada", "info")
        return caminho_pasta
def tempo(hora_inicial, hora_final):
    """Return the timedelta between two 'dd/mm/YYYY HH:MM:SS' strings."""
    fmt = "%d/%m/%Y %H:%M:%S"
    inicio = datetime.strptime(hora_inicial, fmt)
    fim = datetime.strptime(hora_final, fmt)
    return fim - inicio
def abrir_arquivo_json(caminho):
    """Read the UTF-8 JSON file at *caminho* and return the parsed object."""
    with open(caminho, encoding="utf8") as file:
        # json.load parses straight from the file object instead of
        # reading the whole text first and calling json.loads.
        return json.load(file)
def listar_disciplinas(disciplinas):
    """Clear the screen, list every course with its professor, show the
    total, and wait for the user to press enter."""
    os.system("cls")
    print("\nDISCIPLINAS\n\n")
    for disciplina in disciplinas:
        print(
            f"{disciplina['professor']} - {disciplina['nome_disciplina']}\n")
    divisor = "-" * 50
    print(divisor)
    print(f"\nTotal de disciplinas encontradas: {len(disciplinas)}\n\n")
    input("Tecle enter para continuar...")
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,795
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/teste/main.py
|
import pymongo
class ConteudoMDB:
    """Minimal MongoDB access helper (scratch/test version).

    NOTE(review): the connection string embeds credentials in source code;
    they should be moved to environment variables or a secrets store.
    """

    def __init__(self, banco):
        # Connect to the Atlas cluster and select database *banco*.
        self.client = pymongo.MongoClient(
            "mongodb+srv://novousuario:ggwaE2eZPpPAGAEx@cluster0.2if2x.mongodb.net")
        self.banco = self.client[f"{banco}"]
        # Active collection; set later through tabela().
        self.table = None

    def tabela(self, tabela):
        """Select *tabela* as the active collection."""
        table = self.banco[f"{tabela}"]
        self.table = table

    def insert(self, document):
        """Insert one document; True on success, False on duplicate key."""
        try:
            q = self.table.insert_one(document)
            print("Registrado com sucesso")
            return True
        except pymongo.errors.DuplicateKeyError:
            print(f"Já existe um registro")
            return False

    def find_all(self):
        """Return a cursor over all documents, or 0 when the collection
        is empty."""
        query = self.table
        count = query.estimated_document_count()
        if count > 0:
            result = query.find()
            return result
        else:
            print("Sem registros")
            return 0
# disciplina = {
# "_id": 768,
# "codigo_disciplina": 447,
# "nome_disciplina": "Direito Processual Civil III",
# "professor": "Thiago",
# "codigo_conteudo": 768
# }
# conteudo_mdb = ConteudoMDB("AutoSEI")
# conteudo_mdb.tabela("Disciplinas")
# # conteudo_mdb.insert(disciplina)
# conteudo_mdb.find_all()
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,796
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/modules/conteudoMDB.py
|
from classes.ConteudoMDB import ConteudoMDB
import eel
def criar_db(nome):
    """Create database *nome* by inserting a marker document into a
    throwaway collection (Mongo creates databases lazily on first write)."""
    cliente = ConteudoMDB(nome)
    cliente.tabela("tabela de teste")
    return cliente.insert({"criado": True})
def listar_dbs():
    """Return the names of all user databases on the cluster."""
    cliente = ConteudoMDB()
    return cliente.list_dbs()
def inserir(banco, document, tabela, muitos=False):
    """Insert *document* (or many documents when *muitos*) into
    banco/tabela; print and return whether the insert succeeded."""
    cliente = ConteudoMDB(banco)
    cliente.tabela(tabela)
    result = cliente.insert(document, muitos)
    if result:
        print("Inserido com sucesso!")
    else:
        print("Registro já existe!")
    return result
def listartudo(banco, tabela):
    """Return every document of banco/tabela as a list.

    Returns an empty list (after printing a notice) when the collection
    has no documents.
    """
    cliente = ConteudoMDB(banco)
    cliente.tabela(tabela)
    result = cliente.find_all()
    disciplinas = []
    if result == 0:
        # BUGFIX: this notice sat after the return statement in the
        # original and was therefore unreachable.
        print("Nenhuma disciplina encontrada")
        return disciplinas
    for r in result:
        disciplinas.append(r)
    return disciplinas
def remover(banco, tabela, documento):
    """Delete one document matching *documento*; return the deleted count."""
    cliente = ConteudoMDB(banco)
    cliente.tabela(tabela)
    return cliente.remove(documento)
def listar_tbs(banco, nome):
    """Return the collection names of *banco* whose name contains *nome*."""
    cliente = ConteudoMDB(banco)
    return cliente.list_tables(nome)
def criar_semana_insercao(banco, grupo_disciplinas, semana_a_ser_inserida):
    """Assemble, for each course of *grupo_disciplinas*, the list of
    videos stored for the given week."""
    cliente = ConteudoMDB(banco)
    cliente.tabela(grupo_disciplinas)
    return cliente.montar_videos_da_semana(semana_a_ser_inserida)
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,797
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/main.py
|
import eel
import json
from modules.conteudo import iniciar_insercao
from modules.conteudoMDB import *
# from modules.prova import iniciar_insercao_prova
from util.funcoes import *
import os

# APPLICATION ENTRY POINT
# eel bridges the functions below to the HTML/JS front end in ./HTMLS.
eel.init('HTMLS', allowed_extensions=['.js', '.html'])


@eel.expose
def inserir_documento(banco, document, tabela, muitos=False):
    # Insert one document (or many when muitos=True) into banco/tabela.
    return inserir(banco, document, tabela, muitos)


@eel.expose
def listar_documentos(banco, tabela="Nenhuma"):
    # List every document of banco/tabela.
    return listartudo(banco, tabela)


@eel.expose
def listar_bancos_de_dados():
    # List the databases available on the cluster.
    return listar_dbs()


@eel.expose
def remover_documentos(banco, tabela, document):
    # Remove one document matching *document*.
    return remover(banco, tabela, document)


@eel.expose
def listar_tabelas(banco, nome):
    # List collections of *banco* filtered by *nome*.
    return listar_tbs(banco, nome)


@eel.expose
def criar_banco(nome):
    # Create a new database named *nome*.
    return criar_db(nome)


@eel.expose
def carregar_disciplinas_para_insercao(banco, grupo_disciplinas, semana_a_ser_inserida):
    # Build the per-course video lists for the selected week.
    return criar_semana_insercao(banco, grupo_disciplinas, semana_a_ser_inserida)


@eel.expose
def carregar_disciplinas():
    # NOTE(review): 'disciplinas' is not defined anywhere in this module
    # (it only appears in the commented-out block below); calling this
    # raises NameError. Confirm the intended data source.
    return disciplinas


@eel.expose
def carregar_configuracoes():
    # NOTE(review): 'configuracoes' is likewise undefined here - verify.
    return configuracoes


@eel.expose
def insercao(disciplinas, configuracoes, titulosemana, opcao):
    # Kick off the Selenium-driven weekly content insertion.
    iniciar_insercao(disciplinas, configuracoes, titulosemana, opcao)


eel.start('index.html')

# (disabled) one-off scripts for seeding the database / exam insertion
# path = os.getcwd() + "/"
# semana = path + "config/henrique.json"
# configuracoes = path + "config/config.json"
# disciplinas = abrir_arquivo_json(semana)
# configuracao = abrir_arquivo_json(configuracoes)
# inserir("Henrique", disciplinas, "Disciplinas 2021.2", True)
# iniciar_insercao_prova(disciplinas, configuracao , "Sim")
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,798
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/classes/ConteudoMDB.py
|
import pymongo
class ConteudoMDB:
    """MongoDB access layer for the AUTOSEI data: databases, collections,
    documents and the weekly video assembly.

    NOTE(review): the connection string embeds credentials in source code;
    move them to environment variables or a secrets store.
    """

    def __init__(self, banco="Default"):
        # Connect to the Atlas replica set and select database *banco*.
        self.client = pymongo.MongoClient(
            f"mongodb://novousuario:ggwaE2eZPpPAGAEx@cluster0-shard-00-00.2if2x.mongodb.net:27017,cluster0-shard-00-01.2if2x.mongodb.net:27017,cluster0-shard-00-02.2if2x.mongodb.net:27017/{banco}?ssl=true&replicaSet=atlas-sara4b-shard-0&authSource=admin&retryWrites=true&w=majority")
        self.banco = self.client[f"{banco}"]
        # Active collection; set later through tabela().
        self.table = None

    # Returns a list of names (stored underscores shown as spaces).
    def list_tables(self, nome=""):
        """List collections of the current database whose name contains
        *nome* (case-insensitive)."""
        filter = {"name": {"$regex": f".*{nome}", "$options": 'i'}}
        lista = self.banco.list_collection_names(filter=filter)
        return [l.replace("_", " ") for l in lista]

    def list_dbs(self):
        """List user databases, hiding 'admin' and 'local'."""
        lista = self.client.list_database_names()
        lista = [l for l in lista if not("admin" in l) and not("local" in l)]
        return lista

    def tabela(self, tabela):
        """Select *tabela* (spaces stored as underscores) as the active
        collection."""
        tabela = tabela.replace(" ", "_")
        table = self.banco[f"{tabela}"]
        self.table = table

    # def deletar_tabela_criada(self):
    #     self.table.drop()

    def insert(self, document, muitos=False):
        """Insert one document (or many when *muitos*); return True on
        success, False when a duplicate key is rejected."""
        try:
            if muitos == False:
                q = self.table.insert_one(document)
            else:
                q = self.table.insert_many(document)
            return True
        except pymongo.errors.DuplicateKeyError:
            # single insert hit an existing _id
            return False
        except pymongo.errors.BulkWriteError:
            # bulk insert hit at least one existing _id
            return False

    def find_all(self):
        """Return a cursor over all documents ordered by _id, or 0 when
        the collection is empty."""
        query = self.table
        count = query.estimated_document_count()
        if count > 0:
            result = query.find({"$query": {}, "$orderby": {"_id": 1}})
            return result
        else:
            return 0

    def find_in_week(self, semana, query):
        """Return a cursor over documents of collection *semana* matching
        *query*, or 0 when that collection is empty."""
        tabela = self.banco[f"{semana}"]
        count = tabela.estimated_document_count()
        if count > 0:
            result = tabela.find(query)
            return result
        else:
            return 0

    def remove(self, document):
        """Delete one document matching *document*; return deleted count."""
        q = self.table.delete_one(document)
        result = q.deleted_count
        return result

    def montar_videos_da_semana(self, semana):
        """Assemble, for every course in the active collection, the list
        of its videos stored in the week collection *semana*.

        NOTE(review): indentation reconstructed from a mangled source;
        the return is placed at function level so an empty collection
        yields [] - confirm against the original repository.
        """
        semana = semana.replace(" ", "_")
        disciplinas = []
        resultD = self.find_all()
        if resultD != 0:
            for r in resultD:
                disciplina = {"codigo_disciplina": r["codigo_disciplina"],
                              "nome_disciplina": r["nome_disciplina"],
                              "professor": r["professor"],
                              "codigo_conteudo": r["codigo_conteudo"],
                              "videos": []}
                resultV = self.find_in_week(
                    semana, {"codigo_conteudo": r["codigo_conteudo"]})
                if resultV != 0:
                    for v in resultV:
                        frames = {
                            "titulo": v["titulo"],
                            "frame": v["frame"]
                        }
                        disciplina["videos"].append(frames)
                disciplinas.append(disciplina)
        return disciplinas
disciplinas = [
{
"_id": 6,
"codigo_disciplina": 94,
"nome_disciplina": "Ciência Política",
"professor": "Joao",
"codigo_conteudo": 6
},
{
"_id": 7,
"codigo_disciplina": 94,
"nome_disciplina": "Ciência Política II",
"professor": "Paula",
"codigo_conteudo": 7
},
{
"_id": 8,
"codigo_disciplina": 1286,
"nome_disciplina": "Filosofia Geral e Jurídica",
"professor": "Mauricio",
"codigo_conteudo": 8
}
]
# nome = "Rutemberg"
# conteudo_mdb = ConteudoMDB()
# conteudo_mdb.tabela("bancocriado")
# conteudo_mdb.insert({"nome": nome})
# print(conteudo_mdb.list_dbs())
# conteudo_mdb.deletar_tabela_criada()
# conteudo_mdb.tabela("DISCIPLINAS TESTE")
# print(conteudo_mdb.montar_videos_da_semana("SEMANA TESTE"))
# conteudo_mdb.insert(disciplinas, True)
# frame = {"frame": "123"}
# print(conteudo_mdb.remove(frame))
# conteudo_mdb.insert(videos, True)
# conteudo_mdb.find_all()
# lista = conteudo_mdb.list_tables("disci")
# print(lista)
# print(conteudo_mdb.find_one(1))
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,799
|
Rutemberg/AUTOSEI-GUI
|
refs/heads/main
|
/classes/Conteudo.py
|
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common import exceptions
import sys
class Inserir_Conteudo:
# Inicia o driver caso a resposta seja sim ou s
def __init__(self):
self.options = Options()
self.options.add_experimental_option(
'excludeSwitches', ['enable-logging'])
# self.options.add_argument("headless")
self.driver = Chrome(options=self.options)
# abre o site pela url
def abrir(self, url):
self.driver.get(url)
# Aguardar ate que a mensagem de loading suma
def aguardar_processo(self):
wait = WebDriverWait(self.driver, 300)
# Aguarda até que o elemento(loading do sistema) suma da tela
wait.until(EC.invisibility_of_element((By.CLASS_NAME, 'rf-st-start')))
# Loga no site usando o usuario e senha
def logar(self, usuario, senha):
# procura os elementos
usuario_campo = self.driver.find_element_by_id("formLogin:username")
senha_campo = self.driver.find_element_by_id("formLogin:senha")
# Insere os valores nos campos
usuario_campo.send_keys(usuario)
senha_campo.send_keys(senha)
# Procura o botao e clica
logar = self.driver.find_element_by_id("formLogin:btLogin")
logar.click()
# Pesquisa o conteudo
def pesquisar_conteudo(self, url_conteudo, codigo_conteudo):
self.driver.get(url_conteudo) # navegue ate a url
# Usando o select encontra o elemento select do html
consultar_por = Select(self.driver.find_element_by_id("form:consulta"))
# Seleciona o valor
consultar_por.select_by_value("codigoConteudo")
self.aguardar_processo()
# Consulta pelo codigo do conteudo
valor_consulta = self.driver.find_element_by_id("form:valorConsulta")
valor_consulta.clear() # limpa o campo
valor_consulta.send_keys(codigo_conteudo) # Escreve o codigo
# Clica no botao consultar
consultar = self.driver.find_element_by_id("form:consultar")
consultar.click()
# Seleciona e clica na disciplina encontrada, somente a primeira e unica encontrada
def selecionar_disciplina(self):
click_disciplina = self.driver.find_element_by_id(
"form:items:0:descricao")
click_disciplina.click()
# Verifica pelo texto se o conteudo existe
def verificar_conteudo(self, elem):
conteudo_existe = self.driver.find_elements_by_link_text(f"{elem}")
return len(conteudo_existe)
# Insere o titulo da semana
def inserir_semana(self, semana):
titulo_unidade = self.driver.find_element_by_id(
"form:unidadeConteudoTitulo") # Pega o input pelo id
titulo_unidade.send_keys(semana) # Digita o valor
# adicionar = self.driver.find_element_by_xpath(
# "//*[@id='form:j_idt595']") # pega o botao
adicionar = self.driver.find_element_by_xpath(
"/html/body/div[1]/div[2]/table/tbody/tr/td/table/tbody/tr[29]/td/form/table/tbody/tr[2]/td/div/div[3]/div/table/tbody/tr[2]/td/input") # pega o botao
adicionar.click() # clica para salvar
# Adiciona pegando a ultima semana inserida
def adicionar_video(self):
video = self.driver.find_elements_by_xpath(
"//*[@title='Adicionar Página']")[-1].get_attribute("id")
self.driver.execute_script(
f'document.getElementById("{video}").click()')
# Insere os videos por titulo e frame
def inserir_video(self, titulo, frame):
# Script para inserir o valor em um documento por innerHTML
self.driver.execute_script('document.getElementsByClassName("cke_wysiwyg_frame cke_reset")[0].contentDocument.body.innerHTML = ' + f'`<iframe src="https://player.vimeo.com/video/{frame}" width="640" height="360" frameborder="0" allow="autoplay; fullscreen; picture-in-picture" allowfullscreen></iframe>`')
# self.driver.execute_script(
# 'document.getElementsByClassName("cke_wysiwyg_frame cke_reset")[0].contentDocument.body.innerHTML = ' + f'`<div style="padding:75% 0 0 0;position:relative;"><iframe src="https://player.vimeo.com/video/{frame}?badge=0&autopause=0&player_id=0&app_id=58479" frameborder="0" allow="autoplay; fullscreen;picture-in-picture" allowfullscreen style="position:absolute;top:0;left:0;width:100%;height:100%;"></iframe></div><script src="https://player.vimeo.com/api/player.js"></script>`')
# Escreve o titulo no input encontrado
titulo_video = self.driver.find_element_by_id("formNovaPagina:titulo")
titulo_video.send_keys(titulo)
self.driver.execute_script(
'document.getElementById("formNovaPagina:salvar").click()')
class Inserir_Prova(Inserir_Conteudo):
def __init__(self):
super().__init__()
def adicionar_prova(self):
self.driver.find_elements_by_xpath(
"//*[@title='Adicionar Página']")[-1].click()
def inserir_informaçoes(self, titulo, informacao):
# Script para inserir o valor em um documento por innerHTML
self.driver.execute_script(
'document.getElementsByClassName("cke_wysiwyg_frame cke_reset")[0].contentDocument.body.innerHTML = ' + f"`{informacao}`")
# Escreve o titulo no input encontrado
titulo_prova = self.driver.find_element_by_id("formNovaPagina:titulo")
titulo_prova.send_keys(titulo)
self.driver.execute_script(
'document.getElementById("formNovaPagina:salvar").click()')
def editar_conteudo(self):
self.driver.find_elements_by_xpath(
"//*[@title='Editar']")[-1].click()
def adicionar_avaliacao_online(self):
self.driver.find_element_by_id("form:addPosterior").click()
id_elemento = self.driver.find_elements_by_xpath(
"//*[@title='Avalição Online']")[-1].get_attribute("id")
self.driver.execute_script(
f'document.getElementById("{id_elemento}").click()')
def adicionar_valor_por_id(self, id_elemento, valor):
input_valor = self.driver.find_element_by_id(id_elemento)
input_valor.send_keys(valor)
def adicionar_valor_por_path(self, path, valor):
input_valor = self.driver.find_element_by_xpath(path)
input_valor.send_keys(valor)
def selecionar_opcao(self, id_elemento, valor):
selecionar_por = Select(self.driver.find_element_by_id(id_elemento))
selecionar_por.select_by_value(valor)
def selecionar_assunto(self, valor):
assunto = self.driver.find_elements_by_tag_name("select")[-2].get_attribute("name")
selecionar_por = Select(self.driver.find_element_by_name(assunto))
selecionar_por.select_by_value(valor)
def selecionar_opcao_por_nome(self, nome_elemento, valor):
selecionar_por = Select(
self.driver.find_element_by_name(nome_elemento))
selecionar_por.select_by_value(valor)
def selecionar_opcao_por_path(self, path_elemento, valor):
selecionar_por = Select(
self.driver.find_element_by_xpath(path_elemento))
selecionar_por.select_by_value(valor)
def click_opcao_id(self, id_elemento):
selecionar = self.driver.find_element_by_id(id_elemento)
selecionar.click()
def inserir_descricao(self, descricao):
self.driver.execute_script(
f'document.getElementById("formAddRecursoEducacional:textoAvaliacaoOnline").innerHTML = `{descricao}`')
def limpar_campo(self, id_elemento):
limpar = self.driver.find_element_by_id(id_elemento)
limpar.clear()
def questoes(self, nivel, qtd, valor):
if nivel == "facil":
dificuldade = "formAddRecursoEducacional:quantidadeNivelQuestaoFacil"
valor_por_q = "formAddRecursoEducacional:notaPorQuestaoNivelFacil"
elif nivel == "medio":
dificuldade = "formAddRecursoEducacional:quantidadeNivelQuestaoMedio"
valor_por_q = "formAddRecursoEducacional:notaPorQuestaoNivelMedio"
elif nivel == "dificil":
id_elemento = self.driver.find_element_by_xpath(
"/html/body/div[1]/div[2]/table/tbody/tr/td/table/tbody/tr[17]/td/div/div[2]/div[4]/div/form/div[2]/div[1]/div/div[2]/table[1]/tbody/tr[1]/td[2]/span/input").get_attribute("id")
dificuldade = id_elemento
valor_por_q = "formAddRecursoEducacional:notaPorQuestaoNivelDificil"
self.driver.execute_script(f'''document.getElementById("{dificuldade}").value = {qtd};
document.getElementById("{valor_por_q}").value = "{valor}";
document.getElementById("{dificuldade}").onchange();''')
def xpath_click_script(self, path):
elemetoid = self.driver.find_element_by_xpath(
f"{path}").get_attribute("id")
self.driver.execute_script(
f'document.getElementById("{elemetoid}").click()')
def verificar_questoes(self):
try:
questoes = self.driver.find_element_by_xpath(
"/html/body/div[6]/div[4]/div/form/div/div/table/tbody/tr[1]/td/div/span").text
numero_questoes = self.driver.find_element_by_xpath(
"/html/body/div[6]/div[4]/div/form/div/div/table/tbody/tr[2]/td/div/span").text
questoes = questoes + "\n" + "Geradas: " + numero_questoes[-11:]
self.xpath_click_script("/html/body/div[6]/div[3]/input")
self.aguardar_processo()
self.xpath_click_script("/html/body/div[6]/div[3]/input")
self.aguardar_processo()
return {"texto": questoes, "tipo": "info"}
except exceptions.NoSuchElementException:
questoes = self.driver.find_element_by_xpath(
"/html/body/div[6]/div[4]/div/form/table[2]/tbody/tr/td/table/tbody/tr/td/table/tbody/tr/td").text
self.xpath_click_script("/html/body/div[6]/div[3]/input")
self.aguardar_processo()
return {"texto": questoes, "tipo": "warn"}
def salvar_prova(self):
id_elemento = self.driver.find_element_by_xpath(
"/html/body/div[1]/div[2]/table/tbody/tr/td/table/tbody/tr[17]/td/div/div[2]/div[4]/div/form/table[2]/tbody/tr[3]/td/table/tbody/tr/td/input[1]").get_attribute("id")
self.driver.execute_script(
f'document.getElementById("{id_elemento}").click()')
def salvar(self):
id_elemento = self.driver.find_element_by_xpath(
"/html/body/div[1]/div[2]/table/tbody/tr/td/table/tbody/tr[29]/td/form/table/tbody/tr[3]/td/table/tbody/tr[2]/td/table/tbody/tr/td/input[2]").get_attribute("id")
self.driver.execute_script(
f'document.getElementById("{id_elemento}").click()')
|
{"/modules/conteudo.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/modules/prova.py": ["/classes/Conteudo.py", "/util/funcoes.py", "/util/log.py"], "/util/funcoes.py": ["/util/log.py"], "/modules/conteudoMDB.py": ["/classes/ConteudoMDB.py"], "/main.py": ["/modules/conteudo.py", "/modules/conteudoMDB.py", "/util/funcoes.py"]}
|
25,801
|
siliu3/c-SewpPocket
|
refs/heads/master
|
/layers/use_case_layer/actors/__init__.py
|
from .someone_actor import SomeoneActor
from .contributor_actor import ContributorActor
from .consumer_actor import ConsumerActor
from .user_actor import UserActor
from .regulator_actor import RegulatorActor
|
{"/layers/use_case_layer/actors/__init__.py": ["/layers/use_case_layer/actors/someone_actor.py", "/layers/use_case_layer/actors/contributor_actor.py", "/layers/use_case_layer/actors/consumer_actor.py", "/layers/use_case_layer/actors/user_actor.py", "/layers/use_case_layer/actors/regulator_actor.py"], "/layers/domain_layer/repositories/repository.py": ["/layers/infrastructure_layer/db/orm_db.py"], "/layers/domain_layer/repositories/user_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/devhelper/gendata/__init__.py": ["/layers/infrastructure_layer/context.py"], "/layers/use_case_layer/actors/user_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/devhelper/gendata/gen_account.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_deal_req_comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/post_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/consumer_request_commments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/account_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_request_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/errors/token_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/use_case_layer/actors/regulator_actor.py": ["/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/error/error_type.py", "/layers/infrastructure_layer/context.py"], "/libs/cutoms/ex_reqparse.py": ["/layers/infrastructure_layer/error/error_type.py"], "/dev.py": ["/app.py", "/constants/enums.py"], "/devhelper/gendata/gen_regulator.py": ["/layers/domain_layer/user_aggregate/__init__.py", 
"/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/use_case_layer/systems/auth_system.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/devhelper/command.py": ["/layers/infrastructure_layer/db/orm_db.py", "/layers/infrastructure_layer/configger/__init__.py"], "/layers/ui_layer/rest/resources/comment_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/comment_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/token_resource.py": ["/layers/use_case_layer/actors/__init__.py", "/app.py", "/constants/enums.py"], "/layers/infrastructure_layer/configger/__init__.py": ["/constants/enums.py"], "/layers/use_case_layer/actors/consumer_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/logger.py": ["/constants/dirs.py"], "/layers/domain_layer/repositories/token_repository.py": ["/layers/domain_layer/token_aggregate/__init__.py"], "/layers/infrastructure_layer/db/orm_db.py": ["/layers/infrastructure_layer/configger/__init__.py"], "/layers/infrastructure_layer/db/mapping/mapping.py": ["/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/token_aggregate/__init__.py"], "/layers/domain_layer/repositories/errors/player_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/test.py": ["/app.py", "/constants/enums.py"], "/layers/use_case_layer/actors/contributor_actor.py": ["/layers/use_case_layer/systems/__init__.py", 
"/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/layers/ui_layer/rest/resources/user_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/app.py": ["/constants/enums.py", "/layers/infrastructure_layer/configger/__init__.py", "/layers/infrastructure_layer/logger.py", "/layers/infrastructure_layer/db/orm_db.py", "/layers/ui_layer/rest/urls.py", "/layers/infrastructure_layer/error/error_mapping.py", "/devhelper/command.py", "/libs/cutoms/converter.py"], "/layers/domain_layer/repositories/errors/account_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/post_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/consumer_requests_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/configger/config.py": ["/constants/dirs.py"], "/layers/use_case_layer/actors/someone_actor.py": ["/layers/domain_layer/token_aggregate/__init__.py", "/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"]}
|
25,802
|
siliu3/c-SewpPocket
|
refs/heads/master
|
/layers/domain_layer/repositories/repository.py
|
from layers.infrastructure_layer.db.orm_db import OrmDb
from libs.cutoms.singleton import Singleton
class Repository(object):
__metaclass__ = Singleton
def __init__(self):
self.session = OrmDb().get_session()
|
{"/layers/use_case_layer/actors/__init__.py": ["/layers/use_case_layer/actors/someone_actor.py", "/layers/use_case_layer/actors/contributor_actor.py", "/layers/use_case_layer/actors/consumer_actor.py", "/layers/use_case_layer/actors/user_actor.py", "/layers/use_case_layer/actors/regulator_actor.py"], "/layers/domain_layer/repositories/repository.py": ["/layers/infrastructure_layer/db/orm_db.py"], "/layers/domain_layer/repositories/user_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/devhelper/gendata/__init__.py": ["/layers/infrastructure_layer/context.py"], "/layers/use_case_layer/actors/user_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/devhelper/gendata/gen_account.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_deal_req_comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/post_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/consumer_request_commments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/account_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_request_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/errors/token_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/use_case_layer/actors/regulator_actor.py": ["/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/error/error_type.py", "/layers/infrastructure_layer/context.py"], "/libs/cutoms/ex_reqparse.py": ["/layers/infrastructure_layer/error/error_type.py"], "/dev.py": ["/app.py", "/constants/enums.py"], "/devhelper/gendata/gen_regulator.py": ["/layers/domain_layer/user_aggregate/__init__.py", 
"/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/use_case_layer/systems/auth_system.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/devhelper/command.py": ["/layers/infrastructure_layer/db/orm_db.py", "/layers/infrastructure_layer/configger/__init__.py"], "/layers/ui_layer/rest/resources/comment_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/comment_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/token_resource.py": ["/layers/use_case_layer/actors/__init__.py", "/app.py", "/constants/enums.py"], "/layers/infrastructure_layer/configger/__init__.py": ["/constants/enums.py"], "/layers/use_case_layer/actors/consumer_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/logger.py": ["/constants/dirs.py"], "/layers/domain_layer/repositories/token_repository.py": ["/layers/domain_layer/token_aggregate/__init__.py"], "/layers/infrastructure_layer/db/orm_db.py": ["/layers/infrastructure_layer/configger/__init__.py"], "/layers/infrastructure_layer/db/mapping/mapping.py": ["/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/token_aggregate/__init__.py"], "/layers/domain_layer/repositories/errors/player_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/test.py": ["/app.py", "/constants/enums.py"], "/layers/use_case_layer/actors/contributor_actor.py": ["/layers/use_case_layer/systems/__init__.py", 
"/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/layers/ui_layer/rest/resources/user_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/app.py": ["/constants/enums.py", "/layers/infrastructure_layer/configger/__init__.py", "/layers/infrastructure_layer/logger.py", "/layers/infrastructure_layer/db/orm_db.py", "/layers/ui_layer/rest/urls.py", "/layers/infrastructure_layer/error/error_mapping.py", "/devhelper/command.py", "/libs/cutoms/converter.py"], "/layers/domain_layer/repositories/errors/account_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/post_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/consumer_requests_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/configger/config.py": ["/constants/dirs.py"], "/layers/use_case_layer/actors/someone_actor.py": ["/layers/domain_layer/token_aggregate/__init__.py", "/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"]}
|
25,803
|
siliu3/c-SewpPocket
|
refs/heads/master
|
/layers/domain_layer/repositories/user_repository.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
user_repository.py
Created by BigYuki on 15/11/10.
"""
from repository import Repository
from layers.domain_layer.user_aggregate import User,Contributor,Consumer
class UserRepository(Repository):
def add(self, user):
self.session.add(user)
self.session.flush()
def save(self, user):
self.session.flush()
def get(self, user_id):
return self.session.query(User).get(user_id)
def get_contributor(self,user_id):
return self.session.query(Contributor).get(user_id)
def get_consumer(self,user_id):
return self.session.query(Consumer).get(user_id)
def get_user(self,user_id):
return self.session.query(User).get(user_id)
|
{"/layers/use_case_layer/actors/__init__.py": ["/layers/use_case_layer/actors/someone_actor.py", "/layers/use_case_layer/actors/contributor_actor.py", "/layers/use_case_layer/actors/consumer_actor.py", "/layers/use_case_layer/actors/user_actor.py", "/layers/use_case_layer/actors/regulator_actor.py"], "/layers/domain_layer/repositories/repository.py": ["/layers/infrastructure_layer/db/orm_db.py"], "/layers/domain_layer/repositories/user_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/devhelper/gendata/__init__.py": ["/layers/infrastructure_layer/context.py"], "/layers/use_case_layer/actors/user_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/devhelper/gendata/gen_account.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_deal_req_comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/post_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/consumer_request_commments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/account_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_request_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/errors/token_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/use_case_layer/actors/regulator_actor.py": ["/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/error/error_type.py", "/layers/infrastructure_layer/context.py"], "/libs/cutoms/ex_reqparse.py": ["/layers/infrastructure_layer/error/error_type.py"], "/dev.py": ["/app.py", "/constants/enums.py"], "/devhelper/gendata/gen_regulator.py": ["/layers/domain_layer/user_aggregate/__init__.py", 
"/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/use_case_layer/systems/auth_system.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/devhelper/command.py": ["/layers/infrastructure_layer/db/orm_db.py", "/layers/infrastructure_layer/configger/__init__.py"], "/layers/ui_layer/rest/resources/comment_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/comment_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/token_resource.py": ["/layers/use_case_layer/actors/__init__.py", "/app.py", "/constants/enums.py"], "/layers/infrastructure_layer/configger/__init__.py": ["/constants/enums.py"], "/layers/use_case_layer/actors/consumer_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/logger.py": ["/constants/dirs.py"], "/layers/domain_layer/repositories/token_repository.py": ["/layers/domain_layer/token_aggregate/__init__.py"], "/layers/infrastructure_layer/db/orm_db.py": ["/layers/infrastructure_layer/configger/__init__.py"], "/layers/infrastructure_layer/db/mapping/mapping.py": ["/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/token_aggregate/__init__.py"], "/layers/domain_layer/repositories/errors/player_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/test.py": ["/app.py", "/constants/enums.py"], "/layers/use_case_layer/actors/contributor_actor.py": ["/layers/use_case_layer/systems/__init__.py", 
"/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/layers/ui_layer/rest/resources/user_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/app.py": ["/constants/enums.py", "/layers/infrastructure_layer/configger/__init__.py", "/layers/infrastructure_layer/logger.py", "/layers/infrastructure_layer/db/orm_db.py", "/layers/ui_layer/rest/urls.py", "/layers/infrastructure_layer/error/error_mapping.py", "/devhelper/command.py", "/libs/cutoms/converter.py"], "/layers/domain_layer/repositories/errors/account_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/post_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/consumer_requests_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/configger/config.py": ["/constants/dirs.py"], "/layers/use_case_layer/actors/someone_actor.py": ["/layers/domain_layer/token_aggregate/__init__.py", "/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"]}
|
25,804
|
siliu3/c-SewpPocket
|
refs/heads/master
|
/devhelper/gendata/__init__.py
|
from gen_account import Generate_Account
from gen_regulator import Generate_Regulator
from layers.infrastructure_layer.context import Transaction_
@Transaction_
def Generate_Data():
    """Seed the database with one sample regulator and one account tied to it.

    The ``Transaction_`` decorator presumably wraps the whole function in a
    single DB transaction so both records are created atomically — TODO
    confirm against ``layers.infrastructure_layer.context``.

    Returns:
        tuple: ``(regulator_id, account_username)`` for the generated records.
        Previously these values were computed and silently discarded; returning
        them lets callers (e.g. tests or dev scripts) reference the seed data.
    """
    regulator_id = Generate_Regulator()
    # The account is owned by / linked to the regulator created above.
    account_username = Generate_Account(regulator_id)
    return regulator_id, account_username
|
{"/layers/use_case_layer/actors/__init__.py": ["/layers/use_case_layer/actors/someone_actor.py", "/layers/use_case_layer/actors/contributor_actor.py", "/layers/use_case_layer/actors/consumer_actor.py", "/layers/use_case_layer/actors/user_actor.py", "/layers/use_case_layer/actors/regulator_actor.py"], "/layers/domain_layer/repositories/repository.py": ["/layers/infrastructure_layer/db/orm_db.py"], "/layers/domain_layer/repositories/user_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/devhelper/gendata/__init__.py": ["/layers/infrastructure_layer/context.py"], "/layers/use_case_layer/actors/user_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/devhelper/gendata/gen_account.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_deal_req_comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/post_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/consumer_request_commments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/account_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/contributor_post_request_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/errors/token_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/use_case_layer/actors/regulator_actor.py": ["/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/error/error_type.py", "/layers/infrastructure_layer/context.py"], "/libs/cutoms/ex_reqparse.py": ["/layers/infrastructure_layer/error/error_type.py"], "/dev.py": ["/app.py", "/constants/enums.py"], "/devhelper/gendata/gen_regulator.py": ["/layers/domain_layer/user_aggregate/__init__.py", 
"/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/contributor_posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/use_case_layer/systems/auth_system.py": ["/layers/domain_layer/repositories/__init__.py"], "/layers/ui_layer/rest/resources/posts_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/devhelper/command.py": ["/layers/infrastructure_layer/db/orm_db.py", "/layers/infrastructure_layer/configger/__init__.py"], "/layers/ui_layer/rest/resources/comment_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/domain_layer/repositories/comment_repository.py": ["/layers/domain_layer/user_aggregate/__init__.py"], "/layers/ui_layer/rest/resources/token_resource.py": ["/layers/use_case_layer/actors/__init__.py", "/app.py", "/constants/enums.py"], "/layers/infrastructure_layer/configger/__init__.py": ["/constants/enums.py"], "/layers/use_case_layer/actors/consumer_actor.py": ["/layers/use_case_layer/systems/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/comments_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/logger.py": ["/constants/dirs.py"], "/layers/domain_layer/repositories/token_repository.py": ["/layers/domain_layer/token_aggregate/__init__.py"], "/layers/infrastructure_layer/db/orm_db.py": ["/layers/infrastructure_layer/configger/__init__.py"], "/layers/infrastructure_layer/db/mapping/mapping.py": ["/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/token_aggregate/__init__.py"], "/layers/domain_layer/repositories/errors/player_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/test.py": ["/app.py", "/constants/enums.py"], "/layers/use_case_layer/actors/contributor_actor.py": ["/layers/use_case_layer/systems/__init__.py", 
"/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py"], "/layers/ui_layer/rest/resources/user_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/app.py": ["/constants/enums.py", "/layers/infrastructure_layer/configger/__init__.py", "/layers/infrastructure_layer/logger.py", "/layers/infrastructure_layer/db/orm_db.py", "/layers/ui_layer/rest/urls.py", "/layers/infrastructure_layer/error/error_mapping.py", "/devhelper/command.py", "/libs/cutoms/converter.py"], "/layers/domain_layer/repositories/errors/account_errors.py": ["/layers/infrastructure_layer/error/error_type.py"], "/layers/ui_layer/rest/resources/post_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/ui_layer/rest/resources/consumer_requests_resource.py": ["/layers/use_case_layer/actors/__init__.py"], "/layers/infrastructure_layer/configger/config.py": ["/constants/dirs.py"], "/layers/use_case_layer/actors/someone_actor.py": ["/layers/domain_layer/token_aggregate/__init__.py", "/layers/domain_layer/user_aggregate/__init__.py", "/layers/domain_layer/repositories/__init__.py", "/layers/infrastructure_layer/context.py", "/layers/infrastructure_layer/error/error_type.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.