index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
47,236 | emojipeach/djangur | refs/heads/master | /users/models.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from django.contrib.auth.models import AbstractUser
from django.core import validators
from django.db import models
from django.utils.translation import ugettext_lazy as _
class CustomUser(AbstractUser):
username = models.CharField(_('username'), max_length=30, unique=True,
help_text=_('Required. 30 characters or fewer. Letters, numbers and ./-/_ characters'),
validators=[
validators.RegexValidator(re.compile('^[\w.-]+$'), _('Enter a valid username.'), _('invalid'))
])
pgp_key = models.TextField(_("PGP public key"), blank=True)
def __str__(self):
return self.username
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,237 | emojipeach/djangur | refs/heads/master | /pmessaging/urls.py | # -*- coding: utf-8 -*-
""" Contains all url patterns for imageapp app."""
from __future__ import unicode_literals
from django.urls import path
from . import views
app_name = 'imageapp'
urlpatterns = [
# Home page
path(
'', views.index,
name='index'
),
# Upload page
path(
'upload/',
views.upload,
name='upload'
),
# View an image
path(
'image/<str:identifier>/',
views.image,
name='image'
),
# Delete an image
path(
'delete_image/<str:identifier>/<str:deletion_password>/',
views.delete_image,
name='delete_image'
),
# Moderator delete an image
path(
'mod_delete_image/<str:identifier>/<str:deletion_password>/',
views.mod_delete_image,
name='mod_delete_image'
),
# Moderator reset image reporting
path(
'mod_image_acceptable/<str:identifier>/<str:deletion_password>/',
views.mod_image_acceptable,
name='mod_image_acceptable'
),
# Report an image
path(
'report_image/<str:identifier>/',
views.report_image,
name='report_image'
),
# Moderator queue
path(
'mod_queue/',
views.mod_queue,
name='mod_queue'
),
# User profile
path(
'profile/<str:username>/',
views.profile,
name='profile'
),
] | {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,238 | emojipeach/djangur | refs/heads/master | /imageapp/models.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import os
import requests
from datetime import datetime
from datetime import timedelta
from hashlib import sha256
from io import BytesIO
from PIL import ExifTags
from PIL import Image
from time import localtime
from time import strftime
from time import time
from urllib.parse import urlparse
from django.contrib.auth import get_user_model
from django.core.files.base import ContentFile
from django.db import models
from imageapp.settings import ALLOWED_IMAGE_FORMATS
from imageapp.settings import EXPIRY_CHOICES
from imageapp.settings import IMAGE_QUALITY_VAL
from imageapp.settings import THUMB_SIZE
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
User = get_user_model()
def image_path(instance, filename):
""" Provides a path and unique filename."""
new_folder = str(sha256(str(datetime.fromtimestamp(
instance.uploaded_time).strftime("%d%m%Y").encode('utf-8')).encode('utf-8')).hexdigest())[2:8]
new_filename = instance.identifier
ext = filename.split('.')[-1].lower()
if filename.split('.')[0] == 'thumbnail':
return '{0}/{1}_thumb.{2}'.format(new_folder, new_filename, ext)
else:
return '{0}/{1}.{2}'.format(new_folder, new_filename, ext)
class ImageUpload(models.Model):
""" The main class which stores data about uploaded images."""
identifier = models.CharField(max_length=32, primary_key=True)
uploaded_time = models.FloatField()
title = models.CharField(max_length=50, blank=True)
image_file = models.ImageField(upload_to=image_path, blank=True)
thumbnail = models.ImageField(upload_to=image_path, blank=True, editable=False)
expiry_choice = models.IntegerField(choices=EXPIRY_CHOICES)
expiry_time = models.FloatField()
private = models.BooleanField()
reported = models.IntegerField(default=0)
reported_first_time = models.FloatField(default=0)
img_url = models.CharField(max_length=255, blank=True)
owner = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True)
def save(self, *args, **kwargs):
if not self.process_main_image():
raise Exception('Not a valid image file')
if not self.make_thumbnail():
raise Exception('Problem making thumbnail')
super(ImageUpload, self).save(*args, **kwargs)
@staticmethod
def image_is_animated_gif(image, image_format):
""" Checks whether an image is an animated gif by trying to seek beyond the initial frame. """
if image_format != 'GIF':
return False
try:
image.seek(1)
except EOFError:
return False
else:
return True
@staticmethod
def file_type_check(file_type):
""" Ensures only allowed files uploaded."""
if file_type not in ALLOWED_IMAGE_FORMATS:
raise ValueError('File type not allowed!')
@staticmethod
def reorientate_image(image):
""" Respects orientation tags in exif data while disregarding and erasing the rest."""
if hasattr(image, '_getexif'): # only present in JPEGs
for orientation in ExifTags.TAGS.keys():
if ExifTags.TAGS[orientation] == 'Orientation':
break
exif_data = image._getexif() # returns None if no EXIF data
if exif_data is not None:
exif = dict(exif_data.items())
orientation = exif[orientation]
if orientation == 2:
image = image.transpose(Image.FLIP_LEFT_RIGHT)
elif orientation == 3:
image = image.transpose(Image.ROTATE_180)
elif orientation == 4:
image = image.transpose(Image.FLIP_TOP_BOTTOM)
elif orientation == 5:
image = image.transpose(Image.FLIP_LEFT_RIGHT, Image.ROTATE_90)
elif orientation == 6:
image = image.transpose(Image.ROTATE_270)
elif orientation == 7:
image = image.transpose(Image.FLIP_TOP_BOTTOM, Image.ROTATE_90)
elif orientation == 8:
image = image.transpose(Image.ROTATE_90)
else:
pass
return image
def filename(self):
""" Returns just the image filename saved in the instance."""
return os.path.basename(self.image_file.name)
def upload_success_password(self):
""" Gives a password used to show the upload success page which includes a deletion link."""
return sha256(str(self.uploaded_time).encode('utf-8')).hexdigest()[0:6]
def deletion_password(self):
""" Provides a password used to confirm the user should be able to delete the image."""
return sha256(str(self.uploaded_time).encode('utf-8')).hexdigest()[6:12]
def formatted_filesize(self):
""" Returns a formatted string for use in templates from the image.size attribute provided in bytes."""
size_bytes = self.image_file.size
if size_bytes > 1048576:
result = "Filesize: " + "{0:.2f}".format(size_bytes / 1048576) + " MB"
elif size_bytes > 1024:
result = "Filesize: " + "{0:.0f}".format(size_bytes / 1024) + " KB"
else:
result = "Filesize: " + '{:,}'.format(size_bytes) + " Bytes"
return result
def formatted_uploaded_time(self):
""" Provides a formatted timestamp for template use."""
result = "Uploaded at " + strftime('%b. %d, %Y, %-I:%M %p', localtime(self.uploaded_time))
return result
def get_expiry_time(self):
""" Provided the exact expiry time of an instance."""
uploaded = datetime.fromtimestamp(self.uploaded_time)
expiry = uploaded + timedelta(days=self.expiry_choice)
result = expiry.timestamp()
return result
def formatted_expiry_delta(self):
""" Provides a formatted expiry time delta used in templates."""
ex_time = self.expiry_time
up_time = self.uploaded_time
if ex_time < up_time:
return 'Never expires'
now = time()
tdelt = ex_time - now
days, remainder = divmod(tdelt, 86400)
hours, remainder = divmod(remainder, 3600)
minutes = remainder // 60
if int(days) == 1:
days_string = 'day'
else:
days_string = 'days'
if int(hours) == 1:
hours_string = 'hour'
else:
hours_string = 'hours'
if int(minutes) == 1:
minutes_string = 'minute'
else:
minutes_string = 'minutes'
if days > 7:
return 'Expires in {0} {1}'.format(int(days), days_string)
elif days > 1:
return 'Expires in {0} {1} and {2} {3}'.format(int(days), days_string, int(hours), hours_string)
else:
return 'Expires in {0} {1} and {2} {3}'.format(int(hours), hours_string, int(minutes), minutes_string)
def process_main_image(self):
""" Process the main image for saving (accounting for orientation, animated gifs and disallowed file types)."""
try: # We dont want to overwrite a file if already saved
if os.path.isfile(self.image_file.path):
return True
except ValueError:
logging.info('image_file.path had no vaue set yet, gonna process image')
if self.image_file: # Check we have an image uploaded
image = Image.open(self.image_file)
elif self.img_url: # Check we have a URL
name = urlparse(self.img_url).path.split('/')[-1]
response = requests.get(self.img_url)
if response.status_code == 200:
image = Image.open(BytesIO(response.content))
file_type = image.format.upper()
self.file_type_check(file_type)
if self.image_is_animated_gif(image, file_type):
self.image_file.save(name, ContentFile(response.content), save=True)
return True
else:
raise Exception('Not a valid image URL')
else:
raise Exception('No Image File or URL provided')
file_type = image.format.upper()
self.file_type_check(file_type)
try:
image = self.reorientate_image(image)
except Exception:
logging.info('There was an error dealing with EXIF data when trying to reorientate')
if self.image_is_animated_gif(image, file_type):
pass
# Animated gifs are not processed before being saved
else:
temp_image = BytesIO()
image.save(temp_image, file_type, quality=IMAGE_QUALITY_VAL)
temp_image.seek(0)
self.image_file.save(self.image_file.name, ContentFile(temp_image.read()), save=False)
temp_image.close()
return True
def make_thumbnail(self):
""" Makes and saves a thumbnail."""
image = Image.open(self.image_file)
try:
if os.path.isfile(self.thumbnail.path):
return True
except ValueError:
logging.info('thumbnail.path had no vaue set yet, gonna process image')
file_type = image.format.upper()
ext = self.filename().split('.')[-1].lower()
thumbnail_placefolder = "thumbnail.{0}".format(ext)
image.thumbnail(THUMB_SIZE, Image.ANTIALIAS)
temp_thumb = BytesIO()
image.save(temp_thumb, file_type, quality=IMAGE_QUALITY_VAL)
temp_thumb.seek(0)
self.thumbnail.save(thumbnail_placefolder, ContentFile(temp_thumb.read()), save=False)
temp_thumb.close()
return True
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,239 | emojipeach/djangur | refs/heads/master | /imageapp/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import logging
import os
from time import time
from uuid import uuid4
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import user_passes_test
from django.http import HttpResponseRedirect
from django.http import Http404
from django.shortcuts import render
from django.urls import reverse
from imageapp.forms import ImageUploadForm
from imageapp.models import ImageUpload
from imageapp.settings import MODERATION_COUNTER_RESET
from imageapp.settings import MODERATION_THRESHOLD
# from imageapp.startup import delete_expired_images
logging.basicConfig(
level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s'
)
User = get_user_model()
def index(request):
""" Default index view which displays some recent images."""
images = ImageUpload.objects.filter(private=False).order_by('-uploaded_time')[:5]
context = {'images': images}
return render(request, 'imageapp/index.html', context)
def upload(request):
""" View for the image upload form."""
if request.method == 'POST':
form = ImageUploadForm(request.POST, request.FILES)
if form.is_valid():
new_image = form.save(commit=False)
new_image.identifier = uuid4().hex
new_image.uploaded_time = time()
new_image.expiry_time = new_image.get_expiry_time()
if request.user.is_authenticated:
new_image.owner = request.user
new_image.save()
context = {
'current_image': new_image,
'attempted_upload_success_password': new_image.upload_success_password(),
}
return render(request, 'imageapp/image.html', context)
else: # Blank form initially
form = ImageUploadForm()
context = {
'form': form,
}
return render(request, 'imageapp/upload.html', context)
def image(request, identifier):
""" View which displays an image referenced by the identifier."""
try:
current_image = ImageUpload.objects.get(identifier=identifier)
except Exception:
raise Http404("Page not found")
# Lets check if the image.reported field exceeds the moderation threshold
if current_image.reported >= MODERATION_THRESHOLD:
messages.error(request, 'Image ({0}) awaiting moderation.'.format(identifier))
return HttpResponseRedirect(reverse('imageapp:index'))
# Lets check if the image has expired and is awaiting deletion
if current_image.expiry_time < time() and current_image.expiry_time > current_image.uploaded_time:
raise Http404("Page not found")
context = {
'current_image': current_image,
}
return render(request, 'imageapp/image.html', context)
def profile(request, username):
""" Displays a user's profile."""
current_user = User.objects.get(username=username)
user_id = current_user.id
images = ImageUpload.objects.filter(owner=user_id).order_by('-uploaded_time')
context = {
'images': images,
'current_user': current_user,
}
return render(request, 'imageapp/user_profile.html', context)
def delete_image(request, identifier, deletion_password=''):
""" View to delete an image, a correct deletion password must be passed in the url."""
try:
current_image = ImageUpload.objects.get(identifier=identifier)
except Exception:
raise Http404("Page not found")
# Lets check the deletion password is correct
if deletion_password == current_image.deletion_password():
# If so we need to delete the instance and all associated files
os.remove(current_image.image_file.path)
os.remove(current_image.thumbnail.path)
current_image.delete()
messages.success(request, 'Image ({0}) deleted.'.format(identifier))
return HttpResponseRedirect(reverse('imageapp:index'))
else:
raise Http404("Page not found")
@login_required
def report_image(request, identifier):
""" View that increments the image.reported count and adds a timestamp to be used for the mod_queue."""
try:
current_image = ImageUpload.objects.get(identifier=identifier)
except Exception:
raise Http404("Page not found")
# TODO should add some session checking to limt multiple reports
current_image.reported += 1
if current_image.reported_first_time == 0:
current_image.reported_first_time = time()
current_image.save(update_fields=['reported', 'reported_first_time'])
messages.success(request, 'Image ({0}) reported.'.format(identifier))
return HttpResponseRedirect(reverse('imageapp:image', args=[identifier]))
def moderator_check(user):
if user.is_superuser:
return True
else:
return user.groups.filter(name__in=['moderators',]).exists()
@user_passes_test(moderator_check)
def mod_delete_image(request, identifier, deletion_password=''):
""" View called from the mod_queue template which deletes an image and redirects back to the queue."""
try:
current_image = ImageUpload.objects.get(identifier=identifier)
except Exception:
raise Http404("Page not found")
if deletion_password == current_image.deletion_password():
os.remove(current_image.image_file.path)
os.remove(current_image.thumbnail.path)
current_image.delete()
messages.success(request, 'Previous image ({0}) deleted.'.format(identifier))
return HttpResponseRedirect(reverse('imageapp:mod_queue'))
else:
raise Http404("Page not found")
@user_passes_test(moderator_check)
def mod_image_acceptable(request, identifier, deletion_password=''):
""" View which resets the image.reported counter."""
try:
current_image = ImageUpload.objects.get(identifier=identifier)
except Exception:
raise Http404("Page not found")
if deletion_password == current_image.deletion_password():
current_image.reported = -MODERATION_COUNTER_RESET
current_image.reported_first_time = 0
current_image.save(update_fields=['reported', 'reported_first_time'])
# TODO attribute this action to the mod responsible
messages.success(request, 'Previous image ({0}) acceptable.'.format(identifier))
return HttpResponseRedirect(reverse('imageapp:mod_queue'))
else:
raise Http404("Page not found")
@user_passes_test(moderator_check)
def mod_queue(request):
""" View gets 10 images above moderation_threshold and sort by the first time they were reported."""
images_for_moderation = ImageUpload.objects.filter(
reported__gte=MODERATION_THRESHOLD
).order_by('-reported_first_time')[:10]
# Lets pick a random image from this list to show to moderator
try:
pick_an_image = int(int(codecs.encode(os.urandom(1), 'hex'), 16) / 255 * len(images_for_moderation))
# Random number upto len(i_for_m)
moderate = images_for_moderation[pick_an_image]
except (ValueError, IndexError):
messages.error(request, 'Moderation queue empty')
return HttpResponseRedirect(reverse('imageapp:index'))
context = {
'moderate': moderate
}
return render(request, 'imageapp/mod_queue.html', context)
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,240 | emojipeach/djangur | refs/heads/master | /pmessaging/settings.py | # Detetmines the frequency at which old messages are checked and deleted.
MESSAGE_REMOVAL_FREQUENCY = 3600
# Determines the numbet of days back old messages are kept in the db. After this time messages are deleted.
OLD_MESSAGES_DELETED_IN = -30
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,241 | emojipeach/djangur | refs/heads/master | /users/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth import get_user_model
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
User = get_user_model()
class CustomUserAdmin(UserAdmin):
model = User
list_display = ['username']
fieldsets = (
(None, {
'fields': ('username', 'email', 'first_name', 'last_name', 'groups', 'pgp_key', 'is_active', )
}),
('Advanced options', {
'classes': ('collapse',),
'fields': ('password', 'is_staff', 'is_superuser', ),
}),
)
admin.site.register(User, CustomUserAdmin) | {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,242 | emojipeach/djangur | refs/heads/master | /imageapp/startup.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import os
import threading
from time import time
from django.db.models import F
from imageapp.models import ImageUpload
from imageapp.settings import EXPIRY_REMOVAL_FREQUENCY
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
def delete_expired_images():
""" This function selects all images which have an expiry set before now and deletes them permanently."""
logging.info('Expired image cleanup started...')
now = time()
images = ImageUpload.objects.filter(uploaded_time__lte=F('expiry_time'), expiry_time__lte=now)
for image in images:
# lets delete the image file, thumbnail and instance
filename = image.image_file.filename()
os.remove(image.image_file.path)
os.remove(image.thumbnail.path)
image.delete()
logging.info('deleted an expired image: {0}'.format(filename))
logging.info('Expired image cleanup finished...')
def launch_expired_image_remover():
""" This function runs the expired images delete function every hour (freq can be changed in settings)."""
threads = threading.Timer(EXPIRY_REMOVAL_FREQUENCY, launch_expired_image_remover)
threads.start()
delete_expired_images()
launch_expired_image_remover()
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,243 | emojipeach/djangur | refs/heads/master | /imageapp/forms.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from .models import ImageUpload
class ImageUploadForm(forms.ModelForm):
""" Main image upload form - supports direct file and url upload."""
def __init__(self, *args, **kwargs):
super(ImageUploadForm, self).__init__(*args, **kwargs)
self.fields['private'].initial = True
self.fields['expiry_choice'].initial = -10
class Meta:
model = ImageUpload
fields = (
'title',
'image_file',
'img_url',
'expiry_choice',
'private',
)
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,244 | emojipeach/djangur | refs/heads/master | /imageapp/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from imageapp.models import ImageUpload
admin.site.register(ImageUpload)
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,245 | emojipeach/djangur | refs/heads/master | /users/forms.py | from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UserCreationForm
from django.utils.translation import ugettext_lazy as _
User = get_user_model()
class CustomUserCreationForm(UserCreationForm):
def clean_username(self):
""" Ensures a unique username accounting for character case."""
username = self.cleaned_data.get('username')
if User.objects.filter(username__iexact=username):
raise forms.ValidationError(_('This username is already in use. Please try again.'))
else:
return username
class Meta(UserCreationForm.Meta):
model = User
fields = ('username', 'email')
class CustomUserChangeProfileForm(forms.ModelForm):
def clean_pgp_key(self):
""" Ensures pgp key headers are present."""
key = self.cleaned_data.get('pgp_key')
if key == "":
return key
elif key.startswith(
'-----BEGIN PGP PUBLIC KEY BLOCK-----'
) and key.__contains__(
'-----END PGP PUBLIC KEY BLOCK-----'
):
return key
else:
raise forms.ValidationError(_(
'That is not a valid PGP public key. Please try again or just delete the invalid key.'
))
class Meta:
model = User
fields = (
'email',
'first_name',
'last_name',
'pgp_key',
)
widgets = {'pgp_key': forms.Textarea(attrs={'rows': 40, 'cols': 70})}
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,246 | emojipeach/djangur | refs/heads/master | /pmessaging/cleanup.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import logging
import threading
from pmessaging.models import Message
from pmessaging.settings import MESSAGE_REMOVAL_FREQUENCY
from pmessaging.settings import OLD_MESSAGES_DELETED_IN
logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s')
def delete_old_messages():
""" This function deletes all messages which are older than the OLD_MESSAGES_DELETED_IN constant."""
logging.info('Old message cleanup started...')
now = datetime.datetime.now()
delete_before = now + datetime.timedelta(days=OLD_MESSAGES_DELETED_IN)
messages = Message.objects.filter(sent_at__date__lte=delete_before)
for message in messages:
# lets delete the image file, thumbnail and instance
message.delete()
logging.info('deleted an old message')
logging.info('Old message cleanup finished...')
def launch_old_message_remover():
""" This function runs the old message delete function every hour (freq can be changed in settings)."""
threads = threading.Timer(MESSAGE_REMOVAL_FREQUENCY, launch_old_message_remover)
threads.start()
delete_old_messages()
launch_old_message_remover() | {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,247 | emojipeach/djangur | refs/heads/master | /users/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import messages
from django.contrib.auth import authenticate
from django.contrib.auth import get_user_model
from django.contrib.auth import login
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import PasswordChangeForm
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse_lazy
from django.urls import reverse
from users.forms import CustomUserCreationForm
from users.forms import CustomUserChangeProfileForm
User = get_user_model()
def signup(request):
"""Register a new user."""
if request.method != 'POST':
# Display a blank registration form
form = CustomUserCreationForm()
else:
# Process completed form
form = CustomUserCreationForm(data=request.POST)
if form.is_valid():
new_user = form.save()
# log the user in then redirect to home page
authenticated_user = authenticate(username=new_user.username, password=request.POST['password1'])
login(request, authenticated_user)
return HttpResponseRedirect(reverse_lazy('imageapp:index'))
context = {'form': form}
return render(request, 'users/signup.html', context)
@login_required
def password_change(request):
""" Allows a user to change their own password."""
if request.method == 'POST':
form = PasswordChangeForm(request.user, request.POST)
if form.is_valid():
user = form.save()
update_session_auth_hash(request, user) # Important!
messages.success(request, 'Your password was successfully updated!')
return HttpResponseRedirect(reverse('imageapp:profile', args=[request.user.username]))
else:
messages.error(request, 'Please correct the error below.')
else:
form = PasswordChangeForm(request.user)
context = {
'form': form,
}
return render(request, 'users/password_change.html', context)
@login_required
def edit_profile(request):
""" View allows user to update their own settings."""
if request.method == 'POST':
form = CustomUserChangeProfileForm(data=request.POST, instance=request.user)
if form.is_valid():
form.save()
messages.success(request, 'Your profile was successfully updated!')
return HttpResponseRedirect(reverse('imageapp:settings'))
else:
form = CustomUserChangeProfileForm(instance=request.user)
context = {
'form': form,
}
return render(request, 'users/edit_profile.html', context)
@login_required
def my_profile(request):
""" Displays the user's profile."""
current_user = User.objects.get(username=request.user)
context = {
'current_user': current_user,
}
return render(request, 'users/my_profile.html', context)
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,248 | emojipeach/djangur | refs/heads/master | /users/urls.py | from django.urls import path
from django.contrib.auth.views import LoginView
from django.contrib.auth.views import LogoutView
from . import views
app_name = 'users'
urlpatterns = [
# user signup
path(
'signup/',
views.signup,
name='signup'
),
# user login
path(
'login/',
LoginView.as_view(template_name='users/login.html'),
name='login'
),
# user logout
path(
'logout/',
LogoutView.as_view(template_name='users/login.html'),
name='logout'
),
# user self password change
path(
'password_change/',
views.password_change,
name='password_change'
),
# user edit own settings
path(
'edit_profile/',
views.edit_profile,
name='edit_profile'
),
]
| {"/imageapp/views.py": ["/imageapp/forms.py", "/imageapp/models.py"], "/imageapp/startup.py": ["/imageapp/models.py"], "/imageapp/forms.py": ["/imageapp/models.py"], "/imageapp/admin.py": ["/imageapp/models.py"], "/pmessaging/cleanup.py": ["/pmessaging/settings.py"], "/users/views.py": ["/users/forms.py"]} |
47,249 | Lyxf3/Books_Shop | refs/heads/main | /user/views.py | from django.shortcuts import render
from rest_framework import viewsets
from rest_framework import permissions
# from rest_framework.decorators import action, list_route
# from rest_framework.response import Response
from user.serializers import UserSerializer
from user.models import User
class UserViewSet(viewsets.ViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = [permissions.IsAuthenticated]
# @list_route()
# def list(self, request):
# queryset = User.objects.all()
# serializer = UserSerializer(queryset, many=True)
# return Response(serializer.data)
#
# @action(methods=['get'])
# def retrieve(self, request, pk=None):
# queryset = User.objects.all()
# user = get_object_or_404(queryset, pk=pk)
# serializer = UserSerializer(user)
# return Response(serializer.data)
#
# @action(methods=['post'])
# def create(self, request):
# user = CustomUser.objects.create_user(
# name=request['name'],
# email=request['email'],
# is_staff=request['is_staff'],
# is_superuser=request['is_superuser'],
# is_active=request['is_active'],
# favourite_books=request['favourite_books'],
# balance=request['balance'],
# )
# user.save()
#
# @action(methods=['put'])
# def update(self, request, pk=None):
# user = self.get_object(pk)
# serializer = UserSerializer(user, data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data)
# return Response(serializer.errors)
#
# @action(methods=['delete'])
# def destroy(self, request, pk=None):
# snippet = self.get_object(pk)
# snippet.delete()
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,250 | Lyxf3/Books_Shop | refs/heads/main | /book/migrations/0002_initial.py | # Generated by Django 3.2.7 on 2021-10-06 10:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 3.2.7) second migration of the ``book`` app.

    Adds the relational fields (FKs and M2Ms) that reference the
    swappable user model and other book-app models created in 0001.
    Do not edit the operations by hand.
    """

    initial = True

    dependencies = [
        ('book', '0001_initial'),
        # Depend on whatever model AUTH_USER_MODEL resolves to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='promocode',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='User'),
        ),
        migrations.AddField(
            model_name='contract',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='author', to='book.author'),
        ),
        migrations.AddField(
            model_name='contract',
            name='publisher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='publisher', to='book.publisher'),
        ),
        migrations.AddField(
            model_name='book',
            name='authors',
            field=models.ManyToManyField(related_name='books', to='book.Author', verbose_name='Authors'),
        ),
        migrations.AddField(
            model_name='book',
            name='categories',
            field=models.ManyToManyField(related_name='books', to='book.Category', verbose_name='Categories'),
        ),
        migrations.AddField(
            model_name='book',
            name='discount_shop',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='books', to='book.discountshop', verbose_name='DiscountShop'),
        ),
        migrations.AddField(
            model_name='book',
            name='publisher',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='book.publisher', verbose_name='Publisher'),
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,251 | Lyxf3/Books_Shop | refs/heads/main | /user/models.py | from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from user.manager import CustomAccountManager
from django.utils import timezone
from django.contrib.auth.models import PermissionsMixin
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model that authenticates by e-mail instead of username."""

    name = models.CharField(max_length=100, blank=False, null=True, verbose_name="Name")
    email = models.EmailField(blank=False, null=True, unique=True, verbose_name="Email")
    is_staff = models.BooleanField(default=False, null=False, verbose_name="Is_staff")
    # BUG FIX: verbose_name was copy-pasted as "is_active" from the field below.
    is_superuser = models.BooleanField(default=False, null=False, verbose_name="is_superuser")
    # New accounts start inactive (presumably until confirmed — verify flow).
    is_active = models.BooleanField(default=False, null=False, verbose_name="is_active")
    favourite_books = models.ManyToManyField(to="book.Book", blank=False, verbose_name="Favourite_books",
                                             related_name="Users")
    # Money balance in currency units, up to 99999.99.
    balance = models.DecimalField(blank=False, null=False, default=0, max_digits=7, decimal_places=2,
                                  verbose_name="Balance")
    date_joined = models.DateTimeField(default=timezone.now, verbose_name="Date_joined")

    # E-mail is the login identifier; no extra fields required by createsuperuser.
    USERNAME_FIELD = "email"
    REQUIRED_FIELDS = []

    objects = CustomAccountManager()

    def __str__(self):
        return self.email
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,252 | Lyxf3/Books_Shop | refs/heads/main | /book/models.py | from django.db import models
from django.utils import timezone
import uuid
class Category(models.Model):
    """A category/genre label that books can be tagged with."""

    title = models.CharField(max_length=100, blank=False, null=True, verbose_name="Title")

    def __str__(self):
        return self.title
class Author(models.Model):
    """A book author.

    ``percent`` is the author's share — presumably a royalty percentage
    of the sale price (see aggregation helpers); confirm units.
    """

    first_name = models.CharField(max_length=30,
                                  blank=True,
                                  null=True,
                                  verbose_name="First_name")
    second_name = models.CharField(max_length=30,
                                   blank=True,
                                   null=True,
                                   verbose_name="Second_name")
    percent = models.PositiveSmallIntegerField(blank=False, default=20,
                                               verbose_name="Percent")
class Publisher(models.Model):
    """A publishing house; linked to authors via Contract and to books."""

    title = models.CharField(max_length=100,
                             blank=True,
                             null=True,
                             verbose_name="Title")

    def __str__(self):
        return self.title
class Contract(models.Model):
    """A named contract binding one author to one publisher."""

    title = models.CharField(max_length=100)
    author = models.ForeignKey(to=Author,
                               on_delete=models.CASCADE,
                               related_name="contracts",
                               verbose_name="author")
    publisher = models.ForeignKey(to=Publisher,
                                  on_delete=models.CASCADE,
                                  related_name="contracts",
                                  verbose_name="publisher")
class Market(models.Model):
    """A physical/named marketplace (Book stores only its raw market_id)."""

    title = models.CharField(max_length=100,
                             verbose_name="Title")
    location = models.CharField(max_length=100,
                                verbose_name="Location")

    def __str__(self):
        return self.title
class DiscountShop(models.Model):
    """A pair of discount percentages applicable to a book: one granted
    by the author, one by the shop."""

    author_discount = models.PositiveSmallIntegerField(blank=False,
                                                       verbose_name="Author_discount")
    # BUG FIX: verbose_name was copy-pasted as "Author_discount".
    shop_discount = models.PositiveSmallIntegerField(blank=False,
                                                     verbose_name="Shop_discount")

    def __str__(self):
        return f'author_discount: {self.author_discount} shop_discount:{self.shop_discount}'
class Book(models.Model):
    """A sellable book; concrete formats subclass this (PBook/EBook/ABook/BBook)."""

    title = models.CharField(max_length=100,
                             blank=False,
                             null=True,
                             verbose_name="Title")
    price = models.PositiveIntegerField(blank=False,
                                        verbose_name="Price")
    issued = models.DateTimeField(default=timezone.now,
                                  verbose_name="Issued")
    categories = models.ManyToManyField(to=Category,
                                        blank=False,
                                        related_name="books",
                                        verbose_name="Categories")
    authors = models.ManyToManyField(to=Author,
                                     blank=False,
                                     related_name="books",
                                     verbose_name="Authors")
    publisher = models.ForeignKey(to=Publisher,
                                  on_delete=models.SET_NULL,
                                  null=True,
                                  related_name="books",
                                  verbose_name="Publisher")
    # NOTE(review): plain integer, not a FK to Market — confirm this is intentional.
    market_id = models.PositiveIntegerField(blank=False,
                                            verbose_name="Market_id")
    discount_shop = models.ForeignKey(to=DiscountShop,
                                      on_delete=models.SET_NULL,
                                      null=True,
                                      related_name="books",
                                      verbose_name="Discount_shop")
    discount_market = models.PositiveSmallIntegerField(blank=False,
                                                       verbose_name="Discount_market")
    available = models.BooleanField(default=False,
                                    verbose_name="Available")

    def __str__(self):
        return self.title

    def get_fields(self):
        # NOTE(review): `self.product` is not defined on this model, so this
        # raises AttributeError if called — looks like leftover/dead code.
        return "\n".join([p.products for p in self.product.all()])

    def categories_list(self):
        # Comma-separated category titles; used as an admin changelist column.
        return ', '.join([str(category) for category in self.categories.all()])
    categories_list.short_description="Categories"
class PBook(Book):
    """Paper book; adds no fields beyond Book (multi-table inheritance)."""
    pass
class EBook(Book):
    """Electronic book; downloadable from a unique URL."""

    source = models.URLField(blank=True,
                             null=True,
                             unique=True,
                             verbose_name="Source")
class ABook(Book):
    """Audio book; stored as an uploaded file."""

    file = models.FileField(upload_to=None,
                            blank=True,
                            null=True,
                            verbose_name="File")
class Types(models.IntegerChoices):
    """Symbol-type choices for BBook (Braille book — TODO confirm)."""

    ASSOCIATIVE = 1
    DIDACTIC = 2
    COMBINED = 3
class BBook(Book):
    """Book variant distinguished only by its symbol type (see Types)."""

    symbol_type = models.PositiveSmallIntegerField(choices=Types.choices)
class PromoCode(models.Model):
    """A percentage discount code, optionally tied to a user, with a use limit."""

    id = models.UUIDField(primary_key=True,
                          default=uuid.uuid4,
                          editable=False,
                          verbose_name="Id")
    percent = models.PositiveSmallIntegerField(blank=True,
                                               null=True,
                                               verbose_name="Percent")
    user = models.ForeignKey(to="user.User",
                             on_delete=models.CASCADE,
                             null=True,
                             related_name='promo_codes',
                             verbose_name="User")
    times_to_use = models.PositiveSmallIntegerField(blank=True,
                                                    null=True,
                                                    verbose_name="Times_to_use")
    times_used = models.PositiveSmallIntegerField(blank=True,
                                                  null=True,
                                                  verbose_name="Times_used")

    def __str__(self):
        # BUG FIX: __str__ must return str; self.id is a uuid.UUID, so the
        # old `return self.id` made str(promo_code) raise TypeError.
        return str(self.id)
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,253 | Lyxf3/Books_Shop | refs/heads/main | /user/urls.py | from django.urls import path
from rest_framework.routers import DefaultRouter
from django.conf.urls import include
from user import views as user_views
# DRF router that auto-generates the URL patterns for the user API.
router = DefaultRouter()
router.register(r'user', user_views.UserViewSet, basename='user')

# Mount all router-generated routes at the app root.
urlpatterns = [
    path('', include(router.urls)),
]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,254 | Lyxf3/Books_Shop | refs/heads/main | /user/serializers.py | from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.views import TokenObtainPairView
from user.models import User
from rest_framework import serializers
class MyTokenObtainPairSerializer(TokenObtainPairSerializer):
    """JWT pair serializer that embeds the user's name as a custom claim."""

    @classmethod
    def get_token(cls, user):
        token = super().get_token(user)
        # Extra claim so clients can show the name without another request.
        token['name'] = user.name
        return token
class MyTokenObtainPairView(TokenObtainPairView):
    """Token endpoint wired to the custom serializer above."""
    serializer_class = MyTokenObtainPairSerializer
class UserSerializer(serializers.ModelSerializer):
    """Serializes User rows for the API."""

    class Meta:
        model = User
        # BUG FIX: the model has no 'issued' field — its timestamp field is
        # 'date_joined'. Listing a nonexistent field makes ModelSerializer
        # raise ImproperlyConfigured as soon as fields are resolved.
        fields = ('name', 'email', 'date_joined', 'favourite_books',
                  'balance', 'is_active', 'is_superuser')
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,255 | Lyxf3/Books_Shop | refs/heads/main | /book/admin.py | from django.contrib import admin
from django.db.models import OuterRef
from django.utils.html import mark_safe
from book.models import (
Book, Category, Contract, Author, Publisher, DiscountShop,
Market, PBook, EBook, ABook, Types, BBook, PromoCode
)
from django.utils.html import format_html
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
    """Read-mostly admin for Book; only title/price/discount are editable."""

    list_display = ['title', 'price', 'issued', 'market_id',
                    'discount_market', 'available', 'discount_shop',
                    'categories_list', 'publisher_name', ]
    search_fields = ('title', 'price')
    fields = ('title', 'price', 'discount_shop')
    readonly_fields = ('issued', 'market_id', 'discount_market', 'available', 'categories_list',)

    def get_queryset(self, request):
        # Avoid N+1 queries on the changelist: publisher title is annotated
        # via a correlated QuerySet used as an expression (an implicit
        # subquery — presumably intentional; verify it resolves on this
        # Django version), discount_shop via JOIN, categories via prefetch.
        query_set = super(BookAdmin, self).get_queryset(request)
        return query_set \
            .annotate(_publisher_name= \
                          Publisher.objects.filter(
                              id=OuterRef('publisher_id')
                          ).values('title')
                      ) \
            .select_related('discount_shop') \
            .prefetch_related('categories')

    def publisher_name(self, instance):
        # list_display column; reads the annotation added in get_queryset.
        return instance._publisher_name
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    """Read-only admin listing of categories."""

    list_display = ['title']
    search_fields = ('title',)
    readonly_fields = ('title',)
@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
    """Read-only admin listing of authors, searchable by surname."""

    list_display = ['first_name', 'second_name', 'percent']
    search_fields = ('second_name',)
    readonly_fields = ('first_name', 'second_name', 'percent')
@admin.register(Publisher)
class PublisherAdmin(admin.ModelAdmin):
    """Read-only admin listing of publishers."""

    list_display = ['title']
    search_fields = ('title',)
    readonly_fields = ('title',)
@admin.register(Contract)
class ContractAdmin(admin.ModelAdmin):
    """Read-only admin for contracts; joins author/publisher eagerly."""

    list_display = ['title', 'author', 'publisher']
    search_fields = ('title', 'author', 'publisher')
    readonly_fields = ('title', 'author', 'publisher')

    def get_queryset(self, request):
        # select_related avoids one query per row for the FK columns.
        query_set = super(ContractAdmin, self).get_queryset(request)
        return query_set \
            .select_related('author') \
            .select_related('publisher')
@admin.register(Market)
class MarketAdmin(admin.ModelAdmin):
    """Read-only admin listing of markets."""

    list_display = ['title', 'location']
    search_fields = ('title',)
    readonly_fields = ('title', 'location')
@admin.register(DiscountShop)
class DiscountShopAdmin(admin.ModelAdmin):
    """Editable admin for discount pairs (both fields writable)."""

    list_display = ['author_discount', 'shop_discount']
    search_fields = ('author_discount', 'shop_discount')
    fields = ('author_discount', 'shop_discount',)
@admin.register(EBook)
class EBookAdmin(BookAdmin):
    """BookAdmin plus the e-book-specific read-only `source` URL column."""

    def get_list_display(self, request):
        return self.list_display + ['source']

    def get_search_fields(self, request):
        return self.search_fields + ('source', )

    def get_readonly_fields(self, request):
        return self.readonly_fields + ('source', )
@admin.register(ABook)
class ABookAdmin(BookAdmin):
    """BookAdmin plus the audio-book-specific read-only `file` column."""

    def get_list_display(self, request):
        return self.list_display + ['file', ]

    def get_search_fields(self, request):
        return self.search_fields + ('file', )

    def get_readonly_fields(self, request):
        return self.readonly_fields + ('file', )
@admin.register(BBook)
class BBookAdmin(BookAdmin):
    """BookAdmin plus the read-only `symbol_type` column."""

    def get_list_display(self, request):
        return self.list_display + ['symbol_type']

    def get_search_fields(self, request):
        return self.search_fields + ('symbol_type', )

    def get_readonly_fields(self, request):
        return self.readonly_fields + ('symbol_type', )
@admin.register(PBook)
class PBookAdmin(BookAdmin):
    """Admin for paper books.

    PBook adds no fields beyond Book, so BookAdmin's configuration is
    inherited unchanged. The previous get_list_display /
    get_search_fields / get_readonly_fields overrides returned the
    inherited attributes untouched (exactly what ModelAdmin's defaults
    do), so they were no-ops and have been removed.
    """
@admin.register(PromoCode)
class PromoCodeAdmin(admin.ModelAdmin):
    """Read-only admin for promo codes; joins the owning user eagerly."""

    list_display = ['id', 'percent', 'user', 'times_to_use', 'times_used', ]
    search_fields = ('id', 'user')
    readonly_fields = ('id', 'percent', 'user', 'times_to_use', 'times_used',)

    def get_queryset(self, request):
        # select_related avoids one query per row for the user column.
        query_set = super(PromoCodeAdmin, self).get_queryset(request)
        return query_set \
            .select_related('user')
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,256 | Lyxf3/Books_Shop | refs/heads/main | /purchase/models.py | from django.db import models
from django.utils import timezone
from user.models import User
from book.models import Book
from django.utils import timezone
class Statuses(models.IntegerChoices):
    """Lifecycle states of a purchase."""

    PROCESSED = 1
    CANCELED = 2
    ACCEPTED = 3
class Purchase(models.Model):
    """A user's purchase of a single book, with amount and status."""

    # NOTE(review): naming the FK `user_id` makes Django create a DB column
    # `user_id_id`; presumably the field should be called `user` — confirm
    # before renaming (requires a migration and serializer update).
    user_id = models.ForeignKey(to=User, blank=False, null=True, on_delete=models.CASCADE,
                                related_name="Purchase", verbose_name="User")
    # Singular FK despite the plural name — one book per purchase row.
    books = models.ForeignKey(to=Book, blank=False, null=True, on_delete=models.SET_NULL,
                              related_name="Purchase", verbose_name="Book")
    amount = models.DecimalField(blank=False, max_digits=7, decimal_places=2, verbose_name="Amount")
    created_at = models.DateTimeField(default=timezone.now, verbose_name="Created_at")
    status = models.PositiveSmallIntegerField(blank=False, null=True, choices=Statuses.choices,
                                              verbose_name="Status")
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,257 | Lyxf3/Books_Shop | refs/heads/main | /book/views.py | from django.shortcuts import render
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework.decorators import action
from rest_framework.response import Response
from book.serializers import (
CategorySerializer, AuthorSerializer, BookSerializer, PromoCodeSerializer
)
from book.models import (
Category, Author, Book, PromoCode
)
from django.db.models import Avg, Max
class BookViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only list/retrieve API for books.

    The commented-out hand-written list/retrieve actions duplicated what
    ReadOnlyModelViewSet already provides and have been removed.
    """

    queryset = Book.objects.all()
    serializer_class = BookSerializer
class PromoCodeViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only list/retrieve API for promo codes.

    Dead commented-out list/retrieve implementations removed; the base
    class already provides them.
    """

    queryset = PromoCode.objects.all()
    serializer_class = PromoCodeSerializer
class AuthorViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only list/retrieve API for authors (dead commented code removed)."""

    queryset = Author.objects.all()
    serializer_class = AuthorSerializer
class CategoryViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only list/retrieve API for categories (dead commented code removed)."""

    queryset = Category.objects.all()
    serializer_class = CategorySerializer
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,258 | Lyxf3/Books_Shop | refs/heads/main | /purchase/admin.py | from django.contrib import admin
from django.utils.html import mark_safe
from purchase.models import Purchase
@admin.register(Purchase)
class PurchaseAdmin(admin.ModelAdmin):
    """Fully read-only admin for purchases, filterable by status."""

    list_display = ('user_id', 'books', 'amount',
                    'created_at', 'status')
    search_fields = ('user_id',)
    list_filter = ('status',)
    # No editable fields: purchases are created by the application only.
    fields = ()
    readonly_fields = ('user_id', 'books', 'amount',
                       'created_at', 'status')
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,259 | Lyxf3/Books_Shop | refs/heads/main | /book/migrations/0006_auto_20211008_1958.py | # Generated by Django 3.2.7 on 2021-10-08 16:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 3.2.7) migration of the ``book`` app.

    Normalizes related_names (books/contracts/promo_codes) and
    verbose_names on the relational fields. Do not edit by hand.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('book', '0005_author_percent'),
    ]

    operations = [
        migrations.AlterField(
            model_name='book',
            name='authors',
            field=models.ManyToManyField(related_name='books', to='book.Author', verbose_name='Authors'),
        ),
        migrations.AlterField(
            model_name='book',
            name='discount_shop',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='books', to='book.discountshop', verbose_name='Discount_shop'),
        ),
        migrations.AlterField(
            model_name='book',
            name='publisher',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='books', to='book.publisher', verbose_name='Publisher'),
        ),
        migrations.AlterField(
            model_name='contract',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contracts', to='book.author', verbose_name='author'),
        ),
        migrations.AlterField(
            model_name='contract',
            name='publisher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contracts', to='book.publisher', verbose_name='publisher'),
        ),
        migrations.AlterField(
            model_name='promocode',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='promo_codes', to=settings.AUTH_USER_MODEL, verbose_name='User'),
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,260 | Lyxf3/Books_Shop | refs/heads/main | /tmp/purchase.py | from book.modals import (
Book, Category, Author, Publisher, Contract, Market, DiscountShop
)
from random import randint, choice
from string import ascii_lowercase
from django.utils import timezone
#
# def create_purchase(count):
# def get_random_obf(model):
# random_idx = randint(0, model.objects.count() - 1)
# return model.objects.all()[random_idx]
# now = timezone.now()
#
# for item in range(count):
# params = {
# "user_id": get_random_obf(User),
# "books": get_random_obf(Book),
# "amount": randint(1, 2000),
# "created_at": now,
# "status": PROCESSED,
# }
# Book.objects.create(params)
#
#
# class Statuses(models.IntegerChoices):
# PROCESSED = 1
# CANCELED = 2
# ACCEPTED = 3
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,261 | Lyxf3/Books_Shop | refs/heads/main | /purchase/urls.py | from django.urls import path
from rest_framework.routers import DefaultRouter
from django.conf.urls import include
from purchase import views as purchase_views
# DRF router that auto-generates the URL patterns for the purchase API.
router = DefaultRouter()
router.register(r'purchase', purchase_views.PurchaseViewSet, basename='purchase')

# Mount all router-generated routes at the app root.
urlpatterns = [
    path('', include(router.urls)),
]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,262 | Lyxf3/Books_Shop | refs/heads/main | /purchase/serializers.py | from purchase.models import Purchase
from rest_framework import serializers
class PurchaseSerializer(serializers.ModelSerializer):
    """Serializes Purchase rows for the API."""

    class Meta:
        model = Purchase
        # BUG FIX: 'issued' is not a field of Purchase (it belongs to Book);
        # listing it makes ModelSerializer raise ImproperlyConfigured when
        # the serializer's fields are resolved.
        fields = ('user_id', 'books', 'amount',
                  'created_at', 'status')
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,263 | Lyxf3/Books_Shop | refs/heads/main | /tmp/book.py | from book.models import (
Book, Category, Author, Publisher, Contract, Market,
DiscountShop, BBook, PBook, EBook, ABook, Types
)
from random import randint, choice, choices
from string import ascii_lowercase
from django.utils import timezone
CATEGORIES_COUNT = 1500
AUTHORS_COUNT = 1500
PUBLISHER_COUNT = 100
CONTRACT_COUNT = 100
BOOK_COUNT = 1500
DISCOUNT_SHOP = 100
def get_random_obj(model):
    """Return one row of *model*, chosen uniformly at random."""
    last_index = model.objects.count() - 1
    return model.objects.all()[randint(0, last_index)]
def get_random_queryset(model):
    """Return a queryset of up to 3 randomly picked *model* rows
    (choices() may repeat an id, so fewer than 3 can come back)."""
    ids = model.objects.all().values_list('id', flat=True)
    picked = choices(ids, k=3)
    return model.objects.filter(pk__in=picked)
def get_book_type():
    """Pick one of the four concrete Book subclasses at random."""
    book_models = [PBook, EBook, ABook, BBook]
    return choice(book_models)
def generate_data_by_type(book_type):
    """Return the extra constructor kwargs (a dict) for *book_type*.

    BUG FIX: on an EBook `source` collision the old code returned a bare
    string instead of a dict, which made the caller's params.update(...)
    raise. Now a fresh URL is generated until it is unique.

    Returns None for an unrecognized subclass (original contract kept).
    """
    if book_type == PBook:
        return {}
    elif book_type == EBook:
        generate = f'https://{"".join(choice(ascii_lowercase) for _ in range(randint(2, 30)))}/'
        # Re-roll until the URL is not already taken.
        while EBook.objects.filter(source=generate).exists():
            generate = f'https://{"".join(choice(ascii_lowercase) for _ in range(randint(2, 30)))}/'
        return {
            "source": generate
        }
    elif book_type == ABook:
        return {
            "file": None
        }
    elif book_type == BBook:
        return {
            "symbol_type": choice([Types.ASSOCIATIVE, Types.DIDACTIC, Types.COMBINED])
        }
def create_category(count):
    """Insert *count* Category rows with random lowercase titles."""
    for _ in range(count):
        title = ''.join(choice(ascii_lowercase) for _ in range(randint(2, 50)))
        Category.objects.create(title=title)
def create_author(count):
    """Insert *count* Author rows with random names and royalty percents."""
    for _ in range(count):
        Author.objects.create(
            first_name=''.join(choice(ascii_lowercase) for _ in range(randint(2, 30))),
            second_name=''.join(choice(ascii_lowercase) for _ in range(randint(2, 30))),
            percent=randint(2, 30),
        )
def create_publisher(count):
    """Insert *count* Publisher rows with random lowercase titles."""
    for _ in range(count):
        Publisher.objects.create(
            title=''.join(choice(ascii_lowercase) for _ in range(randint(2, 40))),
        )
def create_contract(count):
    """Insert *count* Contract rows linking random authors and publishers."""
    for _ in range(count):
        Contract.objects.create(
            title=''.join(choice(ascii_lowercase) for _ in range(randint(2, 100))),
            author=get_random_obj(Author),
            publisher=get_random_obj(Publisher),
        )
def create_discount_shop(count):
    """Insert *count* DiscountShop rows with random 1-20% discounts."""
    for _ in range(count):
        DiscountShop.objects.create(
            author_discount=randint(1, 20),
            shop_discount=randint(1, 20),
        )
def create_book(count):
    """Create *count* books of random concrete type with random relations.

    Flow per book: pick a subclass, build the shared kwargs, merge in the
    type-specific extras from generate_data_by_type, create the row, then
    attach ~3 random authors and categories (M2M links can only be added
    once the book row exists).
    """
    now = timezone.now()
    for item in range(count):
        book_type = get_book_type()
        params = {
            "title": ''.join(choice(ascii_lowercase) for _ in range(randint(10, 50))),
            "price": randint(1, 2000),
            "issued": now,
            "publisher": get_random_obj(Publisher),
            "market_id": randint(1, 2000),
            "discount_market": randint(1, 50),
            "discount_shop": get_random_obj(DiscountShop),
            "available": True,
        }
        params.update(generate_data_by_type(book_type))
        book = book_type.objects.create(**params)
        authors_queryset = get_random_queryset(Author)
        categories_queryset = get_random_queryset(Category)
        for author in authors_queryset:
            book.authors.add(author)
        for categories in categories_queryset:
            book.categories.add(categories)
def main():
    """Seed the database in dependency order (books need the other rows first)."""
    create_category(CATEGORIES_COUNT)
    create_author(AUTHORS_COUNT)
    create_publisher(PUBLISHER_COUNT)
    create_contract(CONTRACT_COUNT)
    create_discount_shop(DISCOUNT_SHOP)
    create_book(BOOK_COUNT)
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,264 | Lyxf3/Books_Shop | refs/heads/main | /book/mixins/aggregation.py | from book.models import (
DiscountShop, Author, Book, Publisher
)
from django.db.models import FloatField, OuterRef
from django.db.models.functions import Cast
from django.db.models import Avg, Max, F, Sum, Min
# Average author-granted discount across an author's books (Avg over an int field).
def get_avg_discount(author=None):
    """Average ``author_discount`` across *author*'s books.

    Args:
        author: the Author to inspect; defaults to the first Author,
            which preserves the original hard-coded behaviour.

    Returns:
        dict like ``{'discount_shop__author_discount__avg': ...}``.
    """
    if author is None:
        author = Author.objects.first()
    return Book.objects.filter(authors=author).aggregate(Avg('discount_shop__author_discount'))
# How much money the author loses when a seller discount is applied, e.g. with a 30% author share.
def how_much_money_author_lose():
    """Aggregate money figure for the first author over all their books.

    Per book the expression is
    (price - price * shop_discount/100) * author_percent/100 — i.e. the
    author's percentage of the *discounted* price.
    NOTE(review): despite the name, this looks like the author's remaining
    share, not the amount lost to the discount — confirm intent.
    """
    author = Author.objects.first()
    return Book.objects.filter(authors=author)\
        .annotate(
            # Cast to float so the percentage math is not integer division.
            loses_test2=(Cast(F('price'), FloatField()) -
                         Cast(F('price'), FloatField()) *
                         Cast(F('discount_shop__shop_discount'), FloatField()) / 100) *
                        Cast(F('authors__percent'), FloatField()) / 100,
        ).aggregate(
            Sum(F('loses_test2')),
        )
def publisher_max_and_min_cost():
    # NOTE(review): annotating each Book with Min('price') groups per book,
    # so min_price == price for every row and the `books__in` filter matches
    # every publisher that has any book at all. The name promises per-publisher
    # max/min cost, which this does not compute — confirm the intent before use.
    return Publisher.objects.filter(
        books__in=Book.objects.annotate(min_price=Min('price')).values('id')
    ).all()
def main():
    """Ad-hoc driver: print the three aggregation examples above."""
    print(get_avg_discount())
    print(how_much_money_author_lose())
    print(publisher_max_and_min_cost())
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,265 | Lyxf3/Books_Shop | refs/heads/main | /book/migrations/0005_author_percent.py | # Generated by Django 3.2.7 on 2021-10-08 12:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add ``Author.percent`` — the author's royalty share, default 20."""

    dependencies = [
        ('book', '0004_auto_20211008_1340'),
    ]

    operations = [
        migrations.AddField(
            model_name='author',
            name='percent',
            field=models.PositiveSmallIntegerField(default=20, verbose_name='Percent'),
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,266 | Lyxf3/Books_Shop | refs/heads/main | /book/serializers.py | from book.models import (
Book, Author, Publisher, PromoCode, Category
)
from rest_framework import serializers
class BookSerializer(serializers.ModelSerializer):
    """Serializes Book with pricing, discount and availability fields."""

    class Meta:
        model = Book
        fields = ('title', 'price', 'issued', 'categories', 'authors', 'publisher',
                  'market_id', 'discount_shop', 'discount_market', 'available')
class AuthorSerializer(serializers.ModelSerializer):
    """Serializes Author name fields plus the royalty ``percent``."""

    class Meta:
        model = Author
        fields = ('first_name', 'second_name', 'percent')
class PromoCodeSerializer(serializers.ModelSerializer):
    """Serializes PromoCode, including its UUID ``id`` and usage counters."""

    class Meta:
        model = PromoCode
        fields = ('id', 'percent', 'user', 'times_to_use', 'times_used')
class CategorySerializer(serializers.ModelSerializer):
    """Serializes Category; only the title is exposed."""

    class Meta:
        model = Category
        fields = ('title',)
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,267 | Lyxf3/Books_Shop | refs/heads/main | /purchase/migrations/0001_initial.py | # Generated by Django 3.2.7 on 2021-10-06 10:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the purchase app (Purchase model)."""

    initial = True

    dependencies = [
        ('book', '0002_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Purchase',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('amount', models.DecimalField(decimal_places=2, max_digits=7, verbose_name='Amount')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Created_at')),
                ('status', models.PositiveSmallIntegerField(choices=[(1, 'Processed'), (2, 'Canceled'), (3, 'Accepted')], null=True, verbose_name='Status')),
                # Book link survives book deletion (SET_NULL); purchases die with the user (CASCADE).
                ('books', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Purchase', to='book.book', verbose_name='Book')),
                ('user_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='Purchase', to=settings.AUTH_USER_MODEL, verbose_name='User')),
            ],
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,268 | Lyxf3/Books_Shop | refs/heads/main | /book/migrations/0003_alter_ebook_source.py | # Generated by Django 3.2.7 on 2021-10-06 14:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make ``EBook.source`` unique (still nullable/blank)."""

    dependencies = [
        ('book', '0002_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ebook',
            name='source',
            field=models.URLField(blank=True, null=True, unique=True, verbose_name='Source'),
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,269 | Lyxf3/Books_Shop | refs/heads/main | /book/migrations/0001_initial.py | # Generated by Django 3.2.7 on 2021-10-06 10:21
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
    """Initial schema for the book app.

    Creates the core models (Author, Book, Category, Contract,
    DiscountShop, Market, PromoCode, Publisher) plus the multi-table
    inheritance children of Book (ABook/BBook/EBook/PBook), each linked
    via a ``book_ptr`` OneToOneField.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(blank=True, max_length=30, null=True, verbose_name='First_name')),
                ('second_name', models.CharField(blank=True, max_length=30, null=True, verbose_name='Second_name')),
            ],
        ),
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, null=True, verbose_name='Title')),
                ('price', models.PositiveIntegerField(verbose_name='Price')),
                ('issued', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Issued')),
                ('market_id', models.PositiveIntegerField(verbose_name='Market_id')),
                ('discount_market', models.PositiveSmallIntegerField(verbose_name='Discount_market')),
                ('available', models.BooleanField(default=False, verbose_name='Available')),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, null=True, verbose_name='Title')),
            ],
        ),
        migrations.CreateModel(
            name='Contract',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='DiscountShop',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('author_discount', models.PositiveSmallIntegerField(verbose_name='Author_discount')),
                # NOTE(review): verbose_name 'Author_discount' on shop_discount looks like a
                # copy-paste slip mirrored from the model — confirm before regenerating.
                ('shop_discount', models.PositiveSmallIntegerField(verbose_name='Author_discount')),
            ],
        ),
        migrations.CreateModel(
            name='Market',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='Title')),
                ('location', models.CharField(max_length=100, verbose_name='Location')),
            ],
        ),
        migrations.CreateModel(
            name='PromoCode',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='Id')),
                ('percent', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Percent')),
                ('times_to_use', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Times_to_use')),
                ('times_used', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Times_used')),
            ],
        ),
        migrations.CreateModel(
            name='Publisher',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, max_length=100, null=True, verbose_name='Title')),
            ],
        ),
        migrations.CreateModel(
            name='ABook',
            fields=[
                ('book_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='book.book')),
                ('file', models.FileField(blank=True, null=True, upload_to=None, verbose_name='File')),
            ],
            bases=('book.book',),
        ),
        migrations.CreateModel(
            name='BBook',
            fields=[
                ('book_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='book.book')),
                ('symbol_type', models.PositiveSmallIntegerField(choices=[(1, 'Associative'), (2, 'Didactic'), (3, 'Combined')])),
            ],
            bases=('book.book',),
        ),
        migrations.CreateModel(
            name='EBook',
            fields=[
                ('book_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='book.book')),
                ('source', models.URLField(blank=True, null=True, verbose_name='Source')),
            ],
            bases=('book.book',),
        ),
        migrations.CreateModel(
            name='PBook',
            fields=[
                ('book_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='book.book')),
            ],
            bases=('book.book',),
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,270 | Lyxf3/Books_Shop | refs/heads/main | /purchase/views.py | from django.shortcuts import render
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework.decorators import action
from rest_framework.response import Response
from purchase.serializers import PurchaseSerializer
from purchase.models import Purchase
class PurchaseViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API (list/retrieve only) over Purchase records."""

    queryset = Purchase.objects.all()
    serializer_class = PurchaseSerializer
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,271 | Lyxf3/Books_Shop | refs/heads/main | /user/admin.py | from django.contrib import admin
from django.utils.html import mark_safe
from user.models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
    """Admin for User: only name and balance are editable in the form.

    NOTE(review): fields listed in ``readonly_fields`` are not included
    in ``fields``, so they will not appear on the change form — confirm
    this is intended.
    """

    list_display = ('name', 'email', 'is_staff', 'is_superuser',
                    'is_active', 'balance', 'date_joined')
    search_fields = ('email', 'name')
    list_filter = ('is_superuser', 'is_active', 'is_staff')
    fields = ('name', 'balance',)
    readonly_fields = ('email', 'is_staff', 'is_superuser',
                       'is_active', 'date_joined')
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,272 | Lyxf3/Books_Shop | refs/heads/main | /book/urls.py | from django.urls import path
from rest_framework.routers import DefaultRouter
from django.conf.urls import include
from book import views as book_views
# The DRF router auto-generates list/detail routes for each viewset.
router = DefaultRouter()
router.register(r'book', book_views.BookViewSet, basename='book')
router.register(r'promo-code', book_views.PromoCodeViewSet, basename='promo-code')
router.register(r'author', book_views.AuthorViewSet, basename='author')
router.register(r'category', book_views.CategoryViewSet, basename='category')

# All app routes are mounted at the prefix chosen by the project urls.
urlpatterns = [
    path('', include(router.urls)),
]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,273 | Lyxf3/Books_Shop | refs/heads/main | /user/migrations/0001_initial.py | # Generated by Django 3.2.7 on 2021-10-06 10:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the custom User model (with balance and
    a many-to-many ``favourite_books`` link to Book)."""

    initial = True

    dependencies = [
        ('book', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('name', models.CharField(max_length=100, null=True, verbose_name='Name')),
                ('email', models.EmailField(max_length=254, null=True, unique=True, verbose_name='Email')),
                ('is_staff', models.BooleanField(default=False, verbose_name='Is_staff')),
                # NOTE(review): verbose_name 'is_active' on is_superuser mirrors the model — likely a slip.
                ('is_superuser', models.BooleanField(default=False, verbose_name='is_active')),
                ('is_active', models.BooleanField(default=False, verbose_name='is_active')),
                ('balance', models.DecimalField(decimal_places=2, default=0, max_digits=7, verbose_name='Balance')),
                ('favourite_books', models.ManyToManyField(related_name='Users', to='book.Book', verbose_name='Favourite_books')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| {"/user/views.py": ["/user/serializers.py", "/user/models.py"], "/user/serializers.py": ["/user/models.py"], "/book/admin.py": ["/book/models.py"], "/purchase/models.py": ["/user/models.py", "/book/models.py"], "/book/views.py": ["/book/serializers.py", "/book/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/purchase/serializers.py": ["/purchase/models.py"], "/tmp/book.py": ["/book/models.py"], "/book/mixins/aggregation.py": ["/book/models.py"], "/book/serializers.py": ["/book/models.py"], "/purchase/views.py": ["/purchase/serializers.py", "/purchase/models.py"], "/user/admin.py": ["/user/models.py"]} |
47,280 | antoniolopes/bootstrap-bottle | refs/heads/master | /routes/static.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Static routes
Created by: Rui Carmo
License: MIT (see LICENSE for details)
"""
import os, sys, logging
from bottle import route, static_file
log = logging.getLogger()
@route('/static/<path:path>')
def send_static(path):
    """Static file handler"""
    # Serves files from the ./static directory, resolved relative to
    # the process working directory.
    return static_file(path, root='static')
| {"/app.py": ["/api/__init__.py", "/routes/__init__.py"]} |
47,281 | antoniolopes/bootstrap-bottle | refs/heads/master | /app.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Main application script
Created by: Rui Carmo
"""
import os, sys, logging
# Make sure our bundled libraries take precedence
sys.path.insert(0,os.path.join(os.path.dirname(os.path.abspath(__file__)),'lib'))
from bottle import run
from config import settings
log = logging.getLogger()
if __name__ == "__main__":
    # Bottle's reloader spawns a child process (BOTTLE_CHILD set in its
    # environment); only the child — or a non-reloader run — should
    # register routes and serve requests, to avoid doing setup twice.
    if settings.reloader:
        if 'BOTTLE_CHILD' not in os.environ:
            log.debug('Using reloader, spawning first child.')
        else:
            log.debug('Child spawned.')
    if not settings.reloader or ('BOTTLE_CHILD' in os.environ):
        log.info("Setting up application.")
        # Importing these modules registers their routes as a side effect.
        import api, routes, controllers
        log.info("Serving requests.")
        run(
            port = settings.http.port,
            host = settings.http.bind_address,
            debug = settings.debug,
            reloader = settings.reloader
        )
| {"/app.py": ["/api/__init__.py", "/routes/__init__.py"]} |
47,282 | antoniolopes/bootstrap-bottle | refs/heads/master | /routes/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Main routes
Created by: Rui Carmo
(Updated by: António Lopes - bootstrap-bottle)
License: MIT (see LICENSE for details)
"""
import os, sys, logging
from bottle import view, route
log = logging.getLogger()
@route('/')
@view('home')
def index():
    """Render the 'home' template for the site root."""
    return {
        "title": "Bootstrap-Bottle",
        "body": "Welcome to the example page"
    }

# import all other routes (importing registers their @route handlers)
# NOTE(review): these are Python 2 implicit relative imports; under
# Python 3 this needs ``from . import static, debug, docs`` — confirm
# the target interpreter before changing.
import static, debug, docs
| {"/app.py": ["/api/__init__.py", "/routes/__init__.py"]} |
47,283 | antoniolopes/bootstrap-bottle | refs/heads/master | /api/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Application routes
Created by: Rui Carmo
License: MIT (see LICENSE for details)
"""
import os, sys, logging
log = logging.getLogger()

# API version; route submodules mount themselves under this prefix ("/v1").
version = '1'
prefix = '/v%s' % version

# import route submodules here
| {"/app.py": ["/api/__init__.py", "/routes/__init__.py"]} |
47,284 | ewurman/LA2Project | refs/heads/master | /GameB.py | '''
M = money I have (integer)
if M is div by 3 we toss coin 1;
else we toss coin 2
Coin1: I win 9/10 + e
Coin2: I win 1/4 + e
Winner wins 1 dollar from loser
'''
import random
import numpy as np
import matplotlib.pyplot as plt
class GameB:
    """Parrondo 'Game B': a capital-dependent coin game.

    If the player's money is divisible by 3, coin 1 (win probability
    ``chance1 + epsilon``) is flipped; otherwise coin 2 (win probability
    ``chance2 + epsilon``) is used.  The winner of each flip takes one
    dollar from the loser.

    Improvements over the previous version: dead ``money = money``
    assignment and commented-out debug code removed; the duplicated
    win/lose branches in ``play_once`` collapsed into one comparison.
    """

    def __init__(self, epsilon=0.005, chance1=0.9, chance2=0.25):
        # epsilon biases both coins; the defaults (0.9, 0.25) follow the
        # classic Parrondo formulation with the mod-3 rule below.
        self.epsilon = epsilon
        self.chance_one = chance1
        self.chance_two = chance2
        self.coin1_chance = 3  # coin 1 is used when money % 3 == 0

    def play_once(self, money):
        """Flip the appropriate coin once; return True if the player wins."""
        x = random.random()
        if money % self.coin1_chance == 0:
            win_prob = self.chance_one
        else:
            win_prob = self.chance_two
        return win_prob + self.epsilon > x

    def play(self, turns, money):
        """Play ``turns`` flips starting from ``money``.

        Returns ``(final_money, money_history, counts)`` where
        ``counts[k]`` is the fraction of flips entered with
        ``money % 3 == k``.
        """
        money_list = []
        counts = [0, 0, 0]
        for _ in range(turns):
            counts[money % 3] += 1
            # play_once sees the pre-flip capital (evaluated before +=).
            money += 1 if self.play_once(money) else -1
            money_list.append(money)
        # Normalise the mod-3 occupancy counts into frequencies.
        counts = [c / turns for c in counts]
        return money, money_list, counts

    def plot(self, turns, money, trials):
        """Plot winnings averaged over ``trials`` runs of ``turns`` flips,
        printing the averaged mod-3 occupancy probabilities."""
        turns_list = np.linspace(0, turns, turns)
        list_sum = np.zeros(turns)
        list_counts = np.zeros(3)
        for _ in range(trials):
            _, money_list, counts = self.play(turns, money)
            list_sum = np.add(list_sum, money_list)
            list_counts = np.add(list_counts, counts)
        ave_list = np.multiply((1 / trials), list_sum)
        ave_counts = np.multiply((1 / trials), list_counts)
        print('P(0) =', ave_counts[0])
        print('P(1) =', ave_counts[1])
        print('P(2) =', ave_counts[2])
        print(ave_counts[0] + ave_counts[1] + ave_counts[2])
        plt.plot(turns_list, ave_list)
        plt.xlabel('Coin Flips')
        plt.ylabel('Money')
        plt.title('Game B Average Winnings Over 1000 Trials')
        plt.show()
| {"/Parrondo3.py": ["/GameA.py", "/GameB.py"]} |
47,285 | ewurman/LA2Project | refs/heads/master | /Parrondo3.py | from GameA import *
from GameB import *
import numpy as np
def alternate_games(money, turns):
    '''Play Parrondo's alternating game for (roughly) ``turns`` plays.

    Each loop pass plays the sequence A, A, B, B, B, A with a bail-out
    check after the fourth play, so the total number of plays may exceed
    ``turns`` by up to two — preserved from the original.

    Returns ``(final_money, history, (countsA, countsB))`` where the
    counts record how often each game was entered with money congruent
    to 0, 1, 2 (mod 3).

    NOTE(review): the plot elsewhere calls this the "AABBAB Pattern",
    but the per-pass order here is A,A,B,B,B,A — confirm which was
    intended.  Behavior is unchanged; the six copy-pasted play blocks
    are folded into two small helpers.
    '''
    ga = GameA()
    gb = GameB()
    money_list = []
    countsA = [0, 0, 0]
    countsB = [0, 0, 0]
    i = 0

    def play_a(money):
        # One Game A flip: record mod-3 state, apply the +-1 outcome.
        countsA[money % 3] += 1
        money += 1 if ga.play_once() else -1
        money_list.append(money)
        return money

    def play_b(money):
        # One Game B flip; play_once sees the pre-flip capital.
        countsB[money % 3] += 1
        money += 1 if gb.play_once(money) else -1
        money_list.append(money)
        return money

    while i < turns:
        money = play_a(money)
        i += 1
        money = play_a(money)
        i += 1
        money = play_b(money)
        i += 1
        money = play_b(money)
        i += 1
        if i >= turns:
            break
        money = play_b(money)
        i += 1
        money = play_a(money)
        i += 1
    return money, money_list, (countsA, countsB)
def alternate_many_trials(money, turns, trials):
    '''Average ``trials`` runs of the alternating game, print the
    averaged mod-3 occupancy counts for each sub-game, and plot the
    mean winnings curve.'''
    start_money = money
    turns_list = np.linspace(0, turns, turns)
    list_sum = np.zeros(turns)
    countsA_avg = np.zeros(3)
    countsB_avg = np.zeros(3)
    for _ in range(trials):
        _, history, (countsA, countsB) = alternate_games(start_money, turns)
        list_sum = np.add(list_sum, history)
        countsA_avg = np.add(countsA, countsA_avg)
        countsB_avg = np.add(countsB, countsB_avg)
    scale = 1 / trials
    countsA_avg = np.multiply(scale, countsA_avg)
    countsB_avg = np.multiply(scale, countsB_avg)
    ave_list = np.multiply(scale, list_sum)
    print("A: ", end="")
    print(countsA_avg)
    print("B: ", end="")
    print(countsB_avg)
    plt.plot(turns_list, ave_list)
    plt.xlabel('Coin Flips')
    plt.ylabel('Money')
    plt.title('Game C (AABBAB Pattern) Average Winnings Over 1000 Trials')
    plt.show()
def __main__():
    """Manual smoke test: play each game, then the alternating game."""
    money = 100
    print("Sanity Check, Starting with $100")
    turns = 1000
    trials = 1000
    ga = GameA()  # higher epsilon to test losing game
    # NOTE(review): the comment above mentions a higher epsilon but the
    # defaults are used — confirm.
    gb = GameB()
    money, x = ga.play(turns, money)
    print("Played GameA {0} times, money at {1}. resetting money to $100".format(turns, money))
    money = 100
    # money, x = gb.play(turns, money)
    # NOTE(review): GameB.play returns three values; the line above would
    # need a third target if re-enabled.
    print("Played GameB {0} times, money at {1}. resetting money to $100".format(turns, money))
    money = 100
    print("Alternating games for {0} turns each:".format(turns))
    money, x, c = alternate_games(money, turns)
    print("Final: ${0}".format(money))
    money = 100
    # ga.plot(turns, money, trials)
    # gb.play(turns, money)
    alternate_many_trials(money, turns, trials)


__main__()
| {"/Parrondo3.py": ["/GameA.py", "/GameB.py"]} |
47,286 | ewurman/LA2Project | refs/heads/master | /GameA.py | '''
I Lose with 1/2 - e chance,
I win with 1/2 + e chance
Winner wins $1
'''
import random
import numpy as np
import matplotlib.pyplot as plt
class GameA:
    """Parrondo 'Game A': a simple biased coin flip.

    The player wins one dollar with probability ``chance + epsilon``
    and loses one dollar otherwise.

    Improvements over the previous version: ``play`` computed a
    ``counts`` list that was never returned or used (dead work,
    removed); the no-op ``money = money`` and commented-out debug
    lines in ``plot`` are gone.
    """

    def __init__(self, epsilon=0.005, chance=0.5):
        self.epsilon = epsilon
        self.chance = chance

    def play_once(self):
        """Flip the coin once; return True if the player wins."""
        return self.chance + self.epsilon > random.random()

    def play(self, turns, money):
        """Play ``turns`` flips starting from ``money``.

        Returns ``(final_money, money_history)``.
        """
        money_list = []
        for _ in range(turns):
            money += 1 if self.play_once() else -1
            money_list.append(money)
        return money, money_list

    def plot(self, turns, money, trials):
        """Plot the winnings averaged over ``trials`` runs of
        ``turns`` flips each."""
        turns_list = np.linspace(0, turns, turns)
        list_sum = np.zeros(turns)
        for _ in range(trials):
            _, money_list = self.play(turns, money)
            list_sum = np.add(list_sum, money_list)
        ave_list = np.multiply((1 / trials), list_sum)
        plt.plot(turns_list, ave_list)
        plt.xlabel('Coin Flips')
        plt.ylabel('Money')
        plt.title('Game A Average Winnings Over 1000 Trials')
        plt.show()
| {"/Parrondo3.py": ["/GameA.py", "/GameB.py"]} |
47,290 | klausfmh/pypeman-jsonrpc-adm | refs/heads/master | /vendor/jsonrpc_adm/server.py | from inspect import iscoroutinefunction
from aiohttp import web
from vendor.jsonrpc_adm.rpc_methods import RPCMethods
class RPCHandler:
    """Dispatches JSON-RPC 2.0 POST requests to methods of ``rpc_methods``.

    Coroutine functions are awaited; plain callables are invoked
    synchronously.  Underscore-prefixed attributes are never exposed.

    Improvements over the previous version: per-request debug ``print``
    calls removed; the no-op ``try/except Exception: raise`` removed;
    the ``assert version == "2.0"`` (stripped under ``-O``, and a 500
    otherwise) replaced with a proper JSON-RPC -32600 error response.
    """

    def __init__(self, rpc_methods):
        self.rpc_methods = rpc_methods
        # Map public attribute name -> (callable, is_coroutine_function).
        self.method_dict = methods = {}
        for name in (v for v in dir(rpc_methods) if not v.startswith("_")):
            entry = getattr(rpc_methods, name)
            if iscoroutinefunction(entry):
                methods[name] = entry, True
            elif callable(entry):
                methods[name] = entry, False

    async def post(self, request):
        """Handle one JSON-RPC 2.0 request body; return a JSON response."""
        data = await request.json()
        version, method_name, id_, params = (data.get(key) for key in (
            "jsonrpc", "method", "id", "params"))
        result = dict(
            jsonrpc="2.0",
            id=id_,
        )
        if version != "2.0":
            # Invalid Request per the JSON-RPC 2.0 spec (code -32600).
            result["error"] = dict(
                code=-32600,
                message="invalid jsonrpc version %r" % version,
            )
            return web.json_response(result)
        # "params" may be positional (list), named (dict) or absent.
        args = params if isinstance(params, list) else []
        kwargs = params if isinstance(params, dict) else {}
        method_info = self.method_dict.get(method_name)
        if method_info is None:
            result["error"] = dict(
                code=-32601,
                message="unknown method %s" % method_name,
            )
            return web.json_response(result)
        method, is_async = method_info
        if is_async:
            rslt = await method(*args, **kwargs)
        else:
            rslt = method(*args, **kwargs)
        result["result"] = rslt
        return web.json_response(result)
def server_app(rpc_methods=None, http_args=None):
    """Build an aiohttp Application serving JSON-RPC at "/".

    :param rpc_methods: object whose public callables become RPC
        methods; defaults to a fresh ``RPCMethods`` instance
    :param http_args: extra keyword args for ``web.Application``

    NOTE(review): GET "/" is routed to the POST handler, which awaits
    ``request.json()`` and will fail for body-less GET requests —
    confirm the GET route is intentional.
    """
    if rpc_methods is None:
        rpc_methods = RPCMethods()
    http_args = http_args or {}
    app = web.Application(**http_args)
    rpc_handler = RPCHandler(rpc_methods)
    app.add_routes([
        web.get("/", rpc_handler.post),
        web.post("/", rpc_handler.post),
    ])
    return app
| {"/vendor/jsonrpc_adm/server.py": ["/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/plugin.py": ["/vendor/jsonrpc_adm/server.py", "/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/sample_srv.py": ["/vendor/jsonrpc_adm/server.py"]} |
47,291 | klausfmh/pypeman-jsonrpc-adm | refs/heads/master | /vendor/jsonrpc_adm/plugin.py | # import asyncio
import logging
# from inspect import getmembers
# from inspect import isfunction
# from ajsonrpc import __version__ as ajsonrpc_version
# from ajsonrpc.dispatcher import Dispatcher
# from ajsonrpc.manager import AsyncJSONRPCResponseManager
# from ajsonrpc.core import JSONRPC20Request
from pypeman.conf import settings
from pypeman.endpoints import SocketEndpoint
from pypeman.plugins.base import BasePlugin
from vendor.jsonrpc_adm.server import server_app
from vendor.jsonrpc_adm.rpc_methods import RPCMethods
logger = logging.getLogger(__name__)
DEFAULT_SETTINGS = dict(
sock="0.0.0.0:8899",
reuse_port=False,
verify=False,
)
class JsonRPCAdmin(BasePlugin):
    """
    service providing a JSON RPC service to control pypeman
    """
    def __init__(self):
        super().__init__()
        # Merge user settings over the module-level defaults.
        cfg = dict(DEFAULT_SETTINGS)
        cfg.update(dict(settings.JSON_RPC_ADMIN_CFG))
        logger.debug("CFG = %s", cfg)
        self.sock = SocketEndpoint.normalize_socket(cfg["sock"])
        self.reuse_port = cfg["reuse_port"]
        self.http_args = cfg.get("http_args") or {}
        # ssl_context is consumed here; the remaining http_args go to aiohttp.
        self.ssl_context = self.http_args.pop("ssl_context", None)
        self.verify = cfg["verify"]
        self.app = None

    def ready(self):
        # Build the aiohttp application once the plugin is wired up.
        rpc_methods = RPCMethods()
        self.app = server_app(
            http_args=self.http_args,
            rpc_methods=rpc_methods,
        )

    async def start(self):
        """Bind the configured socket and start serving RPC requests."""
        logger.debug("do_start")
        sock = SocketEndpoint.mk_socket(self.sock, self.reuse_port)
        # NOTE(review): Application.make_handler() is deprecated in newer
        # aiohttp releases (AppRunner/SockSite is the replacement) —
        # confirm the pinned aiohttp version.
        self.srv = await self.loop.create_server(
            protocol_factory=self.app.make_handler(),
            sock=sock,
            ssl=self.ssl_context,
        )
        logger.debug("server created")
        return self.srv

    async def stop(self):
        # NOTE(review): no teardown yet — the server is never closed here.
        logger.debug("do_stop")
47,292 | klausfmh/pypeman-jsonrpc-adm | refs/heads/master | /vendor/jsonrpc_adm/http_client.py | from functools import partial
import requests
class Client:
    """Minimal blocking JSON-RPC 2.0 client over HTTP (requests-based).

    Methods may be invoked via ``client.call("name", ...)`` or, thanks
    to ``__getattr__``, directly as ``client.name(...)``.

    Improvement over the previous version: ``post`` lazily creates the
    session, so calling without an explicit ``connect()`` no longer
    raises ``AttributeError`` on ``None``.
    """

    def __init__(self, url):
        # Normalise the URL: default to http:// and ensure a trailing slash.
        if "://" not in url:
            url = "http://" + url
        if not url.endswith("/"):
            url += "/"
        self.url = url
        self.ses = None  # created lazily or via connect()
        self.cnt = 1     # auto-incrementing JSON-RPC request id

    def connect(self):
        """Create the underlying requests session."""
        self.ses = requests.Session()

    def post(self, data):
        """
        post data to url, add id if missing in data and
        parse result
        """
        if self.ses is None:
            # Lazily connect so callers are not forced to call connect().
            self.connect()
        if "id" not in data:
            data = dict(data)
            data["id"] = self.cnt
            self.cnt += 1
        rslt = self.ses.post(self.url, json=data)
        assert rslt.status_code == 200
        data = rslt.json()
        return data

    def call(self, method, *args, **kwargs):
        """
        calls a json RPC method with args or kwargs
        """
        # JSON-RPC params are either positional OR named, never both.
        assert not (args and kwargs)
        if args:
            params = args
        elif kwargs:
            params = kwargs
        else:
            params = []
        data = dict(
            jsonrpc="2.0",
            method=method,
            id=self.cnt,
            params=params,
        )
        self.cnt += 1
        rslt = self.post(data)
        return rslt

    def __getattr__(self, name):
        # Unknown attributes become RPC calls: client.ping() -> call("ping").
        return partial(self.call, name)
| {"/vendor/jsonrpc_adm/server.py": ["/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/plugin.py": ["/vendor/jsonrpc_adm/server.py", "/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/sample_srv.py": ["/vendor/jsonrpc_adm/server.py"]} |
47,293 | klausfmh/pypeman-jsonrpc-adm | refs/heads/master | /vendor/jsonrpc_adm/sample_srv.py | from aiohttp import web
from vendor.jsonrpc_adm.server import server_app
class RPCMethods:
    """Demo RPC method set: one synchronous and one asynchronous ping."""

    def ping(self):
        """Synchronous health check."""
        return "pong"

    async def aping(self):
        """Asynchronous health check."""
        return "ponga"
if __name__ == "__main__":
    # Manual smoke test: serve the demo methods on aiohttp's default port.
    methods = RPCMethods()
    app = server_app(methods)
    web.run_app(app)
| {"/vendor/jsonrpc_adm/server.py": ["/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/plugin.py": ["/vendor/jsonrpc_adm/server.py", "/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/sample_srv.py": ["/vendor/jsonrpc_adm/server.py"]} |
47,294 | klausfmh/pypeman-jsonrpc-adm | refs/heads/master | /vendor/jsonrpc_adm/rpc_methods.py | import importlib
from pypeman import channels
from pypeman import nodes
from pypeman.conf import settings
from pypeman.graph import mk_graph
class RPCMethods:
    """JSON-RPC administration methods exposed for a running pypeman instance.

    Methods that return the string "NotImplemented" are declared placeholders
    for the planned admin API.
    """

    def ping(self):
        """
        ping function (mostly for debug)
        """
        return "pong"

    async def pinga(self):
        """
        asynchronous ping function (mostly for debug)
        """
        return "ponga"

    def graph(self, dot=False, json=False):
        """
        returns pypeman graph as a list of text lines

        :param dot: if true text lines will be for a dot file
        :param json: return graph as json object. NOT IMPLEMENTED SO FAR
        :raises NotImplementedError: if json output is requested
        """
        if json:
            raise NotImplementedError("json graph not implemented so far")
        lines = list(mk_graph(dot=dot))
        return lines

    def channels(self, name=None):
        """
        return info about all channels (or only the channel matching `name`)

        :param name: optional channel name to restrict the result to
        :return: dict with `by_uuid` (uuid -> channel info dict) and
            `to_uuid` (channel name -> uuid)
        """
        chan_by_uuid = {}
        to_uuid = {}
        # TODO as code seems to force unique names, perhaps
        # better make to_name and by_name ??
        rslt = dict(
            by_uuid=chan_by_uuid,
            to_uuid=to_uuid,
        )
        for channel in channels.all_channels:
            if name and name != channel.name:
                continue
            uuid = channel.uuid.hex
            to_uuid[channel.name] = uuid
            # a channel may provide its own serialization
            if hasattr(channel, "as_json"):
                chan_by_uuid[uuid] = channel.as_json()
                continue
            parent = channel.parent
            # renamed from `nodes` so the `nodes` module is not shadowed here
            node_names = []
            for node in channel._nodes:
                node_names.append(node.name)
            if parent:
                parent = parent.name
            as_dict = channel.to_dict()
            data = dict(
                name=channel.name,
                uuid=channel.uuid.hex,
                parent=parent,
                parent_uids=channel.parent_uids,
                status=as_dict["status"],
                has_message_store=as_dict["has_message_store"],
                processed_msgs=channel.processed_msgs,
                nodes=node_names,
            )
            chan_by_uuid[uuid] = data
        return rslt

    def nodes(self, name=None):
        """
        return info about all nodes (or only the node matching `name`)

        :param name: optional node name to restrict the result to
        :return: dict with `by_name` (node name -> node info dict)
        """
        by_name = {}
        rslt = dict(
            by_name=by_name,
        )
        for node in nodes.all_nodes:
            node_name = node.name
            # BUG FIX: the original rebound the `name` parameter inside the
            # loop, so the optional filter was never applied.
            if name and name != node_name:
                continue
            if hasattr(node, "as_json"):
                by_name[node_name] = node.as_json()
                continue
            data = dict(
                name=node_name,
                cls=node.__class__.__module__ + "." + node.__class__.__name__,
                fullpath=node.fullpath(),
                processed=node.processed,
            )
            by_name[node_name] = data
        return rslt

    def channel_info(self, name=None, uuid=None):
        """
        gets detailed channel info
        """
        return "NotImplemented"

    async def channel_start(self, name=None, uuid=None):
        """
        starts a given channel
        """
        return "NotImplemented"

    async def channel_stop(self, name=None, uuid=None):
        """
        stops a given channel
        """
        return "NotImplemented"

    async def channel_process(self, name=None, uuid=None):
        """
        let channel process a given message
        """
        return "NotImplemented"

    def node_info(self, name):
        """
        get detailed info about a node
        """
        return "NotImplemented"

    async def node_inject(self, name, msg=None):
        """
        inject message into a node and let it ripple through
        """
        return "NotImplemented"

    # BUG FIX: `node_process` was defined twice; the first definition was
    # silently shadowed by the second. Only one definition is kept.
    async def node_process(self, name, msg=None):
        """
        let node process a given message
        """
        return "NotImplemented"

    async def clear_break(self, name=None, uuid=None):
        """
        clear breakpoint for a given node
        """
        return "NotImplemented"

    async def set_break(self, name):
        """
        set breakpoint for a given node
        """
        return "NotImplemented"

    async def step(self):
        """
        perform a single step
        """
        return "NotImplemented"

    async def continue_processing(self):
        """
        continues exection till next break
        """
        return "NotImplemented"

    def settings(self):
        """
        retrieve pypeman settings

        Only attributes whose name starts with an upper-case ASCII letter
        (the settings constants) are returned.
        """
        rslt = {}
        for key in dir(settings):
            # skip anything that does not start with A..Z
            if key[0] < "A" or key[0] > "Z":
                continue
            rslt[key] = getattr(settings, key)
        return rslt

    def patch(self, modulename="vendor.jsonrpc_adm.patches", *args, **kwargs):
        """
        running patch code from a local file.
        This allows to import / reimport code while pypeman is running.
        Can be helpful for debugging, patching

        :param modulename: dotted module path to (re)load; its `patch`
            callable (if present) is invoked with *args / **kwargs
        :return: result of the module's `patch` function, or str(module)
        """
        # TODO:
        # - add http import
        # -
        if modulename:
            mod = importlib.import_module(modulename)
            # reload so edits made while pypeman runs take effect
            importlib.reload(mod)
            if hasattr(mod, "patch"):
                rslt = mod.patch(*args, **kwargs)
            else:
                rslt = str(mod)
        return rslt

    def shutdown(self):
        """
        shutdown pypeman
        """
        return "NotImplemented"

    def disable_debug(self):
        """
        disable / shotdown debug interface
        """
        return "NotImplemented"
| {"/vendor/jsonrpc_adm/server.py": ["/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/plugin.py": ["/vendor/jsonrpc_adm/server.py", "/vendor/jsonrpc_adm/rpc_methods.py"], "/vendor/jsonrpc_adm/sample_srv.py": ["/vendor/jsonrpc_adm/server.py"]} |
47,345 | towrig/marvelcoop | refs/heads/master | /tracker.py | import os
import time
import math
import tkinter.filedialog as filedialog
import schedule
import threading
import random
import exporter as exp
from tkinter import *
import numpy as np
from PIL import Image, ImageTk, ImageDraw
from marvelmind import MarvelmindHedge
# whole app requires: pyserial, crcmod, matplotlib, Pillow, numpy
class MainApp(Tk):
    """Top-level Tk window; hosts exactly one active frame at a time."""

    def __init__(self):
        Tk.__init__(self)
        # currently displayed frame (ReplayFrame or TrackingFrame)
        self._frame = None
        self.title("Coopi tracker")
        self.switch_frame(ReplayFrame)
        # route the window-close button through app_close_callback so the
        # active frame can release its resources first
        self.protocol("WM_DELETE_WINDOW", self.app_close_callback)

    def switch_frame(self, frame_class):
        """Replace the current frame (if any) with a new frame_class instance."""
        new_frame = frame_class(self)
        if self._frame is not None:
            self._frame.destroy()
        self._frame = new_frame
        self._frame.pack()
        # the replay frame owns the canvas that installs the menubar
        if frame_class == ReplayFrame:
            self._frame.canvas_widget.menubar(self)

    def app_close_callback(self):
        """Let the active frame clean up, then destroy the window."""
        self._frame.onclose()
        self.destroy()
class Heatmap(object):
    """Compose a translucent occupancy heatmap on top of the floor-plan image.

    A 60x40 cell grid is obtained from ``root.generate_heatmap``; each cell
    is tinted from cyan (no samples) to red (the busiest cell) at 50% alpha,
    alpha-composited over the floor plan and written to disk.
    """

    def __init__(self, image, root, start_point, zoom):
        # work on an RGBA copy so the caller's image is left untouched
        self.image = image.copy().convert('RGBA')
        size = self.image.size
        matrix, edge_x, edge_y, start_point, map_max = root.generate_heatmap(60, 40, size[0], size[1], zoom, start_point)
        # fully transparent overlay the heat cells are painted onto
        self.heat_overlay = Image.new('RGBA', self.image.size, (255, 255, 255, 0))
        self.draw = ImageDraw.Draw(self.heat_overlay)
        for i in range(len(matrix)):
            for j in range(len(matrix[i])):
                # scale the cell's sample count to 0..255 relative to the
                # busiest cell (map_max is at least 1.0)
                val = int((matrix[i][j] / map_max) * 255)
                x1 = edge_x * j
                x2 = x1 + edge_x
                y1 = edge_y * i
                y2 = y1 + edge_y
                self.draw.rectangle([x1, y1, x2, y2], fill=(val, 255 - val, 255 - val, 128))
        self.comp_img = Image.alpha_composite(self.image, self.heat_overlay)
        self.save_heatmap()

    def save_heatmap(self):
        # NOTE(review): hard-coded output filename in the current working
        # directory -- looks like a leftover debug name; confirm intended.
        self.comp_img.save("vad helvete.png")
class BeaconPath(object):
    """Recorded path of one beacon: its samples plus per-step displacement
    vectors.

    Each row of ``mat`` is ``[x, y, z, ..., timestamp]`` with the position at
    indices 0/1 and the timestamp at index 4.
    """

    def __init__(self, id, mat):
        self.start_point = (mat[0][0], mat[0][1])
        self.id = id
        self.color = "red"
        self.points = mat[:]  # unique timestamps
        # displacement between consecutive samples, tagged with the
        # timestamp of the later sample: [dx, dy, timestamp]
        deltas = [
            [nxt[0] - cur[0], nxt[1] - cur[1], nxt[4]]
            for cur, nxt in zip(mat, mat[1:])
        ]
        self.vectors = np.asarray(deltas)

    def captured_time(self):
        """Return (first timestamp, last timestamp) of the recording."""
        first, last = self.points[0], self.points[-1]
        return first[4], last[4]
class ReplayFrame(Frame):
    """Frame that replays a recorded tracking session loaded from a zip file.

    On construction it asks the user for an exported archive, builds a
    BeaconPath per beacon and wires up the canvas, status box, per-beacon
    checkboxes, origin-rotation controls and the time slider.
    """

    def __init__(self, root):
        # Init objects
        Frame.__init__(self, root)
        # beacon id -> IntVar holding the checkbox state (1 = draw path)
        self.beacons_active = {}
        # beacon id -> BeaconPath built from the imported samples
        self.beacon_paths = {}
        zip_file = filedialog.askopenfilename(initialdir="./", title='Choose a file',
                                              filetypes=(("zip files", "*.zip"), ("all files", "*.*")))
        data = exp.import_file(zip_file)
        beacons = data["beacons"]
        self.canvas_widget = CanvasWidget(self, 0, 1, data["img"])
        self.start_date = data["date"]
        # capture start time as [hours, minutes, seconds] strings
        self.time = self.start_date.split(" ")[1].split(".")
        # generate BeaconPaths
        for key in beacons:
            self.beacon_paths[key] = BeaconPath(key, beacons[key])
        # Status box in the corner
        self.status_box = Frame(self)
        self.status_box.grid(row=0, column=0, sticky=W) # adding a status box to corner of the screen
        self.status_text = StringVar()
        self.zoom_text = StringVar()
        self.status_label = Label(self.status_box, textvariable=self.status_text, fg="#228C22", bg="#a1a1a1", width=16,
                                  height=2)
        self.zoom_label = Label(self.status_box, textvariable=self.zoom_text, fg="black", bg="#a1a1a1", width=16,
                                height=2)
        self.status_label.grid(row=0, column=0, sticky="NSEW")
        self.zoom_label.grid(row=1, column=0, sticky="NSEW")
        self.zoom_text.set("Zoom: 1.0")
        self.status_text.set("IDLE")
        # Setup the scale: its range covers the earliest to the latest
        # timestamp over all beacon paths
        self.start_time = 999999999
        self.end_time = 0
        for key in self.beacon_paths:
            start, end = self.beacon_paths[key].captured_time()
            if start < self.start_time:
                self.start_time = start
            if end > self.end_time:
                self.end_time = end
        self.scale_container = Frame(self, padx=100)
        self.scale_text = StringVar()
        self.scale_text.set("Current time:" + self.start_date)
        Label(self.scale_container, textvariable=self.scale_text).grid(row=0, column=0, sticky='ns',)
        self.scale = Scale(self.scale_container, from_=self.start_time, showvalue=0, length=400, to=self.end_time, orient=HORIZONTAL,
                           command=self.update_canvas)
        self.scale_container.grid(row=4, column=1, columnspan=3, sticky=W)
        self.scale.grid(row=0, column=1, sticky='ns')
        # Setup Checkboxes for enabling beacons
        j = 0
        self.beacons_container = Frame(self, borderwidth=1)
        for beacon in self.beacon_paths:
            self.beacons_active[beacon] = IntVar()
            Checkbutton(self.beacons_container, text="Hedgehog " + beacon, variable=self.beacons_active[beacon]).grid(
                row=j, sticky=W)
            j += 1
        self.beacons_container.grid(row=1, column=0, sticky=W)
        # Setup buttons for rotating the origin.
        self.controls_container = CanvasControls(self, self.canvas_widget)
        self.controls_container.grid(row=2, column=0, sticky=W)
        # First draw
        self.update_canvas()

    def update_canvas(self, scale_value=0):
        """Redraw the enabled beacon paths up to the slider's current time
        and refresh the clock label.

        The ``scale_value`` parameter (supplied by the Scale callback) is
        ignored; the current value is re-read from the widget instead.
        """
        self.canvas_widget.clear()
        self.canvas_widget.draw_origin()
        scale_value = self.scale.get()
        for b_name in self.beacons_active:
            if self.beacons_active[b_name].get() == 0: # check if beacon is set visible
                continue
            # count how many displacement vectors fall before the slider time
            point_count = 0
            for i in range(self.beacon_paths[b_name].vectors.shape[0] - 1):
                time = self.beacon_paths[b_name].vectors[i][2]
                if time > int(scale_value):
                    point_count = i + 1
                    break
                if i + 1 == self.beacon_paths[b_name].vectors.shape[0] - 1:
                    point_count = i
            vectors = self.beacon_paths[b_name].vectors[:point_count]
            self.canvas_widget.draw_lines(self.beacon_paths[b_name].start_point, vectors)
        # elapsed ms since recording start plus the capture wall-clock time
        diff = scale_value - self.start_time + int(self.time[0])*60*60*1000 + int(self.time[1])*60*1000 + int(self.time[2])*1000
        seconds = math.floor((diff / 1000) % 60)
        minutes = math.floor((diff / (1000 * 60)) % 60)
        hours = math.floor((diff / (1000 * 60 * 60)) % 24)
        # zero-pad single-digit components for display
        hours = "0" + str(hours) if (hours < 10) else hours
        minutes = "0" + str(minutes) if (minutes < 10) else minutes
        seconds = "0" + str(seconds) if (seconds < 10) else seconds
        text = self.start_date.split(" ")[0]+" "+str(hours)+"."+str(minutes)+"."+str(seconds)
        self.scale_text.set("Current time: " + text)

    def generate_heatmap(self, seg_x=10, seg_y=10, width=100, height=100, zoom=1, start_point=None):
        """Bucket all recorded positions of all beacons into a seg_x x seg_y
        grid over a width x height pixel area.

        :param seg_x: number of grid columns
        :param seg_y: number of grid rows
        :param width: canvas/image width in pixels
        :param height: canvas/image height in pixels
        :param zoom: pixels per coordinate unit
        :param start_point: origin offset; defaults to the image center
        :return: [matrix, cell width, cell height, start_point, max cell count]
        """
        edge_x = width / seg_x
        edge_y = height / seg_y
        if start_point is None:
            start_point = [width / 2, height / 2]
        # NOTE(review): `max` shadows the builtin within this method
        max = 1.0
        start_x = width / 2 + start_point[0]
        start_y = height / 2 + start_point[1]
        matrix = []
        for y in range(seg_y):
            matrix.append([])
            for x in range(seg_x):
                appended_val = 0.0
                seg_start_x = x * edge_x
                seg_start_y = y * edge_y
                for b in self.beacon_paths:
                    positions = self.beacon_paths[b].points
                    for p in positions:
                        # map beacon coordinates into image pixel space
                        # (x mirrored, y flipped) -- TODO confirm orientation
                        p_x = start_x - p[0] * zoom
                        p_y = height - start_y + p[1] * zoom
                        if seg_start_x <= p_x <= seg_start_x + edge_x and seg_start_y <= p_y <= seg_start_y + edge_y:
                            appended_val += 1.0
                if appended_val > max:
                    max = appended_val
                matrix[y].append(appended_val)
        return [matrix, edge_x, edge_y, start_point, max]

    def onclose(self):
        """Tear down this frame (no external resources to release)."""
        self.destroy()
class CanvasControls(Frame):
    """Button panel that flips the origin orientation on the canvas.

    Buttons come in opposite pairs (N/S, E/W, L/R); pressing one disables
    it, re-enables its counterpart and forwards the chosen letter to the
    canvas widget.
    """

    def __init__(self, root, canvas_widget):
        Frame.__init__(self, root)
        self.canvas_widget = canvas_widget
        Label(self, text="Origin rotation:").grid(row=0, columnspan=4)
        # (letter, grid row, grid column, starts disabled)
        button_specs = [
            ("N", 1, 0, True),
            ("S", 1, 1, False),
            ("E", 1, 2, True),
            ("W", 1, 3, False),
            ("L", 2, 1, True),
            ("R", 2, 2, False),
        ]
        for letter, grid_row, grid_col, starts_disabled in button_specs:
            # bind `letter` as a default argument to avoid late binding
            btn = Button(self, text=letter,
                         command=lambda l=letter: self.buttonpress(l),
                         disabledforeground="red")
            if starts_disabled:
                btn.configure(state="disabled")
            btn.grid(row=grid_row, column=grid_col)
            # keep the historical attribute names b_n, b_s, b_e, b_w, b_l, b_r
            setattr(self, "b_" + letter.lower(), btn)

    def buttonpress(self, letter):
        """Apply the rotation change, then swap the pressed button and its
        opposite between disabled and normal state."""
        self.canvas_widget.set_origin_rotation(letter)
        opposites = {"N": "S", "S": "N", "E": "W", "W": "E", "L": "R", "R": "L"}
        partner = opposites.get(letter)
        if partner is None:
            return
        getattr(self, "b_" + letter.lower())["state"] = DISABLED
        getattr(self, "b_" + partner.lower())["state"] = NORMAL
class CanvasWidget:
    """Canvas hosting the floor plan, the origin marker, beacon paths and an
    optional heatmap overlay; handles pan, zoom, calibration and origin
    placement via mouse events.

    ``root`` is expected to expose ``update_canvas()``, ``status_text`` and
    ``zoom_text`` (as ReplayFrame does).
    """

    def __init__(self, root, row=0, column=0, img=None, heatmapped=False):
        self.root = root
        self.canvas_container = Frame(root)
        self.canvas_container.grid(row=row, column=column, columnspan=3, rowspan=3, sticky='nsew')
        self.canvas = Canvas(self.canvas_container, width=800, height=600, bg="#8c8c8c")
        self.canvas.grid(row=0, column=0, columnspan=3, rowspan=3, sticky='nsew')
        self.heatmapper = None
        # hedgehog id -> random display color
        self.drawn_hedgehogs = {}
        # reference points for scaling
        self.zero = self.canvas.create_text(0, 0, anchor='center', text='0')
        # current origin position in canvas coordinates
        self.cs_x = 0
        self.cs_y = 0
        self.origin_id = self.canvas.create_oval(self.cs_x - 5, self.cs_y - 5, self.cs_x + 5, self.cs_y + 5,
                                                 fill="yellow")
        # mouse-events related
        self.PLACING_BEACON = False
        self.PANNING = False
        self.CALIBRATING = False
        self.calibration_start = None
        self.calibration_line = None
        # [N/S, E/W, L/R] axis orientation, toggled by CanvasControls
        self.origin_rotation = ["N", "E", "L"]
        # pixels per coordinate unit, set by the calibration drag
        self.zoom = 10
        # parsing floor-plan
        if img is None:
            self.floor_plan = ImageTk.PhotoImage(file='test_image.png')
            self.floor_plan_image = Image.open('test_image.png')
        else:
            self.floor_plan_image = Image.open(img)
            self.floor_plan = ImageTk.PhotoImage(self.floor_plan_image)
        self.floor_plan_scale = 1.0
        self.draw_floor_plan()
        # bind mouse-events
        self.canvas.bind('<Button-1>', self.handle_mouse_click)
        self.canvas.bind('<MouseWheel>', self.handle_mouse_wheel)
        self.canvas.bind('<B1-Motion>', self.handle_mouse_move)
        self.canvas.bind('<ButtonRelease-1>', self.handle_mouse_raise)
        # cleared elements every re-draw
        self.clear_ids = []
        self.heatmap_img = None

    def refresh(self):
        """Ask the owning frame to redraw everything."""
        self.root.update_canvas()

    def clear(self):
        """Delete all transient canvas items from the previous draw."""
        for i in self.clear_ids:
            self.canvas.delete(i)
        self.clear_ids = []

    def get_zero_reference(self):
        """Return the canvas coordinates of the invisible '0' anchor item."""
        return self.canvas.coords(self.zero)

    def set_origin_rotation(self, val):
        """Update one axis of the origin rotation; invalid values are ignored."""
        if val in ("N", "S"):
            self.origin_rotation[0] = val
        elif val in ("E", "W"):
            self.origin_rotation[1] = val
        elif val in ("L", "R"):
            self.origin_rotation[2] = val

    def handle_mouse_wheel(self, event):
        """Zoom in/out by 0.8x steps centered on the mouse position."""
        x = self.canvas.canvasx(event.x)
        y = self.canvas.canvasy(event.y)
        scale = 1.0
        if event.delta == -120: # scroll down
            scale *= 0.8
            self.floor_plan_scale *= 0.8
        if event.delta == 120: # scroll up
            scale /= 0.8
            self.floor_plan_scale /= 0.8
        self.root.zoom_text.set("Zoom: " + str(round(self.floor_plan_scale, 3)))
        self.canvas.scale('all', x, y, scale, scale)
        self.rescale_image()
        self.refresh()

    def handle_mouse_click(self, event):
        """Dispatch a left click to origin placement, calibration start or
        panning, depending on the current mode."""
        if self.PLACING_BEACON:
            self.root.status_text.set("IDLE")
            self.place_beacon(event)
            self.PLACING_BEACON = False
        elif self.CALIBRATING:
            if self.calibration_start is None:
                x = self.canvas.canvasx(event.x)
                y = self.canvas.canvasy(event.y)
                self.calibration_start = [x, y]
        else:
            self.canvas.scan_mark(event.x, event.y)
            self.PANNING = True

    def handle_mouse_move(self, event):
        """Continue a pan drag, or rubber-band the calibration line."""
        if self.PANNING:
            self.canvas.scan_dragto(event.x, event.y, gain=1)
            self.canvas.scan_mark(event.x, event.y)
        if self.CALIBRATING and self.calibration_start is not None:
            if self.calibration_line is not None:
                self.canvas.delete(self.calibration_line)
            x = self.canvas.canvasx(event.x)
            y = self.canvas.canvasy(event.y)
            self.calibration_line = self.canvas.create_line(self.calibration_start[0], self.calibration_start[1], x, y,
                                                            width=3, fill="orange")

    def calibrate(self):
        """Enter calibration mode: the next click-drag defines the zoom scale."""
        self.CALIBRATING = True
        self.root.status_text.set("CALIBRATING...")

    def handle_mouse_raise(self, event):
        """Finish a pan drag or derive the zoom from the calibration line."""
        if self.PANNING:
            self.canvas.scan_dragto(event.x, event.y, gain=1)
            self.PANNING = False
        if self.CALIBRATING:
            self.CALIBRATING = False
            self.root.status_text.set("IDLE")
            x1 = self.calibration_start[0]
            y1 = self.calibration_start[1]
            x2 = self.canvas.canvasx(event.x)
            y2 = self.canvas.canvasy(event.y)
            # zoom = drawn line length in pixels
            self.zoom = int(math.sqrt(math.pow((x2 - x1), 2) + math.pow((y2 - y1), 2)))
            self.canvas.delete(self.calibration_line)
            self.refresh()

    def paint_heatmap(self):
        """Generate the heatmap overlay and place it on the canvas."""
        self.clear()
        self.heatmapper = Heatmap(self.floor_plan_image, self.root, [self.cs_x, self.cs_y], self.zoom)
        print(self.heatmapper.comp_img)
        self.heatmap_img = ImageTk.PhotoImage(self.heatmapper.comp_img)
        mid = self.canvas.create_image(0, 0, image=self.heatmap_img)
        self.canvas.lift(mid)

    def place_beacon(self, event):
        """Move the yellow origin marker to the clicked canvas position."""
        self.cs_x = self.canvas.canvasx(event.x)
        self.cs_y = self.canvas.canvasy(event.y)
        self.canvas.coords(self.origin_id, self.cs_x - 5, self.cs_y - 5, self.cs_x + 5, self.cs_y + 5)

    def add_beacon(self):
        """Enter origin-placement mode: the next click moves the origin."""
        self.PLACING_BEACON = True
        self.root.status_text.set("PLACING ORIGIN")

    def add_floor_plan(self):
        """Let the user pick a new floor-plan image and reset the scale."""
        img_file = filedialog.askopenfilename(initialdir="./", title='Choose a file',
                                              filetypes=(("image files", "*.jpg;*.png"), ("all files", "*.*")))
        if img_file and os.path.exists(img_file) and os.path.isfile(img_file):
            self.floor_plan = ImageTk.PhotoImage(file=img_file)
            self.floor_plan_image = Image.open(img_file)
            self.floor_plan_scale = 1.0
            self.refresh()

    def rescale_image(self):
        """Resize floor plan (and heatmap, if any) to the current zoom factor."""
        width, height = self.floor_plan_image.size
        new_size = int(self.floor_plan_scale * width), int(self.floor_plan_scale * height)
        self.floor_plan = ImageTk.PhotoImage(self.floor_plan_image.resize(new_size))
        if self.heatmapper is not None:
            self.heatmap_img = ImageTk.PhotoImage(self.heatmapper.comp_img.resize(new_size))
        self.draw_floor_plan()

    def draw_floor_plan(self):
        """Draw the floor plan at the zero anchor, keeping it below paths and
        the heatmap above them."""
        img_id = self.canvas.create_image(self.get_zero_reference(), image=self.floor_plan)
        if self.heatmapper is not None:
            hid = self.canvas.create_image(self.get_zero_reference(), image=self.heatmap_img)
            self.canvas.lift(hid)
        self.canvas.lower(img_id)

    def draw_origin(self):
        """Re-read the origin marker's canvas position (it may have been
        panned/zoomed) and re-center the marker on it."""
        og = self.canvas.coords(self.origin_id)
        self.cs_x = (og[0] + og[2]) / 2
        self.cs_y = (og[1] + og[3]) / 2
        self.canvas.coords(self.origin_id, self.cs_x - 5, self.cs_y - 5, self.cs_x + 5, self.cs_y + 5)

    def draw_lines(self, start, vectors):
        """Draw a polyline of displacement ``vectors`` beginning at ``start``
        (relative to the origin marker), with a dot at every sample."""
        r = 3
        x1 = self.cs_x + float(start[0])
        y1 = self.cs_y + float(start[1])
        i = self.canvas.create_oval(x1 - r, y1 - r, x1 + r, y1 + r, fill="red")
        self.clear_ids.append(i)
        for vector in vectors:
            # scale coordinate units into screen pixels
            vector = np.multiply(vector, self.zoom * self.floor_plan_scale)
            # x2 = self.calc_x(x1, vector[0])
            # y2 = self.calc_y(y1, vector[1])
            x2, y2 = self.calc_xy(x1, y1, vector)
            i = self.canvas.create_line(x1, y1, x2, y2, fill="red", width=2)
            j = self.canvas.create_oval(x2 - r, y2 - r, x2 + r, y2 + r, fill="red")
            self.clear_ids.append(i)
            self.clear_ids.append(j)
            x1 = x2
            y1 = y2

    def draw_hedgehog(self, arr):
        """Draw a live hedgehog ``[id, x, y, ...]`` as a colored square;
        each id keeps a stable random color."""
        self.clear()
        r = 5
        id = arr[0]
        x = arr[1] * 10
        y = arr[2] * 10
        if id not in self.drawn_hedgehogs:
            rand = lambda: random.randint(0, 255)
            self.drawn_hedgehogs[id] = '#{:02x}{:02x}{:02x}'.format(rand(), rand(), rand())
        c_id = self.canvas.create_rectangle(x - r, y - r, x + r, y + r, fill=self.drawn_hedgehogs[id])
        self.clear_ids.append(c_id)

    def calc_xy(self, x, y, vector):
        """Apply the current origin rotation to a displacement vector and
        return the resulting [x, y] end point."""
        pair = [0, 0]
        # "R" swaps the axes
        if self.origin_rotation[2] == "R":
            c = vector[0]
            vector[0] = vector[1]
            vector[1] = c
        if self.origin_rotation[1] == "E":
            pair[0] = x + float(vector[0])
        else:
            pair[0] = x - float(vector[0])
        if self.origin_rotation[0] == "N":
            pair[1] = y - float(vector[1])
        else:
            pair[1] = y + float(vector[1])
        return pair

    def menubar(self, root):
        """Install the Edit/heatmap/live-tracking menubar on ``root``."""
        menu = Menu(root)
        root.config(menu=menu)
        editMenu = Menu(menu)
        editMenu.add_command(label="Add room plan", command=self.add_floor_plan)
        editMenu.add_command(label="Place origin beacon", command=self.add_beacon)
        editMenu.add_command(label="Calibrate", command=self.calibrate)
        menu.add_cascade(label="Edit", menu=editMenu)
        menu.add_command(label="Generate heatmap", command=self.paint_heatmap)
        menu.add_command(label="LIVE TRACKING", command=lambda: self.root.switch_frame(TrackingFrame))
class TrackingFrame(Frame):
    """Frame for live tracking via a Marvelmind modem on a serial port.

    Polls the hedge once per second from a background thread and draws every
    reported hedgehog on the canvas.
    """

    def __init__(self, root):
        Frame.__init__(self, root)
        self.root = root
        self.canvas = CanvasWidget(self)
        self.controls_container = CanvasControls(self, self.canvas)
        self.controls_container.grid(row=2, column=0, sticky=W)
        # NOTE(review): the Windows device path is presumably meant to be
        # r"\\.\COM3"; the literal "\\.\COM3" collapses to "\.\COM3" -- confirm.
        self.hedge = MarvelmindHedge(tty="\\.\COM3", adr=None, debug=False) # create MarvelmindHedge thread
        self.start_comms()
        # raw position samples collected during this session
        self.ram_log = []

    @staticmethod
    def valid_coords(coords):
        """Return False for the all-zero position the hedge reports when no
        modem is connected."""
        if coords[0] == 0 and coords[1] == 0 and coords[2] == 0 and coords[3] == 0:
            return False
        return True

    def start_comms(self, i=1):
        """Start the hedge and poll it every second from a daemon-style
        scheduler thread.

        :param i: sleep interval (seconds) between scheduler passes
        :return: threading.Event that stops the scheduler thread when set
        """
        self.hedge.start() # start marvelmind thread
        schedule.every(1).seconds.do(self.communicate)
        cease = threading.Event()
        class ScheduleThread(threading.Thread):
            @classmethod
            def run(cls):
                while not cease.is_set():
                    schedule.run_pending()
                    time.sleep(i)
        continuous_thread = ScheduleThread()
        continuous_thread.start()
        return cease

    def communicate(self):
        """Scheduler callback: read the current position and draw it if valid."""
        coords = self.hedge.position()
        if self.valid_coords(coords):
            self.parse_hedgehogs(list(self.hedge.valuesUltrasoundPosition))
        else:
            print("Modem not connected!")

    def parse_hedgehogs(self, raw_data):
        """Log and draw the first (most recent) sample per hedgehog id."""
        parsed_hedgehogs = []
        for arr in raw_data:
            id = arr[0]
            if id not in parsed_hedgehogs:
                parsed_hedgehogs.append(id)
                self.ram_log.append(arr)
                self.canvas.draw_hedgehog(arr)

    def onclose(self):
        """Stop the hedge thread before tearing the frame down."""
        self.hedge.stop()
        self.destroy()
# Launch the GUI only when executed as a script, not when this module is
# imported (e.g. by tools or tests).
if __name__ == "__main__":
    MainApp().mainloop()
| {"/tracker.py": ["/exporter.py"]} |
47,346 | towrig/marvelcoop | refs/heads/master | /exporter.py | from zipfile import ZipFile
from datetime import date
import io
from PIL import Image, ImageTk
def import_file(filename):
    """Load a recorded tracking session from a zip archive.

    Expected archive layout:
      * ``meta.txt``      -- line 1: capture date/time, line 2: floor-plan file name
      * the floor-plan image file referenced by ``meta.txt``
      * ``positions.txt`` -- one ``[id, x, y, z, timestamp]`` entry per line

    :param filename: path of the zip archive to read
    :return: dict with keys ``date`` (str), ``img`` (io.BytesIO with the raw
        image bytes) and ``beacons`` (beacon id -> list of
        ``[x, y, z, timestamp]`` samples with consecutive duplicates removed)
    """
    d = {}
    # `archive` instead of `zip` so the builtin is not shadowed
    with ZipFile(filename, 'r') as archive:
        with io.TextIOWrapper(archive.open("meta.txt"), encoding="utf-8") as f:
            # BUG FIX: readline() keeps the trailing newline; the raw value
            # leaked a newline into the displayed date and broke the zip
            # lookup of the floor-plan entry whenever meta.txt ended with one.
            d["date"] = f.readline().rstrip("\n")
            floor_plan_name = f.readline().rstrip("\n")
        d["img"] = io.BytesIO(archive.read(floor_plan_name))
        with io.TextIOWrapper(archive.open("positions.txt"), encoding="utf-8") as f:
            beacons = {}
            prev_parts = None
            for line in f.readlines():
                # "[id, x, y, z, ts]" -> ["id", "x", "y", "z", "ts"]
                parts = line.strip().replace(' ', '')[1:-1].split(',')
                # middle fields are floats, the last one is the int timestamp
                for i in range(len(parts[1:-1])):
                    parts[i + 1] = float(parts[i + 1])
                parts[-1] = int(parts[-1])
                # skip consecutive duplicate samples
                if parts == prev_parts:
                    continue
                if parts[0] not in beacons.keys():
                    beacons[parts[0]] = []
                beacons[parts[0]].append(parts[1:])
                prev_parts = parts[:]
            d["beacons"] = beacons
    return d
def export_file():
    # NOTE(review): `file` is not defined anywhere in this module, so calling
    # this function raises NameError -- it looks unfinished; it presumably
    # should take the path(s) to archive as a parameter. TODO confirm.
    with ZipFile('exported_'+str(date.today())+'.zip', 'w') as zip:
        zip.write(file) | {"/tracker.py": ["/exporter.py"]} |
47,349 | Dhakshan/webm_converter | refs/heads/main | /controller.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
try:
import Tkinter as Tk
except ModuleNotFoundError:
import tkinter as Tk
from pathlib import Path
from consts import *
from view import View
from model import Model
class Controller:
    """Wires the Model and View together and owns the Tk root window."""

    def __init__(self):
        user_dir = Path.home()
        # per-user data file, e.g. ~/.webm_converter
        data_path = Path.joinpath(user_dir, '.' + FILE_NAME)
        self.model = Model(data_path)
        self.root = Tk.Tk()
        self.root.iconbitmap('./images/logo_120x120.ico')
        self.view = View(self.model, self)

    def run(self):
        """Show the main window and enter the Tk event loop (blocks)."""
        self.root.title(NAME)
        self.root.deiconify()
        self.root.mainloop()

    def quit(self):
        """Stop the Tk event loop."""
        self.root.quit()
47,350 | Dhakshan/webm_converter | refs/heads/main | /consts.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
import os
from pathlib import Path
# Application version string.
VERSION = "v0.3"
# Human-readable application name (used as the window title).
NAME = "WebM Converter"
# Base name for the per-user data file (stored as ~/.webm_converter).
FILE_NAME = "webm_converter"
# Absolute path of the images directory that sits next to this file.
IMAGES_DIR = str(os.path.dirname(os.path.realpath(__file__))) + "/images"
47,351 | Dhakshan/webm_converter | refs/heads/main | /main_window.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
try:
import Tkinter as Tk
from tkinter import ttk
from Tkinter import messagebox
from Tkinter.filedialog import askopenfilename, askdirectory
import Tkinter.scrolledtext as st
except ModuleNotFoundError:
import tkinter as Tk
from tkinter import ttk
from ttkthemes import ThemedStyle
from tkinter import messagebox
from tkinter.filedialog import askopenfilename, askdirectory
import tkinter.scrolledtext as st
from tkinter.constants import DISABLED
from tkinter.font import NORMAL
from consts import *
from about_dialog import AboutDialog
from included_software_dialog import IncludedSoftwareDialog
import copy
class MainWindow(ttk.Frame):
    """Main application window: input/output selection, FFmpeg log view and
    the convert/quit controls.

    Registers itself as an observer of the model and receives conversion
    status callbacks (conversion_started/finished/failed).
    """

    def __init__(self, model, root):
        self.model = model
        self.model.register_observer(self)
        self.root = root
        # load and activate the Azure ttk theme
        root.tk.call('source', 'azure.tcl')
        ttk.Style().theme_use('azure')
        #style = ThemedStyle(root)
        #style.set_theme('black')
        # flags tracking whether the user picked the paths yet
        self.input_file_set = False
        self.output_directory_set = False
        self.root.minsize(400, 300)
        ttk.Frame.__init__(self, self.root)
        self.pack(fill="both", expand=True)
        self.menubar = Tk.Menu(self.master)
        self.master.config(menu=self.menubar)
        # file menu
        file_menu = Tk.Menu(self.menubar)
        file_menu.add_command(label="About", command=self.on_about)
        file_menu.add_command(label="Included Software", command=self.on_included_software)
        file_menu.add_separator()
        file_menu.add_command(label="Quit", command=self.root.quit)
        self.menubar.add_cascade(label="File", menu=file_menu)
        # only the log row (1) grows with the window
        self.rowconfigure(0, weight=0)
        self.rowconfigure(1, weight=1)
        self.rowconfigure(2, weight=0)
        self.columnconfigure(0, weight=1)
        input_output_file_frame = ttk.Frame(self)
        input_output_file_frame.grid(sticky=Tk.N+Tk.E+Tk.S+Tk.W, row=0, column=0, columnspan=3, padx=5, pady=5)
        input_output_file_frame.columnconfigure(1, weight=1)
        # input file label
        input_file_path_entry_label = ttk.Label(input_output_file_frame, text="Input File:")
        input_file_path_entry_label.grid(sticky=Tk.W, row=0, column=0, padx=10, pady=10)
        self.input_file_path_var = Tk.StringVar()
        self.input_file_path_var.set("No File Selected")
        # NOTE: .grid() returns None, so these chained names hold None; the
        # widgets themselves are still created and placed
        input_file_path_entry = ttk.Entry(input_output_file_frame, textvariable=self.input_file_path_var, state='readonly', justify=Tk.LEFT).grid(sticky=Tk.E+Tk.W, row=0, column=1, padx=10)
        # input file selection button
        input_file_selection_button = ttk.Button(input_output_file_frame, text="Open Input File", command=self.on_select_input_file).grid(sticky=Tk.W+Tk.E, row=0, column=2, padx=10, pady=10)
        # input file label
        output_directory_path_entry_label = ttk.Label(input_output_file_frame, text="Output Directory:")
        output_directory_path_entry_label.grid(sticky=Tk.W, row=1, column=0, padx=10)
        self.output_directory_path_var = Tk.StringVar()
        self.output_directory_path_var.set("No Directory Selected")
        output_directory_path_entry = ttk.Entry(input_output_file_frame, textvariable=self.output_directory_path_var, state='readonly', justify=Tk.LEFT).grid(sticky=Tk.E+Tk.W, row=1, column=1, padx=10)
        # input file selection button
        output_direcotry_selection_button = ttk.Button(input_output_file_frame, text="Select", command=self.on_select_output_directory).grid(sticky=Tk.W+Tk.E, row=1, column=2, padx=10)
        # ffmpeg log
        ffmpeg_log_frame = ttk.LabelFrame(self, text="FFmpeg Log")
        ffmpeg_log_frame.grid(sticky=Tk.N+Tk.E+Tk.S+Tk.W, row=1, column=0, columnspan=3, padx=5, pady=5)
        ffmpeg_log_frame.rowconfigure(0, weight=1)
        ffmpeg_log_frame.columnconfigure(0, weight=1)
        self.ffmpeg_log_text = st.ScrolledText(ffmpeg_log_frame)
        self.ffmpeg_log_text.grid(sticky=Tk.N+Tk.E+Tk.S+Tk.W, row=0, column=0, padx=5, pady=5)
        # read-only; re-enabled only while programmatically appending
        self.ffmpeg_log_text.config(state=DISABLED)
        # quit and convert button
        quit_and_convert_frame = ttk.Frame(self)
        quit_and_convert_frame.grid(sticky=Tk.W+Tk.E, row=2, column=0, padx=10, pady=10)
        quit_and_convert_frame.columnconfigure(0, weight=1)
        self.conversion_status_label_var = Tk.StringVar()
        self.conversion_status_label_var.set("")
        conversion_status_label = ttk.Label(quit_and_convert_frame, textvariable=self.conversion_status_label_var).grid(sticky=Tk.W, row=0, column=0)
        quit_button = ttk.Button(quit_and_convert_frame, text="Quit", command=self.root.quit).grid(sticky=Tk.E, row=0, column=1, padx=5)
        convert_button = ttk.Button(quit_and_convert_frame, text="Convert", command=self.on_convert).grid(sticky=Tk.E, row=0, column=2)

    def on_select_input_file(self):
        """Ask the user for an input video file and remember the choice."""
        user_dir = Path.home()
        input_file_path_string = askopenfilename(title='Open a video file', initialdir=user_dir)
        if input_file_path_string:
            self.input_file_set = True
            self.input_file_path_var.set(input_file_path_string)

    def on_select_output_directory(self):
        """Ask the user for an output directory and remember the choice."""
        user_dir = Path.home()
        output_directory_path_string = askdirectory(title='Select output directory', initialdir=user_dir)
        if output_directory_path_string:
            self.output_directory_set = True
            self.output_directory_path_var.set(output_directory_path_string)

    def on_convert(self):
        """Validate the selected paths, confirm overwrites, then start the
        conversion through the model."""
        # check if conversion is running
        if not self.model.conversion_finished:
            messagebox.showerror("Error", "Conversion is running at the moment.")
            return
        # check if input file is set
        if not self.input_file_set:
            messagebox.showerror("Error", "No input file selected.")
            return
        # check if output directory is set
        if not self.output_directory_set:
            messagebox.showerror("Error", "No output directory selected.")
            return
        # check input file is readable
        if not os.access(self.input_file_path_var.get(), os.R_OK):
            messagebox.showerror("Error", "Input file can not be read.")
            return
        # check if output directory is writable
        if not os.access(self.output_directory_path_var.get(), os.W_OK):
            messagebox.showerror("Error", "Output directory can not be written to.")
            return
        # check if output file already exists
        output_file_path_string = self.model.get_output_file_path(self.input_file_path_var.get(), self.output_directory_path_var.get())
        if os.path.isfile(output_file_path_string):
            overwrite = messagebox.askyesno("Warning", "Output file already exists.\nDo you want to overwrite it?")
            if overwrite:
                os.remove(output_file_path_string)
            if not overwrite:
                return
        self.clear_ffmpeg_log()
        self.model.convert_to_webm(self.input_file_path_var.get(), output_file_path_string, self.append_ffmpeg_log, self)

    def clear_ffmpeg_log(self):
        """Empty the FFmpeg log widget (temporarily re-enabling it)."""
        self.ffmpeg_log_text.config(state=NORMAL)
        self.ffmpeg_log_text.delete('1.0', Tk.END)
        self.ffmpeg_log_text.config(state=DISABLED)

    def append_ffmpeg_log(self, string):
        """Append a chunk of FFmpeg output and keep the view scrolled down."""
        self.ffmpeg_log_text.config(state=NORMAL)
        self.ffmpeg_log_text.insert(Tk.END, string)
        self.ffmpeg_log_text.see(Tk.END)
        self.ffmpeg_log_text.config(state=DISABLED)

    def conversion_started(self):
        """Model callback: a conversion has begun."""
        self.conversion_status_label_var.set("Conversion in progress ...")

    def conversion_finished(self):
        """Model callback: the conversion completed successfully."""
        self.conversion_status_label_var.set("Conversion finished")
        messagebox.showinfo("Info", "Conversion finished successfully.")

    def conversion_failed(self):
        """Model callback: the conversion failed."""
        self.conversion_status_label_var.set("Conversion finished")
        messagebox.showerror("Error", "Conversion failed.")

    def notify(self):
        """Observer hook required by the model; no-op for this window."""
        pass

    def on_about(self):
        """Open the modal About dialog."""
        about_dialog = AboutDialog(self.root)
        # make window modal
        about_dialog.wait_visibility()
        about_dialog.focus_set()
        about_dialog.grab_set()
        about_dialog.transient(self.root)

    def on_included_software(self):
        """Open the modal Included Software dialog."""
        included_software_dialog = IncludedSoftwareDialog(self.root)
        # make window modal
        included_software_dialog.wait_visibility()
        included_software_dialog.focus_set()
        included_software_dialog.grab_set()
        included_software_dialog.transient(self.root)
| {"/controller.py": ["/consts.py", "/view.py", "/model.py"], "/main_window.py": ["/consts.py", "/about_dialog.py", "/included_software_dialog.py"], "/webm_converter.py": ["/controller.py"], "/view.py": ["/main_window.py"], "/model.py": ["/consts.py"], "/about_dialog.py": ["/consts.py"]} |
47,352 | Dhakshan/webm_converter | refs/heads/main | /included_software_dialog.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
try:
import Tkinter as Tk
from Tkinter import ttk
except ModuleNotFoundError:
import tkinter as Tk
from tkinter import ttk
class IncludedSoftwareDialog(Tk.Toplevel):
    """Small fixed-size dialog listing bundled third-party software and
    licenses."""

    def __init__(self, master):
        Tk.Toplevel.__init__(self, master)
        self.minsize(190, 100)
        self.resizable(False, False)
        self.title("Included Software")
        wrapper_frame = ttk.Frame(self)
        # Version
        python_label = ttk.Label(wrapper_frame, text="Python 3.9.6 - PSF License")
        python_label.grid(row=0, column=0, columnspan=2)
        ffmpeg_label = ttk.Label(wrapper_frame, text="FFmpeg n4.4 - GPLv3 License")
        ffmpeg_label.grid(row=1, column=0, columnspan=2)
        pillow_label = ttk.Label(wrapper_frame, text="Pillow 8.3.1 - HPND License")
        pillow_label.grid(row=2, column=0, columnspan=2)
        azure_ttk_theme = ttk.Label(wrapper_frame, text="Azure TTK Theme 1.4.1 - LGPL v2.1")
        azure_ttk_theme.grid(row=3, column=0, columnspan=2)
        # Close button
        close_button = ttk.Button(wrapper_frame, text="Close", command=self.on_close).grid(row=4, column=0, columnspan=2, pady=10)
        wrapper_frame.grid(row=0, column=0, padx=10)
        #self.update()
        #print(self.winfo_height())
        #print(self.winfo_width())

    def on_close(self):
        """Dismiss the dialog."""
        self.destroy()
| {"/controller.py": ["/consts.py", "/view.py", "/model.py"], "/main_window.py": ["/consts.py", "/about_dialog.py", "/included_software_dialog.py"], "/webm_converter.py": ["/controller.py"], "/view.py": ["/main_window.py"], "/model.py": ["/consts.py"], "/about_dialog.py": ["/consts.py"]} |
47,353 | Dhakshan/webm_converter | refs/heads/main | /webm_converter.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
from controller import Controller
if __name__ == '__main__':
    # Entry point: build the MVC controller and hand control to its main loop.
    c = Controller()
    c.run()
| {"/controller.py": ["/consts.py", "/view.py", "/model.py"], "/main_window.py": ["/consts.py", "/about_dialog.py", "/included_software_dialog.py"], "/webm_converter.py": ["/controller.py"], "/view.py": ["/main_window.py"], "/model.py": ["/consts.py"], "/about_dialog.py": ["/consts.py"]} |
47,354 | Dhakshan/webm_converter | refs/heads/main | /view.py | """
Author: Konstantin (k0nze) Lübeck
Copyright (c) 2020 Konstantin (k0nze) Lübeck
"""
try:
import Tkinter as Tk
from Tkinter import ttk
except ModuleNotFoundError:
import tkinter as Tk
from tkinter import ttk
from main_window import MainWindow
class View:
    """Top-level view of the MVC triad: owns the main application window."""

    def __init__(self, model, controller):
        """Wire the view to its model/controller and create the main window."""
        self.model = model
        self.controller = controller
        self.main_window = MainWindow(model, controller.root)
| {"/controller.py": ["/consts.py", "/view.py", "/model.py"], "/main_window.py": ["/consts.py", "/about_dialog.py", "/included_software_dialog.py"], "/webm_converter.py": ["/controller.py"], "/view.py": ["/main_window.py"], "/model.py": ["/consts.py"], "/about_dialog.py": ["/consts.py"]} |
47,355 | Dhakshan/webm_converter | refs/heads/main | /model.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
import json
import os
import subprocess
import threading
import time
import time
import shlex
from consts import *
from pathlib import Path
from subprocess import check_output
class JsonFileCreateException(Exception):
    """Raised when the JSON model file could not be created."""
    pass
class JsonFileOpenException(Exception):
    """Raised when the JSON model file could not be opened."""
    pass
class JsonFileWriteException(Exception):
    """Raised when the JSON model file could not be written."""
    pass
class DirCreationException(Exception):
    """Raised when the application data directory could not be created."""
    pass
class CopyFileException(Exception):
    """Raised when a file could not be copied."""
    pass
class Model():
    """Application state plus the ffmpeg conversion backend.

    Persists a small JSON data file under `data_path` and runs ffmpeg in a
    background thread to convert an input video to .webm, notifying a
    status object about progress.
    """

    def __init__(self, data_path):
        """Create (or load) the JSON data file inside `data_path`.

        Raises DirCreationException / JsonFileCreateException on failure,
        chaining the original error as __cause__.
        """
        self.data_path = data_path
        # check if ~/.FILE_NAME exists
        if not self.data_path.is_dir():
            # try to create ~/.FILE_NAME dir
            try:
                os.mkdir(self.data_path)
            except Exception as e:
                # chain the cause so callers can see why mkdir failed
                raise DirCreationException from e
        self.conversion_finished = True
        # check if data.json exists
        self.json_path = Path.joinpath(self.data_path, 'data.json')
        self.data = dict()
        if not self.json_path.is_file():
            # try to create the json model with a minimal template
            try:
                with open(self.json_path.resolve(), 'w') as json_file:
                    self.data = { "template": "template" }
                    json.dump(self.data, json_file, sort_keys=True, indent=4)
            except Exception as e:
                raise JsonFileCreateException from e
        # read existing data file
        else:
            with open(self.json_path.resolve(), 'r') as json_file:
                self.data = json.load(json_file)
        self.observers = []

    def register_observer(self, observer):
        """Register an object exposing notify() to be told about model changes."""
        self.observers.append(observer)

    def __notify_observers(self):
        # Broadcast a change to every registered observer.
        for observer in self.observers:
            observer.notify()

    def __save_json(self):
        # Persist self.data; wraps any I/O error in JsonFileWriteException.
        try:
            with open(self.json_path.resolve(), 'w') as json_file:
                json.dump(self.data, json_file, sort_keys=True, indent=4)
        except Exception as e:
            raise JsonFileWriteException from e

    def get_output_file_path(self, input_file_path_string, output_file_directory_string):
        """Return '<output dir>\\<input stem>.webm' (Windows-style separator)."""
        input_file_name = os.path.basename(input_file_path_string)
        output_file_path_string = output_file_directory_string + "\\" + os.path.splitext(input_file_name)[0] + ".webm"
        return output_file_path_string

    def convert_to_webm(self, input_file_path_string, output_file_path_string, log, status):
        """Kick off an ffmpeg conversion on a background daemon thread.

        log: callable taking one string (per-line ffmpeg output).
        status: object with conversion_started/conversion_finished/
            conversion_failed callbacks.
        """
        self.input_file_path_string = input_file_path_string
        self.output_file_path_string = output_file_path_string
        self.log = log
        self.status = status
        ffmpeg_thread = threading.Thread(target=self.start_ffmpeg_conversion)
        # daemon=True so a running conversion cannot keep the app alive on
        # exit (Thread.setDaemon() is deprecated in favor of the attribute).
        ffmpeg_thread.daemon = True
        ffmpeg_thread.start()

    def start_ffmpeg_conversion(self):
        """Run ffmpeg (blocking) and stream each output line to self.log."""
        self.status.conversion_started()
        # NOTE(review): hard-coded relative Windows path to a bundled
        # ffmpeg.exe -- confirm the working directory when packaged.
        ffmpeg_cmd = ["ffmpeg\\ffmpeg.exe", "-i", self.input_file_path_string, "-c:v", "libvpx-vp9", "-pix_fmt", "yuva420p", "-crf", "15", "-b:v", "2M", self.output_file_path_string]
        process = subprocess.Popen(ffmpeg_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        stdout_iterator = iter(process.stdout.readline, b"")
        for line in stdout_iterator:
            self.log(line.decode('utf-8'))
        self.log("\nDone")
        return_value = process.poll()
        if return_value == 0:
            self.status.conversion_finished()
        else:
            self.status.conversion_failed()
| {"/controller.py": ["/consts.py", "/view.py", "/model.py"], "/main_window.py": ["/consts.py", "/about_dialog.py", "/included_software_dialog.py"], "/webm_converter.py": ["/controller.py"], "/view.py": ["/main_window.py"], "/model.py": ["/consts.py"], "/about_dialog.py": ["/consts.py"]} |
47,356 | Dhakshan/webm_converter | refs/heads/main | /about_dialog.py | """
Author: Konstantin (k0nze) Lübeck
License: BSD 3-Clause License
Copyright (c) 2021 Konstantin (k0nze) Lübeck
"""
try:
import Tkinter as Tk
from Tkinter import ttk
import Tkinter.font as TkFont
except ModuleNotFoundError:
import tkinter as Tk
from tkinter import ttk
import tkinter.font as TkFont
from PIL import ImageTk, Image
from consts import *
import webbrowser
class AboutDialog(Tk.Toplevel):
    """About window: logo, version info, author credit, and social links."""

    def __init__(self, master):
        """Build the dialog attached to `master`."""
        Tk.Toplevel.__init__(self, master)
        self.minsize(200, 400)
        self.resizable(False, False)
        self.title("About " + NAME)

        wrapper_frame = ttk.Frame(self)

        # Logo (keep a reference on the label so Tk doesn't drop the image)
        logo = ImageTk.PhotoImage(Image.open(IMAGES_DIR + "/logo_120x120.png"))
        logo_label = ttk.Label(wrapper_frame)
        logo_label.image = logo
        logo_label.configure(image=logo)
        logo_label.grid(row=0, column=0, columnspan=2, pady=15)

        # Name
        name_font_style = TkFont.Font(family="TkDefaultFont", size=12)
        name_label = ttk.Label(wrapper_frame, text=NAME, font=name_font_style)
        name_label.grid(row=1, column=0, columnspan=2)

        # Version
        version_label = ttk.Label(wrapper_frame, text="Version: " + VERSION)
        version_label.grid(row=2, column=0, columnspan=2)

        # Created by (logo and name both open the author's link page)
        konze_frame = ttk.Frame(wrapper_frame)
        k_logo = ImageTk.PhotoImage(Image.open(IMAGES_DIR + "/k_logo_30x30.png"))
        k_logo_label = ttk.Label(konze_frame)
        k_logo_label.image = k_logo
        k_logo_label.configure(image=k_logo, cursor="hand2")
        k_logo_label.bind("<Button-1>", lambda e: self.open_browser("https://linktree.k0nze.gg"))
        k_logo_label.pack(side=Tk.LEFT)
        konze_name_label = ttk.Label(konze_frame, text="Created by Konstantin (Konze) Lübeck", cursor="hand2")
        konze_name_label.bind("<Button-1>", lambda e: self.open_browser("https://linktree.k0nze.gg"))
        konze_name_label.pack(side=Tk.RIGHT)
        konze_frame.grid(row=4, column=0, columnspan=2, pady=10)

        # Link rows: (caption, URL shown and opened); grid rows 5..9.
        # Deduplicated from five near-identical copy-pasted stanzas.
        links = (
            ("Patreon:", "https://patreon.com/k0nze"),
            ("Twitch:", "https://twitch.tv/k0nze"),
            ("Youtube:", "https://youtube.com/k0nze"),
            ("Twitter:", "https://twitter.com/k0nze_gg"),
            ("TikTok:", "https://tiktok.com/@k0nze.gg"),
        )
        for row, (caption, url) in enumerate(links, start=5):
            self._add_link_row(wrapper_frame, row, caption, url)

        # GitHub row kept inline: the click URL is built from TODO, a name
        # presumably supplied by consts -- evaluation stays lazy (on click),
        # exactly as before, so an undefined TODO only fails when clicked.
        github_label = ttk.Label(wrapper_frame, text="GitHub:")
        github_label.grid(row=10, column=0, columnspan=1, sticky=Tk.W)
        github_link_label = Tk.Label(wrapper_frame, text="https://github.com/k0nze", fg="blue", cursor="hand2")
        github_link_label.grid(row=10, column=1, columnspan=1, sticky=Tk.W)
        github_link_label.bind("<Button-1>", lambda e: self.open_browser("https://github.com/k0nze/" + TODO))

        # BUG FIX: grid() returns None; create the button, then place it.
        close_button = ttk.Button(wrapper_frame, text="Close", command=self.on_close)
        close_button.grid(row=11, column=0, columnspan=2, pady=10)

        wrapper_frame.grid(row=0, column=0, padx=10)

    def _add_link_row(self, parent, row, caption, url):
        # One caption label + clickable blue URL label on a single grid row.
        caption_label = ttk.Label(parent, text=caption)
        caption_label.grid(row=row, column=0, columnspan=1, sticky=Tk.W)
        link_label = Tk.Label(parent, text=url, fg="blue", cursor="hand2")
        link_label.grid(row=row, column=1, columnspan=1, sticky=Tk.W)
        link_label.bind("<Button-1>", lambda e: self.open_browser(url))

    def open_browser(self, url):
        """Open `url` in the user's default web browser."""
        webbrowser.open_new(url)

    def on_close(self):
        """Dismiss the dialog."""
        self.destroy()
| {"/controller.py": ["/consts.py", "/view.py", "/model.py"], "/main_window.py": ["/consts.py", "/about_dialog.py", "/included_software_dialog.py"], "/webm_converter.py": ["/controller.py"], "/view.py": ["/main_window.py"], "/model.py": ["/consts.py"], "/about_dialog.py": ["/consts.py"]} |
47,363 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /tasks.py | from celery import Celery
from smc_tools import SMCSession
app = Celery('tasks')
app.config_from_object('celeryconfig')
@app.task
def add_to_blacklist(source, destination, duration):
    """Celery task: push a source->destination blacklist entry to the SMC.

    duration: lifetime of the entry in seconds. A fresh SMC session is
    opened for each task and logged out afterwards.
    """
    smc_session = SMCSession()
    smc_session.add_to_blacklist(
        source=source,
        destination=destination,
        duration=duration,
    )
    smc_session.logout()
| {"/tasks.py": ["/smc_tools.py"], "/main.py": ["/config.py", "/logs.py", "/server.py"], "/test.py": ["/tasks.py"], "/smc_tools.py": ["/config.py"], "/server.py": ["/config.py", "/tasks.py"]} |
47,364 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /celeryconfig.py | broker_url = 'pyamqp://guest@broker'
task_annotations = {
'tasks.add_to_blacklist': {
'rate_limit': '12/m'
}
} | {"/tasks.py": ["/smc_tools.py"], "/main.py": ["/config.py", "/logs.py", "/server.py"], "/test.py": ["/tasks.py"], "/smc_tools.py": ["/config.py"], "/server.py": ["/config.py", "/tasks.py"]} |
47,365 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /main.py | import logging
from config import Config
from logs import Logger
from server import app
if __name__ == "__main__":
    # Load in config
    config = Config()
    config.load()
    # Create logger (configures the root logger from config values)
    logger = Logger(config)
    # Get required configuration items
    host = config.get('host')
    if not host:
        # An empty host string lets Flask pick its default bind address.
        logging.warning('No host was supplied in the config file.')
        host = ''
    port = config.get('port')
    if not port:
        # BUG FIX: logging.FATAL is an int level constant, not a function;
        # calling it raised TypeError. Log via logging.critical() instead.
        logging.critical('No port was supplied in the config file.')
        exit(1)
    # Run the server
    app.run(host=host, port=port)
47,366 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /test.py | from tasks import add_to_blacklist
# Queue 18 blacklist tasks ('any' -> 1.i.i.i for i in 1..18), same order as
# the original hand-written list, to exercise the Celery rate limit.
for i in range(1, 19):
    octet = str(i)
    add_to_blacklist.delay('any', '1.' + octet + '.' + octet + '.' + octet, 3600)
47,367 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /smc_tools.py | import ipaddress
import logging
from config import Config
from smc import session
from smc.core.engine import Engine
from smc.elements.other import Blacklist
from time import sleep
class SMCSession:
    """Thin wrapper around an smc-python session to a Forcepoint SMC.

    Logs in on construction and caches the engine list for later
    blacklist pushes.
    """

    # NOTE(review): class-level (shared) defaults; __init__ rebinds them on
    # the instance, so sharing is only visible before construction finishes.
    api_key = ''
    api_version = "6.7"
    config = Config()
    endpoint = ''
    engines = []
    port = 0

    def __init__(self):
        """Load config values, authenticate, and fetch all engines."""
        # Load config
        self.config.load()
        # Get required values
        self.endpoint = self.config.get('smc_endpoint')
        self.port = self.config.get('smc_port')
        self.api_key = self.config.get('smc_api_key')
        self.api_version = self.config.get('smc_api_version')
        # Login to SMC for session
        self.login()
        # Get current engines list
        self.engines = list(Engine.objects.all())

    def login(self):
        """Validate endpoint/port/api_key and open the SMC session.

        NOTE(review): logging.fatal() only logs -- it does not abort, so
        invalid values still fall through to session.login().
        """
        # Validate endpoint
        if self.endpoint == '':
            logging.fatal('Endpoint "smc_endpoint" cannot be empty in configuration file.')
        if not self.endpoint.startswith('https://') and not self.endpoint.startswith('http://'):
            logging.warning('Endpoint "smc_endpoint" should start with "https://" or "http://" to be valid. Adding in now.')
            self.endpoint = f'http://{self.endpoint}'
        if self.endpoint.endswith('/'):
            logging.warning('Endpoint "smc_endpoint" should not end with "/". Removing now.')
            # BUG FIX: the old slice [:len-2] dropped TWO characters; strip
            # only the single trailing slash.
            self.endpoint = self.endpoint[:-1]
        # Validate port
        if self.port == 0:
            logging.fatal('Port number must be set. The SMC cannot run on port 0.')
        # Validate api key
        if self.api_key == '':
            logging.fatal("API Key for SMC must be present in the configuration.")
        # Attempt to login for session
        url = f'{self.endpoint}:{self.port}'
        session.login(url=url, api_key=self.api_key, api_version=self.api_version)

    def add_to_blacklist(self, source='any', destination='any', duration=3600):
        """Blacklist source->destination traffic on every non-excluded engine.

        source/destination: 'any' or a bare IPv4 address (converted to /32).
        duration: blacklist entry lifetime in seconds.
        """
        # Transform parameters into the /32 CIDR format the SMC expects
        if source != 'any':
            try:
                ipaddress.ip_address(source)
            except ValueError:
                logging.fatal(f'Source IP address for adding to blacklist must be a valid IP address. "{source}" is not valid.')
            source = f'{source}/32'
        if destination != 'any':
            try:
                ipaddress.ip_address(destination)
            except ValueError:
                logging.fatal(f'Destination IP address for adding to blacklist must be a valid IP address. "{destination}" is not valid.')
            destination = f'{destination}/32'
        # Create a blacklist entry
        blacklist = Blacklist()
        blacklist.add_entry(src=source, dst=destination, duration=duration)
        # Get engines to exclude
        exclude_engines = self.config.get('exclude_engines')
        # Loop through and push the entry to each non-excluded engine
        for engine in self.engines:
            if engine.name not in exclude_engines:
                # Add to Engine
                engine.blacklist_bulk(blacklist)
                # Sleep to avoid flooding the SMC API with requests
                sleep(2)

    def logout(self):
        """Close the SMC session."""
        session.logout()
| {"/tasks.py": ["/smc_tools.py"], "/main.py": ["/config.py", "/logs.py", "/server.py"], "/test.py": ["/tasks.py"], "/smc_tools.py": ["/config.py"], "/server.py": ["/config.py", "/tasks.py"]} |
47,368 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /config.py | import os
import requests
import yaml
class Config:
    """
    Class representing configuration for the program.

    Loads key/value pairs from ./config.yaml, downloading the file from
    the CONFIG_URL environment variable on first use if it is missing.
    """

    # Shared default; load() rebinds an instance-level dict.
    data = {}

    def load(self):
        """Populate self.data from config.yaml (downloading it if absent)."""
        # Check if config file exists, download if not.
        if not os.path.isfile('config.yaml'):
            self.download()
        # Load in config file details
        with open('config.yaml') as config:
            self.data = yaml.load(config, Loader=yaml.FullLoader)

    def download(self):
        """Fetch config.yaml from $CONFIG_URL and write it to disk.

        Raises for HTTP errors instead of silently writing an error page
        into the config file, and bounds the request with a timeout so a
        hung server cannot block startup forever.
        """
        # Retrieve URL for config file
        url = os.environ['CONFIG_URL']
        # Download config file
        file_content = requests.get(url=url, timeout=30)
        file_content.raise_for_status()
        with open('config.yaml', 'wb') as config:
            config.write(file_content.content)

    def get(self, key):
        """Return the configured value for `key`, or None when unset."""
        return self.data.get(key, None)
| {"/tasks.py": ["/smc_tools.py"], "/main.py": ["/config.py", "/logs.py", "/server.py"], "/test.py": ["/tasks.py"], "/smc_tools.py": ["/config.py"], "/server.py": ["/config.py", "/tasks.py"]} |
47,369 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /lambda/lambda_function.py | import os
import requests
def lambda_handler(event, context):
    """AWS Lambda entry point for GuardDuty findings.

    Extracts the remote IPv4 address from the finding, then forwards it
    together with the event type and timestamp to the NGFW endpoint.
    Returns a dict with statusCode/message.
    """
    # Get environment variables
    url = os.environ['NGFW_ENDPOINT']

    def _missing(message):
        # Uniform 404-style response for any absent/falsy field.
        return {'statusCode': 404, 'message': message}

    # Walk the nested finding structure, bailing out with a descriptive
    # message as soon as a required field is missing (dedupes seven
    # near-identical guard stanzas).
    event_type = event.get('type', None)
    if not event_type:
        return _missing('No event type was associated with this event.')
    service = event.get('service', None)
    if not service:
        return _missing('No service was associated with this event.')
    action = service.get('action', None)
    if not action:
        return _missing('No action was associated with this event.')
    call_action = action.get('awsApiCallAction', None)
    if not call_action:
        return _missing('No AWS api call action was associated with this event.')
    remote_details = call_action.get('remoteIpDetails', None)
    if not remote_details:
        return _missing('No remote details were associated with this event.')
    ip_address = remote_details.get('ipAddressV4', None)
    if not ip_address:
        return _missing('No IP Address was associated with this event.')
    timestamp = event.get('createdAt', None)
    if not timestamp:
        return _missing('No timestamp was associated with this event.')

    # Build data to be sent
    data = {
        'event_type': event_type,
        'remote_ip': ip_address,
        'timestamp': timestamp,
        'domain': ''
    }
    # Send request
    try:
        requests.post(url, json=data)
    except requests.Timeout:
        # BUG FIX: print() does not do %-interpolation like logging does;
        # format the URL into the message explicitly.
        print(f'There was a timeout error when attempting to send the event to the endpoint {url}.')
    # Return
    return {
        'statusCode': 200,
        'message': 'Event successfully sent to NGFW endpoint.'
    }
47,370 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /server.py | import logging
from config import Config
from flask import Flask, request
from tasks import add_to_blacklist
# Flask application instance; main.py imports and runs it.
app = Flask(__name__)
# Get config
config = Config()
config.load()
# Disable Flask logs
# (werkzeug's per-request logging would drown out our own event log)
log = logging.getLogger('werkzeug')
log.disabled = True
app.logger.disabled = True
@app.route('/')
@app.route('/api/', methods=['GET', 'POST'])
def home():
    """Health-check on GET; on POST, queue blacklist tasks for the reported IP."""
    logging.info('Received incoming %s request for /api/ route.' % request.method)
    if request.method != 'POST':
        return 'Running...'
    payload = request.json
    duration = config.get('blacklist_duration')
    remote_ip = payload['remote_ip']
    # Block traffic in both directions: any -> ip and ip -> any.
    for src, dst in (('any', remote_ip), (remote_ip, 'any')):
        add_to_blacklist.delay(src, dst, duration)
    return "Success!"
47,371 | Forcepoint/fp-bd-ngfw-aws-guardduty | refs/heads/master | /logs.py | import logging
# Map of config-file level names to stdlib logging level constants.
levels = {
    'info': logging.INFO,
    'debug': logging.DEBUG,
    'warn': logging.WARN,
    'fatal': logging.FATAL,
    'error': logging.ERROR
}
class Logger():
    """
    Configures the root logger from program configuration.
    """
    def __init__(self, config):
        """
        Initialise the logger.
        :param config: The Config object for the program
        """
        # Fall back to 'info' / 'events.log' when the config omits either key.
        log_level = config.get('log_level') or 'info'
        log_file = config.get('log_file') or 'events.log'
        logging.basicConfig(filename=log_file, level=get_log_level(log_level))
def get_log_level(level):
    """Translate a config level name into a logging constant; default INFO."""
    try:
        return levels[level]
    except KeyError:
        return logging.INFO
47,393 | dagopian/Island | refs/heads/master | /multi.py | from multiprocessing import Process, Pipe
import time as t
import os
import argparse
import numpy as np
import pandas as pd
from rdkit import Chem
from rdkit import rdBase
import cfg_util
import score_util
import zinc_grammar
import optimize_J
rdBase.DisableLog('rdApp.error')
GCFG = zinc_grammar.GCFG
def f(i):
    """Debug helper: print the worker id alongside the OS process id."""
    print('id :', i, '\n', 'process: ', os.getpid())
def main():
    # Orchestrator: builds a fully-connected Pipe matrix between islands and
    # launches one GA process (optimize_J.main) per island.
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', type=int, default=3,
                help='Number of islands, i.e. number of Genetic Algorithm excuted in parallel')
    parser.add_argument('-m', type=int, default=1000,
                help='Interval between each migration')
    parser.add_argument('-e', type=int, default=5,
                help='Pourcentage of emigrants in each migration')
    parser.add_argument('-n', type=int, default=-1,
                help='In case you are doing repetability test, equals to the n-th test ')
    args = parser.parse_args()
    N_islands = args.i
    N_migrations = args.m
    N_emigrants = args.e  # NOTE(review): parsed but never used below
    # Creates an array which contain a Pipe for every connexion between islands
    Pipes = pd.DataFrame(columns=[str(i) for i in range(N_islands)])
    for i in range(N_islands):
        Pipes.loc[i] = [ Pipe() for _ in range(N_islands)]
    # Set the diagonal to 0: Migration is not possible from an island to itself
    np.fill_diagonal(Pipes.values,0)
    pool = []
    for i in range(N_islands):
        # Each island runs optimize_J.main with the shared pipe matrix
        p = Process(target=optimize_J.main, args=(Pipes, i, N_islands,N_migrations,args.n))
        p.start()
        pool.append(p)
    # NOTE(review): children are deliberately not join()ed, so main() returns
    # while the island processes keep running -- confirm intended.
    #for i,p in enumerate(pool):
        #p.join()
if __name__ == '__main__':
    main()
# def g(conn,id):
# print('Bonjour depuis ',id,conn)
# #a = pd.DataFrame([[0,0,0,0,0,0],[1,1,1,1,1,1]])
# conn.send('Hello from %s'%str(id))
# rep = conn.recv()
# print(rep,conn)
# a , b = Pipe()
# pipes = [a,b]
# for i in range(2):
# p = Process(target=g, args=(pipes[i],i))
# p.start()
| {"/multi.py": ["/optimize_J.py"]} |
47,394 | dagopian/Island | refs/heads/master | /optimize_J.py | from __future__ import print_function
import argparse
import copy
import nltk
import threading
import time as t
import numpy as np
import pandas as pd
from rdkit import Chem
from rdkit import rdBase
import cfg_util
import score_util
import zinc_grammar
rdBase.DisableLog('rdApp.error')
GCFG = zinc_grammar.GCFG
def CFGtoGene(prod_rules, max_len=-1):
    """Encode a CFG production-rule sequence as a gene of rule-local indices.

    Each global rule index is replaced by its position among all rules that
    share the same left-hand side. When max_len > 0 the gene is truncated,
    or padded with random integers in [0, 256), to exactly max_len entries.
    """
    productions = GCFG.productions()
    gene = []
    for rule_idx in prod_rules:
        target_lhs = productions[rule_idx].lhs()
        same_lhs = [i for i, prod in enumerate(productions)
                    if prod.lhs() == target_lhs]
        gene.append(same_lhs.index(rule_idx))
    if max_len > 0:
        if len(gene) > max_len:
            del gene[max_len:]
        else:
            padding = [np.random.randint(0, 256)
                       for _ in range(max_len - len(gene))]
            gene.extend(padding)
    return gene
def GenetoCFG(gene):
    """Decode a gene back into a CFG production-rule sequence.

    Keeps a stack of nonterminals to expand; each gene value selects, modulo
    the number of candidates, a rule whose LHS matches the stack top. Stops
    early once the stack is exhausted.
    """
    prod_rules = []
    stack = [GCFG.productions()[0].lhs()]
    for choice in gene:
        if not stack:
            break
        lhs = stack.pop()
        candidates = [idx for idx, rule in enumerate(GCFG.productions())
                      if rule.lhs() == lhs]
        selected = candidates[choice % len(candidates)]
        prod_rules.append(selected)
        # Push the rule's nonterminal RHS symbols right-to-left, skipping 'None'
        expansion = [sym for sym in zinc_grammar.GCFG.productions()[selected].rhs()
                     if type(sym) == nltk.grammar.Nonterminal and str(sym) != 'None']
        stack.extend(reversed(expansion))
    return prod_rules
def selectParent(population, tournament_size=3):
    """Tournament selection over (score, smiles, gene) tuples.

    Samples `tournament_size` indices uniformly with replacement and returns
    the individual with the highest score; ties keep the earlier contender
    (strict '>' comparison).
    """
    contenders = np.random.randint(len(population), size=tournament_size)
    winner = population[contenders[0]]
    for candidate_idx in contenders[1:]:
        candidate = population[candidate_idx]
        if candidate[0] > winner[0]:
            winner = candidate
    return winner
def mutation(gene):
    """Return a deep copy of `gene` with one randomly chosen position
    replaced by a random integer in [0, 256); the input is left untouched."""
    position = np.random.choice(len(gene))
    mutant = copy.deepcopy(gene)
    mutant[position] = np.random.randint(0, 256)
    return mutant
def canonicalize(smiles):
    """Return RDKit's canonical SMILES for `smiles`, or the input unchanged
    when it is empty, unparsable, or a single-atom molecule."""
    mol = Chem.MolFromSmiles(smiles)
    if not smiles or mol is None or mol.GetNumAtoms() <= 1:
        return smiles
    return Chem.MolToSmiles(mol)
# Shared module state: read by the once-a-minute monitor thread
# (current_best) and updated by main() every generation.
elapsed_min = 0
best_score = 0
mean_score = 0
std_score = 0
min_score = 0
best_smiles = ""
all_smiles = []
def current_best():
    """Once-a-minute monitor: print elapsed minutes, best score/SMILES and
    total molecule count, then re-arm itself via a 60 s threading.Timer."""
    global elapsed_min
    global best_score
    global best_smiles
    global mean_score
    global min_score
    global std_score
    global all_smiles
    elapsed_min += 1
    # '$'-prefixed lines mark the per-minute log records
    print("${},{},{},{}"
          .format(elapsed_min, best_score, best_smiles, len(all_smiles)))
    # NOTE(review): the timer thread is not a daemon, so it can keep the
    # process alive after main() returns -- confirm intended.
    t = threading.Timer(60, current_best, [])
    t.start()
def migration(Pipes, island_id, nb_of_island, population, migration_nb):
    """Exchange individuals with islands at ring distance k = migration_nb.

    Sends this island's top-5% individuals through the pipe toward island
    (island_id - k) and replaces its own bottom 5% with migrants received
    on the pipe from island (island_id + k), wrapping around the ring.

    Pipes: DataFrame whose cell [i, j] holds a (conn_a, conn_b) Pipe pair
        (0 on the diagonal -- no self-migration).
    population: list of (score, smiles, gene); assumed sorted best-first so
        slice [0:top5] is the elite -- TODO confirm against caller.
    Returns the updated population.
    """
    k = migration_nb #between 1 and number of island
    island_list = [i for i in range(nb_of_island)]
    top5 = round(0.05*len(population))
    for i in island_list:
        if i==island_id:
            j = i-k
            # negative j here relies on pandas .iloc wrap-around indexing
            (Pipes.iloc[i,j])[0].send(population[0:top5])
            print('send from island ',island_id,' with i and j: ',i,j)
            # recv
            j += k
            i += k
            if i>=nb_of_island :
                i = i - nb_of_island
            migrants = (Pipes.iloc[i,j])[1].recv()
            print('recv from island',island_id)
            # drop the worst top5 individuals and append the migrants
            population = population[:len(population)-top5] + migrants
        else :
            pass
    return population
def main(Pipes, island_id, nb_of_island,mig_interval, logn=-1):
    """GA worker for one island: evolves molecules maximising the J score,
    migrating individuals through `Pipes` every `mig_interval` generations.

    Pipes: DataFrame of Pipe pairs shared by all islands (see multi.py).
    island_id: this island's index.
    nb_of_island: total island count; the mu/lambda budget is split evenly.
    mig_interval: number of generations between successive migrations.
    logn: repeatability-test index; -1 seeds deterministically per island,
        otherwise the wall clock is used as seed.
    Writes the final population to a per-island CSV after ~8 hours.
    """
    #parser = argparse.ArgumentParser()
    #parser.add_argument('--smifile', default='250k_rndm_zinc_drugs_clean.smi')
    #parser.add_argument('--seed', type=int, default=t.time())
    #args = parser.parse_args()
    smifile = '250k_rndm_zinc_drugs_clean.smi'
    if logn == -1:
        np.random.seed(0 + island_id)
    else:
        np.random.seed(int(t.time()))
    #np.random.seed(0)
    global best_smiles
    global best_score
    global all_smiles
    gene_length = 300
    # (mu, lambda) evolution-strategy sizes, divided across the islands
    N_mu = int(1000/nb_of_island)
    N_lambda = int(2000/nb_of_island)
    # initialize population from random ZINC seed molecules
    seed_smiles = []
    with open(smifile) as f:
        for line in f:
            smiles = line.rstrip()
            seed_smiles.append(smiles)
    initial_smiles = np.random.choice(seed_smiles, N_mu+N_lambda)
    initial_smiles = [canonicalize(s) for s in initial_smiles]
    initial_genes = [CFGtoGene(cfg_util.encode(s), max_len=gene_length)
                     for s in initial_smiles]
    initial_scores = [score_util.calc_score(s) for s in initial_smiles]
    #print(initial_scores)
    population = []
    for score, gene, smiles in zip(initial_scores, initial_genes,
                                   initial_smiles):
        population.append((score, smiles, gene))
    # keep only the N_mu best as the starting parents
    population = sorted(population, key=lambda x: x[0], reverse=True)[:N_mu]
    th = threading.Timer(60, current_best, [])
    th.start()
    print("Start!")
    all_smiles = [p[1] for p in population]
    #print([p[0] for p in population])
    #mig_interval = 5 # A migration every 1000 iteration
    x = [ i for i in range(mig_interval,1000000000,mig_interval)] # All the generation in wich a migration should occur
    k = 1 # First migration
    t0=t.time()
    for generation in range(1000000000):
        # per-generation statistics, published to the monitor thread globals
        scores = [p[0] for p in population]
        mean_score = np.mean(scores)
        min_score = np.min(scores)
        std_score = np.std(scores)
        best_score = np.max(scores)
        idx = np.argmax(scores)
        best_smiles = population[idx][1]
        # '%'-prefixed lines mark the per-generation log records
        print("%{},{},{},{},{}".format(generation, best_score,
                                       mean_score, min_score, std_score))
        new_population = []
        for _ in range(N_lambda):
            # pick a random parent, mutate its gene, keep only novel SMILES
            p = population[np.random.randint(len(population))]
            p_gene = p[2]
            c_gene = mutation(p_gene)
            c_smiles = canonicalize(cfg_util.decode(GenetoCFG(c_gene)))
            if c_smiles not in all_smiles:
                c_score = score_util.calc_score(c_smiles)
                c = (c_score, c_smiles, c_gene)
                new_population.append(c)
                all_smiles.append(c_smiles)
        population.extend(new_population)
        population = sorted(population,
                            key=lambda x: x[0], reverse=True)[:N_mu]
        # Every mig_interval generations, migrate at ring distance k,
        # cycling k through 1..nb_of_island-1
        if generation in x:
            print('Starting Migration')
            if k >= nb_of_island:
                k = 1
            population = migration(Pipes, island_id, nb_of_island, population, k)
            k+=1
        # stop after roughly 8 wall-clock hours
        if t.time() - t0 >= 3600*8 :
            break
    if logn == -1:
        f = open(str(island_id)+'_final_pop'+'_'+str(nb_of_island)+'_'+str(mig_interval)+'.csv','w')
    if logn != -1:
        f = open(str(island_id)+'_final_pop'+'_'+str(nb_of_island)+'_'+str(mig_interval)+'_'+str(logn)+'.csv','w')
    population = pd.DataFrame(population)
    population.to_csv(f)
    f.close()
if __name__ == "__main__":
    # NOTE(review): main() requires (Pipes, island_id, nb_of_island,
    # mig_interval); this bare call raises TypeError. Run via multi.py.
    main()
| {"/multi.py": ["/optimize_J.py"]} |
47,395 | dagopian/Island | refs/heads/master | /diversity.py | from rdkit import Chem, DataStructs
from rdkit.Chem.Fingerprints import FingerprintMols
import pandas as pd
import argparse
def main():
    """Report population diversity (mean pairwise Tanimoto distance) and the
    best score, for either the island-model results or the chemGE baseline.

    Reads the per-island '<i>_final_pop_<islands>_<interval>.csv' files, or
    '<i>_chemGE.csv', depending on the -a flag.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', type=int, default=0, help='Number of islands')
    parser.add_argument('-m', type=int, help='Migration Interval')
    parser.add_argument('-a', type=str, default='island',
                        help='"island" or "chem" : Comput diversity for islands or chemGE')
    args = parser.parse_args()
    N_island = args.i
    alg = args.a
    m = args.m

    # Collect SMILES (column 2) and scores (column 1) from the result files.
    ms = []
    score = []
    if alg == 'island':
        for i in range(N_island):
            tab = pd.read_csv(str(i)+'_final_pop_'+str(N_island)+'_'+str(m)+'.csv')
            ms += list(tab.iloc[:, 2])
            score += list(tab.iloc[:, 1])
    elif alg == 'chem':
        tab = pd.read_csv(str(N_island)+'_chemGE.csv')
        ms += list(tab.iloc[:, 2])
        score += list(tab.iloc[:, 1])
    else:
        # Any other -a value produced no output before either; keep that.
        return

    # Previously this block was duplicated verbatim in both branches.
    _report(ms, score)


def _report(ms, score):
    # Mean pairwise Tanimoto *distance* over all ordered pairs (self-pairs
    # contribute 0), followed by the best score in the population.
    fps = [FingerprintMols.FingerprintMol(Chem.MolFromSmiles(x)) for x in ms]
    total = 0.0
    for x in fps:
        for y in fps:
            total += 1 - DataStructs.FingerprintSimilarity(x, y, metric=DataStructs.TanimotoSimilarity)
    print('diversity: ', total/(len(fps)*len(fps)))
    print('best score: ', max(score))
main()
| {"/multi.py": ["/optimize_J.py"]} |
47,397 | backslash112/subtitle_adjustment | refs/heads/master | /subtitle_adjustment.py | #with open('subtitle.txt') as subtitle:
# for line in subtitle:
# timer = float(line[6:])+0.01
# with open('subtitle_result.txt', 'a') as result_file:
# result_file.write(line[0:6]+"%.2f"%timer+'\n')
class SubtitleAdjustment(object):
    """Shifts subtitle timestamps in 'subtitle.txt' by +0.01 s and appends
    the adjusted lines to 'subtitle_result.txt'.

    Expected input line format: 6 characters of prefix followed by a float
    timestamp (e.g. 'abcdef1.00').
    """

    def __init__(self):
        print("myclass init")

    @classmethod
    def test(cls):
        # Smoke-test hook; first parameter renamed self -> cls to reflect
        # that @classmethod receives the class, not an instance.
        print("test")

    def test2(self):
        print("test2")

    def run(self):
        """Process every line of subtitle.txt, appending adjusted copies.

        The result file is opened once (append mode, matching the old
        behavior) instead of being re-opened for every single input line.
        """
        print("SubtitleAdjustment - run")
        with open('subtitle.txt') as subtitle, \
                open('subtitle_result.txt', 'a') as result_file:
            for line in subtitle:
                # Shift the timestamp (everything after column 6) by 10 ms.
                timer = float(line[6:]) + 0.01
                result_file.write(line[0:6] + "%.2f" % timer + '\n')
47,398 | backslash112/subtitle_adjustment | refs/heads/master | /main.py | from subtitle_adjustment import SubtitleAdjustment
def main():
    """Demo driver: exercise the classmethod hook, then run the adjustment."""
    print("starting")
    SubtitleAdjustment.test()
    adjuster = SubtitleAdjustment()
    adjuster.test2()
    adjuster.run()


if __name__ == "__main__":
    main()
47,401 | alvarozamora/1DP | refs/heads/master | /postsim.py | import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import struct
import sod
# Post-processing: read binary particle dumps, histogram particle z-positions
# per snapshot, and overlay each histogram with the analytic Sod shock-tube
# density profile.
reals = range(1)          # realization indices to average over
t = range(10)             # snapshot indices
boxes = 200               # histogram bin count
L = 10**6                 # particles per dump
# position/velocity normalization constant; only the last assignment is used
w = 30875045135703220*50
w = 3087504513.570322036743164*50
w = 3087504513.570322036743164*boxes/2
grid = 0
h = np.zeros((len(t),boxes))
for i in reals:
    for k in t:
        grid += 1
        # NOTE(review): 'file' and 'type' shadow builtins; f is never closed
        file = 'Grid'+str(grid)
        size = 8
        num = L*4
        type = 'd' #d is double, f is float, i is integer
        f = open(file, 'rb')
        X = f.read(num*size)
        X = np.array(struct.unpack(type*num, X))
        # layout: [z | vx | vy | vz], each of length L, scaled by w
        z = X[:L]/w
        vx = X[L:2*L]/w
        vy = X[2*L:3*L]/w
        vz = X[3*L:4*L]/w
        h[k] += np.histogram(z, bins = boxes, range = (0,1))[0]
        print(grid)
# normalize: average over realizations, convert counts to density units
h = h/len(reals)/L*(9/16)*boxes
gamma = 5/3               # adiabatic index (monatomic gas)
dustFrac = 0.0
npts = 500
time = 0.2
left_state = (1,1,0)      # (pressure, density, velocity) left of the shock
right_state = (0.1, 0.125, 0.)
# left_state and right_state set pressure, density and u (velocity)
# geometry sets left boundary on 0., right boundary on 1 and initial
# position of the shock xi on 0.5
# t is the time evolution for which positions and states in tube should be
# calculated
# gamma denotes specific heat
# note that gamma and npts are default parameters (1.4 and 500) in solve
# function
plt.figure()
for T in t:
    # snapshot time; constant presumably matches the dump cadence -- verify
    time = 0.006266570686578*(T+1)
    plt.bar(np.arange(boxes)/boxes+0.5/boxes, h[T] , width = 1/boxes)
    positions, regions, values = sod.solve(left_state=left_state, right_state=right_state, geometry=(0., 1., 0.5), t=time, gamma=gamma, npts=npts, dustFrac=dustFrac)
    #x = [0,0.5,0.5,1]
    y = np.array([1,1,1/8,1/8])
    #print(y,np.sum(y))
    #plt.plot(x,y,'k')
    plt.plot(values['x'], values['rho'], 'k')
    print(np.sum(h[T])/boxes, np.sum(y)/4)
    # zero-pad the frame index so output files sort lexicographically
    while len(str(T)) < 4:
        T = '0' + str(T)
    plt.savefig('hist'+str(T)+'.png')
    plt.clf()
47,402 | alvarozamora/1DP | refs/heads/master | /Python/1DP.py | from utils import *
import numpy as np
import matplotlib
import pdb
#matplotlib.use("agg")
import matplotlib.pyplot as plt
import seaborn as sns
import argparse
import h5py
import time
import torch
sns.set_context("talk") #darkgrid, whitegrid, dark, white, ticks
sns.set_style("white")
parser = argparse.ArgumentParser(description='HDF5 Initial Condition Pipeline')
parser.add_argument('--file', type=str, default='particle/particle', help='Output: hdf base file path/name')
parser.add_argument('-n', type=int, default=10**8, help='Particle Number')
parser.add_argument('-c', type=int, default=10**3, help='Number of cores/files')
parser.add_argument('-b', type=int, default=0, help='Periodic (0), Reflective (1)')
args = parser.parse_args()
# Sound Wave
f = 1 # Degrees of Freedom
g = (f+2)/f # Adiabatic Index
c2 = 1.0 # Squared Sound Speed
delta = 0.05 # Sound Wave - Perturbation Amplitude
# Setting up Grid
Nc = 512 # Number of Cells
x = np.linspace(0+1/Nc/2,1-1/Nc/2,Nc) # Grid Centers on (0,1)
v = delta*np.sin(4*np.pi*x) # Bulk Velocity
rho = 1 - v/np.sqrt(c2) # Density (linear sound-wave relation)
Pressure = rho*c2 # Pressure
#ied = c2/(g-1) # Thermal Energy
# Bundle the grid state so the plotting helpers can annotate figures.
p = {'rho': rho, 'Pressure': Pressure, 'v': v, 'x': x}
print("Generating Initial Conditions")
P, Pdx, vels, P0, p = InitialConditions(x, rho, v, Pressure, args, p)
print("Done")
DumpDensity(Pdx, x, p, P)
DumpPhaseSpace(P, vels, Nc)
print("Initial Conditions Dumped")
# Exact free-streaming advection on the GPU when available: positions move
# ballistically from t=0 and are re-sorted so neighbor gaps give the density.
ntimes = 400
Tf = 1.0
times = np.linspace(Tf/ntimes, Tf, ntimes-1)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print(f'Using {device}')
P = torch.from_numpy(P).to(device)
vels = torch.from_numpy(vels).to(device)
for i in range(ntimes-1):
    t = times[i]
    start = time.time()
    Pnew = torch.sort((P + vels*t)%1)[0]  # advect from t=0 and wrap periodically
    Pdxnew = Pnew.roll(-1)-Pnew           # gap to the next particle
    Pdxnew[-1] += 1                       # last gap wraps across the boundary
    end = time.time()
    print(f"Advected to time {t:.3f} in {end-start:.3e} seconds")
    DumpDensity(Pdxnew.cpu().numpy(), x, p, Pnew.cpu().numpy(), Ns=10**6, fname=f'Output/Density{i+1:03d}.png')
# (Dead code below: the earlier HDF5 output path, kept for reference.)
# P = np.array_split(P, args.c)
# Pdx = np.array_split(Pdx, args.c)
# vels = np.array_split(vels, args.c)
# parts = np.array([len(q) for q in Pdx]).astype(int)
# print(f'Particle 1 : ({P0:.3e}. {v0:.3e})')
# with h5py.File(args.file, 'w') as hdf:
# hdf.create_dataset('n', data=parts)
# hdf.create_dataset('x', data=np.array([P0]))
# hdf.create_dataset('v', data=np.array([v0]))
# hdf.create_dataset('D', data=np.array([D]))
# hdf.create_dataset('Nc', data=np.array([Nc]))
# hdf.create_dataset('xgrid', data=x)
# hdf.create_dataset('rhogrid', data=rho)
# hdf.create_dataset('vgrid', data=v)
# hdf.create_dataset('Pgrid', data=Pressure)
# for p,data in enumerate(Pdx):
# with h5py.File(args.file+f'{p:03d}', 'w') as hdf:
# hdf.create_dataset("dx", data=data)
# hdf.create_dataset("v", data=vels[p])
# hdf.create_dataset("x", data=P[p])
# hdf.create_dataset("xnew", data=P[p])
| {"/Python/1DP.py": ["/utils.py"]} |
47,403 | alvarozamora/1DP | refs/heads/master | /hdf2.py | import numpy as np
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
import seaborn as sns
import argparse
import h5py
import pdb
sns.set_context("talk") #darkgrid, whitegrid, dark, white, ticks
parser = argparse.ArgumentParser(description='HDF5 Initial Condition Pipeline')
parser.add_argument('--file', type=str, default='particle/particle', help='Output: hdf base file path/name')
parser.add_argument('-n', type=int, default=10**9, help='Particle Number')
parser.add_argument('-c', type=int, default=10**3, help='Number of cores/files')
parser.add_argument('-b', type=int, default=0, help='Periodic (0), Reflective (1)')
args = parser.parse_args()
#import pdb; pdb.set_trace()
# Sound Wave
f = 1 # Degrees of Freedom
g = (f+2)/f # Adiabatic Index
c2 = 1.0 # Squared Sound Speed
delta = 0.05 # Sound Wave - Perturbation Amplitude
# Setting up Grid
Nc = 512 # Number of Cells
x = np.linspace(0+1/Nc/2,1-1/Nc/2,Nc) # Grid Centers on (0,1)
xL = np.linspace(0,1-1/Nc,Nc) # Grid Left
v = delta*np.sin(4*np.pi*x) # Bulk Velocity
rho = 1 - v # Density
Pressure = rho*c2 # Pressure
#ied = c2/(g-1) # Thermal Energy
dx = np.gradient(x) # Cell Sizes (UNIFORM GRID ONLY)
M = (rho*dx).sum() # Total Mass (UNIFORM GRID ONLY)
# Particle Parameters
N = args.n # Approximate number of particles
mp = M/N # Particle Mass
# Final number of particles per cell, total
Np = np.floor((rho*dx)/mp + np.random.uniform(size=rho.size)).astype(int)
NP = Np.sum()
# Particle Diameter
Dmax = (dx/Np).min()
D = 1e-4*Dmax
D = 0
#Initialize Velocities
s = np.sqrt(2*Pressure/rho)
print(len(Np), len(rho), len(v), len(Pressure))
# bin and effbin sizes
# velocities
with h5py.File(args.file, 'w') as hdf:
hdf.create_dataset('D', data=np.array([D]))
hdf.create_dataset('m', data=np.array([mp]))
hdf.create_dataset('Nc', data=np.array([Nc]))
hdf.create_dataset('N', data=np.array([NP]))
hdf.create_dataset('Np', data=Np)
hdf.create_dataset('xgrid', data=x)
hdf.create_dataset('rhogrid', data=rho)
hdf.create_dataset('vgrid', data=v)
hdf.create_dataset('Pgrid', data=Pressure)
print("Saved Metadata")
print(Nc)
print(NP)
print(mp)
print(f"Making Initial Conditions for {args.n:.0e} particles across {Nc} cells")
for j in range(len(dx)):
Left = xL[j]
effbin = dx[j]/Np[j]-D
bin = dx[j]/Np[j]
vels = np.random.randn(Np[j])*s[j] + v[j]
uniforms = np.random.random(Np[j])
P = Left + np.arange(Np[j])*bin + uniforms*effbin + D/2
#pdb.set_trace()
print(f"Finished cell {j} with minmax = ({P.min():.2e}, {P.max():.2e})")
file = format
with h5py.File(args.file+f"{j:03d}", "w") as hdf:
hdf.create_dataset("x", data=P)
hdf.create_dataset("v", data=vels)
#hdf.create_dataset("xnew", data=P[p]) #keep in case we want to output positions
| {"/Python/1DP.py": ["/utils.py"]} |
47,404 | alvarozamora/1DP | refs/heads/master | /gif.py | import imageio as imo
import os
import glob
import re
import numpy as np
import torch
import torch.nn.functional as F
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
#from torchvision.utils import save_image
import io
import struct
def tryint(s):
    """Return int(s) when s is a digit run, otherwise return s unchanged.

    Used as part of a natural-sort key, where re.split yields alternating
    text chunks and digit chunks.
    """
    try:
        return int(s)
    except (ValueError, TypeError):
        # BUG FIX: narrowed the bare `except`, which also swallowed
        # KeyboardInterrupt/SystemExit and hid real errors.
        return s
def alphanum_key(s):
    """Turn a string into a list of string and number chunks.

    "z23a" -> ["z", 23, "a"]
    """
    # re.split with one capture group alternates text (even indices) and
    # captured digit runs (odd indices); only the digit runs become ints.
    chunks = re.split('([0-9]+)', s)
    return [int(c) if idx % 2 else c for idx, c in enumerate(chunks)]
def sort_nicely(l):
    """Sort the given list in place in the way that humans expect."""
    # sorted() is stable, like list.sort; slice-assign keeps the mutation in place.
    l[:] = sorted(l, key=alphanum_key)
print("Making Gif")
# Particle densities: raw binary files of doubles, one value per grid cell.
particle_names = glob.glob("Data/rho*")
sort_nicely(particle_names)  # numeric-aware sort so rho2 precedes rho10
times = len(particle_names)
# Gather Euler Data
grid_names = []
for i in range(401):
    #grid_names.append(f'/Euler10/density_{i}.npy')
    grid_names.append(f'Euler/density_{i}.npy')
#print(particle_names)
#making animation
dur = 1/24.  # seconds per GIF frame
#dur = 1.
t = 'd'      # struct format: little-endian double
size = 8     # bytes per double
with imo.get_writer('ParticleSoundWave.gif', duration=dur) as writer:
    for i in range(len(particle_names)):
        # Grid (Euler) density defines the cell count Nc for this frame.
        g_data = np.load(grid_names[i])
        Nc = len(g_data)
        #import pdb; pdb.set_trace()
        f = open(particle_names[i], 'rb')  # NOTE(review): handle never closed
        X = f.read(Nc*size)
        X = np.array(struct.unpack(t*Nc, X))
        x = np.linspace(1/2/Nc, 1 - 1/2/Nc, Nc)  # cell centers
        plt.figure(0,figsize=(6,4))
        plt.clf()
        plt.plot(x, g_data, label = r"$\rho$")       # Euler reference
        plt.plot(x, X, lw = 1, label = r"$\rho_p$")  # particle estimate
        plt.xlabel("Distance")
        plt.ylabel("Density")
        plt.grid(alpha=0.2)
        plt.xticks([0,0.25,0.5,0.75,1.0])
        d = 0.05  # y-axis half-range around the mean density of 1
        plt.yticks([1-2*d, 1-d, 1, 1+d, 1+2*d])
        plt.ylim(1-2*d,1+2*d)
        plt.xlim(0,1)
        # Render the figure into memory and hand the PNG to the GIF writer.
        buf = io.BytesIO()
        plt.savefig(buf, format="png", dpi=230)
        buf.seek(0)
        image = imo.imread(buf)
        writer.append_data(image)
    writer.close()  # NOTE(review): redundant — the `with` block already closes the writer
# Dead code: mp4 variant of the writer loop, parked in a string literal.
'''
with imo.get_writer('ParticleSoundWave.mp4', fps=60) as writer:
    for i in range(len(particle_names)):
        g_data = np.load(grid_names[i])
        Nc = len(g_data)
        #import pdb; pdb.set_trace()
        f = open(particle_names[i], 'rb')
        X = f.read(Nc*size)
        X = np.array(struct.unpack(t*Nc, X))
        x = np.linspace(1/2/Nc, 1 - 1/2/Nc, Nc)
        plt.figure(0,figsize=(6,4))
        plt.clf()
        plt.plot(x, g_data, label = r'$\rho$')
        plt.plot(x, X, lw = 1, label = r'$\rho_p$')
        plt.xlabel('Distance')
        plt.ylabel('Density')
        plt.grid(alpha=0.2)
        plt.xticks([0,0.25,0.5,0.75,1.0])
        d = 0.05
        plt.yticks([1-2*d, 1-d, 1, 1+d, 1+2*d])
        plt.ylim(1-2*d,1+2*d)
        plt.xlim(0,1)
        buf = io.BytesIO()
        plt.savefig(buf, format='png', dpi=230)
        buf.seek(0)
        image = imo.imread(buf)
        writer.append_data(image)
    writer.close()
'''
| {"/Python/1DP.py": ["/utils.py"]} |
47,405 | alvarozamora/1DP | refs/heads/master | /utils.py | import numpy as np
import matplotlib
#matplotlib.use("agg")
import matplotlib.pyplot as plt
import seaborn as sns
import argparse
import h5py
#import pdb; pdb.set_trace()
def InitialConditions(x, rho, v, Pressure, args, p):
    """Sample ~args.n particles from the grid state (x, rho, v, Pressure).

    Each cell j receives Np[j] particles placed in equal sub-bins with a
    uniform jitter, and Gaussian velocities about the bulk flow v[j].
    Returns (P positions, Pdx gaps to the next particle, vels, P0 first
    position, p metadata dict updated with 'mp', 'NP', 'D').
    """
    N = args.n
    dx = np.gradient(x) # Cell Sizes (UNIFORM GRID ONLY)
    M = (rho*dx).sum() # Total Mass (UNIFORM GRID ONLY)
    # Particle Parameters
    mp = M/N # Particle Mass, Total Mass divided by total particles
    p['mp'] = mp
    # Final number of particles per cell, and total
    # (stochastic rounding keeps the expected count equal to rho*dx/mp)
    Np = np.floor((rho*dx)/mp + np.random.uniform(size=rho.size)).astype(int)
    NP = Np.sum()
    p['NP'] = NP
    # Particle Diameter
    Dmax = (dx/Np).min()
    D = 1e-4*Dmax
    D = 0  # diameter disabled: point particles (previous line superseded)
    p['D'] = D
    #Initialize Velocities
    s = np.sqrt(Pressure/rho)  # thermal velocity scale per cell
    # bin and effbin sizes
    # velocities
    effbins = np.array([])  # NOTE(review): dead — immediately rebound to lists below
    bins = np.array([])
    effbins = []
    bins = []
    vels = []
    for j in range(len(dx)):
        effbins.append( np.ones(Np[j])*(dx[j]/Np[j]-D))  # usable width per sub-bin
        bins.append( np.ones(Np[j])*dx[j]/Np[j] )        # full sub-bin width
        vels.append(np.random.randn(Np[j])*s[j] + v[j])
    #effbins = np.array(effbins)
    #bins = np.array(bins)
    #pos_in_bin = effbins*np.random.uniform(size=effbins.size)
    effbins = np.concatenate(effbins)
    bins = np.concatenate(bins)
    vels = np.concatenate(vels)
    uniforms = np.random.random(Np.sum())
    # Computing particle dx's
    if args.b == 0: #Periodic Boundary Conditions
        Pdx = np.roll(effbins*uniforms, -1) - effbins*uniforms + bins
        v0 = vels[0]
        P0 = uniforms[0]*effbins[0] + D/2
        # NOTE(review): this turns `vels` into neighbor velocity *differences*
        # (gap evolution form), but Python/1DP.py advects with them as absolute
        # velocities — confirm which convention is intended.
        vels = np.roll(vels, -1) - vels
        P = np.concatenate(([P0], P0 + np.cumsum(Pdx[:-1])))
    elif args.b == 1: #Reflective Boundary Conditions #NEED TO FIX VELOCITY
        Pdx = effbins[1:]*uniforms[1:] - effbins[:-1]*uniforms[:-1] + bins[:-1]
        vels = np.roll(vels, -1) - vels
        P0 = uniforms[0]*effbins[0] + D/2
        Pdx = np.append(np.array([P0]),Pdx)  # first "gap" is the offset from 0
        P = np.cumsum(Pdx)
    #MP = mp*np.arange(1,Np.sum())
    #rhoP = mp/Pdx
    return P, Pdx, vels, P0, p
# Computes Densities
# Smooths gradients over Ns particles
def DumpDensity(Pdx, x, p, P, Ns=10**3, fname='Output/Density000.png'):
    """Plot grid density vs. particle-estimated density and save to `fname`.

    Pdx: per-particle gap to the next particle (sorted, periodic).
    x, p: grid centers and metadata dict ('rho', 'mp', 'NP') for the overlay.
    P: sorted particle positions. Ns: particles averaged per plotted point.
    """
    plt.figure(0)
    plt.clf()
    j = int(np.floor(p['NP']/Ns))  # number of smoothed sample points
    #Pdx_smooth = Ns*p['mp']/np.array([Pdx[i*Ns:(i+1)*Ns].sum() for i in range(j)])
    # density estimate: particle mass / mean gap over each chunk of Ns particles
    Pdx_smooth = 1*p['mp']/np.array([Pdx[i*Ns:(i+1)*Ns].mean() for i in range(j)])
    P_smooth = np.array([P[i*Ns] for i in range(j)])  # chunk's left-most position
    #plt.plot(P[::Ns], mp/(Pdx[::Ns][1:]+Pdx[::Ns][:-1]), label=r'$\rho_P$')
    plt.plot(x, p['rho'], label=r'$\rho$')
    plt.plot(P_smooth,Pdx_smooth, lw=1, label = r'$\rho_P$')
    plt.xlabel("Distance")
    plt.ylabel("Density")
    plt.grid(alpha=0.3)
    plt.ylim(0.9,1.1)
    plt.xlim(0,1)
    #plt.yticks([0,1/2,1,3/2,2])
    plt.xticks([0,1/4,2/4,3/4,1])
    plt.legend()
    sns.despine()
    plt.tight_layout()
    plt.savefig(fname)
# Preview Phase Space
def DumpPhaseSpace(P, vels, Nc = 128, fname='Output/Phase000.png'):
    """Save a 2-D histogram of (position, velocity) phase space to `fname`.

    Positions are binned on [0, 1] and velocities on [-4, 4], Nc bins each.
    """
    plt.figure(1)
    plt.clf()
    plt.hist2d(P,vels,bins=(Nc,Nc), range = [[0,1],[-4,4]])
    plt.xlabel("Distance")
    plt.ylabel("Velocity")
    plt.title("Phase Space")
    plt.tight_layout()
    plt.savefig(fname)
| {"/Python/1DP.py": ["/utils.py"]} |
47,406 | alvarozamora/1DP | refs/heads/master | /Python/gif.py | import imageio as io
import os
import glob
import re
import numpy as np
import torch
import torch.nn.functional as F
#from torchvision.utils import save_image
def tryint(s):
    """Return int(s) when s is a digit run, otherwise return s unchanged.

    Used as part of a natural-sort key, where re.split yields alternating
    text chunks and digit chunks.
    """
    try:
        return int(s)
    except (ValueError, TypeError):
        # BUG FIX: narrowed the bare `except`, which also swallowed
        # KeyboardInterrupt/SystemExit and hid real errors.
        return s
def alphanum_key(s):
    """Turn a string into a list of string and number chunks.

    "z23a" -> ["z", 23, "a"]
    """
    # re.split with one capture group alternates text (even indices) and
    # captured digit runs (odd indices); only the digit runs become ints.
    chunks = re.split('([0-9]+)', s)
    return [int(c) if idx % 2 else c for idx, c in enumerate(chunks)]
def sort_nicely(l):
    """Sort the given list in place in the way that humans expect."""
    # sorted() is stable, like list.sort; slice-assign keeps the mutation in place.
    l[:] = sorted(l, key=alphanum_key)
print("Making Gif")
# Collect the pre-rendered density frames and assemble them into a GIF.
file_names = glob.glob("Output/Density*.png")
sort_nicely(file_names)  # numeric-aware sort so Density2 precedes Density10
#print(file_names)
#making animation
dur = 1/30.  # seconds per frame
with io.get_writer('ParticleSoundWave.gif', duration=dur) as writer:
    for filename in file_names:
        image = io.imread(filename)
        writer.append_data(image)
    writer.close()  # NOTE(review): redundant — the `with` block already closes the writer
| {"/Python/1DP.py": ["/utils.py"]} |
47,407 | alvarozamora/1DP | refs/heads/master | /hdf.py | import numpy as np
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
import seaborn as sns
import argparse
import h5py
sns.set_context("talk") #darkgrid, whitegrid, dark, white, ticks
parser = argparse.ArgumentParser(description='HDF5 Initial Condition Pipeline')
parser.add_argument('--file', type=str, default='particle/particle', help='Output: hdf base file path/name')
parser.add_argument('-n', type=int, default=10**9, help='Particle Number')
parser.add_argument('-c', type=int, default=10**3, help='Number of cores/files')
parser.add_argument('-b', type=int, default=0, help='Periodic (0), Reflective (1)')
args = parser.parse_args()
#import pdb; pdb.set_trace()
# Sound Wave
f = 1 # Degrees of Freedom
g = (f+2)/f # Adiabatic Index
c2 = 1.0 # Squared Sound Speed
delta = 0.05 # Sound Wave - Perturbation Amplitude
# Setting up Grid
Nc = 512 # Number of Cells
x = np.linspace(0+1/Nc/2,1-1/Nc/2,Nc) # Grid Centers on (0,1)
v = delta*np.sin(4*np.pi*x) # Bulk Velocity
rho = 1 - v # Density (NOTE(review): utils.py divides by sqrt(c2); equal only while c2 == 1)
Pressure = rho*c2 # Pressure
#ied = c2/(g-1) # Thermal Energy
dx = np.gradient(x) # Cell Sizes (UNIFORM GRID ONLY)
M = (rho*dx).sum() # Total Mass (UNIFORM GRID ONLY)
# Particle Parameters
N = args.n # Approximate number of particles
mp = M/N # Particle Mass
# Final number of particles per cell, total
# (stochastic rounding keeps the expected count equal to rho*dx/mp)
Np = np.floor((rho*dx)/mp + np.random.uniform(size=rho.size)).astype(int)
NP = Np.sum()
# Particle Diameter
Dmax = (dx/Np).min()
D = 1e-4*Dmax
#Initialize Velocities
s = np.sqrt(2*Pressure/rho)  # NOTE(review): utils.py uses sqrt(P/rho) — factor-of-2 mismatch
# bin and effbin sizes
# velocities
effbins = np.array([])  # NOTE(review): dead — immediately rebound to lists below
bins = np.array([])
effbins = []
bins = []
vels = []
for j in range(len(dx)):
    effbins.append( np.ones(Np[j])*(dx[j]/Np[j]-D))  # usable width per sub-bin
    bins.append( np.ones(Np[j])*dx[j]/Np[j] )        # full sub-bin width
    vels.append(np.random.randn(Np[j])*s[j] + v[j])
#effbins = np.array(effbins)
#bins = np.array(bins)
#pos_in_bin = effbins*np.random.uniform(size=effbins.size)
effbins = np.concatenate(effbins)
bins = np.concatenate(bins)
vels = np.concatenate(vels)
uniforms = np.random.random(Np.sum())
# Computing particle dx's
if args.b == 0: #Periodic Boundary Conditions
    Pdx = np.roll(effbins*uniforms, -1) - effbins*uniforms + bins
    v0 = vels[0]
    P0 = uniforms[0]*effbins[0] + D/2
    vels = np.roll(vels, -1) - vels  # neighbor velocity differences (gap form)
elif args.b == 1: #Reflective Boundary Conditions #NEED TO FIX VELOCITY
    Pdx = effbins[1:]*uniforms[1:] - effbins[:-1]*uniforms[:-1] + bins[:-1]
    vels = np.roll(vels, -1) - vels
    P0 = uniforms[0]*effbins[0] + D/2
    Pdx = np.append(np.array([P0]),Pdx)
P = np.cumsum(Pdx)  # positions as cumulative gaps
MP = mp*np.arange(1,Np.sum())  # cumulative mass (unused below)
rhoP = mp/Pdx                  # raw per-gap density (unused below)
# In[9]:
Np.sum(), Pdx.shape  # NOTE(review): no-op expression — notebook leftover
# In[10]:
# Bin Check
print(P0, bins.sum(), effbins.sum(), 1-Np.sum()*D)
# In[11]:
# Computes Densities
# Smooths gradients over Ns particles
sns.set_style("white")
Ns = 10**3  # particles averaged per plotted point
j = int(np.ceil(Np.sum()/Ns))
Pdx_smooth = Ns*mp/np.array([Pdx[i*Ns:(i+1)*Ns].sum() for i in range(j)])
P_smooth = np.array([P[i*Ns] for i in range(j)])
#plt.plot(P[::Ns], mp/(Pdx[::Ns][1:]+Pdx[::Ns][:-1]), label=r'$\rho_P$')
#plt.plot(P_smooth,Pdx_smooth, label = r'$\rho_p')
plt.plot(x,rho, label=r'$\rho$')
plt.xlabel("Distance")
plt.ylabel("Density")
plt.grid(alpha=0.3)
plt.ylim(0.9,1.1)
plt.xlim(0,1)
#plt.yticks([0,1/2,1,3/2,2])
plt.xticks([0,1/4,2/4,3/4,1])
plt.legend()
sns.despine()
plt.tight_layout()
plt.savefig("Positions.png")
# Preview Phase Space
plt.figure()
plt.hist2d(P,vels,bins=(Nc,128), range = [[0,1],[-4,4]])
plt.xlabel("Distance")
plt.ylabel("Velocity")
plt.title("Phase Space")
plt.tight_layout()
plt.savefig("PhaseSpace.png")
#import pdb; pdb.set_trace()
# Split the particle arrays into args.c per-core files plus one metadata file.
P = np.array_split(P, args.c)
Pdx = np.array_split(Pdx, args.c)
vels = np.array_split(vels, args.c)
parts = np.array([len(q) for q in Pdx]).astype(int)
# NOTE(review): v0 is only defined in the periodic branch — this print (and the
# 'v' dataset below) raise NameError when run with -b 1.
print(f'Particle 1 : ({P0:.3e}. {v0:.3e})')
with h5py.File(args.file, 'w') as hdf:
    hdf.create_dataset('n', data=parts)
    hdf.create_dataset('x', data=np.array([P0]))
    hdf.create_dataset('v', data=np.array([v0]))
    hdf.create_dataset('D', data=np.array([D]))
    hdf.create_dataset('Nc', data=np.array([Nc]))
    hdf.create_dataset('xgrid', data=x)
    hdf.create_dataset('rhogrid', data=rho)
    hdf.create_dataset('vgrid', data=v)
    hdf.create_dataset('Pgrid', data=Pressure)
for p,data in enumerate(Pdx):
    with h5py.File(args.file+f'{p:03d}', 'w') as hdf:
        hdf.create_dataset("dx", data=data)
        hdf.create_dataset("v", data=vels[p])
        hdf.create_dataset("x", data=P[p])
        hdf.create_dataset("xnew", data=P[p])
| {"/Python/1DP.py": ["/utils.py"]} |
47,408 | katehrkim/budget | refs/heads/master | /classes/budget.py | from classes.savings import Savings
from classes.expenses import Expenses
import os
import csv
class Budget:
    """A named budget: tracks a running balance plus expense/deposit history.

    Spendings hold Expenses objects; deposits hold Savings objects. Each
    mutation persists the corresponding history to a CSV under ../data/.
    """

    def __init__(self, name, amount):
        self.name = name
        self.amount = amount    # current balance
        self.spendings = []     # list of Expenses
        self.deposits = []      # list of Savings

    def list_expenses(self):
        """Print one line per recorded expense."""
        for expense in self.spendings:
            # BUG FIX: Expenses stores its date as `date_spent`; the old code
            # read a nonexistent `.date` attribute and raised AttributeError.
            print(f'{expense.date_spent}: {expense.amount} spent for {expense.type}')

    def list_savings(self):
        """Print one line per recorded deposit."""
        for deposit in self.deposits:
            # BUG FIX: Savings stores its date as `date_deposited`, not `date`.
            print(f'{deposit.date_deposited}: {deposit.amount} deposited for {deposit.type}')

    def record_expense(self, expense_data):
        """Record an expense and decrease the balance.

        Accepts either an Expenses instance (as runner.py passes) or a dict
        of Expenses constructor kwargs.
        """
        # BUG FIX: the old code always did Expenses(**expense_data), which
        # crashes on the Expenses object the caller passes, and read a
        # nonexistent `.amount_spent` attribute.
        expense = expense_data if isinstance(expense_data, Expenses) else Expenses(**expense_data)
        self.spendings.append(expense)
        self.amount -= expense.amount
        self.save_expense()

    def record_savings(self, savings_data):
        """Record a deposit and increase the balance.

        Accepts either a Savings instance or a dict of constructor kwargs.
        """
        # BUG FIX: mirrors record_expense — `.amount_deposited` never existed.
        deposit = savings_data if isinstance(savings_data, Savings) else Savings(**savings_data)
        self.deposits.append(deposit)
        self.amount += deposit.amount
        self.save_deposit()

    def save_expense(self):
        """Rewrite ../data/expenses.csv with the full expense history."""
        my_path = os.path.abspath(os.path.dirname(__file__))
        path = os.path.join(my_path, "../data/expenses.csv")
        # newline='' is the csv-module convention (avoids blank rows on Windows).
        with open(path, 'w', newline='') as csvfile:
            expense_csv = csv.writer(csvfile, delimiter=',')
            expense_csv.writerow(['date', 'type', 'amount'])
            for expense in self.spendings:
                expense_csv.writerow([expense.date_spent, expense.type, expense.amount])

    def save_deposit(self):
        """Rewrite ../data/savings.csv with the full deposit history."""
        my_path = os.path.abspath(os.path.dirname(__file__))
        path = os.path.join(my_path, "../data/savings.csv")
        with open(path, 'w', newline='') as csvfile:
            deposit_csv = csv.writer(csvfile, delimiter=',')
            deposit_csv.writerow(['date', 'type', 'amount'])
            for deposit in self.deposits:
                deposit_csv.writerow([deposit.date_deposited, deposit.type, deposit.amount])

    def view_amount_left(self):
        """Print the current balance."""
        print(self.amount)
47,409 | katehrkim/budget | refs/heads/master | /runner.py | # After you write all your classes, use this file to call them all together and run your program
from classes.expenses import Expenses
from classes.budget import Budget
# Interactive driver: a single demo Budget and a menu loop.
jon = Budget('Jon', 500)
while True:
    # NOTE(review): options 3, 4 and 5 are advertised but not handled below.
    user_input = input('\nWhat would you like to do?\nOptions:\n1 view amount of money left\n2 record a spending\n3 record a deposit\n4 view spending list\n5 view deposit list\n6 quit\n')
    if user_input == '1':
        jon.view_amount_left()
    elif user_input == '2':
        # NOTE(review): passes an Expenses *object*, while Budget.record_expense
        # unpacks its argument with Expenses(**...) — confirm these two agree.
        food = Expenses('06/04/21','Food',30)
        jon.record_expense(food)
    elif user_input == '6':
        break
| {"/classes/budget.py": ["/classes/savings.py", "/classes/expenses.py"], "/runner.py": ["/classes/expenses.py", "/classes/budget.py"]} |
class Expenses:
    """A single spending record: when, what category, and how much."""

    def __init__(self, date, type, amount):
        # Stored as `date_spent` to distinguish it from Savings' date_deposited.
        self.date_spent, self.type, self.amount = date, type, amount
class Savings:
    """A single deposit record; defaults to a 100-unit 'income' deposit."""

    def __init__(self, date, type='income', amount=100):
        # Stored as `date_deposited` to distinguish it from Expenses' date_spent.
        self.date_deposited, self.type, self.amount = date, type, amount
47,419 | AkumaEX/MAC0317-EP1 | refs/heads/master | /main.py | import sys
import wave
import numpy as np
from synthesizer import Synthesizer
# Prepare the inputs: ADSR envelope file, sample rate, and the score from stdin.
adsr = open(sys.argv[1], 'r').readlines()  # NOTE(review): handle never closed
freq = int(sys.argv[2])                    # sampling frequency in Hz
part = sys.stdin.readlines()               # score: notes + duration per line
# Sample the melody.
synt = Synthesizer(adsr, freq, part)
melody = synt.get_melody()
# Pack the samples as 2-channel 16-bit PCM.
audio = (melody*32768).astype(np.int16)    # scale [-1, 1] floats to int16 range
mono = np.reshape(audio, (len(audio), 1))
stereo = np.hstack((mono, mono))           # duplicate the mono channel
# Write the .wav file to stdout.
with wave.open(sys.stdout.buffer, 'wb') as file:
    file.setnchannels(2)
    file.setsampwidth(2)
    file.setframerate(freq)
    file.writeframes(stereo)
| {"/main.py": ["/synthesizer.py"]} |
47,420 | AkumaEX/MAC0317-EP1 | refs/heads/master | /synthesizer.py | import re
import numpy as np
class Synthesizer:
    """Renders an alphabetic-notation score into a PCM sample array.

    adsr: four "time level" lines (attack, decay, sustain, release), each a
          fraction of the note duration and a target amplitude level.
    freq: sampling rate in Hz.
    part: score lines; line 0 is a header, each following line holds notes
          followed by the duration in milliseconds.
    """
    def __init__(self, adsr, freq, part):
        self._adsr = adsr
        self._freq = freq
        self._part = part
        # Base frequency (Hz) of each pitch class at octave 0; octave n
        # multiplies by 2**n.
        def C(octave):
            return 16.352*2**int(octave)
        def Db(octave):
            return 17.324*2**int(octave)
        def D(octave):
            return 18.354*2**int(octave)
        def Eb(octave):
            return 19.445*2**int(octave)
        def E(octave):
            return 20.602*2**int(octave)
        def F(octave):
            return 21.827*2**int(octave)
        def Gb(octave):
            return 23.125*2**int(octave)
        def G(octave):
            return 24.500*2**int(octave)
        def Ab(octave):
            return 25.957*2**int(octave)
        def A(octave):
            return 27.500*2**int(octave)
        def Bb(octave):
            return 29.135*2**int(octave)
        def B(octave):
            return 30.868*2**int(octave)
        # Note-name lookup; sharps map to their enharmonic flats.
        self._pitch = {
            'C': C,
            'C#': Db,
            'Db': Db,
            'D': D,
            'D#': Eb,
            'Eb': Eb,
            'E': E,
            'F': F,
            'F#': Gb,
            'Gb': Gb,
            'G': G,
            'G#': Ab,
            'Ab': Ab,
            'A': A,
            'A#': Bb,
            'Bb': Bb,
            'B': B,
        }
    def _get_freq(self, note):
        """Receives an alphabetic note (e.g. 'A4') and returns its frequency in Hz."""
        # re.split with a capture group yields [name, octave, '']; unpacking the
        # trailing '' into an empty-list target asserts nothing follows the octave.
        [tone, octave, []] = re.split('(-?\d+)', note)
        return self._pitch[tone](octave)
    def _get_amp(self, T):
        """Receives the note duration T (seconds) and returns its ADSR amplitude envelope."""
        num_samples = int(self._freq*T)
        a = np.linspace(start=0, stop=0, endpoint=True, num=num_samples)  # zero-filled buffer
        # Each ADSR line gives (segment duration fraction, target level).
        ta, la = np.float_(self._adsr[0].split())
        td, ld = np.float_(self._adsr[1].split())
        ts, ls = np.float_(self._adsr[2].split())
        tr, lr = np.float_(self._adsr[3].split())
        t0 = 0
        # attack: linear ramp 0 -> la
        t1 = int(num_samples*ta)
        t = np.linspace(start=0, stop=ta*T, endpoint=True,
                        num=int(num_samples*ta))
        a[t0:t1] = t * (la/(ta*T))
        # decay: linear ramp la -> ld
        t2 = int(num_samples*td) + t1
        t = np.linspace(start=0, stop=td*T, endpoint=True,
                        num=int(num_samples*td))
        a[t1:t2] = t * (ld-la)/(td*T) + la
        # sustain: linear ramp ld -> ls
        t3 = int(num_samples*ts) + t2
        t = np.linspace(start=0, stop=ts*T, endpoint=True,
                        num=int(num_samples*ts))
        a[t2:t3] = t * (ls-ld)/(ts*T) + ld
        # release: linear ramp ls -> lr
        t4 = int(num_samples*tr) + t3
        t = np.linspace(start=0, stop=tr*T, endpoint=True,
                        num=int(num_samples*tr))
        a[t3:t4] = t * (lr-ls)/(tr*T) + ls
        return a
    def _get_sample(self, note, T):
        """Receives a note and its duration and returns its sampled waveform."""
        num_samples = int(self._freq*T)
        t = np.linspace(start=0, stop=T, endpoint=True, num=num_samples)
        k = self._get_freq(note)           # tone frequency in Hz
        a = self._get_amp(T)               # ADSR envelope
        return a * np.sin(2*np.pi*k*t)     # enveloped sine wave
    def get_melody(self):
        """Returns the sampled melody: all score notes rendered and concatenated."""
        melody = []
        for i in range(1, len(self._part)):  # skip the header line
            seq = self._part[i].split()
            # Last field is the duration in ms; strip any trailing newline.
            T = int(re.split('(\n)', seq[len(seq)-1])[0])/1000
            # NOTE(review): notes on one line are concatenated sequentially, not
            # mixed as a chord — confirm that is the intended semantics.
            for j in range(0, len(seq)-1):
                note = seq[j]
                sample = self._get_sample(note, T)
                melody = np.concatenate((melody, sample))
        return melody
| {"/main.py": ["/synthesizer.py"]} |
47,422 | TomHuix/fine_tuning | refs/heads/master | /main_train.py | from fine_tuning_bert import main
from fine_tune_sbert import train_sbert
#############################################
# Pipeline configuration.
corpus_input_file = 'data/corpus.txt'        # raw text corpus for BERT fine-tuning
bert_output_dir = 'output/bert'              # where the fine-tuned BERT lands
model_type = 'bert'
model_name_or_path = 'bert-base-uncased'     # HuggingFace checkpoint to start from
sbert_output_dir = 'output/sbert'            # final Sentence-BERT output
#############################################
# Fine-tune BERT on the corpus, then wrap it as a Sentence-BERT model.
main(corpus_input_file, bert_output_dir, model_type, model_name_or_path)
train_sbert(bert_output_dir, sbert_output_dir)
| {"/main_train.py": ["/fine_tune_sbert.py"], "/make_test_sick.py": ["/cleaner.py", "/encoders.py"]} |
47,423 | TomHuix/fine_tuning | refs/heads/master | /encoders.py | ###This file represents all encoders used in this projet,
## An encoder uses 2 functions, encode: transform e string into encoded vector
## Train: train the model
## For the moment: tfidf, Doc2vec, skip-thoughts, Quick-thoughts, Word2Vec moyenne, Sbert
from nltk.tokenize import word_tokenize
from gensim.models.doc2vec import Doc2Vec, TaggedDocument
from sklearn.feature_extraction.text import TfidfVectorizer
from sentence_transformers import SentenceTransformer
import pandas as pd
import numpy as np
import re
from sklearn.metrics.pairwise import cosine_similarity
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from gensim.models import Word2Vec
from gensim.test.utils import common_texts
class encoder_tfidf:
    """TF-IDF sentence encoder backed by a scikit-learn pipeline."""

    def train(self, corpus):
        """Fit count + tf-idf stages over the corpus vocabulary."""
        print("[SYSTEME] train tfidf")
        vocab = np.unique(word_tokenize(' '.join(corpus)))
        counter = CountVectorizer(vocabulary=vocab)
        self.pipe = Pipeline([('count', counter), ('tfid', TfidfTransformer())]).fit(corpus)

    def encode(self, text):
        """Return the dense tf-idf vector of a single string."""
        return self.pipe.transform([text]).toarray()[0]
class encoder_Doc2vec:
    """Doc2Vec sentence encoder with a manually decayed learning rate."""
    def __init__(self, vec_size, max_epochs):
        # vec_size: embedding dimensionality; max_epochs: outer training passes.
        self.model = Doc2Vec(vector_size=vec_size, batch_words=50)
        self.max_epochs = max_epochs
    def train(self, corpus):
        """Build the vocabulary and run max_epochs training passes."""
        print("[SYSTEME] train d2v")
        # One TaggedDocument per text, tagged with its corpus index.
        tagged_data = [TaggedDocument(words=word_tokenize(str(_d).lower()), tags=[str(i)]) for i, _d in enumerate(corpus)]
        self.model.build_vocab(tagged_data)
        for _ in range(self.max_epochs):
            self.model.train(tagged_data,
                total_examples=self.model.corpus_count,
                epochs=self.model.epochs)
            # Linearly decay the learning rate after each outer pass.
            self.model.alpha -= 0.0002
            self.model.min_alpha = self.model.alpha
    def encode(self, text):
        # NOTE(review): infer_vector expects a *token list*; passing [text]
        # treats the whole string as one token — confirm this is intended
        # (word_tokenize(text) is the usual call).
        return(self.model.infer_vector([text]))
class mean_word2vec:
    """Sentence encoder: mean of the word2vec vectors of a text's tokens."""

    def __init__(self, output_size, window, workers, sg):
        # output_size: embedding dim; window/workers/sg forwarded to Word2Vec
        # (sg=1 selects skip-gram, sg=0 CBOW).
        self.output_size = output_size
        self.window = window
        self.workers = workers
        self.sg = sg

    def train(self, corpus):
        """Train a gensim Word2Vec model on the tokenized corpus."""
        print("[SYSTEME] train w2v")
        corpus = [word_tokenize(sentence) for sentence in corpus]
        self.model = Word2Vec(corpus, size=self.output_size, window=self.window,
                              min_count=1, workers=self.workers, sg=self.sg)

    def encode(self, text):
        """Return the mean vector of the in-vocabulary tokens of `text`.

        Out-of-vocabulary tokens are skipped. Note np.mean of an empty list
        (all tokens OOV) yields NaN with a warning, as before.
        """
        vectors = []
        for word in word_tokenize(text):
            try:
                vectors.append(self.model.wv[word])
            except KeyError:
                # BUG FIX: was a bare `except` with a no-op tuple literal `()`,
                # which also swallowed unrelated errors; only OOV lookups
                # (KeyError) should be ignored.
                pass
        return np.mean(vectors, axis=0)
#class quick_thoughts:
class encoder_sbert:
    """Sentence-BERT encoder using the pretrained 'bert-base-nli-mean-tokens'."""

    def train(self, corpus):
        """Load the pretrained model; the corpus argument is not used."""
        print("[SYSTEME] train sbert")
        self.model = SentenceTransformer('bert-base-nli-mean-tokens')

    def encode(self, text):
        """Embed a single string and return its vector."""
        embeddings = self.model.encode([text])
        return embeddings[0]
| {"/main_train.py": ["/fine_tune_sbert.py"], "/make_test_sick.py": ["/cleaner.py", "/encoders.py"]} |
47,424 | TomHuix/fine_tuning | refs/heads/master | /fine_tune_sbert.py |
from torch.utils.data import DataLoader
import math
from sentence_transformers import models, losses
from sentence_transformers import SentencesDataset, LoggingHandler, SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator
from sentence_transformers.readers import *
import logging
from datetime import datetime
import os
from get_data import import_dataset
def load_dataset():
    """Fetch AllNLI + STS benchmark data (downloading if absent) and return readers."""
    have_nli = os.path.exists("data/AllNLI")
    have_sts = os.path.exists("data/stsbenchmark")
    if not (have_nli and have_sts):
        import_dataset('data')
    return NLIDataReader('data/AllNLI'), STSDataReader('data/stsbenchmark')
def train_sbert(model_name, model_save_path):
    """Fine-tune a BERT checkpoint into a Sentence-BERT model.

    model_name: path/name of the BERT model to wrap (e.g. the output of the
        BERT fine-tuning step).
    model_save_path: directory where the trained SentenceTransformer is saved.

    Trains with a softmax classification loss on AllNLI, validates on the
    STS-benchmark dev split during training, then reloads the saved model
    and evaluates it on the STS test split.
    """
    batch_size = 16
    nli_reader, sts_reader = load_dataset()
    train_num_labels = nli_reader.get_num_labels()
    # Use BERT for mapping tokens to embeddings
    word_embedding_model = models.BERT(model_name)
    # Apply mean pooling to get one fixed sized sentence vector
    pooling_model = models.Pooling(word_embedding_model.get_word_embedding_dimension(),
        pooling_mode_mean_tokens=True,
        pooling_mode_cls_token=False,
        pooling_mode_max_tokens=False)
    model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
    # Convert the dataset to a DataLoader ready for training
    logging.info("Read AllNLI train dataset")
    train_data = SentencesDataset(nli_reader.get_examples('train.gz'), model=model)
    train_dataloader = DataLoader(train_data, shuffle=True, batch_size=batch_size)
    train_loss = losses.SoftmaxLoss(model=model, sentence_embedding_dimension=model.get_sentence_embedding_dimension(), num_labels=train_num_labels)
    logging.info("Read STSbenchmark dev dataset")
    dev_data = SentencesDataset(examples=sts_reader.get_examples('sts-dev.csv'), model=model)
    dev_dataloader = DataLoader(dev_data, shuffle=False, batch_size=batch_size)
    evaluator = EmbeddingSimilarityEvaluator(dev_dataloader)
    # Configure the training
    num_epochs = 1
    warmup_steps = math.ceil(len(train_dataloader) * num_epochs * 0.1) #10% of train data for warm-up
    logging.info("Warmup-steps: {}".format(warmup_steps))
    # Train the model
    model.fit(train_objectives=[(train_dataloader, train_loss)],
        evaluator=evaluator,
        epochs=num_epochs,
        evaluation_steps=1000,
        warmup_steps=warmup_steps,
        output_path=model_save_path
        )
    # Reload the best saved checkpoint and score it on the STS test split.
    model = SentenceTransformer(model_save_path)
    test_data = SentencesDataset(examples=sts_reader.get_examples("sts-test.csv"), model=model)
    test_dataloader = DataLoader(test_data, shuffle=False, batch_size=batch_size)
    evaluator = EmbeddingSimilarityEvaluator(test_dataloader)
    model.evaluate(evaluator)
| {"/main_train.py": ["/fine_tune_sbert.py"], "/make_test_sick.py": ["/cleaner.py", "/encoders.py"]} |
47,425 | TomHuix/fine_tuning | refs/heads/master | /cleaner.py | import swifter
import pandas as pd
import string
import functools
import re
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
import timeit
import nltk
from nltk.corpus import stopwords, wordnet
from nltk.tokenize import word_tokenize
import string
from sklearn.feature_extraction.text import CountVectorizer
import functools
import operator
import plotly.express as px
foldl = lambda func, acc, xs: functools.reduce(func, xs, acc)
import matplotlib.pyplot as plt
import timeit
import spacy
import json
import pickle
import plotly
nlp = spacy.load('en_core_web_lg')  # large English model with NER, used by clean_words
foldl = lambda func, acc, xs: functools.reduce(func, xs, acc)  # left fold (also bound above; this rebinding wins)
# NOTE: this rebinding shadows the imported nltk `stopwords` corpus module.
stopwords = set(stopwords.words('english'))
def lowercasing(df, output_column):
    """Lower-case every value of `output_column` in place (swifter-parallel)."""
    df[output_column] = df[output_column].swifter.apply(str.lower)
def replace_values(df, output_column):
    """Mask e-mails, phone numbers and dates with placeholder tokens, then drop digits.

    The (pattern, replacement) pairs are folded over each text in order; the
    final rule deletes all remaining digits, so it must stay last.
    """
    df[output_column] = df[output_column].swifter.apply(lambda x : foldl(lambda a,b: re.sub(b[0], b[1], a), x, [("( .*@.* )", " xxemail "),
        (r"(\d{3}[-\.\s]??\d{3}[-\.\s]??\d{4}|\(\d{3}\)\s*\d{3}[-\.\s]??\d{4}|\d{3}[-\.\s]??\d{4})", "xxphone"),
        ("([1-9] |1[0-9]| 2[0-9]|3[0-1])(.|-)([1-9] |1[0-2])(.|-|)20[0-9][0-9]", "xxdate"),
        ("[0-9]", "")]))
def clean_header(df, output_column):
    """Strip newsgroup-style header lines (From:, Subject:, ...) from each text.

    Each header pattern is applied in sequence via a left fold, deleting the
    whole matched line up to and including its newline.
    """
    df[output_column] = df[output_column].swifter.apply(lambda x : foldl(lambda a,b: re.sub(b, "", a), x, ["From: .*\n", "Subject: .*\n",
        "Organization: .*\n", "Lines: .*\n", "To: .*\n", "NNTP-Posting-Host: .*\n", "Nntp-Posting-Host: .*\n",
        "Article-I.D.: .*\n", "Reply-To: .*\n"]))
def replace_syn(corpus, dic):
    """Replace each non-stopword token by a canonical synonym.

    `dic` maps word -> canonical form and is grown lazily from WordNet: the
    first time a token is seen, all lemma names of its first synset are
    mapped to that token. The updated dictionary is pickled to
    dict_word_cleaner.pkl for reuse across runs.

    Returns a list of token lists, one per input text.
    """
    result = []
    for text in corpus:
        current_text = []
        tokens = word_tokenize(text)
        for token in tokens:
            if token not in stopwords:
                syn = dic.get(token, -1)
                if syn == -1: #synonym is not in the dictionnary
                    try:
                        for syn in wordnet.synsets(token)[0].lemma_names():
                            dic[syn] = token
                        dic[token] = token
                        # BUG FIX: this previously appended the undefined name
                        # `word`; the resulting NameError was silently eaten by
                        # a bare `except`, so the first occurrence of every
                        # valid token was dropped from the output.
                        current_text.append(token)
                    except IndexError:
                        # Token has no WordNet synsets: leave it out, as before.
                        # (Narrowed from a bare `except`; a missing nltk corpus
                        # now raises instead of being silently ignored.)
                        pass
                else:
                    current_text.append(syn)
        result.append(current_text)
    with open("dict_word_cleaner.pkl", "wb") as fh:
        pickle.dump(dic, fh)
    return result
def clean_syn(df, output_column):
    """Canonicalize synonyms in `output_column`, persisting the synonym dict.

    Loads the cached word->canonical dictionary when available, otherwise
    starts empty; afterwards the column holds token lists.
    """
    try:
        dic = pickle.load(open( "dict_word_cleaner.pkl", "rb" ))
    except (OSError, pickle.UnpicklingError, EOFError):
        # No cache (or a corrupt one): start from scratch, as before.
        dic = {}
    # BUG FIX: `df.column.values` looked up a literal column named "column"
    # (AttributeError unless such a column exists); the function's own
    # parameter names the column to clean.
    df[output_column] = replace_syn(df[output_column].values, dic)
def replace(word, word_type):
    """Map a spaCy entity label to its placeholder token; pass other words through."""
    placeholders = {
        "CARDINAL": "xxnumber",
        "DATE": "xxdate",
        "QUANTITY": "xxquantity",
        "TIME": "xxtime",
        "PERCENT": "xxpercent",
        "MONEY": "xxmoney",
        "PERSON": "xxperson",
        "ORG": "xxorg",
    }
    return placeholders.get(word_type, word)
def clean_words(df, output_column):
    """Run NER over df[output_column] and mask recognised entities in place."""
    def mask_entities(text):
        return ' '.join(str(replace(tok, tok.ent_type_)) for tok in nlp(text))

    df[output_column] = df[output_column].swifter.apply(mask_entities)
def remove_punctuation(df, output_column):
    """Delete every ASCII punctuation character from df[output_column], in place."""
    def strip_punct(text):
        for ch in string.punctuation:
            text = text.replace(ch, "")
        return text

    df[output_column] = df[output_column].swifter.apply(strip_punct)
def remove_sw(df, output_column):
    """Tokenise df[output_column] and drop English stopwords (leaves token lists)."""
    def drop_stopwords(text):
        return [tok for tok in word_tokenize(text) if tok not in stopwords]

    df[output_column] = df[output_column].swifter.apply(drop_stopwords)
def remove_number(df, output_column):
    """Delete all digit characters from df[output_column], in place."""
    strip_digits = functools.partial(re.sub, '[0-9]', '')
    df[output_column] = df[output_column].swifter.apply(strip_digits)
def simple_clean(df, column_to_clean, output_column):
    """Basic pipeline: lower-case, strip punctuation and digits, drop stopwords.

    Copies column_to_clean into output_column, cleans that copy in place,
    and returns the (mutated) dataframe.
    """
    df[output_column] = df[column_to_clean]
    for step in (lowercasing, remove_punctuation, remove_number, remove_sw):
        step(df, output_column)
    return df
def medium_clean(df, column_to_clean, output_column):
    """Intermediate pipeline: header removal, lower-casing, value masking,
    punctuation removal and synonym collapsing.

    Copies column_to_clean into output_column, cleans that copy in place,
    and returns the (mutated) dataframe.
    """
    df[output_column] = df[column_to_clean]
    # BUG FIX: clean_header used to run on column_to_clean AFTER the copy was
    # taken, so e-mail headers survived in output_column and the source
    # column was mutated as a side effect. Clean the output copy instead.
    clean_header(df, output_column)
    lowercasing(df, output_column)
    replace_values(df, output_column)
    remove_punctuation(df, output_column)
    clean_syn(df, output_column)
    return df
def complet_clean(df, column_to_clean, output_column):
    """Full pipeline: entity masking plus the medium-clean style steps.

    Copies column_to_clean into output_column, cleans that copy in place,
    and returns the (mutated) dataframe.
    """
    df[output_column] = df[column_to_clean]
    for step in (clean_words, lowercasing, replace_values,
                 remove_punctuation, clean_syn):
        step(df, output_column)
    return df
| {"/main_train.py": ["/fine_tune_sbert.py"], "/make_test_sick.py": ["/cleaner.py", "/encoders.py"]} |
47,426 | TomHuix/fine_tuning | refs/heads/master | /make_test_sick.py | import pandas as pd
import swifter
from cleaner import simple_clean
from encoders import encoder_Doc2vec, encoder_tfidf, mean_word2vec, encoder_sbert
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
import plotly
import plotly.graph_objects as go
from reducer_dim import reduce_dim
def load():
    """Load the pre-pickled SICK sentence-pair dataframe from disk."""
    return pd.read_pickle("data/sick.pkl")
def clean(df):
    """Clean both sentence columns (simple_clean mutates df in place)."""
    simple_clean(df, "sentence_a", "clean_a")
    simple_clean(df, "sentence_b", "clean_b")
def encode(df, params):
    """Train every encoder on the joined corpus and embed both sentence columns.

    `params` is a list of (encoder, column_prefix) pairs; for each pair two
    new columns "<prefix>_a" / "<prefix>_b" are added with the embeddings.
    """
    sentences_b = [' '.join(tokens) for tokens in df.clean_b.values]
    sentences_a = [' '.join(tokens) for tokens in df.clean_a.values]
    corpus = np.concatenate([sentences_b, sentences_a], axis=None)
    for algo, prefix in params:
        print("column", prefix)
        algo.train(corpus)
        df[prefix + "_a"] = df.clean_a.swifter.apply(lambda toks: algo.encode(' '.join(toks)))
        df[prefix + "_b"] = df.clean_b.swifter.apply(lambda toks: algo.encode(' '.join(toks)))
def find_cosine_similarity(df, params):
    """Add a "<prefix>_cos_score" column per encoder: cosine similarity of the pair."""
    for _, prefix in params:
        df[prefix + "_cos_score"] = df.swifter.apply(
            lambda row: cosine_similarity([row[prefix + "_a"]], [row[prefix + "_b"]])[0][0],
            axis=1,
        )
def plot(df, params):
    """Write one scatter plot (gold score vs. cosine score) per encoder to html/."""
    for _, prefix in params:
        scatter = go.Scatter(
            x=df.score.values,
            y=df[prefix + "_cos_score"].values,
            name=prefix,
            mode="markers",
        )
        plotly.offline.plot(go.Figure(data=[scatter]),
                            filename='html/sick_' + prefix + '.html')
# Script entry point: evaluate each sentence-embedding model on the SICK
# benchmark and emit one scatter plot per model.
df = load()
clean(df)
# (encoder instance, output column prefix) pairs to evaluate.
params = [
    (encoder_sbert(), "sbert"),
    (encoder_Doc2vec(100, 100), "doc2vec"),
    (mean_word2vec(100, 5, 3, 0), "w2v_cbow"),
    (mean_word2vec(100, 5, 3, 1), "w2v_skip"),
    (encoder_tfidf(), "tfidf"),
]
encode(df, params)
find_cosine_similarity(df, params)
plot(df, params)
| {"/main_train.py": ["/fine_tune_sbert.py"], "/make_test_sick.py": ["/cleaner.py", "/encoders.py"]} |
47,506 | yvson18/Imobiliaria-fastapi-Postsql-crud | refs/heads/main | /Modelos.py | from pydantic import BaseModel
class Endereco(BaseModel):
    """Postal address nested inside every property payload.

    All fields are optional free-text strings.
    NOTE(review): fields are annotated `str` with a `None` default —
    presumably relying on pydantic v1's implicit-Optional behaviour; confirm
    before migrating to pydantic v2, where this is rejected.
    """
    rua: str = None
    cidade: str = None
    estado: str = None
    pais: str = None
    cep: str = None  # postal code
    numero: str = None
    complemento: str = None
class Imobiliaria(BaseModel):
    """Full property record as stored in and returned by the API."""
    endereco: Endereco
    anuncio: str = None  # listing / advertisement text
    area_construida: str = None  # built area, kept as text
    num_de_quartos: str = None  # number of bedrooms, kept as text
    imovel_id: str  # server-generated id (UUID string)
    create_at: str  # insertion timestamp rendered as text
class Imobiliaria_User_Entry(BaseModel):
    """Client-supplied payload for creating a property.

    Same fields as Imobiliaria minus `imovel_id`/`create_at`, which the
    server generates on insert.
    """
    endereco: Endereco
    anuncio: str = None
    area_construida: str = None
    num_de_quartos: str = None
class Imovel_Delete(BaseModel):
    """Request body for the delete endpoint: just the target property id."""
    imovel_id: str
class Imovel_Update(BaseModel):
    """Request body for the update endpoint: target id plus replacement values."""
    imovel_id: str
    endereco: Endereco
    anuncio: str = None
    area_construida: str = None
    num_de_quartos: str = None
| {"/main.py": ["/Modelos.py", "/PgDb.py"]} |
47,507 | yvson18/Imobiliaria-fastapi-Postsql-crud | refs/heads/main | /PgDb.py | import databases, sqlalchemy
# SECURITY: database credentials are hard-coded in source — move them to
# environment variables or a secrets manager before deploying.
usuario_posgre = "userimob"
senha = "userimob1337"
ip = "localhost" # keep localhost so the local connection keeps working
porta = "5432"
# Connection string shared by the async `databases` client and SQLAlchemy.
DATABASE_URL = f"postgresql://{usuario_posgre}:{senha}@{ip}:{porta}/dbproimob"
database = databases.Database(DATABASE_URL) # async connection handle used by the FastAPI app
metadata = sqlalchemy.MetaData()
# Property-listings table. NOTE(review): the variable is named `users` but
# it stores imoveis (properties) — presumably a leftover name.
users = sqlalchemy.Table(
    "Tabela_Imo",
    metadata,
    sqlalchemy.Column("imovel_id" , sqlalchemy.String,primary_key= True),
    sqlalchemy.Column("create_at" ,sqlalchemy.String),
    sqlalchemy.Column("endereco" ,sqlalchemy.JSON),  # nested address stored as JSON
    sqlalchemy.Column("anuncio" ,sqlalchemy.String),
    sqlalchemy.Column("area_construida" ,sqlalchemy.String),
    sqlalchemy.Column("num_de_quartos" ,sqlalchemy.String),
)
engine = sqlalchemy.create_engine(DATABASE_URL) # sync engine, used only for the DDL below
metadata.create_all(engine)  # create the table at import time if it does not exist
| {"/main.py": ["/Modelos.py", "/PgDb.py"]} |
47,508 | yvson18/Imobiliaria-fastapi-Postsql-crud | refs/heads/main | /main.py | from fastapi import FastAPI, HTTPException
from Modelos import Imobiliaria,Imobiliaria_User_Entry,Imovel_Delete,Imovel_Update
from PgDb import database,users
import uuid
import datetime
app = FastAPI()  # application instance all route decorators below attach to
#-------------------------------on_vent faz uma func rodar antes da app iniciar, porem n sei a utilidade aqui ---------------------------------------
@app.on_event("startup")
async def startup():
    """Open the shared database connection when the application starts."""
    await database.connect()
@app.on_event("shutdown")
async def shutdown():
    """Close the shared database connection when the application stops."""
    await database.disconnect()
#----------------------------------------------------------------------------------------------------------------------------------------------------
@app.get("/imoveis")
async def read_item():
    """Return every property record in the table.

    NOTE(review): the name says "item" but this lists ALL rows, with no
    pagination — confirm that is acceptable for the expected table size.
    """
    query = users.select()
    return await database.fetch_all(query)
@app.get("/imoveis/{imovel_id}", response_model=Imobiliaria)
async def busca_imovel_pelo_id(imovel_id: str):
    """Fetch a single property by its id (returns null when no row matches).

    BUG FIX: the route used to declare the path parameter as {user_ID} while
    the function expected `imovel_id`, so FastAPI treated `imovel_id` as a
    required query parameter and the path segment was never bound.
    """
    query = users.select().where(users.c.imovel_id == imovel_id)
    return await database.fetch_one(query)
@app.post("/imovel-insert", response_model=Imobiliaria)
async def inserir_imovel(imovel: Imobiliaria_User_Entry):
    """Insert a new property and return it with its generated id and timestamp."""
    # SECURITY FIX: uuid1 embeds the host MAC address and a timestamp in the
    # identifier; uuid4 is fully random and leaks nothing. Both yield unique
    # string ids, so callers are unaffected.
    gID = str(uuid.uuid4())
    gDate = str(datetime.datetime.now())  # naive local time, stored as text
    query = users.insert().values(
        imovel_id = gID,
        create_at = gDate,
        endereco = imovel.endereco.dict(),
        anuncio = imovel.anuncio,
        area_construida = imovel.area_construida,
        num_de_quartos = imovel.num_de_quartos
    )
    await database.execute(query)
    # Echo the stored record back without a second round-trip to the DB.
    return {
        "imovel_id": gID,
        "create_at": gDate,
        **imovel.dict()
    }
@app.put("/imoveis-update")
async def update_imoveis(imovel: Imovel_Update):
    """Overwrite the mutable fields of an existing property.

    Returns the freshly-read record so the client sees the stored state.
    Removed an unused `gDate` local: it was computed on every call but never
    written anywhere (create_at is left untouched by design).
    """
    query = (
        users.update()
        .where(users.c.imovel_id == imovel.imovel_id)
        .values(
            endereco=imovel.endereco.dict(),
            anuncio=imovel.anuncio,
            area_construida=imovel.area_construida,
            num_de_quartos=imovel.num_de_quartos,
        )
    )
    await database.execute(query)
    return await busca_imovel_pelo_id(imovel.imovel_id)
@app.delete("/imoveis/{imovelId}",tags=["Imoveis"])
async def delete_imovel(imovel:Imovel_Delete):
    """Delete a property identified by the id carried in the request body.

    NOTE(review): the path declares {imovelId} but it is never bound to a
    parameter — the id actually used comes from the body, and the path
    segment is ignored. Confirm which one clients rely on.
    """
    query = users.delete().where(users.c.imovel_id == imovel.imovel_id)
    await database.execute(query)
    # Always reports success, even when no row matched the given id.
    return {
        "status" : True,
        "message": "Esse imovel foi deletado com sucesso."
    }
| {"/main.py": ["/Modelos.py", "/PgDb.py"]} |
47,511 | RushikeshGholap/Projects | refs/heads/master | /Django/django_sinha/imagepred/core/models.py | from django.db import models
# Create your models here.
class Document(models.Model):
    """An uploaded image together with a short free-text description."""
    des = models.CharField(max_length=100)  # description, max 100 chars
    image = models.ImageField(upload_to='media/')  # stored under MEDIA_ROOT/media/
| {"/Django/django_sinha/imagepred/core/forms.py": ["/Django/django_sinha/imagepred/core/models.py"], "/Django/django_sinha/imagepred/core/views.py": ["/Django/django_sinha/imagepred/core/forms.py"]} |
47,512 | RushikeshGholap/Projects | refs/heads/master | /Django/Code/mysite/polls/views.py | from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
import nltk
def index(request):
    """Plain-text landing page for the polls app."""
    greeting = "Hello, world. You're at the polls index."
    return HttpResponse(greeting)
@csrf_exempt
def caps(request):
    """Echo the POSTed `inputtext` field back verbatim.

    NOTE(review): despite the name, no upper-casing is applied — the input
    is returned unchanged. Confirm whether `inputtext.upper()` was intended.
    A missing `inputtext` key raises and yields an HTTP 500.
    """
    inputtext = request.POST['inputtext']
    return HttpResponse(inputtext)
| {"/Django/django_sinha/imagepred/core/forms.py": ["/Django/django_sinha/imagepred/core/models.py"], "/Django/django_sinha/imagepred/core/views.py": ["/Django/django_sinha/imagepred/core/forms.py"]} |
47,513 | RushikeshGholap/Projects | refs/heads/master | /Random/Optimization.py | # -*- coding: utf-8 -*-
"""
Created on Tue Jul 11 14:52:17 2017
@author: Akshay
"""
#Optimization problems
# MIN -> x1*x4*(x1+x2+x3)+x3
# x1x2x3x4 >=25
# Sum of squares of all x1 to x4 is 40
import numpy as np
from scipy.optimize import minimize
#Objective
def objective(x):
    """Objective to minimise: x1*x4*(x1+x2+x3) + x3 (1-based variable names)."""
    x1, x2, x3, x4 = x[0], x[1], x[2], x[3]
    return x1 * x4 * (x1 + x2 + x3) + x3
#Constraints
def constraint1(x):
    """Inequality constraint: the product x1*x2*x3*x4 must be >= 25."""
    product = x[0] * x[1] * x[2] * x[3]
    return product - 25
def constraint2(x):
    """Equality constraint: the sum of squares of the four variables must be 40."""
    return 40 - sum(x[i] ** 2 for i in range(4))
# Constraint specifications in the dict format expected by SLSQP.
con1 = {'type': 'ineq', 'fun': constraint1}
con2 = {'type': 'eq', 'fun': constraint2}
cons = [con1,con2]
#Bounds
# Each of the four variables is restricted to [1, 5].
b = (1.0, 5.0)
bnds = (b,b,b,b)
#Optimizer
# Initial guess; it satisfies the bounds but not necessarily the constraints.
x0=[1,5,7,1]
sol = minimize(objective, x0, method = 'SLSQP', bounds = bnds, constraints = cons)
print(sol.x)
print(sol)
| {"/Django/django_sinha/imagepred/core/forms.py": ["/Django/django_sinha/imagepred/core/models.py"], "/Django/django_sinha/imagepred/core/views.py": ["/Django/django_sinha/imagepred/core/forms.py"]} |
47,514 | RushikeshGholap/Projects | refs/heads/master | /Random/sentiment_analysis.py | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 7 01:31:29 2017
@author: Akshay
"""
import requests
from bs4 import BeautifulSoup
import pandas as pd
# Scrape world-news headlines and article bodies from moneycontrol.com
# into a dataframe with one row per article.
url = 'http://www.moneycontrol.com/news/world/'
z = requests.get(url)
htmltext = z.text
soup = BeautifulSoup(z.text,'lxml')
# The listing page keeps its news feed in a left-floated container.
newsfeed=soup.find(class_="fleft")
headers1=newsfeed.find_all(class_="clearfix")
headers2=[]
content2 = []
# Follow each headline link (skipping index 0) and collect paragraph text.
for i in range(1,len(headers1)):
    content1=''
    headers2.append(headers1[i].a["title"])
    urlx = headers1[i].a["href"]
    pagex = requests.get(urlx)
    htmlx = pagex.text
    soupx = BeautifulSoup(htmlx,'lxml')
    contentx = soupx.find(class_="arti-flow")
    paragraphx = contentx.find_all("p")
    for j in paragraphx:
        try:
            content1 = content1+'\n'+j.string
        except:
            # NOTE(review): any paragraph whose .string is None (nested
            # markup) wipes ALL text accumulated so far, not just that
            # paragraph — confirm this is intended.
            content1 = ''
    content2.append(content1)
df = pd.DataFrame({'header':headers2,'content':content2})
# NOTE(review): this final expression is evaluated and discarded — likely a
# leftover from interactive exploration.
df["header"].iloc[1]
| {"/Django/django_sinha/imagepred/core/forms.py": ["/Django/django_sinha/imagepred/core/models.py"], "/Django/django_sinha/imagepred/core/views.py": ["/Django/django_sinha/imagepred/core/forms.py"]} |
47,515 | RushikeshGholap/Projects | refs/heads/master | /Django/django_sinha/imagepred/core/forms.py | from django import forms
from .models import *
class DocumentForm(forms.ModelForm):
    """Upload form for a Document: the image file plus its description."""
    class Meta:
        model = Document
        fields = ['image','des']
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.