index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
46,219 | jaydenwindle/YAK-server | refs/heads/master | /test_project/test_app/migrations/0005_auto_20180314_1653.py | # Generated by Django 2.0 on 2018-03-14 16:53
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """
    Auto-generated (Django 2.0, 2018-03-14).

    - Orders User by username, descending.
    - Makes every photo/thumbnail ImageField optional (blank + null).
    - Refreshes the username field's help text and regex validator.
    """

    dependencies = [
        ('test_app', '0004_auto_20150605_2118'),
    ]

    operations = [
        # Default queryset ordering for User becomes reverse-alphabetical by username
        migrations.AlterModelOptions(
            name='user',
            options={'ordering': ['-username']},
        ),
        # The six AlterField operations below relax image fields to blank/null
        # and pin their upload_to directories.
        migrations.AlterField(
            model_name='article',
            name='thumbnail',
            field=models.ImageField(blank=True, null=True, upload_to='article_photos/thumbnail/'),
        ),
        migrations.AlterField(
            model_name='post',
            name='thumbnail',
            field=models.ImageField(blank=True, null=True, upload_to='post_photos/thumbnail/'),
        ),
        migrations.AlterField(
            model_name='user',
            name='large_photo',
            field=models.ImageField(blank=True, null=True, upload_to='user_photos/large/'),
        ),
        migrations.AlterField(
            model_name='user',
            name='original_photo',
            field=models.ImageField(blank=True, null=True, upload_to='user_photos/original/'),
        ),
        migrations.AlterField(
            model_name='user',
            name='small_photo',
            field=models.ImageField(blank=True, null=True, upload_to='user_photos/small/'),
        ),
        migrations.AlterField(
            model_name='user',
            name='thumbnail',
            field=models.ImageField(blank=True, null=True, upload_to='user_photos/thumbnail/'),
        ),
        # Username: unique, max 30 chars, validated against the standard Django
        # username character class.
        migrations.AlterField(
            model_name='user',
            name='username',
            field=models.CharField(help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, unique=True, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username.', 'invalid')], verbose_name='username'),
        ),
    ]
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,220 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_notifications/migrations/0002_auto_20141220_0018.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """
    Auto-generated second migration for rest_notifications: wires up the
    foreign keys from PushwooshToken / NotificationSetting / Notification to
    the user model, NotificationType, and ContentType, and enforces one
    NotificationSetting per (notification_type, user) pair.
    """

    dependencies = [
        # swappable_dependency lets the FK target whatever AUTH_USER_MODEL is configured
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
        ('rest_notifications', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='pushwooshtoken',
            name='user',
            field=models.ForeignKey(related_name='pushwoosh_tokens', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='notificationsetting',
            name='notification_type',
            field=models.ForeignKey(related_name='user_settings', to='rest_notifications.NotificationType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='notificationsetting',
            name='user',
            field=models.ForeignKey(related_name='notification_settings', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        # Each user gets at most one setting row per notification type
        migrations.AlterUniqueTogether(
            name='notificationsetting',
            unique_together=set([('notification_type', 'user')]),
        ),
        # Notification targets an arbitrary object via (content_type, object_id)
        migrations.AddField(
            model_name='notification',
            name='content_type',
            field=models.ForeignKey(to='contenttypes.ContentType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='notification',
            name='notification_type',
            field=models.ForeignKey(related_name='notifications', to='rest_notifications.NotificationType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        # reporter/user are nullable: system-generated notifications may lack either party
        migrations.AddField(
            model_name='notification',
            name='reporter',
            field=models.ForeignKey(related_name='notifications_sent', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='notification',
            name='user',
            field=models.ForeignKey(related_name='notifications_received', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
    ]
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,221 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_social_network/serializers.py | from django.contrib.auth import get_user_model
from rest_framework import serializers
from yak.rest_social_network.models import Tag, Comment, Follow, Flag, Share, Like
from yak.rest_user.serializers import UserSerializer
User = get_user_model()
class LikedMixin(object):
    """Serializer mixin exposing the pk of the requesting user's Like of an object."""

    def get_liked_id(self, obj):
        """Return the pk of the current user's Like of ``obj``, or ``None``."""
        request = self.context['request']
        if not request.user.is_authenticated:
            return None
        try:
            content_type = self.get_content_type(obj)
            return Like.objects.get(content_type=content_type, user=request.user, object_id=obj.pk).pk
        except Like.DoesNotExist:
            return None
class FollowedMixin(object):
    """Serializer mixin exposing the pk of the requesting user's Follow of an object."""

    def get_follow_id(self, obj):
        """
        Indicate whether or not the logged in user is following a given object
        (e.g., another user). Returns the id of the Follow object so it can be
        deleted to unfollow the object; ``None`` when anonymous or not following.
        """
        # Hoist the context lookup once, matching LikedMixin.get_liked_id
        request = self.context['request']
        if request.user.is_authenticated:
            try:
                content_type = self.get_content_type(obj)
                return request.user.following.get(content_type=content_type, object_id=obj.pk).pk
            except Follow.DoesNotExist:
                pass
        return None
class TagSerializer(serializers.ModelSerializer):
    """Serializer for Tag, exposing only ``name`` and ``id``."""

    class Meta:
        model = Tag
        fields = ('name', 'id')
class CommentSerializer(serializers.ModelSerializer):
    """Serializer for Comment; exposes every model field except ``related_tags``."""

    class Meta:
        model = Comment
        exclude = ('related_tags',)

    def __init__(self, *args, **kwargs):
        """
        The `user` field is added here to help with recursive import issues mentioned in rest_user.serializers
        """
        super().__init__(*args, **kwargs)
        self.fields["user"] = UserSerializer(
            read_only=True, default=serializers.CurrentUserDefault()
        )
class FollowSerializer(serializers.ModelSerializer):
    """Serializer for Follow rows, rendering both ends as nested users."""

    # The Follow model stores the follower on its `user` FK
    follower = UserSerializer(read_only=True, source="user")
    following = serializers.SerializerMethodField('get_user_follow')

    class Meta:
        model = Follow
        fields = ['id', 'follower', 'following', 'created', 'content_type', 'object_id']

    def get_user_follow(self, obj):
        # NOTE(review): Follow is generic, but this assumes the followed object
        # is always a User — a non-user target would raise User.DoesNotExist
        # here. Confirm callers only use this serializer for user follows.
        user = User.objects.get(pk=obj.object_id)
        serializer = UserSerializer(user, context={'request': self.context.get('request')})
        return serializer.data
class ShareSerializer(serializers.ModelSerializer):
    """Serializer exposing every Share field; ``user`` defaults to the requester."""

    user = UserSerializer(read_only=True, default=serializers.CurrentUserDefault())

    class Meta:
        model = Share
        fields = '__all__'
class LikeSerializer(serializers.ModelSerializer):
    """Serializer exposing every Like field; ``user`` defaults to the requester."""

    user = UserSerializer(read_only=True, default=serializers.CurrentUserDefault())

    class Meta:
        model = Like
        fields = '__all__'
class FlagSerializer(serializers.ModelSerializer):
    """Serializer exposing every Flag field; ``user`` defaults to the requester."""

    user = UserSerializer(read_only=True, default=serializers.CurrentUserDefault())

    class Meta:
        model = Flag
        fields = '__all__'
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,222 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_core/fields.py | from rest_framework.fields import get_attribute
from rest_framework import relations
from rest_framework.reverse import reverse
class GenericHyperlinkedRelatedField(relations.PrimaryKeyRelatedField):
    """PK-related field that renders as a hyperlink to the target's default detail route."""

    def get_attribute(self, instance):
        return get_attribute(instance, self.source_attrs)

    def to_representation(self, value):
        # Convention: a model named Foo is served by the "foos-detail" view.
        view_name = "{}s-detail".format(value._meta.object_name.lower())
        relative_url = reverse(view_name, kwargs={'pk': value.pk})
        request = self.context.get('request', None)
        if request is None:
            return relative_url
        return request.build_absolute_uri(relative_url)
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,223 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_notifications/management/commands/add_missing_notification_settings.py | from django.contrib.auth import get_user_model
from django.core.management import BaseCommand
from yak.rest_notifications.models import NotificationSetting, NotificationType
__author__ = 'rudolphmutter'
User = get_user_model()
class Command(BaseCommand):
    """Management command that backfills NotificationSetting rows for all users."""

    args = ''
    help = 'Creates missing notification settings for existing users'

    def handle(self, *args, **options):
        # Ensure every (user, notification type) pair has a settings row;
        # get_or_create makes the command safe to re-run.
        for account in User.objects.all():
            for notification_kind in NotificationType.objects.all():
                NotificationSetting.objects.get_or_create(
                    notification_type=notification_kind, user=account
                )
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,224 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_social_auth/models.py | from django.db import models
from yak.rest_core.models import CoreModel
class SocialProvider(CoreModel):
    """
    Used as a relation on User model so users can pick certain default providers to share posts on
    TODO: Do we still need this? / I think FB recommends explicit sharing, not default sharing
    """
    # Short provider label, e.g. a social network name
    name = models.CharField(max_length=20)
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,225 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_social_network/models.py | import abc
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.fields import GenericRelation, GenericForeignKey
from django.contrib.sites.models import Site
from django.utils.baseconv import base62
import re
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models.signals import post_save
from yak.rest_core.models import CoreModel
from yak.rest_notifications.models import NotificationType
from yak.rest_user.models import AbstractYeti
from yak.settings import yak_settings
class FollowableModel(metaclass=abc.ABCMeta):
    """
    Interface for objects that can be followed.

    Concrete classes (registered or subclassed) must supply ``identifier()``
    and ``type()``.
    """

    @abc.abstractmethod
    def identifier(self):
        """Human-readable handle for the object."""
        return

    @abc.abstractmethod
    def type(self):
        """Short string naming the kind of followable object."""
        return
class Tag(CoreModel):
    """A unique hashtag-style label that other objects relate to and users can follow."""

    name = models.CharField(max_length=75, unique=True)

    class Meta:
        ordering = ['-created']

    def identifier(self):
        # FollowableModel interface: display form of the tag
        return "#{}".format(self.name)

    def type(self):
        return "tag"

    def __str__(self):
        # Fix: the original defined only `__unicode__`, a Python 2 hook that is
        # never called under Python 3 / Django 2, so tags rendered with the
        # default object repr. `__str__` is the Python 3 hook; keep
        # `__unicode__` as an alias for backward compatibility.
        return "{}".format(self.name)

    __unicode__ = __str__


FollowableModel.register(Tag)
def relate_tags(sender, **kwargs):
    """
    Intended to be used as a receiver function for a `post_save` signal on models that have tags
    Expects tags is stored in a field called 'related_tags' on implementing model
    and it has a parameter called TAG_FIELD to be parsed
    """
    # If we're saving related_tags, don't save again so we avoid duplicating notifications
    # NOTE(review): the condition below returns early when update_fields is
    # given and does NOT contain 'related_tags', which reads as the opposite of
    # the comment above — confirm the intended guard. The re-save at the bottom
    # passes no update_fields, so the second signal pass finds no new tags and
    # recursion terminates.
    if kwargs['update_fields'] and 'related_tags' not in kwargs['update_fields']:
        return
    changed = False
    # Get the text of the field that holds tags. If there is no field specified, use an empty string. If the field's
    # value is None, use an empty string.
    message = getattr(kwargs['instance'], sender.TAG_FIELD, '') or ''
    for tag in re.findall(r"#[a-zA-Z0-9_-]+", message):
        # Strip the leading '#' to get the stored tag name
        tag_obj, created = Tag.objects.get_or_create(name=tag[1:])
        if tag_obj not in kwargs['instance'].related_tags.all():
            kwargs['instance'].related_tags.add(tag_obj)
            changed = True
    if changed:
        # Re-save so downstream receivers see the updated tag relations
        kwargs['instance'].save()
def mentions(sender, **kwargs):
    """
    Intended to be used as a receiver function for a `post_save` signal on models that have @mentions
    Implementing model must have an attribute TAG_FIELD where @mentions are stored in raw form
    This function creates notifications but does not associate mentioned users with the created model instance
    """
    try:
        from yak.rest_notifications.models import create_notification
    except ImportError:
        # Notifications app not installed: mentions are a no-op
        return
    if kwargs['created']:
        # Get the text of the field that holds tags. If there is no field specified, use an empty string. If the field's
        # value is None, use an empty string.
        message = getattr(kwargs['instance'], sender.TAG_FIELD, '') or ''
        content_object = getattr(kwargs['instance'], 'content_object', kwargs['instance'])
        # Hoisted out of the loop: get_user_model() is invariant per call
        User = get_user_model()
        for mention in re.findall(r"@[a-zA-Z0-9_.]+", message):
            try:
                # mention[1:] strips the leading '@'
                receiver = User.objects.get(username=mention[1:])
                mention_type = NotificationType.objects.get(slug="mention")
                create_notification(receiver, kwargs['instance'].user, content_object, mention_type)
                # Note that for a Comment, this means the notification is associated with the object commented on,
                # not the comment itself
            except User.DoesNotExist:
                # Mentioned name doesn't match a user: silently skip
                pass
class Comment(CoreModel):
    """A user comment attached to any model instance via a generic foreign key."""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()
    # Field parsed for #tags and @mentions by the signal receivers below
    TAG_FIELD = 'description'
    related_tags = models.ManyToManyField(Tag, blank=True)
    description = models.TextField()
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="comments", on_delete=models.CASCADE)

    class Meta:
        ordering = ['created']


# Parse @mentions and #hashtags out of the comment text on every save
post_save.connect(mentions, sender=Comment)
post_save.connect(relate_tags, sender=Comment)
# Allows a user to 'follow' objects
class Follow(CoreModel):
    """Generic 'user follows object' relation; one row per (user, target)."""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="following", on_delete=models.CASCADE)

    @property
    def object_type(self):
        # Human-readable name of the followed object's model
        return self.content_type.name

    @property
    def name(self):
        # object must be registered with FollowableModel
        return self.content_object.identifier()

    class Meta:
        unique_together = (("user", "content_type", "object_id"),)
        ordering = ['created']
class Like(CoreModel):
    """Generic 'user likes object' relation; one row per (user, target)."""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = GenericForeignKey()
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="likes", on_delete=models.CASCADE)

    class Meta:
        unique_together = (("user", "content_type", "object_id"),)
# Flag an object for review
class Flag(CoreModel):
    """Generic 'user flags object for review' relation; one row per (user, target)."""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    # NOTE(review): unlike Comment/Follow/Like, object_id here has no db_index —
    # confirm whether that is intentional or an omission.
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey()
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="flags", on_delete=models.CASCADE)

    class Meta:
        unique_together = (("user", "content_type", "object_id"),)
class Share(CoreModel):
    """A user sharing an arbitrary object with a set of other users."""
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey()
    # Recipients of the share; distinct from the sharing user below.
    shared_with = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name='shared_with')
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="shares", on_delete=models.CASCADE)
class AbstractSocialYeti(AbstractYeti):
    """Abstract user base adding follow/follower helpers on top of AbstractYeti."""
    # Reverse generic relation: Follow rows whose content_object is this user.
    follows = GenericRelation(Follow)

    class Meta:
        abstract = True

    def user_following(self):
        # Follow rows created by this user that target other *users*
        # (filters out follows of non-user objects).
        return self.following.filter(
            content_type=ContentType.objects.get(app_label=yak_settings.USER_APP_LABEL, model=yak_settings.USER_MODEL)
        )

    def user_followers(self):
        # Follow rows created by other users that target this user.
        return Follow.objects.filter(
            content_type=ContentType.objects.get(app_label=yak_settings.USER_APP_LABEL, model=yak_settings.USER_MODEL),
            object_id=self.pk
        )

    def user_following_count(self):
        return self.user_following().count()

    def user_followers_count(self):
        return self.user_followers().count()

    def identifier(self):
        # Display name used by the Follow.name property.
        return "{}".format(self.username)

    def type(self):
        return "user"
class BaseSocialModel(models.Model):
    """
    This is an abstract model to be inherited by the main "object" being used in feeds on a social media application.
    It expects that object to override the methods below.
    """
    class Meta:
        abstract = True

    def url(self):
        # Public short URL for this object: http://<site>/<base62(pk)>/
        current_site = Site.objects.get_current()
        return "http://{0}/{1}/".format(current_site.domain, base62.encode(self.pk))

    def facebook_og_info(self):
        # Subclasses must return Facebook Open Graph data, e.g.:
        # return {'action': '', 'object': '', 'url': self.url()}
        raise NotImplementedError("This has not been implemented")

    def create_social_message(self, provider):
        # Subclasses must build the share text for the given social provider.
        raise NotImplementedError("This has not been implemented")
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,226 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_notifications/utils.py | import json
import requests
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.utils.html import strip_tags
from django.utils.module_loading import import_string
from pypushwoosh import constants
from pypushwoosh.client import PushwooshClient
from yak.settings import yak_settings
def submit_to_pushwoosh(request_data):
    """POST the prepared JSON payload to Pushwoosh's createMessage endpoint.

    Returns the parsed JSON response body.
    """
    endpoint = 'https://cp.pushwoosh.com/json/1.3/createMessage'
    reply = requests.post(endpoint, data=request_data, headers=PushwooshClient.headers)
    return reply.json()
def send_pushwoosh_notification(receiver, message, deep_link=None):
    """Build a Pushwoosh createMessage request for every registered device
    token of *receiver* and submit it immediately.
    """
    device_tokens = [t.token for t in receiver.pushwoosh_tokens.all()]
    notification = {
        'content': message,
        'send_date': constants.SEND_DATE_NOW,
        'devices': device_tokens,
        'ios_badges': '+1',  # bump the iOS app badge by one
    }
    if deep_link is not None:
        # Deliver the raw link (disable Pushwoosh link minimization).
        notification['minimize_link'] = 0
        notification['link'] = deep_link
    payload = {'request': {
        'notifications': [notification],
        'auth': yak_settings.PUSHWOOSH_AUTH_TOKEN,
        'application': yak_settings.PUSHWOOSH_APP_CODE,
    }}
    return submit_to_pushwoosh(json.dumps(payload))
def send_push_notification(receiver, message, deep_link=None):
    """Dispatch a push notification through the handler configured in YAK settings.

    Bug fix: the handler was previously always called with ``deep_link=None``,
    silently discarding the caller's deep link. Forward the argument instead.
    """
    notification_handler = import_string(yak_settings.PUSH_NOTIFICATION_HANDLER)
    return notification_handler(receiver, message, deep_link=deep_link)
def send_email_notification(receiver, message, reply_to=None):
    """Email *message* (HTML) to the receiver with a plain-text fallback part."""
    extra_headers = {}
    if reply_to:
        extra_headers['Reply-To'] = reply_to
    plain_body = strip_tags(message)
    email = EmailMultiAlternatives(
        yak_settings.EMAIL_NOTIFICATION_SUBJECT,
        plain_body,
        settings.DEFAULT_FROM_EMAIL,
        [receiver.email],
        headers=extra_headers,
    )
    email.attach_alternative(message, "text/html")
    email.send()
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,227 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_notifications/__init__.py | __author__ = 'rudy'
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,228 | jaydenwindle/YAK-server | refs/heads/master | /test_project/test_app/tests/test_syntax.py | from yak.rest_core.test import YAKSyntaxTest
class SyntaxTest(YAKSyntaxTest):
    """Runs YAK's project-wide syntax check as a single test case."""

    def test_syntax(self):
        # check_syntax is provided by YAKSyntaxTest.
        self.check_syntax()
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,229 | jaydenwindle/YAK-server | refs/heads/master | /test_project/urls.py | from django.contrib import admin
from django.conf.urls import url, include
from rest_framework import routers
from .test_app.api.views import ProjectUserViewSet, PostViewSet
# DRF router for the project-specific viewsets.
# NOTE(review): `base_name` was renamed to `basename` in DRF 3.9 and removed
# in 3.11 — confirm the pinned DRF version before upgrading.
router = routers.DefaultRouter()
router.register(r'users', ProjectUserViewSet, base_name='users')
router.register(r'posts', PostViewSet, base_name='posts')

# Flat list of every v1 API endpoint; mounted under /api/v1/ below.
api_v1 = [
    # Project-specific views
    url(r'^', include(router.urls)),

    # Auth views
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    url(r'^', include('social_django.urls', namespace='social')),

    # Library views
    url(r'^', include('yak.rest_user.urls')),
    url(r'^', include('yak.rest_social_auth.urls')),
    url(r'^', include('yak.rest_social_network.urls')),
    url(r'^', include('yak.rest_notifications.urls')),
]

admin.autodiscover()

urlpatterns = [
    # Change the admin prefix here to use an alternate URL for the
    # admin interface, which would be marginally more secure.
    url(r'^admin/', admin.site.urls),
]

# Version all API endpoints under /api/v1/.
urlpatterns += [
    url(r'^api/v1/', include(api_v1)),
]
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,230 | jaydenwindle/YAK-server | refs/heads/master | /yak/rest_social_network/migrations/0002_auto_20141220_0018.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Wires the generic-relation models (Share, Like, Follow, Flag, Comment)
    to ContentType and the swappable user model, and adds their uniqueness
    constraints.

    Historical migration — the operations below must not be edited.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
        ('rest_social_network', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='share',
            name='shared_with',
            field=models.ManyToManyField(related_name='shared_with', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='share',
            name='user',
            field=models.ForeignKey(related_name='shares', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='share',
            unique_together=set([('user', 'content_type', 'object_id')]),
        ),
        migrations.AddField(
            model_name='like',
            name='content_type',
            field=models.ForeignKey(to='contenttypes.ContentType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='like',
            name='user',
            field=models.ForeignKey(related_name='likes', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='like',
            unique_together=set([('user', 'content_type', 'object_id')]),
        ),
        migrations.AddField(
            model_name='follow',
            name='content_type',
            field=models.ForeignKey(to='contenttypes.ContentType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='follow',
            name='user',
            field=models.ForeignKey(related_name='following', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='follow',
            unique_together=set([('user', 'content_type', 'object_id')]),
        ),
        migrations.AddField(
            model_name='flag',
            name='content_type',
            field=models.ForeignKey(to='contenttypes.ContentType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='flag',
            name='user',
            field=models.ForeignKey(related_name='flags', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='flag',
            unique_together=set([('user', 'content_type', 'object_id')]),
        ),
        migrations.AddField(
            model_name='comment',
            name='content_type',
            field=models.ForeignKey(to='contenttypes.ContentType', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='comment',
            name='related_tags',
            # NOTE(review): null=True has no effect on ManyToManyField, but is
            # kept verbatim because this migration is historical.
            field=models.ManyToManyField(to='rest_social_network.Tag', null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='comment',
            name='user',
            field=models.ForeignKey(related_name='comments', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
            preserve_default=True,
        ),
    ]
| {"/yak/rest_core/test.py": ["/yak/settings.py"], "/yak/rest_social_auth/backends/yak_twitter.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_instagram.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_social_auth/backends/yak_soundcloud.py": ["/yak/rest_social_auth/backends/base.py"], "/test_project/test_app/tests/test_social.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_social_network/models.py"], "/test_project/test_app/tests/factories.py": ["/test_project/test_app/models.py", "/yak/rest_social_network/models.py"], "/yak/rest_social_auth/serializers.py": ["/yak/rest_user/serializers.py"], "/yak/rest_user/serializers.py": ["/yak/rest_core/serializers.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_facebook.py": ["/yak/rest_social_auth/backends/base.py", "/yak/settings.py"], "/test_project/test_app/api/serializers.py": ["/test_project/test_app/models.py", "/yak/rest_core/serializers.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py"], "/test_project/test_app/tests/test_notifications.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py", "/yak/rest_notifications/utils.py", "/yak/settings.py"], "/yak/rest_social_auth/views.py": ["/yak/rest_social_auth/serializers.py", "/yak/rest_social_auth/utils.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/serializers.py": ["/yak/rest_user/serializers.py", "/yak/settings.py"], "/test_project/test_app/tests/test_user.py": ["/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/yak/rest_social_network/views.py": ["/yak/rest_social_network/models.py", "/yak/rest_social_network/serializers.py", "/yak/rest_user/serializers.py", "/yak/rest_user/views.py"], "/yak/rest_notifications/views.py": ["/yak/rest_core/permissions.py", 
"/yak/rest_notifications/serializers.py", "/yak/rest_social_network/views.py", "/yak/settings.py"], "/yak/rest_social_auth/backends/yak_tumblr.py": ["/yak/rest_social_auth/backends/base.py"], "/yak/rest_user/views.py": ["/yak/rest_core/permissions.py", "/yak/rest_user/serializers.py", "/yak/rest_user/utils.py"], "/test_project/test_app/tests/test_permissions.py": ["/test_project/test_app/models.py", "/test_project/test_app/tests/factories.py", "/yak/rest_core/test.py"], "/test_project/test_app/api/views.py": ["/test_project/test_app/models.py", "/yak/rest_social_auth/views.py", "/yak/rest_social_network/views.py", "/test_project/test_app/api/serializers.py"], "/test_project/test_app/models.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/utils.py"], "/yak/rest_social_network/serializers.py": ["/yak/rest_social_network/models.py", "/yak/rest_user/serializers.py"], "/yak/rest_social_network/models.py": ["/yak/rest_user/models.py", "/yak/settings.py"], "/yak/rest_notifications/utils.py": ["/yak/settings.py"], "/test_project/test_app/tests/test_syntax.py": ["/yak/rest_core/test.py"], "/test_project/urls.py": ["/test_project/test_app/api/views.py"]} |
46,299 | mtbui2010/manage_scripts | refs/heads/master | /distribute_package.py | from make_binary_package import make_binary_package
import getopt, sys, os
# Default run configuration used by the __main__ block below.
PKG_DIR = '/mnt/workspace/000_demo_packaging'  # package directory to distribute
PKG_NAME = 'ketitestlib'                       # package name shown in logs / pip hint
REBINARY = True                                # passed through to make_binary_package
MAKE_PYI = True                                # passed through to make_binary_package
CHANGE = 'bug'                                 # which version component to bump
def distribute_protect_package_pypi(pkg_dir, pkg_name='', rebinarize=True, make_pyi=True, change='Bug'):
cwd = os.getcwd()
os.chdir(pkg_dir)
# check version
version_file = 'VERSION'
if not os.path.exists(version_file): version='1.0.0'
else: f = open(version_file, 'r'); version=f.read().replace('\n', ''); f.close(); os.remove(version_file)
print(f'{"+" * 10} package {pkg_name}: current version {version}')
# upgrade version
major, minor, bug = [int(p) for p in version.split('.')]
if change.lower()=='major': major+=1
if change.lower()=='minor': minor+=1
if change.lower()=='bug': bug+=1
version = f'{major}.{minor}.{bug}'
f = open(version_file, 'w'); f.write(version); f.close()
print(f'{"+" * 10} change to version {version}')
make_binary_package(pkg_dir=pkg_dir,pkg_name=pkg_name, rebinarize=rebinarize, make_pyi=make_pyi)
os.chdir(f'{pkg_dir}_binary')
os.system('python3 -m twine upload dist/*')
print(f'{"+" * 10} package {pkg_name}_v{version} uploaded to pypi >> "pip install {pkg_name}" to install')
os.chdir(cwd)
def get_args(argv):
    """Parse command-line options for distribute_package.py.

    argv -- argument list (typically sys.argv[1:])

    Returns a dict mapping option letter ('p','n','b','i','c') to its value,
    with defaults applied for options not given. Prints the usage guide and
    exits when parsing fails or a required option ('-p') is missing.

    Fixes: bare ``exit()`` (an interactive-shell helper) replaced with
    ``sys.exit()``; string/dict construction made idiomatic.
    """
    options = ['p', 'n', 'b', 'i', 'c']
    describes = ['package_path[str]', 'package_name[str]', 'rebinarize[True/False]',
                 'make_pyi[True/False]', 'change[Major/Minor/Bug]']
    default_values = [None, '', True, True, 'Bug']
    requires = ['p']

    # getopt spec: bare -h plus one ':'-suffixed (value-taking) flag per option.
    opt_str = 'h' + ''.join(f'{o}:' for o in options)
    usage_guide = 'python3 distribute_package.py' + ''.join(
        f' -{o} <{d}>' for o, d in zip(options, describes))

    args = dict(zip(options, default_values))
    try:
        opts, _ = getopt.getopt(argv, opt_str)
    except getopt.GetoptError:
        print(usage_guide)
        sys.exit()
    given = [opt[1:] for opt, _ in opts]  # option letters with the '-' stripped
    for r in requires:
        if r not in given:
            print(usage_guide)
            sys.exit()
    for opt, v in opts:
        args[opt[1:]] = v
    return args
if __name__ == "__main__":
# args = get_args(sys.argv[1:])
# distribute_package_pypi(pkg_dir=args['p'], pkg_name=args['n'],rebinarize=args['b'],
# make_pyi=args['i'], change=args['c'])
distribute_protect_package_pypi(pkg_dir=PKG_DIR, pkg_name=PKG_NAME, rebinarize=REBINARY,
make_pyi=MAKE_PYI, change=CHANGE)
| {"/distribute_package.py": ["/make_binary_package.py"]} |
46,300 | mtbui2010/manage_scripts | refs/heads/master | /make_binary_package_and_git.py | import shutil, os
from glob import glob
from setuptools import setup
from Cython.Build import cythonize
from Cython.Distutils import build_ext
import numpy as np
# +++++++++++++++++++++++++++++++++ CONFIGURATIONS
package_dir = '/mnt/workspace/001_grasp_detection_package'  # source package to binarize
rebinarize = True        # re-run cythonization of all .py modules
upload_to_git = True     # push the binary package after building
git_msg = 'commit'       # commit message passed to commit_git.sh
git_link = 'https://github.com/mtbui2010/kpick_binary.git'  # target remote
binary_package_dir = '{}_binary'.format(package_dir)        # output copy of the tree
# Flag forwarded to commit_git.sh -r; presumably controls whether the existing
# .git directory is dropped before committing — TODO confirm in the script.
if rebinarize: remove_git = 'remove_git'
else: remove_git = 'keep_git'
if rebinarize:
    # +++++++++++++++++++++++++++++++++ convert .py to binary
    # Work on a fresh copy: <package>_binary mirrors the source tree, then each
    # .py (minus __init__.py) is cythonized in place and the source removed.
    if os.path.exists(binary_package_dir): shutil.rmtree(binary_package_dir, ignore_errors=True)
    shutil.copytree(package_dir, binary_package_dir)
    py_module_paths = glob(os.path.join(binary_package_dir, '**/*.py'), recursive=True)
    # Unique directories containing .py files; the package root itself is skipped.
    py_dirs = np.unique([os.path.split(path)[0] for path in py_module_paths]).tolist()
    py_dirs.remove(binary_package_dir)
    for py_dir in py_dirs: # cythonize .py files
        # os.chdir(binary_package_dir)
        # py_dirs are absolute (derived from binary_package_dir), so the
        # repeated chdir calls do not compound relative paths.
        os.chdir(py_dir)
        if os.path.exists('__init__.py'): os.remove('__init__.py')
        py_module_paths_ = [p for p in glob('*.py', recursive=True)]
        if len(py_module_paths_)==0: continue
        setup_path = 'setup.py' # make setup file
        if os.path.exists(setup_path): os.remove(setup_path)
        with open(setup_path, 'w') as f:
            f.write('from setuptools import setup\n')
            f.write('from Cython.Build import cythonize\n')
            f.write('setup(ext_modules = cythonize({}))'.format(py_module_paths_))
        os.system('python3 {} build_ext --inplace'.format(setup_path))
        # os.remove(setup_path)  # remove setup file
        if os.path.exists('build'): shutil.rmtree('build')
        # Delete the now-compiled .py sources; the built extensions remain.
        [os.remove(p) for p in py_module_paths_]
if upload_to_git:
    # +++++++++++++++++++++++++++++++++ upload to git
    # commit_git.sh is expected inside the binary package directory.
    os.chdir(binary_package_dir)
    git_command = './commit_git.sh -m "{}" -l "{}" -r "{}"'.format(git_msg, git_link, remove_git)
    print(git_command)
    os.system(git_command)

# # +++++++++++++++++++++++++++++++++ upload to pypi
# os.chdir(pkg_dir)
# os.system('python3 setup.py sdist bdist_wheel')
# os.system('python3 -m twine upload dist/*')
| {"/distribute_package.py": ["/make_binary_package.py"]} |
46,301 | mtbui2010/manage_scripts | refs/heads/master | /upload_to_git.py | import os
from shutil import rmtree
import getpass
def upload_to_git(git_path, update_dir, username=None, password=None):
    """Run commit_git.sh inside *update_dir* to push its contents to *git_path*.

    When both credentials are supplied they are embedded into the remote URL
    (https://user:pass@host/...). The caller's working directory is restored
    before returning.
    """
    original_cwd = os.getcwd()
    if username is not None and password is not None:
        # Rebuild the URL with inline basic-auth credentials.
        bare_url = git_path.replace('https://', '')
        git_path = 'https://{}:{}@{}'.format(username, password, bare_url)
    command = './commit_git.sh -m "commit" -l "{}"'.format(git_path)
    os.chdir(update_dir)
    print(os.getcwd())
    os.system(command)
    print('{} completed'.format('+' * 5))
    os.chdir(original_cwd)
def upload_to_gits_auth(git_paths, update_dirs):
    """Prompt once for Git credentials, then push each update dir to its repo."""
    username = input('Git ID?')
    password = getpass.getpass(prompt='Git password?')
    num_git = len(git_paths)
    for j, (git_path, update_dir) in enumerate(zip(git_paths, update_dirs)):
        print('{} [{}/{}] Giting push from {} to {}'.format('+' * 10, j, num_git, update_dir, git_path))
        upload_to_git(git_path, update_dir, username=username, password=password)
if __name__=='__main__':
    # Repositories to push, paired index-by-index with update_dirs below.
    git_paths = [
        'https://github.com/mtbui2010/ikea',
        'https://github.com/KETI-AN/ikeacv',
        'https://github.com/mtbui2010/ttdet_demo',
        'https://github.com/mtbui2010/ttdet',
        'https://github.com/mtbui2010/ttcv',
        'https://github.com/mtbui2010/detectron2',
    ]
    # Local working copies whose contents are committed and pushed.
    update_dirs = [
        '/mnt/workspace/001_ikea',
        '/mnt/workspace/001_ikeacv',
        '/mnt/workspace/000_ttdet_demo',
        '/mnt/workspace/000_ttdet',
        '/mnt/workspace/000_ttcv_simple',
        '/mnt/workspace/000_detectron2',
    ]
    upload_to_gits_auth(git_paths, update_dirs)
| {"/distribute_package.py": ["/make_binary_package.py"]} |
46,302 | mtbui2010/manage_scripts | refs/heads/master | /make_binary_package.py | import os
import shutil
from glob import glob
from distutils.dir_util import copy_tree
# Module-level switches consumed by the __main__ entry point below:
#   rebinarize: rebuild the compiled extensions and produce a wheel.
#   make_pyi:   regenerate .pyi stub files via stubgen.
rebinarize = False
make_pyi = False
def iter_make_pyi(dir_path=''):
    """Recursively run stubgen over *dir_path*, generating .pyi stubs in '.'.

    Subdirectories whose names contain 'cache' (e.g. __pycache__) are skipped.
    """
    os.system(f'stubgen {os.path.join(dir_path, "*.py")} -o .')
    for entry in os.listdir(dir_path):
        child = os.path.join(dir_path, entry)
        if os.path.isdir(child) and 'cache' not in entry:
            iter_make_pyi(child)
def make_binary_package(pkg_dir, pkg_name='', rebinarize=True, make_pyi=True):
    """Build a binary (compiled-extension) distribution of the package at
    *pkg_dir* into a sibling directory '<pkg_dir>_binary'.

    :param pkg_dir: root directory of the source package.
    :param pkg_name: package subdirectory name used for stubgen / .pyi lookup.
    :param rebinarize: when True, build extensions in place and make a wheel.
    :param make_pyi: when True, regenerate .pyi stubs via iter_make_pyi.
    """
    if not os.path.isdir(pkg_dir):
        print(f'{"+"*10} {pkg_dir} is not a dir >> return')
        return
    bn_pkg_dir = f'{pkg_dir}_binary'
    if rebinarize or make_pyi:
        # if os.path.exists(bn_pkg_dir):
        #     #shutil.rmtree(bn_pkg_dir, ignore_errors=True)
        #     os.system(f'sudo rm -rf {bn_pkg_dir}')
        # shutil.copytree(pkg_dir, bn_pkg_dir)
        # copy_tree merges into an existing directory (unlike shutil.copytree),
        # so repeated builds reuse bn_pkg_dir.
        os.makedirs(bn_pkg_dir, exist_ok=True)
        copy_tree(pkg_dir, bn_pkg_dir)
        print(f'{"+"*10} {pkg_dir} coppied to {bn_pkg_dir}')
        cwd = os.getcwd()
        os.chdir(bn_pkg_dir)
        print(f'{"+" * 10} change workdir to {bn_pkg_dir}')
        if make_pyi:
            iter_make_pyi(pkg_name)
            print(f'{"+" * 10} .pyi files made for {bn_pkg_dir}')
        if rebinarize:
            # Compile extension modules in place via the package's setup.py.
            os.system('python3 setup.py build_ext --inplace')
            print(f'{"+" * 10} binary files built')
            # Locate the build output directory (assumes a Linux build host).
            lib_dir = [p for p in os.listdir('build') if p.startswith('lib.linux')][0]
            lib_dir = os.path.join('build', lib_dir)
            # Ship the .pyi stubs alongside the compiled modules.
            pyifiles = glob(os.path.join(pkg_name, '**/*.pyi'), recursive=True)
            for src in pyifiles: shutil.copyfile(src,os.path.join(lib_dir, src))
            print(f'{"+" * 10} pyi files copied to built lib')
            if os.path.exists('dist'):
                os.system('rm -rf dist/*')
            os.system('python3 setup.py bdist_wheel')
            print(f'{"+" * 10} distribute wheel made')
            # Retag the wheel as manylinux2014 by renaming the file.
            # NOTE(review): this only renames; it does not audit the binary
            # for manylinux compliance (auditwheel would) — confirm intent.
            wheelfile = glob(os.path.join('dist', '*.whl'))[0]
            os.rename(wheelfile, wheelfile.replace('-linux_', '-manylinux2014_'))
            # Defensive cleanup: after the rename the old path normally no
            # longer exists, so this is usually a no-op.
            if os.path.exists(wheelfile): os.remove(wheelfile)
            print(f'{"+" * 10} rename wheel file to support manylinux')
        os.chdir(cwd)
        print(f'{"+" * 10} change workdir to {cwd}')
if __name__=='__main__':
    # Build a binary distribution of the kpick package, driven by the
    # module-level rebinarize/make_pyi switches defined above.
    make_binary_package(pkg_dir='/mnt/workspace/001_kpick', pkg_name='kpick',
                        rebinarize=rebinarize, make_pyi=make_pyi)
| {"/distribute_package.py": ["/make_binary_package.py"]} |
46,303 | mtbui2010/manage_scripts | refs/heads/master | /download_update_from_git.py | import os
from shutil import rmtree
import getpass
def download_update_from_git(git_path, update_dir, git_dir='tmp', ignores=None, username=None, password=None):
    """Clone *git_path* into scratch dir *git_dir*, copy its top-level
    entries into *update_dir*, then delete the scratch clone.

    :param git_path: HTTPS URL of the repository to clone.
    :param update_dir: destination directory receiving the repo contents.
    :param git_dir: scratch directory for the clone; recreated on each call.
    :param ignores: optional list of top-level entries to skip; '.git' and
        '.idea' are always skipped in addition. The caller's list is NOT
        modified (the previous `ignores += [...]` mutated it in place —
        a mutable-argument aliasing bug).
    :param username: optional Git user name embedded in the clone URL.
    :param password: optional Git password embedded in the clone URL.
        NOTE(review): credentials become visible in the process table.
    """
    if os.path.exists(git_dir):
        rmtree(git_dir)
    os.makedirs(git_dir, exist_ok=True)
    if username is None or password is None:
        git_command = 'git clone --recursive {} {}'.format(git_path, git_dir)
    else:
        git_path = git_path.replace('https://', '')
        git_command = 'git clone --recursive https://{}:{}@{} {}'.format(username, password, git_path, git_dir)
    os.system(git_command)
    print('{} downloaded into {}'.format(git_path, git_dir))
    # Build a fresh list so the caller's `ignores` argument is never mutated.
    if ignores is None:
        ignores = ['.git', '.idea']
    else:
        ignores = list(ignores) + ['.git', '.idea']
    for item in os.listdir(git_dir):
        if item in ignores:
            continue
        cp_source = os.path.join(git_dir, item)
        cp_dest = update_dir
        os.system('cp -r {} {}'.format(cp_source, cp_dest))
        print('{} copied to {}'.format(cp_source, cp_dest))
    rmtree(git_dir)
    print('{} completed'.format('+' * 5))
def download_update_from_gits_auth(git_paths, update_dirs, ignoress=None):
    """Prompt once for Git credentials, then clone each repo and update its dir."""
    if ignoress is None:
        ignoress = [None] * len(git_paths)
    username = input('Git ID?')
    password = getpass.getpass(prompt='Git password?')
    num_git = len(git_paths)
    for j, (git_path, update_dir, ignores) in enumerate(zip(git_paths, update_dirs, ignoress)):
        print('{} [{}/{}] Giting {} and updating {}'.format('+' * 10, j, num_git, git_path, update_dir))
        download_update_from_git(git_path, update_dir, ignores=ignores, username=username, password=password)
if __name__=='__main__':
    # Repositories to clone, paired index-by-index with update_dirs below.
    git_paths = [
        'https://github.com/mtbui2010/ikea',
        'https://github.com/KETI-AN/ikeacv',
        'https://github.com/mtbui2010/ttdet_demo',
        'https://github.com/mtbui2010/ttdet',
        'https://github.com/mtbui2010/ttcv',
        'https://github.com/mtbui2010/detectron2',
    ]
    # Local directories that receive the freshly cloned contents.
    update_dirs = [
        '/mnt/workspace/001_ikea',
        '/mnt/workspace/001_ikeacv',
        '/mnt/workspace/000_ttdet_demo',
        '/mnt/workspace/000_ttdet',
        '/mnt/workspace/000_ttcv_simple',
        '/mnt/workspace/000_detectron2',
    ]
    download_update_from_gits_auth(git_paths, update_dirs)
| {"/distribute_package.py": ["/make_binary_package.py"]} |
46,305 | lschuetze/ReBench | refs/heads/master | /rebench/model/measurement.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from datetime import datetime
from .run_id import RunId
class Measurement(object):
    """One measured value (with unit) for one criterion of a benchmark run."""

    # Timestamp format used by the as_str_list/from_str_list serialization.
    TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"

    def __init__(self, value, unit, run_id, criterion = 'total',
                 timestamp = None, line_number = None, filename = None):
        self._value = value
        self._unit = unit
        self._run_id = run_id
        self._criterion = criterion
        # Default to "now" when no explicit timestamp was recorded.
        self._timestamp = timestamp if timestamp else datetime.now()
        self._line_number = line_number
        self._filename = filename

    def is_total(self):
        """True when this measurement covers the overall 'total' criterion."""
        return self._criterion == 'total'

    @property
    def criterion(self):
        return self._criterion

    @property
    def value(self):
        return self._value

    @property
    def unit(self):
        return self._unit

    @property
    def timestamp(self):
        return self._timestamp

    @property
    def run_id(self):
        return self._run_id

    @property
    def filename(self):
        return self._filename

    @property
    def line_number(self):
        return self._line_number

    def as_str_list(self):
        """Serialize as a persistence row:
        [timestamp] value unit criterion + run-id columns."""
        number_format = "%f" if isinstance(self._value, float) else "%s"
        head = ["[" + self._timestamp.strftime(self.TIME_FORMAT) + "]",
                number_format % self.value,
                self._unit,
                self._criterion]
        return head + self._run_id.as_str_list()

    @classmethod
    def from_str_list(cls, data_store, str_list, line_number = None,
                      filename = None):
        """Inverse of as_str_list: rebuild a Measurement from a row."""
        parsed_time = datetime.strptime(str_list[0][1:-1], cls.TIME_FORMAT)
        run_id = RunId.from_str_list(data_store, str_list[4:])
        return Measurement(float(str_list[1]), str_list[2], run_id,
                           str_list[3], parsed_time, line_number, filename)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,306 | lschuetze/ReBench | refs/heads/master | /rebench/model/__init__.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Glossary:
data point:
a set of measurements belonging together.
generated by one specific run.
In some cases, a single run can produce multiple data points.
measurement:
one value for one specific criterion
virtual machine:
A named set of settings for the executor of a benchmark suite.
Typically, this is one specific virtual machine with a specific set of
startup parameters. It refers to an executable that will execute
benchmarks from suite. Thus, the virtual machine is the executor.
benchmark suite:
A set of benchmarks with a variety of parameters, i.e., dimension to be
explored by a benchmark.
benchmark:
A set of experiments based on one program to be executed.
The set is described by parameters, i.e., dimensions that are to be
explored.
run:
    A run is one specific experiment based on the selected
parameters, benchmark, benchmark suite, and virtual machine.
One run can generate multiple data points.
experiment:
Brings together benchmark suites, virtual machines, and their
various parameters.
"""
def value_or_list_as_list(value):
    """Normalize *value* to a list: lists pass through unchanged, None
    becomes [], and any other value is wrapped in a one-element list."""
    if value is None:
        return []
    # Deliberate exact type check (not isinstance) to preserve the original
    # semantics for list subclasses.
    if type(value) is list:
        return value
    return [value]
def value_with_optional_details(value, default_details = None):
    """Split a config entry into (value, details).

    A one-entry dict ``{value: details}`` yields that pair; any other value
    is returned together with *default_details*.
    """
    if type(value) is dict:
        assert len(value) == 1
        # next(iter(...)) works on both Python 2 and 3; the original
        # value.items()[0] fails on Python 3 where items() is a view.
        (value, details) = next(iter(value.items()))
    else:
        details = default_details
    return value, details
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,307 | lschuetze/ReBench | refs/heads/master | /rebench/tests/test-vm2.py | #!/usr/bin/env python
## simple script emulating a VM generating benchmark results
import sys
import time
import random
print "test-vm2.py: args=", sys.argv
print "RESULT-part1: ", random.uniform(100, 110)
print "RESULT-part2: ", random.uniform(400, 440)
print "RESULT-part3: ", random.uniform(200, 300)
print "RESULT-total: ", random.uniform(700, 850) | {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,308 | lschuetze/ReBench | refs/heads/master | /rebench/interop/epcc_adapter.py | # class EPCCPerformance(Performance):
# """EPCCPerformance is used to read the output of the EPCC barrier benchmarks.
# """
# barrier_time = re.compile(r"^BARRIER time =\s+([0-9\.E]+) microseconds(?:.+)")
# barrier_time2 = re.compile(r"\s*Total time without initialization\s+:?\s+([0-9]+)")
# barnes = re.compile(r"COMPUTETIME\s+=\s+([0-9]+)")
# re_error = re.compile("Error [^t][^o][^l]")
# barrier_time3 = re.compile(r"^BARRIER overhead =\s+([0-9\.E]+) microseconds(?:.+)")
#
# def parse_data(self, data):
# result = []
# time = None
#
# for line in data.split("\n"):
# if self.check_for_error(line):
# raise ResultsIndicatedAsInvalid("Output of bench program indicated error.")
# #import pdb; pdb.set_trace()
# m = self.barrier_time.match(line)
# if not m:
# m = self.barrier_time2.match(line)
# if not m:
# m = self.barnes.match(line)
# if not m:
# m = self.barrier_time3.match(line)
#
# if m:
# time = float(m.group(1))
# val = Measurement(time, None)
# result.append(val)
#
# if time is None:
# raise OutputNotParseable(data)
#
# return (time, result) | {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,309 | lschuetze/ReBench | refs/heads/master | /rebench/model/benchmark_suite.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from . import value_or_list_as_list
import logging
class BenchmarkSuite(object):
    """Configuration of one benchmark suite, specialized for a concrete VM.

    Values come from the suite's entry in the configuration file
    (*global_suite_cfg*, a dict) combined with VM-level settings.
    """

    def __init__(self, suite_name, vm, global_suite_cfg):
        """Specialize the benchmark suite for the given VM"""
        self._name = suite_name
        ## TODO: why do we do handle input_sizes the other way around?
        # input_sizes: the VM setting takes precedence over the suite-level
        # setting; [None] is the fallback meaning "no explicit input size".
        if vm.input_sizes:
            self._input_sizes = vm.input_sizes
        else:
            self._input_sizes = global_suite_cfg.get('input_sizes')
        if self._input_sizes is None:
            self._input_sizes = [None]
        # location/cores: here the suite setting wins and the VM value is
        # only the fallback (opposite precedence to input_sizes above).
        self._location = global_suite_cfg.get('location', vm.path)
        self._cores = global_suite_cfg.get('cores', vm.cores)
        self._variable_values = value_or_list_as_list(global_suite_cfg.get(
            'variable_values', [None]))
        self._vm = vm
        self._benchmarks = value_or_list_as_list(
            global_suite_cfg['benchmarks'])
        self._gauge_adapter = global_suite_cfg['gauge_adapter']
        self._command = global_suite_cfg['command']
        # -1 is the sentinel for "no runtime limit" (see has_max_runtime).
        self._max_runtime = global_suite_cfg.get('max_runtime', -1)
        # TODO: remove in ReBench 1.0
        # Legacy key: 'performance_reader' overrides 'gauge_adapter' so old
        # configs keep working, with a deprecation warning.
        if 'performance_reader' in global_suite_cfg:
            logging.warning("Found deprecated 'performance_reader' key in"
                            " configuration, please replace by 'gauge_adapter'"
                            " key.")
            self._gauge_adapter = global_suite_cfg['performance_reader']

    # -- read-only accessors -------------------------------------------------

    @property
    def input_sizes(self):
        return self._input_sizes

    @property
    def location(self):
        return self._location

    @property
    def cores(self):
        return self._cores

    @property
    def variable_values(self):
        return self._variable_values

    @property
    def vm(self):
        return self._vm

    @property
    def benchmarks(self):
        return self._benchmarks

    @property
    def gauge_adapter(self):
        return self._gauge_adapter

    @property
    def name(self):
        return self._name

    @property
    def command(self):
        return self._command

    @property
    def max_runtime(self):
        return self._max_runtime

    def has_max_runtime(self):
        # True unless max_runtime is the -1 "unlimited" sentinel.
        return self._max_runtime != -1
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,310 | lschuetze/ReBench | refs/heads/master | /rebench/tests/features/issue_15_vm.py | #!/usr/bin/env python
## simple script emulating a VM generating benchmark results
import sys
import random
print sys.argv
print "Harness Name: ", sys.argv[1]
print "Bench Name:", sys.argv[2]
print "Warmup: ", sys.argv[3]
warmup = int(sys.argv[3])
for i in range(0, warmup):
print "RESULT-total: ", random.triangular(700, 850) * 2
print "RESULT-total: ", random.triangular(700, 850) | {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,311 | lschuetze/ReBench | refs/heads/master | /rebench/reporter.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import with_statement, print_function
from collections import deque
from datetime import datetime
from httplib import HTTPException
from time import time
from math import floor
import logging
import json
import urllib2
import urllib
import re
from .statistics import StatisticProperties
class Reporter(object):
    """Abstract base for reporters.

    Subclasses provide report_job_completed and override whichever of the
    no-op notification hooks they care about.
    """

    def __init__(self):
        # Guard so report_job_completed fires at most once per reporter.
        self._job_completion_reported = False

    def run_failed(self, _run_id, _cmdline, _return_code, _output):
        """Hook: a run failed. Default: ignore."""
        pass

    def run_completed(self, run_id, statistics, cmdline):
        """Hook: a run completed. Default: ignore."""
        pass

    def job_completed(self, run_ids):
        """Forward to report_job_completed exactly once (idempotent)."""
        if self._job_completion_reported:
            return
        self.report_job_completed(run_ids)
        self._job_completion_reported = True

    def set_total_number_of_runs(self, num_runs):
        """Hook: total run count is known. Default: ignore."""
        pass

    def start_run(self, run_id):
        """Hook: a run is about to start. Default: ignore."""
        pass
class TextReporter(Reporter):
    """Base for text-producing reporters (CLI and log file).

    NOTE: uses dict.iteritems(), i.e. this file targets Python 2.
    """

    def __init__(self):
        super(TextReporter, self).__init__()

    def _configuration_details(self, run_id, statistics = None):
        # One line fragment: tab-joined run id columns, " = ", then stats.
        result = ["\t".join(run_id.as_str_list()), " = "]
        self._output_stats(result, run_id, statistics)
        return result

    def _output_stats(self, output_list, run_id, statistics):
        # Append "name: value" for every public attribute of the statistics
        # object; subclasses may override for a different layout.
        if not statistics:
            return
        for field, value in statistics.__dict__.iteritems():
            if not field.startswith('_'):
                output_list.append("%s: %s " % (field, value))

    @staticmethod
    def _path_to_string(path):
        # First path element is rendered via as_simple_string; falsy items
        # in the rest are skipped.
        out = [path[0].as_simple_string()]
        for item in path[1:]:
            if item:
                out.append(str(item))
        return " ".join(out) + " "

    def _generate_all_output(self, run_ids):
        """Yield one aligned text row per run id (columns padded to the
        widest cell seen in that column across all rows)."""
        rows = []
        col_width = None
        for run_id in run_ids:
            stats = StatisticProperties(run_id.get_total_values())
            out = run_id.as_str_list()
            self._output_stats(out, run_id, stats)
            if col_width is None:
                col_width = [0] * len(out)
            rows.append(out)
            # Track the running maximum width of every column.
            col_width = [max(len(col_content), col)
                         for col_content, col in zip(out, col_width)]
        for row in rows:
            result = " ".join([col.ljust(width)
                               for col, width in zip(row, col_width)])
            yield result
class CliReporter(TextReporter):
    """ Reports to standard out using the logging framework """

    def __init__(self, executes_verbose):
        super(CliReporter, self).__init__()
        self._num_runs = None
        self._runs_completed = 0
        self._startTime = None
        self._runs_remaining = 0
        # When True, run output is already shown elsewhere, so run_failed
        # does not repeat it.
        self._executes_verbose = executes_verbose
        # TODO: re-add support, think, we need that based on the proper config, i.e., the run id
        # self._min_runtime = configurator.statistics.min_runtime

    def run_failed(self, run_id, cmdline, return_code, output):
        # Additional information in debug mode
        result = "[%s] Run failed: %s\n" % (
            datetime.now(),
            " ".join(self._configuration_details(run_id)))
        logging.debug(result)
        # Standard error output
        # -9 is the SIGKILL-derived code used when a run was killed for
        # exceeding its time limit.
        if return_code == -9:
            log_msg = "Run timed out. return_code: %s"
        else:
            log_msg = "Run failed return_code: %s"
        print(log_msg % return_code)
        print("Cmd: %s\n" % cmdline)
        if run_id.bench_cfg.suite.has_max_runtime():
            logging.debug("max_runtime: %s" % run_id.bench_cfg.suite.max_runtime)
        logging.debug("cwd: %s" % run_id.bench_cfg.suite.location)
        if not self._executes_verbose and output and len(output.strip()) > 0:
            print("Output:\n%s\n" % output)

    def run_completed(self, run_id, statistics, cmdline):
        result = "[%s] Run completed: %s\n" % (
            datetime.now(),
            " ".join(self._configuration_details(run_id, statistics)))
        logging.debug(result)
        self._runs_completed += 1
        self._runs_remaining -= 1
        # Warn when the benchmark ran faster than the configured minimum
        # runtime, which makes the measurement statistically questionable.
        if run_id.run_config.min_runtime:
            if statistics.mean < run_id.run_config.min_runtime:
                print(("WARNING: measured mean is lower than min_runtime (%s) "
                       "\t mean: %.1f\trun id: %s")
                      % (run_id.run_config.min_runtime,
                         statistics.mean,
                         run_id.as_simple_string()))
                print("Cmd: %s" % cmdline)

    def report_job_completed(self, run_ids):
        print("[%s] Job completed" % datetime.now())
        for line in self._generate_all_output(run_ids):
            print(line)

    def set_total_number_of_runs(self, num_runs):
        self._num_runs = num_runs
        self._runs_remaining = num_runs

    def start_run(self, run_id):
        # After the first completed run we can estimate time-to-completion
        # from the average time per data point so far.
        if self._runs_completed > 0:
            current = time()
            data_points_per_run = run_id.run_config.number_of_data_points
            data_points_completed = (self._runs_completed *
                data_points_per_run + len(run_id.get_data_points()))
            data_points_remaining = (self._runs_remaining *
                data_points_per_run - len(run_id.get_data_points()))
            time_per_data_point = ((current - self._startTime) /
                data_points_completed)
            etl = time_per_data_point * data_points_remaining
            # Split the estimate (seconds) into h:m:s for display.
            sec = etl % 60
            m = (etl - sec) / 60 % 60
            h = (etl - sec - m) / 60 / 60
            print(("Run %s \t runs left: %00d \t " +
                   "time left: %02d:%02d:%02d") % (run_id.bench_cfg.name,
                                                   self._runs_remaining,
                                                   floor(h), floor(m),
                                                   floor(sec)))
        else:
            # First run: remember the start time; no estimate possible yet.
            self._startTime = time()
            print("Run %s \t runs left: %d" % (run_id.bench_cfg.name,
                                               self._runs_remaining))

    def _output_stats(self, output_list, run_id, statistics):
        # CLI layout: either a "run failed." marker padded with empty cells,
        # or just the mean, right-justified.
        if not statistics:
            return
        if run_id.run_failed():
            output_list.append("run failed.")
            output_list.append("")
            output_list.append("")
            output_list.append("")
        else:
            output_list.append("mean:")
            output_list.append(("%.1f" % statistics.mean).rjust(8))
class FileReporter(TextReporter):
    """Text reporter that appends run events to a log file.

    This is only a human-readable log; the measurement data itself is the
    responsibility of the data aggregator.
    """

    def __init__(self, filename):
        super(FileReporter, self).__init__()
        # Append mode: successive jobs accumulate in the same log file.
        self._file = open(filename, 'a+')

    def run_failed(self, run_id, _cmdline, _return_code, _output):
        details = " ".join(self._configuration_details(run_id))
        self._file.writelines("[%s] Run failed: %s\n" % (datetime.now(),
                                                         details))

    def run_completed(self, run_id, statistics, cmdline):
        details = " ".join(self._configuration_details(run_id, statistics))
        self._file.writelines("[%s] Run completed: %s\n" % (datetime.now(),
                                                            details))

    def report_job_completed(self, run_ids):
        self._file.write("[%s] Job completed\n" % datetime.now())
        for line in self._generate_all_output(run_ids):
            self._file.write(line + "\n")
        self._file.close()
# TODO: re-add support for CSV file generation for overview statistics
# class CSVFileReporter(Reporter):
# """ Will generate a CSV file for processing in another program
# as for instance R, Excel, or Numbers """
#
# def __init__(self, cfg):
# super(CSVFileReporter, self).__init__()
# self._file = open(cfg.csv_file, 'a+')
# self._cfg = cfg
#
# def _prepareHeaderRow(self, data, data_aggregator, parameterMappings):
# # since the data might be irregular find the item with the most
# # parameters first
# longestTuple = max(data.keys(), key=lambda tpl: len(tpl))
# # and determine table width
# table_width = len(longestTuple)
#
# # now generate the header
#
# # get sorted parameter mapping first
# mapping = sorted(parameterMappings.items(), key=lambda entry: entry[1])
#
# header_row = []
# for (title, _index) in mapping:
# header_row.append(title)
#
# # add empty columns to keep table aligned
# while len(header_row) < table_width:
# header_row.append('')
#
# # now the statistic rows
# for title in StatisticProperties.tuple_mapping():
# header_row.append(title)
#
# return header_row, table_width
#
# def report_job_completed(self, run_ids):
# old_locale = locale.getlocale(locale.LC_ALL)
# if self._cfg.csv_locale:
# locale.setlocale(locale.LC_ALL, self._cfg.csv_locale)
#
#
# # get the data to be processed
# data = data_aggregator.getDataFlattend()
# parameterMappings = data_aggregator.data_mapping()
# num_common_parameters = len(parameterMappings)
#
# header_row, max_num_parameters = self._prepareHeaderRow(data, data_aggregator, parameterMappings)
#
# table = []
#
# # add the header row
# table.append(header_row)
#
# # add the actual results to the table
# for run, measures in data.iteritems():
# row = []
# row += run[0:num_common_parameters] # add the common ones
# row += [''] * (max_num_parameters - len(run)) # add fill for unused parameters
# row += run[num_common_parameters:] # add all remaining
# row += list(StatisticProperties(measures,
# self._cfg.confidence_level).as_tuple()) # now add the actual result data
# table.append(row)
#
# for row in table:
# self._file.write(";".join([i if type(i) == str else locale.format("%f", i or 0.0) for i in row]) + "\n")
#
# self._file.close()
# locale.setlocale(locale.LC_ALL, old_locale)
class CodespeedReporter(Reporter):
    """
    This report will report the recorded data on the completion of the job
    to the configured Codespeed instance.
    """

    def __init__(self, cfg):
        super(CodespeedReporter, self).__init__()
        self._cfg = cfg
        self._incremental_report = self._cfg.report_incrementally
        # results are batched: at most one request per _cache_for_seconds
        self._cache_for_seconds = 30
        self._cache = {}
        self._last_send = time()

    def run_completed(self, run_id, statistics, cmdline):
        if not self._incremental_report:
            return

        # cache the formatted result; flush once the batch is old enough
        self._cache[run_id] = self._format_for_codespeed(run_id, statistics)

        if time() - self._last_send >= self._cache_for_seconds:
            self._send_and_empty_cache()

    def _send_and_empty_cache(self):
        # list(): dict views are not JSON serializable
        self._send_to_codespeed(list(self._cache.values()))
        self._cache = {}
        # fix: restart the batching interval; previously _last_send was
        # never updated, so after the first flush every completed run
        # triggered its own request
        self._last_send = time()

    def _result_data_template(self):
        # all None values have to be filled in before sending
        return {
            'commitid': self._cfg.commit_id,
            'project': self._cfg.project,
            #'revision_date': '', # Optional. Default is taken either
                                  # from VCS integration or from current date
            'executable': None,
            'benchmark': None,
            'environment': self._cfg.environment,
            'branch': self._cfg.branch,
            'result_value': None,
            # 'result_date': datetime.today(), # Optional
            'std_dev': None,
            'max': None,
            'min': None}

    @staticmethod
    def _beautify_benchmark_name(name):
        """
        Currently just remove all bench, or benchmark strings.
        """
        replace = re.compile('bench(mark)?', re.IGNORECASE)
        return replace.sub('', name)

    def _format_for_codespeed(self, run_id, stats = None):
        """Build the Codespeed result dict for one run."""
        result = self._result_data_template()

        if stats and not run_id.run_failed():
            result['min'] = stats.min
            result['max'] = stats.max
            result['std_dev'] = stats.std_dev
            result['result_value'] = stats.mean
        else:
            # -1 marks a failed run
            result['result_value'] = -1

        result['executable'] = self._cfg.executable or run_id.bench_cfg.vm.name

        if run_id.bench_cfg.codespeed_name:
            name = run_id.bench_cfg.codespeed_name
        else:
            name = (self._beautify_benchmark_name(run_id.bench_cfg.name)
                    + " (%(cores)s cores, %(input_sizes)s %(extra_args)s)")

        # TODO: this is incomplete:
        name = name % {'cores'       : run_id.cores_as_str,
                       'input_sizes' : run_id.input_size_as_str,
                       'extra_args'  : run_id.bench_cfg.extra_args}

        result['benchmark'] = name
        return result

    def _send_payload(self, payload):
        fh = urllib2.urlopen(self._cfg.url, payload)
        response = fh.read()
        fh.close()
        logging.info("Results were sent to codespeed, response was: "
                     + response)

    def _send_to_codespeed(self, results):
        payload = urllib.urlencode({'json': json.dumps(results)})

        try:
            self._send_payload(payload)
        except (IOError, HTTPException):
            # sometimes codespeed fails to accept a request because something
            # is not yet properly initialized, let's try again for those cases
            try:
                self._send_payload(payload)
            except (IOError, HTTPException) as error:
                logging.error(str(error) + " This is most likely caused by "
                              "either a wrong URL in the config file, or an "
                              "environment not configured in codespeed. URL: "
                              + self._cfg.url)
        logging.info("Sent %d results to codespeed." % len(results))

    def _prepare_result(self, run_id):
        stats = StatisticProperties(run_id.get_total_values())
        return self._format_for_codespeed(run_id, stats)

    def report_job_completed(self, run_ids):
        if self._incremental_report:
            # send remaining items from cache
            self._send_and_empty_cache()
            return
        results = [self._prepare_result(run_id) for run_id in run_ids]
        # now, send them of to codespeed
        self._send_to_codespeed(results)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,312 | lschuetze/ReBench | refs/heads/master | /rebench/__init__.py | from rebench import ReBench, main_func
__version__ = ReBench().version
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,313 | lschuetze/ReBench | refs/heads/master | /rebench/model/reporting.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from ..configuration_error import ConfigurationError
import rebench.reporter as reporter
class Reporting(object):
    """Reporting configuration of an experiment.

    Bundles the CSV output settings, an optional CLI reporter, and an
    optional Codespeed reporting configuration.
    """
    def __init__(self, raw_config, cli_reporter, options):
        # plain CSV settings taken from the raw configuration dict
        self._csv_file = raw_config.get('csv_file', None)
        self._csv_locale = raw_config.get('csv_locale', None)
        self._csv_raw = raw_config.get('csv_raw', None)
        # Codespeed reporting is only set up when it is both configured in
        # the file AND requested via the command line (use_codespeed)
        if "codespeed" in raw_config and options.use_codespeed:
            self._codespeed = CodespeedReporting(raw_config, options)
        else:
            self._codespeed = None
        self._cli_reporter = cli_reporter
        # if self._config.reporting:
        #     if ('codespeed' in self._config.reporting and
        #             self._config.options.use_codespeed):
        #         reporters.append(CodespeedReporter(self._config))
    @property
    def csv_file(self):
        return self._csv_file
    @property
    def csv_locale(self):
        return self._csv_locale
    @property
    def csv_raw(self):
        return self._csv_raw
    def combined(self, raw_config):
        """Return a copy whose CSV settings are overridden by *raw_config*.

        The CLI reporter and the Codespeed setup are shared with self.
        """
        rep = Reporting({}, self._cli_reporter, None)
        rep._csv_file = raw_config.get('csv_file', self._csv_file)
        rep._csv_locale = raw_config.get('csv_locale', self._csv_locale)
        rep._csv_raw = raw_config.get('csv_raw', self._csv_raw)
        rep._codespeed = self._codespeed
        return rep
    def get_reporters(self):
        """Return the list of active reporter objects (possibly empty)."""
        result = []
        if self._cli_reporter:
            result.append(self._cli_reporter)
        if self._codespeed:
            result.append(self._codespeed.get_reporter())
        return result
class CodespeedReporting(object):
    """Validated configuration for reporting to a Codespeed instance.

    Combines the config file's reporting.codespeed section with command
    line options and raises ConfigurationError when required information
    is missing. Command-line options take precedence where both exist.
    """
    def __init__(self, raw_config, options):
        codespeed = raw_config.get("codespeed", {})
        # --commit-id is mandatory; there is no sensible default
        if options.commit_id is None:
            raise ConfigurationError("--commit-id has to be set on the command "
                                     "line for codespeed reporting.")
        self._commit_id = options.commit_id
        # --environment identifies the benchmarking machine/setup
        if options.environment is None:
            raise ConfigurationError("--environment has to be set on the "
                                     "command line for codespeed reporting.")
        self._environment = options.environment
        if "project" not in codespeed and options.project is None:
            raise ConfigurationError("The config file needs to configure a "
                                     "'project' in the reporting.codespeed "
                                     "section, or --project has to be given on "
                                     "the command line.")
        if options.project is not None:
            self._project = options.project
        else:
            self._project = codespeed["project"]
        if "url" not in codespeed:
            raise ConfigurationError("The config file needs to define a URL to "
                                     "codespeed in the reporting.codespeed "
                                     "section")
        self._url = codespeed["url"]
        self._report_incrementally = options.report_incrementally
        self._branch = options.branch
        self._executable = options.executable
        # created once; handed out via get_reporter()
        self._reporter = reporter.CodespeedReporter(self)
    @property
    def report_incrementally(self):
        return self._report_incrementally
    @property
    def branch(self):
        return self._branch
    @property
    def executable(self):
        return self._executable
    @property
    def project(self):
        return self._project
    @property
    def commit_id(self):
        return self._commit_id
    @property
    def environment(self):
        return self._environment
    @property
    def url(self):
        return self._url
    def get_reporter(self):
        """Return the CodespeedReporter built from this configuration."""
        return self._reporter
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,314 | lschuetze/ReBench | refs/heads/master | /rebench/persistence.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
import sys
import logging
import subprocess
import shutil
from threading import Lock
import time
from .model.data_point import DataPoint
from .model.measurement import Measurement
from .model.run_id import RunId
class DataStore:
    """Central registry for data files, run ids, and benchmark configs.

    Ensures there is exactly one persistence object per data file, one
    canonical RunId per run, and one BenchmarkConfig per configuration.
    """
    def __init__(self):
        # filename -> _DataPointPersistence
        self._files = {}
        # RunId -> canonical RunId instance (interning map)
        self._run_ids = {}
        # str-list key -> BenchmarkConfig
        self._bench_cfgs = {}
    def load_data(self):
        """Load previously recorded data from all registered data files."""
        for persistence in self._files.values():
            persistence._load_data()
    def get(self, filename, discard_old_data):
        """Return the persistence object for *filename*, creating it once."""
        if filename not in self._files:
            self._files[filename] = _DataPointPersistence(filename, self,
                                                          discard_old_data)
        return self._files[filename]
    def create_run_id(self, bench_cfg, cores, input_size, var_value):
        """Create or return the canonical RunId for the given parameters.

        Normalizes numeric core counts and empty-string sizes/values
        before interning the RunId.
        """
        if isinstance(cores, str) and cores.isdigit():
            cores = int(cores)
        if input_size == '':
            input_size = None
        if var_value == '':
            var_value = None
        run = RunId(bench_cfg, cores, input_size, var_value)
        if run in self._run_ids:
            return self._run_ids[run]
        else:
            self._run_ids[run] = run
            return run
    def get_config(self, name, vm_name, suite_name, extra_args, warmup):
        """Look up a registered BenchmarkConfig; raise ValueError if absent."""
        key = (name, vm_name, suite_name,
               '' if extra_args is None else str(extra_args),
               str(warmup))
        if key not in self._bench_cfgs:
            raise ValueError("Requested configuration is not available: " +
                             key.__str__())
        return self._bench_cfgs[key]
    def register_config(self, cfg):
        """Register *cfg*; duplicate registrations indicate a bug."""
        key = tuple(cfg.as_str_list())
        if key in self._bench_cfgs:
            raise ValueError("Two identical BenchmarkConfig tried to " +
                             "register. This seems to be wrong: " + str(key))
        else:
            self._bench_cfgs[key] = cfg
        return cfg
    @classmethod
    def get_by_file(cls, runs):
        """Group the measurements of *runs* by the data file they came from.

        NOTE: also discards the data points from the runs as a side effect.
        """
        by_file = {}
        for r in runs:
            points = r.get_data_points()
            r.discard_data_points()
            for p in points:
                ms = p.get_measurements()
                for m in ms:
                    if m.filename in by_file:
                        by_file[m.filename].append(m)
                    else:
                        by_file[m.filename] = [m]
        return by_file
    @classmethod
    def discard_data_of_runs(cls, runs):
        """Remove the measurement lines of *runs* from their data files.

        Lines are marked None and filtered out before rewriting each file.
        (iteritems/filter are Python 2 idioms used throughout this module.)
        """
        by_file = cls.get_by_file(runs)
        for filename, ms in by_file.iteritems():
            try:
                with open(filename, 'r') as f:
                    lines = f.readlines()
            except IOError:
                logging.info("Failed to open data file: %s" % filename)
                continue
            for m in ms:
                lines[m.line_number] = None
            lines = filter(None, lines)
            with open(filename, 'w') as f:
                f.writelines(lines)
class _DataPointPersistence(object):
    """Loads and appends the measurement data of one data file."""
    def __init__(self, data_filename, data_store, discard_old_data):
        self._data_store = data_store
        if not data_filename:
            raise ValueError("DataPointPersistence expects a filename " +
                             "for data_filename, but got: %s" % data_filename)
        self._data_filename = data_filename
        # opened lazily, on the first persisted data point
        self._file = None
        if discard_old_data:
            self._discard_old_data()
        self._insert_shebang_line()
        # serializes writes coming from parallel executions
        self._lock = Lock()
    def _discard_old_data(self):
        self._truncate_file(self._data_filename)
    @staticmethod
    def _truncate_file(filename):
        # opening with 'w' truncates the file
        with open(filename, 'w'):
            pass
    def _load_data(self):
        """
        Loads the data from the configured data file
        """
        try:
            with open(self._data_filename, 'r') as f:
                self._process_lines(f)
        except IOError:
            logging.info("No data loaded %s does not exist."
                         % self._data_filename)
    def _process_lines(self, f):
        """
        The most important assumptions we make here is that the total
        measurement is always the last one serialized for a data point.
        """
        errors = set()
        previous_run_id = None
        line_number = 0
        for line in f:
            if line.startswith('#'):  # skip comments, and shebang lines
                line_number += 1
                continue
            try:
                measurement = Measurement.from_str_list(
                    self._data_store, line.rstrip('\n').split(self._SEP),
                    line_number, self._data_filename)
                run_id = measurement.run_id
                # a different run id starts a fresh data point
                if previous_run_id is not run_id:
                    data_point = DataPoint(run_id)
                    previous_run_id = run_id
                data_point.add_measurement(measurement)
                # the 'total' measurement closes a data point (see docstring)
                if measurement.is_total():
                    run_id.loaded_data_point(data_point)
                    data_point = DataPoint(run_id)
            except ValueError, e:
                msg = str(e)
                # report each distinct parse/config error only once
                if msg not in errors:
                    # Configuration is not available, skip data point
                    logging.log(logging.DEBUG - 1, msg)
                    errors.add(msg)
            line_number += 1
    def _insert_shebang_line(self):
        """
        Insert a shebang (#!/path/to/executable) into the data file.
        This allows it theoretically to be executable.
        """
        shebang_line = "#!%s\n" % (subprocess.list2cmdline(sys.argv))
        try:
            # if file doesn't exist, just create it
            if not os.path.exists(self._data_filename):
                with open(self._data_filename, 'w') as f:
                    f.write(shebang_line)
                    f.flush()
                    f.close()
                return
            # if file exists, the first line might already be the same line
            with open(self._data_filename, 'r') as f:
                if f.readline() == shebang_line:
                    return
            # otherwise, copy the file and insert line at the beginning
            renamed_file = "%s-%.0f.tmp" % (self._data_filename, time.time())
            os.rename(self._data_filename, renamed_file)
            with open(self._data_filename, 'w') as f:
                f.write(shebang_line)
                f.flush()
                shutil.copyfileobj(open(renamed_file, 'r'), f)
            os.remove(renamed_file)
        except Exception as e:
            # best effort: a failing shebang insertion must not abort the job
            logging.error("An error occurred " +
                          "while trying to insert a shebang line: %s", e)
    _SEP = "\t"  # separator between serialized parts of a measurement
    def persist_data_point(self, data_point):
        """
        Serialize all measurements of the data point and persist them
        in the data file.
        """
        with self._lock:
            self._open_file_to_add_new_data()
            for measurement in data_point.get_measurements():
                line = self._SEP.join(measurement.as_str_list())
                self._file.write(line + "\n")
            self._file.flush()
    def _open_file_to_add_new_data(self):
        # append mode, so previously recorded data is preserved
        if not self._file:
            self._file = open(self._data_filename, 'a+')
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,315 | lschuetze/ReBench | refs/heads/master | /rebench/model/experiment.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from .virtual_machine import VirtualMachine
from .benchmark_suite import BenchmarkSuite
from .benchmark_config import BenchmarkConfig
from .reporting import Reporting
from . import value_or_list_as_list, value_with_optional_details
class Experiment:
    """One experiment from the configuration file.

    Combines the experiment-specific definition with the global run, VM,
    suite, and reporting configuration and compiles the concrete set of
    runs to be executed.
    """
    def __init__(self, name, exp_def, global_runs_cfg, global_vms_cfg,
                 global_suite_cfg, global_reporting_cfg, data_store,
                 standard_data_file, discard_old_data, cli_reporter,
                 run_filter, options):
        self._name = name
        self._raw_definition = exp_def
        # experiment-local settings override the global run configuration
        self._runs_cfg = global_runs_cfg.combined(exp_def)
        self._reporting = Reporting(
            global_reporting_cfg, cli_reporter,
            options).combined(exp_def.get('reporting', {}))
        self._data_store = data_store
        # an experiment may define its own data file; else the standard one
        self._persistence = data_store.get(exp_def.get('data_file',
                                                       standard_data_file),
                                           discard_old_data)
        # compilation order matters: VMs -> suites -> benchmarks -> runs
        self._vms = self._compile_virtual_machines(global_vms_cfg)
        self._suites = self._compile_benchmark_suites(global_suite_cfg)
        self._benchmarks = self._compile_benchmarks()
        self._runs = self._compile_runs(run_filter)
    @property
    def name(self):
        return self._name
    def get_runs(self):
        """Return the set of RunId objects compiled for this experiment."""
        return self._runs
    def _compile_runs(self, run_filter):
        """Build the cross product of benchmark x cores x input size x
        variable value, restricted by *run_filter*."""
        runs = set()
        for bench in self._benchmarks:
            if not run_filter.applies(bench):
                continue
            for cores in bench.suite.cores:
                for input_size in bench.suite.input_sizes:
                    for var_val in bench.suite.variable_values:
                        run = self._data_store.create_run_id(
                            bench, cores, input_size, var_val)
                        bench.add_run(run)
                        runs.add(run)
                        run.add_reporting(self._reporting)
                        run.add_persistence(self._persistence)
                        run.set_run_config(self._runs_cfg)
        return runs
    def _compile_virtual_machines(self, global_vms_cfg):
        """Instantiate a VirtualMachine for each entry under 'executions'.

        Raises ValueError when an execution requests an unknown VM.
        """
        benchmarks = value_or_list_as_list(self._raw_definition.
                                           get( 'benchmark', None))
        input_sizes = value_or_list_as_list(self._raw_definition.
                                            get('input_sizes', None))
        executions = value_or_list_as_list(self._raw_definition['executions'])
        vms = []
        for vm in executions:
            # an execution entry may carry per-VM details
            vm, vm_details = value_with_optional_details(vm)
            if vm not in global_vms_cfg:
                raise ValueError("The VM '%s' requested in %s was not found."
                                 % (vm, self.name))
            global_cfg = global_vms_cfg[vm]
            vms.append(VirtualMachine(vm, vm_details, global_cfg, benchmarks,
                                      input_sizes, self.name))
        return vms
    def _compile_benchmark_suites(self, global_suite_cfg):
        """Create a BenchmarkSuite for every suite name each VM requests."""
        suites = []
        for vm in self._vms:
            for suite_name in vm.benchmark_suite_names:
                suites.append(BenchmarkSuite(suite_name, vm,
                                             global_suite_cfg[suite_name]))
        return suites
    def _compile_benchmarks(self):
        """Compile a BenchmarkConfig for every benchmark of every suite."""
        bench_cfgs = []
        for suite in self._suites:
            for bench in value_or_list_as_list(suite.benchmarks):
                bench_cfgs.append(BenchmarkConfig.compile(
                    bench, suite, self._data_store))
        return bench_cfgs
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,316 | lschuetze/ReBench | refs/heads/master | /rebench/model/runs_config.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from time import time
import logging
class RunsConfig(object):
    """ General configuration parameters for runs """

    def __init__(self,
                 number_of_data_points = None,
                 min_runtime = None,
                 parallel_interference_factor = 2.5):
        self._number_of_data_points = number_of_data_points
        self._min_runtime = min_runtime
        self._parallel_interference_factor = parallel_interference_factor

    @property
    def number_of_data_points(self):
        return self._number_of_data_points

    @property
    def min_runtime(self):
        return self._min_runtime

    @property
    def parallel_interference_factor(self):
        return self._parallel_interference_factor

    def combined(self, run_def):
        """Return a new config with values overridden by *run_def*."""
        merged = RunsConfig(self._number_of_data_points, self._min_runtime,
                            self._parallel_interference_factor)
        points = run_def.get('number_of_data_points', None)
        if points:
            merged._number_of_data_points = points
        runtime = run_def.get('min_runtime', None)
        if runtime:
            merged._min_runtime = runtime
        # parallel_interference_factor is a global setting, so it is not
        # merged from other run definitions
        return merged

    def log(self):
        """Emit the effective run configuration to the debug log."""
        parts = ["Run Config: number of data points: %d"
                 % self._number_of_data_points]
        if self._min_runtime:
            parts.append(", min_runtime: %dms" % self._min_runtime)
        logging.debug("".join(parts))

    def create_termination_check(self, bench_cfg):
        """Create the termination check matching this configuration."""
        return TerminationCheck(self, bench_cfg)
class QuickRunsConfig(RunsConfig):
    """Run configuration for quick runs, additionally bounded by max_time."""
    def __init__(self, number_of_data_points = None,
                 min_runtime = None,
                 max_time = None):
        super(QuickRunsConfig, self).__init__(number_of_data_points,
                                              min_runtime)
        # maximum wall-clock time (seconds) enforced by the termination check
        self._max_time = max_time
    def combined(self, run_def):
        """For Quick runs, only the global config is taken into account."""
        return self
    @property
    def max_time(self):
        return self._max_time
    def create_termination_check(self, bench_cfg):
        # quick runs also terminate when max_time is exceeded
        return QuickTerminationCheck(self, bench_cfg)
class TerminationCheck(object):
    """Decides when the executions of a benchmark should stop.

    Tracks total and consecutive execution failures, and compares the
    number of gathered data points against the configured target.
    """

    def __init__(self, run_cfg, bench_cfg):
        self._run_cfg = run_cfg
        self._bench_cfg = bench_cfg
        self._consecutive_erroneous_executions = 0
        self._failed_execution_count = 0
        self._fail_immediately = False

    def fail_immediately(self):
        """Mark the benchmark so that it is aborted right away."""
        self._fail_immediately = True

    def indicate_failed_execution(self):
        self._consecutive_erroneous_executions += 1
        self._failed_execution_count += 1

    def indicate_successful_execution(self):
        # a success breaks any streak of consecutive failures
        self._consecutive_erroneous_executions = 0

    def has_sufficient_number_of_data_points(self, number_of_data_points):
        return number_of_data_points >= self._run_cfg.number_of_data_points

    def fails_consecutively(self):
        if self._fail_immediately:
            return True
        return self._consecutive_erroneous_executions >= 3

    def has_too_many_failures(self, number_of_data_points):
        if self._fail_immediately:
            return True
        if self._failed_execution_count > 6:
            return True
        # with more than 10 points, abort when over half the runs failed
        return (number_of_data_points > 10 and
                self._failed_execution_count > number_of_data_points / 2)

    def should_terminate(self, number_of_data_points):
        """Return True when the benchmark should stop, logging the reason."""
        if self._fail_immediately:
            logging.info(
                "%s was marked to fail immediately" % self._bench_cfg.name)
        if self.has_sufficient_number_of_data_points(number_of_data_points):
            logging.debug("Reached number_of_data_points for %s"
                          % self._bench_cfg.name)
            return True
        if self.fails_consecutively():
            logging.error(("Three executions of %s have failed in a row, " +
                           "benchmark is aborted") % self._bench_cfg.name)
            return True
        if self.has_too_many_failures(number_of_data_points):
            logging.error("Many runs of %s are failing, benchmark is aborted."
                          % self._bench_cfg.name)
            return True
        return False
class QuickTerminationCheck(TerminationCheck):
    """Termination check that additionally enforces a wall-clock limit."""

    def __init__(self, run_cfg, bench_cfg):
        super(QuickTerminationCheck, self).__init__(run_cfg, bench_cfg)
        # remember when the run started so we can measure elapsed time
        self._start_time = time()

    def should_terminate(self, number_of_data_points):
        # Cut the run off as soon as the configured time budget is used up;
        # otherwise fall back to the regular termination conditions.
        elapsed = time() - self._start_time
        if elapsed > self._run_cfg.max_time:
            logging.debug(
                "Maximum runtime is reached for %s" % self._bench_cfg.name)
            return True
        return super(QuickTerminationCheck, self).should_terminate(
            number_of_data_points)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,317 | lschuetze/ReBench | refs/heads/master | /rebench/tests/persistency_test.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
from datetime import datetime
from .rebench_test_case import ReBenchTestCase
from ..configurator import Configurator
from ..executor import Executor
from ..persistence import DataStore
from ..model.run_id import RunId
from ..model.measurement import Measurement
from ..model.benchmark_config import BenchmarkConfig
from ..model.benchmark_suite import BenchmarkSuite
from ..model.virtual_machine import VirtualMachine
class PersistencyTest(ReBenchTestCase):

    def test_de_serialization(self):
        """Round-trip a Measurement through its string-list serialization.

        Ensures all fields survive ``as_str_list`` followed by
        ``from_str_list`` unchanged.
        """
        data_store = DataStore()
        vm = VirtualMachine("MyVM", None, {'path': '', 'binary': ''},
                            None, [1], None)
        suite = BenchmarkSuite("MySuite", vm, {'benchmarks': [],
                                               'gauge_adapter': '',
                                               'command': ''})
        bench_cfg = BenchmarkConfig("Test Bench [>", "Test Bench [>", None,
                                    suite, vm, None, 0, None, data_store)
        run_id = RunId(bench_cfg, 1000, 44, 'sdf sdf sdf sdfsf')
        # microsecond=0: presumably the serialized format only has second
        # resolution -- TODO confirm against Measurement.as_str_list
        timestamp = datetime.now().replace(microsecond=0)
        measurement = Measurement(2222.2222, 'ms', run_id, 'foobar crit',
                                  timestamp)

        serialized = measurement.as_str_list()
        deserialized = Measurement.from_str_list(data_store, serialized)

        # use the canonical assert names; assertEquals/assertAlmostEquals
        # are deprecated aliases (removed in Python 3.12)
        self.assertEqual(deserialized.criterion, measurement.criterion)
        self.assertEqual(deserialized.value, measurement.value)
        self.assertEqual(deserialized.unit, measurement.unit)
        self.assertAlmostEqual(deserialized.timestamp, measurement.timestamp)
        self.assertEqual(deserialized.run_id, measurement.run_id)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,318 | lschuetze/ReBench | refs/heads/master | /rebench/tests/features/issue_19_vm.py | #!/usr/bin/env python
## simple script emulating a VM generating benchmark results
# Expected argv (matching the printed labels below):
#   argv[1] = harness name, argv[2] = benchmark name, argv[3] = input size
# NOTE: Python 2 print statements; this script is executed as a child
# process, not imported.
import sys
import random

print sys.argv
print "Harness Name: ", sys.argv[1]
print "Bench Name:", sys.argv[2]
print "Input Size: ", sys.argv[3]

# emit a plausible random total in [700, 850] for the gauge adapter to parse
print "RESULT-total: ", random.triangular(700, 850)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,319 | lschuetze/ReBench | refs/heads/master | /rebench/configurator.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import sys
import logging
import subprocess
import traceback
from .model.runs_config import RunsConfig, QuickRunsConfig
from .model.experiment import Experiment
class _VMFilter:
def __init__(self, name):
self._name = name
def matches(self, bench):
return bench.vm.name == self._name
class _SuiteFilter(object):
def __init__(self, name):
self._name = name
def matches(self, bench):
return bench.suite.name == self._name
class _BenchmarkFilter(_SuiteFilter):
    """Matches benchmark runs by suite name and benchmark name."""

    def __init__(self, suite_name, benchmark_name):
        super(_BenchmarkFilter, self).__init__(suite_name)
        self._benchmark_name = benchmark_name

    def matches(self, bench):
        # both the suite and the benchmark name have to match
        return (super(_BenchmarkFilter, self).matches(bench) and
                bench.name == self._benchmark_name)
class _RunFilter(object):
    """Parses ``vm:NAME`` and ``s:SUITE[:BENCH]`` filter expressions and
    decides which benchmark runs get executed.

    Made a new-style class for consistency with the filter classes above.
    """

    def __init__(self, run_filter):
        self._vm_filters = []
        self._suite_filters = []

        if not run_filter:
            return

        for f in run_filter:
            parts = f.split(":")
            if parts[0] == "vm":
                self._vm_filters.append(_VMFilter(parts[1]))
            elif parts[0] == "s" and len(parts) == 2:
                self._suite_filters.append(_SuiteFilter(parts[1]))
            elif parts[0] == "s" and len(parts) == 3:
                self._suite_filters.append(_BenchmarkFilter(parts[1], parts[2]))
            else:
                raise Exception("Unknown filter expression: " + f)

    def applies(self, bench):
        # within each category the filters are OR-ed;
        # across the two categories they are AND-ed
        return (self._match(self._vm_filters, bench) and
                self._match(self._suite_filters, bench))

    @staticmethod
    def _match(filters, bench):
        # an empty filter list means "no restriction"
        if not filters:
            return True
        for f in filters:
            if f.matches(bench):
                return True
        return False
class Configurator(object):
    """Loads a ReBench YAML configuration file and compiles the
    experiments defined in it.

    CLI options, the chosen experiment name, and run filters are folded
    into the compiled experiments so that other parts of the system only
    deal with fully specialized objects.
    """

    def __init__(self, file_name, data_store, cli_options = None,
                 cli_reporter = None, exp_name = None,
                 standard_data_file = None, run_filter = None):
        self._raw_config = self._load_config(file_name)
        if standard_data_file:
            # an explicitly requested data file overrides the config value
            self._raw_config['standard_data_file'] = standard_data_file
        self._options = self._process_cli_options(cli_options)
        self._exp_name = exp_name

        self.runs = RunsConfig(**self._raw_config.get('runs', {}))
        self.quick_runs = QuickRunsConfig(**self._raw_config.get('quick_runs', {}))

        self._data_store = data_store
        self._experiments = self._compile_experiments(cli_reporter,
                                                      _RunFilter(run_filter))

        # TODO: does visualization work?
        # self.visualization = self._raw_config['experiments'][self.experiment_name()].get('visualization', None)

    @property
    def build_log(self):
        """Name of the file receiving build output; defaults to 'build.log'."""
        return self._raw_config.get('build_log', 'build.log')

    @staticmethod
    def _load_config(file_name):
        """Load and parse the YAML configuration.

        Exits the process with -1 when the file cannot be read or parsed.
        """
        import yaml
        try:
            # use a context manager so the handle is closed reliably; the
            # previous `file(...)` call leaked it and `file` does not exist
            # on Python 3
            with open(file_name, 'r') as conf_file:
                # NOTE(review): yaml.load can construct arbitrary Python
                # objects from untrusted input; consider yaml.safe_load if
                # configs never rely on custom tags.
                return yaml.load(conf_file)
        except IOError:
            logging.error("An error occurred on opening the config file (%s)."
                          % file_name)
            logging.error(traceback.format_exc(0))
            sys.exit(-1)
        except yaml.YAMLError:
            logging.error("Failed parsing the config file (%s)." % file_name)
            logging.error(traceback.format_exc(0))
            sys.exit(-1)

    def _process_cli_options(self, options):
        """Configure logging from the CLI flags and sanity-check the
        `nice` setting. Returns the (possibly adjusted) options."""
        if options is None:
            return None

        if options.debug:
            if options.verbose:
                logging.basicConfig(level=logging.NOTSET)
                logging.getLogger().setLevel(logging.NOTSET)
                logging.debug("Enabled verbose debug output.")
            else:
                logging.basicConfig(level=logging.DEBUG)
                logging.getLogger().setLevel(logging.DEBUG)
                logging.debug("Enabled debug output.")
        else:
            logging.basicConfig(level=logging.ERROR)
            logging.getLogger().setLevel(logging.ERROR)

        if options.use_nice and not self._can_set_niceness():
            logging.error("Process niceness cannot be set currently. "
                          "To execute benchmarks with highest priority, "
                          "you might need root/admin rights.")
            logging.error("Deactivated usage of nice command.")
            options.use_nice = False

        return options

    @staticmethod
    def _can_set_niceness():
        """Check whether we can elevate process priority with `nice -n-20`."""
        # NOTE(review): on Python 3 check_output returns bytes, which makes
        # the substring tests raise TypeError; fine on Python 2 -- confirm
        # the targeted interpreter before porting.
        output = subprocess.check_output(["nice", "-n-20", "echo", "test"],
                                         stderr=subprocess.STDOUT)
        if "cannot set niceness" in output or "Permission denied" in output:
            return False
        else:
            return True

    @property
    def options(self):
        return self._options

    @property
    def use_nice(self):
        return self.options is not None and self.options.use_nice

    def experiment_name(self):
        """The explicitly selected experiment, or the configured default."""
        return self._exp_name or self._raw_config['standard_experiment']

    def get_experiments(self):
        """The configuration has been compiled before it is handed out
        to the client class, since some configurations can override
        others and none of that should concern other parts of the
        system.
        """
        return self._experiments

    def get_experiment(self, name):
        return self._experiments[name]

    def get_runs(self):
        """Union of the run ids over all compiled experiments."""
        runs = set()
        for exp in self._experiments.values():
            runs |= exp.get_runs()
        return runs

    def _compile_experiments(self, cli_reporter, run_filter):
        """Compile the requested experiment, or all of them when the
        special name 'all' is selected."""
        if not self.experiment_name():
            raise ValueError("No experiment chosen.")

        conf_defs = {}
        if self.experiment_name() == "all":
            for exp_name in self._raw_config['experiments']:
                conf_defs[exp_name] = self._compile_experiment(exp_name,
                                                               cli_reporter,
                                                               run_filter)
        else:
            if self.experiment_name() not in self._raw_config['experiments']:
                raise ValueError("Requested experiment '%s' not available." %
                                 self.experiment_name())
            conf_defs[self.experiment_name()] = self._compile_experiment(
                self.experiment_name(), cli_reporter, run_filter)
        return conf_defs

    def _compile_experiment(self, exp_name, cli_reporter, run_filter):
        exp_def = self._raw_config['experiments'][exp_name]
        # with --quick, the quick-run settings replace the normal ones
        run_cfg = (self.quick_runs if (self.options and self.options.quick)
                   else self.runs)
        return Experiment(exp_name, exp_def, run_cfg,
                          self._raw_config['virtual_machines'],
                          self._raw_config['benchmark_suites'],
                          self._raw_config.get('reporting', {}),
                          self._data_store,
                          self._raw_config.get('standard_data_file', None),
                          self._options.clean if self._options else False,
                          cli_reporter,
                          run_filter,
                          self._options)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,320 | lschuetze/ReBench | refs/heads/master | /rebench/model/virtual_machine.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
from . import value_or_list_as_list
class VirtualMachine(object):

    def __init__(self, name, vm_details, global_cfg, _benchmarks, _input_sizes,
                 experiment_name):
        """Specializing the VM details in the run definitions with the settings from
        the VM definitions
        """
        if vm_details:
            # run-definition details take precedence over the passed-in values
            benchmarks = value_or_list_as_list(
                vm_details.get('benchmark', _benchmarks))
            input_sizes = value_or_list_as_list(
                vm_details.get('input_sizes', _input_sizes))
            cores = value_or_list_as_list(vm_details.get('cores', None))
        else:
            benchmarks, input_sizes, cores = _benchmarks, _input_sizes, None

        self._name = name
        self._benchsuite_names = benchmarks
        self._input_sizes = input_sizes

        # build-related state
        self._build = global_cfg.get('build', None)
        self._do_build = self._build is not None
        self._build_failed = False

        # per-run cores override the global setting; default is one core
        self._cores = cores or global_cfg.get('cores', [1])

        raw_path = global_cfg.get('path', None)
        self._path = os.path.abspath(raw_path) if raw_path else raw_path
        self._binary = global_cfg['binary']
        self._args = global_cfg.get('args', '')

        self._experiment_name = experiment_name
        self._execute_exclusively = global_cfg.get('execute_exclusively', True)

    # --- simple read-only accessors -------------------------------------

    @property
    def name(self):
        return self._name

    @property
    def benchmark_suite_names(self):
        return self._benchsuite_names

    @property
    def input_sizes(self):
        return self._input_sizes

    @property
    def cores(self):
        return self._cores

    @property
    def build(self):
        return self._build

    @property
    def is_built(self):
        # once no build is pending the VM counts as built
        return not self._do_build

    @property
    def is_failed_build(self):
        return self._build_failed

    def mark_build(self):
        # record that the (single) build has been performed
        self._do_build = False

    def mark_build_failed(self):
        self._build_failed = True

    @property
    def path(self):
        return self._path

    @property
    def binary(self):
        return self._binary

    @property
    def args(self):
        return self._args

    @property
    def execute_exclusively(self):
        return self._execute_exclusively
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,321 | lschuetze/ReBench | refs/heads/master | /rebench/tests/model/runs_config_test.py | # Copyright (c) 2016 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from ...model.runs_config import TerminationCheck, QuickTerminationCheck
from ...configurator import Configurator
from ...persistence import DataStore
from ..rebench_test_case import ReBenchTestCase
class RunsConfigTestCase(ReBenchTestCase):
    """Tests for the termination checks defined in model.runs_config."""

    def setUp(self):
        super(RunsConfigTestCase, self).setUp()
        self._cnf = Configurator(self._path + '/small.conf', DataStore(), None,
                                 standard_data_file=self._tmp_file)
        runs = self._cnf.get_runs()
        # any run will do; the checks only consult the run/bench configs
        self._run = list(runs)[0]

    def test_termination_check_basic(self):
        # assumes small.conf configures 10 data points -- TODO confirm
        tc = TerminationCheck(self._run.run_config, self._run.bench_cfg)
        self.assertFalse(tc.should_terminate(0))
        self.assertTrue(tc.should_terminate(10))

    def test_consecutive_fails(self):
        tc = TerminationCheck(self._run.run_config, self._run.bench_cfg)
        self.assertFalse(tc.should_terminate(0))
        # two consecutive failures are tolerated...
        for i in range(0, 2):
            tc.indicate_failed_execution()
            self.assertFalse(tc.should_terminate(0))
        # ...the third one aborts the run
        tc.indicate_failed_execution()
        self.assertTrue(tc.should_terminate(0))

    def test_too_many_fails(self):
        tc = TerminationCheck(self._run.run_config, self._run.bench_cfg)
        self.assertFalse(tc.should_terminate(0))
        # interleaved successes keep the consecutive counter at zero,
        # but the total failure count still accumulates
        for i in range(0, 6):
            tc.indicate_failed_execution()
            tc.indicate_successful_execution()
            self.assertFalse(tc.should_terminate(0))
        # the seventh overall failure crosses the total-failure limit
        tc.indicate_failed_execution()
        self.assertTrue(tc.should_terminate(0))

    def test_quick_termination(self):
        tc = QuickTerminationCheck(self._run.run_config, self._run.bench_cfg)
        # overriding max_time on the run config instance steers the check:
        # a negative budget forces an immediate timeout...
        self._run.run_config.max_time = -1
        self.assertTrue(tc.should_terminate(0))
        # ...a huge one effectively disables it
        self._run.run_config.max_time = 100000000000
        self.assertFalse(tc.should_terminate(0))
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,322 | lschuetze/ReBench | refs/heads/master | /rebench/model/benchmark_config.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from . import value_with_optional_details
import logging
class BenchmarkConfig(object):
    """A fully specialized benchmark: suite settings merged with the
    per-benchmark details."""

    @classmethod
    def compile(cls, bench, suite, data_store):
        """Specialization of the configurations which get executed by using the
        suite definitions.
        """
        name, details = value_with_optional_details(bench, {})

        # TODO: remove in ReBench 1.0
        if 'performance_reader' in details:
            logging.warning("Found deprecated 'performance_reader' key in"
                            " configuration, please replace by 'gauge_adapter'"
                            " key.")
            details['gauge_adapter'] = details['performance_reader']

        return BenchmarkConfig(
            name,
            details.get('command', name),
            details.get('gauge_adapter', suite.gauge_adapter),
            suite,
            suite.vm,
            details.get('extra_args', None),
            int(details.get('warmup', 0)),
            details.get('codespeed_name', None),
            data_store)

    def __init__(self, name, command, gauge_adapter, suite, vm, extra_args,
                 warmup, codespeed_name, data_store):
        self._name = name
        self._command = command
        self._gauge_adapter = gauge_adapter
        self._suite = suite
        self._vm = vm
        self._extra_args = extra_args
        self._warmup = warmup
        self._codespeed_name = codespeed_name
        # the compiled runs, these might be shared with other
        # benchmarks/suites
        self._runs = set()
        data_store.register_config(self)

    def add_run(self, run):
        self._runs.add(run)

    @property
    def name(self):
        return self._name

    @property
    def command(self):
        """
        We distinguish between the benchmark name, used for reporting, and the
        command that is passed to the benchmark executor.
        If no command was specified in the config, the name is used instead.
        See the compile(.) method for details.
        :return: the command to be passed to the benchmark invocation
        """
        return self._command

    @property
    def codespeed_name(self):
        return self._codespeed_name

    @property
    def extra_args(self):
        return self._extra_args

    @property
    def warmup_iterations(self):
        return self._warmup

    @property
    def gauge_adapter(self):
        return self._gauge_adapter

    @property
    def suite(self):
        return self._suite

    @property
    def vm(self):
        return self._vm

    @property
    def execute_exclusively(self):
        # delegated to the VM definition
        return self._vm.execute_exclusively

    def __str__(self):
        return "%s, vm:%s, suite:%s, args:'%s', warmup: %d" % (
            self._name, self._vm.name, self._suite.name, self._extra_args or '',
            self._warmup)

    def as_simple_string(self):
        # the extra arguments are only included when present
        if self._extra_args:
            return "%s (%s, %s, %s, %d)" % (
                self._name, self._vm.name, self._suite.name, self._extra_args,
                self._warmup)
        return "%s (%s, %s, %d)" % (
            self._name, self._vm.name, self._suite.name, self._warmup)

    def as_str_list(self):
        return [self._name, self._vm.name, self._suite.name,
                str(self._extra_args) if self._extra_args is not None else '',
                str(self._warmup)]

    @classmethod
    def from_str_list(cls, data_store, str_list):
        # the data store resolves the key back to the registered config
        return data_store.get_config(str_list[0], str_list[1], str_list[2],
                                     None if str_list[3] == '' else str_list[3],
                                     int(str_list[4]))
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,323 | lschuetze/ReBench | refs/heads/master | /rebench/tests/executor_test.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import unittest
import subprocess
from ..executor import Executor, BenchmarkThreadExceptions
from ..configurator import Configurator
from ..model.measurement import Measurement
from ..persistence import DataStore
from .. import ReBench
import os
import sys
from .rebench_test_case import ReBenchTestCase
class ExecutorTest(ReBenchTestCase):
    def setUp(self):
        super(ExecutorTest, self).setUp(__file__)
        # run from the package root so relative paths in the test configs
        # resolve -- NOTE(review): never restored, leaks into later tests
        os.chdir(self._path + '/../')
def test_setup_and_run_benchmark(self):
# before executing the benchmark, we override stuff in subprocess for testing
subprocess.Popen = Popen_override
options = ReBench().shell_options().parse_args([])[0]
cnf = Configurator(self._path + '/test.conf', DataStore(), options,
None, 'Test', standard_data_file = self._tmp_file)
ex = Executor(cnf.get_runs(), cnf.use_nice)
ex.execute()
### should test more details
# (mean, sdev, (interval, interval_percentage),
# (interval_t, interval_percentage_t)) = ex.result['test-vm']['test-bench']
#
# self.assertEqual(31, len(ex.benchmark_data['test-vm']['test-bench']))
# self.assertAlmostEqual(45870.4193548, mean)
# self.assertAlmostEqual(2.93778711485, sdev)
#
# (i_low, i_high) = interval
# self.assertAlmostEqual(45869.385195243565, i_low)
# self.assertAlmostEqual(45871.453514433859, i_high)
# self.assertAlmostEqual(0.00450904792104, interval_percentage)
def test_broken_command_format(self):
def test_exit(val):
self.assertEquals(-1, val, "got the correct error code")
raise RuntimeError("TEST-PASSED")
sys.exit = test_exit
try:
options = ReBench().shell_options().parse_args([])[0]
cnf = Configurator(self._path + '/test.conf', DataStore(), options,
None, 'TestBrokenCommandFormat',
standard_data_file=self._tmp_file)
ex = Executor(cnf.get_runs(), cnf.use_nice)
ex.execute()
except RuntimeError as e:
self.assertEqual("TEST-PASSED", e.message)
except BenchmarkThreadExceptions as e:
self.assertEqual("TEST-PASSED", e.exceptions[0].message)
def test_broken_command_format_with_TypeError(self):
def test_exit(val):
self.assertEquals(-1, val, "got the correct error code")
raise RuntimeError("TEST-PASSED")
sys.exit = test_exit
try:
options = ReBench().shell_options().parse_args([])[0]
cnf = Configurator(self._path + '/test.conf', DataStore(), options,
None, 'TestBrokenCommandFormat2',
standard_data_file=self._tmp_file)
ex = Executor(cnf.get_runs(), cnf.use_nice)
ex.execute()
except RuntimeError as e:
self.assertEqual("TEST-PASSED", e.message)
except BenchmarkThreadExceptions as e:
self.assertEqual("TEST-PASSED", e.exceptions[0].message)
def _basic_execution(self, cnf):
runs = cnf.get_runs()
self.assertEquals(8, len(runs))
ex = Executor(cnf.get_runs(), cnf.use_nice)
ex.execute()
for run in runs:
data_points = run.get_data_points()
self.assertEquals(10, len(data_points))
for data_point in data_points:
measurements = data_point.get_measurements()
self.assertEquals(4, len(measurements))
self.assertIsInstance(measurements[0], Measurement)
self.assertTrue(measurements[3].is_total())
self.assertEquals(data_point.get_total_value(),
measurements[3].value)
def test_basic_execution(self):
cnf = Configurator(self._path + '/small.conf', DataStore(), None,
standard_data_file = self._tmp_file)
self._basic_execution(cnf)
def test_basic_execution_with_magic_all(self):
cnf = Configurator(self._path + '/small.conf', DataStore(), None, None,
'all', standard_data_file = self._tmp_file)
self._basic_execution(cnf)
def Popen_override(cmdline, stdout, stderr=None, shell=None):
    """Stand-in for subprocess.Popen used by the tests.

    All arguments are ignored; the returned dummy process object reports
    an immediate, successful termination with empty output.
    """
    class _FakeProcess:
        returncode = 0

        def communicate(self):
            return "", ""

        def poll(self):
            return self.returncode

    return _FakeProcess()
def test_suite():
    """Return the suite of all tests in this module.

    Uses TestLoader.loadTestsFromTestCase instead of unittest.makeSuite,
    which is deprecated since Python 3.11 and removed in 3.13; the
    loader API exists in Python 2 as well and behaves identically here.
    """
    return unittest.TestLoader().loadTestsFromTestCase(ExecutorTest)


if __name__ == "__main__":
    unittest.main(defaultTest='test_suite')
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,324 | lschuetze/ReBench | refs/heads/master | /rebench/interop/jgf_adapter.py | # class JGFPerformance(Performance):
# """JGFPerformance is used to read the output of the JGF barrier benchmarks.
# """
# re_barrierSec1 = re.compile(r"^(?:.*:.*:)(.*)(?:\s+)([0-9\.E]+)(?:\s+)\(barriers/s\)") # for the barrier benchmarks in sec 1 of the JGF benchmarks
# re_sec2 = re.compile(r"^(?:Section2:.*:)(.*)(?::.*)(?:\s+)([0-9]+)(?:\s+)\(ms\)") # for the benchmarks from sec 2
# re_sec3 = re.compile(r"^(?:Section3:.*:)(.*)(?::Run:.*)(?:\s+)([0-9]+)(?:\s+)\(ms\)") # for the benchmarks from sec 3, the time of 'Run' is used
#
# re_invalid = re.compile("Validation failed")
#
# def __init__(self):
# self._otherErrorDefinitions = [JGFPerformance.re_invalid]
#
# def parse_data(self, data, run_id):
# data_points = []
# current = DataPoint(run_id)
#
# for line in data.split("\n"):
# if self.check_for_error(line):
# raise ResultsIndicatedAsInvalid("Output of bench program indicated error.")
#
# m = self.re_barrierSec1.match(line)
# if not m:
# m = self.re_sec2.match(line)
# if not m:
# m = self.re_sec3.match(line)
#
# if m:
# time = float(m.group(2))
# val = Measurement(time, None)
# result.append(val)
# #print "DEBUG OUT:" + time
#
# if time is None:
# raise OutputNotParseable(data)
#
# return (time, result)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,325 | lschuetze/ReBench | refs/heads/master | /rebench/tests/bugs/issue_27_vm.py | #!/usr/bin/env python
## simple script emulating a failing benchmark
print "Starting Richards benchmark ..."
print "Results are incorrect"
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,326 | lschuetze/ReBench | refs/heads/master | /rebench/subprocess_with_timeout.py | from os import kill
from select import select
from signal import SIGKILL
from subprocess import PIPE, STDOUT, Popen
from threading import Thread
from time import time
import sys
class SubprocessThread(Thread):
    """Runs a subprocess on a background thread and captures its output.

    The spawning thread joins on this thread (possibly with a timeout);
    after completion, stdout_result/stderr_result/returncode hold the
    process results, and pid allows the parent to kill a hung process.
    """

    def __init__(self, binary_name, args, shell, cwd, verbose, stdout, stderr):
        # binary_name is used only for the thread name shown in debugging.
        Thread.__init__(self, name = "Subprocess %s" % binary_name)
        self._args = args
        self._shell = shell
        self._cwd = cwd
        self._verbose = verbose
        self._stdout = stdout
        self._stderr = stderr
        # Filled in by run(); remain None until the process completes.
        self.stdout_result = None
        self.stderr_result = None
        self.returncode = None
        # Set as soon as the process is spawned, so the parent thread can
        # kill it even while run() is still collecting output.
        self.pid = None

    def run(self):
        p = Popen(self._args, shell=self._shell, cwd=self._cwd,
                  stdout=self._stdout, stderr=self._stderr)
        self.pid = p.pid
        self.process_output(p)
        self.returncode = p.returncode

    def process_output(self, p):
        """Collect the child's output.

        In verbose mode (with piped streams) output is echoed line by
        line to this process's stdout/stderr while being accumulated;
        otherwise a plain communicate() gathers everything at the end.
        """
        if self._verbose and self._stdout == PIPE and (self._stderr == PIPE or
                                                       self._stderr == STDOUT):
            self.stdout_result = ""
            self.stderr_result = ""
            while True:
                # select() blocks until one of the pipes has data.
                reads = [p.stdout.fileno()]
                if self._stderr == PIPE:
                    reads.append(p.stderr.fileno())
                ret = select(reads, [], [])

                for fd in ret[0]:
                    if fd == p.stdout.fileno():
                        read = p.stdout.readline()
                        sys.stdout.write(read)
                        self.stdout_result += read
                    if self._stderr == PIPE and fd == p.stderr.fileno():
                        read = p.stderr.readline()
                        sys.stderr.write(read)
                        self.stderr_result += read

                # NOTE(review): remaining buffered output after process
                # exit may be dropped here — verify for short-lived
                # processes with large output.
                if p.poll() is not None:
                    break
        else:
            self.stdout_result, self.stderr_result = p.communicate()
def run(args, cwd = None, shell = False, kill_tree = True, timeout = -1,
        verbose = False, stdout = PIPE, stderr = PIPE):
    """
    Run a command with a timeout after which it will be forcibly
    killed.

    :param args: full command line string; its first token is used only
        to name the monitoring thread.
    :param timeout: seconds to wait, or -1 to wait forever.
    :param kill_tree: on timeout, also kill the child's descendants.
    :return: (returncode, stdout, stderr); returncode is -9 when the
        process was killed after the timeout.
    """
    binary_name = args.split(' ')[0]
    thread = SubprocessThread(binary_name, args, shell, cwd, verbose, stdout,
                              stderr)
    thread.start()

    if timeout == -1:
        thread.join()
    else:
        # Join in slices of at most 10 minutes so that CI environments
        # which abort on long silent periods see keep-alive output.
        t10min = 10 * 60
        if timeout < t10min:
            thread.join(timeout)
        else:
            start = time()
            diff = 0
            while diff < timeout:
                if t10min < timeout - diff:
                    t = t10min
                else:
                    t = timeout - diff
                thread.join(t)
                if not thread.is_alive():
                    break
                diff = time() - start
                if diff < timeout:
                    # print() with a single argument is valid and identical
                    # in Python 2 and 3 (the original py2-only print
                    # statement broke py3 imports of this module).
                    print("Keep alive, current job runs for %dmin" % (diff / 60))

    if timeout != -1 and thread.is_alive():
        assert thread.pid is not None
        return kill_process(thread.pid, kill_tree, thread)

    return thread.returncode, thread.stdout_result, thread.stderr_result
def kill_process(pid, recursively, thread):
    """Send SIGKILL to *pid* (and, if *recursively*, to its descendants),
    wait for the monitoring *thread* to finish, and return the canonical
    kill result tuple (-9, stdout, stderr)."""
    targets = [pid]
    if recursively:
        targets += get_process_children(pid)

    for target in targets:
        kill(target, SIGKILL)

    thread.join()
    return -9, thread.stdout_result, thread.stderr_result
def get_process_children(pid):
    """Return the PIDs of all transitive children of *pid*.

    Uses the GNU/Linux `ps --ppid` option; each child discovered is
    queried recursively for its own children.
    """
    ps = Popen('ps --no-headers -o pid --ppid %d' % pid, shell = True,
               stdout = PIPE, stderr = PIPE)
    out, _err = ps.communicate()

    direct_children = [int(token) for token in out.split()]
    descendants = list(direct_children)
    for child in direct_children:
        descendants.extend(get_process_children(child))
    return descendants
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,327 | lschuetze/ReBench | refs/heads/master | /rebench/executor.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import with_statement
from collections import deque
from math import floor
import logging
from multiprocessing import cpu_count
import os
import pkgutil
import random
from select import select
import subprocess
import sys
from tempfile import mkstemp
from threading import Thread, RLock
import subprocess_with_timeout as subprocess_timeout
from .statistics import StatisticProperties
from .interop.adapter import ExecutionDeliveredNoResults
class FailedBuildingVM(Exception):
    """The exception to be raised when building of the VM failed."""
    def __init__(self, vm_name):
        # Name of the VM whose build failed; kept for diagnostics.
        self._vm_name = vm_name
class RunScheduler(object):
    """Base class for run schedulers.

    Subclasses decide the order in which the executor's runs are
    processed by implementing _process_remaining_runs().
    """

    def __init__(self, executor):
        self._executor = executor

    @staticmethod
    def _filter_out_completed_runs(runs):
        """Return only the runs that still need to be executed."""
        pending = []
        for candidate in runs:
            if not candidate.is_completed():
                pending.append(candidate)
        return pending

    @staticmethod
    def number_of_uncompleted_runs(runs):
        """Count the runs that are not yet completed."""
        return len(RunScheduler._filter_out_completed_runs(runs))

    def execute(self):
        """Process all not-yet-completed runs of the executor."""
        pending = self._filter_out_completed_runs(self._executor.runs)
        self._process_remaining_runs(pending)
class BatchScheduler(RunScheduler):
    """Executes each run to completion before moving on to the next."""

    def _process_remaining_runs(self, runs):
        for run in runs:
            try:
                # Repeat until the executor reports the run as completed.
                while not self._executor.execute_run(run):
                    pass
            except FailedBuildingVM:
                # Without its VM this run can never succeed; skip it.
                pass
class RoundRobinScheduler(RunScheduler):
    """Cycles through the runs, executing one invocation of each in turn."""

    def _process_remaining_runs(self, runs):
        queue = deque(runs)
        while queue:
            run = queue.popleft()
            try:
                if not self._executor.execute_run(run):
                    # Not done yet: re-queue it behind the others.
                    queue.append(run)
            except FailedBuildingVM:
                # Drop the run; its VM cannot be built.
                pass
class RandomScheduler(RunScheduler):
    """Picks the next run to execute uniformly at random."""

    def _process_remaining_runs(self, runs):
        pending = list(runs)
        while pending:
            run = random.choice(pending)
            try:
                if self._executor.execute_run(run):
                    pending.remove(run)
            except FailedBuildingVM:
                # The run can never succeed without its VM; drop it.
                pending.remove(run)
class BenchmarkThread(Thread):
    """Worker thread used by the ParallelScheduler.

    Repeatedly asks the parallel scheduler for a batch of runs and
    processes it with a thread-local sequential scheduler.  Any raised
    exception is captured in self.exception so the spawning thread can
    inspect and re-raise it after join().
    """

    def __init__(self, par_scheduler, num):
        Thread.__init__(self, name = "BenchmarkThread %d" % num)
        self._par_scheduler = par_scheduler
        self._id = num
        # Set by run() if the worker terminated with an exception.
        self.exception = None

    def run(self):
        try:
            scheduler = self._par_scheduler.get_local_scheduler()
            while True:
                work = self._par_scheduler.acquire_work()
                if work is None:
                    # No work left; terminate the worker.
                    return
                scheduler._process_remaining_runs(work)
        except BaseException as e:
            self.exception = e
class BenchmarkThreadExceptions(Exception):
    """Aggregates the exceptions raised by multiple benchmark threads."""

    def __init__(self, exceptions):
        # Also initialize the Exception base class so that str()/repr()
        # and e.args expose the aggregated exceptions (the original
        # skipped this, leaving str(e) empty).
        super(BenchmarkThreadExceptions, self).__init__(exceptions)
        # List of the exceptions captured by the worker threads.
        self.exceptions = exceptions
class ParallelScheduler(RunScheduler):
    """Distributes runs over a pool of worker threads.

    Runs marked as execute_exclusively are processed first, sequentially,
    with an instance of seq_scheduler_class; the remaining runs are
    handed out chunk-wise to BenchmarkThread workers.
    """

    def __init__(self, executor, seq_scheduler_class):
        RunScheduler.__init__(self, executor)
        self._seq_scheduler_class = seq_scheduler_class
        # Guards _remaining_work, which is shared by the worker threads.
        self._lock = RLock()
        self._num_worker_threads = self._number_of_threads()
        self._remaining_work = None

    def _number_of_threads(self):
        # TODO: read the configuration elements!
        # Leave a fraction of the cores idle to reduce interference
        # between concurrently running benchmarks.
        non_interference_factor = float(2.5)
        return int(floor(cpu_count() / non_interference_factor))

    @staticmethod
    def _split_runs(runs):
        """Partition runs into (exclusive, parallelizable) lists."""
        seq_runs = []
        par_runs = []
        for run in runs:
            if run.execute_exclusively:
                seq_runs.append(run)
            else:
                par_runs.append(run)
        return seq_runs, par_runs

    def _process_sequential_runs(self, runs):
        # Execute the exclusive runs up front, sequentially; return the
        # rest for the worker threads.
        seq_runs, par_runs = self._split_runs(runs)

        scheduler = self._seq_scheduler_class(self._executor)
        scheduler._process_remaining_runs(seq_runs)

        return par_runs

    def _process_remaining_runs(self, runs):
        self._remaining_work = self._process_sequential_runs(runs)

        self._worker_threads = [BenchmarkThread(self, i)
                                for i in range(self._num_worker_threads)]

        for thread in self._worker_threads:
            thread.start()

        # Collect exceptions captured by the workers and re-raise them
        # once all threads have terminated.
        exceptions = []
        for thread in self._worker_threads:
            thread.join()
            if thread.exception is not None:
                exceptions.append(thread.exception)

        if len(exceptions) > 0:
            # NOTE(review): Python 2 print statement; module is not
            # importable under Python 3 as-is.
            print exceptions
            if len(exceptions) == 1:
                raise exceptions[0]
            else:
                raise BenchmarkThreadExceptions(exceptions)

    def _determine_num_work_items_to_take(self):
        # use a simple and naive scheduling strategy that still allows for
        # different running times, without causing too much scheduling overhead
        k = len(self._remaining_work)
        per_thread = int(floor(float(k) / float(self._num_worker_threads)))
        per_thread = max(1, per_thread) # take at least 1 run
        return per_thread

    def get_local_scheduler(self):
        """Create a sequential scheduler for use by one worker thread."""
        return self._seq_scheduler_class(self._executor)

    def acquire_work(self):
        """Hand out the next chunk of runs; None when all work is taken.

        Called concurrently by the worker threads; guarded by self._lock.
        """
        with self._lock:
            if len(self._remaining_work) == 0:
                return None

            n = self._determine_num_work_items_to_take()
            assert n <= len(self._remaining_work)
            work = []
            for i in range(n):
                work.append(self._remaining_work.pop())
            return work
class Executor:
    """Drives the execution of all benchmark runs.

    Builds the configured scheduler, executes runs (building VMs on
    demand), parses benchmark output with a gauge adapter, and records
    the resulting data points on the run ids.
    """

    def __init__(self, runs, use_nice, include_faulty = False, verbose = False,
                 scheduler = BatchScheduler, build_log = None):
        self._runs = runs
        self._use_nice = use_nice
        self._include_faulty = include_faulty
        self._verbose = verbose
        self._scheduler = self._create_scheduler(scheduler)
        self._build_log = build_log

        # Tell every run how many runs there are in total, for progress
        # reporting.
        num_runs = RunScheduler.number_of_uncompleted_runs(runs)
        for run in runs:
            run.set_total_number_of_runs(num_runs)

    def _create_scheduler(self, scheduler):
        # figure out whether to use parallel scheduler
        # Use it only when there is more than one core and more than one
        # run that may execute concurrently.
        if cpu_count() > 1:
            i = 0
            for run in self._runs:
                if not run.execute_exclusively:
                    i += 1
            if i > 1:
                return ParallelScheduler(self, scheduler)

        return scheduler(self)

    def _construct_cmdline(self, run_id, gauge_adapter):
        """Assemble the shell command line for one run invocation."""
        cmdline = ""
        if self._use_nice:
            # Highest scheduling priority; requires sufficient privileges.
            cmdline += "nice -n-20 "
        cmdline += gauge_adapter.acquire_command(run_id.cmdline())
        return cmdline

    @staticmethod
    def _get_script(build):
        """ build can be either a file name, or a list of things.
            If it is a list of operations, we create a temporary file
            to execute it as shell script.

            Returns (script_path, is_temporary). """
        if not isinstance(build, list):
            return build, False

        fd, file_name = mkstemp('.sh')
        os.close(fd)
        with open(file_name, 'w') as tmp_file:
            tmp_file.write("#!/bin/sh\n")
            for line in build:
                tmp_file.write(line)
                tmp_file.write('\n')
        # 0700: owner rwx only (Python 2 octal literal).
        os.chmod(file_name, 0700)
        return file_name, True

    def _build_vm(self, run_id):
        """Build the VM for the given run if it needs building.

        Raises FailedBuildingVM when the build fails now or has failed
        before; marks the run as failed in that case.
        """
        vm_name = run_id.bench_cfg.vm.name
        if run_id.bench_cfg.vm.is_built or not run_id.bench_cfg.vm.build:
            return
        if run_id.bench_cfg.vm.is_failed_build:
            run_id.fail_immediately()
            raise FailedBuildingVM(vm_name)

        path = run_id.bench_cfg.vm.path or os.getcwd()
        script, is_temp = self._get_script(run_id.bench_cfg.vm.build)
        p = subprocess.Popen(
            script, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=path)

        if self._build_log:
            # Stream the build output into the build log as it appears.
            with open(self._build_log, 'a') as log_file:
                while True:
                    reads = [p.stdout.fileno(), p.stderr.fileno()]
                    ret = select(reads, [], [])
                    for fd in ret[0]:
                        if fd == p.stdout.fileno():
                            read = p.stdout.readline()
                            if len(read) > 0:
                                log_file.write(vm_name + '|STD:')
                                log_file.write(read)
                        elif fd == p.stderr.fileno():
                            read = p.stderr.readline()
                            if len(read) > 0:
                                log_file.write(vm_name + '|ERR:')
                                log_file.write(read)
                    if p.poll() is not None:
                        break
                # read rest of pipes
                while True:
                    read = p.stdout.readline()
                    if read == "":
                        break
                    log_file.write(vm_name + '|STD:')
                    log_file.write(read)
                while True:
                    read = p.stderr.readline()
                    if len(read) == 0:
                        break
                    log_file.write(vm_name + '|ERR:')
                    log_file.write(read)
                log_file.write('\n')

        # NOTE(review): when no build log is configured the process is
        # never polled/waited on, so p.returncode may still be None here,
        # which this check treats as a failure — verify intended.
        if p.returncode != 0:
            run_id.bench_cfg.vm.mark_build_failed()
            run_id.fail_immediately()
            run_id.report_run_failed(script, p.returncode, "Build of VM " + vm_name + " failed.")
            raise FailedBuildingVM(vm_name)

        if is_temp:
            os.remove(script)
        run_id.bench_cfg.vm.mark_build()

    def execute_run(self, run_id):
        """Execute one invocation of the given run.

        Returns True when the run's termination condition is met (i.e.,
        the run needs no further invocations).
        """
        termination_check = run_id.get_termination_check()

        run_id.run_config.log()
        run_id.report_start_run()

        gauge_adapter = self._get_gauge_adapter_instance(
            run_id.bench_cfg.gauge_adapter)

        cmdline = self._construct_cmdline(run_id, gauge_adapter)

        terminate = self._check_termination_condition(run_id, termination_check)
        if not terminate:
            self._build_vm(run_id)
            stats = StatisticProperties(run_id.get_total_values())

        # now start the actual execution
        if not terminate:
            terminate = self._generate_data_point(cmdline, gauge_adapter,
                                                  run_id, termination_check)

        stats = StatisticProperties(run_id.get_total_values())
        logging.debug("Run: #%d" % stats.num_samples)

        if terminate:
            run_id.report_run_completed(stats, cmdline)

        return terminate

    def _get_gauge_adapter_instance(self, adapter_name):
        """Locate and instantiate the gauge adapter class by name.

        Scans all modules under rebench.interop for a class called
        <adapter_name>Adapter; returns None when no module provides it.
        """
        adapter_name += "Adapter"

        root = sys.modules['rebench.interop'].__path__
        for _, name, _ in pkgutil.walk_packages(root):
            # depending on how ReBench was executed, name might one of the two
            try:
                p = __import__("rebench.interop." + name, fromlist=adapter_name)
            except ImportError as e1:
                try:
                    p = __import__("interop." + name, fromlist=adapter_name)
                except ImportError as e2:
                    p = None
            if p is not None and hasattr(p, adapter_name):
                return getattr(p, adapter_name)(self._include_faulty)

    def _generate_data_point(self, cmdline, gauge_adapter, run_id,
                             termination_check):
        # NOTE(review): Python 2 print statement.
        print cmdline

        # execute the external program here
        (return_code, output, _) = subprocess_timeout.run(
            cmdline, cwd=run_id.location, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, shell=True, verbose=self._verbose,
            timeout=run_id.bench_cfg.suite.max_runtime)

        if return_code != 0 and not self._include_faulty:
            run_id.indicate_failed_execution()
            run_id.report_run_failed(cmdline, return_code, output)
            if return_code == 126:
                # 126: command found but not executable.
                logging.error(("Could not execute %s. A likely cause is that "
                               "the file is not marked as executable.")
                              % run_id.bench_cfg.vm.name)
        else:
            self._eval_output(output, run_id, gauge_adapter, cmdline)

        return self._check_termination_condition(run_id, termination_check)

    def _eval_output(self, output, run_id, gauge_adapter, cmdline):
        """Parse the benchmark output and record data points on the run.

        The first warmup_iterations data points are discarded.
        """
        try:
            data_points = gauge_adapter.parse_data(output, run_id)

            warmup = run_id.warmup_iterations
            num_points_to_show = 20
            num_points = len(data_points)
            if num_points > num_points_to_show:
                logging.debug("Skipped %d results..." % (num_points - num_points_to_show))
            i = 0
            for data_point in data_points:
                if warmup > 0:
                    warmup -= 1
                else:
                    run_id.add_data_point(data_point)
                    # only log the last num_points_to_show results
                    if i >= num_points - num_points_to_show:
                        logging.debug("Run %s:%s result=%s" % (
                            run_id.bench_cfg.vm.name, run_id.bench_cfg.name,
                            data_point.get_total_value()))
                    i += 1
            run_id.indicate_successful_execution()
        except ExecutionDeliveredNoResults:
            run_id.indicate_failed_execution()
            run_id.report_run_failed(cmdline, 0, output)

    @staticmethod
    def _check_termination_condition(run_id, termination_check):
        """True when the run has gathered enough data points."""
        return termination_check.should_terminate(
            run_id.get_number_of_data_points())

    def execute(self):
        """Execute all runs via the scheduler; True if none failed."""
        self._scheduler.execute()
        successful = True
        for run in self._runs:
            run.report_job_completed(self._runs)
            if run.is_failed():
                successful = False
        return successful

    @property
    def runs(self):
        return self._runs
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,328 | lschuetze/ReBench | refs/heads/master | /rebench/tests/interop/rebench_log_adapter_test.py | from unittest import TestCase
from ...interop.rebench_log_adapter import RebenchLogAdapter
class RebenchAdapterTest(TestCase):
    """Unit tests for RebenchLogAdapter's parsing of benchmark output."""

    def _assert_basics(self, data, val, unit, criterion, total):
        # Expect exactly one data point carrying one measurement with the
        # given value/unit/criterion.
        self.assertEqual(1, len(data))
        dp = data[0]
        self.assertEqual(val, dp.get_total_value())
        self.assertEqual(1, len(dp.get_measurements()))
        m = dp.get_measurements()[0]
        self.assertEqual(total, m.is_total())
        self.assertEqual(val, m.value)
        self.assertEqual(criterion, m.criterion)
        self.assertEqual(unit, m.unit)

    def _assert_two_measures(self, data, val1, unit1, criterion1, val_t, unit_t):
        # Expect one data point with a criterion measurement followed by
        # the total measurement.
        self.assertEqual(1, len(data))
        dp = data[0]
        self.assertEqual(val_t, dp.get_total_value())
        self.assertEqual(2, len(dp.get_measurements()))
        m1 = dp.get_measurements()[0]
        self.assertFalse(m1.is_total())
        self.assertEqual(val1, m1.value)
        self.assertEqual(criterion1, m1.criterion)
        self.assertEqual(unit1, m1.unit)
        m2 = dp.get_measurements()[1]
        self.assertTrue(m2.is_total())
        self.assertEqual(val_t, m2.value)
        self.assertEqual(unit_t, m2.unit)

    def test_simple_name(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data("Dispatch: iterations=1 runtime: 557ms", None)
        self._assert_basics(d, 557, 'ms', 'total', True)

    def test_doted_name(self):
        # Microseconds are converted to milliseconds.
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            "LanguageFeatures.Dispatch: iterations=1 runtime: 309557us", None)
        self._assert_basics(d, 309.557, 'ms', 'total', True)

    def test_doted_and_ms(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            "LanguageFeatures.Dispatch: iterations=1 runtime: 557ms", None)
        self._assert_basics(d, 557, 'ms', 'total', True)

    def test_high_iter_count(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            "LanguageFeatures.Dispatch: iterations=2342 runtime: 557ms", None)
        self._assert_basics(d, 557, 'ms', 'total', True)

    def test_total_explicit(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            "LanguageFeatures.Dispatch total: iterations=2342 runtime: 557ms",
            None)
        self._assert_basics(d, 557, 'ms', 'total', True)

    def test_alloc_criterion(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            """LanguageFeatures.Dispatch alloc: iterations=2342 runtime: 222ms
LanguageFeatures.Dispatch total: iterations=2342 runtime: 557ms""",
            None)
        self._assert_two_measures(d, 222, 'ms', 'alloc', 557, 'ms')

    def test_foobar_criterion(self):
        # Criterion names are free-form, not a fixed set.
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            """LanguageFeatures.Dispatch foobar: iterations=2342 runtime: 550ms
LanguageFeatures.Dispatch total: iterations=2342 runtime: 557ms""",
            None)
        self._assert_two_measures(d, 550, 'ms', 'foobar', 557, 'ms')

    def test_foobar_criterion_no_doted_name(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            """Dispatch foobar: iterations=2342 runtime: 550ms
LanguageFeatures.Dispatch total: iterations=2342 runtime: 557ms""",
            None)
        self._assert_two_measures(d, 550, 'ms', 'foobar', 557, 'ms')

    def test_some_prefix_before_data(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            "some prefix: Dispatch: iterations=2342 runtime: 557ms",
            None)
        self._assert_basics(d, 557, 'ms', 'total', True)

    def test_path_as_name(self):
        l = RebenchLogAdapter(True)
        d = l.parse_data(
            "core-lib/Benchmarks/Join/FibSeq.ns: iterations=1 runtime: 129us",
            None)
        self._assert_basics(d, 0.129, 'ms', 'total', True)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,329 | lschuetze/ReBench | refs/heads/master | /rebench/rebench.py | #!/usr/bin/env python2.7
# ReBench is tool to run and document benchmarks.
#
# It is inspired by JavaStats implemented by Andy Georges.
# JavaStats can be found here: http://www.elis.ugent.be/en/JavaStats
#
# ReBench goes beyond the goals of JavaStats, no only by broaden the scope
# to not only Java VMs, but also by introducing facilities to evaluate
# other runtime characteristics of a VM beside pure execution time.
#
# Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import logging
import sys
from optparse import OptionParser, OptionGroup
from .executor import Executor, BatchScheduler, RoundRobinScheduler, \
RandomScheduler
from .persistence import DataStore
from .reporter import CliReporter
from .configurator import Configurator
from .configuration_error import ConfigurationError
class ReBench:
    """Command-line front end of ReBench.

    Parses options and arguments, builds the Configurator from the given
    config file, and executes the selected benchmark experiments.
    """

    def __init__(self):
        self.version = "0.9.1"
        self.options = None
        self._config = None

    def shell_options(self):
        """Create the OptionParser describing ReBench's command line."""
        usage = """%prog [options] <config> [run_name] [vm:$]* [s:$]*
Argument:
  config    required argument, file containing the run definition to be executed
  run_name  optional argument, the name of a run definition
            from the config file
  vm:$      filter runs to only include the named VM, example: vm:VM1 vm:VM3
  s:$       filter runs to only include the named suite and possibly benchmark
            example: s:Suite1 s:Suite2:Bench3
Note, filters are combined with `or` semantics in the same group,
i.e., vm or suite, and at least one filter needs to match per group.
"""
        options = OptionParser(usage=usage, version="%prog " + self.version)
        options.add_option("-q", "--quick", action="store_true", dest="quick",
                           help="Do a quick benchmark run instead of a full, "
                                "statistical rigorous experiment.",
                           default=False)
        options.add_option("-d", "--debug", action="store_true", dest="debug",
                           default=False, help="Enable debug output.")
        options.add_option("-f", "--faulty", action="store_true",
                           dest="include_faulty", default=False,
                           help="Include results of faulty or failing runs")
        options.add_option("-v", "--verbose", action="store_true",
                           dest="verbose", default=False,
                           help="Output more details in the report.")
        options.add_option("-N", "--without-nice", action="store_false",
                           dest="use_nice",
                           help="Used for debugging and environments without "
                                "the tool nice.",
                           default=True)
        options.add_option("-s", "--scheduler", action="store", type="string",
                           dest="scheduler", default="batch", help="execution "
                           "order of benchmarks: batch, round-robin, random "
                           "[default: %default]")
        options.add_option("-o", "--out", dest="output_file", default=None,
                           help="Report is saved to the given file. "
                                "The report is always verbose.")
        options.add_option("-c", "--clean", action="store_true", dest="clean",
                           default=False,
                           help="Discard old data from the data file "
                                "(configured in the run description).")
        options.add_option("-r", "--rerun", action="store_true",
                           dest="do_rerun", default=False,
                           help="Rerun selected experiments, " +
                                "and discard old data from data file.")

        # options required to make codespeed reporting complete
        codespeed = OptionGroup(options, "Reporting to Codespeed",
                                "Some of these parameters are mandatory for "
                                "reporting to codespeed")
        codespeed.add_option("--commit-id", dest="commit_id", default=None,
                             help="MANDATORY: when codespeed reporting is "
                                  " used, the commit-id has to be specified.")
        codespeed.add_option("--environment", dest="environment",
                             default=None,
                             help="MANDATORY: name the machine on which the "
                                  "results are obtained.")
        codespeed.add_option("--branch", dest="branch",
                             default="HEAD",
                             help="The branch for which the results have to be "
                                  "recorded, i.e., to which the commit belongs."
                                  " Default: HEAD")
        codespeed.add_option("--executable", dest="executable",
                             default=None,
                             help="The executable name given to codespeed. "
                                  "Default: The name used for the virtual "
                                  "machine.")
        codespeed.add_option("--project", dest="project",
                             default=None,
                             help="The project name given to codespeed. "
                                  "Default: Value given in the config file.")
        codespeed.add_option("-I", "--disable-inc-report",
                             action="store_false", dest="report_incrementally",
                             default=True, help="Does a final report at the "
                                                "end instead of reporting "
                                                "incrementally.")
        codespeed.add_option("-S", "--disable-codespeed",
                             action="store_false", dest="use_codespeed",
                             default=True, help="Override configuration and "
                                                "disable reporting to codespeed.")
        options.add_option_group(codespeed)
        return options

    def run(self, argv=None):
        """Parse the command line, build the configuration, and execute.

        Returns the executor's success flag (truthy when all runs passed).
        Exits the process on missing arguments or configuration errors.
        """
        if argv is None:
            argv = sys.argv

        data_store = DataStore()
        cli_options, args = self.shell_options().parse_args(argv[1:])

        if len(args) < 1:
            # fixed: the two string halves previously concatenated without a
            # separating space ("...was not given.See --help...")
            logging.error("<config> is a mandatory parameter and was not "
                          "given. See --help for more information.")
            sys.exit(-1)

        cli_reporter = CliReporter(cli_options.verbose)

        # interpret remaining args: an optional run name (anything that is
        # not a vm:/s: filter), followed by zero or more filters
        exp_name = args[1] if len(args) > 1 and (
            not args[1].startswith("vm:") and
            not args[1].startswith("s:")) else "all"
        run_filter = [f for f in args if (f.startswith("vm:") or
                                          f.startswith("s:"))]
        try:
            self._config = Configurator(args[0], data_store, cli_options,
                                        cli_reporter, exp_name, None,
                                        run_filter)
        except ConfigurationError as e:
            # str(e) instead of the deprecated BaseException.message attribute
            logging.error(str(e))
            sys.exit(-1)

        data_store.load_data()
        return self.execute_experiment()

    def execute_experiment(self):
        """Instantiate scheduler and executor and run the experiments."""
        # lazy %-args: the message is only formatted when debug logging is on
        logging.debug("execute experiment: %s", self._config.experiment_name())

        # first load old data if available
        if self._config.options.clean:
            pass  # TODO: discarding of old data is not implemented yet

        scheduler_class = {'batch':       BatchScheduler,
                           'round-robin': RoundRobinScheduler,
                           'random':      RandomScheduler}.get(
                               self._config.options.scheduler)

        runs = self._config.get_runs()
        if self._config.options.do_rerun:
            DataStore.discard_data_of_runs(runs)

        executor = Executor(runs, self._config.use_nice,
                            self._config.options.include_faulty,
                            self._config.options.verbose,
                            scheduler_class,
                            self._config.build_log)
        return executor.execute()
def main_func():
    """Run ReBench and map the outcome to a process exit code (0/-1)."""
    try:
        successful = ReBench().run()
    except KeyboardInterrupt:
        logging.info("Aborted by user request")
        return -1
    return 0 if successful else -1


if __name__ == "__main__":
    sys.exit(main_func())
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,330 | lschuetze/ReBench | refs/heads/master | /rebench/model/run_id.py | # Copyright (c) 2009-2014 Stefan Marr <http://www.stefan-marr.de/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import logging
import re
import sys
from .benchmark_config import BenchmarkConfig
class RunId(object):
    """Identity and state of a single benchmark run.

    A RunId combines a benchmark configuration with the concrete number of
    cores, input size, and variable value. It accumulates the measured data
    points, notifies registered reporters about progress, and forwards new
    data points to the registered persistence backends.
    """

    def __init__(self, bench_cfg, cores, input_size, var_value):
        self._bench_cfg = bench_cfg
        self._cores = cores
        self._input_size = input_size
        self._var_value = var_value

        self._reporters = set()
        self._persistence = set()
        self._run_config = None
        self._data_points = []
        self._termination_check = None
        self._cmdline = None
        # a run counts as failed until one execution succeeded
        self._failed = True

    def requires_warmup(self):
        """True when the configuration asks for warmup iterations."""
        return self._bench_cfg.warmup_iterations > 0

    @property
    def warmup_iterations(self):
        return self._bench_cfg.warmup_iterations

    @property
    def execute_exclusively(self):
        return self._bench_cfg.execute_exclusively

    def fail_immediately(self):
        self._termination_check.fail_immediately()

    def indicate_failed_execution(self):
        self._termination_check.indicate_failed_execution()

    def indicate_successful_execution(self):
        self._failed = False
        self._termination_check.indicate_successful_execution()

    def is_failed(self):
        """True until at least one execution completed successfully."""
        return self._failed

    def add_reporter(self, reporter):
        self._reporters.add(reporter)

    def add_reporting(self, reporting):
        self._reporters.update(reporting.get_reporters())

    def report_run_failed(self, cmdline, return_code, output):
        for reporter in self._reporters:
            reporter.run_failed(self, cmdline, return_code, output)

    def report_run_completed(self, statistics, cmdline):
        for reporter in self._reporters:
            reporter.run_completed(self, statistics, cmdline)

    def report_job_completed(self, run_ids):
        for reporter in self._reporters:
            reporter.job_completed(run_ids)

    def set_total_number_of_runs(self, num_runs):
        for reporter in self._reporters:
            reporter.set_total_number_of_runs(num_runs)

    def report_start_run(self):
        for reporter in self._reporters:
            reporter.start_run(self)

    def add_persistence(self, persistence):
        self._persistence.add(persistence)

    def loaded_data_point(self, data_point):
        """Record a data point restored from disk (no persistence echo)."""
        self._data_points.append(data_point)

    def add_data_point(self, data_point):
        """Record a freshly measured data point and persist it."""
        self._data_points.append(data_point)
        for persistence in self._persistence:
            persistence.persist_data_point(data_point)

    def get_number_of_data_points(self):
        return len(self._data_points)

    def get_data_points(self):
        return self._data_points

    def discard_data_points(self):
        self._data_points = []

    def get_total_values(self):
        return [dp.get_total_value() for dp in self._data_points]

    def set_run_config(self, run_cfg):
        """Associate the runs configuration; it may only be set once."""
        if self._run_config and self._run_config != run_cfg:
            raise ValueError("Run config has already been set "
                             "and is not the same.")
        self._run_config = run_cfg

    def get_termination_check(self):
        # created lazily because it needs the run config, which is set late
        if self._termination_check is None:
            self._termination_check = self._run_config.create_termination_check(
                self._bench_cfg)
        return self._termination_check

    def is_completed(self):
        """ Check whether the termination condition is satisfied. """
        return self.get_termination_check().should_terminate(
            self.get_number_of_data_points())

    def run_failed(self):
        return (self._termination_check.fails_consecutively() or
                self._termination_check.has_too_many_failures(
                    len(self._data_points)))

    @property
    def run_config(self):
        return self._run_config

    @property
    def bench_cfg(self):
        return self._bench_cfg

    @property
    def cores(self):
        return self._cores

    @property
    def input_size(self):
        return self._input_size

    @property
    def cores_as_str(self):
        return '' if self._cores is None else str(self._cores)

    @property
    def input_size_as_str(self):
        return '' if self._input_size is None else str(self._input_size)

    @property
    def var_value_as_str(self):
        return '' if self._var_value is None else str(self._var_value)

    @property
    def var_value(self):
        return self._var_value

    def __hash__(self):
        # hash/equality are defined over the expanded command line, so two
        # RunIds that execute the identical command are considered the same
        return hash(self.cmdline())

    def as_simple_string(self):
        return "%s %s %s %s" % (self._bench_cfg.as_simple_string(),
                                self._cores, self._input_size, self._var_value)

    def _expand_vars(self, string):
        """Substitute %(benchmark)s-style variables in a config string."""
        return string % {'benchmark' : self._bench_cfg.command,
                         'input'     : self._input_size,
                         'variable'  : self._var_value,
                         'cores'     : self._cores,
                         'warmup'    : self._bench_cfg.warmup_iterations}

    def cmdline(self):
        """Build (and cache) the full shell command line for this run."""
        if self._cmdline:
            return self._cmdline

        cmdline = ""
        if self._bench_cfg.vm.path:
            cmdline = "%s/" % (self._bench_cfg.vm.path, )
        cmdline += "%s %s" % (self._bench_cfg.vm.binary,
                              self._bench_cfg.vm.args)
        cmdline += self._bench_cfg.suite.command
        if self._bench_cfg.extra_args is not None:
            cmdline += " %s" % self._bench_cfg.extra_args

        try:
            cmdline = self._expand_vars(cmdline)
        except (ValueError, TypeError):
            # both indicate broken %-format directives in the configuration;
            # previously handled by two identical except clauses
            self._report_cmdline_format_issue_and_exit(cmdline)

        self._cmdline = cmdline.strip()
        return self._cmdline

    @property
    def location(self):
        if not self._bench_cfg.suite.location:
            return None
        return self._expand_vars(self._bench_cfg.suite.location)

    def __eq__(self, other):
        return (isinstance(other, self.__class__) and
                self.cmdline() == other.cmdline())

    def __ne__(self, other):
        return not self.__eq__(other)

    def _report_cmdline_format_issue_and_exit(self, cmdline):
        """Diagnose %(name) directives lacking a conversion type and exit."""
        logging.critical("The configuration of %s contains improper "
                         "Python format strings.", self._bench_cfg.name)

        # figure out which format directives miss a conversion type;
        # raw string replaces the previous non-raw pattern with its
        # invalid \% escape sequences
        without_conversion_type = re.findall(
            r"%\(.*?\)(?![diouxXeEfFgGcrs%])", cmdline)
        logging.error("The command line configured is: %s", cmdline)
        logging.error("The following elements do not have conversion types: \"%s\"",
                      '", "'.join(without_conversion_type))
        if without_conversion_type:
            # guard: the heuristic may find nothing even though expansion failed
            logging.error("This can be fixed by replacing for instance %s with %ss",
                          without_conversion_type[0],
                          without_conversion_type[0])
        sys.exit(-1)

    def as_str_list(self):
        """Serialize to a flat string list (inverse of from_str_list)."""
        result = self._bench_cfg.as_str_list()
        result.append(self.cores_as_str)
        result.append(self.input_size_as_str)
        result.append(self.var_value_as_str)
        return result

    @classmethod
    def from_str_list(cls, data_store, str_list):
        """Reconstruct a RunId from a serialized string list."""
        bench_cfg = BenchmarkConfig.from_str_list(data_store, str_list[:-3])
        return data_store.create_run_id(
            bench_cfg, str_list[-3], str_list[-2], str_list[-1])

    def __str__(self):
        return "RunId(%s, %s, %s, %s, %s, %d)" % (self._bench_cfg.name,
                                                  self._cores,
                                                  self._bench_cfg.extra_args,
                                                  self._input_size or '',
                                                  self._var_value or '',
                                                  self._bench_cfg.warmup_iterations)
| {"/rebench/model/measurement.py": ["/rebench/model/run_id.py"], "/rebench/model/benchmark_suite.py": ["/rebench/model/__init__.py"], "/rebench/model/reporting.py": ["/rebench/reporter.py"], "/rebench/model/experiment.py": ["/rebench/model/virtual_machine.py", "/rebench/model/benchmark_suite.py", "/rebench/model/benchmark_config.py", "/rebench/model/reporting.py", "/rebench/model/__init__.py"], "/rebench/tests/persistency_test.py": ["/rebench/configurator.py", "/rebench/executor.py", "/rebench/persistence.py", "/rebench/model/run_id.py", "/rebench/model/measurement.py", "/rebench/model/benchmark_config.py", "/rebench/model/benchmark_suite.py", "/rebench/model/virtual_machine.py"], "/rebench/configurator.py": ["/rebench/model/runs_config.py", "/rebench/model/experiment.py"], "/rebench/model/virtual_machine.py": ["/rebench/model/__init__.py"], "/rebench/tests/model/runs_config_test.py": ["/rebench/model/runs_config.py", "/rebench/configurator.py", "/rebench/persistence.py"], "/rebench/model/benchmark_config.py": ["/rebench/model/__init__.py"], "/rebench/tests/executor_test.py": ["/rebench/executor.py", "/rebench/configurator.py", "/rebench/model/measurement.py", "/rebench/persistence.py", "/rebench/__init__.py"], "/rebench/rebench.py": ["/rebench/executor.py", "/rebench/persistence.py", "/rebench/reporter.py", "/rebench/configurator.py"], "/rebench/model/run_id.py": ["/rebench/model/benchmark_config.py"]} |
46,332 | dobredia/datathon | refs/heads/master | /data_loading/process_air_quality_official.py | from os import listdir
from os.path import isfile, join
import codecs
from data_loading.data_load_utils import one_hot, process_date_air_official, time_hash, process_date_air_official
AirQualityStationVocab = ['STA-BG0040A', 'STA-BG0050A', 'STA-BG0052A', 'STA-BG0054A', 'STA-BG0073A', 'STA-BG0079A']
SamplingProcessVocab = ['SPP-BG_A_BETA_andersenFH62IR', 'SPP-BG_A_BETA_thermo5030SHARP']
'''
0. Countrycode,
1. Namespace,
2. AirQualityNetwork,
3. AirQualityStation,
4. AirQualityStationEoICode,
5. SamplingPoint,
6. SamplingProcess,
7. Sample,
8. AirPollutant,
9. AirPollutantCode,
10. AveragingTime,
11. Concentration,
12. UnitOfMeasurement,
13. DatetimeBegin,
14. DatetimeEnd,
15. Validity,
16. Verification
'''
def rewrite_lines(new_file_path = '../../datathlon data/air-quality-official/Processed_BG_5_9421_2013_timeseries.csv', file_to_read_path = '../../datathlon data/air-quality-official/BG_5_9421_2013_timeseries.csv'):
    """Convert a raw UTF-16 official air-quality CSV into a feature CSV.

    Each output row is: one-hot station id, one-hot sampling process,
    concentration, begin timestamp, end timestamp.
    """
    # 'with' guarantees both handles are closed (originally both leaked)
    with open(new_file_path, "w") as new_file, \
            codecs.open(file_to_read_path, "r", "utf-16") as source:
        for line in source.readlines()[1:]:  # skip the header row
            split = line.split(',')
            array = []
            array += one_hot(split[3], AirQualityStationVocab)  # AirQualityStation
            array += one_hot(split[6], SamplingProcessVocab)    # SamplingProcess
            array += [
                split[11],  # Concentration
                split[13],  # DatetimeBegin
                split[14]   # DatetimeEnd
            ]
            # NOTE(review): return value is discarded; presumably called for a
            # validation side effect -- confirm before removing
            process_date_air_official(split[13])
            new_file.write(','.join(array) + '\n')
def long_lat_of_official_air_station(air_station_name):
    """Map an official station id to its (longitude, latitude) pair.

    Returns None for station ids that are not known.
    """
    coordinates = {
        'STA-BG0040A': (23.310972, 42.732292),
        'STA-BG0050A': (23.296786, 42.680558),
        'STA-BG0052A': (23.400164, 42.666508),
        'STA-BG0054A': (23.33605, 42.690353),
        'STA-BG0073A': (23.268403, 42.669797),
        'STA-BG0079A': (23.383271, 42.655488),
    }
    return coordinates.get(air_station_name)
def rewrite_lines_for_heatmap(
        new_file_path = '../../datathlon data/air-quality-official/Processed_heatmap_BG_5_9421_2013_timeseries.csv',
        file_to_read_path = '../../datathlon data/air-quality-official/BG_5_9421_2013_timeseries.csv',
        file_writing_mode = 'w'):
    """Extract (time hash, longitude, latitude, concentration) rows from a raw
    UTF-16 official timeseries CSV into a heatmap CSV.

    With file_writing_mode='a' the rows are appended, which is how
    merge_all_files_for_heatmap builds one combined file.
    """
    # 'with' guarantees both handles are closed (originally both leaked)
    with open(new_file_path, file_writing_mode) as new_file, \
            codecs.open(file_to_read_path, "r", "utf-16") as source:
        # fixed: only write the header at the very start of the output file;
        # the original wrote it on every append, scattering repeated header
        # lines through the merged CSV
        if new_file.tell() == 0:
            new_file.write('DatetimeEndHash,Longitude,Latitude,Concentration\n')
        for line in source.readlines()[1:]:  # skip the header row
            split = line.split(',')
            year, month, day, hour = process_date_air_official(split[14])  # DatetimeEnd
            _time_hash = time_hash(year, month, day, hour)
            # renamed from 'long'/'lat': 'long' shadows the Python 2 builtin
            longitude, latitude = long_lat_of_official_air_station(split[3])
            array = [
                str(_time_hash),  # DatetimeEnd hash
                str(longitude),   # Longitude
                str(latitude),    # Latitude
                split[11],        # Concentration
            ]
            new_file.write(','.join(array) + '\n')
def merge_all_files_for_heatmap(new_file_path = '../../datathlon data/air-quality-official/Processed_heatmap_all.csv', dir_path = '../../datathlon data/air-quality-official'):
    """Append the heatmap rows of every raw official 2018 timeseries CSV in
    dir_path into a single combined output file."""
    for entry in listdir(dir_path):
        is_candidate = (isfile(join(dir_path, entry))
                        and entry.startswith('BG')
                        and entry.endswith('2018_timeseries.csv'))
        if not is_candidate:
            continue
        rewrite_lines_for_heatmap(new_file_path = new_file_path,
                                  file_to_read_path = dir_path + '/' + entry,
                                  file_writing_mode = 'a')
def data_for_heatmap(file_to_read_path = '../../datathlon data/air-quality-official/Processed_heatmap_BG_5_9421_2013_timeseries.csv'):
    """Yield one (time hash, longitude, latitude, concentration) tuple per
    data row of a processed heatmap CSV.

    Generator: the file is streamed line by line instead of being read fully
    into memory, and is closed when iteration finishes.
    """
    with codecs.open(file_to_read_path, "r", "utf-8") as source:
        next(source, None)  # skip the header row
        for line in source:
            if not line.strip():
                continue  # tolerate trailing blank lines
            split = line.split(',')
            # local renamed from 'time_hash': it shadowed the
            # data_load_utils.time_hash import used elsewhere in this module
            t_hash = int(split[0])            # DatetimeEnd hash
            longitude = float(split[1])       # Longitude
            latitude = float(split[2])        # Latitude
            concentration = float(split[3])   # Concentration
            yield t_hash, longitude, latitude, concentration
# Script entry point: merge all official 2018 timeseries into one heatmap CSV.
if __name__ == "__main__":
    # rewrite_lines_for_heatmap()
    merge_all_files_for_heatmap()
| {"/data_loading/process_air_quality_official.py": ["/data_loading/data_load_utils.py"], "/heatmap/heatmap_citizen_official.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v2.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/heatmap/heatmap_station_citizen.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v4.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/process_air_quality_citizen.py": ["/data_loading/data_load_utils.py"]} |
46,333 | dobredia/datathon | refs/heads/master | /heatmap/heatmap_citizen_official.py | import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from data_loading.process_air_quality_official import data_for_heatmap as data_for_heatmap_official
# Overlay heatmap: official station means drawn as large squares, citizen
# measurement means drawn as small dots, sharing one concentration scale.
if __name__ == "__main__":
    '''
    OFFICIAL DATA
    '''
    data_official = pd.read_csv('../../datathlon data/air-quality-official/Processed_heatmap_all.csv')
    # print(data.head())
    # print(data.shape)
    # print(data.columns)
    couple_columns_official = data_official[['Concentration', 'Longitude', 'Latitude']]
    # print(couple_columns.head())
    # print(data.ix[:, ['Concentration', 'Longitude', 'Latitude']].head())
    # mean concentration per official station location
    data_lat_long_official = couple_columns_official.groupby(['Latitude', 'Longitude']).mean()
    # print(data_lat_long.shape)
    # print(data_lat_long.head(10))
    data_lat_long_official = data_lat_long_official.reset_index()
    # print(data_lat_long.head())
    # major_ticks = np.arange(0.2, 0.5, 0.01)
    # minor_ticks = np.arange(0, 50, 1)
    '''
    CITIZEN DATA
    '''
    data_citizen = pd.read_csv('../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv')
    # print(data.head())
    # print(data.shape)
    # print(data.columns)
    couple_columns_citizen = data_citizen[['Concentration', 'Longitude', 'Latitude']]
    # print(couple_columns.head())
    # print(data.ix[:, ['Concentration', 'Longitude', 'Latitude']].head())
    # mean concentration per citizen sensor location
    data_lat_long_citizen = couple_columns_citizen.groupby(['Latitude', 'Longitude']).mean()
    # print(data_lat_long.shape)
    # print(data_lat_long.head(10))
    data_lat_long_citizen = data_lat_long_citizen.reset_index()
    # print(data_lat_long.head())
    # major_ticks = np.arange(0.2, 0.5, 0.01)
    # minor_ticks = np.arange(0, 50, 1)
    '''
    PLOT
    '''
    fig = plt.figure(figsize=(6, 5))
    ax = fig.add_subplot(1, 1, 1)
    # official stations: large squares colored by mean concentration
    s_1 = ax.scatter('Latitude', 'Longitude', c = 'Concentration', data = data_lat_long_official, cmap = 'RdYlGn_r', marker = 's', s = 190)
    # zoom the axes to the citizen-covered area, with a small margin
    ax.axis([
        data_lat_long_citizen['Latitude'].min() - 0.05,
        data_lat_long_citizen['Latitude'].max() + 0.05,
        data_lat_long_citizen['Longitude'].min() - 0.05,
        data_lat_long_citizen['Longitude'].max() + 0.05
    ])
    ax.grid(which='both', alpha = 0.3)
    ax.grid(which='major', alpha = 0.3)
    ax.set_xlabel('Latitude', fontsize = 10);
    ax.set_ylabel('Longitude', fontsize = 10);
    ax.set_title('Concentration', size = 15)
    # cap citizen values at the official maxima so both layers map onto the
    # same color range (the colorbar comes from the official scatter)
    clip_config = {
        'Concentration': data_lat_long_official['Concentration'].max(),
        'Latitude': data_lat_long_citizen['Latitude'].max(),
        'Longitude': data_lat_long_citizen['Longitude'].max()
    }
    data_lat_long_citizen = data_lat_long_citizen.clip(upper = pd.Series(clip_config), axis=1)
    cbar = plt.colorbar(mappable = s_1, ax = ax)
    # citizen measurements: small dots drawn on top of the official squares
    s_2 = ax.scatter('Latitude', 'Longitude', c='Concentration', data=data_lat_long_citizen, cmap='RdYlGn_r', marker='.', s=50)
    # cbar = plt.colorbar(mappable=s_2, ax=ax)
plt.show() | {"/data_loading/process_air_quality_official.py": ["/data_loading/data_load_utils.py"], "/heatmap/heatmap_citizen_official.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v2.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/heatmap/heatmap_station_citizen.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v4.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/process_air_quality_citizen.py": ["/data_loading/data_load_utils.py"]} |
46,334 | dobredia/datathon | refs/heads/master | /data_loading/merge_citizen_and_official_by_time.py | from os import listdir
from os.path import isfile, join
import codecs
from geohash.geohashdecode import decode1
from data_loading.data_load_utils import time_hash, process_date_str
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from data_loading.process_air_quality_official import data_for_heatmap as data_for_heatmap_official
'''
0. DatetimeEndHash
1. Longitude
2. Latitude
3. Concentration
'''
def merge_citize_and_official():
    """Join each citizen measurement with the five official station readings
    recorded in the same hour.

    Official rows are grouped by their time hash and sorted by latitude so the
    five concentrations are appended in a stable, station-independent order.
    """
    # .values replaces the long-deprecated DataFrame.as_matrix()
    official = pd.read_csv('../../datathlon data/air-quality-official/Processed_heatmap_all.csv').values
    citizen = pd.read_csv('../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv').values

    # group official rows by their time hash (column 0)
    by_time = {}
    for row in official:
        by_time.setdefault(row[0], []).append(row)

    # sort each group by latitude (column 2); list.sort is stable like the
    # hand-written bubble sort it replaces, but O(n log n) instead of O(n^2)
    for rows in by_time.values():
        rows.sort(key=lambda r: r[2])

    citizen_enriched_with_official = []
    for row in citizen:
        official_by_time = by_time.get(row[0])
        if official_by_time is None:
            continue  # no official data for this hour
        if len(official_by_time) < 5:
            # explicit check replaces the former bare except around indexing
            print('record skipped because of missing official data')
            print(official_by_time)
            continue
        official_concentrations = [official_by_time[i][3] for i in range(5)]
        citizen_enriched_with_official.append(row.tolist() + official_concentrations)
    print('Done')
if __name__ == "__main__":
merge_citize_and_official() | {"/data_loading/process_air_quality_official.py": ["/data_loading/data_load_utils.py"], "/heatmap/heatmap_citizen_official.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v2.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/heatmap/heatmap_station_citizen.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v4.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/process_air_quality_citizen.py": ["/data_loading/data_load_utils.py"]} |
46,335 | dobredia/datathon | refs/heads/master | /gausian_mixture_model/gausian-citizen.py | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from sklearn import mixture
import pandas as pd
'''
0. Latitude
1. Longitude
2. Concentration
'''
# Fit a Gaussian mixture to the citizen measurement locations and plot the
# negative log-likelihood as a contour map over the covered area.
data = pd.read_csv('../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv')
couple_columns = data[['Concentration', 'Longitude', 'Latitude']]
# mean concentration per (latitude, longitude) location
data_lat_long = couple_columns.groupby(['Latitude', 'Longitude']).mean()
data_lat_long = data_lat_long.reset_index()
np_data = data_lat_long.as_matrix()
X_train = np_data[:, 0:2]  # (Latitude, Longitude) pairs only

# fit a Gaussian Mixture Model with eight components
clf = mixture.GaussianMixture(n_components=8, covariance_type='full')
clf.fit(X_train)

# display predicted scores by the model as a contour plot, evaluated on a
# grid that extends slightly beyond the measured area
x = np.linspace(data_lat_long['Latitude'].min() - 0.05, data_lat_long['Latitude'].max() + 0.05)
y = np.linspace(data_lat_long['Longitude'].min() - 0.05, data_lat_long['Longitude'].max() + 0.05)
X, Y = np.meshgrid(x, y)
XX = np.array([X.ravel(), Y.ravel()]).T
Z = -clf.score_samples(XX)
Z = Z.reshape(X.shape)
# CS = plt.contour(X, Y, Z, norm=LogNorm(vmin=1.0, vmax=1000.0),
#                  levels=np.logspace(0, 3, 10))
CS = plt.contour(X, Y, Z)
# CB = plt.colorbar(CS, shrink=0.8, extend='both')
CB = plt.colorbar(CS)
# overlay the training locations as small dots
plt.scatter(X_train[:, 0], X_train[:, 1], .8)
plt.title('Negative log-likelihood predicted by a GMM')
plt.axis('tight')
plt.show() | {"/data_loading/process_air_quality_official.py": ["/data_loading/data_load_utils.py"], "/heatmap/heatmap_citizen_official.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v2.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/heatmap/heatmap_station_citizen.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v4.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/process_air_quality_citizen.py": ["/data_loading/data_load_utils.py"]} |
46,336 | dobredia/datathon | refs/heads/master | /data_loading/merge_citizen_and_official_by_time_v2.py | from os import listdir
from os.path import isfile, join
import codecs
from geohash.geohashdecode import decode1
from data_loading.data_load_utils import time_hash, process_date_str
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from data_loading.process_air_quality_official import data_for_heatmap as data_for_heatmap_official
from scipy import stats
'''
0. DatetimeEndHash
1. Longitude
2. Latitude
3. Concentration
'''
def merge_citize_and_official():
    """Correlate citizen measurements with the official station readings of
    the same hour and print the per-station Pearson coefficients.

    Official rows are grouped by their time hash and sorted by latitude; at
    least four official stations are required per hour, a missing fifth
    station is padded with 0.
    """
    # .values replaces the long-deprecated DataFrame.as_matrix()
    official = pd.read_csv('../../datathlon data/air-quality-official/Processed_heatmap_all.csv').values
    citizen = pd.read_csv('../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv').values

    # group official rows by their time hash (column 0)
    by_time = {}
    for row in official:
        by_time.setdefault(row[0], []).append(row)

    # sort each group by latitude (column 2); list.sort is stable like the
    # hand-written bubble sort it replaces, but O(n log n) instead of O(n^2)
    for rows in by_time.values():
        rows.sort(key=lambda r: r[2])

    citizen_enriched_with_official = []
    for row in citizen:
        official_by_time = by_time.get(row[0])
        if official_by_time is None:
            continue  # no official data for this hour
        if len(official_by_time) < 4:
            # explicit check replaces the former bare except around indexing
            print(official_by_time)
            continue
        official_concentrations = [official_by_time[i][3] for i in range(4)]
        # the fifth station is optional; pad with 0 when it is missing
        official_concentrations.append(
            official_by_time[4][3] if len(official_by_time) == 5 else 0)
        citizen_enriched_with_official.append(row.tolist() + official_concentrations)

    # enriched columns: 0 DatetimeEndHash, 1 Longitude, 2 Latitude,
    # 3 citizen Concentration, 4-8 official concentrations 1-5
    np_citizen_enriched_with_official = np.asanyarray(citizen_enriched_with_official)
    print(np_citizen_enriched_with_official.shape)

    # Pearson correlation of the citizen readings against each official
    # station; the loop replaces five copy-pasted computations
    citizen_concentrations = np_citizen_enriched_with_official[:, 3]
    pearson_rs = [
        stats.pearsonr(citizen_concentrations,
                       np_citizen_enriched_with_official[:, 4 + i])[0]
        for i in range(5)
    ]
    print('Pearson R', *pearson_rs)
if __name__ == "__main__":
merge_citize_and_official() | {"/data_loading/process_air_quality_official.py": ["/data_loading/data_load_utils.py"], "/heatmap/heatmap_citizen_official.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v2.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/heatmap/heatmap_station_citizen.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v4.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/process_air_quality_citizen.py": ["/data_loading/data_load_utils.py"]} |
46,337 | dobredia/datathon | refs/heads/master | /heatmap/heatmap_station_citizen.py | import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from data_loading.process_air_quality_official import data_for_heatmap as data_for_heatmap_official
if __name__ == "__main__":
    # Mean concentration per citizen station, drawn as a lat/lon scatter map.
    readings = pd.read_csv('../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv')
    subset = readings[['Concentration', 'Longitude', 'Latitude']]
    per_station = subset.groupby(['Latitude', 'Longitude']).mean().reset_index()

    fig = plt.figure(figsize=(6, 5))
    ax = fig.add_subplot(1, 1, 1)
    points = ax.scatter('Latitude', 'Longitude', c='Concentration', data=per_station,
                        cmap='RdYlGn_r', marker='.', s=50)

    # Pad the axes slightly beyond the station bounding box.
    pad = 0.05
    lat = per_station['Latitude']
    lon = per_station['Longitude']
    ax.axis([lat.min() - pad, lat.max() + pad, lon.min() - pad, lon.max() + pad])

    ax.grid(which='both', alpha=0.3)
    ax.grid(which='major', alpha=0.3)
    ax.set_xlabel('Latitude', fontsize=10)
    ax.set_ylabel('Longitude', fontsize=10)
    ax.set_title('Concentration', size=15)
    plt.colorbar(mappable=points, ax=ax)
    plt.show()
| {"/data_loading/process_air_quality_official.py": ["/data_loading/data_load_utils.py"], "/heatmap/heatmap_citizen_official.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v2.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/heatmap/heatmap_station_citizen.py": ["/data_loading/process_air_quality_official.py"], "/data_loading/merge_citizen_and_official_by_time_v4.py": ["/data_loading/data_load_utils.py", "/data_loading/process_air_quality_official.py"], "/data_loading/process_air_quality_citizen.py": ["/data_loading/data_load_utils.py"]} |
46,338 | dobredia/datathon | refs/heads/master | /data_loading/merge_citizen_and_official_by_time_v4.py | from os import listdir
from os.path import isfile, join
import codecs
from geohash.geohashdecode import decode1
from data_loading.data_load_utils import time_hash, process_date_str
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from data_loading.process_air_quality_official import data_for_heatmap as data_for_heatmap_official
from scipy import stats
import math
'''
0. DatetimeEndHash
1. Longitude
2. Latitude
3. Concentration
'''
def merge_citize_and_official():
    """Enrich each citizen measurement with the official readings of the same
    hour, compute per-station Pearson-weighted concentrations and append them
    to Processed_heatmap_all_citizen_weighted.csv.

    Row layout of both inputs (and of the first 3 output columns):
    0. DatetimeEndHash  1. Longitude  2. Latitude  3. Concentration
    """
    # Bug fix: DataFrame.as_matrix() was removed in pandas 1.0;
    # to_numpy() is the supported replacement.
    official = pd.read_csv('../../datathlon data/air-quality-official/Processed_heatmap_all.csv').to_numpy()
    citizen = pd.read_csv('../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv').to_numpy()

    # Group official rows by their hour hash (column 0).
    by_time = {}
    for row in official:
        by_time.setdefault(row[0], []).append(row)

    # Sort each hour's official rows by latitude (column 2) so the five
    # concentration columns below always refer to the same stations
    # (replaces the original hand-written O(n^2) bubble sort).
    for key in by_time:
        by_time[key] = sorted(by_time[key], key=lambda r: r[2])

    # Enriched layout: columns 0-3 as above, then official concentrations 1..5.
    citizen_enriched_with_official = []
    for row in citizen:
        if row[0] not in by_time:
            continue
        try:
            official_by_time = by_time[row[0]]
            official_concentrations = [official_by_time[0][3], official_by_time[1][3],
                                       official_by_time[2][3], official_by_time[3][3]]
            # Hours with only 4 official stations get a 0 for the 5th slot.
            if len(official_by_time) == 5:
                official_concentrations += [official_by_time[4][3]]
            else:
                official_concentrations += [0]
            citizen_enriched_with_official.append(row.tolist() + official_concentrations)
        except Exception:  # e.g. IndexError for hours with < 4 official rows
            print('recored skipped beacuse of error')

    # Group the enriched rows by citizen station (lon, lat).
    by_citizen_station = {}
    for measurement in citizen_enriched_with_official:
        station = measurement[1], measurement[2]
        by_citizen_station.setdefault(station, []).append(measurement)

    weighted_measuements_file_path = '../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen_weighted.csv'
    # `with` flushes and closes the output (the original leaked the handle).
    with open(weighted_measuements_file_path, 'a') as new_file:
        for station in by_citizen_station:
            try:
                np_measurments = np.asarray(by_citizen_station[station])
                citizen_series = np.squeeze(np_measurments[:, 3:4])
                officials = [np.squeeze(np_measurments[:, c:c + 1]) for c in range(4, 9)]
                pearsons = [stats.pearsonr(citizen_series, off)[0] for off in officials]
                # pearsonr yields NaN for constant series; skip such stations.
                if any(math.isnan(p) for p in pearsons):
                    continue
                # Correlation-weighted mean of the citizen value (weight 1)
                # and the five official values (weight = Pearson r).
                weighted_sum = citizen_series + sum(off * p for off, p in zip(officials, pearsons))
                weights = 1 + sum(pearsons)
                concentrations_weighted = weighted_sum / weights
                cols = np_measurments[:, 0:3]
                records = np.hstack((cols, np.expand_dims(concentrations_weighted, axis=1)))
                for rec in records.tolist():
                    new_file.write(','.join(str(r) for r in rec) + '\n')
            except Exception:  # e.g. pearsonr on a single measurement
                print('Record skipped from pearson caclualtion beacuse of error')
    print('Done')
# Script entry point.
if __name__ == "__main__":
    merge_citize_and_official()
46,339 | dobredia/datathon | refs/heads/master | /data_loading/process_air_quality_citizen.py | from os import listdir
from os.path import isfile, join
import codecs
from geohash.geohashdecode import decode1
from data_loading.data_load_utils import time_hash, process_date_str
'''
0. time,
1. geohash,
2. P1 PM10,
3. P2 PM2.5,
4. temperature,
5. humidity,
6. pressure
'''
def rewrite_lines_0(new_file_path = '../../datathlon data/air-quality-citizen/Processed_sample_data_bg_2018.csv', file_to_read_path = '../../datathlon data/air-quality-citizen/sample_data_bg_2018.csv'):
    """Write the translated header row of the raw citizen CSV.

    Maps the source header (time, geohash, P1, P2, temperature, humidity,
    pressure) to: time, lon, lat, P1, temperature, humidity, pressure --
    the geohash column becomes two columns and P2 PM2.5 is dropped.
    """
    # `with` closes both handles (the original leaked them).
    with open(new_file_path, "w") as new_file, codecs.open(file_to_read_path, "r", "utf-8") as f:
        lines = f.readlines()
        for line in lines[:1]:
            split = line.split(',')
            array = [
                split[0],       # time
                'lon',          # geohash -> longitude column name
                'lat',          # geohash -> latitude column name
                split[2],       # P1 PM10
                split[4],       # temperature
                split[5],       # humidity
                split[6][:-1],  # pressure (trailing newline stripped)
            ]
            new_file.write(','.join(array))
def rewrite_lines(new_file_path = '../../datathlon data/air-quality-citizen/Processed_sample_data_bg_2018.csv', file_to_read_path = '../../datathlon data/air-quality-citizen/sample_data_bg_2018.csv'):
    """Append the data rows of the raw citizen CSV to *new_file_path*,
    replacing the geohash column with decoded longitude/latitude.

    Columns written: time, lon, lat, P1 PM10, temperature, humidity,
    pressure (the P2 PM2.5 column of the source is dropped). The header
    row is skipped; see rewrite_lines_0 for it.
    """
    with codecs.open(file_to_read_path, "r", "utf-8") as f:
        lines = f.readlines()
    # `with` flushes and closes the output (the original leaked both handles).
    with open(new_file_path, "a") as new_file:
        for line in lines[1:]:
            split = line.split(',')
            lon, lat = decode1(split[1])
            array = [
                split[0],       # time
                str(lon),       # geohash -> longitude
                str(lat),       # geohash -> latitude
                split[2],       # P1 PM10
                split[4],       # temperature
                split[5],       # humidity
                split[6][:-1],  # pressure (trailing newline stripped)
            ]
            # Bug fix: the original stripped the newline but never wrote a
            # separator, so the whole output collapsed onto a single line.
            new_file.write(','.join(array) + '\n')
def rewrite_lines_for_heat_map(
        new_file_path = '../../datathlon data/air-quality-citizen/Processed_sample_data_bg_2018.csv',
        file_to_read_path = '../../datathlon data/air-quality-citizen/sample_data_bg_2018.csv',
        write_mode = 'w',
        include_header = False):
    """Convert raw citizen rows into heatmap rows: time hash, lon, lat, PM10.

    Rows whose geohash cannot be decoded, or which fall outside the
    lat 42.62-42.74 / lon 23.20-23.45 bounding box (presumably the Sofia
    area -- confirm), are skipped. Progress is printed every 100000 rows.
    """
    with codecs.open(file_to_read_path, "r", "utf-8") as f:
        lines = f.readlines()
    count = 0
    # `with` guarantees the output is flushed and closed (the original
    # leaked both file handles).
    with open(new_file_path, write_mode) as new_file:
        if include_header:
            new_file.write('DatetimeEndHash,Longitude,Latitude,Concentration\n')
        for line in lines[1:]:
            split = line.split(',')
            year, month, day, hour = process_date_str(split[0])
            _time_hash = time_hash(year, month, day, hour)
            try:
                lon, lat = decode1(split[1])
            except Exception:  # narrowed from a bare except
                print('skipped record because can not decode location hash : ', split[1])
                continue
            if lat < 42.62 or lat > 42.74 or lon > 23.45 or lon < 23.20:
                continue
            pm_10 = split[2]
            new_file.write(','.join([str(_time_hash), str(lon), str(lat), pm_10 + '\n']))
            count += 1
            if count % 100000 == 0:
                print(count / len(lines) * 100, '%')
def merge_all_files_for_heatmap(
        new_file_path = '../../datathlon data/air-quality-citizen/Processed_heatmap_all_citizen.csv',
        file_paths = ['../../datathlon data/air-quality-citizen/data_bg_2018.csv/data_bg_2018.csv']):
    """Append the heatmap rows of every file in *file_paths* (plus a header)
    to a single CSV at *new_file_path*.
    """
    # Bug fix: write and CLOSE the header before processing. The original
    # kept its own append handle open (and never flushed it) while
    # rewrite_lines_for_heat_map reopened the same path, so the buffered
    # header could end up interleaved with / after the data rows.
    with open(new_file_path, 'a') as new_file:
        new_file.write('DatetimeEndHash,Longitude,Latitude,Concentration\n')
    for path in file_paths:
        rewrite_lines_for_heat_map(new_file_path = new_file_path, file_to_read_path = path, write_mode = 'a')
# Script entry point. Earlier pipeline stages are kept below for reference.
if __name__ == "__main__":
    # rewrite_lines_0()
    # rewrite_lines()
    # rewrite_lines_for_heat_map(include_header = True)
    merge_all_files_for_heatmap()
46,340 | dobredia/datathon | refs/heads/master | /heatmap/heatmap_example.py | import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
# Demo: mean helix energy as a colored scatter over the two phase angles.
# Ported from a notebook -- the bare `.head()` / `.shape` expressions below
# are REPL leftovers and have no effect when run as a script.
helix = pd.read_csv('./helix_parameters.csv')
helix.head()
helix.shape
helix.columns
couple_columns = helix[['Energy','helix 2 phase', 'helix1 phase']]
couple_columns.head()
# Bug fix: DataFrame.ix was deprecated in pandas 0.20 and removed in 1.0;
# label-based .loc is the direct replacement here.
helix.loc[:, ['Energy','helix 2 phase', 'helix1 phase']].head()
phase_1_2 = couple_columns.groupby(['helix1 phase', 'helix 2 phase']).mean()
phase_1_2.shape
phase_1_2.head(10)
phase_1_2 = phase_1_2.reset_index()
phase_1_2.head()
major_ticks = np.arange(0, 200, 20)
minor_ticks = np.arange(0, 180, 5)
fig = plt.figure(figsize = (6,5))
ax = fig.add_subplot(1,1,1)
s = ax.scatter('helix1 phase', 'helix 2 phase', c = 'Energy',data = phase_1_2, cmap = 'Blues_r', marker = 's',s = 190)
ax.axis([phase_1_2['helix1 phase'].min()-10, phase_1_2['helix1 phase'].max()+10, phase_1_2['helix 2 phase'].min()-10, phase_1_2['helix 2 phase'].max()+10])
ax.set_xticks(major_ticks)
ax.set_xticks(minor_ticks, minor=True)
ax.set_yticks(major_ticks)
ax.grid(which='both', alpha = 0.3)
ax.grid(which='major', alpha=0.3)
ax.set_xlabel('helix1 phase', fontsize=10)
ax.set_ylabel('helix 2 phase', fontsize=10)
ax.set_title('Energy from Helix Phase Angles', size = 15)
cbar = plt.colorbar(mappable = s,ax = ax)
plt.show()
46,341 | dobredia/datathon | refs/heads/master | /data_loading/data_load_utils.py | import numpy as np
from dateutil.parser import parse
year_vocab = [2017, 2018]
def one_hot(value, vocab):
    """Return a one-hot list of '0'/'1' strings marking *value*'s slot in *vocab*.

    If *value* is absent the list is all '0'; every matching position
    (there may be several) becomes '1'.
    """
    hits = np.array(vocab) == value
    return np.where(hits, '1', '0').tolist()
def time_hash(year, month, day, hour):
    """Collapse a timestamp into a single comparable hour index.

    Uses fixed 30-day months and 12-month years, so this is a bucketing
    key rather than a true calendar offset.
    """
    hours_per_day = 24
    hours_per_month = hours_per_day * 30
    hours_per_year = hours_per_month * 12
    return hour + day * hours_per_day + month * hours_per_month + year * hours_per_year
def date_hash(year, month, day):
    """Collapse a date into a single comparable day index (fixed 30-day months)."""
    days_per_month = 30
    days_per_year = days_per_month * 12
    return day + month * days_per_month + year * days_per_year
def process_date_air_official(date_string):
    """Parse *date_string* (any dateutil-supported format) and return its
    UTC calendar fields as a (year, month, day, hour) tuple."""
    utc = parse(date_string).utctimetuple()
    return utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour
def process_date_str(date_string):
    """Parse *date_string* and return its UTC (year, month, day, hour).

    Identical to process_date_air_official; the name is kept for existing
    callers, but the body now delegates instead of duplicating the
    parsing logic line for line.
    """
    return process_date_air_official(date_string)
46,342 | dobredia/datathon | refs/heads/master | /sheet.py | import numpy as np
# Scratch check: division between numpy arrays is element-wise.
array1 = np.asarray([1, 2, 3])
array2 = np.asarray([0.5, 5, 10])
result = array1 / array2
print(result)
46,349 | dan801212/pytorch-3DGAN | refs/heads/master | /utils.py | import torch
import torch.nn.functional as F
from torch.autograd import Variable
import matplotlib as mpl
mpl.use('Agg')
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
from scipy.misc import imread
def generateLabel(tensor, label, args):
    """Build a CUDA target tensor shaped like *tensor* for real (1) / fake (0).

    With args.soft_label, real targets are drawn from U(0.7, 1.2) and fake
    targets from U(0, 0.3) instead of hard ones/zeros.
    """
    is_real = (label == 1)
    if args.soft_label:
        low, high = (0.7, 1.2) if is_real else (0, 0.3)
        out = Variable(torch.Tensor(tensor.size()).uniform_(low, high)).cuda()
    else:
        hard = torch.ones_like(tensor) if is_real else torch.zeros_like(tensor)
        out = hard.cuda()
    return out
def generateZ(args):
    """Sample a latent batch of shape (batch_size_gan, z_dim, *z_start_vox).

    args.z_dis selects the distribution: "norm" -> N(0, 0.33),
    "uni" -> U(0, 1). The result is always a float32 tensor.

    Raises:
        ValueError: for an unknown args.z_dis. (The original only printed a
        warning and then crashed with UnboundLocalError on `Z`.)
    """
    size = (args.batch_size_gan, args.z_dim,
            args.z_start_vox[0], args.z_start_vox[1], args.z_start_vox[2])
    if args.z_dis == "norm":
        # torch.empty(...).normal_ avoids the wasted torch.randn draw the
        # old code made before overwriting it in place.
        Z = torch.empty(size).normal_(0, 0.33)
    elif args.z_dis == "uni":
        Z = torch.rand(size)
    else:
        raise ValueError("z_dist is not normal or uniform")
    return Z.type(torch.FloatTensor)
def adjust_learning_rate(optimizer, epoch, init_lr, update_lr_epoch):
    """Decay the LR by 10x once every *update_lr_epoch* epochs and apply it
    to every parameter group of *optimizer*."""
    decay_steps = epoch // update_lr_epoch
    lr = init_lr * (0.1 ** decay_steps)
    print('Set new lr = ' + str(lr))
    for group in optimizer.param_groups:
        group['lr'] = lr
def plot_3D_scene(data_pred, data_label, log_img_name, src_img_name, free_voxels):
    """Save a side-by-side 3D scatter of predicted vs ground-truth voxels.

    Voxels equal to 0 or 255 are treated as empty/ignored; predicted voxels
    also drop positions where free_voxels == 1. If *src_img_name* is given,
    the source image is shown in a third panel. The figure is written to
    *log_img_name* and all figures are closed afterwards.
    """
    x, y, z = np.meshgrid(np.arange(data_pred.shape[0]), np.arange(data_pred.shape[1]), np.arange(data_pred.shape[2]))
    [x, y, z] = (np.reshape(x, (-1)), np.reshape(y, (-1)), np.reshape(z, (-1)))
    fig = plt.figure(figsize=(25, 12))

    # Prediction panel.
    idx = (data_pred != 0) & (data_pred != 255) & (free_voxels != 1)  # TODO: Shouldn't this be data_label?
    idx = np.reshape(idx, (-1))
    ax = fig.add_subplot(131, projection='3d')
    scat = ax.scatter(x[idx], y[idx], z[idx], c=np.reshape(data_pred, (-1))[idx], cmap='jet', marker="s")
    ax.set_xlabel('X', fontsize=9)
    ax.set_ylabel('Y', fontsize=9)
    ax.set_zlabel('Z', fontsize=9)
    ax.set_title('pred')
    ax.axis('equal')
    ax.view_init(elev=30, azim=150)

    # Ground-truth panel.
    idx = (data_label != 0) & (data_label != 255)
    idx = np.reshape(idx, (-1))
    ax = fig.add_subplot(132, projection='3d')
    scat = ax.scatter(x[idx], y[idx], z[idx], c=np.reshape(data_label, (-1))[idx], cmap='jet', marker="s")
    ax.set_xlabel('X', fontsize=9)
    ax.set_ylabel('Y', fontsize=9)
    ax.set_zlabel('Z', fontsize=9)
    ax.set_title('gt')
    ax.axis('equal')
    ax.view_init(elev=30, azim=150)
    fig.colorbar(scat, shrink=0.5, aspect=5)

    if src_img_name is not None:
        ax = fig.add_subplot(133)
        # Bug fix: scipy.misc.imread was removed in SciPy 1.2; matplotlib's
        # imread is a drop-in replacement for loading an image array here.
        im = plt.imread(src_img_name)
        ax.imshow(im)
        plt.axis('off')
    plt.savefig(log_img_name, bbox_inches='tight')
    plt.close('all')
def plot_single_3D_scene(data_pred, log_img_name):
    """Save a 3D scatter of the occupied voxels in *data_pred* to *log_img_name*.

    Voxels equal to 0 or 255 are treated as empty/ignored.
    """
    dims = data_pred.shape
    gx, gy, gz = np.meshgrid(np.arange(dims[0]), np.arange(dims[1]), np.arange(dims[2]))
    gx, gy, gz = np.reshape(gx, (-1)), np.reshape(gy, (-1)), np.reshape(gz, (-1))
    fig = plt.figure(figsize=(25, 12))
    keep = np.reshape((data_pred != 0) & (data_pred != 255), (-1))
    ax = fig.add_subplot(131, projection='3d')
    ax.scatter(gx[keep], gy[keep], gz[keep], c=np.reshape(data_pred, (-1))[keep], cmap='jet', marker="s")
    ax.set_xlabel('X', fontsize=9)
    ax.set_ylabel('Y', fontsize=9)
    ax.set_zlabel('Z', fontsize=9)
    ax.set_title('pred')
    ax.axis('equal')
    ax.view_init(elev=30, azim=150)
    plt.savefig(log_img_name, bbox_inches='tight')
    plt.close('all')
def print_all_params(num_epochs, update_lr_iter, iter_size, save_freq, vis_freq,
                     save_dir, batch_size, learning_rate,
                     resume, model_file, optimizer_mode,
                     sampling_mode, fixed_weights, filelist_name,
                     data_root, image_dir, data_info, num_train_files):
    """Echo the run configuration, one ' name = value' line per setting.

    data_info is accepted for signature compatibility but deliberately not
    printed (as in the original, where its print line was commented out).
    """
    settings = [
        ('num_epochs', num_epochs),
        ('update_lr_iter', update_lr_iter),
        ('iter_size', iter_size),
        ('save_freq', save_freq),
        ('vis_freq', vis_freq),
        ('save_dir', save_dir),
        ('batch_size', batch_size),
        ('learning_rate', learning_rate),
        ('resume', resume),
        ('model_file', model_file),
        ('optimizer_mode', optimizer_mode),
        ('sampling_mode', sampling_mode),
        ('fixed_weights', fixed_weights),
        ('filelist_name', filelist_name),
        ('data_root', data_root),
        ('image_dir', image_dir),
        ('num_train_files', num_train_files),
    ]
    for name, value in settings:
        print(' ' + name + ' = ' + str(value))
| {"/main.py": ["/train.py"], "/train.py": ["/model.py", "/ShapeNetDataset.py", "/utils.py"]} |
46,350 | dan801212/pytorch-3DGAN | refs/heads/master | /main.py | import argparse
from train import train
from test import test
def main(args):
    """Dispatch to train or test based on args.mode.

    Raises:
        ValueError: for an unrecognized mode. (The original did
        ``raise('...')``, which itself fails with a TypeError because a
        plain str is not an exception.)
    """
    if args.mode == 'train':
        train(args)
    elif args.mode == 'test':
        test(args)
    else:
        raise ValueError('Please choose a correct mode')
if __name__ == '__main__':
    # Command-line front end for the 3D-GAN trainer/tester.
    parser = argparse.ArgumentParser(description="3DGAN")
    parser.add_argument("--mode", type=str, default='train', choices=['train', 'test'])
    #GAN params
    parser.add_argument("--gan-epochs", type=int, default=1500)
    parser.add_argument("--lr-G", type=float, default=0.0025)
    parser.add_argument("--lr-D", type=float, default=0.001)
    parser.add_argument("--optimizer-G", type=str, default='Adam', choices=['Adam', 'Sgd', 'RMSprop'],
                        help='choose between Sgd/ Adam')
    parser.add_argument("--optimizer-D", type=str, default='Adam', choices=['Adam', 'Sgd', 'RMSprop'],
                        help='choose between Sgd/ Adam')
    # NOTE(review): the defaults are (beta1, beta2) tuples but type=float
    # applies to values given on the command line -- passing either flag
    # explicitly yields a single float, not a pair. Verify intended usage.
    parser.add_argument("--Adam-beta-G", type=float, default=(0.5,0.99))
    parser.add_argument("--Adam-beta-D", type=float, default=(0.5,0.99))
    parser.add_argument("--z-dim", type=int, default=200)
    parser.add_argument("--z-start-vox", type=int, nargs='*', default=[1,1,1])
    parser.add_argument("--z-dis", type=str, default='norm', choices=['norm', 'uni'],
                        help='uniform: uni, normal: norm')
    parser.add_argument("--batch-size-gan", type=int, default=32)
    parser.add_argument('--d-thresh', type=float, default=0.8,
                        help='for balance discriminator and generator')
    parser.add_argument('--leak-value', type=float, default=0.2,
                        help='leaky relu')
    # NOTE(review): type=bool is an argparse pitfall -- any non-empty string
    # (including "False") parses as True. Consider a str2bool converter.
    parser.add_argument('--soft-label', type=bool, default=True,
                        help='using soft_label')
    #step params
    parser.add_argument("--iter-G", type=int, default=1,
                        help='train G every n iteration')
    parser.add_argument("--save-freq", type=int, default=10,
                        help= 'Save model every n epoch')
    parser.add_argument("--vis-freq", type=int, default=100,
                        help= 'visualize model every n epoch')
    parser.add_argument("--update-lr-epoch", type=int, default=500,
                        help= 'lr decay by 10 every n epoch')
    # dir params
    parser.add_argument("--save-dir", type=str)
    parser.add_argument("--dataset-dir", type=str)
    # other params
    # NOTE(review): same type=bool pitfall as --soft-label above.
    parser.add_argument('--use-tensorboard', type=bool, default=True,
                        help='using tensorboard to visualize')
    parser.add_argument('--manualSeed', type=int, default=0,
                        help='manual seed')
    args = parser.parse_args()
    main(args)
| {"/main.py": ["/train.py"], "/train.py": ["/model.py", "/ShapeNetDataset.py", "/utils.py"]} |
46,351 | dan801212/pytorch-3DGAN | refs/heads/master | /ShapeNetDataset.py | from torch.utils.data.dataset import Dataset
import pandas as pd
import numpy as np
import ReadVoxLabel
import torch
import scipy.io as io
import os.path
import scipy.ndimage as nd
class CustomDataset(Dataset):
    """ShapeNet voxel dataset compatible with torch.utils.data.DataLoader.

    Each item is a float tensor of the voxel grid loaded from one .mat
    file found in *root*.
    """
    def __init__(self, root):
        """Index every file in directory *root*.
        Args:
            root: directory containing the .mat voxel files.
        """
        self.root = root
        self.listdir = os.listdir(self.root)

    def __getitem__(self, index):
        # Bug fix: os.path.join replaces the old ``self.root + name``
        # concatenation, which produced broken paths whenever root was
        # given without a trailing separator.
        path = os.path.join(self.root, self.listdir[index])
        with open(path, "rb") as f:
            volume = np.asarray(getVoxelFromMat(f, 64), dtype=np.float32)
        return torch.FloatTensor(volume)

    def __len__(self):
        return len(self.listdir)
def getVoxelFromMat(path, cube_len=64):
    """Load a voxel grid from a .mat file, pad it, and optionally upsample.

    The grid is read from the 'instance' key, zero-padded by one voxel on
    each side, and nearest-neighbour upsampled 2x when cube_len == 64
    (the stored ShapeNet grids are half that resolution: 32 -> 64).

    Args:
        path: file path or open binary file object accepted by scipy.io.loadmat.
        cube_len: target resolution flag; only 64 triggers the 2x upsample.

    Returns:
        numpy array holding the padded (and possibly upsampled) voxel grid.
    """
    voxels = io.loadmat(path)['instance']
    voxels = np.pad(voxels, (1, 1), 'constant', constant_values=(0, 0))
    # Simplified from the redundant `cube_len != 32 and cube_len == 64`.
    if cube_len == 64:
        voxels = nd.zoom(voxels, (2, 2, 2), mode='constant', order=0)
    return voxels
46,352 | dan801212/pytorch-3DGAN | refs/heads/master | /train.py | import os
import numpy as np
import torch
import random
from torch.autograd import Variable, gradcheck
import torch.nn.functional as F
import model
import ShapeNetDataset
from utils import plot_3D_scene, plot_single_3D_scene, print_all_params, adjust_learning_rate, generateZ, generateLabel
from logger import Logger
def train(args):
    """Train the 3D-GAN with alternating D/G updates over the ShapeNet voxels.

    Side effects: creates log/model directories under args.save_dir, writes
    tensorboard summaries and visualization PNGs, and periodically saves the
    whole D and G modules with torch.save.

    NOTE(review): the Variable / `.data[0]` usage below targets torch < 0.5;
    on newer torch use `.item()` and drop Variable.
    """
    print(str(args).replace(',', '\n'))
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    if args.manualSeed is None:
        args.manualSeed = random.randint(1, 10000)
    print("Random Seed: ", args.manualSeed)
    random.seed(args.manualSeed)
    torch.manual_seed(args.manualSeed)
    # create folder we need
    if not os.path.exists(args.save_dir):
        os.makedirs(args.save_dir)
    if not os.path.exists(args.save_dir + '/logs'):
        os.makedirs(args.save_dir + '/logs')
        os.makedirs(args.save_dir + '/logs/gan')
    if not os.path.exists(args.save_dir + '/models'):
        os.makedirs(args.save_dir + '/models')
        os.makedirs(args.save_dir + '/models/G')
        os.makedirs(args.save_dir + '/models/D')
    #===================== for using tensorboard=================#
    if args.use_tensorboard:
        if not os.path.exists(args.save_dir + '/logs/tensorboard'):
            os.makedirs(args.save_dir + '/logs/tensorboard')
        logger = Logger(args.save_dir + '/logs/tensorboard')
    # ===================== Data Loader=================#
    dset_shape = ShapeNetDataset.CustomDataset(args.dataset_dir)
    shape_loader = torch.utils.data.DataLoader(dset_shape, batch_size=args.batch_size_gan, shuffle=True)
    #======================create gan=====================#
    D = model._D(args)
    G = model._G(args)
    # D.apply(model.initialize_weights)
    # G.apply(model.initialize_weights)
    print(D)
    print(G)
    D = D.cuda()
    G = G.cuda()
    # Build the two optimizers from args (Adam / RMSprop / SGD fallback).
    if args.optimizer_G == 'Adam':
        print('G Using Adam optimizer')
        G_solver = torch.optim.Adam(G.parameters(), lr=args.lr_G, betas=args.Adam_beta_G)
    elif args.optimizer_G == 'RMSprop':
        print('G Using RMSprop optimizer')
        G_solver = torch.optim.RMSprop(G.parameters(), lr=args.lr_G)
    else:
        print('G Using Sgd optimizer')
        G_solver = torch.optim.SGD(G.parameters(), lr=args.lr_G, weight_decay=0.0005)
    if args.optimizer_D == 'Adam':
        print('D Using Adam optimizer')
        D_solver = torch.optim.Adam(D.parameters(), lr=args.lr_D, betas=args.Adam_beta_D)
    elif args.optimizer_D == 'RMSprop':
        print('D Using RMSprop optimizer')
        D_solver = torch.optim.RMSprop(D.parameters(), lr=args.lr_D)
    else:
        print('D Using Sgd optimizer')
        D_solver = torch.optim.SGD(D.parameters(), lr=args.lr_D, weight_decay=0.0005)
    # BCEWithLogitsLoss: D returns raw logits alongside the sigmoid output.
    criterion = torch.nn.BCEWithLogitsLoss()
    #======================training=====================#
    ite = 1
    for epoch in range(args.gan_epochs):
        for i, labels in enumerate(shape_loader):
            labels = labels.view(-1, 1, 64, 64, 64)
            # ============= Train the discriminator =============# maximize log(D(x)) + log(1 - D(G(z)))
            labels = Variable(labels.type(torch.FloatTensor)).cuda()
            d_real, d_real_no_sigmoid = D(labels)
            d_real_loss = criterion(d_real_no_sigmoid, generateLabel(d_real,1, args))
            Z = Variable(generateZ(args)).cuda()
            fake = G(Z)
            # detach() keeps the D step from backpropagating into G.
            d_fake, d_fake_no_sigmoid = D(fake.detach())
            d_fake_loss = criterion(d_fake_no_sigmoid , generateLabel(d_fake,0, args))
            d_loss = d_real_loss + d_fake_loss
            d_real_acc = torch.ge(d_real.squeeze(), 0.5).float()
            d_fake_acc = torch.le(d_fake.squeeze(), 0.5).float()
            d_total_acc = torch.mean(torch.cat((d_real_acc, d_fake_acc),0))
            # Only update D while its accuracy is at or below d_thresh,
            # so D does not overpower G.
            if (d_total_acc <= args.d_thresh).data.cpu().numpy():
                D_solver.zero_grad()
                d_loss.backward()
                D_solver.step()
            # ============= Train the generator =============# maximize log(D(G(z)))
            if (i) % args.iter_G == 0 :
                d_fake, d_fake_no_sigmoid = D(fake)
                g_loss = criterion(d_fake_no_sigmoid, generateLabel(d_fake,1, args))
                G_solver.zero_grad()
                g_loss.backward()
                G_solver.step()
                print('Iter-{}; , D_loss : {:.4}, G_loss : {:.4}, D_acc : {:.4}'.format(ite, d_loss.data[0], g_loss.data[0], d_total_acc.data[0]))
                #======================tensorboard========================#
                if args.use_tensorboard:
                    info = {
                        'loss/loss_D_R': d_real_loss.data[0],
                        'loss/loss_D_F': d_fake_loss.data[0],
                        'loss/loss_D': d_loss.data[0],
                        'loss/loss_G': g_loss.data[0],
                        'loss/acc_D': d_total_acc.data[0]
                    }
                    for tag, value in info.items():
                        logger.scalar_summary(tag, value, ite)
                    # Parameter and gradient histograms for both networks.
                    for tag, value in G.named_parameters():
                        tag = tag.replace('.', '/')
                        logger.histo_summary('Generator/' + tag, value.data.cpu().numpy(), ite)
                        logger.histo_summary('Generator/' + tag + '/grad', value.grad.data.cpu().numpy(), ite)
                    for tag, value in D.named_parameters():
                        tag = tag.replace('.', '/')
                        logger.histo_summary('Discriminator/' + tag, value.data.cpu().numpy(), ite)
                        logger.histo_summary('Discriminator/' + tag + '/grad', value.grad.data.cpu().numpy(), ite)
            # Every 10 batches: render one generated sample (thresholded at
            # 0.5 occupancy) to a PNG under logs/gan.
            if (i) % 10 == 0:
                Z = Variable(generateZ(args)).cuda()
                G.eval()
                data_gen = G(Z)
                G.train(True)
                data_gen = data_gen.data.cpu().numpy()
                data_gen = data_gen[0,:,:,:,:]
                data_gen = data_gen.__ge__(0.5)
                print(np.count_nonzero(data_gen))
                data_gen = np.squeeze(data_gen)
                log_img_name = args.save_dir + '/logs/gan/' + str(epoch).zfill(5) + '_' + str(i).zfill(5) + '.png'
                plot_single_3D_scene(data_gen, log_img_name)
            ite += 1
        # Checkpoint the full modules (not just state_dicts) periodically.
        if epoch % args.save_freq == 0:
            save_model_name_D = os.path.join(args.save_dir + '/models/D/', '%05d.ckpt' % (ite - 1))
            save_model_name_G = os.path.join(args.save_dir + '/models/G/', '%05d.ckpt' % (ite - 1))
            torch.save(D, save_model_name_D)
            torch.save(G, save_model_name_G)
        adjust_learning_rate(D_solver, epoch+1, args.lr_D, args.update_lr_epoch)
        adjust_learning_rate(G_solver, epoch+1, args.lr_G, args.update_lr_epoch)
46,353 | dan801212/pytorch-3DGAN | refs/heads/master | /model.py | import torch
import torch.nn as nn
import torch.nn.functional as F
class _G(nn.Module):
def __init__(self, args):
super(_G, self).__init__()
self.args = args
self.layer1 = torch.nn.Sequential(
torch.nn.ConvTranspose3d(self.args.z_dim, 512, kernel_size=4, stride=1, padding=0),
torch.nn.BatchNorm3d(512),
torch.nn.ReLU()
)
self.layer2 = torch.nn.Sequential(
torch.nn.ConvTranspose3d(512, 256, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(256),
torch.nn.ReLU(),
)
self.layer3 = torch.nn.Sequential(
torch.nn.ConvTranspose3d(256, 128, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(128),
torch.nn.ReLU()
)
self.layer4 = torch.nn.Sequential(
torch.nn.ConvTranspose3d(128, 64, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(64),
torch.nn.ReLU()
)
self.layer5 = torch.nn.Sequential(
torch.nn.ConvTranspose3d(64, 1, kernel_size=4, stride=2, padding=1),
torch.nn.Sigmoid()
)
def forward(self, x):
x = x.view(-1, self.args.z_dim, 1, 1, 1)
#print(x.size()) # torch.Size([n, 200, 1, 1, 1])
x = self.layer1(x)
#print(x.size()) # torch.Size([n, 512, 4, 4, 4])
x = self.layer2(x)
#print(x.size()) # torch.Size([n, 256, 8, 8, 8])
x = self.layer3(x)
#print(x.size()) # torch.Size([n, 128, 16, 16, 16])
x = self.layer4(x)
#print(x.size()) # torch.Size([n, 64, 32, 32, 32])
x = self.layer5(x)
#print(x.size()) # torch.Size([n, 1, 64, 64, 64])
return x
class _D(nn.Module):
def __init__(self, args):
super(_D, self).__init__()
self.args = args
self.layer1 = torch.nn.Sequential(
torch.nn.Conv3d(1, 64, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(64),
torch.nn.LeakyReLU(self.args.leak_value, inplace=True)
)
self.layer2 = torch.nn.Sequential(
torch.nn.Conv3d(64, 128, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(128),
torch.nn.LeakyReLU(self.args.leak_value, inplace=True),
)
self.layer3 = torch.nn.Sequential(
torch.nn.Conv3d(128, 256, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(256),
torch.nn.LeakyReLU(self.args.leak_value, inplace=True)
)
self.layer4 = torch.nn.Sequential(
torch.nn.Conv3d(256, 512, kernel_size=4, stride=2, padding=1),
torch.nn.BatchNorm3d(512),
torch.nn.LeakyReLU(self.args.leak_value, inplace=True),
)
self.layer5 = torch.nn.Sequential(
torch.nn.Conv3d(512, 1, kernel_size=4, stride=1, padding=0)
)
def forward(self, x):
x = x.view(-1, 1, 64, 64, 64)
# print(x.size()) # torch.Size([ n, 1, 64, 64, 64])
x = self.layer1(x)
# print(x.size()) # torch.Size([ 64, 32, 32, 32])
x = self.layer2(x)
# print(x.size()) # torch.Size([ 128, 16, 16, 16])
x = self.layer3(x)
# print(x.size()) # torch.Size([ 256, 8, 8, 8])
x = self.layer4(x)
# print(x.size()) # torch.Size([ 512, 4, 4, 4])
x = self.layer5(x)
# print(x.size()) # torch.Size([ 1, 1, 1, 1])
x_after_sigmoid = F.sigmoid(x)
return x_after_sigmoid, x
class Flatten(nn.Module):
    """Flatten a 5-D (N, C, D, H, W) tensor to (N, C*D*H*W)."""

    def forward(self, x):
        # Unpacking five dims keeps the original strict 5-D contract:
        # non-5-D input raises, exactly as before.
        batch, _, _, _, _ = x.shape
        return x.view(batch, -1)
# custom weights initialization called on netG and netD
def initialize_weights(m):
    """Initialise conv/linear weights to N(0, 0.02), the DCGAN convention.

    Intended for use with ``net.apply(initialize_weights)``; modules other
    than Linear/ConvTranspose3d/Conv3d are left untouched.
    """
    if isinstance(m, (nn.Linear, nn.ConvTranspose3d, nn.Conv3d)):
        # nn.init.normal is deprecated (removed in modern torch);
        # nn.init.normal_ is the in-place replacement with identical effect.
        nn.init.normal_(m.weight.data, 0, 0.02)
46,476 | AnthonyMrt/Poyosei | refs/heads/master | /admin.py | from django.contrib import admin
from django.forms import TextInput, Textarea
#from simple_history.admin import SimpleHistoryAdmin
from .models import *
# Register your models here.
# NOTE(review): history_list_display is a django-simple-history option, but
# the SimpleHistoryAdmin import above is commented out — confirm whether this
# class should inherit SimpleHistoryAdmin instead of admin.ModelAdmin.
class PlanteurHistoryAdmin(admin.ModelAdmin):
    """Admin for Planteur: lists identity fields and the 'controle' flag."""
    list_display = ('nom', 'prenom', 'pacage', 'controle')
    history_list_display = ['controle']
    date_hierarchy = 'date_creation'
    # NOTE(review): 'name' and 'user__username' do not match the fields shown
    # in list_display ('nom'/'prenom') — verify these exist on Planteur.
    search_fields = ['name', 'user__username']
    # NOTE(review): 'models' must be provided by the wildcard import of
    # .models — confirm it re-exports django.db.models.
    formfield_overrides = {
        models.CharField: {'widget': TextInput(attrs={'size':'20'})},
        models.TextField: {'widget': Textarea(attrs={'rows':4, 'cols':40})},
    }
class reconstitutionTonnageAdmin(admin.ModelAdmin):
    """Admin for reconstitutionTonnage: pacage/year plus tonnage columns."""
    model = reconstitutionTonnage
    list_display = ('pacage', 'annee', 'reconstitution_tonnage', 'justification')
class CampagneAdmin(admin.ModelAdmin):
    """Admin for Campagne: shows pacage/year and the RID/RIT references."""
    model = Campagne
    list_display = ('pacage', 'annee', 'rid', 'rit')
    # Removed `extra = 1`: that option only exists on InlineModelAdmin and is
    # silently ignored on a ModelAdmin, so it had no effect here.
class StatistiqueAdmin(admin.ModelAdmin):
    """Admin for Statistique: pacage/year plus the two surface totals."""
    model = Statistique
    list_display = ('pacage', 'annee', 'surface_totale_utile', 'surface_totale_exploitation')
class MouvementAdmin(admin.ModelAdmin):
    """Admin for Mouvement: type, both pacages, request date, validated flag."""
    model = Mouvement
    list_display = ('type_mouvement', 'pacage_cedant', 'pacage_repreneur', 'date_demande', 'mouvement_valide' )
class typeMouvementModelsAdmin(admin.ModelAdmin):
    """Admin for typeMouvementModel: movement type and its description."""
    model = typeMouvementModel
    list_display = ('type_mouvement', 'informations')
class RelationAdmin(admin.ModelAdmin):
    """Admin for Relation: primary key and the first related pacage."""
    model = Relation
    list_display = ('id', 'pacageA')
class prodCommercialeAdmin(admin.ModelAdmin):
    """Admin for ProductionCommerciale: pacage/year and commercial output."""
    model = ProductionCommerciale
    list_display = ('pacage', 'année', 'production_commerciale')
class typeMouvementAdmin(admin.ModelAdmin):
    """Admin for typeMouvement; uses the default ModelAdmin display options."""
    model = typeMouvement
# Wire each model to its customised ModelAdmin on the default admin site.
admin.site.register(Planteur, PlanteurHistoryAdmin)
admin.site.register(Mouvement, MouvementAdmin)
admin.site.register(typeMouvementModel, typeMouvementModelsAdmin)
admin.site.register(Campagne, CampagneAdmin)
admin.site.register(reconstitutionTonnage, reconstitutionTonnageAdmin)
admin.site.register(Statistique, StatistiqueAdmin)
admin.site.register(Relation, RelationAdmin)
admin.site.register(ProductionCommerciale, prodCommercialeAdmin)
admin.site.register(typeMouvement, typeMouvementAdmin)
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,477 | AnthonyMrt/Poyosei | refs/heads/master | /urls.py | from django.conf.urls import url
from . import views
from poyosei.views import *
# URL routes for the poyosei app.
# Convention used below: "editer" -> edit view, "fiche" -> detail view.
app_name = 'poyosei'
urlpatterns = [
    # Static
    url(r'^$', views.index, name='index'),
    # Editer -> Modification
    # Fiche -> Visualisation
    # Planteur
    # url(r'planteur/rid/(?P<pacage>[0-9]+)/$', views.planteurRID, name='planteurRID'),
    url(r'planteur/ajouter/$', views.planteurAjouter, name='planteurAjouter'),
    url(r'planteur/editer/(?P<pacage>[0-9]+)/$',
        views.planteurEditer, name='planteurEditer'),
    url(r'planteur/export/$', views.planteurExport, name='planteurExport'),
    url(r'planteur/fiche/(?P<pacage>[0-9]+)/$',
        views.planteurFiche, name='planteurFiche'),
    url(r'planteur/historique/$',
        views.planteurHistorique, name='planteurHistorique'),
    url(r'planteur/liste/$', views.planteurListe, name='planteurListe'),
    url(r'planteur/supprimer/(?P<pacage>[0-9]+)/$',
        views.planteurSupprimer, name='planteurSupprimer'),
    # Relation
    url(r'relation/ajout/(?P<pacage>[0-9]+)/$',
        views.planteurAjoutRelation, name='planteurAjoutRelation'),
    # Mouvement
    url(r'mouvement/ajouter/$',
        views.mouvementAjouter, name='mouvementAjouter'),
    url(r'mouvement/editer/(?P<pacage_cedant>[0-9]+)/(?P<pacage_repreneur>[0-9]+)/(?P<id>[0-9]+)/$',
        views.mouvementEditer, name='mouvementEditer'),
    url(r'mouvement/export/$',
        views.mouvementExport, name='mouvementExport'),
    url(r'mouvement/fiche/(?P<pacage_cedant>[0-9]+)/(?P<pacage_repreneur>[0-9]+)/(?P<id>[0-9]+)/$',
        views.mouvementFiche, name="mouvementFiche"),
    url(r'mouvement/liste/$',
        views.mouvementListe, name='mouvementListe'),
    url(r'mouvement/supprimer/(?P<pacage_cedant>[0-9]+)/(?P<pacage_repreneur>[0-9]+)/(?P<id>[0-9]+)/$',
        views.mouvementSupprimer, name='mouvementSupprimer'),
    url(r'mouvement/historique/$',
        views.mouvementHistorique, name='mouvementHistorique'),
    # Campagne
    url(r'campagne/liste/$',
        views.campagneListe, name='campagneListe'),
    url(r'campagne/ajouter/$',
        views.campagneAjouter, name='campagneAjouter'),
    url(r'campagne/editer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.campagneEditer, name='campagneEditer'),
    url(r'campagne/supprimer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/(?P<id>[0-9]+)/$',
        views.campagneSupprimer, name='campagneSupprimer'),
    url(r'campagne/fiche/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.campagneFiche, name="campagneFiche"),
    # ProductionCommerciale
    url(r'ProdCommerciale/liste/$',
        views.prodCommercialeListe, name='prodCommercialeListe'),
    url(r'ProdCommerciale/ajouter/$',
        views.prodCommercialeAjouter, name='prodCommercialeAjouter'),
    url(r'ProdCommerciale/editer/(?P<pacage>[0-9]+)/(?P<année>[0-9]+)/$',
        views.prodCommercialeEditer, name='prodCommercialeEditer'),
    url(r'ProdCommerciale/fiche/(?P<pacage>[0-9]+)/(?P<année>[0-9]+)/$',
        views.prodCommercialeFiche, name='prodCommercialeFiche'),
    url(r'ProdCommerciale/supprimer/(?P<pacage>[0-9]+)/(?P<année>[0-9]+)/$',
        views.prodCommercialeSupprimer, name='prodCommercialeSupprimer'),
    # Statistiques
    url(r'^statistique/liste/$',
        views.statsListe, name='statsListe'),
    url(r'^statistique/ajouter/$',
        views.statsAjouter, name='statsAjouter'),
    url(r'statistique/editer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.statsEditer, name='statsEditer'),
    url(r'statistique/fiche/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.statsFiche, name='statsFiche'),
    url(r'statistique/supprimer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.statsSupprimer, name='statsSupprimer'),
    # Rapport
    url(r'^rapport/$',
        views.rapport, name='rapport'),
    # NOTE(review): the next two routes re-register the names 'statsEditer'
    # and 'statsFiche' (already used in the Statistiques section above);
    # Django's reverse() will resolve each name to only one of the two
    # patterns — confirm which URL templates actually expect.
    url(r'^rapport/editer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.statsEditer, name='statsEditer'),
    url(r'^rapport/fiche/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.statsFiche, name='statsFiche'),
    # NOTE(review): 'rapoport' below looks like a typo for 'rapport'; fixing
    # it would change the public URL, so confirm no links depend on it first.
    url(r'rapoport/rapportCampagneEnCours/(?P<pacage>[0-9]+)/$',
        views.campagneEnCoursExport, name='campagneEnCoursExport'),
    url(r'rapport/rapportMouvementDuPlanteur/(?P<pacage>[0-9]+)/$',
        views.mouvementExport2, name='mouvementExport2'),
    # NOTE(review): 'my_template_name' reads like a leftover placeholder path.
    url(r'my_template_name/(?P<pacage>[0-9]+)/$',
        views.planteurExport2, name='planteurExport2'),
    url(r'rapport/rapportODEADOM/$',
        views.rapportODEADOM, name='rapportODEADOM'),
    url(r'rapport/rapportAnnuelODEADOM/$',
        views.rapportODEADOMAnneeEnCours, name='rapportODEADOMAnneeEnCours'),
    # Opération
    url(r'^operation/$',
        views.operation, name='operation'),
    # URL test ajax
    # url(r'^ajax_query/$', views.ajax_query, name='ajax_query'),
    # url(r'^ajax/planteur/$', views.searchPlanteur, name='searchPlanteur'),
    # Créer nouveau Mouvement
    url(r'^typeMouvement/ajouter/$',
        views.AjouterTypeMouvement, name='AjouterTypeMouvement'),
    # reconstitutionTonnage
    url(r'^reconstitutionTonnage/liste/$',
        views.tonnageListe, name='tonnageListe'),
    url(r'^reconstitutionTonnage/ajouter/$',
        views.tonnageAjouter, name='tonnageAjouter'),
    url(r'reconstitutionTonnage/editer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.tonnageEditer, name='tonnageEditer'),
    url(r'reconstitutionTonnage/fiche/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.tonnageFiche, name='tonnageFiche'),
    url(r'reconstitutionTonnage/supprimer/(?P<pacage>[0-9]+)/(?P<annee>[0-9]+)/$',
        views.tonnageSupprimer, name='tonnageSupprimer'),
]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,478 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0050_auto_20180821_0525.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-21 09:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: refresh the type_mouvement choice lists.

    Do not edit the choice strings — 'rndividuelle' is a typo carried over
    from the model definition and must stay in sync with it.
    """

    dependencies = [
        ('poyosei', '0049_auto_20180821_0525'),
    ]

    operations = [
        migrations.AlterField(
            model_name='historicalmouvement',
            name='type_mouvement',
            field=models.CharField(choices=[("transfert total d'une exploitation", "transfert total d'une exploitation"), ('Transfert de référence rndividuelle avec cession partielle de foncier', 'Transfert de référence individuelle avec cession partielle de foncier'), ('Transfert de référence individuelle sans foncier', 'Transfert de référence individuelle sans foncier'), ('Reprise administrative', 'Reprise administrative'), ('Cession volontaire définitive', 'Cession volontaire définitive'), ('Cession volontaire temporaire', 'Cession volontaire temporaire'), ('Cessation d’activite sans repreneur', 'Cessation d’activite sans repreneur'), ('Attribution de Reference Individuelle par la reserve', 'Attribution de Reference Individuelle par la réserve'), ('Autre', 'Autre'), ('', '')], default='', max_length=100),
        ),
        migrations.AlterField(
            model_name='typemouvementmodel',
            name='type_mouvement',
            field=models.CharField(choices=[("transfert total d'une exploitation", "transfert total d'une exploitation"), ('Transfert de référence rndividuelle avec cession partielle de foncier', 'Transfert de référence individuelle avec cession partielle de foncier'), ('Transfert de référence individuelle sans foncier', 'Transfert de référence individuelle sans foncier'), ('Reprise administrative', 'Reprise administrative'), ('Cession volontaire définitive', 'Cession volontaire définitive'), ('Cession volontaire temporaire', 'Cession volontaire temporaire'), ('Cessation d’activite sans repreneur', 'Cessation d’activite sans repreneur'), ('Attribution de Reference Individuelle par la reserve', 'Attribution de Reference Individuelle par la réserve'), ('Autre', 'Autre'), ('', '')], default='', max_length=100),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,479 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0032_auto_20180814_1430.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-14 18:30
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: drop Campagne.planteur and widen rid/rit to 20 digits."""

    dependencies = [
        ('poyosei', '0031_campagne_planteur'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='campagne',
            name='planteur',
        ),
        migrations.AlterField(
            model_name='campagne',
            name='rid',
            field=models.DecimalField(decimal_places=0, default=Decimal('0.0'), max_digits=20),
        ),
        migrations.AlterField(
            model_name='campagne',
            name='rit',
            field=models.DecimalField(decimal_places=0, default=Decimal('0.0'), max_digits=20),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,480 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-31 13:56
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial schema: creates Campagne and Planteur and the
    (pacage, annee) uniqueness constraint on Campagne."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Campagne',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pacage', models.CharField(max_length=9)),
                ('annee', models.CharField(max_length=4)),
                ('RIT', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('RID', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('production_commerciale_totale', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('commentaire', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Planteur',
            fields=[
                ('pacage', models.CharField(help_text='Un nombre de 9 chiffres', max_length=9, primary_key=True, serialize=False, verbose_name='Numéro pacage')),
                ('civilite', models.CharField(blank=True, choices=[('Mr', 'Monsieur'), ('gérant', 'Mr le gérant'), ('gérante', 'Mme la gérante'), ('Mme', 'Madame'), ('Societe', 'Societe'), ('Autre', 'Autre'), ('', '')], default='', max_length=20, verbose_name='Civilité')),
                ('nom', models.CharField(help_text='200 caractères maximum.', max_length=200, verbose_name='Nom')),
                ('prenom', models.CharField(help_text='200 caractères maximum.', max_length=100, verbose_name='Prénom')),
                ('siret', models.CharField(blank=True, help_text="code Insee permettant l'identification d'un établissement ou d'une entreprise française.", max_length=14, null=True, verbose_name='SIRET')),
                ('LPG', models.CharField(blank=True, help_text='Identifiant LPG du planteur.', max_length=100, null=True, verbose_name='Numéro LPG')),
                ('contremarque', models.CharField(blank=True, max_length=100, null=True, verbose_name='Contremarque')),
                ('denomination', models.TextField(blank=True, help_text="Texte d'aide", max_length=100, null=True, verbose_name='Dénomination')),
                ('gerant', models.TextField(blank=True, help_text="Texte d'aide", null=True, verbose_name='Gérant')),
                ('adresse', models.CharField(blank=True, help_text='Adresse du planteur', max_length=255, null=True, verbose_name='Adresse')),
                ('adresse_complementaire', models.CharField(blank=True, max_length=255, null=True)),
                ('code_postal', models.CharField(blank=True, help_text='Code postal planteur', max_length=10, null=True, verbose_name='Code postal')),
                ('commune', models.CharField(blank=True, help_text='Commune du planteur', max_length=100, null=True, verbose_name='Commune')),
                ('telephone_principale', models.CharField(blank=True, help_text='Téléphone de contact', max_length=50, null=True, verbose_name='Numéro de téléphone')),
                ('telephone_secondaire', models.CharField(blank=True, help_text='téléphone de contact', max_length=50, null=True, verbose_name='Autre téléphone')),
                ('courriel', models.EmailField(blank=True, help_text='Adresse de courriel de contact.', max_length=254, null=True, verbose_name='Courriel')),
                ('dateNaissance', models.DateField(blank=True, help_text='Date de naissance du planteur', null=True, verbose_name='Date de naissance')),
                ('date_adhesion', models.DateField(blank=True, help_text='Date adhésion', null=True, verbose_name="Date d'adhésion à l'organisme de production")),
                ('numero_exemption_Diecte', models.CharField(blank=True, help_text="Numéro d'exemption DIECTE", max_length=50, null=True, verbose_name="Numéro d'exemption DIECTE")),
                ('date_fin_Diecte', models.DateField(blank=True, help_text="Date de fin d'exemption DIECTE", null=True, verbose_name="Date de fin d'exemption DIECTE")),
                ('entreprise_associé', models.TextField(blank=True, help_text='Autre entreprise associé', null=True, verbose_name='Entreprise associé par Actionnaire')),
                ('controle', models.BooleanField(default=False, help_text='Le planteur est-il exempte de contrôle ?', verbose_name='Exemption de contrôle')),
                ('date_cessation_Activite', models.DateField(blank=True, help_text="Date de cessation d'activité", null=True, verbose_name="Date de cessation d'activité")),
                ('commentaire', models.TextField(blank=True, help_text="Toute information utile à l'instruction", null=True, verbose_name='Commentaire')),
                ('date_creation', models.DateField(auto_now_add=True, null=True)),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='campagne',
            unique_together=set([('pacage', 'annee')]),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,481 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0037_auto_20180815_0354.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-15 07:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: give Statistique.pacage an explicit empty default."""

    dependencies = [
        ('poyosei', '0036_auto_20180814_1732'),
    ]

    operations = [
        migrations.AlterField(
            model_name='statistique',
            name='pacage',
            field=models.CharField(default='', max_length=9),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,482 | AnthonyMrt/Poyosei | refs/heads/master | /forms/formsMouvement.py | from django import forms
from django.contrib.admin import widgets
from django.forms import ModelChoiceField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
from poyosei.models import *
from datetime import datetime
def year_choices():
    """Return (year, year) choice tuples for the current year -4 .. +4.

    Always yields 9 entries.  The current year is read once, so the window
    stays consistent even if the call straddles a year change (the previous
    version called datetime.now() twice, once per range bound).
    """
    current = datetime.now().year
    return [(year, year) for year in range(current - 4, current + 5)]
class MouvementForm(forms.ModelForm):
    """ModelForm for Mouvement with admin date widgets and JS-driven selects."""

    class Meta:
        model = Mouvement
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super(MouvementForm, self).__init__(*args, **kwargs)
        # NOTE(review): this attrs update is discarded — 'type_mouvement' is
        # replaced wholesale a few lines below; confirm which was intended.
        self.fields['type_mouvement'].widget.attrs\
            .update({
                'placeholder': 'Name',
                'class': 'form-group'
            })
        self.fields['mouvement_valide'].widget.attrs\
            .update({
                'type': 'checkbox',
                'class': 'custom-control-input'
            })
        # The onChange hook calls the page's dynamicdropdown() JavaScript.
        self.fields['type_mouvement'] = forms.ModelChoiceField(queryset=typeMouvement.objects.all(), widget=forms.Select(attrs={"onChange": 'javascript: dynamicdropdown(this.options[this.selectedIndex].value);'}))
        # AdminDateWidget requires the admin's JS/CSS assets on the page.
        self.fields['date_demande'].widget = widgets.AdminDateWidget()
        #self.fields['informations'].widget = forms.Select(attrs={"onChange": 'javascript: dynamicdropdown(this.options[this.selectedIndex].value);'})
        self.fields['date_COSDA_Valide'].widget = widgets.AdminDateWidget()
        self.fields['pacage_cedant'] = forms.ModelChoiceField(queryset=Planteur.objects.all())
        self.fields['pacage_repreneur'] = forms.ModelChoiceField(queryset=Planteur.objects.all())
        # Selectable years come from the module-level year_choices() helper.
        self.fields['année_concerne'] = forms.TypedChoiceField(coerce=int, choices=year_choices, initial=datetime.now().year)
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,483 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0016_auto_20180802_0851.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-02 12:51
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('poyosei', '0015_auto_20180611_0840'),
]
operations = [
migrations.RenameField(
model_name='historicalplanteur',
old_name='contremarque',
new_name='contre_marque',
),
migrations.RenameField(
model_name='planteur',
old_name='contremarque',
new_name='contre_marque',
),
migrations.AddField(
model_name='historicalplanteur',
name='production_commerciale_totale',
field=models.DecimalField(decimal_places=0, default=Decimal('0.0'), max_digits=9),
),
migrations.AddField(
model_name='historicalplanteur',
name='référence_individuelle_définitive',
field=models.DecimalField(blank=True, decimal_places=0, default=Decimal('0'), max_digits=100, verbose_name='Référence individuelle défintive'),
),
migrations.AddField(
model_name='planteur',
name='production_commerciale_totale',
field=models.DecimalField(decimal_places=0, default=Decimal('0.0'), max_digits=9),
),
migrations.AddField(
model_name='planteur',
name='référence_individuelle_définitive',
field=models.DecimalField(blank=True, decimal_places=0, default=Decimal('0'), max_digits=100, verbose_name='Référence individuelle défintive'),
),
]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,484 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0038_auto_20180818_0134.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-18 05:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the 'numéro' field to historicalmouvement and
    typemouvementmodel."""

    dependencies = [
        ('poyosei', '0037_auto_20180815_0354'),
    ]

    operations = [
        migrations.AddField(
            model_name='historicalmouvement',
            name='numéro',
            field=models.CharField(blank=True, max_length=10),
        ),
        migrations.AddField(
            model_name='typemouvementmodel',
            name='numéro',
            field=models.CharField(blank=True, max_length=10),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,485 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsRapport.py | from django.shortcuts import render
# from django.views import View
# from poyosei.forms import *
# from django.views.decorators.csrf import csrf_exempt
# from poyosei.models import *
# from poyosei.ressources import PlanteurResource, mouvementResource
# from tablib import Dataset
# import json
# @csrf_exempt
def rapport(request):
    """Render the static report landing page with the 'rapport' tab active."""
    context = {"active_tab": "rapport"}
    return render(request, 'rapport/index.html', context)
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,486 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0013_remove_mouvement_quantite_reference_individuelle_accorde2.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-06 13:23
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drop quantite_reference_individuelle_accorde2 from
    Mouvement."""

    dependencies = [
        ('poyosei', '0012_auto_20180606_0922'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='mouvement',
            name='quantite_reference_individuelle_accorde2',
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,487 | AnthonyMrt/Poyosei | refs/heads/master | /apps.py | from django.apps import AppConfig
from django.conf.urls import url
from django.contrib import admin
class PoyoseiConfig(AppConfig):
    """AppConfig registering the ``poyosei`` application."""
    name = 'poyosei'
admin.site.site = '/poyosei' | {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,488 | AnthonyMrt/Poyosei | refs/heads/master | /forms/formsPlanteur.py | from django import forms
from django.contrib.admin import widgets
from django.forms import ModelChoiceField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
from poyosei.models import *
class PlanteurForm(forms.ModelForm):
    """ModelForm exposing every ``Planteur`` field, with the admin calendar
    widget on each date field."""

    class Meta:
        model = Planteur
        fields = '__all__'
        error_messages = {
            'nom': {
                'max_length': "Le nom écrit est trop long",
            },
        }

    def __init__(self, *args, **kwargs):
        super(PlanteurForm, self).__init__(*args, **kwargs)
        # Replace the default text inputs with the admin date picker on
        # all four date fields of the model.
        for date_field in ('dateNaissance', 'date_adhesion',
                           'date_fin_Diecte', 'date_cessation_Activite'):
            self.fields[date_field].widget = widgets.AdminDateWidget()
class PacageForm(forms.Form):
    """Search form validating that the typed value matches an existing
    ``Planteur`` pacage number (partial match allowed)."""

    planteurSearch = forms.CharField(label='planteurSearch', max_length=100, widget=forms.TextInput(attrs={'onkeyup': 'planteur_suggestion()', 'placeholder': 'planteur_datalist',}))

    def clean_planteurSearch(self):
        """Return the pacage as an int, or raise ValidationError.

        Fix: the original used a bare ``except:`` which also swallowed
        KeyboardInterrupt/SystemExit; only conversion errors are caught now.
        """
        try:
            planteurSearch = int(self.cleaned_data["planteurSearch"])
        except (TypeError, ValueError):
            # Non-numeric input: sentinel string that can never match a
            # numeric pacage, so the filter below finds nothing.
            planteurSearch = "inconnue"
        if planteurSearch and Planteur.objects.filter(pacage__contains=planteurSearch).count():
            return planteurSearch
        # Message grammar fixed ("Veuillez entrez" -> "Veuillez entrer").
        raise forms.ValidationError("Veuillez entrer un pacage valide")
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,489 | AnthonyMrt/Poyosei | refs/heads/master | /forms/__init__.py | from .formsPlanteur import *
from .formsMouvement import *
from .formsRelation import *
from .formsCampagne import *
from .formsProdCommerciale import *
from .formsStatistique import *
from .formsTypeMouvement import *
from .formsTonnage import * | {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,490 | AnthonyMrt/Poyosei | refs/heads/master | /forms/formsRelation.py | from django import forms
from django.contrib.admin import widgets
from django.forms import ModelChoiceField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
from poyosei.models import *
class RelationForm(forms.ModelForm):
    """ModelForm for ``Relation`` checking that ``pacageA`` refers to a
    known ``Planteur``."""

    class Meta:
        model = Relation
        fields = '__all__'

    def clean(self):
        """Cross-field validation; raises if ``pacageA`` matches no planteur.

        Fixes: call ``super().clean()`` (keeps ModelForm uniqueness checks),
        use ``.get()`` so a field-level failure does not surface as a
        KeyError, and return ``cleaned_data`` per the Django contract.
        """
        cleaned_data = super(RelationForm, self).clean()
        pacageA = cleaned_data.get('pacageA')
        # .exists() avoids fetching rows just to test for presence.
        if not Planteur.objects.filter(pacage=pacageA).exists():
            # Message typo fixed ("un pacages" -> "un pacage").
            raise forms.ValidationError('Veuillez entrer un pacage valide')
        return cleaned_data
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,491 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0006_planteurback.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-31 15:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create ``PlanteurBack``, a multi-table-inheritance child of
    ``Planteur`` that adds no fields of its own (auto-generated)."""
    dependencies = [
        ('poyosei', '0005_reconstitutiontonnage'),
    ]
    operations = [
        migrations.CreateModel(
            name='PlanteurBack',
            fields=[
                # MTI parent link: the PK is a OneToOne to poyosei.Planteur.
                ('planteur_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='poyosei.Planteur')),
            ],
            bases=('poyosei.planteur',),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,492 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0042_ajoutmouvement.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-19 06:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-create the ``ajoutMouvement`` model (it was deleted in 0041)."""
    dependencies = [
        ('poyosei', '0041_delete_ajoutmouvement'),
    ]
    operations = [
        migrations.CreateModel(
            name='ajoutMouvement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Nom_mouvement', models.CharField(max_length=100)),
                ('informations', models.TextField()),
            ],
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,493 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsMouvement.py | from django.shortcuts import get_object_or_404, render, redirect, HttpResponseRedirect, HttpResponse
from django.views import View
from poyosei.forms import *
# from django.views.decorators.csrf import csrf_exempt
from poyosei.models import *
from poyosei.ressources import PlanteurResource, mouvementResource
from tablib import Dataset
import json
# @csrf_exempt
def mouvementListe(request):
    """List the mouvements of the current campagne and later."""
    current = Campagne.CampagneEnCours()
    mouvement_qs = Mouvement.objects.filter(année_concerne__gte=current)
    return render(request, 'mouvement/liste.html',
                  {'active_tab': 'mouvement', 'mouvements': mouvement_qs})
def mouvementAjouter(request):
    """Create a Mouvement; redirect to the list view on success.

    Fix: the original built an unbound ``MouvementForm()`` before branching
    and then unconditionally overwrote it — that redundant construction is
    removed.
    """
    mouvements = Mouvement.objects.all()
    # Looked up by exact name; DoesNotExist propagates as a 500 if the
    # row is missing — TODO confirm this fixture row always exists.
    mouv = typeMouvement.objects.get(Nom_mouvement='Transfert de référence individuelle sans foncier')
    phrase = mouv.Nom_mouvement
    if request.method == 'POST':
        form = MouvementForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('poyosei:mouvementListe')
        # Invalid POST: fall through with the bound form so errors render.
    else:
        form = MouvementForm()
    return render(request, 'mouvement/ajouter.html',
                  {'form': form, 'active_tab': 'mouvement',
                   'active_tabM': 'ajout', 'phrase': phrase,
                   'mouvements': mouvements})
def mouvementEditer(request, pacage_cedant, pacage_repreneur, id):
    """Edit the Mouvement identified by both pacages and its pk."""
    mouvements = Mouvement.objects.all()
    instance = get_object_or_404(Mouvement, pacage_cedant=pacage_cedant,
                                 pacage_repreneur=pacage_repreneur, id=id)
    # Bound on POST, unbound on GET (request.POST or None idiom).
    form = MouvementForm(request.POST or None, instance=instance)
    if form.is_valid():
        edited = form.save(commit=False)
        edited.save()
    return render(request, 'mouvement/editer.html',
                  {'mouvements': mouvements, 'form': form,
                   'instance': instance})
def mouvementSupprimer(request, pacage_cedant, pacage_repreneur, id):
    """Delete the targeted Mouvement, then return to the list view.

    Fix: removed the unused ``Mouvement.objects.all()`` queryset the
    original built on every call.
    NOTE(review): deletion fires on any HTTP method, including GET —
    consider requiring POST to avoid accidental deletes.
    """
    query = get_object_or_404(Mouvement, pacage_cedant=pacage_cedant,
                              pacage_repreneur=pacage_repreneur, id=id)
    query.delete()
    return redirect('poyosei:mouvementListe')
def mouvementFiche(request, pacage_cedant, pacage_repreneur, id):
    """Read-only detail ("fiche") view for one Mouvement.

    Fix: the original called ``form.is_valid()`` on an unbound form —
    always False, so its save branch was dead code and has been removed.
    Behavior is unchanged.
    """
    mouvements = Mouvement.objects.all()
    instance = get_object_or_404(Mouvement, pacage_cedant=pacage_cedant,
                                 pacage_repreneur=pacage_repreneur, id=id)
    form = MouvementForm(instance=instance)
    return render(request, 'mouvement/fiche.html',
                  {'form': form, 'mouvements': mouvements,
                   'instance': instance})
def mouvementHistorique(request):
    """Render the Mouvement audit trail (``Mouvement.history`` —
    presumably django-simple-history; confirm against the model)."""
    historique = Mouvement.history.all()
    return render(request, 'mouvement/historique.html',
                  {'historique': historique})
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,494 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0053_delete_rule.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-03-30 21:39
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the ``Rule`` model and its table (auto-generated)."""
    dependencies = [
        ('poyosei', '0052_auto_20180828_1023'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Rule',
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,495 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsTonnage.py | # from django.contrib import messages
# from django.contrib.humanize.templatetags.humanize import intcomma
# from django.db.models import Q
# from django.conf import settings
from django.shortcuts import get_object_or_404, render, redirect
# from django.views import View
# from django.views.generic import ListView
from poyosei.forms import *
from poyosei.models import *
# from tablib import Dataset
# import json
# @csrf_exempt
def tonnageListe(request):
    """List every reconstitutionTonnage record."""
    context = {
        'active_tab': 'tonnage',
        'tonnages': reconstitutionTonnage.objects.all(),
    }
    return render(request, 'reconstitutionTonnage/liste.html', context)
def tonnageAjouter(request):
    """Create a reconstitutionTonnage; redirect to the list on success."""
    tonnages = reconstitutionTonnage.objects.all()
    if request.method != 'POST':
        # Plain GET: show an empty form.
        form = TonnageForm()
    else:
        form = TonnageForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('poyosei:tonnageListe')
        # Invalid POST falls through with the bound form (errors shown).
    return render(request, 'reconstitutionTonnage/ajouter.html',
                  {'active_tab': 'tonnage', 'active_tabP': 'ajout',
                   'form': form, 'tonnages': tonnages})
def tonnageEditer(request, pacage, annee):
    """Edit the reconstitutionTonnage identified by (pacage, annee).

    Fix: the original's ``else`` branch re-created an identical bound
    form; the redundant construction has been removed (behavior is the
    same — the existing form is re-rendered on GET or invalid POST).
    """
    tonnages = reconstitutionTonnage.objects.all()
    instance = get_object_or_404(reconstitutionTonnage, pacage=pacage, annee=annee)
    form = TonnageForm(request.POST or None, instance=instance)
    if form.is_valid():
        form.save()
        return redirect('poyosei:tonnageListe')
    return render(request, 'reconstitutionTonnage/editer.html',
                  {'active_tab': 'tonnage', 'form': form,
                   'instance': instance, 'tonnages': tonnages})
def tonnageFiche(request, pacage, annee):
    """Detail view for one reconstitutionTonnage."""
    instance = get_object_or_404(reconstitutionTonnage, pacage=pacage, annee=annee)
    context = {
        'active_tab': 'tonnage',
        'form': TonnageForm(request.POST or None, instance=instance),
        'instance': instance,
        'tonnages': reconstitutionTonnage.objects.all(),
    }
    return render(request, 'reconstitutionTonnage/fiche.html', context)
def tonnageSupprimer(request, pacage, annee):
    """Delete one reconstitutionTonnage and go back to the list.

    Fix: removed the unused ``reconstitutionTonnage.objects.all()``
    queryset the original built on every call.
    NOTE(review): deletion fires on GET as well as POST.
    """
    instance = get_object_or_404(reconstitutionTonnage, pacage=pacage, annee=annee)
    instance.delete()
    return redirect('poyosei:tonnageListe')
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,496 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsHistorique.py | from django.shortcuts import get_object_or_404, render, redirect, HttpResponseRedirect, HttpResponse
from django.views import View
from poyosei.forms import *
# from django.views.decorators.csrf import csrf_exempt
from poyosei.models import *
#from poyosei.ressources import PlanteurResource, mouvementResource
from django.views.generic import ListView
class PlanteurListView(ListView):
    """ListView serving the Planteur audit history (``Planteur.history``
    — presumably django-simple-history; confirm against the model)."""
    model = Planteur

    def get_queryset(self):
        # History records, not the live Planteur rows.
        return Planteur.history.all()
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,497 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0040_auto_20180819_0205.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-19 06:05
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename ``newMouvement`` to ``ajoutMouvement`` and enforce one
    reconstitution tonnage per (pacage, annee) pair (auto-generated)."""
    dependencies = [
        ('poyosei', '0039_auto_20180818_0144'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='newMouvement',
            new_name='ajoutMouvement',
        ),
        migrations.AlterUniqueTogether(
            name='reconstitutiontonnage',
            unique_together=set([('pacage', 'annee')]),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,498 | AnthonyMrt/Poyosei | refs/heads/master | /views/__init__.py | from .viewsCampagne import *
from .viewsExport import *
from .viewsMouvement import *
from .viewsOperation import *
from .viewsPlanteur import *
from .viewsRapport import *
from .viewsRelation import *
from .viewsStatic import *
from .viewsHistorique import *
from .viewsProdCommerciale import *
from .viewsStatistique import *
from .viewsTypeMouvement import *
from .viewsTonnage import * | {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,499 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsProdCommerciale.py | from django.shortcuts import get_object_or_404, render, redirect
# from django.views import View
from poyosei.forms import *
# from django.views.decorators.csrf import csrf_exempt
from poyosei.models import *
# from poyosei.ressources import PlanteurResource, mouvementResource
# from tablib import Dataset
# import json
# @csrf_exempt
def prodCommercialeListe(request):
    """List every ProductionCommerciale record."""
    context = {
        'active_tab': 'prodCommerciale',
        'prodCommerciale': ProductionCommerciale.objects.all(),
    }
    return render(request, 'prodCommerciale/liste.html', context)
def prodCommercialeAjouter(request):
    """Create a ProductionCommerciale; redirect to the list on success.

    Fix: the original built an unbound form before branching and then
    unconditionally overwrote it — the redundant construction is removed.
    """
    prodCommerciale = ProductionCommerciale.objects.all()
    if request.method == 'POST':
        form = ProdCommercialeForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('poyosei:prodCommercialeListe')
        # Invalid POST: fall through with the bound form so errors render.
    else:
        form = ProdCommercialeForm()
    return render(request, 'prodCommerciale/ajouter.html',
                  {'form': form, 'active_tab': 'prodCommerciale',
                   'active_tabM': 'ajout', 'prodCommerciale': prodCommerciale})
def prodCommercialeEditer(request, pacage, année):
    """Edit the ProductionCommerciale identified by (pacage, année).

    Fix: the original's ``else`` branch re-created an identical bound
    form; that redundant construction has been removed.
    """
    prodCommerciale = ProductionCommerciale.objects.all()
    instance = get_object_or_404(ProductionCommerciale, pacage=pacage, année=année)
    form = ProdCommercialeForm(request.POST or None, instance=instance)
    if form.is_valid():
        form.save()
        return redirect('poyosei:prodCommercialeListe')
    # GET or invalid POST: re-render with the same form.
    return render(request, 'prodCommerciale/editer.html',
                  {'prodCommerciale': prodCommerciale,
                   'instance': instance, 'form': form})
def prodCommercialeFiche(request, pacage, année):
    """Detail view for one ProductionCommerciale."""
    instance = get_object_or_404(ProductionCommerciale, pacage=pacage, année=année)
    context = {
        'prodCommerciale': ProductionCommerciale.objects.all(),
        'instance': instance,
        'form': ProdCommercialeForm(request.POST or None, instance=instance),
    }
    return render(request, 'prodCommerciale/fiche.html', context)
def prodCommercialeSupprimer(request, pacage, année):
    """Delete one ProductionCommerciale and return to the list.

    Fix: the original called ``get_objects_or_404`` (typo) instead of
    ``get_object_or_404``, raising NameError on every call; also removed
    the unused queryset built before the delete.
    NOTE(review): deletion fires on any HTTP method, including GET.
    """
    instance = get_object_or_404(ProductionCommerciale, pacage=pacage, année=année)
    instance.delete()
    return redirect('poyosei:prodCommercialeListe')
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,500 | AnthonyMrt/Poyosei | refs/heads/master | /forms/formsCampagne.py | from django import forms
from django.contrib.admin import widgets
from django.forms import ModelChoiceField
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Submit, HTML, Button, Row, Field
from crispy_forms.bootstrap import AppendedText, PrependedText, FormActions
from poyosei.models import *
class CampagneForm(forms.ModelForm):
    """ModelForm for ``Campagne``; the ``pacage`` field is replaced by a
    choice over the existing planteurs."""

    class Meta:
        model = Campagne
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super(CampagneForm, self).__init__(*args, **kwargs)
        # Pick the pacage from the Planteur table instead of free text.
        planteur_choices = forms.ModelChoiceField(queryset=Planteur.objects.all())
        self.fields['pacage'] = planteur_choices
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,501 | AnthonyMrt/Poyosei | refs/heads/master | /models/modelsTonnage.py | from django.db import models
from decimal import Decimal
# from project import settings
# import datetime
class reconstitutionTonnage(models.Model):
    """Tonnage reconstitution declared for one planteur (pacage) and year."""
    # 9-character planteur identifier (PACAGE number).
    pacage = models.CharField(max_length=9)
    annee = models.CharField(max_length=4)
    reconstitution_tonnage = models.DecimalField(max_digits=9, decimal_places=0, default=Decimal('0'))
    justification = models.TextField()

    class Meta:
        # At most one reconstitution per planteur per year.
        unique_together = ('pacage', 'annee',)

    def __str__(self):
        return self.pacage

    def tonnageExport(self, annee):
        """Return the non-relational field values of this pacage's record
        for *annee*, as a list of strings in model-field order (export use).

        Fix: the original built a ``headers`` list and then tested
        ``if field in headers`` while iterating that same list — the test
        was always True, and the intermediate list was unnecessary. The
        output (order and content) is unchanged.
        """
        Model = reconstitutionTonnage
        line = Model.objects.get(pacage=self.pacage, annee=annee)
        row = []
        for field in Model._meta.get_fields():
            # Only concrete local values are exported; skip relations.
            if not Model._meta.get_field(field.name).is_relation:
                row.append(str(getattr(line, field.name)))
        return row
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,502 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsStatistique.py | # from django.views.decorators.csrf import csrf_exempt
# from datetime import datetime
from django.shortcuts import get_object_or_404, render, redirect
# from django.views import View
from poyosei.forms import *
from poyosei.models import *
# from poyosei.ressources import PlanteurResource, mouvementResource
# from tablib import Dataset
# import json
def statsListe(request):
    """List every Statistique record."""
    context = {'active_tab': 'statistique',
               'statistiques': Statistique.objects.all()}
    return render(request, 'statistique/liste.html', context)
def statsAjouter(request):
    """Create a Statistique; redirect to the list on success.

    Fix: the original built an unbound form before branching and then
    unconditionally overwrote it — the redundant construction is removed.
    """
    statistiques = Statistique.objects.all()
    if request.method == 'POST':
        form = StatistiqueForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('poyosei:statsListe')
        # Invalid POST: fall through with the bound form so errors render.
    else:
        form = StatistiqueForm()
    return render(request, 'statistique/ajouter.html',
                  {'form': form, 'active_tab': 'statistique',
                   'active_tabM': 'ajout', 'statistiques': statistiques})
def statsEditer(request, pacage, annee):
    """Edit the Statistique identified by (pacage, annee).

    Binds POST data when present (a GET yields an unbound form pre-filled
    from the instance); on success redirects to the list view. Removed the
    original's redundant second instantiation of the identical bound form
    and the confusing ``form = form.save(commit=False)`` rebinding — a plain
    ``form.save()`` persists the same instance.
    """
    statistiques = Statistique.objects.all()
    instance = get_object_or_404(Statistique, pacage=pacage, annee=annee)
    form = StatistiqueForm(request.POST or None, instance=instance)
    if form.is_valid():
        form.save()
        return redirect('poyosei:statsListe')
    return render(request, 'statistique/editer.html',
                  {'statistiques': statistiques, 'instance': instance, 'form': form})
def statsFiche(request, pacage, annee):
    """Detail ('fiche') page for one Statistique, rendered with a bound form."""
    instance = get_object_or_404(Statistique, pacage=pacage, annee=annee)
    context = {
        'statistiques': Statistique.objects.all(),
        'instance': instance,
        'form': StatistiqueForm(request.POST or None, instance=instance),
    }
    return render(request, 'statistique/fiche.html', context)
def statsSupprimer(request, pacage, annee):
    """Delete the Statistique identified by (pacage, annee) and return to the list.

    Removed the original's unused ``Statistique.objects.all()`` queryset — it
    was never evaluated (Django querysets are lazy) nor passed anywhere.

    NOTE(review): deletion currently happens on any request method, including
    GET; consider restricting to POST to avoid accidental deletes.
    """
    instance = get_object_or_404(Statistique, pacage=pacage, annee=annee)
    instance.delete()
    return redirect('poyosei:statsListe')
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,503 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0029_auto_20180814_1146.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-14 15:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a 'campagne' FK to Planteur and to its simple_history shadow table."""

    dependencies = [
        ('poyosei', '0028_auto_20180814_1109'),
    ]
    operations = [
        # History rows mirror the FK below but must never constrain or cascade,
        # hence db_constraint=False and DO_NOTHING on the historical table.
        migrations.AddField(
            model_name='historicalplanteur',
            name='campagne',
            field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='poyosei.Campagne'),
        ),
        migrations.AddField(
            model_name='planteur',
            name='campagne',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='poyosei.Campagne'),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,504 | AnthonyMrt/Poyosei | refs/heads/master | /models/modelsRelation.py | from django.db import models
# from decimal import Decimal
# from project import settings
from .modelsPlanteur import *
# import datetime
class Relation(models.Model):
    """Groups Planteur rows under one leading PACAGE number (pacageA)."""

    pacageA = models.CharField(max_length=9, default="")
    planteurs = models.ManyToManyField(Planteur)

    def __str__(self):
        # pacageA is a CharField, so this is simply its string value.
        return str(self.pacageA)
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,505 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0004_statistique.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-31 14:47
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the 'statistique' table holding per-PACAGE yearly farm statistics."""

    dependencies = [
        ('poyosei', '0003_relation'),
    ]
    operations = [
        migrations.CreateModel(
            name='statistique',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pacage', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('annee', models.CharField(max_length=4)),
                # NOTE(review): 'surface_bannane' and 'prodution_exporte' carry
                # typos; the latter is renamed in migration 0023.
                ('surface_bannane', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=10)),
                ('surface_jachere', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=10)),
                ('surface_autre', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=9)),
                ('surface_totale_utile', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=9)),
                ('surface_totale_exploitation', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=9)),
                ('rendement', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('prodution_exporte', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('production_locale', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
                ('information_diverse', models.TextField()),
                ('surface_propriete', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=10)),
                ('surface_location', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=10)),
            ],
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,506 | AnthonyMrt/Poyosei | refs/heads/master | /models/modelsCampagne.py | from django.db import models
from decimal import Decimal
from django.utils import timezone
from django.forms import DateTimeField
from datetime import datetime
from project import settings
from poyosei.models import *
class Campagne(models.Model):
    """One campaign year for a PACAGE: individual references (rid/rit),
    total commercial production and free-form comments.

    (pacage, annee) is unique; the reserve is the special pacage '000000000'.
    """

    pacage = models.CharField(max_length=9)
    annee = models.CharField(max_length=4)
    rit = models.DecimalField(
        max_digits=20, decimal_places=0, default=Decimal('0.0'))
    rid = models.DecimalField(
        max_digits=20, decimal_places=0, default=Decimal('0.0'))
    production_commerciale_totale = models.DecimalField(
        max_digits=9, decimal_places=0, default=Decimal('0.0'))
    commentaire = models.TextField(blank=True)
    ri_Total = models.DecimalField(
        max_digits=20, decimal_places=0, default=Decimal('0.0'))
    class Meta:
        app_label = 'poyosei'
        unique_together = ('pacage', 'annee',)
    def __str__(self):
        return self.pacage
    def campagneExport(self, annee):
        """Return this pacage's row for *annee* as a list of stringified
        non-relation field values, in Model._meta field order (used by the
        export views)."""
        Model = Campagne
        line = Model.objects.get(pacage=self.pacage, annee=annee)
        headers = []
        for field in Model._meta.get_fields():
            headers.append(field.name)
        row = []
        for field in headers:
            # NOTE(review): 'field in headers' is always true here (we are
            # iterating headers itself) — the check is redundant.
            if field in headers:
                if not Model._meta.get_field(field).is_relation:
                    val = getattr(line, field)
                    # if callable(val):
                    #     val = val()
                    row.append(str(val))
        return row
    @property
    def TerminerCampagne(self):
        """Close the current campaign: create next-year Campagne rows for every
        Planteur, crediting the reserve ('000000000') with its taxes.

        NOTE(review): this is a @property with database side effects (creates
        rows on attribute access) — consider making it a regular method.
        Assumes Planteur.annee is numeric so 'annee + 1' works — confirm.
        """
        reserve = Planteur.objects.get(pacage='000000000')
        annee = reserve.annee
        newAnnee = annee + 1
        planteurs = Planteur.objects.all()
        for p in planteurs:
            if p.pacage != '000000000':
                ridP = p.ridAnneeEnCours
                ritP = p.ritAnneeEnCours
                Campagne.objects.create(
                    pacage=p.pacage, annee=newAnnee, rid=ridP, rit=ritP)
            else:
                taxe = p.taxeReserve
                total = float(p.ridAnneeEnCours) + taxe
                riTemp = p.ritAnneeEnCours
                Campagne.objects.create(
                    pacage=p.pacage, annee=newAnnee, rid=total, rit=riTemp)
    def CampagneEnCours(pacageSearch='00000000'):
        """Return (as int) the year of the most recent campaign.

        NOTE(review): defined without 'self' and invoked as a plain function
        via the class (Campagne.CampagneEnCours()) — works in Python 3 only;
        an @staticmethod decorator would make the intent explicit. The local
        'query' is never used, and the 8-character default pacage differs from
        the 9-character '000000000' used elsewhere — confirm both.
        """
        query = Campagne.objects.filter(pacage=pacageSearch)
        p = Campagne.objects.order_by('annee').last()
        annee = p.annee
        return int(annee)
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,507 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0015_auto_20180611_0840.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-11 12:40
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create ProductionCommerciale, unique per (pacage, year)."""

    dependencies = [
        ('poyosei', '0014_historicalmouvement_historicalplanteur'),
    ]
    operations = [
        migrations.CreateModel(
            name='ProductionCommerciale',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pacage', models.CharField(max_length=9)),
                # NOTE(review): non-ASCII column name 'année' — legal in
                # Python 3 / Django but awkward for raw SQL and tooling.
                ('année', models.CharField(max_length=4)),
                ('production_commerciale', models.DecimalField(decimal_places=0, default=Decimal('0'), max_digits=9)),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='productioncommerciale',
            unique_together=set([('pacage', 'année')]),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,508 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0007_auto_20180531_1344.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-31 17:44
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Campagne's RID/RIT columns to lowercase rid/rit."""

    dependencies = [
        ('poyosei', '0006_planteurback'),
    ]
    operations = [
        migrations.RenameField(
            model_name='campagne',
            old_name='RID',
            new_name='rid',
        ),
        migrations.RenameField(
            model_name='campagne',
            old_name='RIT',
            new_name='rit',
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,509 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsTypeMouvement.py | from django.shortcuts import get_object_or_404, render, redirect, HttpResponseRedirect, HttpResponse
from django.views import View
from poyosei.forms import *
# from django.views.decorators.csrf import csrf_exempt
from poyosei.models import *
from poyosei.ressources import PlanteurResource, mouvementResource
from tablib import Dataset
import json
# @csrf_exempt
def typeMouvementListe(request):
    """Render the list page showing every typeMouvement record."""
    context = {
        'active_tab': 'typeMouvement',
        'typeMouv': typeMouvement.objects.all(),
    }
    return render(request, 'typeMouvement/liste.html', context)
def AjouterTypeMouvement(request):
    """Create a new movement type.

    GET renders an empty form; POST validates and saves. On an invalid POST
    the *bound* form is re-rendered so validation errors are displayed — the
    original replaced it with a fresh empty form, discarding the errors.

    NOTE(review): success redirects to 'poyosei:mouvementListe' while the
    sibling views target 'poyosei:typeMouvementListe' — confirm intentional.
    NOTE(review): the name is not snake_case, unlike the other views here.
    """
    if request.method == 'POST':
        form = TypeMouvementForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('poyosei:mouvementListe')
    else:
        form = TypeMouvementForm()
    return render(request, 'typeMouvement/ajouter.html', {'form': form, })
def typeMouvementEditer(request, Nom_mouvement):
    """Edit the TypeMouvement identified by Nom_mouvement.

    Binds POST data when present (GET pre-fills from the instance); redirects
    to the list view on success. Removed the original's redundant second
    instantiation of the identical bound form and the confusing
    ``form = form.save(commit=False)`` rebinding — ``form.save()`` persists
    the same instance.
    """
    instance = get_object_or_404(TypeMouvement, Nom_mouvement=Nom_mouvement)
    form = TypeMouvementForm(request.POST or None, instance=instance)
    if form.is_valid():
        form.save()
        return redirect('poyosei:typeMouvementListe')
    return render(request, 'typeMouvement/editer.html',
                  {'form': form, 'active_tab': 'typeMouvement', 'instance': instance})
def typeMouvementFiche(request, Nom_mouvement):
    """Detail ('fiche') page for one TypeMouvement, rendered with a bound form."""
    instance = get_object_or_404(TypeMouvement, Nom_mouvement=Nom_mouvement)
    context = {
        'form': TypeMouvementForm(request.POST or None, instance=instance),
        'active_tab': 'typeMouvement',
        'instance': instance,
    }
    return render(request, 'typeMouvement/fiche.html', context)
def typeMouvementSupprimer(request, Nom_mouvement):
    """Delete the TypeMouvement matching Nom_mouvement, then return to the list."""
    get_object_or_404(TypeMouvement, Nom_mouvement=Nom_mouvement).delete()
    return redirect('poyosei:typeMouvementListe')
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,510 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0023_auto_20180807_1412.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-07 18:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Fix the 'prodution_exporte' typo on statistique (-> production_exporte)."""

    dependencies = [
        ('poyosei', '0022_auto_20180807_1412'),
    ]
    operations = [
        migrations.RenameField(
            model_name='statistique',
            old_name='prodution_exporte',
            new_name='production_exporte',
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,511 | AnthonyMrt/Poyosei | refs/heads/master | /models/modelsMouvement.py | from django.db import models
from decimal import Decimal
from django.utils import timezone
from django.forms import DateTimeField
from project import settings
from datetime import datetime
from poyosei.models import *
from django.core.validators import MaxValueValidator, MinValueValidator
from simple_history.models import HistoricalRecords
# (stored value, human label) pairs for the movement-type choice field.
# NOTE(review): some pairs differ between stored value and label (e.g.
# 'rndividuelle' vs 'individuelle', missing accents); the stored strings are
# compared verbatim elsewhere (e.g. against 'Reprise administrative'), so
# fixing the typos would require a data migration.
typeMouvement_CHOICE = (
    ('transfert total d\'une exploitation', 'transfert total d\'une exploitation'),
    ('Transfert de référence rndividuelle avec cession partielle de foncier',
     'Transfert de référence individuelle avec cession partielle de foncier'),
    ('Transfert de référence individuelle sans foncier',
     'Transfert de référence individuelle sans foncier'),
    ('Reprise administrative', 'Reprise administrative'),
    ('Cession volontaire définitive', 'Cession volontaire définitive'),
    ('Cession volontaire temporaire', 'Cession volontaire temporaire'),
    ('Cessation d’activite sans repreneur', 'Cessation d’activite sans repreneur'),
    ('Attribution de Reference Individuelle par la reserve',
     'Attribution de Reference Individuelle par la réserve'),
    ('Autre', 'Autre'),
    ('', ''),
)
class typeMouvementModel(models.Model):
    """Base model carrying a movement-type choice plus free-form notes;
    Mouvement inherits from it (multi-table inheritance)."""

    type_mouvement = models.CharField(
        max_length=100, choices=typeMouvement_CHOICE, default='')
    informations = models.TextField(blank=True)

    def __str__(self):
        # Primary key rendered as text.
        return "%s" % self.id
# (stored value, human label) pairs for the kind of individual-reference change.
typeDeReferenceIndividuelle_CHOICE = (
    ('définitive', 'définitive'),
    ('temporaire', 'temporaire'),
    ('autre', 'autre'),
    ('', '')
)
class Mouvement(typeMouvementModel):
    """A reference-transfer movement between two PACAGEs for one campaign year.

    The rid*/rit* properties compute, per movement, the delta applied to the
    ceding pacage, the receiving pacage and the reserve; 'Reprise
    administrative' movements get a special 80%-minus-production formula based
    on the ceding pacage's latest Campagne.
    """

    pacage_cedant = models.CharField(max_length=10)
    pacage_repreneur = models.CharField(max_length=10)
    année_concerne = models.CharField(max_length=4, blank=True)
    date_demande = models.DateField(null=True, blank=True)
    mouvement_valide = models.BooleanField(default=False, blank=True)
    date_COSDA_Valide = models.DateField(null=True, blank=True)
    type_reference_individuelle_modifie = models.CharField(
        max_length=100, default='', choices=typeDeReferenceIndividuelle_CHOICE)
    quantite_reference_individuelle_demande = models.DecimalField(
        max_digits=10, decimal_places=0, default=Decimal('0'), blank=True, null=True)
    quantite_reference_individuelle_accorde = models.DecimalField(
        max_digits=10, decimal_places=0, default=Decimal('0'), blank=True)
    date_creation = models.DateTimeField(
        auto_now_add=True, null=True, blank=True)
    taxe = models.FloatField(default=0.0, blank=True)
    history = HistoricalRecords()
    def __str__(self):
        return str(self.id)
    def mouvementExport(self):
        """Return this movement's row as stringified non-relation field values,
        in Model._meta field order (used by the export views).

        NOTE(review): the row is re-fetched by (pacage_cedant,
        pacage_repreneur), which .get() requires to be unique — confirm.
        """
        Model = Mouvement
        line = Model.objects.get(
            pacage_cedant=self.pacage_cedant, pacage_repreneur=self.pacage_repreneur)
        headers = []
        for field in Model._meta.get_fields():
            headers.append(field.name)
        row = []
        for field in headers:
            # NOTE(review): 'field in headers' is always true here — redundant.
            if field in headers:
                if not Model._meta.get_field(field).is_relation:
                    val = getattr(line, field)
                    row.append(str(val))
        return row
    # Transfer of an individual reference without land.
    @property
    def ridCedant(self):
        """Compute the rid delta applied to the ceding pacage for this movement
        (negative of the granted quantity; special formula for an
        administrative take-back)."""
        ceder = - self.quantite_reference_individuelle_accorde
        if self.type_mouvement == 'Reprise administrative':
            c = Campagne.objects.order_by('annee').last()
            Cannee = int(c.annee)
            cR = Campagne.objects.get(pacage=self.pacage_cedant, annee=Cannee)
            produc_commer = float(cR.production_commerciale_totale)
            # NOTE(review): debug prints left in — consider removing/logging.
            print(produc_commer)
            print(self.quantite_reference_individuelle_accorde)
            ceder = - \
                float(self.quantite_reference_individuelle_accorde) * \
                0.80 + produc_commer
        return ceder
    @property
    def ridRepreneur(self):
        """Compute the rid delta credited to the receiving pacage: the granted
        quantity minus the reserve's tax (special formula for an
        administrative take-back)."""
        taxer = float(
            self.quantite_reference_individuelle_accorde) * self.taxe/100
        repris = float(self.quantite_reference_individuelle_accorde) - taxer
        if self.type_mouvement == 'Reprise administrative':
            c = Campagne.objects.order_by('annee').last()
            Cannee = int(c.annee)
            cR = Campagne.objects.get(pacage=self.pacage_cedant, annee=Cannee)
            produc_commer = float(cR.production_commerciale_totale)
            # print(produc_commer)
            repris = float(
                self.quantite_reference_individuelle_accorde) * 0.80 - produc_commer
        return repris
    @property
    def ridReserve(self):
        # Tax retained by the reserve: taxe% of the granted quantity.
        retenue = float(
            self.quantite_reference_individuelle_accorde) * self.taxe/100
        return retenue
    @property
    def ritCedant(self):
        # rit is never moved by a Mouvement: always zero.
        donner = 0
        return donner
    @property
    def ritRepreneur(self):
        # rit is never moved by a Mouvement: always zero.
        obtenue = 0
        return obtenue
    @property
    def ritReserve(self):
        # rit is never taxed by a Mouvement: always zero.
        taxe = 0
        return taxe
    # Transfer of an individual reference with partial land cession —
    # disabled draft kept below for reference (contains typos such as
    # 'pacage_repeneur'/'année_cocerne'; do not re-enable as-is).
    # def cederFoncier(self):
    #     query = Campagne.objects.order_by('annee').last()
    #     annee1 = query.annee - 1
    #     annee2 = query.annee
    #     mvt1 = Mouvement.objects.filter(type_mouvement='Attribution de Reference Individuelle par la reserve', pacage_repreneur=self.pacage_repreneur, année_concerne=annee1)
    #     mvt2 = Mouvement.objects.filter(type_mouvement='Attribution de Reference Individuelle par la reserve', pacage_repreneur=self.pacage_repreneur, année_concerne=annee2)
    #     mvt3 = Mouvement.objects.filter(type_mouvement='Attribution de Reference Individuelle par la reserve', pacage_repeneur=self.pacage_repreneur, année_cocerne=datetime.now().year)
    #     mvt4 = Mouvement.objects.filter(pacage_repeneur=self.pacage_repreneur, année_cocerne=datetime.now().year)
    #     cpg = Campagne.objects.get(pacage=self.pacage_repreneur, annee=query.annee)
    #     quantite1 = mvt1.count()
    #     quantite2 = mvt2.count()
    #     quantite3 = mvt3.count()
    #     quantite4 = mvt4.count()
    #     total = quantite1 + quantite2 + quantite3
    #     if total > 1 or quantite4 > 2 :
    #         return False
    #     else:
    #         ceder = - self.quantite_reference_individuelle_accorde
    #         return ceder
    # def obtenueFoncier(self):
    #     query = Campagne.objects.order_by('annee').last()
    #     annee1 = query.annee - 1
    #     annee2 = query.annee
    #     mvt1 = Mouvement.objects.filter(pacage_repreneur=self.pacage_repreneur, année_concerne=annee1)
    #     mvt2 = Mouvement.objects.filter(pacage_repreneur=self.pacage_repreneur, année_concerne=annee2)
    #     mvt3 = Mouvement.objects.filter(pacage_repeneur=self.pacage_repreneur, année_cocerne=datetime.now().year)
    #     quantite1 = mvt1.count()
    #     quantite2 = mvt2.count()
    #     quantite3 = mvt3.count()
    #     total = quantite1 + quantite2 + quantite3
    #     if total > 1:
    #         return False
    #     else:
    #         obtenue = self.quantite_reference_individuelle_accorde
    #         return obtenue
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,512 | AnthonyMrt/Poyosei | refs/heads/master | /models/__init__.py | from .modelsCampagne import *
from .modelsMouvement import *
from .modelsPlanteur import *
from .modelsRelation import *
from .modelsStatistique import *
from .modelsTonnage import *
from .modelsProductionCommerciale import *
from .modelsTypeMouvement import * | {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,513 | AnthonyMrt/Poyosei | refs/heads/master | /views/viewsOperation.py | from django.conf import settings
from django.db.models import Sum
from django.shortcuts import render
from poyosei.models import *
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt
def operation(request):
    """Year-end 'operation' screen: shows pending movements and, on POST,
    rolls every Planteur over into a new Campagne year.

    NOTE(review): several smells worth confirming with the author —
    'query' is built but never evaluated (lazy queryset, dead code); 'camp'
    and 'annee' are recomputed identically on every loop iteration; 'nbrM'
    is the bound method 'mouvement.count' (no parentheses — Django templates
    call it, so it still renders, but it is fragile); 'taxin' keeps only the
    LAST movement's ridReserve; 'total' and 'annee' are reassigned for
    unrelated purposes; 'keyword' and 'taxin'-less POSTs (empty totalMouv)
    would raise NameError on the reserve branch.
    """
    continuer = 'init'
    mouvements = ""
    planteurs = Planteur.objects.all()
    for p in planteurs:
        query = Campagne.objects.filter(pacage=p.pacage)
        camp = Campagne.objects.order_by('annee').last()
        annee = camp.annee
        total = Mouvement.objects.all().filter(année_concerne=annee).aggregate(Sum('quantite_reference_individuelle_accorde'))['quantite_reference_individuelle_accorde__sum'] or 0.00
        totalMouv = Mouvement.objects.all().filter(année_concerne=annee)
    mouvement = Mouvement.objects.filter(mouvement_valide=False)
    nbrM = mouvement.count
    keyword = request.POST.get("order", "")
    annee = Campagne.CampagneEnCours()
    newAnnee = annee + 1
    if request.method == 'POST':
        for t in totalMouv:
            taxin = t.ridReserve
        for p in planteurs:
            if p.pacage == '000000000':
                # Reserve pacage: add the (last) movement's tax to its rid.
                total = float(p.ridAnneeEnCours) + taxin
                riTemp = p.ritAnneeEnCours
                Campagne.objects.create(pacage=p.pacage, annee=newAnnee, rid=p.taxeReserve, rit=riTemp, ri_Total=p.riTotale)
            else :
                ridP = p.ridAnneeP
                ritP = p.ritAnneeP
                Campagne.objects.create(pacage=p.pacage, annee=newAnnee, rid=ridP, rit=ritP, ri_Total=p.riTotale)
    return render(request, 'operation/index.html', { 'continuer':continuer, 'annee':annee, 'mouvements':mouvements, 'mouvement':mouvement, "active_tab": "operation", 'total':total, 'totalMouv':totalMouv, 'nbrM':nbrM})
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,514 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0043_auto_20180819_0908.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-19 13:08
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Campagne.ri_Total and make AjoutMouvement.informations optional."""

    dependencies = [
        ('poyosei', '0042_ajoutmouvement'),
    ]
    operations = [
        migrations.AddField(
            model_name='campagne',
            name='ri_Total',
            field=models.DecimalField(decimal_places=0, default=Decimal('0.0'), max_digits=20),
        ),
        migrations.AlterField(
            model_name='ajoutmouvement',
            name='informations',
            field=models.TextField(blank=True),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
46,515 | AnthonyMrt/Poyosei | refs/heads/master | /models/modelsTypeMouvement.py | from django.db import models
# from decimal import Decimal
# from django.utils import timezone
# from django.forms import DateTimeField
# from project import settings
# from django.db.models import Q
# from django.db.models.signals import post_save
from poyosei.models import *
# from django.shortcuts import get_object_or_404
# from datetime import datetime
# from itertools import chain
# from simple_history.models import HistoricalRecords
class typeMouvement(models.Model):
    """A movement type: a name plus an optional free-text description."""

    # NOTE(review): Django does not enforce max_length on a TextField at the
    # database level — presumably intended as a hint; a CharField would enforce
    # it, but switching would require a new migration. Confirm before changing.
    Nom_mouvement = models.TextField(max_length=100)
    # Optional notes; blank=True makes the field non-required in forms.
    informations = models.TextField(blank=True)

    def __str__(self):
        # Coerce to text explicitly so any stored value renders safely.
        return "%s" % self.Nom_mouvement
46,516 | AnthonyMrt/Poyosei | refs/heads/master | /migrations/0020_auto_20180806_1509.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-08-06 19:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Set default ordering on Planteur and retype Statistique.pacage.

    Auto-generated by Django 1.11 (2018-08-06); schema/option changes only.
    """

    # Applies on top of migration 0019 of the 'poyosei' app.
    dependencies = [
        ('poyosei', '0019_auto_20180802_1341'),
    ]

    operations = [
        # Default queryset ordering for Planteur becomes ascending 'pacage'.
        migrations.AlterModelOptions(
            name='planteur',
            options={'ordering': ['pacage']},
        ),
        # Statistique.pacage becomes a CharField capped at 9 characters.
        migrations.AlterField(
            model_name='statistique',
            name='pacage',
            field=models.CharField(max_length=9),
        ),
    ]
| {"/admin.py": ["/models/__init__.py"], "/forms/__init__.py": ["/forms/formsPlanteur.py", "/forms/formsMouvement.py", "/forms/formsRelation.py", "/forms/formsCampagne.py", "/forms/formsProdCommerciale.py", "/forms/formsTypeMouvement.py"], "/views/__init__.py": ["/views/viewsCampagne.py", "/views/viewsExport.py", "/views/viewsMouvement.py", "/views/viewsOperation.py", "/views/viewsPlanteur.py", "/views/viewsRapport.py", "/views/viewsRelation.py", "/views/viewsStatic.py", "/views/viewsHistorique.py", "/views/viewsProdCommerciale.py", "/views/viewsStatistique.py", "/views/viewsTypeMouvement.py", "/views/viewsTonnage.py"], "/models/modelsRelation.py": ["/models/modelsPlanteur.py"], "/models/__init__.py": ["/models/modelsCampagne.py", "/models/modelsMouvement.py", "/models/modelsPlanteur.py", "/models/modelsRelation.py", "/models/modelsStatistique.py", "/models/modelsTonnage.py", "/models/modelsProductionCommerciale.py", "/models/modelsTypeMouvement.py"], "/ressources.py": ["/models/__init__.py"], "/test.old/__init__.py": ["/test.old/testMouvement.py", "/test.old/testPlanteur.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.