index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
60,544 | drrros/frtest | refs/heads/master | /polls/urls.py | from django.urls import path
from polls.views.admin import PollCreateAPIView, QuestionCreateAPIView, PollChangeAPIView, QuestionChangeAPIView, \
PollListAdminAPIView, QuestionChoiceCreateAPIView, QuestionChoiceChangeAPIView
from polls.views.client import ActivePollAPIView, AnswerAPIView, GetAnswerAPIView
# URL routes for the polls app.
#
# Client endpoints (no authentication enforced in these views):
#   poll_list/        - list currently active polls
#   answer/           - submit an answer
#   get_answers/<id>  - fetch all answers submitted by one user id
# Endpoints under "admin/" create/modify polls, questions and choices.
urlpatterns = [
    path('poll_list/', ActivePollAPIView.as_view(), name='active_polls_list_api'),
    path('answer/', AnswerAPIView.as_view(), name='answer_api'),
    path('get_answers/<int:userid>', GetAnswerAPIView.as_view(), name='get_answers_api'),
    path('admin/create_poll/', PollCreateAPIView.as_view(), name='poll_create_api'),
    path('admin/change_poll/<int:pk>', PollChangeAPIView.as_view(), name='poll_change_api'),
    path('admin/create_question/', QuestionCreateAPIView.as_view(), name='question_create_api'),
    path('admin/create_question_choice/', QuestionChoiceCreateAPIView.as_view(), name='question_choice_create_api'),
    path('admin/change_question/<int:pk>', QuestionChangeAPIView.as_view(), name='question_change_api'),
    path('admin/change_question_choice/<int:pk>', QuestionChoiceChangeAPIView.as_view(), name='question_choice_change_api'),
    path('admin/poll_list/', PollListAdminAPIView.as_view(), name='poll_list_admin_api'),
]
| {"/polls/serializers.py": ["/polls/models.py"], "/polls/views/admin.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/urls.py": ["/polls/views/admin.py", "/polls/views/client.py"], "/polls/tests.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/admin.py": ["/polls/models.py"], "/polls/views/client.py": ["/polls/models.py", "/polls/serializers.py"]} |
60,545 | drrros/frtest | refs/heads/master | /polls/tests.py | from django.contrib.auth.models import User
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APITestCase
from .models import Poll, Answer, Question, QuestionChoice
from .serializers import PollSerializer
class PollTest(APITestCase):
    """End-to-end tests for the polls API (admin and client endpoints)."""

    def setUp(self):
        # Superuser used to exercise the admin-only endpoints.
        User.objects.create_superuser(username='admin', email='admin@drros.ru', password='123AdminPassword')

        # One poll holding one free-text question with two predefined choices.
        poll = Poll.objects.create(
            title='Poll',
            description='Poll desc',
        )
        question = Question.objects.create(
            text='Question',
            type='text'
        )
        QuestionChoice.objects.create(
            text='QC',
            question=question
        )
        QuestionChoice.objects.create(
            text='QC2',
            question=question
        )
        poll.questions.add(question)

    def testTasks(self):
        # NOTE: this is one long scenario — later steps depend on the DB
        # state produced by earlier ones, so the order matters.
        user = User.objects.get(username='admin')

        # Unauthenticated access to the admin poll list is rejected.
        url = reverse('poll_list_admin_api')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.client.force_authenticate(user=user)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Response payload must match the serialized poll queryset.
        poll = Poll.objects.filter(id=1)
        serialized_poll = PollSerializer(poll, many=True)
        self.assertEqual(response.data, serialized_poll.data)

        # Question edits succeed while the poll has no start date yet.
        url = reverse('question_change_api', kwargs={'pk': 1})
        response = self.client.patch(url, data={'text': 'asdqwe'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(poll.first().questions.first().text, 'asdqwe')

        # Give the poll a start date (Poll.save() then sets locked=True).
        url = reverse('poll_change_api', kwargs={'pk': 1})
        response = self.client.put(url, data={
            'date_start': timezone.now(),
            'title': 'Poll',
            'description': 'Poll desc',
        })
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # The same question edit is now rejected — presumably because the
        # poll is locked (enforced in the admin views, not visible here).
        url = reverse('question_change_api', kwargs={'pk': 1})
        response = self.client.patch(url, data={'text': 'text'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        self.client.logout()

        # Anonymous users may submit answers.
        self.assertEqual(Answer.objects.count(), 0)
        url = reverse('answer_api')
        data = {
            'question': 1,
            'userid': 123,
            'value': 'asd'
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Answer.objects.count(), 1)
        self.assertEqual(Answer.objects.get().userid, 123)
        self.assertEqual(Answer.objects.get().value, 'asd')

        # ...and read their answers back by user id.
        url = reverse('get_answers_api', kwargs={'userid': 123})
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data[0]['question'], 1)
        self.assertEqual(response.data[0]['userid'], 123)
        self.assertEqual(response.data[0]['value'], 'asd')
| {"/polls/serializers.py": ["/polls/models.py"], "/polls/views/admin.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/urls.py": ["/polls/views/admin.py", "/polls/views/client.py"], "/polls/tests.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/admin.py": ["/polls/models.py"], "/polls/views/client.py": ["/polls/models.py", "/polls/serializers.py"]} |
60,546 | drrros/frtest | refs/heads/master | /polls/admin.py | from django.contrib import admin
# Register your models here.
from polls.models import Question, Poll, QuestionChoice, Answer
# Expose every poll model in the Django admin with the default ModelAdmin.
for _model in (Question, Poll, Answer, QuestionChoice):
    admin.site.register(_model)
| {"/polls/serializers.py": ["/polls/models.py"], "/polls/views/admin.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/urls.py": ["/polls/views/admin.py", "/polls/views/client.py"], "/polls/tests.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/admin.py": ["/polls/models.py"], "/polls/views/client.py": ["/polls/models.py", "/polls/serializers.py"]} |
60,547 | drrros/frtest | refs/heads/master | /polls/migrations/0001_initial.py | # Generated by Django 2.2.10 on 2021-03-25 21:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial schema for the polls app (Question,
    # QuestionChoice, Poll, Answer). Avoid hand-editing once applied;
    # verbose_name strings are user-facing Russian labels.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=200, verbose_name='Текст вопроса')),
                ('type', models.CharField(choices=[('text', 'Ответ текстом'), ('choice', 'Ответ выбором одного варианта'), ('multiple_choice', 'Ответ со многими вариантами')], max_length=50, verbose_name='Тип вопроса')),
            ],
        ),
        migrations.CreateModel(
            name='QuestionChoice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=200, verbose_name='Вариант ответа')),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question', verbose_name='Вопрос')),
            ],
        ),
        migrations.CreateModel(
            name='Poll',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200, verbose_name='Название опроса')),
                ('description', models.CharField(max_length=200, verbose_name='Описание опроса')),
                ('date_start', models.DateTimeField(blank=True, null=True, verbose_name='Дата начала проведения')),
                ('date_end', models.DateTimeField(blank=True, null=True, verbose_name='Дата окончания проведения')),
                ('locked', models.BooleanField(default=False, verbose_name='Изменение вопросов заблокировано')),
                ('questions', models.ManyToManyField(blank=True, related_name='polls', to='polls.Question')),
            ],
        ),
        migrations.CreateModel(
            name='Answer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('userid', models.PositiveIntegerField(verbose_name='ID пользователя')),
                ('value', models.TextField()),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question', verbose_name='Вопрос')),
            ],
        ),
    ]
| {"/polls/serializers.py": ["/polls/models.py"], "/polls/views/admin.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/urls.py": ["/polls/views/admin.py", "/polls/views/client.py"], "/polls/tests.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/admin.py": ["/polls/models.py"], "/polls/views/client.py": ["/polls/models.py", "/polls/serializers.py"]} |
60,548 | drrros/frtest | refs/heads/master | /polls/views/client.py | from django.utils import timezone
from rest_framework import generics
from polls.models import Poll, Answer
from polls.serializers import PollSerializer, AnswerSerializer
class ActivePollAPIView(generics.ListAPIView):
    """List polls that are currently running.

    A poll is "active" when ``date_start < now < date_end`` (both bounds
    strict); polls whose start or end date is NULL never match these
    lookups and so are excluded.
    """
    serializer_class = PollSerializer

    def get_queryset(self):
        # Evaluated per-request, so the window moves with the clock.
        return Poll.objects.filter(
            date_start__lt=timezone.now(),
            date_end__gt=timezone.now()
        )
class AnswerAPIView(generics.CreateAPIView):
    """Create an Answer from a POSTed payload; no authentication is enforced here."""
    serializer_class = AnswerSerializer
class GetAnswerAPIView(generics.ListAPIView):
    """List every answer submitted by one user.

    The user is identified by the ``userid`` URL kwarg (see polls/urls.py).
    """
    serializer_class = AnswerSerializer

    def get_queryset(self):
        return Answer.objects.filter(userid=self.kwargs['userid'])
| {"/polls/serializers.py": ["/polls/models.py"], "/polls/views/admin.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/urls.py": ["/polls/views/admin.py", "/polls/views/client.py"], "/polls/tests.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/admin.py": ["/polls/models.py"], "/polls/views/client.py": ["/polls/models.py", "/polls/serializers.py"]} |
60,549 | drrros/frtest | refs/heads/master | /polls/models.py | from django.db import models
# Create your models here.
class Question(models.Model):
    """A single poll question with a fixed answer type."""

    # Supported answer modes (stored value, user-facing Russian label).
    TYPE_CHOICES = (
        ('text', 'Ответ текстом'),
        ('choice', 'Ответ выбором одного варианта'),
        ('multiple_choice', 'Ответ со многими вариантами'),
    )

    text = models.CharField(max_length=200, verbose_name='Текст вопроса')
    type = models.CharField(max_length=50, choices=TYPE_CHOICES, verbose_name='Тип вопроса')

    def __str__(self):
        return str(self.text)
class Poll(models.Model):
    """A poll: title/description, an optional active window and its questions.

    Saving a poll that has a ``date_start`` marks it as locked (see
    ``save``); the flag is never cleared automatically.
    """

    title = models.CharField(max_length=200, verbose_name='Название опроса')
    description = models.CharField(max_length=200, verbose_name='Описание опроса')
    date_start = models.DateTimeField(blank=True, null=True, verbose_name='Дата начала проведения')
    date_end = models.DateTimeField(blank=True, null=True, verbose_name='Дата окончания проведения')
    locked = models.BooleanField(default=False, verbose_name='Изменение вопросов заблокировано')
    questions = models.ManyToManyField(Question, blank=True, related_name='polls')

    def __str__(self):
        return str(self.title)

    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """Persist the poll, locking it once a start date has been set."""
        if self.date_start:
            self.locked = True
        super().save(force_insert, force_update, using, update_fields)
class Answer(models.Model):
    """One user's answer to one question; users are identified by a bare integer id."""

    question = models.ForeignKey(Question, on_delete=models.CASCADE, verbose_name='Вопрос')
    userid = models.PositiveIntegerField(verbose_name='ID пользователя')
    value = models.TextField()

    def __str__(self):
        return '{0} - {1}'.format(self.userid, self.value)
class QuestionChoice(models.Model):
    """A predefined answer option belonging to one question."""

    text = models.CharField(max_length=200, verbose_name='Вариант ответа')
    question = models.ForeignKey(Question, on_delete=models.CASCADE, verbose_name='Вопрос')

    def __str__(self):
        return '{0} - {1}'.format(self.question, self.text)
| {"/polls/serializers.py": ["/polls/models.py"], "/polls/views/admin.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/urls.py": ["/polls/views/admin.py", "/polls/views/client.py"], "/polls/tests.py": ["/polls/models.py", "/polls/serializers.py"], "/polls/admin.py": ["/polls/models.py"], "/polls/views/client.py": ["/polls/models.py", "/polls/serializers.py"]} |
60,555 | fuzeman/stash.py | refs/heads/master | /stash/caches/c_memory.py | from stash.caches.core.base import Cache
from stash.lib import six as six
class MemoryCache(Cache):
    """In-memory `Cache` backend backed by a plain dict.

    :param initial: optional mapping used as the initial store
        (kept by reference, not copied).
    """

    __key__ = 'memory'

    def __init__(self, initial=None):
        super(MemoryCache, self).__init__()

        self.data = initial or {}

    def iteritems(self):
        # BUG FIX: `dict.iteritems()` does not exist on Python 3 and raised
        # AttributeError here. Iterating `items()` works on both versions
        # (a lazy view on py3, a materialized list on py2).
        return iter(self.data.items())

    def items(self):
        return self.data.items()

    def __delitem__(self, key):
        del self.data[key]

    def __getitem__(self, key):
        return self.data[key]

    def __iter__(self):
        return iter(self.data)

    def __len__(self):
        return len(self.data)

    def __setitem__(self, key, value):
        self.data[key] = value
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,556 | fuzeman/stash.py | refs/heads/master | /tests/archives/sqlite_tests.py | from stash import Stash, SqliteArchive
from stash.lib.six.moves import xrange
import pytest
def fetch(cursor, query):
    """Run `query` on `cursor` and return its two-column result as a dict."""
    return {key: value for key, value in cursor.execute(query)}
def test_construct():
    # Direct construction from an archive instance.
    st = Stash(SqliteArchive(':memory:', 'one'))

    assert type(st.archive) is SqliteArchive
    assert st.archive.table == 'one'

    with pytest.raises(TypeError):
        # Missing 'table' parameter
        st = Stash('sqlite:///')

    # URI construction, table name supplied as a query parameter.
    st = Stash('sqlite:///:memory:?table=one')

    assert type(st.archive) is SqliteArchive
    assert st.archive.table == 'one'
def test_set():
    st = Stash('sqlite:///:memory:?table=stash', 'lru:///?capacity=10')

    for x in xrange(5):
        st[x] = str(x)

    # Writes go through the cache; `save` flushes them into the archive.
    st.save()

    # Ensure DB contains correct data
    cursor = st.archive.db.cursor()
    data = fetch(cursor, 'select key, value from "%s"' % st.archive.table)

    for x in xrange(5):
        assert data[x] == str(x)
def test_get():
    st = Stash('sqlite:///:memory:?table=stash', 'lru:///?capacity=10')

    # Fill database with test data (bypassing the stash, straight into SQL).
    cursor = st.archive.db.cursor()

    for x in xrange(5):
        cursor.execute('insert into "%s" (key, value) values (?, ?)' % st.archive.table, (x, str(x)))

    # Reads through the stash must see the rows inserted above.
    for x in xrange(5):
        assert st[x] == str(x)

    # Ensure `KeyError` is raised on missing items
    with pytest.raises(KeyError):
        assert st[10]
def test_delete():
    st = Stash('sqlite:///:memory:?table=stash', 'lru:///?capacity=10')

    # Fill database with test data (bypassing the stash, straight into SQL).
    cursor = st.archive.db.cursor()

    for x in xrange(5):
        cursor.execute('insert into "%s" (key, value) values (?, ?)' % st.archive.table, (x, str(x)))

    # Sanity check: rows are visible through the stash.
    for x in xrange(5):
        assert st[x] == str(x)

    # Delete items
    del st[2]
    assert st.get(2) is None

    del st[4]
    assert st.get(4) is None

    # Ensure `KeyError` is raised on missing items
    with pytest.raises(KeyError):
        assert st[10]

    # Only the undeleted rows remain in the backing table.
    data = fetch(cursor, 'select key, value from "%s"' % st.archive.table)

    assert data == {
        0: '0',
        1: '1',
        3: '3'
    }
def test_len():
    st = Stash('sqlite:///:memory:?table=stash', 'lru:///?capacity=10')

    # Fill database with test data (bypassing the stash, straight into SQL).
    cursor = st.archive.db.cursor()

    for x in xrange(7):
        cursor.execute('insert into "%s" (key, value) values (?, ?)' % st.archive.table, (x, str(x)))

    # Archive length reflects the row count, not the cache contents.
    assert len(st.archive) == 7
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,557 | fuzeman/stash.py | refs/heads/master | /stash/caches/__init__.py | from stash.caches.c_memory import MemoryCache
# Public API of the caches package.
__all__ = ['MemoryCache']
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,558 | fuzeman/stash.py | refs/heads/master | /stash/archives/core/base.py | from stash.core.modules.base import MappingModule
import collections
class Archive(MappingModule):
    """Base class for persistent key/value archive backends.

    Subclasses implement the mapping dunders below; values pass through
    the owning stash's serializer on the way in and out.
    """

    __group__ = 'archive'

    @property
    def serializer(self):
        # The serializer is owned by the parent stash, resolved lazily.
        return self.stash.serializer

    def dumps(self, value):
        """Serialize `value` into its stored representation."""
        return self.serializer.dumps(value)

    def loads(self, value):
        """Deserialize a stored representation back into a value."""
        return self.serializer.loads(value)

    def save(self):
        """Flush pending changes to the underlying store."""
        raise NotImplementedError

    def delete(self, keys):
        """Delete a single key, or an iterable of keys.

        Missing keys surface as `KeyError` from `__delitem__`.
        """
        if not keys:
            return

        # BUG FIX: a str/bytes argument is a single key — it is iterable,
        # so the old check would have deleted it character by character.
        if isinstance(keys, (str, bytes)):
            keys = [keys]
        else:
            # BUG FIX: `collections.Iterable` was removed in Python 3.10;
            # use `collections.abc.Iterable` when present (py2 fallback).
            iterable_cls = getattr(collections, 'abc', collections).Iterable

            if not isinstance(keys, iterable_cls):
                keys = [keys]

        for key in keys:
            del self[key]

    def get_items(self, keys=None):
        """Return (key, value) pairs — all items, or only those in `keys`."""
        if keys is None:
            return self.iteritems()

        return [(key, self[key]) for key in keys]

    def set_items(self, items):
        """Store an iterable of (key, value) pairs."""
        for key, value in items:
            self[key] = value

    def __delitem__(self, key):
        raise NotImplementedError

    def __getitem__(self, key):
        raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def __len__(self):
        raise NotImplementedError

    def __setitem__(self, key, value):
        raise NotImplementedError
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,559 | fuzeman/stash.py | refs/heads/master | /tests/archives/apsw_tests.py | from stash import Stash, ApswArchive
from stash.lib.six.moves import xrange
from stash.lib import six
import pytest
def fetch(cursor, query):
    """Execute `query` and collapse its (key, value) rows into a dict."""
    rows = cursor.execute(query)
    return dict(rows)
def test_construct():
    # Direct construction from an archive instance.
    st = Stash(ApswArchive(':memory:', 'one'))

    assert type(st.archive) is ApswArchive
    assert st.archive.table == 'one'

    with pytest.raises(TypeError):
        # Missing 'table' parameter
        st = Stash('apsw:///')

    # URI construction, table name supplied as a query parameter.
    st = Stash('apsw:///:memory:?table=one')

    assert type(st.archive) is ApswArchive
    assert st.archive.table == 'one'
def test_set():
    st = Stash('apsw:///:memory:?table=stash', 'lru:///?capacity=10')

    for x in xrange(5):
        st[x] = str(x)

    # Writes go through the cache; `save` flushes them into the archive.
    st.save()

    # Ensure DB contains correct data
    cursor = st.archive.db.cursor()
    data = fetch(cursor, 'select key, value from "%s"' % st.archive.table)

    for x in xrange(5):
        assert str(data[x]) == str(x)
def test_set_unicode():
    st = Stash('apsw:///:memory:?table=stash', 'lru:///?capacity=10')

    # Non-ASCII code points (®, ¯, °) exercise the text encoding path.
    values = [six.u('\xae'), six.u('\xaf'), six.u('\xb0')]

    for x in xrange(len(values)):
        st[x] = values[x]

    st.save()

    # Ensure DB contains correct data; decoding differs between py2/py3.
    cursor = st.archive.db.cursor()
    data = fetch(cursor, 'select key, value from "%s"' % st.archive.table)

    for x in xrange(len(values)):
        if six.PY3:
            assert ord(six.text_type(data[x])) == ord(values[x])
        else:
            assert ord(str(data[x]).decode('unicode_internal')) == ord(values[x])
def test_set_utf8():
    st = Stash('apsw:///:memory:?table=stash', 'lru:///?capacity=10')

    # '\xc2\xae' etc. are UTF-8 byte pairs on py2 byte-strings; on py3
    # they are two-character text strings — both must round-trip as-is.
    values = ['\xc2\xae', '\xc2\xaf', '\xc2\xb0']

    for x in xrange(len(values)):
        st[x] = values[x]

    st.save()

    # Ensure DB contains correct data
    cursor = st.archive.db.cursor()
    data = fetch(cursor, 'select key, value from "%s"' % st.archive.table)

    for x in xrange(len(values)):
        assert str(data[x]) == values[x]
def test_get():
    st = Stash('apsw:///:memory:?table=stash', 'lru:///?capacity=10')

    # Fill database with test data (bypassing the stash, straight into SQL).
    cursor = st.archive.db.cursor()

    for x in xrange(5):
        cursor.execute('insert into "%s" (key, value) values (?, ?)' % st.archive.table, (x, str(x)))

    # Reads through the stash must see the rows inserted above.
    for x in xrange(5):
        assert st[x] == str(x)

    # Ensure `KeyError` is raised on missing items
    with pytest.raises(KeyError):
        assert st[10]
def test_delete():
    st = Stash('apsw:///:memory:?table=stash', 'lru:///?capacity=10')

    # Fill database with test data (bypassing the stash, straight into SQL).
    cursor = st.archive.db.cursor()

    for x in xrange(5):
        cursor.execute('insert into "%s" (key, value) values (?, ?)' % st.archive.table, (x, str(x)))

    # Sanity check: rows are visible through the stash.
    for x in xrange(5):
        assert st[x] == str(x)

    # Delete items
    del st[2]
    assert st.get(2) is None

    del st[4]
    assert st.get(4) is None

    # Ensure `KeyError` is raised on missing items
    with pytest.raises(KeyError):
        assert st[10]

    # Only the undeleted rows remain in the backing table.
    data = fetch(cursor, 'select key, value from "%s"' % st.archive.table)

    assert data == {
        0: '0',
        1: '1',
        3: '3'
    }
def test_len():
    st = Stash('apsw:///:memory:?table=stash', 'lru:///?capacity=10')

    # Fill database with test data (bypassing the stash, straight into SQL).
    cursor = st.archive.db.cursor()

    for x in xrange(7):
        cursor.execute('insert into "%s" (key, value) values (?, ?)' % st.archive.table, (x, str(x)))

    # Archive length reflects the row count, not the cache contents.
    assert len(st.archive) == 7
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,560 | fuzeman/stash.py | refs/heads/master | /stash/archives/a_apsw.py | from stash.archives.core.base import Archive
from stash.lib import six as six
from collections import Mapping
from contextlib import closing
import collections
try:
    import apsw
except ImportError:
    # `apsw` is optional at import time; `ApswArchive.__init__` raises a
    # descriptive error if it is actually needed but missing.
    apsw = None

if six.PY3:
    # Python 3 removed the `buffer` builtin. Values handed to it here are
    # already bytes-like, so a pass-through keeps the py2/py3 code paths
    # in `ApswArchive` identical.
    def buffer(value):
        return value
class ApswArchive(Archive):
__key__ = 'apsw'
def __init__(self, db, table):
super(ApswArchive, self).__init__()
if apsw is None:
raise Exception('Unable to construct apsw:// - "apsw" module is not available')
self.db = apsw.Connection(db) if type(db) is str else db
self.table = table
# Ensure table exists
with closing(self.db.cursor()) as c:
c.execute('create table if not exists "%s" (key PRIMARY KEY, value BLOB)' % self.table)
def items(self):
return list(self.iteritems())
def iteritems(self):
rows = self.select('select key, value from "%s"' % self.table)
for key, value in rows:
yield self.key_decode(key), self.loads(value)
def iterkeys(self):
rows = self.select('select key from "%s"' % self.table)
for key, in rows:
yield self.key_decode(key)
def itervalues(self):
rows = self.select('select value from "%s"' % self.table)
for value, in rows:
yield self.loads(value)
def keys(self):
return list(self.iterkeys())
def save(self):
pass
def select(self, sql, parameters=None):
if parameters is None:
parameters = ()
with closing(self.db.cursor()) as c:
return list(c.execute(sql, parameters))
def select_one(self, sql, parameters=None):
rows = self.select(sql, parameters)
if not rows:
return None
return rows[0]
def delete(self, keys):
if not keys:
return
if not isinstance(keys, collections.Iterable):
keys = [keys]
# Start transaction
with self.db:
# Create cursor
with closing(self.db.cursor()) as c:
# Delete `keys`
c.executemany(self._query_delete(), [
self.key_encode(key)
for key in keys
])
def get_items(self, keys=None):
if keys:
# encode keys
keys = [
self.key_encode(key)
for key in keys
]
rows = self.select('select key,value from "%s" where key in ?' % self.table, (keys,))
else:
rows = self.select('select key,value from "%s"' % self.table)
for key, value in rows:
yield self.key_decode(key), self.loads(value)
def set_items(self, items):
# Start transaction
with self.db:
# Create cursor
with closing(self.db.cursor()) as c:
# Insert `items`
c.executemany(self._query_upsert(), [
(self.key_encode(key), buffer(self.dumps(value)))
for key, value in items
])
def update(self, *args, **kwds):
if args:
other = args[0]
if isinstance(other, Mapping):
if six.PY3:
self.set_items(other.items())
else:
self.set_items(other.iteritems())
elif hasattr(other, "keys"):
self.set_items([
(key, other[key])
for key in other.keys()
])
else:
self.set_items([
(key, value)
for key, value in other
])
if six.PY3:
self.set_items(kwds.items())
else:
self.set_items(kwds.iteritems())
def __delitem__(self, key):
key = self.key_encode(key)
with closing(self.db.cursor()) as c:
result = c.execute('delete from "%s" where key=?' % self.table, (key, ))
rows = list(result)
success = len(rows) > 0
if not success:
raise KeyError(key)
def __getitem__(self, key):
key = self.key_encode(key)
row = self.select_one('select value from "%s" where key=?' % self.table, (key, ))
if not row:
raise KeyError(key)
return self.loads(row[0])
def __iter__(self):
    # dict-style iteration: yield keys, not (key, value) pairs
    return self.iterkeys()
def __len__(self):
    """Return the number of stored rows.

    `select count(*)` always yields exactly one row, so the None fallback
    is effectively defensive dead code.
    """
    row = self.select_one('select count(*) from "%s"' % self.table)
    return row[0] if row else None
def __setitem__(self, key, value):
    """Store serialized `value` under `key` (insert-or-update)."""
    key = self.key_encode(key)
    value = self.dumps(value)
    # FIX: `buffer` exists only on Python 2; `memoryview` is the Python 3
    # equivalent for binding serialized bytes as a BLOB.
    try:
        blob = buffer  # noqa: F821 - Python 2
    except NameError:
        blob = memoryview  # Python 3
    with closing(self.db.cursor()) as c:
        # update-then-insert-or-ignore emulates an upsert
        c.execute('update "%s" set value=? WHERE key=?' % self.table, (blob(value), key))
        c.execute('insert or ignore into "%s" values(?,?)' % self.table, (key, blob(value)))
def _query_delete(self):
    # Parameterised DELETE for a single key in this archive's table
    return 'delete from "%s" where key=?' % self.table
def _query_upsert(self):
    # SQLite "insert or replace" upsert for a (key, value) pair
    return 'insert or replace into "%s" values(?,?)' % self.table
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,561 | fuzeman/stash.py | refs/heads/master | /setup.py | from stash import __version__
from setuptools import setup, find_packages
# Package metadata for PyPI distribution of stash.py
setup(
    name='stash.py',
    version=__version__,  # single-sourced from stash/__init__.py
    license='MIT',
    url='https://github.com/fuzeman/stash.py',
    author='Dean Gardiner',
    author_email='me@dgardiner.net',
    description='Dictionary-style storage interface with a modular interface for algorithms, archives, caches and serializers',
    # Ship only the library; keep examples and the test suite out of the dist
    packages=find_packages(exclude=[
        'examples',
        'tests',
        'tests.*'
    ]),
    platforms='any',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Topic :: Database :: Front-Ends'
    ],
)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,562 | fuzeman/stash.py | refs/heads/master | /tests/serializers/jsonpickle_tests.py | from stash import JsonPickleSerializer
def test_basic():
    """Encoding primitives produces their plain JSON representation."""
    serializer = JsonPickleSerializer()
    for value, expected in ((1234, '1234'), ('1234', '"1234"')):
        assert serializer.dumps(value) == expected
def test_loads():
    """Decoding restores primitives from their JSON representation."""
    serializer = JsonPickleSerializer()
    for encoded, expected in (('1234', 1234), ('"1234"', '1234')):
        assert serializer.loads(encoded) == expected
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,563 | fuzeman/stash.py | refs/heads/master | /stash/archives/a_sqlite.py | from stash.archives.core.base import Archive
from contextlib import closing
try:
import sqlite3
except ImportError:
sqlite3 = None
class SqliteArchive(Archive):
    """Archive stored in a sqlite3 table with (key PRIMARY KEY, value) columns."""

    __key__ = 'sqlite'

    def __init__(self, db, table):
        """`db` is either an open sqlite3 connection or a database path string."""
        super(SqliteArchive, self).__init__()
        if sqlite3 is None:
            raise Exception('Unable to construct sqlite:// - "sqlite3" module is not available')
        # FIX: isinstance() also accepts str subclasses, unlike `type(db) is str`
        self.db = sqlite3.connect(db) if isinstance(db, str) else db
        self.table = table
        # Ensure table exists
        with closing(self.db.cursor()) as c:
            c.execute('create table if not exists "%s" (key PRIMARY KEY, value)' % self.table)
            self.db.commit()

    def iterkeys(self):
        """Yield every stored key, decoded."""
        rows = self.select('select key from "%s"' % self.table)
        for row in rows:
            yield self.key_decode(row[0])

    def keys(self):
        """Return all decoded keys as a list."""
        return list(self.iterkeys())

    def save(self):
        """No-op: every mutator commits immediately."""
        pass

    def select(self, sql, parameters=None):
        """Execute `sql` with bound `parameters` and return all rows."""
        if parameters is None:
            parameters = ()
        with closing(self.db.cursor()) as c:
            return list(c.execute(sql, parameters))

    def select_one(self, sql, parameters=None):
        """Execute `sql` and return the first row, or None when empty."""
        rows = self.select(sql, parameters)
        if not rows:
            return None
        return rows[0]

    def __delitem__(self, key):
        """Delete `key`, raising KeyError when it was not present."""
        key = self.key_encode(key)
        with closing(self.db.cursor()) as c:
            result = c.execute('delete from "%s" where key=?' % self.table, (key, ))
            # rowcount reflects affected rows for DELETE under sqlite3
            success = result.rowcount > 0
            self.db.commit()
        if not success:
            raise KeyError(key)

    def __getitem__(self, key):
        """Return the deserialized value stored under `key`; KeyError if absent."""
        key = self.key_encode(key)
        row = self.select_one('select value from "%s" where key=?' % self.table, (key, ))
        if not row:
            raise KeyError(key)
        return self.loads(row[0])

    def __iter__(self):
        # dict-style iteration: yield keys
        return self.iterkeys()

    def __len__(self):
        """Return the row count (count(*) always yields exactly one row)."""
        row = self.select_one('select count(*) from "%s"' % self.table)
        if not row:
            return None
        return row[0]

    def __setitem__(self, key, value):
        """Store serialized `value` under `key` (insert-or-update)."""
        key = self.key_encode(key)
        value = self.dumps(value)
        with closing(self.db.cursor()) as c:
            # update-then-insert-or-ignore emulates an upsert
            c.execute('update "%s" set value=? WHERE key=?' % self.table, (value, key))
            c.execute('insert or ignore into "%s" values(?,?)' % self.table, (key, value))
            self.db.commit()
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,564 | fuzeman/stash.py | refs/heads/master | /stash/algorithms/core/prime_context.py | from stash.lib.six.moves import _thread
class PrimeContext(object):
    """Context manager installing a per-thread prime buffer on an algorithm.

    On entry the buffer is registered under the current thread id in the
    algorithm's `_buffers` map; on exit it is removed again. With no
    algorithm bound, both entry and exit are no-ops.
    """

    def __init__(self, algorithm=None, buffer=None):
        self._algorithm = algorithm
        self._buffer = buffer

    @property
    def buffer(self):
        """The buffer this context installs."""
        return self._buffer

    def __enter__(self):
        algorithm = self._algorithm
        if algorithm is not None:
            algorithm._buffers[_thread.get_ident()] = self._buffer

    def __exit__(self, exc_type, exc_val, exc_tb):
        algorithm = self._algorithm
        if algorithm is not None:
            # pop() tolerates a missing entry, matching the old try/del/except
            algorithm._buffers.pop(_thread.get_ident(), None)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,565 | fuzeman/stash.py | refs/heads/master | /stash/serializers/__init__.py | from stash.serializers.s_jsonpickle import JsonPickleSerializer
from stash.serializers.s_msgpack import MessagePackSerializer
from stash.serializers.s_none import NoneSerializer
from stash.serializers.s_pickle import PickleSerializer
__all__ = [
'JsonPickleSerializer',
'MessagePackSerializer',
'NoneSerializer',
'PickleSerializer'
]
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,566 | fuzeman/stash.py | refs/heads/master | /stash/core/modules/base.py | from stash.core.modules.manager import ModuleManager
from stash.lib import six
from abc import ABCMeta
from collections import MutableMapping
class ModuleMeta(type):
    """Metaclass that auto-registers every concrete Module subclass."""
    def __init__(cls, *args, **kwargs):
        # Classes defined in a "*.base" module are abstract bases;
        # only concrete implementations get registered with the manager.
        if not cls.__module__.endswith('.base'):
            ModuleManager.register(cls)
        super(ModuleMeta, cls).__init__(*args, **kwargs)
class Module(six.with_metaclass(ModuleMeta)):
    """Base class for stash plug-ins; subclasses set __group__ and __key__."""

    __group__ = None
    __key__ = None

    def __init__(self):
        # Bound to the owning stash by ModuleManager.construct()
        self.stash = None

    @property
    def key_encode(self):
        """Encoder half of the owning stash's (encode, decode) key transform."""
        return self.stash.key_transform[0]

    @property
    def key_decode(self):
        """Decoder half of the owning stash's (encode, decode) key transform."""
        return self.stash.key_transform[1]
class MappingMeta(ModuleMeta, ABCMeta):
    # Combined metaclass so MappingModule can be both an auto-registered
    # Module (ModuleMeta) and an abstract MutableMapping subclass (ABCMeta)
    pass
class MappingModule(six.with_metaclass(MappingMeta, Module, MutableMapping)):
    # Module that also exposes the full dict-like MutableMapping interface
    pass
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,567 | fuzeman/stash.py | refs/heads/master | /stash/core/modules/manager.py | from stash.lib import six
from stash.lib.six.moves.urllib import parse as urlparse
import inspect
import logging
log = logging.getLogger(__name__)
class ModuleManager(object):
    """Registry and factory for stash modules (algorithms, archives, ...).

    Module classes self-register via ModuleMeta; `construct` turns a URI
    string, a class, or an existing instance into a ready-to-use module.
    """

    # {group: {key: module_class}}
    modules = {}

    @classmethod
    def construct(cls, stash, group, value):
        """Build a module from a URI string, a class, or an instance.

        Returns None when `value` is a URI that cannot be resolved.
        """
        if isinstance(value, six.string_types):
            obj = cls.from_uri(group, value)
        elif inspect.isclass(value):
            obj = value()
        else:
            obj = value
        if obj is None:
            return None
        obj.stash = stash
        return obj

    @classmethod
    def from_uri(cls, group, uri):
        """Instantiate the module registered for `uri`'s scheme, or None."""
        if group not in cls.modules:
            return None
        # Retrieve scheme from URI
        scheme = cls.get_scheme(uri)
        if not scheme:
            return None
        # Ensure scheme is registered so urlparse splits netloc/path correctly
        cls.register_scheme(scheme)
        # Parse URI
        result = urlparse.urlparse(uri)
        key = result.scheme
        if key not in cls.modules[group]:
            return None
        module = cls.modules[group][key]
        # Parse `path` -> positional arguments
        args = []
        path = result.path.lstrip('/')
        if path:
            # FIX: reuse the already-stripped value instead of recomputing it
            args.append(path)
        # Parse `query` -> keyword arguments
        kwargs = dict(urlparse.parse_qsl(result.query))
        # Construct module
        return module(*args, **kwargs)

    @classmethod
    def get_scheme(cls, uri):
        """Return the scheme portion of `uri` ("x://..." -> "x"), or None."""
        pos = uri.find('://')
        if pos < 0:
            return None
        return uri[:pos]

    @classmethod
    def register(cls, module):
        """Register a module class under its `__group__`/`__key__` pair."""
        group = module.__group__
        key = module.__key__
        if not group or not key:
            # FIX: Logger.warn is deprecated in favour of Logger.warning
            log.warning('Unable to register: %r - missing a "__group__" or "__key__" attribute', module)
            return
        if group not in cls.modules:
            cls.modules[group] = {}
        if key in cls.modules[group]:
            log.warning('Unable to register: %r - already registered', module)
            return
        cls.modules[group][key] = module

    @classmethod
    def register_scheme(cls, scheme):
        """Add `scheme` to every urlparse `uses_*` list that lacks it."""
        for method in filter(lambda s: s.startswith('uses_'), dir(urlparse)):
            schemes = getattr(urlparse, method)
            if scheme in schemes:
                continue
            schemes.append(scheme)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,568 | fuzeman/stash.py | refs/heads/master | /stash/algorithms/__init__.py | from stash.algorithms.lru import LruAlgorithm
__all__ = ['LruAlgorithm']
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,569 | fuzeman/stash.py | refs/heads/master | /stash/serializers/s_jsonpickle.py | from stash.serializers.core.base import Serializer
try:
import jsonpickle
except ImportError:
jsonpickle = None
class JsonPickleSerializer(Serializer):
    """Serializer that round-trips objects through the optional `jsonpickle` library."""

    __key__ = 'jsonpickle'

    def dumps(self, value):
        """Encode `value` to a jsonpickle string."""
        self._ensure_available()
        return jsonpickle.encode(value)

    def loads(self, value):
        """Decode a jsonpickle string back into an object."""
        self._ensure_available()
        return jsonpickle.decode(value)

    @staticmethod
    def _ensure_available():
        # jsonpickle is an optional dependency; fail loudly only when used
        if jsonpickle is None:
            raise Exception('"jsonpickle" library is not available')
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,570 | fuzeman/stash.py | refs/heads/master | /tests/serializers/pickle_tests.py | from stash import PickleSerializer
from stash.lib import six
#
# Pickle v0 (human-readable ASCII protocol)
#
def test_0_basic():
    """Round-trip int and native-string values."""
    serializer = PickleSerializer(protocol=0)
    assert serializer.loads(serializer.dumps(1234)) == 1234
    assert serializer.loads(serializer.dumps('1234')) == '1234'
def test_0_unicode():
    """Round-trip non-ASCII unicode values."""
    serializer = PickleSerializer(protocol=0)
    assert serializer.loads(serializer.dumps(six.u('\xee'))) == six.u('\xee')
    assert serializer.loads(serializer.dumps(six.u('\xae'))) == six.u('\xae')
def test_0_utf8():
    """Round-trip UTF-8 byte sequences.

    NOTE(review): on Python 2 these literals are byte strings; on Python 3
    they are 4-char unicode strings — confirm which runtime is targeted.
    """
    serializer = PickleSerializer(protocol=0)
    assert serializer.loads(serializer.dumps('\xc3\xae')) == '\xc3\xae'
    assert serializer.loads(serializer.dumps('\xc2\xae')) == '\xc2\xae'
def test_0_escape():
    """Round-trip a value containing a literal backslash."""
    serializer = PickleSerializer(protocol=0)
    assert serializer.loads(serializer.dumps('\\use')) == '\\use'
#
# Pickle v1 (binary protocol)
#
def test_1_basic():
    """Round-trip int and native-string values."""
    serializer = PickleSerializer(protocol=1)
    assert serializer.loads(serializer.dumps(1234)) == 1234
    assert serializer.loads(serializer.dumps('1234')) == '1234'
def test_1_unicode():
    """Round-trip non-ASCII unicode values."""
    serializer = PickleSerializer(protocol=1)
    assert serializer.loads(serializer.dumps(six.u('\xee'))) == six.u('\xee')
    assert serializer.loads(serializer.dumps(six.u('\xae'))) == six.u('\xae')
def test_1_utf8():
    """Round-trip UTF-8 byte sequences (see NOTE on the v0 variant)."""
    serializer = PickleSerializer(protocol=1)
    assert serializer.loads(serializer.dumps('\xc3\xae')) == '\xc3\xae'
    assert serializer.loads(serializer.dumps('\xc2\xae')) == '\xc2\xae'
def test_1_escape():
    """Round-trip a value containing a literal backslash."""
    serializer = PickleSerializer(protocol=1)
    assert serializer.loads(serializer.dumps('\\use')) == '\\use'
#
# Pickle v2 (new-style-class binary protocol)
#
def test_2_basic():
    """Round-trip int and native-string values."""
    serializer = PickleSerializer(protocol=2)
    assert serializer.loads(serializer.dumps(1234)) == 1234
    assert serializer.loads(serializer.dumps('1234')) == '1234'
def test_2_unicode():
    """Round-trip non-ASCII unicode values."""
    serializer = PickleSerializer(protocol=2)
    assert serializer.loads(serializer.dumps(six.u('\xee'))) == six.u('\xee')
    assert serializer.loads(serializer.dumps(six.u('\xae'))) == six.u('\xae')
def test_2_utf8():
    """Round-trip UTF-8 byte sequences (see NOTE on the v0 variant)."""
    serializer = PickleSerializer(protocol=2)
    assert serializer.loads(serializer.dumps('\xc3\xae')) == '\xc3\xae'
    assert serializer.loads(serializer.dumps('\xc2\xae')) == '\xc2\xae'
def test_2_escape():
    """Round-trip a value containing a literal backslash."""
    serializer = PickleSerializer(protocol=2)
    assert serializer.loads(serializer.dumps('\\use')) == '\\use'
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,571 | fuzeman/stash.py | refs/heads/master | /stash/serializers/s_msgpack.py | from stash.serializers.core.base import Serializer
import stash.lib.six as six
try:
import msgpack
except ImportError:
msgpack = None
class MessagePackSerializer(Serializer):
    """Serializer backed by the `msgpack` library.

    Packed bytes are decoded via `raw_unicode_escape` and re-encoded as
    UTF-8 so the payload can round-trip through text-oriented archive
    backends (same scheme as `PickleSerializer`).
    """

    __key__ = 'msgpack'

    def dumps(self, value):
        """Pack `value` with msgpack and return a UTF-8 encoded payload."""
        if msgpack is None:
            raise Exception('"msgpack" library is not available')

        # Dump object
        value = msgpack.dumps(value)

        # Build unicode string from the packed bytes
        value = six.text_type(value, 'raw_unicode_escape')

        # Return UTF-8 string
        return value.encode('utf-8')

    def loads(self, value):
        """Unpack a payload previously produced by `dumps`."""
        if msgpack is None:
            raise Exception('"msgpack" library is not available')

        # Convert `buffer`/bytes -> unicode string. On Python 3, bytes have
        # no implicit str conversion, so decode directly (consistent with
        # PickleSerializer.loads; the old `str(value).decode('utf-8')`
        # raised AttributeError on Python 3).
        if six.PY3:
            value = value.decode('utf-8')
        else:
            value = str(value).decode('utf-8')

        value = value.encode('raw_unicode_escape')

        # Return decoded object
        return msgpack.loads(value)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
def to_integer(value, default=None):
    """Coerce `value` to `int`, returning `default` when conversion fails.

    :param value: anything accepted by `int()` (number or numeric string)
    :param default: value returned when `value` cannot be converted
    :return: `int(value)`, or `default` on failure
    """
    try:
        return int(value)
    except (TypeError, ValueError):
        # Only swallow conversion failures; a bare `except:` would also
        # hide KeyboardInterrupt/SystemExit and genuine bugs.
        return default
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,573 | fuzeman/stash.py | refs/heads/master | /stash/archives/__init__.py | from stash.archives.a_apsw import ApswArchive
from stash.archives.a_memory import MemoryArchive
from stash.archives.a_sqlite import SqliteArchive
__all__ = ['ApswArchive', 'MemoryArchive', 'SqliteArchive']
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,574 | fuzeman/stash.py | refs/heads/master | /stash/__init__.py | from stash.algorithms import *
from stash.archives import *
from stash.caches import *
from stash.serializers import *
from stash.main import Stash
__version__ = '1.2.0'
__all__ = [
'Stash',
'LruAlgorithm',
'ApswArchive', 'MemoryArchive', 'SqliteArchive',
'MemoryCache',
'JsonPickleSerializer', 'NoneSerializer', 'PickleSerializer'
]
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,575 | fuzeman/stash.py | refs/heads/master | /stash/algorithms/core/base.py | from stash.core.modules.base import Module
import collections
class Algorithm(Module):
    """Base class for cache algorithms.

    An algorithm mediates between the parent stash's fast `cache` and its
    persistent `archive`: reads fall back from cache to archive, writes go
    to the cache, and deletions are applied to both stores.
    """

    __group__ = 'algorithm'

    @property
    def archive(self):
        # Persistent store owned by the parent stash
        return self.stash.archive

    @property
    def cache(self):
        # Fast (typically in-memory) store owned by the parent stash
        return self.stash.cache

    def compact(self, force=False):
        """Release items from the cache; implemented by subclasses."""
        raise NotImplementedError

    def delete(self, keys):
        """Best-effort delete of `keys` from both `cache` and `archive`.

        :param keys: a single key, or an iterable of keys. Missing keys
                     are ignored (unlike `del algorithm[key]`, which raises).
        """
        if not keys:
            return

        if isinstance(keys, str) or not self._is_iterable(keys):
            # Single key. Strings are wrapped explicitly: on Python 3 a
            # plain string is iterable and would otherwise be deleted
            # character-by-character.
            keys = [keys]

        for key in keys:
            self._discard(key)

    def prime(self, keys=None, force=False):
        """Pre-load `keys` from the archive; implemented by subclasses."""
        raise NotImplementedError

    def __delitem__(self, key):
        """Delete `key` from both stores, raising KeyError if found in neither."""
        if not self._discard(key):
            # Couldn't find `key` in `archive` or `cache`
            raise KeyError(key)

    def __getitem__(self, key):
        """Return the value for `key`, loading it into the cache on a miss."""
        try:
            return self.cache[key]
        except KeyError:
            # Load item into `cache`
            self.cache[key] = self.archive[key]

        return self.cache[key]

    def __setitem__(self, key, value):
        # Writes always land in the cache first
        self.cache[key] = value

    def _discard(self, key):
        """Remove `key` from both stores; return True if it existed in either."""
        found = False

        try:
            # Delete `key` from `archive`
            del self.archive[key]
            found = True
        except KeyError:
            pass

        try:
            # Delete `key` from `cache`
            del self.cache[key]
            found = True
        except KeyError:
            pass

        return found

    @staticmethod
    def _is_iterable(value):
        """`isinstance(value, Iterable)` that works on Python 2 and 3.10+.

        `collections.Iterable` was removed in Python 3.10; the ABC now
        lives in `collections.abc`.
        """
        try:
            from collections.abc import Iterable
        except ImportError:  # Python 2
            from collections import Iterable

        return isinstance(value, Iterable)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,576 | fuzeman/stash.py | refs/heads/master | /stash/algorithms/lru.py | from stash.algorithms.core.base import Algorithm
from stash.algorithms.core.prime_context import PrimeContext
from stash.core.helpers import to_integer
from stash.lib.six.moves import xrange, _thread
try:
from llist import dllist
except ImportError:
try:
from pyllist import dllist
except ImportError:
dllist = None
import collections
import logging
log = logging.getLogger(__name__)
class LruAlgorithm(Algorithm):
    """Least-recently-used cache algorithm.

    Tracks cached keys in a doubly-linked `queue` (most recently used at
    the left); `nodes` maps each key to its dllist node. On compaction the
    oldest items are moved from the cache into the archive.
    """

    __key__ = 'lru'

    def __init__(self, capacity=100, compact='auto', compact_threshold=200):
        """
        :param capacity: item count to shrink the cache to when compacting
                         (values may arrive as strings when parsed from a URI)
        :param compact: 'auto' to compact automatically on insert;
                        any other value means compaction is manual
        :param compact_threshold: item count above which an un-forced
                                  compaction actually runs
        """
        super(LruAlgorithm, self).__init__()
        if dllist is None:
            raise Exception('Unable to construct lru:// - "llist" and "pyllist" modules are not available')
        self.capacity = to_integer(capacity, 100)
        self.compact_mode = compact
        self.compact_threshold = to_integer(compact_threshold, 200)
        # `queue` holds keys, most recently used first; `nodes` maps
        # key -> dllist node for O(1) removal
        self.queue = dllist()
        self.nodes = {}
        # Per-thread prime buffers (populated by `prime()` / PrimeContext)
        self._buffers = {}
    def __delitem__(self, key):
        """Remove `key` from the LRU bookkeeping, then from cache/archive."""
        try:
            node = self.nodes.pop(key)
            # Remove `node` from `queue`
            self.queue.remove(node)
        except KeyError:
            pass
        # Remove `key` from `cache` and `archive`
        return super(LruAlgorithm, self).__delitem__(key)
    def __getitem__(self, key):
        """Look up `key` in the current thread's prime buffer, then the
        cache, then (loading it) the archive."""
        # Try retrieve value from `prime_buffer`
        try:
            buffer = self._buffers.get(_thread.get_ident())
            if buffer is not None:
                return buffer[key]
        except KeyError:
            pass
        # Try retrieve value from `cache`
        try:
            value = self.cache[key]
            # Ensure node for `key` exists
            self.create(key)
            return value
        except KeyError:
            pass
        # Try load `key` from `archive`
        return self.load(key)
    def __setitem__(self, key, value):
        """Store `value` in the cache and mark `key` as most recently used."""
        # Store `value` in cache
        self.cache[key] = value
        # Create node for `key`
        self.create(key)
    def compact(self, force=False):
        """Move least-recently-used items to the archive until only
        `capacity` remain. Un-forced calls are no-ops until the item count
        exceeds `compact_threshold`."""
        count = len(self.nodes)
        if count <= self.capacity:
            return
        if not force and count <= self.compact_threshold:
            return
        self.release_items(count - self.capacity)
    def delete(self, keys):
        """Delete `keys` (single key or iterable) from the LRU bookkeeping,
        cache and archive.

        NOTE(review): a plain string is Iterable on Python 3 and would be
        iterated character-by-character here — confirm callers only pass
        lists (see Algorithm.delete).
        """
        if not keys:
            return
        if not isinstance(keys, collections.Iterable):
            keys = [keys]
        for key in keys:
            try:
                node = self.nodes.pop(key)
                # Remove `node` from `queue`
                self.queue.remove(node)
            except KeyError:
                pass
        # Remove keys from `cache` and `archive`
        return super(LruAlgorithm, self).delete(keys)
    def release(self, key=None):
        """Move one item (the LRU item if `key` is None) from cache to archive.

        NOTE(review): when `key` is given explicitly its node is *not*
        removed from `queue`, leaving a stale entry — confirm intended.
        """
        if key is None:
            key = self.queue.popright()
        # Move item to archive
        self.archive[key] = self.cache.pop(key)
        # Remove from `nodes`
        del self.nodes[key]
    def release_items(self, count=None, keys=None):
        """Bulk-move items from cache to archive: either the `count` least
        recently used, or an explicit list of `keys` (exactly one of the
        two must be given, else ValueError)."""
        if count is not None:
            def iterator():
                for x in xrange(count):
                    # Pop next item from `queue`
                    key = self.queue.popright()
                    # Delete from `nodes`
                    del self.nodes[key]
                    # Yield item
                    yield key, self.cache.pop(key)
        elif keys is not None:
            def iterator():
                for key in keys:
                    # Remove from `queue
                    # NOTE(review): dllist.remove() expects a node, not a raw
                    # key — verify against the llist documentation
                    self.queue.remove(key)
                    # Delete from `nodes`
                    del self.nodes[key]
                    # Yield item
                    yield key, self.cache.pop(key)
        else:
            raise ValueError()
        self.archive.set_items(iterator())
        return True
    def prime(self, keys=None, force=False):
        """Load `keys` (or everything, when None) from the archive into a
        per-thread buffer; returns a PrimeContext managing that buffer.

        NOTE(review): the `force` parameter is accepted but unused here.
        """
        if keys is not None:
            # Filter keys to ensure we only load ones that don't exist
            keys = [
                key for key in keys
                if key not in self.cache
            ]
        # Iterate over archive items
        items = self.archive.get_items(keys)
        buffer = {}
        context = PrimeContext(self, buffer)
        for key, value in items:
            # Store `value` in cache
            buffer[key] = value
        return context
    def create(self, key, compact=True):
        """Ensure `key` has a queue node, marking it most recently used."""
        if key in self.nodes:
            # Move node to the front of `queue`
            self.touch(key)
            return
        # Store node in `queue`
        self.nodes[key] = self.queue.appendleft(key)
        # Compact `cache` (if enabled)
        if compact and self.compact_mode == 'auto':
            self.compact()
    def load(self, key):
        """Fetch `key` from the archive into the cache and return it."""
        # Load `key` from `archive`
        self[key] = self.archive[key]
        return self.cache[key]
    def touch(self, key):
        """Move `key`'s node to the front of `queue` (most recently used)."""
        node = self.nodes[key]
        # Remove `node` from `queue`
        self.queue.remove(node)
        # Append `node` to the start of `queue`
        self.nodes[key] = self.queue.appendleft(node)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,577 | fuzeman/stash.py | refs/heads/master | /stash/serializers/s_none.py | from stash.serializers.core.base import Serializer
class NoneSerializer(Serializer):
    """Pass-through serializer: values are stored exactly as given."""

    __key__ = 'none'

    def dumps(self, value):
        """Return `value` untouched."""
        return value

    def loads(self, value):
        """Return `value` untouched."""
        return value
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,578 | fuzeman/stash.py | refs/heads/master | /stash/main.py | from stash.core.modules.manager import ModuleManager
from collections import MutableMapping
class Stash(MutableMapping):
    """Dictionary-like store combining a cache, a persistent archive, a
    cache algorithm and a value serializer.

    Each component is built by `ModuleManager.construct` from either a
    URI-style descriptor string (e.g. 'lru:///?capacity=5') or an already
    constructed module instance.
    """

    def __init__(self, archive, algorithm='lru:///', serializer='none:///', cache='memory:///', key_transform=None):
        """
        :param archive: persistent archive descriptor (required)
        :param algorithm: cache algorithm descriptor, default LRU
        :param serializer: value serializer descriptor, default pass-through
        :param cache: cache descriptor, default in-memory
        :param key_transform: optional (encode, decode) pair of callables
                              applied to keys; identity functions by default
        """
        # Construct modules
        self.archive = ModuleManager.construct(self, 'archive', archive)
        self.algorithm = ModuleManager.construct(self, 'algorithm', algorithm)
        self.serializer = ModuleManager.construct(self, 'serializer', serializer)
        self.cache = ModuleManager.construct(self, 'cache', cache)
        self.key_transform = key_transform or (lambda key: key, lambda key: key)
    def compact(self, force=False):
        """Compact the cache (delegates to the algorithm)."""
        return self.algorithm.compact(force=force)
    def delete(self, keys):
        """Delete `keys` from cache and archive (delegates to the algorithm)."""
        return self.algorithm.delete(keys)
    def flush(self):
        """Push everything currently in the cache into the archive."""
        # Update `archive` with the items in `cache`
        self.archive.update(self.cache)
    def items(self):
        # Flush first so the archive reflects the full item set
        self.flush()
        return self.archive.items()
    def iteritems(self):
        self.flush()
        return self.archive.iteritems()
    def iterkeys(self):
        self.flush()
        return self.archive.iterkeys()
    def itervalues(self):
        self.flush()
        return self.archive.itervalues()
    def prime(self, keys=None, force=False):
        """Prime cache with `keys` from archive.
        :param keys: list of keys to load, or `None` to load everything
        :type keys: list of any or None
        :param force: force the loading of items (by ignoring the algorithm capacity parameter).
                      **Note:** these items will be removed on the next `compact()` call.
        :type force: bool
        """
        return self.algorithm.prime(
            keys=keys,
            force=force
        )
    def save(self):
        """Flush the cache and ensure the archive is persisted."""
        # Flush items from `cache` to `archive`
        self.flush()
        # Ensure `archive` is completely saved
        self.archive.save()
    def __delitem__(self, key):
        # Deletion routed through the algorithm (removes from both stores)
        del self.algorithm[key]
    def __getitem__(self, key):
        return self.algorithm[key]
    def __iter__(self):
        # Iterate over the archive after flushing, so nothing is missed
        self.flush()
        return iter(self.archive)
    def __len__(self):
        self.flush()
        return len(self.archive)
    def __setitem__(self, key, value):
        self.algorithm[key] = value
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,579 | fuzeman/stash.py | refs/heads/master | /examples/pickle_serializer.py | import logging
logging.basicConfig(level=logging.DEBUG)
from stash import Stash
if __name__ == '__main__':
    # NOTE: Python 2 example script (`print` statements, `xrange`).
    # Stash backed by a sqlite archive, a 5-item LRU algorithm and
    # pickle-serialized values.
    s = Stash('sqlite:///pickle_serializer.db?table=stash', 'lru:///?capacity=5', 'pickle:///')
    # Insert keys '0'..'4', re-write '0'/'1', then add '10'..'12'
    for x in xrange(5):
        s[str(x)] = x
    for x in xrange(2):
        s[str(x)] = x
    for x in xrange(10, 13):
        s[str(x)] = x
    # Push everything still in the cache out to the sqlite archive
    s.flush()
    print 'len(s.cache): %r' % len(s.cache)
    print 'len(s.archive): %r' % len(s.archive)
    # Deleting through the stash removes the key from both stores
    print "del s['1']"
    del s['1']
    print 'len(s.cache): %r' % len(s.cache)
    print 'len(s.archive): %r' % len(s.archive)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,580 | fuzeman/stash.py | refs/heads/master | /stash/serializers/core/base.py | from stash.core.modules.base import Module
class Serializer(Module):
    """Abstract base for value serializers (module group: 'serializer').

    Concrete subclasses define `__key__` and implement both `dumps` and
    `loads` so that `loads(dumps(value))` round-trips `value`.
    """

    __group__ = 'serializer'

    def dumps(self, value):
        """Encode `value` for storage. Subclasses must override."""
        raise NotImplementedError()

    def loads(self, value):
        """Decode a stored `value`. Subclasses must override."""
        raise NotImplementedError()
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,581 | fuzeman/stash.py | refs/heads/master | /stash/archives/a_memory.py | from stash.archives.core.base import Archive
class MemoryArchive(Archive):
    """In-memory (dict-backed) archive; nothing is persisted to disk."""

    __key__ = 'memory'

    def __init__(self, initial=None):
        super(MemoryArchive, self).__init__()

        # Falls back to a fresh dict when `initial` is missing (or empty)
        self.data = initial or {}

    def save(self):
        """No-op: a memory archive has nowhere to persist to."""
        pass

    def __delitem__(self, key):
        del self.data[self.key_encode(key)]

    def __getitem__(self, key):
        return self.data[self.key_encode(key)]

    def __iter__(self):
        # NOTE: yields the *encoded* keys, exactly as stored
        return iter(self.data)

    def __len__(self):
        return len(self.data)

    def __setitem__(self, key, value):
        self.data[self.key_encode(key)] = value
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,582 | fuzeman/stash.py | refs/heads/master | /stash/serializers/s_pickle.py | from stash.core.helpers import to_integer
from stash.serializers.core.base import Serializer
import stash.lib.six as six
try:
import cPickle as pickle
except ImportError:
import pickle
from stash.lib.six import BytesIO
class PickleSerializer(Serializer):
    """Serializer that stores pickled objects as UTF-8 encoded text.

    Pickle output is arbitrary bytes; it is passed through latin-1 (a
    lossless one-to-one byte<->codepoint mapping) so it can be held as a
    unicode string, then stored as UTF-8 and reconstructed exactly on load.
    """
    __key__ = 'pickle'

    def __init__(self, protocol=0):
        super(PickleSerializer, self).__init__()
        # Pickle protocol version; `to_integer` accepts string config values.
        self.protocol = to_integer(protocol)

    def dumps(self, value):
        """Pickle `value` and return it as UTF-8 encoded bytes."""
        # Dump object to raw pickle bytes
        value = pickle.dumps(value, protocol=self.protocol)
        # Build unicode string from `value` (latin-1 preserves every byte)
        value = six.text_type(value, 'latin-1')
        # Return UTF-8 string
        return value.encode('utf-8')

    def loads(self, value):
        """Inverse of dumps(): decode UTF-8, undo latin-1, then unpickle."""
        # Convert `buffer` -> UTF-8 string (py2 buffers need an explicit str())
        if six.PY3:
            value = value.decode('utf-8')
        else:
            value = str(value).decode('utf-8')
        # Build `BytesIO` object from raw unicode string
        value = BytesIO(value.encode('latin-1'))
        # Return decoded object
        return pickle.load(value)
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,583 | fuzeman/stash.py | refs/heads/master | /stash/caches/core/base.py | from stash.core.modules.base import MappingModule
import collections
class Cache(MappingModule):
    """Abstract base class for cache back-ends.

    Subclasses must implement the mapping protocol below; `delete` is the
    only shared behaviour.
    """
    __group__ = 'cache'

    def delete(self, keys):
        """Delete `keys` — a single key or an iterable of keys.

        Falsy input (None, empty iterable) is a no-op.
        NOTE(review): a plain string is itself iterable and would be deleted
        character-by-character — confirm callers never pass one.
        """
        if not keys:
            return
        # BUG FIX: `collections.Iterable` was removed in Python 3.10; use
        # `collections.abc.Iterable` when it exists (3.3+) and fall back to
        # the old alias on Python 2.
        iterable_type = getattr(collections, 'abc', collections).Iterable
        if not isinstance(keys, iterable_type):
            keys = [keys]
        for key in keys:
            del self[key]

    # --- mapping protocol: provided by concrete subclasses ---

    def __delitem__(self, key):
        raise NotImplementedError

    def __getitem__(self, key):
        raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def __len__(self):
        raise NotImplementedError

    def __setitem__(self, key, value):
        raise NotImplementedError
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,584 | fuzeman/stash.py | refs/heads/master | /tests/algorithms/lru_tests.py | from stash import Stash, LruAlgorithm, MemoryArchive, MemoryCache
from stash.lib.six.moves import xrange
def test_construct():
    """Stash accepts an algorithm class, a bare URI, and a URI with options."""
    st = Stash(MemoryArchive(), LruAlgorithm)
    assert type(st.algorithm) is LruAlgorithm
    st = Stash(MemoryArchive(), 'lru:///')
    assert type(st.algorithm) is LruAlgorithm
    st = Stash(MemoryArchive(), 'lru:///?capacity=64')
    assert type(st.algorithm) is LruAlgorithm
    # URI query parameters are forwarded to the algorithm constructor.
    assert st.algorithm.capacity == 64
def test_set():
    """Inserted items land in the cache, get LRU nodes, and queue up
    most-recently-used first."""
    st = Stash(MemoryArchive(), LruAlgorithm(10))
    # Fill with numbers: 1 - 10
    for x in xrange(1, 11):
        st[x] = str(x)
    assert sorted(st.cache) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    assert sorted(st.algorithm.nodes) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    # Queue order is most-recently-used first.
    assert list(st.algorithm.queue) == [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
def test_get():
    """Reads must hit items whether they live in the cache or the archive."""
    st = Stash(MemoryArchive({3: '3', 4: '4'}), LruAlgorithm(10), cache=MemoryCache({1: '1', 2: '2'}))
    # Ensure numbers 1 - 4 exist (1-2 from cache, 3-4 from archive)
    for x in xrange(1, 5):
        assert st[x] == str(x)
def test_delete():
    """Deletion must work for archive items, cache items, and items with
    live LRU bookkeeping."""
    st = Stash(MemoryArchive({3: '3', 4: '4'}), LruAlgorithm(10), cache=MemoryCache({1: '1', 2: '2'}))
    # Test archive deletion
    del st[3]
    assert st.get(3) is None
    # Test cache deletion
    del st[1]
    assert st.get(1) is None
    # Test deletion of LRU nodes
    assert st[2] == '2'  # reading 2 constructs its LRU node first
    del st[2]
    assert st.get(2) is None
    assert 2 not in st.algorithm.nodes
def test_touch():
    """Re-setting existing keys bumps them to the front of the LRU queue
    without changing membership."""
    st = Stash(MemoryArchive(), LruAlgorithm(10))
    # Fill with numbers: 1 - 10
    for x in xrange(1, 11):
        st[x] = str(x)
    # Bump numbers: 1 - 5
    for x in xrange(1, 6):
        st[x] = str(x)
    assert sorted(st.cache) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    assert sorted(st.algorithm.nodes) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    # Bumped keys lead the queue; the untouched keys keep relative order.
    assert list(st.algorithm.queue) == [5, 4, 3, 2, 1, 10, 9, 8, 7, 6]
def test_archive():
    """Overflowing a capacity-10 LRU and compacting evicts the least
    recently used items into the archive."""
    st = Stash(MemoryArchive(), LruAlgorithm(10))
    # Fill with numbers: 1 - 10
    for x in xrange(1, 11):
        st[x] = str(x)
    # Bump numbers: 1 - 5 (so 6-10 become least recently used)
    for x in xrange(1, 6):
        st[x] = str(x)
    # Fill with numbers: 21 - 25 (pushes the LRU over capacity)
    for x in xrange(21, 26):
        st[x] = str(x)
    # Force a compact to ensure items are archived
    st.compact(force=True)
    assert sorted(st.archive) == [6, 7, 8, 9, 10]
    assert sorted(st.cache) == [1, 2, 3, 4, 5, 21, 22, 23, 24, 25]
    assert sorted(st.algorithm.nodes) == [1, 2, 3, 4, 5, 21, 22, 23, 24, 25]
    assert list(st.algorithm.queue) == [25, 24, 23, 22, 21, 5, 4, 3, 2, 1]
| {"/stash/caches/c_memory.py": ["/stash/caches/core/base.py"], "/tests/archives/sqlite_tests.py": ["/stash/__init__.py"], "/stash/caches/__init__.py": ["/stash/caches/c_memory.py"], "/stash/archives/core/base.py": ["/stash/core/modules/base.py"], "/tests/archives/apsw_tests.py": ["/stash/__init__.py"], "/stash/archives/a_apsw.py": ["/stash/archives/core/base.py"], "/setup.py": ["/stash/__init__.py"], "/tests/serializers/jsonpickle_tests.py": ["/stash/__init__.py"], "/stash/archives/a_sqlite.py": ["/stash/archives/core/base.py"], "/stash/serializers/__init__.py": ["/stash/serializers/s_jsonpickle.py", "/stash/serializers/s_msgpack.py", "/stash/serializers/s_none.py", "/stash/serializers/s_pickle.py"], "/stash/core/modules/base.py": ["/stash/core/modules/manager.py"], "/stash/algorithms/__init__.py": ["/stash/algorithms/lru.py"], "/stash/serializers/s_jsonpickle.py": ["/stash/serializers/core/base.py"], "/tests/serializers/pickle_tests.py": ["/stash/__init__.py"], "/stash/serializers/s_msgpack.py": ["/stash/serializers/core/base.py"], "/stash/archives/__init__.py": ["/stash/archives/a_apsw.py", "/stash/archives/a_memory.py", "/stash/archives/a_sqlite.py"], "/stash/__init__.py": ["/stash/algorithms/__init__.py", "/stash/archives/__init__.py", "/stash/caches/__init__.py", "/stash/serializers/__init__.py", "/stash/main.py"], "/stash/algorithms/core/base.py": ["/stash/core/modules/base.py"], "/stash/algorithms/lru.py": ["/stash/algorithms/core/base.py", "/stash/algorithms/core/prime_context.py", "/stash/core/helpers.py"], "/stash/serializers/s_none.py": ["/stash/serializers/core/base.py"], "/stash/main.py": ["/stash/core/modules/manager.py"], "/stash/serializers/core/base.py": ["/stash/core/modules/base.py"], "/stash/archives/a_memory.py": ["/stash/archives/core/base.py"], "/stash/serializers/s_pickle.py": ["/stash/core/helpers.py", "/stash/serializers/core/base.py"], "/stash/caches/core/base.py": ["/stash/core/modules/base.py"], "/tests/algorithms/lru_tests.py": 
["/stash/__init__.py"]} |
60,618 | potehinre/snmp_poller | refs/heads/master | /config.py | # -*- coding: utf-8 -*-
#Название опрашиваемого интерфейса, если такого интерфейса у сервера нету , ставится -1
INTERFACE_NAME = "eth0"
COMMUNITY_STRING = 'public'
SNMP_PORT = 161
#Спустя какое количество времени удалять статистику в секундах.
DELETE_STAT_AFTER = 60 * 30
#Список опрашиваемых серверов
SERVER_NAMES = ("demo.snmplabs.com", "false.server.ru")
DB_SETTINGS = {"user": "root",
"password": "86kitty",
"host": "127.0.0.1",
"database": "snmp_poller"}
| {"/snmp_poller.py": ["/config.py"]} |
60,619 | potehinre/snmp_poller | refs/heads/master | /snmp_poller.py | # -*- coding:utf-8 -*-
import mysql.connector
import sys
import os
import time
from pysnmp.entity.rfc3413.oneliner import cmdgen
from pysnmp.error import PySnmpError
from config import INTERFACE_NAME, COMMUNITY_STRING, SNMP_PORT, SERVER_NAMES, DB_SETTINGS, DELETE_STAT_AFTER
# IF-MIB OIDs: per-interface in/out octet counters and the ifDescr table.
IN_OCTETS_OID = "1.3.6.1.2.1.2.2.1.10"
OUT_OCTETS_OID = "1.3.6.1.2.1.2.2.1.16"
INTERFACE_NAMES_OID = "1.3.6.1.2.1.2.2.1.2"
# Sentinel stored when a server is unreachable or lacks the interface.
SERVER_UNAVAILABLE = -1
# Transport settings (no retries, 1 second timeout)
RETRIES_COUNT = 0
TIMEOUT = 1
# Global accumulators filled by the async callbacks below:
# poll_info: server -> latest summed octet count (persisted for next run)
# if_load_info: server -> computed load in octets/second, or -1 on failure
poll_info = {}
if_load_info = {}
def datetime_to_timestamp(datetime):
    """Convert a datetime object to a POSIX timestamp (local time)."""
    time_struct = datetime.timetuple()
    return time.mktime(time_struct)
def get_last_servers_poll_info(connection):
    """Load the previous run's poll state from the last_poll_info table.

    Returns a dict mapping server_name -> {"octet_count": int,
    "time": POSIX timestamp of the previous poll}.
    """
    result = {}
    cursor = connection.cursor()
    query = "SELECT server_name,octet_count,time FROM last_poll_info"
    cursor.execute(query)
    # Renamed the row variable from `time` — it shadowed the stdlib module.
    for (server_name, octet_count, poll_time) in cursor:
        result[server_name] = {"octet_count": octet_count,
                               "time": datetime_to_timestamp(poll_time)}
    # BUG FIX: the cursor was never closed here (every sibling helper
    # closes its cursor) — a small resource leak per run.
    cursor.close()
    return result
def del_last_servers_poll_info(connection):
    """Remove every row from the last_poll_info table and commit."""
    cur = connection.cursor()
    cur.execute("DELETE FROM last_poll_info")
    connection.commit()
    cur.close()
def save_last_servers_poll_info(connection, poll_info):
    """Persist the latest octet counter for each server (one row apiece)."""
    cur = connection.cursor()
    insert_sql = "INSERT INTO last_poll_info(server_name, octet_count) VALUES(%s, %s)"
    for server, octets in poll_info.items():
        cur.execute(insert_sql, [server, octets])
    connection.commit()
    cur.close()
def save_load_stats(connection, load_stats):
    """Append one load_stats row per server with its computed interface load."""
    cur = connection.cursor()
    insert_sql = "INSERT INTO load_stats(server_name, if_load) VALUES (%s, %s)"
    for server, load in load_stats.items():
        cur.execute(insert_sql, [server, load])
    connection.commit()
    cur.close()
def delete_old_load_stats(connection):
    """Purge load_stats rows older than DELETE_STAT_AFTER seconds."""
    cur = connection.cursor()
    cur.execute("DELETE FROM load_stats WHERE TIME_TO_SEC(TIMEDIFF(NOW(),time)) > %s",
                [DELETE_STAT_AFTER])
    connection.commit()
    cur.close()
#Async callbacks
def get_interface_number(send_request_handle, error_indication, error_status, error_index,
                         var_binds, server_name):
    """pysnmp async callback: locate INTERFACE_NAME in the ifDescr walk.

    On success, queues an async GET for that interface's in/out octet
    counters (handled by get_interface_load_info). On any failure the
    server is marked unavailable in `if_load_info`. Returns True to keep
    the dispatcher processing this walk, False to stop it.
    """
    interface_number = None
    if error_indication:
        # Transport error / timeout: mark the server unreachable (-1 is
        # the same value as SERVER_UNAVAILABLE).
        if_load_info[server_name] = -1
    else:
        for var_bind_table_row in var_binds:
            for name, val in var_bind_table_row:
                if val == INTERFACE_NAME:
                    # The last sub-identifier of the ifDescr OID is the ifIndex.
                    interface_number = int(name[-1])
        if interface_number:
            try:
                cmdGen.asyncGetCmd(cmdgen.CommunityData(COMMUNITY_STRING),
                                   cmdgen.UdpTransportTarget((server_name, SNMP_PORT), retries=RETRIES_COUNT, timeout=TIMEOUT),
                                   (IN_OCTETS_OID + "." + str(interface_number), OUT_OCTETS_OID + "." + str(interface_number)),
                                   (get_interface_load_info, (server_name, interface_number)))
            except PySnmpError:
                if_load_info[server_name] = SERVER_UNAVAILABLE
                return False
        else:
            # Requested interface not present on this server.
            if_load_info[server_name] = SERVER_UNAVAILABLE
    return True
def get_interface_load_info(send_request_handle, error_indication, error_status,
                            error_index, var_binds, cb_ctx):
    """pysnmp async callback: turn in+out octet counters into a load rate.

    cb_ctx is (server_name, interface_number). The rate is
    (current octets - previous octets) / elapsed seconds, using the state
    loaded from the previous run; the raw counter is always recorded in
    `poll_info` for the next run.
    """
    server_name = cb_ctx[0]
    interface_number = cb_ctx[1]
    if error_indication:
        if_load_info[server_name] = SERVER_UNAVAILABLE
    else:
        # Total traffic = in octets + out octets from the GET response.
        sum_octets = sum(v for k, v in var_binds)
        if server_name in last_servers_poll_info:
            current_time = int(time.time())
            previous_time = last_servers_poll_info[server_name]['time']
            time_diff = current_time - previous_time
            # NOTE(review): when this is the server's first poll or
            # time_diff <= 0, no load entry is written for the server —
            # presumably intentional (no rate can be computed); confirm.
            if time_diff > 0:
                if_load_info[server_name] = float(sum_octets - last_servers_poll_info[server_name]['octet_count']) / time_diff
        poll_info[server_name] = int(sum_octets)
if __name__ == "__main__":
    try:
        connection = mysql.connector.connect(user=DB_SETTINGS["user"],
                                             password=DB_SETTINGS["password"],
                                             host=DB_SETTINGS["host"],
                                             database=DB_SETTINGS["database"])
        # Previous counters are needed to turn absolute octet counts into
        # rates; the table is then cleared for this run to repopulate.
        last_servers_poll_info = get_last_servers_poll_info(connection)
        del_last_servers_poll_info(connection)
        cmdGen = cmdgen.AsynCommandGenerator()
        # Queue one async ifDescr walk per server; results arrive through
        # the callbacks above once the dispatcher runs.
        for server_name in SERVER_NAMES:
            try:
                cmdGen.asyncNextCmd(
                    cmdgen.CommunityData(COMMUNITY_STRING),
                    cmdgen.UdpTransportTarget((server_name, SNMP_PORT), retries=RETRIES_COUNT, timeout=TIMEOUT),
                    (INTERFACE_NAMES_OID,),
                    (get_interface_number, server_name)
                )
            except PySnmpError:
                if_load_info[server_name] = SERVER_UNAVAILABLE
        # Blocks until all queued SNMP transfers complete.
        cmdGen.snmpEngine.transportDispatcher.runDispatcher()
        save_last_servers_poll_info(connection, poll_info)
        save_load_stats(connection, if_load_info)
        delete_old_load_stats(connection)
    except mysql.connector.Error as err:
        # NOTE(review): this also catches query errors from the save/delete
        # calls above, not only connection failures.
        sys.stderr.write("Can't connect to DB. errno=" + str(err.errno) + "\n")
        sys.exit(1)
    else:
        connection.close()
| {"/snmp_poller.py": ["/config.py"]} |
60,620 | FFCoder/PythonCallServer | refs/heads/master | /server.py | import json
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from requests import exceptions
from models import *
from ParseHandler import handler
# Populated from server.conf below; empty strings until the config loads.
watchFolder = ""
parseAppId = ""
parseRESTKey = ""
class MyHandler(PatternMatchingEventHandler):
    """Watchdog handler: upload each newly created .mp3 call file to Parse."""
    # Only react to mp3 files appearing in the watched folder.
    patterns = ["*.mp3"]

    def on_created(self, event):
        """Build a Call from the new file and send it; on a connection
        error, retry once after a 1 second pause (a second failure
        propagates to the observer thread).
        """
        print "New Call Found"
        filepath = event.src_path.encode('ascii', 'ignore')
        # NOTE(review): the file handle opened here is never closed.
        x = Call(open(filepath))
        try:
            handle = handler(x, parseAppId, parseRESTKey)
            print "Sending to Parse.com"
            handle.send()
        except exceptions.ConnectionError:
            print "Sending Failed: To Many Retries. Trying again after 1 second"
            time.sleep(1)
            handle = handler(x, parseAppId, parseRESTKey)
            print "Sending to Parse.com"
            handle.send()
# Load the folder to watch and the Parse credentials from server.conf.
try:
    with open("server.conf", "r") as theConfig:
        x = json.loads(theConfig.read())
        watchFolder = x["folder"]
        parseAppId = x["X-Parse-Application-Id"]
        parseRESTKey = x["X-Parse-REST-API-Key"]
except IOError:
    # NOTE(review): on failure the settings above stay empty and the
    # observer below is still scheduled on an empty path — confirm intended.
    print "Error Loading Config File"
if __name__ == '__main__':
    # Watch the configured folder and dispatch filesystem events to
    # MyHandler until interrupted with Ctrl-C.
    observer = Observer()
    observer.schedule(MyHandler(), path=watchFolder)
    observer.start()
    try:
        while True:
            time.sleep(1)  # keep the main thread alive; work happens in the observer
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
| {"/models.py": ["/utils.py"]} |
60,621 | FFCoder/PythonCallServer | refs/heads/master | /utils.py | class MCESException(Exception):
pass
| {"/models.py": ["/utils.py"]} |
60,622 | FFCoder/PythonCallServer | refs/heads/master | /models.py | from utils import MCESException
from dateutil.parser import parse
import uuid
class Unit(object):
    """This is a model to represent a [Fire,EMS,Rescue,Etc.] Unit

    Attributes:
        name: Name of the unit (e.g. "Engine 5").
        stationNumber: int containing Station Number that the unit belongs to.
        uShort: the raw short code this unit was built from.
    """
    def __init__(self, unitShortCode, name=None, stationNumber=None):
        self.name = name
        self.stationNumber = stationNumber
        self.uShort = unitShortCode
        # __validateShortCode__ raises MCESException on a bad prefix, so
        # this truthiness check only guards the happy path.
        if (self.__validateShortCode__()):
            self.__setVariables__()

    def __setVariables__(self):
        """Resolve the short code into (name, stationNumber) via a lookup table."""
        varSet = {
            "CMD": ("Command", 99),
            "ALL": ("ALL", 0),
            "E1": ("Engine 1", 1),
            "E2": ("Engine 2", 2),
            "E3": ("Engine 3", 3),
            "E4": ("Engine 4", 4),
            "E5": ("Engine 5", 5),
            "E6": ("Engine 6", 6),
            "E7": ("Engine 7", 7),
            "E8": ("Engine 8", 8),
            "E9": ("Engine 9", 9),
            "E10": ("Engine 10", 10),
            "E11": ("Engine 11", 11),
            "E12": ("Engine 12", 12),
            "E14": ("Engine 14", 14),
            "Sq1": ("Squad 1", 1),
            "SQ1": ("Squad 1", 1),
            "SQ4": ("Squad 4", 4),
            "SQ12": ("Squad 12", 12)
        }
        # NOTE(review): a code with a recognised prefix but no table entry
        # (e.g. "E13") raises KeyError here rather than MCESException —
        # confirm whether callers expect that.
        self.name = varSet[self.uShort][0]
        self.stationNumber = varSet[self.uShort][1]

    def __validateShortCode__(self):
        """Return True for a recognised prefix; raise MCESException otherwise."""
        if self.uShort.startswith(("E", "SQ", "ALL", "CMD", "Sq")):
            return True
        else:
            # (an unreachable `return False` after this raise was removed)
            raise MCESException("Unit Short Code is Invalid")
return False
class Call(object):
    """This object represents a 'Call' such as when units get paged out it is called a Call."""

    def __init__(self, mp3File):
        # Open file object of the recorded call; its .name encodes the
        # unit short code and the timestamp.
        self.mFile = mp3File
        self.__parse__()

    def __parse__(self):
        """Extract the unit, the call timestamp, and a fresh UUID from the
        file name.

        Assumed name shape (from the slicing below — TODO confirm with a
        real file): .../<unitShort>_<YYYYMMDDHH>_<MM>_<SS>...
        """
        mString = self.mFile.name
        rawArray1 = mString.split("_")
        if ("/" in rawArray1[0]):
            # The unit short code is the last path component before the
            # first underscore.
            self.unit = Unit(rawArray1[0].split("/")[-1])
        # NOTE(review): when the name contains no '/', self.unit is never
        # set and later access raises AttributeError — confirm intended.
        # Reassemble "<YYYYMMDD> <HH>:<MM>:<SS>" for dateutil to parse.
        dFormat = rawArray1[1][0:-2] + " " + rawArray1[1][-2::] + ":" + rawArray1[2] + ":" + rawArray1[3]
        self.date = parse(dFormat)
        self.uID = uuid.uuid4()
| {"/models.py": ["/utils.py"]} |
60,623 | FFCoder/PythonCallServer | refs/heads/master | /ParseHandler.py | import json,requests
class handler(object):
    """Uploads a Call's mp3 audio and metadata to the Parse.com REST API."""

    def __init__(self, call, parseAPPID, parseRESTKey):
        self.call = call
        self.appid = parseAPPID
        self.restkey = parseRESTKey

    def send(self):
        """Upload the audio file first, then create a Call record that
        references the uploaded file by the name Parse assigned it."""
        headers = {"X-Parse-Application-Id": self.appid,
                   "X-Parse-REST-API-Key": self.restkey}
        with open(self.call.mFile.name, 'rb') as payload:
            h = requests.post("https://api.parse.com/1/files/callfile.mp3", headers=headers, data=payload)
        dataload = {"unitName": self.call.unit.name, "unitStationNumber": self.call.unit.stationNumber, "calldate": {"__type": "Date", "iso": self.call.date.isoformat()}, "callFile": {"name": h.json()["name"], "__type": "File"}}
        r = requests.post("https://api.parse.com/1/classes/Call", data=json.dumps(dataload), headers=headers)
        print r, r.text
| {"/models.py": ["/utils.py"]} |
60,626 | GjergjiSh/hand-sign-detection-opencv | refs/heads/master | /FingerDistanceTracker.py | import cv2
import time
import numpy as np
import HandTrackingModule as htm
import math
# Tracks the distance between the thumb tip and index fingertip from the
# webcam, drawing markers and the current FPS on every frame.

# hand detector instance
detector = htm.HandDetector(detection_confidence=0.7)

# camera stream
camera_width, camera_height = 640, 480
cap = cv2.VideoCapture(0)
cap.set(3, camera_width)    # property 3 = frame width
# BUG FIX: property 4 is the frame height; it was previously set to
# camera_width, requesting a 640x640 capture instead of 640x480.
cap.set(4, camera_height)

# to calc fps
pTime = 0
cTime = 0

while True:
    # Grab frame
    success, img = cap.read()
    # Detect hand landmarks
    img = detector.find_hands(img)
    landmark_list = detector.find_hand_position(img, draw=False)
    if (len(landmark_list) != 0):
        # Thumb tip (landmark 4) and index fingertip (landmark 8) coordinates
        x1, y1 = landmark_list[4][1], landmark_list[4][2]
        x2, y2 = landmark_list[8][1], landmark_list[8][2]
        # Midpoint between the finger and the thumb
        cx, cy = ((x1 + x2) // 2), ((y1 + y2) // 2)
        # Circles and connecting line
        cv2.circle(img, (x1, y1), 15, (255, 0, 255), cv2.FILLED)
        cv2.circle(img, (x2, y2), 15, (255, 0, 255), cv2.FILLED)
        cv2.circle(img, (cx, cy), 10, (0, 255, 255), cv2.FILLED)
        cv2.line(img, (x1, y1), (x2, y2), (255, 0, 255), 3)
        # Highlight the midpoint when the fingers are close enough
        length = math.hypot(x2 - x1, y2 - y1)
        if length < 50:
            cv2.circle(img, (cx, cy), 10, (0, 0, 255), cv2.FILLED)
    # Calc fps (pTime starts at 0, so the first frame's value is meaningless)
    cTime = time.time()
    fps = 1 / (cTime - pTime)
    pTime = cTime
    # Draw fps
    cv2.putText(img, str(int(fps)), (10, 70),
                cv2.FONT_HERSHEY_PLAIN, 3, (0, 0, 255), 2)
    # Show image
    cv2.imshow("Img", img)
    cv2.waitKey(1)
| {"/FingerDistanceTracker.py": ["/HandTrackingModule.py"]} |
60,627 | GjergjiSh/hand-sign-detection-opencv | refs/heads/master | /HandTrackingModule.py | import cv2
import mediapipe as mp
import time
class HandDetector():
    """Thin wrapper around MediaPipe Hands: detection, optional drawing,
    and landmark lookup in pixel coordinates."""

    def __init__(self, mode=False, max_hands=2, detection_confidence=0.5, track_confdence=0.5):
        self.mode = mode  # static-image mode flag (False = video stream)
        self.max_hands = max_hands
        self.detection_confidence = detection_confidence
        self.track_confdence = track_confdence
        self.mp_hands = mp.solutions.hands
        self.hands = self.mp_hands.Hands( self.mode, self.max_hands, self.detection_confidence, self.track_confdence)
        self.mp_draw = mp.solutions.drawing_utils

    def find_hands(self, img, draw=True ):
        """Run hand detection on a BGR frame, optionally drawing the hand
        skeletons in place. Stores the raw results on self for
        find_hand_position()."""
        img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # MediaPipe expects RGB
        self.results = self.hands.process(img_rgb)
        if self.results.multi_hand_landmarks:
            for hand_lms in self.results.multi_hand_landmarks:
                if draw:
                    self.mp_draw.draw_landmarks(img, hand_lms, self.mp_hands.HAND_CONNECTIONS)
        return img

    def find_hand_position(self, img, hand_nr=0, draw=True):
        """Return [[id, x_px, y_px], ...] for hand `hand_nr`.

        Must be called after find_hands() — it reads self.results.
        """
        landmark_list = []
        if self.results.multi_hand_landmarks:
            hand = self.results.multi_hand_landmarks[hand_nr]
            for id, lm in enumerate(hand.landmark):
                h, w, c = img.shape
                # Landmarks are normalized to [0, 1]; scale to pixels.
                cx, cy = int(lm.x*w), int(lm.y*h)
                landmark_list.append([id, cx, cy])
                if draw:
                    cv2.circle(img, (cx,cy), 8, (0,0,255), cv2.FILLED)
        return landmark_list
def main():
    """Demo loop: open the webcam and overlay landmarks and FPS until killed."""
    cap = cv2.VideoCapture(0)
    pTime = 0
    cTime = 0
    detector = HandDetector()
    while True:
        success, img = cap.read()
        cTime = time.time()
        fps = 1/(cTime - pTime)
        pTime = cTime
        img = detector.find_hands(img)
        # Landmarks are drawn onto img as a side effect of this call.
        landmark_list = detector.find_hand_position(img)
        cv2.putText(img, str(int(fps)), (10, 70),
                    cv2.FONT_HERSHEY_PLAIN, 3, (0, 0, 255), 2)
        cv2.imshow("Image", img)
        cv2.waitKey(1)


if __name__ == "__main__":
    main()
| {"/FingerDistanceTracker.py": ["/HandTrackingModule.py"]} |
60,628 | ComputerScientist-01/Tile-Matching-Game | refs/heads/master | /app.py | import pygame
import game_config as gc
from pygame import display, event, image
from time import sleep
from animal import Animal
def find_index_from_xy(x, y):
    """Map a pixel coordinate to its (row, col, flat tile index) on the board."""
    row, col = y // gc.IMAGE_SIZE, x // gc.IMAGE_SIZE
    return row, col, row * gc.NUM_TILES_SIDE + col
pygame.init()  # initialize all pygame modules
display.set_caption('My Game')  # window title
screen = display.set_mode((gc.SCREEN_SIZE, gc.SCREEN_SIZE))  # square game window
matched = image.load('other_assets/matched.png')  # banner shown on a successful match
# blit -> draw one image onto another; display.flip() updates the whole screen
running = True  # main-loop flag
tiles = [Animal(i) for i in range(0, gc.NUM_TILES_TOTAL)]  # one tile per board cell
current_images_displayed = []  # indices of the (at most two) face-up tiles

while running:  # game loop
    current_events = event.get()  # drain the event queue
    for e in current_events:
        if e.type == pygame.QUIT:  # window close button
            running = False
        if e.type == pygame.KEYDOWN:
            if e.key == pygame.K_ESCAPE:
                running = False  # pressing escape also quits the game
        if e.type == pygame.MOUSEBUTTONDOWN:
            mouse_x, mouse_y = pygame.mouse.get_pos()
            # Translate the click position into a tile index.
            row, col, index = find_index_from_xy(mouse_x, mouse_y)
            if index not in current_images_displayed:
                if len(current_images_displayed) > 1:
                    # Keep only the most recent tile plus the new click, so
                    # at most two distinct tiles are face-up at a time.
                    current_images_displayed = current_images_displayed[1:] + [index]
                else:
                    current_images_displayed.append(index)
    # Display animals
    screen.fill((255, 255, 255))  # white background
    total_skipped = 0
    for i, tile in enumerate(tiles):
        # Face-up tiles show the animal; everything else shows the grey box.
        current_image = tile.image if i in current_images_displayed else tile.box
        if not tile.skip:
            screen.blit(current_image, (tile.col * gc.IMAGE_SIZE + gc.MARGIN, tile.row * gc.IMAGE_SIZE + gc.MARGIN))
        else:
            total_skipped += 1  # already matched; leave the cell blank
    display.flip()  # push the frame to the screen
    # Check for matches
    if len(current_images_displayed) == 2:
        idx1, idx2 = current_images_displayed
        if tiles[idx1].name == tiles[idx2].name:
            # Matched pair: remove both tiles from play.
            tiles[idx1].skip = True
            tiles[idx2].skip = True
            # Flash the matched banner briefly.
            sleep(0.2)
            screen.blit(matched, (0, 0))
            display.flip()
            sleep(0.5)
            current_images_displayed = []
    if total_skipped == len(tiles):
        running = False  # every pair matched: game over

print('Goodbye!')
| {"/app.py": ["/game_config.py", "/animal.py"], "/animal.py": ["/game_config.py"]} |
60,629 | ComputerScientist-01/Tile-Matching-Game | refs/heads/master | /animal.py | import random
import os
import game_config as gc
from pygame import image, transform
# How many tiles each asset currently occupies (0, 1 or 2).
# Keys are the image file names from the asset directory; all counts start at 0.
animals_count = dict((a, 0) for a in gc.ASSET_FILES)
def available_animals():
    """Return the asset names still placed on fewer than two tiles,
    i.e. the animals that have not yet been assigned a full pair."""
    return [name for name, used in animals_count.items() if used < 2]
class Animal:
    """A single board tile holding a randomly chosen animal image."""

    def __init__(self, index):
        self.index = index
        # Pick any animal that still occupies fewer than two tiles, so
        # every animal ends up on exactly one pair.
        self.name = random.choice(available_animals())
        # os.path.join builds the image path portably.
        self.image_path = os.path.join(gc.ASSET_DIR, self.name)
        # Grid position derived from the flat tile index.
        self.row = index // gc.NUM_TILES_SIDE
        self.col = index % gc.NUM_TILES_SIDE
        # Once this tile is matched, drawing skips it (removed from play).
        self.skip = False
        self.image = image.load(self.image_path)
        # Shrink the image so the margin shows on every side of the cell.
        self.image = transform.scale(self.image, (gc.IMAGE_SIZE - 2 * gc.MARGIN, gc.IMAGE_SIZE - 2 * gc.MARGIN))
        # The face-down cover: a copy of the image surface filled light grey.
        self.box = self.image.copy()
        self.box.fill((200, 200, 200))
        # Record that this animal now occupies one more tile.
        animals_count[self.name] += 1
| {"/app.py": ["/game_config.py", "/animal.py"], "/animal.py": ["/game_config.py"]} |
60,630 | ComputerScientist-01/Tile-Matching-Game | refs/heads/master | /game_config.py | import os
IMAGE_SIZE = 128
#defining the image size
SCREEN_SIZE = 512
#defining the screen size
NUM_TILES_SIDE = 4
NUM_TILES_TOTAL = 16
# total tiles in the game
MARGIN = 8
#margin between one image and another
ASSET_DIR = 'assets'
#defining the assset directory
ASSET_FILES = [x for x in os.listdir(ASSET_DIR) if x[-3:].lower() == 'png']
#list comprehension to access all the files
assert len(ASSET_FILES) == 8
#just to check wether all the 8 files of our program are there or not | {"/app.py": ["/game_config.py", "/animal.py"], "/animal.py": ["/game_config.py"]} |
60,631 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-2/libs/Harris.py | # -*- coding: utf-8 -*-
import numpy as np
from .misc import Gen_Gaussian_Filter, apply_2d_filter, calculate_gradient
class Harris_Corner_Detector(object):
    """Apply Harris corner detector to image.

    Pipeline: compute image gradients, accumulate the windowed second-moment
    matrix M per pixel, score each pixel with R = det(M) - k * trace(M)^2,
    then threshold and non-maximum-suppress the R map.

    Parameters
    ----------
    image: array-like
        Target image (2D grayscale).
    method: string, default "Sobel"
        Gradient operator name, forwarded to ``calculate_gradient``.
    neighbor: int, default 7
        Neighbor of nxn summation window.
    avg: string, 'Box' or 'Gaussian', default 'box'
        Window averaging function.
    sigma: float, default 1
        Standard deviation if Gaussian window.
    thresold: int, default 1e5
        Threshold applied to R before non-maximum suppression.
        (Parameter name kept misspelled for backward compatibility.)
    k: float, default 0.05
        Empirically determined constant.
    nonmax_window: int, default 3
        Window size for non-maimum suppression.
    """
    def __init__(self, image, method="Sobel", neighbor=7, avg="box", sigma=1,
                 thresold=1e5, k=0.05, nonmax_window=3):
        ishape = image.shape
        self.image = image
        # NOTE(review): 'image_shpae' is a typo but is referenced throughout;
        # renaming would break external readers, so it is kept.
        self.image_shpae = ishape
        self.method = method
        self.neighbor = neighbor
        self.avg = avg
        self.win_sigma = sigma
        self.thresold = thresold
        self.k = k
        self.nonmax_window = nonmax_window
        self.r_matrix = np.zeros(ishape)
        self.window_func = self._gen_window_func(avg, neighbor, sigma)

    @staticmethod
    def _gen_window_func(avg, neighbor, sigma):
        """Generate window function: Gaussian when `avg` contains 'g',
        otherwise a normalized neighbor x neighbor box filter."""
        if "g" in avg.lower():
            window_func = Gen_Gaussian_Filter(2, sigma, size=neighbor)
        else:
            window_func = np.ones((neighbor, neighbor))
            window_func = window_func / window_func.sum()
        return window_func

    @staticmethod
    def _findM(x2, y2, xy, ind, neigh, window):
        """Find the windowed second-moment (structure) matrix at pixel `ind`.

        x2, y2, xy are the precomputed Ix^2, Iy^2 and Ix*Iy images; `window`
        weights the neigh x neigh patch centred on `ind`.
        """
        x, y = ind
        n = int(neigh / 2)
        Gxx = x2[x - n:x + n + 1, y - n:y + n + 1]
        Gyy = y2[x - n:x + n + 1, y - n:y + n + 1]
        Gxy = xy[x - n:x + n + 1, y - n:y + n + 1]
        Ixx = window * Gxx
        Iyy = window * Gyy
        Ixy = window * Gxy
        M = np.array([[Ixx.sum(), Ixy.sum()],
                      [Ixy.sum(), Iyy.sum()]])
        return M

    def _corner_response(self, M):
        """Measure of corner response: R = det(M) - k * trace(M)^2.

        Parameters
        ----------
        M: array-like
            Weighted 2x2 second-moment matrix [[Ixx, Ixy], [Ixy, Iyy]].
        """
        a, b, c, d = M.reshape((1, -1))[0, :]
        # BUG FIX: for M = [[a, b], [c, d]] the determinant is a*d - b*c and
        # the trace is a + d. The previous code computed a*c - b*d and a + c,
        # which is not the Harris response.
        det = a * d - b * c
        trace = a + d
        R = det - self.k * (trace)**2
        return R

    @staticmethod
    def _calculate_gradient_old(image, method):
        """Legacy gradient computation (Prewitt/Sobel via separable masks);
        kept for reference, superseded by ``calculate_gradient``."""
        if "s" in method.lower():
            factor = 2
        else:
            factor = 1
        ishape = image.shape
        xmask = np.array([[-1, 0, 1]]) * np.array([[1], [factor], [1]])
        ymask = np.array([[1, factor, 1]]) * np.array([[-1], [0], [1]])
        Ix = np.zeros(ishape)
        Iy = np.zeros(ishape)
        Ex = apply_2d_filter(xmask, image)
        Ey = apply_2d_filter(ymask, image)
        # Zero the one-pixel border where the 3x3 masks are not fully valid.
        Ix[1:-1, 1:-1] = np.copy(Ex[1:-1, 1:-1])
        Iy[1:-1, 1:-1] = np.copy(Ey[1:-1, 1:-1])
        return Ix, Iy

    def harris_r_matrix(self):
        """Compute Harris R function over the image."""
        image = self.image
        neighbor = self.neighbor
        window = self.window_func
        ishape = self.image_shpae
        Ix, Iy = calculate_gradient(image, method=self.method)
        Ixy = Ix * Iy
        Ixx = Ix ** 2
        Iyy = Iy ** 2
        # Skip a border wide enough for the summation window to fit.
        ovrlay = int(neighbor / 2) + 1
        r_matrix = np.zeros(ishape)
        for i in np.arange(ishape[0])[ovrlay:-ovrlay]:
            for j in np.arange(ishape[1])[ovrlay:-ovrlay]:
                m = self._findM(Ixx, Iyy, Ixy, [i, j], neighbor, window)
                r = self._corner_response(m)
                r_matrix[i, j] = r
        self.r_matrix = r_matrix
        self.ovrlay = ovrlay
        return self

    def nonmax_Supression_old(self):
        """Legacy non-maximum suppression: in each sliding window, zero out
        every value except the local maximum (loops per pixel; superseded
        by the vectorized nonmax_Supression)."""
        threshold = self.thresold
        r_matrix = self.r_matrix
        ovrlay = self.ovrlay
        ishape = self.image_shpae
        win = self.nonmax_window
        R = r_matrix * (r_matrix > threshold)
        offset = int(win / 2)
        ovrlay += offset
        for i in np.arange(ishape[0])[ovrlay:- ovrlay]:
            for j in np.arange(ishape[1])[ovrlay:- ovrlay]:
                local_matrix = R[i - offset:i + offset + 1,
                                 j - offset:j + offset + 1]
                local_max = max(local_matrix.reshape((1, -1))[0, :])
                local_matrix[np.where(local_matrix != local_max)] = 0
        self.nonmax_r = R
        return self

    def nonmax_Supression(self):
        """Do non-maimum suppression to get a sparse set of corner features.

        Vectorized 3x3 suppression: a pixel survives (nonmax_r set to 1)
        only when its thresholded R value is strictly greater than all
        eight neighbours.
        """
        threshold = self.thresold
        r_matrix = self.r_matrix
        ovrlay = self.ovrlay
        ishape = self.image_shpae
        win = self.nonmax_window
        R = r_matrix * (r_matrix > threshold)
        offset = int(win / 2)
        ovrlay += offset
        nonmax_r = np.zeros(ishape)
        # The centre view (matrix4) and its eight one-pixel-shifted neighbours.
        matrix0 = R[ovrlay - 1:- ovrlay - 1, ovrlay - 1:- ovrlay - 1]
        matrix1 = R[ovrlay - 1:- ovrlay - 1, ovrlay:- ovrlay]
        matrix2 = R[ovrlay - 1:- ovrlay - 1, ovrlay + 1:- ovrlay + 1]
        matrix3 = R[ovrlay:- ovrlay, ovrlay - 1:- ovrlay - 1]
        matrix4 = R[ovrlay:- ovrlay, ovrlay:- ovrlay]
        matrix5 = R[ovrlay:- ovrlay, ovrlay + 1:- ovrlay + 1]
        matrix6 = R[ovrlay + 1:- ovrlay + 1, ovrlay - 1:- ovrlay - 1]
        matrix7 = R[ovrlay + 1:- ovrlay + 1, ovrlay:- ovrlay]
        matrix8 = R[ovrlay + 1:- ovrlay + 1, ovrlay + 1:- ovrlay + 1]
        matrix_array = np.array([matrix0, matrix1, matrix2, matrix3,
                                 matrix5, matrix6, matrix7, matrix8])
        # 1 where the centre beats the neighbour, 0 otherwise; a pixel is a
        # peak when it beats all eight (sum == 8).
        matrix_diff = matrix4 - matrix_array
        matrix_diff[np.where(matrix_diff > 0)] = 1
        matrix_diff[np.where(matrix_diff < 0)] = 0
        matrix_sum = sum(matrix_diff)
        peaks_inds = np.where(matrix_sum == 8)
        nonmax_r[peaks_inds[0] + ovrlay, peaks_inds[1] + ovrlay] = 1
        self.nonmax_r = nonmax_r
        return self
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,632 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-2/libs/misc.py | # -*- coding: utf-8 -*-
import glob
import numpy as np
import matplotlib.image as mpimg
__all__ = ["Gen_Gaussian_Filter", "Load_Images", "rgb2gray",
"calculate_gradient", "normalized_correlation"]
def Gen_Gaussian_Filter(dim, sigma, size=0):
    """Generate a normalized 1D or 2D Gaussian filter.

    Parameters
    ----------
    dim: int
        Dimension of filter: 1 returns a (1, n) row vector, 2 an (n, n) mask
    sigma: float
        Standard deviation
    size: int
        Minimum filter width; actual width is at least 2*ceil(2*sigma) + 1

    Returns
    -------
    numpy.ndarray
        (1, n) row filter for dim == 1, (n, n) separable mask for dim == 2;
        entries sum to 1 in both cases.
    """
    # Width covers +/- 2 sigma (rounded up) unless a larger size is requested.
    n = int(max(2 * np.ceil(2 * sigma) + 1, size))
    ovrlay = n // 2
    inds = np.arange(-ovrlay, ovrlay + 1)
    gaussian_1d = np.exp(-inds**2 / (2 * sigma**2))
    # Normalize so the filter preserves the image's mean intensity; keep the
    # historical (1, n) row shape that callers (e.g. Temporal_Derive) rely on.
    row = (gaussian_1d / gaussian_1d.sum()).reshape((1, -1))
    if dim == 2:
        # BUG FIX: the 2D mask is the outer product of the normalized 1D
        # filter with itself. The old code computed gaussian_1d * gaussian_1d.T
        # on the unnormalized 1-D vector, where .T is a no-op, producing an
        # unnormalized elementwise square of shape (n,).
        return row.T * row
    return row
def Load_Images(path, imgtype="*.jpg"):
    """Read every image under *path* whose name matches *imgtype*.

    Parameters
    ----------
    path: string
        Directory (with trailing separator) to search
    imgtype: string
        Glob pattern selecting the image files

    Returns
    -------
    list
        One decoded image array per matched file, in glob order.
    """
    matched_paths = glob.glob(f"{path}{imgtype}")
    return [mpimg.imread(one_path) for one_path in matched_paths]
def rgb2gray(img):
    """Collapse an RGB image to one luminance channel (ITU-R 601 weights).

    Non-3-channel inputs fall back to their first channel unchanged.
    """
    if img.shape[-1] != 3:
        return img[:, :, 0]
    red, green, blue = img[:, :, 0], img[:, :, 1], img[:, :, 2]
    return 0.299 * red + 0.587 * green + 0.114 * blue
def apply_2d_filter(bfilter, timage):
    """Correlate *timage* with the square mask *bfilter* using zero padding.

    Parameters
    ----------
    bfilter: array-like
        Square filter mask
    timage: array-like
        Targeted image

    Returns
    -------
    numpy.ndarray
        Filter response, same shape as *timage*.
    """
    pad = bfilter.shape[0] // 2
    rows, cols = timage.shape
    # Embed the image in a zero border so every pixel has a full window.
    padded = np.zeros((rows + 2 * pad, cols + 2 * pad))
    padded[pad:-pad, pad:-pad] = timage
    response = np.zeros((rows, cols))
    for r in range(rows):
        for c in range(cols):
            window = padded[r:r + 2 * pad + 1, c:c + 2 * pad + 1]
            response[r, c] = (window * bfilter).sum()
    return response
def calculate_gradient(matrix, method='Prewitt'):
    """Return (x_gradient, y_gradient) of *matrix* with a 3x3 mask.

    Any *method* containing 's' selects Sobel (center weight 2); everything
    else is Prewitt (center weight 1). Border rows/columns stay zero.
    """
    center_weight = 2 if "s" in method.lower() else 1
    rows, cols = np.shape(matrix)
    # Horizontal differences, then sum three consecutive rows.
    x_gradient = np.zeros((rows, cols))
    dx = matrix[:, 2:] - matrix[:, :-2]
    x_gradient[1:-1, 1:-1] = dx[:-2, :] + center_weight * dx[1:-1, :] + dx[2:, :]
    # Vertical differences, then sum three consecutive columns.
    y_gradient = np.zeros((rows, cols))
    dy = matrix[2:, :] - matrix[:-2, :]
    y_gradient[1:-1, 1:-1] = dy[:, :-2] + center_weight * dy[:, 1:-1] + dy[:, 2:]
    return x_gradient, y_gradient
def normalize(matrix):
    """Scale *matrix* by the sum of its squared entries.

    A zero matrix is returned unchanged to avoid division by zero.
    NOTE(review): this divides by the squared energy (no sqrt), so it is not
    unit-norm scaling — preserved as-is because the NCC pipeline depends on it.
    """
    energy = np.sum(matrix ** 2)
    return matrix / energy if energy else matrix
def normalized_correlation(image, template):
    """Score *template* against each patch in the stack *image*.

    Parameters
    ----------
    image: array-like
        Stack of patches, shape (k, n, n)
    template: array-like
        Single patch, shape (n, n)

    Returns
    -------
    numpy.ndarray
        One correlation score per patch, shape (k,).
    """
    def _energy_scale(m):
        # Same scaling as the module-level normalize(): divide by the sum of
        # squares, leaving an all-zero patch untouched.
        s = np.sum(m ** 2)
        return m / s if s else m

    scaled_patches = np.array([_energy_scale(patch) for patch in image])
    products = scaled_patches * _energy_scale(template)
    return products.sum(axis=-1).sum(axis=1)
def homo_project(trans_matrix, point):
    """Project a 2-D *point* through the 3x3 homography *trans_matrix*.

    Returns the dehomogenized (x, y); (inf, inf) when the projected scale
    component is zero (point maps to infinity).
    """
    x, y = point
    homogeneous = np.array([[x], [y], [1]])
    projected = np.matmul(trans_matrix, homogeneous)
    w = projected[-1, 0]
    if not w:
        return (np.inf, np.inf)
    return tuple(projected[:2, 0] / w)
def euclidean_disance(p0, p1):
    """Straight-line (Euclidean) distance between two 2-D points.

    NOTE(review): the name's "disance" typo is part of the public API
    (imported by Correspondences.py) and therefore kept.
    """
    dx = p0[0] - p1[0]
    dy = p0[1] - p1[1]
    return np.sqrt(dx * dx + dy * dy)
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,633 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-2/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
from libs.misc import Load_Images
from libs.Mosaic import Image_Mosaicing
# Folder holding the input frames (trailing separator expected by Load_Images).
image_path = r".\\DanaOffice\\"
# Tuning knobs for the full mosaicing pipeline: Harris corner detection,
# NCC correspondence matching, RANSAC homography fitting, and warping.
parameters = {"method": "Sobel",
              "corner_neighbor": 7,
              "avg": "Gaussian",
              "sigma": 1.4,
              "corner_thresold": 1e8,
              "k": 0.05,
              "nonmax_window": 3,
              "ncc_threshold": 0.5,
              "ncc_neighbor": 3,
              "ransac_ratio": 0.1,
              "ransac_distance": 2,
              "ransac_iteration": 1e4,
              "homo_num": 4}
if __name__ == "__main__":
    # Stitch two adjacent frames (indices 7 and 8) of the sequence.
    raw_images = Load_Images(image_path)
    mosaic = Image_Mosaicing(raw_images[7], raw_images[8], params=parameters)
    mosaic.corner_detect(show=False)
    mosaic.correspondences(show=False)
    mosaic.homography_estimate(show=False)
    mosaic.image_warp(show=False)
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,634 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-1/libs/Detector.py | # -*- coding: utf-8 -*-
import os
import numpy as np
import matplotlib.pylab as plt
#import matplotlib.image as mpimg
plt.rcParams['figure.figsize'] = 15, 6
#plt.rcParams['figure.dpi'] = 300
from .FilterApp import FilterApp
from .misc import Gen_Gaussian_Filter, Gen_Box_Filter
class MotionDetector(object):
    """Temporal-differencing motion detector over a stack of video frames.

    Pipeline: Convert_Gray -> Spatial_Smooth -> Temporal_Derive ->
    Threshold_Select -> Show_Result.
    """

    def __init__(self, frames):
        self.filapp = FilterApp()
        self.frame_num = frames.shape[0]
        self.raw_frames = frames
        self.gray_frames = np.array([])
        self.filtered_frames = np.array([])
        self.derived_frames = np.array([])
        self.derived_index = np.array([])
        self.threshold = 0
        # Half-width of the spatial filter; border pixels inside this margin
        # are excluded from threshold estimation.
        self.filter_ovrlay = 1
        self.mean_matrix = np.array([])
        self.var_matrix = np.array([])

    @staticmethod
    def rgb2gray(img):
        """Convert a RGB image to gray scale (ITU-R 601 luminance weights)."""
        if img.shape[-1] == 3:
            tmpimg = 0.299 * img[:, :, 0] + 0.587 * img[:, :, 1] + 0.114 * img[:, :, 2]
        else:
            tmpimg = img[:, :, 0]
        return tmpimg

    def Convert_Gray(self):
        """Convert raw frames into gray scale."""
        tmp_frames = self.raw_frames
        frame_num = self.frame_num
        gray_frames = [0] * frame_num
        for i in np.arange(frame_num):
            current_frame = tmp_frames[i]
            gray_frames[i] = self.rgb2gray(current_frame)
        gray_frames = np.array(gray_frames)
        self.gray_frames = gray_frames
        return self

    def Spatial_Smooth(self, mask="None", **kwargs):
        """Applying a 2D spatial smoothing filter to the frames
        before applying the temporal derivative filter.

        Parameters
        ----------
        mask: string, ["None", "Box", Gaussian"], default 'None'
            Spatial smoothing filter to apply; kwargs['param'] is the box
            size or the Gaussian sigma respectively
        """
        gray_frames = self.gray_frames
        frame_num = self.frame_num
        if "b" in mask.lower():
            spatial_filter = Gen_Box_Filter(kwargs['param'])
        elif 'g' in mask.lower():
            spatial_filter = Gen_Gaussian_Filter(2, kwargs['param'])
        else:
            spatial_filter = None
        if spatial_filter is None:
            filtered_frames = gray_frames
            filter_ovrlay = 1
        else:
            filter_ovrlay = int(spatial_filter.shape[0] / 2)
            tmp_frames = [0] * frame_num
            for i in np.arange(frame_num):
                tmp_frames[i] = self.filapp.apply_2d_filter(spatial_filter,
                                                            gray_frames[i])
            filtered_frames = np.array(tmp_frames)
        self.filtered_frames = filtered_frames
        self.filter_ovrlay = filter_ovrlay
        return self

    def Temporal_Derive_old(self, op, **kwargs):
        """Apply a 1-D differential operator at each pixel to
        compute a temporal derivative (legacy implementation).

        Parameters
        ----------
        op: string
            Differential operator to apply; a name containing 'g' selects
            a Gaussian derivative, anything else a simple finite difference
        """
        filtered_frames = self.filtered_frames
        frame_num = self.frame_num
        if "g" in op.lower():
            operator = Gen_Gaussian_Filter(1, kwargs['param'])
            derived_frames = self.filapp.apply_1d_differential(operator, filtered_frames)
            operator_length = operator.shape[1]
            ovrlay = int(operator_length / 2)
            derived_index = np.arange(frame_num)[ovrlay: -ovrlay]
        else:
            # Narrowed from a bare `except:` which would also swallow
            # KeyboardInterrupt and real bugs.
            try:
                order = int(kwargs['param'])
            except (KeyError, TypeError, ValueError):
                order = 1
            derived_frames = (filtered_frames[order:, :, :] -\
                              filtered_frames[:-order, :, :]) / order
            derived_index = np.arange(frame_num)[order:]
        self.derived_frames = derived_frames
        self.derived_index = derived_index
        return self

    def Temporal_Derive(self, op, **kwargs):
        """Apply a 1-D differential operator at each pixel to
        compute a temporal derivative.

        Parameters
        ----------
        op: string
            Differential operator to apply; a name containing 'g'
            additionally smooths the finite difference with a temporal
            Gaussian of sigma kwargs['param']
        """
        filtered_frames = self.filtered_frames
        frame_num = self.frame_num
        # Narrowed from a bare `except:` which would also swallow
        # KeyboardInterrupt and real bugs.
        try:
            order = int(kwargs['param'])
        except (KeyError, TypeError, ValueError):
            order = 1
        derived_frames = (filtered_frames[order:, :, :] -\
                          filtered_frames[:-order, :, :]) / order
        derived_index = np.arange(frame_num)[order:]
        if "g" in op.lower():
            operator = Gen_Gaussian_Filter(1, kwargs['param']).reshape((1, -1))[0, :]
            new_derived_frames = np.zeros(derived_frames.shape)
            shape = derived_frames[0].shape
            for i in np.arange(shape[0]):
                for j in np.arange(shape[1]):
                    new_derived_frames[:, i, j] = self.filapp.apply_1d_filter(operator,
                                                 derived_frames[:, i, j])
            # BUG FIX: keep the Gaussian-smoothed derivative; the old code
            # computed new_derived_frames and then discarded it by storing
            # the unsmoothed derived_frames.
            derived_frames = new_derived_frames
        self.derived_frames = derived_frames
        self.derived_index = derived_index
        return self

    def Threshold_Select(self, percentage=0.1, n=5):
        """Select reasonable threshold for motion detection.

        The threshold is n times the per-pixel standard deviation at the
        *percentage* quantile of the (border-trimmed) std map.
        """
        filter_ovrlay = self.filter_ovrlay
        derived_frames = self.derived_frames
        pwidth, plength = derived_frames.shape[1:]
        mean_matrix = np.zeros((pwidth, plength))
        var_matrix = np.zeros((pwidth, plength))
        for i in np.arange(pwidth):
            for j in np.arange(plength):
                one_pixel_array = np.abs(derived_frames[:, i, j])
                mean_matrix[i, j] = np.mean(one_pixel_array)
                var_matrix[i, j] = np.std(one_pixel_array)
        # Trim the filter border before ranking, it holds zero-padding artifacts.
        valid_var_matrix = np.copy(var_matrix[filter_ovrlay: -filter_ovrlay,
                                              filter_ovrlay: -filter_ovrlay])
        var_array = np.sort(valid_var_matrix.reshape((1, -1))[0, :])
        estimated_var = var_array[int(percentage*len(var_array))]
        threshold = n*estimated_var
        self.threshold = threshold

    def Show_Result(self, savepath=r".\\new\\"):
        """Save, per frame, the original / filtered / motion-mask triptych."""
        threshold = self.threshold
        derived_frames = self.derived_frames
        derived_index = self.derived_index
        original_frames = self.raw_frames[derived_index]
        filtered_frames = self.filtered_frames[derived_index]
        # Moving pixels (derivative above threshold) are drawn black on white.
        mask_frames = 255 * np.ones(derived_frames.shape)
        mask_frames[np.where(derived_frames >= threshold)] = 0
        if not os.path.exists(savepath):
            os.makedirs(savepath)
        for i in np.arange(len(derived_index)):
            fig = plt.figure()
            ax1 = fig.add_subplot(131)
            ax1.imshow(original_frames[i])
            ax1.axis('off')
            ax1.set_title("Original", fontsize=20)
            ax2 = fig.add_subplot(132)
            ax2.imshow(filtered_frames[i], cmap=plt.cm.gray)
            ax2.axis('off')
            ax2.set_title("Filtered", fontsize=20)
            ax3 = fig.add_subplot(133)
            ax3.imshow(mask_frames[i], cmap=plt.cm.gray)
            ax3.axis('off')
            ax3.set_title("Mask", fontsize=20)
            fig.savefig(f"{savepath}{i}.png")
            print(f"Saving {i} frame......")
            plt.close("all")
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,635 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-3/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from libs.misc import Load_Images
from libs.Flow import Dense_Optical_Flow
# Folder holding the input frame pair (trailing separator expected by Load_Images).
image_path = r".\\toys\\"
# Pre-smoothing and gradient settings for the Lucas-Kanade flow estimator.
parameters = {"smooth": "Gaussian",
              "sigma": 1.4,
              "method": "Prewitt"}
if __name__ == "__main__":
    # Skip the first three directory entries, then take two consecutive frames.
    raw_images = Load_Images(image_path, imgtype="*.*")[3:]
    flow = Dense_Optical_Flow(raw_images[0], raw_images[1], params=parameters)
    # Sweep downsampling scale and aggregation window size.
    flow.LKMethod(scale=1)
    flow.LKMethod(scale=2)
    flow.LKMethod(scale=4)
    flow.LKMethod(scale=2, window=3)
    flow.LKMethod(scale=2, window=7)
    flow.LKMethod(scale=4, window=7)
    flow.LKMethod(scale=1, window=7)
    flow.show_origin()
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,636 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Homework-2/hw2.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pylab as plt
import matplotlib.image as mpimg
plt.rcParams['figure.figsize'] = 15, 6
#plt.rcParams['figure.dpi'] = 300
NUMBER = 10             # number of noisy images to synthesize
IMG_SHAPE = (256, 256)  # size of each synthetic image
GREY_LEVEL = 128        # constant base intensity of the synthetic images
MU = 0                  # Gaussian noise mean
SIGMA = 2               # Gaussian noise standard deviation
class Homework2(object):
    """Solutions to Homework 2: noise estimation and smoothing filters."""
    def __init__(self):
        pass
    @staticmethod
    def gen_one_image():
        """Generate one image: constant grey level plus Gaussian noise."""
        img = GREY_LEVEL * np.ones(IMG_SHAPE)
        noise = np.random.normal(MU, SIGMA, IMG_SHAPE)
        image = img + noise
        return image
    def gen_image(self, nimage):
        """Generate all required images."""
        tmp = [0] * nimage
        for i in np.arange(nimage):
            tmp[i] = self.gen_one_image()
        return np.array(tmp)
    @staticmethod
    def EST_NOISE(images):
        """Implementation of EST_NOISE in Chapter 2 of Trucco and Verri.

        Returns the per-pixel sample standard deviation across the stack.
        """
        num = images.shape[0]
        m_e_bar = sum(images)/num
        m_sigma = np.sqrt(sum((images - m_e_bar)**2)/(num - 1))
        return m_sigma
    def solve_p1(self):
        """Solve problem #1."""
        all_image = self.gen_image(NUMBER)
        result = self.EST_NOISE(all_image)
        print("\n\nSolution for problem #1.")
        self.plot_result(all_image, result)
        # Cache the raw images so solve_p2 can reuse them.
        self.images = all_image
    @staticmethod
    def apply_2d_filter(bfilter, timage):
        """Apply given 2D filter onto an image (zero-padded correlation).

        Parameters
        ----------
        bfilter: array-like
            The filter
        timage: array-like
            Targeted image
        """
        image_shape = timage.shape
        ovrlay = int(bfilter.shape[0] / 2)
        tmp_matrix = np.zeros(np.array(image_shape) + 2 * ovrlay)
        tmp_matrix[ovrlay:-ovrlay, ovrlay:-ovrlay] = timage
        res_matrix = np.zeros(timage.shape)
        for i in np.arange(image_shape[0]) + ovrlay:
            for j in np.arange(image_shape[1]) + ovrlay:
                local_matrix = tmp_matrix[i - ovrlay:i + ovrlay + 1,
                                          j - ovrlay:j + ovrlay + 1]
                res_matrix[i - ovrlay, j - ovrlay] = sum(sum(local_matrix * bfilter))
        return res_matrix
    def solve_p2(self):
        """Solve problem #2: re-estimate noise after 3x3 box filtering."""
        box_filter = np.ones((3, 3)) / 9
        all_image = self.images
        filted_image = [0] * NUMBER
        for i in np.arange(NUMBER):
            filted_image[i] = self.apply_2d_filter(box_filter, all_image[i])
        filted_image = np.array(filted_image)
        result = self.EST_NOISE(filted_image)
        print("\n\nSolution for problem #2.")
        self.plot_result(filted_image, result)
    @staticmethod
    def plot_result(images, res):
        """Plot results: the image stack plus mean/worst-case noise estimates."""
        fig = plt.figure()
        for i in np.arange(NUMBER):
            ax = fig.add_subplot(2, 5, i + 1)
            ax.axis('off')
            ax.imshow(images[i], cmap=plt.cm.gray)
        fig.suptitle(f"Estimated Noise {res.mean():.4f}, Worst Case Noise {res.max():.4f}",
                     fontsize=12)
        fig.show()
    def solve_p3(self, sigma=1.4, show=True):
        """Solve problem #3: 2D Gaussian vs. two separable 1D Gaussians."""
        n = int(2 * np.ceil(2 * sigma) + 1)
        # 2D Gaussian
        ovrlay = int(n / 2)
        inds = np.arange(-ovrlay, ovrlay + 1)
        x, y = np.meshgrid(inds, inds)
        mask = np.exp(-(x**2 + y**2)/(2*sigma**2))
        mask = mask/sum(sum(mask))
        # two 1D Gaussian
        gaussian_1d = np.exp(-inds**2/(2 * sigma**2))
        # Division by a (1, 1) array both normalizes and reshapes to a row
        # vector, so the product below is an outer product of shape (n, n).
        gaussian_1d = gaussian_1d /sum(gaussian_1d).reshape((-1, 1))
        mask2 = gaussian_1d * gaussian_1d.T
        print("\n\nSolution for problem #3.")
        if show:
            Fsize = 16
            test_img = mpimg.imread(r".\fig\test.png")
            output_img1 = self.apply_2d_filter(mask, test_img)
            output_img2 = self.apply_2d_filter(mask2, test_img)
            fig = plt.figure()
            ax1 = fig.add_subplot(131)
            ax1.axis('off')
            ax1.imshow(test_img, cmap=plt.cm.gray)
            ax1.set_title("Raw image", fontsize=Fsize)
            ax2 = fig.add_subplot(132, sharex=ax1, sharey=ax1)
            ax2.axis('off')
            ax2.imshow(output_img1, cmap=plt.cm.gray)
            ax2.set_title(f"2D {n}x{n} Gaussian", fontsize=Fsize)
            ax3 = fig.add_subplot(133, sharex=ax1, sharey=ax1)
            ax3.axis('off')
            ax3.imshow(output_img2, cmap=plt.cm.gray)
            ax3.set_title(f"Two 1D n={n} Gaussian", fontsize=Fsize)
            fig.show()
        return mask, gaussian_1d
    @staticmethod
    def apply_1d_filter(bfilter, timage):
        """Apply given 1D filter onto an image (zero-padded correlation).

        Parameters
        ----------
        bfilter: array-like
            The filter
        timage: array-like
            Targeted image
        """
        image_length = len(timage)
        ovrlay = int(bfilter.shape[0] / 2)
        tmp_array = np.zeros(image_length + 2 * ovrlay)
        tmp_array[ovrlay:-ovrlay] = timage
        res_array = np.zeros(image_length )
        for i in np.arange(image_length) + ovrlay:
            local_matrix = tmp_array[i - ovrlay:i + ovrlay + 1]
            res_array[i - ovrlay] = sum(local_matrix * bfilter)
        return res_array
    @staticmethod
    def apply_1d_median_filter(n, timage):
        """Applying a 3 median flter on the image I assuming that the
        border pixels are not changed.

        Parameters
        ----------
        n: int
            Shape of median filter
        timage: array-like
            Targeted image
        """
        image_shape = timage.shape
        ovrlay = int(n / 2)
        res_matrix = np.copy(timage)
        for i in np.arange(image_shape[0])[1:-1]:
            local_matrix = timage[i - ovrlay:i + ovrlay + 1]
            median = np.median(local_matrix)
            res_matrix[i] = median
        return res_matrix
    def solve_p4(self):
        """Solve problem #4: compare a box and a weighted smoothing filter."""
        I = np.array([10] * 5 + [40] * 5)
        filter1 = np.ones(5)/5
        filter2 = np.array([1, 2, 4, 2, 1]) / 10
        O1 = self.apply_1d_filter(filter1, I).astype(int)
        O2 = self.apply_1d_filter(filter2, I).astype(int)
        print("\n\nSolution for problem #4.")
        print("Filter (a)")
        print(O1)
        print("Filter (b)")
        print(O2)
        return O1, O2
    @staticmethod
    def apply_2d_median_filter(n, timage):
        """Applying a nxn median filter on the image I assuming that the
        border pixels are not changed."""
        image_shape = timage.shape
        ovrlay = int(n / 2)
        res_matrix = np.copy(timage)
        for i in np.arange(image_shape[0])[1:-1]:
            for j in np.arange(image_shape[1])[1:-1]:
                local_matrix = timage[i - ovrlay:i + ovrlay + 1,
                                      j - ovrlay:j + ovrlay + 1]
                median = np.median(local_matrix)
                res_matrix[i, j] = median
        return res_matrix
    def solve_p5(self, show=True):
        """Solve problem #5: plot the derived output probability distributions."""
        print("\n\nSolution for problem #5.")
        if show:
            fig = plt.figure()
            ax1 = fig.add_subplot(121)
            ax1.stem([-100, 0, 100], np.array([49, 42, 9])/100)
            ax1.set_ylabel("Probability")
            ax1.set_xlabel("Output")
            ax1.set_title("On the line")
            ax2 = fig.add_subplot(122)
            ax2.stem([-150, -50, 50, 100], np.array([21, 9, 21, 49])/100)
            ax2.set_ylabel("Probability")
            ax2.set_xlabel("Output")
            ax2.set_title("On the adjacent line")
            fig.show()
    def solve_p6(self):
        """Solve problem #6: median-filter the |i - j| ramp image."""
        I = np.zeros((8, 8)).astype(int)
        for i in np.arange(8):
            for j in np.arange(8):
                I[i, j] = np.abs(i - j)
        O = self.apply_2d_median_filter(3, I).astype(int)
        print("\n\nSolution for problem #6.")
        print(I)
        print(O)
        return I, O
    def solve_p7(self):
        """Solve problem #7: median filter vs. weighted average on a step edge."""
        I = np.array([4] * 4 + [8] * 4)
        O1 = self.apply_1d_median_filter(3, I)
        avgfilter = np.array([1, 2, 1]) / 4
        O2 = np.copy(I)
        O2[1:-1] = self.apply_1d_filter(avgfilter, I)[1:-1]
        print("\n\nSolution for problem #7.")
        print("Median Filter")
        print(O1)
        print("Average Mask")
        print(O2)
        return O1, O2
if __name__ == "__main__":
    # Run every homework problem in order; figures open interactively.
    hw2 = Homework2()
    hw2.solve_p1()
    hw2.solve_p2()
    hw2.solve_p3()
    hw2.solve_p4()
    hw2.solve_p5()
    hw2.solve_p6()
    hw2.solve_p7()
# 
60,637 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Homework-4/libs/misc.py | # -*- coding: utf-8 -*-
import numpy as np
__all__ = ["compute_ssd",
"compute_correlation",
"compute_normalized_correlation"]
def compute_ssd(image, template):
    """Sum-of-squared-differences map of *template* slid over *image*.

    Parameters
    ----------
    image: array-like
        The target image
    template: array-like
        Template image (square)

    Returns
    -------
    numpy.ndarray
        SSD per valid center; border cells the template cannot cover keep
        the sentinel value -1.
    """
    pad = template.shape[0] // 2
    rows, cols = image.shape
    ssd_map = - np.ones((rows, cols))
    for r in range(pad, rows - pad):
        for c in range(pad, cols - pad):
            patch = image[r - pad:r + pad + 1, c - pad:c + pad + 1]
            ssd_map[r, c] = ((patch - template) ** 2).sum()
    return ssd_map
def compute_correlation(image, template):
    """Cross-correlation map of *template* slid over *image*.

    Parameters
    ----------
    image: array-like
        The target image
    template: array-like
        Template image (square)

    Returns
    -------
    numpy.ndarray
        Correlation per valid center; uncovered border cells keep -1.
    """
    pad = template.shape[0] // 2
    rows, cols = image.shape
    cor_map = - np.ones((rows, cols))
    for r in range(pad, rows - pad):
        for c in range(pad, cols - pad):
            patch = image[r - pad:r + pad + 1, c - pad:c + pad + 1]
            cor_map[r, c] = (patch * template).sum()
    return cor_map
def normalize(matrix):
    """Divide *matrix* by its total squared energy.

    An all-zero matrix is returned unchanged to avoid dividing by zero.
    NOTE(review): this is sum-of-squares scaling, not unit-norm (no sqrt);
    kept as-is because compute_normalized_correlation depends on it.
    """
    total = np.sum(matrix * matrix)
    if not total:
        return matrix
    return matrix / total
def compute_normalized_correlation(image, template):
    """Normalized cross-correlation map of *template* slid over *image*.

    Parameters
    ----------
    image: array-like
        The target image
    template: array-like
        Template image (square)

    Returns
    -------
    numpy.ndarray
        Score per valid center; uncovered border cells keep the sentinel -1.
    """
    def _energy_scale(m):
        # Same scaling as the module-level normalize(): divide by the sum
        # of squares, leaving an all-zero patch untouched.
        s = np.sum(m ** 2)
        return m / s if s else m

    pad = template.shape[0] // 2
    rows, cols = image.shape
    ncc_map = - np.ones((rows, cols))
    scaled_template = _energy_scale(template)
    for r in range(pad, rows - pad):
        for c in range(pad, cols - pad):
            patch = image[r - pad:r + pad + 1, c - pad:c + pad + 1]
            ncc_map[r, c] = (_energy_scale(patch) * scaled_template).sum()
    return ncc_map
60,638 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-1/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pylab as plt
#import matplotlib.image as mpimg
plt.rcParams['figure.figsize'] = 15, 6
#plt.rcParams['figure.dpi'] = 300
from libs.misc import Load_Images
from libs.Detector import MotionDetector
# Folder holding the input frames (trailing separator expected by Load_Images).
frame_path = r".\\Office\\"
# Each case: (smoothing mask, mask parameter, derivative operator,
#             derivative parameter, threshold percentile).
test_case = [('no', 0, 'd', 2, 0.1),
             ('b', 3, 'd', 2, 0.1),
             ('b', 5, 'd', 2, 0.1),
             ('g', 1.4, 'd', 2, 0.1),
             ('g', 1.8, 'd', 2, 0.1),
             ('b', 5, 'g', 1, 0.1),
             ('b', 5, 'g', 1.6, 0.1),
             ('b', 5, 'g', 2.5, 0.1),
             ('b', 5, 'd', 2, 0.1),
             ('b', 5, 'd', 2, 0.2),
             ('b', 5, 'd', 2, 0.5)]
if __name__ == "__main__":
    # Only case index 3 (Gaussian sigma=1.4 smoothing) is run here; widen
    # the slice to sweep more configurations.
    for i in np.arange(len(test_case))[3:4]:
        case = test_case[i]
        save_path = f".\\Report\\{i}\\"
        raw_frames = Load_Images(frame_path)
        Mdetector = MotionDetector(raw_frames)
        Mdetector.Convert_Gray()
        Mdetector.Spatial_Smooth(case[0], param=case[1])
        Mdetector.Temporal_Derive(case[2], param=case[3])
        Mdetector.Threshold_Select(percentage=case[4])
        Mdetector.Show_Result(savepath=save_path)
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,639 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Homework-3/hw3.py | # -*- coding: utf-8 -*-
import warnings
import numpy as np
import matplotlib.pylab as plt
from matplotlib.lines import Line2D
from libs.FilterApp import FilterApp
from libs import misc
warnings.filterwarnings("ignore")
plt.rcParams['figure.figsize'] = 8, 8
#plt.rcParams['figure.dpi'] = 300
#%%
class Homework3(object):
    """Solutions to Homework 3: edge detection, corners, and line fitting."""

    def __init__(self):
        self.filapp = FilterApp()

    @staticmethod
    def rotate(matrix):
        """Rotate *matrix* 90 degrees clockwise."""
        new_matrix = np.array(list(zip(*matrix[::-1])))
        return new_matrix

    def edge_detection(self, timage, vdetector):
        """Implementation of algorithm canny_enhancer.

        Parameters
        ----------
        timage: array-like
            Targeted image
        vdetector: array-like
            Vertical detector; the horizontal one is its 90-degree rotation

        Returns
        -------
        tuple
            (magnitude, orientation) arrays of the gradient.
        """
        hdetector = self.rotate(vdetector)
        xres = self.filapp.apply_2d_filter(vdetector, timage)
        yres = self.filapp.apply_2d_filter(hdetector, timage)
        magnitude = np.sqrt(xres**2 + yres**2)
        # Where the horizontal gradient vanishes, force orientation to pi/2.
        inds = np.where(xres == 0)
        orientation = np.arctan(yres / xres)
        orientation[inds] = np.arctan(np.inf)
        return magnitude, orientation

    @staticmethod
    def print_matrix_latex(matrix):
        """Print a matrix row-by-row as LaTeX table cells ('a & b & c')."""
        for i in matrix:
            print(' & '.join(str(i)[1:-1].split(" ")))

    def solve_p1(self):
        """Solve problem #1."""
        row, col = np.meshgrid(np.arange(8), np.arange(8))
        image = np.abs(row - col)
        prewitt_vmask = np.array([[-1, 0, 1]]) * np.array([[1],[1],[1]])
        sobel_vmask = np.array([[-1, 0, 1]]) * np.array([[1],[2],[1]])
        print(f"\n\nSolution for problem #1.\n{'='*80}")
        for irow in image:
            print(' & '.join(str(irow)[1:-1].split(" ")))
        pmag, pori = self.edge_detection(image, prewitt_vmask)
        print("Magnitude and Orientation with Prewitt Mask.\n")
        print(pmag.round(4))
        print("\n")
        print(pori.round(4))
        smag, sori = self.edge_detection(image, sobel_vmask)
        print("\nMagnitude and Orientation with Sobel Mask.\n")
        print(smag.round(4))
        print("\n")
        print(sori.round(4))

    @staticmethod
    def findC(x2, y2, xy, ind, neigh):
        """Accumulate the 2x2 structure matrix C over a neigh x neigh window at *ind*."""
        x, y = ind
        n = int(neigh / 2)
        Ixx = x2[x - n:x + n + 1, y - n:y + n + 1]
        Iyy = y2[x - n:x + n + 1, y - n:y + n + 1]
        Ixy = xy[x - n:x + n + 1, y - n:y + n + 1]
        C = np.array([[sum(sum(Ixx)), sum(sum(Ixy))], [sum(sum(Ixy)), sum(sum(Iyy))]])
        return C

    def solve_p3(self):
        """Solve problem #3."""
        Zero = np.zeros((10, 10))
        One = 40 * np.ones((10, 10))
        # Checkerboard test image with a single X-shaped corner at the center.
        I = np.hstack([np.vstack([Zero, One]), np.vstack([One, Zero])])
        prewitt_xmask = np.array([[-1, 0, 1]]) * np.array([[1], [1], [1]])
        prewitt_ymask = np.array([[1, 1, 1]]) * np.array([[-1], [0], [1]])
        Ix = np.zeros((20, 20))
        Iy = np.zeros((20, 20))
        Ex = self.filapp.apply_2d_filter(prewitt_xmask, I)
        Ey = self.filapp.apply_2d_filter(prewitt_ymask, I)
        # Keep only the interior response; the one-pixel border is padding noise.
        Ix[1:-1, 1:-1] = np.copy(Ex[1:-1, 1:-1])
        Iy[1:-1, 1:-1] = np.copy(Ey[1:-1, 1:-1])
        # BUG FIX: this message previously said "problem #2".
        print(f"\n\nSolution for problem #3.\n{'='*80}")
        Ixy = Ix * Iy
        Ixx = Ix ** 2
        Iyy = Iy ** 2
        ovrlay = 4
        eigen = np.zeros((20, 20))
        for i in np.arange(20)[ovrlay:-ovrlay]:
            for j in np.arange(20)[ovrlay:-ovrlay]:
                c = self.findC(Ixx, Iyy, Ixy, [i, j], 7)
                eigenvalues = misc.solve_2_eigenvalues(c)
                if eigenvalues:
                    # The corner strength is the smaller eigenvalue of C.
                    eigen[i, j] = min(eigenvalues)
                print(c)

    def solve_p4(self):
        """Solve problem #4."""
        figure, ax = plt.subplots()
        ax.plot([-10, 10], [2*np.sqrt(2) - 10, 2*np.sqrt(2) + 10], 'k')
        ax.plot([-10, 10], [4, 4], 'k')
        ax.plot([-4, -4], [-10, 10], 'k')
        ax.set_xlim([-5, 4])
        ax.set_ylim([-4, 5])
        ax.set_title("$S = 36 - 16\sqrt{2}$", fontsize=20)
        plt.show()

    def solve_p5(self):
        """Solve problem #5."""
        sqrt3 = np.sqrt(3)
        figure, ax = plt.subplots()
        ax.plot([-10, 10], [2 + 10 / sqrt3, 2 - 10 / sqrt3], 'g--')
        ax.plot([-10, 10], [3, 3], 'g--')
        ax.plot([3, 3], [-10, 10], 'g--')
        ax.plot([-10, 10], [-9, 11], 'g--')
        ax.set_xlim([-5, 4])
        ax.set_ylim([-4, 5])
        text = ['A', 'B', 'C', 'D']
        points = np.array(((2, 3), (3, 3), (3, 2- sqrt3),
                           (sqrt3 / (sqrt3 + 1), (2 * sqrt3 + 1) / (sqrt3 + 1))))
        ax.scatter(points[:, 0], points[:, 1], c='r')
        for i in np.arange(4):
            ax.annotate(f"{text[i]}{points[i, :].round(2)}", points[i, :], fontsize=20)
        ax.set_xlim([0, 4])
        ax.set_ylim([-1, 4])
        # ax.set_title("$S = 36 - 16\sqrt{2}$", fontsize=20)
        plt.show()
#%%
if __name__ == "__main__":
    # Run every homework problem in order; figures open interactively.
    hw3 = Homework3()
    hw3.solve_p1()
    hw3.solve_p3()
    hw3.solve_p4()
    hw3.solve_p5()
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,640 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-2/libs/Correspondences.py | # -*- coding: utf-8 -*-
import logging
import numpy as np
from .misc import normalized_correlation, homo_project, euclidean_disance
class Correspond(object):
    def __init__(self, image1, image2, r1, r2, threshold=0, neighbor=3):
        """Match corner features between two images via normalized correlation.

        Parameters
        ----------
        image1, image2: array-like
            Grayscale images to match
        r1, r2: array-like
            Corner-response masks (nonzero at detected corners)
        threshold: float
            Minimum NCC score for a valid correspondence
        neighbor: int
            Side length of the square patch cut around each corner
        """
        self.image1 = image1
        self.image2 = image2
        self.r1 = r1
        self.r2 = r2
        self.threshold = threshold
        self.neighbor = neighbor
def _gen_local_matrix(self, image, r_matrix):
"""Generate index array with R matrix."""
n = self.neighbor
row_index, col_index = np.where(r_matrix)
ovrlay = min(min(row_index), min(col_index), int(n / 2))
matrix_inds = list(zip(row_index, col_index))
matrix_array = [image[i[0] - ovrlay:i[0] + ovrlay + 1,
i[1] - ovrlay:i[1] + ovrlay + 1] for i in matrix_inds]
point_inds = list(zip(row_index, col_index))
return np.array(matrix_array), point_inds
def calculate_ncc(self):
"""Calculate and generate NCC matrix between two images."""
image1 = self.image1
image2 = self.image2
r1 = self.r1
r2 = self.r2
matrix_array_1, pinds1 = self._gen_local_matrix(image1, r1)
matrix_array_2, pinds2 = self._gen_local_matrix(image2, r2)
len1 = len(pinds1)
len2 = len(pinds2)
ncc_matrix = np.zeros((len1, len2))
for i in np.arange(len1):
template = matrix_array_1[i]
ncc_row_i = normalized_correlation(matrix_array_2, template)
ncc_matrix[i, :] = ncc_row_i
self.matrix_array_1 = matrix_array_1
self.matrix_array_2 = matrix_array_2
self.points_inds_1 = pinds1
self.points_inds_2 = pinds2
self.ncc_matrix = ncc_matrix
return self
def find_cor_pair(self):
"""Find correspondence points pairs."""
threshold = self.threshold
ncc_matrix = np.copy(self.ncc_matrix)
# matrix_array_1 = self.matrix_array_1
# matrix_array_2 = self.matrix_array_2
points_inds_1 = self.points_inds_1
points_inds_2 = self.points_inds_2
ncc_matrix[np.where(ncc_matrix < threshold)] = 0
pair_length = min(len(points_inds_1), len(points_inds_2))
ipoints_array_1 = [(0, 0)] * pair_length
ipoints_array_2 = [(0, 0)] * pair_length
count = 0
while 1:
current_max = ncc_matrix.max()
if not current_max:
break
row, col = np.where(ncc_matrix == current_max)
ipoints_array_1[count] = points_inds_1[row[0]]
ipoints_array_2[count] = points_inds_2[col[0]]
ncc_matrix[row[0], :] = 0
ncc_matrix[:, col[0]] = 0
count += 1
self.points_pair = (ipoints_array_1, ipoints_array_2)
return self
def find_cor_pair_new(self):
"""Find correspondence points pairs."""
threshold = self.threshold
ncc_matrix = np.copy(self.ncc_matrix)
# matrix_array_1 = self.matrix_array_1
# matrix_array_2 = self.matrix_array_2
points_inds_1 = self.points_inds_1
points_inds_2 = self.points_inds_2
ncc_matrix[np.where(ncc_matrix < threshold)] = 0
pair_length = len(points_inds_1)
ipoints_array_1 = [(0, 0)] * pair_length
ipoints_array_2 = [(0, 0)] * pair_length
for i in np.arange(pair_length):
j = np.argmax(ncc_matrix[i, :])
ipoints_array_1[i] = points_inds_1[i]
ipoints_array_2[i] = points_inds_2[j]
self.points_pair = (ipoints_array_1, ipoints_array_2)
return self
@staticmethod
def solve_homograph_matrix(*args):
"""Solve homography matrix with 4 points, non-colinear."""
co_matrix = []
b_array = []
for points_pair in args[0]:
p0, p1 = points_pair
x0, y0 = p0
x1, y1 = p1
co_matrix += [[x0, y0, 1, 0, 0, 0, -x0 * x1, -y0 * x1],
[0, 0, 0, x0, y0, 1, -x0 * y1, -y0 * y1]]
b_array += [x1, y1]
co_matrix = np.vstack(co_matrix)
b_array = np.vstack(b_array)
co_matrix_inv = np.linalg.inv(np.matmul(co_matrix.T, co_matrix))
h_array = np.append(np.matmul(co_matrix_inv,
np.matmul(co_matrix.T, b_array)), 1)
h_array = h_array.reshape((3, 3))
logging.debug(h_array)
return h_array
@staticmethod
def ransac(zip_points_pair, homo_matrix, distance):
"""Copmute distance_array given points pairs and projection matrix."""
pair_length = len(zip_points_pair)
distance_array = - np.ones(pair_length)
for i in np.arange(pair_length):
p0, p1 = zip_points_pair[i]
pro_p0 = homo_project(homo_matrix, p0)
temp_dis = euclidean_disance(pro_p0, p1)
distance_array[i] = temp_dis
inds = np.where(distance_array <= distance)
new_points_pair = list(np.array(zip_points_pair)[inds])
return new_points_pair
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,641 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-1/libs/misc.py | # -*- coding: utf-8 -*-
import glob
import numpy as np
import matplotlib.image as mpimg
def EST_NOISE(images):
    """Per-pixel noise estimate: sample standard deviation across frames.

    Implementation of EST_NOISE in Chapter 2 of Trucco and Verri.
    *images* is stacked frames along axis 0; returns one 2-D sigma map.
    """
    frames = images.shape[0]
    mean_frame = sum(images) / frames
    squared_dev = sum((images - mean_frame) ** 2)
    return np.sqrt(squared_dev / (frames - 1))
def Load_Images(path, imgtype="*.jpg"):
    """Load frame images from folder.

    Globs ``path + imgtype`` and reads every match with matplotlib;
    returns the frames stacked into a numpy array (empty array when
    nothing matches).

    Parameters
    ----------
    path: string
        Image path
    imgtype: string
        Type of images
    """
    pattern = f"{path}{imgtype}"
    matched_paths = glob.glob(pattern)
    frames = [mpimg.imread(p) for p in matched_paths]
    return np.array(frames)
def Gen_Gaussian_Filter(dim, sigma, size=0):
    """Generate a normalized 1D or 2D Gaussian filter.

    Returns a (1, n) row vector for ``dim == 1`` (unchanged shape) or an
    (n, n) kernel for ``dim == 2``; both sum to 1.

    Parameters
    ----------
    dim: int
        Dimension of filter (1 or 2)
    sigma: float
        Standard deviation
    size: int
        Minimum size; the width is max(2*ceil(2*sigma) + 1, size)
    """
    n = max(2 * np.ceil(2 * sigma) + 1, size)
    ovrlay = int(n / 2)
    inds = np.arange(-ovrlay, ovrlay + 1)
    # Keep the 1-D profile as a (1, n) row so the 2-D case below is a
    # genuine outer product.  BUG FIX: the previous version multiplied a
    # 1-D vector by its (identical) transpose -- an elementwise square,
    # not an outer product -- and returned it unnormalized; this matches
    # the corrected Project-3 implementation.
    gaussian_1d = np.exp(-inds**2 / (2 * sigma**2)).reshape((1, -1))
    if dim == 2:
        mask = gaussian_1d * gaussian_1d.T
        mask = mask / mask.sum()
    else:
        mask = gaussian_1d / gaussian_1d.sum()
    return mask
def Gen_Box_Filter(n):
    """Generate an n-by-n box (uniform averaging) filter.

    Parameters
    ----------
    n: int
        Size of the square kernel; every entry equals 1 / n**2.
    """
    width = int(n)
    return np.full((width, width), 1.0 / width ** 2)
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,642 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-3/libs/misc.py | # -*- coding: utf-8 -*-
import glob
import numpy as np
import matplotlib.image as mpimg
__all__ = ["Gen_Gaussian_Filter", "Load_Images", "rgb2gray", "gradient"]
def Gen_Gaussian_Filter(dim, sigma, size=0):
    """Generate a normalized 1D (row vector) or 2D Gaussian filter.

    Parameters
    ----------
    dim: int
        Dimension of filter (2 gives an outer-product kernel)
    sigma: float
        Standard deviation
    size: int
        Minimum width; actual width is max(2*ceil(2*sigma) + 1, size)
    """
    width = max(2 * np.ceil(2 * sigma) + 1, size)
    half = int(width / 2)
    offsets = np.arange(-half, half + 1)
    row = np.exp(-offsets ** 2 / (2 * sigma ** 2)).reshape((1, -1))
    if dim != 2:
        return row / row.sum()
    kernel = row * row.T
    return kernel / kernel.sum()
def Load_Images(path, imgtype="*.gif"):
    """Load frame images from folder.

    Globs ``path + imgtype`` and reads every match with matplotlib;
    returns a plain list of frames (empty list when nothing matches).

    Parameters
    ----------
    path: string
        Image path
    imgtype: string
        Type of images
    """
    matched_paths = glob.glob(f"{path}{imgtype}")
    return [mpimg.imread(p) for p in matched_paths]
def rgb2gray(img):
    """Convert an image to gray scale.

    2-D inputs pass through unchanged; 3-channel (or more) inputs are
    combined with the 0.299/0.587/0.114 luma weights; single-channel 3-D
    inputs return their only channel.
    """
    if img.ndim == 2:
        return img[:, :]
    if img.shape[-1] >= 3:
        red, green, blue = img[:, :, 0], img[:, :, 1], img[:, :, 2]
        return 0.299 * red + 0.587 * green + 0.114 * blue
    return img[:, :, 0]
def apply_2d_filter(bfilter, timage):
    """Apply given 2D filter onto an image.

    The image is zero-padded by half the (odd, square) filter width on
    every side, so the output has the same shape as the input.

    Parameters
    ----------
    bfilter: array-like
        The filter
    timage: array-like
        Targeted image
    """
    rows, cols = timage.shape
    half = int(bfilter.shape[0] / 2)
    padded = np.zeros((rows + 2 * half, cols + 2 * half))
    padded[half:-half, half:-half] = timage
    out = np.zeros(timage.shape)
    span = 2 * half + 1
    for r in np.arange(rows):
        for c in np.arange(cols):
            window = padded[r:r + span, c:c + span]
            out[r, c] = sum(sum(window * bfilter))
    return out
def gradient(matrix, method='Prewitt'):
    """Prewitt- or Sobel-style image gradients computed by array slicing.

    The center row/column of the 3x3 operator is weighted 2 for any
    method whose name contains an 's' (i.e. Sobel), else 1.  Border
    pixels are left at zero.  Returns (x_gradient, y_gradient).
    """
    shape = np.shape(matrix)
    weight = 2 if "s" in method.lower() else 1
    # Horizontal component: central difference along columns, then the
    # three-row weighted sum.
    x_gradient = np.zeros(shape)
    dx = matrix[:, 2:] - matrix[:, :-2]
    x_gradient[1:-1, 1:-1] = dx[:-2, :] + dx[1:-1, :] * weight + dx[2:, :]
    # Vertical component: central difference along rows, then the
    # three-column weighted sum.
    y_gradient = np.zeros(shape)
    dy = matrix[2:, :] - matrix[:-2, :]
    y_gradient[1:-1, 1:-1] = dy[:, :-2] + dy[:, 1:-1] * weight + dy[:, 2:]
    return x_gradient, y_gradient
def solve_mateq(A, b):
    """Solve the 2x2 linear system A @ x = b for the LK flow update.

    Returns the solution column vector, or a (2, 1) zero vector when *A*
    is singular (rank-deficient local structure tensor).
    """
    try:
        # np.linalg.solve is more accurate and cheaper than forming
        # inv(A) explicitly, and raises LinAlgError for singular A.
        # FIX: the previous bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only the linear-algebra failure.
        return np.linalg.solve(A, b)
    except np.linalg.LinAlgError:
        return np.array([[0], [0]])
def norm_minmax(matrix):
    """Linearly rescale *matrix* so its values span the [0, 1] range."""
    lo = matrix.min()
    hi = matrix.max()
    return (matrix - lo) / (hi - lo)
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,643 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Homework-3/libs/misc.py | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pylab as plt
def solve_2_eigenvalues(matrix):
    """Eigenvalues of a 2x2 matrix via its characteristic polynomial.

    Solves x**2 + m*x + n = 0 with m = -(a + d) (negative trace) and
    n = a*d - b*c (determinant).  Returns (x1, x2) with x1 >= x2 when the
    eigenvalues are real, or None for a complex pair.
    """
    a, b, c, d = matrix.reshape((1, -1))[0, :]
    l = 1
    m = - a - d
    n = a * d - b * c
    # Test the discriminant BEFORE taking the square root.
    # BUG FIX: the previous version computed sqrt(m**2 - 4*l*n) first and
    # then sqrt'ed it AGAIN in the positive branch, yielding the fourth
    # root of the discriminant (wrong eigenvalues whenever it != 0 or 1);
    # it also relied on np.sqrt(<0) producing nan to detect the complex
    # case, emitting a runtime warning.
    discriminant = m ** 2 - 4 * l * n
    if discriminant > 0:
        delta = np.sqrt(discriminant)
        x1 = (- m + delta) / (2 * l)
        x2 = (- m - delta) / (2 * l)
        return x1, x2
    elif discriminant == 0:
        x = (- m) / (2 * l)
        return x, x
    else:
        # Complex-conjugate pair: no real eigenvalues.
        return None
def pol2line(pol):
    """Convert a (rho, phi) polar line into its two axis-intercept points.

    Returns ((x0, 0), (0, y0)) where x0 and y0 are the x- and y-axis
    intercepts.  Axis-parallel lines divide by (near-)zero and are left
    to numpy's float semantics.
    """
    rho, phi = pol
    x_intercept = (rho / np.cos(phi), 0)
    y_intercept = (0, rho / np.sin(phi))
    return x_intercept, y_intercept
def line2pol(p1, p2):
    """Convert the line through points *p1* and *p2* into (rho, theta).

    rho is the signed distance from the origin to the line and theta the
    angle of its normal; vertical and horizontal lines are special-cased.
    """
    (x1, y1), (x2, y2) = p1, p2
    if x1 == x2:
        # Vertical line x = x1: normal along the x-axis.
        rho = x1
        theta = 0
    elif y1 == y2:
        # Horizontal line y = y1: normal along the y-axis.
        rho = y1
        theta = np.pi / 2
    else:
        # Axis intercepts of the line: (x0, 0) and (0, y0).
        x0 = x1 - y1 * (x2 - x1) / (y2 - y1)
        y0 = y1 - x1 * (y2 - y1) / (x2 - x1)
        # Origin-to-line distance: |x0*y0| / sqrt(x0^2 + y0^2).
        # NOTE(review): a line through the origin gives x0 == y0 == 0 and
        # hence a 0/0 (nan) here rather than reaching the rho == 0 branch
        # below -- confirm callers never pass such a line.
        rho = np.sqrt(x0**2 * y0**2 / (x0**2 + y0**2))
        if rho:
            # Recover the normal angle and the sign convention from the
            # signs of the intercepts.
            theta = np.sign(y0) * np.abs(np.arctan(x0/y0))
            rho = np.sign(x0) * rho
        else:
            # Line through the origin: angle from the slope, rho stays 0.
            theta = np.arctan((y2 - y1) / (x2 - x1))
    return rho, theta
def calc_area(p1, p2, p3):
    """Area of the triangle (p1, p2, p3) via the shoelace formula."""
    (x1, y1), (x2, y2), (x3, y3) = p1, p2, p3
    cross = x2 * y3 + x1 * y2 + x3 * y1 - x3 * y2 - x2 * y1 - x1 * y3
    return abs(cross) / 2
def find_enclosing(p1, p2, p3):
    # NOTE(review): unfinished stub -- it unpacks three (rho, theta) line
    # parameters but computes nothing and implicitly returns None.
    (r1, t1), (r2, t2), (r3, t3) = p1, p2, p3
#%%
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,644 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Homework-4/hw4.py | # -*- coding: utf-8 -*-
import numpy as np
from libs.misc import *
class Homework4(object):
    """Driver for homework 4: template-matching measures on a toy image."""

    def __init__(self):
        pass

    def solve_p2(self):
        """Compute and print SSD, correlation and normalized correlation
        for the homework's fixed image/template pair.
        """
        image = np.array([[0, 0, 0, 0, 0, 0, 0, 0],
                          [0, 2, 4, 2, 0, 0, 0, 0],
                          [0, 2, 0, 0, 0, 0, 0, 0],
                          [0, 0, 2, 0, 0, 0, 2, 0],
                          [0, 0, 0, 0, 0, 0, 2, 0],
                          [1, 2, 1, 0, 0, 2, 4, 2],
                          [0, 1, 0, 0, 0, 0, 0, 0],
                          [0, 1, 0, 0, 0, 0, 0, 0]])
        template = np.array([[1, 2, 1],
                             [0, 1, 0],
                             [0, 1, 0]])
        # Evaluate all three similarity measures before printing anything.
        results = (("SSD\n", compute_ssd(image, template)),
                   ("Correlation\n", compute_correlation(image, template)),
                   ("Normalized Correlation\n",
                    compute_normalized_correlation(image, template).round(4)))
        print(f"\n\nSolution for problem #1.\n{'='*80}")
        for label, value in results:
            print(label)
            print(value)
if __name__ == "__main__":
    # Script entry point: run the homework solution end to end.
    homework = Homework4()
    homework.solve_p2()
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,645 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-2/libs/Mosaic.py | # -*- coding: utf-8 -*-
import logging
from cv2 import warpPerspective
import numpy as np
import matplotlib.pylab as plt
plt.rcParams['figure.figsize'] = 15, 6
from .misc import rgb2gray, homo_project
from .Harris import Harris_Corner_Detector
from .Correspondences import Correspond
class Image_Mosaicing(object):
    """Produce a mosaic containing the union of all pixels in the two images.

    Pipeline (each stage stores its result on the instance and returns
    self, so calls can be chained):
    corner_detect -> correspondences -> homography_estimate -> image_warp.

    Parameters
    ----------
    rawimg1: array-like
        Raw image 1.
    rawimg2: array-like
        Raw image 2.
    params: dict
        Settings for every stage; the expected keys are the ones read in
        __init__ below.
    """
    def __init__(self, rawimg1, rawimg2, params):
        self.raw_image1 = rawimg1
        self.raw_image2 = rawimg2
        self.gray_image1 = rgb2gray(rawimg1)
        self.gray_image2 = rgb2gray(rawimg2)
        # Harris corner-detector settings.
        self.grad_method = params["method"]
        self.neighbor = params["corner_neighbor"]
        self.avg = params["avg"]
        self.sigma = params["sigma"]
        self.corner_thresold = params["corner_thresold"]
        self.k = params["k"]
        self.nonmax_window = params["nonmax_window"]
        # NCC matching settings.
        self.ncc_threshold = params["ncc_threshold"]
        self.ncc_neighbor = params["ncc_neighbor"]
        # RANSAC homography-estimation settings.
        self.ransac_ratio = params["ransac_ratio"]
        self.ransac_distance = params["ransac_distance"]
        self.ransac_iteration = params["ransac_iteration"]
        self.homo_num = params["homo_num"]
        # Filled in by the pipeline stages.
        self.correspond = None
        self.final_zip_points_pair = None
        self.homography = None
    def _apply_harris_corner_detector(self, image):
        """Apply Harris corner detector.

        Runs the detector with this instance's parameters and returns the
        non-max-suppressed corner response map.
        """
        neighbor = self.neighbor
        avg = self.avg
        sigma = self.sigma
        corner_thresold = self.corner_thresold
        k = self.k
        nonmax_window = self.nonmax_window
        detector = Harris_Corner_Detector(image, neighbor=neighbor, avg=avg,
                                          sigma=sigma, k=k,
                                          thresold=corner_thresold,
                                          nonmax_window=nonmax_window)
        detector.harris_r_matrix()
        detector.nonmax_Supression()
        r = detector.nonmax_r
        return r
    def corner_detect(self, show=True):
        """Detect corners.

        Stores the suppressed response maps in self.corner1/self.corner2;
        when *show* is set, plots the raw image pair and the corner
        overlays.  Returns self.
        """
        image1 = self.gray_image1
        image2 = self.gray_image2
        r1 = self._apply_harris_corner_detector(image1)
        r2 = self._apply_harris_corner_detector(image2)
        self.corner1 = r1
        self.corner2 = r2
        logging.debug(f"R1 size:\t{len(np.where(r1)[0])}")
        logging.debug(f"R2 size:\t{len(np.where(r2)[0])}")
        if show:
            fig = plt.figure()
            ax1 = fig.add_subplot(121)
            ax1.axis('off')
            ax1.imshow(self.raw_image1)
            ax2 = fig.add_subplot(122)
            ax2.axis('off')
            ax2.imshow(self.raw_image2)
            fig = plt.figure()
            ax1 = fig.add_subplot(121)
            ax1.axis('off')
            ax1.imshow(self.raw_image1)
            # np.where yields (rows, cols); scatter wants x=cols, y=rows.
            ax1.scatter(np.where(r1)[1], np.where(r1)[0], c="r")
            ax2 = fig.add_subplot(122)
            ax2.axis('off')
            ax2.imshow(self.raw_image2)
            ax2.scatter(np.where(r2)[1], np.where(r2)[0], c="r")
            plt.show()
        return self
    def correspondences(self, show=True):
        """Find correspondences.

        Matches corner patches between the two images via greedy NCC
        (Correspond.find_cor_pair) and keeps the matcher plus the matched
        point lists; optionally draws the first 100 matches on a
        side-by-side composite.  Returns self.
        """
        threshold = self.ncc_threshold
        neighbor = self.ncc_neighbor
        image1 = self.gray_image1
        image2 = self.gray_image2
        r1 = self.corner1
        r2 = self.corner2
        correspond = Correspond(image1, image2, r1, r2,
                                threshold=threshold, neighbor=neighbor)
        correspond.calculate_ncc()
        correspond.find_cor_pair()
        self.correspond = correspond
        self.correspondence_pairs = correspond.points_pair
        if show:
            raw_image1 = self.raw_image1
            raw_image2 = self.raw_image2
            joint_image = np.hstack((raw_image1, raw_image2))
            points1, points2 = correspond.points_pair
            points2 = np.array(points2)
            # Shift image-2 columns so the points line up in the composite.
            points2[:, 1] += raw_image1.shape[1]
            points_draw_pair = list(zip(np.array(points1), points2))
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax.axis("off")
            ax.imshow(joint_image)
            for i in points_draw_pair[:100]:
                tmp = np.array(i)
                ax.plot(tmp[:, 1], tmp[:, 0])
            plt.show()
        return self
    def homography_estimate(self, show=True):
        """Estimate homography.

        RANSAC loop: sample homo_num matches, fit a candidate homography,
        and -- when its inliers cover more than ransac_ratio of the
        current pairs -- shrink the working set to those inliers; repeat
        until ransac_iteration rounds have been accepted.
        NOTE(review): self.homography stores the LAST candidate fitted,
        which equals the accepted model only if the final round passed
        the ratio test -- confirm this is intended.
        """
        ratio = self.ransac_ratio
        distance = self.ransac_distance
        iteration = self.ransac_iteration
        correspond = self.correspond
        homo_num = self.homo_num
        point1, point2 = correspond.points_pair
        zip_points_pair = list(zip(point1, point2))
        pair_length = len(zip_points_pair)
        loop_count = 0
        while loop_count < iteration:
            logging.info(f"Loop Count {loop_count}")
            n_init_points_inds = np.random.choice(np.arange(pair_length),
                                                  homo_num, replace=False)
            n_init_points = list(np.array(zip_points_pair)[n_init_points_inds])
            homo_matrix = correspond.solve_homograph_matrix(n_init_points)
            new_zip_points_pair = correspond.ransac(zip_points_pair,
                                                    homo_matrix, distance)
            new_pair_length = len(new_zip_points_pair)
            if new_pair_length / pair_length > ratio:
                # Accepted: continue RANSAC on the inlier set only.
                zip_points_pair = new_zip_points_pair
                pair_length = new_pair_length
                loop_count += 1
            else:
                logging.info("Noop")
        if show:
            raw_image1 = self.raw_image1
            raw_image2 = self.raw_image2
            joint_image = np.hstack((raw_image1, raw_image2))
            plot_zip_points_pair = np.copy(new_zip_points_pair)
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax.axis("off")
            ax.imshow(joint_image)
            for point_pair in plot_zip_points_pair:
                # Shift the image-2 point's column for the composite view.
                point_pair[1, 1] += raw_image1.shape[1]
                ax.plot(point_pair[:, 1], point_pair[:, 0], 'r--')
            plt.show()
        self.final_zip_points_pair = new_zip_points_pair
        self.homography = homo_matrix
        return self
    def image_warp(self, show=True):
        """Warp images.

        Projects image 1's four corners through the homography to size
        the output canvas, then warps image 1 with cv2.warpPerspective.
        NOTE(review): warp_size is built as (rows, cols) but OpenCV's
        dsize argument is (width, height), and raw_image2 is passed in
        the dst position -- verify both against the warpPerspective docs.
        """
        raw_image1 = self.raw_image1
        raw_image2 = self.raw_image2
        homography = self.homography
        row, col = raw_image1.shape[:2]
        # Projected positions of the four corners of image 1.
        after_homo = np.vstack([homo_project(homography, (0, 0)),
                                homo_project(homography, (0, col)),
                                homo_project(homography, (row, 0)),
                                homo_project(homography, (row, col))])
        # Canvas bounds: union of the projected corners and image 1's own
        # extent.
        min_row = min(min(after_homo[:, 0]), 0)
        max_row = max(max(after_homo[:, 0]), row)
        min_col = min(min(after_homo[:, 1]), 0)
        max_col = max(max(after_homo[:, 1]), col)
        warp_size = (int(np.ceil(max_row - min_row)),
                     int(np.ceil(max_col - min_col)))
        wraped_image = warpPerspective(raw_image1, homography,
                                       warp_size, raw_image2)
        logging.debug(homo_project(homography, (0, 0)))
        logging.debug(homo_project(homography, (0, col)))
        logging.debug(homo_project(homography, (row, 0)))
        logging.debug(homo_project(homography, (row, col)))
        logging.debug(warp_size)
        if show:
            fig = plt.figure()
            ax1 = fig.add_subplot(221)
            ax1.axis('off')
            ax1.imshow(raw_image1)
            ax1.set_title("Raw Image 1", fontsize=14)
            ax2 = fig.add_subplot(222)
            ax2.axis('off')
            ax2.imshow(raw_image2)
            ax2.set_title("Raw Image 2", fontsize=14)
            ax3 = fig.add_subplot(212)
            ax3.axis('off')
            ax3.imshow(wraped_image)
            ax3.set_title("Wrapped Image", fontsize=14)
            plt.show()
        self.warp_size = warp_size
        return self
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,646 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-3/libs/Flow.py | # -*- coding: utf-8 -*-
import cv2
import numpy as np
import matplotlib.pylab as plt
plt.rcParams['figure.figsize'] = 9, 9
from .misc import rgb2gray, Gen_Gaussian_Filter, apply_2d_filter, gradient, solve_mateq, norm_minmax
class Dense_Optical_Flow(object):
    """Estimate dense optical flow between two frames (Lucas-Kanade).

    Both frames are converted to grayscale, optionally smoothed with a
    Gaussian, and the per-pixel 2x2 LK system is solved over a sliding
    window by :meth:`LKMethod`.

    Parameters
    ----------
    rawimg1: array-like
        Raw image 1 (earlier frame).
    rawimg2: array-like
        Raw image 2 (later frame).
    params: dict
        Expects keys "smooth", "sigma" and "method" (see __init__).
    """
    def __init__(self, rawimg1, rawimg2, params):
        self.raw_image1 = rawimg1
        self.raw_image2 = rawimg2
        self.smooth = params["smooth"]
        self.sigma = params["sigma"]
        self.grad_method = params["method"]
        self.gray_image1 = rgb2gray(rawimg1)
        self.gray_image2 = rgb2gray(rawimg2)
        # NOTE(review): when params["smooth"] is None the smoother stays
        # None and LKMethod's apply_2d_filter call would fail -- confirm
        # callers always enable smoothing.
        self.smoother = self._gen_smoother(self.smooth, self.sigma)
        # Populated by LKMethod.
        self.smooth_image1 = None
        self.smooth_image2 = None
        self.vx_matrix = None
        self.vy_matrix = None
        self.ovrlay = None
    @staticmethod
    def _gen_smoother(smooth, sigma):
        """Generate the 2-D Gaussian smoothing filter (or None when
        smoothing is disabled)."""
        if smooth is not None:
            filt = Gen_Gaussian_Filter(2, sigma)
        else:
            filt = None
        return filt
    @staticmethod
    def _findAB(x2, y2, xy, xt, yt, ind, neigh):
        """Find matrix components of given gradient.

        Sums the gradient products over the neigh x neigh window centered
        at *ind*, producing the LK structure tensor A (2x2) and the
        right-hand side B (2x1) of A @ v = B.
        """
        x, y = ind
        n = int(neigh / 2)
        Ixx = x2[x - n:x + n + 1, y - n:y + n + 1]
        Iyy = y2[x - n:x + n + 1, y - n:y + n + 1]
        Ixy = xy[x - n:x + n + 1, y - n:y + n + 1]
        Ixt = xt[x - n:x + n + 1, y - n:y + n + 1]
        Iyt = yt[x - n:x + n + 1, y - n:y + n + 1]
        A = np.array([[Ixx.sum(), Ixy.sum()],
                      [Ixy.sum(), Iyy.sum()]])
        B = - np.array([[Ixt.sum()], [Iyt.sum()]])
        return A, B
    def LKMethod(self, scale=1, window=5, show=True):
        """Lucas-Kanade method.

        Smooths and subsamples both frames, computes the spatial
        gradients and temporal difference, then solves the per-pixel
        window system and stores the flow fields in self.vx_matrix /
        self.vy_matrix.  Returns self.

        Parameters
        ----------
        scale: int, default 1
            Scale level of origin image (subsampling stride).
        window: int, default 5
            Window size.
        """
        smoother = self.smoother
        image1 = self.gray_image1
        image2 = self.gray_image2
        smooth_image1 = apply_2d_filter(smoother, image1)[::scale, ::scale]
        smooth_image2 = apply_2d_filter(smoother, image2)[::scale, ::scale]
        ishape = np.shape(smooth_image2)
        Ix, Iy = gradient(smooth_image2)
        # Temporal derivative: frame difference.
        It = smooth_image2 - smooth_image1
        Ixy = Ix * Iy
        Ixx = Ix ** 2
        Iyy = Iy ** 2
        Ixt = Ix * It
        Iyt = Iy * It
        # Skip a border wide enough for the window to fit entirely.
        ovrlay = int(window / 2) + 1
        # v_matrix = np.zeros(ishape, dtype=complex)
        vx_matrix = np.zeros(ishape)
        vy_matrix = np.zeros(ishape)
        for i in np.arange(ishape[0])[ovrlay:-ovrlay]:
            for j in np.arange(ishape[1])[ovrlay:-ovrlay]:
                a, b = self._findAB(Ixx, Iyy, Ixy, Ixt, Iyt, [i, j], window)
                # solve_mateq returns a zero vector for singular tensors.
                v = solve_mateq(a, b)
                vx, vy = v[:, 0]
                vx_matrix[i, j] = vx
                vy_matrix[i, j] = vy
                # v_matrix[i, j] = complex(vx, vy)
        if show:
            fig = plt.figure()
            ax1 = fig.add_subplot(121)
            ax1.axis('off')
            ax1.imshow(smooth_image2, cmap=plt.cm.gray)
            ax1.quiver(vx_matrix, vy_matrix, color='green')
            ax1.set_title(f"$scale = {scale}, window = {window}$",
                          fontsize=18)
            ax2 = fig.add_subplot(122, sharex=ax1, sharey=ax1)
            ax2.axis('off')
            # Hue/saturation visualization: direction -> hue, speed ->
            # saturation.
            # NOTE(review): hs_matrix inherits the raw image's dtype
            # (likely uint8), so the assignments below truncate -- verify
            # against cv2.cvtColor's HSV expectations.
            hs_matrix = np.copy(self.raw_image1[::scale, ::scale, :3])
            # hs_matrix = np.zeros([ishape[0], ishape[1], 3])
            direct = self._find_direction(vx_matrix, vy_matrix)
            length = np.sqrt(vx_matrix**2 + vy_matrix**2)
            hs_matrix[:, :, 0] = direct
            hs_matrix[:, :, 1] = length * 255 / length.max()
            hs_matrix[:, :, 2] = 255 * np.ones(ishape)
            rgb = cv2.cvtColor(hs_matrix, cv2.COLOR_HSV2BGR)
            ax2.imshow(rgb)
            ax2.set_title(f"$scale = {scale}, window = {window}$",
                          fontsize=18)
            plt.show()
        self.vx_matrix = vx_matrix
        self.vy_matrix = vy_matrix
        self.ovrlay = ovrlay
        return self
    def show_origin(self):
        """Plot original images side by side (grayscale versions)."""
        image1 = self.gray_image1
        image2 = self.gray_image2
        fig = plt.figure()
        ax1 = fig.add_subplot(121)
        ax1.axis('off')
        ax1.imshow(image1, cmap=plt.cm.gray)
        ax1.set_title("Raw image 1", fontsize=18)
        ax2 = fig.add_subplot(122, sharex=ax1, sharey=ax1)
        ax2.axis('off')
        ax2.imshow(image2, cmap=plt.cm.gray)
        ax2.set_title("Raw image 2", fontsize=18)
        plt.show()
    @staticmethod
    def _find_direction(vx, vy):
        """Map (vx, vy) flow vectors to direction angles in degrees.

        NOTE(review): vy / vx emits divide-by-zero / invalid warnings and
        yields nan where vx == 0 -- confirm downstream tolerates this.
        """
        alpha = np.degrees(np.arctan(vy / vx))
        vx_sign = np.sign(vx)
        vy_sign = np.sign(vy)
        direct = np.zeros(vx.shape)
        # Unfold arctan's (-90, 90) degree range into full quadrants
        # using the component signs.
        direct[np.where(vy_sign < 0)] += 180
        direct[np.where(vy_sign * vx_sign < 0)] += 180
        return direct + alpha
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,647 | tjyiiuan/Graduate-Courses | refs/heads/master | /EECE5639-Computer-Vision/Project-1/libs/FilterApp.py | # -*- coding: utf-8 -*-
import numpy as np
class FilterApp(object):
    """Stateless filtering helpers (all @staticmethods): 1-D/2-D
    convolution-style filters and median filters used by the detector."""
    def __init__(self):
        pass
    @staticmethod
    def apply_1d_filter(bfilter, timage):
        """Apply given 1D filter onto an image.

        The signal is zero-padded on both ends so the output keeps the
        input length.
        NOTE(review): a 1-tap filter (ovrlay == 0) makes the padding
        slice below empty and breaks the assignment -- filters are
        assumed to have at least 2 taps.

        Parameters
        ----------
        bfilter: 1D array-like
            The filter
        timage: array-like
            Targeted image
        """
        image_length = len(timage)
        ovrlay = int(bfilter.shape[0] / 2)
        tmp_array = np.zeros(image_length + 2 * ovrlay)
        tmp_array[ovrlay:-ovrlay] = timage
        res_array = np.zeros(image_length)
        for i in np.arange(image_length) + ovrlay:
            # Elementwise product with the window, then scalar sum.
            local_matrix = tmp_array[i - ovrlay:i + ovrlay + 1]
            res_array[i - ovrlay] = sum(local_matrix * bfilter)
        return res_array
    @staticmethod
    def apply_1d_differential(operator, simage):
        """Apply given 1D filter onto a series of 2D image.

        Filters along the temporal (first) axis with no padding, so the
        output holds image_length - 2*ovrlay frames.

        Parameters
        ----------
        operator: 1D array-like
            The filter, shaped (1, k)
        simage: array-like
            Targeted image sequence, shaped (frames, rows, cols)
        """
        image_length = simage.shape[0]
        operator_length = operator.shape[1]
        ovrlay = int(operator_length / 2)
        res_array = np.zeros((image_length - 2 * ovrlay,
                              simage.shape[1],
                              simage.shape[2]))
        for i in np.arange(image_length - 2 * ovrlay):
            # Weighted sum of the 2*ovrlay+1 frames centered at i+ovrlay.
            local_array = simage[i:i + 2 * ovrlay + 1, :, :]
            temp_zip = list(zip(local_array, operator[0, :]))
            res_array[i] = sum([j[0] * j[1] for j in temp_zip])
        res_array = np.array(res_array)
        return res_array
    @staticmethod
    def apply_2d_filter(bfilter, timage):
        """Apply given 2D filter onto an image.

        Zero-pads by half the (square) filter width on every side, so
        the output keeps the input shape.

        Parameters
        ----------
        bfilter: array-like
            The filter
        timage: array-like
            Targeted image
        """
        image_shape = timage.shape
        ovrlay = int(bfilter.shape[0] / 2)
        tmp_matrix = np.zeros(np.array(image_shape) + 2 * ovrlay)
        tmp_matrix[ovrlay:-ovrlay, ovrlay:-ovrlay] = timage
        res_matrix = np.zeros(timage.shape)
        for i in np.arange(image_shape[0]) + ovrlay:
            for j in np.arange(image_shape[1]) + ovrlay:
                local_matrix = tmp_matrix[i - ovrlay:i + ovrlay + 1,
                                          j - ovrlay:j + ovrlay + 1]
                # sum(sum(...)): first over rows, then over the row sums.
                res_matrix[i - ovrlay, j - ovrlay] = sum(sum(local_matrix * bfilter))
        return res_matrix
    @staticmethod
    def apply_1d_median_filter(n, timage):
        """Applying a n median flter on the input image assuming that the
        border pixels are not changed.

        NOTE(review): the loop border is hard-coded to one pixel
        ([1:-1]) while the window half-width is n // 2, so for n > 3 the
        leading windows index with negative offsets -- appears intended
        only for n == 3; confirm before using larger n.

        Parameters
        ----------
        n: int
            Shape of median filter
        timage: array-like
            Targeted image
        """
        image_shape = timage.shape
        ovrlay = int(n / 2)
        res_matrix = np.copy(timage)
        for i in np.arange(image_shape[0])[1:-1]:
            local_matrix = timage[i - ovrlay:i + ovrlay + 1]
            median = np.median(local_matrix)
            res_matrix[i] = median
        return res_matrix
    @staticmethod
    def apply_2d_median_filter(n, timage):
        """Applying a nxn median filter on the input image assuming that the
        border pixels are not changed.

        NOTE(review): same one-pixel hard-coded border as the 1-D
        variant; only safe for n == 3 -- verify.
        """
        image_shape = timage.shape
        ovrlay = int(n / 2)
        res_matrix = np.copy(timage)
        for i in np.arange(image_shape[0])[1:-1]:
            for j in np.arange(image_shape[1])[1:-1]:
                local_matrix = timage[i - ovrlay:i + ovrlay + 1,
                                      j - ovrlay:j + ovrlay + 1]
                median = np.median(local_matrix)
                res_matrix[i, j] = median
        return res_matrix
| {"/EECE5639-Computer-Vision/Project-2/libs/Harris.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-1/libs/Detector.py": ["/EECE5639-Computer-Vision/Project-1/libs/FilterApp.py", "/EECE5639-Computer-Vision/Project-1/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py"], "/EECE5639-Computer-Vision/Project-2/libs/Mosaic.py": ["/EECE5639-Computer-Vision/Project-2/libs/misc.py", "/EECE5639-Computer-Vision/Project-2/libs/Harris.py", "/EECE5639-Computer-Vision/Project-2/libs/Correspondences.py"], "/EECE5639-Computer-Vision/Project-3/libs/Flow.py": ["/EECE5639-Computer-Vision/Project-3/libs/misc.py"]} |
60,650 | ttimon7/Kivy_OpenGL | refs/heads/master | /glmodel.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import kivy
from kivy.graphics import *
from kivy.graphics import Color, Ellipse
from kivy.graphics.transformation import Matrix
from kivy.graphics.opengl import *
from kivy.resources import resource_find
kivy.require('1.9.1') # replace with your current kivy version !
import sys
import numpy as np
from colored_output import BColors as BC
# Shorthand aliases for the colored console helpers
# (error / warning / info / success) used throughout this module.
printe = BC.printe
printw = BC.printw
printi = BC.printi
printok = BC.printok
class ModelLoader( object ):
    """ Class Description
    Parses one or more Wavefront OBJ files and builds a Model per `o`
    object found.  *srcs* is an iterable of (obj_path, shader_path)
    pairs; parsed models are collected in self.models keyed by name.
    """
    def __init__( self, srcs, **kwargs ):
        self._currentModel = None
        self.models = {}
        # Per-object accumulators, reset by finishObject().
        self.vertices = []
        self.normals = []
        self.texcoords = []
        self.faces = []
        self.counter = 0
        try:
            for src in srcs:
                self.loadModel( src )
                printi( "File processed:", src[0], "\n" )
        # FIX: catch Exception instead of a bare `except:` so SystemExit
        # and KeyboardInterrupt propagate normally instead of being
        # reported as a missing model.
        # NOTE(review): if *srcs* is empty, `src` is unbound here and the
        # error report itself would raise -- confirm callers always pass
        # at least one source.
        except Exception:
            printe( "Model not found:", src[0] )
            print("Unexpected error:", sys.exc_info() )
            sys.exit(1)
    def loadModel( self, src ):
        """ Method Description
        Parses the OBJ file containing model data. Uses the parseOBJFaceData method to extract face information.
        WARNING: Comments correspond to triangulates mesh information.
        """
        objSrc = resource_find( src[0] )
        shaderSrc = resource_find( src[1] )
        with open( objSrc, 'r' ) as f:
            for l in f:
                # Skip comments and smoothing-group directives.
                if l.startswith('#'):
                    continue
                if l.startswith('s'):
                    continue
                l = l.rstrip()
                d = l.split( " " )
                if not d:
                    continue
                elif "o" == d[0]:
                    # New object: flush the previous one before starting.
                    self.finishObject( objSrc, shaderSrc )
                    printi( "Constructing", d[1], "(glmodel.py)" )
                    self._currentModel = d[1]
                elif "v" == d[0]: # v vx vy vz
                    v = list( map( float, d[1:4] ) )
                    self.vertices.append( v )
                elif "vt" == d[0]: # vt u v
                    vt = list( map( float, d[1:3] ) )
                    self.texcoords.append( vt )
                elif "vn" == d[0]: # vn nx ny nz
                    vn = list( map( float, d[1:4] ) )
                    self.normals.append( vn )
                elif "f" == d[0]: # f v/vt/vn
                    faceIndices = [] # [v1, v2, v3]
                    textureIndices = [] # [vt1, vt2]
                    normalsIndices = [] # [vn1, vn2, vn3]
                    for v in d[1:]:
                        w = v.split( '/' )
                        faceIndices.append( int( w[0] ) )
                        # Texture / normal indices are optional; -1 marks
                        # a missing component.
                        if len( w ) >= 2 and len( w[1] ) > 0:
                            textureIndices.append( int( w[1] ) )
                        else:
                            textureIndices.append( -1 )
                        if len( w ) >= 3 and len( w[2] ) > 0:
                            normalsIndices.append( int( w[2] ) )
                        else:
                            normalsIndices.append( -1 )
                    # Appends the touple: ( [v1, v2, v3], [vt1, vt2], [vn1, vn2, vn3] )
                    self.faces.append( ( faceIndices, textureIndices, normalsIndices ) )
        self.finishObject( objSrc, shaderSrc ) # Construct last Model
    def finishObject( self, objSrc, shaderSrc ):
        """ Method Description
        Builds a Model from the accumulated vertex/texture/normal/face
        data and resets the accumulators for the next object.
        WARNING: Comments correspond to triangulated mesh information.
        self.faces is an array of face touples:
            self.faces = [
                ( [v1, v2, v3], [vt1, vt2], [vn1, vn2, vn3] ),
                ...
            ]
        Indices in an OBJ file start from 1 not 0, therefore a correction
        (-1) is needed when accessing the vertex information from the
        corresponding arrays.
        """
        # If this is the first object read, wait until its data has been parsed
        if self._currentModel is None:
            return
        self.counter += 1
        model = Model( name = self._currentModel, src = objSrc, shader = shaderSrc )
        idx = 0
        for f in self.faces:
            verts = f[0] # [v1, v2, v3]
            tcs = f[1] # [vt1, vt2]
            norms = f[2] # [vn1, vn2, vn3]
            for i in range(3):
                # Get vertex components (OBJ indices are 1-based).
                v = self.vertices[verts[i] - 1] # v = [x, y, z]
                # Get texture coordinate components
                t = ( 0.0, 0.0 )
                if tcs[i] != -1:
                    t = self.texcoords[tcs[i] - 1] # t = [u, v]
                # Get normal components
                n = ( 0.0, 0.0, 0.0 )
                if norms[i] != -1:
                    n = self.normals[norms[i] - 1] # n = [x, y, z]
                # Interleaved layout: position, texcoord, normal.
                data = [v[0], v[1], v[2], t[0], t[1], n[0], n[1], n[2]]
                model.vertices.extend( data )
            tri = [idx, idx + 1, idx + 2]
            model.indices.extend( tri )
            idx += 3
        self.models[self._currentModel] = model
        # Reset accumulators for the next `o` object in the file.
        self._currentModel = None
        self.vertices = []
        self.normals = []
        self.texcoords = []
        self.faces = []
    def getModels( self ):
        """Return all parsed Model instances as a list."""
        return list( self.models.values() )
class Model( object ):
    """ Class Description
    A renderable triangle mesh plus its model-space transform state.
    `vertices` is a flat interleaved array whose layout is described by
    `vFormat` (by default [x, y, z, u, v, nx, ny, nz] per vertex);
    `indices` holds triangle indices into that array.
    """
    def __init__( self, **kwargs ):
        self.name = resource_find( kwargs.get( "name", None ) )
        self.src = resource_find( kwargs.get( "src", None ) )
        # BUGFIX: the default shader path contained a typo ("silple.glsl");
        # "shaders/simple.glsl" is the shader actually shipped with the app.
        self.shader = resource_find( kwargs.get( "shader", "shaders/simple.glsl" ) )
        self.vFormat = kwargs.get( "vFormat",
            [
                ( b'v_pos', 3, 'float' ),
                ( b'v_tc0', 2, 'float' ),
                ( b'v_normal', 3, 'float' )
            ]
        )
        # Positions are in World Coordinates
        self.vertices = []
        self.indices = []
        self.rotAngle = 0                       # accumulated rotation (radians)
        self.rotAxis = ( 0, 1, 0 )              # current rotation axis
        self.position = { "x": 0, "y": 0, "z": 0 }
        self.rMatrix = Matrix()                 # rotation matrix
        self.tMatrix = Matrix()                 # translation matrix
        self.mMatrix = Matrix()                 # model matrix (t . r)
        self.nMatrix = Matrix()                 # normal matrix placeholder
        self.applyTransform()
    def _vertexLayout( self ):
        """ Return (stride, pos_offset, normal_offset) in floats, derived
        from vFormat by matching the attribute names. """
        stride = 0
        posOff = None
        normOff = None
        for attrName, attrSize, _attrType in self.vFormat:
            if b"pos" in attrName:
                posOff = stride
            elif b"normal" in attrName:
                normOff = stride
            stride += attrSize
        return stride, posOff, normOff
    def calculateNormals( self ):
        """ Method Description
        Recompute flat per-face normals in place for every triangle.
        BUGFIX (three defects in the original):
          * `len(self.indices) / 3` is a float in Python 3, so `range()`
            raised TypeError;
          * vertex indices were used as raw offsets into the interleaved
            `self.vertices` array instead of being multiplied by the
            per-vertex stride;
          * normals were written at offset 3 (inside the texture
            coordinates) instead of at the normal attribute's offset.
        """
        stride, posOff, normOff = self._vertexLayout()
        vs = self.vertices
        for i in range( len( self.indices ) // 3 ):
            fi = i * 3
            v1i = self.indices[fi] * stride
            v2i = self.indices[fi + 1] * stride
            v3i = self.indices[fi + 2] * stride
            p1 = [vs[v1i + posOff + c] for c in range( 3 )]
            p2 = [vs[v2i + posOff + c] for c in range( 3 )]
            p3 = [vs[v3i + posOff + c] for c in range( 3 )]
            u, v = [0, 0, 0], [0, 0, 0]
            for j in range( 3 ):
                v[j] = p2[j] - p1[j]
                u[j] = p3[j] - p1[j]
            # n = u x v (cross product of the two edge vectors)
            n = [
                u[1] * v[2] - u[2] * v[1],
                u[2] * v[0] - u[0] * v[2],
                u[0] * v[1] - u[1] * v[0],
            ]
            for k in range( 3 ):
                vs[v1i + normOff + k] = n[k]
                vs[v2i + normOff + k] = n[k]
                vs[v3i + normOff + k] = n[k]
    def addRotation( self, a ):
        """ Method Description
        Increase rotation angle (rotAngle) by a (degrees).
        """
        self.rotAngle += np.radians( a )
        self.rMatrix.rotate( np.radians( a ), *self.rotAxis )
    def setRotation( self, a, x, y, z ):
        """ Method Description
        Rotate object by angle a (degrees) around the vector (x, y, z).
        Sets rotAngle to a, and rotAxis to (x, y, z).  No-op when the
        requested rotation equals the current one.
        """
        if self.rotAngle != np.radians( a ) or self.rotAxis[0] != x or self.rotAxis[1] != y or self.rotAxis[2] != z:
            self.rotAngle = np.radians( a )
            self.rotAxis = ( x, y, z )
            self.rMatrix.identity().rotate( self.rotAngle, x, y, z )
    def addTranslation( self, x, y, z ):
        """ Method Description
        Add the vector (x, y, z) to the current position vector.
        BUGFIX: the position components were overwritten instead of
        accumulated, which contradicted both this docstring and the
        accumulating tMatrix.translate() call below.
        """
        self.position["x"] += x
        self.position["y"] += y
        self.position["z"] += z
        self.tMatrix.translate( x, y, z )
    def setTranslation( self, x, y, z ):
        """ Method Description
        Move to position (x, y, z). Equivalently, replace the position
        vector by (x, y, z).  No-op when already at that position.
        """
        if x != self.position["x"] or y != self.position["y"] or z != self.position["z"]:
            self.position["x"] = x
            self.position["y"] = y
            self.position["z"] = z
            self.tMatrix.identity().translate( x, y, z )
    def applyTransform( self ):
        # NOTE(review): this scan of vFormat computes an offset/length pair
        # but never uses them; kept as-is (it has no observable effect) —
        # see _vertexLayout() for the working equivalent.
        offset = 0
        length = 0
        entryLength = 0
        flag = True
        for e in self.vFormat:
            entryLength += e[1]
            if b"pos" in e[0]:
                length = e[1]
                flag = False
            elif flag:
                offset += entryLength
    def getMvMatrix( self, vMatrix ):
        """ Return the modelview matrix vMatrix . mMatrix. """
        return vMatrix.multiply( self.mMatrix )
    def getNormMatrix( self, vMatrix ):
        """ Return the normal matrix derived from the modelview matrix. """
        return vMatrix.multiply( self.mMatrix ).normal_matrix()
    def setup( self ):
        """ Method Description
        Hook for loading the model data into the Kivy OpenGL
        implementation; drawing is currently performed by the widgets in
        main.py, so this is intentionally a no-op.
        BUGFIX: the class used to define `setup` twice — the first
        definition referenced a nonexistent `self.renderContext` and was
        silently shadowed by this second one; the dead duplicate has been
        removed.
        Reminder on matrix order (Kivy: m1.multiply(m2) == m2 . m1):
            mMatrix  = tMatrix.multiply( rMatrix )
            mvMatrix = vMatrix.multiply( mMatrix )
        Mesh drawing needs: flat interleaved `vertices`, `indices`
        (max 65535 in OpenGL ES 2.0), a `mode` such as 'triangles', and a
        `fmt` list of (attribute_name_bytes, size, type) tuples matching
        the shader attributes.
        """
        pass
    def update( self, **kwargs ):
        """ Method Description
        Calculate the model matrix (mMatrix).
        In kivy, m1.multiply( m2 ) means: m2.m1 where . is the matrix product operator.
        """
        self.mMatrix = self.tMatrix.multiply( self.rMatrix )
| {"/glmodel.py": ["/colored_output.py"], "/main.py": ["/colored_output.py", "/glmodel.py"]} |
60,651 | ttimon7/Kivy_OpenGL | refs/heads/master | /colored_output.py | #!/usr/bin/python3
class BColors:
    """ Class Description
    Container for ANSI escape sequences, plus small helpers for printing
    coloured and tagged ([OK]/[INFO]/[WARNING]/[ERROR]) messages.
    """
    HEADER = '\033[95m'
    WARNING = '\033[93m'
    ERROR = '\033[91m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    FAIL = '\033[91m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    ENDC = '\033[0m'
    @staticmethod
    def concatArgs( *args ):
        """ Stringify every argument and join them, each preceded by a
        single space. """
        return "".join( " " + str( a ) for a in args )
    @staticmethod
    def colorText( *args, **kwargs ):
        """ Return the concatenated arguments wrapped in the requested
        colour (kwarg "color") and optional bold (kwarg "bold") codes. """
        style = BColors.BOLD if kwargs.get( "bold", False ) else ""
        style += kwargs.get( "color", "" )
        return style + BColors.concatArgs( *args ) + BColors.ENDC
    @staticmethod
    def printc( *args, **kwargs ):
        """ Print the arguments coloured/bold as described by colorText. """
        print( BColors.colorText( *args, **kwargs ) )
    @staticmethod
    def _tagHeader( tag, color ):
        """ Build a "[TAG] " prefix with the tag rendered bold and coloured. """
        return "[" + BColors.BOLD + color + tag + BColors.ENDC + "] "
    @staticmethod
    def printok( *args ):
        """ Print the arguments behind a green [OK] tag. """
        print( BColors._tagHeader( "OK", BColors.OKGREEN ) + BColors.concatArgs( *args ) )
    @staticmethod
    def printi( *args ):
        """ Print the arguments behind a blue [INFO] tag. """
        print( BColors._tagHeader( "INFO", BColors.OKBLUE ) + BColors.concatArgs( *args ) )
    @staticmethod
    def printw( *args ):
        """ Print the arguments behind a yellow [WARNING] tag. """
        print( BColors._tagHeader( "WARNING", BColors.WARNING ) + BColors.concatArgs( *args ) )
    @staticmethod
    def printe( *args ):
        """ Print the arguments behind a red [ERROR] tag. """
        print( BColors._tagHeader( "ERROR", BColors.ERROR ) + BColors.concatArgs( *args ) )
if __name__ == "__main__":
    # Smoke test: exercise each tagged printer once when run as a script.
    BColors.printok( "test" )
    BColors.printi( "test" )
    BColors.printw( "test" )
    BColors.printe( "test" )
| {"/glmodel.py": ["/colored_output.py"], "/main.py": ["/colored_output.py", "/glmodel.py"]} |
60,652 | ttimon7/Kivy_OpenGL | refs/heads/master | /main.py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import kivy
from kivy.app import App
from kivy.config import Config
from kivy.graphics import *
from kivy.graphics import Color, Ellipse
from kivy.graphics.transformation import Matrix
from kivy.graphics.opengl import *
from kivy.resources import resource_find
from kivy.lang import Builder
from kivy.properties import ObjectProperty, ListProperty
from kivy.core.window import Window
from kivy.clock import Clock
from kivy.uix.widget import Widget
from kivy.uix.label import Label
from kivy.uix.image import Image
from kivy.uix.button import Button
from kivy.uix.textinput import TextInput
from kivy.uix.actionbar import ActionButton, ActionGroup
from kivy.uix.popup import Popup
from kivy.uix.scrollview import ScrollView
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.gridlayout import GridLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.relativelayout import RelativeLayout
kivy.require('1.9.1') # replace with your current kivy version !
import sys, os, random, json
import pygame
import numpy as np
from numpy import array
from colored_output import BColors as BC
printe = BC.printe
printw = BC.printw
printi = BC.printi
printok = BC.printok
from glmodel import *
class OpenGLWidgetRC( Widget ):
    """ Class Description
    Attempts to render multiple 3D models using RenderContexts, where each 3D model has its own shader stored in the associated RenderContext.
    ---- FIXME -----------------------------------------------------------------
    The shaders stored in the models RenderContexts are applied
    correctly - at least I think, since the transformations, evoked by
    the application of the rotation and translation matrices take place
    correctly, and effect only the models they should -, but then,
    ----------------------------------------------------------------------------
    """
    opengl_widget = ObjectProperty( None )
    def __init__( self, **kwargs ): # Normal constructor method
        # Creating a storage for the RenderContexts associated with the 3D models
        self.instructions = InstructionGroup()
        # Preparing canvas for storing instructions and the shader
        self.canvas = RenderContext()
        self.canvas.shader.source = "shaders/simple.glsl" # FIXME something goes wrong here, maybe
        # Per-model bookkeeping, filled in init(): one RenderContext per model.
        self.rcs = []
        self.rots = []
        self.trans = []
        self.meshes = []
        self.counter = 0
        # Projection and view matrices shared by every model in this widget.
        self.pMatrix = Matrix()
        self.vMatrix = Matrix()
        Window.bind( on_keyboard = self.keyboard_handler )
        super( OpenGLWidgetRC, self ).__init__( **kwargs )
    def init( self, **kwargs ):
        # Deferred initialisation, invoked from RootWidget.init() after the
        # models have been loaded (presumably so self.width / self.height
        # are valid by then — TODO confirm).
        # Loading models
        self.models = kwargs.get( "models", [] )
        # Creating instruction spaces for the models
        for i in range( len( self.models ) ):
            m = self.models[i]
            self.rcs.append( RenderContext( compute_normal_mat = True ) )
            self.rcs[i].shader.source = m.shader
            self.instructions.add( self.rcs[i] )
        # Initializing projection and view matrices
        aspect = self.width / float( self.height )
        self.pMatrix.perspective( 45.0, aspect, 1.0, 80.0 )
        self.vMatrix = Matrix().look_at(
            0.0, 0.0, 15.0, # Eye coordinates x, y, z
            0.0, 0.0, 0.0, # Reference point x, y, z (Eye and the up vector are given relative to this)
            0.0, 1.0, 0.0) # Up vector x, y, z
        self.canvas.add( self.instructions )
        with self.canvas:
            self.cb = Callback( self.setup_gl_context )
            PushMatrix()
            self.setup_scene_with_rcs()
            PopMatrix()
            # NOTE(review): this rebinding discards the reference to the first
            # Callback; both callbacks remain on the canvas, but only the
            # second stays reachable through self.cb.
            self.cb = Callback( self.reset_gl_context )
        Clock.schedule_once( self.update_glsl, 1 / 40. )
    def setup_gl_context(self, *args):
        # Enable depth testing for the 3D draw pass.
        glEnable(GL_DEPTH_TEST)
    def reset_gl_context(self, *args):
        # Restore Kivy's default 2D state after the 3D draw pass.
        glDisable(GL_DEPTH_TEST)
    def update_glsl(self, *args):
        # Per-frame update: advance each model's rotation, refresh its
        # translation, and push the projection/modelview matrices into the
        # model's own RenderContext.  Re-schedules itself at ~40 fps.
        self.counter += 1
        aspect = self.width / float(self.height)
        self.pMatrix = Matrix().view_clip( -aspect, aspect, -1, 1, 1, 100, 1 )
        for i in range( len( self.models ) ):
            PushMatrix()
            m = self.models[i]
            m.addRotation( 1 )
            # Hard-coded demo placement: first model up, second model down.
            if i == 0:
                m.setTranslation( -1, 3, 0 )
            elif i == 1:
                m.setTranslation( -1, -3, 0 )
            m.update()
            self.rcs[i]['projection_mat'] = self.pMatrix
            self.rcs[i]['modelview_mat'] = m.getMvMatrix( self.vMatrix )
            PopMatrix()
        Clock.schedule_once( self.update_glsl, 1 / 40. )
    def setup_scene_with_rcs( self ):
        # Build one Mesh per model inside that model's RenderContext so each
        # mesh is drawn with its own shader.
        Color( 0, 0, 0, 1 )
        for i in range( len( self.models ) ):
            m = self.models[i]
            with self.rcs[i]:
                Color( 0, 0, 0, 1 )
                PushMatrix()
                """
                If the fragment shader has a "uniform mat4 normal_mat" property, the UpdateNormalMatrix() will update it.
                """
                UpdateNormalMatrix()
                self.meshes.append(
                    Mesh(
                        vertices = m.vertices,
                        indices = m.indices,
                        fmt = m.vFormat,
                        mode = 'triangles',
                    )
                )
                PopMatrix()
    def keyboard_handler( self, key, asciiCode, code, text, *args, **kwargs ):
        """ Method Description
        Changes viewer position by updating the view matrix
        """
        offsetX = 0.
        offsetZ = 0.
        if asciiCode == 119 or asciiCode == 273: # w or up
            offsetZ += 0.5
        if asciiCode == 97 or asciiCode == 276: # a or left
            offsetX -= 0.5
        if asciiCode == 115 or asciiCode == 274: # s or down
            offsetZ -= 0.5
        if asciiCode == 100 or asciiCode == 275: # d or right
            offsetX += 0.5
        self.vMatrix.translate( offsetX, 0., offsetZ )
class OpenGLWidget( Widget ):
    """
    Renders multiple 3D models while relying on the single RenderContext of its canvas.
    ---- FIXME -----------------------------------------------------------------
    This renders the models **almost** as it should. The shader loaded
    to self.canvas is applyied correctly, but changing the shader on
    the fly does not seem to work, therfore the correction to the model
    matrix effects all the models displayed unifirmly.
    ----------------------------------------------------------------------------
    """
    opengl_widget = ObjectProperty( None )
    def __init__( self, **kwargs ): # Normal constructor method
        # Single shared RenderContext: every model is drawn with the same
        # shader and the same uniform matrices.
        self.canvas = RenderContext( compute_normal_mat = True )
        printi( "Using canvas shader:", str( resource_find( 'shaders/simple.glsl' ) ) )
        self.canvas.shader.source = resource_find( 'shaders/simple.glsl' )
        self.pMatrix = Matrix()
        self.vMatrix = Matrix()
        # Registering keyboard event handler
        Window.bind( on_keyboard = self.keyboard_handler )
        super( OpenGLWidget, self ).__init__( **kwargs )
    def init( self, **kwargs ):
        # Deferred initialisation, invoked from RootWidget.init() after the
        # models have been loaded.
        # Loading models
        self.models = kwargs.get( "models", [] )
        # Initializing projection and view matrices
        aspect = self.width / float( self.height )
        self.pMatrix.perspective( 45.0, aspect, 1.0, 80.0 )
        self.vMatrix = Matrix().look_at(
            0.0, 0.0, 20.0, # Eye coordinates x, y, z
            0.0, 0.0, 0.0, # Reference point x, y, z (Eye and the up vector are given relative to this)
            0.0, 1.0, 0.0) # Up vector x, y, z
        self.canvas['projection_mat'] = self.pMatrix
        self.canvas['modelview_mat'] = self.vMatrix
        with self.canvas:
            self.cb = Callback( self.setup_gl_context )
            PushMatrix()
            self.setup_scene()
            PopMatrix()
            # NOTE(review): rebinds self.cb; the first Callback stays on the
            # canvas but is no longer referenced from this attribute.
            self.cb = Callback( self.reset_gl_context )
        Clock.schedule_once( self.update_glsl, 1 / 40. )
    def setup_gl_context(self, *args):
        # Enable depth testing for the 3D draw pass.
        glEnable(GL_DEPTH_TEST)
    def reset_gl_context(self, *args):
        # Restore Kivy's default 2D state after the 3D draw pass.
        glDisable(GL_DEPTH_TEST)
    def update_glsl(self, *args):
        # Per-frame update.  Because there is only one RenderContext, each
        # assignment to canvas['modelview_mat'] overwrites the previous one
        # (see the FIXME comments below).  Re-schedules itself at ~40 fps.
        aspect = self.width / float(self.height)
        self.pMatrix = Matrix().view_clip( -aspect, aspect, -1, 1, 1, 100, 1 )
        for i in range( len( self.models ) ):
            PushMatrix()
            m = self.models[i]
            m.addRotation( 1 )
            if i == 0:
                m.setTranslation( 4.5, 3, 5 ) # FIXME gets overwritten
            elif i == 1:
                m.setTranslation( 4.5, 0, 5 ) # FIXME since this comes later, this overrides
            m.update()
            self.canvas['projection_mat'] = self.pMatrix
            self.canvas['modelview_mat'] = m.getMvMatrix( self.vMatrix )
            PopMatrix()
        Clock.schedule_once( self.update_glsl, 1 / 40. )
    def setup_scene(self):
        # Build one Mesh per model directly on the shared canvas.
        Color( 0, 0, 0, 1 )
        for m in self.models:
            """
            If the fragment shader has a "uniform mat4 normal_mat" property, the UpdateNormalMatrix() will update it.
            """
            PushMatrix()
            UpdateNormalMatrix()
            Mesh(
                vertices=m.vertices,
                indices=m.indices,
                fmt=m.vFormat,
                mode='triangles',
            )
            PopMatrix()
    def keyboard_handler( self, key, asciiCode, code, text, *args, **kwargs ):
        """ Method Description
        Changes viewer position by updating the view matrix
        """
        offsetX = 0.
        offsetZ = 0.
        if asciiCode == 119 or asciiCode == 273: # w or up
            offsetZ += 0.5
        if asciiCode == 97 or asciiCode == 276: # a or left
            offsetX -= 0.5
        if asciiCode == 115 or asciiCode == 274: # s or down
            offsetZ -= 0.5
        if asciiCode == 100 or asciiCode == 275: # d or right
            offsetX += 0.5
        self.vMatrix.translate( offsetX, 0., offsetZ )
class RootWidget( BoxLayout ):
    """ Root layout: loads the demo models and forwards them to both
    OpenGL widgets (declared in kvs/RootWidget.kv). """
    # Project root directory, used to resolve resource paths.
    path = os.path.abspath( os.path.dirname( "." ) )
    menu = ObjectProperty( None )
    opengl_widget_rc = ObjectProperty( None )
    opengl_widget = ObjectProperty( None )
    def __init__( self, **kwargs ): # Normal constructor method
        super( RootWidget, self ).__init__( **kwargs )
    def init( self ): # Trick to perform initialization when I need it, not upon instantiation
        # Load the two demo meshes, each paired with its own shader source.
        ml = ModelLoader( [( "models/suzanne.obj", "shaders/simple.glsl" ), ( "models/torus.obj", "shaders/simple_blue.glsl" )] )
        models = ml.getModels()
        printi( "Number of models retrieved: ", str( len( models ) ) )
        # XXX Widget relying on multiple RenderContexts added to an InstructionGroup passed to self.canvas.
        self.opengl_widget_rc.init( models = models )
        # XXX Widget relying on a single RenderContext (self.canvas = RenderContext())
        self.opengl_widget.init( models = models )
class MyApp( App ):
    """ Application entry class: sizes the window from the stored config
    and builds the widget tree from kvs/RootWidget.kv. """
    path = os.path.abspath( os.path.dirname( "." ) )
    def build( self ):
        self.title = "MyApp"
        self.icon = str( os.path.join( self.path, "icons", "icon.png" ) )
        # Setting the window's size
        self.width = self.config.getint( 'graphics', 'width' )
        self.height = self.config.getint( 'graphics', 'height' )
        Window.size = ( self.width, self.height )
        # Build the widget tree, then run the deferred initialisation that
        # loads the OBJ models (see RootWidget.init).
        self.root = Builder.load_file( os.path.join( self.path, "kvs", "RootWidget.kv" ) )
        self.root.init()
        return self.root
    def build_config( self, config ): # If there is no config file, create one, with the following parameters
        config.setdefaults( 'graphics', {
            'width': '800',
            'height': '200'
        })
if __name__ == '__main__':
    # Script entry point: start the Kivy application.
    MyApp().run()
| {"/glmodel.py": ["/colored_output.py"], "/main.py": ["/colored_output.py", "/glmodel.py"]} |
60,655 | Ipsedo/PolyhedronGAN | refs/heads/main | /utils.py | import sys
import os
def block_print():
    """Silence subsequent print() output by redirecting stdout to the null
    device.  NOTE(review): the devnull handle is never closed and the
    previous stdout object is not saved; enable_print() restores the
    interpreter's original sys.__stdout__ instead."""
    sys.stdout = open(os.devnull, 'w')
# Restore
def enable_print():
    """Re-enable print() output by restoring the interpreter's original
    stdout (sys.__stdout__)."""
    sys.stdout = sys.__stdout__
| {"/train.py": ["/networks.py"], "/read_model.py": ["/utils.py"]} |
60,656 | Ipsedo/PolyhedronGAN | refs/heads/main | /train.py | import networks
import torch as th
from tqdm import tqdm
import argparse
if __name__ == '__main__':
    parser = argparse.ArgumentParser("Main - train")
    parser.add_argument("--tensor-file", type=str, required=True)
    args = parser.parse_args()
    # Pre-voxelised training data, shape (N, 1, S, S, S); produced by
    # read_model.py "generate".
    data = th.load(args.tensor_file)
    # Noise source: a 32-channel multivariate normal with a random
    # covariance (rand_mat^T . rand_mat is symmetric positive
    # semi-definite; presumably PD for a random Gaussian matrix —
    # TODO confirm MultivariateNormal accepts it in all runs).
    rand_channel = 32
    mean_vec = th.randn(rand_channel)
    rand_mat = th.randn(rand_channel, rand_channel)
    cov_mat = rand_mat.t().matmul(rand_mat)
    multi_norm = th.distributions.MultivariateNormal(mean_vec, cov_mat)
    gen = networks.Generator()
    disc = networks.Disciminator()
    gen.cuda()
    disc.cuda()
    disc_lr = 1e-5
    gen_lr = 2e-5
    disc_optimizer = th.optim.SGD(disc.parameters(), lr=disc_lr, momentum=0.5)
    gen_optimizer = th.optim.SGD(gen.parameters(), lr=gen_lr, momentum=0.9)
    def __gen_rand(
            curr_batch_size: int
    ) -> th.Tensor:
        # Sample (B, 8, 8, 8, 32) noise and move the channel axis to
        # position 1, giving the (B, 32, 8, 8, 8) layout Generator expects.
        return multi_norm.sample(
            (curr_batch_size, 8, 8, 8)
        ).permute(0, 4, 1, 2, 3)
    nb_epoch = 30
    batch_size = 4
    # NOTE(review): integer division drops the last incomplete batch.
    nb_batch = data.size(0) // batch_size
    for e in range(nb_epoch):
        tqdm_bar = tqdm(range(nb_batch))
        for b_idx in tqdm_bar:
            i_min = b_idx * batch_size
            i_max = (b_idx + 1) * batch_size
            # ---- Discriminator step ----
            x_real = data[i_min:i_max, :, :, :, :].cuda()
            rand_fake = __gen_rand(i_max - i_min).cuda()
            x_fake = gen(rand_fake)
            out_real = disc(x_real)
            out_fake = disc(x_fake)
            # Monitoring: mean classification error on real (tp) and fake (tn).
            error_tp = (1. - out_real).mean().item()
            error_tn = out_fake.mean().item()
            disc_loss = networks.discriminator_loss(out_real, out_fake)
            gen_optimizer.zero_grad()
            disc_optimizer.zero_grad()
            disc_loss.backward()
            disc_optimizer.step()
            disc_loss = disc_loss.item()
            # Train generator
            rand_fake = __gen_rand(i_max - i_min).cuda()
            x_fake = gen(rand_fake)
            out_fake = disc(x_fake)
            gen_loss = networks.generator_loss(out_fake)
            disc_optimizer.zero_grad()
            gen_optimizer.zero_grad()
            gen_loss.backward()
            gen_optimizer.step()
            # Mean gradient norms for monitoring.  NOTE(review): at this
            # point disc's gradients were zeroed and then repopulated by
            # gen_loss.backward(), so disc_grad_norm reflects the generator
            # objective, not the discriminator step above — confirm intent.
            gen_grad_norm = th.tensor(
                [p.grad.norm() for p in gen.parameters()]
            ).mean()
            disc_grad_norm = th.tensor(
                [p.grad.norm() for p in disc.parameters()]
            ).mean()
            tqdm_bar.set_description(
                f"Epoch {e} : "
                f"disc_loss = {disc_loss:.6f}, "
                f"gen_loss = {gen_loss:.6f}, "
                f"e_tp = {error_tp:.4f}, "
                f"e_tn = {error_tn:.4f}, "
                f"gen_gr = {gen_grad_norm.item():.4f}, "
                f"disc_gr = {disc_grad_norm.item():.4f}"
            )
        # End of epoch: sample 10 generated volumes and checkpoint them.
        with th.no_grad():
            gen.eval()
            rand_gen_sound = __gen_rand(10).cuda()
            gen_model = gen(rand_gen_sound).cpu().detach()
            th.save(gen_model, f"./out/gen_model_{e}.pt")
| {"/train.py": ["/networks.py"], "/read_model.py": ["/utils.py"]} |
60,657 | Ipsedo/PolyhedronGAN | refs/heads/main | /read_model.py | import voxlib.voxelize as voxel
import stl
import numpy as np
from voxelfuse.voxel_model import VoxelModel, Axes
import tqdm
import utils
import torch as th
import torch.nn.functional as fun
import matplotlib.pyplot as plt
import os
import argparse
def gen_rand_rotation(stl_path: str, out_file: str) -> None:
    """Load an STL mesh, rotate it about a uniformly random axis by a
    uniformly random angle in [0, 2*pi), and save it to ``out_file``."""
    loaded = stl.mesh.Mesh.from_file(stl_path)
    # Random direction: a point in [-1, 1]^3 normalised to unit length.
    axis = np.random.rand(3) * 2 - 1
    axis = axis / np.linalg.norm(axis)
    angle = np.random.rand() * np.pi * 2
    loaded.rotate(axis, angle)
    loaded.save(out_file)
def voxelise_model(
        model_path: str, size: int, random_scale: bool
) -> th.Tensor:
    """Voxelise a mesh file into a (1, size, size, size) occupancy tensor.

    When ``random_scale`` is True, the voxelisation resolution is drawn
    uniformly from [0.9, 1.0] * size and rounded down to an even number.
    """
    if random_scale:
        scaled_size = int(np.random.uniform(0.9, 1.0) * size)
        scaled_size -= scaled_size % 2
    else:
        scaled_size = size
    points_generator = voxel.voxelize(
        model_path, resolution=scaled_size
    )
    # one channel
    model_mat = th.zeros(1, size, size, size)
    # Track the bounding box of the produced voxels; the minimum corner is
    # used below to shift all coordinates so indexing starts at 0.
    min_x, min_y, min_z = scaled_size, scaled_size, scaled_size
    max_x, max_y, max_z = -scaled_size, -scaled_size, -scaled_size
    points = []
    for p in points_generator:
        min_x = min(p[0], min_x)
        min_y = min(p[1], min_y)
        min_z = min(p[2], min_z)
        max_x = max(p[0], max_x)
        max_y = max(p[1], max_y)
        max_z = max(p[2], max_z)
        points.append(p)
    for p in points:
        # Shift voxel coordinates so the model's minimum corner is at 0.
        p_new = p[0] - min_x, \
                p[1] - min_y, \
                p[2] - min_z
        # NOTE(review): p_new_2 (a centred variant) is computed but only
        # appears in the debug dump below; indexing uses p_new, so the
        # model sits in the low corner of the grid, not the centre.
        p_new_2 = p_new[0] + size // 2, \
                  p_new[1] + size // 2, \
                  p_new[2] + size // 2
        try:
            model_mat[0, p_new[0], p_new[1], p_new[2]] = 1
        except Exception as e:
            # Out-of-range voxel: dump the bookkeeping state, then re-raise.
            print(min_x, min_y, min_z)
            print(max_x, max_y, max_z)
            print(size)
            print(scaled_size)
            print(p)
            print(p_new)
            print(p_new_2)
            raise e
    return model_mat
def voxelise_model_2(model_path: str, size: int,
                     random_scale: bool) -> th.Tensor:
    """Voxelise a mesh with voxelfuse, applying random axis rotations and an
    optional random scale, then zero-pad to (1, size, size, size)."""
    if random_scale:
        scaled_size = int(np.random.uniform(0.5, 1.0) * size)
        scaled_size -= scaled_size % 2
    else:
        scaled_size = size
    # voxelfuse prints progress output; silence it for the duration.
    utils.block_print()
    mesh = VoxelModel.fromMeshFile(
        model_path, resolution=scaled_size
    )
    # NOTE(review): rand_vec is computed but never used — the rotations
    # below are about the fixed X/Y/Z axes (it still consumes two NumPy
    # RNG draws, so removing it would shift the random sequence).
    rand_vec = np.random.rand(3) * 2 - 1
    rand_vec /= np.linalg.norm(rand_vec)
    mesh = mesh.rotate(np.random.rand() * 360., Axes.X)
    mesh = mesh.rotate(np.random.rand() * 360., Axes.Y)
    mesh = mesh.rotate(np.random.rand() * 360., Axes.Z)
    mesh = mesh.scaleToSize((scaled_size, scaled_size, scaled_size))
    utils.enable_print()
    # Padding needed per axis to reach the target cube size.
    to_pad = [
        size - mesh.voxels.shape[0],
        size - mesh.voxels.shape[1],
        size - mesh.voxels.shape[2]
    ]
    # torch.nn.functional.pad takes (before, after) pairs starting from the
    # LAST dimension, hence the reversed axis order; odd remainders go to
    # the "after" side.
    to_pad = [
        to_pad[2] // 2,
        to_pad[2] // 2 + to_pad[2] % 2,
        to_pad[1] // 2,
        to_pad[1] // 2 + to_pad[1] % 2,
        to_pad[0] // 2,
        to_pad[0] // 2 + to_pad[0] % 2,
    ]
    return fun.pad(
        th.tensor(mesh.voxels.astype(np.int16)).unsqueeze(0),
        to_pad
    )
if __name__ == '__main__':
    # CLI with three sub-commands:
    #   read     - voxelise one model and display it;
    #   generate - voxelise nb_example random variants into a tensor file;
    #   view     - display a sample from a previously generated tensor file.
    parser = argparse.ArgumentParser("Main - voxelise")
    parser.add_argument("model", type=str, help="STL/OBJ model")
    parser.add_argument("-s", type=int, required=True, dest="size")
    sub_parser = parser.add_subparsers()
    sub_parser.required = True
    sub_parser.dest = "mode"
    read_parser = sub_parser.add_parser("read")
    gen_parser = sub_parser.add_parser("generate")
    view_parser = sub_parser.add_parser("view")
    gen_parser.add_argument("nb_example", type=int)
    gen_parser.add_argument("--tensor-out-path", type=str, required=True)
    view_parser.add_argument("tensor_file", type=str)
    args = parser.parse_args()
    if args.mode == "read":
        print("read")
        mat_cub = voxelise_model_2(args.model, args.size, True)
        print(mat_cub.size())
        fig = plt.figure()
        # NOTE(review): fig.gca(projection='3d') is deprecated/removed in
        # recent Matplotlib; fig.add_subplot(projection='3d') is the
        # modern equivalent — confirm the pinned Matplotlib version.
        ax = fig.gca(projection='3d')
        ax.voxels(mat_cub.squeeze(0))
        plt.show()
    elif args.mode == "generate":
        print("generate")
        res_tensor = th.empty(args.nb_example, 1, args.size, args.size,
                              args.size)
        for i in tqdm.tqdm(range(args.nb_example)):
            res_tensor[i, :, :, :, :] = voxelise_model_2(
                args.model, args.size, True
            )
        th.save(res_tensor, args.tensor_out_path)
    elif args.mode == "view":
        print("view")
        models_tensor = th.load(args.tensor_file)
        fig = plt.figure()
        ax = fig.gca(projection='3d')
        # Show the third sample from the file.
        ax.voxels(models_tensor[2].squeeze(0))
        plt.show()
| {"/train.py": ["/networks.py"], "/read_model.py": ["/utils.py"]} |
60,658 | Ipsedo/PolyhedronGAN | refs/heads/main | /networks.py | import torch as th
import torch.nn as nn
class ReLU1(nn.Module):
    """Hard-clipped ReLU: clamps its input element-wise to [0, 1]."""

    def __init__(self):
        super().__init__()

    def forward(self, x: th.Tensor) -> th.Tensor:
        # min(max(x, 0), 1), expressed as a single clamp.
        return th.clamp(x, 0., 1.)
class Generator(nn.Module):
    """Transposed-convolution generator.

    Maps a (B, 32, d, d, d) noise volume to a (B, 1, 8d, 8d, 8d)
    occupancy grid; Hardsigmoid keeps the output in [0, 1].
    """

    def __init__(self):
        super().__init__()
        # Three stride-2 upsampling stages (x8 spatial) followed by a
        # stride-1 projection down to a single channel.
        stages = [
            nn.ConvTranspose3d(
                32, 20,
                kernel_size=7, stride=2,
                output_padding=1, padding=3
            ),
            nn.SELU(),
            nn.ConvTranspose3d(
                20, 10,
                kernel_size=5, stride=2,
                padding=2, output_padding=1
            ),
            nn.SELU(),
            nn.ConvTranspose3d(
                10, 6,
                kernel_size=3, stride=2,
                padding=1, output_padding=1
            ),
            nn.SELU(),
            nn.ConvTranspose3d(
                6, 1,
                kernel_size=3, stride=1,
                padding=1
            ),
            nn.Hardsigmoid(),
        ]
        self.__convs = nn.Sequential(*stages)

    def forward(self, x_rand: th.Tensor) -> th.Tensor:
        """Generate a voxel grid from the noise tensor ``x_rand``."""
        return self.__convs(x_rand)
class Disciminator(nn.Module):
    """3-D convolutional discriminator.

    Expects (B, 1, 64, 64, 64) voxel grids: three conv + max-pool stages
    reduce the volume to (B, 10, 8, 8, 8), which is flattened and passed
    through a two-layer classifier ending in a sigmoid probability.
    (Class name typo kept for backward compatibility with callers.)
    """

    def __init__(self):
        super().__init__()
        # Feature extractor: each stage halves the spatial resolution.
        self.__convs = nn.Sequential(
            nn.Conv3d(
                1, 4,
                kernel_size=3, padding=1
            ),
            nn.MaxPool3d(2, 2),
            nn.SELU(),
            nn.Conv3d(
                4, 8,
                kernel_size=5, padding=2
            ),
            nn.MaxPool3d(2, 2),
            nn.SELU(),
            nn.Conv3d(
                8, 10,
                kernel_size=5, padding=2
            ),
            nn.MaxPool3d(2, 2),
            nn.SELU(),
        )
        # Classifier head over the flattened 10 * 8^3 feature volume.
        self.__lins = nn.Sequential(
            nn.Linear(10 * 8 ** 3, 4096),
            nn.SELU(),
            nn.Linear(4096, 1),
            nn.Sigmoid(),
        )

    def forward(self, data: th.Tensor) -> th.Tensor:
        """Return the per-sample probability that ``data`` is real."""
        features = self.__convs(data).flatten(1, -1)
        return self.__lins(features)
def discriminator_loss(y_real: th.Tensor, y_fake: th.Tensor) -> th.Tensor:
    """Discriminator objective (base-2 logs): maximise log2(D(real)) +
    log2(1 - D(fake)), returned negated for minimisation."""
    real_term = th.log2(y_real)
    fake_term = th.log2(1. - y_fake)
    return (real_term + fake_term).mean().neg()
def generator_loss(y_fake: th.Tensor) -> th.Tensor:
    """Generator objective (base-2 logs): maximise log2(D(fake)),
    returned negated for minimisation."""
    return th.log2(y_fake).mean().neg()
if __name__ == '__main__':
    # Smoke test: push random noise through the generator, print the output
    # shape and positive-voxel count, then feed it to the discriminator.
    gen = Generator()
    disc = Disciminator()
    x = th.rand(3, 32, 8, 8, 8)
    o = gen(x)
    print(o.size())
    print((o > 0).sum())
    print(o.size())
    o = disc(o)
    print(o.size())
| {"/train.py": ["/networks.py"], "/read_model.py": ["/utils.py"]} |
60,688 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/custom_validators.py | from django.utils.translation import gettext as _, ngettext # https://docs.python.org/2/library/gettext.html#gettext.ngettext
import gzip
import re
from difflib import SequenceMatcher
from pathlib import Path

from django.core.exceptions import FieldDoesNotExist, ValidationError
# https://docs.djangoproject.com/en/2.0/_modules/django/contrib/auth/password_validation/#MinimumLengthValidator
class MyCustomMinimumLengthValidator(object):
    """Password validator enforcing a configurable minimum length, with
    French user-facing messages (based on Django's MinimumLengthValidator).
    """
    def __init__(self, min_length = 8): # put default min_length here
        self.min_length = min_length
    def validate(self, password, user=None):
        """Raise ValidationError when ``password`` is shorter than
        ``min_length``; return None otherwise."""
        if len(password) >= self.min_length:
            return
        message = ngettext(
            # silly, I know, but if your min length is one, put your message here
            "Ce mot de passe est trop court. Il doit contenir au moins %(min_length)d caractère.",
            "Ce mot de passe est trop court. Il doit contenir au moins %(min_length)d caractère.",
            self.min_length
        )
        raise ValidationError(
            message,
            code='password_too_short',
            params={'min_length': self.min_length},
        )
    def get_help_text(self):
        """Return the localised help text for use in templates
        (password.help_text)."""
        template = ngettext(
            "Votre mot de passe doit contenir au moins %(min_length)d caractère.",
            "Votre mot de passe doit contenir au moins %(min_length)d caractère.",
            self.min_length
        )
        return template % {'min_length': self.min_length}
# <--------------------- MyCustomUserAttributeSimilarityValidator ------------------------->
class MyCustomUserAttributeSimilarityValidator(object):
    """
    Validate whether the password is sufficiently different from the user's
    attributes.
    If no specific attributes are provided, look at a sensible list of
    defaults. Attributes that don't exist are ignored. Comparison is made to
    not only the full attribute value, but also its components, so that, for
    example, a password is validated against either part of an email address,
    as well as the full address.
    """
    DEFAULT_USER_ATTRIBUTES = ('username', 'first_name', 'last_name', 'email')
    def __init__(self, user_attributes=DEFAULT_USER_ATTRIBUTES, max_similarity=0.7):
        self.user_attributes = user_attributes
        self.max_similarity = max_similarity
    def validate(self, password, user=None):
        """Raise ValidationError when ``password`` is too similar to any of
        the user's attributes; return None otherwise (or when no user is
        supplied)."""
        if not user:
            return
        for attribute_name in self.user_attributes:
            value = getattr(user, attribute_name, None)
            if not value or not isinstance(value, str):
                continue
            # Compare against each word of the attribute as well as the
            # whole value.
            value_parts = re.split(r'\W+', value) + [value]
            for value_part in value_parts:
                if SequenceMatcher(a=password.lower(), b=value_part.lower()).quick_ratio() >= self.max_similarity:
                    try:
                        verbose_name = str(user._meta.get_field(attribute_name).verbose_name)
                    # BUGFIX: FieldDoesNotExist was referenced here without
                    # being imported anywhere in this module, turning this
                    # fallback into a NameError; it is now imported from
                    # django.core.exceptions alongside ValidationError.
                    except FieldDoesNotExist:
                        verbose_name = attribute_name
                    raise ValidationError(
                        _("Le mot de passe est trop similaire à votre %(verbose_name)s."),
                        code='password_too_similar',
                        params={'verbose_name': verbose_name},
                    )
    def get_help_text(self):
        """Return the localised help text for use in templates."""
        return _("Votre mot de passe ne peut pas être trop similaire à vos autres informations personnelles.")
# <---------------------------------- MyCustomCommonPasswordValidator ---------------------------------------->
class MyCustomCommonPasswordValidator(object):
    """Reject passwords that appear in a list of common passwords.

    The list file may be gzip-compressed or plain text; entries are
    expected lowercase (the candidate is lowercased before lookup).
    Django's default list (20000 entries) is used when no path is given:
    https://gist.github.com/roycewilliams/281ce539915a947a23db17137d91aeb7
    """

    DEFAULT_PASSWORD_LIST_PATH = Path(__file__).resolve().parent / 'common-passwords.txt.gz'

    def __init__(self, password_list_path=DEFAULT_PASSWORD_LIST_PATH):
        path = str(password_list_path)
        try:
            with gzip.open(path) as handle:
                raw_lines = handle.read().decode().splitlines()
        except IOError:
            # Not gzip-compressed — read it as plain text instead.
            with open(path) as handle:
                raw_lines = handle.readlines()
        self.passwords = {line.strip() for line in raw_lines}

    def validate(self, password, user=None):
        """Raise ValidationError when the password is in the common list."""
        candidate = password.lower().strip()
        if candidate in self.passwords:
            raise ValidationError(
                _("Ce mot de passe est trop commun."),
                code='password_too_common',
            )

    def get_help_text(self):
        return _("Votre mot de passe ne peut pas être un mot de passe couramment utilisé.")
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,689 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/migrations/0007_adress.py | # Generated by Django 2.2 on 2019-04-25 10:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the `Adress` model: a postal address rows owned by a Profile."""

    dependencies = [
        ('users', '0006_auto_20190423_1328'),
    ]

    operations = [
        migrations.CreateModel(
            name='Adress',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('region', models.CharField(max_length=256)),
                ('commune', models.CharField(max_length=256)),
                ('rue', models.CharField(max_length=256)),
                ('logement', models.IntegerField()),
                # Each address points back at the owning profile.
                ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.Profile')),
            ],
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,690 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/urls.py | from django.urls import path
from . import views
# URL routes for the reclamation ("requets") app: client-facing pages
# plus the technician work-queue views.
urlpatterns = [
    path('',views.home,name="home"),
    path('create_requete/',views.choose_problem,name="create_requete"),
    path('create_requete_telephonique/',views.RequetCreateView.as_view(),name="create_requete_telephonique"),
    path('create_requete_internet/',views.RequetInternetCreateView.as_view(),name="create_requete_internet"),
    path('siuvi_requet/',views.RequetListView.as_view(),name="siuvi_requete"),
    path('delete_requet/<int:pk>/',views.RequetDeleteView.as_view(),name="delete_requete"),
    path("tech_requets",views.TechRequetListView.as_view(),name="tech_requets"),
    path("requet_fixée/<int:pk>/",views.FixRequetView.as_view(),name="requet_fixée"),
    path("success_view/<int:id>/",views.success_view,name="success_view"),
    path("contact/",views.techContact,name="contact"),
]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,691 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/migrations/0005_auto_20190417_1459.py | # Generated by Django 2.2 on 2019-04-17 12:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Move client identity out of Profile.

    Drops the old `company`/`personne` fields and recreates Personne and
    Company as separate models, each linked one-to-one to Profile.
    """

    dependencies = [
        ('users', '0004_auto_20190417_1408'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='company',
        ),
        migrations.RemoveField(
            model_name='profile',
            name='personne',
        ),
        migrations.CreateModel(
            name='Personne',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=256)),
                ('last_name', models.CharField(max_length=256)),
                ('profile', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='users.Profile')),
            ],
        ),
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('profile', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='users.Profile')),
            ],
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,692 | ahmedbendev/Reclamation-Client | refs/heads/master | /manager/views.py | from datetime import datetime
from django.shortcuts import render ,redirect ,get_object_or_404
from django.contrib import auth
from django.urls import reverse
from django.http import HttpResponse
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.views.generic import DetailView ,ListView ,DeleteView
from django.contrib.auth.mixins import LoginRequiredMixin , UserPassesTestMixin
from django.contrib.auth.models import User
from django.db.models import Q
from django.utils import timezone
# rest rest_framework imports
from rest_framework import viewsets
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import authentication, permissions
from .permissions import IsManager
#my filles
from .forms import AddAdminForm ,UserChangeInfoForm ,ProfileAdminForm ,AddressTechForm
from .forms import EditRequetForm
from requets.models import Requet
from users.models import Profile
from .serializers import ReclamationSerializer
# Create your views here.
def home(request):
    """Manager landing page (renders the reclamation-list shell)."""
    return render(request, "manager/list_requets.html")
def login_manager(request):
    """Authenticate an administrator and open the manager session.

    Only accounts whose profile group is "admin" may log in here; bad
    credentials and non-admin accounts are sent back to the form with an
    explanatory error.
    """
    if request.method != 'POST':
        return render(request, "manager/login_manager.html")
    username = request.POST["username"]
    password = request.POST["password"]
    user = auth.authenticate(request, username=username, password=password)
    if user and user.profile.group == "admin":
        auth.login(request, user)
        messages.success(request, f"welcome {username}")
        return redirect("manager_home")
    if user is None:
        error = " nom d'utilisateur ou mot de passe n'est pas correcte"
    else:
        error = f"{username} n'est pas un administrateur, seul l'administrateur peut accéder à cette page"
    return render(request, "manager/login_manager.html", {"error": error})
# <------------------------- nouveax reclamation ----------------------------------------->
class RequetsListView( LoginRequiredMixin , UserPassesTestMixin ,ListView):
    """Admin-only list of newly submitted (pending) reclamations."""
    model = Requet
    template_name = "manager/requet_list.html"
    # NOTE: get_queryset() applies its own order_by(), so `ordering` is unused.
    ordering = ["pub_date"]
    context_object_name = "requets"

    def get_queryset(self):
        # Pending requests only, grouped by client type, oldest first.
        return Requet.objects.filter(state = "ont étape de traitement").order_by("client__profile__type","pub_date")

    def test_func(self):
        # UserPassesTestMixin hook: admins only.
        return self.request.user.profile.group == "admin"
# <------------------------------- reclamation approvée ----------------------------------------->
class RequetsApprovedListView( LoginRequiredMixin , UserPassesTestMixin ,ListView):
    """Admin-only, paginated list of admin-approved reclamations."""
    model = Requet
    template_name = "manager/reclamation_approveé.html"
    # NOTE: get_queryset() applies its own order_by(), so `ordering` is unused.
    ordering = ["-pub_date"]
    context_object_name = "requets"
    paginate_by = 4

    def get_queryset(self):
        return Requet.objects.filter(state = "apprové par l'administrateur").order_by("client__profile__type","-pub_date")

    def test_func(self):
        return self.request.user.profile.group == "admin"
# <--------------------------------------- reclamation des probleme fixeé ---------------------------------->
class RequetsFixedListView( LoginRequiredMixin , UserPassesTestMixin ,ListView):
    """Admin-only, paginated list of resolved reclamations (newest fix first)."""
    model = Requet
    template_name = "manager/reclamation_fixée.html"
    ordering = ["-fix_date"]
    context_object_name = "requets"
    paginate_by = 2

    def get_queryset(self):
        return Requet.objects.filter(state = "Problème Résolu").order_by("-fix_date")

    def test_func(self):
        return self.request.user.profile.group == "admin"
# reclamation after the failre of the technicien to fix it
class RequetsNoteeListView( LoginRequiredMixin , UserPassesTestMixin ,ListView):
    """Admin-only list of reclamations noted back by a technician
    (i.e. the technician could not fix the problem)."""
    model = Requet
    template_name = "manager/reclamation_notifée.html"
    ordering = ["-fix_date"]
    context_object_name = "requets"

    def get_queryset(self):
        return Requet.objects.filter(state = "notée").order_by("-fix_date")

    def test_func(self):
        return self.request.user.profile.group == "admin"
class RequetDeleteView(LoginRequiredMixin , UserPassesTestMixin ,DeleteView ):
    """Admin-only confirmation page + deletion of a reclamation."""
    model = Requet
    template_name = "manager/requet_confirm_delete.html"

    def get_success_url(self):
        # Back to the pending-reclamations list after deletion.
        return reverse("manager_requets")

    def test_func(self):
        return self.request.user.profile.group == "admin"

    def get_context_data(self ,**kwargs):
        # Expose the object under "c_requet" for the confirmation template.
        data = super().get_context_data(**kwargs)
        requet = self.get_object()
        data['c_requet'] = requet
        return data
# <----------------------------------- edit and approve requet -------------------------------------------------->
@login_required
def edit_requet(request ,id ):
    """Let an admin edit a reclamation and promote it to the approved state.

    GET renders the edit form; a valid POST saves the changes, marks the
    reclamation approved (when it was still pending or noted) and
    redirects back to the admin list. Non-admins get a 403 page.
    """
    requet = get_object_or_404(Requet , pk=id)
    client = requet.client.username
    if request.user.profile.group == "admin" :
        form = EditRequetForm(instance = requet)
        if request.method == "POST":
            form = EditRequetForm(request.POST ,instance = requet)
            if form.is_valid():
                form.save()
                # Bug fix: the original `state == "..." or "notée"` was always
                # truthy ("notée" is a non-empty string); use a membership test
                # so only pending/noted reclamations are promoted.
                if requet.state in ("ont étape de traitement", "notée"):
                    requet.state = "apprové par l'administrateur"
                    requet.aprove_date = timezone.now()
                    requet.save()
                messages.success(request,f"{client} Reclamation est modifie avec success")
                return redirect("manager_requets")
        # Duplicate "requet" key removed from the original context dict.
        return render(request,"manager/edit_requet.html",{"form":form ,"requet":requet})
    else :
        return HttpResponse("<h1>403 Forbidden </h1>")
@login_required
def aprove(request ,id):
    """Approve a reclamation (admin, POST only); otherwise answer 403."""
    requet = get_object_or_404(Requet , pk = id)
    client = requet.client.username
    is_admin = request.user.profile.group == "admin"
    if request.method == 'POST' and is_admin:
        # Model-level state transition lives on Requet.aprove().
        requet.aprove()
        messages.success(request,f"{client} Reclamation est approvée avec success")
        return redirect("manager_requets")
    return HttpResponse("<h1>403 Forbidden </h1>")
#requet fixed informations
@login_required
def requet_info(request ,id):
    """Show the detail page of a (fixed) reclamation — admin only."""
    requet = get_object_or_404(Requet , pk = id)
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>403 Forbidden </h1>")
    return render(request,"manager/requet_information.html",{"requet":requet})
# Recherer un requet
@login_required
def search_requet(request):
    """Admin full-text search over *resolved* reclamations by problem text."""
    if request.user.profile.group == "admin" :
        search_element = request.POST["search"]
        requets = Requet.objects.filter(Q(problem__icontains = search_element) & Q(state = "Problème Résolu"))
        return render(request,"manager/search_requet.html",{"requets" : requets})
    else :
        # Bug fix: the original fell through and returned None for
        # non-admins, which makes Django raise a ValueError. Answer 403
        # like every other admin view in this module.
        return HttpResponse("<h1>403 Forbidden</h1>")
# <--------------------------- tech part ----------------------------------------------->
# register tech
@login_required
def register_employee(request):
    """Create a technician account (User + Profile + Address) — admin only.

    On a valid POST the three forms are saved, the profile is forced into
    the "tech" group, and the admin is redirected to the technician list.
    """
    if request.user.profile.group == "admin" :
        if request.method == 'POST':
            u_form = AddAdminForm(request.POST)
            p_form = ProfileAdminForm(request.POST)
            a_form = AddressTechForm(request.POST)
            if u_form.is_valid() and p_form.is_valid() and a_form.is_valid():
                user = u_form.save()
                # Bug fix: set_password() only hashes in memory — the user must
                # be saved again, otherwise the raw form password stays in the
                # database and the hashed one is lost.
                user.set_password(user.password)
                user.save()
                username = user.username
                u_profile = p_form.save(commit = False)
                u_profile.owner = user
                u_profile.group = "tech"
                u_profile.save()
                address = a_form.save(commit = False)
                address.profile = u_profile
                address.save()
                messages.success(request,f"le technicien {username} est creé avec success")
                return redirect("list_tech")
        else :
            u_form = AddAdminForm()
            p_form = ProfileAdminForm()
            a_form = AddressTechForm()
        return render(request,"manager/register_employee.html" ,{"u_form":u_form ,"p_form":p_form ,"a_form":a_form})
    else :
        return HttpResponse("<h1>403 Forbidden</h1>")
@login_required
def register_admin(request):
    """Create another administrator account — admin only."""
    if request.user.profile.group == "admin" :
        if request.method == 'POST':
            u_form = AddAdminForm(request.POST)
            if u_form.is_valid() :
                user = u_form.save()
                # Bug fix: set_password() only hashes in memory — persist the
                # user again, otherwise the raw form password stays in the DB.
                user.set_password(user.password)
                user.save()
                username = user.username
                u_profile = Profile.objects.create(owner = user ,group = "admin")
                u_profile.phone_number = request.POST["phone"]
                u_profile.save()
                messages.success(request,f"un compte de {username} admin est creé avec success")
                return redirect("manager_requets")
        else :
            u_form = AddAdminForm()
        return render(request,"manager/register_admin.html" ,{"u_form":u_form})
    else :
        return HttpResponse("<h1>403 Forbidden</h1>")
# list des technicien
class TechListView( LoginRequiredMixin , UserPassesTestMixin ,ListView):
    """Admin-only list of technician accounts."""
    # Bug fix: the queryset yields User rows, so `model` must be User
    # (the original declared Requet, which was misleading; template_name
    # and get_queryset() masked the mistake).
    model = User
    template_name = "manager/list_tech.html"
    context_object_name = "techs"

    def get_queryset(self):
        return User.objects.filter(profile__group = "tech")

    def test_func(self):
        return self.request.user.profile.group == "admin"
# delete techs
class TechDeleteView(LoginRequiredMixin , UserPassesTestMixin ,DeleteView ):
    """Admin-only deletion of a technician account."""
    model = User
    template_name = "manager/tech_confirm_delete.html"

    def get_success_url(self):
        return reverse("list_tech")

    def test_func(self):
        # Requester must be an admin AND the target must really be a tech
        # (prevents deleting admins or clients through this view).
        user = self.get_object()
        return self.request.user.profile.group == "admin" and user.profile.group == "tech"

    def get_context_data(self ,**kwargs):
        # Expose the target under "tech" for the confirmation template.
        data = super().get_context_data(**kwargs)
        tech = self.get_object()
        data['tech'] = tech
        return data
#edit tech
@login_required
def tech_info(request ,id):
    """Display and edit a technician's account, profile and address.

    NOTE(review): unlike every other manager view here, this one only
    checks that the *target* user is a technician — it never verifies
    that request.user is an admin, so any authenticated user who knows a
    technician id can edit it. Confirm whether an admin check is needed.
    """
    tech = get_object_or_404(User , pk = id)
    if tech.profile.group == "tech" :
        # Pre-fill the three forms from the current data (GET path).
        u_form = UserChangeInfoForm(instance = tech)
        p_form = ProfileAdminForm(instance = tech.profile)
        a_form = AddressTechForm(instance = tech.profile.address)
        if request.method == 'POST' :
            u_form = UserChangeInfoForm( request.POST ,instance = tech )
            p_form = ProfileAdminForm(request.POST ,instance = tech.profile)
            a_form = AddressTechForm(request.POST ,instance = tech.profile.address)
            if u_form.is_valid() and p_form.is_valid() and a_form.is_valid() :
                email1 = u_form.cleaned_data["email"]
                # Reject an e-mail already used by a *different* account.
                if User.objects.filter(email = email1).exclude(username = tech.username).exists():
                    error1 = "l'adresse e-mail que vous avez entrée est déjà enregistrée,"
                    return render(request,"manager/tech_info.html",{"u_form":u_form ,"a_form":a_form ,"p_form":p_form,"error1":error1,"tech":tech})
                else :
                    u_form.save()
                    p_form.save()
                    a_form.save()
                    username = tech.username
                    messages.success(request,f"Les informations de {username} ont été modifiées avec succès")
                    return redirect("tech_info" ,id = tech.id)
        return render(request,"manager/tech_info.html",{"u_form":u_form ,"p_form":p_form,"a_form":a_form ,"tech":tech})
    else :
        return HttpResponse("<h1> 403 Forbidden </h1>")
# <-------------------------------------- client part --------------------------------------->
def client_info(request , id):
    """Admin view of a single client's details."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1> 403 Forbidden </h1>")
    client = get_object_or_404(User , pk = id)
    return render(request,"manager/client_info.html",{"client" : client})
class PersonneListView(LoginRequiredMixin , UserPassesTestMixin ,ListView):
    """Admin-only list of individual ("personne") client accounts."""
    model = User
    template_name = "manager/personne_list.html"
    context_object_name = "clients"

    def get_queryset(self):
        return User.objects.filter(profile__type = "personne")

    def test_func(self):
        return self.request.user.profile.group == "admin"
#list des entreprises
def enterprise_list(request):
    """Admin list of all clients of type "entreprise"."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>403 Forbidden</h1>")
    clients = User.objects.filter(profile__type = "entreprise")
    return render(request,"manager/enterprise_list.html",{"clients":clients})
#delete client
class PersonneDeleteView(LoginRequiredMixin , UserPassesTestMixin ,DeleteView):
    """Admin-only deletion of a client account."""
    model = User
    template_name = "manager/delete_personne.html"

    def get_context_data(self,**kwargs):
        # Expose the target under "client" for the confirmation template.
        data = super().get_context_data(**kwargs)
        client = self.get_object()
        data['client'] = client
        return data

    def test_func(self):
        return self.request.user.profile.group == "admin"

    def get_success_url(self):
        return reverse("list_personne")
#recherch un client
@login_required
def search_client(request):
    """Admin search over individual clients by username, name, phone or address."""
    if request.user.profile.group == "admin" :
        search_element = request.POST["search"]
        clients = User.objects.filter(
            Q(profile__type = "personne") & (Q(username__icontains = search_element) | Q(profile__personne__first_name__icontains = search_element) |
            Q(profile__personne__last_name__icontains = search_element) | Q(profile__phone_number__icontains = search_element) |
            Q(profile__address__region__icontains = search_element) | Q(profile__address__commune__icontains = search_element) )
        )
        return render(request , "manager/search_client.html",{"clients" : clients})
    else :
        # Bug fix: the original fell through and returned None for
        # non-admins, which makes Django raise a ValueError.
        return HttpResponse("<h1>403 Forbidden</h1>")
#recherch un entrprise
@login_required
def search_entrprise(request):
    """Admin search over enterprise clients by username, company name, phone or address.

    Bug fixes: @login_required added (the original crashed with an
    AttributeError for anonymous users on request.user.profile, unlike its
    sibling search views), and non-admins now get an explicit 403 instead
    of the view returning None.
    """
    if request.user.profile.group == "admin" :
        search_element = request.POST["search"]
        clients = User.objects.filter(
            Q(profile__type = "entreprise") & (Q(username__icontains = search_element) | Q(profile__company__name__icontains = search_element) |
            Q(profile__phone_number__icontains = search_element) | Q(profile__address__region__icontains = search_element) |
            Q(profile__address__commune__icontains = search_element))
        )
        return render(request , "manager/search_entrprise.html",{"clients" : clients})
    else :
        return HttpResponse("<h1>403 Forbidden</h1>")
# <------------------------------ Les Statistiques ------------------------------------------------------------------>
def techs(request):
    """Per-technician statistics page (admin only)."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>Seul l'administrateur peut accéder à cette page</h1>")
    return render(request,"manager/static_tech.html")
def diaras(request):
    """Per-daira (district) statistics page (admin only)."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>Seul l'administrateur peut accéder à cette page</h1>")
    return render(request,"manager/static_daira.html")
def type(request):  # name kept: referenced by the URLconf (shadows builtin `type`)
    """Per-problem-type statistics page (admin only)."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>Seul l'administrateur peut accéder à cette page</h1>")
    return render(request,"manager/static_type.html")
def evolution(request):
    """Weekly-evolution statistics page (admin only)."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>Seul l'administrateur peut accéder à cette page</h1>")
    return render(request,"manager/static_evolution.html")
def evolution_day(request):
    """Daily-evolution statistics for the current month (admin only)."""
    if request.user.profile.group != "admin":
        return HttpResponse("<h1>Seul l'administrateur peut accéder à cette page</h1>")
    month = datetime.now().strftime("%B")
    return render(request,"manager/static_evolution_day.html",{"month":month})
#<--------------------------- Rest Framework API ----------------------------------->
#send the new reclamations as a json format
class ReclamationSetView(viewsets.ViewSet):
    """API endpoint: pending reclamations as JSON (pk only), manager-restricted."""
    permission_classes = (permissions.IsAuthenticated , IsManager,)

    def list(self, request):
        queryset = Requet.objects.filter(state = "ont étape de traitement")
        serializer = ReclamationSerializer(queryset, many=True)
        return Response(serializer.data)
#send the noted reclamations as a json format
class NotesSetView(viewsets.ViewSet):
    """API endpoint: noted ("notée") reclamations as JSON (pk only), manager-restricted."""
    permission_classes = (permissions.IsAuthenticated , IsManager,)

    def list(self, request):
        queryset = Requet.objects.filter(state = "notée")
        serializer = ReclamationSerializer(queryset, many=True)
        return Response(serializer.data)
#sending the data as a json format
class DataChart(APIView):
    """Return every dataset for the admin statistics charts as one JSON blob.

    Payload keys: per-technician fixed counts ("techs"/"works"),
    per-daira counts, per-problem-type counts, weekly counts since
    2019-01-01 and daily counts for the current month.
    """
    permission_classes = (permissions.IsAuthenticated , IsManager,)

    def get(self, request, format=None):
        # Local import: the module top only does `from datetime import datetime`.
        from datetime import timedelta

        now = datetime.now()
        daira = ["bouira","sour","hachimia"]
        types = ["Coupage telephonique" ,"Autre Problem" , "Problem internet"]

        # Week boundaries since the start of 2019 (first boundary is Jan 8,
        # matching the original which appended *after* advancing).
        # Bug fix: the original advanced the date with hand-rolled
        # month-length arithmetic that treated every even month as 30 days
        # and every odd month as 31 (wrong for Aug, Sep, Oct, Nov, Dec);
        # timedelta uses the real calendar.
        weeks = []
        cursor = datetime(2019,1,1)
        while cursor < now:
            cursor += timedelta(days=7)
            weeks.append(cursor)

        # Every day of the current month so far.
        # Bug fix: replace(day=day + 1) raised ValueError on the last day of
        # a month; timedelta rolls over correctly.
        # NOTE(review): the year is hard-coded to 2019 as in the original —
        # presumably it should be now.year; confirm before changing.
        days = []
        cursor = datetime(2019, now.month, 1)
        while cursor < now:
            days.append(cursor)
            cursor += timedelta(days=1)

        def counts_between(bounds):
            # Reclamations published between each pair of consecutive bounds.
            return [
                Requet.objects.filter(pub_date__range = (bounds[i-1], bounds[i])).count()
                for i in range(1, len(bounds))
            ]

        reclamation_day = counts_between(days)
        reclamation_week = counts_between(weeks)

        # Reclamations per problem category (loop var renamed: the original
        # shadowed the builtin `type`).
        reclamtions_type = [Requet.objects.filter(problem = item).count() for item in types]

        # Reclamations per daira (region of the client's address).
        reclamations = [
            Requet.objects.filter(client__profile__address__region = item).count()
            for item in daira
        ]

        # Resolved reclamations per technician.
        techs = []
        works = []
        for tech in User.objects.filter(profile__group = "tech"):
            techs.append(tech.username)
            works.append(tech.works.filter(state = "Problème Résolu").count())

        def dmy(d):
            # "day/month/year" label, no zero padding (matches the original).
            return str(d.day) + "/" + str(d.month) + "/" + str(d.year)

        data = {
            "techs":techs,
            "works":works ,
            "daira":daira,
            "reclamations":reclamations,
            "types" : types,
            "reclamations_type":reclamtions_type,
            "weeks": [dmy(w) for w in weeks],
            "reclamation_week" : reclamation_week,
            "days" : [dmy(d) for d in days],
            "reclamation_day" : reclamation_day,
        }
        return Response(data)
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,693 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/admin.py | from django.contrib import admin
from .models import Requet
# Register your models here.
# Expose the Requet model in the Django admin site with the default ModelAdmin.
admin.site.register(Requet)
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,694 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/migrations/0008_auto_20190417_1408.py | # Generated by Django 2.2 on 2019-04-17 12:08
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Alter Requet.pub_date's default to a hard-coded generation-time
    timestamp (artifact of makemigrations evaluating a datetime.now()
    default; superseded by migration 0013 which uses timezone.now)."""

    dependencies = [
        ('requets', '0007_auto_20190417_1407'),
    ]

    operations = [
        migrations.AlterField(
            model_name='requet',
            name='pub_date',
            field=models.DateTimeField(default=datetime.datetime(2019, 4, 17, 12, 8, 5, 427753, tzinfo=utc)),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,695 | ahmedbendev/Reclamation-Client | refs/heads/master | /manager/serializers.py | from requets.models import Requet
from rest_framework import serializers
class ReclamationSerializer(serializers.ModelSerializer):
    """Serializes a Requet down to its primary key only
    (used by the manager API viewsets, e.g. for counting/notification)."""
    class Meta:
        model = Requet
        fields = ["pk",]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,696 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/models.py | from django.db import models
from django.contrib.auth.models import User
from requets.models import Requet
# Create your models here.
class Profile(models.Model):
    """Per-user account data linked one-to-one to Django's User.

    `group` is one of "client" / "tech" / "admin"; `type` distinguishes
    "personne" from "entreprise" for client accounts.
    """
    owner = models.OneToOneField(User ,on_delete = models.CASCADE)
    location = models.CharField(max_length = 100)
    phone_number = models.CharField(max_length = 100,null = True)
    group = models.CharField(max_length = 100 , default = "client")
    type = models.CharField(max_length = 100 ,null = True)

    def __str__(self):
        return self.owner.username

    # NOTE(review): the count_* helpers below are global aggregates that
    # never use `self`; they are exposed as instance methods so templates
    # can reach them through any profile object.
    def count_client(self):
        clients = Profile.objects.filter(group = "client").count()
        return clients

    def count_tech(self):
        techs = Profile.objects.filter(group = "tech").count()
        return techs

    def count_entreprise(self):
        entreprise = Profile.objects.filter(type = "entreprise").count()
        return entreprise

    def count_personne(self):
        personne = Profile.objects.filter(type = "personne").count()
        return personne

    def count_requets(self):
        return Requet.objects.count()

    def count_fixed_requets(self):
        return Requet.objects.filter(state = "Problème Résolu").count()

    def count_approved_requets(self):
        return Requet.objects.filter(state = "apprové par l'administrateur").count()

    def count_new_requets(self):
        return Requet.objects.filter(state = "ont étape de traitement").count()

    def count_notee_requets(self):
        return Requet.objects.filter(state = "notée").count()
class Personne(models.Model):
    """Identity record for an individual client, one-to-one with Profile."""
    profile = models.OneToOneField(Profile , on_delete = models.CASCADE )
    first_name = models.CharField(max_length = 256)
    last_name = models.CharField(max_length = 256 )

    def __str__(self):
        return self.first_name
class Company(models.Model):
    """Identity record for an enterprise client, one-to-one with Profile."""
    profile = models.OneToOneField(Profile , on_delete = models.CASCADE )
    name = models.CharField(max_length = 256)

    def __str__(self):
        return self.name
class Address(models.Model):
    """Postal address of a profile (region/commune/street/housing number)."""
    profile = models.OneToOneField(Profile , on_delete = models.CASCADE)
    region = models.CharField(max_length = 256)
    commune = models.CharField(max_length = 256)
    rue = models.CharField(max_length = 256)
    logement = models.IntegerField(null = True)

    def __str__(self):
        return self.profile.owner.username + " address"
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,697 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/migrations/0017_requet_fix_confirm.py | # Generated by Django 2.2 on 2019-04-26 13:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the boolean Requet.fix_confirm flag (default False)."""

    dependencies = [
        ('requets', '0016_notification'),
    ]

    operations = [
        migrations.AddField(
            model_name='requet',
            name='fix_confirm',
            field=models.BooleanField(default=False),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,698 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/migrations/0002_auto_20190417_1351.py | # Generated by Django 2.2 on 2019-04-17 11:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create Company and Personne models and add
    ``phone_number`` / ``type`` fields to Profile."""

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
            ],
        ),
        migrations.CreateModel(
            name='Personne',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=256)),
                ('last_name', models.CharField(max_length=256)),
            ],
        ),
        migrations.AddField(
            model_name='profile',
            name='phone_number',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='profile',
            name='type',
            field=models.CharField(max_length=100, null=True),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,699 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/migrations/0013_auto_20190424_1224.py | # Generated by Django 2.2 on 2019-04-24 10:24
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated: switch Requet.pub_date to a callable default
    (``timezone.now``) instead of a frozen datetime."""

    dependencies = [
        ('requets', '0012_auto_20190423_1537'),
    ]

    operations = [
        migrations.AlterField(
            model_name='requet',
            name='pub_date',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,700 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/views.py | from django.shortcuts import render , redirect ,get_object_or_404
from django.urls import reverse
from django.views.generic import CreateView , DeleteView , ListView ,DetailView
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin , UserPassesTestMixin
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.db.models import Q
from django.contrib.auth.models import User
from users.forms import UserForm ,ProfileForm
from .forms import RequetForm ,InternetRequetForm
from .models import Requet
# Create your views here.
def home(request):
    """Render the landing page with blank registration forms."""
    context = {"u_form": UserForm(), "p_form": ProfileForm()}
    return render(request, "users/register.html", context)
def choose_problem(request):
    """Let the client choose which category of problem to report."""
    return render(request, "requets/choose.html")
#problem telephone fix
class RequetCreateView(LoginRequiredMixin, CreateView):
    """Create a phone-problem reclamation for the logged-in client."""
    model = Requet
    form_class = RequetForm

    def form_valid(self, form):
        # Stamp the reclamation with the current user as its client
        # before the default save.
        form.instance.client = self.request.user
        return super().form_valid(form)

    def get_success_url(self):
        # Flash a success message, then stay on the creation page.
        username = self.request.user.username
        messages.success(self.request, f"{username} Votre Réclamation Créée Avec Succès")
        return reverse("create_requete")
#problem d'internet
class RequetInternetCreateView(LoginRequiredMixin, CreateView):
    """Create an internet-problem reclamation for the logged-in client."""
    model = Requet
    template_name = "requets/requet_internet.html"
    form_class = InternetRequetForm

    def form_valid(self, form):
        # Stamp the reclamation with the current user as its client.
        form.instance.client = self.request.user
        return super().form_valid(form)

    def get_success_url(self):
        # Flash a success message, then stay on the creation page.
        username = self.request.user.username
        messages.success(self.request, f"{username} Votre Réclamation Créée Avec Succès")
        return reverse("create_requete")
class RequetListView(LoginRequiredMixin, ListView):
    """List the logged-in client's own reclamations, newest first."""
    model = Requet
    context_object_name = "requets"

    def get_queryset(self):
        requets = Requet.objects.filter(client=self.request.user).order_by("-pub_date")
        return requets
class FixRequetView(LoginRequiredMixin, UserPassesTestMixin, DetailView):
    """Detail page where a client reviews a reclamation marked as fixed."""
    model = Requet
    context_object_name = "requet"
    template_name = "requets/requet_fixée.html"

    def test_func(self):
        # Only the owning client may view, and only while the reclamation
        # is resolved but not yet confirmed by the client.
        requet = self.get_object()
        return self.request.user == requet.client and requet.state == "Problème Résolu" and requet.fix_confirm == False
class RequetDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
    """Let a client delete one of their own reclamations."""
    model = Requet

    def get_success_url(self):
        # Back to the client's reclamation list after deletion.
        return reverse("siuvi_requete")

    def test_func(self):
        # Only the owning client may delete.
        requet = self.get_object()
        return self.request.user == requet.client
# <------------------------- tech part --------------------------------------------------->
class TechRequetListView(LoginRequiredMixin, UserPassesTestMixin, ListView):
    """Open (unresolved) reclamations assigned to the logged-in technician.

    Entreprise clients sort before personne clients, then oldest first.
    The misspelled ``oreder_by`` class attribute was removed: it was dead
    code — Django's ListView reads ``ordering``, and ``get_queryset``
    already applies an explicit ``order_by`` anyway.
    """
    model = Requet
    context_object_name = "requets"
    template_name = "requets/tech_list.html"

    def get_queryset(self):
        return Requet.objects.filter(tech=self.request.user).exclude(state="Problème Résolu").order_by("client__profile__type", "pub_date")

    def test_func(self):
        # Only technicians may see this list.
        return self.request.user.profile.group == "tech"
# page contact de tech
@login_required
def techContact(request):
    """Contact page listing all technicians and admins; tech-only.

    Fix: non-tech users previously fell off the end of the function and
    got a ``None`` response (Django 500); they now get an explicit 403,
    matching the sibling views in this module.
    """
    if request.user.profile.group == "tech":
        techs = User.objects.filter(Q(profile__group="tech") | Q(profile__group="admin"))
        return render(request, "requets/contact.html", {"techs": techs})
    return HttpResponse("<h1>403 Forbidden</h1>")
# Client part problem resolu avec success
@login_required
def success_view(request, id):
    """Client confirms that their reclamation was actually fixed."""
    requet = get_object_or_404(Requet, pk=id)
    if request.user != requet.client:
        # Only the owning client may confirm the fix.
        return HttpResponse("<h1>403 Forbidden</h1>")
    requet.fix_confirm = True
    requet.save()
    username = request.user.username
    messages.success(request, f"{username} nous sommes heureux de vous aider à résoudre le problème")
    return redirect("siuvi_requete")
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,701 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/admin.py | from django.contrib import admin
from .models import Profile ,Personne ,Company , Address
# Register your models here.
# Expose all user-related models in the Django admin.
for _model in (Profile, Personne, Company, Address):
    admin.site.register(_model)
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,702 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/migrations/0009_auto_20190417_1459.py | # Generated by Django 2.2 on 2019-04-17 12:59
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated: drop the Company/Personne models from ``requets``
    (they moved to ``users``) and refresh pub_date's frozen default."""

    dependencies = [
        ('users', '0005_auto_20190417_1459'),
        ('requets', '0008_auto_20190417_1408'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Company',
        ),
        migrations.DeleteModel(
            name='Personne',
        ),
        migrations.AlterField(
            model_name='requet',
            name='pub_date',
            field=models.DateTimeField(default=datetime.datetime(2019, 4, 17, 12, 59, 26, 528982, tzinfo=utc)),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,703 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/apps.py | from django.apps import AppConfig
class RequetsConfig(AppConfig):
    """App configuration for the ``requets`` application."""
    name = 'requets'
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,704 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/models.py | from django.db import models
from django.contrib.auth.models import User
from django.utils import timezone
from django.db.models import Count
from django.db.models import Q
# Create your models here.
class Requet(models.Model):
    """A client reclamation (support ticket) and its assignment workflow.

    Lifecycle (``state``): "ont étape de traitement" -> "apprové par
    l'administrateur" (assigned to a tech) -> "Problème Résolu" (fixed)
    or "notée" (a note was left). The client then sets ``fix_confirm``.
    """
    client = models.ForeignKey(User, on_delete=models.CASCADE, related_name="requets")
    problem = models.CharField(max_length=256, null=True)
    content = models.TextField()
    state = models.CharField(max_length=100, default="ont étape de traitement")
    tech = models.ForeignKey(User, on_delete=models.CASCADE, null=True, related_name="works")
    pub_date = models.DateTimeField(default=timezone.now)
    aprove_date = models.DateTimeField(null=True)
    fix_date = models.DateTimeField(null=True)
    fix_confirm = models.BooleanField(default=False)

    def summary(self):
        """Return the first 100 characters of the content, with an ellipsis."""
        if len(self.content) > 100:
            return self.content[0:100] + "..."
        return self.content

    def __str__(self):
        # Was a verbatim copy of summary(); delegate instead of duplicating.
        return self.summary()

    def p_date(self):
        """Publication date formatted as ``mm/dd/YYYY, HH:MM``."""
        return self.pub_date.strftime("%m/%d/%Y, %H:%M")

    def aprove(self):
        """Approve the reclamation and assign the least-loaded technician.

        Preference goes to a tech in the client's own region; load is the
        count of that tech's currently-approved reclamations. Falls back
        to any tech when the region has none. (Name keeps the original
        spelling — callers elsewhere depend on it.)
        """
        self.aprove_date = timezone.now()
        requets_approveé = Count("works", filter=Q(works__state="apprové par l'administrateur"))
        tech = User.objects.filter(
            profile__group="tech", profile__address__region=self.client.profile.address.region
        ).annotate(requet_count=requets_approveé).order_by("requet_count").first()
        # In case there is no tech in the same area as the client.
        if tech is None:
            tech = User.objects.filter(profile__group="tech").annotate(n_works=requets_approveé).order_by("n_works").first()
        self.tech = tech
        self.state = "apprové par l'administrateur"
        self.save()

    def requet_fixed(self):
        """Mark the reclamation resolved and stamp the fix time."""
        self.state = "Problème Résolu"
        self.fix_date = timezone.now()
        self.save()

    def requet_note(self):
        """Mark the reclamation as annotated."""
        self.state = "notée"
        self.save()

    def repair_time(self):
        """Elapsed time between publication and fix, as "Xdays ,YHour ,Zminutes"."""
        time = self.fix_date - self.pub_date
        hours = time.seconds // 3600
        minute = (time.seconds % 3600) // 60
        return str(time.days) + "days ," + str(hours) + "Hour ," + str(minute) + "minutes"

    def get_index(self):
        """Number of approved reclamations the assigned tech must handle
        before this one (entreprise clients are always served first)."""
        tech = self.tech
        filters = Q(state="apprové par l'administrateur") & Q(client__profile__type="entreprise")
        e_requets_numb = 0
        if self.client.profile.type == "entreprise":
            requets = tech.works.all().filter(filters)
            index = requets.filter(pub_date__lt=self.pub_date).count()
        else:
            # Personne clients queue behind *all* entreprise reclamations.
            e_requets_numb = tech.works.all().filter(filters).count()
            requets = tech.works.all().filter(state="apprové par l'administrateur").exclude(client__profile__type="entreprise")
            index = requets.filter(pub_date__lt=self.pub_date).count()
        return index + e_requets_numb
class Notification(models.Model):
    """Free-text note attached to a reclamation by its client or tech."""
    content = models.TextField()
    owner = models.ForeignKey(User, on_delete=models.CASCADE, related_name="nots")
    requet = models.ForeignKey(Requet, on_delete=models.CASCADE, related_name="nots")
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,705 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/views.py | from django.shortcuts import render ,redirect , get_object_or_404
from django.contrib.auth.models import User
from .models import Profile ,Personne ,Company , Address
from requets.models import Requet
from .forms import( AddressPersonneChangeForm ,UserForm ,
ProfileForm , UserChangeInfoForm ,
ProfileChangeForm ,PersonneChangeForm ,
CompanyChangeForm ,AddressCompanyChangeForm
)
from django.contrib import messages
from django.contrib import auth
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse ,JsonResponse
from django.views.generic import DetailView
from django.contrib.auth.mixins import UserPassesTestMixin , LoginRequiredMixin
from manager.forms import NotificationForm
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from django.conf import settings
from .tokens import account_activation_token
# username validation
def username_validation(request, username):
    """AJAX endpoint: report whether *username* is already taken."""
    taken = User.objects.filter(username=username).exists()
    return JsonResponse({"not_valid": taken})
# email validation
def email_validation(request, email):
    """AJAX endpoint: report whether *email* is already registered."""
    taken = User.objects.filter(email=email).exists()
    return JsonResponse({"not_valid": taken})
# Create your views here.
def register(request):
    """Register a client (personne or entreprise) and e-mail an activation link.

    The account is created inactive; ``confirm_user`` flips it active once
    the e-mailed uid/token pair is visited. Fix: the plain-text and HTML
    bodies were rendered twice from the identical template and context
    (including a second ``make_token`` call) — render once and reuse.
    """
    if request.method == 'POST':
        u_form = UserForm(request.POST)
        p_form = ProfileForm(request.POST)
        if u_form.is_valid() and p_form.is_valid():
            user = u_form.save(commit=False)
            user.is_active = False  # activated only via the e-mailed token
            user.save()
            u_profile = p_form.save(commit=False)
            u_profile.owner = user
            u_type = request.POST["type"]
            u_profile.type = u_type
            u_profile.save()
            if u_profile.type == "personne":
                first_name = request.POST["first_name"]
                last_name = request.POST["last_name"]
                personne = Personne.objects.create(first_name=first_name, last_name=last_name, profile=u_profile)
                personne.save()
                address = Address.objects.create(
                    profile=u_profile, region=request.POST["region"], commune=request.POST["commune"],
                    rue=request.POST["rue"], logement=request.POST["logement"]
                )
                address.save()
            elif u_profile.type == "entreprise":
                name = request.POST["name"]
                entreprise = Company.objects.create(name=name, profile=u_profile)
                entreprise.save()
                address = Address.objects.create(
                    profile=u_profile, region=request.POST["e_region"], commune=request.POST["e_commune"], rue=request.POST["e_rue"]
                )
                address.save()
            # In case you want to get rid of the email confirmation:
            # return redirect("login") here and drop the block below.
            current_site = get_current_site(request)
            username = user.username
            subject = f"Activation du compte pour le client {username}"
            # Build the activation message once; reuse it for both the
            # plain-text and HTML parts (they were identical anyway).
            message = render_to_string("users/confirm_user.html", {
                'user': user,
                'domain': current_site.domain,
                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                'token': account_activation_token.make_token(user),
            })
            # Send the activation link to the user.
            user.email_user(subject=subject, message=message, html_message=message)
            return redirect('success_register')
    else:
        u_form = UserForm()
        p_form = ProfileForm()
    return render(request, "users/register.html", {"u_form": u_form, "p_form": p_form})
#confirm the user identity
def success_register(request):
    # Post-registration page telling the user to check their inbox
    # for the activation link.
    return render(request, "users/success_register.html")
def confirm_user(request, uidb64, token):
    """Activate an account from the e-mailed uid/token pair and log it in."""
    try:
        pk = force_text(urlsafe_base64_decode(uidb64))
        user = User.objects.get(pk=pk)
    except (TypeError, ValueError, OverflowError, User.DoesNotExist):
        user = None
    # Guard clause: bad uid or stale token -> reject.
    if user is None or not account_activation_token.check_token(user, token):
        return HttpResponse("Invalid Token")
    user.is_active = True
    user.save()
    username = user.username
    auth.login(request, user)
    messages.success(request, f"l'utilisateur {username} a été créé avec succès")
    return redirect("home")
# log in
def login_view(request):
    """Authenticate a client; accounts awaiting activation are refused."""
    blank = {"u_form": UserForm(), "p_form": ProfileForm()}
    if request.method == 'POST':
        username = request.POST["username"]
        password = request.POST["password"]
        account = User.objects.filter(username=username).first()
        if account and account.is_active == False:
            # Known account that never clicked its activation link.
            error = f"{username} votre compte n'est pas encore activé allez sur votre email et cliquez sur le lien que nous vous avons envoyé pour activer votre compte"
            return render(request, "users/register.html", {"error": error, "u_form": blank["u_form"], "p_form": blank["p_form"]})
        user = auth.authenticate(request, username=username, password=password)
        if user:
            auth.login(request, user)
            messages.success(request, f"welcome {username}")
            return redirect("home")
        error = " nom d'utilisateur ou mot de passe n'est pas correcte"
        return render(request, "users/register.html", {"error": error, "u_form": blank["u_form"], "p_form": blank["p_form"]})
    return render(request, "users/register.html", {"u_form": blank["u_form"], "p_form": blank["p_form"]})
# <----------------------------------- personne client informations ----------------------------------->
@login_required
def user_info(request):
    """Show/update account information for a "personne" client.

    Four forms are bound over the same request.user object graph
    (User, Profile, Personne, Address); all four must validate before
    any of them is saved.
    """
    u_form = UserChangeInfoForm(instance=request.user)
    p_form = ProfileChangeForm(instance=request.user.profile)
    ps_form = PersonneChangeForm(instance=request.user.profile.personne)
    a_form = AddressPersonneChangeForm(instance=request.user.profile.address)
    if request.method == 'POST':
        u_form = UserChangeInfoForm(request.POST, instance=request.user)
        p_form = ProfileChangeForm(request.POST, instance=request.user.profile)
        ps_form = PersonneChangeForm(request.POST, instance=request.user.profile.personne)
        a_form = AddressPersonneChangeForm(request.POST, instance=request.user.profile.address)
        if u_form.is_valid() and p_form.is_valid() and ps_form.is_valid() and a_form.is_valid():
            email1 = u_form.cleaned_data["email"]
            # Reject an e-mail already used by a *different* account.
            if User.objects.filter(email=email1).exclude(username=request.user.username).exists():
                error1 = "l'adresse e-mail que vous avez entrée est déjà enregistrée,"
                return render(request, "users/client_info.html", {"u_form": u_form, "p_form": p_form, "ps_form": ps_form, "a_form": a_form, "error1": error1})
            else:
                u_form.save()
                p_form.save()
                ps_form.save()
                a_form.save()
                username = request.user.username
                messages.success(request, f"Les informations d'utilisateur {username} ont été modifiées avec succès")
                return redirect("client_info")
    # GET, or POST with validation errors: re-render with bound forms.
    return render(request, "users/client_info.html", {"u_form": u_form, "p_form": p_form, "ps_form": ps_form, "a_form": a_form})
# <------------------------------- company client information ----------------------------------------->
@login_required
def entreprise_info(request):
    """Show/update account information for an "entreprise" client.

    Mirrors ``user_info`` but binds CompanyChangeForm /
    AddressCompanyChangeForm instead of the personne variants.
    """
    u_form = UserChangeInfoForm(instance=request.user)
    p_form = ProfileChangeForm(instance=request.user.profile)
    c_form = CompanyChangeForm(instance=request.user.profile.company)
    a_form = AddressCompanyChangeForm(instance=request.user.profile.address)
    if request.method == 'POST':
        u_form = UserChangeInfoForm(request.POST, instance=request.user)
        p_form = ProfileChangeForm(request.POST, instance=request.user.profile)
        c_form = CompanyChangeForm(request.POST, instance=request.user.profile.company)
        a_form = AddressCompanyChangeForm(request.POST, instance=request.user.profile.address)
        if u_form.is_valid() and p_form.is_valid() and c_form.is_valid() and a_form.is_valid():
            email1 = u_form.cleaned_data["email"]
            # Reject an e-mail already used by a *different* account.
            if User.objects.filter(email=email1).exclude(username=request.user.username).exists():
                error1 = "l'adresse e-mail que vous avez entrée est déjà enregistrée,"
                return render(request, "users/client_info.html", {"u_form": u_form, "p_form": p_form, "c_form": c_form, "error1": error1, "a_form": a_form})
            else:
                u_form.save()
                p_form.save()
                c_form.save()
                a_form.save()
                username = request.user.username
                messages.success(request, f"Les informations d'utilisateur {username} ont été modifiées avec succès")
                return redirect("entreprise_info")
    # GET, or POST with validation errors: re-render with bound forms.
    return render(request, "users/client_info.html", {"u_form": u_form, "p_form": p_form, "c_form": c_form, "a_form": a_form})
# logout
@login_required
def logout_view(request):
    """Log the user out on POST and send them to the login page.

    Fix: always return a redirect, so a stray GET can never produce a
    ``None`` response (which Django rejects with a 500).
    """
    if request.method == 'POST':
        auth.logout(request)
    return redirect("login")
# class ClientDetailView(UserPassesTestMixin , LoginRequiredMixin ,DetailView):
# model = User
# template_name = "users/client_detail.html"
# context_object_name = "client"
#
# def test_func(self):
# return self.request.user.profile.group == "tech"
@login_required
def requet_info(request, id_client, id_requet):
    """Tech-only detail page for one client's reclamation.

    Fix: added ``@login_required`` — for anonymous users the
    ``request.user.profile`` lookup raised (AnonymousUser has no profile),
    producing a 500 instead of a login redirect.
    """
    client = get_object_or_404(User, pk=id_client)
    requet = get_object_or_404(Requet, pk=id_requet)
    context = {"client": client, "requet": requet}
    if request.user.profile.group == "tech":
        return render(request, "users/requet_info.html", context)
    else:
        return HttpResponse("<h1>403 Forbidden</h1>")
# <--------------------------------- tech part ---------------------------------------->
def login_tech(request):
    """Dedicated login page for technicians; other profiles get a 403."""
    if request.method == 'POST':
        candidate = auth.authenticate(
            username=request.POST["username"],
            password=request.POST["password"],
        )
        if candidate is None:
            error = "nom d'utilisateur ou mot de passe n'est pas correcte"
            return render(request, "users/login_tech.html", {"error": error})
        if candidate.profile.group == "tech":
            auth.login(request, candidate)
            return redirect("tech_requets")
        return HttpResponse("<h2> 403 Forbidden </h2>")
    return render(request, "users/login_tech.html")
@login_required
def problem_fixed(request, id):
    """Tech marks a reclamation as resolved (POST only).

    Fix: added ``@login_required`` — for anonymous users the
    ``request.user.profile`` lookup raised (AnonymousUser has no
    profile), yielding a 500 instead of a login redirect; the sibling
    ``note_view`` already carries the decorator.
    """
    requet = get_object_or_404(Requet, pk=id)
    client = requet.client.username
    if request.method == 'POST' and request.user.profile.group == "tech":
        requet.requet_fixed()
        messages.success(request, f"Problème de client {client} est reésolu avec success")
        return redirect("tech_requets")
    else:
        return HttpResponse("<h2> 403 Forbidden </h2>")
@login_required
def note_view(request, id_r, id_u):
    """Attach a Notification to a reclamation on behalf of *owner*.

    Accessible only to the reclamation's client or its assigned tech.
    A tech leaving a note also releases the reclamation (tech cleared).
    """
    requet = get_object_or_404(Requet, pk=id_r)
    owner = get_object_or_404(User, pk=id_u)
    form = NotificationForm()
    if request.user == requet.client or request.user == requet.tech:
        if request.method == "POST":
            form = NotificationForm(request.POST)
            if form.is_valid():
                # Flip the reclamation state to "notée" first.
                requet.requet_note()
                note = form.save(commit=False)
                note.owner = owner
                note.requet = requet
                note.save()
                username = owner.username
                messages.success(request, f"{username} votre notification est crea avec success")
                if request.user == requet.tech:
                    # The tech's note un-assigns the reclamation.
                    requet.tech = None
                    requet.save()
                    return redirect("tech_requets")
                else:
                    return redirect("siuvi_requete")
        # GET, or POST with an invalid form: show the note form.
        return render(request, "users/note.html", {"form": form, "client": owner, "requet": requet})
    else:
        return HttpResponse("<h2> 403 Forbidden </h2>")
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,706 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/urls.py | from django.urls import path , include
from . import views
from django.contrib.auth.views import (
PasswordResetView,
PasswordResetConfirmView,
PasswordResetDoneView,
PasswordResetCompleteView,
)
# URL routes for the users app.
# Fix: the confirm_user pattern contained a stray ')' after <slug:token>
# ('...<slug:token>)/'), so the activation URL pattern included a literal
# parenthesis; the bogus character has been removed.
urlpatterns = [
    path('', views.register, name="register"),
    path('login/', views.login_view, name="login"),
    path('client_info/', views.user_info, name="client_info"),
    path('entreprise_info/', views.entreprise_info, name="entreprise_info"),
    path('logout/', views.logout_view, name="logout"),
    path('reset_password/', PasswordResetView.as_view(template_name="requets/reset_password.html"), name="reset_password"),
    path('password_reset_done/', PasswordResetDoneView.as_view(template_name="requets/reset_password_done.html"), name="password_reset_done"),
    path('password_reset_confirm/<uidb64>/<token>/', PasswordResetConfirmView.as_view(template_name="requets/reset_password_confirm.html"), name="password_reset_confirm"),
    path('password_reset_complete/', PasswordResetCompleteView.as_view(template_name="requets/reset_password_complete.html"), name="password_reset_complete"),
    path('login_tech/', views.login_tech, name="login_tech"),
    path('client/<int:id_client>/<int:id_requet>/', views.requet_info, name="requet_info"),
    path("problem_fixed/<int:id>/", views.problem_fixed, name="problem_fixed"),
    path("note/<int:id_u>/<int:id_r>/", views.note_view, name="note"),
    path("success_register/", views.success_register, name="success_register"),
    path('confirm_user/<slug:uidb64>/<slug:token>/', views.confirm_user, name='confirm_user'),
    path('username_valid/<str:username>/', views.username_validation, name='username_validation'),
    path('email_valid/<str:email>/', views.email_validation, name='email_validation'),
]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,707 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/migrations/0004_auto_20190417_1408.py | # Generated by Django 2.2 on 2019-04-17 12:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: link Profile to the (then requets-app) Company and
    Personne models via nullable one-to-one fields."""

    dependencies = [
        ('requets', '0008_auto_20190417_1408'),
        ('users', '0003_auto_20190417_1407'),
    ]

    operations = [
        migrations.AddField(
            model_name='profile',
            name='company',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='requets.Company'),
        ),
        migrations.AddField(
            model_name='profile',
            name='personne',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='requets.Personne'),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,708 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/forms.py | from django import forms
from .models import Requet
class RequetForm(forms.ModelForm):
    """Phone-problem reclamation form: problem category + free-text detail."""
    class Meta:
        model = Requet
        fields = ["problem", "content", ]
        # Category choices offered in the <select> widget.
        CHOICES = [
            ("Coupage telephonique", "Coupage telephonique"),
            ("Autre Problem", "Autre Problem"),
        ]
        widgets = {
            "content": forms.Textarea(attrs={"class": "form-control txa-requet", "id": "exampleFormControlTextarea1", "rows": "3"}),
            "problem": forms.Select(choices=CHOICES, attrs={"required": True})
        }
        labels = {
            "content": "s'il vous plaît essayez de préciser votre problème"
        }
class InternetRequetForm(forms.ModelForm):
    """Internet-problem reclamation form: problem category + free-text detail."""
    class Meta:
        model = Requet
        fields = ["problem", "content", ]
        # Category choices offered in the <select> widget.
        CHOICES = [
            ("Problem internet", "Problem internet"),
            ("Autre Problem", "Autre Problem"),
        ]
        widgets = {
            "content": forms.Textarea(attrs={"class": "form-control txa-requet", "id": "exampleFormControlTextarea1", "rows": "3"}),
            "problem": forms.Select(choices=CHOICES, attrs={"required": True})
        }
        labels = {
            "content": "s'il vous plaît essayez de préciser votre problème"
        }
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,709 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/migrations/0001_initial.py | # Generated by Django 2.2 on 2019-04-15 11:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: create the Requet model."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Requet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                ('state', models.CharField(default='ont étape de traitement', max_length=100)),
                ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requets', to=settings.AUTH_USER_MODEL)),
                ('tech', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='works', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,710 | ahmedbendev/Reclamation-Client | refs/heads/master | /requets/migrations/0007_auto_20190417_1407.py | # Generated by Django 2.2 on 2019-04-17 12:07
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Adds Company and Personne to ``requets`` and alters Requet.pub_date."""

    dependencies = [
        ('requets', '0006_auto_20190417_1354'),
    ]

    operations = [
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
            ],
        ),
        migrations.CreateModel(
            name='Personne',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=256)),
                ('last_name', models.CharField(max_length=256)),
            ],
        ),
        migrations.AlterField(
            model_name='requet',
            name='pub_date',
            # NOTE(review): the default is frozen to the moment makemigrations
            # ran — presumably the model default was an eagerly evaluated
            # datetime.now() rather than a callable; confirm against the model.
            field=models.DateTimeField(default=datetime.datetime(2019, 4, 17, 12, 7, 23, 625362, tzinfo=utc)),
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,711 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/forms.py | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm , UserChangeForm
from .models import Profile ,Personne ,Company ,Address
from django.utils.translation import gettext_lazy as _
class UserForm(UserCreationForm):
    """Client sign-up form: username and email plus the two password fields
    inherited from :class:`UserCreationForm`, all styled for Bootstrap."""

    class Meta:
        model = User
        fields = ["username", "email", "password1", "password2"]
        widgets = {
            "username": forms.TextInput(attrs={"class": "form-control username", "id": "username", "aria-describedby": "emailHelp", "placeholder": "Nom d'utilisateur"}),
            "email": forms.TextInput(attrs={"class": "form-control email", "id": "email", "aria-describedby": "emailHelp", "placeholder": "Votre Email Adress"}),
        }
        # Placeholders carry the prompt text, so the visible labels stay empty.
        labels = {
            "username": "",
            "email": "",
        }
        help_texts = {
            "username": None,
            "password": _("le mot de passe doit comporter plus de 8 caractères"),
            "email": None,
        }

    # Override UserCreationForm.error_messages so both the duplicate-username
    # and the password-mismatch errors come out localized.
    error_messages = {
        "username_exists": _("ce nom d'utilisateur existe déjà"),
        'password_mismatch': _("Les deux champs de mot de passe ne correspondent pas."),
    }

    def clean_username(self):
        """Reject a username that is already taken, with the localized message."""
        username = self.cleaned_data.get("username")
        already_taken = User._default_manager.filter(username=username).exists()
        if already_taken:
            raise forms.ValidationError(
                self.error_messages['username_exists'],
                code='username_exists',
            )
        return username

    def clean_email(self):
        """Ensure the address is not registered under a different username."""
        address = self.cleaned_data.get("email")
        owner = self.cleaned_data.get("username")
        if User.objects.filter(email=address).exclude(username=owner).exists():
            raise forms.ValidationError(
                "l'adresse e-mail que vous avez entrée est déjà enregistrée, allez à la page de connexion et connectez-vous"
            )
        return address

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # password1/password2 are declared by UserCreationForm, not Meta, so
        # their widgets, labels and help texts are restyled here.
        self.fields['password1'].widget = forms.PasswordInput(
            attrs={"class": "form-control password", "id": "pass1", "placeholder": "mot de passe"})
        self.fields['password2'].widget = forms.PasswordInput(
            attrs={"class": "form-control password", "id": "pass2", "placeholder": "Retaper Le mot de passe"})
        for field_name in ('password1', 'password2'):
            self.fields[field_name].label = ""
            self.fields[field_name].help_text = ""
# <----------------------------- Profile Form --------------------------->
class ProfileForm(forms.ModelForm):
    """Collects the client's landline number for the Profile model."""

    class Meta :
        model = Profile
        fields = ["phone_number"]
        widgets = {
            "phone_number" : forms.TextInput(attrs={ "class":"form-control " ,"id":"phone" ,"aria-describedby":"emailHelp" ,"placeholder":"Votre telephone fix"}),
        }
        labels = {
            "phone_number" : "",
        }

    def clean(self):
        """Validate the landline: must start with '0' and be exactly 9 characters.

        Returns the cleaned data when the number is valid; raises
        ``forms.ValidationError`` with a localized message otherwise.
        """
        data = super().clean()
        # BUG FIX: the key may be absent from cleaned_data when field-level
        # validation already failed; the original indexed data["phone_number"]
        # directly and crashed with a KeyError (and phone[0] could IndexError
        # on an empty string). Let the field's own error stand in that case.
        phone = data.get("phone_number")
        if not phone:
            return data
        if phone[0] != '0':
            raise forms.ValidationError("entrer votre numéro fix !!")
        if len(phone) != 9:
            raise forms.ValidationError("entrer votre numéro fix seulement!!")
        return data
# <------------------------------- changing user information form ----------------------------->
#get all the existing emails
class UserChangeInfoForm(forms.ModelForm):
    """Let a client change the email address on their User account."""

    class Meta:
        model = User
        fields = ("email",)
        widgets = {
            "email" : forms.TextInput(attrs={ "class":"form-control email" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Votre Email Adress"}),
        }
class ProfileChangeForm(forms.ModelForm):
    """Edit the landline number stored on an existing Profile."""

    class Meta :
        model = Profile
        fields = ["phone_number"]
        widgets = {
            "phone_number" : forms.TextInput(attrs={ "class":"form-control " ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Votre telephone fix"}),
        }
        labels = {
            "phone_number" : "Votre Telephone Fix",
        }
class PersonneChangeForm(forms.ModelForm):
    """Edit the first/last name of a Personne (individual client)."""

    class Meta:
        model = Personne
        fields = ["first_name","last_name"]
        widgets = {
            "first_name" : forms.TextInput(attrs={ "class":"form-control location" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Nom"}),
            "last_name" : forms.TextInput(attrs={ "class":"form-control " ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Prenon"}),
        }
        labels = {
            "first_name" : "Votre Nom",
            "last_name" : "Votre Prénom",
        }
class CompanyChangeForm(forms.ModelForm):
    """Edit the display name of a Company (corporate client)."""

    class Meta:
        model = Company
        fields = ["name",]
        widgets = {
            "name" : forms.TextInput(attrs={ "class":"form-control location" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"nom"}),
        }
        labels = {
            "name" : "Entrer le non de votre entreprise",
        }
class AddressCompanyChangeForm(forms.ModelForm):
    """Edit a company's address: daira (fixed list), commune and street."""

    class Meta:
        # Dairas offered in the region dropdown.
        REGIONS = [
            ("bouira" , "bouira"),
            ("hachimia" , "hachimia"),
            ("sour" , "sour"),
        ]
        model = Address
        fields = ["region","commune","rue"]
        widgets = {
            # NOTE(review): choices set on the widget only shape the dropdown;
            # they add no form/model validation — confirm that is intended.
            "region" : forms.Select(choices = REGIONS)
        }
        labels = {
            "region" : "Daira",
            "commune" : "la Commune",
            "rue" : "Rue",
        }
class AddressPersonneChangeForm(forms.ModelForm):
    """Edit an individual's address: daira (fixed list), commune, street and
    housing number — same as the company form plus ``logement``."""

    class Meta:
        # Dairas offered in the region dropdown.
        REGIONS = [
            ("bouira" , "bouira"),
            ("hachimia" , "hachimia"),
            ("sour" , "sour"),
        ]
        model = Address
        fields = ["region","commune","rue","logement"]
        widgets = {
            # NOTE(review): widget-level choices restyle the dropdown only;
            # no extra validation is added here.
            "region" : forms.Select(choices = REGIONS)
        }
        labels = {
            "region" : "Daira",
            "commune" : "la Commune",
            "rue" : "Rue",
            "logement" : "N° de Logement",
        }
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,712 | ahmedbendev/Reclamation-Client | refs/heads/master | /manager/permissions.py | from rest_framework import permissions
class IsManager(permissions.BasePermission):
    """DRF permission: allow only authenticated users whose profile group is
    ``"admin"`` (the manager role in this project)."""

    def has_permission(self, request, view):
        """Return True for admin-group users, False for everyone else.

        BUG FIX: the original dereferenced ``request.user.profile``
        unconditionally, which raises AttributeError (an HTTP 500) for
        anonymous users or users with no related profile; those requests now
        get a clean permission denial instead.
        """
        user = request.user
        if not user.is_authenticated:
            return False
        profile = getattr(user, "profile", None)
        return profile is not None and profile.group == "admin"
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,713 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/migrations/0003_auto_20190417_1407.py | # Generated by Django 2.2 on 2019-04-17 12:07
from django.db import migrations
class Migration(migrations.Migration):
    """Drops Company and Personne from the ``users`` app (they were recreated
    under the ``requets`` app by its migration 0007 of the same timestamp)."""

    dependencies = [
        ('users', '0002_auto_20190417_1351'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Company',
        ),
        migrations.DeleteModel(
            name='Personne',
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,714 | ahmedbendev/Reclamation-Client | refs/heads/master | /manager/forms.py | from django import forms
from requets.models import Requet , Notification
from users.models import Profile ,Address
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from django.utils.translation import gettext_lazy as _
class EditRequetForm(forms.ModelForm):
    """Manager-side edit of a request: its text and the assigned technician."""

    class Meta:
        model = Requet
        fields = ["content", "tech"]
        widgets = {
            "content": forms.Textarea(attrs={"class": "form-control", "id": "exampleFormControlTextarea1", "rows": "3"}),
        }
        labels = {
            "content": "modifier le contenu pour être plus clair",
            "tech": "la possibilité de passer au technicien efficace",
        }

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Only users whose profile group is "tech" are valid assignees.
        self.fields['tech'].queryset = User.objects.filter(profile__group="tech")
# <------------------------------- to add and admin or tech profile ------------------------------->
class AddAdminForm(UserCreationForm):
    """Manager-side sign-up form used to create admin or technician accounts:
    full name, username, unique email and the two password fields."""

    class Meta:
        model = User
        fields = ["first_name","last_name","username","email","password1","password2"]
        widgets = {
            "username" : forms.TextInput(attrs={ "class":"form-control username" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Nom d'utilisateur"}),
            "email" : forms.TextInput(attrs={ "class":"form-control email" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Email Adress de l'employer"}),
            "first_name" : forms.TextInput(attrs={ "class":"form-control username" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Nom"}),
            "last_name" : forms.TextInput(attrs={ "class":"form-control username" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Prenom"}),
        }
        # Placeholders carry the prompt text, so visible labels stay empty.
        labels = {
            "username" : "",
            "email" : "",
            "first_name" : "",
            "last_name" : "",
        }
        help_texts = {
            "username" : None ,
            "password" : _("le mot de passe doit comporter plus de 8 caractères" ),
            "email" : ("email adress doit être unique")
        }

    # <---------------- changing the default error whene username already exists ------------------------->
    # Class-level override of UserCreationForm.error_messages so both the
    # duplicate-username and password-mismatch errors are localized
    # (clean_username below reads it through self.error_messages).
    error_messages= {
        "username_exists": _("ce nom d'utilisateur existe déjà"),
        'password_mismatch': _("Les deux champs de mot de passe ne correspondent pas."),
    }

    def clean_username(self):
        """Reject usernames that already exist, with the localized message."""
        username = self.cleaned_data.get("username")
        try:
            User._default_manager.get(username=username)
            #if the user exists, then let's raise an error message
            raise forms.ValidationError(
                self.error_messages['username_exists'], #my error message
                code='username_exists', #set the error message key
            )
        except User.DoesNotExist:
            return username # if user does not exist so we can continue the registration process

    # <---------------- make the gmail a unique field ------------------------->
    def clean_email(self):
        """Ensure the email is not registered under a different username."""
        email1 = self.cleaned_data.get("email")
        username = self.cleaned_data.get("username")
        if User.objects.filter(email = email1).exclude(username = username).exists():
            raise forms.ValidationError(
                "l'adresse e-mail que vous avez entrée est déjà enregistrée, allez à la page de connexion et connectez-vous"
            )
        else :
            return email1

    def __init__(self, *args, **kwargs):
        # password1/password2 come from UserCreationForm, so their widgets
        # and labels are restyled here rather than in Meta.
        super(AddAdminForm, self).__init__(*args, **kwargs)
        self.fields['password1'].widget = forms.PasswordInput(attrs={"class":"form-control password" ,"id":"exampleInputPassword1" ,"placeholder":"mot de passe"})
        self.fields['password2'].widget = forms.PasswordInput(attrs={"class":"form-control password" ,"id":"exampleInputPassword1" ,"placeholder":"Retaper Le mot de passe"})
        self.fields['password1'].label = ""
        self.fields['password2'].label = ""
class ProfileAdminForm(forms.ModelForm):
    """Phone-number part of the employee-creation flow on the manager side."""

    class Meta:
        model = Profile
        fields = ["phone_number"]
        widgets = {
            "phone_number" : forms.TextInput(attrs={ "class":"form-control email" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" }),
        }
        labels = {
            "phone_number" : "numéro telephone :",
        }
# <----------------------------- tech edit form ---------------------------------------------->
class UserChangeInfoForm(forms.ModelForm):
    """Manager-side edit of an employee's email, first and last name."""

    class Meta:
        model = User
        fields = ["email","first_name","last_name"]
        widgets = {
            "email" : forms.TextInput(attrs={ "class":"form-control email" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Votre Email Adress"}),
            "first_name" : forms.TextInput(attrs={ "class":"form-control username" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Nom"}),
            "last_name" : forms.TextInput(attrs={ "class":"form-control username" ,"id":"exampleInputEmail1" ,"aria-describedby":"emailHelp" ,"placeholder":"Prenom"}),
        }
        labels = {
            "email" : "Email adresse",
            "first_name" : "Nom",
            "last_name" : "Prenom",
        }
class AddressTechForm(forms.ModelForm):
    """Edit a technician's address: daira, commune, street and housing number."""

    class Meta:
        REGIONS = [
            # BUG FIX: the stored value was misspelled ("boira") while every
            # other address form in the project stores "bouira", so region
            # comparisons/filters would never match for technicians. Existing
            # rows saved with "boira" may need a data migration.
            ("bouira" , "bouira"),
            ("hachimia" , "hachimia"),
            ("sour" , "sour"),
        ]
        model = Address
        fields = ["region","commune","rue","logement"]
        widgets = {
            # NOTE(review): widget-level choices restyle the dropdown only;
            # they add no form/model validation.
            "region" : forms.Select(choices = REGIONS)
        }
        labels = {
            "region" : "Daira :",
            "commune" : "la Commune :",
            "rue" : "Rue :",
            "logement" : "N° de Logement :",
        }
class NotificationForm(forms.ModelForm):
    """Free-text problem note attached to a request (Notification model)."""

    class Meta:
        model = Notification
        fields = ["content",]
        labels = {
            "content" : "écrire le problème avec précision : "
        }
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,715 | ahmedbendev/Reclamation-Client | refs/heads/master | /manager/urls.py | from django.urls import path
from . import views
# URL routes for the manager app, grouped by feature area.
urlpatterns = [
    # Manager authentication + dashboard.
    path("",views.login_manager,name="login_manager"),
    path("home/",views.home,name="manager_home"),
    # Request (reclamation) management: list, approve, delete, edit, by state.
    path("requets/",views.RequetsListView.as_view(),name="manager_requets"),
    path("aprove_requet/<int:id>/",views.aprove,name="aprove_requet"),
    path("delete_requet/<int:pk>/",views.RequetDeleteView.as_view(),name="manager_delete_requet"),
    path("edit_requet/<int:id>/",views.edit_requet,name="edit_requet"),
    path("requets_approved/",views.RequetsApprovedListView.as_view(),name="manager_approved_requets"),
    path("requets_fixed/",views.RequetsFixedListView.as_view(),name="manager_fixed_requets"),
    # Staff account administration (technicians and admins).
    path("register_employee/",views.register_employee,name="register_employee"),
    path("register_admin/",views.register_admin,name="register_admin"),
    path("list_tech/",views.TechListView.as_view(),name="list_tech"),
    path("delete_tech/<int:pk>/",views.TechDeleteView.as_view(),name="delete_tech"),
    path("tech_info/<int:id>/",views.tech_info,name="tech_info"),
    # Client and request detail / listing pages.
    path("client_info/<int:id>/",views.client_info,name="manager_client_info"),
    path("requet_info/<int:id>/",views.requet_info,name="manager_requet_info"),
    path("list_personne/",views.PersonneListView.as_view(),name="list_personne"),
    path("delete_personne/<int:pk>/",views.PersonneDeleteView.as_view(),name="delete_personne"),
    path("list_enterprise/",views.enterprise_list,name="list_enterprise"),
    # Search endpoints.
    path("search_requet/",views.search_requet,name="search_requet"),
    path("search_client/",views.search_client,name="search_client"),
    path("search_entrprise/",views.search_entrprise,name="search_entrprise"),
    path("requet_notée/",views.RequetsNoteeListView.as_view(),name="requet_notée"),
    # Statistics / chart pages and their data backend.
    path("static_tech/",views.techs,name="static_tech"),
    path("static_daira/",views.diaras,name="static_diara"),
    path("static_type/",views.type,name="static_type"),
    path("static_evolution/",views.evolution,name="static_evolution"),
    path("static_evolution_day/",views.evolution_day,name="static_evolution_day"),
    path("chart_data/",views.DataChart.as_view(),name="chart_data"),
    # DRF viewsets exposed as read-only list endpoints.
    path("rest_reclamations/",views.ReclamationSetView.as_view({'get': 'list'}),name="rest_reclamations"),
    path("rest_notes/",views.NotesSetView.as_view({'get': 'list'}),name="rest_notes"),
]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
60,716 | ahmedbendev/Reclamation-Client | refs/heads/master | /users/migrations/0008_auto_20190425_1318.py | # Generated by Django 2.2 on 2019-04-25 11:18
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the misspelled ``Adress`` model to ``Address``."""

    dependencies = [
        ('users', '0007_adress'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Adress',
            new_name='Address',
        ),
    ]
| {"/manager/views.py": ["/manager/permissions.py", "/manager/forms.py", "/requets/models.py", "/users/models.py", "/manager/serializers.py"], "/requets/admin.py": ["/requets/models.py"], "/manager/serializers.py": ["/requets/models.py"], "/users/models.py": ["/requets/models.py"], "/requets/views.py": ["/users/forms.py", "/requets/forms.py", "/requets/models.py"], "/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py", "/requets/models.py", "/users/forms.py", "/manager/forms.py"], "/requets/forms.py": ["/requets/models.py"], "/users/forms.py": ["/users/models.py"], "/manager/forms.py": ["/requets/models.py", "/users/models.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.