index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
51,823 | STrucks/EventAnalysis | refs/heads/master | /data_crawler/__main__.py | from data_crawler.data_manager import DataManager
if __name__ == '__main__':
    dm = DataManager()
    # Crawl 5000 batches sequentially, printing the batch index as progress.
    for i in range(5000):
        print(i)
        dm.next_batch()
| {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
51,824 | STrucks/EventAnalysis | refs/heads/master | /analyser/__main__.py |
if __name__ == '__main__':
# extract key words:
pass | {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
51,825 | STrucks/EventAnalysis | refs/heads/master | /data_crawler/database_handler.py | import logging
from pymongo import MongoClient
from data_crawler.configurations.config_loader import ConfigLoader
from data_crawler.models.reddit_post import RedditPost
class DatabaseHandler:
    """Thin wrapper around a MongoDB database used to store Reddit posts."""

    def __init__(self):
        # Connection target, database name and log level all come from config.
        cl = ConfigLoader()
        logging.basicConfig(level=cl.get_logging_level())
        client = MongoClient(cl.get("mongo_db_ip"))
        self.db = client[cl.get("database_name")]

    def upload_post(self, table_name, post: RedditPost):
        """Insert *post* (as a dict) into collection *table_name*."""
        result = self.db[table_name].insert_one(post.to_dict())
        logging.debug("--Inserted object with text %s..." % post.headline)

    def find(self, table_name, post: RedditPost):
        """Look up a post by (headline, section, date).

        Returns a freshly-built RedditPost, or None when no match exists.
        """
        # NOTE: the parameter is rebound to the raw query result below.
        post = self.db[table_name].find_one({"headline": post.headline,
                                             "section": post.section,
                                             "date": post.date})
        if post is not None:
            post = RedditPost(headline=post['headline'], section=post['section'], date=post['date'], time=post['time'])
        return post

    def summary(self, table_name):
        """Print the number of documents stored in collection *table_name*."""
        count = self.db[table_name].count_documents({})
        print("Number of documents: %d" % count)
| {"/data_crawler/crawler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"], "/data_crawler/data_manager.py": ["/data_crawler/crawler.py", "/data_crawler/database_handler.py"], "/data_crawler/__main__.py": ["/data_crawler/data_manager.py"], "/data_crawler/database_handler.py": ["/data_crawler/configurations/config_loader.py", "/data_crawler/models/reddit_post.py"]} |
51,830 | ChatchapongC/myform | refs/heads/master | /myform/views.py | from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.forms import UserCreationForm
from django.http import HttpResponseRedirect
from django.urls import reverse, reverse_lazy
from django.views import generic
from django.views.generic import TemplateView, CreateView, DetailView, UpdateView, FormView
from django.contrib.auth.models import User
from django.views import generic, View
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from .forms import UserRegistrationForm, EventForm, AddQuestion
from .models import *
import datetime
class HomeView(TemplateView):
    """Landing page; renders the login template."""
    template_name = 'registration/login.html'
class CreateProjectView(TemplateView):
    """Static project-list page."""
    template_name = 'myform/projectlist.html'
class SummaryView(TemplateView):
    """Static summary page."""
    template_name = 'myform/summary.html'
class ContactView(TemplateView):
    """Static contact page."""
    template_name = 'myform/contact.html'
def evaluator_view(request, event_id):
    """Render the evaluator page for one event together with its questions."""
    event = Event.objects.get(id=event_id)
    context = {
        'question_list': Question.objects.filter(event_id=event_id),
        'event': event,
    }
    return render(request, 'myform/evaluator.html', context)
def event_delete(request, event_id):
    """Delete *event_id* when the requester owns it, then return to the list."""
    event = get_object_or_404(Event, id=event_id)
    requester_owns_event = request.user == event.owner
    if requester_owns_event:
        event.delete()
    return redirect('myform:event')
def create_event(request):
    """Create a new Event owned by the requesting user.

    POST validates and saves the form, flashes a success/error message, and
    redirects to the event list either way; GET renders an empty creation form.
    """
    if request.method == 'POST':
        event_form = EventForm(request.POST)
        if event_form.is_valid():
            # Defer the save so the owner can be attached first.
            event_form = event_form.save(commit=False)
            event_form.owner = request.user
            event_form.save()
            messages.success(
                request, "Event added successfully",
                extra_tags='alert alert-success alert-dismissible fade show')
        else:
            messages.error(
                request, event_form.errors)
        return HttpResponseRedirect(reverse('myform:event'))
    else:
        event_form = EventForm()
    question_form = AddQuestion()
    context = {'event_form': event_form,
               'question_form': question_form}
    return render(request, 'myform/createform.html', context)
def event_edit(request, event_id):
    """Edit an existing Event; only its owner may do so.

    Non-owners are redirected to the read-only evaluator view with an error
    message.  POST validates and saves, redirecting to the event list on
    success; GET (or an invalid POST) renders the edit form.
    """
    event = get_object_or_404(Event, pk=event_id)
    question_list = Question.objects.filter(event_id=event_id)
    if request.user != event.owner:
        messages.error(request, 'This is not your own form')
        return HttpResponseRedirect(reverse('myform:evaluator', kwargs={'event_id': event_id}))
    if request.method == 'POST':
        form = EventForm(request.POST, instance=event)
        # BUG FIX: the original tested `form.is_valid` (the bound method,
        # always truthy) instead of calling it, so invalid data was saved.
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse('myform:event'))
        # Invalid POST falls through and re-renders with the bound form.
    else:
        form = EventForm(instance=event)
    # Built unconditionally so an invalid POST does not hit an unbound name.
    question_form = AddQuestion()
    context = {'event_form': form,
               'event': event,
               'question_form': question_form,
               'question_list': question_list
               }
    return render(request, "myform/createform.html", context)
def create_question(request, event_id):
    """Attach a new Question to event *event_id*.

    POST validates, saves and redirects back to the event's edit page with a
    flash message; GET renders an empty question form.
    """
    if request.method == 'POST':
        question_form = AddQuestion(request.POST)
        if question_form.is_valid():
            # Defer the save so the parent event can be attached first.
            question = question_form.save(commit=False)
            question.event_id = event_id
            question.save()
            messages.success(
                request, "Question added successfully")
        else:
            messages.error(
                request, question_form.errors)
        return HttpResponseRedirect(reverse('myform:edit', args=[event_id]))
    else:
        question_form = AddQuestion()
    context = {'question_form': question_form}
    return render(request, 'myform/createform.html', context)
def save_answer(request, event_id):
    """Read the submitted answer and bounce back to the event's form page.

    NOTE(review): the answer is read but never persisted — presumably an
    unfinished feature; confirm before relying on this endpoint.
    """
    ans = request.POST['new']
    return HttpResponseRedirect(f'/form/{event_id}')
class IndexView(generic.ListView):
    """List every Event; authenticated users also get their own events."""
    model = Event
    template_name = 'myform/index.html'
    context_object_name = 'event_list'

    def get_queryset(self):
        # All events; .all() on the default queryset is effectively a no-op.
        event = super().get_queryset()
        return event.all()

    def get_context_data(self, **kwargs):
        # NOTE(review): fetches a fresh queryset rather than reusing
        # self.object_list — verify this duplication is intentional.
        event = super().get_queryset()
        context = super(IndexView, self).get_context_data(**kwargs)
        if self.request.user.is_authenticated:
            # Extra context entry: only the events owned by the requester.
            context['my_event'] = event.filter(owner=self.request.user.id)
        return context
def user_login(request):
    """
    If the user is not authenticated, get user's request and execute login.
    """
    # Anything other than a POST just shows the login page.
    if request.method != "POST":
        return render(request, 'registration/login.html')
    user = authenticate(
        username=request.POST.get('username'),
        password=request.POST.get('password'),
    )
    if user is None:
        messages.error(request, 'Wrong username or password try again!')
        return render(request, 'registration/login.html')
    login(request, user)
    return HttpResponseRedirect(reverse('myform:event'))
def logout_user(request):
    """
    Function to logout user and redirect to login page.
    """
    logout(request)
    # Hard-coded path rather than reverse('myform:home'); both hit the login page.
    return HttpResponseRedirect('/login')
def user_register(request):
    """Register a new user account.

    POST validates the registration form, hashes the password, saves the user
    and redirects home; GET (or an invalid POST) renders the form.
    """
    registered = False
    if request.method == 'POST':
        form = UserRegistrationForm(data=request.POST)
        if form.is_valid():
            # BUG FIX: the original saved the form first, writing the raw
            # password to the database before hashing it.  Build the instance
            # without committing, hash, then persist exactly once.
            user = form.save(commit=False)
            user.set_password(user.password)
            user.save()
            registered = True
            return HttpResponseRedirect(reverse('myform:home'))
    else:
        form = UserRegistrationForm()
    # Template expects the form under the (misleading) key 'user'.
    context = {'user': form,
               'registered': registered}
    return render(request, 'registration/registration_form.html', context)
| {"/myform/views.py": ["/myform/forms.py", "/myform/models.py"], "/myform/forms.py": ["/myform/models.py"], "/myform/urls.py": ["/myform/views.py"]} |
51,831 | ChatchapongC/myform | refs/heads/master | /myform/forms.py | from django import forms
from django.contrib.auth.models import User
from django.forms import EmailInput, TextInput, PasswordInput, Field
from .models import *
class UserRegistrationForm(forms.ModelForm):
    """Signup form over the built-in User model.

    NOTE(review): as a plain ModelForm this saves the password verbatim;
    the registration view is responsible for hashing via set_password.
    """
    class Meta:
        model = User
        fields = ['email', 'username', 'password']
        widgets = {
            'email': EmailInput(attrs={'placeholder': 'example@email.com'}),
            'username': TextInput(attrs={'placeholder': 'username'}),
            'password': PasswordInput(attrs={'placeholder': 'password'})
        }
class EventForm(forms.ModelForm):
    """Form for creating/editing an Event; only the name is user-editable."""
    class Meta:
        model = Event
        fields = ['event_name', ]
        widgets = {
            # NOTE(review): 'question' is not a standard HTML input type —
            # presumably consumed by template CSS/JS; confirm.
            'event_name': TextInput(attrs={'placeholder': 'Event Name',
                                           'type': 'question'}),
        }
class AddQuestion(forms.ModelForm):
    """Form for adding a Question; the parent event is set by the view."""
    class Meta:
        model = Question
        fields = ['question_text']
        widgets = {
            # NOTE(review): 'question' is not a standard HTML input type.
            'question_text': TextInput(attrs={'placeholder': 'Type question',
                                              'type': 'question'}),
        }
class EvaluatorForm(forms.ModelForm):
    """Form over Evaluation exposing only the related event."""
    class Meta:
        model = Evaluation
        fields = ['event_name', ]
        widgets = {
            'event_name': TextInput(attrs={'placeholder': 'Event Name',
                                           'type': 'question'}),
        }
| {"/myform/views.py": ["/myform/forms.py", "/myform/models.py"], "/myform/forms.py": ["/myform/models.py"], "/myform/urls.py": ["/myform/views.py"]} |
51,832 | ChatchapongC/myform | refs/heads/master | /myform/urls.py | from django.contrib import admin
from django.urls import path
from myform.views import HomeView, IndexView, CreateProjectView, ContactView ,SummaryView, event_delete, create_question, create_event, event_edit, \
evaluator_view, create_question, save_answer
from django.contrib.auth.decorators import login_required
# Namespace used by reverse()/{% url %} lookups, e.g. 'myform:event'.
app_name = 'myform'

urlpatterns = [
    path('', HomeView.as_view(), name='home'),                                   # login / landing page
    path('event/', IndexView.as_view(), name='event'),                           # event list
    path('project/', CreateProjectView.as_view(), name='project'),
    path('contact/', ContactView.as_view(), name='contact'),
    path('summary/', SummaryView.as_view(), name='summary'),
    path('create/', create_event, name='create'),                                # new event form
    path('create_question/<int:event_id>', create_question, name='create_question'),
    path('edit/<int:event_id>', event_edit, name='edit'),
    path('form/<int:event_id>', evaluator_view, name='evaluator'),               # answer/evaluate an event
    path('delete/<int:event_id>', event_delete, name='delete'),
    path('addans/<int:event_id>' , save_answer, name='save')
]
| {"/myform/views.py": ["/myform/forms.py", "/myform/models.py"], "/myform/forms.py": ["/myform/models.py"], "/myform/urls.py": ["/myform/views.py"]} |
51,833 | ChatchapongC/myform | refs/heads/master | /myform/models.py | from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
import datetime
class Event(models.Model):
    """A survey/form event created by a user (its owner)."""
    owner = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    event_name = models.CharField(max_length=100)
    # Set once when the row is created.
    event_date = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.event_name
class Question(models.Model):
    """One question belonging to an Event."""
    event = models.ForeignKey(Event, related_name='event_of_question', on_delete=models.CASCADE)
    question_text = models.CharField(max_length=100)
    # Optional choices payload; blank for free-text questions (format TBD — confirm).
    choice_text = models.TextField(blank=True, null=True)

    def __str__(self):
        return self.question_text
class Evaluation(models.Model):
    """One responder's evaluation session for an event."""
    responder = models.ForeignKey(User, on_delete=models.CASCADE)
    # NOTE: despite the name this is a FK to Event, not a char field.
    event_name = models.ForeignKey(Event, on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.event_name}'
class AnswerBase(models.Model):
    """Common fields for all answer types (question, evaluation, timestamps)."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    response = models.ForeignKey(Evaluation, on_delete=models.CASCADE)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
class AnswerText(AnswerBase):
    """Free-text answer."""
    body = models.TextField(blank=True, null=True)
class AnswerRadio(AnswerBase):
    """Single-choice (radio) answer."""
    body = models.TextField(blank=True, null=True)
class AnswerSelect(AnswerBase):
    """Dropdown (select) answer."""
    body = models.TextField(blank=True, null=True)
class AnswerSelectMultiple(AnswerBase):
    """Multi-select answer."""
    body = models.TextField(blank=True, null=True)
class AnswerInteger(AnswerBase):
    """Numeric answer."""
    body = models.IntegerField(blank=True, null=True)
| {"/myform/views.py": ["/myform/forms.py", "/myform/models.py"], "/myform/forms.py": ["/myform/models.py"], "/myform/urls.py": ["/myform/views.py"]} |
51,836 | AlexeyKulyasov/Build-RESTful-Api-with-flask_restful-marshmallow | refs/heads/main | /routes.py | from typing import Tuple, List, Dict
from flask import Flask, request, jsonify, make_response
from flask_restful import Api, Resource, abort
from marshmallow import ValidationError
from models import (
DATA, get_all_books, init_db,
add_book, get_book_by_id, update_book_by_id,
delete_book_by_id, get_all_authors, add_author,
delete_author_by_id, get_author_by_id, get_books_by_id_author
)
from schemas import BookSchema, AuthorSchema
app = Flask(__name__)
api = Api(app)
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's default HTML error page."""
    return make_response(jsonify({'error': 'Not found'}), 404)
def abort_if_book_doesnt_exist(book_id: int) -> None:
    """Abort the request with a 404 payload when no book has *book_id*."""
    if get_book_by_id(book_id) is not None:
        return
    abort(404, error="Book with id={} doesn't exist".format(book_id))
def abort_if_author_doesnt_exist(author_id: int) -> None:
    """Abort the request with a 404 payload when no author has *author_id*."""
    if get_author_by_id(author_id) is not None:
        return
    abort(404, error="Author with id={} doesn't exist".format(author_id))
class BookList(Resource):
    """Collection endpoint: list all books, add a book."""

    # Fetch the list of books.
    def get(self) -> Tuple[List[Dict], int]:
        # NOTE(review): annotation says a (list, int) tuple but jsonify
        # returns a Flask Response — annotation looks stale; confirm.
        schema = BookSchema()
        return jsonify({'books': schema.dump(get_all_books(), many=True)})

    # Add a new book.
    def post(self) -> Tuple[Dict, int]:
        data = request.json
        schema = BookSchema()
        try:
            # Validates required fields and title/author uniqueness.
            book = schema.load(data)
        except ValidationError as exc:
            return exc.messages, 400
        book = add_book(book)
        return {'book': schema.dump(book)}, 201
class BookActions(Resource):
    """Single-book endpoint: fetch, update or delete one book by id."""

    # Fetch info for one book.
    def get(self, book_id: int):
        """Return the book, or abort with 404 when it does not exist."""
        abort_if_book_doesnt_exist(book_id)
        schema = BookSchema()
        return {'book': schema.dump(get_book_by_id(book_id))}

    # Update one book.
    def put(self, book_id: int):
        """Replace a book's data; 404 if missing, 400 on validation error."""
        abort_if_book_doesnt_exist(book_id)
        data = request.json
        schema = BookSchema()
        try:
            book = schema.load(data)
        except ValidationError as exc:
            return exc.messages, 400
        book.id = book_id
        update_book_by_id(book)
        # BUG FIX: the original dumped the raw request payload, so the
        # response omitted the book id; serialize the updated Book instead,
        # matching the {'book': ...} shape used by the other endpoints.
        return {'book': schema.dump(book)}, 201

    # Delete one book.
    def delete(self, book_id: int):
        """Delete the book, or abort with 404 when it does not exist."""
        abort_if_book_doesnt_exist(book_id)
        delete_book_by_id(book_id)
        return {"message": f"Book with id {book_id} is deleted."}, 200
class AuthorList(Resource):
    """Collection endpoint: list all authors, add an author."""

    # Fetch the list of authors.
    def get(self) -> Tuple[List[Dict], int]:
        # NOTE(review): annotation says a (list, int) tuple but jsonify
        # returns a Flask Response — annotation looks stale; confirm.
        schema = AuthorSchema()
        return jsonify({'authors': schema.dump(get_all_authors(), many=True)})

    # Add a new author.
    def post(self) -> Tuple[Dict, int]:
        data = request.json
        schema = AuthorSchema()
        try:
            # Validates the name is present and not already taken.
            author = schema.load(data)
        except ValidationError as exc:
            return exc.messages, 400
        author = add_author(author)
        return {"author": schema.dump(author)}, 201
class AuthorActions(Resource):
    """Single-author endpoint: list their books, or delete them (with books)."""

    # Fetch all books by one author.
    def get(self, author_id: int):
        abort_if_author_doesnt_exist(author_id)
        # Only id and title are exposed; the author is implied by the URL.
        schema = BookSchema(only=("id", "title"))
        return {'books': schema.dump(get_books_by_id_author(author_id), many=True)}

    # Delete an author together with all of their books (DB cascade).
    def delete(self, author_id: int):
        abort_if_author_doesnt_exist(author_id)
        delete_author_by_id(author_id)
        return {"message": f"Author with id {author_id} is deleted."}, 200
api.add_resource(BookList, '/api/books')  # list all books, add a book
api.add_resource(BookActions, '/api/books/<int:book_id>')  # fetch, update, delete one book
api.add_resource(AuthorList, '/api/authors')  # list all authors, add an author
api.add_resource(AuthorActions, '/api/authors/<int:author_id>')  # an author's books; delete author with all their books

if __name__ == '__main__':
    # Create/seed the SQLite database on startup, then run the dev server.
    init_db(initial_records=DATA)
    app.run(debug=True)
| {"/routes.py": ["/models.py", "/schemas.py"], "/schemas.py": ["/models.py"]} |
51,837 | AlexeyKulyasov/Build-RESTful-Api-with-flask_restful-marshmallow | refs/heads/main | /schemas.py | from typing import Dict
from marshmallow import (
Schema, fields, validates,
ValidationError, post_load, validates_schema
)
from models import (
get_author_by_name,
is_book_exists, Book, Author
)
class BookSchema(Schema):
    """(De)serializes Book objects; title and author are required on load."""
    id = fields.Int(dump_only=True)
    title = fields.Str(required=True)
    author = fields.Str(required=True)

    # Reject (title, author) pairs that already exist.
    # Used both when adding a new book and when updating an existing one.
    @validates_schema()
    def validate_exists_book(self, data, **kwargs):
        if is_book_exists(data['title'], data['author']):
            errors = dict()
            errors['error'] = 'Book with title "{title}" and author "{author}" already exists, ' \
                              'please use a different title or author.'.format(title=data['title'],
                                                                               author=data['author'])
            raise ValidationError(errors)

    @post_load
    def create_book(self, data: Dict, **kwargs) -> Book:
        # Convert the validated dict into a Book dataclass instance.
        return Book(**data)
class AuthorSchema(Schema):
    """(De)serializes Author objects; the name is required on load."""
    id = fields.Int(dump_only=True)
    name = fields.Str(required=True)

    # Reject names that already exist.
    # Used when adding a new author.
    @validates('name')
    def validate_name(self, name: str) -> None:
        if get_author_by_name(name) is not None:
            raise ValidationError(
                'Author with name "{name}" already exists, '
                'please use a different name.'.format(name=name)
            )

    @post_load
    def create_author(self, data: Dict, **kwargs) -> Author:
        # Convert the validated dict into an Author dataclass instance.
        return Author(**data)
| {"/routes.py": ["/models.py", "/schemas.py"], "/schemas.py": ["/models.py"]} |
51,838 | AlexeyKulyasov/Build-RESTful-Api-with-flask_restful-marshmallow | refs/heads/main | /models.py | import sqlite3
from dataclasses import dataclass
from typing import List, Optional, Tuple
ENABLE_FOREIGN_KEY = "PRAGMA foreign_keys = ON;"
DATA = [
{'id': 1, 'title': 'A Byte of Python', 'author': 'Swaroop C. H.'},
{'id': 2, 'title': 'Moby-Dick; or, The Whale', 'author': 'Herman Melville'},
{'id': 3, 'title': 'War and Peace', 'author': 'Leo Tolstoy'},
]
BOOKS_TABLE_NAME = 'books'
AUTHORS_TABLE_NAME = 'authors'
@dataclass
class Author:
    """In-memory author record; id is None until assigned by the DB."""
    name: str
    id: Optional[int] = None
@dataclass
class Book:
    """In-memory book record; author holds the author's *name*, not an id."""
    title: str
    author: str
    id: Optional[int] = None
def init_db(initial_records: List[dict]) -> None:
    """Create and seed the authors/books tables on first run.

    No-op when the authors table already exists.
    """
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(
            "SELECT name FROM sqlite_master "
            f"WHERE type='table' AND name='{AUTHORS_TABLE_NAME}';"
        )
        exists = cursor.fetchone()
        # If the "authors" table does not exist in the DB yet, perform the
        # initial setup: create the two related tables "authors" and "books"
        # and seed them with the records from DATA.
        if not exists:
            cursor.executescript(
                f'CREATE TABLE `{AUTHORS_TABLE_NAME}`'
                '(id INTEGER PRIMARY KEY AUTOINCREMENT, name)'
            )
            cursor.executemany(
                f'INSERT INTO `{AUTHORS_TABLE_NAME}` '
                '(id, name) VALUES (?, ?)',
                [(item['id'], item['author']) for item in initial_records]
            )
            cursor.executescript(
                f'CREATE TABLE `{BOOKS_TABLE_NAME}`'
                '(id INTEGER PRIMARY KEY AUTOINCREMENT, title,'
                f'id_author INTEGER NOT NULL REFERENCES {AUTHORS_TABLE_NAME}(id) ON DELETE CASCADE)'
            )
            cursor.executemany(
                f'INSERT INTO `{BOOKS_TABLE_NAME}` '
                '(title, id_author) VALUES (?, ?)',
                # Seed assumption: each record's id doubles as its author id.
                [(item['title'], item['id']) for item in initial_records]
            )
def _get_book_obj_from_row(row: Tuple) -> Book:
    """Map an (id, title, author_name) result row onto a Book."""
    return Book(id=row[0], title=row[1], author=row[2])
def _get_id_author_or_add_author_if_not_exist(c: sqlite3.Cursor, name: str) -> int:
    """Return the id for author *name*, inserting the author first if absent.

    Operates on the caller's cursor/transaction; does not commit.
    """
    c.execute(
        f"""
        SELECT id FROM {AUTHORS_TABLE_NAME}
        WHERE name = ?
        """, (name,)
    )
    author_id = c.fetchone()
    if author_id:
        return author_id[0]
    c.execute(
        f"""
        INSERT INTO `{AUTHORS_TABLE_NAME}` (name) VALUES (?)
        """, (name,)
    )
    # id of the row just inserted.
    return c.lastrowid
def get_all_books() -> List[Book]:
    """Return every stored book with its author's name resolved via a join."""
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(f'SELECT books.id, books.title, author.name '
                       f'FROM `{BOOKS_TABLE_NAME}` books '
                       f'INNER JOIN {AUTHORS_TABLE_NAME} author ON books.id_author = author.id')
        all_books = cursor.fetchall()
        return [_get_book_obj_from_row(row) for row in all_books]
def add_book(book: Book) -> Book:
    """Insert *book*, creating its author if needed; returns it with id set.

    The connection context manager commits the transaction on exit.
    """
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        author_id = _get_id_author_or_add_author_if_not_exist(cursor, book.author)
        cursor.execute(
            f"""
            INSERT INTO `{BOOKS_TABLE_NAME}`
            (title, id_author) VALUES (?, ?)
            """, (book.title, author_id)
        )
        book.id = cursor.lastrowid
        return book
def get_book_by_id(book_id: int) -> Optional[Book]:
    """Return the book with *book_id*, or None (implicit) when absent."""
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        # NOTE(review): uses LEFT JOIN while get_all_books uses INNER JOIN —
        # presumably deliberate tolerance of orphan rows; confirm.
        cursor.execute(f'SELECT books.id, books.title, author.name '
                       f'FROM `{BOOKS_TABLE_NAME}` books '
                       f'LEFT JOIN {AUTHORS_TABLE_NAME} author ON books.id_author = author.id '
                       f'WHERE books.id = ?', (book_id,)
                       )
        book = cursor.fetchone()
        if book:
            return _get_book_obj_from_row(book)
def update_book_by_id(book: Book) -> None:
    """Overwrite title and author of the row matching *book.id*.

    Creates the author row first when the new author name is unknown.
    """
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        author_id = _get_id_author_or_add_author_if_not_exist(cursor, book.author)
        cursor.execute(
            f"""
            UPDATE {BOOKS_TABLE_NAME}
            SET title = ? ,
                id_author = ?
            WHERE id = ?
            """, (book.title, author_id, book.id)
        )
        conn.commit()
def delete_book_by_id(book_id: int) -> None:
    """Delete the book row with *book_id*; silently a no-op when absent."""
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(
            f"""
            DELETE FROM {BOOKS_TABLE_NAME}
            WHERE id = ?
            """, (book_id,)
        )
        conn.commit()
def get_all_authors() -> List[Author]:
    """Fetch every author row and wrap each one in an Author object."""
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        rows = cursor.execute(f'SELECT * FROM {AUTHORS_TABLE_NAME}').fetchall()
        return [Author(id=author_id, name=author_name) for author_id, author_name in rows]
def add_author(author: Author) -> Author:
    """Insert *author* and return it with its new id set.

    Uniqueness of the name is enforced upstream by AuthorSchema, not here.
    """
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(
            f"""
            INSERT INTO `{AUTHORS_TABLE_NAME}`
            (name) VALUES (?)
            """, (author.name,)
        )
        author.id = cursor.lastrowid
        return author
def delete_author_by_id(author_id: int) -> None:
    """Delete an author; ON DELETE CASCADE removes their books as well.

    The foreign_keys pragma must be enabled per-connection for the cascade
    to fire.  NOTE(review): executescript (rather than execute) also commits
    any pending transaction first — presumably chosen so the pragma takes
    effect; confirm.
    """
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.executescript(ENABLE_FOREIGN_KEY)
        cursor.execute(
            f"""
            DELETE FROM {AUTHORS_TABLE_NAME}
            WHERE id = ?
            """, (author_id,)
        )
        conn.commit()
def get_author_by_name(author_name: str) -> Optional[Author]:
    """Return the author named *author_name*, or None (implicit) when absent."""
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(
            f'SELECT * FROM `{AUTHORS_TABLE_NAME}` WHERE name = ?', (author_name,)
        )
        author = cursor.fetchone()
        if author:
            return Author(id=author[0], name=author[1])
def get_author_by_id(author_id: int) -> Optional[Author]:
    """Return the author with *author_id*, or None (implicit) when absent."""
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(
            f'SELECT * FROM `{AUTHORS_TABLE_NAME}` WHERE id = ?', (author_id,)
        )
        author = cursor.fetchone()
        if author:
            return Author(id=author[0], name=author[1])
def get_books_by_id_author(author_id: int) -> List[Book]:
    """Return all books written by the author with *author_id*.

    BUG FIX: the original selected ``*`` from the books table, so the third
    column was the numeric ``id_author`` and _get_book_obj_from_row stored
    that integer in Book.author instead of the author's name.  Join the
    authors table (as get_all_books does) so Book.author is the name.
    """
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(f'SELECT books.id, books.title, author.name '
                       f'FROM `{BOOKS_TABLE_NAME}` books '
                       f'INNER JOIN {AUTHORS_TABLE_NAME} author ON books.id_author = author.id '
                       f'WHERE books.id_author = ?', (author_id,)
                       )
        books = cursor.fetchall()
        return [_get_book_obj_from_row(row) for row in books]
def is_book_exists(book_title: str, author_name: str) -> bool:
    """Return True when a book titled *book_title* by *author_name* is stored."""
    query = (f'SELECT b.title, a.name '
             f'FROM `{BOOKS_TABLE_NAME}` b '
             f'JOIN `{AUTHORS_TABLE_NAME}` a ON b.id_author = a.id '
             f'WHERE b.title = ? and a.name = ?')
    with sqlite3.connect('table_books.db') as conn:
        cursor = conn.cursor()
        cursor.execute(query, (book_title, author_name))
        return cursor.fetchone() is not None
| {"/routes.py": ["/models.py", "/schemas.py"], "/schemas.py": ["/models.py"]} |
51,840 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0004_product_p_viewcount.py | # Generated by Django 3.1.6 on 2021-02-05 02:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds Product.p_viewcount (verbose_name is Korean for "view count")."""

    dependencies = [
        ('main', '0003_auto_20210205_1134'),
    ]

    operations = [
        migrations.AddField(
            model_name='product',
            name='p_viewcount',
            field=models.IntegerField(default=0, null=True, verbose_name='조회수'),
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,841 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0001_initial.py | # Generated by Django 3.1.6 on 2021-02-04 17:24
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates LiveProduct and Product.

    Korean verbose_names: live start time, product title/price/description,
    main photo, registration date, like count, live-broadcast flag.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='LiveProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('l_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='live시작시간')),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('p_category', models.CharField(max_length=200)),
                ('p_name', models.CharField(max_length=32, verbose_name='상품제목')),
                ('p_price', models.IntegerField(default=0, verbose_name='상품가격')),
                ('p_content', models.CharField(max_length=32, verbose_name='상품설명')),
                ('p_image', models.ImageField(null=True, upload_to='', verbose_name='대표사진')),
                ('p_tag', models.CharField(max_length=200)),
                ('p_nego', models.BooleanField()),
                ('p_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='등록날짜')),
                ('p_likecount', models.IntegerField(default=0, null=True, verbose_name='좋아요수')),
                ('p_live', models.CharField(default='', max_length=200, verbose_name='라이브방송여부')),
            ],
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,842 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0005_auto_20210205_1211.py | # Generated by Django 3.1.6 on 2021-02-05 03:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: changes Product.p_live default from '' to 0."""

    dependencies = [
        ('main', '0004_product_p_viewcount'),
    ]

    operations = [
        migrations.AlterField(
            model_name='product',
            name='p_live',
            field=models.CharField(default=0, max_length=200, verbose_name='라이브방송여부'),
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,843 | ondi-project/ondi-back | refs/heads/master | /ondi/main/urls.py | from django.urls import path
from .views import *
from . import views
urlpatterns = [
path('', ProductListCreateView.as_view()),
path('livelist',views.livelist, name='livelist'),
path('post', views.post, name='post'),
path('view_product', views.view_product,name='view_product'),
path('category', views.category, name ='category'),
path('search', views.search, name ='search')
]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,844 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0007_product_p_buy.py | # Generated by Django 3.1.6 on 2021-02-05 06:31
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds Product.p_buy (verbose_name is Korean for "sold out flag")."""

    dependencies = [
        ('main', '0006_merge_20210205_1506'),
    ]

    operations = [
        migrations.AddField(
            model_name='product',
            name='p_buy',
            field=models.BooleanField(default=False, verbose_name='판매완료여부'),
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,845 | ondi-project/ondi-back | refs/heads/master | /ondi/main/admin.py | from django.contrib import admin
from .models import *
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
    """Admin list view for products showing key identifying columns."""
    list_display = ('id', 'p_category','p_name','p_date','p_seller','p_tag')
    fields =()
# admin.site.register(Product)
@admin.register(LiveProduct)
class LiveProductAdmin(admin.ModelAdmin):
    """Admin list view for live-broadcast products."""
    list_display = ('id', 'l_product', 'l_date', 'l_sprice')
    fields = ()
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,846 | ondi-project/ondi-back | refs/heads/master | /ondi/user/models.py | from django.apps import apps
from django.contrib.auth.models import AbstractUser
from django.core.validators import MaxValueValidator
from django.core.validators import MinLengthValidator
from django.core.validators import MinValueValidator
from django.db import models
class User(AbstractUser):
    """Custom user: an 11-digit phone number plus an optional profile image."""
    # max_length and MinLengthValidator both equal 11 → exactly 11 characters.
    phone = models.CharField(max_length=11, validators=[MinLengthValidator(11)])
    image = models.ImageField(null=True)
class Score(models.Model):
    """A 0–5 rating with comment from one user about another; one per pair."""
    class Meta:
        unique_together = ('from_user', 'to_user',)

    from_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_from_user')
    to_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_to_user')
    score = models.IntegerField(
        validators=[
            MaxValueValidator(5),
            MinValueValidator(0)
        ]
    )
    comment = models.TextField()
class Report(models.Model):
    """A complaint filed by one user against another; one per pair."""
    class Meta:
        unique_together = ('from_user', 'to_user',)

    from_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_from_user')
    to_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_to_user')
    content = models.TextField(max_length=255)
class Notification(models.Model):
    """A message delivered to a single user."""
    to_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_to_user')
    message = models.TextField()
class Favorite(models.Model):
    """A user's bookmark of a product; at most one per (user, product)."""
    class Meta:
        unique_together = ('from_user', 'product',)

    from_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_from_user')
    product = models.ForeignKey('main.Product', on_delete=models.CASCADE, related_name='%(class)s_product')
class Like(models.Model):
    """A user's like of a product; at most one per (user, product)."""
    class Meta:
        unique_together = ('from_user', 'product',)

    from_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_from_user')
    product = models.ForeignKey('main.Product', on_delete=models.CASCADE, related_name='%(class)s_product')
# Purchase record (original comment: 구매)
class Sold(models.Model):
    """Records that from_user bought *product* at *price*; one per pair."""
    class Meta:
        unique_together = ('from_user', 'product',)

    from_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='%(class)s_from_user')
    product = models.ForeignKey('main.Product', on_delete=models.CASCADE, related_name='%(class)s_product')
    price = models.IntegerField()
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,847 | ondi-project/ondi-back | refs/heads/master | /ondi/user/migrations/0007_auto_20210205_0642.py | # Generated by Django 3.1.6 on 2021-02-05 06:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: add Favorite.product, enforce (from_user, product)
    uniqueness on Favorite, and create the Like model. Do not hand-edit."""

    dependencies = [
        ('main', '0003_auto_20210205_0553'),
        ('user', '0006_auto_20210205_0534'),
    ]

    operations = [
        migrations.AddField(
            model_name='favorite',
            name='product',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='favorite_product', to='main.product'),
            preserve_default=False,
        ),
        migrations.AlterUniqueTogether(
            name='favorite',
            unique_together={('from_user', 'product')},
        ),
        migrations.CreateModel(
            name='Like',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('from_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='like_from_user', to=settings.AUTH_USER_MODEL)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='like_product', to='main.product')),
            ],
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,848 | ondi-project/ondi-back | refs/heads/master | /ondi/user/serializers.py | from django.db import transaction
from dj_rest_auth.registration.serializers import RegisterSerializer
from rest_framework import serializers
from .models import *
class UserSerializer(serializers.ModelSerializer):
    """Full account representation, including auth/permission flags."""

    class Meta:
        model = User
        fields = [
            'id',
            'username',
            'first_name',
            'last_name',
            'email',
            'groups',
            'user_permissions',
            'is_staff',
            'is_active',
            'is_superuser',
            'last_login',
            'date_joined',
            'phone',   # custom profile field
            'image',   # custom profile field
        ]
class UserRegisterSerializer(RegisterSerializer):
    """Registration serializer extended with a required `phone` field."""
    phone = serializers.CharField(max_length=11)

    @transaction.atomic
    def save(self, request):
        # Create the base account first, then persist the extra phone field
        # in the same transaction.
        user = super().save(request)
        user.phone = self.data.get('phone')
        user.save()
        return user
class ReportSerializer(serializers.ModelSerializer):
    """Report payload; `from_user` is set by the view, never by the client."""

    class Meta:
        model = Report
        fields = [
            'id',
            'from_user',
            'to_user',
            'content',
        ]
        read_only_fields = ['from_user',]
class ScoreSerializer(serializers.ModelSerializer):
    """Rating payload; `from_user` is set by the view, never by the client."""

    class Meta:
        model = Score
        fields = [
            'id',
            'from_user',
            'to_user',
            'score',
            'comment',
        ]
        read_only_fields = ['from_user',]
class NotificationSerializer(serializers.ModelSerializer):
    """Notification payload delivered to a user."""

    class Meta:
        model = Notification
        fields = [
            'id',
            'to_user',
            'message',
        ]
class FavoriteSerializer(serializers.ModelSerializer):
    """Favorite payload; `from_user` is set by the view, never by the client."""

    class Meta:
        model = Favorite
        fields = [
            'id',
            'from_user',
            'product',
        ]
        read_only_fields = ['from_user',]
class LikeSerializer(serializers.ModelSerializer):
    """Like payload; `from_user` is set by the view, never by the client."""

    class Meta:
        model = Like
        fields = [
            'id',
            'from_user',
            'product',
        ]
        read_only_fields = ['from_user',]
class SoldSerializer(serializers.ModelSerializer):
    """Sale-record payload; `from_user` is set by the view, never by the client."""

    class Meta:
        model = Sold
        fields = '__all__'
        read_only_fields = ['from_user',]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,849 | ondi-project/ondi-back | refs/heads/master | /ondi/user/urls.py | from django.urls import path
from .views import *
urlpatterns = [
    # Accounts and per-user product listings.
    path('users', UserListView.as_view()),
    path('users/<int:pk>', UserRetrieveView.as_view()),
    path('users/<int:pk>/selling', UserSellingListView.as_view()),
    path('users/<int:pk>/sold', UserSoldListView.as_view()),
    # User-to-user reports.
    path('reports', ReportListCreateView.as_view()),
    path('reports/<int:pk>', ReportRetrieveDestroyView.as_view()),
    # Notifications pushed to a user.
    path('notifications', NotificationListCreateView.as_view()),
    path('notifications/<int:pk>', NotificationRetrieveDestroyView.as_view()),
    # Seller ratings.
    path('scores', ScoreListCreateView.as_view()),
    path('scores/<int:pk>', ScoreRetrieveDestroyView.as_view()),
    # Favorites and likes on products.
    path('favorites', FavoriteListCreateView.as_view()),
    path('favorites/<int:pk>', FavoriteRetrieveDestroyView.as_view()),
    path('likes', LikeListCreateView.as_view()),
    path('likes/<int:pk>', LikeRetrieveDestroyView.as_view()),
    # Completed sales.
    path('sold', SoldListCreateView.as_view()),
    path('sold/<int:pk>', SoldRetrieveDestroyView.as_view()),
]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,850 | ondi-project/ondi-back | refs/heads/master | /ondi/user/migrations/0009_purchased.py | # Generated by Django 3.1.6 on 2021-02-05 12:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: create the Purchased model with a unique
    (from_user, product) pair and a price field. Do not hand-edit."""

    dependencies = [
        ('main', '0008_auto_20210205_1806'),
        ('user', '0008_auto_20210205_0736'),
    ]

    operations = [
        migrations.CreateModel(
            name='Purchased',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('price', models.IntegerField()),
                ('from_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='purchased_from_user', to=settings.AUTH_USER_MODEL)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='purchased_product', to='main.product')),
            ],
            options={
                'unique_together': {('from_user', 'product')},
            },
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,851 | ondi-project/ondi-back | refs/heads/master | /ondi/main/serializer.py | from django.contrib.auth.models import *
from rest_framework import generics,serializers
from rest_framework.response import Response
from .models import *
from django.db.models import Q
class ProductSerializer(serializers.ModelSerializer):
    """Serialize a Product, adding a `liked` flag for the requesting user."""

    class Meta:
        model = Product
        fields = '__all__'

    # Computed per-request: has the current user liked this product?
    liked = serializers.SerializerMethodField()

    def get_liked(self, obj):
        """Return True when the requesting user has liked *obj*.

        Falls back to False when there is no request in the serializer
        context (e.g. serialized outside a view) or the user is anonymous.
        """
        request = self.context.get('request', None)
        if request is None or not request.user.is_authenticated:
            return False
        # BUG FIX: the original filtered on an undefined name `product`;
        # the resulting NameError was swallowed by a bare `except:` so the
        # flag was always False. Filter on `obj` and ask the DB directly.
        return Like.objects.filter(from_user=request.user, product=obj).exists()
# Shown on the main page, newest first.
class ProductListSerializer(serializers.ModelSerializer):
    """Compact product card used by list pages.

    NOTE(review): does not yet expose whether the requesting user liked the
    product — confirm whether list pages need it.
    """

    class Meta:
        model = Product
        fields = ('id','p_name', 'p_price','p_image','p_date','p_viewcount','p_likecount','p_tag')
class ProductListView(generics.ListAPIView):
    """Unsold products, newest first (main-page listing)."""
    queryset = Product.objects.filter(p_buy = False)
    serializer_class = ProductListSerializer

    def list(self, request):
        """Return products sorted by p_date descending, paginated if enabled.

        FIX: the original serialized the entire queryset even when a page
        was going to be serialized again — wasted O(n) work per request.
        Serialize exactly once.
        """
        queryset = self.get_queryset()
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            data = sorted(serializer.data, key=lambda x: x['p_date'], reverse=True)
            return self.get_paginated_response(data)
        serializer = self.get_serializer(queryset, many=True)
        data = sorted(serializer.data, key=lambda x: x['p_date'], reverse=True)
        return Response(data)
# Per-category listing (takes a category and a sort option).
class CategoryListView(generics.ListAPIView):
    """Products in one category, ordered by a caller-chosen field."""
    queryset = Product.objects.all()
    serializer_class = ProductSerializer

    # Sort keys the caller may request ('p_keyword' is handled upstream).
    _SORT_OPTIONS = ('p_likecount', 'p_viewcount', 'p_date')

    def list(self, request, category, view_option):
        """Return products in *category* sorted by *view_option* descending.

        BUG FIX: the original left `option` unassigned (UnboundLocalError)
        when view_option was not one of the known sort keys; unknown values
        now fall back to 'p_date'. Also serializes only once instead of
        serializing the full queryset before paginating.
        """
        option = view_option if view_option in self._SORT_OPTIONS else 'p_date'
        queryset = Product.objects.filter(p_category=category)
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            data = sorted(serializer.data, key=lambda x: x[option], reverse=True)
            return self.get_paginated_response(data)
        serializer = self.get_serializer(queryset, many=True)
        data = sorted(serializer.data, key=lambda x: x[option], reverse=True)
        return Response(data)
# Search page (takes a search term).
class SearchListView(generics.ListAPIView):
    """Products whose tag or name contains the search term, newest first."""
    queryset = Product.objects.all()
    serializer_class = ProductListSerializer

    def list(self, request, product_search):
        """Return matching products sorted by p_date descending.

        FIX: the original serialized the entire queryset even when a page
        was serialized again afterwards; serialize exactly once.
        """
        queryset = Product.objects.filter(
            Q(p_tag__contains=product_search) | Q(p_name__contains=product_search))
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            data = sorted(serializer.data, key=lambda x: x['p_date'], reverse=True)
            return self.get_paginated_response(data)
        serializer = self.get_serializer(queryset, many=True)
        data = sorted(serializer.data, key=lambda x: x['p_date'], reverse=True)
        return Response(data)
# Shown in the live-broadcast list.
class LiveListSerializer(serializers.ModelSerializer):
    """Live-broadcast entry with its product nested read-only."""
    l_product = ProductListSerializer(read_only =True)

    class Meta:
        model = LiveProduct
        fields = ('id','l_date', 'l_product','l_sprice')
class LiveListView(generics.ListAPIView):
    """Scheduled live broadcasts, soonest first."""
    queryset = LiveProduct.objects.all()
    serializer_class = LiveListSerializer

    def list(self, request):
        """Return broadcasts sorted by l_date ascending, paginated if enabled.

        FIX: the original serialized the entire queryset even when a page
        was serialized again afterwards; serialize exactly once.
        """
        queryset = self.get_queryset()
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            data = sorted(serializer.data, key=lambda x: x['l_date'])
            return self.get_paginated_response(data)
        serializer = self.get_serializer(queryset, many=True)
        data = sorted(serializer.data, key=lambda x: x['l_date'])
        return Response(data)
# TODO: per-category helper still to be written.
# Detail view for a single product.
class ProductView(generics.ListAPIView):
    """Detail listing for one product: bumps its view counter and appends
    `like` / `livebutton` flags for the requesting user to the response."""
    queryset = Product.objects.all()
    serializer_class = ProductSerializer

    def list(self, request, product_id, user_id):
        # Bump the view counter.
        product = Product.objects.get(id=product_id)
        before_value = product.p_viewcount
        after = (int(before_value) + 1)
        product.p_viewcount = (after)
        product.save()
        # Check whether this user has liked the product.
        user = User.objects.get(id=user_id)
        print(user_id)
        # p_like =False
        try:
            print('like')
            like = Like.objects.filter(from_user=user, product=product)
            # IndexError when no Like row exists -> except branch -> False.
            if like[0]:
                p_like = True
        except:
            p_like = False
        # Narrow the queryset to the requested product.
        self.queryset = Product.objects.filter(id=product_id)
        # Show the live button only to the seller viewing their own product.
        # NOTE(review): user_id arrives as a string from the query string,
        # so this == against the integer seller id may always be False — confirm.
        if user_id == self.queryset[0].p_seller.id:
            livebutton = True
        else:
            livebutton = False
        queryset = self.get_queryset()
        serializer_class = self.get_serializer_class()
        serializer = serializer_class(queryset, many=True)
        sorted_serializer_data = sorted(serializer.data, key=lambda x: x['p_date'], reverse=True)
        # The flags are appended as an extra trailing element of the list.
        sorted_serializer_data.append({'like': p_like, 'livebutton': livebutton})
        page = self.paginate_queryset(queryset)
        print("Product Work", page)
        if page is not None:
            # NOTE(review): the paginated branch rebuilds the data without
            # the like/livebutton element — confirm whether that is intended.
            serializer = self.get_serializer(page, many=True)
            sorted_serializer_data = sorted(serializer.data, key=lambda x: x['p_date'], reverse=True)
            return self.get_paginated_response(sorted_serializer_data)
        return Response(sorted_serializer_data)
51,852 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0003_auto_20210205_0553.py | # Generated by Django 3.1.6 on 2021-02-05 05:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add LiveProduct.l_sprice and Product.p_viewcount;
    alter Product.p_image and Product.p_live. Do not hand-edit."""

    dependencies = [
        ('main', '0002_auto_20210205_1014'),
    ]

    operations = [
        migrations.AddField(
            model_name='liveproduct',
            name='l_sprice',
            field=models.IntegerField(default=0, verbose_name='라이브 시작가격'),
        ),
        migrations.AddField(
            model_name='product',
            name='p_viewcount',
            field=models.IntegerField(default=0, null=True, verbose_name='조회수'),
        ),
        migrations.AlterField(
            model_name='product',
            name='p_image',
            field=models.ImageField(upload_to='', verbose_name='대표사진'),
        ),
        migrations.AlterField(
            model_name='product',
            name='p_live',
            field=models.CharField(default=0, max_length=200, verbose_name='라이브방송여부'),
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,853 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0008_auto_20210205_1806.py | # Generated by Django 3.1.6 on 2021-02-05 09:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: tighten max_length / verbose_name on several
    Product fields. Do not hand-edit."""

    dependencies = [
        ('main', '0007_product_p_buy'),
    ]

    operations = [
        migrations.AlterField(
            model_name='product',
            name='p_category',
            field=models.CharField(max_length=20),
        ),
        migrations.AlterField(
            model_name='product',
            name='p_content',
            field=models.CharField(max_length=200, verbose_name='상품설명'),
        ),
        migrations.AlterField(
            model_name='product',
            name='p_name',
            field=models.CharField(max_length=20, verbose_name='상품제목'),
        ),
        migrations.AlterField(
            model_name='product',
            name='p_tag',
            field=models.CharField(max_length=20),
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,854 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0006_merge_20210205_1506.py | # Generated by Django 3.1.6 on 2021-02-05 06:06
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated merge migration joining the 0005 and 0003 branches;
    intentionally contains no operations. Do not hand-edit."""

    dependencies = [
        ('main', '0005_auto_20210205_1211'),
        ('main', '0003_auto_20210205_0553'),
    ]

    operations = [
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,855 | ondi-project/ondi-back | refs/heads/master | /ondi/main/views.py | from django.shortcuts import render
from rest_framework import generics,serializers
from rest_framework.response import Response
from .models import *
from .serializer import *
from django.contrib.auth.hashers import make_password,check_password
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
import json
import simplejson
from django.template.defaulttags import register
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.core.serializers.json import DjangoJSONEncoder
import json
from rest_framework.views import APIView
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response
from rest_framework import status
# Main page: products shown newest first.
class ProductListCreateView(generics.ListCreateAPIView):
    """Main page: list unsold products newest-first, or create a listing."""
    queryset = Product.objects.filter(p_buy = False).order_by('-p_date')
    serializer_class = ProductSerializer

    def perform_create(self, serializer):
        # BUG FIX: the Product model's FK field is `p_seller`, not `seller`;
        # saving with the wrong keyword raised a TypeError on every create.
        serializer.save(p_seller=self.request.user)
# @method_decorator(csrf_exempt,name='dispatch')
# def main(request):
# if request.method == "GET":
# return ProductListView.as_view()(request)
# Live-list page: broadcasts shown soonest-upcoming first.
@method_decorator(csrf_exempt, name='dispatch')
def livelist(request):
    """Forward GET requests to the live-broadcast listing view."""
    if request.method != "GET":
        return None
    return LiveListView.as_view()(request)
# Category page:
@method_decorator(csrf_exempt, name='dispatch')
def category(request):
    """Dispatch a GET to CategoryListView with category and sort option."""
    if request.method == "GET":
        product_category = request.GET.get('p_category')
        # Sort option: 'p_keyword' 'p_likecount' 'p_viewcount' 'p_date'
        product_view_option = request.GET.get('view_option')
        ############# temporary defaults — to be removed ###########
        if product_category == None:
            product_category = '의류'
        if product_view_option == None or product_view_option == 'p_keyword':
            product_view_option = 'p_likecount'
        ###############
        # Forward once the category info is in hand.
        return CategoryListView.as_view()(request, product_category, product_view_option)
# Search page
@method_decorator(csrf_exempt, name='dispatch')
def search(request):
    """Dispatch a GET to SearchListView with the search term."""
    if request.method == "GET":
        product_search = request.GET.get('p_search')
        ############# temporary default — to be removed ###########
        if product_search == None:
            product_search = '상품'
        ###############
        # Forward once the search term is in hand.
        return SearchListView.as_view()(request, product_search)
# Product-registration page: expects {'p_category', 'p_name', 'p_price', 'p_content', 'p_image', 'p_tag', 'p_nego', 'p_date', 'p_likecount', 'p_seller', 'p_live'}
@method_decorator(csrf_exempt, name='dispatch')
def post(request):
    """Create a Product from a multipart POST (image file + form fields)."""
    # if request.method == "GET":
    #     print('get')
    #     return HttpResponse(simplejson.dumps({"response": "GET"}))
    if request.method == "POST":
        print('post')
        image = request.FILES['p_image']
        # request.GET.get('')
        # NOTE(review): request.body of a multipart upload is not JSON, so
        # this json.loads will likely raise — confirm what the client sends.
        req = json.loads(request.body.decode('utf-8'))
        category = request.POST.get('p_category', None)
        name = request.POST.get('p_name', None)
        price = request.POST.get('p_price', None)
        content = request.POST.get('p_content', None)
        tag = request.POST.get('p_tag', None)    # list-like string
        nego = request.POST.get('p_nego', None)  # "True"/"False" string
        seller_id = request.POST.get('p_seller', None)  # seller's user id
        seller = User.objects.get(id=seller_id)
        if req != "None":
            print("POST 데이터를 정상적으로 입력받았습니다")
            poster = Product(p_category=category, p_name=name, p_price=price, p_content=content, p_tag=tag, p_nego=nego, p_likecount=0, p_seller=seller, p_live=None)
            poster.p_image = image
            poster.p_date = timezone.now()
            poster.save()
            return HttpResponse(simplejson.dumps({"response": "Good"}))
        else:
            print("POST 데이터를 찾을 수 없습니다")
            return HttpResponse(simplejson.dumps({"response": "Fail"}))
@method_decorator(csrf_exempt, name='dispatch')
def view_product(request):
    """GET: show one product (with like / live-button flags).
    POST: register a live-broadcast request for a product."""
    if request.method == "GET":
        # Show a specific product.
        product_id = request.GET.get('p_id')
        user_id = request.GET.get('u_id')
        ########## temporary debug defaults — to be removed ##########
        if product_id == None:
            product_id = 6
            user_id = 2
        ####################
        return ProductView.as_view()(request, product_id, user_id)
    if request.method == "POST":
        print('POST')
        # Live status: absent -> OFF, requested -> READY, airing -> ON.
        live = request.POST.get('p_live', None)
        # The seller asked to go live:
        if live == 'READY':
            product_id = request.POST.get('p_id', None)      # product to broadcast
            live_time = request.POST.get('l_date', None)     # scheduled start time
            live_price = request.POST.get('l_sprice', None)  # starting price
            # Update the product's p_live flag and create a LiveProduct row.
            product = Product.objects.get(id=product_id)
            product.p_live = live  # "None" -> "READY"
            product.save()
            liveposter = LiveProduct(l_date=live_time, l_product=product, l_sprice=live_price)
            liveposter.save()
            print("POST 데이터를 정상적으로 입력받았습니다")
            return HttpResponse(simplejson.dumps({"response": "Good"}))
        else:
            print("POST 데이터를 찾을 수 없습니다")
            return HttpResponse(simplejson.dumps({"response": "Fail"}))
51,856 | ondi-project/ondi-back | refs/heads/master | /ondi/user/views.py | from django.shortcuts import render
from rest_framework import generics
from rest_framework.response import Response
from .models import *
from .serializers import *
from main.serializers import *
class UserListView(generics.ListAPIView):
    """GET /users — list every account."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
class UserRetrieveView(generics.RetrieveAPIView):
    """GET /users/<pk> — one account."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
class UserSellingListView(generics.ListAPIView):
    """GET /users/<pk>/selling — products the user still has on sale."""
    serializer_class = ProductSerializer

    def get_queryset(self):
        # p_buy=False -> not yet sold.
        return Product.objects.filter(p_seller_id=self.kwargs.get('pk'), p_buy=False)
class UserSoldListView(generics.ListAPIView):
    """GET /users/<pk>/sold — products the user has already sold."""
    serializer_class = ProductSerializer

    def get_queryset(self):
        # p_buy=True -> already sold.
        return Product.objects.filter(p_seller_id=self.kwargs.get('pk'), p_buy=True)
class ReportListCreateView(generics.ListCreateAPIView):
    """GET/POST /reports — list reports or file one as the current user."""
    queryset = Report.objects.all()
    serializer_class = ReportSerializer

    def perform_create(self, serializer):
        # The reporter is always the authenticated requester.
        serializer.save(from_user=self.request.user)
class ReportRetrieveDestroyView(generics.RetrieveDestroyAPIView):
    """GET/DELETE /reports/<pk> — one report."""
    queryset = Report.objects.all()
    serializer_class = ReportSerializer
class ScoreListCreateView(generics.ListCreateAPIView):
    """GET/POST /scores — list ratings or create one as the current user."""
    queryset = Score.objects.all()
    serializer_class = ScoreSerializer

    def perform_create(self, serializer):
        # The rater is always the authenticated requester.
        serializer.save(from_user=self.request.user)
class ScoreRetrieveDestroyView(generics.RetrieveDestroyAPIView):
    """GET/DELETE /scores/<pk> — one rating."""
    queryset = Score.objects.all()
    serializer_class = ScoreSerializer
class NotificationListCreateView(generics.ListCreateAPIView):
    """GET/POST /notifications — list or create notifications."""
    queryset = Notification.objects.all()
    serializer_class = NotificationSerializer
class NotificationRetrieveDestroyView(generics.RetrieveDestroyAPIView):
    """GET/DELETE /notifications/<pk> — one notification."""
    queryset = Notification.objects.all()
    serializer_class = NotificationSerializer
# NOTE(review): exact duplicate of the ScoreListCreateView defined above —
# this redefinition harmlessly shadows the earlier identical class and
# should be deleted.
class ScoreListCreateView(generics.ListCreateAPIView):
    queryset = Score.objects.all()
    serializer_class = ScoreSerializer

    def perform_create(self, serializer):
        serializer.save(from_user=self.request.user)
class FavoriteListCreateView(generics.ListCreateAPIView):
    """GET/POST /favorites — list favorites or add one for the current user."""
    queryset = Favorite.objects.all()
    serializer_class = FavoriteSerializer

    def perform_create(self, serializer):
        serializer.save(from_user=self.request.user)
class FavoriteRetrieveDestroyView(generics.RetrieveDestroyAPIView):
    """GET/DELETE /favorites/<pk> — one favorite."""
    queryset = Favorite.objects.all()
    serializer_class = FavoriteSerializer
class LikeListCreateView(generics.ListCreateAPIView):
    """GET/POST /likes — list likes or add one for the current user."""
    queryset = Like.objects.all()
    serializer_class = LikeSerializer

    def perform_create(self, serializer):
        serializer.save(from_user=self.request.user)
class LikeRetrieveDestroyView(generics.RetrieveDestroyAPIView):
    """GET/DELETE /likes/<pk> — one like."""
    queryset = Like.objects.all()
    serializer_class = LikeSerializer
class SoldListCreateView(generics.ListCreateAPIView):
    """GET/POST /sold — list sale records or record a new sale.

    Recording a sale also marks the sold product's `p_buy` flag.
    """
    queryset = Sold.objects.all()
    serializer_class = SoldSerializer

    def perform_create(self, serializer):
        # BUG FIX: the original flipped `p_buy` in post() BEFORE validation,
        # so a rejected payload still flagged the product as sold (and an
        # unknown product id raised an unhandled DoesNotExist). Create the
        # Sold row first; only then mark its product as sold.
        sold = serializer.save(from_user=self.request.user)
        product = sold.product
        product.p_buy = True
        product.save()
class SoldRetrieveDestroyView(generics.RetrieveDestroyAPIView):
    """GET/DELETE /sold/<pk> — one sale record."""
    queryset = Sold.objects.all()
    serializer_class = SoldSerializer
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,857 | ondi-project/ondi-back | refs/heads/master | /ondi/user/migrations/0008_auto_20210205_0736.py | # Generated by Django 3.1.6 on 2021-02-05 07:36
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: enforce (from_user, product) uniqueness on Like.
    Do not hand-edit."""

    dependencies = [
        ('main', '0003_auto_20210205_0553'),
        ('user', '0007_auto_20210205_0642'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='like',
            unique_together={('from_user', 'product')},
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,858 | ondi-project/ondi-back | refs/heads/master | /ondi/main/models.py | from django.db import models
from user.models import *
from django.utils import timezone
# Create your models here.
class Product(models.Model):
    """An item listed for sale.

    Carries category / name / price / description / photo / tag / negotiable
    flag / listing time / like count / seller, plus live-broadcast status,
    view count and a sold flag.
    """
    p_category = models.CharField(max_length=20)  # chosen from a category list
    p_name = models.CharField(max_length=20, verbose_name="상품제목")  # listing title
    p_price = models.IntegerField(verbose_name="상품가격", default=0)  # asking price
    # Description; may later move to a rich-text field depending on input format.
    p_content = models.CharField(max_length=200, verbose_name="상품설명")
    p_image = models.ImageField(verbose_name="대표사진", upload_to="")  # main photo
    p_tag = models.CharField(max_length=20)  # search tag(s)
    p_nego = models.BooleanField()  # is the price negotiable?
    p_date = models.DateTimeField(default=timezone.now, verbose_name="등록날짜")  # listed at
    p_likecount = models.IntegerField(verbose_name="좋아요수", null=True, default=0)  # likes
    p_seller = models.ForeignKey(User, on_delete=models.CASCADE, default="")  # listing owner
    # Live-broadcast status; views assign "READY" when a broadcast is requested.
    p_live = models.CharField(max_length=200, verbose_name="라이브방송여부", default=0)
    p_viewcount = models.IntegerField(verbose_name="조회수", null=True, default=0)  # view count
    p_buy = models.BooleanField(default=False, verbose_name="판매완료여부")  # True once sold
class LiveProduct(models.Model):
    """A scheduled live broadcast for a product: start time and start price."""
    # NOTE(review): confirm the default start time semantics — defaults to "now".
    l_date = models.DateTimeField(default=timezone.now, verbose_name="live시작시간")
    l_product = models.ForeignKey(Product, on_delete=models.CASCADE, default="")  # broadcast subject
    l_sprice = models.IntegerField(verbose_name="라이브 시작가격", default=0)  # starting price
51,859 | ondi-project/ondi-back | refs/heads/master | /ondi/main/migrations/0003_auto_20210205_1134.py | # Generated by Django 3.1.6 on 2021-02-05 02:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated sibling of 0003_auto_20210205_0553 (later merged):
    adds LiveProduct.l_sprice and alters Product.p_image. Do not hand-edit."""

    dependencies = [
        ('main', '0002_auto_20210205_1014'),
    ]

    operations = [
        migrations.AddField(
            model_name='liveproduct',
            name='l_sprice',
            field=models.IntegerField(default=0, verbose_name='라이브 시작가격'),
        ),
        migrations.AlterField(
            model_name='product',
            name='p_image',
            field=models.ImageField(upload_to='', verbose_name='대표사진'),
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,860 | ondi-project/ondi-back | refs/heads/master | /ondi/user/migrations/0005_auto_20210205_0532.py | # Generated by Django 3.1.6 on 2021-02-05 05:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make User.image non-null (temporary 'e' default used
    only for the data migration). Do not hand-edit."""

    dependencies = [
        ('user', '0004_user_image'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='image',
            field=models.ImageField(default='e', upload_to=''),
            preserve_default=False,
        ),
    ]
| {"/ondi/main/urls.py": ["/ondi/main/views.py"], "/ondi/main/admin.py": ["/ondi/main/models.py"], "/ondi/user/serializers.py": ["/ondi/user/models.py"], "/ondi/user/urls.py": ["/ondi/user/views.py"], "/ondi/main/serializer.py": ["/ondi/main/models.py"], "/ondi/main/views.py": ["/ondi/main/models.py", "/ondi/main/serializer.py"], "/ondi/user/views.py": ["/ondi/user/models.py", "/ondi/user/serializers.py"]} |
51,868 | Raytr0/Pong | refs/heads/main | /menu.py | import pygame
import sys
# Setup pygame/window ---------------------------------------- #
# Clock used to cap the menu loop at 60 FPS.
mainClock = pygame.time.Clock()
from pygame.locals import *  # QUIT, KEYDOWN, K_ESCAPE, MOUSEBUTTONDOWN, ...
pygame.init()
pygame.display.set_caption('Menu')
screen = pygame.display.set_mode((500, 500), 0, 32)  # 500x500, 32-bit surface
font = pygame.font.Font("freesansbold.ttf", 50)        # title font
start_font = pygame.font.Font("freesansbold.ttf", 20)  # button-label font
def draw_text(text, font, color, surface, x, y):
    """Render *text* in *font*/*color* and blit it onto *surface* at (x, y)."""
    rendered = font.render(text, 1, color)
    area = rendered.get_rect()
    area.topleft = (x, y)
    surface.blit(rendered, area)
click = False
def main_menu():
    """Run the title-screen loop; launch a game mode when its button is clicked.

    Buttons: "Vs Computer" (imports pong_cpu) and "2 player" (imports game) —
    importing the module runs it. ESC or closing the window quits.
    """
    # BUG FIX: `click` was read at the first `if click:` before any
    # assignment in the first loop pass (the later assignments made it a
    # function local), raising UnboundLocalError. Initialise it up front.
    click = False
    while True:
        screen.fill((31, 31, 31))
        draw_text('P O N G', font, (255, 255, 255), screen, 155, 100)
        mx, my = pygame.mouse.get_pos()
        button_1 = pygame.Rect(150, 350, 200, 50)  # 2-player
        button_2 = pygame.Rect(150, 250, 200, 50)  # vs computer
        if button_1.collidepoint((mx, my)) and click:
            import game      # running the module starts the 2-player game
        if button_2.collidepoint((mx, my)) and click:
            import pong_cpu  # running the module starts the vs-CPU game
        pygame.draw.rect(screen, (175, 238, 238), button_1)
        pygame.draw.rect(screen, (175, 238, 238), button_2)
        draw_text('2 player', start_font, (255, 0, 0), screen, 210, 365)
        draw_text('Vs Computer', start_font, (255, 0, 0), screen, 190, 265)
        click = False  # re-latched below if a click event arrives this frame
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN and event.key == K_ESCAPE:
                pygame.quit()
                sys.exit()
            if event.type == MOUSEBUTTONDOWN and event.button == 1:
                click = True
        pygame.display.update()
        mainClock.tick(60)
main_menu() | {"/menu.py": ["/game.py"]} |
51,869 | Raytr0/Pong | refs/heads/main | /game.py | #Credit to HaltonCoders for the amazing pong code
#Modified and edited by Raytr0
import sys
import pygame
import random
# Text to RGB
# A dictionary where the color word is the key and the rbg equivalent is the value
# Keys are human-readable color names; values are (R, G, B) tuples.
Colors = {
    "black": (0, 0, 0),
    "white": (255, 255, 255),
    "red": (255, 0, 0),
    "darkRed": (55, 0, 0),
    "lightRed": (255, 108, 23),
    "green": (0, 255, 0),
    "darkGreen": (0, 55, 0),
    "blue": (0, 0, 255),
    "darkBlue": (0, 0, 55),
    "navyBlue": (0, 30, 100),
    "lightPurple": (113, 0, 155),
    "darkPurple": (55, 0, 55),
    "lightGrey": (200, 200, 200),
    "paleTurquoise": (175, 238, 238),
    "lightYellow": (255, 240, 23),
}
pygame.init()  # initialise all pygame modules
clock = pygame.time.Clock()  # frame-rate clock, accessed via `clock`
# win: window
winWidth, winHeight = 800, 600  # window dimensions in pixels
win = pygame.display.set_mode((winWidth, winHeight))  # the game window
pygame.display.set_caption("Pong")  # window title
winColor = pygame.Color('grey12')   # background colour used each frame
lineColor = Colors['paleTurquoise']
paddleWidth, paddleHeight = 10, 90  # paddle dimensions
paddleColor = Colors['blue']
paddleSpeed = 5  # pixels a paddle moves per frame while a key is held
ballDiameter = 16
ballSpeedX = 4   # per-frame horizontal velocity
ballSpeedY = 4   # per-frame vertical velocity
ballColor = Colors['red']
ballStartX = winWidth/2 - ballDiameter/2  # centre the ball horizontally
ballStartY = winHeight/2 - ballDiameter/2  # centre the ball vertically
ball = pygame.Rect(ballStartX, ballStartY, ballDiameter, ballDiameter)
# Width and height are equal, so the ball's rect is a square.
def reset():
    """Re-centre the ball and re-randomise its direction after a point.

    The speed magnitudes are kept; each axis is independently flipped
    (or not) so the next serve can go toward either player.
    """
    global ballSpeedX, ballSpeedY
    ball.center = (winWidth / 2, winHeight / 2)
    ballSpeedX = ballSpeedX * random.choice((1, -1))
    ballSpeedY = ballSpeedY * random.choice((1, -1))
# Paddles start vertically centred, inset from the left and right edges.
player1Paddle = pygame.rect.Rect(paddleWidth, (winHeight - paddleHeight)//2, paddleWidth, paddleHeight)
player2Paddle = pygame.rect.Rect(winWidth - 2*paddleWidth, (winHeight - paddleHeight)//2, paddleWidth, paddleHeight)
player1score, player2score = 0, 0
scoreFont = pygame.font.Font("freesansbold.ttf", 20)  # used for scores and win messages
winFont = pygame.font.Font("freesansbold.ttf", 50)  # NOTE(review): loaded but apparently unused in this file
def reset_game():
    """Return the whole match to its initial state (bound to the 'r' key).

    Re-centres the ball with a fresh random direction, zeroes both
    scores, and rebuilds both paddles at their starting positions.
    """
    global ballSpeedX, ballSpeedY
    global player1Paddle, player2Paddle
    global player1score, player2score
    ball.center = (winWidth / 2, winHeight / 2)
    ballSpeedX *= random.choice((1, -1))
    ballSpeedY *= random.choice((1, -1))
    player1score, player2score = 0, 0
    startY = (winHeight - paddleHeight) // 2
    player1Paddle = pygame.rect.Rect(paddleWidth, startY, paddleWidth, paddleHeight)
    player2Paddle = pygame.rect.Rect(winWidth - 2 * paddleWidth, startY, paddleWidth, paddleHeight)
ballMove = False  # whether the ball is moving; toggled with SPACE
while True:
    # --- input -----------------------------------------------------------
    keysPressed = pygame.key.get_pressed()
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            pygame.quit()
            sys.exit()
    if keysPressed[pygame.K_SPACE]:
        ballMove = not ballMove  # pause / resume the ball
    if keysPressed[pygame.K_r]:
        reset_game()
    # Player 2 (right paddle): arrow keys, clamped to the window.
    if keysPressed[pygame.K_UP] and player2Paddle.y >= 10:
        player2Paddle.y -= paddleSpeed
    elif keysPressed[pygame.K_DOWN] and player2Paddle.y <= winHeight - (paddleHeight + paddleSpeed + 2):
        player2Paddle.y += paddleSpeed
    # Player 1 (left paddle): W/S keys, clamped to the window.
    if keysPressed[pygame.K_w] and player1Paddle.y >= 10:
        player1Paddle.y -= paddleSpeed
    elif keysPressed[pygame.K_s] and player1Paddle.y <= winHeight - (paddleHeight + paddleSpeed + 2):
        player1Paddle.y += paddleSpeed
    # --- physics ---------------------------------------------------------
    if ballMove:
        ball.x += ballSpeedX
        ball.y += ballSpeedY
    # Paddle hits reverse the horizontal direction.
    if player1Paddle.colliderect(ball):
        ballSpeedX *= -1
    if player2Paddle.colliderect(ball):
        ballSpeedX *= -1
    # Ball passed the left edge: point for player 2, serve again.
    if ball.x < 0:
        ballSpeedX = abs(ballSpeedX)
        player2score += 1
        ballMove = False
        reset()
    if ball.y < 0:
        ballSpeedY = abs(ballSpeedY)  # bounce off the top wall
    # Ball passed the right edge: point for player 1, serve again.
    if ball.x + ballDiameter > winWidth:
        ballSpeedX = - abs(ballSpeedX)
        player1score += 1
        ballMove = False
        reset()
    if ball.y + ballDiameter > winHeight:
        ballSpeedY = - abs(ballSpeedY)  # bounce off the bottom wall
    # --- drawing ---------------------------------------------------------
    win.fill(winColor)
    pygame.draw.line(win, lineColor, (winWidth//2, 0), (winWidth//2, winHeight))
    pygame.draw.circle(win, lineColor, (winWidth//2, winHeight//2), 50, width=1)
    pygame.draw.ellipse(win, ballColor, ball)
    pygame.draw.rect(win, paddleColor, player1Paddle)
    pygame.draw.rect(win, paddleColor, player2Paddle)
    # First to 5 wins; freeze the ball and show the replay hint.
    if player1score == 5:
        player1WinText = scoreFont.render('Player 1 Wins!', False, Colors["white"])
        win.blit(player1WinText, (100, 100))
        resetGameText = scoreFont.render('To play again press "r"', False, Colors["white"])
        win.blit(resetGameText, (100, 400))
        ballMove = False
        ball.center = (winWidth / 2, winHeight / 2)
    elif player2score == 5:
        player2WinText = scoreFont.render('Player 2 Wins!', False, Colors["white"])
        win.blit(player2WinText, (500, 100))
        resetGameText = scoreFont.render('To play again press "r"', False, Colors["white"])
        win.blit(resetGameText, (500, 400))
        ballMove = False
        ball.center = (winWidth / 2, winHeight / 2)
    player1ScoreText = scoreFont.render(str(player1score), False, Colors["white"])
    player2ScoreText = scoreFont.render(str(player2score), False, Colors["white"])
    win.blit(player1ScoreText, (370, 290))
    win.blit(player2ScoreText, (420, 290))
    pygame.display.flip()  # update the pygame window/redraw it
    clock.tick(60)  # cap the loop at 60 frames per second
51,872 | jc26/CMU_Coursebook | refs/heads/master | /cmucoursebook/urls.py | from django.conf.urls import url
from django.contrib.auth import views as auth_views
from cmucoursebook import views
from forms import CustomLoginForm
# URL routes for the cmucoursebook app; names are referenced via reverse() in views.
urlpatterns = [
    # Dashboard and profile pages
    url(r'^$', views.home, name='home'),
    url(r'^profile/(?P<username>[\w.@+-]+)$', views.profile, name='profile'),
    url(r'^edit-profile$', views.edit_profile, name="edit-profile"),
    url(r'^edit-image', views.edit_image, name="edit-image"),
    url(r'^get-image/(?P<username>[\w.@+-]+)$', views.get_image, name="get-image"),
    # Course browsing and searching
    url(r'^browse$',views.browse, name='browse'),
    url(r'^search$',views.search, name='search'),
    url(r'^search2$',views.search2, name='search2'),
    url(r'^course-detail/(?P<cid>(\d+))$',views.course_detail, name='course-detail'),
    url(r'^course-detail/get-course-data$', views.get_course_data, name="get-course-data"),
    url(r'^search-users$',views.search_users, name='search-users'),
    # Course list management (current / liked / past / planned)
    url(r'^add-class/(?P<cid>(\d+))$',views.add_class, name='add-class'),
    url(r'^like-class/(?P<cid>(\d+))$',views.like_class, name='like-class'),
    url(r'^add-courses/',views.add_courses, name='add-courses'),
    url(r'^delete-course/',views.delete_course, name='delete-course'),
    # Friendship graph
    url(r'^friends$',views.friends, name='friends'),
    url(r'^request-friendship/(?P<username>[\w.@+-]+)$',views.request_friendship, name='request-friendship'),
    url(r'^confirm-friendship/(?P<username>[\w.@+-]+)$',views.confirm_friendship, name='confirm-friendship'),
    url(r'^deny-friendship/(?P<username>[\w.@+-]+)$',views.deny_friendship, name='deny-friendship'),
    url(r'^remove-friend/(?P<username>[\w.@+-]+)$',views.remove_friend, name='remove-friend'),
    # Authentication, registration, admin CSV upload, comments
    url(r'^login$', auth_views.login, {'template_name':'login.html', 'authentication_form': CustomLoginForm}, name='login'),
    url(r'^logout$', auth_views.logout_then_login, name='logout'),
    url(r'^register$', views.register, name='register'),
    url(r'^confirm-registration/(?P<username>[a-zA-Z0-9_@\+\-]+)/(?P<token>[a-z0-9\-]+)$',
        views.confirm_registration, name='confirm'),
    url(r'^upload',views.upload, name='upload'),
    url(r'^add-comment/(\d+)$',views.add_comment, name='add-comment'),
]
| {"/cmucoursebook/forms.py": ["/cmucoursebook/models.py"], "/cmucoursebook/views.py": ["/cmucoursebook/models.py", "/cmucoursebook/forms.py"]} |
51,873 | jc26/CMU_Coursebook | refs/heads/master | /cmucoursebook/forms.py | from django import forms
from django.contrib.auth.forms import AuthenticationForm
from cmucoursebook.models import *
from django.core.validators import validate_email
MAX_UPLOAD_SIZE = 3000000
class CustomLoginForm(AuthenticationForm):
    """Login form with Bootstrap-styled widgets.

    Fix: the widget ``attrs`` dicts previously repeated the ``'class'`` key
    (``'class': "form-control", 'class': "form-username"``); in a dict
    literal the last duplicate wins, so ``form-control`` was silently
    dropped.  Both CSS classes are now combined into one value.
    """
    username = forms.CharField(
        label='username',
        widget=forms.TextInput(attrs={'placeholder': 'Username...',
                                      'class': "form-control form-username"}))
    password = forms.CharField(
        label='password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Password...',
                                          'class': "form-control form-password"}))
class RegistrationForm(forms.Form):
    """Sign-up form; only CMU email addresses are accepted.

    Fixes:
    * The email widget's ``attrs`` repeated the ``'class'`` key so only
      ``form-email`` survived; both classes are now combined.
    * ``clean_email`` compared only the final seven characters against
      ``'cmu.edu'``, which accepted look-alike domains such as
      ``user@fakecmu.edu``; the actual domain part is validated now.
    * Removed a leftover debug ``print``.
    """
    firstname = forms.CharField(max_length=20, label='First name', widget=forms.TextInput(attrs={'placeholder': 'First Name', 'class': "form-control"}))
    lastname = forms.CharField(max_length=20, label='Last name', widget=forms.TextInput(attrs={'placeholder': 'Last Name', 'class': "form-control"}))
    email = forms.CharField(max_length=50, label='Email', widget=forms.TextInput(attrs={'placeholder': 'Email', 'class': "form-control form-email"}))
    username = forms.CharField(max_length=20, label='Username', widget=forms.TextInput(attrs={'placeholder': 'Username', 'class': "form-control"}))
    password1 = forms.CharField(max_length=200, label='Password', widget=forms.PasswordInput(attrs={'placeholder': 'Password', 'class': "form-control"}))
    password2 = forms.CharField(max_length=200, label='Confirm Password', widget=forms.PasswordInput(attrs={'placeholder': 'Confirm Password', 'class': "form-control"}))

    def clean(self):
        """Cross-field validation: both password entries must match."""
        cleaned_data = super(RegistrationForm, self).clean()
        password1 = cleaned_data.get('password1')
        password2 = cleaned_data.get('password2')
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError("Passwords did not match.")
        return cleaned_data

    def clean_username(self):
        """Reject usernames that are already registered."""
        username = self.cleaned_data.get('username')
        if User.objects.filter(username__exact=username):
            raise forms.ValidationError("Username is already taken.")
        return username

    def clean_email(self):
        """Require a syntactically valid address in the cmu.edu domain.

        Accepts ``cmu.edu`` itself and any subdomain (e.g. andrew.cmu.edu).
        """
        email = self.cleaned_data.get('email')
        validate_email(email)
        domain = email.rsplit('@', 1)[-1].lower()
        if domain != 'cmu.edu' and not domain.endswith('.cmu.edu'):
            raise forms.ValidationError("Not a CMU email.")
        return email
class ProfileForm(forms.ModelForm):
    """Edit form for the free-text and choice fields of a Profile."""

    class Meta:
        model = Profile
        fields = ('from_city', 'from_country', 'major', 'year', 'age', 'bio')

    def clean_from_city(self):
        """Cap the home-city text at 50 characters."""
        value = self.cleaned_data['from_city']
        if len(value) > 50:
            raise forms.ValidationError('from_city is too long!')
        return value

    def clean_from_country(self):
        """Cap the home-country text at 50 characters."""
        value = self.cleaned_data['from_country']
        if len(value) > 50:
            raise forms.ValidationError('from_country is too long!')
        return value

    def clean_major(self):
        """Cap the major text at 50 characters."""
        value = self.cleaned_data['major']
        if len(value) > 50:
            raise forms.ValidationError('major is too long!')
        return value

    def clean_year(self):
        """Only the five known class-year codes are accepted."""
        value = self.cleaned_data.get('year')
        if value not in ('FR', 'SO', 'JR', 'SR', 'GR'):
            raise forms.ValidationError('invalid year!')
        return value

    def clean_age(self):
        """Coerce age to int (None if not parseable) and range-check it."""
        try:
            value = int(self.cleaned_data.get('age'))
        except:
            value = None
        if value and (value < 0 or value > 200):
            raise forms.ValidationError("age is not a reasonable positive integer.")
        return value

    def clean_bio(self):
        """Cap the bio text at 430 characters (matches the model field)."""
        value = self.cleaned_data['bio']
        if len(value) > 430:
            raise forms.ValidationError('bio is too long!')
        return value
class ImageForm(forms.ModelForm):
    """Upload form for the profile picture; checks presence, type, and size."""

    class Meta:
        model = Profile
        fields = ('img',)
        widgets = {'img': forms.FileInput()}

    def clean_img(self):
        """Reject missing files, non-image MIME types, and oversized uploads."""
        upload = self.cleaned_data['img']
        if not upload:
            raise forms.ValidationError('You must upload a image')
        # The browser-reported MIME type must start with "image".
        if not upload.content_type or not upload.content_type.startswith('image'):
            raise forms.ValidationError('File type is not image')
        if upload.size > MAX_UPLOAD_SIZE:
            raise forms.ValidationError('File too big (max size is {0} bytes)'.format(MAX_UPLOAD_SIZE))
        return upload
class UserForm(forms.ModelForm):
    """Lets a user edit the first/last name on Django's built-in User model."""
    class Meta:
        model = User
        fields = ('first_name', 'last_name')
        widgets = {
            'first_name': forms.TextInput(),
            'last_name': forms.TextInput(),
        }
class FileForm(forms.ModelForm):
    """Admin CSV-upload form; the radio button selects which dataset the
    file contains (course catalogue vs. evaluation history)."""

    # Radio value arrives as the string 'True'/'False'; coerce to bool.
    datatype = forms.TypedChoiceField(
        coerce=lambda raw: raw == 'True',
        choices=((True, 'Course'), (False, 'History')),
        widget=forms.RadioSelect,
    )

    class Meta:
        model = DataFile
        fields = ('csvfile',)

    def clean_csvfile(self):
        """A file must actually be attached."""
        upload = self.cleaned_data['csvfile']
        if not upload:
            raise forms.ValidationError('You must upload a csv file')
        return upload
class CommentForm(forms.ModelForm):
    """Per-course review form: difficulty (1-3), free text, skills gained."""

    class Meta:
        model = Comment
        fields = ('difficulty', 'comment', 'skills')

    def clean_difficulty(self):
        """Difficulty must be present and parse to an int in [1, 3]."""
        raw = self.cleaned_data['difficulty']
        if not raw:
            raise forms.ValidationError('Invalid difficulty input')
        try:
            level = int(raw)
        except:
            raise forms.ValidationError('Invalid difficulty input')
        if not 1 <= level <= 3:
            raise forms.ValidationError('Invalid difficulty input')
        return raw

    def clean_comment(self):
        """Comment text is required and capped at 800 characters."""
        text = self.cleaned_data['comment']
        if not text:
            raise forms.ValidationError('You must make some comment!')
        if len(text) > 800:
            raise forms.ValidationError('comments is too long!')
        return text

    def clean_skills(self):
        """Skills text is required and capped at 100 characters."""
        tags = self.cleaned_data['skills']
        if not tags:
            raise forms.ValidationError('You must make some skills!')
        if len(tags) > 100:
            raise forms.ValidationError('skills is too long!')
        return tags
51,874 | jc26/CMU_Coursebook | refs/heads/master | /cmucoursebook/models.py | from __future__ import unicode_literals
from django.db import models
# User class for built-in authentication module
from django.contrib.auth.models import User
class Course(models.Model):
    """One catalogue entry, keyed by its 5-character course number."""
    cid = models.CharField(max_length=5, primary_key=True)
    name = models.CharField(max_length=100)
    department = models.CharField(max_length=50)
    description = models.CharField(max_length=1000)
    start = models.CharField(max_length=5)  # start time
    end = models.CharField(max_length=5)  # end time
    # days is in COLON DELIMITED format with Monday = MN Tuesday = TU Wednesday = WD Thursday = TH Friday = FR
    # i.e. MN:WD:FR means Mondays, Wednesdays, and Fridays
    days = models.CharField(max_length=15)
    # these two fields are averaged over the most recent semester, either Fall or Spring
    # Summer semester is ignored
    # year 2017 is ignored
    hours = models.DecimalField(max_digits=4, decimal_places=2, null=True, blank=True)
    rating = models.DecimalField(max_digits=3, decimal_places=2, null=True, blank=True)
    likes = models.IntegerField(default=0)  # running count of user likes
class Profile(models.Model):
    """Per-user extension of Django's User: bio data, photo, friend graph,
    and the four course lists (current / planned / past / liked)."""
    YEAR_IN_SCHOOL_CHOICES = (
        ('FR', 'Freshman'),
        ('SO', 'Sophomore'),
        ('JR', 'Junior'),
        ('SR', 'Senior'),
        ('GR', 'Graduate'),
    )
    # NOTE(review): declared as ForeignKey, but the views fetch it with
    # Profile.objects.get(user=...) as if one-to-one — confirm before relying
    # on multiple profiles per user.
    user = models.ForeignKey(User, related_name="linked_user")
    major = models.CharField(max_length=50, null=True, blank=True)
    year = models.CharField(max_length=2, choices=YEAR_IN_SCHOOL_CHOICES, default='FR')
    age = models.IntegerField(null=True, blank=True)
    from_city = models.CharField(max_length=50, null=True, blank=True)
    from_country = models.CharField(max_length=50, null=True, blank=True)
    bio = models.CharField(max_length=430, null=True, blank=True)
    # img = models.FileField(upload_to='cmucoursebook/static/image', null=True, blank=False)
    img = models.FileField(upload_to='cmucoursebook/static', null=True, blank=False)
    friends = models.ManyToManyField(User, related_name="friends")
    # Denormalised count of pending_friends, recomputed by the views on change.
    pending = models.IntegerField(default=0)
    pending_friends = models.ManyToManyField(User, related_name="pending_friends")
    curr = models.ManyToManyField(Course, related_name="current_courses")  # current schedule
    plan = models.ManyToManyField(Course, related_name="planned_courses")  # planned courses
    past = models.ManyToManyField(Course, related_name="past_courses")  # completed courses
    liked = models.ManyToManyField(Course, related_name="liked_courses")  # liked courses
class History(models.Model):
    """One row of historical course-evaluation data for a course section
    in a given semester (imported from CSV, presumably FCE data — confirm)."""
    semester = models.CharField(max_length=6)  # Fall, Spring, Summer
    year = models.CharField(max_length=4)
    instructor = models.CharField(max_length=50)
    department = models.CharField(max_length=50)
    cid = models.CharField(max_length=5)
    coursename = models.CharField(max_length=50)
    section = models.CharField(max_length=3)
    ctype = models.CharField(max_length=5)
    response = models.CharField(max_length=3)
    enrollment = models.CharField(max_length=3)
    resprate = models.DecimalField(max_digits=3, decimal_places=2)  # Response rate
    hours = models.DecimalField(max_digits=4, decimal_places=2)  # Time spent per week
    iisl = models.DecimalField(max_digits=3, decimal_places=2)  # Interest in student learning
    ecr = models.DecimalField(max_digits=3, decimal_places=2)  # Explain course requirements
    clg = models.DecimalField(max_digits=3, decimal_places=2)  # Clear learning goals
    ipfs = models.DecimalField(max_digits=3, decimal_places=2)  # Instructor provides Feedback to students
    ios = models.DecimalField(max_digits=3, decimal_places=2)  # Importance of subject
    esm = models.DecimalField(max_digits=3, decimal_places=2)  # Explains subject matter
    srs = models.DecimalField(max_digits=3, decimal_places=2)  # Show respect for students
    os = models.DecimalField(max_digits=3, decimal_places=2)  # Overall teaching
    oc = models.DecimalField(max_digits=3, decimal_places=2)  # Overall course
class Comment(models.Model):
    """A user's review of a course: difficulty rating, free text, skills."""
    user = models.ForeignKey(User, related_name="comment_author")
    course = models.ForeignKey(Course, related_name="comment_course")
    difficulty = models.CharField(max_length=1)  # 1 easy 2 medium 3 hard
    comment = models.CharField(max_length=800)
    skills = models.CharField(max_length=100)
    timestamp = models.DateTimeField(auto_now=True)  # bumped on every save
class Skill(models.Model):
    """A skill tag attached to a course by a user; `count` presumably
    tallies how often the tag was reported — confirm against the importer."""
    user = models.ForeignKey(User, related_name="skill_author")
    course = models.ForeignKey(Course, related_name="skill_course")
    tag = models.CharField(max_length=20)
    count = models.IntegerField()
class Timeslot(models.Model):
    """A single date/start/end triple (all stored as strings)."""
    date = models.CharField(max_length=10, null=True, blank=True)
    start = models.CharField(max_length=5, null=True, blank=True)
    end = models.CharField(max_length=5, null=True, blank=True)
class Schedule(models.Model):
    """Associates a course with the set of timeslots it meets in."""
    course = models.ForeignKey(Course, related_name="schedule_course")
    timeslot = models.ManyToManyField(Timeslot, related_name="schedule_time")
class DataFile(models.Model):
    """An uploaded CSV data file, tagged with the user who uploaded it."""
    user = models.ForeignKey(User, related_name="updated_user")
    # csvfile = models.FileField(upload_to='cmucoursebook/static/files', null=True, blank=False)
    csvfile = models.FileField(upload_to='cmucoursebook/static', null=True, blank=False)
| {"/cmucoursebook/forms.py": ["/cmucoursebook/models.py"], "/cmucoursebook/views.py": ["/cmucoursebook/models.py", "/cmucoursebook/forms.py"]} |
51,875 | jc26/CMU_Coursebook | refs/heads/master | /cmucoursebook/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-27 00:02
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated by Django's makemigrations. Do not edit the schema here —
# change models.py and generate a new migration instead.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('difficulty', models.CharField(max_length=1)),
                ('comment', models.CharField(max_length=800)),
                ('skills', models.CharField(max_length=100)),
                ('timestamp', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Course',
            fields=[
                ('cid', models.CharField(max_length=5, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=100)),
                ('department', models.CharField(max_length=50)),
                ('description', models.CharField(max_length=1000)),
                ('start', models.CharField(max_length=5)),
                ('end', models.CharField(max_length=5)),
                ('days', models.CharField(max_length=15)),
                ('hours', models.DecimalField(blank=True, decimal_places=2, max_digits=4, null=True)),
                ('rating', models.DecimalField(blank=True, decimal_places=2, max_digits=3, null=True)),
                ('likes', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='DataFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('csvfile', models.FileField(null=True, upload_to='cmucoursebook/static')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='History',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('semester', models.CharField(max_length=6)),
                ('year', models.CharField(max_length=4)),
                ('instructor', models.CharField(max_length=50)),
                ('department', models.CharField(max_length=50)),
                ('cid', models.CharField(max_length=5)),
                ('coursename', models.CharField(max_length=50)),
                ('section', models.CharField(max_length=3)),
                ('ctype', models.CharField(max_length=5)),
                ('response', models.CharField(max_length=3)),
                ('enrollment', models.CharField(max_length=3)),
                ('resprate', models.DecimalField(decimal_places=2, max_digits=3)),
                ('hours', models.DecimalField(decimal_places=2, max_digits=4)),
                ('iisl', models.DecimalField(decimal_places=2, max_digits=3)),
                ('ecr', models.DecimalField(decimal_places=2, max_digits=3)),
                ('clg', models.DecimalField(decimal_places=2, max_digits=3)),
                ('ipfs', models.DecimalField(decimal_places=2, max_digits=3)),
                ('ios', models.DecimalField(decimal_places=2, max_digits=3)),
                ('esm', models.DecimalField(decimal_places=2, max_digits=3)),
                ('srs', models.DecimalField(decimal_places=2, max_digits=3)),
                ('os', models.DecimalField(decimal_places=2, max_digits=3)),
                ('oc', models.DecimalField(decimal_places=2, max_digits=3)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('major', models.CharField(blank=True, max_length=50, null=True)),
                ('year', models.CharField(choices=[('FR', 'Freshman'), ('SO', 'Sophomore'), ('JR', 'Junior'), ('SR', 'Senior'), ('GR', 'Graduate')], default='FR', max_length=2)),
                ('age', models.IntegerField(blank=True, null=True)),
                ('from_city', models.CharField(blank=True, max_length=50, null=True)),
                ('from_country', models.CharField(blank=True, max_length=50, null=True)),
                ('bio', models.CharField(blank=True, max_length=430, null=True)),
                ('img', models.FileField(null=True, upload_to='cmucoursebook/static')),
                ('pending', models.IntegerField(default=0)),
                ('curr', models.ManyToManyField(related_name='current_courses', to='cmucoursebook.Course')),
                ('friends', models.ManyToManyField(related_name='friends', to=settings.AUTH_USER_MODEL)),
                ('liked', models.ManyToManyField(related_name='liked_courses', to='cmucoursebook.Course')),
                ('past', models.ManyToManyField(related_name='past_courses', to='cmucoursebook.Course')),
                ('pending_friends', models.ManyToManyField(related_name='pending_friends', to=settings.AUTH_USER_MODEL)),
                ('plan', models.ManyToManyField(related_name='planned_courses', to='cmucoursebook.Course')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='linked_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Schedule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='schedule_course', to='cmucoursebook.Course')),
            ],
        ),
        migrations.CreateModel(
            name='Skill',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tag', models.CharField(max_length=20)),
                ('count', models.IntegerField()),
                ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='skill_course', to='cmucoursebook.Course')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='skill_author', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Timeslot',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.CharField(blank=True, max_length=10, null=True)),
                ('start', models.CharField(blank=True, max_length=5, null=True)),
                ('end', models.CharField(blank=True, max_length=5, null=True)),
            ],
        ),
        migrations.AddField(
            model_name='schedule',
            name='timeslot',
            field=models.ManyToManyField(related_name='schedule_time', to='cmucoursebook.Timeslot'),
        ),
        migrations.AddField(
            model_name='comment',
            name='course',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_course', to='cmucoursebook.Course'),
        ),
        migrations.AddField(
            model_name='comment',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_author', to=settings.AUTH_USER_MODEL),
        ),
    ]
| {"/cmucoursebook/forms.py": ["/cmucoursebook/models.py"], "/cmucoursebook/views.py": ["/cmucoursebook/models.py", "/cmucoursebook/forms.py"]} |
51,876 | jc26/CMU_Coursebook | refs/heads/master | /cmucoursebook/views.py | from django.shortcuts import render, redirect, get_object_or_404
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404, JsonResponse
from django.core import serializers
# Django transaction system so we can use @transaction.atomic
from django.db import transaction
# Decorator to use built-in authentication system
from django.contrib.auth.decorators import login_required
# Imports the model in models.py
from cmucoursebook.models import *
# Used to create and manually log in a user
from cmucoursebook.forms import *
# Used to generate a one-time-use token to verify a user's email address
from django.contrib.auth.tokens import default_token_generator
# Used to send mail from within Django
from django.core.mail import send_mail
import csv, collections
# register page
@transaction.atomic
def register(request):
    """Create an inactive account and email a one-time confirmation link.

    GET renders the empty form.  POST validates, creates the User
    (inactive until the email is confirmed) plus its empty Profile, then
    sends the activation email containing the token URL.
    """
    context = {}
    # Just display the registration form if this is a GET request
    if request.method == 'GET':
        context['form'] = RegistrationForm()
        return render(request, 'register.html', context)
    form = RegistrationForm(request.POST)
    context['form'] = form
    # Check the validity of the form data
    if not form.is_valid():
        return render(request, 'register.html', context)
    # Creates the new user and its empty profile
    new_user = User.objects.create_user(first_name=form.cleaned_data['firstname'],
                                        last_name=form.cleaned_data['lastname'],
                                        username=form.cleaned_data['username'],
                                        password=form.cleaned_data['password1'],
                                        email=form.cleaned_data['email'])
    # Mark the user as inactive to prevent login before email confirmation.
    new_user.is_active = False
    new_user.save()
    # Create a profile for the new user
    new_profile = Profile.objects.create(user=new_user)
    new_profile.save()
    # Generate a one-time use token and an email message body
    token = default_token_generator.make_token(new_user)
    email_body = """
Welcome to the CMU Course Book!
Please click the link below to verify your email address and complete the registration of your account:
http://%s%s
""" % (request.get_host(), reverse('confirm', args=(new_user.username, token)))
    send_mail(subject="Verify your email address",
              message=email_body,
              from_email="zhouchep@andrew.cmu.edu",
              recipient_list=[new_user.email])
    context['email'] = form.cleaned_data['email']
    return render(request, 'need_confirm.html', context)
    # return redirect(reverse('home'))
# Email-confirmation endpoint: activates the account behind a valid token.
@transaction.atomic
def confirm_registration(request, username, token):
    """Activate *username*'s account if the emailed one-time token checks out."""
    user = get_object_or_404(User, username=username)
    # Reject stale or forged confirmation links.
    if not default_token_generator.check_token(user, token):
        raise Http404
    user.is_active = True
    user.save()
    return render(request, 'confirmed.html', {'user': user})
# the dashboard
@login_required
def home(request):
    """Dashboard view: the user's current courses as a weekly schedule.

    Each course gets a 1-based display index, and every weekday bucket
    receives the (course, index) pairs whose ``days`` field contains that
    day code.

    Improvements: the hand-maintained position counter is replaced with
    ``enumerate``, and the five copy-pasted day ``if`` blocks are driven
    from a single code→bucket table.
    """
    the_profile = Profile.objects.get(user=request.user)
    courses = the_profile.curr.all()
    # Pair each course with its 1-based display index (used by the template).
    courses_as_list = [(course, i) for i, course in enumerate(courses, start=1)]
    monday, tuesday, wednesday, thursday, friday = [], [], [], [], []
    # Course.days is colon-delimited day codes, e.g. "MN:WD:FR".
    day_buckets = (('MN', monday), ('TU', tuesday), ('WD', wednesday),
                   ('TH', thursday), ('FR', friday))
    for course, index in courses_as_list:
        days = course.days.split(':')
        for code, bucket in day_buckets:
            if code in days:
                bucket.append((course, index))
    # if user is super user
    formfile = FileForm()
    context = {'user': request.user, 'formfile': formfile, 'courses': courses,
               'monday': monday, 'tuesday': tuesday, 'wednesday': wednesday,
               'thursday': thursday, 'friday': friday,
               'pending': the_profile.pending}
    return render(request, 'home.html', context)
@login_required
def profile(request, username):
    """Show *username*'s profile plus friendship state relative to the viewer.

    Fix: the original caught ``User.DoesNotExist`` and set ``c_user = None``
    but then kept executing, so any bad username crashed with a ``NameError``
    on ``c_profile`` (and later ``c_user.username``).  Unknown users now
    yield a clean 404.  The three exists-checks replace the
    try/``DoesNotExist`` probes with equivalent queryset ``exists()`` calls.
    """
    the_user = request.user
    the_profile = Profile.objects.get(user=the_user)
    c_user = get_object_or_404(User, username=username)
    c_profile = get_object_or_404(Profile, user=c_user)
    # The edit forms are only offered on the viewer's own page.
    if c_user == the_user:
        formuser = UserForm()
        formprofile = ProfileForm()
        formimage = ImageForm()
    else:
        formuser = None
        formprofile = None
        formimage = None
    courses_curr = c_profile.curr.all()
    courses_past = c_profile.past.all()
    courses_plan = c_profile.plan.all()
    courses_like = c_profile.liked.all()
    # Friendship state drives which buttons the template renders.
    isFriend = the_profile.friends.filter(username=username).exists()
    # True when the viewer already sent a request awaiting c_user's answer.
    isPending = c_profile.pending_friends.filter(username=the_user.username).exists()
    # True when c_user is waiting on the viewer to accept/deny.
    theyIsPending = the_profile.pending_friends.filter(username=c_user.username).exists()
    context = {'cuser': c_user, 'profile': c_profile, 'user': the_user,
               'formimage': formimage, 'formuser': formuser,
               'formprofile': formprofile, 'courses_curr': courses_curr,
               'courses_past': courses_past, 'courses_plan': courses_plan,
               'courses_like': courses_like, 'pending': the_profile.pending,
               'isFriend': isFriend, 'isPending': isPending,
               'theyIsPending': theyIsPending}
    return render(request, 'profile.html', context)
@login_required
def edit_profile(request):
    """Handle POSTs from the profile-edit form (User name + Profile fields).

    Fix: the original called ``form_user.save()`` without checking
    ``is_valid()`` first, which makes Django raise ``ValueError`` whenever
    the submitted name fields fail validation.  Both forms are now saved
    only when valid (invalid input is silently skipped, matching the
    original's handling of the profile form).
    """
    if request.method == 'GET':
        return redirect(reverse('home'))
    the_user = request.user
    form_user = UserForm(request.POST, request.FILES, instance=the_user)
    if form_user.is_valid():
        form_user.save()
    the_profile = Profile.objects.get(user=the_user)
    form_profile = ProfileForm(request.POST, request.FILES, instance=the_profile)
    if form_profile.is_valid():
        form_profile.save()
    return redirect(reverse('profile', kwargs={'username': request.user.username}))
@login_required
def edit_image(request):
    """Accept a new profile-picture upload (POST only); GET bounces home."""
    if request.method == 'GET':
        return redirect(reverse('home'))
    the_profile = Profile.objects.get(user=request.user)
    form_image = ImageForm(request.POST, request.FILES, instance=the_profile)
    if form_image.is_valid():
        # NOTE(review): this stores the MIME type on the *form* instance,
        # not the model, so it is discarded after the request — looks vestigial.
        form_image.content_type = form_image.cleaned_data['img'].content_type
        form_image.save()
    return redirect(reverse('profile', kwargs={'username': request.user.username}))
@login_required
def get_image(request, username):
    """Serve *username*'s profile picture, falling back to a default JPEG.

    Fixes: an unknown username now 404s instead of raising ``DoesNotExist``;
    the fallback file was opened in text mode (corrupts JPEG bytes on
    Python 3) and its handle was never closed — it is now read as bytes
    inside a context manager.
    """
    the_user = get_object_or_404(User, username=username)
    the_profile = get_object_or_404(Profile, user=the_user)
    the_img = the_profile.img
    if not the_img:
        # TODO(review): hard-coded deployment path — should come from
        # settings/staticfiles instead.
        with open("/home/ubuntu/Final_Sprint/cmucoursebook/static/default.jpg", "rb") as f:
            the_img = f.read()
    img_type = "image/jpeg"
    return HttpResponse(the_img, content_type=img_type)
# Friends page: confirmed friends plus incoming requests.
@login_required
def friends(request):
    """Render the viewer's friend list and pending friendship requests."""
    the_profile = Profile.objects.get(user=request.user)
    context = {
        'friends': the_profile.friends.all(),
        'pending_friends': the_profile.pending_friends.all(),
        'pending': the_profile.pending,
    }
    return render(request, 'friends.html', context)
# Send a friendship request (from the target's profile page).
@login_required
def request_friendship(request, username):
    """Queue the logged-in user on *username*'s pending-friends list."""
    try:
        target_profile = Profile.objects.get(user=User.objects.get(username=username))
    except:
        print("malformed input")
        return redirect(reverse('home'))
    target_profile.pending_friends.add(request.user)
    # Keep the denormalised pending counter in sync with the M2M set.
    target_profile.pending = target_profile.pending_friends.count()
    target_profile.save()
    # Requests originate from the target's profile, so bounce back there.
    return redirect(reverse('profile', kwargs={'username': username}))
# Accept a pending friendship request.
@login_required
def confirm_friendship(request, username):
    """Promote *username* from pending to friend, mirroring the link both ways."""
    me = request.user
    my_profile = Profile.objects.get(user=me)
    try:
        other = User.objects.get(username=username)
        other_profile = Profile.objects.get(user=other)
    except:
        print("malformed input")
        return redirect(reverse('home'))
    # Move the requester from pending to confirmed on our side...
    my_profile.friends.add(other)
    my_profile.pending_friends.remove(other)
    # ...and refresh the denormalised pending counter.
    my_profile.pending = my_profile.pending_friends.count()
    my_profile.save()
    # Mirror the friendship on the other profile.
    other_profile.friends.add(me)
    # Confirm/deny only happens on the friends page, so return there.
    return redirect(reverse('friends'))
# Reject a pending friendship request.
@login_required
def deny_friendship(request, username):
    """Drop *username* from the viewer's pending-friends list."""
    my_profile = Profile.objects.get(user=request.user)
    try:
        requester = User.objects.get(username=username)
    except:
        print("malformed input")
        return redirect(reverse('home'))
    try:
        my_profile.pending_friends.remove(requester)
    except:
        print("malformed input")
        return redirect(reverse('home'))
    # Refresh the denormalised pending counter.
    my_profile.pending = my_profile.pending_friends.count()
    my_profile.save()
    # Confirm/deny only happens on the friends page, so return there.
    return redirect(reverse('friends'))
# remove friend
@login_required
def remove_friend(request, username):
    """Break the friendship between the current user and `username`
    (symmetric: both friend lists are updated)."""
    the_user = request.user
    the_profile = Profile.objects.get(user=request.user)
    try:
        target_user = User.objects.get(username=username)
        target_profile = Profile.objects.get(user=target_user)
        the_profile.friends.remove(target_user)
        target_profile.friends.remove(the_user)
    except Exception:  # was a bare except; don't trap SystemExit and friends
        print("malformed input, someone is trying to hack our web app, Sir!")
        return redirect(reverse('home'))
    # temporary redirect, need to redirect to their profile page (the place
    # where the remove friend button is)
    return redirect(reverse('profile', kwargs={'username': username}))
# course detail
@login_required
def course_detail(request, cid):
    """Render the detail page for course `cid`, including its comments and
    whether the current user has added/liked it."""
    the_course = get_object_or_404(Course, cid=cid)
    the_user = request.user
    the_profile = Profile.objects.get(user=the_user)
    # All comments, newest first; None keeps the template's falsy check.
    try:
        the_comments = Comment.objects.filter(course=the_course).order_by('-timestamp')
    except Exception:  # was a bare except (filter() itself never raises here)
        the_comments = None
    # The current user's own comment, if any.
    try:
        my_comment = Comment.objects.get(course=the_course, user=request.user)
    except Comment.DoesNotExist:  # was a bare except
        my_comment = None
    # Direct boolean assignment replaces the flag-then-set pattern.
    added = the_profile.curr.filter(cid=cid).exists()
    liked = the_profile.liked.filter(cid=cid).exists()
    context = {'user': the_user, 'course': the_course, 'added': added,
               'comments': the_comments, 'my_comment': my_comment, 'liked': liked,
               'pending': the_profile.pending}
    return render(request, 'course_detail.html', context)
# user add class
@login_required
def add_class(request, cid):
    """Add course `cid` to the current user's current-semester schedule."""
    the_user = request.user
    the_profile = Profile.objects.get(user=the_user)
    try:
        the_course = Course.objects.get(cid=cid)
    except Course.DoesNotExist:  # was a bare except
        the_course = None
    if the_course:
        if the_profile.curr.filter(cid=cid).exists():
            # Already enrolled; silently skip (message goes to server log).
            print('This course is in your current schedule')
        else:
            the_profile.curr.add(the_course)
    return redirect(reverse('course-detail', kwargs={'cid': cid}))
# user add courses for curr/past/plan
@login_required
def add_courses(request):
    """Add a course (POST: cid, semester) to the user's Current/Past/Future
    schedule.  GET requests are bounced back home."""
    if request.GET:
        return redirect(reverse('home'))
    the_user = request.user
    # Missing POST fields previously raised an unhandled error (HTTP 500);
    # now they redirect home like the other malformed-input paths.
    try:
        cid = request.POST['cid']
        semester = request.POST['semester']
    except KeyError:
        print('malformed input!')
        return redirect(reverse('home'))
    try:
        the_course = Course.objects.get(cid=cid)
    except Course.DoesNotExist:  # was a bare except
        print('This course id does not exist')
        return redirect(reverse('home'))
    the_profile = Profile.objects.get(user=the_user)
    if semester == 'Current':
        if the_profile.curr.filter(cid=cid).exists():
            print('This course is in your current schedule')
        else:
            the_profile.curr.add(the_course)
    elif semester == 'Past':
        if the_profile.past.filter(cid=cid).exists():
            print('This course is in your past schedule')
        else:
            the_profile.past.add(the_course)
    elif semester == 'Future':
        if the_profile.plan.filter(cid=cid).exists():
            print('This course is in your future schedule')
        else:
            the_profile.plan.add(the_course)
    else:
        print('Error semester, someone is trying to hack our web app, Sir!')
    return redirect(reverse('home'))
# delete course
@login_required
def delete_course(request):
    """Remove a course (POST: cid, semester, page) from one of the user's
    schedules, then bounce back to the page the request came from."""
    profile = Profile.objects.get(user=request.user)
    try:
        course = Course.objects.get(cid=request.POST['cid'])
        semester = request.POST['semester']
    except (Course.DoesNotExist, KeyError):  # was a bare except
        print('malformed input!')
        return redirect(reverse('home'))
    if semester == 'curr':
        try:
            profile.curr.remove(course)
        except Exception:  # was a bare except
            print('This course is not in curr!')
            return redirect(reverse('home'))
    elif semester == 'past':
        try:
            profile.past.remove(course)
        except Exception:  # was a bare except
            print('This course is not in past!')
            return redirect(reverse('home'))
    elif semester == 'plan':
        try:
            profile.plan.remove(course)
        except Exception:  # was a bare except
            print('This course is not in plan!')
            return redirect(reverse('home'))
    # 'page' is either 'profile', 'course-detail' or 'home'; a missing key
    # now falls through to the home redirect instead of a server error.
    goto = request.POST.get('page')
    if goto == 'profile':
        return redirect(reverse(goto, kwargs={'username': request.user}))
    elif goto == 'course-detail':
        return redirect(reverse(goto, kwargs={'cid': request.POST['cid']}))
    else:
        return redirect(reverse('home'))
# search course
@login_required
def search(request):
    """Redirect to the detail page for POST['cid'], or render a
    not-found page when no such course exists."""
    try:
        cid = request.POST['cid']
    except KeyError:  # was a bare except; only a missing key can occur
        return redirect(reverse('home'))
    # exists() avoids counting every matching row.
    if not Course.objects.filter(cid=cid).exists():
        context = {'error': cid}
        return render(request, 'not_found.html', context)
    else:
        return redirect(reverse('course-detail', kwargs={'cid': cid}))
#search course with GET method
@login_required
def search2(request):
    """GET-method twin of search(): redirect to the detail page for
    GET['cid'], or render a not-found page."""
    try:
        cid = request.GET['cid']
    except KeyError:  # was a bare except; only a missing key can occur
        return redirect(reverse('home'))
    # exists() avoids counting every matching row.
    if not Course.objects.filter(cid=cid).exists():
        context = {'error': cid}
        return render(request, 'not_found.html', context)
    else:
        return redirect(reverse('course-detail', kwargs={'cid': cid}))
# browse
@login_required
def browse(request):
    """Render the course browser: department filter plus top-5 rankings
    (GET: optional 'department', 'orderby', 'dept')."""
    context = {}
    context['pending'] = Profile.objects.get(user=request.user).pending
    # Distinct departments, insertion-ordered, 'All' first.  The set gives
    # O(1) membership tests (the list scan was O(n) per course).
    dept_list = ['All']
    seen_depts = set(dept_list)
    for course in Course.objects.all():
        if course.department not in seen_depts:
            seen_depts.add(course.department)
            dept_list.append(course.department)
    context['dept_list'] = dept_list
    # Ranking criteria offered in the template.
    rankby = ['liked', 'rating', 'workload(ascending)', 'workload(descending)']
    context['rankby'] = rankby
    if not request.GET:
        return render(request, 'browse.html', context)
    if 'department' in request.GET:
        try:
            selected_dept = request.GET['department']
        except KeyError:  # was a bare except; key presence already checked
            return redirect(reverse('home'))
        context['dept'] = selected_dept
        if selected_dept == 'All':
            courses = Course.objects.all()
        else:
            courses = Course.objects.filter(department=selected_dept)
    else:
        courses = None
    context['courses'] = courses
    if 'orderby' in request.GET:
        try:
            the_dept = request.GET['dept']
        except KeyError:  # was a bare except
            the_dept = None
        if not the_dept or the_dept == 'All':
            the_courses = Course.objects.all()
        else:
            the_courses = Course.objects.filter(department=the_dept)
        orderby = request.GET['orderby']
        if orderby == 'liked':
            top5course = the_courses.order_by('-likes')[:5]
        elif orderby == 'rating':
            top5course = the_courses.order_by('-rating')[:5]
        elif orderby == 'workload(ascending)':
            top5course = the_courses.order_by('hours')[:5]
        elif orderby == 'workload(descending)':
            top5course = the_courses.order_by('-hours')[:5]
        else:
            top5course = None
    else:
        top5course = None
    context['top5course'] = top5course
    return render(request, 'browse.html', context)
# NOTE(review): unlike the other views in this file, search_users has no
# @login_required decorator -- confirm whether anonymous access is intended.
def search_users(request):
    """Search users by username/firstname/lastname/email (GET: key, tag)."""
    try:
        key = request.GET['key']
        tag = request.GET['tag']
    except KeyError:  # was a bare except; only missing GET keys can occur
        print("malformed input, someone is trying to hack our web app, Sir!")
        return redirect(reverse('home'))
    context = {}
    user_list = []
    msg = ""
    if tag == 'username':
        try:
            the_user = User.objects.get(username=key)
            user_list.append(the_user)
        except User.DoesNotExist:  # was a bare except
            msg = "Sorry, no such user"
    elif tag == 'firstname':
        user_list = User.objects.filter(first_name=key)
    elif tag == 'lastname':
        user_list = User.objects.filter(last_name=key)
    elif tag == 'email':
        user_list = User.objects.filter(email=key)
    else:
        msg = "Sorry, no such user"
    if not user_list:
        msg = "Sorry, no such user"
    context['user_list'] = user_list
    context['msg'] = msg
    return render(request, 'browse.html', context)
# get course data
# for course_detail.html trend plot
@login_required
def get_course_data(request):
    """AJAX endpoint feeding the course_detail.html trend plot.

    GET params: `cid` (course id) and `type`, one of 'difficulty',
    'rating' or 'hours'.  Returns a JsonResponse mapping labels to values.
    """
    cid = request.GET['cid']
    data_type = request.GET['type']
    if data_type == 'difficulty':
        # Histogram of comment difficulty ratings for this course.
        course = Course.objects.get(cid=cid)
        comments = Comment.objects.filter(course=course)
        easy = medium = hard = 0
        for comment in comments:
            difficulty = comment.difficulty
            if difficulty == '1':
                easy = easy + 1
            elif difficulty == '2':
                medium = medium + 1
            elif difficulty == '3':
                hard = hard + 1
            else:  # just in case it's blank
                continue
        response = {'Easy': easy, 'Medium': medium, 'Hard': hard}
        return JsonResponse(response)
    else:
        # Per-semester average of either the overall rating or hours/week.
        response = collections.OrderedDict()
        visited = {}
        for record in History.objects.filter(cid=cid):
            if data_type == 'rating':
                data = record.oc
            else:  # data_type == 'hours'
                data = record.hours
            key = record.year + ' ' + record.semester
            # BUG FIX: dict.has_key() was removed in Python 3; use `in`.
            if key not in visited:
                visited[key] = (data, 1)
            else:
                # Incremental running average: avoids storing every sample.
                (prev_average, prev_count) = visited[key]
                new_count = prev_count + 1
                new_average = ((prev_average * prev_count) + data) / new_count
                visited[key] = (new_average, new_count)
        # NOTE(review): sorting 'YYYY Semester' strings orders Fall before
        # Spring within a year (alphabetical) -- confirm that is intended.
        ordered_dict = collections.OrderedDict(sorted(visited.items()))
        for item in ordered_dict.items():
            (semester, data_tuple) = item
            (keep, trash) = data_tuple
            # Abbreviate the year for the chart: "2016 Fall" -> "'16 Fall".
            semester = "'" + semester[2:]
            response[semester] = float(keep)
        return JsonResponse(response)
# For faculty/super user, the can upload history data and course data
@login_required
def upload(request):
    """Let faculty/superusers upload CSV data (POST + file 'csvfile').

    POST['datatype'] == 'True' means the CSV holds course rows; anything
    else means per-semester FCE history rows.  After a course upload, each
    new course's average hours/week and overall rating are filled in from
    the most recent semester's history records.
    """
    the_user = request.user
    form_file = FileForm(request.POST, request.FILES, instance=the_user)
    if form_file.is_valid():
        # NOTE(review): this sets content_type on the *form* object, not on
        # the saved model instance -- confirm that is intended.
        form_file.content_type = form_file.cleaned_data['csvfile'].content_type
        form_file.save()
    else:
        print('bad file')
        return redirect(reverse('home'))
    iscourse = request.POST.get('datatype')
    content = csv.reader(request.FILES.get('csvfile'))
    new_courses = []
    try:
        # if it is course data
        if iscourse == 'True':
            for line in content:
                # Update the course in place if it already exists...
                try:
                    the_course = Course.objects.get(cid=line[0])
                    the_course.name = line[1]
                    the_course.department = line[2]
                    the_course.description = line[3]
                    the_course.start = line[4]
                    the_course.end = line[5]
                    the_course.days = line[6]
                # ...otherwise create it (bare except also hides bad rows).
                except:
                    the_course = Course(cid=line[0],
                                        name=line[1],
                                        department=line[2],
                                        description=line[3],
                                        start=line[4],
                                        end=line[5],
                                        days=line[6])
                the_course.save()
                new_courses.append(the_course)
        # if it is history data
        else:
            for line in content:
                # Columns follow the FCE export order (21 fields).
                new_history = History(semester=line[0],
                                      year=line[1],
                                      instructor=line[2],
                                      department=line[3],
                                      cid=line[4],
                                      coursename=line[5],
                                      section=line[6],
                                      ctype=line[7],
                                      response=line[8],
                                      enrollment=line[9],
                                      resprate=line[10],
                                      hours=line[11],
                                      iisl=line[12],
                                      ecr=line[13],
                                      clg=line[14],
                                      ipfs=line[15],
                                      ios=line[16],
                                      esm=line[17],
                                      srs=line[18],
                                      os=line[19],
                                      oc=line[20])
                new_history.save()
    # NOTE(review): a failure mid-file is only printed; rows saved before
    # the failure stay in the database (no transaction/rollback).
    except:
        print('upload data error!')
    # compute and store average hours/week and average overall rating
    def computeAvg(course, hset):
        # Average `hours` and `oc` (overall rating) over a history queryset.
        count = 0
        total_hours = 0
        total_rating = 0
        for history in hset:
            count += 1
            total_hours = total_hours + history.hours
            total_rating = total_rating + history.oc
        hours = total_hours / count
        rating = total_rating / count
        course.hours = hours
        course.rating = rating
        course.save()
    # if both are present, populate 'hours' and 'ratings' field of models.Course
    if new_courses:
        for course in new_courses:
            # Walk back from 2016 to the most recent semester with data.
            i = 2016
            while True:
                fall_set = History.objects.filter(cid=course.cid, year=str(i), semester='Fall')
                spring_set = History.objects.filter(cid=course.cid, year=str(i), semester='Spring')
                if fall_set:
                    computeAvg(course, fall_set)
                    break
                elif spring_set:
                    computeAvg(course, spring_set)
                    break
                elif i < 2000:  # no previous data should be available on CMU SIO
                    break  # used to break out of inf loop
                else:
                    i -= 1
    return redirect(reverse('home'))
def add_comment(request, cid):
    """Create or replace the current user's comment on course `cid` (POST)."""
    the_course = get_object_or_404(Course, cid=cid)
    form = CommentForm(request.POST)
    if not form.is_valid():
        raise Http404
    # One comment per (user, course): drop any previous comment first.
    try:
        the_comment = Comment.objects.get(course=the_course, user=request.user)
        the_comment.delete()
    except Comment.DoesNotExist:  # was `except: None` -- no prior comment
        pass
    the_comment = Comment(comment=request.POST['comment'],
                          skills=request.POST['skills'],
                          difficulty=request.POST['difficulty'],
                          user=request.user,
                          course=the_course)
    the_comment.save()
    return redirect(reverse('course-detail', kwargs={'cid': cid}))
def like_class(request, cid):
    """Record that the current user likes course `cid` (at most once)."""
    the_user = request.user
    the_profile = Profile.objects.get(user=the_user)
    try:
        the_course = Course.objects.get(cid=cid)
    except Course.DoesNotExist:  # was a bare except
        print('This course id does not exist')
        return redirect(reverse('home'))
    # After a successful get() the_course is always truthy, so the old
    # `if the_course:` wrapper was redundant.
    if the_profile.liked.filter(cid=cid).exists():
        print('You have liked this course!')
    else:
        the_profile.liked.add(the_course)
        # NOTE(review): read-modify-write is racy under concurrent likes;
        # an F('likes') + 1 update would be atomic -- confirm if it matters.
        the_course.likes += 1
        the_course.save()
    return redirect(reverse('course-detail', kwargs={'cid': cid}))
| {"/cmucoursebook/forms.py": ["/cmucoursebook/models.py"], "/cmucoursebook/views.py": ["/cmucoursebook/models.py", "/cmucoursebook/forms.py"]} |
51,877 | jc26/CMU_Coursebook | refs/heads/master | /cmucoursebook/apps.py | from __future__ import unicode_literals
from django.apps import AppConfig
class CmucoursebookConfig(AppConfig):
    """Django AppConfig for the cmucoursebook application
    (referenced from INSTALLED_APPS)."""
    name = 'cmucoursebook'
| {"/cmucoursebook/forms.py": ["/cmucoursebook/models.py"], "/cmucoursebook/views.py": ["/cmucoursebook/models.py", "/cmucoursebook/forms.py"]} |
51,879 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/tests/test_session.py | # -*- coding: utf-8 -*-
"""Tests of session structure."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
import numpy as np
from numpy.testing import assert_array_equal as ae
from pytest import raises
from ..session import BaseSession, Session
from ....utils.tempdir import TemporaryDirectory
from ....io.mock.artificial import MockModel
from ....io.mock.kwik import create_mock_kwik
from ....plot.waveforms import add_waveform_view
#------------------------------------------------------------------------------
# Generic tests
#------------------------------------------------------------------------------
def test_session_connect():
    """Check that @connect registers callbacks and emit() fires them."""
    session = BaseSession()
    # Callback names must follow the on_<event>() convention.
    with raises(ValueError):
        @session.connect
        def invalid():
            pass
    seen = []

    @session.connect
    def on_my_event():
        seen.append('my event')
    assert seen == []
    # Unknown events are silently ignored.
    session.emit('invalid')
    assert seen == []
    session.emit('my_event')
    assert seen == ['my event']
    # Extra keyword arguments are only forwarded when the callback's
    # signature accepts them, so this must not raise.
    session.emit('my_event', data='hello')
def test_session_connect_multiple():
    """Several callbacks may target one event; all fire in order."""
    session = BaseSession()
    log = []

    @session.connect
    def on_my_event():
        log.append('my event')

    @session.connect
    def on_my_event():
        log.append('my event again')
    session.emit('my_event')
    assert log == ['my event', 'my event again']
def test_session_unconnect():
    """A callback removed with unconnect() no longer receives events."""
    session = BaseSession()
    log = []

    @session.connect
    def on_my_event():
        log.append('my event')
    session.emit('my_event')
    assert log == ['my event']
    # After unregistering, emitting again must not invoke the callback.
    session.unconnect(on_my_event)
    session.emit('my_event')
    assert log == ['my event']
def test_session_connect_alternative():
    """@connect() with parentheses behaves like the bare decorator."""
    session = BaseSession()
    log = []
    assert log == []

    @session.connect()
    def on_my_event():
        log.append('my event')
    session.emit('my_event')
    assert log == ['my event']
def test_action():
    """Actions are callable directly, listed, and executable by descriptor."""
    session = BaseSession()
    log = []

    @session.action(title='My action')
    def my_action():
        log.append('action')
    # Calling through the session proxy runs the action once...
    session.my_action()
    assert log == ['action']
    assert session.actions == [{'func': my_action, 'title': 'My action'}]
    # ...and executing its descriptor runs it a second time.
    session.execute_action(session.actions[0])
    assert log == ['action', 'action']
def test_action_event():
    """Actions can emit events; extra kwargs reach compatible callbacks."""
    session = BaseSession()
    log = []

    @session.connect
    def on_hello(out, kwarg=''):
        log.append(out + kwarg)

    # We forgot the 'title=', but a positional title still works.
    @session.action('My action')
    def my_action_hello(data):
        log.append(data)
        session.emit('hello', data + ' world', kwarg='!')
    # The action requires exactly one argument.
    with raises(TypeError):
        session.my_action_hello()
    # This triggers the 'hello' event, which appends 'hello world!'.
    session.my_action_hello('hello')
    assert log == ['hello', 'hello world!']
#------------------------------------------------------------------------------
# Kwik tests
#------------------------------------------------------------------------------
def _start_manual_clustering(filename=None, model=None, tempdir=None):
    """Create a Session stored in `tempdir`, open the data, and register
    the show_waveforms action used by the tests below."""
    session = Session(store_path=tempdir)
    session.open(filename=filename, model=model)
    # NOTE(review): `title` is declared as a default argument of the action
    # function instead of being passed as @session.action(title=...) like
    # elsewhere in this file -- confirm this registers the intended title.
    @session.action
    def show_waveforms(title="Show waveforms"):
        view = add_waveform_view(session)
        return view
    return session
def test_session_mock():
    """Full session workflow (open, select, merge, views) on a mock model."""
    with TemporaryDirectory() as tempdir:
        session = _start_manual_clustering(model=MockModel(),
                                           tempdir=tempdir)
        view = session.show_waveforms()
        session.select([0])
        view_bis = session.show_waveforms()
        session.merge([3, 4])
        view.close()
        view_bis.close()
        # Reopen a fresh session backed by the same store directory.
        session = _start_manual_clustering(model=MockModel(),
                                           tempdir=tempdir)
        session.select([1, 2])
        view = session.show_waveforms()
        view.close()
def test_session_kwik():
    """End-to-end session workflow on a mock kwik file written to disk."""
    n_clusters = 5
    n_spikes = 50
    n_channels = 28
    n_fets = 2
    n_samples_traces = 3000
    with TemporaryDirectory() as tempdir:
        # Create the test HDF5 file in the temporary directory.
        filename = create_mock_kwik(tempdir,
                                    n_clusters=n_clusters,
                                    n_spikes=n_spikes,
                                    n_channels=n_channels,
                                    n_features_per_channel=n_fets,
                                    n_samples_traces=n_samples_traces)
        session = _start_manual_clustering(filename=filename,
                                           tempdir=tempdir)
        session.select([0])
        session.merge([3, 4])
        view = session.show_waveforms()
        # This won't work but shouldn't raise an error.
        session.select([1000])
        # TODO: more tests
        session.undo()
        session.redo()
        view.close()
def test_session_stats():
    """Open a session on a mock kwik file; the stats API checks are TODO."""
    n_clusters = 5
    n_spikes = 50
    n_channels = 28
    n_fets = 2
    n_samples_traces = 3000
    with TemporaryDirectory() as tempdir:
        # Create the test HDF5 file in the temporary directory.
        filename = create_mock_kwik(tempdir,
                                    n_clusters=n_clusters,
                                    n_spikes=n_spikes,
                                    n_channels=n_channels,
                                    n_features_per_channel=n_fets,
                                    n_samples_traces=n_samples_traces)
        session = _start_manual_clustering(filename,
                                           tempdir=tempdir)
        assert session
        # TODO
        # masks = session.stats.cluster_masks(3)
        # assert masks.shape == (n_channels,)
        # session.merge([3, 4])
        # masks = session.stats.cluster_masks(3)
        # assert masks.shape == (n_channels,)
        # masks = session.stats.cluster_masks(n_clusters)
        # assert masks.shape == (n_channels,)
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,880 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/wizard.py | # -*- coding: utf-8 -*-
"""Wizard."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import math
from operator import itemgetter
import numpy as np
#------------------------------------------------------------------------------
# Wizard
#------------------------------------------------------------------------------
def _argsort(seq, reverse=True, n_max=None):
"""Return the list of clusters in decreasing order of value from
a list of tuples (cluster, value)."""
out = [cl for (cl, v) in sorted(seq, key=itemgetter(1),
reverse=reverse)]
if n_max is not None:
out = out[:n_max]
return out
class Wizard(object):
    """Propose clusters to review, ranked by quality and similarity.

    Quality and similarity functions are plugged in via the `quality()` and
    `similarity()` decorators; the clusters to consider are set through the
    `cluster_ids` property.
    """
    def __init__(self, cluster_metadata=None):
        self._cluster_metadata = cluster_metadata
        self._similarity = None
        self._quality = None
        self._cluster_ids = None
    @property
    def cluster_ids(self):
        """The list of cluster ids the wizard operates on."""
        return self._cluster_ids
    @cluster_ids.setter
    def cluster_ids(self, value):
        self._cluster_ids = value
    def similarity(self, func):
        # Fixed docstring typo: "returing" -> "returning".
        """Register a function returning the similarity between two clusters."""
        self._similarity = func
        return func
    def quality(self, func):
        """Register a function returning the quality of a cluster."""
        self._quality = func
        return func
    def _check_cluster_ids(self):
        # All public queries need the cluster list; fail fast otherwise.
        if self._cluster_ids is None:
            raise RuntimeError("The list of clusters need to be set.")
    def best_clusters(self, n_max=None):
        """Return the list of best clusters sorted by decreasing quality."""
        self._check_cluster_ids()
        quality = [(cluster, self._quality(cluster))
                   for cluster in self._cluster_ids]
        return _argsort(quality, n_max=n_max)
    def best_cluster(self):
        """Return the best cluster, or None when there are no clusters."""
        clusters = self.best_clusters(n_max=1)
        if clusters:
            return clusters[0]
    def most_similar_clusters(self, cluster, n_max=None):
        """Return the `n_max` clusters most similar to `cluster`."""
        self._check_cluster_ids()
        # TODO: filter according to the cluster group.
        similarity = [(other, self._similarity(cluster, other))
                      for other in self._cluster_ids
                      if other != cluster]
        return _argsort(similarity, n_max=n_max)
    def mark_dissimilar(self, cluster_0, cluster_1):
        """Mark two clusters as dissimilar after a human decision.
        This pair should not be reproposed again to the user.
        """
        # TODO
        pass
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,881 | apeyrache/phy | refs/heads/master | /phy/plot/ccg.py | # -*- coding: utf-8 -*-
"""Plotting CCGs."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
from ._mpl_utils import _bottom_left_frame
#------------------------------------------------------------------------------
# CCG plotting
#------------------------------------------------------------------------------
def plot_ccg(ccg, baseline=None, bin=1., color=None, ax=None):
    """Plot a CCG with matplotlib and return an Axes instance.

    Parameters:
        ccg: 1D array of correlogram counts; must have odd length so the
            bins are symmetric around lag zero.
        baseline: optional y-value drawn as a horizontal reference line.
        bin: bin width (time units).  NOTE: shadows the `bin` builtin; the
            name is kept because keyword callers depend on it.
        color: facecolor passed to `ax.bar`.
        ax: existing Axes to draw into; a new subplot is created if None.
    """
    if ax is None:
        ax = plt.subplot(111)
    assert ccg.ndim == 1
    n = ccg.shape[0]
    assert n % 2 == 1
    bin = float(bin)
    # Symmetric x-range centered on zero lag, offset by half a bin.
    x_min = -n // 2 * bin - bin / 2
    x_max = (n // 2 - 1) * bin + bin / 2
    # Slightly widen the bars so adjacent bins touch without gaps.
    width = bin * 1.05
    left = np.linspace(x_min, x_max, n)
    ax.bar(left, ccg, facecolor=color, width=width, linewidth=0)
    if baseline is not None:
        ax.axhline(baseline, color='k', linewidth=2, linestyle='-')
    # Vertical dashed marker at zero lag.
    ax.axvline(color='k', linewidth=2, linestyle='--')
    ax.set_xlim(x_min, x_max + bin / 2)
    ax.set_ylim(0)
    # Only keep the bottom and left ticks.
    _bottom_left_frame(ax)
    return ax
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,882 | apeyrache/phy | refs/heads/master | /phy/io/kwik_model.py | # -*- coding: utf-8 -*-
"""The KwikModel class manages in-memory structures and KWIK file open/save."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os.path as op
import numpy as np
from ..ext import six
from .base_model import BaseModel
from ..cluster.manual.cluster_info import ClusterMetadata
from .h5 import open_h5, _check_hdf5_path
from ..waveform.loader import WaveformLoader
from ..waveform.filter import bandpass_filter, apply_filter
from ..electrode.mea import MEA, linear_positions
from ..utils.logging import debug
from ..utils.array import PartialArray
#------------------------------------------------------------------------------
# Kwik utility functions
#------------------------------------------------------------------------------
def _to_int_list(l):
"""Convert int strings to ints."""
return [int(_) for _ in l]
def _list_int_children(group):
    """Return the integer-named children of an HDF5 group, sorted
    numerically."""
    return sorted(int(name) for name in group.keys())
def _list_channel_groups(kwik):
    """Return the sorted list of channel group ids in a kwik file,
    or an empty list when the file has none."""
    if 'channel_groups' not in kwik:
        return []
    return _list_int_children(kwik['/channel_groups'])
def _list_recordings(kwik):
    """Return the sorted list of recording ids in a kwik file,
    or an empty list when the file has none."""
    if '/recordings' not in kwik:
        return []
    return _list_int_children(kwik['/recordings'])
def _list_channels(kwik, channel_group=None):
    """Return the sorted list of channels in one channel group of a kwik
    file, or an empty list when the group has no channels node."""
    # The channel group must be an int: it is formatted with {0:d} below.
    assert isinstance(channel_group, six.integer_types)
    path = '/channel_groups/{0:d}/channels'.format(channel_group)
    if path in kwik:
        channels = _list_int_children(kwik[path])
        return channels
    else:
        return []
def _list_clusterings(kwik, channel_group=None):
    """Return the list of clusterings in a kwik file, with 'main' first."""
    if channel_group is None:
        raise RuntimeError("channel_group must be specified when listing "
                           "the clusterings.")
    assert isinstance(channel_group, six.integer_types)
    path = '/channel_groups/{0:d}/clusters'.format(channel_group)
    clusterings = sorted(kwik[path].keys())
    # 'main' must exist and is always listed first.  An explicit check
    # replaces the former `assert`: asserts vanish under `python -O`, and
    # list.remove() would then raise a bare ValueError instead.
    if 'main' not in clusterings:
        raise ValueError("The 'main' clustering is missing from the file.")
    clusterings.remove('main')
    return ['main'] + clusterings
_COLOR_MAP = np.array([[1., 1., 1.],
[1., 0., 0.],
[0.5, 0.763, 1.],
[0.105, 1., 0.],
[1., 0.658, 0.5],
[0.421, 0., 1.],
[0.5, 1., 0.763],
[1., 0.947, 0.],
[1., 0.5, 0.974],
[0., 0.526, 1.],
[0.868, 1., 0.5],
[1., 0.316, 0.],
[0.553, 0.5, 1.],
[0., 1., 0.526],
[1., 0.816, 0.5],
[1., 0., 0.947],
[0.5, 1., 0.921],
[0.737, 1., 0.],
[1., 0.5, 0.5],
[0.105, 0., 1.],
[0.553, 1., 0.5],
[1., 0.632, 0.],
[0.711, 0.5, 1.],
[0., 1., 0.842],
[1., 0.974, 0.5],
[0.9, 0., 0.],
[0.45, 0.687, 0.9],
[0.095, 0.9, 0.],
[0.9, 0.592, 0.45],
[0.379, 0., 0.9],
[0.45, 0.9, 0.687],
[0.9, 0.853, 0.],
[0.9, 0.45, 0.876],
[0., 0.474, 0.9],
[0.782, 0.9, 0.45],
[0.9, 0.284, 0.],
[0.497, 0.45, 0.9],
[0., 0.9, 0.474],
[0.9, 0.734, 0.45],
[0.9, 0., 0.853],
[0.45, 0.9, 0.829],
[0.663, 0.9, 0.],
[0.9, 0.45, 0.45],
[0.095, 0., 0.9],
[0.497, 0.9, 0.45],
[0.9, 0.568, 0.],
[0.639, 0.45, 0.9],
[0., 0.9, 0.758],
[0.9, 0.876, 0.45]])
_KWIK_EXTENSIONS = ('kwik', 'kwx', 'raw.kwd')
def _kwik_filenames(filename):
"""Return the filenames of the different Kwik files for a given
experiment."""
basename, ext = op.splitext(filename)
return {ext: '{basename}.{ext}'.format(basename=basename, ext=ext)
for ext in _KWIK_EXTENSIONS}
class SpikeLoader(object):
    """Translate selection with spike ids into selection with
    absolute times."""
    def __init__(self, waveforms, spike_times):
        self._spike_times = spike_times
        self._waveforms = waveforms
    def __getitem__(self, item):
        # Map the spike selection to times, then index waveforms by time.
        return self._waveforms[self._spike_times[item]]
#------------------------------------------------------------------------------
# KwikModel class
#------------------------------------------------------------------------------
class KwikModel(BaseModel):
"""Holds data contained in a kwik file."""
def __init__(self, filename=None,
channel_group=None,
recording=None,
clustering=None):
super(KwikModel, self).__init__()
# Initialize fields.
self._spike_times = None
self._spike_clusters = None
self._metadata = None
self._clustering = 'main'
self._probe = None
self._channels = []
self._features = None
self._masks = None
self._waveforms = None
self._cluster_metadata = None
self._traces = None
self._waveform_loader = None
if filename is None:
raise ValueError("No filename specified.")
# Open the file.
self.name = op.splitext(op.basename(filename))[0]
self._kwik = open_h5(filename)
if not self._kwik.is_open():
raise ValueError("File {0} failed to open.".format(filename))
# This class only works with kwik version 2 for now.
kwik_version = self._kwik.read_attr('/', 'kwik_version')
if kwik_version != 2:
raise IOError("The kwik version is {v} != 2.".format(kwik_version))
# Open the Kwx file if it exists.
filenames = _kwik_filenames(filename)
if op.exists(filenames['kwx']):
self._kwx = open_h5(filenames['kwx'])
else:
self._kwx = None
# Open the Kwd file if it exists.
if op.exists(filenames['raw.kwd']):
self._kwd = open_h5(filenames['raw.kwd'])
else:
self._kwd = None
# Load global information about the file.
self._load_meta()
# List channel groups and recordings.
self._channel_groups = _list_channel_groups(self._kwik.h5py_file)
self._recordings = _list_recordings(self._kwik.h5py_file)
# Choose the default channel group if not specified.
if channel_group is None and self.channel_groups:
channel_group = self.channel_groups[0]
# Load the channel group.
self.channel_group = channel_group
# Choose the default recording if not specified.
if recording is None and self.recordings:
recording = self.recordings[0]
# Load the recording.
self.recording = recording
# Once the channel group is loaded, list the clusterings.
self._clusterings = _list_clusterings(self._kwik.h5py_file,
self.channel_group)
# Choose the first clustering (should always be 'main').
if clustering is None and self.clusterings:
clustering = self.clusterings[0]
# Load the specified clustering.
self.clustering = clustering
# Internal properties and methods
# -------------------------------------------------------------------------
@property
def _channel_groups_path(self):
return '/channel_groups/{0:d}'.format(self._channel_group)
@property
def _spikes_path(self):
return '{0:s}/spikes'.format(self._channel_groups_path)
@property
def _channels_path(self):
return '{0:s}/channels'.format(self._channel_groups_path)
@property
def _clusters_path(self):
return '{0:s}/clusters'.format(self._channel_groups_path)
@property
def _clustering_path(self):
return '{0:s}/{1:s}'.format(self._clusters_path, self._clustering)
    def _load_meta(self):
        """Load metadata from kwik file."""
        metadata = {}
        # Automatically load all metadata from spikedetekt group.
        path = '/application_data/spikedetekt/'
        metadata_fields = self._kwik.attrs(path)
        for field in metadata_fields:
            # Only lowercase attribute names are treated as metadata fields.
            if field.islower():
                try:
                    metadata[field] = self._kwik.read_attr(path, field)
                except TypeError:
                    # Unreadable attribute (e.g. unsupported type): skip it.
                    debug("Unable to load metadata field {0:s}".format(field))
        self._metadata = metadata
# Channel group
# -------------------------------------------------------------------------
@property
def channel_groups(self):
return self._channel_groups
def _channel_group_changed(self, value):
    """Called when the channel group changes.

    Loads everything tied to the new channel group: channel list, spike
    times, features/masks (when a .kwx file is open), default cluster
    metadata, probe geometry, and a fresh waveform loader.

    Raises
    ------
    ValueError
        If `value` is not one of `self.channel_groups`.
    """
    if value not in self.channel_groups:
        raise ValueError("The channel group {0} is invalid.".format(value))
    self._channel_group = value
    # Load channels.
    self._channels = _list_channels(self._kwik.h5py_file,
                                    self._channel_group)
    # Load spike times.
    path = '{0:s}/time_samples'.format(self._spikes_path)
    self._spike_times = self._kwik.read(path)[:]
    # Load features masks.
    path = '{0:s}/features_masks'.format(self._channel_groups_path)
    if self._kwx is not None:
        fm = self._kwx.read(path)
        self._features = PartialArray(fm, 0)
        # TODO: sparse, memory mapped, memcache, etc.
        k = self._metadata['nfeatures_per_channel']
        # This partial array simulates a (n_spikes, n_channels) array.
        self._masks = PartialArray(fm,
                                   (slice(0, k * self.n_channels, k), 1))
        assert self._masks.shape == (self.n_spikes, self.n_channels)
    self._cluster_metadata = ClusterMetadata()

    @self._cluster_metadata.default
    def group(cluster):
        # NOTE(review): 3 is presumably the 'unsorted' cluster group of the
        # kwik format -- confirm against the format specification.
        return 3
    # Load probe.
    positions = self._load_channel_positions()
    # TODO: support multiple channel groups.
    self._probe = MEA(positions=positions,
                      n_channels=self.n_channels)
    self._create_waveform_loader()
def _load_channel_positions(self):
    """Return an array of the per-channel probe positions from the kwik file."""
    read_attr = self._kwik.read_attr
    return np.array([read_attr('{0:s}/{1:d}'.format(self._channels_path, ch),
                               'position')
                     for ch in self.channels])
def _create_waveform_loader(self):
    """Create a waveform loader from the spikedetekt metadata.

    The loader extracts `(extract_s_before, extract_s_after)` samples
    around each spike and band-pass filters them with a Butterworth
    filter whose parameters come from the metadata.
    """
    n_samples = (self._metadata['extract_s_before'],
                 self._metadata['extract_s_after'])
    order = self._metadata['filter_butter_order']
    b_filter = bandpass_filter(rate=self._metadata['sample_rate'],
                               low=self._metadata['filter_low'],
                               high=self._metadata['filter_high'],
                               order=order)

    def filter(x):
        # Apply the band-pass filter to a raw snippet.
        return apply_filter(x, b_filter)
    # NOTE(review): scale_factor=.01 looks like a hard-coded gain applied to
    # the raw traces -- confirm where this constant comes from.
    self._waveform_loader = WaveformLoader(n_samples=n_samples,
                                           channels=self._channels,
                                           filter=filter,
                                           filter_margin=order * 3,
                                           scale_factor=.01)
@property
def channels(self):
    """List of channels in the current channel group.

    Populated by `_channel_group_changed()` when a channel group is loaded.
    """
    return self._channels
@property
def n_channels(self):
    """Number of channels in the current channel group."""
    return len(self.channels)
@property
def recordings(self):
    """List of recordings found in the kwik file."""
    return self._recordings
def _recording_changed(self, value):
    """Called when the recording number changes.

    Loads the traces of the new recording (when a .raw.kwd file is open)
    and points the waveform loader at them.

    Raises
    ------
    ValueError
        If `value` is not one of `self.recordings`.
    """
    if value not in self.recordings:
        raise ValueError("The recording {0} is invalid.".format(value))
    self._recording = value
    # Traces.
    if self._kwd is not None:
        path = '/recordings/{0:d}/data'.format(self._recording)
        self._traces = self._kwd.read(path)
        # Create a new WaveformLoader if needed.
        if self._waveform_loader is None:
            self._create_waveform_loader()
        self._waveform_loader.traces = self._traces
@property
def clusterings(self):
    """List of clusterings available for the current channel group."""
    return self._clusterings
def _clustering_changed(self, value):
    """Called when the clustering changes.

    Loads the per-spike cluster assignments of the selected clustering.

    Raises
    ------
    ValueError
        If `value` is not one of `self.clusterings`.
    """
    if value not in self.clusterings:
        raise ValueError("The clustering {0} is invalid.".format(value))
    self._clustering = value
    # NOTE: we are ensured here that self._channel_group is valid.
    path = '{0:s}/clusters/{1:s}'.format(self._spikes_path,
                                         self._clustering)
    self._spike_clusters = self._kwik.read(path)[:]
    # TODO: cluster metadata
# Data
# -------------------------------------------------------------------------
@property
def _clusters(self):
    """Sorted list of integer cluster ids stored in the kwik file."""
    group_names = self._kwik.groups(self._clustering_path)
    return sorted(int(name) for name in group_names)
@property
def metadata(self):
    """A dictionary holding metadata about the experiment.

    Loaded from the '/application_data/spikedetekt' attributes of the kwik
    file by `_load_meta()`.
    """
    return self._metadata
@property
def probe(self):
    """A Probe instance (an MEA built from the stored channel positions)."""
    return self._probe
@property
def traces(self):
    """Traces from the current recording (may be memory-mapped).

    Loaded from the .raw.kwd file in `_recording_changed()`.
    """
    return self._traces
@property
def spike_times(self):
    """Spike times from the current channel_group.

    Read from the 'time_samples' dataset, so expressed in time samples.
    """
    return self._spike_times
@property
def n_spikes(self):
    """Return the number of spikes."""
    return len(self.spike_times)
@property
def features(self):
    """Features from the current channel_group (may be memory-mapped).

    A PartialArray view into the 'features_masks' dataset of the .kwx file.
    """
    return self._features
@property
def masks(self):
    """Masks from the current channel_group (may be memory-mapped).

    A PartialArray simulating a (n_spikes, n_channels) array, built in
    `_channel_group_changed()`.
    """
    return self._masks
@property
def waveforms(self):
    """Waveforms from the current channel_group (may be memory-mapped).

    Returns a SpikeLoader wrapping the waveform loader and the spike times.
    """
    return SpikeLoader(self._waveform_loader, self.spike_times)
@property
def spike_clusters(self):
    """Spike clusters from the current channel_group.

    One cluster id per spike, for the currently-selected clustering.
    """
    return self._spike_clusters
@property
def cluster_metadata(self):
    """ClusterMetadata instance holding information about the clusters.

    Currently only a default 'group' value is populated (see
    `_channel_group_changed()`).
    """
    # TODO
    return self._cluster_metadata
def save(self):
    """Commits all in-memory changes to disk.

    Raises
    ------
    NotImplementedError
        Always; saving is not implemented yet.
    """
    raise NotImplementedError()
def close(self):
    """Close all opened files (optional .kwx/.kwd, then the .kwik file)."""
    for handle in (self._kwx, self._kwd):
        if handle is not None:
            handle.close()
    self._kwik.close()
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,883 | apeyrache/phy | refs/heads/master | /phy/stats/tests/test_ccg.py | # -*- coding: utf-8 -*-
"""Tests of CCG functions."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
from numpy.testing import assert_array_equal as ae
from pytest import raises
from ..ccg import _increment, _diff_shifted, correlograms
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_utils():
    """Check the two CCG helper routines on small hand-computed examples."""
    # _increment(): bump counts at the given (possibly repeated) indices.
    counts = np.arange(10)
    bump_at = [0, 2, 4, 2, 2, 2, 2, 2, 2]
    ae(_increment(counts, bump_at), [1, 1, 9, 3, 5, 5, 6, 7, 8, 9])
    # _diff_shifted(): differences between elements `steps` positions apart.
    primes = [2, 3, 5, 7, 11, 13, 17]
    ae(_diff_shifted(primes, 1), [1, 2, 2, 4, 2, 4])
    ae(_diff_shifted(primes, 2), [3, 4, 6, 6, 6])
def test_ccg_1():
    """Correlograms of a tiny hand-made spike train."""
    spike_times = [2, 3, 10, 12, 20, 24, 30, 40]
    spike_clusters = [0, 1, 0, 0, 2, 1, 0, 2]
    # Only two spike pairs fall within the window: (0,1) at lag 1 bin
    # and (0,0) at lag 2 bins.
    expected = np.zeros((3, 3, 4))
    expected[0, 1, 1] = 1
    expected[0, 0, 2] = 1
    observed = correlograms(spike_times, spike_clusters,
                            binsize=1, winsize_bins=2 * 3 + 1)
    ae(observed, expected)
def test_ccg_2():
    """Correlograms of a larger random spike train: only check the shape."""
    sample_rate = 20000
    n_spikes = 10000
    times = np.cumsum(np.random.exponential(scale=.002, size=n_spikes))
    times = (times * sample_rate).astype(np.int64)
    n_clusters = 10
    clusters = np.random.randint(0, n_clusters, n_spikes)
    # 50 ms window, 1 ms bins (at 20 kHz).
    winsize_samples = 2 * (25 * 20) + 1
    binsize = 1 * 20
    winsize_bins = 2 * ((winsize_samples // 2) // binsize) + 1
    assert winsize_bins % 2 == 1
    c = correlograms(times, clusters,
                     binsize=binsize, winsize_bins=winsize_bins)
    # 26 = winsize_bins // 2 + 1 bins per (cluster, cluster) pair.
    assert c.shape == (n_clusters, n_clusters, 26)
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,884 | apeyrache/phy | refs/heads/master | /phy/io/mock/tests/test_kwik.py | # -*- coding: utf-8 -*-
"""Tests of mock Kwik file creation."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from random import randint
import numpy as np
from numpy.testing import assert_array_equal as ae
import h5py
from pytest import raises
from ..artificial import (artificial_spike_times,
artificial_spike_clusters,
artificial_features,
artificial_masks,
artificial_traces)
from ....electrode.mea import MEA, staggered_positions
from ....utils.tempdir import TemporaryDirectory
from ...h5 import open_h5
from ...kwik_model import (KwikModel, _list_channel_groups, _list_channels,
_list_recordings,
_list_clusterings, _kwik_filenames)
from ..kwik import create_mock_kwik
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_create_kwik():
    """Smoke test: a mock kwik file can be created and reopened."""
    params = dict(n_clusters=10,
                  n_spikes=50,
                  n_channels=28,
                  n_features_per_channel=2,
                  n_samples_traces=3000)
    with TemporaryDirectory() as tempdir:
        # Create the test HDF5 file in the temporary directory.
        filename = create_mock_kwik(tempdir, **params)
        with open_h5(filename) as f:
            assert f
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,885 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/session.py | # -*- coding: utf-8 -*-
"""Session structure."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from functools import partial
import shutil
import numpy as np
from ...ext.six import string_types
from ...utils._misc import (_phy_user_dir,
_ensure_phy_user_dir_exists)
from ...ext.slugify import slugify
from ...utils.event import EventEmitter
from ...utils.logging import set_level, warn
from ...io.kwik_model import KwikModel
from ...io.base_model import BaseModel
from ._history import GlobalHistory
from ._utils import _concatenate_per_cluster_arrays
from .cluster_info import ClusterMetadata
from .clustering import Clustering
from .selector import Selector
from .store import ClusterStore, StoreItem
#------------------------------------------------------------------------------
# BaseSession class
#------------------------------------------------------------------------------
class BaseSession(EventEmitter):
    """Provide actions, views, and an event system for creating an interactive
    session."""

    def __init__(self):
        super(BaseSession, self).__init__()
        self._actions = []

    def action(self, func=None, title=None):
        """Decorator for a callback function of an action.

        The 'title' argument is used as a title for the GUI button.
        """
        # Bare `@action(title=...)` call: return a decorator.
        if func is None:
            return partial(self.action, title=title)
        # HACK: handle the case where the first argument is the title.
        if isinstance(func, string_types):
            return partial(self.action, title=func)
        # Register the action and expose it as a session method.
        self._actions.append({'title': title, 'func': func})
        setattr(self, func.__name__, func)
        return func

    @property
    def actions(self):
        """List of registered actions."""
        return self._actions

    def execute_action(self, action, *args, **kwargs):
        """Execute an action defined by an item in the 'actions' list."""
        action['func'](*args, **kwargs)
#------------------------------------------------------------------------------
# Store items
#------------------------------------------------------------------------------
class FeatureMasks(StoreItem):
    """Store item holding per-cluster masks (on disk) and their mean (in
    memory)."""
    fields = [('masks', 'disk'),
              ('mean_masks', 'memory')]

    def store_from_model(self, cluster, spikes):
        """Load the masks of `spikes` from the model and store them together
        with their per-channel mean."""
        cluster_masks = self.model.masks[spikes]
        self.store.store(cluster,
                         masks=cluster_masks,
                         mean_masks=cluster_masks.mean(axis=0))
#------------------------------------------------------------------------------
# Session class
#------------------------------------------------------------------------------
def _ensure_disk_store_exists(dir_name, root_path=None):
    """Return the path of the on-disk cluster store, creating it if needed.

    Parameters
    ----------
    dir_name : str
        Name of the dataset; slugified and used as the subfolder name.
    root_path : str or None
        Root of the cluster store.  Defaults to
        '<phy user dir>/cluster_store'.
    """
    # Disk store.
    if root_path is None:
        _ensure_phy_user_dir_exists()
        root_path = _phy_user_dir('cluster_store')
    # Create the disk store if it does not exist.
    if not op.exists(root_path):
        os.mkdir(root_path)
    # NOTE(review): this re-check can only fail if the mkdir above silently
    # failed (e.g. a race); os.mkdir would normally have raised already.
    if not op.exists(root_path):
        raise RuntimeError("Please create the store directory "
                           "{0}".format(root_path))
    # Put the store in a subfolder, using the name.
    dir_name = slugify(dir_name)
    path = op.join(root_path, dir_name)
    if not op.exists(path):
        os.mkdir(path)
    return path
def _process_ups(ups):
"""This function processes the UpdateInfo instances of the two
undo stacks (clustering and cluster metadata) and concatenates them
into a single UpdateInfo instance."""
if len(ups) == 0:
return
elif len(ups) == 1:
return ups[0]
elif len(ups) == 2:
up = ups[0]
up.update(ups[1])
return up
else:
raise NotImplementedError()
class Session(BaseSession):
    """Default manual clustering session.

    Parameters
    ----------
    filename : str
        Path to a .kwik file, to be used if 'model' is not used.
    model : instance of BaseModel
        A Model instance, to be used if 'filename' is not used.

    (Note: 'filename' and 'model' are arguments of `open()`, not of the
    constructor, which only takes an optional store path.)
    """
    def __init__(self, store_path=None):
        super(Session, self).__init__()
        self.model = None
        # Root path of the on-disk cluster store (None = default user dir).
        self._store_path = store_path
        # self.action and self.connect are decorators.
        self.action(self.open, title='Open')
        self.action(self.select, title='Select clusters')
        self.action(self.merge, title='Merge')
        self.action(self.split, title='Split')
        self.action(self.move, title='Move clusters to a group')
        self.action(self.undo, title='Undo')
        self.action(self.redo, title='Redo')
        self.connect(self.on_open)
        self.connect(self.on_cluster)

    # Public actions
    # -------------------------------------------------------------------------

    def open(self, filename=None, model=None):
        """Open a .kwik file (or adopt an existing model) and emit 'open'."""
        if model is None:
            model = KwikModel(filename)
        self.model = model
        self.emit('open')

    def select(self, clusters):
        """Select a list of clusters and emit 'select' with the selector."""
        self.selector.selected_clusters = clusters
        self.emit('select', self.selector)

    def merge(self, clusters):
        """Merge clusters and emit 'cluster' with the update info."""
        up = self.clustering.merge(clusters)
        self.emit('cluster', up=up)

    def split(self, spikes):
        """Split the given spikes off into a new cluster and emit 'cluster'."""
        up = self.clustering.split(spikes)
        self.emit('cluster', up=up)

    def move(self, clusters, group):
        """Move clusters to a group and emit 'cluster'."""
        up = self.cluster_metadata.set_group(clusters, group)
        self.emit('cluster', up=up)

    def undo(self):
        """Undo the last action; the change is not added back to the stack."""
        up = self._global_history.undo()
        self.emit('cluster', up=up, add_to_stack=False)

    def redo(self):
        """Redo the last undone action."""
        up = self._global_history.redo()
        self.emit('cluster', up=up, add_to_stack=False)

    # Event callbacks
    # -------------------------------------------------------------------------

    def on_open(self):
        """Update the session after new data has been loaded."""
        self._global_history = GlobalHistory(process_ups=_process_ups)
        # TODO: call this after the channel groups has changed.
        # Update the Selector and Clustering instances using the Model.
        spike_clusters = self.model.spike_clusters
        self.clustering = Clustering(spike_clusters)
        self.cluster_metadata = self.model.cluster_metadata
        # TODO: n_spikes_max in a user parameter
        self.selector = Selector(spike_clusters, n_spikes_max=100)
        # Kwik store.
        path = _ensure_disk_store_exists(self.model.name,
                                         root_path=self._store_path)
        self.store = ClusterStore(model=self.model, path=path)
        self.store.register_item(FeatureMasks)
        # TODO: do not reinitialize the store every time the dataset
        # is loaded! Check if the store exists and check consistency.
        self.store.generate(self.clustering.spikes_per_cluster)

        # NOTE(review): this registers a *second* 'cluster' callback (besides
        # the `on_cluster` method connected in __init__), so the store is
        # refreshed on every clustering change.
        @self.connect
        def on_cluster(up=None, add_to_stack=None):
            self.store.update(up)

    def on_cluster(self, up=None, add_to_stack=True):
        """Record a clustering action in the global undo stack."""
        if add_to_stack:
            self._global_history.action(self.clustering)
            # TODO: if metadata
            # self._global_history.action(self.cluster_metadata)
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,886 | apeyrache/phy | refs/heads/master | /phy/utils/event.py | # -*- coding: utf-8 -*-
"""Simple event system."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import re
from collections import defaultdict
from functools import partial
from inspect import getargspec
#------------------------------------------------------------------------------
# Event system
#------------------------------------------------------------------------------
class EventEmitter(object):
    """Class that emits events and accepts registered callbacks.

    Callbacks are registered with `connect()` (typically as a decorator on a
    function named `on_<eventname>`) and invoked with `emit()`.  When a
    callback is connected, a convenience method named after the event is
    also created on the instance.
    """
    def __init__(self):
        self._callbacks = defaultdict(list)

    def _get_on_name(self, func):
        """Return 'eventname' when the function name is `on_<eventname>()`.

        Raises
        ------
        ValueError
            If the function name does not follow that convention.
        """
        r = re.match("^on_(.+)$", func.__name__)
        if r:
            return r.group(1)
        raise ValueError("The function name should be "
                         "`on_<eventname>`().")

    def _create_emitter(self, event):
        """Create a method that emits an event of the same name."""
        if not hasattr(self, event):
            setattr(self, event,
                    lambda *args, **kwargs: self.emit(event, *args, **kwargs))

    def connect(self, func=None, event=None):
        """Decorator for a function reacting to an event being raised."""
        if func is None:
            return self.connect
        # Get the event name from the function.
        if event is None:
            event = self._get_on_name(func)
        # We register the callback function.
        self._callbacks[event].append(func)
        # self.event() should emit the event.
        self._create_emitter(event)
        return func

    def unconnect(self, *funcs):
        """Unconnect callback functions."""
        for func in funcs:
            for callbacks in self._callbacks.values():
                if func in callbacks:
                    callbacks.remove(func)

    def emit(self, event, *args, **kwargs):
        """Call all callback functions registered for that event.

        Only the keyword arguments that appear in a callback's signature are
        passed to that callback; extra keyword arguments are ignored.
        """
        # NOTE: `inspect.getargspec()` was removed in Python 3.11; use
        # `inspect.signature()` instead (imported locally to leave the
        # module-level imports untouched).
        from inspect import signature, Parameter
        for callback in self._callbacks.get(event, []):
            accepted = {name
                        for name, p in signature(callback).parameters.items()
                        if p.kind in (Parameter.POSITIONAL_OR_KEYWORD,
                                      Parameter.KEYWORD_ONLY)}
            # BUGFIX: filter from the *original* kwargs for each callback;
            # the previous code reassigned `kwargs` inside the loop, so the
            # first callback's filtering leaked into the following ones.
            callback(*args, **{n: v for n, v in kwargs.items()
                               if n in accepted})
#------------------------------------------------------------------------------
# Progress reporter
#------------------------------------------------------------------------------
class ProgressReporter(EventEmitter):
    """A class that reports total progress done with multiple jobs.

    Emits 'report' with (current, total) on every update, and 'complete'
    when the total current value reaches the total maximum.
    """
    def __init__(self):
        super(ProgressReporter, self).__init__()
        # Mapping {channel: [current_value, max_value]}.
        self._channels = {}

    def _value(self, channel):
        """Current value of one channel."""
        return self._channels[channel][0]

    def _max_value(self, channel):
        """Maximum value of one channel."""
        return self._channels[channel][1]

    def _set_value(self, channel, index, value):
        """Set either the current (index 0) or maximum (index 1) value."""
        self._channels.setdefault(channel, [0, 0])
        old_value = self._value(channel)
        max_value = self._max_value(channel)
        # Reject a current value above the maximum, or a new maximum below
        # the current value.
        invalid = ((index == 0 and value > max_value) or
                   (index == 1 and old_value > value))
        if invalid:
            raise ValueError("The current value {0} ".format(value) +
                             "needs to be less "
                             "than the maximum value {0}.".format(max_value))
        self._channels[channel][index] = value

    def increment(self, *channels):
        """Increment the values of one or multiple channels."""
        self.set(**{channel: (self._value(channel) + 1)
                    for channel in channels})

    def set(self, **values):
        """Set the current values of one or several channels."""
        for channel, value in values.items():
            self._set_value(channel, 0, value)
        current, total = self.current(), self.total()
        self.emit('report', current, total)
        if current == total:
            self.emit('complete')

    def set_max(self, **max_values):
        """Set the maximum values of one or several channels."""
        for channel, max_value in max_values.items():
            self._set_value(channel, 1, max_value)

    def is_complete(self):
        """Whether the total current value equals the total maximum."""
        return self.current() == self.total()

    def current(self):
        """Return the total current value."""
        return sum(pair[0] for pair in self._channels.values())

    def total(self):
        """Return the total of the maximum values."""
        return sum(pair[1] for pair in self._channels.values())
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,887 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/tests/test_wizard.py | # -*- coding: utf-8 -*-
"""Test wizard."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
import numpy.random as npr
from numpy.testing import assert_array_equal as ae
from pytest import raises
from ..wizard import Wizard
from ..cluster_info import ClusterMetadata
#------------------------------------------------------------------------------
# Test wizard
#------------------------------------------------------------------------------
def test_wizard():
    """Exercise the Wizard ranking API on three clusters."""
    wizard = Wizard()
    wizard.cluster_ids = [2, 3, 5]

    quality_scores = {2: .9, 3: .3, 5: .6}

    @wizard.quality
    def quality(cluster):
        return quality_scores[cluster]

    pair_scores = {(2, 3): 1, (2, 5): 2, (3, 5): 3}

    @wizard.similarity
    def similarity(cluster, other):
        # Similarity is symmetric: normalize the pair ordering.
        pair = (min(cluster, other), max(cluster, other))
        return pair_scores[pair]

    assert wizard.best_clusters() == [2, 5, 3]
    assert wizard.best_cluster() == 2
    assert wizard.most_similar_clusters(2) == [5, 3]
    wizard.mark_dissimilar(2, 3)
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,888 | apeyrache/phy | refs/heads/master | /phy/io/mock/kwik.py | # -*- coding: utf-8 -*-
"""Mock Kwik files."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from random import randint
import numpy as np
from numpy.testing import assert_array_equal as ae
import h5py
from pytest import raises
from ...io.mock.artificial import (artificial_spike_times,
artificial_spike_clusters,
artificial_features,
artificial_masks,
artificial_traces)
from ...electrode.mea import MEA, staggered_positions
from ...utils.tempdir import TemporaryDirectory
from ..h5 import open_h5
from ..kwik_model import (KwikModel, _list_channel_groups, _list_channels,
_list_recordings,
_list_clusterings, _kwik_filenames)
#------------------------------------------------------------------------------
# Mock Kwik file
#------------------------------------------------------------------------------
def create_mock_kwik(dir_path, n_clusters=None, n_spikes=None,
                     n_channels=None, n_features_per_channel=None,
                     n_samples_traces=None,
                     with_kwx=True, with_kwd=True):
    """Create a test kwik file (plus optional .kwx and .raw.kwd siblings).

    Parameters
    ----------
    dir_path : str
        Directory in which '_test.kwik' (and siblings) are created.
    n_clusters, n_spikes, n_channels, n_features_per_channel,
    n_samples_traces : int
        Sizes of the artificial dataset.
    with_kwx, with_kwd : bool
        Whether to also create the features/masks (.kwx) and raw traces
        (.raw.kwd) files.

    Returns
    -------
    str
        The path of the created .kwik file.

    Raises
    ------
    ValueError
        If the generated spike times exceed `n_samples_traces`.
    """
    filename = op.join(dir_path, '_test.kwik')
    filenames = _kwik_filenames(filename)
    kwx_filename = filenames['kwx']
    kwd_filename = filenames['raw.kwd']

    # Create the kwik file.
    with open_h5(filename, 'w') as f:
        f.write_attr('/', 'kwik_version', 2)

        def _write_metadata(key, value):
            # All metadata lives under the spikedetekt application group.
            f.write_attr('/application_data/spikedetekt', key, value)

        _write_metadata('sample_rate', 20000.)

        # Filter parameters.
        _write_metadata('filter_low', 500.)
        _write_metadata('filter_high', 0.95 * .5 * 20000.)
        _write_metadata('filter_butter_order', 3)

        # Waveform extraction window (samples before/after each spike).
        _write_metadata('extract_s_before', 15)
        _write_metadata('extract_s_after', 25)

        _write_metadata('nfeatures_per_channel', n_features_per_channel)

        # Create spike times.
        spike_times = artificial_spike_times(n_spikes).astype(np.int64)
        if spike_times.max() >= n_samples_traces:
            raise ValueError("There are too many spikes: decrease 'n_spikes'.")
        f.write('/channel_groups/1/spikes/time_samples', spike_times)

        # Create spike clusters.
        spike_clusters = artificial_spike_clusters(n_spikes,
                                                   n_clusters).astype(np.int32)
        f.write('/channel_groups/1/spikes/clusters/main', spike_clusters)

        # Create channels.
        positions = staggered_positions(n_channels)
        for channel in range(n_channels):
            group = '/channel_groups/1/channels/{0:d}'.format(channel)
            f.write_attr(group, 'name', str(channel))
            f.write_attr(group, 'position', positions[channel])

        # Create cluster metadata.
        for cluster in range(n_clusters):
            group = '/channel_groups/1/clusters/main/{0:d}'.format(cluster)
            color = ('/channel_groups/1/clusters/main/{0:d}'.format(cluster) +
                     '/application_data/klustaviewa')
            # NOTE(review): 3 is presumably the 'unsorted' cluster group --
            # confirm against the kwik format specification.
            f.write_attr(group, 'cluster_group', 3)
            f.write_attr(color, 'color', randint(2, 10))

        # Create recordings.
        f.write_attr('/recordings/0', 'name', 'recording_0')

    # Create the kwx file.
    if with_kwx:
        with open_h5(kwx_filename, 'w') as f:
            f.write_attr('/', 'kwik_version', 2)
            features = artificial_features(n_spikes,
                                           n_channels * n_features_per_channel)
            masks = artificial_masks(n_spikes,
                                     n_channels * n_features_per_channel)
            # Features and masks are interleaved along a third axis.
            fm = np.dstack((features, masks)).astype(np.float32)
            f.write('/channel_groups/1/features_masks', fm)

    # Create the raw kwd file.
    if with_kwd:
        with open_h5(kwd_filename, 'w') as f:
            f.write_attr('/', 'kwik_version', 2)
            traces = artificial_traces(n_samples_traces, n_channels)
            # TODO: int16 traces
            f.write('/recordings/0/data', traces.astype(np.float32))

    return filename
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,889 | apeyrache/phy | refs/heads/master | /phy/utils/_misc.py | # -*- coding: utf-8 -*-
"""Utility functions."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from inspect import getargspec
from ..ext.six import string_types
#------------------------------------------------------------------------------
# Various Python utility functions
#------------------------------------------------------------------------------
def _as_dict(x):
"""Convert a list of tuples to a dict."""
if isinstance(x, list):
return dict(x)
else:
return x
def _concatenate_dicts(*dicts):
"""Concatenate dictionaries."""
out = {}
for dic in dicts:
out.update(dic)
return out
def _is_list(obj):
    """Return whether `obj` is a `list` instance."""
    return isinstance(obj, list)
def _as_list(obj):
    """Ensure an object is a list.

    Strings and objects without a length are wrapped in a single-element
    list; everything else is returned unchanged.
    """
    if isinstance(obj, string_types):
        # Strings are iterable but should count as one element.
        return [obj]
    if not hasattr(obj, '__len__'):
        return [obj]
    return obj
def _fun_arg_count(f):
"""Return the number of arguments of a function.
WARNING: with methods, only works if the first argument is named 'self'.
"""
args = getargspec(f).args
if args and args[0] == 'self':
args = args[1:]
return len(args)
#------------------------------------------------------------------------------
# Config
#------------------------------------------------------------------------------
_PHY_USER_DIR_NAME = '.phy'
def _phy_user_dir(sub_dir=None):
    """Return the absolute path to the phy user directory.

    If sub_dir is given, return the path of that subdirectory inside it.
    """
    home = op.expanduser("~")
    root = op.realpath(op.join(home, _PHY_USER_DIR_NAME))
    return root if sub_dir is None else op.join(root, sub_dir)
def _ensure_phy_user_dir_exists():
    """Create the phy user directory if it does not exist.

    Return the directory path in all cases.
    """
    user_dir = _phy_user_dir()
    if not op.exists(user_dir):
        # mkdir (not makedirs): the home directory is assumed to exist.
        os.mkdir(user_dir)
    return user_dir
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,890 | apeyrache/phy | refs/heads/master | /phy/plot/waveforms.py | # -*- coding: utf-8 -*-
"""Plotting waveforms."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
from vispy import gloo
from vispy.gloo import Texture2D
from vispy.visuals import Visual
from vispy.visuals.shaders import ModularProgram, Function, Variable
from vispy.visuals.glsl.color import HSV_TO_RGB, RGB_TO_HSV
from ._vispy_utils import PanZoomCanvas
from ..utils.array import _unique, _as_array, _index_of, _normalize
from ..utils.logging import debug
from ..utils._color import _random_color
#------------------------------------------------------------------------------
# Waveforms visual
#------------------------------------------------------------------------------
class Waveforms(Visual):
    """VisPy visual showing spike waveforms in a (cluster, channel) grid.

    Data are assigned through properties (waveforms, masks, spike_ids,
    spike_clusters, channel_positions, cluster_colors); each setter
    schedules a "bake" step that uploads GPU buffers lazily at draw time.
    """
    # TODO: use ST instead of PanZoom
    # TODO: move GLSL code to .glsl files.
    VERT_SHADER = """
    // TODO: add depth
    attribute vec2 a_data;  // -1..1
    attribute float a_time;  // -1..1
    attribute vec2 a_box;  // 0..(n_clusters-1, n_channels-1)

    uniform float n_clusters;
    uniform float n_channels;
    uniform vec2 u_data_scale;
    uniform sampler2D u_channel_pos;
    uniform sampler2D u_cluster_color;

    varying vec4 v_color;
    varying vec2 v_box;

    // TODO: use VisPy transforms
    vec2 get_box_pos(vec2 box) {  // box = (cluster, channel)
        vec2 box_pos = texture2D(u_channel_pos,
                                 vec2(box.y / (n_channels - 1.), .5)).xy;
        box_pos = 2. * box_pos - 1.;
        // Spacing between cluster boxes.
        float h = 2.5 * u_data_scale.x;
        // TODO: add superposition
        box_pos.x += h * (box.x - .5 * (n_clusters - 1.));
        return box_pos;
    }

    vec3 get_color(float cluster) {
        return texture2D(u_cluster_color,
                         vec2(cluster / (n_clusters - 1.), .5)).xyz;
    }

    void main() {
        vec2 pos = u_data_scale * vec2(a_time, a_data.x);  // -1..1
        vec2 box_pos = get_box_pos(a_box);
        v_box = a_box;
        gl_Position = vec4($transform(pos + box_pos), 0., 1.);

        // Compute the waveform color as a function of the cluster color
        // and the mask.
        float mask = a_data.y;
        // TODO: store the colors in HSV in the texture?
        vec3 rgb = get_color(a_box.x);
        vec3 hsv = $rgb_to_hsv(rgb);
        // Change the saturation and value as a function of the mask.
        hsv.y = mask;
        hsv.z = .5 * (1. + mask);
        v_color.rgb = $hsv_to_rgb(hsv);
        v_color.a = .5;
    }
    """

    FRAG_SHADER = """
    varying vec4 v_color;
    varying vec2 v_box;

    void main() {
        if ((fract(v_box.x) > 0.) || (fract(v_box.y) > 0.))
            discard;
        gl_FragColor = v_color;
    }
    """

    def __init__(self, **kwargs):
        super(Waveforms, self).__init__(**kwargs)
        self.n_spikes, self.n_channels, self.n_samples = None, None, None
        self._spike_clusters = None
        self._waveforms = None
        self._spike_ids = None
        # Names of the bake steps scheduled for the next draw.
        self._to_bake = []

        self.program = ModularProgram(self.VERT_SHADER, self.FRAG_SHADER)
        self.program.vert['rgb_to_hsv'] = Function(RGB_TO_HSV)
        self.program.vert['hsv_to_rgb'] = Function(HSV_TO_RGB)
        self.program['u_data_scale'] = (.05, .03)

        gloo.set_state(clear_color='black', blend=True,
                       blend_func=('src_alpha', 'one_minus_src_alpha'))

    # Data properties
    # -------------------------------------------------------------------------

    def _set_or_assert_n_spikes(self, arr):
        """If n_spikes is None, set it using the array's shape. Otherwise,
        check that the array has n_spikes rows."""
        if self.n_spikes is None:
            self.n_spikes = arr.shape[0]
        assert arr.shape[0] == self.n_spikes

    def set_to_bake(self, *bakes):
        # Schedule one or several bake steps (deduplicated) for the next draw.
        for bake in bakes:
            if bake not in self._to_bake:
                self._to_bake.append(bake)

    @property
    def spike_clusters(self):
        """The clusters assigned to *all* spikes, not just the displayed
        spikes."""
        return self._spike_clusters

    @spike_clusters.setter
    def spike_clusters(self, value):
        """Set all spike clusters."""
        value = _as_array(value)
        self._spike_clusters = value
        self.set_to_bake('spikes_clusters')

    @property
    def waveforms(self):
        """Displayed waveforms."""
        return self._waveforms

    @waveforms.setter
    def waveforms(self, value):
        # WARNING: when setting new data, waveforms need to be set first.
        # n_spikes will be set as a function of waveforms.
        value = _as_array(value)
        # TODO: support sparse structures
        assert value.ndim == 3
        self.n_spikes, self.n_samples, self.n_channels = value.shape
        self._waveforms = value
        self.set_to_bake('spikes', 'spikes_clusters', 'color')

    @property
    def masks(self):
        """Masks of the displayed waveforms."""
        return self._masks

    @masks.setter
    def masks(self, value):
        value = _as_array(value)
        self._set_or_assert_n_spikes(value)
        # TODO: support sparse structures
        assert value.ndim == 2
        assert value.shape == (self.n_spikes, self.n_channels)
        self._masks = value
        self.set_to_bake('spikes')

    @property
    def spike_ids(self):
        """The list of spike ids to display, should correspond to the
        waveforms."""
        # Lazily default to 0..n_spikes-1 when not set explicitly.
        if self._spike_ids is None:
            self._spike_ids = np.arange(self.n_spikes).astype(np.int64)
        return self._spike_ids

    @spike_ids.setter
    def spike_ids(self, value):
        value = _as_array(value)
        self._set_or_assert_n_spikes(value)
        self._spike_ids = value
        self.set_to_bake('spikes')

    @property
    def channel_positions(self):
        """Array with the coordinates of all channels."""
        return self._channel_positions

    @channel_positions.setter
    def channel_positions(self, value):
        value = _as_array(value)
        self._channel_positions = value
        self.set_to_bake('channel_positions')

    @property
    def cluster_ids(self):
        """Clusters of the displayed spikes."""
        return _unique(self.spike_clusters[self.spike_ids])

    @property
    def n_clusters(self):
        # Number of distinct clusters among the displayed spikes.
        return len(self.cluster_ids)

    @property
    def cluster_colors(self):
        """Colors of the displayed clusters."""
        return self._cluster_colors

    @cluster_colors.setter
    def cluster_colors(self, value):
        self._cluster_colors = _as_array(value)
        assert len(self._cluster_colors) == self.n_clusters
        self.set_to_bake('color')

    @property
    def box_scale(self):
        # (x, y) scaling of each waveform inside its box.
        return tuple(self.program['u_data_scale'])

    @box_scale.setter
    def box_scale(self, value):
        assert isinstance(value, tuple) and len(value) == 2
        self.program['u_data_scale'] = value
        self.update()

    # Data baking
    # -------------------------------------------------------------------------

    def _bake_color(self):
        # Upload the per-cluster colors as a 1-row RGB texture.
        u_cluster_color = self.cluster_colors.reshape((1, self.n_clusters, -1))
        u_cluster_color = (u_cluster_color * 255).astype(np.uint8)
        # TODO: more efficient to update the data from an existing texture
        self.program['u_cluster_color'] = Texture2D(u_cluster_color)
        debug("bake color", u_cluster_color.shape)

    def _bake_channel_positions(self):
        # WARNING: channel_positions must be in [0,1] because we have a
        # texture.
        positions = self.channel_positions.astype(np.float32)
        positions = _normalize(positions, keep_ratio=True)
        positions = positions.reshape((1, self.n_channels, -1))
        # Rescale a bit and recenter.
        positions = .1 + .8 * positions
        u_channel_pos = np.dstack((positions,
                                   np.zeros((1, self.n_channels, 1))))
        u_channel_pos = (u_channel_pos * 255).astype(np.uint8)
        # TODO: more efficient to update the data from an existing texture
        self.program['u_channel_pos'] = Texture2D(u_channel_pos,
                                                  wrapping='clamp_to_edge')
        debug("bake channel pos", u_channel_pos.shape)

    def _bake_spikes(self):
        # Bake masks.
        # WARNING: swap channel/time axes in the waveforms array.
        waveforms = np.swapaxes(self._waveforms, 1, 2)
        masks = np.repeat(self._masks.ravel(), self.n_samples)
        data = np.c_[waveforms.ravel(), masks.ravel()].astype(np.float32)
        # TODO: more efficient to update the data from an existing VBO
        self.program['a_data'] = data
        debug("bake spikes", data.shape)

        # TODO: SparseCSR, this should just be 'channel'
        self._channels_per_spike = np.tile(np.arange(self.n_channels).
                                           astype(np.float32),
                                           self.n_spikes)
        # TODO: SparseCSR, this should be np.diff(spikes_ptr)
        self._n_channels_per_spike = self.n_channels * np.ones(self.n_spikes,
                                                               dtype=np.int32)
        self._n_waveforms = np.sum(self._n_channels_per_spike)
        # TODO: precompute this with a maximum number of waveforms?
        a_time = np.tile(np.linspace(-1., 1., self.n_samples),
                         self._n_waveforms).astype(np.float32)
        self.program['a_time'] = a_time
        self.program['n_clusters'] = self.n_clusters
        self.program['n_channels'] = self.n_channels

    def _bake_spikes_clusters(self):
        # WARNING: needs to be called *after* _bake_spikes().
        if not hasattr(self, '_n_channels_per_spike'):
            raise RuntimeError("'_bake_spikes()' needs to be called before "
                               "'bake_spikes_clusters().")
        # Get the spike cluster indices (between 0 and n_clusters-1).
        spike_clusters_idx = self.spike_clusters[self.spike_ids]
        spike_clusters_idx = _index_of(spike_clusters_idx, self.cluster_ids)
        # Generate the box attribute.
        a_cluster = np.repeat(spike_clusters_idx,
                              self._n_channels_per_spike * self.n_samples)
        a_channel = np.repeat(self._channels_per_spike, self.n_samples)
        a_box = np.c_[a_cluster, a_channel].astype(np.float32)
        # TODO: more efficient to update the data from an existing VBO
        self.program['a_box'] = a_box
        debug("bake spikes clusters", a_box.shape)

    def _bake(self):
        """Prepare and upload the data on the GPU.

        Return whether something has been baked or not.
        """
        if self.n_spikes is None or self.n_spikes == 0:
            return
        n_bake = len(self._to_bake)
        # Bake what needs to be baked.
        # WARNING: the bake functions are called in alphabetical order.
        # Tweak the names if there are dependencies between the functions.
        for bake in sorted(self._to_bake):
            # Name of the private baking method.
            name = '_bake_{0:s}'.format(bake)
            if hasattr(self, name):
                getattr(self, name)()
        self._to_bake = []
        return n_bake > 0

    def draw(self, event):
        """Draw the waveforms."""
        # Bake what needs to be baked at this point.
        self._bake()
        if self.n_spikes is not None and self.n_spikes > 0:
            self.program.draw('line_strip')
class WaveformView(PanZoomCanvas):
    """Canvas showing a Waveforms visual with pan & zoom interaction.

    '+'/'-' rescale the waveform boxes vertically; with Control held,
    horizontally.
    """
    def __init__(self, **kwargs):
        super(WaveformView, self).__init__(**kwargs)
        self.visual = Waveforms()

    def on_key_press(self, event):
        # TODO: more interactivity
        # TODO: keyboard shortcut manager
        super(WaveformView, self).on_key_press(event)
        sx, sy = self.visual.box_scale
        factor = 1.1
        ctrl = 'Control' in event.modifiers
        if event.key == '+':
            self.visual.box_scale = (sx * factor, sy) if ctrl \
                else (sx, sy * factor)
        if event.key == '-':
            self.visual.box_scale = (sx / factor, sy) if ctrl \
                else (sx, sy / factor)
def add_waveform_view(session, backend=None):
    """Add a waveform view in a session.

    This function binds the session events to the created waveform view.
    The caller needs to show the waveform view explicitly.
    """
    if backend in ('pyqt4', None):
        kwargs = {'always_on_top': True}
    else:
        kwargs = {}
    view = WaveformView(**kwargs)

    @session.connect
    def on_open():
        # Push the full spike_clusters array and probe geometry to the visual.
        if session.model is None:
            return
        view.visual.spike_clusters = session.clustering.spike_clusters
        view.visual.channel_positions = session.model.probe.positions
        view.update()

    @session.connect
    def on_cluster(up=None):
        pass
        # TODO: select the merged cluster
        # session.select(merged)

    @session.connect
    def on_select(selector):
        # Upload the waveforms/masks of the currently-selected spikes.
        spikes = selector.selected_spikes
        if len(spikes) == 0:
            return
        if view.visual.spike_clusters is None:
            on_open()
        view.visual.waveforms = session.model.waveforms[spikes]
        view.visual.masks = session.model.masks[spikes]
        view.visual.spike_ids = spikes
        # TODO: how to choose cluster colors?
        view.visual.cluster_colors = [_random_color()
                                      for _ in selector.selected_clusters]
        view.update()

    # Unregister the callbacks when the view is closed.
    @view.connect
    def on_close(event):
        session.unconnect(on_open, on_cluster, on_select)

    # TODO: first_draw() event in VisPy view that is emitted when the view
    # is first rendered (first paint event).
    @view.connect
    def on_draw(event):
        # Lazy one-time initialization on the first draw.
        # NOTE(review): on_select is assumed to be inside the guard (only
        # on first draw) — confirm against upstream history.
        if view.visual.spike_clusters is None:
            on_open()
            on_select(session.selector)
    return view
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,891 | apeyrache/phy | refs/heads/master | /phy/__init__.py | # -*- coding: utf-8 -*-
from .utils import default_logger

# Package metadata.
__author__ = 'Kwik Team'
__email__ = 'cyrille.rossant at gmail.com'
__version__ = '0.1.0-alpha'

# Set up the default logger.
default_logger()
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,892 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/store.py | # -*- coding: utf-8 -*-
"""Cluster store."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from ...utils.logging import debug
from ...utils._misc import (_concatenate_dicts,
_phy_user_dir,
_ensure_phy_user_dir_exists)
from ...io.h5 import open_h5
from ...io.sparse import load_h5, save_h5
from ...ext.six import string_types
#------------------------------------------------------------------------------
# Data stores
#------------------------------------------------------------------------------
class MemoryStore(object):
    """Store cluster-related data in memory (a dict of dicts)."""
    def __init__(self):
        self._ds = {}

    def store(self, cluster, **data):
        """Store or update key/value data for a cluster."""
        self._ds.setdefault(cluster, {}).update(data)

    def load(self, cluster, keys=None):
        """Load cluster data: everything, a single key, or a list of keys."""
        cluster_data = self._ds.get(cluster, {})
        if keys is None:
            return cluster_data
        if isinstance(keys, string_types):
            return cluster_data.get(keys, None)
        assert isinstance(keys, (list, tuple))
        return {key: cluster_data.get(key, None) for key in keys}

    @property
    def clusters(self):
        """Sorted list of cluster ids present in the store."""
        return sorted(self._ds)

    def delete(self, clusters):
        """Delete some clusters from the store (missing ones are ignored)."""
        assert isinstance(clusters, list)
        for cluster in clusters:
            self._ds.pop(cluster, None)

    def clear(self):
        """Clear the store completely by deleting all clusters."""
        self.delete(self.clusters)
class DiskStore(object):
    """Store cluster-related data in HDF5 files, one file per cluster."""
    def __init__(self, directory):
        assert directory is not None
        self._directory = op.realpath(directory)

    # Internal methods
    # -------------------------------------------------------------------------
    def _cluster_path(self, cluster):
        """Return the absolute path of a cluster in the disk store."""
        # TODO: subfolders
        filename = '{0:05d}.h5'.format(cluster)
        return op.realpath(op.join(self._directory, filename))

    def _cluster_file_exists(self, cluster):
        """Return whether a cluster file exists."""
        return op.exists(self._cluster_path(cluster))

    def _cluster_file(self, cluster, mode):
        """Open and return the HDF5 file handle of a cluster."""
        return open_h5(self._cluster_path(cluster), mode)

    # Data get/set methods
    # -------------------------------------------------------------------------
    def _get(self, f, key):
        """Read the dataset stored under a key."""
        return load_h5(f, '/{0:s}'.format(key))

    def _set(self, f, key, value):
        """Write (or overwrite) the dataset stored under a key."""
        save_h5(f, '/{0:s}'.format(key), value, overwrite=True)

    # Public methods
    # -------------------------------------------------------------------------
    def store(self, cluster, **data):
        """Store cluster-related data."""
        with self._cluster_file(cluster, 'a') as f:
            for key, value in data.items():
                self._set(f, key, value)

    def load(self, cluster, keys=None):
        """Load cluster-related data."""
        # A missing cluster file yields None for every requested key.
        if not self._cluster_file_exists(cluster):
            if keys is None:
                return {}
            return {key: None for key in keys}
        # Open the cluster file in read mode.
        with self._cluster_file(cluster, 'r') as f:
            # A single string key returns the bare value.
            if isinstance(keys, string_types):
                return self._get(f, keys)
            # All keys are requested if None.
            if keys is None:
                keys = f.datasets()
            assert isinstance(keys, (list, tuple))
            return {key: self._get(f, key) for key in keys}

    @property
    def clusters(self):
        """Sorted list of cluster ids present in the store."""
        if not op.exists(self._directory):
            return []
        return sorted(int(op.splitext(name)[0])
                      for name in os.listdir(self._directory))

    def delete(self, clusters):
        """Delete some clusters from the store."""
        for cluster in clusters:
            if self._cluster_file_exists(cluster):
                os.remove(self._cluster_path(cluster))

    def clear(self):
        """Clear the store completely by deleting all clusters."""
        self.delete(self.clusters)
#------------------------------------------------------------------------------
# Store
#------------------------------------------------------------------------------
class Store(object):
    """Wrap a MemoryStore and a DiskStore behind a single interface.

    Each field is registered with a storage location ('memory' or 'disk');
    store() and load() then dispatch transparently to the right backend.
    """
    def __init__(self, store_path):
        assert store_path is not None
        # Create the memory store.
        self._memory_store = MemoryStore()
        # Create the disk store.
        self._disk_store = DiskStore(store_path)
        # Where the info are stored: a {'field' => ('memory' or 'disk')} dict.
        self._dispatch = {}

    def register_field(self, name, location):
        """Register a field to be stored either in 'memory' or on 'disk'."""
        self._check_location(location)
        self._dispatch[name] = location

    def _check_location(self, location):
        """Raise ValueError if the location is not 'memory' or 'disk'."""
        if location not in ('memory', 'disk'):
            # Fixed misplaced quote in the original message
            # ("'location 'should be ...").
            raise ValueError("'location' should be 'memory' or 'disk'.")

    def _filter(self, keys, location):
        """Return all keys registered in the specified location."""
        if keys is None:
            return None
        return [key for key in keys
                if self._dispatch.get(key, None) == location]

    # Public methods
    # -------------------------------------------------------------------------
    @property
    def clusters(self):
        """Return the list of clusters present in the store."""
        clusters_memory = self._memory_store.clusters
        clusters_disk = self._disk_store.clusters
        # Both stores should have the same clusters at all times.
        if clusters_memory != clusters_disk:
            raise RuntimeError("Cluster store inconsistency.")
        return clusters_memory

    def store(self, cluster, location=None, **data):
        """Store cluster-related information.

        If 'location' is given, the fields in 'data' are registered there
        first.
        """
        # If the location is specified, register the fields there.
        if location in ('memory', 'disk'):
            for key in data.keys():
                self.register_field(key, location)
        elif location is not None:
            # Any other non-None location is invalid: raise.
            self._check_location(location)
        # Store data in memory.
        data_memory = {k: data[k] for k in self._filter(data.keys(), 'memory')}
        self._memory_store.store(cluster, **data_memory)
        # Store data on disk.
        data_disk = {k: data[k] for k in self._filter(data.keys(), 'disk')}
        self._disk_store.store(cluster, **data_disk)

    def load(self, cluster, keys=None):
        """Load cluster-related information.

        'keys' may be None (everything), a single registered field name,
        or a list of field names. An unregistered single key raises
        KeyError.
        """
        if isinstance(keys, string_types):
            if self._dispatch[keys] == 'memory':
                return self._memory_store.load(cluster, keys)
            elif self._dispatch[keys] == 'disk':
                return self._disk_store.load(cluster, keys)
        elif keys is None or isinstance(keys, list):
            data_memory = self._memory_store.load(cluster,
                                                  self._filter(keys, 'memory'))
            data_disk = self._disk_store.load(cluster,
                                              self._filter(keys, 'disk'))
            return _concatenate_dicts(data_memory, data_disk)
        else:
            raise ValueError("'keys' should be a list or a string.")

    def clear(self):
        """Clear the cluster store."""
        self._memory_store.clear()
        self._disk_store.clear()

    def delete(self, clusters):
        """Delete all information about the specified clusters."""
        self._memory_store.delete(clusters)
        self._disk_store.delete(clusters)
#------------------------------------------------------------------------------
# Cluster store
#------------------------------------------------------------------------------
class ClusterStore(object):
    """Hold per-cluster data generated from a model, in memory or on disk.

    StoreItem subclasses registered with register_item() declare the
    fields and how to compute them from the model.
    """
    def __init__(self, model=None, path=None):
        assert model is not None
        assert path is not None
        self._model = model
        self._store = Store(path)
        self._items = []

    def register_item(self, item_cls):
        """Register a StoreItem instance in the store."""
        item = item_cls(model=self._model, store=self._store)
        assert item.fields is not None
        # Register the storage location for that item.
        for name, location in item.fields:
            self._store.register_field(name, location)
        # Register the StoreItem instance.
        self._items.append(item)
        # Create the self.<name>(cluster) method for loading.
        for name, _ in item.fields:
            # BUG FIX: bind 'name' as a default argument. A plain closure
            # is late-binding, so every generated accessor would load the
            # *last* registered field instead of its own.
            setattr(self, name,
                    lambda cluster, _name=name:
                    self._store.load(cluster, _name))

    def update(self, up):
        """Apply an UpdateInfo (merge or assign) to the store."""
        # Delete the deleted clusters from the store.
        self._store.delete(up.deleted)
        if up.description == 'merge':
            self.merge(up)
        elif up.description == 'assign':
            self.assign(up)
        else:
            raise NotImplementedError()

    def merge(self, up):
        """Forward a merge update to every registered item."""
        for item in self._items:
            item.merge(up)

    def assign(self, up):
        """Forward an assign update to every registered item."""
        for item in self._items:
            item.assign(up)

    def generate(self, spikes_per_cluster):
        """Populate the cache for all registered fields and the specified
        clusters."""
        assert isinstance(spikes_per_cluster, dict)
        clusters = sorted(spikes_per_cluster.keys())
        self._store.delete(clusters)
        for item in self._items:
            for cluster in clusters:
                item.store_from_model(cluster, spikes_per_cluster[cluster])
class StoreItem(object):
    """Base class for a piece of data stored for every cluster.

    Subclasses set ``fields`` and implement ``store_from_model()``;
    ``merge()`` and ``assign()`` may be overridden.
    """
    fields = None  # list of (field_name, storage_location)

    def __init__(self, model=None, store=None):
        self.model = model
        self.store = store

    def merge(self, up):
        """May be overridden. Defaults to assign()."""
        self.assign(up)

    def assign(self, up):
        """May be overridden."""
        # Recompute the data of every newly-created cluster from the model.
        for cluster in up.added:
            spikes = up.new_spikes_per_cluster[cluster]
            self.store_from_model(cluster, spikes)

    def store_from_model(self, cluster, spikes):
        """Must be overridden."""
        raise NotImplementedError()
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,893 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/tests/test_store.py | # -*- coding: utf-8 -*-
"""Test cluster store."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os.path as op
import numpy as np
from numpy.testing import assert_array_equal as ae
from ....utils.logging import set_level
from ....utils.tempdir import TemporaryDirectory
from ..store import MemoryStore, DiskStore, Store, ClusterStore, StoreItem
from .._utils import _spikes_per_cluster
from .._update_info import UpdateInfo
#------------------------------------------------------------------------------
# Test data stores
#------------------------------------------------------------------------------
def test_memory_store():
    """Check MemoryStore store/load/delete semantics."""
    ms = MemoryStore()
    # Loading unknown clusters or keys returns empty/None defaults.
    assert ms.load(2) == {}
    assert ms.load(3).get('key', None) is None
    assert ms.load(3) == {}
    assert ms.load(3, ['key']) == {'key': None}
    assert ms.load(3) == {}
    assert ms.clusters == []

    ms.store(3, key='a')
    assert ms.load(3) == {'key': 'a'}
    assert ms.load(3, ['key']) == {'key': 'a'}
    assert ms.load(3, 'key') == 'a'
    assert ms.clusters == [3]

    # Storing a new key must not erase previously stored keys.
    ms.store(3, key_bis='b')
    assert ms.load(3) == {'key': 'a', 'key_bis': 'b'}
    assert ms.load(3, ['key']) == {'key': 'a'}
    assert ms.load(3, ['key_bis']) == {'key_bis': 'b'}
    assert ms.load(3, ['key', 'key_bis']) == {'key': 'a', 'key_bis': 'b'}
    assert ms.load(3, 'key_bis') == 'b'
    assert ms.clusters == [3]

    # Deletion must be idempotent for missing clusters.
    ms.delete([2, 3])
    assert ms.load(3) == {}
    assert ms.load(3, ['key']) == {'key': None}
    assert ms.clusters == []
def test_disk_store():
    """Check DiskStore store/load/delete semantics with HDF5 files."""
    a = np.random.rand(2, 4)
    b = np.random.rand(3, 5)

    def _assert_equal(d_0, d_1):
        """Test the equality of two dictionaries containing NumPy arrays."""
        assert sorted(d_0.keys()) == sorted(d_1.keys())
        for key in d_0.keys():
            ae(d_0[key], d_1[key])

    with TemporaryDirectory() as tempdir:
        ds = DiskStore(tempdir)

        # Loading unknown clusters or keys returns empty/None defaults.
        assert ds.load(2) == {}
        assert ds.load(3).get('key', None) is None
        assert ds.load(3) == {}
        assert ds.load(3, ['key']) == {'key': None}
        assert ds.load(3) == {}
        assert ds.clusters == []

        ds.store(3, key=a)
        _assert_equal(ds.load(3), {'key': a})
        _assert_equal(ds.load(3, ['key']), {'key': a})
        ae(ds.load(3, 'key'), a)
        assert ds.clusters == [3]

        # Storing a new key must not erase previously stored keys.
        ds.store(3, key_bis=b)
        _assert_equal(ds.load(3), {'key': a, 'key_bis': b})
        _assert_equal(ds.load(3, ['key']), {'key': a})
        _assert_equal(ds.load(3, ['key_bis']), {'key_bis': b})
        _assert_equal(ds.load(3, ['key', 'key_bis']), {'key': a, 'key_bis': b})
        ae(ds.load(3, 'key_bis'), b)
        assert ds.clusters == [3]

        # Deletion must be idempotent for missing clusters.
        ds.delete([2, 3])
        assert ds.load(3) == {}
        assert ds.load(3, ['key']) == {'key': None}
        assert ds.clusters == []
def test_store():
    """Check the combined memory/disk Store dispatching."""
    with TemporaryDirectory() as tempdir:
        cs = Store(tempdir)
        model = {'spike_clusters': np.random.randint(size=100, low=0, high=10)}

        def reset(model):
            # Clear the store and regenerate data for all clusters.
            cs.clear()
            # Find unique clusters.
            clusters = np.unique(model['spike_clusters'])
            # Load data for all clusters.
            generate(clusters)
            ae(cs.clusters, clusters)

        def generate(clusters):
            # Store one field in memory and one on disk for each cluster.
            for cluster in clusters:
                cs.store(cluster,
                         data_memory=np.array([1, 2]),
                         location='memory')
                cs.store(cluster,
                         data_disk=np.array([3, 4]),
                         location='disk')

        reset(model)
        ae(cs.load(3, 'data_memory'), [1, 2])
        ae(cs.load(5, 'data_disk'), [3, 4])
def test_cluster_store():
    """End-to-end check of ClusterStore generation and merge updates."""
    with TemporaryDirectory() as tempdir:
        # We define some data and a model.
        n_spikes = 100
        n_clusters = 10
        spike_ids = np.arange(n_spikes)
        spike_clusters = np.random.randint(size=n_spikes,
                                           low=0, high=n_clusters)
        spikes_per_cluster = _spikes_per_cluster(spike_ids, spike_clusters)
        model = {'spike_clusters': spike_clusters}

        # We initialize the ClusterStore.
        cs = ClusterStore(model=model, path=tempdir)

        # We create a n_spikes item to be stored in memory,
        # and we define how to generate it for a given cluster.
        class MyItem(StoreItem):
            fields = [('n_spikes', 'memory')]

            def store_from_model(self, cluster, spikes):
                self.store.store(cluster, n_spikes=len(spikes))

            def merge(self, up):
                # The merged cluster's count is the sum of the deleted ones.
                n = sum(len(up.old_spikes_per_cluster[cl])
                        for cl in up.deleted)
                self.store.store(up.added[0], n_spikes=n)

        cs.register_item(MyItem)

        # Now we generate the store.
        cs.generate(spikes_per_cluster)

        # We check that the n_spikes field has successfully been created.
        for cluster in sorted(spikes_per_cluster):
            assert cs.n_spikes(cluster) == len(spikes_per_cluster[cluster])

        # Merge.
        spc = spikes_per_cluster
        spikes = np.sort(np.concatenate([spc[0], spc[1]]))
        spc[20] = spikes
        up = UpdateInfo(added=[20], deleted=[0, 1],
                        spikes=spikes,
                        new_spikes_per_cluster=spc,
                        old_spikes_per_cluster=spc,)
        cs.merge(up)
        assert cs.n_spikes(20) == len(spikes)
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,894 | apeyrache/phy | refs/heads/master | /phy/io/tests/test_kwik_model.py | # -*- coding: utf-8 -*-
"""Tests of Kwik file opening routines."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from random import randint
import numpy as np
from numpy.testing import assert_array_equal as ae
import h5py
from pytest import raises
from ...io.mock.artificial import (artificial_spike_times,
artificial_spike_clusters,
artificial_features,
artificial_masks,
artificial_traces)
from ...electrode.mea import MEA, staggered_positions
from ...utils.tempdir import TemporaryDirectory
from ..h5 import open_h5
from ..kwik_model import (KwikModel, _list_channel_groups, _list_channels,
_list_recordings,
_list_clusterings, _kwik_filenames)
from ..mock.kwik import create_mock_kwik
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
# Dimensions of the mock dataset shared by every test in this module.
_N_CLUSTERS = 10
_N_SPIKES = 50
_N_CHANNELS = 28
_N_FETS = 2  # features per channel
_N_SAMPLES_TRACES = 3000
def test_kwik_utility():
    """Check the Kwik-file introspection helpers on a mock dataset."""
    expected_channels = list(range(_N_CHANNELS))
    with TemporaryDirectory() as tempdir:
        # Build a throwaway mock .kwik file to introspect.
        path = create_mock_kwik(tempdir,
                                n_clusters=_N_CLUSTERS,
                                n_spikes=_N_SPIKES,
                                n_channels=_N_CHANNELS,
                                n_features_per_channel=_N_FETS,
                                n_samples_traces=_N_SAMPLES_TRACES)
        model = KwikModel(path)
        h5_file = model._kwik.h5py_file
        assert _list_channel_groups(h5_file) == [1]
        assert _list_recordings(h5_file) == [0]
        assert _list_clusterings(h5_file, 1) == ['main']
        assert _list_channels(h5_file, 1) == expected_channels
def test_kwik_open():
    """Open a mock Kwik file and check every exposed property."""
    with TemporaryDirectory() as tempdir:
        # Create the test HDF5 file in the temporary directory.
        filename = create_mock_kwik(tempdir,
                                    n_clusters=_N_CLUSTERS,
                                    n_spikes=_N_SPIKES,
                                    n_channels=_N_CHANNELS,
                                    n_features_per_channel=_N_FETS,
                                    n_samples_traces=_N_SAMPLES_TRACES)
        # Constructing without a filename must raise.
        with raises(ValueError):
            KwikModel()
        # Test implicit open() method.
        kwik = KwikModel(filename)
        # Accessing metadata must not raise.
        kwik.metadata
        # Shape/consistency checks on every dataset.
        assert kwik.channels == list(range(_N_CHANNELS))
        assert kwik.n_channels == _N_CHANNELS
        assert kwik.n_spikes == _N_SPIKES
        assert kwik.spike_times[:].shape == (_N_SPIKES,)
        assert kwik.spike_clusters[:].shape == (_N_SPIKES,)
        assert kwik.spike_clusters[:].min() == 0
        assert kwik.spike_clusters[:].max() == _N_CLUSTERS - 1
        assert kwik.features.shape == (_N_SPIKES,
                                       _N_CHANNELS * _N_FETS)
        # Reading one row of features must not raise.
        kwik.features[0, ...]
        assert kwik.masks.shape == (_N_SPIKES, _N_CHANNELS)
        assert kwik.traces.shape == (_N_SAMPLES_TRACES, _N_CHANNELS)
        # TODO: fix this
        # print(kwik.waveforms[0].shape)
        assert kwik.waveforms[10].shape == (1, 40, _N_CHANNELS)
        assert kwik.waveforms[[10, 20]].shape == (2, 40, _N_CHANNELS)
        # Invalid clustering/recording/channel-group values are rejected.
        with raises(ValueError):
            kwik.clustering = 'foo'
        with raises(ValueError):
            kwik.recording = 47
        with raises(ValueError):
            kwik.channel_group = 42
        # TODO: test cluster_metadata.
        kwik.cluster_metadata
        # Test probe.
        assert isinstance(kwik.probe, MEA)
        assert kwik.probe.positions.shape == (_N_CHANNELS, 2)
        ae(kwik.probe.positions, staggered_positions(_N_CHANNELS))
        # Not implemented yet.
        with raises(NotImplementedError):
            kwik.save()
        kwik.close()
def test_kwik_open_no_kwx():
    """A .kwik file should open even when the .kwx file is missing."""
    with TemporaryDirectory() as tempdir:
        # Mock dataset deliberately created without the .kwx part.
        path = create_mock_kwik(tempdir,
                                n_clusters=_N_CLUSTERS,
                                n_spikes=_N_SPIKES,
                                n_channels=_N_CHANNELS,
                                n_features_per_channel=_N_FETS,
                                n_samples_traces=_N_SAMPLES_TRACES,
                                with_kwx=False)
        model = KwikModel(path)
        model.close()
def test_kwik_open_no_kwd():
    """A .kwik file should open even when the .kwd file is missing."""
    with TemporaryDirectory() as tempdir:
        # Mock dataset deliberately created without the .kwd part.
        path = create_mock_kwik(tempdir,
                                n_clusters=_N_CLUSTERS,
                                n_spikes=_N_SPIKES,
                                n_channels=_N_CHANNELS,
                                n_features_per_channel=_N_FETS,
                                n_samples_traces=_N_SAMPLES_TRACES,
                                with_kwd=False)
        model = KwikModel(path)
        model.close()
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,895 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/_update_info.py | # -*- coding: utf-8 -*-
"""UpdateInfo class."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
from ...utils._bunch import Bunch
#------------------------------------------------------------------------------
# UpdateInfo class
#------------------------------------------------------------------------------
def update_info(**kwargs):
    """Build a `Bunch` describing a clustering change.

    Any keyword argument overrides the corresponding default field.
    """
    info = {
        # Nature of the update: 'merge', 'assign', or 'metadata_<name>'.
        'description': None,
        # All spikes affected by the update.
        'spikes': [],
        # Newly-created clusters.
        'added': [],
        # Removed clusters.
        'deleted': [],
        # Pairs of (old_cluster, new_cluster).
        'descendants': [],
        # Clusters whose metadata changed.
        'metadata_changed': [],
    }
    info.update(kwargs)
    return Bunch(info)
# Alias kept for backward compatibility with class-style usage.
UpdateInfo = update_info
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,896 | apeyrache/phy | refs/heads/master | /phy/io/mock/artificial.py | # -*- coding: utf-8 -*-
"""Mock datasets."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
import numpy.random as nr
from ...ext import six
from ...utils._color import _random_color
from ..base_model import BaseModel
from ...cluster.manual.cluster_info import ClusterMetadata
from ...electrode.mea import MEA, staggered_positions
#------------------------------------------------------------------------------
# Artificial data
#------------------------------------------------------------------------------
def artificial_waveforms(n_spikes=None, n_samples=None, n_channels=None):
    """Random waveforms of shape (n_spikes, n_samples, n_channels)."""
    # TODO: more realistic waveforms.
    shape = (n_spikes, n_samples, n_channels)
    return .25 * nr.normal(size=shape)
def artificial_features(n_spikes=None, n_features=None):
    """Random feature matrix of shape (n_spikes, n_features)."""
    shape = (n_spikes, n_features)
    return .25 * nr.normal(size=shape)
def artificial_masks(n_spikes=None, n_channels=None):
    """Random masks drawn uniformly from [0, 1), shape (n_spikes, n_channels)."""
    shape = (n_spikes, n_channels)
    return nr.uniform(size=shape)
def artificial_traces(n_samples, n_channels):
    """Random raw traces of shape (n_samples, n_channels)."""
    # TODO: more realistic traces.
    shape = (n_samples, n_channels)
    return .25 * nr.normal(size=shape)
def artificial_spike_clusters(n_spikes, n_clusters, low=0):
    """Random cluster assignment per spike, in [low, max(1, n_clusters))."""
    high = max(1, n_clusters)
    return nr.randint(size=n_spikes, low=low, high=high)
def artificial_spike_times(n_spikes, max_isi=50):
    """Random non-decreasing spike times (cumulative random ISIs, in samples)."""
    # TODO: switch from sample to seconds in the way spike times are
    # represented throughout the package.
    isis = nr.randint(low=0, high=max_isi, size=n_spikes)
    return np.cumsum(isis)
#------------------------------------------------------------------------------
# Artificial Model
#------------------------------------------------------------------------------
class MockModel(BaseModel):
    """In-memory model filled with random data, used for tests and demos."""

    # Dataset dimensions (class-level constants).
    n_channels = 28
    n_features = 28 * 4
    n_spikes = 1000
    n_samples_traces = 20000
    n_samples_waveforms = 40
    n_clusters = 10

    def __init__(self):
        # BUG FIX: the original called super(BaseModel, self).__init__(),
        # which skips BaseModel in the MRO (only object.__init__ ran).
        # Passing the subclass correctly initializes BaseModel.
        super(MockModel, self).__init__()
        self.name = 'mock'
        self._metadata = {'description': 'A mock model.'}
        self._cluster_metadata = ClusterMetadata()

        @self._cluster_metadata.default
        def color(cluster):
            # Every cluster gets a random color by default.
            return _random_color()

        positions = staggered_positions(self.n_channels)
        self._probe = MEA(positions=positions)
        # Random datasets with the shapes declared above.
        self._traces = artificial_traces(self.n_samples_traces,
                                         self.n_channels)
        self._spike_clusters = artificial_spike_clusters(self.n_spikes,
                                                         self.n_clusters)
        self._spike_times = artificial_spike_times(self.n_spikes)
        self._features = artificial_features(self.n_spikes, self.n_features)
        self._masks = artificial_masks(self.n_spikes, self.n_channels)
        self._waveforms = artificial_waveforms(self.n_spikes,
                                               self.n_samples_waveforms,
                                               self.n_channels)

    @property
    def metadata(self):
        """Free-form metadata dictionary."""
        return self._metadata

    @property
    def traces(self):
        """Raw traces, shape (n_samples_traces, n_channels)."""
        return self._traces

    @property
    def spike_times(self):
        """Non-decreasing spike times, shape (n_spikes,)."""
        return self._spike_times

    @property
    def spike_clusters(self):
        """Cluster id of every spike, shape (n_spikes,)."""
        return self._spike_clusters

    @property
    def cluster_metadata(self):
        """ClusterMetadata instance with a random default color."""
        return self._cluster_metadata

    @property
    def features(self):
        """Feature matrix, shape (n_spikes, n_features)."""
        return self._features

    @property
    def masks(self):
        """Masks, shape (n_spikes, n_channels)."""
        return self._masks

    @property
    def waveforms(self):
        """Waveforms, shape (n_spikes, n_samples_waveforms, n_channels)."""
        return self._waveforms

    @property
    def probe(self):
        """MEA probe with staggered channel positions."""
        return self._probe
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,897 | apeyrache/phy | refs/heads/master | /phy/plot/tests/test_ccg.py | # -*- coding: utf-8 -*-
"""Test CCG plotting."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
from ..ccg import plot_ccg
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_plot_ccg():
    """Smoke test: plotting a random CCG must not raise."""
    n_bins = 51
    counts = np.random.randint(size=n_bins, low=10, high=50)
    plot_ccg(counts, baseline=20, color='g')
    # plt.show()
# plt.show()
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,898 | apeyrache/phy | refs/heads/master | /phy/utils/_bunch.py | # -*- coding: utf-8 -*-
"""Bunch class."""
#------------------------------------------------------------------------------
# Bunch class
#------------------------------------------------------------------------------
class Bunch(dict):
    """A dictionary whose items are also accessible as attributes."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Alias the attribute namespace to the mapping itself so item
        # and attribute access share the same underlying storage.
        self.__dict__ = self
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,899 | apeyrache/phy | refs/heads/master | /phy/utils/testing.py | # -*- coding: utf-8 -*-
"""Utility functions used for tests."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import sys
import time
from contextlib import contextmanager
from ..ext.six import StringIO
#------------------------------------------------------------------------------
# Utility functions
#------------------------------------------------------------------------------
@contextmanager
def captured_output():
    """Temporarily redirect stdout/stderr to in-memory buffers.

    Yields the (stdout, stderr) StringIO buffers.
    """
    captured_out = StringIO()
    captured_err = StringIO()
    saved_streams = (sys.stdout, sys.stderr)
    sys.stdout, sys.stderr = captured_out, captured_err
    try:
        yield captured_out, captured_err
    finally:
        # Always restore the real streams, even if the body raised.
        sys.stdout, sys.stderr = saved_streams
def show_test(canvas, n_frames=2):
    """Show a VisPy canvas for a fraction of a second.

    Renders `n_frames` update/process-events cycles, sleeping roughly one
    60 Hz frame between them.
    """
    with canvas as c:
        for _frame in range(n_frames):
            c.update()
            c.app.process_events()
            time.sleep(1. / 60.)
def show_colored_canvas(color, n_frames=5):
    """Show an empty VisPy canvas with the given background color for a
    fraction of a second."""
    from vispy import app, gloo
    canvas = app.Canvas()

    @canvas.connect
    def on_paint(e):
        # Fill the whole canvas with the requested color.
        gloo.clear(color)

    show_test(canvas, n_frames=n_frames)
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,900 | apeyrache/phy | refs/heads/master | /phy/cluster/manual/selector.py | # -*- coding: utf-8 -*-
"""Selector structure."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
from ...ext import six
from ...utils.array import _as_array
from ._utils import _unique, _spikes_in_clusters
from ...utils.logging import debug, info, warn
#------------------------------------------------------------------------------
# Selector class
#------------------------------------------------------------------------------
class Selector(object):
    """Object representing a selection of spikes or clusters.

    At most `n_spikes_max` spikes are kept selected at any time; larger
    selections are pruned by `_subset()`.
    """
    def __init__(self, spike_clusters, n_spikes_max=None):
        # Array mapping each spike index to its cluster id.
        self._spike_clusters = spike_clusters
        # Upper bound on the selection size (None = unbounded).
        self._n_spikes_max = n_spikes_max
        # Currently-selected spike indices (always an int64 array).
        self._selected_spikes = np.array([], dtype=np.int64)
    @property
    def n_spikes_max(self):
        """Maximum number of spikes allowed in the selection."""
        return self._n_spikes_max
    @n_spikes_max.setter
    def n_spikes_max(self, value):
        self._n_spikes_max = value
        # Update the selected spikes accordingly (re-prune the selection).
        self.selected_spikes = self._subset()
        if self._n_spikes_max is not None:
            assert len(self._selected_spikes) <= self._n_spikes_max
    def _subset(self, spikes=None, n_spikes_max=None):
        """Prune the current selection to get at most n_spikes_max spikes.

        Roughly half the budget comes from regular sampling over the whole
        selection, the rest from its start and end.
        """
        if n_spikes_max is None:
            n_spikes_max = self._n_spikes_max
        if spikes is None:
            spikes = self._selected_spikes
        # Nothing to do if the selection already satisfies n_spikes_max.
        if n_spikes_max is None or len(spikes) <= n_spikes_max:
            return spikes
        # Fill 50% regularly sampled spikes for the selection.
        step = int(np.clip(2. / n_spikes_max * len(spikes),
                           1, len(spikes)))
        my_spikes = spikes[::step]
        assert len(my_spikes) <= len(spikes)
        assert len(my_spikes) <= n_spikes_max
        # Number of remaining spikes to find in the selection.
        n_start = (n_spikes_max - len(my_spikes)) // 2
        n_end = n_spikes_max - len(my_spikes) - n_start
        assert (n_start >= 0) & (n_end >= 0)
        # The other 50% come from the start and end of the selection.
        my_spikes = np.r_[spikes[:n_start],
                          my_spikes,
                          spikes[-n_end:]]
        # Deduplicate overlapping picks.
        # NOTE(review): assumes _unique returns sorted unique values —
        # confirm in ._utils.
        my_spikes = _unique(my_spikes)
        assert len(my_spikes) <= n_spikes_max
        return my_spikes
    @property
    def selected_spikes(self):
        """Labels of the selected spikes."""
        return self._selected_spikes
    @selected_spikes.setter
    def selected_spikes(self, value):
        """Explicitly select a number of spikes."""
        value = _as_array(value)
        # Make sure there are less spikes than n_spikes_max.
        self._selected_spikes = self._subset(value)
    @property
    def selected_clusters(self):
        """Clusters containing at least one selected spike."""
        return _unique(self._spike_clusters[self._selected_spikes])
    @selected_clusters.setter
    def selected_clusters(self, value):
        """Select spikes belonging to a number of clusters."""
        # TODO: smarter subselection: select n_spikes_max/n_clusters spikes
        # per cluster, so that the number of spikes per cluster is independent
        # from the sizes of the clusters.
        value = _as_array(value)
        # All spikes from the selected clusters.
        spikes = _spikes_in_clusters(self._spike_clusters, value)
        # Make sure there are less spikes than n_spikes_max.
        self.selected_spikes = self._subset(spikes)
    def update(self, up=None):
        """Called when clustering has changed."""
        # TODO
        pass
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,901 | apeyrache/phy | refs/heads/master | /phy/utils/tests/test_event.py | # -*- coding: utf-8 -*-
"""Test event system."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import raises
from ..event import EventEmitter, ProgressReporter
#------------------------------------------------------------------------------
# Test event system
#------------------------------------------------------------------------------
def test_event_system():
    """Callbacks fire with positional/keyword args and can be unconnected."""
    emitter = EventEmitter()
    calls = []

    @emitter.connect
    def on_my_event(arg, kwarg=None):
        calls.append((arg, kwarg))

    # Missing required argument propagates as a TypeError.
    with raises(TypeError):
        emitter.my_event()
    emitter.my_event('a')
    assert calls == [('a', None)]
    emitter.my_event('b', 'c')
    assert calls == [('a', None), ('b', 'c')]
    # After unconnect, emitting no longer invokes the callback.
    emitter.unconnect(on_my_event)
    emitter.my_event('b', 'c')
    assert calls == [('a', None), ('b', 'c')]
#------------------------------------------------------------------------------
# Test progress reporter
#------------------------------------------------------------------------------
def test_progress_reporter():
    """Test the progress reporter."""
    pr = ProgressReporter()
    _reported = []
    _completed = []
    @pr.connect
    def on_report(value, value_max):
        # value is the sum of the values, value_max the sum of the max values
        _reported.append((value, value_max))
    @pr.connect
    def on_complete():
        _completed.append(True)
    # Two channels: totals add up, nothing reported before any set().
    pr.set_max(channel_1=10, channel_2=15)
    assert _reported == []
    assert pr.current() == 0
    assert pr.total() == 25
    # Setting one channel reports the aggregated (current, total).
    pr.set(channel_1=7)
    assert _reported == [(7, 25)]
    assert pr.current() == 7
    assert pr.total() == 25
    # A value above its max, or a max below the current value, is rejected.
    with raises(ValueError):
        pr.set(channel_1=11)
    with raises(ValueError):
        pr.set_max(channel_1=6)
    pr.set(channel_2=13)
    assert _reported[-1] == (20, 25)
    assert pr.current() == 20
    assert pr.total() == 25
    # increment() bumps each named channel by one.
    pr.increment('channel_1', 'channel_2')
    assert _reported[-1] == (22, 25)
    assert pr.current() == 22
    assert pr.total() == 25
    # Reaching every max fires the completion callback exactly once.
    pr.set(channel_1=10, channel_2=15)
    assert _reported[-1] == (25, 25)
    assert _completed == [True]
    assert pr.is_complete()
    # Raising a max re-opens the reporter until the new total is reached.
    pr.set_max(channel_2=20)
    assert not pr.is_complete()
    pr.set(channel_1=10, channel_2=20)
    assert pr.is_complete()
| {"/phy/cluster/manual/tests/test_session.py": ["/phy/cluster/manual/session.py", "/phy/io/mock/artificial.py", "/phy/io/mock/kwik.py", "/phy/plot/waveforms.py"], "/phy/io/mock/tests/test_kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/session.py": ["/phy/utils/_misc.py", "/phy/utils/event.py", "/phy/io/kwik_model.py", "/phy/cluster/manual/selector.py", "/phy/cluster/manual/store.py"], "/phy/cluster/manual/tests/test_wizard.py": ["/phy/cluster/manual/wizard.py"], "/phy/io/mock/kwik.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py"], "/phy/cluster/manual/store.py": ["/phy/utils/_misc.py"], "/phy/cluster/manual/tests/test_store.py": ["/phy/cluster/manual/store.py", "/phy/cluster/manual/_update_info.py"], "/phy/io/tests/test_kwik_model.py": ["/phy/io/mock/artificial.py", "/phy/io/kwik_model.py", "/phy/io/mock/kwik.py"], "/phy/cluster/manual/_update_info.py": ["/phy/utils/_bunch.py"], "/phy/plot/tests/test_ccg.py": ["/phy/plot/ccg.py"], "/phy/utils/tests/test_event.py": ["/phy/utils/event.py"]} |
51,910 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /Visualize_results.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 09/01/2020 21:14
@Author: XinZhi Yao
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
def read_file(loss_acc_file):
    """Parse a tab-separated 'loss<TAB>acc(%)' log file.

    Returns (loss_list, acc_list); accuracies are rescaled from percent
    to the [0, 1] range.
    """
    losses = []
    accuracies = []
    with open(loss_acc_file) as fh:
        for line in fh:
            parts = line.strip().split('\t')
            losses.append(float(parts[0]))
            accuracies.append(float(parts[1]) / 100)
    return losses, accuracies
def draw_acc_curve(train_acc_list: list, valid_acc_list: list):
    """Plot train/valid accuracy against the epoch index.

    Raises ValueError if the two series differ in length.
    """
    if len(train_acc_list) != len(valid_acc_list):
        raise ValueError
    epochs = list(range(len(train_acc_list)))
    plt.title('ACC curve')
    plt.plot(epochs, train_acc_list, color='green', label='train_acc')
    plt.plot(epochs, valid_acc_list, color='red', label='valid_acc')
    plt.legend()
    plt.xlabel('epoch')
    plt.ylabel('acc')
    plt.show()
def draw_loss_curve(train_loss_list: list, valid_loss_list: list):
    """Plot train/valid loss against the epoch index.

    Raises ValueError if the two series differ in length.
    """
    if len(train_loss_list) != len(valid_loss_list):
        raise ValueError
    epoch = [i for i in range(len(train_loss_list))]
    plt.title('loss curve')
    plt.plot(epoch, train_loss_list, color='green', label='train_loss')
    plt.plot(epoch, valid_loss_list, color='red', label='valid_loss')
    plt.legend()
    plt.xlabel('epoch')
    # BUG FIX: the y axis was labelled 'acc' on a loss curve.
    plt.ylabel('loss')
    plt.show()
if __name__ == '__main__':
    # Loss/accuracy logs produced during training.
    ques_train_log = 'model/ques_train_loss_acc.txt'
    ques_valid_log = 'model/ques_valid_loss_acc.txt'
    ag_train_log = 'model/ag_train_loss_acc.txt'
    ag_valid_log = 'model/ag_valid_loss_acc.txt'
    ques_train_loss, ques_train_acc = read_file(ques_train_log)
    ques_valid_loss, ques_valid_acc = read_file(ques_valid_log)
    ag_train_loss, ag_train_acc = read_file(ag_train_log)
    ag_valid_loss, ag_valid_acc = read_file(ag_valid_log)
    # Other curves, kept for reference:
    # draw_acc_curve(ques_train_acc, ques_valid_acc)
    # draw_acc_curve(ag_train_acc, ag_valid_acc)
    # draw_loss_curve(ques_train_loss, ques_valid_loss)
    draw_loss_curve(ag_train_loss, ag_valid_loss)
51,911 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /data/data_Statistics.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 09/01/2020 15:08
@Author: XinZhi Yao
"""
import os
from collections import defaultdict, OrderedDict
import matplotlib.pyplot as plt
def sort_dic_key(dic: dict):
    """Return an OrderedDict with the items of `dic` sorted by key (ascending)."""
    # sorted(dic) iterates the keys directly; the identity `key=` lambda
    # and explicit reverse=False of the original were redundant.
    return OrderedDict((key, dic[key]) for key in sorted(dic))
def read_file(file):
    """Read a 'label<TAB>text' corpus file.

    Returns (length -> count OrderedDict sorted by length,
             label -> count defaultdict).
    """
    length_counts = defaultdict(int)
    label_counts = defaultdict(int)
    with open(file) as fh:
        for line in fh:
            parts = line.strip().split('\t')
            # Sentence length measured in whitespace tokens.
            length_counts[len(parts[1].split())] += 1
            label_counts[parts[0]] += 1
    return sort_dic_key(length_counts), label_counts
def unzip_dic(count_dic: dict):
    """Split a mapping into parallel (keys, values) lists, preserving
    iteration order."""
    # BUG FIX: the original loop bound the name `len`, shadowing the
    # builtin for the rest of the loop body.
    return list(count_dic.keys()), list(count_dic.values())
def draw_len_bar(train_len_dic: dict, test_len_dic: dict):
    """Bar chart of sentence-length counts for train vs. valid data."""
    train_lens, train_counts = unzip_dic(train_len_dic)
    valid_lens, valid_counts = unzip_dic(test_len_dic)
    plt.bar(train_lens, train_counts, label='train_data')
    plt.bar(valid_lens, valid_counts, label='valid_data')
    plt.legend()
    plt.xlabel('length')
    plt.ylabel('count')
    plt.title('sentence length statistics')
    plt.show()
def draw_label_dic(train_label_dic: dict, test_label_dic: dict):
    """Bar chart of per-label sample counts for train vs. valid data."""
    # BUG FIX: labels were collected from the train dict only, so any
    # label present only in the valid data was silently dropped.
    label_list = list(set(train_label_dic) | set(test_label_dic))
    # .get avoids inserting spurious zero entries when the arguments are
    # defaultdicts (as returned by read_file).
    train_count_list = [train_label_dic.get(key, 0) for key in label_list]
    test_count_list = [test_label_dic.get(key, 0) for key in label_list]
    plt.bar(label_list, train_count_list, label='train_data')
    plt.bar(label_list, test_count_list, label='valid_data')
    plt.legend()
    plt.xlabel('label')
    plt.ylabel('count')
    plt.title('Label count statistics.')
    plt.show()
def data_statistics(train_file, test_file):
    """Load both corpus files and plot their label distributions."""
    train_lengths, train_labels = read_file(train_file)
    test_lengths, test_labels = read_file(test_file)
    # Length histogram, disabled for now:
    # draw_len_bar(train_lengths, test_lengths)
    draw_label_dic(train_labels, test_labels)
if __name__ == '__main__':
    # Corpus directories for the two datasets.
    ag_dir = 'AG_corpus_data'
    ques_dir = 'question_clas'
    # AG-news statistics, disabled for now:
    # data_statistics(os.path.join(ag_dir, 'AG.train.txt'),
    #                 os.path.join(ag_dir, 'AG.valid.txt'))
    data_statistics(os.path.join(ques_dir, 'question.train.txt'),
                    os.path.join(ques_dir, 'question.valid.txt'))
| {"/main_attention_lstm.py": ["/utils.py", "/data_loader.py", "/Attention_BiLSTM_model.py"]} |
51,912 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /utils.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 07/01/2020 16:03
@Author: XinZhi Yao
"""
import os
import functools
def logging(s, log_path, log_=True):
    """Print `s` and, when `log_` is True, append it to `log_path`."""
    # NOTE: this function shadows the stdlib `logging` module name in
    # this file; the name is kept for backward compatibility.
    print(s)
    if not log_:
        return
    with open(log_path, 'a+') as log_file:
        log_file.write(s + '\n')
def get_logger(log_path, **kwargs):
    """Return the `logging` callable with `log_path` (plus any extra
    keyword arguments) pre-bound, e.g. ``log = get_logger('log.txt')``.
    """
    log_fn = functools.partial(logging, log_path=log_path, **kwargs)
    return log_fn
def save_loss_acc_file(loss_list, acc_list, save_file):
    """Write parallel loss/accuracy lists as 'loss<TAB>acc' lines.

    Raises ValueError when the two lists differ in length (the original
    range(len(...)) loop would have raised a bare IndexError instead).
    """
    if len(loss_list) != len(acc_list):
        raise ValueError('loss_list and acc_list must have equal length')
    with open(save_file, 'w') as wf:
        # zip over pairs instead of indexing with range(len(...)).
        for loss, acc in zip(loss_list, acc_list):
            wf.write('{0}\t{1}\n'.format(loss, acc))
    print('{0} save done.'.format(os.path.basename(save_file)))
| {"/main_attention_lstm.py": ["/utils.py", "/data_loader.py", "/Attention_BiLSTM_model.py"]} |
51,913 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /data/AG_corpus_data/AG_pre.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 09/01/2020 9:57
@Author: XinZhi Yao
"""
from collections import defaultdict, OrderedDict
from string import punctuation
import matplotlib.pyplot as plt
def unzip_dic(count_dic: dict):
    """Split a mapping into parallel (keys, values) lists, preserving
    iteration order."""
    # BUG FIX: the original loop variable was named `len`, shadowing the
    # builtin for the rest of the loop body.
    return list(count_dic.keys()), list(count_dic.values())
def draw_len_dis(train_len_count_dic: dict, test_len_count_dic: dict):
    """Bar chart of sentence-length counts for train vs. valid data."""
    train_lens, train_counts = unzip_dic(train_len_count_dic)
    valid_lens, valid_counts = unzip_dic(test_len_count_dic)
    plt.bar(train_lens, train_counts, label='train_data')
    plt.bar(valid_lens, valid_counts, label='valid_data')
    plt.legend()
    plt.xlabel('length')
    plt.ylabel('count')
    plt.title('Sentence length statistics')
    plt.show()
def AG_pre(text_file, label_file, out_file):
    """Pre-process an AG-news split into 'label<TAB>text' lines.

    Lower-cases each text line, replaces every punctuation character with
    a space, writes the paired label/text file, and returns an OrderedDict
    mapping token count -> number of sentences, sorted by token count.

    Raises ValueError when the text and label files differ in length.
    """
    text_list = []
    length_dic = defaultdict(int)
    with open(text_file) as f:
        for line in f:
            text = line.strip().lower()
            # BUG FIX: each replacement must build on the previous one;
            # the original restarted from the raw line on every pass, so
            # only the LAST punctuation character was ever stripped.
            for punc in punctuation:
                text = text.replace(punc, ' ')
            length_dic[len(text.split())] += 1
            text_list.append(text)
    length_sort_dic = OrderedDict(
        (key, length_dic[key]) for key in sorted(length_dic))
    data_size = len(text_list)
    with open(label_file) as f:
        label_list = [line.strip() for line in f]
    label_size = len(label_list)
    label_set = set(label_list)
    print('data_size: {0} | label_size: {1}'.format(data_size, label_size))
    print('length_len: {0}'.format(length_sort_dic))
    print('label: {0}'.format(label_set))
    print('-' * 90)
    if data_size != label_size:
        raise ValueError
    with open(out_file, 'w') as wf:
        for label, text in zip(label_list, text_list):
            wf.write('{0}\t{1}\n'.format(label, text))
    print('save done.')
    return length_sort_dic
if __name__ == '__main__':
    # (text, label, output) triples for both splits.
    train_files = ('train_texts.txt', 'train_labels.txt', 'AG.train.txt')
    test_files = ('test_texts.txt', 'test_labels.txt', 'AG.valid.txt')
    train_length_dic = AG_pre(*train_files)
    test_length_dic = AG_pre(*test_files)
    print(train_length_dic)
    print(test_length_dic)
    draw_len_dis(train_length_dic, test_length_dic)
51,914 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /data_loader.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 07/01/2020 16:29
@Author: XinZhi Yao
"""
import os
import torch
import numpy as np
import random
from collections import defaultdict
# Done(20200108): Add start and end symbols
# Done: label2index label_class, turn lb_list to lb_index_list.
# Done: lower in pre_file()
def pre_file(input, output, lower=True):
    """Convert one TREC-style question file to '<label>\t<sentence>' lines.

    Each input line looks like 'DESC:manner How did ...'; the coarse label is
    the part before ':' of the first token, the sentence is everything after
    it (lowercased unless ``lower`` is False).
    """
    # IMPROVEMENT: both files are now context-managed, so the output handle is
    # closed even if reading/writing raises (the old code leaked `wf` on error).
    with open(input) as f, open(output, 'w') as wf:
        for line in f:
            l = line.strip().split(' ')
            label = l[0].split(':')[0]
            sent = ' '.join(l[1:])
            if lower:
                sent = sent.lower()
            wf.write('{0}\t{1}\n'.format(label, sent))
    print('pre-process {0} done.'.format(os.path.basename(input)))
class VocabEntry(object):
    """Word <-> id vocabulary with <pad>/<unk> (and optional <s>, </s>) specials."""

    def __init__(self, word2id=None, start_end_symbol=False):
        super(VocabEntry, self).__init__()
        if word2id:
            # Adopt an existing mapping; it must already contain '<unk>'.
            self.word2id = word2id
            self.unk_id = word2id['<unk>']
        else:
            self.word2id = dict()
            if start_end_symbol:
                self.word2id['<pad>'] = 0
                self.word2id['<s>'] = 1
                self.word2id['</s>'] = 2
                self.word2id['<unk>'] = 3
                self.unk_id = self.word2id['<unk>']
            else:
                self.word2id['<pad>'] = 0
                # BUG FIX: key was misspelled '<unk' (missing '>'), which made
                # the '<unk>' lookup on the next line raise KeyError for every
                # default-constructed vocabulary.
                self.word2id['<unk>'] = 1
                self.unk_id = self.word2id['<unk>']
        # reverse map kept in sync by add()
        self.id2word_ = {v: k for k, v in self.word2id.items()}

    def __getitem__(self, word):
        """Return the id for ``word``, falling back to the <unk> id."""
        return self.word2id.get(word, self.unk_id)

    def __contains__(self, word):
        return word in self.word2id

    def __len__(self):
        return len(self.word2id)

    def add(self, word):
        """Register ``word`` if new; return its id either way."""
        if word not in self:
            wid = self.word2id[word] = len(self)
            self.id2word_[wid] = word
            return wid
        else:
            return self[word]

    def id2word(self, wid):
        """Return the word registered under id ``wid`` (KeyError if unknown)."""
        return self.id2word_[wid]

    def decode_sentence(self, sentence):
        """Map a sequence of id scalars back to words.

        Assumes each element supports .item() (e.g. a torch tensor row) -- as
        used by the training scripts in this repo.
        """
        decoded_sentence = []
        for wid_t in sentence:
            wid = wid_t.item()
            decoded_sentence.append(self.id2word_[wid])
        return decoded_sentence

    @staticmethod
    def from_corpus(fname):
        """Build a vocabulary containing every whitespace token in ``fname``."""
        vocab = VocabEntry()
        with open(fname) as fin:
            for line in fin:
                _ = [vocab.add(word) for word in line.split()]
        return vocab
class LabelEntry(object):
    """Label <-> index bidirectional mapping (no <unk>: misses return 'None')."""

    def __init__(self, lb2index=None):
        super(LabelEntry, self).__init__()
        if lb2index:
            self.lb2index = lb2index
        else:
            self.lb2index = dict()
        self.index2lb_ = {v: k for k, v in self.lb2index.items()}

    def __getitem__(self, lb):
        # unknown labels map to the string 'None' (kept from the original API)
        return self.lb2index.get(lb, 'None')

    def __contains__(self, lb):
        return lb in self.lb2index

    def __len__(self):
        return len(self.lb2index)

    def add(self, lb):
        """Register label ``lb`` if new; return its index either way."""
        if lb not in self:
            lb_index = self.lb2index[lb] = len(self)
            # BUG FIX: was `self.index2lb_ = word` -- `word` is undefined here
            # and the assignment would have replaced the whole reverse dict.
            self.index2lb_[lb_index] = lb
            return lb_index
        else:
            return self[lb]

    def index2lb(self, lb_index):
        """Return the label registered under ``lb_index``.

        BUG FIX: previously indexed with the undefined name ``lb``.
        """
        return self.index2lb_[lb_index]
class MonoTextData(object):
    """Text corpus reader: builds vocab/label maps and serves padded batches.

    Each line of ``fname`` is either a raw sentence or, when ``label`` is
    True, '<label>\t<sentence>'. Sentences that are empty or longer than
    ``max_length`` (minus 2 when ``start_end_symbol`` reserves room for the
    <s>/</s> wrappers) are dropped.
    """

    def __init__(self, fname, max_length, label=False, vocab=None,
                 minfre=0, init_vocab_file=None, start_end_symbol=False, lb_entry=None):
        super(MonoTextData, self).__init__()
        self.label = label
        self.max_length = max_length
        self.select_max_length = max_length
        self.minfre = minfre
        self.start_end_symbol = start_end_symbol
        if self.start_end_symbol:
            # leave room for the <s>/</s> wrappers added at padding time
            self.select_max_length -= 2
        if init_vocab_file:
            print('init vocab.')
            vocab = self._read_init_vocab(init_vocab_file, vocab)
        self.data, self.labels, self.vocab, self.lb_entry, self.dropped = self._read_corpus(fname, vocab, lb_entry)

    def __len__(self):
        return len(self.data)

    def _init_vocab(self):
        """Fresh word->id defaultdict with the special symbols pre-assigned."""
        vocab = defaultdict(lambda: len(vocab))
        vocab['<pad>'] = 0
        if self.start_end_symbol:
            vocab['<s>'] = 1
            vocab['</s>'] = 2
            vocab['<unk>'] = 3
        else:
            vocab['<unk>'] = 1
        return vocab

    def _read_init_vocab(self, fname, vocab):
        """Populate ``vocab`` from ``fname``, keeping words seen > minfre times."""
        print('init vocab from {0}'.format(fname))
        if not vocab:
            vocab = self._init_vocab()
        vocab_count_dic = defaultdict(int)
        with open(fname) as fin:
            for line in fin:
                if self.label:
                    split_line = line.split('\t')[1].split()
                else:
                    split_line = line.split()
                if len(split_line) < 1 or len(split_line) > self.select_max_length:
                    continue
                for word in split_line:
                    vocab_count_dic[word] += 1
        for word, value in vocab_count_dic.items():
            if value > self.minfre:
                # touching the defaultdict assigns the word a fresh id
                _ = vocab[word]
        if not isinstance(vocab, VocabEntry):
            vocab = VocabEntry(vocab, self.start_end_symbol)
        return vocab

    def _read_corpus(self, fname, vocab, lb_entry):
        """Read ``fname`` into id lists.

        Returns (data, labels, vocab, lb_entry, dropped) where ``labels`` is
        None for unlabelled corpora and ``dropped`` counts skipped sentences.
        """
        data = []
        labels = [] if self.label else None
        dropped = 0
        vocab_count_dic = defaultdict(int)
        if not vocab:
            vocab = self._init_vocab()
        if not lb_entry:
            lb_entry = defaultdict(lambda: len(lb_entry))
        if self.minfre:
            # first pass: count frequencies so rare words stay out of the vocab
            with open(fname) as fin:
                for line in fin:
                    if self.label:
                        lb = line.split('\t')[0]
                        # IMPROVEMENT: removed leftover debug print(lb)
                        _ = lb_entry[lb]
                        split_line = line.split('\t')[1].split()
                    else:
                        split_line = line.split()
                    if len(split_line) < 1 or len(split_line) > self.select_max_length:
                        continue
                    for word in split_line:
                        vocab_count_dic[word] += 1
            for word, count in vocab_count_dic.items():
                if count > self.minfre:
                    # touching the defaultdict assigns the word a fresh id
                    _ = vocab[word]
            if not isinstance(vocab, VocabEntry):
                vocab = VocabEntry(vocab)
            if not isinstance(lb_entry, LabelEntry):
                # BUG FIX: was `LabelEntry[lb_entry]` -- subscripting the class
                # raises TypeError; the intent was to construct one.
                lb_entry = LabelEntry(lb_entry)
        with open(fname) as fin:
            for line in fin:
                if self.label:
                    split_line = line.split('\t')
                    lb = split_line[0]
                    split_line = split_line[1].split()
                else:
                    split_line = line.split()
                if len(split_line) < 1 or len(split_line) > self.select_max_length:
                    dropped += 1
                    continue
                if self.label:
                    labels.append(lb_entry[lb])
                data.append([vocab[word] for word in split_line])
        if not isinstance(vocab, VocabEntry):
            vocab = VocabEntry(vocab)
        if not isinstance(lb_entry, LabelEntry):
            lb_entry = LabelEntry(lb_entry)
        return data, labels, vocab, lb_entry, dropped

    def padding_to_fixlen(self, data):
        """Wrap with <s>/</s> if configured and right-pad to ``max_length``."""
        padded_sents_list = []
        for sent in data:
            if self.start_end_symbol:
                sent = [self.vocab['<s>']] + sent + [self.vocab['</s>']]
            if len(sent) < self.max_length:
                sent = sent + [self.vocab.word2id['<pad>']] * (self.max_length - len(sent))
            padded_sents_list.append(sent)
        return padded_sents_list

    def batch_iter(self, data, batch_size, labels=None, num_epochs=1, shuffle=True):
        """Yield (batch_data, batch_labels) numpy slices for num_epochs passes.

        batch_labels is None when the corpus is unlabelled. (BUG FIX: the
        unlabelled path used to raise NameError on ``shuffled_label``.)
        """
        data = self.padding_to_fixlen(data)
        data = np.array(data)
        if self.label:
            labels = np.array(labels)
        data_size = len(data)
        num_batches_per_epoch = int((data_size - 1) / batch_size) + 1
        for epoch in range(num_epochs):
            if shuffle:
                shuffle_indices = np.random.permutation(np.arange(data_size))
                shuffled_data = data[shuffle_indices]
                shuffled_label = labels[shuffle_indices] if self.label else None
            else:
                shuffled_data = data
                shuffled_label = labels if self.label else None
            for batch_num in range(num_batches_per_epoch):
                start_index = batch_num * batch_size
                end_index = min((batch_num + 1) * batch_size, data_size)
                if self.label:
                    yield shuffled_data[start_index: end_index], shuffled_label[start_index: end_index]
                else:
                    yield shuffled_data[start_index: end_index], None
# if __name__ == '__main__':
# train_raw_file = 'data/train'
# train_out_file = 'data/question.train.txt'
#
# test_raw_file = 'data/valid'
# test_out_file = 'data/question.valid.txt'
# #
# # pre_file(train_raw_file, train_out_file)
# # pre_file(test_raw_file, test_out_file)
#
# max_length = 20
# batch_size = 16
# start_end_symbol = True
# label = True
# epochs = 5
#
# train_dataset = MonoTextData(train_out_file, label=label, max_length=max_length, start_end_symbol=start_end_symbol)
# vocab = train_dataset.vocab
# vocab_size = vocab.__len__()
# lb_entry = train_dataset.lb_entry
# # if label:
# # print('data size: {0}, dropped: {1}, vocab_size: {2}, label num: {3}'.format(len(train_dataset.data), train_dataset.dropped, vocab_size ,train_dataset.lb_entry.__len__()))
# # else:
# # print('data size: {0}, dropped: {1}, vocab_size: {2}'.format(len(train_dataset.data), train_dataset.dropped, vocab_size))
#
# if label:
# print('train data size: {0}, dropped: {1}, test data size: {2}, vocab_size: {3}, label num: {4}'. \
# format(len(train_dataset.data), train_dataset.dropped, len(test_dataset.data), vocab_size,
# train_dataset.lb_entry.__len__()))
# else:
# print('data size: {0}, dropped: {1}, test data size: {2}, vocab_size: {3}, label num: {4}'. \
# format(len(train_dataset.data), train_dataset.dropped, len(test_dataset), vocab_size,
# train_dataset.lb_entry.__len__()))
#
# test_dataset = MonoTextData(test_out_file, label=True, max_length=max_length, vocab=vocab,
# start_end_symbol=start_end_symbol, lb_entry=lb_entry)
#
# train_data_loader = train_dataset.batch_iter(train_dataset.data, batch_size=batch_size, labels=train_dataset.labels,
# num_epochs=epochs, shuffle=True)
#
# test_data_loader = test_dataset.batch_iter(test_dataset.data, batch_size=batch_size, labels=test_dataset.labels,
# num_epochs=1, shuffle=False)
#
# # for i in test_data_loader:
# count = 0
# iter_nlog = np.floor(train_dataset.data.__len__() / batch_size)
# report_batch = 0
# report_epoch = 1
# for i in train_data_loader:
# report_batch += 1
# if report_batch % iter_nlog == 0:
# print('epoch: {0}, batch: {1}'.\
# format(report_epoch, report_batch))
# report_epoch += 1
#
# batch_data, batch_label = i
# batch_data_tensor = torch.Tensor(batch_data).long()
# # print(batch_data_tensor)
# print(batch_data_tensor.shape)
# batch_label_tensor = torch.Tensor(batch_label).long()
# # print(batch_label_tensor)
# print(batch_label_tensor.shape)
| {"/main_attention_lstm.py": ["/utils.py", "/data_loader.py", "/Attention_BiLSTM_model.py"]} |
51,915 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /config/config_ag.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 09/01/2020 10:48
@Author: XinZhi Yao
"""
# Dataset-specific hyper-parameters for the AG News corpus; merged into the
# argparse namespace by init_config() in main_attention_lstm.py.
params = {
    # model parameters
    'embed_dim': 64,
    'hidden_size': 32,
    'bidirectional': True,
    'weight_decay': 0.001,
    'momentum': 0,
    'attention_size': 16,
    # 'sequence_length': 20,
    'max_length': 75,          # sentences longer than this are dropped/padded to it
    'output_size': 6,          # number of target classes
    # data parameters
    'seed': 1314,
    'use_cuda': False,         # NOTE: init_config() overrides this at runtime
    'start_end_symbol': True,
    'label': True,
    'model_save_path': 'model/bilstm_attn_model_ag.pt',
    'logging_file': 'model/log_ag.txt',
    'train_data_path': 'data/AG_corpus_data/AG.train.txt',
    'valid_data_path': 'data/AG_corpus_data/AG.valid.txt',
    'train_loss_acc_save_file': 'model/ag_train_loss_acc.txt',
    'valid_loss_acc_save_file': 'model/ag_valid_loss_acc.txt',
}
| {"/main_attention_lstm.py": ["/utils.py", "/data_loader.py", "/Attention_BiLSTM_model.py"]} |
51,916 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /main_attention_lstm.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 07/01/2020 15:04
@Author: XinZhi Yao
"""
import os
import time
import importlib
import argparse
from tqdm import tqdm
import numpy as np
from sklearn import metrics
import torch
import torch.nn as nn
import torch.optim as optim
from utils import get_logger, save_loss_acc_file
from data_loader import MonoTextData
import Attention_BiLSTM_model
logging = None
def init_config():
    """Parse CLI args, merge the selected dataset config, and seed all RNGs.

    --dataset picks config.config_<name>; its ``params`` dict is merged into
    the argparse namespace. vocab/vocab_size slots are pre-set to None and
    filled later by main().
    """
    parser = argparse.ArgumentParser(description='Bi-LSTM + Attention model for text classification.')
    parser.add_argument('--dataset', type=str, choices=['ques', 'ag'], required=True, help='dataset config file.')
    parser.add_argument('--lr', type=float, default=0.001, required=False, help='learning rate.')
    parser.add_argument('--epochs', type=int, default=1000, required=False, help='number of epoch.')
    parser.add_argument('--batch_size', type=int, default=16, required=False, help='size of mini batch data.')
    parser.add_argument('--dropout', type=float, default=0.5, required=False, help='dropout rate.')
    parser.add_argument('--opt', type=str, choices=["sgd", "adam"], default='adam', required=False, help='optim.')
    parser.add_argument('--load_path', type=str, default=None, required=False, help='load model path.')
    parser.add_argument('--save_loss_acc',action='store_true', default=False, help='where save train loss and train acc.')
    args = parser.parse_args()
    config_file = 'config.config_{0}'.format(args.dataset)
    params = importlib.import_module(config_file).params
    # merge CLI options and config params into a single namespace
    args = argparse.Namespace(**vars(args), **params)
    args.vocab = None
    args.vocab_size = None
    # NOTE: this overrides the config file's 'use_cuda' with runtime detection
    args.use_cuda = torch.cuda.is_available()
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if args.use_cuda:
        torch.cuda.manual_seed(args.seed)
        torch.backends.cudnn.deterministic = True
    return args
def evaluate(model, valid_dataset, batch_size, use_cuda, compute_f=False):
    """Run ``model`` over the whole validation set.

    Returns (avg_loss, correct_count, accuracy_percent, dataset_size); the
    loss is the sum of per-batch mean losses divided by the example count.
    When ``compute_f`` is True, also logs macro precision/recall/F1.
    Caller is responsible for model.eval()/model.train() switching.
    NOTE(review): gradients are still tracked here -- consider torch.no_grad().
    """
    criterion = torch.nn.CrossEntropyLoss()
    corrects = eval_loss = 0
    report_size = valid_dataset.data.__len__()
    valid_data_loader = valid_dataset.batch_iter(valid_dataset.data, batch_size=batch_size,
                                                 labels=valid_dataset.labels, num_epochs=1, shuffle=False)
    pred_label_list = []
    ture_label_list = []
    for mini_batch in valid_data_loader:
        batch_data, batch_label = mini_batch
        batch_data_tensor = torch.Tensor(batch_data).long()
        batch_label_tensor = torch.Tensor(batch_label).long()
        if use_cuda:
            batch_data_tensor = batch_data_tensor.cuda()
            batch_label_tensor = batch_label_tensor.cuda()
        batch_size, _ = batch_data_tensor.shape
        pred = model(batch_data_tensor, batch_size=batch_size)
        loss = criterion(pred, batch_label_tensor)
        eval_loss += loss.item()
        # argmax over class logits -> predicted label ids
        corrects += (torch.max(pred, 1)[1].view(batch_label_tensor.size()).data == batch_label_tensor).sum()
        pred_label_list += torch.max(pred, 1)[1].view(batch_label_tensor.size()).cpu().numpy().tolist()
        ture_label_list += batch_label_tensor.cpu().numpy().tolist()
    if compute_f:
        precision_score = metrics.precision_score(ture_label_list, pred_label_list, average='macro')
        recall_score = metrics.recall_score(ture_label_list, pred_label_list, average='macro')
        f1_score = 2 * precision_score * recall_score / (precision_score + recall_score)
        logging('precision: {0:.4f} | recall: {1:.4f} | F_score: {2:.4f}'.\
            format(precision_score, recall_score, f1_score))
    return eval_loss / report_size, corrects, corrects*100.0/ report_size, report_size
def main(args):
    """Train the Bi-LSTM + attention classifier and keep the best checkpoint.

    Loads train/valid corpora, builds the model (optionally warm-starting
    from args.load_path), trains with SGD/Adam, evaluates once per epoch,
    saves the best-accuracy weights to args.model_save_path, then reloads
    them for a final evaluation with precision/recall/F1.
    """
    global logging
    logging = get_logger(args.logging_file)
    if args.use_cuda:
        logging('using cuda')
    logging(str(args))
    # load training data and valid data
    train_dataset = MonoTextData(args.train_data_path, label=args.label, max_length=args.max_length,
                                 start_end_symbol=args.start_end_symbol)
    args.vocab = train_dataset.vocab
    args.vocab_size = args.vocab.__len__()
    lb_entry = train_dataset.lb_entry
    # valid set reuses the training vocab/label maps so ids stay consistent
    valid_dataset = MonoTextData(args.valid_data_path, label=args.label, max_length=args.max_length, vocab=args.vocab,
                                 start_end_symbol=args.start_end_symbol, lb_entry=lb_entry)
    train_data_loader = train_dataset.batch_iter(train_dataset.data, batch_size=args.batch_size,
                                                 labels=train_dataset.labels, num_epochs=args.epochs)
    if args.label:
        logging('train data size: {0}, dropped: {1}, valid data size: {2}, vocab_size: {3}, label num: {4}'.\
            format(len(train_dataset.data), train_dataset.dropped, len(valid_dataset.data) , args.vocab_size ,
                   train_dataset.lb_entry.__len__()))
    else:
        logging('data size: {0}, dropped: {1}, valid data size: {2}, vocab_size: {3}, label num: {4}'.\
            format(len(train_dataset.data), train_dataset.dropped, len(valid_dataset) , args.vocab_size,
                   train_dataset.lb_entry.__len__()))
    # init model
    bilstm_attn = Attention_BiLSTM_model.bilstm_attn(args)
    logging('init model done.')
    if not args.load_path is None:
        # warm start: restore weights and report their validation score
        if args.use_cuda:
            bilstm_attn.load_state_dict(torch.load(args.load_path))
        else:
            bilstm_attn.load_state_dict(torch.load(args.load_path, map_location='cpu'))
        loss, corrects, acc, valid_dataset_size = evaluate(bilstm_attn, valid_dataset, args.batch_size, args.use_cuda)
        logging('loed model: loss {0:.4f} | accurcy {1}%({2}/{3})'. \
                format(loss, acc, corrects, valid_dataset_size))
    if args.use_cuda:
        bilstm_attn = bilstm_attn.cuda()
    if args.opt == 'sgd':
        optimizer = optim.SGD(bilstm_attn.parameters(), lr=args.lr, momentum=args.momentum)
    elif args.opt == 'adam':
        optimizer = optim.Adam(bilstm_attn.parameters(), lr=args.lr, weight_decay=args.weight_decay)
    else:
        raise ValueError('optimizer not supported.')
    criterion = torch.nn.CrossEntropyLoss()
    # train, evaluate and save best model.
    best_acc = 0
    # evaluate once per epoch (eval_niter batches); log ~10 times per epoch
    eval_niter = np.floor(train_dataset.data.__len__() / args.batch_size)
    log_niter = np.floor(eval_niter / 10.0)
    train_loss = 0
    train_corrects = 0
    report_epoch = 1
    report_size = 0
    train_loss_list = []
    train_acc_list = []
    valid_loss_list = []
    valid_acc_list = []
    try:
        logging('-'*90)
        n_iter = 0
        logging_start_time = time.time()
        epoch_start_time = time.time()
        total_start_time = time.time()
        for mini_batch in train_data_loader:
            n_iter += 1
            batch_data, batch_label = mini_batch
            report_size += len(batch_data)
            batch_data_tensor = torch.Tensor(batch_data).long()
            batch_label_tensor = torch.Tensor(batch_label).long()
            if args.use_cuda:
                batch_data_tensor = batch_data_tensor.cuda()
                batch_label_tensor = batch_label_tensor.cuda()
            batch_size, _ = batch_data_tensor.shape
            target = bilstm_attn(batch_data_tensor, batch_size=batch_size)
            loss = criterion(target, batch_label_tensor)
            corrects = (torch.max(target, 1)[ 1 ].view(batch_label_tensor.size()).data == batch_label_tensor).sum()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            train_loss += loss.item()
            train_corrects += corrects
            # todo: Visualization of attention weights.
            if n_iter % log_niter == 0:
                # periodic progress report + a point for the loss/acc curves
                logging_end_time = time.time()
                report_loss = train_loss / report_size
                report_acc = train_corrects * 100 / report_size
                batch = n_iter - (report_epoch - 1) * eval_niter
                logging('epoch-batch {0}-{1} | cost_time {2:2.2f} | train_loss: {3:5.6f} | train_acc: {4}%'.\
                    format(report_epoch, int(batch), logging_end_time-logging_start_time, report_loss, report_acc))
                bilstm_attn.eval()
                eval_loss, eval_corrects, eval_acc, eval_valid_size = evaluate(bilstm_attn, valid_dataset, args.batch_size, args.use_cuda)
                bilstm_attn.train()
                train_loss_list.append(report_loss)
                train_acc_list.append(float(report_acc))
                valid_loss_list.append(eval_loss)
                valid_acc_list.append(float(eval_acc))
                train_loss = report_size = 0
                train_corrects = 0
                # NOTE(review): self-assignment -- probably meant
                # `logging_start_time = time.time()` to reset the interval timer.
                logging_start_time = logging_start_time
            if n_iter % eval_niter == 0:
                # end of an epoch: full validation pass, then checkpoint if best
                epoch_end_time = time.time()
                bilstm_attn.eval()
                # report_loss = train_loss / report_size
                eval_loss, corrects, acc, valid_dataset_size = evaluate(bilstm_attn, valid_dataset, args.batch_size, args.use_cuda)
                logging('-' * 10)
                logging('end_epoch {0:3d} | cost_time {1:2.2f} s | eval_loss {2:.4f} | accurcy {3}%({4}/{5})'.\
                    format(report_epoch, epoch_end_time-epoch_start_time, eval_loss, acc, corrects, valid_dataset_size))
                # train_loss = report_size = 0
                report_epoch += 1
                epoch_start_time = time.time()
                bilstm_attn.train()
                if best_acc < acc:
                    best_acc = acc
                    logging('update best acc: {0}%'.format(best_acc))
                    torch.save(bilstm_attn.state_dict(), args.model_save_path)
                logging('-'*10)
    except KeyboardInterrupt:
        # Ctrl-C stops training early but still runs the final evaluation below
        logging('_'*90)
        logging('Exiting from training early.')
    bilstm_attn.eval()
    logging('load best model.')
    if args.use_cuda:
        bilstm_attn.load_state_dict(torch.load(args.model_save_path))
    else:
        bilstm_attn.load_state_dict(torch.load(args.model_save_path, map_location='cpu'))
    loss, corrects, acc, valid_dataset_size = evaluate(bilstm_attn, valid_dataset, args.batch_size, args.use_cuda, compute_f=True)
    logging('total_epoch {0:3d} | total_time {1:2.2f} s | loss {2:.4f} | accurcy {3}%({4}/{5})'. \
            format(report_epoch, time.time() - total_start_time, loss, acc, corrects, valid_dataset_size))
    logging('-'*90)
    if args.save_loss_acc:
        # persist the curves gathered at each logging step
        save_loss_acc_file(train_loss_list, train_acc_list, args.train_loss_acc_save_file)
        save_loss_acc_file(valid_loss_list, valid_acc_list, args.valid_loss_acc_save_file)
if __name__ == '__main__':
    # Parse CLI + dataset config, then train and evaluate.
    args = init_config()
    main(args)
| {"/main_attention_lstm.py": ["/utils.py", "/data_loader.py", "/Attention_BiLSTM_model.py"]} |
51,917 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /Attention_BiLSTM_model.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 08/01/2020 15:32
@Author: XinZhi Yao
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
class bilstm_attn(nn.Module):
    """Bi-LSTM encoder with additive self-attention for sentence classification.

    Expects ``args`` to provide: batch_size, output_size, hidden_size,
    vocab_size, embed_dim, bidirectional, dropout, use_cuda, max_length,
    attention_size and a subscriptable ``vocab`` containing '<pad>'.
    """

    def __init__(self, args):
        super(bilstm_attn, self).__init__()
        self.batch_size = args.batch_size
        self.output_size = args.output_size
        self.hidden_size = args.hidden_size
        self.vocab_size = args.vocab_size
        self.embed_dim = args.embed_dim
        self.bidirectional = args.bidirectional
        self.dropout = args.dropout
        self.use_cuda = args.use_cuda
        self.sequence_length = args.max_length
        self.lookup_table = nn.Embedding(self.vocab_size, self.embed_dim, padding_idx=args.vocab['<pad>'])
        self.lookup_table.weight.data.uniform_(-1., 1.)
        self.layer_size = 1
        self.lstm = nn.LSTM(
            self.embed_dim,
            self.hidden_size,
            self.layer_size,
            dropout=self.dropout,
            bidirectional=self.bidirectional,
        )
        if self.bidirectional:
            # two directions -> hidden states are concatenated downstream
            self.layer_size = self.layer_size * 2
        self.attention_size = args.attention_size
        # BUG FIX: w_omega/u_omega were plain tensors created with
        # requires_grad=True. They were never registered as module parameters,
        # so optimizers built from model.parameters() silently ignored them and
        # the attention layer could never train. nn.Parameter registers them
        # (and .cuda()/.to() now moves them with the module). They are also
        # initialised with small random values instead of zeros, because an
        # all-zero w_omega/u_omega yields exactly zero gradient and would have
        # stayed frozen forever.
        self.w_omega = nn.Parameter(torch.randn(self.hidden_size * self.layer_size, self.attention_size) * 0.1)
        self.u_omega = nn.Parameter(torch.randn(self.attention_size) * 0.1)
        self.label = nn.Linear(self.hidden_size * self.layer_size, self.output_size)

    def attention_net(self, lstm_output):
        """Collapse the time axis with additive attention.

        lstm_output: [sequence_length, batch_size, hidden_size*layer_size]
        returns:     [batch_size, hidden_size*layer_size]
        """
        # [sequence_length * batch_size, hidden_size*layer_size]
        output_reshape = lstm_output.reshape(-1, self.hidden_size*self.layer_size)
        # [sequence_length * batch_size, attention_size]
        attn_tanh = torch.tanh(torch.mm(output_reshape, self.w_omega))
        # [sequence_length * batch_size, 1]
        attn_hidden_layer = torch.mm(attn_tanh, self.u_omega.reshape(-1, 1))
        # softmax over time, computed manually: [batch_size, sequence_length]
        exps = torch.exp(attn_hidden_layer).reshape(-1, self.sequence_length)
        alphas = exps / torch.sum(exps, 1).reshape(-1, 1)
        alphas_reshape = alphas.reshape(-1, self.sequence_length, 1)
        # [batch_size, sequence_length, hidden_size*layer_size]
        state = lstm_output.permute(1, 0, 2)
        # attention-weighted sum over time
        attn_output = torch.sum(state * alphas_reshape, 1)
        return attn_output

    def forward(self, input_sentences, batch_size):
        """input_sentences: [batch_size, sequence_length] of word ids.

        Returns unnormalised class logits of shape [batch_size, output_size].
        """
        input = self.lookup_table(input_sentences)
        # LSTM expects time-major input: [sequence_length, batch, embed_dim]
        input = input.permute(1, 0, 2)
        if self.use_cuda:
            h_0 = torch.zeros(self.layer_size, batch_size, self.hidden_size).cuda()
            c_0 = torch.zeros(self.layer_size, batch_size, self.hidden_size).cuda()
        else:
            h_0 = torch.zeros(self.layer_size, batch_size, self.hidden_size)
            c_0 = torch.zeros(self.layer_size, batch_size, self.hidden_size)
        lstm_output, (final_hidden_state, final_cell_state) = self.lstm(input, (h_0, c_0))
        attn_output = self.attention_net(lstm_output)
        logits = self.label(attn_output)
        return logits
| {"/main_attention_lstm.py": ["/utils.py", "/data_loader.py", "/Attention_BiLSTM_model.py"]} |
51,918 | YaoXinZhi/Bi-LSTM-Attention | refs/heads/master | /config/config_ques.py | #! usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Created on 07/01/2020 15:14
@Author: XinZhi Yao
"""
# Dataset-specific hyper-parameters for the TREC question corpus; merged into
# the argparse namespace by init_config() in main_attention_lstm.py.
params = {
    # model parameters
    'embed_dim': 64,
    'hidden_size': 32,
    'bidirectional': True,
    'weight_decay': 0.001,
    'momentum': 0,
    'attention_size': 16,
    # 'sequence_length': 20,
    'max_length': 20,          # sentences longer than this are dropped/padded to it
    'output_size': 6,          # number of target classes
    # data parameters
    'seed': 1314,
    'use_cuda': False,         # NOTE: init_config() overrides this at runtime
    'start_end_symbol': True,
    'label': True,
    'model_save_path': 'model/bilstm_attn_model_ques.pt',
    'logging_file': 'model/log_que.txt',
    'train_data_path': 'data/question_clas/question.train.txt',
    'valid_data_path': 'data/question_clas/question.valid.txt',
    'train_loss_acc_save_file': 'model/ques_train_loss_acc.txt',
    'valid_loss_acc_save_file': 'model/ques_valid_loss_acc.txt',
}
51,927 | dimayasin/SPI | refs/heads/master | /spi-project/SPI/apps/spiapps/models.py | from __future__ import unicode_literals
from django.db import models
from django.db import connection
import re #, bcrypt
import datetime
# Valid part-number characters: letters, digits, '.', '-', '/', '\' and '#'.
# BUG FIX: the old class `[a-zA-Z0-9.-\/#]` parsed `.-\/` as the range '.'-'/',
# so '-' and '\' were rejected even though the validation message in
# Part_Manager.validatePartsData explicitly allows them.
PN_REGEX = re.compile(r'^[a-zA-Z0-9.\-/\\#]+$')
class Part_Manager(models.Manager):
    """Custom manager holding form-level validation for part records."""

    def validatePartsData(self, postData):
        """Validate POSTed part data; return a list of error strings (empty = valid).

        Expects postData to contain 'pn' (part number) and 'date' in
        '%Y-%m-%d' format; strptime raises ValueError on a malformed date.
        """
        errors = []
        part_date = datetime.datetime.strptime(postData['date'], '%Y-%m-%d').date()
        today = datetime.datetime.today().date()
        # NOTE(review): condition only rejects the empty string even though the
        # message says "more than 1 character" -- confirm intended minimum.
        if len(postData['pn']) < 1:
            errors.append("Part number should be more than 1 character long")
        if not PN_REGEX.match(postData['pn']):
            errors.append("Part number should contain: Letters, numbers, or one of these characters(. - / \\ or #)")
        if part_date >= today:
            # BUG FIX: message said "Birth date" (copy/paste from a user form);
            # this field is the part's date.
            errors.append("Part date shouldn't be a current or future date.")
        return errors
class Parts(models.Model):
    # Aircraft spare-part record.
    pn=models.CharField(max_length=255)          # part number
    source=models.CharField(max_length=255)      # supplier / origin
    description=models.CharField(max_length=255)
    part_type=models.CharField(max_length=255)
    fleet=models.CharField(max_length=255)
    ata=models.CharField(max_length=255)         # presumably an ATA chapter code -- TODO confirm
    uom=models.CharField(max_length=255)         # unit of measure
    cond=models.CharField(max_length=255)        # condition code
    date=models.DateField(default= datetime.date.today)
    price=models.FloatField()
    # NOTE(review): manager is bound as `object`, not Django's conventional
    # `objects` -- Parts.objects will be the default manager instead; confirm
    # before relying on Part_Manager.validatePartsData via the model.
    object = Part_Manager()
| {"/spi-project/SPI/apps/spiapps/views.py": ["/spi-project/SPI/apps/spiapps/models.py"]} |
51,928 | dimayasin/SPI | refs/heads/master | /spi-project/SPI/apps/spiapps/urls.py | from django.conf.urls import url
from . import views # This line is new!
# URL routes for the SPI app; all handlers live in views.py.
urlpatterns = [
    url(r'^$', views.index),            # landing page
    url(r'summ$', views.summ),          # summary page
    url(r'inputData$', views.inputData),
    url(r'brows$', views.brows),
    url(r'pn$', views.pn),              # lookup by part number
    url(r'desc$', views.desc),          # lookup by description
    url(r'bulk$', views.bulk),          # bulk lookup upload form
    url(r'uploadData$', views.uploadData),
    url(r'bulksearch', views.bulksearch),
    # url(r'show', views.show),
]
51,929 | dimayasin/SPI | refs/heads/master | /spi-project/SPI/apps/spiapps/views.py | from __future__ import unicode_literals
from django.shortcuts import render,redirect,HttpResponse
from django import forms
from django.db import connection
import django_excel as excel
import pyodbc
import openpyxl
from django.contrib import messages
from openpyxl import load_workbook, workbook
from .models import Parts
# Apparently-unused leftover constant; not referenced elsewhere in this module.
secret_key = 'TARDIS'
# Module-level DB cursor opened at import time.
# NOTE(review): a single shared cursor is not thread-safe and none of the
# views below actually use it -- confirm whether it can be removed.
cursor = connection.cursor()
def index(request):
    """Render the landing page."""
    return render(request,'index.html')
def summ(request):
    """Render the summary page."""
    return render(request, 'summ.html')
def inputData(request):
    """Render the manual data-entry form."""
    return render(request, 'input_data.html')
def brows(request):
    """Render the browse page."""
    return render(request, 'brows.html')
def pn(request):
    """Render the lookup-by-part-number page."""
    return render(request, 'lookuppn.html')
def desc(request):
    """Render the lookup-by-description page."""
    return render(request, 'lookupdesc.html')
def bulk(request):
    """Render the bulk-lookup upload form."""
    return render(request, 'lookupbulk.html')
# Rows parsed from the most recently uploaded workbook.
# NOTE(review): module-level globals persist across requests and across users,
# so rows pile up between uploads -- this should probably be request-scoped.
excel_data = list()
def uploadData(request):
    """Accept an uploaded .xlsx file and stash cells A2/B2 into excel_data.

    On POST: reads 'Sheet1' of the uploaded workbook, appends the string form
    of cells (2,1) and (2,2) to the module-level excel_data, then redirects to
    /bulksearch. On any other method, re-renders the upload form.
    """
    if request.method == "POST":
        excel_file = request.FILES['excel_file']
        wb=openpyxl.load_workbook(excel_file)
        worksheet=wb['Sheet1']
        # for row in worksheet.iter_rows():
        #     row_data = list()
        #     # thisRow = list()
        #     for cell in row:
        #         row_data.append(str(cell.value))
        #     excel_data.append(row_data)
        # row_num=2
        # for row_num in worksheet.iter_rows():
        row_data = list()
        part_num=worksheet.cell(row=2, column=1)
        description=worksheet.cell(row=2, column=2)
        # NOTE(review): str() of a Cell yields its repr (e.g. '<Cell ...>'),
        # not the cell's value -- likely `part_num.value` was intended.
        row_data.append(str(part_num))
        row_data.append(str(description))
        excel_data.append(row_data)
        # context is passed but redirect() treats extra args as URL params only
        context={"excel_data":excel_data}
        return redirect('/bulksearch', context)
    else:
        return render(request,'lookupbulk.html', {})
""" def show(request, str):
excel_file = request.FILES[str]
wb=openpyxl.load_workbook(excel_file)
worksheet=wb['Sheet1']
excel_data = list()
for row in worksheet.iter_rows():
row_data = list()
# thisRow = list()
for cell in row:
if cell.value.upper() =='PN':
continue
else:
row_data.append(str(cell.value))
excel_data.append(row_data) """
def bulksearch(request):
    """Write the collected bulk-search rows into temp.xlsx and show results.

    NOTE(review): `wb` is a brand-new workbook, so ws.iter_rows() yields no
    rows and the loop body never runs -- temp.xlsx is always saved empty and
    counter/i are never advanced. This looks unfinished; confirm intended
    behaviour before relying on it.
    """
    wb=openpyxl.Workbook()
    ws = wb.active
    counter=1
    i=1
    for row in ws.iter_rows():
        for cell in row:
            if len(excel_data[counter]) >0:
                cell.value='A' #str(excel_data[counter])
                # print(excel_data[counter])
                counter +=1
        i += 1
    wb.save(filename = 'temp.xlsx')
    # str='temp.xlsx'
    # show(str)
    return render(request,'showdata.html')
| {"/spi-project/SPI/apps/spiapps/views.py": ["/spi-project/SPI/apps/spiapps/models.py"]} |
51,949 | indionapolis/test-task-SBER | refs/heads/main | /src/bots.py | from abc import ABC
from aiogram import Dispatcher, Bot as TelegramBotClient, executor, types
from collections import defaultdict
from src.actions import all_actions
# SECURITY: hard-coded bot token committed to source control -- rotate it and
# load it from an environment variable or secret store instead.
TELEGRAM_API_TOKEN = "1649979045:AAFHeqndPGoqas48PQQ_o_hS516ohUkGm5g"
# Per-chat cache of command arguments collected so far, keyed by
# 'command:<chat_id>'. In-memory only; lost on process restart.
USER_COMMANDS_CACHE = defaultdict(set)
class Bot(ABC):
    """Abstract chat bot: subclasses override _init_bot() and run()."""
    def __init__(self):
        self._init_bot()
    def _init_bot(self):
        # Hook for subclass initialisation (clients, handlers); no-op here.
        pass
    def run(self):
        # Start the bot's event loop; overridden by concrete bots.
        pass
class TelegramBot(Bot):
    """aiogram-backed bot exposing the actions from src.actions as /commands."""
    def _init_bot(self):
        self._bot = TelegramBotClient(token=TELEGRAM_API_TOKEN)
        self._dp = Dispatcher(self._bot)
        # command word -> instantiated Action; help lines built from metadata
        self.actions = {action.command: action() for action in all_actions}
        self.commands_list = [f"/{action.command} {action.params} - {action.description}" for action in all_actions]
        self._dp.register_message_handler(self._handle_update)
    async def _handle_update(self, message: types.Message):
        """Dispatch one incoming message.

        Known /command: accumulate its args in the per-chat cache, run the
        action with everything collected so far, and flush the cache once the
        action reports success. Anything else gets the help text.
        """
        command = message.get_command(pure=True)
        if command and command in self.actions.keys():
            action = self.actions.get(command)
            args = message.get_args().strip().split()
            # set params in cache
            [USER_COMMANDS_CACHE[f'command:{message.chat["id"]}'].add(arg) for arg in args]
            flag, response = action.execute(USER_COMMANDS_CACHE[f'command:{message.chat["id"]}'])
            # flush cache if success
            if flag:
                del USER_COMMANDS_CACHE[f'command:{message.chat["id"]}']
            await message.answer(response)
        else:
            await message.answer("Hi! available commands are:\n\n" + "\n".join(self.commands_list))
    def run(self):
        # Blocks the caller: long-polls Telegram until interrupted.
        executor.start_polling(self._dp, skip_updates=True)
| {"/src/bots.py": ["/src/actions.py"], "/src/actions.py": ["/src/restaurant_client.py"], "/tests/test_actions.py": ["/src/actions.py"], "/tests/test_telegram_bot.py": ["/src/bot_factory.py"], "/src/bot_factory.py": ["/src/bots.py"]} |
51,950 | indionapolis/test-task-SBER | refs/heads/main | /src/actions.py | import re
from abc import ABC
from typing import Tuple
from src.restaurant_client import Client
restaurant_client = Client()
class Action(ABC):
    """Base class for bot commands: `command` word, usage `params`, help text."""
    command: str = None
    params: str = None
    description: str = None
    def execute(self, params: str) -> str:
        # Subclasses perform the command and return the reply.
        # NOTE(review): MakeReservation actually takes a set and returns
        # (bool, str) -- this base annotation is out of date.
        pass
class MakeReservation(Action):
    """The /book command: collects a time and a party size, then books a table."""
    command = "book"
    params = "<valid time: HH:MM> <number of people more than 0>"
    description = "creates booking according to params"
    # pattern to validate params
    pattern = re.compile(r"^([0-9]|0[0-9]|1[0-9]|2[0-3]):[0-5][0-9] [1-9]\d*$")
    time = re.compile(r"^([0-9]|0[0-9]|1[0-9]|2[0-3]):[0-5][0-9]$")
    n_people = re.compile(r"^[1-9]\d*$")
    params_map = {"time": time, "n_people": n_people}

    def execute(self, params: set) -> Tuple[bool, str]:
        """Match raw argument strings against the expected params and book.

        Returns (True, confirmation) once every param is present, otherwise
        (False, message listing the params still missing).
        """
        matched = {}
        for raw in params:
            for name, regex in self.params_map.items():
                if regex.match(raw):
                    matched[name] = raw
        missing = set(self.params_map.keys()) - set(matched.keys())
        if not missing:
            return True, restaurant_client.book(**matched)
        plural = '' if len(missing) == 1 else 's'
        placeholders = ('<%s> ' * len(missing)) % tuple(missing)
        return False, f"need to specify following param{plural}: {placeholders}"
# Registry of every available bot action; bots.py builds its command table
# and help text from this list.
all_actions = [MakeReservation]
| {"/src/bots.py": ["/src/actions.py"], "/src/actions.py": ["/src/restaurant_client.py"], "/tests/test_actions.py": ["/src/actions.py"], "/tests/test_telegram_bot.py": ["/src/bot_factory.py"], "/src/bot_factory.py": ["/src/bots.py"]} |
51,951 | indionapolis/test-task-SBER | refs/heads/main | /tests/test_actions.py | import pytest
from src.actions import MakeReservation
@pytest.fixture
def reservation_action():
    """A fresh MakeReservation action per test."""
    return MakeReservation()
def test_incorrect_reservation(reservation_action):
    """Missing or malformed params must not create a booking.

    BUG FIX: these assertions still targeted an old API where execute() took
    a string and returned a single message. MakeReservation.execute now takes
    a *set* of raw argument strings and returns (success_flag, message), so
    the old test could never pass.
    """
    flag, response = reservation_action.execute(set())
    assert flag is False
    assert response.startswith("need to specify following param")
    # "1:60" is not a valid time and "0" is not a valid people count,
    # so neither pattern matches and both params stay missing.
    flag, response = reservation_action.execute({"1:60", "0"})
    assert flag is False
    assert response.startswith("need to specify following params")
    # only the people count is valid here -> the time is still missing
    flag, response = reservation_action.execute({"5"})
    assert flag is False
    assert "<time>" in response
def test_correct_reservation(reservation_action):
    """A valid time + people count books a table via the restaurant client.

    BUG FIX: the expected reply "Your booking received!" never matched the
    restaurant client's actual confirmation message, and execute() now
    returns (success_flag, message).
    """
    flag, response = reservation_action.execute({"12:00", "1"})
    assert flag is True
    assert response == "booked successfully, your time: 12:00, your table: 0"
    # all params present but no table big enough -> still flagged complete,
    # with the client's refusal message
    flag, response = reservation_action.execute({"0:00", "100"})
    assert flag is True
    assert response == "unfortunately there are no available tables on your time"
| {"/src/bots.py": ["/src/actions.py"], "/src/actions.py": ["/src/restaurant_client.py"], "/tests/test_actions.py": ["/src/actions.py"], "/tests/test_telegram_bot.py": ["/src/bot_factory.py"], "/src/bot_factory.py": ["/src/bots.py"]} |
def generate_time_table():
    """Return a fresh availability map for a full day.

    Keys are hour strings ('0:00' ... '23:00'); value False means the slot is
    free. BUG FIX: range(0, 23) stopped at 22:00, so 23:00 could never be
    booked.
    """
    return {f'{i}:00': False for i in range(24)}
class Client:
def __init__(self):
self._reservations = []
self._booking_table = [(2, generate_time_table()),
(2, generate_time_table()),
(4, generate_time_table()),
(4, generate_time_table())]
def book(self, time: str, n_people: str) -> str:
for i, table in enumerate(self._booking_table):
if table[0] >= int(n_people):
if not table[1].get(time, True):
table[1][time] = True
self._reservations.append({"time": time, "n_people": n_people})
return f'booked successfully, your time: {time}, your table: {i}'
return 'unfortunately there are no available tables on your time'
def get_bookings(self):
return self._reservations
| {"/src/bots.py": ["/src/actions.py"], "/src/actions.py": ["/src/restaurant_client.py"], "/tests/test_actions.py": ["/src/actions.py"], "/tests/test_telegram_bot.py": ["/src/bot_factory.py"], "/src/bot_factory.py": ["/src/bots.py"]} |
51,953 | indionapolis/test-task-SBER | refs/heads/main | /tests/test_telegram_bot.py | import pytest
from aiogram import types, Bot
from src.bot_factory import BotFactory
from tests.fixtures import MESSAGE
@pytest.fixture
def telegram_bot():
    """Build the Telegram bot under test through the factory registry."""
    return BotFactory.get_instance("TELEGRAM")
@pytest.fixture
def non_booking_message():
    """An aiogram Message built from the canned MESSAGE payload (no /book text)."""
    return types.Message(**MESSAGE)
@pytest.fixture
def booking_message():
    """An aiogram Message carrying a /book command.

    Copies the shared MESSAGE payload instead of mutating it in place -- the
    original wrote MESSAGE["text"] permanently, so any test built from
    MESSAGE after this fixture ran would see the booking text (a fixture
    ordering hazard).
    """
    payload = dict(MESSAGE)
    payload["text"] = "/book 1:00 1"
    return types.Message(**payload)
async def mock_answer(*args, **kwargs):
    """Stand-in for Message.answer: the bot must reply with the help text."""
    expected = ("Hi! available commands are:\n\n/book <valid time: HH:MM> <number of people more then 0> "
                "- creates booking according to params")
    assert args[1] == expected
async def mock_answer_booking(*args, **kwargs):
    """Stand-in for Message.answer: the bot must acknowledge the booking."""
    reply = args[1]
    assert reply == "Your booking received!"
@pytest.mark.asyncio
async def test_non_booking_command(telegram_bot, non_booking_message, monkeypatch):
    """A message without a command is answered with the help text."""
    # aiogram requires a "current" Bot before Message.answer can be awaited.
    Bot.set_current(telegram_bot._bot)
    # Swap the real Telegram send for the asserting stub defined above.
    monkeypatch.setattr(types.Message, "answer", mock_answer, raising=True)
    await telegram_bot._handle_update(non_booking_message)
@pytest.mark.asyncio
async def test_booking_command(telegram_bot, booking_message, monkeypatch):
    """A /book message is acknowledged with the booking confirmation."""
    # aiogram requires a "current" Bot before Message.answer can be awaited.
    Bot.set_current(telegram_bot._bot)
    # Swap the real Telegram send for the asserting stub defined above.
    monkeypatch.setattr(types.Message, "answer", mock_answer_booking, raising=True)
    await telegram_bot._handle_update(booking_message)
| {"/src/bots.py": ["/src/actions.py"], "/src/actions.py": ["/src/restaurant_client.py"], "/tests/test_actions.py": ["/src/actions.py"], "/tests/test_telegram_bot.py": ["/src/bot_factory.py"], "/src/bot_factory.py": ["/src/bots.py"]} |
51,954 | indionapolis/test-task-SBER | refs/heads/main | /src/bot_factory.py | from src.bots import TelegramBot, Bot
BOTS = {"TELEGRAM": TelegramBot}
class BotFactory:
    """Create bot instances from the BOTS registry, keyed by platform name."""

    @staticmethod
    def get_instance(bot_type: str) -> Bot:
        """Instantiate the bot class registered under *bot_type*.

        Raises ValueError when the type is not registered.
        """
        bot_cls = BOTS.get(bot_type)
        if bot_cls is None:
            raise ValueError(f"bot type {bot_type} is not available")
        return bot_cls()
| {"/src/bots.py": ["/src/actions.py"], "/src/actions.py": ["/src/restaurant_client.py"], "/tests/test_actions.py": ["/src/actions.py"], "/tests/test_telegram_bot.py": ["/src/bot_factory.py"], "/src/bot_factory.py": ["/src/bots.py"]} |
51,972 | vincycode7/maistore-jwt-extented | refs/heads/master | /resources/product.py | import sqlite3
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
from models.product import ProductModel
# Product collection resource (original header comment said "user" -- stale copy/paste)
class ProductList(Resource):
    """REST resource listing every product."""

    @jwt_required()  # authentication required
    def get(self):
        """Return all products; 404 when there are none.

        Fixes: 200 for a successful read (the original returned 201, which
        is reserved for resource creation) and 404 for "not found" (was 400).
        """
        products = ProductModel.find_all()
        if products:
            return {"items": [product.json() for product in products]}, 200
        return {"message": 'Items not found'}, 404
class Product(Resource):
    """REST resource for a single product, addressed by product name."""

    # parser is a class variable shared by all request handlers
    parser = reqparse.RequestParser()
    parser.add_argument('productname', type=str, required=True, help="productname field is required")
    parser.add_argument('price', type=float, required=True, help="price field is requried")
    parser.add_argument('quantity', type=int, required=True, help="quantity field is required")
    parser.add_argument("category", type=str, required=True, help="category the product falls in is required")
    parser.add_argument("store_id", type=str, required=True, help="store_id of the user posting the product is required")

    @jwt_required()  # authentication required
    def get(self, productname):
        """Return one product by name; 404 when absent (was 201/400)."""
        product = ProductModel.find_by_name(productname=productname)
        if product:
            return {"item": product.json()}, 200
        return {"message": 'Item not found'}, 404

    @jwt_required()
    def post(self, productname):
        """Create a product; the URL name must match the payload's productname."""
        data = Product.parser.parse_args()
        # check form integrity (URL name vs payload name)
        message = ProductModel.check_form_integrity(productname, data)
        if message:
            return message
        product = ProductModel.instance_from_dict(dict_=data)
        try:
            product.save_to_db()
        except Exception as e:
            print(e)  # TODO: switch to logging
            return {"message": "An error occured inserting the item"}, 500  # internal server error
        return product.json(), 201

    @jwt_required()
    def delete(self, productname, username=None, password=None):
        """Delete a product by name.

        username/password are accepted for backward compatibility but unused.
        Fixes: 404 for a missing resource (the original returned 401
        "Unauthorized" while its own comment said 400).
        """
        product = ProductModel.find_by_name(productname=productname)
        if product:
            product.delete_from_db()
            return {"message": "Item deleted"}, 200
        return {"message": "Item Not in database"}, 404

    @jwt_required()
    def put(self, productname):
        """Create or update (upsert) a product from the request payload."""
        data = Product.parser.parse_args()
        message = ProductModel.check_form_integrity(productname, data)
        if message:
            return message
        product = ProductModel.find_by_name(productname=data["productname"])
        if product:
            # update every submitted field in place
            for each in data.keys():
                product.__setattr__(each, data[each])
            product.save_to_db()
        else:
            # insert a brand-new product
            product = ProductModel.instance_from_dict(dict_=data)
            product.save_to_db()
        return product.json(), 201
| {"/resources/product.py": ["/models/product.py"], "/resources/user.py": ["/models/user.py"], "/app.py": ["/resources/store.py", "/resources/user.py", "/resources/product.py"], "/resources/store.py": ["/models/store.py"]} |
51,973 | vincycode7/maistore-jwt-extented | refs/heads/master | /resources/user.py | from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
from werkzeug.security import safe_str_cmp
from flask_jwt_extended import create_access_token, create_refresh_token
from models.user import UserModel
#class to list all user
class UserList(Resource):
    """REST resource listing every registered user."""

    @jwt_required()
    def get(self):
        """Return all users; 404 when there are none (was 201/400)."""
        users = UserModel.find_all()
        if users:
            return {"users": [user.json() for user in users]}, 200
        return {"message": 'Item not found'}, 404
#class to register user
class UserRegister(Resource):
    """Registration endpoint: creates a new user account."""

    parser = reqparse.RequestParser()
    parser.add_argument(name="username", type=str, required=True, help="username cannot be blank",case_sensitive=False)
    parser.add_argument(name="password", type=str, required=True, help="password cannot be blank")
    parser.add_argument(name="email", type=str, required=True, help="email cannot be blank")

    def post(self):
        """Create a user after checking the e-mail is not already taken.

        Duplicate usernames are allowed (the username check was commented
        out in the original source). Debug print() calls were removed.

        SECURITY NOTE(review): the password is stored as received and echoed
        back in the response via user.json() -- it should be hashed and
        omitted; confirm before exposing this endpoint.
        """
        data = UserRegister.parser.parse_args()
        # reject duplicate e-mail addresses
        if UserModel.find_by_email(email=data["email"]):
            return {"message": f"email {data['email']} already exists."}, 400  # bad request
        user = UserModel(**data)
        try:
            user.save_to_db()
        except Exception as e:
            print(e)  # TODO: switch to logging
            return {"message": "An error occured inserting the item"}, 500  # internal server error
        return user.json(), 201
#class to create user and get user
class User(Resource):
    """REST resource for a single user, addressed by username."""

    @jwt_required()
    def get(self, username):
        """Return one user by name; 404 when absent (was 201/400)."""
        user = UserModel.find_by_username(username=username)
        if user:
            return {"user": user.json()}, 200
        return {"message": 'user not found'}, 404

    @jwt_required()
    def put(self, username):
        """Create or update (upsert) a user.

        On update, the submitted e-mail may only match this user's own
        address; any other existing address is rejected.
        """
        data = UserRegister.parser.parse_args()
        user = UserModel.find_by_username(username=username)
        email = UserModel.find_by_email(email=data["email"])
        if user:
            # update the existing account
            try:
                if email and not (email.email == user.email):
                    return {"message": f"email {data['email']} already exists."}, 400
                for each in data.keys():
                    user.__setattr__(each, data[each])
                user.save_to_db()
            except Exception as e:
                print(f"error is {e}")  # TODO: switch to logging
                return {"message": "An error occured updating the item the item"}, 500
        else:
            # insert a brand-new account
            user = UserModel(**data)
            try:
                if email:
                    return {"message": f"email {data['email']} already exists."}, 400
                user.save_to_db()
            except Exception as e:
                print(f"error is {e}")
                return {"message": "An error occured inserting the item"}, 500
        return user.json(), 201

    @jwt_required()
    def delete(self, username, password=None):
        """Delete a user by name; 404 when absent (was 400)."""
        user = UserModel.find_by_username(username=username)
        if user:
            user.delete_from_db()
            return {"message": "User deleted"}, 200
        return {"message": "User Not found"}, 404
class UserExt(Resource):
    """User resource addressed by numeric id (flask-jwt-extended style)."""

    @classmethod
    def get(cls, user_id):
        """Look a user up by id; 404 when no such row exists."""
        user = UserModel.find_by_id(id=user_id)
        if user is None:
            return {"message": "User not found"}, 404
        return {"user": user.json()}, 200

    @classmethod
    def delete(cls, user_id):
        """Delete a user by id; 404 when no such row exists."""
        user = UserModel.find_by_id(id=user_id)
        if user is None:
            return {"message": "User not found"}, 404
        user.delete_from_db()
        return {"message": "User deleted."}, 200
class UserLogin(Resource):
    """Login endpoint issuing a JWT access/refresh token pair."""

    parser = reqparse.RequestParser()
    parser.add_argument(name="userid", type=str, required=True, help="userid cannot be blank",case_sensitive=False)
    parser.add_argument(name="password", type=str, required=True, help="password cannot be blank")

    @classmethod
    def post(cls):
        """Validate credentials and return tokens; 401 on failure.

        Fixes: the original returned the "Invalid credentials" body with no
        status code, so flask-restful emitted an implicit 200 for a failed
        login.

        SECURITY NOTE(review): passwords appear to be stored and compared in
        plain text (safe_str_cmp on user.password) -- confirm and hash.
        """
        # get the submitted credentials
        data = cls.parser.parse_args()
        # find the user in the database
        user = UserModel.find_by_id(id=data['userid'])
        if user and safe_str_cmp(user.password, data['password']):
            access_token = create_access_token(identity=user.id, fresh=True)
            refresh_token = create_refresh_token(user.id)
            return {
                "access_token": access_token,
                "refresh_token": refresh_token
            }, 200
        return {"message": "Invalid credentials"}, 401
51,974 | vincycode7/maistore-jwt-extented | refs/heads/master | /app.py | import os,db
from resources.store import Store, StoreList
from resources.user import User, UserList, UserRegister, UserExt, UserLogin
from resources.product import Product, ProductList
from flask import Flask, request
# from flask_jwt import JWT, jwt_required
from flask_jwt_extended import JWTManager
from flask_restful import Resource, Api, reqparse
"""
Flask is the main framework for the project
flask_jwt is used for authentication via tokens
flask_restful makes working with flask alot easier
Flask SQLAlchemy is used to easily store data to a relational database
"""
#export PATH="$PATH:/home/vcode/.local/bin"
#runner : reset && python app.py
app = Flask(__name__)
# DATABASE_URL from the environment (e.g. Heroku), sqlite file fallback for dev
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL','sqlite:///data.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['PROPAGATE_EXCEPTIONS'] = True
# SECURITY NOTE(review): hard-coded secret key committed to source -- move to env/config.
app.secret_key = "vcode" #always remember to get the apps's secret key, also this key should be hidden from the public.
api = Api(app=app)
@app.before_first_request
def create_tables():
    # NOTE(review): at this point `db` is the *module* (from `import os,db` at
    # the top); `from db import db` only rebinds it under __main__ below.
    # This works only if the db module exposes create_all at module level --
    # confirm against db.py.
    db.create_all()
# jwt = JWT(app, authenticate, identity) #creates a new end point called */auth*
jwt = JWTManager(app) #This doesn't create the auth endpoint
#User
api.add_resource(UserRegister, "/register") #https://mistore.com/register
api.add_resource(UserLogin,"/login")
api.add_resource(User, '/user/<string:username>') #https://mistore.com/gbenga
api.add_resource(UserExt, '/users/<int:user_id>')
# api.add_resource(Users, '/user/<string:name>?<string:password>') #https://mistore.com/gbenga
api.add_resource(UserList , "/users") #https://mistore.com//student
#store
api.add_resource(Store, "/store/<string:storename>") #https://maistore.com/store/shoprite
api.add_resource(StoreList, "/stores") #https://maistore.com/store
#product
api.add_resource(ProductList, "/products") #https://mistore.com/product
api.add_resource(Product, '/product/<string:productname>') #https://mistore.com/product/bags
if __name__ == "__main__":
    from db import db
    db.init_app(app)
    app.run(port=5000, debug=True)
51,975 | vincycode7/maistore-jwt-extented | refs/heads/master | /resources/store.py | from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
from models.store import StoreModel
class Store(Resource):
    """REST resource for a single store, addressed by store name."""

    parser = reqparse.RequestParser()
    parser.add_argument(name="storename", required=True, help="a store name is required to proceed", type=str,case_sensitive=False)
    parser.add_argument(name="user_id", required=True, help="only active users can create a store", type=str,case_sensitive=False)
    parser.add_argument(name="location", required=False, help="a store location is good for business",case_sensitive=False)

    def get(self, storename):
        """Return one store by name; 404 when absent."""
        store = StoreModel.find_by_name(storename=storename)
        if store:
            return store.json()
        else:
            return {"message": "store not found"}, 404

    def post(self, storename):
        """Create a store; rejects duplicates by lower-cased name.

        NOTE(review): only post() lower-cases the name -- get()/delete() look
        the raw name up; confirm whether case-insensitive lookup is intended.
        """
        storename = storename.lower()
        if StoreModel.find_by_name(storename=storename):
            return {"message": f"A store with name {storename} already exists"}, 400
        data = Store.parser.parse_args()
        message = StoreModel.check_form_integrity(storename=storename, data=data)
        if message:
            return message
        store = StoreModel(**data)
        try:
            store.save_to_db()
        except Exception as e:
            print(e)  # TODO: switch to logging
            return {"message": "An error occured while creating the store."}, 500
        return store.json(), 201

    def delete(self, storename):
        """Delete a store by name.

        Fixes: the original fell off the end (None -> empty 200 body) when
        the store did not exist; now returns 404.
        """
        store = StoreModel.find_by_name(storename=storename)
        if store:
            store.delete_from_db()
            return {"message": "Store deleted"}
        return {"message": "store not found"}, 404
class StoreList(Resource):
    """REST resource listing every store."""

    def get(self):
        """Return all stores (an empty list when there are none).

        Fixes: the original called StoreModel.findall(), which does not
        exist (the model defines find_all), raising AttributeError on every
        request, and returned None (empty 200 body) when no stores existed.
        """
        stores = StoreModel.find_all()
        return {"stores": [store.json() for store in stores]}
51,976 | vincycode7/maistore-jwt-extented | refs/heads/master | /models/user.py | #import packages
from db import db
#class to create user and get user
class UserModel(db.Model):
    """SQLAlchemy model for an application user account."""
    __tablename__ = "user"
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(30))
    # SECURITY NOTE(review): password is stored and serialized in plain text -- confirm and hash.
    password = db.Column(db.String(80))
    email = db.Column(db.String(100))
    # stores owned by this user; lazy="dynamic" yields a query object, hence .all() below
    stores = db.relationship("StoreModel", lazy="dynamic")
    def __init__(self, username, password, email):
        self.username = username
        self.password = password
        self.email = email
    # a json representation
    def json(self):
        """Serialize the user, including the raw password and each owned store.

        NOTE(review): StoreModel.json() embeds this user again, so serializing
        a user that owns stores can recurse indefinitely as the models are
        written -- verify against models/store.py.
        """
        return {
            "id" : self.id,
            "username" : self.username,
            "password" : self.password,
            "email" : self.email,
            "stores" : [store.json() for store in self.stores.all()]
        }
    def save_to_db(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()
    def delete_from_db(self):
        """Delete this row and commit."""
        db.session.delete(self)
        db.session.commit()
    @classmethod
    def find_all(cls):
        """Return every user row."""
        results = cls.query.all()
        return results
    @classmethod
    def find_by_username(cls, username=None):
        """Return the first user with this username, or None."""
        result = cls.query.filter_by(username=username).first()
        return result
    @classmethod
    def find_by_email(cls, email=None):
        """Return the first user with this e-mail address, or None."""
        result = cls.query.filter_by(email=email).first()
        return result
    @classmethod
    def find_by_id(cls, id):
        """Return the user with this primary key, or None.

        The parameter is deliberately named ``id`` (shadowing the builtin)
        because callers pass it as a keyword: find_by_id(id=...).
        """
        result = cls.query.filter_by(id=id).first()
        return result
    @classmethod
    def check_form_integrity(cls,username=None, data=None):
        """Validate a parsed form; return (message, status) on failure, False when OK."""
        #check if form is empty
        if data == None: return {"message" : "Invalid object type, use json."}, 404
        #confirm the unique key to be same with the product route
        # if username != data['username']:
        #     return {"message" : f"user {username} does not match {data['username']} in the form"}, 40
        #implement later
        return False
51,977 | vincycode7/maistore-jwt-extented | refs/heads/master | /models/product.py | from db import db
class ProductModel(db.Model):
    """SQLAlchemy model for a product sold by a store."""

    __tablename__ = "product"

    id = db.Column(db.Integer, primary_key=True)
    productname = db.Column(db.String(40))
    price = db.Column(db.Float(precision=2))
    quantity = db.Column(db.Integer)
    category = db.Column(db.String(40))
    store_id = db.Column(db.Integer, db.ForeignKey("store.id"))
    store = db.relationship("StoreModel")

    def __init__(self, productname, price, store_id, quantity=0, category=None):
        self.productname = productname
        self.price = price
        self.quantity = quantity
        self.category = category
        self.store_id = store_id

    def json(self):
        """Serialize the product, embedding the owning store."""
        return {
            "id" : self.id,
            "productname" : self.productname,
            "price" : self.price,
            "quantity" : self.quantity,
            "category" : self.category,
            "store" : self.store.json()
        }

    def save_to_db(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this row and commit."""
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def find_all(cls):
        """Return every product row."""
        return cls.query.all()

    @classmethod
    def find_by_name(cls, productname=None):
        """Return the first product with this name, or None."""
        return cls.query.filter_by(productname=productname).first()

    @classmethod
    def find_by_id(cls, _id):
        """Return the product with primary key *_id*, or None.

        Fixes: the original filtered on the built-in ``id`` function
        (filter_by(id=id)) instead of the ``_id`` argument, so the lookup
        could never match the requested row.
        """
        return cls.query.filter_by(id=_id).first()

    @classmethod
    def check_form_integrity(cls, productname, data):
        """Validate the parsed form against the URL's product name.

        Returns a (message, status) tuple on failure, False when valid.
        """
        # check if form is empty
        if data is None:
            return {"message" : "Invalid object type, use json."}, 404
        # the name in the URL must match the name in the payload
        if productname != data['productname']:
            # Fixes: the original formatted data['name'], a key that does not
            # exist in this parser, raising KeyError on every mismatch.
            return {"message" : f"product {productname} does not match {data['productname']} in the form"}, 404
        return False

    @classmethod
    def instance_from_dict(cls, dict_):
        """Build a ProductModel from a parsed-args dict."""
        return cls(
            productname=dict_.get('productname'),
            price=dict_.get('price'),
            quantity=dict_.get('quantity', None),
            category=dict_.get('category', None),
            store_id=dict_.get('store_id')
        )
51,978 | vincycode7/maistore-jwt-extented | refs/heads/master | /models/store.py | from db import db
class StoreModel(db.Model):
    """SQLAlchemy model for a store owned by a user."""

    __tablename__ = "store"

    id = db.Column(db.Integer, primary_key=True)
    storename = db.Column(db.String(40))
    # NOTE(review): `userid` is never assigned anywhere in this codebase
    # (`user_id` below is the real foreign key) -- kept so the table schema
    # is unchanged; confirm before dropping.
    userid = db.Column(db.String(40))
    location = db.Column(db.String(200))
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship("UserModel")
    products = db.relationship("ProductModel", lazy="dynamic")

    def __init__(self, storename, user_id, location=None):
        self.storename = storename
        self.user_id = user_id
        self.location = location

    def json(self):
        """Serialize the store with shallow user/product summaries.

        Fixes mutual recursion: the original embedded user.json() (which
        embeds every store again) and each product.json() (which embeds its
        store again), so serializing any linked row recursed without bound.
        Nested objects are now rendered shallowly.
        """
        return {
            "id" : self.id,
            "storename" : self.storename,
            "user" : {"id" : self.user.id, "username" : self.user.username, "email" : self.user.email},
            "products" : [{"id" : p.id, "productname" : p.productname, "price" : p.price,
                           "quantity" : p.quantity, "category" : p.category}
                          for p in self.products.all()]
        }

    def save_to_db(self):
        """Add this row to the session and commit."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this row and commit."""
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def find_all(cls):
        """Return every store row."""
        return cls.query.all()

    @classmethod
    def find_by_name(cls, storename=None):
        """Return the first store with this name, or None."""
        return cls.query.filter_by(storename=storename).first()

    @classmethod
    def find_by_id(cls, _id):
        """Return the store with primary key *_id*, or None.

        Fixes: the original filtered on the built-in ``id`` function
        instead of the ``_id`` argument.
        """
        return cls.query.filter_by(id=_id).first()

    @classmethod
    def check_form_integrity(cls, storename, data):
        """Validate the parsed form against the URL's store name.

        Returns a (message, status) tuple on failure, False when valid.
        """
        if data is None:
            return {"message" : "Invalid object type, use json."}, 404
        if storename != data['storename']:
            # message previously said "product"; this resource is a store
            return {"message" : f"store {storename} does not match {data['storename']} in the form"}, 404
        return False

    @classmethod
    def instance_from_dict(cls, dict_):
        """Build a StoreModel from a parsed-args dict."""
        return cls(
            storename=dict_.get('storename'),
            user_id=dict_.get('user_id'),
            location=dict_.get('location', None),
        )
51,981 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /partB.py | """
This file runs the required components for part B
"""
import localization

# Shared Localization instance used by every part-B entry point below.
robot_local = localization.Localization()

def compare_sequence_controls(noise=False):
    """ compares the control sequence listed in question 2 between the
    motion model and the filter

    NOTE(review): the `noise` flag is accepted but never forwarded to
    control_sequence() -- confirm whether it should be passed through.
    """
    robot_local.control_sequence()

def run_filter(iter):
    """ compares the result from running the UKF on the odometry and measurement
    data sets to dead reckoning and to the ground truth

    `iter` is the number of odometry lines to process (None = run them all).
    The parameter name shadows the built-in iter() inside this function.
    """
    robot_local.robot_localization(iter)

def filter_results():
    """ plots the filter results that were pre-ran """
    robot_local.plot_results()
    #robot_local.load_data()

def show_states():
    """ plots states of robot (x, y, theta) """
    robot_local.plot_states()
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,982 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /measure.py |
"""
This file contains the sensor model that estimates the
range and bearing of landmarks and other robots
"""
from params import*
import numpy as np
def sensor(landmark, pose, noise=False):
    """ computes the expected range and bearing given a landmark and the robot pose
    with the option to add noise

    Args:
        landmark (array): global x and y position of a landmark
        pose (array): x, y, theta of robot
        noise (bool): when True, perturb the measurement with Gaussian noise
            (NOTE(review): np.random.normal takes a standard deviation, but
            std_r**2 / std_b**2 -- variances -- are passed; confirm intent)
    Returns
        (array): range and bearing of the landmark; the bearing is relative
        to the robot heading and wrapped into (-pi, pi]
    """
    # expected values (local renamed from `range`, which shadowed the built-in)
    dist = np.sqrt((pose[0]-landmark[0])**2 + (pose[1]-landmark[1])**2)
    bearing = np.arctan2(landmark[1]-pose[1], landmark[0]-pose[0]) - pose[2]
    # wrap the bearing into (-pi, pi]; a single correction suffices because
    # atan2 lies in (-pi, pi] and the motion model keeps theta in [-pi, pi]
    if bearing > np.pi:
        bearing -= 2*np.pi
    elif bearing < -np.pi:
        bearing += 2*np.pi
    if noise:
        n_r = np.random.normal(0, std_r**2)
        n_b = np.random.normal(0, std_b**2)
        return [dist + n_r, bearing + n_b]
    else:
        return [dist, bearing]
def landmark_position(meas_lm, pose):
    """ determines global position of landmark given range, bearing, and robots pose

    The bearing produced by sensor() is relative to the robot's heading (it
    subtracts pose[2]), so inverting the measurement must add the heading
    back. Fixes: the original omitted pose[2] entirely and was therefore
    only correct when theta == 0.

    Args:
        meas_lm (array): range, bearing (robot-frame, as returned by sensor)
        pose (array): x, y, and theta of the robot
    Returns:
        (array): global x and y of the landmark
    """
    r = meas_lm[0]
    b = meas_lm[1]
    theta = pose[2]
    x_lm = r*np.cos(b + theta) + pose[0]
    y_lm = r*np.sin(b + theta) + pose[1]
    return [x_lm, y_lm]
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,983 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /run.py |
"""
All files written in python 3. See ReadMe.txt for help.
This file runs the required components for
part A by calling partA.py and the required components for part B by calling
partB.py. It provides the figures for questions
2 and 3, then prints the results to question
6 to the terminalself.
"""
from partA import*
from partB import*
if __name__ == "__main__":
#### PART B ####
# compare the control sequence between dead reckoning and UKF
# from question 8
#compare_sequence_controls()
# Specify how long to run the filter by setting iter.
# The nubmer of iteration corresponds to the number of lines
# in ds0_Odometry.dat: 0 -> 95818.
# Set iter = None and it will run until the end
# Takes aprrox. 40s to run the entire data set on i7 32GB RAM
# iter = None
iter = 5000
run_filter(iter)
# If you do not want to wait for the full data set to run
# uncomment the line bellow to plot the results from the full filter
# that were saved to filter_output.txt
# filter_results()
# plot the states x,y,theta of the robot versus time
#show_states()
#### PART A ####
#
# # Want to add noise to motion and sensor model?
# noise = True
#
# # plot for question 2
# question_two(noise)
#
# # plot for question 3 may take ~5 seconds
# # arguements are the file paths defined in params.py
# question_three(odom_path, ground_truth, noise)
#
# # print results for quesion 6
# question_six(noise)
#
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,984 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /motion.py |
"""
This file contains the motion model for a simple
differential drive mobile robot. In the robots frame
positive x is forward and positive y is to the left.
The control commands are forward velocity and angular
velocity. Positive angular velocity if considered counterclockwise.
"""
import numpy as np
from params import*
def mobile_robot(u, pose, dt, noise=False):
    """Advance a differential-drive robot one time step.

    Args:
        u (array): [v, w] -- forward velocity and angular velocity
        pose (array): [x, y, theta] -- previous pose
        dt (float): time step duration
        noise (bool): when True, add Gaussian noise to the new pose
    Returns:
        list: [x, y, theta] -- new pose, theta wrapped to [-pi, pi]
    """
    v, w = u[0], u[1]
    x, y, theta = pose[0], pose[1], pose[2]
    # displacement in the global frame over dt
    dx = v*np.cos(theta)*dt
    dy = v*np.sin(theta)*dt
    theta = theta + w*dt
    # fold theta back into a single revolution (fractional part of the
    # revolution count, truncated toward zero), i.e. into (-2pi, 2pi)
    num_rev = theta/(2*np.pi)
    rev_frac = num_rev - int(num_rev)
    theta = rev_frac*2*np.pi
    # then into the canonical [-pi, pi] interval
    if theta > np.pi:
        theta -= 2*np.pi
    elif theta < -np.pi:
        theta += 2*np.pi
    if not noise:
        return [dx + x, dy + y, theta]
    n_dx = np.random.normal(0, std_dx**2)
    n_dy = np.random.normal(0, std_dy**2)
    n_dtheta = np.random.normal(0, std_dtheta**2)
    return [dx + x + n_dx, dy + y + n_dy, theta + n_dtheta]
#
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,985 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /utils.py | """
utility functions
"""
from params import*
def landmark_data(lm_dict):
    """ creates a dictionary of landmark ground truth data

    Fills lm_dict in place: subject number -> [global x, global y].
    The handle is managed by `with` so it is closed even if parsing raises.
    """
    with open(landmark_truth, "r") as file:
        for line in file:
            if not line.startswith("#"):
                values = line.split()
                # subject number, global x, global y
                lm_dict.update({float(values[0]) : [float(values[1]), float(values[2])]})
def barcode_data(barcodes_dict):
    """ creates a dictionary mapping barcodes to subject numbers

    Fills barcodes_dict in place: barcode -> subject number. The handle is
    managed by `with` so it is closed even if parsing raises.
    """
    with open(barcodes_file, "r") as file:
        for line in file:
            if not line.startswith("#"):
                values = line.split()
                key = int(values[1])
                subject = int(values[0])
                # landmarks have subject numbers 6 -> 20; lower ids are skipped
                if subject >= 6:
                    # key is the barcode number, value is the subject number
                    barcodes_dict.update({key : subject})
def measurement_data(measurement_mat):
    """ creates a matrix for the measurements made by the robot

    Appends one [time, id, range, bearing] row per measurement line (the id
    column is an int -- presumably the barcode, per barcode_data; confirm
    against the dataset docs). The handle is managed by `with` so it is
    closed even if parsing raises.
    """
    with open(measure_data, "r") as file:
        for line in file:
            if not line.startswith("#"):
                values = line.split()
                meas = [float(values[0]), int(values[1]), float(values[2]), float(values[3])]
                measurement_mat.append(meas)
def odometry_data(odometry_mat):
    """ creates a matrix for the odometry data

    Appends one [time, v, w] row per odometry line. The handle is managed by
    `with` so it is closed even if parsing raises.
    """
    with open(odom_path, "r") as file:
        for line in file:
            if not line.startswith("#"):
                values = line.split()
                odom = [float(values[0]), float(values[1]), float(values[2])]
                odometry_mat.append(odom)
def ground_truth_data():
    """ creates a matrix of ground truth robot pose

    Returns [x_list, y_list, theta_list]. The handle is managed by `with`
    so it is closed even if parsing raises.
    """
    x_true = []  # global x position
    y_true = []  # global y position
    theta_true = []  # orientation
    with open(ground_truth, "r") as file_ground:
        for line in file_ground:
            if not line.startswith("#"):
                values = line.split()
                x_true.append(float(values[1]))
                y_true.append(float(values[2]))
                theta_true.append(float(values[3]))
    return [x_true, y_true, theta_true]
def dead_reck_data():
    """ loads the Dead Reckoning data

    Returns [x_list, y_list, theta_list] parsed from motion_model_odom.
    The handle is managed by `with` so it is closed even if parsing raises.
    """
    x = []
    y = []
    theta = []
    with open(motion_model_odom, "r") as file:
        for line in file:
            values = line.split()
            x.append(float(values[0]))
            y.append(float(values[1]))
            theta.append(float(values[2]))
    return [x, y, theta]
def filter_data():
    """ loads the pose for UKF

    Returns [x_list, y_list, theta_list] parsed from filter_output. The
    handle is managed by `with`; the result local is no longer named
    `filter`, which shadowed the built-in.
    """
    x = []
    y = []
    theta = []
    with open(filter_output, "r") as file:
        for line in file:
            values = line.split()
            x.append(float(values[0]))
            y.append(float(values[1]))
            theta.append(float(values[2]))
    return [x, y, theta]
#
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,986 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /params.py |
#### data file paths from UTIAS ####
odom_path = "ds0/ds0_Odometry.dat"
ground_truth = "ds0/ds0_Groundtruth.dat"
landmark_truth = "ds0/ds0_Landmark_Groundtruth.dat"
measure_data = "ds0/ds0_Measurement.dat"
barcodes_file = "ds0/ds0_Barcodes.dat"
#### data file paths I wrote to #####
motion_model_odom = "ds0/motion_model_odom.txt" # dead-reckoned pose trace
filter_output = "ds0/filter_output.txt" # saved UKF pose estimates
#### UKF parameters ####
n = 3 # state dimension (x, y, theta)
pts = 2*n+1 # number of sigma points
beta = 2 # prior-distribution parameter (2 is the standard choice for Gaussians)
alpha = 10**-5 # controls the spread of the sigma points about the mean
lamda = (alpha**2)*n-n # UKF scaling parameter "lambda" (spelled to avoid the keyword)
#### motion model noise ####
# standard deviation in pose; units presumably meters/radians -- confirm
# (NOTE(review): motion.py passes std**2, a variance, as np.random.normal's scale)
std_dx = 0.004 # 0.004 ~ 4 mm
std_dy = 0.004 # 0.004 ~ 4 mm
std_dtheta = 0.0085 # 0.0085 rad ~ 0.5 deg
# std_dx = 4e-3 # mm
# std_dy = 4e-3 # mm
# std_dtheta = 8.5e-2 # rad
#### sensor model noise ####
# standard deviation in range and bearing
std_r = 0.002 # 0.002 ~ 2 mm (original comment said "4 mm" -- confirm intended value)
std_b = 0.0085 # 0.0085 rad ~ 0.5 deg
# std_r = 4e-2 # mm
# std_b = 8.5e-3 # rad
#
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,987 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /partA.py | """
All files written in python 3
This file runs the required components for
part A. It provides the figures for questions
2 and 3, then prints the results to question
6 to the terminal
"""
from motion import mobile_robot
from measure import*
from params import*
from utils import landmark_data
import matplotlib.pyplot as plt
import numpy as np
def question_two(noise):
    """Drive the motion model through the fixed control sequence of
    question 2 and plot the resulting trajectory.

    Args:
        noise (bool): forwarded to the motion model to perturb each step.
    """
    # control inputs: row 0 = forward velocity, row 1 = angular velocity;
    # each column is one command applied for dt seconds
    controls = np.array([[0.5, 0.0, 0.5, 0.0, 0.5],
                         [0.0, -1/(2*np.pi), 0.0, 1/(2*np.pi), 0.0]])
    dt = 1  # each command is applied for one second
    # start at the origin, facing along +x
    pose = np.array([0, 0, 0])
    poses = [pose]
    for k in range(controls.shape[1]):
        pose = mobile_robot(controls[:, k], pose, dt, noise)
        poses.append(pose)
    # rows: poses over time; columns: x, y, theta
    traj = np.array(poses)
    plt.figure(dpi=110, facecolor='w')
    plt.plot(traj[:, 0], traj[:, 1], 'red', linewidth=2)
    plt.xlabel("Global X Positon")
    plt.ylabel("Global Y Position")
    plt.title("Motion of Robot Given Control Sequence")
    plt.show()
def question_three(odom_file, ground_file, noise):
    """Dead-reckon the robot through the recorded odometry commands and
    plot the result on top of the ground-truth trajectory.

    Args:
        odom_file (str): odometry log; each non-'#' line is "time v w"
        ground_file (str): ground-truth log; each non-'#' line is
            "time x y theta"
        noise (bool): forwarded to the motion model to enable/disable noise
    """
    # odometry controls and their time stamps
    v = []  # linear velocity
    w = []  # angular velocity
    t = []  # time stamps
    # ground-truth pose of the robot
    x_true = []
    y_true = []
    theta_true = []
    # parse the odometry log.
    # BUG FIX: the original closed file_odom twice and never closed
    # file_ground; with-statements guarantee both handles are released.
    with open(odom_file, "r") as file_odom:
        for line in file_odom:
            if not line.startswith("#"):
                values = line.split()
                t.append(float(values[0]))
                v.append(float(values[1]))
                w.append(float(values[2]))
    # parse the ground-truth log
    with open(ground_file, "r") as file_ground:
        for line in file_ground:
            if not line.startswith("#"):
                values = line.split()
                x_true.append(float(values[1]))
                y_true.append(float(values[2]))
                theta_true.append(float(values[3]))
    # initialize the pose with the first ground-truth sample
    pose = [x_true[0], y_true[0], theta_true[0]]
    curr_time = 0
    # trajectory lists: x, y, theta
    traj = [[pose[0]], [pose[1]], [pose[2]]]
    # propagate every odometry command through the motion model
    for cmd in range(len(v)):
        dt = t[cmd] - curr_time
        curr_time = t[cmd]
        pose = mobile_robot([v[cmd], w[cmd]], pose, dt, noise)
        traj[0].append(pose[0])
        traj[1].append(pose[1])
        traj[2].append(pose[2])
    # plot dead reckoning against ground truth
    plt.figure(dpi=110, facecolor='w')
    plt.plot(x_true, y_true, 'red', linewidth=2)
    plt.plot(traj[0], traj[1], 'black', linewidth=2)
    plt.xlabel("Global X Positon")
    plt.ylabel("Global Y Position")
    plt.title("Odometry and Ground Truth Motion Model")
    plt.legend(("Ground Truth","Dead Reckoning"))
    plt.show()
def question_six(noise):
    """Estimate the global position of three landmarks from simulated
    range/bearing measurements and print the estimation error.

    Args:
        noise (bool): forwarded to the sensor model to enable/disable noise
    """
    # known global landmark positions, keyed by subject number
    landmarks = {}
    landmark_data(landmarks)
    # landmark subjects to estimate and the robot pose used for each
    # (theta assumed 0 rad for all three)
    subjects = [6, 13, 17]
    robot_poses = [[2, 3, 0], [0, 3, 0], [1, -2, 0]]
    # collected (x, y) estimates
    estimates = []
    print("-----------------------------")
    print("Landmark Position Estimation")
    print("-----------------------------")
    for subject, pose in zip(subjects, robot_poses):
        # simulated range/bearing reading, then inverted to a global (x, y)
        rb = sensor(landmarks[subject], pose, noise)
        xy = landmark_position(rb, pose)
        estimates.append(xy)
        print("The range and bearing of landmark ", subject, ": ", "range: ", rb[0], "(m)", " bearing: ", rb[1], " (rad)")
        print("The global position of landmark ", subject, ": ", "x: ", xy[0], " (m)", " y: ", xy[1], " (m)")
        print("Error in x: ", landmarks[subject][0] - xy[0], " (m)", " Error in y: ", landmarks[subject][1] - xy[1], " (m)")
        print("----------------------------------------------------------------------------------------------")
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,988 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /localization.py | """
This class runs the robot localization pipeline. Determines robot
pose using UKF
"""
import filter
from motion import mobile_robot
from utils import*
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
class Localization(object):
    """Robot localization pipeline.

    Loads the dataset (landmarks, barcodes, odometry, measurements, ground
    truth, dead reckoning, prior filter output), runs the UKF over the
    odometry/measurement streams, and plots the estimated trajectory
    against dead reckoning and ground truth.
    """
    def __init__(self):
        # seed mu with the initial odometry ground-truth pose (x, y, theta)
        self.mu = np.array([1.29812900, 1.88315210, 2.82870000])
        # initial pose covariance: small but non-zero diagonal
        self.cov_mat = np.array([[.10, 0, 0],
                                 [0, .10, 0],
                                 [0, 0, .20]])
        # subject number -> known global landmark position
        self.lm_dict = {}
        # barcode value -> landmark subject number
        self.lm_barcodes = {}
        # odometry rows (replaced by load_odometry)
        self.odometry = np.zeros((95818, 3), dtype=float)
        # measurement rows (replaced by load_measurements)
        self.measurement = np.zeros((7720, 4), dtype=float)
        # iteration index / subject number recorded at each landmark sighting
        self.lm_iter = []
        self.lm_num = []
        # ground-truth pose series
        self.ground_truth = []
        # dead-reckoning pose series
        self.dead_reck = []
        # pose series from a previously-run UKF localization
        self.filter_data = []
        # number of odometry commands in the dataset
        self.length = 95818
        # the current measurement number
        self.num_z = 0
        # was a landmark detected in the current iteration?
        self.lm_detected = False
        # should the current measurement actually be applied?
        self.use_meas = False
        # barcode of the most recent detection
        self.barcode = None
        # indices of the current and the next odometry rows
        self.odom_ctr1 = 0
        self.odom_ctr2 = 1
        # index of the current measurement row
        self.m_ctr = 0
        # robot (x, y) positions recorded whenever a measurement is used
        self.robot_lm = [[], []]
        # number of landmarks seen
        self.num_lm = 0
        # apply only every `skip`-th measurement (1 = use all)
        self.skip = 1
        # number of landmark measurements actually applied
        self.lm_used = 0
        # when True, the next correction is run with dt = None
        self.dtzero = False
    def load_landmarks(self):
        """Load the known global landmark positions into lm_dict."""
        landmark_data(self.lm_dict)
    def load_barcodes(self):
        """Load the barcode -> subject-number mapping into lm_barcodes."""
        barcode_data(self.lm_barcodes)
    def load_measurements(self):
        """Load every measurement made by the robot."""
        meas = []
        measurement_data(meas)
        self.measurement = np.array(meas)
    def load_odometry(self):
        """Load every odometry command sent to the robot."""
        odom = []
        odometry_data(odom)
        self.odometry = np.array(odom)
    def load_ground_truth(self):
        """Load the ground-truth pose data."""
        self.ground_truth = ground_truth_data()
    def load_dead_data(self):
        """Load the dead-reckoning pose data."""
        self.dead_reck = dead_reck_data()
    def load_filter_data(self):
        """Load pose output written by robot_localization below."""
        self.filter_data = filter_data()
    def load_data(self):
        """Load every dataset the pipeline needs."""
        self.load_landmarks()
        self.load_barcodes()
        self.load_measurements()
        self.load_odometry()
        self.load_ground_truth()
        self.load_dead_data()
        self.load_filter_data()
    def plot_states(self):
        """Plot x, y, and theta against time for filter output,
        dead reckoning, and ground truth."""
        self.load_data()
        # number of samples in each series
        num_g = 87676
        num_dr = len(self.dead_reck[0])
        num_f = len(self.filter_data[0])
        # end time of the dataset (s)
        tf = 1387.28
        # time axes for each series
        tvec_gt = np.linspace(0, tf, num_g)
        tvec_dr = np.linspace(0, tf, num_dr)
        tvec_f = np.linspace(0, tf, num_f)
        # plot x position
        plt.figure(dpi=110, facecolor='w')
        plt.plot(tvec_f, self.filter_data[0], 'blue', linewidth=2)
        plt.plot(tvec_dr, self.dead_reck[0], 'black', linewidth=2)
        plt.plot(tvec_gt, self.ground_truth[0], 'red', linewidth=2)
        plt.xlabel("Time (s)")
        plt.ylabel("Global X Position (m)")
        plt.title("Comparing the Global X Position of Robot")
        plt.legend(("Filter","Dead Reckoning", "Ground Truth"))
        plt.xlim(0,1315)
        plt.show()
        # plot y position
        plt.figure(dpi=110, facecolor='w')
        plt.plot(tvec_f, self.filter_data[1], 'blue', linewidth=2)
        plt.plot(tvec_dr, self.dead_reck[1], 'black', linewidth=2)
        plt.plot(tvec_gt, self.ground_truth[1], 'red', linewidth=2)
        plt.xlabel("Time (s)")
        plt.ylabel("Global Y Position (m)")
        plt.title("Comparing the Global Y Position of Robot")
        plt.legend(("Filter","Dead Reckoning", "Ground Truth"))
        plt.xlim(0,1315)
        plt.show()
        # plot theta
        plt.figure(dpi=110, facecolor='w')
        plt.plot(tvec_f, self.filter_data[2], 'blue', linewidth=2)
        plt.plot(tvec_dr, self.dead_reck[2], 'black', linewidth=2)
        plt.plot(tvec_gt, self.ground_truth[2], 'red', linewidth=2)
        plt.xlabel("Time (s)")
        plt.ylabel("Angular Position (rad)")
        plt.title("Comparing the Global Angular Position of Robot")
        plt.legend(("Filter","Dead Reckoning", "Ground Truth"))
        plt.xlim(0,1315)
        plt.show()
    def plot_results(self):
        """Plot the pre-computed filter trajectory against dead reckoning
        and ground truth."""
        self.load_data()
        plt.figure(dpi=110, facecolor='w')
        plt.plot(self.filter_data[0], self.filter_data[1], 'blue', linewidth=2)
        plt.plot(self.dead_reck[0], self.dead_reck[1], 'black', linewidth=2)
        plt.plot(self.ground_truth[0], self.ground_truth[1], 'red', linewidth=2)
        plt.xlabel("Global X Positon")
        plt.ylabel("Global Y Position")
        plt.title("Trajectory of Robot Given Control Sequence")
        plt.legend(("Filter","Dead Reckoning", "Ground Truth"))
        plt.show()
    def control_sequence(self):
        """Compare dead reckoning to the UKF prediction on a fixed control
        sequence (no measurements)."""
        ukf = filter.UKF()
        # initial covariance
        # NOTE(review): `0.004*2` below looks like a typo for 0.004**2
        # (the other two entries are squared) -- confirm before changing
        cov_mat = np.array([[0.004**2, 0, 0],
                            [0, 0.004*2, 0],
                            [0, 0, .0085**2]])
        # control inputs: row 0 linear velocity, row 1 angular velocity
        u = np.array([[0.5, 0.0, 0.5, 0.0, 0.5],
                      [0.0, -1/(2*np.pi), 0.0, 1/(2*np.pi), 0.0]])
        # each command applied for 1 second
        dt = 1
        # both trajectories start at the origin
        pose_dr = np.array([0, 0, 0])
        pose_ukf = pose_dr
        traj_dr = np.array([pose_dr])
        traj_ukf = np.array([pose_ukf])
        for cmd in range(u.shape[1]):
            # dead reckoning with noise on; UKF prediction with no measurement
            pose_dr = mobile_robot(u[:,cmd], pose_dr, dt, True)
            traj_dr = np.append(traj_dr, [pose_dr], axis=0)
            pose_ukf, cov_mat = ukf.unscented_kalman_filter(pose_ukf, cov_mat, u[:,cmd], None, dt)
            traj_ukf = np.append(traj_ukf, [pose_ukf], axis=0)
        plt.figure(dpi=110, facecolor='w')
        plt.plot(traj_dr[:,0], traj_dr[:,1], 'red', linewidth=2)
        plt.plot(traj_ukf[:,0], traj_ukf[:,1], 'blue', linewidth=1)
        plt.xlabel("Global X Positon")
        plt.ylabel("Global Y Position")
        plt.title("Trajectory of Robot")
        plt.legend(("Dead Reckoning", "UKF"))
        plt.show()
    def animate(self, i, trajectory, line1):
        # FuncAnimation callback: move line1 to the i-th trajectory sample
        line1.set_data(trajectory[0][i], trajectory[1][i])
        return line1
    def robot_localization(self, num_iter=None):
        """Main loop: run the UKF over the odometry and measurement streams.

        Writes every estimated pose to filter_output and plots the result.

        Args:
            num_iter (int | None): number of odometry commands to process;
                None processes the whole dataset.
        """
        #### IMPORTANT: all data must be loaded first ####
        self.load_data()
        ukf = filter.UKF()
        # init time
        curr_time = 0
        # initial gaussian belief
        mu = self.mu
        cov_mat = self.cov_mat
        # store trajectory: x, y, theta
        trajectory = [[self.mu[0]], [self.mu[1]], [self.mu[2]]]
        # write the trajectory to file
        # NOTE(review): this handle is never closed; a with-statement or an
        # explicit close() would be safer
        file = open(filter_output, "w")
        file.write(str(self.mu[0]) + " " + str(self.mu[1]) + " " + str(self.mu[2]) + "\n")
        # current measurement
        z = None
        # how many odometry commands to run
        if num_iter == None:
            # run until the end of the dataset
            iter = self.length-1
        else:
            iter = num_iter
        # main loop over odometry commands
        while(self.odom_ctr1 != iter):
            # assume no landmark seen until proven otherwise
            self.lm_detected = False
            self.use_meas = False
            # time stamps of the current and next odometry rows
            time_stamp_odom = self.odometry[self.odom_ctr1, 0]
            time_stamp_odom_next = self.odometry[self.odom_ctr2, 0]
            # time stamp of the current measurement (guard the end of file)
            if self.m_ctr < 7720:
                time_stamp_meas = self.measurement[self.m_ctr, 0]
            else:
                # ensure no measurements are considered after end of file
                time_stamp_meas = 0
            # controls from odometry: [v, w]
            u = [self.odometry[self.odom_ctr1, 1], self.odometry[self.odom_ctr1, 2]]
            #### check for measurements ####
            # current measurement falls between this and the next odom command
            if time_stamp_odom <= time_stamp_meas <= time_stamp_odom_next:
                code = self.measurement[self.m_ctr, 1]
                # barcode maps to a landmark (otherwise it is another robot)
                if code in self.lm_barcodes.keys():
                    self.barcode = code
                    self.lm_detected = True
                else:
                    # skip readings of other robots
                    self.m_ctr += 1
                    self.lm_detected = False
            else:
                # no measurement in this interval
                self.lm_detected = False
            # prediction step with the odometry command (no measurement)
            dt = time_stamp_odom - curr_time
            curr_time = time_stamp_odom
            mu, cov_mat = ukf.unscented_kalman_filter(mu, cov_mat, u, None, dt)
            # advance the odometry indices
            self.odom_ctr1 += 1
            self.odom_ctr2 += 1
            # decide whether the detected measurement should be applied
            if self.lm_detected == True:
                # first landmark ever detected: apply it
                if self.m_ctr == 0:
                    self.use_meas = True
                # new time stamp relative to the previous measurement: apply
                elif self.measurement[self.m_ctr, 0] != self.measurement[self.m_ctr-1, 0]:
                    self.use_meas = True
                # duplicate time stamp: skip this measurement
                elif self.measurement[self.m_ctr, 0] == self.measurement[self.m_ctr-1, 0]:
                    self.use_meas = False
                    self.m_ctr += 1
            # landmark detected and this measurement may be applied
            if self.use_meas == True:
                # honor the skip parameter (use every skip-th measurement)
                if self.num_lm % self.skip != 0:
                    self.m_ctr += 1
                    self.num_lm += 1
                else:
                    # update number of landmarks seen
                    self.num_lm += 1
                    # record where the robot was when measuring
                    self.robot_lm[0].append(mu[0])
                    self.robot_lm[1].append(mu[1])
                    self.lm_iter.append(self.odom_ctr1)
                    # map barcode -> subject number
                    subject = self.lm_barcodes[self.barcode]
                    self.lm_num.append(subject)
                    # adjust dt to when the measurement was recorded
                    # (dt = None skips the prediction inside the filter)
                    if self.dtzero == True:
                        dt = None
                        self.dtzero = False
                    else:
                        dt = time_stamp_meas - curr_time
                        curr_time = time_stamp_meas
                    # global position of the subject's landmark
                    lm_pos = self.lm_dict[subject]
                    # range and bearing at the current measurement index
                    r = self.measurement[self.m_ctr, 2]
                    b = self.measurement[self.m_ctr, 3]
                    # measurement vector: landmark x, landmark y, range, bearing
                    z = np.array([lm_pos[0], lm_pos[1], r, b])
                    self.m_ctr += 1
                    self.lm_used += 1
                    # correction step with the measurement
                    mu, cov_mat = ukf.unscented_kalman_filter(mu, cov_mat, u, z, dt)
            # record the belief mean and persist it
            trajectory[0].append(mu[0])
            trajectory[1].append(mu[1])
            trajectory[2].append(mu[2])
            file.write(str(mu[0]) + " " + str(mu[1]) + " " + str(mu[2]) + "\n")
        print("Last time stamp", self.odometry[self.odom_ctr1, 0])
        # decide how much data to plot
        if num_iter == None:
            # full-run plot
            plt.figure(dpi=110, facecolor='w')
            plt.plot(trajectory[0], trajectory[1], 'blue', linewidth=2)
            plt.plot(self.dead_reck[0], self.dead_reck[1], 'black', linewidth=2)
            plt.plot(self.ground_truth[0], self.ground_truth[1], 'red', linewidth=2)
            plt.xlabel("Global X Positon")
            plt.ylabel("Global Y Position")
            plt.title("Trajectory of Robot")
            plt.legend(("UKF","Dead Reckoning", "Ground Truth"))
            plt.show()
        else:
            # partial-run plot, truncated to the requested iteration count
            plt.figure(1, dpi=110, facecolor='w')
            plt.plot(trajectory[0], trajectory[1], 'blue', linewidth=2)
            plt.plot(self.dead_reck[0][0:iter], self.dead_reck[1][0:iter], 'black', linewidth=2)
            plt.plot(self.ground_truth[0][0:iter], self.ground_truth[1][0:iter], 'red', linewidth=2)
            plt.xlabel("Global X Positon")
            plt.ylabel("Global Y Position")
            plt.title("Trajectory of Robot")
            plt.legend(("UKF","Dead Reckoning", "Ground Truth", "Measurement"))
            plt.show()
        # (a large block of commented-out FuncAnimation / incremental
        # scatter-plot scaffolding for recording animations was removed
        # here; see `animate` above for the retained callback)
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,989 | makarandmandolkar/Unscented-Kalman-Filter | refs/heads/master | /filter.py |
"""
UKF implementation for differential drive robot with landmark sensor
"""
import numpy as np
from scipy import linalg
from motion import mobile_robot
from measure import sensor
from params import*
class UKF(object):
    """Unscented Kalman Filter for a differential-drive robot observing
    point landmarks with a range/bearing sensor.

    The state is the robot pose (x, y, theta).  Tuning constants (n, pts,
    alpha, beta, lamda) and the noise standard deviations come from the
    params module; the motion and sensor models come from motion/measure.
    Step numbers in comments refer to the UKF algorithm in Probabilistic
    Robotics (PR), Table 3.4.
    """

    def __init__(self):
        # weight of the central sigma point when averaging means
        self.wm = lamda/(n+lamda)
        # weight of the central sigma point in covariance sums
        self.wc = lamda/(n+lamda) + (1 - (alpha)**2 + beta)
        # all remaining sigma points share this weight
        self.w = 1/(2*(n+lamda))
        # motion (process) noise covariance, diagonal in x, y, theta
        self.R = np.array([[std_dx**2, 0, 0],
                           [0, std_dy**2, 0],
                           [0, 0, std_dtheta**2]])
        # sensor (measurement) noise covariance, diagonal in range, bearing
        self.Q = np.array([[std_r**2, 0],
                           [0, std_b**2]])

    def wrap_two_pi(self, angle):
        """Wrap an angle into (-2*pi, 2*pi), preserving its sign."""
        num_rev = angle/(2*np.pi)
        rev_frac = num_rev - int(num_rev)
        angle = rev_frac*2*np.pi
        return angle

    def wrap_pi(self, angle):
        """Wrap an angle into [-pi, pi]."""
        angle = self.wrap_two_pi(angle)
        if angle > np.pi:
            angle -= 2*np.pi
        elif angle < -np.pi:
            angle += 2*np.pi
        return angle

    def compute_sigma_points(self, mu, cov_mat):
        """Compute the 2n+1 sigma points of the current belief.

        Args:
            mu (np.array): shape 3, mean pose (x, y, theta)
            cov_mat (np.array): shape 3x3, covariance of mu
        Returns:
            np.array: shape 7x3; row 0 is mu, the rest offset mu by
            +/- sqrt(n+lamda) times each row of the covariance square root
            (PR steps 2 and 6)
        """
        # matrix square root of the covariance
        sqrt_cov_mat = linalg.sqrtm(cov_mat)
        # keep only the real part; warn if an imaginary part survived
        sqrt_cov_mat = sqrt_cov_mat.real
        if np.any(np.iscomplex(sqrt_cov_mat)):
            print("ERROR")
            print("The square root of covariance matrix has complex numbers")
            print("ERROR")
            print(sqrt_cov_mat)
        # first sigma point is the mean itself
        sigma_mat = np.array([mu])
        scale = np.sqrt(n+lamda)
        # next n points: offset in the + direction of each sqrt-cov row
        for i in range(0, n):
            sigma_mat = np.append(sigma_mat, [mu + scale*sqrt_cov_mat[i,:]], axis=0)
        # last n points: offset in the - direction
        for i in range(0, n):
            sigma_mat = np.append(sigma_mat, [mu - scale*sqrt_cov_mat[i,:]], axis=0)
        return sigma_mat

    def propagate_sigma_points(self, sigma_mat, u, dt):
        """Pass every sigma point through the (noise-free) motion model.

        Args:
            sigma_mat (np.array): shape 7x3 sigma-point poses
            u: control input [v, w]
            dt (float): time step
        Returns:
            np.array: shape 7x3 propagated sigma points (PR step 3)
        """
        sigma_prime = []
        for i in range(0, pts):
            # BUG FIX: the original propagated sigma_mat[0,:] for every i,
            # collapsing all sigma points onto the mean; each row must be
            # propagated individually (noise is off per PR Table 3.4)
            sigma_prime.append(mobile_robot(u, sigma_mat[i,:], dt))
        return np.array(sigma_prime)

    def predict_mean(self, sigma_mat_star):
        """Weighted mean of the propagated sigma points (PR step 4).

        Args:
            sigma_mat_star (np.array): shape 7x3 propagated sigma points
        Returns:
            np.array: shape 3 predicted mean pose
        """
        mu_bar = np.array([0, 0, 0])
        for i in range(0, pts):
            # first sigma point carries the special mean weight
            w_m = self.wm if i == 0 else self.w
            mu_bar = mu_bar + w_m * sigma_mat_star[i,:]
        return mu_bar

    def predict_covariance(self, mu_bar, sigma_mat_star):
        """Predicted covariance of the pose (PR step 5).

        Args:
            mu_bar (np.array): shape 3 predicted mean
            sigma_mat_star (np.array): shape 7x3 propagated sigma points
        Returns:
            np.array: shape 3x3 predicted covariance
        """
        cov_mat_bar = np.zeros((n,n))
        for i in range(0, pts):
            w_c = self.wc if i == 0 else self.w
            # weighted outer product of each sigma-point deviation
            delta = (sigma_mat_star[i,:] - mu_bar)[np.newaxis]  # 1x3
            cov_mat_bar = cov_mat_bar + w_c * np.dot(delta.T, delta)
        # BUG FIX: the process noise R belongs in the sum exactly once
        # (the original added R on every loop iteration, i.e. pts times)
        cov_mat_bar = cov_mat_bar + self.R
        return cov_mat_bar

    def observation_sigma(self, sigma_mat_new, landmark):
        """Pass each sigma point through the sensor model (PR step 7).

        Args:
            sigma_mat_new (np.array): shape 7x3 sigma points around mu_bar
            landmark: global (x, y) position of the observed landmark
        Returns:
            np.array: shape 7x2 of (range, bearing) per sigma point
        """
        obs = []
        for i in range(0, pts):
            obs.append(sensor(landmark, sigma_mat_new[i,:]))
        return np.array(obs)

    def predicted_observation(self, obs_mat):
        """Weighted mean observation z_hat (PR step 8).

        Args:
            obs_mat (np.array): shape 7x2 sigma-point observations
        Returns:
            np.array: shape 2, predicted (range, bearing)
        """
        z_hat = np.array([0, 0])
        for i in range(0, pts):
            w_m = self.wm if i == 0 else self.w
            z_hat = z_hat + w_m * obs_mat[i,:]
        # keep the predicted bearing in [-pi, pi]
        z_hat[1] = self.wrap_pi(z_hat[1])
        return z_hat

    def uncertainty(self, obs_mat, z_hat):
        """Innovation (measurement) covariance S (PR step 9).

        Args:
            obs_mat (np.array): shape 7x2 sigma-point observations
            z_hat (np.array): shape 2 predicted observation
        Returns:
            np.array: shape 2x2 measurement uncertainty
        """
        uncert_mat = np.zeros((2,2))
        for i in range(0, pts):
            w_c = self.wc if i == 0 else self.w
            delta = (obs_mat[i,:] - z_hat)[np.newaxis]  # 1x2
            # BUG FIX: the original accumulated this outer product twice
            # per iteration and re-added Q on every pass; each weighted
            # term belongs in the sum exactly once
            uncert_mat = uncert_mat + w_c * np.dot(delta.T, delta)
        # measurement noise added once (PR Table 3.4)
        uncert_mat = uncert_mat + self.Q
        return uncert_mat

    def cross_covariance(self, sigma_mat_new, mu_bar, obs_mat, z_hat):
        """State/observation cross-covariance (PR step 10).

        Args:
            sigma_mat_new (np.array): shape 7x3 sigma points around mu_bar
            mu_bar (np.array): shape 3 predicted mean
            obs_mat (np.array): shape 7x2 sigma-point observations
            z_hat (np.array): shape 2 predicted observation
        Returns:
            np.array: shape 3x2 cross-covariance
        """
        cross_cov_mat = np.zeros((3,2))
        for i in range(0, pts):
            w_c = self.wc if i == 0 else self.w
            # deviation of the state sigma point (column) times the
            # deviation of its observation (row)
            d_state = (sigma_mat_new[i,:] - mu_bar)[np.newaxis].T  # 3x1
            d_obs = (obs_mat[i,:] - z_hat)[np.newaxis]             # 1x2
            cross_cov_mat = cross_cov_mat + w_c * np.dot(d_state, d_obs)
        return cross_cov_mat

    def kalman_gain(self, cross_cov_mat, uncert_mat):
        """Kalman gain K = cross_cov * S^-1 (PR step 11).

        Args:
            cross_cov_mat (np.array): shape 3x2 cross-covariance
            uncert_mat (np.array): shape 2x2 innovation covariance
        Returns:
            np.array: shape 3x2 Kalman gain
        """
        # warn if S is numerically singular before attempting the inverse
        if np.isclose(np.linalg.det(uncert_mat), 0.0):
            print("ERROR")
            print("The uncertainty matrix in not invertible ")
            print("ERROR")
        return np.dot(cross_cov_mat, np.linalg.inv(uncert_mat))

    def update_mean(self, mu_bar, kal_gain, z, z_hat):
        """Correct the predicted mean with the measurement (PR step 12).

        Args:
            mu_bar (np.array): shape 3 predicted mean
            kal_gain (np.array): shape 3x2 Kalman gain
            z: measured (range, bearing)
            z_hat (np.array): shape 2 predicted (range, bearing)
        Returns:
            np.array: shape 3 corrected pose mean
        """
        innovation = np.asarray(z) - z_hat
        # BUG FIX: wrap the bearing innovation so a measurement and a
        # prediction straddling the +/-pi cut do not yield a ~2*pi error
        innovation[1] = self.wrap_pi(innovation[1])
        new_mean = mu_bar + np.dot(kal_gain, innovation[np.newaxis].T).T
        # flatten back to shape (3,) so downstream indexing works
        return new_mean.ravel()

    def update_covariance(self, cov_mat_bar, kal_gain, uncert_mat):
        """Correct the predicted covariance (PR step 13).

        Computes P = P_bar - K * S * K^T.

        Args:
            cov_mat_bar (np.array): shape 3x3 predicted covariance
            kal_gain (np.array): shape 3x2 Kalman gain
            uncert_mat (np.array): shape 2x2 innovation covariance
        Returns:
            np.array: shape 3x3 corrected covariance
        """
        return cov_mat_bar - np.dot(np.dot(kal_gain, uncert_mat), kal_gain.T)

    def unscented_kalman_filter(self, mu, cov_mat, u, meas, dt):
        """Run one full UKF predict/correct cycle.

        Args:
            mu (np.array): shape 3, current mean pose
            cov_mat (np.array): shape 3x3, current covariance
            u: control input [v, w]
            meas: None, or length-4 array of (landmark global x,
                landmark global y, range (m), bearing (rad))
            dt (float | None): time step; None skips the prediction,
                used when several measurements share one time stamp
        Returns:
            tuple: (mean, covariance) of the updated belief
        """
        if dt is not None:
            # prediction: sample, propagate, and re-average sigma points
            sigma_mat = self.compute_sigma_points(mu, cov_mat)              # PR step 2
            sigma_mat_star = self.propagate_sigma_points(sigma_mat, u, dt)  # PR step 3
            mu_bar = self.predict_mean(sigma_mat_star)                      # PR step 4
            cov_mat_bar = self.predict_covariance(mu_bar, sigma_mat_star)   # PR step 5
            # BUG FIX: the original tested `np.all(meas) == None`, which is
            # never True; an identity test is the correct "no measurement"
            if meas is None:
                return mu_bar, cov_mat_bar
        else:
            # no prediction: correct directly from the current belief
            mu_bar = mu
            cov_mat_bar = cov_mat
        # split the measurement vector
        landmark = [meas[0], meas[1]]   # landmark global position
        z = [meas[2], meas[3]]          # measured range and bearing
        # correction
        sigma_mat_new = self.compute_sigma_points(mu_bar, cov_mat_bar)      # PR step 6
        obs_mat = self.observation_sigma(sigma_mat_new, landmark)           # PR step 7
        z_hat = self.predicted_observation(obs_mat)                         # PR step 8
        uncert_mat = self.uncertainty(obs_mat, z_hat)                       # PR step 9
        cross_cov_mat = self.cross_covariance(sigma_mat_new, mu_bar, obs_mat, z_hat)  # PR step 10
        kal_gain = self.kalman_gain(cross_cov_mat, uncert_mat)              # PR step 11
        new_mean = self.update_mean(mu_bar, kal_gain, z, z_hat)             # PR step 12
        new_cov_mat = self.update_covariance(cov_mat_bar, kal_gain, uncert_mat)  # PR step 13
        return new_mean, new_cov_mat
| {"/partB.py": ["/localization.py"], "/measure.py": ["/params.py"], "/run.py": ["/partA.py", "/partB.py"], "/motion.py": ["/params.py"], "/utils.py": ["/params.py"], "/partA.py": ["/motion.py", "/measure.py", "/params.py", "/utils.py"], "/localization.py": ["/filter.py", "/motion.py", "/utils.py"], "/filter.py": ["/motion.py", "/measure.py", "/params.py"]} |
51,999 | CodeRecipeJYP/flask-restapi-teamtreehouse | refs/heads/master | /resources/reviews.py | from flask import jsonify, Blueprint
from flask_restful import Resource, Api, reqparse, inputs
import models
class ReviewList(Resource):
    """Collection endpoint for course reviews: list (GET) and create (POST)."""

    def __init__(self):
        # Argument specification for every request to this resource:
        # (name, options) pairs fed to the request parser in order.
        specs = (
            ('course', dict(required=True,
                            type=inputs.positive,
                            help='No course provided',
                            location=['form', 'json'])),
            ('rating', dict(required=True,
                            type=inputs.int_range(1, 5),
                            help='No rating provided',
                            location=['form', 'json'])),
            ('comment', dict(required=False,
                             nullable=True,
                             default='',
                             location=['form', 'json'])),
        )
        self.reqparse = reqparse.RequestParser()
        for name, options in specs:
            self.reqparse.add_argument(name, **options)
        # Standard setup
        super().__init__()

    def get(self):
        # Placeholder payload until a real database-backed listing exists.
        return jsonify({'reviews': [{'course': 1, 'rating': 5}]})

    def post(self):
        # Validate the incoming form/JSON body, persist it, then respond
        # with the (still hard-coded) placeholder payload.
        parsed = self.reqparse.parse_args()
        models.Review.create(**parsed)
        return jsonify({'reviews': [{'course': 1, 'rating': 5}]})
class Review(Resource):
    """Single-review endpoint; every verb currently returns a stub record."""

    def _placeholder(self):
        # One shared stub payload until database-backed lookups are wired up.
        return jsonify({'course': 1, 'rating': 5})

    def get(self, id):
        return self._placeholder()

    def put(self, id):
        return self._placeholder()

    def delete(self, id):
        return self._placeholder()
# Blueprint wiring: ReviewList serves /reviews, Review serves /reviews/<int:id>.
reviews_api = Blueprint('resources.reviews', __name__)
api = Api(reviews_api)
api.add_resource(ReviewList, '/reviews', endpoint='reviews')
api.add_resource(Review, '/reviews/<int:id>', endpoint='review')
| {"/app.py": ["/resources/reviews.py", "/resources/forms.py"]} |
52,000 | CodeRecipeJYP/flask-restapi-teamtreehouse | refs/heads/master | /app.py | from flask import Flask
import models
from resources.courses import courses_api
from resources.reviews import reviews_api
from resources.forms import forms_api
from flask_cors import CORS
HOST = '0.0.0.0'  # bind on all interfaces
PORT = 5000
app = Flask(__name__)
CORS(app)  # allow cross-origin requests (front-end served from another origin)
# Courses mount at the root; reviews and forms are versioned under /api/v1.
app.register_blueprint(courses_api)
app.register_blueprint(reviews_api, url_prefix='/api/v1')
app.register_blueprint(forms_api, url_prefix='/api/v1')
@app.route('/')
def hello_world():
    """Root endpoint returning a static greeting (doubles as a liveness check)."""
    return 'Hello World!'
if __name__ == '__main__':
    # Set up the persistence layer (models.initialize — presumably creates
    # tables; confirm in models.py) before starting the dev server.
    models.initialize()
    app.run(host=HOST, port=PORT)
| {"/app.py": ["/resources/reviews.py", "/resources/forms.py"]} |
52,001 | CodeRecipeJYP/flask-restapi-teamtreehouse | refs/heads/master | /resources/forms.py | from flask import jsonify, Blueprint
from flask_restful import (Resource, Api, reqparse,
inputs, fields, marshal,
marshal_with, url_for, abort)
import models
# Marshalling schema for flask_restful.marshal / marshal_with:
# maps each Form attribute name to its serialized field type.
form_fields = {
    'id': fields.Integer,
    'libraryName': fields.String,
    'libraryLocation': fields.String,
    'managerName': fields.String,
    'managerEmail': fields.String,
    'managerPhonenumber': fields.String,
    'capacityOfAudiences': fields.Integer,
    'facilities': fields.String,
    'requirementsForSpeaker': fields.String,
    'personalInfoAgreement': fields.Boolean,
    'noVolunteerAgreement': fields.Boolean,
    'otherFacilities': fields.String,
}
# Which submission fields are mandatory. FormList.__init__ iterates this dict
# and registers a required request argument for every True entry; False
# entries are accepted but not parsed.
form_requireds = {
    'libraryName': True,
    'libraryLocation': True,
    'managerName': True,
    'managerEmail': True,
    'managerPhonenumber': True,
    'capacityOfAudiences': True,
    'facilities': True,
    'requirementsForSpeaker': False,
    'personalInfoAgreement': True,
    'noVolunteerAgreement': True,
    'otherFacilities': False,
}
def form_or_404(form_id):
    """Look up a Form by primary key; abort the request with 404 when absent."""
    try:
        return models.Form.get(models.Form.id == form_id)
    except models.Form.DoesNotExist:
        abort(404)
class FormList(Resource):
    """Collection endpoint for library application forms."""

    def __init__(self):
        # Register a required argument for every mandatory field; optional
        # fields (False in form_requireds) are left unparsed.
        self.reqparse = reqparse.RequestParser()
        for name, mandatory in form_requireds.items():
            if not mandatory:
                continue
            self.reqparse.add_argument(
                name,
                required=True,
                nullable=False,
                help='No form {} provided'.format(name),
                location=['form', 'json']
            )
        super().__init__()

    def get(self):
        """Serialize and return every stored form."""
        records = [marshal(record, form_fields)
                   for record in models.Form.select()]
        # NOTE(review): the key is 'courses' (copied from the courses module).
        return jsonify({'courses': records})

    @marshal_with(form_fields)
    def post(self):
        """Validate the payload, persist a new Form, and return it marshalled."""
        args = self.reqparse.parse_args()
        print("args={}".format(args))
        created = models.Form.create(**args)
        print("form={}".format(created))
        return created
class Form(Resource):
    """Single library-application form, addressed as /forms/<int:id>."""
    # marshal_with here is equivalent to marshal(form_or_404(id), form_fields).
    @marshal_with(form_fields)
    def get(self, id):
        """Return the marshalled form, or abort with 404 if it does not exist."""
        return form_or_404(id)
    def put(self, id):
        """Stub: update is not implemented yet; returns placeholder JSON."""
        return jsonify({'title': 'Python Basics'})
    def delete(self, id):
        """Stub: delete is not implemented yet; returns placeholder JSON."""
        return jsonify({'title': 'Python Basics'})
# Blueprint wiring: FormList serves /forms, Form serves /forms/<int:id>.
forms_api = Blueprint('resources.forms', __name__)
api = Api(forms_api)
api.add_resource(FormList, '/forms', endpoint='forms')
api.add_resource(Form, '/forms/<int:id>', endpoint='form')
| {"/app.py": ["/resources/reviews.py", "/resources/forms.py"]} |
52,002 | opsiff/djangowebsite | refs/heads/master | /XianYuBack/authorize/utils/jwcAuth.py | import requests
import re
import re
# Static request headers sent with every call to the academic-affairs site.
# NOTE(review): real credentials are hard-coded in formData below — move them
# to environment variables or a config file before publishing this module.
headers = {
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
    # Fixed typo: the original value was 'http//jwch.fzu.edu.cn' (missing colon).
    'Origin': 'http://jwch.fzu.edu.cn',
    'Proxy-Connection': 'keep-alive',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36',
    'Referer': 'http://jwch.fzu.edu.cn/',
}
# Login form payload: student number and password.
formData = {
    "muser": '041702324',
    "passwd": '10086KKDDaaggccd'
}
def parse(text):
    """Extract the student's profile fields from the information-page HTML.

    Returns a dict with name, number (int), sex, birth, college, major and
    grade (int). Raises IndexError when any expected field is missing from
    *text* (the caller treats that as a failed request).
    """
    def field(td_open, span_id):
        # Each value sits inside a <span> whose ASP.NET control id ends with
        # a fixed suffix; the surrounding <td> markup varies per field.
        pattern = (td_open + r'<span id="ContentPlaceHolder1_LB_'
                   + span_id + r'">(.+)</span></td>')
        return re.findall(pattern, text)[0]

    centered = r'<td align="center">'
    return {
        "name": field(centered, "xm"),
        "number": int(field(centered, "xh")),
        "sex": field(centered, "xb"),
        "birth": field(centered, "csrq"),
        "college": field(r'<td colspan="3">', "xymc"),
        "major": field(r'<td colspan="2">', "zymc"),
        "grade": int(field(r'<td>', "nj")),
    }
def request(formData):
    """Log in to the academic-affairs site and fetch the student's profile.

    Posts the credentials in *formData* to the login endpoint, extracts the
    numeric session id embedded in the response, then retrieves and parses
    the student-information page.

    Returns the dict produced by parse() on success, or -1 on any failure
    (network error, rejected login, or a page-layout change).
    """
    try:
        url = 'http://59.77.226.32/logincheck.asp'
        session = requests.session()  # one session so login cookies persist
        response = session.post(url, headers=headers, data=formData)
        html = response.text
        # The landing page embeds the session id in a 'top.aspx?id=NNN' link.
        top = re.search(r'top\.aspx\?id=\d+', html).group()
        num = re.search(r'=\d+', top).group()[1:]
        # Actually copy the headers: the original aliased the module-level
        # dict (headers_clone = headers) despite the comment claiming a clone,
        # so any later tweak would have mutated the shared headers.
        headers_clone = dict(headers)
        info_url = "http://59.77.226.35/jcxx/xsxx/StudentInformation.aspx?id=" + num
        res = session.get(info_url, headers=headers_clone)
        return parse(res.text)
    except Exception:
        # Narrowed from a bare except: so Ctrl-C / SystemExit still propagate.
        # Callers rely on the -1 sentinel, so the failure contract is kept.
        return -1
if __name__=="__main__":
    # Smoke test: attempt a login with the hard-coded credentials above.
    request(formData)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.