index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
21,290
|
elidiocampeiz/ArrowFieldTraversal
|
refs/heads/master
|
/graph_utils.py
|
def get_graph(filename):
    """Parse an arrow-grid file into a matrix of node dicts.

    The first line of the file is the grid size header; every following
    line holds space-separated items of the form ``TYPE-DIRECTION`` (a
    bare ``O`` item gets the sentinel direction ``*``).  Each cell
    becomes ``{'type': ..., 'direction': ..., 'edges': set(...)}`` with
    the edge sets filled in by connect_edges().

    filename: path of the grid file to read.
    Returns the parsed matrix (list of rows of dicts).
    """
    with open(filename, 'r') as fp:
        text_data = fp.readlines()
    data = [line_string.rstrip().split(' ') for line_string in text_data]
    graph_matrix = []
    # Skip the size header (data[0]); every remaining line is a grid row.
    for line in data[1:]:
        row = []
        for item in line:
            item_data = item.split('-')
            if item_data[0] == 'O':
                # 'O' cells carry no direction; '*' marks "no move".
                item_data.append("*")
            row.append({'type': item_data[0], 'direction': item_data[1]})
        graph_matrix.append(row)
    # Second pass: edge resolution needs the complete matrix.
    for r in range(len(graph_matrix)):
        for c in range(len(graph_matrix[0])):
            graph_matrix[r][c]['edges'] = connect_edges(graph_matrix, (r, c))
    return graph_matrix
def connect_edges(graph, node):
    """Collect the nodes reachable from *node* along its arrow direction.

    Starting at *node*, repeatedly step one cell in the node's own
    direction; every visited cell whose type differs from the start
    cell's type is recorded as an edge target.  The walk stops when a
    step leaves the grid (the '*' sentinel direction maps straight to
    (None, None), stopping immediately).

    Returns a set of (row, col) tuples.
    """
    edges = set()
    start_r, start_c = node
    direction = graph[start_r][start_c]['direction']
    curr_type = graph[start_r][start_c]['type']
    new_node = node
    while True:
        r, c = get_next(graph, new_node, direction)
        new_node = (r, c)
        # Stop once the walk steps off the grid ('*' yields None, None).
        if r is None or c is None or not (0 <= r < len(graph) and 0 <= c < len(graph[0])):
            break
        if curr_type != graph[r][c]['type']:
            edges.add(new_node)
    return edges
def get_next(graph, node, direction):
    """Return the (row, col) one step away from *node* in *direction*.

    Supports the eight compass directions plus the sentinel '*', which
    yields (None, None).  *graph* is accepted for interface
    compatibility but never consulted.
    """
    row, col = node
    if direction == '*':
        return (None, None)
    d_row, d_col = {
        'N': (-1, 0), 'NE': (-1, 1), 'NW': (-1, -1),
        'S': (1, 0), 'SE': (1, 1), 'SW': (1, -1),
        'E': (0, 1), 'W': (0, -1),
    }[direction]
    return (row + d_row, col + d_col)
def get_edges(graph, node):
    """Return the precomputed edge set stored at *node* in *graph*."""
    row, col = node
    return graph[row][col]['edges']
# Reconstructs the minimum path from the explored-paths predecessor map.
def trace_path(grid, start, end, paths):
    """Rebuild the start-to-end path from a predecessor map.

    grid:  matrix of node dicts (each node's 'direction' is recorded).
    start: start node (unused; kept for interface compatibility).
    end:   destination node.
    paths: dict mapping each reached node to its predecessor; the
           origin node maps to None.

    Returns a list of (node, direction) tuples ordered from start to
    end, or [] when *end* was never reached.
    """
    if end not in paths:
        return []
    path = []
    node = end
    # Walk the predecessor chain back to the origin (predecessor None).
    while node is not None:
        i, j = node
        path.append((node, grid[i][j]['direction']))
        node = paths[node]
    # Reverse so the path runs start -> end.
    path.reverse()
    return path
def format_path(path):
    """Render a traced path as '<count><direction>' steps joined by spaces.

    path: list of ((row, col), direction) tuples as produced by
          trace_path().  Consecutive nodes may be several cells apart;
          the step count is their Chebyshev distance.

    Returns e.g. '2S 5SE'.  An empty path yields '' (previously this
    raised IndexError on path[0]).
    """
    if not path:
        return ''
    node, direction = path[0]
    steps = []
    for next_node, next_dir in path[1:]:
        r, c = node
        n_r, n_c = next_node
        # A diagonal move advances both axes at once, so the number of
        # grid steps is the larger of the two axis distances.
        num = max(abs(n_r - r), abs(n_c - c))
        steps.append(str(num) + direction)
        node, direction = next_node, next_dir
    return ' '.join(steps)
def write_file(filename, path_str):
    """Write *path_str* to *filename*, creating or truncating the file."""
    with open(filename, 'w+') as out_fp:
        out_fp.write(path_str)
# def test_paths(path_str, filename):
# solution = {
# 'small.txt':'2S 5SE 7N 2W 1W 1SE 1NE 2E 7S 1NE 5NW 2W 3SE 2NE 2SE 3S',
# 'rect.txt': '8S 9E 1SW 2W 7N 3SE 1SE 2N 2NW 2NW 7E 1E 1SW 5S 2E 3SW 3W 4NW 2NW 14E 1NW 2S 2NE 1N 1SW 7SW 2N 1NW 2W 7W 1SE 7N 1E 2SE 5SE 3N 4E 5E 5S 1E',
# 'small':'2S 5SE 7N 2W 1W 1SE 1NE 2E 7S 1NE 5NW 2W 3SE 2NE 2SE 3S',
# 'rect': '8S 9E 1SW 2W 7N 3SE 1SE 2N 2NW 2NW 7E 1E 1SW 5S 2E 3SW 3W 4NW 2NW 14E 1NW 2S 2NE 1N 1SW 7SW 2N 1NW 2W 7W 1SE 7N 1E 2SE 5SE 3N 4E 5E 5S 1E',
# }
# res = filename in solution and path_str == solution[filename]
# if not res:
# print('exp',solution[filename])
# print('act', path_str)
# return res
|
{"/GraphTraversal.py": ["/graph_utils.py"]}
|
21,293
|
akhidwivedi/Employee-management
|
refs/heads/master
|
/employee/forms.py
|
from django import forms
from .models import employee
from django.forms import ModelForm
from django.contrib.auth import authenticate
class UserLoginForm(forms.Form):
    """Login form that validates the credentials against Django auth."""
    username = forms.CharField()
    password = forms.CharField(widget=forms.PasswordInput)

    def clean(self, *args, **kwrgs):
        """Authenticate the submitted credentials.

        Raises forms.ValidationError when the user is unknown, the
        password is wrong, or the account is inactive.
        """
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')
        if username and password:
            # authenticate() returns None both for unknown users and for
            # wrong passwords, so the first check covers both cases.
            user = authenticate(username=username, password=password)
            if not user:
                raise forms.ValidationError('this user does not exist')
            # NOTE(review): this branch is effectively unreachable —
            # authenticate() has already verified the password.
            if not user.check_password(password):
                # Bug fix: user-facing typo "pass word" -> "password".
                raise forms.ValidationError('the entered password is incorrect')
            if not user.is_active:
                raise forms.ValidationError('this user is not active')
        return super(UserLoginForm, self).clean(*args, **kwrgs)
class EmployeeForm(forms.ModelForm):
    """ModelForm exposing every field of the employee model."""
    class Meta:
        model= employee
        fields= '__all__'
    def __init__(self,*args,**kwargs):
        super(EmployeeForm,self).__init__(*args,**kwargs)
        # Show "select" instead of the default dashes in the FK dropdown.
        self.fields['position'].empty_label ="select"
|
{"/employee/forms.py": ["/employee/models.py"], "/employee/views.py": ["/employee/forms.py", "/employee/models.py"]}
|
21,294
|
akhidwivedi/Employee-management
|
refs/heads/master
|
/employee/views.py
|
from django.shortcuts import render,redirect
from .forms import EmployeeForm,UserLoginForm
from .models import employee
def employee_list(request):
    """Render the table of all employees."""
    all_employees = employee.objects.all()
    return render(request, "employee/employee_list.html", {'employee_list': all_employees})
def employee_forms(request, id=0):
    """Create (id == 0) or update (id != 0) an employee.

    GET renders an empty or pre-filled form; a valid POST saves and
    redirects to the list view.  An invalid POST re-renders the form
    with its errors (previously the view returned None, which Django
    turns into a 500 response).
    """
    if request.method == "GET":
        if id == 0:
            form = EmployeeForm()
        else:
            employees = employee.objects.get(pk=id)
            form = EmployeeForm(instance=employees)
        return render(request, "employee/employee_forms.html", {'form': form})
    else:
        if id == 0:
            form = EmployeeForm(request.POST)
        else:
            employees = employee.objects.get(pk=id)
            form = EmployeeForm(request.POST, instance=employees)
        if form.is_valid():
            form.save()
            return redirect('/employee/list')
        # Bug fix: show validation errors instead of falling off the end.
        return render(request, "employee/employee_forms.html", {'form': form})
def employee_delete(request, id):
    """Remove the employee with primary key *id* and return to the list."""
    employee.objects.get(pk=id).delete()
    return redirect('/employee/list')
|
{"/employee/forms.py": ["/employee/models.py"], "/employee/views.py": ["/employee/forms.py", "/employee/models.py"]}
|
21,295
|
akhidwivedi/Employee-management
|
refs/heads/master
|
/employee/models.py
|
from django.db import models
from django.forms import ModelForm
# Create your models her
class position(models.Model):
    """Lookup table of job positions, referenced by employee.position."""
    title=models.CharField(max_length=40)
    def __str__(self):
        # Shown in the admin and in form dropdowns.
        return self.title
class employee(models.Model):
    """An employee record; deleting its position cascades to it."""
    fullname=models.CharField(max_length=50)
    emp_code=models.CharField(max_length=30)
    mobile=models.CharField(max_length=30)
    position=models.ForeignKey(position,on_delete=models.CASCADE)
    def __str__(self):
        return self.fullname
|
{"/employee/forms.py": ["/employee/models.py"], "/employee/views.py": ["/employee/forms.py", "/employee/models.py"]}
|
21,296
|
akhidwivedi/Employee-management
|
refs/heads/master
|
/employee/migrations/0001_initial.py
|
# Generated by Django 2.2 on 2020-04-07 10:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: creates the position and employee tables.

    Auto-generated by Django; do not edit by hand.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='position',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=40)),
            ],
        ),
        migrations.CreateModel(
            name='employee',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fullname', models.CharField(max_length=50)),
                ('emp_code', models.CharField(max_length=30)),
                ('mobile', models.CharField(max_length=30)),
                ('position', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='employee.position')),
            ],
        ),
    ]
|
{"/employee/forms.py": ["/employee/models.py"], "/employee/views.py": ["/employee/forms.py", "/employee/models.py"]}
|
21,297
|
akhidwivedi/Employee-management
|
refs/heads/master
|
/employee/migrations/0003_delete_userlogin.py
|
# Generated by Django 2.2 on 2020-04-18 04:40
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the UserLogin model introduced by 0002 (auto-generated)."""
    dependencies = [
        ('employee', '0002_userlogin'),
    ]
    operations = [
        migrations.DeleteModel(
            name='UserLogin',
        ),
    ]
|
{"/employee/forms.py": ["/employee/models.py"], "/employee/views.py": ["/employee/forms.py", "/employee/models.py"]}
|
21,298
|
akhidwivedi/Employee-management
|
refs/heads/master
|
/employee/urls.py
|
from django.urls import path,include
from . import views
# URL routes for the employee app; the names are used by {% url %} lookups.
urlpatterns = [
    # path('login/',views.login_view,name = 'employee_login'),
    path('', views.employee_forms,name='employee_insert'),
    path('list/',views.employee_list,name='employee_list'),
    path('delete/<int:id>/',views.employee_delete,name='employee_delete'),
    path('<int:id>/', views.employee_forms,name= 'employee_update'),
]
|
{"/employee/forms.py": ["/employee/models.py"], "/employee/views.py": ["/employee/forms.py", "/employee/models.py"]}
|
21,300
|
prasad5141/skillathon_blog
|
refs/heads/master
|
/webproject/urls.py
|
"""webproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from blogapp.views import homeview, addarticleview, getarticles, updatearticleview, articleview, registrationview, loginview, logoutview, deletearticle
# Bug fix: the API's getarticles used to shadow the blogapp view of the
# same name, so the '' route silently served the API view.  Alias it.
from blogapi.views import testview, getarticles as api_getarticles, createarticle, createuserview

urlpatterns = [
    path('admin/', admin.site.urls),
    path('', getarticles, name="get_article"),
    path('addarticle', addarticleview, name='add_article'),
    path('updatearticle/<int:id>/', updatearticleview, name="update_article"),
    # NOTE(review): this name duplicates "get_article" above; reverse()
    # resolves to the later entry — confirm before renaming either.
    path('article/<int:id>/', articleview, name="get_article" ),
    path('articledelete/<int:id>', deletearticle, name="delete_article"),
    path('registration', registrationview, name="user_registration"),
    path('login/', loginview, name="login"),
    path('logout', logoutview, name="logout"),
    path('api/v1/testview', testview),
    path('api/v1/articles/', api_getarticles),
    # NOTE(review): 'creatarticle' looks like a typo for 'createarticle',
    # but changing the URL would break existing clients — left as-is.
    path('api/v1/creatarticle', createarticle ),
    path('api/v1/registration', createuserview )
]
|
{"/webproject/urls.py": ["/blogapp/views.py", "/blogapi/views.py"], "/blogapi/serializers.py": ["/blogapp/models.py"], "/blogapp/views.py": ["/blogapp/models.py"], "/blogapp/admin.py": ["/blogapp/models.py"], "/blogapi/views.py": ["/blogapi/serializers.py", "/blogapp/models.py"]}
|
21,301
|
prasad5141/skillathon_blog
|
refs/heads/master
|
/blogapi/serializers.py
|
from rest_framework import serializers
from blogapp.models import Article
class GetArticlesSerializer(serializers.ModelSerializer):
    """Read serializer for Article: title, content and author username."""
    user = serializers.SerializerMethodField()
    class Meta:
        model = Article
        fields = ('title', 'content', 'user')
    def get_user(self, instance):
        # Flatten the posted_by FK to just the poster's username.
        username = instance.posted_by.username
        return username
class CreateArticleSerializer(serializers.Serializer):
    """Validates the title/content payload for article creation."""
    title = serializers.CharField(required=True)
    content = serializers.CharField(required=True)
    class Meta:
        # NOTE(review): a plain Serializer ignores Meta.fields; this is
        # inert but harmless — kept for symmetry with the ModelSerializer.
        fields = ('title', 'content')
class CreateUser(serializers.Serializer):
    """Validates the username/password payload for user registration.

    NOTE(review): password is not write_only, so it would be echoed back
    if this serializer were ever used for output — confirm intent.
    """
    username = serializers.CharField(required=True)
    password = serializers.CharField(required=True)
    class Meta:
        # NOTE(review): Meta.fields is ignored on a plain Serializer.
        fields = ('username', 'password')
|
{"/webproject/urls.py": ["/blogapp/views.py", "/blogapi/views.py"], "/blogapi/serializers.py": ["/blogapp/models.py"], "/blogapp/views.py": ["/blogapp/models.py"], "/blogapp/admin.py": ["/blogapp/models.py"], "/blogapi/views.py": ["/blogapi/serializers.py", "/blogapp/models.py"]}
|
21,302
|
prasad5141/skillathon_blog
|
refs/heads/master
|
/blogapp/views.py
|
from django.shortcuts import render, redirect
from .forms import ArticleForm
from django.contrib import messages
from .models import Article
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.contrib.auth.models import User
from django.contrib.auth.hashers import check_password
from django.contrib.auth import login, logout
from django.contrib.auth.decorators import login_required
# Create your views here.
def homeview(request):
    """Render the blog landing page template."""
    return render(request, 'home.html')
@login_required(login_url='/login')
def addarticleview(request):
    """Create a new article owned by the logged-in user.

    GET shows an empty ArticleForm; a valid POST saves the article and
    redirects home.  An invalid POST re-renders the form with its
    errors (previously the view returned None, a 500 for the client).
    Debug prints removed.
    """
    if request.method == "GET":
        form = ArticleForm()
        return render(request, 'addarticle.html', {"form":form})
    if request.method == "POST":
        form = ArticleForm(request.POST)
        if form.is_valid():
            # Attach the author before the real save; the form itself
            # does not carry posted_by.
            article = form.save(commit=False)
            article.posted_by = request.user
            article.save()
            messages.success(request, "Article added successfully")
            return redirect('/')
        # Bug fix: surface validation errors instead of returning None.
        return render(request, 'addarticle.html', {"form":form})
def getarticles(request):
    """Render the home page with every article, newest first.

    Debug print of request.user removed.
    """
    articles = Article.objects.all().order_by('-posted_on')
    return render(request, 'home.html', {'articles':articles})
@login_required(login_url='/login')
def updatearticleview(request, id):
    """Edit an existing article.

    GET pre-fills the form from the stored article; a valid POST writes
    the new title/content and redirects home.  An invalid POST now
    re-renders the form (previously the view returned None).  Debug
    prints removed.

    NOTE(review): ownership is not checked here — any logged-in user
    can edit any article; confirm whether that is intended.
    """
    if request.method == "GET":
        article = Article.objects.get(id=id)
        form = ArticleForm(initial={'title':article.title, 'content':article.content})
        return render(request, 'update_article.html', {'form':form})
    if request.method == "POST":
        form = ArticleForm(request.POST)
        if form.is_valid():
            article = Article.objects.get(id=id)
            article.title = request.POST.get('title')
            article.content = request.POST.get('content')
            article.save()
            return redirect('/')
        # Bug fix: show errors instead of falling off the end.
        return render(request, 'update_article.html', {'form':form})
def articleview(request, id):
    """Show one article; flags whether the viewer is its author."""
    if request.method == "GET":
        article = Article.objects.get(id=id)
        is_owner = request.user == article.posted_by
        return render(request, 'article.html', {"article":article, "is_owner":is_owner} )
def deletearticle(request, id):
    """Delete an article if the requester owns it, then go home.

    Failures (missing article, DB errors) are handled best-effort: the
    error is printed and the user is still redirected home.
    """
    try:
        article = Article.objects.get(id=id)
        # Only the author may delete; anyone else is silently redirected.
        if request.user == article.posted_by:
            article.delete()
        return redirect('/')
    except Exception as e:
        # Broad on purpose (best-effort delete); the error is at least
        # recorded on stdout.
        print(e)
        return redirect('/')
def registrationview(request):
    """Register a new user account.

    GET shows the UserCreationForm; POST creates the user when the two
    passwords match and the username is free, then redirects to login.
    Any failure re-shows the form with an error message.  Idiom fixes:
    `== False` comparisons replaced with truthiness; debug prints
    removed.  User-facing messages are unchanged.
    """
    if request.method == "GET":
        form = UserCreationForm()
        return render(request, 'registration.html', {"form":form})
    if request.method == "POST":
        username = request.POST.get('username')
        password1 = request.POST.get('password1')
        password2 = request.POST.get('password2')
        if password1 == password2:
            if not User.objects.filter(username=username).exists():
                User.objects.create_user(username=username, password=password1)
                return redirect('/login')
            form = UserCreationForm()
            messages.error(request, "Username already taken.")
            return render(request, 'registration.html', {"form":form} )
        form = UserCreationForm()
        messages.error(request, "Both password are not Matching.")
        return render(request, 'registration.html', {"form":form} )
def loginview(request):
    """Log a user in by checking the stored password hash.

    GET shows the AuthenticationForm; POST verifies the credentials and
    redirects home on success.  Wrong password and unknown username
    keep their distinct messages.  Fixes: the bare `except:` (which
    swallowed every error) now catches only the expected lookup
    failure, and `== True` / debug prints are gone.
    """
    if request.method == "GET":
        form = AuthenticationForm()
        return render(request, 'login.html', {"form":form})
    if request.method == "POST":
        username = request.POST.get('username')
        password = request.POST.get('password')
        try:
            user = User.objects.get(username=username)
            if check_password(password, user.password):
                login(request, user)
                return redirect('/')
            form = AuthenticationForm()
            messages.error(request, "Login failed.")
            return render(request, 'login.html', {"form":form})
        except User.DoesNotExist:
            # Bug fix: previously a bare except that also hid unrelated
            # errors; only the missing-user case is handled now.
            form = AuthenticationForm()
            messages.error(request, "Username or Password Invalid.")
            return render(request, 'login.html', {"form":form})
def logoutview(request):
    """End the current session and return to the home page."""
    logout(request)
    return redirect('/')
|
{"/webproject/urls.py": ["/blogapp/views.py", "/blogapi/views.py"], "/blogapi/serializers.py": ["/blogapp/models.py"], "/blogapp/views.py": ["/blogapp/models.py"], "/blogapp/admin.py": ["/blogapp/models.py"], "/blogapi/views.py": ["/blogapi/serializers.py", "/blogapp/models.py"]}
|
21,303
|
prasad5141/skillathon_blog
|
refs/heads/master
|
/blogapp/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Article
# class ArticleAdmin(admin.ModelAdmin):
# list_display = ['title', 'article']
# Expose Article in the Django admin with the default ModelAdmin.
admin.site.register(Article)
|
{"/webproject/urls.py": ["/blogapp/views.py", "/blogapi/views.py"], "/blogapi/serializers.py": ["/blogapp/models.py"], "/blogapp/views.py": ["/blogapp/models.py"], "/blogapp/admin.py": ["/blogapp/models.py"], "/blogapi/views.py": ["/blogapi/serializers.py", "/blogapp/models.py"]}
|
21,304
|
prasad5141/skillathon_blog
|
refs/heads/master
|
/blogapp/models.py
|
from django.db import models
# from django.contrib.auth import User
from django.contrib.auth.models import User
# Create your models here.
class Tag(models.Model):
    """A unique label that can be attached to many articles."""
    name = models.CharField(max_length=150, unique=True, null=False, blank=False)
class Article(models.Model):
    """A blog post written by a User, optionally tagged."""
    title = models.CharField(max_length=250)
    content = models.TextField(max_length=5000)
    # Deleting the author removes their articles as well.
    posted_by = models.ForeignKey(User, on_delete=models.CASCADE)
    # Set once at creation; views order by '-posted_on'.
    posted_on = models.DateTimeField(auto_now_add=True)
    tag = models.ManyToManyField(Tag, blank=True)
|
{"/webproject/urls.py": ["/blogapp/views.py", "/blogapi/views.py"], "/blogapi/serializers.py": ["/blogapp/models.py"], "/blogapp/views.py": ["/blogapp/models.py"], "/blogapp/admin.py": ["/blogapp/models.py"], "/blogapi/views.py": ["/blogapi/serializers.py", "/blogapp/models.py"]}
|
21,305
|
prasad5141/skillathon_blog
|
refs/heads/master
|
/blogapi/views.py
|
from django.shortcuts import render
from rest_framework.response import Response
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from django.contrib.auth.models import User
from .serializers import GetArticlesSerializer,CreateArticleSerializer, CreateUser
from rest_framework_jwt.utils import jwt_payload_handler, jwt_encode_handler
from rest_framework.permissions import IsAuthenticated
# Create your views here.
from blogapp.models import Article
@api_view(['GET'])
def testview(request):
    """Health-check endpoint returning a static greeting."""
    payload = {"message":"hello world"}
    return Response(data=payload, status=status.HTTP_200_OK)
@api_view(['GET'])
def getarticles(request):
    """Return every article (newest first) serialized as JSON.

    Debug print of request.user removed.
    """
    articles = Article.objects.all().order_by('-posted_on')
    serializer = GetArticlesSerializer(articles, many=True)
    return Response(data=serializer.data, status=status.HTTP_200_OK)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def createarticle(request):
    """Create an article for the authenticated user from a JSON payload.

    Returns 201 on success; serializer validation failures raise and
    become a 400 response.  Fixes: use the validated payload instead of
    raw request.data, drop the unused local and debug prints.
    """
    serializer = CreateArticleSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    data = serializer.validated_data
    Article.objects.create(title=data.get('title'), content=data.get('content'), posted_by=request.user)
    return Response(status=status.HTTP_201_CREATED)
@api_view(['POST'])
def createuserview(request):
    """Register a user and return a JWT for the new account.

    Fixes: read from the validated payload instead of raw request.data.
    NOTE(review): a duplicate username raises an IntegrityError (a 500)
    — confirm whether a 400 with a message is wanted instead.
    """
    serializer = CreateUser(data=request.data)
    serializer.is_valid(raise_exception=True)
    data = serializer.validated_data
    user = User.objects.create_user(username=data.get('username'), password=data.get('password'))
    payload = jwt_payload_handler(user)
    token = jwt_encode_handler(payload)
    return Response(data={"token":token}, status=status.HTTP_201_CREATED)
|
{"/webproject/urls.py": ["/blogapp/views.py", "/blogapi/views.py"], "/blogapi/serializers.py": ["/blogapp/models.py"], "/blogapp/views.py": ["/blogapp/models.py"], "/blogapp/admin.py": ["/blogapp/models.py"], "/blogapi/views.py": ["/blogapi/serializers.py", "/blogapp/models.py"]}
|
21,307
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/src/pyramid_bubbling/util.py
|
# -*- coding:utf-8 -*-
from zope.interface.interface import InterfaceClass
from zope.interface import (
implementedBy,
implementer
)
_repository = {}
def clean_dynamic_interface():
    """Drop every cached dynamically built interface (e.g. between tests)."""
    global _repository
    _repository = {}
def dynamic_interface(type_):
    """Return the interface for *type_*, building and caching it on first use."""
    global _repository
    if type_ not in _repository:
        _repository[type_] = make_interface_on_the_fly(type_)
    return _repository[type_]
def make_interface_on_the_fly(cls):
    """Create an interface named I<cls> and register *cls* as its implementer."""
    iface = InterfaceClass("I{}".format(cls.__name__))
    implementer(iface)(cls)
    return iface
def iface_from_class(Class, dynamic=True, Exception=Exception):
    """Resolve *Class* to a zope interface.

    An InterfaceClass is returned unchanged; a plain class gets a
    dynamically generated interface when *dynamic* is true; otherwise
    *Exception* is raised.  Note: the parameter deliberately shadows
    the builtin Exception so callers can substitute their own error
    type — do not rename it.
    """
    if isinstance(Class, InterfaceClass):
        return Class
    elif isinstance(Class, type) and dynamic:
        return dynamic_interface(Class)
    else:
        raise Exception("interface is not found from {}".format(Class))
# def iface_from_class(Class, dynamic=True, Exception=Exception):
# try:
# if isinstance(Class, InterfaceClass):
# iface = Class
# else:
# iface = next(iter(implementedBy(Class)))
# except StopIteration:
# if dynamic:
# iface = dynamic_interface(Class)
# else:
# raise Exception("interface is not found from {}".format(Class))
# return iface
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,308
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/src/pyramid_bubbling/interfaces.py
|
# -*- coding:utf-8 -*-
from zope.interface import (
Interface,
)
class IParentFromInstanceAdapter(Interface):
    """Adapter that resolves a node's parent from an instance or a class."""
    def __call__(instance):
        pass
    def from_class(cls):
        pass
class IBubbling(Interface):
    """The bubbling engine: walks child→parent and dispatches events."""
    def get_iterator(startpoint):
        pass
    def get_bubbling_path_order(leaf):
        pass
    def fire(startpoint, case):
        pass
class IAccess(Interface):
    """Strategy for reaching a target's parent link and notifying events."""
    def exists(target):
        pass
    def access(target):
        pass
    def notify(subject, method_name, *args, **kwargs):
        pass
class IEvent(Interface):
    """A callable event handler invoked with the bubbling subject."""
    def __call__(subject, *args, **kwargs):
        pass
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,309
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/tests/test_components.py
|
# -*- coding:utf-8 -*-
import unittest
from testfixtures import compare
from zope.interface import Interface, implementer
class INode(Interface):
    # Marker interface shared by every test node class.
    pass
@implementer(INode)
class base(object):
    # Common base so every NodeFactory product provides INode.
    pass
def NodeFactory(name, iface_name, base=base):
    """Build a node class *name* providing a fresh interface *iface_name*.

    Instances store their constructor argument as ``self.parent``.
    """
    from zope.interface.interface import InterfaceClass
    def __init__(self, parent=None):
        self.parent = parent
    attrs = {"__init__": __init__}
    cls = type(name, (base, ), attrs)
    return implementer(InterfaceClass(iface_name))(cls)
def get_registry():
    """Return a fresh zope component registry for one test."""
    from zope.interface.registry import Components
    return Components()
class BubblinglookupTests(unittest.TestCase):
    """_lookup should find the bubbling-path adapter registered for a class."""
    def _add_bubbling_path(self, *args, **kwargs):
        from pyramid_bubbling.components import _add_bubbling_path
        return _add_bubbling_path(*args, **kwargs)
    def _callFUT(self, *args, **kwargs):
        # FUT = function under test (_lookup).
        from pyramid_bubbling.components import _lookup
        return _lookup(*args, **kwargs)
    def test_it(self):
        from zope.interface import providedBy
        from pyramid_bubbling.components import ParentFromInstance
        A = NodeFactory("A", "IA")
        registry = get_registry()
        fn = ParentFromInstance(None, None)
        self._add_bubbling_path(registry, A, fn)
        result = self._callFUT(registry, [providedBy(A())])
        compare(result, fn)
class BubblingOrderTests(unittest.TestCase):
    """get_bubbling_path_order should walk child→parent via registered adapters."""
    def _add_bubbling_path(self, *args, **kwargs):
        from pyramid_bubbling.components import _add_bubbling_path
        return _add_bubbling_path(*args, **kwargs)
    def _getTargetClass(self):
        from pyramid_bubbling import Bubbling
        return Bubbling
    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)
    def test_setting_each_adapter(self):
        from pyramid_bubbling.components import ParentFromInstance
        ## c -> b -> a
        A = NodeFactory("A", "IA")
        B = NodeFactory("B", "IB")
        C = NodeFactory("C", "IC")
        registry = get_registry()
        # One adapter per class; A's lookup returns None (the root).
        self._add_bubbling_path(registry, A, ParentFromInstance(None, lambda cls: None));
        self._add_bubbling_path(registry, B, ParentFromInstance(None, lambda cls: A))
        self._add_bubbling_path(registry, C, ParentFromInstance(None, lambda cls: B))
        from pyramid_bubbling.components import RegistryAccessForClass
        target = self._makeOne(RegistryAccessForClass(registry))
        result = target.get_bubbling_path_order(C)
        compare(result, [C, B, A])
    def test_setting_default_one(self):
        from pyramid_bubbling.components import ParentFromInstance
        ## c -> b -> a
        A = NodeFactory("A", "IA")
        A.parent = None
        B = NodeFactory("B", "IB")
        B.parent = A
        C = NodeFactory("C", "IC")
        C.parent = B
        registry = get_registry()
        # A single INode-wide adapter resolves every class's parent attr.
        self._add_bubbling_path(registry, INode, ParentFromInstance(None, lambda cls: cls.parent));
        from pyramid_bubbling.components import RegistryAccessForClass
        target = self._makeOne(RegistryAccessForClass(registry))
        result = target.get_bubbling_path_order(C)
        compare(result, [C, B, A])
class BubblingEventRegistryTests(unittest.TestCase):
    """fire() should bubble events child→parent through registry adapters."""
    def _add_bubbling_path(self, *args, **kwargs):
        from pyramid_bubbling.components import _add_bubbling_path
        return _add_bubbling_path(*args, **kwargs)
    def _add_bubbling_event(self, *args, **kwargs):
        from pyramid_bubbling.components import _add_bubbling_event
        return _add_bubbling_event(*args, **kwargs)
    def _getTargetClass(self):
        from pyramid_bubbling import Bubbling
        return Bubbling
    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)
    def test_it(self):
        from pyramid_bubbling.components import ParentFromInstance
        ## d -> c -> b -> a
        A = NodeFactory("A", "IA")
        B = NodeFactory("B", "IB")
        C = NodeFactory("C", "IC")
        D = NodeFactory("D", "ID")
        called = []
        def on_called(self):
            called.append(self)
        registry = get_registry()
        def lookup_parent(self):
            return self.parent
        self._add_bubbling_path(registry, A, ParentFromInstance(lookup_parent, None))
        self._add_bubbling_path(registry, B, ParentFromInstance(lookup_parent, None))
        self._add_bubbling_path(registry, C, ParentFromInstance(lookup_parent, None))
        self._add_bubbling_path(registry, D, ParentFromInstance(lookup_parent, None))
        self._add_bubbling_event(registry, A, on_called, name="called")
        self._add_bubbling_event(registry, B, on_called, name="called")
        self._add_bubbling_event(registry, C, on_called, name="called")
        self._add_bubbling_event(registry, D, on_called, name="called")
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        from pyramid_bubbling.components import RegistryAccess
        target = self._makeOne(RegistryAccess(registry))
        target.fire(d, "called")
        compare(called, [d, c, b, a])
    def test_it__same_lookup(self):
        from pyramid_bubbling.components import ParentFromInstance
        ## d -> c -> b -> a
        # Same chain as test_it, but one INode-wide path/event pair.
        A = NodeFactory("A", "IA")
        B = NodeFactory("B", "IB")
        C = NodeFactory("C", "IC")
        D = NodeFactory("D", "ID")
        called = []
        def on_called(self):
            called.append(self)
        registry = get_registry()
        def lookup_parent(self):
            return self.parent
        relation = ParentFromInstance(lookup_parent, None)
        self._add_bubbling_path(registry, INode, relation)
        self._add_bubbling_event(registry, INode, on_called, name="called")
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        from pyramid_bubbling.components import RegistryAccess
        target = self._makeOne(RegistryAccess(registry))
        target.fire(d, "called")
        compare(called, [d, c, b, a])
    def test_it__default_event_and_each_event(self):
        from pyramid_bubbling.components import ParentFromInstance
        ## d -> c
        # A class-specific event (on C) overrides the INode default.
        C = NodeFactory("C", "IC")
        D = NodeFactory("D", "ID")
        called = []
        def on_called(self):
            called.append(self)
        def on_called_double(self):
            called.append(self)
            called.append(self)
        registry = get_registry()
        def lookup_parent(self):
            return self.parent
        relation = ParentFromInstance(lookup_parent, None)
        self._add_bubbling_path(registry, INode, relation)
        self._add_bubbling_event(registry, INode, on_called, name="called")
        self._add_bubbling_event(registry, C, on_called_double, name="called")
        c = C()
        d = D(c)
        from pyramid_bubbling.components import RegistryAccess
        target = self._makeOne(RegistryAccess(registry))
        target.fire(d, "called")
        compare(called, [d, c, c])
    def test_not_connected(self):
        ## d -> c. b -> a
        # C has no path adapter, so bubbling stops after c.
        from pyramid_bubbling.components import ParentFromInstance
        A = NodeFactory("A", "IA")
        B = NodeFactory("B", "IB")
        C = NodeFactory("C", "IC")
        D = NodeFactory("D", "ID")
        called = []
        registry = get_registry()
        def lookup_parent(self):
            return self.parent
        self._add_bubbling_path(registry, B, ParentFromInstance(lookup_parent, None))
        self._add_bubbling_path(registry, D, ParentFromInstance(lookup_parent, None))
        def on_called(self):
            called.append(self)
        self._add_bubbling_event(registry, INode, on_called, name="called")
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        from pyramid_bubbling.components import RegistryAccess
        target = self._makeOne(RegistryAccess(registry))
        target.fire(d, "called")
        compare(called, [d, c])
    def test_multi_case(self):
        from pyramid_bubbling.components import ParentFromInstance
        A = NodeFactory("A", "IA")
        B = NodeFactory("B", "IB")
        C = NodeFactory("C", "IC")
        D = NodeFactory("D", "ID")
        ## [bar] d -> b -> a
        ## [foo] d -> c -> b
        ## bar
        bar = []
        def on_bar(self):
            bar.append(self)
        registry = get_registry()
        def lookup_parent(self):
            return self.parent
        # "bar" skips C: D's adapter jumps two levels (parent.parent).
        self._add_bubbling_path(registry, B, ParentFromInstance(lookup_parent, None), name="bar")
        self._add_bubbling_path(registry, D, ParentFromInstance(lambda s : s.parent.parent, None), name="bar")
        self._add_bubbling_event(registry, D, on_bar, name="bar")
        self._add_bubbling_event(registry, B, on_bar, name="bar")
        self._add_bubbling_event(registry, A, on_bar, name="bar")
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        from pyramid_bubbling.components import RegistryAccess
        target = self._makeOne(RegistryAccess(registry, name="bar"))
        target.fire(d, "bar")
        compare(bar, [d, b, a])
        ## foo
        foo = []
        def on_foo(self):
            foo.append(self)
        self._add_bubbling_path(registry, C, ParentFromInstance(lookup_parent, None), name="foo")
        self._add_bubbling_path(registry, D, ParentFromInstance(lookup_parent, None), name="foo")
        self._add_bubbling_event(registry, D, on_foo, name="foo")
        self._add_bubbling_event(registry, C, on_foo, name="foo")
        self._add_bubbling_event(registry, B, on_foo, name="foo")
        target = self._makeOne(RegistryAccess(registry, name="foo"))
        target.fire(d, "foo")
        compare(foo, [d, c, b])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,310
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/tests/test_self.py
|
# -*- coding:utf-8 -*-
import unittest
from testfixtures import compare
class BubblingAttributesTests(unittest.TestCase):
    """Class-level access to a bubbling __parent__ should yield the parent class."""
    def test_bubbling_attribute__just_access(self):
        from pyramid_bubbling import bubbling_attribute
        class Parent(object):
            pass
        class Child(object):
            def __init__(self, parent):
                self.parent = parent
            @bubbling_attribute(Parent)
            def __parent__(self):
                return self.parent
        self.assertEqual(Child.__parent__, Parent)
    def test_bubbling_attribute_factory__just_access(self):
        from pyramid_bubbling import bubbling_attribute_factory
        class Parent(object):
            pass
        class Child(object):
            def __init__(self, parent):
                self.parent = parent
            __parent__ = bubbling_attribute_factory(Parent, "parent")
        self.assertEqual(Child.__parent__, Parent)
def NodeFactory(name, base=object, parent=None, attribute_name="__parent__"):
    """Build a test node class; *parent* wires up a bubbling __parent__.

    NOTE(review): *attribute_name* is accepted but never used — the
    attribute is always stored under "__parent__"; confirm intent.
    """
    from pyramid_bubbling import bubbling_attribute
    def __init__(self, parent=None):
        self.parent = parent
    attrs = {"__init__": __init__}
    if parent:
        @bubbling_attribute(parent)
        def __parent__(self):
            return self.parent
        attrs["__parent__"] = __parent__
    return type(name, (base, ), attrs)
class BubblingOrderTests(unittest.TestCase):
    """Path ordering via class-level __parent__ attributes (no registry)."""
    def _getTargetClass(self):
        from pyramid_bubbling import Bubbling
        return Bubbling
    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)
    def test_it(self):
        ## d -> c -> b -> a
        A = NodeFactory("A")
        B = NodeFactory("B", parent=A)
        C = NodeFactory("C", parent=B)
        D = NodeFactory("D", parent=C)
        target = self._makeOne()
        result = target.get_bubbling_path_order(D)
        compare(result, [D, C, B, A])
    def test_if_not_connected(self):
        ## d -> c; b -> a
        # C has no parent, so the order stops there.
        A = NodeFactory("A")
        B = NodeFactory("B", parent=A)
        C = NodeFactory("C")
        D = NodeFactory("D", parent=C)
        target = self._makeOne()
        result = target.get_bubbling_path_order(D)
        compare(result, [D, C])
    def test_if_orphan(self):
        ## d
        D = NodeFactory("D")
        target = self._makeOne()
        result = target.get_bubbling_path_order(D)
        compare(result, [D])
class BubblingEventSelfTests(unittest.TestCase):
    """fire() dispatches on_<case> handlers along the parent chain."""

    def _getTargetClass(self):
        from pyramid_bubbling import Bubbling
        return Bubbling

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_bound_event_is_not_found__raise_exception(self):
        from pyramid_bubbling import BubblingRuntimeException
        A = NodeFactory("A")
        a = A()
        target = self._makeOne()
        # No on_undefined_event anywhere along the path -> runtime error.
        with self.assertRaises(BubblingRuntimeException):
            target.fire(a, "undefined_event")

    def test_it(self):
        ## d -> c -> b -> a
        ## event: [d, c, b, a]
        called = []
        def on_called(self, subject):
            called.append(self)
        A = NodeFactory("A")
        A.on_called = on_called
        B = NodeFactory("B", parent=A)
        B.on_called = on_called
        C = NodeFactory("C", parent=B)
        C.on_called = on_called
        D = NodeFactory("D", parent=C)
        D.on_called = on_called
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        target = self._makeOne()
        target.fire(d, "called")
        compare(called, [d, c, b, a])

    def test_it__stop(self):
        ## routing order: d -> c -> b -> a
        ## event: [d]
        from pyramid_bubbling import Stop
        called = []
        def on_called__stop(self, subject):
            called.append(self)
            # Returning Stop halts the bubbling at the first handler.
            return Stop
        A = NodeFactory("A")
        A.on_called__stop = on_called__stop
        B = NodeFactory("B", parent=A)
        B.on_called__stop = on_called__stop
        C = NodeFactory("C", parent=B)
        C.on_called__stop = on_called__stop
        D = NodeFactory("D", parent=C)
        D.on_called__stop = on_called__stop
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        target = self._makeOne()
        target.fire(d, "called__stop")
        compare(called, [d])

    def test_not_connected(self):
        ## d -> c. b -> a
        called = []
        def on_called(self, subject):
            called.append(self)
        A = NodeFactory("A")
        A.on_called = on_called
        B = NodeFactory("B", parent=A)
        B.on_called = on_called
        C = NodeFactory("C")
        C.on_called = on_called
        D = NodeFactory("D", parent=C)
        D.on_called = on_called
        a = A()
        b = B(a)
        c = C()
        d = D(c)
        target = self._makeOne()
        target.fire(d, "called")
        # Bubbling stops where the chain breaks (C has no parent).
        compare(called, [d, c])

    def test_multi_case(self):
        from pyramid_bubbling import Accessor, bubbling_attribute_factory
        ## [foo] d -> c -> b
        ## [bar] d -> b -> a
        bar = []
        def on_bar(self, subject):
            bar.append(self)
        foo = []
        def on_foo(self, subject):
            foo.append(self)
        A = NodeFactory("A")
        A.on_bar = on_bar
        B = NodeFactory("B", parent=A)
        B.__bar__ = bubbling_attribute_factory(A, "parent")
        B.on_foo = on_foo
        B.on_bar = on_bar
        C = NodeFactory("C")
        C.__foo__ = bubbling_attribute_factory(B, "parent")
        C.on_foo = on_foo
        D = NodeFactory("D", parent=C)
        D.__foo__ = bubbling_attribute_factory(C, "parent")
        D.__bar__ = bubbling_attribute_factory(B, "parent.parent")
        D.on_foo = on_foo
        D.on_bar = on_bar
        a = A()
        b = B(a)
        c = C(b)
        d = D(c)
        # Two independent bubbling paths selected by attribute name.
        target_foo = self._makeOne(Accessor("__foo__"))
        target_foo.fire(d, "foo")
        compare(foo, [d, c, b])
        target_bar = self._makeOne(Accessor("__bar__"))
        target_bar.fire(d, "bar")
        compare(bar, [d, b, a])
if __name__ == '__main__':
unittest.main()
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,311
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
import sys
import os

# True on Python 3 interpreters.
# NOTE(review): py3 is computed but never used below — confirm intent.
py3 = sys.version_info.major >= 3

version = '0.0'

# Runtime dependencies.
requires = [
    "setuptools",
    "zope.interface",
    "venusian"
]

# Extra dependencies only needed to run the test suite.
tests_require = [
    "testfixtures"
]

# Long description is assembled from the README and the changelog.
long_description = "\n".join(open(f).read() for f in ["README.rst", "CHANGES.txt"])

setup(name='pyramid_bubbling',
      version=version,
      description="",
      long_description=long_description,
      classifiers=[
          "Programming Language :: Python",
          "Programming Language :: Python :: 3.3",
          "Framework :: Pyramid",
      ],
      keywords='',
      author='podhmo',
      url='',
      license='MIT',
      # Sources live under src/ (src-layout).
      packages=find_packages('src'),
      package_dir = {'': 'src'},
      namespace_packages=[],
      include_package_data=True,
      zip_safe=False,
      install_requires=requires,
      tests_require=tests_require,
      extras_require={
          "testing": tests_require,
      },
      )
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,312
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/tests/test_it.py
|
# -*- coding:utf-8 -*-
import unittest
from testfixtures import compare
from pyramid import testing
from pyramid_bubbling import bubbling_attribute
from zope.interface import Interface, implementer
class INode(Interface):
    """Marker interface for nodes participating in registry-based bubbling."""
    pass
class Document(object):
    """Root test node; records click events tagged with its name."""

    def __init__(self, name):
        self.name = name

    def on_click(self, subject, result):
        """Append a ("document", name) marker to *result*."""
        entry = ("document", self.name)
        result.append(entry)
@implementer(INode)
class DocumentWithInterface(Document):
    """Document variant that provides INode for registry lookups."""
    pass
class Area(object):
    """Mid-level test node; its bubbling parent is its Document."""

    def __init__(self, name, document):
        self.name = name
        self.document = document

    def on_click(self, subject, result):
        result.append(("area", self.name))

    @bubbling_attribute(Document)
    def __parent__(self):
        return self.document
@implementer(INode)
class AreaWithInterface(Area):
    """Area variant providing INode; its parent is DocumentWithInterface."""

    @bubbling_attribute(DocumentWithInterface)
    def __parent__(self):
        return self.document
class Node(object):
    """Leaf test node; its bubbling parent is its Area."""

    def __init__(self, name, area):
        self.name = name
        self.area = area

    def on_click(self, subject, result):
        result.append(("node", self.name))

    @bubbling_attribute(Area)
    def __parent__(self):
        return self.area
@implementer(INode)
class NodeWithInterface(Node):
    """Node variant providing INode; its parent is AreaWithInterface."""

    @bubbling_attribute(AreaWithInterface)
    def __parent__(self):
        return self.area
def click_simple(subject, result):
    """Record *subject*'s name in *result* (returns None, like list.append)."""
    name = subject.name
    result.append(name)
def make_request(config):
    """Return a DummyRequest bound to *config*'s registry."""
    return testing.DummyRequest(registry=config.registry)
class SelfCaseIntegrationTests(unittest.TestCase):
    """End-to-end bubbling via __parent__ attributes (no registry)."""

    def setUp(self):
        """
        Document[doc]
          Area[top]
            Node[item1]
            Node[item2]
          Area[bottom]
            Node[item3]
        """
        self.doc = Document("doc")
        self.top = Area("top", self.doc)
        self.item1 = Node("item1", self.top)
        self.item2 = Node("item2", self.top)
        self.bottom = Area("bottom", self.doc)
        self.item3 = Node("item3", self.bottom)

    def test_it(self):
        """click item2 => bubbling: node, area, document"""
        from pyramid_bubbling import Bubbling, Accessor
        bubbling = Bubbling(access=Accessor("__parent__"))
        result = []
        bubbling.fire(self.item2, "click", result)
        compare(result, [("node", "item2"), ("area", "top"), ("document", "doc")])

    def test_stop(self):
        """click item2_ => bubbling: node, area[stop]"""
        from pyramid_bubbling import Bubbling, Accessor
        from pyramid_bubbling import Stop

        class StopArea(Area):
            def on_click(self, *args, **kwargs):
                super(StopArea, self).on_click(*args, **kwargs)
                # Returning Stop halts the bubbling at this node.
                return Stop

        top = StopArea("stop_top", self.doc)
        item2 = Node("stop_item2", top)
        bubbling = Bubbling(access=Accessor("__parent__"))
        result = []
        bubbling.fire(item2, "click", result)
        compare(result, [("node", "stop_item2"), ("area", "stop_top")])

    def test_configuration(self):
        """click item2 => bubbling: node, area, document"""
        from pyramid_bubbling import (
            Accessor
        )
        with testing.testConfig() as config:
            config.include("pyramid_bubbling")
            config.verify_bubbling_path(Node, [Node, Area, Document], access=Accessor("__parent__"))
            result = config.verify_bubbling_event(Node, event_name="click", access=Accessor("__parent__"))
            compare(result, [Node.on_click, Area.on_click, Document.on_click])
class UseRegistryIntegrationTests(unittest.TestCase):
    """End-to-end bubbling resolved through the pyramid registry."""

    def setUp(self):
        """
        Document[doc]
          Area[top]
            Node[item1]
            Node[item2]
          Area[bottom]
            Node[item3]
        """
        from pyramid_bubbling.components import ParentFromInstance
        self.doc = DocumentWithInterface("doc")
        self.top = AreaWithInterface("top", self.doc)
        self.item1 = NodeWithInterface("item1", self.top)
        self.item2 = NodeWithInterface("item2", self.top)
        self.bottom = AreaWithInterface("bottom", self.doc)
        self.item3 = NodeWithInterface("item3", self.bottom)
        ## config
        self.config = testing.setUp()
        self.config.include("pyramid_bubbling")

        def access(s):
            return getattr(s, "__parent__", None)

        # One INode-wide registration covers the whole node family.
        self.config.add_bubbling_path(INode, ParentFromInstance(access, access))
        self.config.add_bubbling_event(INode, click_simple, "click")

    def tearDown(self):
        testing.tearDown()
        from pyramid_bubbling.util import clean_dynamic_interface
        # Dynamic interfaces are module-global; clear them between tests.
        clean_dynamic_interface()

    def test_it(self):
        """click item2 => bubbling: node, area, document"""
        from pyramid_bubbling.api import get_bubbling
        bubbling = get_bubbling(make_request(self.config), self.item2)
        result = []
        bubbling.fire(self.item2, "click", result)
        compare(result, ['item2', 'top', 'doc'])

    def test_stop(self):
        """click item2_ => bubbling: node, area[stop]"""
        from pyramid_bubbling import (
            Stop
        )
        from pyramid_bubbling.api import get_bubbling

        class StopArea(AreaWithInterface):
            pass

        def click_simple_stop(subject, result):
            click_simple(subject, result)
            return Stop

        # More specific registration for StopArea overrides the INode one.
        self.config.add_bubbling_event(StopArea, click_simple_stop, "click")
        top = StopArea("stop_top", self.doc)
        item2 = NodeWithInterface("stop_item2", top)
        bubbling = get_bubbling(make_request(self.config), item2)
        result = []
        bubbling.fire(item2, "click", result)
        compare(result, ["stop_item2", "stop_top"])

    def test_configuration(self):
        """click item2 => bubbling: node, area, document"""
        config = self.config
        config.verify_bubbling_path(NodeWithInterface, [NodeWithInterface, AreaWithInterface, DocumentWithInterface])
        result = config.verify_bubbling_event(NodeWithInterface, event_name="click")
        compare(result, [click_simple, click_simple, click_simple])
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,313
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/src/pyramid_bubbling/api.py
|
# -*- coding:utf-8 -*-
from zope.interface import (
providedBy,
)
from . import (
Bubbling,
Accessor
)
from .components import (
RegistryAccessForClass,
RegistryAccess
)
import logging
logger = logging.getLogger(__name__)
def get_bubbling(request, start_point, path_name=""):
    """Return a Bubbling traverser appropriate for *start_point*.

    If *start_point* provides no zope interface, fall back to plain
    attribute access; otherwise use the registry-backed access, choosing
    the class flavour when *start_point* is itself a class.
    """
    try:
        # providedBy() yields start_point's interfaces; an empty iterator
        # (StopIteration) or a non-introspectable object (TypeError) means
        # the registry cannot be used.
        next(iter(providedBy(start_point)))
    except (StopIteration, TypeError):
        return Bubbling(Accessor(path_name))
    if isinstance(start_point, type):
        return Bubbling(RegistryAccessForClass(request.registry, path_name))
    else:
        return Bubbling(RegistryAccess(request.registry, path_name))
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,314
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/demo/models.py
|
# -*- coding:utf-8 -*-
from pyramid_bubbling.components import bubbling_event_config
from zope.interface import Interface, implementer
class INode(Interface):
    """Marker interface shared by all demo nodes."""
    pass
@implementer(INode)
class Node(object):
    """Base class for demo model objects; provides INode."""
    pass
class Document(Node):
    """Top of the demo bubbling chain."""

    def __init__(self, name):
        self.name = name
class Area(Node):
    """Middle demo node; bubbles up to its Document."""

    def __init__(self, name, document):
        self.name = name
        self.document = document
class Button(Node):
    """Leaf demo node; bubbles up to its Area."""

    def __init__(self, name, area):
        self.name = name
        self.area = area
@bubbling_event_config(Document, "click")
def on_click_document(subject, result):
    # Click handler at the document level.
    result.append(("document", subject.name))
@bubbling_event_config(Area, "click")
def on_click_area(subject, result):
    # Click handler at the area level.
    result.append(("area", subject.name))
@bubbling_event_config(Button, "click")
def on_click_button(subject, result):
    # Click handler at the button level.
    result.append(("button", subject.name))
@bubbling_event_config(INode, "tap")
def on_tap(subject, result):
    # Tap handler shared by every INode via the interface registration.
    result.append(("tap", subject.name))
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,315
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/demo/main.py
|
# -*- coding:utf-8 -*-
"""Demo: wire up bubbling paths/events and fire click + tap events."""
from pyramid.testing import testConfig, DummyRequest
from pyramid_bubbling.components import ParentFromInstance
import sys
import os

# Make the sibling models.py importable when run as a script.
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
from models import (
    Document,
    Area,
    Button
)

with testConfig() as config:
    config.include("pyramid_bubbling")
    # Parent relations: Button -> Area -> Document.
    config.add_bubbling_path(Area, ParentFromInstance(
        lambda s: s.document,
        lambda c: Document
    ))
    config.add_bubbling_path(Button, ParentFromInstance(
        lambda s: s.area,
        lambda c: Area
    ))
    # Pick up the @bubbling_event_config handlers declared in models.py.
    config.scan("models")
    config.verify_bubbling_path(Button, [Button, Area, Document])
    config.verify_bubbling_event(Button, "click")

    def make_request():
        # NOTE(review): DummyRequest's first positional parameter is
        # `params`, not `registry` — confirm the registry is actually
        # being passed as intended here.
        return DummyRequest(config.registry)

    doc = Document("1")
    area = Area("2", document=doc)
    button = Button("3", area=area)
    from pyramid_bubbling.api import get_bubbling
    request = make_request()
    bubbling = get_bubbling(request, button)

    ## click
    print("----------------click------------------------")
    result = []
    bubbling.fire(button, "click", result)
    assert result == [('button', '3'), ('area', '2'), ('document', '1')]
    print(result)
    for x in bubbling.get_ordered_event(button, "click"):
        print(x)

    ## tap
    print("----------------tap------------------------")
    result = []
    bubbling.fire(button, "tap", result)
    assert result == [('tap', '3'), ('tap', '2'), ('tap', '1')]
    print(result)
    for x in bubbling.get_ordered_event(button, "tap"):
        print(x)
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,316
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/src/pyramid_bubbling/__init__.py
|
# -*- coding:utf-8 -*-
import operator as op
from zope.interface import implementer
from pyramid.exceptions import ConfigurationError
from .interfaces import (
IBubbling,
IAccess
)
class BubblingConfigurationError(ConfigurationError):
    """Raised when bubbling paths/events are misconfigured."""
    pass
class BubblingRuntimeException(Exception):
    """Raised at fire() time, e.g. when no handler is bound for an event."""
    pass
class _Singleton(object):
    """Named sentinel object; instances are compared by identity."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "<_Singleton {!r} at 0x{:x}>".format(self.name, id(self))


# Sentinel a handler returns to stop further bubbling.
Stop = _Singleton("Stop")
# Initial status inside Bubbling.fire(): "no handler has run yet".
Start = _Singleton("Start")  # fix: name was misspelled "Sart"
@implementer(IAccess)
class Accessor(object):
    """Plain attribute-based access strategy for bubbling traversal."""

    def __init__(self, k="__parent__"):
        # k: name of the attribute that links a node to its parent.
        self.k = k

    def exists(self, target):
        """True when *target* exposes the parent attribute."""
        return hasattr(target, self.k)

    def access(self, target):
        """Return the parent of *target*, or None when absent."""
        return getattr(target, self.k, None)

    def get_notify(self, subject, case):
        """Return the ``on_<case>`` handler of *subject*, or None."""
        return getattr(subject, "on_{}".format(case), None)
@implementer(IBubbling)
class Bubbling(object):
    """Walks a parent chain and dispatches events along it."""

    def __init__(self, access=None):
        # access: IAccess strategy; defaults to plain __parent__ access.
        self.access = access or Accessor()

    def get_iterator(self, startpoint):
        """Yield startpoint, then each successive parent while one exists."""
        target = startpoint
        yield target
        access = self.access
        while access.exists(target):
            target = access.access(target)
            yield target

    def get_bubbling_path_order(self, leaf):
        """Return the bubbling path [leaf, parent, ...] as a list.

        Stops early if the chain yields None.
        """
        iterator = self.get_iterator(leaf)
        result = []
        for target in iterator:
            if target is None:
                break
            # if not isinstance(target, type):
            #     raise BubblingConfigurationError("{} is not correct class".format(target))
            result.append(target)
        # if len(result) <= 1:
        #     raise BubblingConfigurationError("{} doesn't have bubbling relation".format(leaf))
        return result

    def get_ordered_event(self, startpoint, case, *args, **kwargs):
        """Yield (subject, handler) pairs along the bubbling path."""
        iterator = self.get_iterator(startpoint)
        access = self.access
        for subject in iterator:
            notify = access.get_notify(subject, case)
            yield subject, notify

    def fire(self, startpoint, case, *args, **kwargs):
        """Invoke each bound ``on_<case>`` handler along the path.

        A handler returning Stop halts the bubbling.  Raises
        BubblingRuntimeException when no handler was found at all
        (status never left the Start sentinel).
        """
        assert isinstance(case, str)
        iterator = self.get_iterator(startpoint)
        access = self.access
        status = Start
        for subject in iterator:
            notify = access.get_notify(subject, case)
            if callable(notify):
                status = notify(subject, *args, **kwargs)
            if status is Stop:
                break
        if status is Start:
            raise BubblingRuntimeException("case={}: event not found".format(case))
class BubblingAttribute(property):
    """Property that also records the parent *class* for bubbling.

    Instance access behaves like a normal property (returns the parent
    object); class access returns the configured parent class instead.
    """

    def __init__(self, parent, *args, **kwargs):
        self.parent = parent
        super(BubblingAttribute, self).__init__(*args, **kwargs)

    def __get__(self, wrapper, type_):
        if wrapper is None:
            # Accessed on the class: expose the parent class itself.
            return self.parent
        else:
            return super(BubblingAttribute, self).__get__(wrapper, type_)
def bubbling_attribute(parent_class):
    """Decorator: wrap a getter method as a BubblingAttribute of *parent_class*."""
    def decorator(method):
        return BubblingAttribute(parent_class, method)
    return decorator
def bubbling_attribute_factory(parent_class, attr):
    """Build a BubblingAttribute from an attribute path string or a getter callable."""
    getter = op.attrgetter(attr) if isinstance(attr, str) else attr
    return BubblingAttribute(parent_class, getter)
def includeme(config):
    """Pyramid include hook: install the bubbling configurator directives."""
    for directive in ("add_bubbling_event", "add_bubbling_path",
                      "verify_bubbling_path", "verify_bubbling_event"):
        # Each directive is implemented by the same-named function in .components.
        config.add_directive(directive, config.maybe_dotted(".components." + directive))
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,317
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/tests/test_config.py
|
# -*- coding:utf-8 -*-
import unittest
from pyramid import testing
from testfixtures import compare
class A:
    """Root fixture: no parent."""
    pass
class B:
    """Fixture holding an A instance as `a`."""

    def __init__(self, a):
        self.a = a
class C:
    """Fixture holding a B instance as `b`."""

    def __init__(self, b):
        self.b = b
from zope.interface import (
Interface,
implementer
)
class INode(Interface):
    """Marker interface for registry-based path fixtures."""
    pass
@implementer(INode)
class X(object):
    """Root INode fixture."""
    pass
@implementer(INode)
class Y(object):
    """INode fixture; class attribute `parent` points at X."""
    parent = X

    def __init__(self, parent):
        self.parent = parent
@implementer(INode)
class Z(object):
    """INode fixture; class attribute `parent` points at Y."""
    parent = Y

    def __init__(self, parent):
        self.parent = parent
class I(object):
    """Root fixture for Accessor("parent") traversal; handles 'called'."""
    parent = None

    def on_called(self, subject):
        pass
class J(object):
    """Fixture whose class-level parent is I; handles 'called'."""
    parent = I

    def __init__(self, parent):
        self.parent = parent

    def on_called(self, subject):
        pass
class K(object):
    """Fixture whose class-level parent is J; handles 'called'."""
    parent = J

    def __init__(self, parent):
        self.parent = parent

    def on_called(self, subject):
        pass
class ConfigurationTestsBase(unittest.TestCase):
    """Shared setup: a testing config with pyramid_bubbling included."""

    def setUp(self):
        self.config = testing.setUp()
        self.config.include("pyramid_bubbling")

    def tearDown(self):
        testing.tearDown()
        from pyramid_bubbling.util import clean_dynamic_interface
        # Dynamic interfaces are module-global; clear them between tests.
        clean_dynamic_interface()
class ConfigurationVerifyBubblingPathTests(ConfigurationTestsBase):
    """verify_bubbling_path succeeds/fails according to registered paths."""

    def test_each_path(self):
        from pyramid_bubbling.components import ParentFromInstance
        config = self.config
        config.add_bubbling_path(C, ParentFromInstance(lambda s:s.b, lambda c: B))
        config.add_bubbling_path(B, ParentFromInstance(lambda s:s.a, lambda c: A))
        config.verify_bubbling_path(C, [C, B, A])

    def test_each_path__missing1(self):
        from pyramid_bubbling.components import ParentFromInstance
        from pyramid_bubbling import BubblingConfigurationError
        config = self.config
        ##config.add_bubbling_path(C, ParentFromInstance(lambda s:s.b, lambda c: B))
        config.add_bubbling_path(B, ParentFromInstance(lambda s:s.a, lambda c: A))
        # Missing first hop: verification must fail.
        with self.assertRaises(BubblingConfigurationError):
            config.verify_bubbling_path(C, [C, B, A])

    def test_each_path__missing2(self):
        from pyramid_bubbling.components import ParentFromInstance
        from pyramid_bubbling import BubblingConfigurationError
        config = self.config
        config.add_bubbling_path(C, ParentFromInstance(lambda s:s.b, lambda c: B))
        ##config.add_bubbling_path(B, ParentFromInstance(lambda s:s.a, lambda c: A))
        # Missing second hop: verification must fail too.
        with self.assertRaises(BubblingConfigurationError):
            config.verify_bubbling_path(C, [C, B, A])

    def test_simple(self):
        from pyramid_bubbling.components import ParentFromInstance
        config = self.config
        def access(s):
            return getattr(s, "parent", None)
        # One INode-wide path registration covers X, Y and Z.
        config.add_bubbling_path(INode, ParentFromInstance(access, access))
        config.verify_bubbling_path(Z, [Z, Y, X])
class ConfigurationBubblingVerifyEventTests(ConfigurationTestsBase):
    """verify_bubbling_event returns bound handlers or raises."""

    def test_not_bound__raise_expception(self):
        from pyramid_bubbling.components import ParentFromInstance
        from pyramid_bubbling import BubblingConfigurationError
        config = self.config
        config.add_bubbling_path(C, ParentFromInstance(lambda s:s.b, lambda c: B))
        config.add_bubbling_path(B, ParentFromInstance(lambda s:s.a, lambda c: A))
        # Paths exist but no handlers are registered -> configuration error.
        with self.assertRaises(BubblingConfigurationError):
            result = config.verify_bubbling_event(C)
            compare(result, [None, None, None])

    def test_each_path(self):
        from pyramid_bubbling.components import ParentFromInstance
        config = self.config
        config.add_bubbling_path(C, ParentFromInstance(lambda s:s.b, lambda c: B))
        config.add_bubbling_path(B, ParentFromInstance(lambda s:s.a, lambda c: A))
        def called(subject):
            pass
        config.add_bubbling_event(C, called)
        config.add_bubbling_event(B, called)
        config.add_bubbling_event(A, called)
        result = config.verify_bubbling_event(C)
        compare(result, [called, called, called])

    def test_each_path__named(self):
        from pyramid_bubbling.components import ParentFromInstance
        config = self.config
        config.add_bubbling_path(C, ParentFromInstance(lambda s:s.b, lambda c: B))
        config.add_bubbling_path(B, ParentFromInstance(lambda s:s.a, lambda c: A))
        def called(subject):
            pass
        # Same handlers registered under an explicit event name.
        config.add_bubbling_event(C, called, "called")
        config.add_bubbling_event(B, called, "called")
        config.add_bubbling_event(A, called, "called")
        result = config.verify_bubbling_event(C, "called")
        compare(result, [called, called, called])
class ConfigurationForSimpleBubblingTests(ConfigurationTestsBase):
    """Verification with plain Accessor-based (non-registry) bubbling."""

    def test_bubbling_path(self):
        from pyramid_bubbling import Accessor
        config = self.config
        config.verify_bubbling_path(K, [K, J, I], access=Accessor("parent"))

    def test__bubbling_path_lookup_failure(self):
        from pyramid_bubbling import BubblingConfigurationError
        from pyramid_bubbling import Accessor
        config = self.config
        # A nonexistent attribute name breaks the chain immediately.
        with self.assertRaises(BubblingConfigurationError):
            config.verify_bubbling_path(K, [K, J, I], access=Accessor("*dummy*"))

    def test_bound_event(self):
        from pyramid_bubbling import Accessor
        config = self.config
        config.verify_bubbling_event(K, event_name="called", access=Accessor("parent"))

    def test_bound_event__another_event_name(self):
        from pyramid_bubbling import BubblingConfigurationError
        from pyramid_bubbling import Accessor
        config = self.config
        # No on_foo handlers exist along K -> J -> I.
        with self.assertRaises(BubblingConfigurationError):
            config.verify_bubbling_event(K, event_name="foo", access=Accessor("parent"))
if __name__ == '__main__':
unittest.main()
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,318
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/src/pyramid_bubbling/components.py
|
# -*- coding:utf-8 -*-
import venusian
from .interfaces import (
IAccess,
IEvent,
IParentFromInstanceAdapter
)
from zope.interface import (
implementer,
provider,
providedBy,
implementedBy
)
from zope.interface.verify import verifyObject
from weakref import WeakValueDictionary
from . import (
Bubbling,
BubblingConfigurationError
)
from .util import iface_from_class
@implementer(IAccess)
class RegistryAccess(object):
    """IAccess strategy that resolves parents via registry adapters.

    Lookups are keyed on the interfaces provided by each *instance*.
    """

    def __init__(self, registry, name=""):
        self.registry = registry
        self.name = name
        # Cache of resolved adapters per instance.
        # NOTE(review): WeakValueDictionary holds *keys* strongly — every
        # traversed instance stays referenced; a WeakKeyDictionary may have
        # been intended.  Verify.
        self.cache = WeakValueDictionary()

    def _get_relation(self, target):
        """Return the parent-from-instance adapter for *target* (cached)."""
        try:
            fn = self.cache[target]
        except KeyError:
            fn = _lookup(self.registry, [providedBy(target)], name=self.name)
            if fn is None:
                return fn
            self.cache[target] = fn
        return fn

    def exists(self, target):
        ## todo: speedup if need.
        # Exists only when an adapter is registered AND it yields a parent.
        fn = self._get_relation(target)
        return bool(fn and fn(target))

    def access(self, target):
        fn = self._get_relation(target)
        return fn(target)

    def get_notify(self, target, name):
        """Look up the IEvent handler registered for *target*'s interfaces."""
        return self.registry.adapters.lookup([providedBy(target)], IEvent, name=name)
@implementer(IAccess)
class RegistryAccessForClass(object):
    """IAccess strategy for traversing *classes* via the registry.

    Lookups are keyed on implementedBy(cls); parents are resolved through
    the adapter's class-level lookup (from_class).
    """

    def __init__(self, registry, name=""):
        self.registry = registry
        self.name = name

    def lookup(self, target):
        return _lookup(self.registry, [implementedBy(target)], name=self.name)

    def exists(self, target):
        ## todo: speedup if need.
        fn = self.lookup(target)
        return bool(fn and fn.from_class(target))

    def access(self, target):
        return self.lookup(target).from_class(target)

    def get_notify(self, target, name):
        return self.registry.adapters.lookup([implementedBy(target)], IEvent, name=name)
@implementer(IParentFromInstanceAdapter)
class ParentFromInstance(object):
    """Adapter bundling instance-level and class-level parent lookups."""

    def __init__(self, lookup, cls_lookup):
        # lookup: instance -> parent instance
        # cls_lookup: class -> parent class
        self.lookup = lookup
        self.cls_lookup = cls_lookup

    def default_notify(self, *args, **kwargs):
        """Fallback handler that logs its arguments to stderr."""
        import sys
        # NOTE(review): the message has no trailing newline and the case is
        # always formatted as None — confirm this is intentional.
        sys.stderr.write("case:{}, args={}, kwargs={}".format(None, args, kwargs))
        return True

    def __call__(self, *args, **kwargs):
        return self.lookup(*args, **kwargs)

    def from_class(self, *args, **kwargs):
        return self.cls_lookup(*args, **kwargs)
def _add_bubbling_path(registry, ParentFromInstanceClass, parent_from_instance, name="", dynamic=True):
    """Register *parent_from_instance* as the parent adapter for a class.

    Non-interface classes get a dynamically generated interface
    (iface_from_class with dynamic=True).
    """
    verifyObject(IParentFromInstanceAdapter, parent_from_instance)
    iface = iface_from_class(ParentFromInstanceClass, dynamic=dynamic, Exception=BubblingConfigurationError)
    if not isinstance(iface, (list, tuple)):
        iface = [iface]
    registry.adapters.register(iface, IParentFromInstanceAdapter, name, parent_from_instance)
def add_bubbling_path(config, instanceClass, parent_from_instance, name=""):
    """Configurator directive: register a bubbling parent adapter."""
    _add_bubbling_path(config.registry, config.maybe_dotted(instanceClass), parent_from_instance, name=name)
def _add_bubbling_event(registry, SubjectClass, fn, name="", dynamic=True):
    """Register *fn* as the IEvent handler named *name* for SubjectClass."""
    iface = iface_from_class(SubjectClass, dynamic=dynamic, Exception=BubblingConfigurationError)
    if not isinstance(iface, (list, tuple)):
        iface = [iface]
    # Mark fn as providing IEvent so the registry will hand it back.
    fn = provider(IEvent)(fn)
    registry.adapters.register(iface, IEvent, name, fn)
def add_bubbling_event(config, SubjectClass, fn, name=""):
    """Configurator directive: register an event handler for SubjectClass."""
    _add_bubbling_event(config.registry, config.maybe_dotted(SubjectClass), fn, name=name)
def _lookup(registry, obj, name=""):
    # obj: list of interface specifications (providedBy/implementedBy results).
    return registry.adapters.lookup(obj, IParentFromInstanceAdapter, name=name)
def lookup(request, iface, name=""):
    """Request-level convenience wrapper around _lookup."""
    return _lookup(request.registry, iface, name=name)
class bubbling_event_config(object):
    """Venusian decorator: register the wrapped function as an IEvent handler
    for SubjectClass when the module is scanned."""

    def __init__(self, SubjectClass, name=""):
        self.SubjectClass = SubjectClass
        self.name = name

    def __call__(self, wrapped):
        def callback(context, name, ob):
            # `info` is bound below before any scan invokes this callback.
            config = context.config.with_package(info.module)
            config.add_bubbling_event(self.SubjectClass, wrapped, name=self.name)
        #info = venusian.attach(wrapped, callback, category='pyramid_bubbling', depth=1)
        info = venusian.attach(wrapped, callback, category='pyramid_bubbling')
        return wrapped
def verify_bubbling_path(config, startpoint, expected, name="", access=None):
    """Directive: assert at commit time that *startpoint*'s path == *expected*."""
    from pyramid.config.util import MAX_ORDER

    def register():
        bubbling = Bubbling(access or RegistryAccessForClass(config.registry, name=name))
        result = bubbling.get_bubbling_path_order(startpoint)
        if not (result == expected):
            raise BubblingConfigurationError("expected:{} != result:{}".format(expected, result))

    # MAX_ORDER defers the check until all registrations have been made.
    config.action(None, register, order=MAX_ORDER)
def verify_bubbling_event(config, startpoint, event_name="", path_name="", access=None):
    """Directive: return the handlers bound along *startpoint*'s path.

    Raises BubblingConfigurationError when any node lacks a handler.
    """
    bubbling = Bubbling(access or RegistryAccessForClass(config.registry, name=path_name))
    r = []
    for subject, ev in bubbling.get_ordered_event(startpoint, event_name):
        if subject is None:
            break
        if ev is None:
            raise BubblingConfigurationError("subject={}, not bound event. registered event:({})".format(
                subject,
                config.registry.adapters.lookupAll([implementedBy(startpoint)], IEvent)
            ))
        r.append(ev)
    return r
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,319
|
podhmo/pyramid_bubbling
|
refs/heads/master
|
/gen.py
|
# -*- coding:utf-8 -*-
# Generate README-style output: run the first given script, show its
# output, then dump the source of every file named on the command line.
print("""
pyramid_bubbling
================
bubbling event
sample
----------------------------------------
""")
import os
import sys

# Run the demo script with the virtualenv's python and capture its stdout.
# Fix: close the pipe deterministically (os.popen returns a file-like
# object that supports the context-manager protocol).
with os.popen("../bin/python {}".format(sys.argv[1])) as out:
    print("output ::")
    print("")
    # Iterate the stream directly instead of materializing readlines().
    for line in out:
        print(" ", line.rstrip())
print("")

# Dump each source file as a reST literal block.
for f in sys.argv[1:]:
    print(f)
    print("::")
    print("")
    with open(f) as rf:
        for line in rf:
            print(" ", line.rstrip())
|
{"/src/pyramid_bubbling/api.py": ["/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/components.py"], "/src/pyramid_bubbling/__init__.py": ["/src/pyramid_bubbling/interfaces.py"], "/src/pyramid_bubbling/components.py": ["/src/pyramid_bubbling/interfaces.py", "/src/pyramid_bubbling/__init__.py", "/src/pyramid_bubbling/util.py"]}
|
21,330
|
testitesti22/ha-sun2
|
refs/heads/master
|
/custom_components/sun2/sensor.py
|
"""Sun2 Sensor."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_CONDITIONS, DEVICE_CLASS_TIMESTAMP)
from homeassistant.core import callback
from homeassistant.util import dt as dt_util
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import (
async_track_time_change, async_track_point_in_time)
from .helpers import (
async_init_astral_loc, astral_event, nearest_second, SIG_LOC_UPDATED)
_LOGGER = logging.getLogger(__name__)
_SOLAR_DEPRESSIONS = ('astronomical', 'civil', 'nautical')
_ELEV_RND = 0.5
_ELEV_MAX_ERR = 0.02
_DELTA = timedelta(minutes=5)
_ONE_DAY = timedelta(days=1)
ATTR_NEXT_CHANGE = 'next_change'
class Sun2Sensor(Entity):
    """Sun2 Sensor.

    Base entity computing an astral event (dawn, dusk, etc.) for
    yesterday, today and tomorrow; state mirrors today's value.
    """

    def __init__(self, hass, sensor_type, icon, default_solar_depression=0):
        """Initialize sensor."""
        # Types like "civil_dawn" encode the solar depression in their
        # name; otherwise fall back to the supplied default.
        if any(sol_dep in sensor_type for sol_dep in _SOLAR_DEPRESSIONS):
            self._solar_depression, self._event = sensor_type.rsplit('_', 1)
        else:
            self._solar_depression = default_solar_depression
            self._event = sensor_type
        self._icon = icon
        self._name = sensor_type.replace('_', ' ').title()
        self._state = None
        # Cached values exposed as state attributes.
        self._yesterday = None
        self._today = None
        self._tomorrow = None
        async_init_astral_loc(hass)
        # Unsubscribe callbacks; set when added to hass.
        self._unsub_dispatcher = None
        self._unsub_update = None

    @property
    def should_poll(self):
        """Do not poll."""
        return False

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name

    @property
    def state(self):
        """Return the state of the entity."""
        return self._state

    def _device_state_attributes(self):
        return {
            'yesterday': self._yesterday,
            'today': self._today,
            'tomorrow': self._tomorrow,
        }

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        return self._device_state_attributes()

    @property
    def icon(self):
        """Return the icon to use in the frontend."""
        return self._icon

    def _setup_fixed_updating(self):
        # Default behavior is to update every local midnight.
        # Override for sensor types that should update at a different time,
        # or that have a more dynamic update schedule (in which case override
        # with a method that does nothing and set up the update at the end of
        # an override of _update instead.)
        @callback
        def async_update_at_midnight(now):
            self.async_schedule_update_ha_state(True)
        self._unsub_update = async_track_time_change(
            self.hass, async_update_at_midnight, 0, 0, 0)

    async def async_loc_updated(self):
        """Location updated."""
        self.async_schedule_update_ha_state(True)

    async def async_added_to_hass(self):
        """Subscribe to update signal and set up fixed updating."""
        self._unsub_dispatcher = async_dispatcher_connect(
            self.hass, SIG_LOC_UPDATED, self.async_loc_updated)
        self._setup_fixed_updating()

    async def async_will_remove_from_hass(self):
        """Disconnect from update signal and cancel fixed updating."""
        # NOTE(review): _unsub_dispatcher is called unconditionally while
        # _unsub_update is guarded — confirm it can never be None here.
        self._unsub_dispatcher()
        if self._unsub_update:
            self._unsub_update()

    def _get_astral_event(self, event, date_or_dt):
        # Delegate to the shared astral helper with this sensor's depression.
        return astral_event(event, date_or_dt, self._solar_depression)

    def _get_data(self, date_or_dt):
        return self._get_astral_event(self._event, date_or_dt)

    def _update(self):
        # Refresh yesterday/today/tomorrow; state mirrors today's value.
        today = dt_util.now().date()
        self._yesterday = self._get_data(today-timedelta(days=1))
        self._state = self._today = self._get_data(today)
        self._tomorrow = self._get_data(today+timedelta(days=1))

    async def async_update(self):
        """Update state."""
        self._update()
class Sun2PointInTimeSensor(Sun2Sensor):
    """Sun2 Point in Time Sensor."""

    def __init__(self, hass, sensor_type, icon):
        """Initialize sensor."""
        # 'civil' is the default solar depression for dawn/dusk style events.
        super().__init__(hass, sensor_type, icon, 'civil')

    @property
    def device_class(self):
        """Return the class of this device."""
        return DEVICE_CLASS_TIMESTAMP

    def _update(self):
        super()._update()
        # astral_event returns the string 'none' when the event does not
        # occur; otherwise the state is a datetime serialized as ISO 8601.
        if self._state != 'none':
            self._state = self._state.isoformat()
def _hours_to_hms(hours):
try:
return str(timedelta(hours=hours)).split('.')[0]
except TypeError:
return None
class Sun2PeriodOfTimeSensor(Sun2Sensor):
    """Sun2 Period of Time Sensor."""

    def __init__(self, hass, sensor_type, icon):
        """Initialize sensor."""
        # 0.833 degrees is the standard sunrise/sunset solar depression.
        super().__init__(hass, sensor_type, icon, 0.833)

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return 'hr'

    def _device_state_attributes(self):
        data = super()._device_state_attributes()
        # Also expose the durations formatted as H:MM:SS strings.
        data.update({
            'yesterday_hms': _hours_to_hms(data['yesterday']),
            'today_hms': _hours_to_hms(data['today']),
            'tomorrow_hms': _hours_to_hms(data['tomorrow']),
        })
        return data

    def _get_data(self, date_or_dt):
        # "daylight" sensors measure dawn -> dusk of a day; "night" sensors
        # measure dusk -> next day's dawn.
        if 'daylight' in self._event:
            start = self._get_astral_event('dawn', date_or_dt)
            end = self._get_astral_event('dusk', date_or_dt)
        else:
            start = self._get_astral_event('dusk', date_or_dt)
            end = self._get_astral_event('dawn', date_or_dt+timedelta(days=1))
        # astral_event returns the string 'none' if an event did not occur.
        if 'none' in (start, end):
            return None
        return (end - start).total_seconds()/3600

    def _update(self):
        super()._update()
        if self._state is not None:
            self._state = round(self._state, 3)
class Sun2MinMaxElevationSensor(Sun2Sensor):
    """Sun2 Min/Max Elevation Sensor."""

    def __init__(self, hass, sensor_type, icon, is_min):
        """Initialize sensor."""
        super().__init__(hass, sensor_type, icon)
        # Minimum elevation occurs at solar midnight, maximum at solar noon.
        self._event = 'solar_midnight' if is_min else 'solar_noon'

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return '°'

    def _get_data(self, date_or_dt):
        # Find the time of the extreme, then the elevation at that time.
        event_time = self._get_astral_event(self._event, date_or_dt)
        return self._get_astral_event('solar_elevation', event_time)

    def _update(self):
        super()._update()
        if self._state is not None:
            self._state = round(self._state, 3)
class Sun2MinElevationSensor(Sun2MinMaxElevationSensor):
    """Sensor reporting the sun's minimum elevation (at solar midnight)."""

    def __init__(self, hass, sensor_type, icon):
        """Set up the minimum-elevation variant of the base sensor."""
        super().__init__(hass, sensor_type, icon, is_min=True)
class Sun2MaxElevationSensor(Sun2MinMaxElevationSensor):
    """Sensor reporting the sun's maximum elevation (at solar noon)."""

    def __init__(self, hass, sensor_type, icon):
        """Set up the maximum-elevation variant of the base sensor."""
        super().__init__(hass, sensor_type, icon, is_min=False)
def _nearest_multiple(value, multiple):
return int(round(value / multiple)) * multiple
def _calc_nxt_time(time0, elev0, time1, elev1, trg_elev):
    """Linearly interpolate the time at which the elevation reaches trg_elev."""
    fraction = (trg_elev - elev0) / (elev1 - elev0)
    return nearest_second(time0 + (time1 - time0) * fraction)
class Sun2ElevationSensor(Sun2Sensor):
    """Sun2 Elevation Sensor."""

    def __init__(self, hass, sensor_type, icon):
        """Initialize sensor."""
        super().__init__(hass, sensor_type, icon)
        self._reset()

    def _reset(self):
        # Forget all cached solar geometry and scheduling state.
        self._prv_sol_midn = None
        self._sol_noon = None
        self._sol_midn = None
        self._prv_time = None
        self._prv_elev = None
        self._next_change = None

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        return {ATTR_NEXT_CHANGE: self._next_change}

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return '°'

    async def async_loc_updated(self):
        """Location updated."""
        # A new location invalidates cached curve data and pending updates.
        self._reset()
        if self._unsub_update:
            self._unsub_update()
            self._unsub_update = None
        self.async_schedule_update_ha_state(True)

    def _setup_fixed_updating(self):
        # Updates are scheduled dynamically at the end of _update instead.
        pass

    def _get_nxt_time(self, time1, elev1, trg_elev, min_time, max_time):
        # Iteratively refine (secant-style, via _calc_nxt_time) the time at
        # which the elevation reaches trg_elev, starting from the previous
        # and current samples. Returns None if no valid estimate is found
        # within [min_time, max_time].
        if self._prv_time < min_time:
            return None
        time0 = self._prv_time
        elev0 = self._prv_elev
        # Seed so the tolerance test fails on the first loop pass.
        nxt_elev = trg_elev + 1.5 * _ELEV_MAX_ERR
        while abs(nxt_elev - trg_elev) >= _ELEV_MAX_ERR:
            try:
                nxt_time = _calc_nxt_time(time0, elev0, time1, elev1, trg_elev)
            except ZeroDivisionError:
                # Flat segment: elevation is not changing between samples.
                return None
            if nxt_time < min_time or nxt_time > max_time:
                return None
            if nxt_time in (time0, time1):
                break
            nxt_elev = astral_event("solar_elevation", nxt_time)
            if nxt_time > time1:
                # Extrapolated past the newer sample: slide window forward.
                time0 = time1
                elev0 = elev1
                time1 = nxt_time
                elev1 = nxt_elev
            elif elev0 < trg_elev < nxt_elev or elev0 > trg_elev > nxt_elev:
                # Target lies between the older sample and the new estimate.
                time1 = nxt_time
                elev1 = nxt_elev
            else:
                time0 = nxt_time
                elev0 = nxt_elev
        return nxt_time

    def _set_nxt_time(self, cur_time):
        # Fallback schedule: step by _DELTA, but never skip over solar noon
        # or solar midnight, where the elevation curve changes direction.
        if self._sol_noon - _DELTA <= cur_time < self._sol_noon:
            return self._sol_noon
        elif self._sol_midn - _DELTA <= cur_time:
            return self._sol_midn
        else:
            return cur_time + _DELTA

    def _update(self):
        # Astral package ignores microseconds, so round to nearest second
        # before continuing.
        cur_time = nearest_second(dt_util.now())
        cur_elev = astral_event("solar_elevation", cur_time)
        self._state = f'{cur_elev:0.1f}'
        _LOGGER.debug('Raw elevation = %f -> %s', cur_elev, self._state)

        # Find the next solar midnight AFTER the current time, and the solar noon and
        # solar midnight that precede it. This only needs to be done once a day when we
        # reach or pass the previously determined solar midnight.
        if not self._sol_midn or cur_time >= self._sol_midn:
            date = cur_time.date()
            # solar_midnight() returns the solar midnight (which is when the
            # sun reaches its lowest point) nearest to the start of today. Note
            # that it may have occurred yesterday.
            self._sol_midn = astral_event("solar_midnight", date)
            while self._sol_midn <= cur_time:
                date += _ONE_DAY
                self._sol_midn = astral_event("solar_midnight", date)
            self._sol_noon = astral_event("solar_noon", date - _ONE_DAY)
            self._prv_sol_midn = astral_event("solar_midnight", date - _ONE_DAY)
            _LOGGER.debug(
                "Solar midnight/noon/midnight: %s/%0.2f, %s/%0.2f, %s/%0.2f",
                self._prv_sol_midn,
                astral_event("solar_elevation", self._prv_sol_midn),
                self._sol_noon,
                astral_event("solar_elevation", self._sol_noon),
                self._sol_midn,
                astral_event("solar_elevation", self._sol_midn),
            )

        if self._prv_time:
            # Extrapolate based on previous point and current point to find
            # next point.
            rnd_elev = _nearest_multiple(cur_elev, _ELEV_RND)
            if cur_time < self._sol_noon:
                # Rising half of the curve: look for the next higher step.
                nxt_time = self._get_nxt_time(
                    cur_time, cur_elev,
                    rnd_elev + _ELEV_RND, self._prv_sol_midn, self._sol_noon)
            else:
                # Falling half: look for the next lower step.
                nxt_time = self._get_nxt_time(
                    cur_time, cur_elev,
                    rnd_elev - _ELEV_RND, self._sol_noon, self._sol_midn)
        else:
            nxt_time = None
        if not nxt_time:
            nxt_time = self._set_nxt_time(cur_time)

        self._prv_time = cur_time
        self._prv_elev = cur_elev
        self._next_change = nxt_time

        # Schedule the next state refresh at the computed time.
        @callback
        def async_update(now):
            self._unsub_update = None
            self.async_schedule_update_ha_state(True)
        self._unsub_update = async_track_point_in_time(self.hass, async_update, nxt_time)
# Map of sensor type -> (entity class, frontend icon).
_SENSOR_TYPES = {
    # Points in time
    'solar_midnight': (Sun2PointInTimeSensor, 'mdi:weather-night'),
    'astronomical_dawn': (Sun2PointInTimeSensor, 'mdi:weather-sunset-up'),
    'nautical_dawn': (Sun2PointInTimeSensor, 'mdi:weather-sunset-up'),
    'dawn': (Sun2PointInTimeSensor, 'mdi:weather-sunset-up'),
    'sunrise': (Sun2PointInTimeSensor, 'mdi:weather-sunset-up'),
    'solar_noon': (Sun2PointInTimeSensor, 'mdi:weather-sunny'),
    'sunset': (Sun2PointInTimeSensor, 'mdi:weather-sunset-down'),
    'dusk': (Sun2PointInTimeSensor, 'mdi:weather-sunset-down'),
    'nautical_dusk': (Sun2PointInTimeSensor, 'mdi:weather-sunset-down'),
    'astronomical_dusk': (Sun2PointInTimeSensor, 'mdi:weather-sunset-down'),
    # Time periods
    'daylight': (Sun2PeriodOfTimeSensor, 'mdi:weather-sunny'),
    'civil_daylight': (Sun2PeriodOfTimeSensor, 'mdi:weather-sunny'),
    'nautical_daylight': (Sun2PeriodOfTimeSensor, 'mdi:weather-sunny'),
    'astronomical_daylight': (Sun2PeriodOfTimeSensor, 'mdi:weather-sunny'),
    'night': (Sun2PeriodOfTimeSensor, 'mdi:weather-night'),
    'civil_night': (Sun2PeriodOfTimeSensor, 'mdi:weather-night'),
    'nautical_night': (Sun2PeriodOfTimeSensor, 'mdi:weather-night'),
    'astronomical_night': (Sun2PeriodOfTimeSensor, 'mdi:weather-night'),
    # Min/Max elevation
    'min_elevation': (Sun2MinElevationSensor, 'mdi:weather-night'),
    'max_elevation': (Sun2MaxElevationSensor, 'mdi:weather-sunny'),
    # Elevation
    'elevation': (Sun2ElevationSensor, 'mdi:weather-sunny'),
}

# Require monitored_conditions; each entry must be a known sensor type.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_MONITORED_CONDITIONS): vol.All(
        cv.ensure_list, [vol.In(_SENSOR_TYPES)]),
})
async def async_setup_platform(
        hass, config, async_add_entities, discovery_info=None):
    """Set up sensors."""
    # Instantiate the configured sensor class for each monitored condition.
    entities = []
    for event in config[CONF_MONITORED_CONDITIONS]:
        sensor_cls, icon = _SENSOR_TYPES[event]
        entities.append(sensor_cls(hass, event, icon))
    async_add_entities(entities, True)
|
{"/custom_components/sun2/sensor.py": ["/custom_components/sun2/helpers.py"], "/custom_components/sun2/binary_sensor.py": ["/custom_components/sun2/helpers.py"]}
|
21,331
|
testitesti22/ha-sun2
|
refs/heads/master
|
/custom_components/sun2/binary_sensor.py
|
"""Sun2 Binary Sensor."""
from datetime import timedelta
import logging
import voluptuous as vol
try:
from homeassistant.components.binary_sensor import BinarySensorEntity
except ImportError:
from homeassistant.components.binary_sensor import BinarySensorDevice
BinarySensorEntity = BinarySensorDevice
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_ABOVE, CONF_ELEVATION, CONF_MONITORED_CONDITIONS, CONF_NAME)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_track_point_in_time
from homeassistant.util import dt as dt_util
from .helpers import (
async_init_astral_loc, astral_event, nearest_second, SIG_LOC_UPDATED)
_LOGGER = logging.getLogger(__name__)
# Default threshold: standard sunrise/sunset solar depression (degrees).
DEFAULT_ELEVATION_ABOVE = -0.833
DEFAULT_ELEVATION_NAME = 'Above Horizon'

ABOVE_ICON = 'mdi:white-balance-sunny'
BELOW_ICON = 'mdi:moon-waxing-crescent'

_ONE_DAY = timedelta(days=1)
_ONE_SEC = timedelta(seconds=1)

_SENSOR_TYPES = [CONF_ELEVATION]

ATTR_NEXT_CHANGE = 'next_change'

# Accepted configuration shapes (normalized by _val_cfg):
# elevation
# elevation: <threshold>
# elevation:
#   above: <threshold>
#   name: <friendly_name>
def _val_cfg(config):
    """Normalize a monitored-condition entry to {sensor_type: options}."""
    if isinstance(config, str):
        # Bare sensor type string: expand to an empty options dict.
        config = {config: {}}
    else:
        if CONF_ELEVATION in config:
            value = config[CONF_ELEVATION]
            if isinstance(value, float):
                # Bare threshold number: expand to {above: value}.
                config[CONF_ELEVATION] = {CONF_ABOVE: value}
    if CONF_ELEVATION in config:
        options = config[CONF_ELEVATION]
        allowed = [CONF_ELEVATION, CONF_ABOVE, CONF_NAME]
        for key in options:
            if key not in allowed:
                raise vol.Invalid(
                    f'{key} not allowed for {CONF_ELEVATION}')
        options.setdefault(CONF_ABOVE, DEFAULT_ELEVATION_ABOVE)
        if CONF_NAME not in options:
            # Derive a friendly name from the threshold value.
            above = options[CONF_ABOVE]
            if above == DEFAULT_ELEVATION_ABOVE:
                name = DEFAULT_ELEVATION_NAME
            else:
                suffix = f'minus {-above}' if above < 0 else f'{above}'
                name = 'Above ' + suffix
            options[CONF_NAME] = name
    return config
# A monitored condition is a bare sensor type string, or a mapping from
# sensor type to a threshold number or an {above, name} options dict;
# _val_cfg then normalizes every accepted shape to the mapping form.
_BINARY_SENSOR_SCHEMA = vol.All(
    vol.Any(
        vol.In(_SENSOR_TYPES),
        vol.Schema({
            vol.Required(vol.In(_SENSOR_TYPES)): vol.Any(
                vol.Coerce(float),
                vol.Schema({
                    vol.Optional(CONF_ABOVE): vol.Coerce(float),
                    vol.Optional(CONF_NAME): cv.string,
                }),
            ),
        }),
    ),
    _val_cfg,
)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_MONITORED_CONDITIONS): vol.All(
        cv.ensure_list, [_BINARY_SENSOR_SCHEMA]),
})
class Sun2ElevationSensor(BinarySensorEntity):
    """Sun2 Elevation Sensor.

    Binary sensor that is on while the sun's elevation is above a
    configured threshold; schedules its own next update at the computed
    threshold-crossing time.
    """

    def __init__(self, hass, name, above):
        """Initialize sensor."""
        self._name = name
        self._threshold = above
        self._state = None
        self._next_change = None
        async_init_astral_loc(hass)
        self._unsub_dispatcher = None
        self._unsub_update = None

    @property
    def should_poll(self):
        """Do not poll."""
        return False

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        return {ATTR_NEXT_CHANGE: self._next_change}

    @property
    def icon(self):
        """Return the icon to use in the frontend."""
        return ABOVE_ICON if self.is_on else BELOW_ICON

    @property
    def is_on(self):
        """Return true if the binary sensor is on."""
        return self._state

    async def async_loc_updated(self):
        """Location updated."""
        # Cancel any pending scheduled update; the refresh triggered below
        # will compute and schedule a new one.
        if self._unsub_update:
            self._unsub_update()
            self._unsub_update = None
        self.async_schedule_update_ha_state(True)

    async def async_added_to_hass(self):
        """Subscribe to update signal."""
        self._unsub_dispatcher = async_dispatcher_connect(
            self.hass, SIG_LOC_UPDATED, self.async_loc_updated)

    async def async_will_remove_from_hass(self):
        """Disconnect from update signal."""
        self._unsub_dispatcher()
        if self._unsub_update:
            self._unsub_update()

    def _find_nxt_dttm(self, t0_dttm, t0_elev, t1_dttm, t1_elev):
        # Do a binary search for time between t0 & t1 where elevation is
        # nearest threshold, but also above (or equal to) it if current
        # elevation is below it (i.e., current state is False), or below it if
        # current elevation is above (or equal to) it (i.e., current state is
        # True.)

        slope = 1 if t1_elev > t0_elev else -1

        # Find mid point and throw away fractional seconds since astral package
        # ignores microseconds.
        tn_dttm = nearest_second(t0_dttm + (t1_dttm - t0_dttm) / 2)
        tn_elev = astral_event("solar_elevation", tn_dttm)

        while not (
                (self._state and tn_elev <= self._threshold
                 or not self._state and tn_elev > self._threshold)
                and abs(tn_elev - self._threshold) <= 0.01):
            if (tn_elev - self._threshold) * slope > 0:
                if t1_dttm == tn_dttm:
                    break
                t1_dttm = tn_dttm
            else:
                if t0_dttm == tn_dttm:
                    break
                t0_dttm = tn_dttm
            tn_dttm = nearest_second(t0_dttm + (t1_dttm - t0_dttm) / 2)
            tn_elev = astral_event("solar_elevation", tn_dttm)

        # Did we go too far?
        # If so, nudge one second so the found time lies strictly on the
        # other side of the threshold relative to the current state.
        if self._state and tn_elev > self._threshold:
            tn_dttm -= slope * _ONE_SEC
            if astral_event("solar_elevation", tn_dttm) > self._threshold:
                raise RuntimeError("Couldn't find next update time")
        elif not self._state and tn_elev <= self._threshold:
            tn_dttm += slope * _ONE_SEC
            if astral_event("solar_elevation", tn_dttm) <= self._threshold:
                raise RuntimeError("Couldn't find next update time")

        return tn_dttm

    def _get_nxt_dttm(self, cur_dttm):
        # Find next segment of elevation curve, between a pair of solar noon &
        # solar midnight, where it crosses the threshold, but in the opposite
        # direction (i.e., where output should change state.) Note that this
        # might be today, tomorrow, days away, or never, depending on location,
        # time of year and specified threshold.

        # Start by finding the next five solar midnight & solar noon "events"
        # since current time might be anywhere from before today's solar
        # midnight (if it is this morning) to after tomorrow's solar midnight
        # (if it is this evening.)
        date = cur_dttm.date()
        evt_dttm1 = astral_event("solar_midnight", date)
        evt_dttm2 = astral_event("solar_noon", date)
        evt_dttm3 = astral_event("solar_midnight", date + _ONE_DAY)
        evt_dttm4 = astral_event("solar_noon", date + _ONE_DAY)
        evt_dttm5 = astral_event("solar_midnight", date + 2 * _ONE_DAY)

        # See if segment we're looking for falls between any of these events.
        # If not move ahead a day and try again, but don't look more than a
        # a year ahead.
        end_date = date + 366 * _ONE_DAY
        while date < end_date:
            # Pick the half-curve segment (rising or falling) that starts at
            # or after the current time and moves in the direction opposite
            # the current state.
            if cur_dttm < evt_dttm1:
                if self._state:
                    t0_dttm = cur_dttm
                    t1_dttm = evt_dttm1
                else:
                    t0_dttm = evt_dttm1
                    t1_dttm = evt_dttm2
            elif cur_dttm < evt_dttm2:
                if not self._state:
                    t0_dttm = cur_dttm
                    t1_dttm = evt_dttm2
                else:
                    t0_dttm = evt_dttm2
                    t1_dttm = evt_dttm3
            elif cur_dttm < evt_dttm3:
                if self._state:
                    t0_dttm = cur_dttm
                    t1_dttm = evt_dttm3
                else:
                    t0_dttm = evt_dttm3
                    t1_dttm = evt_dttm4
            else:
                if not self._state:
                    t0_dttm = cur_dttm
                    t1_dttm = evt_dttm4
                else:
                    t0_dttm = evt_dttm4
                    t1_dttm = evt_dttm5

            t0_elev = astral_event("solar_elevation", t0_dttm)
            t1_elev = astral_event("solar_elevation", t1_dttm)

            # Did we find it?
            # Note, if t1_elev > t0_elev, then we're looking for an elevation
            # ABOVE threshold. In this case we can't use this range if the
            # threshold is EQUAL to the elevation at t1, because this range
            # does NOT include any points with a higher elevation value. For
            # all other cases it's ok for the threshold to equal the elevation
            # at t0 or t1.
            if (t0_elev <= self._threshold < t1_elev
                    or t1_elev <= self._threshold <= t0_elev):
                nxt_dttm = self._find_nxt_dttm(
                    t0_dttm, t0_elev, t1_dttm, t1_elev)
                if nxt_dttm - cur_dttm > _ONE_DAY:
                    _LOGGER.warning(
                        'Sun elevation will not reach %f again until %s',
                        self._threshold, nxt_dttm.date())
                return nxt_dttm

            # Shift one day ahead.
            date += _ONE_DAY
            evt_dttm1 = evt_dttm3
            evt_dttm2 = evt_dttm4
            evt_dttm3 = evt_dttm5
            evt_dttm4 = astral_event("solar_noon", date + _ONE_DAY)
            evt_dttm5 = astral_event("solar_midnight", date + 2 * _ONE_DAY)

        # Didn't find one.
        return None

    async def async_update(self):
        """Update state."""
        cur_dttm = dt_util.now()
        cur_elev = astral_event("solar_elevation", cur_dttm)
        self._state = cur_elev > self._threshold
        _LOGGER.debug(
            'name=%s, above=%f, elevation=%f',
            self._name, self._threshold, cur_elev)

        nxt_dttm = self._get_nxt_dttm(cur_dttm)
        self._next_change = nxt_dttm

        # Schedule the next refresh at the computed crossing time, if any.
        @callback
        def async_update(now):
            self._unsub_update = None
            self.async_schedule_update_ha_state(True)
        if nxt_dttm:
            self._unsub_update = async_track_point_in_time(self.hass, async_update, nxt_dttm)
        else:
            _LOGGER.error(
                'Sun elevation never reaches %f at this location',
                self._threshold)
async def async_setup_platform(
        hass, config, async_add_entities, discovery_info=None):
    """Set up sensors."""
    # Build one elevation sensor per configured elevation condition.
    sensors = [
        Sun2ElevationSensor(
            hass, cfg[CONF_ELEVATION][CONF_NAME], cfg[CONF_ELEVATION][CONF_ABOVE])
        for cfg in config[CONF_MONITORED_CONDITIONS]
        if CONF_ELEVATION in cfg
    ]
    async_add_entities(sensors, True)
|
{"/custom_components/sun2/sensor.py": ["/custom_components/sun2/helpers.py"], "/custom_components/sun2/binary_sensor.py": ["/custom_components/sun2/helpers.py"]}
|
21,332
|
testitesti22/ha-sun2
|
refs/heads/master
|
/custom_components/sun2/helpers.py
|
"""Sun2 Helpers."""
from datetime import timedelta
try:
from astral import AstralError
except ImportError:
AstralError = TypeError
from homeassistant.const import EVENT_CORE_CONFIG_UPDATE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.sun import get_astral_location
SIG_LOC_UPDATED = 'sun2_loc_updated'
_LOC = None
_ELEV = None
_HASS = None
def _get_astral_location():
    """Refresh the cached astral Location (and elevation, on newer HA)."""
    global _LOC, _ELEV
    try:
        # Newer Home Assistant returns (location, elevation).
        _LOC, _ELEV = get_astral_location(_HASS)
    except TypeError:
        # Older versions return only the location object.
        _LOC = get_astral_location(_HASS)
def _update_location(event):
    """Handle a core config update: refresh location, notify subscribers."""
    _get_astral_location()
    # Tell all sun2 entities the location changed so they can recompute.
    dispatcher_send(_HASS, SIG_LOC_UPDATED)
@callback
def async_init_astral_loc(hass):
    """Initialize astral Location."""
    global _HASS
    # Only initialize once; subsequent callers share the cached location.
    if not _LOC:
        _HASS = hass
        _get_astral_location()
        # Keep the cached location in sync with core config changes.
        hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, _update_location)
def astral_event(event, date_or_dt, depression=None):
    """Return astral event result.

    Returns the string 'none' when astral raises (event does not occur at
    this location/date).
    """
    if depression is not None:
        _LOC.solar_depression = depression
    try:
        if _ELEV is not None:
            # Newer astral API: solar_midnight/solar_noon are named
            # midnight/noon and take no observer elevation.
            if event in ('solar_midnight', 'solar_noon'):
                return getattr(_LOC, event.replace('solar_', ''))(date_or_dt)
            else:
                return getattr(_LOC, event)(date_or_dt, observer_elevation=_ELEV)
        return getattr(_LOC, event)(date_or_dt)
    except AstralError:
        return 'none'
def nearest_second(time):
    """Round time to nearest second."""
    truncated = time.replace(microsecond=0)
    if time.microsecond >= 500000:
        truncated += timedelta(seconds=1)
    return truncated
|
{"/custom_components/sun2/sensor.py": ["/custom_components/sun2/helpers.py"], "/custom_components/sun2/binary_sensor.py": ["/custom_components/sun2/helpers.py"]}
|
21,338
|
dreyk/emt
|
refs/heads/master
|
/models/resnet/model.py
|
import tensorflow as tf
import models.resnet.resnet_gn_ws as resnet
import models.resnet.layers as clayers
import logging
def _encoder_layer(input, planes, blocks, dilate=1, stride=1, layer=1):
    """Build one ResNet stage of `blocks` bottleneck units.

    A downsample projection is added when the stride or channel count
    changes; with dilation > 1 the downsample stride is forced to 1.
    """
    downsample = None
    if stride != 1 or input.shape[3] != planes * 4:
        # Bottleneck expands channels by 4; project the residual to match.
        downsample = resnet.Downsample(planes * 4, stride if dilate == 1 else 1)
        logging.info('Encoder Layer {} downsample to {}'.format(layer, planes * 4))
    input = resnet.Bottleneck(planes, stride, dilate=dilate, layer=layer, name=f'EncoderL{layer}-0')(input, downsample)
    for i in range(1, blocks):
        input = resnet.Bottleneck(planes, layer=layer, dilate=dilate, name=f'EncoderL{layer}-{i}')(input)
    return input
def encoder(input):
    """ResNet-50-style encoder; returns all intermediate activations.

    The returned list (input, stem relu, maxpool, stages 1-4) feeds the
    decoder's skip connections.
    """
    conv_out = [input]
    # Stem: 7x7 stride-2 conv + group norm + relu.
    conv1 = tf.keras.layers.Conv2D(64,
                                   kernel_size=7,
                                   strides=2,
                                   padding='same',
                                   use_bias=False,
                                   kernel_initializer='he_uniform', kernel_regularizer=clayers.ws_reg)(input)
    logging.info("conv1: {}".format(conv1.shape))
    bn1 = clayers.GroupNormalization()(conv1)
    logging.info("bn1: {}".format(bn1.shape))
    relu = tf.keras.layers.Activation(tf.keras.activations.relu)(bn1)
    logging.info("relu: {}".format(relu.shape))
    conv_out.append(relu)
    maxpool = tf.keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding="same")(relu)
    logging.info("maxpool: {}".format(maxpool.shape))
    conv_out.append(maxpool)
    # Four bottleneck stages; later stages use dilation instead of stride
    # to keep spatial resolution for matting.
    layer1 = _encoder_layer(maxpool, 64, 3, layer='1')
    conv_out.append(layer1)
    layer2 = _encoder_layer(layer1, 128, 4, stride=2, layer='2')
    conv_out.append(layer2)
    layer3 = _encoder_layer(layer2, 256, 6, stride=2, dilate=2, layer='3')
    conv_out.append(layer3)
    layer4 = _encoder_layer(layer3, 512, 3, stride=2, dilate=4, layer='4')
    conv_out.append(layer4)
    return conv_out
def fba_fusion(alpha, img, F, B):
    """FBA-matting fusion: make alpha, foreground and background consistent."""
    # Refine F and B using the compositing equation, then clamp to [0, 1].
    F = alpha * img + (1 - alpha**2) * F - alpha * (1 - alpha) * B
    B = (1 - alpha) * img + (2 * alpha - alpha**2) * B - alpha * (1 - alpha) * F
    F = tf.clip_by_value(F, 0, 1)
    B = tf.clip_by_value(B, 0, 1)
    # Regularized least-squares update of alpha from the refined F and B.
    la = 0.1
    diff = F - B
    numerator = alpha * la + tf.reduce_sum((img - B) * diff, 3, True)
    denominator = tf.reduce_sum(diff * diff, 3, True) + la
    alpha = tf.clip_by_value(numerator / denominator, 0, 1)
    return alpha, F, B
class FBADecoder(tf.keras.layers.Layer):
    """FBA matting decoder: pyramid pooling + progressive upsampling.

    Consumes the encoder's skip connections and produces alpha, foreground
    and background predictions, fused via fba_fusion.
    """

    expansion = 1

    def __init__(self, **kwargs):
        super(FBADecoder, self).__init__(**kwargs)
        # Pyramid pooling module over several spatial scales.
        pool_scales = (1, 2, 3, 6)
        self.ppm = []
        for p in pool_scales:
            self.ppm.append(resnet.FBADecoderBlock(256, 1, p))
        self.conv_up1_0 = resnet.FBADecoderBlock(256, 3)
        self.conv_up1_1 = resnet.FBADecoderBlock(256, 3)
        self.conv_up2 = resnet.FBADecoderBlock(256, 3)
        self.conv_up3 = resnet.FBADecoderBlock(64, 3)
        self.upool = tf.keras.layers.UpSampling2D(2)
        self.conv_up4_0 = resnet.FBADecoderBlock(32, 3)
        self.conv_up4_1 = resnet.FBADecoderBlock(16, 3)
        # Final head: 7 channels = 1 alpha + 3 foreground + 3 background.
        self.conv_up4_7 = resnet.FBADecoderBlock(7, 3)

    def call(self, conv_out, img, two_chan_trimap):
        """Decode encoder activations into [alpha, fg, bg].

        Args:
            conv_out: list of encoder activations (input first, deepest last).
            img: original image tensor, values in [0, 1].
            two_chan_trimap: trimap tensor concatenated at the last stage.
        """
        for i, c in enumerate(conv_out):
            logging.info('Decode conv_out{}={}'.format(i, conv_out[i].shape))
        conv5 = conv_out[-1]
        logging.info('Decode conv5={}'.format(conv5.shape))
        ppm_out = [conv5]
        w = conv5.shape[2]
        h = conv5.shape[1]
        for pool_scale in self.ppm:
            x = pool_scale(conv5)
            logging.info('Decode pool_scale0={}'.format(x.shape))
            # Upsample each pooled map back to conv5's spatial size.
            sw = w // x.shape[2]
            sh = h // x.shape[1]
            x = tf.keras.layers.UpSampling2D((sh, sw), interpolation='bilinear')(x)
            logging.info('Decode pool_scale1={}'.format(x.shape))
            ppm_out.append(x)
        ppm_out = tf.concat(ppm_out, -1)
        x = self.conv_up1_0(ppm_out)
        x = self.conv_up1_1(x)
        x = tf.keras.layers.UpSampling2D((2, 2), interpolation='bilinear')(x)
        x = tf.concat([x, conv_out[-4]], -1)
        x = self.conv_up2(x)
        x = tf.keras.layers.UpSampling2D((2, 2), interpolation='bilinear')(x)
        x = tf.concat([x, conv_out[-5]], -1)
        x = self.conv_up3(x)
        x = tf.keras.layers.UpSampling2D((2, 2), interpolation='bilinear')(x)
        logging.info('Decode last x={}'.format(x.shape))
        # BUG FIX: log the tensor actually concatenated below (the original
        # logged conv_out[-4] under the conv_out[-6] label).
        logging.info('Decode conv_out[-6]={}'.format(conv_out[-6].shape))
        x = tf.concat([x, conv_out[-6][:, :, :, :3], img, two_chan_trimap], -1)
        output = self.conv_up4_0(x)
        output = self.conv_up4_1(output)
        # BUG FIX: the final 7-channel head is named conv_up4_7 in __init__;
        # the original called the undefined attribute conv_up4_2, which
        # raised AttributeError at runtime.
        output = self.conv_up4_7(output)
        alpha = tf.clip_by_value(output[:, :, :, 0:1], 0, 1)
        fg = tf.keras.activations.sigmoid(output[:, :, :, 1:4])
        bg = tf.keras.activations.sigmoid(output[:, :, :, 4:7])
        alpha, fg, bg = fba_fusion(alpha, img, fg, bg)
        return [alpha, fg, bg]
def Matting():
    """Build the matting model: (image, trimap) -> [alpha, fg, bg]."""
    img = tf.keras.layers.Input(shape=(320, 320, 3), name='img')
    trimap = tf.keras.layers.Input(shape=(320, 320, 1), name='trimap')
    inputs = [img, trimap]
    # Feed the concatenated 4-channel tensor through encoder and decoder.
    stacked = tf.keras.layers.concatenate(inputs)
    skips = encoder(stacked)
    decoder_outputs = FBADecoder()(skips, img, trimap)
    return tf.keras.Model(inputs=inputs, outputs=decoder_outputs)
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,339
|
dreyk/emt
|
refs/heads/master
|
/unet_one_face.py
|
import tensorflow as tf
import models.unet.unet as unet
import data.one_person as data
import logging
import os
import argparse
def train(args):
    """Train the single-face U-Net alpha-matting model.

    Args:
        args: parsed CLI namespace (see create_arg_parser): checkpoint_dir,
            resolution, batch_size, num_epochs, dataset options.
    """
    logdir = args.checkpoint_dir
    # BUG FIX: don't crash when the checkpoint directory already exists
    # (e.g. a rerun with the same BUILD_ID).
    os.makedirs(logdir, exist_ok=True)
    file_writer = tf.summary.create_file_writer(logdir)
    ds = data.data_fn(args, True)
    model = unet.unet((args.resolution, args.resolution, 3), first_chan=32, pools=4, growth_add=8, growth_scale=0,
                      out_chans=1, use_group_norm=args.batch_size == 1)
    model.summary()
    l1 = tf.keras.losses.MeanAbsoluteError()
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.0001)
    step = 0
    tf.summary.trace_on(graph=True, profiler=False)
    for e in range(args.num_epochs):
        logging.info('epoch %d', e)
        for (img, y_batch_train) in ds:
            with tf.GradientTape() as tape:
                outputs = model(img, training=True)  # predicted alpha for this minibatch
                palpha = outputs
                alpha = y_batch_train[:, :, :, 0:1]
                alpha_l1 = l1(alpha, palpha)
                loss_value = alpha_l1
                # Every 10 steps: log, checkpoint and emit summaries.
                if step % 10 == 0:
                    logging.info("Step {}: Loss={}".format(step, loss_value))
                    model.save(os.path.join(logdir, 'model'), save_format='tf')
                    with file_writer.as_default():
                        if step == 0:
                            # Export the traced graph once at the first step.
                            # (tag typo kept for compatibility with old logs)
                            tf.summary.trace_export('grpah', 0)
                            tf.summary.trace_off()
                        tf.summary.scalar("Loss", loss_value, step=step)
                        tf.summary.scalar("Alpha/L1", alpha_l1, step=step)
                        tf.summary.image("Src", img, step=step, max_outputs=3)
                        tf.summary.image("Alpha", alpha, step=step, max_outputs=3)
                        tf.summary.image("PAlpha", palpha, step=step, max_outputs=3)
                        tf.summary.image("Res", img * palpha, step=step, max_outputs=3)
            grads = tape.gradient(loss_value, model.trainable_weights)
            optimizer.apply_gradients(zip(grads, model.trainable_weights))
            step += 1
def create_arg_parser():
    """Build the training CLI parser (checkpoint dir is resolved first)."""
    # A minimal pre-parser so the checkpoint dir can be logged before the
    # full parser (which may error on missing required args) runs.
    pre_parser = argparse.ArgumentParser(
        add_help=False
    )
    default_ckpt = os.environ.get('TRAINING_DIR', 'training') + '/' + os.environ.get('BUILD_ID', '1')
    pre_parser.add_argument(
        '--checkpoint_dir',
        default=default_ckpt,
        help='Directory to save checkpoints and logs')
    known, _unused = pre_parser.parse_known_args()
    parser = argparse.ArgumentParser(
        parents=[pre_parser],
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    logging.info('Checkpoint %s', known.checkpoint_dir)
    parser.add_argument('--batch_size', default=1, type=int, help='Mini batch size')
    parser.add_argument('--coco', default='test', type=str, help='Coco path')
    parser.add_argument('--epoch_len', default=10, type=int, help='Repeat at least')
    parser.add_argument('--num_epochs', type=int, default=50, help='Number of training epochs')
    parser.add_argument('--resolution', default=160, type=int, help='Resolution of images')
    parser.add_argument('--data_set', type=str, required=True,
                        help='Path to the dataset')
    return parser
if __name__ == '__main__':
    # Script entry point: configure logging, parse CLI args, run training.
    logging.getLogger().setLevel('INFO')
    args = create_arg_parser().parse_args()
    train(args)
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,340
|
dreyk/emt
|
refs/heads/master
|
/sss_test.py
|
from imageio import imread
from sss_semantic_soft_segmentation.semantic_soft_segmentation import semantic_soft_segmentation
from scipy.io import loadmat
if __name__ == '__main__':
    # Smoke test: run semantic soft segmentation on the bundled sample
    # image using its precomputed feature embedding (.mat file).
    #img = imread('COCO_train2014_000000362884.jpg', mode='RGB')
    image = imread('./SIGGRAPH18SSS/samples/docia.png')
    ori = loadmat('./SIGGRAPH18SSS/Feat/docia.mat')
    features = ori['embedmap']
    print(features.shape)
    # Alternative: image/features packed side-by-side in a single image.
    #features = img[:, img.shape[1] // 2 + 1:, :]
    #image = img[:, :img.shape[1] // 2, :]
    sss = semantic_soft_segmentation(image, features)
    print(sss)
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,341
|
dreyk/emt
|
refs/heads/master
|
/data/alpha_base.py
|
import tensorflow as tf
import logging
import glob
import os
import json
import numpy as np
import cv2
import random
from scipy import ndimage
# Trimap gray value marking "unknown" (mask-boundary) pixels.
unknown_code = 128
def pre_trimap(alpha):
    """Mark the morphological boundary band of *alpha* as unknown."""
    k_size = 5
    dilated = ndimage.grey_dilation(alpha[:, :], size=(k_size, k_size))
    eroded = ndimage.grey_erosion(alpha[:, :], size=(k_size, k_size))
    trimap = np.copy(alpha)
    # Wherever a 5x5 neighborhood is not flat, the pixel lies on a mask
    # boundary; flag it with the unknown marker.
    trimap[dilated != eroded] = unknown_code
    return trimap
def generate_trimap(alpha):
    """Build a randomly blurred float trimap (H, W, 1) from *alpha*."""
    trimap = pre_trimap(alpha)
    # Random odd Gaussian kernel size in {3, 5, 7, 9}.
    k_size = int(np.random.uniform(1, 5)) * 2 + 1
    blurred = cv2.GaussianBlur(trimap, (k_size, k_size), 25)
    scaled = blurred.astype(np.float32) / 255
    return np.expand_dims(scaled, axis=2)
def random_choice(img, mask, resolution):
    """Randomly crop img/mask around a mask-boundary ("unknown") pixel.

    Returns the inputs unchanged when there are no unknown pixels or when
    the resulting crop would be smaller than half the requested size.
    """
    crop_size = random.choice(
        [resolution, resolution + resolution // 2, resolution * 2])
    trimap = pre_trimap(mask)
    y_indices, x_indices = np.where(trimap == unknown_code)
    num_unknowns = len(y_indices)
    if num_unknowns > 0:
        # Center the crop on a randomly picked boundary pixel.
        ix = np.random.choice(range(num_unknowns))
        center_x = x_indices[ix]
        center_y = y_indices[ix]
        half = int(crop_size / 2)
        x0 = max(0, center_x - half)
        y0 = max(0, center_y - half)
        x1 = min(x0 + crop_size, img.shape[1])
        y1 = min(y0 + crop_size, img.shape[0])
        if (x1 - x0) > crop_size / 2 and (y1 - y0) > crop_size / 2:
            img = img[y0:y1, x0:x1, :]
            mask = mask[y0:y1, x0:x1]
    return img, mask
def _coco_bg(args):
if args.coco == 'test':
return ['./testdata/default.png']
with open(args.coco + '/annotations/instances_train2017.json') as f:
coco_data = json.load(f)
coco_data = coco_data['annotations']
coco_images = {}
people = {}
for a in coco_data:
i_id = a['image_id']
if a['category_id'] != 1:
if i_id in people:
continue
else:
coco_images[i_id] = True
else:
if i_id in coco_images:
del coco_images[i_id]
people[i_id] = True
del people
names = []
for k in coco_images.keys():
name = '{}/train2017/{:012d}.jpg'.format(args.coco, int(k))
names.append(name)
return names
def _crop_back(img, w, h):
    """Resize a background to at least (w, h) and take a random (h, w) crop."""
    new_w = max(img.shape[1], w)
    new_h = max(img.shape[0], h)
    resized = cv2.resize(img, (new_w, new_h))
    # Random offset within the slack between resized and target size.
    x_off = int(np.random.uniform(0, new_w - w))
    y_off = int(np.random.uniform(0, new_h - h))
    return resized[y_off:y_off + h, x_off:x_off + w, :]
def _resize_and_put(img, x_shift, y_shift, iw, ih, w, h):
    """Resize img to (iw, ih) and paste it into a zeroed (h, w) canvas."""
    resized = cv2.resize(img, (iw, ih))
    if len(resized.shape) == 3:
        canvas = np.zeros((h, w, resized.shape[2]), dtype=resized.dtype)
        canvas[y_shift:y_shift + ih, x_shift:x_shift + iw, :] = resized
    else:
        # Single-channel (grayscale) case: no channel axis on the canvas.
        canvas = np.zeros((h, w), dtype=resized.dtype)
        canvas[y_shift:y_shift + ih, x_shift:x_shift + iw] = resized
    return canvas
def augumnted_data_fn(args, training):
    """Create a tf.data pipeline of augmented, composited matting samples.

    Each element is (image+trimap [4 channels], alpha+fg+bg [7 channels]) at
    args.resolution, with the foreground composited over a random COCO
    background. (Public name typo preserved for existing callers.)
    """
    import albumentations

    def _soft_strong_aug(p=0.5):
        # Geometric-only augmentation (flips).
        return albumentations.Compose([
            albumentations.HorizontalFlip(),
            albumentations.VerticalFlip(),
        ], p=p)

    def _strong_aug(p=0.5):
        # Heavier augmentation: distortions, sharpening, color jitter.
        return albumentations.Compose([
            albumentations.HorizontalFlip(),
            albumentations.VerticalFlip(),
            albumentations.ShiftScaleRotate(shift_limit=0, scale_limit=0, rotate_limit=15, p=0.3),
            albumentations.OneOf([
                albumentations.OpticalDistortion(p=0.3),
                albumentations.GridDistortion(p=0.1),
                albumentations.IAAPiecewiseAffine(p=0.3),
            ], p=0.2),
            albumentations.OneOf([
                albumentations.CLAHE(clip_limit=2),
                albumentations.IAASharpen(),
                albumentations.IAAEmboss(),
            ], p=0.3),
            albumentations.OneOf([
                albumentations.RandomBrightnessContrast(p=0.3),
            ], p=0.4),
            albumentations.HueSaturationValue(p=0.3),
        ], p=p)

    augmentation = _soft_strong_aug(p=0.9)
    # Pair each mask with its identically-named image file.
    files = glob.glob(args.data_set + '/masks/*.*')
    for i in range(len(files)):
        mask = files[i]
        img = os.path.basename(mask)
        img = args.data_set + '/images/' + img
        files[i] = (img, mask)
    coco_images = _coco_bg(args)

    def _generator():
        for i in files:
            img = cv2.imread(i[0])
            mask = cv2.imread(i[1])
            if len(mask.shape) == 3:
                mask = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
            # Crop around a mask-boundary pixel, then augment image and
            # mask together with the same transform.
            img, mask = random_choice(img, mask, args.resolution)
            data = {"image": img, "mask": mask}
            augmented = augmentation(**data)
            img, mask = augmented["image"], augmented["mask"]
            fg = cv2.resize(img, (args.resolution, args.resolution))
            mask = cv2.resize(mask, (args.resolution, args.resolution))
            # Composite the foreground over a random COCO background.
            name = random.choice(coco_images)
            bg = cv2.imread(name)
            bg = _crop_back(bg, args.resolution, args.resolution)
            fmask = mask.astype(np.float32) / 255
            fmask = np.expand_dims(fmask, 2)
            fg = fg.astype(np.float32) / 255 * fmask
            bg = bg.astype(np.float32) / 255 * (1 - fmask)
            img = fg + bg
            img = np.clip(img, 0, 1)
            fg = np.clip(fg, 0, 1)
            bg = np.clip(bg, 0, 1)
            trimap = generate_trimap(mask)
            yield np.concatenate([img, trimap], axis=2), np.concatenate([fmask, fg, bg], axis=2)

    ds = tf.data.Dataset.from_generator(_generator, (tf.float32, tf.float32),
                                        (tf.TensorShape([args.resolution, args.resolution, 4]),
                                         tf.TensorShape([args.resolution, args.resolution, 7])))
    if training:
        ds = ds.shuffle(args.batch_size * 3, reshuffle_each_iteration=True)
    ds = ds.batch(args.batch_size, True)
    return ds
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,342
|
dreyk/emt
|
refs/heads/master
|
/models/fc_densenet/matting.py
|
import models.fc_densenet.layers as fc_layers
import tensorflow as tf
def fba_fusion(alpha, img, F, B):
    """Fuse alpha / foreground / background predictions (FBA-matting style).

    F and B are refined from the compositing equation, then alpha is
    re-estimated from the refined F/B via a regularised least-squares step.
    NOTE(review): B is computed from the already-updated F (sequential, not
    simultaneous update) — confirm this ordering is intended.
    Tensors are expected in [0, 1]; reduce_sum over axis 3 implies NHWC with
    a single-channel alpha — TODO confirm shapes with callers.
    """
    F = ((alpha * img + (1 - alpha**2) * F - alpha * (1 - alpha) * B))
    B = ((1 - alpha) * img + (2 * alpha - alpha**2) * B - alpha * (1 - alpha) * F)
    F = tf.clip_by_value(F,0,1)
    B = tf.clip_by_value(B,0,1)
    la = 0.1  # regulariser: keeps alpha stable where F ~= B
    alpha = (alpha * la +tf.reduce_sum((img - B) * (F - B),3,True)) / (tf.reduce_sum((F - B) * (F - B),3,True) + la)
    alpha = tf.clip_by_value(alpha,0,1)
    return alpha, F, B
def FCDensNetMatting(
        n_filters_first_conv=48,
        n_pool=5,
        growth_rate=16,
        n_layers_per_block=None,
        dropout_p=0.2
):
    """Build the FC-DenseNet based matting model.

    Inputs: a 320x320x3 image and a 320x320x1 trimap.
    Outputs: [alpha, fg, bg] after FBA-style fusion (see fba_fusion).

    n_layers_per_block may be a list of length 2*n_pool+1 or a single int
    applied to every block; None selects the FC-DenseNet103 layout.
    Raises ValueError for any other n_layers_per_block type.
    """
    # FIX: the default used to be a module-level mutable list literal
    # (shared across calls); use a None sentinel instead.
    if n_layers_per_block is None:
        n_layers_per_block = [4, 5, 7, 10, 12, 15, 12, 10, 7, 5, 4]
    if isinstance(n_layers_per_block, list):
        print(len(n_layers_per_block))
    elif isinstance(n_layers_per_block, int):
        n_layers_per_block = [n_layers_per_block] * (2 * n_pool + 1)
    else:
        raise ValueError
    img = tf.keras.layers.Input(shape=(320, 320, 3), name='img')
    trimap = tf.keras.layers.Input(shape=(320, 320, 1), name='trimap')
    inputs = [img, trimap]
    # Image and trimap are fused channel-wise as the network input.
    # (local renamed from `input`, which shadowed the builtin)
    net_input = tf.keras.layers.concatenate([img, trimap])
    print('n_filters_first_conv={}'.format(n_filters_first_conv))
    stack = tf.keras.layers.Conv2D(filters=n_filters_first_conv, kernel_size=3, padding='same',
                                   kernel_initializer='he_uniform', kernel_regularizer=fc_layers.ws_reg)(net_input)
    print('stack={}'.format(stack.shape))
    n_filters = n_filters_first_conv
    # Downsampling path: dense blocks; pre-pool features kept as skips.
    skip_connection_list = []
    for i in range(n_pool):
        for j in range(n_layers_per_block[i]):
            l = fc_layers.BN_ReLU_Conv(stack, growth_rate, dropout_p=dropout_p)
            stack = tf.keras.layers.concatenate([stack, l])
            n_filters += growth_rate
        skip_connection_list.append(stack)
        stack = fc_layers.TransitionDown(stack, n_filters, dropout_p)
    skip_connection_list = skip_connection_list[::-1]
    # Bottleneck dense block.
    block_to_upsample = []
    for j in range(n_layers_per_block[n_pool]):
        l = fc_layers.BN_ReLU_Conv(stack, growth_rate, dropout_p=dropout_p)
        block_to_upsample.append(l)
        stack = tf.keras.layers.concatenate([stack, l])
    block_to_upsample = tf.keras.layers.concatenate(block_to_upsample)
    # Upsampling path: transition-up + skip concat + dense block.
    for i in range(n_pool):
        n_filters_keep = growth_rate * n_layers_per_block[n_pool + i]
        stack = fc_layers.TransitionUp(skip_connection_list[i], block_to_upsample, n_filters_keep)
        block_to_upsample = []
        for j in range(n_layers_per_block[n_pool + i + 1]):
            l = fc_layers.BN_ReLU_Conv(stack, growth_rate, dropout_p=dropout_p)
            block_to_upsample.append(l)
            stack = tf.keras.layers.concatenate([stack, l])
        block_to_upsample = tf.keras.layers.concatenate(block_to_upsample)
    # 7-channel head: 1 alpha + 3 foreground + 3 background.
    l = tf.keras.layers.Conv2D(7, kernel_size=1, padding='same', kernel_initializer='he_uniform', kernel_regularizer=fc_layers.ws_reg)(stack)
    alpha = tf.keras.activations.hard_sigmoid(l[:, :, :, 0:1])
    alpha = tf.clip_by_value(alpha, 0, 1)
    fg = tf.keras.activations.sigmoid(l[:, :, :, 1:4])
    bg = tf.keras.activations.sigmoid(l[:, :, :, 4:7])
    alpha, fg, bg = fba_fusion(alpha, img, fg, bg)
    model = tf.keras.Model(inputs=inputs, outputs=[alpha, fg, bg])
    return model
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,343
|
dreyk/emt
|
refs/heads/master
|
/data/coco.py
|
import json
import random
import cv2
import numpy as np
class CocoBG:
    """Pool of COCO train2017 images with no person, used as backgrounds."""

    def __init__(self, path):
        # _load_coco handles the special 'test' path and person filtering.
        self.images = _load_coco(path)

    def get_random(self, w, h, rgb=True):
        """Return a random background cropped/scaled to (h, w).

        Images are read with OpenCV (BGR); rgb=True flips channels to RGB.
        """
        chosen = random.choice(self.images)
        picture = cv2.imread(chosen)
        if rgb:
            picture = picture[:, :, ::-1]
        return self.crop_back(picture, w, h)

    def crop_back(self, img, w, h):
        """Upscale *img* if needed, then take a random (h, w) window."""
        min_w = max(img.shape[1], w)
        min_h = max(img.shape[0], h)
        img = cv2.resize(img, (min_w, min_h))
        left = int(np.random.uniform(0, min_w - w))
        top = int(np.random.uniform(0, min_h - h))
        return img[top:top + h, left:left + w, :]
def _load_coco(coco_path):
if coco_path == 'test':
return ['./testdata/default.png']
with open(coco_path + '/annotations/instances_train2017.json') as f:
coco_data = json.load(f)
coco_data = coco_data['annotations']
coco_images = {}
people = {}
for a in coco_data:
i_id = a['image_id']
if a['category_id'] != 1:
if i_id in people:
continue
else:
coco_images[i_id] = True
else:
if i_id in coco_images:
del coco_images[i_id]
people[i_id] = True
del people
names = []
for k in coco_images.keys():
name = '{}/train2017/{:012d}.jpg'.format(coco_path, int(k))
names.append(name)
return names
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,344
|
dreyk/emt
|
refs/heads/master
|
/live_aug.py
|
import cv2
import numpy as np
# Interactive augmentation preview: load the fixed demo image, its mask and
# the default background, and bring everything to a 160x160 working size.
original_img = cv2.imread('testdata/images/test.png')
original_mask = cv2.imread('testdata/masks/test.png')
bg = cv2.imread('testdata/default.png')
original_img = cv2.resize(original_img,(160,160))
# Mask is stored as a 3-channel image; collapse to single-channel grayscale.
original_mask = cv2.cvtColor(original_mask,cv2.COLOR_BGR2GRAY)
original_mask = cv2.resize(original_mask,(160,160))
bg = cv2.resize(bg,(160,160))
bg = bg.astype(np.float32)  # float for the alpha blending below
def _strong_aug(p=0.5):
    """Heavy augmentation pipeline: flip, shift/scale, distortions,
    sharpening and colour jitter.

    p: probability that the composed pipeline is applied at all.
    albumentations is imported lazily so the script fails only when the
    pipeline is actually built.
    """
    import albumentations
    return albumentations.Compose([
        albumentations.HorizontalFlip(p=0.5),
        albumentations.ShiftScaleRotate(shift_limit=0.0625, scale_limit=0.2, rotate_limit=0, p=0.5,border_mode=cv2.BORDER_CONSTANT),
        # Exactly one geometric distortion per application (if any).
        albumentations.OneOf([
            albumentations.OpticalDistortion(p=0.5,border_mode=cv2.BORDER_CONSTANT),
            albumentations.GridDistortion(p=0.5,border_mode=cv2.BORDER_CONSTANT),
            albumentations.IAAPiecewiseAffine(p=0.5),
            albumentations.ElasticTransform(p=0.5,border_mode=cv2.BORDER_CONSTANT),
        ], p=0.5),
        albumentations.OneOf([
            albumentations.CLAHE(clip_limit=2),
            albumentations.IAASharpen(),
            albumentations.IAAEmboss(),
        ], p=0.5),
        albumentations.OneOf([
            albumentations.RandomBrightnessContrast(p=0.5),
        ], p=0.4),
        albumentations.HueSaturationValue(p=0.5),
    ], p=p)
augmentation = _strong_aug(p=1)  # always apply for the preview
while True:
    data = {"image": original_img, "mask": original_mask}
    augmented = augmentation(**data)
    img, mask = augmented["image"], augmented["mask"]
    img = img.astype(np.float32)
    mask = mask.astype(np.float32) / 255
    mask = np.expand_dims(mask, axis=2)
    # Alpha-composite the augmented foreground over the fixed background.
    img = img * mask + bg * (1 - mask)
    img = img.astype(np.uint8)
    cv2.imshow('test',img)
    key = cv2.waitKey()
    # Quit on 'q' (or raw keycode 202).
    if key in [ord('q'), 202]:
        break
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,345
|
dreyk/emt
|
refs/heads/master
|
/fc_densnet_train.py
|
import tensorflow as tf
import data.alpha_base as data
import models.fc_densenet.matting as fc_densenet
import models.resnet.model as resnet_mat
import logging
import os
import argparse
import numpy as np
def gauss_kernel(size=5, sigma=1.0):
    """Return a normalised (size, size) smoothing kernel for the Laplacian
    pyramid loss.

    NOTE(review): follows the widely-copied laploss recipe — the per-axis
    responses are SUMMED (not multiplied) and the exponential is squared;
    kept as-is since the kernel is normalised afterwards.
    """
    center = size // 2
    coords = np.float32(np.mgrid[0:size, 0:size].T)  # (size, size, 2)
    weights = np.exp((coords - center) ** 2 / (-2 * sigma ** 2)) ** 2
    kernel = weights.sum(axis=2)
    return kernel / kernel.sum()
def conv_gauss(t_input, stride=1, k_size=5, sigma=1.6, repeats=1):
    """Apply a depthwise Gaussian blur to an NHWC tensor *repeats* times."""
    base = tf.reshape(
        tf.constant(gauss_kernel(size=k_size, sigma=sigma), tf.float32),
        [k_size, k_size, 1, 1])
    # Replicate the single-channel kernel across every input channel.
    channels = t_input.get_shape()[3]
    depthwise_kernel = tf.concat([base] * channels, axis=2)
    out = t_input
    for _ in range(repeats):
        out = tf.nn.depthwise_conv2d(out, depthwise_kernel,
                                     strides=[1, stride, stride, 1], padding='SAME')
    return out
def make_laplacian_pyramid(t_img, max_levels):
    """Build a Laplacian pyramid: max_levels band-pass maps plus the final
    low-pass residual (so the returned list has max_levels + 1 entries)."""
    levels = []
    low = t_img
    for _ in range(max_levels):
        blurred = conv_gauss(low, stride=1, k_size=5, sigma=2.0)
        # Band-pass detail at this scale.
        levels.append(low - blurred)
        low = tf.nn.avg_pool(blurred, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')
    levels.append(low)
    return levels
def laploss(t_img1, t_img2, max_levels=3):
    """Mean L1 distance between the Laplacian pyramids of two image batches."""
    pyr_a = make_laplacian_pyramid(t_img1, max_levels)
    pyr_b = make_laplacian_pyramid(t_img2, max_levels)
    per_level = [tf.norm(a - b, ord=1) / tf.size(a, out_type=tf.float32)
                 for a, b in zip(pyr_a, pyr_b)]
    return tf.reduce_sum(per_level)
def train(args):
    """Set up the ResNet-based matting experiment.

    NOTE(review): this currently only builds the dataset and prints the
    model summary — no optimisation loop is run (compare train1), yet it is
    the function invoked from __main__.
    """
    logdir = args.checkpoint_dir
    # FIX: exist_ok avoids FileExistsError when resuming into an existing
    # checkpoint directory.
    os.makedirs(logdir, exist_ok=True)
    file_writer = tf.summary.create_file_writer(logdir)
    ds = data.augumnted_data_fn(args, True)
    model = resnet_mat.Matting()
    model.summary()
def train1(args):
    """Full training loop for the FC-DenseNet matting model.

    Consumes the augmented dataset, optimises a combined alpha / FG / BG
    loss (L1 + compositing + Laplacian-pyramid terms, plus a gradient
    exclusion term) with Adam, and logs scalars and images to TensorBoard
    every 50 steps.
    """
    logdir = args.checkpoint_dir
    # NOTE(review): raises FileExistsError if the directory already exists.
    os.makedirs(logdir)
    file_writer = tf.summary.create_file_writer(logdir)
    ds = data.augumnted_data_fn(args, True)
    model = fc_densenet.FCDensNetMatting()
    model.summary()
    l1 = tf.keras.losses.MeanAbsoluteError()
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.00001)
    step = 0
    for e in range(args.num_epochs):
        for (x_batch_train, y_batch_train) in ds:
            with tf.GradientTape() as tape:
                # Network input: channels 0-2 composite image, channel 3 trimap.
                img = x_batch_train[:,:,:,0:3]
                trimap = x_batch_train[:,:,:,3:]
                outputs = model([img,trimap], training=True) # Logits for this minibatch
                palpha = outputs[0]
                palpha_dx, palpha_dy = tf.image.image_gradients(palpha)
                pfg = outputs[1]
                pbg = outputs[2]
                # Targets: channel 0 alpha, 1-3 foreground, 4-6 background.
                alpha = y_batch_train[:,:,:,0:1]
                alpha_dx, alpha_dy = tf.image.image_gradients(alpha)
                fg = y_batch_train[:, :, :, 1:4]
                bg = y_batch_train[:, :, :, 4:7]
                alpha_l1 = l1(alpha,palpha)
                #alpha_g = l1(alpha_dx,palpha_dx)+l1(alpha_dy,palpha_dy)
                # Compositing loss: predicted alpha must reassemble the
                # input image from ground-truth FG/BG.
                alpha_c = l1(img, fg * palpha + bg * (1 - palpha))
                alpha_lap = laploss(alpha,palpha)
                fb_l1 = l1(fg,pfg)+l1(bg, pbg)
                pfg_dx,pfg_dy = tf.image.image_gradients(pfg)
                pbg_dx, pbg_dy = tf.image.image_gradients(pbg)
                # Gradient exclusion: discourages FG and BG edges from
                # coinciding at the same pixels.
                fb_exl = tf.reduce_mean(tf.abs(pfg_dx)*tf.abs(pbg_dx)/4+tf.abs(pfg_dy)*tf.abs(pbg_dy)/4)
                fb_c = l1(img,alpha*pfg+(1-alpha)*pbg)
                fb_lap = laploss(fg,pfg)+laploss(bg,pbg)
                # FG/BG terms weighted 0.25 relative to the alpha terms.
                loss_value = alpha_l1+alpha_c+alpha_lap+0.25*(fb_l1+fb_exl+fb_c+fb_lap)
            # Periodic console + TensorBoard logging.
            if step % 50 == 0:
                logging.info("Step {}: Loss={}".format(step, loss_value))
                with file_writer.as_default():
                    tf.summary.scalar("Loss", loss_value, step=step)
                    tf.summary.scalar("Alpha/L1", alpha_l1, step=step)
                    #tf.summary.scalar("Alpha/G", alpha_g, step=step)
                    tf.summary.scalar("Alpha/C", alpha_c, step=step)
                    tf.summary.scalar("Alpha/Lap", alpha_lap, step=step)
                    tf.summary.scalar("FB/L1", fb_l1, step=step)
                    tf.summary.scalar("FB/Exl", fb_exl, step=step)
                    tf.summary.scalar("FB/Lap", fb_lap, step=step)
                    tf.summary.image("Src", img, step=step, max_outputs=3)
                    tf.summary.image("BG", bg, step=step, max_outputs=3)
                    tf.summary.image("FG", fg, step=step, max_outputs=3)
                    tf.summary.image("PBG", pbg, step=step, max_outputs=3)
                    tf.summary.image("PFG", pfg, step=step, max_outputs=3)
                    tf.summary.image("Alpha", alpha, step=step, max_outputs=3)
                    tf.summary.image("Trimap", trimap, step=step, max_outputs=3)
                    tf.summary.image("PAlpha", palpha, step=step, max_outputs=3)
                    tf.summary.image("Res", img*palpha, step=step, max_outputs=3)
            grads = tape.gradient(loss_value, model.trainable_weights)
            optimizer.apply_gradients(zip(grads, model.trainable_weights))
            step += 1
def create_arg_parser():
    """Build the CLI parser for the FC-DenseNet training script.

    A small pre-parser resolves --checkpoint_dir first (its default is
    derived from the TRAINING_DIR / BUILD_ID environment variables) so the
    chosen directory can be logged before full argument parsing happens.
    """
    pre_parser = argparse.ArgumentParser(
        add_help=False
    )
    default_ckpt = (os.environ.get('TRAINING_DIR', 'training')
                    + '/' + os.environ.get('BUILD_ID', '1'))
    pre_parser.add_argument(
        '--checkpoint_dir',
        default=default_ckpt,
        help='Directory to save checkpoints and logs')
    known, _unused_argv = pre_parser.parse_known_args()
    parser = argparse.ArgumentParser(
        parents=[pre_parser],
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    logging.info('Checkpoint %s', known.checkpoint_dir)
    parser.add_argument('--batch_size', default=1, type=int, help='Mini batch size')
    parser.add_argument('--num_epochs', type=int, default=50, help='Number of training epochs')
    parser.add_argument('--resolution', default=320, type=int, help='Resolution of images')
    parser.add_argument('--coco', type=str, required=True,
                        help='Coco Path')
    parser.add_argument('--data_set', type=str, required=True,
                        help='Path to the dataset')
    return parser
if __name__ == '__main__':
    # Entry point: enable INFO logging, parse args, run train() —
    # NOTE(review): train() only builds the model/summary; the full loop
    # lives in train1(), which is never invoked here.
    logging.getLogger().setLevel('INFO')
    args = create_arg_parser().parse_args()
    train(args)
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,346
|
dreyk/emt
|
refs/heads/master
|
/play.py
|
import cv2
import numpy as np
# Scratch script: alpha-composite a transparent PNG over the default test
# background at 256x256 and save the result to res.png.
back = cv2.imread('./testdata/default.png')
# NOTE(review): absolute user-specific path — only works on the author's machine.
front = cv2.imread('/Users/agunin/Downloads/Alpha/People/hairs-3225896_1920.png',cv2.IMREAD_UNCHANGED)
small_front = cv2.resize(front,(256,256))
smal_back= cv2.resize(back,(256,256))
a = small_front[:,:,3:].astype(np.float32)/255  # alpha channel scaled to [0, 1]
res = small_front[:,:,0:3].astype(np.float32)*a + smal_back.astype(np.float32)*(1-a)
res = res.astype(np.uint8)
cv2.imwrite('res.png',res)
#key = cv2.waitKey()
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,347
|
dreyk/emt
|
refs/heads/master
|
/models/fc_densenet/model.py
|
import models.layers.layers as fc_layers
import tensorflow as tf
def FCDensNet(
        input_shape=(None, None, 3),
        n_classes=1,
        n_filters_first_conv=48,
        n_pool=5,
        growth_rate=16,
        n_layers_per_block=None,
        dropout_p=0.2
):
    """Build a FC-DenseNet (Tiramisu) segmentation model.

    n_layers_per_block may be a list of length 2*n_pool+1 or a single int
    applied to every block; None selects the FC-DenseNet103 layout.
    Returns a keras Model with outputs [softmax map, final feature stack,
    *reversed encoder skip connections].
    Raises ValueError for any other n_layers_per_block type.
    """
    # FIX: the default used to be a module-level mutable list literal
    # (shared across calls); use a None sentinel instead.
    if n_layers_per_block is None:
        n_layers_per_block = [4, 5, 7, 10, 12, 15, 12, 10, 7, 5, 4]
    if isinstance(n_layers_per_block, list):
        print(len(n_layers_per_block))
    elif isinstance(n_layers_per_block, int):
        n_layers_per_block = [n_layers_per_block] * (2 * n_pool + 1)
    else:
        raise ValueError
    #####################
    # First Convolution #
    #####################
    inputs = tf.keras.layers.Input(shape=input_shape, name='input')
    stack = tf.keras.layers.Conv2D(filters=n_filters_first_conv, kernel_size=3, padding='same', kernel_initializer='he_uniform')(inputs)
    n_filters = n_filters_first_conv
    #####################
    # Downsampling path #
    #####################
    skip_connection_list = []
    for i in range(n_pool):
        for j in range(n_layers_per_block[i]):
            l = fc_layers.BN_ReLU_Conv(stack, growth_rate, dropout_p=dropout_p)
            stack = tf.keras.layers.concatenate([stack, l])
            n_filters += growth_rate
        skip_connection_list.append(stack)
        stack = fc_layers.TransitionDown(stack, n_filters, dropout_p)
    skip_connection_list = skip_connection_list[::-1]
    #####################
    # Bottleneck        #
    #####################
    block_to_upsample = []
    for j in range(n_layers_per_block[n_pool]):
        l = fc_layers.BN_ReLU_Conv(stack, growth_rate, dropout_p=dropout_p)
        block_to_upsample.append(l)
        stack = tf.keras.layers.concatenate([stack, l])
    block_to_upsample = tf.keras.layers.concatenate(block_to_upsample)
    #####################
    # Upsampling path   #
    #####################
    for i in range(n_pool):
        n_filters_keep = growth_rate * n_layers_per_block[n_pool + i]
        stack = fc_layers.TransitionUp(skip_connection_list[i], block_to_upsample, n_filters_keep)
        block_to_upsample = []
        for j in range(n_layers_per_block[n_pool + i + 1]):
            l = fc_layers.BN_ReLU_Conv(stack, growth_rate, dropout_p=dropout_p)
            block_to_upsample.append(l)
            stack = tf.keras.layers.concatenate([stack, l])
        block_to_upsample = tf.keras.layers.concatenate(block_to_upsample)
    #####################
    # Softmax           #
    #####################
    output = fc_layers.SoftmaxLayer(stack, n_classes)
    outputs = [output, stack]
    for sc in skip_connection_list:
        outputs.append(sc)
    model = tf.keras.Model(inputs=inputs, outputs=outputs)
    return model
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,348
|
dreyk/emt
|
refs/heads/master
|
/models/resnet/resnet_gn_ws.py
|
import tensorflow as tf
import models.resnet.layers as clayers
import logging
class Conv3x3(tf.keras.layers.Layer):
    """Bias-free 3x3 convolution with 'same' padding and weight
    standardisation regularisation (clayers.ws_reg)."""

    def __init__(self, out_planes, stride=1, dilate=1, **kwargs):
        super(Conv3x3, self).__init__(**kwargs)
        self.conv = tf.keras.layers.Conv2D(
            out_planes,
            kernel_size=3,
            strides=stride,
            dilation_rate=dilate,
            padding='same',
            use_bias=False,
            kernel_initializer='he_uniform',
            kernel_regularizer=clayers.ws_reg)

    def call(self, inputs):
        return self.conv(inputs)
class Conv1x1(tf.keras.layers.Layer):
    """Bias-free 1x1 (pointwise) convolution with weight standardisation."""

    def __init__(self, out_planes, stride=1, **kwargs):
        super(Conv1x1, self).__init__(**kwargs)
        self.conv = tf.keras.layers.Conv2D(
            out_planes,
            kernel_size=1,
            strides=stride,
            padding='same',
            use_bias=False,
            kernel_initializer='he_uniform',
            kernel_regularizer=clayers.ws_reg)

    def call(self, inputs):
        return self.conv(inputs)
class BasicBlock(tf.keras.layers.Layer):
    """ResNet v1 basic residual block (two 3x3 convs) with GroupNorm.

    planes: output channels; stride: spatial stride, applied by the FIRST
    conv only; downsample: optional layer that projects the identity branch
    to the residual's shape when stride > 1 or channels change.
    """
    expansion = 1

    def __init__(self, planes, stride=1, downsample=None, **kwargs):
        super(BasicBlock, self).__init__(**kwargs)
        self.conv1 = Conv3x3(planes, stride)
        self.bn1 = clayers.GroupNormalization()
        self.relu = tf.keras.layers.Activation(tf.keras.activations.relu)
        # BUG FIX: the second conv must keep stride 1 — the original passed
        # `stride` again, downsampling twice for strided blocks and
        # breaking the shape match with `downsample(x)` in the addition.
        self.conv2 = Conv3x3(planes)
        self.bn2 = clayers.GroupNormalization()
        self.downsample = downsample
        self.stride = stride

    def call(self, x):
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        out = self.relu(out)
        return out
class Downsample(tf.keras.layers.Layer):
    """1x1 strided projection + GroupNorm for a residual identity branch."""

    def __init__(self, planes, stride=1, **kwargs):
        super(Downsample, self).__init__(**kwargs)
        self.conv = Conv1x1(planes, stride)
        self.norm = clayers.GroupNormalization()

    def call(self, x):
        projected = self.conv(x)
        return self.norm(projected)
class Bottleneck(tf.keras.layers.Layer):
    """ResNet bottleneck block (1x1 -> 3x3 -> 1x1) with GroupNorm.

    When stride == 2 together with dilate > 1, the stride is dropped and
    the dilation halved — trading downsampling for dilated convolution
    (DeepLab-style output-stride control) — TODO confirm against callers.
    Unlike BasicBlock, the downsample branch is passed per call.
    """
    # Output channels = planes * expansion.
    expansion = 4
    def __init__(self, planes, stride=1,dilate=1,layer=1, **kwargs):
        super(Bottleneck, self).__init__(**kwargs)
        if stride==2 and dilate>1:
            stride = 1
            dilate = dilate//2
        self.conv1 = Conv1x1(planes)
        self.bn1 = clayers.GroupNormalization()
        self.conv2 = Conv3x3(planes, stride,dilate=dilate)
        self.bn2 = clayers.GroupNormalization()
        self.conv3 = Conv1x1(planes * self.expansion)
        self.bn3 = clayers.GroupNormalization()
        self.relu = tf.keras.layers.Activation(tf.keras.activations.relu)
        self.layer = layer  # only used to tag the log messages below
        self.stride = stride
    def call(self, x,downsample=None):
        logging.info('Bottelneck layer{} x: {}'.format(self.layer, x.shape))
        identity = x
        out = self.conv1(x)
        logging.info('Bottelneck layer{} conv1: {}'.format(self.layer,out.shape))
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        logging.info('Bottelneck layer{} conv2: {}'.format(self.layer, out.shape))
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        logging.info('Bottelneck layer{} conv3: {}'.format(self.layer, out.shape))
        if downsample is not None:
            identity = downsample(x)
            logging.info('Bottelneck layer{} identity: {}'.format(self.layer, identity.shape))
        out += identity
        out = self.relu(out)
        return out
class FBADecoderBlock(tf.keras.layers.Layer):
    """Conv + ReLU + GroupNorm decoder stage with optional pyramid pooling.

    pool_scale > 0 first average-pools the input down to roughly
    pool_scale x pool_scale before the convolution.
    """
    def __init__(self,filters,kernel_size=1,pool_scale=0,**kwargs):
        super(FBADecoderBlock, self).__init__(**kwargs)
        self.pool = pool_scale
        self.conv = tf.keras.layers.Conv2D(filters, kernel_size=kernel_size,
                                           padding='same',
                                           use_bias=True,
                                           kernel_initializer='he_uniform', kernel_regularizer=clayers.ws_reg)
        self.relu = tf.keras.layers.Activation(tf.keras.activations.relu)
        self.norm = clayers.GroupNormalization()
    def call(self, x):
        if self.pool>0:
            # Ceil division: pooling window that reduces the spatial size
            # (assumed square, known statically) to ~pool_scale.
            # NOTE(review): the pooling layer is rebuilt on every call.
            kernel = (x.shape[1] + self.pool - 1) // self.pool
            x = tf.keras.layers.AveragePooling2D(kernel)(x)
        x = self.conv(x)
        x = self.relu(x)
        return self.norm(x)
class FBADecoderPPM(tf.keras.layers.Layer):
    """Pyramid-pooling-style stack of conv/relu/norm stages (256 channels).

    NOTE(review): unlike a standard PPM, `x` is reassigned each iteration,
    so every scale pools the PREVIOUS stage's output and only the final
    stage's result is returned (no concatenation of pyramid levels) —
    confirm this is intentional.
    """
    def __init__(self,pool_scales,**kwargs):
        super(FBADecoderPPM, self).__init__(**kwargs)
        self.ppm = []
        self.pool_scales = pool_scales
        for _ in pool_scales:
            conv = tf.keras.layers.Conv2D(256, kernel_size=1,
                                          padding='same',
                                          use_bias=True,
                                          kernel_initializer='he_uniform', kernel_regularizer=clayers.ws_reg)
            relu = tf.keras.layers.Activation(tf.keras.activations.relu)
            norm = clayers.GroupNormalization()
            self.ppm.append([conv,relu,norm])
    def call(self, x):
        for i,p in enumerate(self.ppm):
            # Ceil division to reach roughly pool_scales[i] output size.
            kernel = (x.shape[1]+self.pool_scales[i]-1)//self.pool_scales[i]
            x = tf.keras.layers.AveragePooling2D(kernel)(x)
            for l in p:
                x = l(x)
            logging.info('Decoder PPM{}: {}'.format(i,x.shape))
        return x
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,349
|
dreyk/emt
|
refs/heads/master
|
/models/unet/unet.py
|
import models.layers.layers as layers
import tensorflow as tf
def block(input, filters, norm, pooling=True):
    """Two conv(3x3)+norm+ReLU stages, optionally followed by 2x2 max-pool.

    norm: a normalisation layer instance; NOTE(review) it is applied after
    BOTH convolutions, i.e. the same layer object (and its parameters) is
    shared between the two stages — confirm intended.
    Returns (features, pooled) when pooling=True, else just features.
    """
    x = tf.keras.layers.Conv2D(filters, 3, padding='same', kernel_initializer='he_normal')(input)
    x = norm(x)
    x = tf.keras.layers.Activation(tf.keras.activations.relu)(x)
    x = tf.keras.layers.Conv2D(filters, 3, padding='same', kernel_initializer='he_normal')(x)
    x = norm(x)
    features = tf.keras.layers.Activation(tf.keras.activations.relu)(x)
    if not pooling:
        return features
    pooled = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(features)
    return features, pooled
def fba_fusion(alpha, img, F, B):
    """FBA-matting fusion: refine F/B from the compositing equation, then
    re-estimate alpha from the refined F/B with a small regulariser.

    NOTE(review): B is computed from the already-updated F (sequential
    update); tensors assumed in [0, 1], alpha single-channel (NHWC).
    """
    F = ((alpha * img + (1 - alpha ** 2) * F - alpha * (1 - alpha) * B))
    B = ((1 - alpha) * img + (2 * alpha - alpha ** 2) * B - alpha * (1 - alpha) * F)
    F = tf.clip_by_value(F, 0, 1)
    B = tf.clip_by_value(B, 0, 1)
    la = 0.1  # regulariser: keeps alpha stable where F ~= B
    alpha = (alpha * la + tf.reduce_sum((img - B) * (F - B), 3, True)) / (
            tf.reduce_sum((F - B) * (F - B), 3, True) + la)
    alpha = tf.clip_by_value(alpha, 0, 1)
    return alpha, F, B
def unet(input_shape=(None, None, 3), first_chan=16, pools=4, growth_add=0, growth_scale=2, out_chans=1,
         use_group_norm=True):
    """Build a U-Net; with out_chans == 7 it becomes an FBA-style matting net.

    first_chan: channels of the first encoder block; each pooling level
        grows channels by +growth_add (if > 0) else *growth_scale.
    out_chans: 1 -> single sigmoid alpha output; 7 -> alpha + FG(3) + BG(3)
        passed through fba_fusion.
    use_group_norm: GroupNorm (group count derived from first_chan) vs
        BatchNorm (argument ignored in that case).
    """
    if use_group_norm:
        def _norm(norm_groups):
            return layers.GroupNormalization(groups=norm_groups)
        norm = _norm
    else:
        def _norm(_):
            # Signature kept compatible with the GroupNorm factory.
            return tf.keras.layers.BatchNormalization()
        norm = _norm
    inputs = tf.keras.layers.Input(input_shape)
    filters = first_chan
    connections = []
    pool = inputs
    # Encoder: conv blocks with max pooling, keeping skip connections.
    for i in range(pools):
        conv, pool = block(pool, filters, norm(first_chan // 2), True)
        connections.append(conv)
        if growth_add > 0:
            filters += growth_add
        else:
            filters *= growth_scale
    connections.reverse()
    # Bottleneck.
    conv = block(pool, filters, norm(first_chan), False)
    # Decoder: transposed-conv upsampling + skip concatenation.
    for i in range(pools):
        up = tf.keras.layers.Conv2DTranspose(filters, 3, strides=2, padding='same', kernel_initializer='he_normal')(conv)
        concat = tf.keras.layers.concatenate([connections[i], up])
        conv = block(concat, filters, norm(first_chan), False)
        if i < (pools - 1):
            if growth_add > 0:
                filters -= growth_add
            else:
                filters = filters // growth_scale
    conv = tf.keras.layers.Conv2D(filters, 7, padding='same', kernel_initializer='he_normal')(conv)
    # NOTE(review): two consecutive 1x1 projections to out_chans — the
    # first looks redundant; kept as-is to preserve the trained topology.
    conv = tf.keras.layers.Conv2D(out_chans, 1, padding='same', kernel_initializer='he_normal')(conv)
    conv_final = tf.keras.layers.Conv2D(out_chans, 1, padding='same', kernel_initializer='he_normal')(conv)
    alpha = tf.keras.activations.sigmoid(conv_final[:, :, :, 0:1])
    if out_chans == 7:
        fg = tf.keras.activations.sigmoid(conv_final[:, :, :, 1:4])
        bg = tf.keras.activations.sigmoid(conv_final[:, :, :, 4:7])
        alpha, fg, bg = fba_fusion(alpha, inputs, fg, bg)
        model = tf.keras.Model(inputs=inputs, outputs=[alpha, fg, bg])
    else:
        model = tf.keras.Model(inputs=inputs, outputs=alpha)
    return model
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,350
|
dreyk/emt
|
refs/heads/master
|
/data/one_person.py
|
import tensorflow as tf
import os
import cv2
import numpy as np
import glob
import data.coco as coco
import logging
def _strong_aug(p=0.5):
    """Heavy augmentation: flips, shift/scale, distortions, sharpening and
    colour jitter. *p* is the probability the composed pipeline fires.

    albumentations is imported lazily so importing this module does not
    require it.
    """
    import albumentations
    return albumentations.Compose([
        albumentations.HorizontalFlip(p=0.5),
        albumentations.ShiftScaleRotate(shift_limit=0.0625, scale_limit=0.2, rotate_limit=0, p=0.5,border_mode=cv2.BORDER_CONSTANT),
        # Exactly one geometric distortion per application (if any).
        albumentations.OneOf([
            albumentations.OpticalDistortion(p=0.5,border_mode=cv2.BORDER_CONSTANT),
            albumentations.GridDistortion(p=0.5,border_mode=cv2.BORDER_CONSTANT),
            albumentations.IAAPiecewiseAffine(p=0.5),
            albumentations.ElasticTransform(p=0.5,border_mode=cv2.BORDER_CONSTANT),
        ], p=0.5),
        albumentations.OneOf([
            albumentations.CLAHE(clip_limit=2),
            albumentations.IAASharpen(),
            albumentations.IAAEmboss(),
        ], p=0.5),
        albumentations.OneOf([
            albumentations.RandomBrightnessContrast(p=0.5),
        ], p=0.4),
        albumentations.HueSaturationValue(p=0.5),
    ], p=p)
def data_fn(args, training):
    """tf.data pipeline of augmented person images composited over random
    COCO backgrounds, paired with their alpha masks.

    Yields (image in [0,1] [res, res, 3], mask in [0,1] [res, res, 1]);
    the whole file list is repeated args.epoch_len times per epoch.
    """
    # Pair each mask file with the same-named image file.
    files = glob.glob(args.data_set + '/masks/*.*')
    for i in range(len(files)):
        mask = files[i]
        img = os.path.basename(mask)
        img = args.data_set + '/images/' + img
        files[i] = (img, mask)
    logging.info('Number of training files: {}'.format(len(files)))
    coco_bg = coco.CocoBG(args.coco)
    augmentation = _strong_aug(p=1)
    def _generator():
        for _ in range(args.epoch_len):
            for i in files:
                img = cv2.imread(i[0])[:,:,::-1]  # BGR -> RGB
                mask = cv2.imread(i[1])
                img = cv2.resize(img,(args.resolution,args.resolution))
                mask = cv2.resize(mask, (args.resolution, args.resolution))
                if len(mask.shape) == 3:
                    mask = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
                data = {"image": img, "mask": mask}
                augmented = augmentation(**data)
                img, mask = augmented["image"], augmented["mask"]
                bg = coco_bg.get_random(args.resolution,args.resolution)
                bg = bg.astype(np.float32)
                img = img.astype(np.float32)
                mask = mask.astype(np.float32)/255
                mask = np.expand_dims(mask,axis=2)
                # Alpha-composite the foreground over the random background.
                img = img*mask+bg*(1-mask)
                yield img/255, mask
    ds = tf.data.Dataset.from_generator(_generator, (tf.float32, tf.float32),
                                        (tf.TensorShape([args.resolution, args.resolution, 3]),
                                         tf.TensorShape([args.resolution, args.resolution, 1])))
    if training:
        ds = ds.shuffle(args.batch_size * 3, reshuffle_each_iteration=True)
    ds = ds.batch(args.batch_size, True)
    return ds
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,351
|
dreyk/emt
|
refs/heads/master
|
/data/alpha.py
|
import tensorflow as tf
import os
import cv2
import numpy as np
import glob
def data_fn(args, training):
    """tf.data pipeline yielding (image, mask) pairs normalised to [0, 1].

    Pairs are matched by file name between <data_set>/masks and
    <data_set>/images; both are resized to args.resolution. training=True
    enables per-iteration shuffling.
    """
    mask_paths = glob.glob(args.data_set + '/masks/*.*')
    pairs = []
    for mask_path in mask_paths:
        image_path = args.data_set + '/images/' + os.path.basename(mask_path)
        pairs.append((image_path, mask_path))

    def _generator():
        for image_path, mask_path in pairs:
            image = cv2.imread(image_path)[:, :, ::-1]  # BGR -> RGB
            mask = cv2.imread(mask_path)
            image = cv2.resize(image, (args.resolution, args.resolution))
            mask = cv2.resize(mask, (args.resolution, args.resolution))
            if len(mask.shape) == 3:
                mask = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)
            image = image.astype(np.float32) / 255
            mask = mask.astype(np.float32) / 255
            yield image, np.expand_dims(mask, axis=2)

    ds = tf.data.Dataset.from_generator(
        _generator, (tf.float32, tf.float32),
        (tf.TensorShape([args.resolution, args.resolution, 3]),
         tf.TensorShape([args.resolution, args.resolution, 1])))
    if training:
        ds = ds.shuffle(args.batch_size * 3, reshuffle_each_iteration=True)
    ds = ds.batch(args.batch_size, True)
    return ds
|
{"/models/resnet/model.py": ["/models/resnet/resnet_gn_ws.py"], "/unet_one_face.py": ["/models/unet/unet.py", "/data/one_person.py"], "/fc_densnet_train.py": ["/data/alpha_base.py", "/models/fc_densenet/matting.py", "/models/resnet/model.py"], "/data/one_person.py": ["/data/coco.py"]}
|
21,356
|
stasSajin/dw-benchmarks
|
refs/heads/master
|
/dw_benchmark/ddl/shared/utils.py
|
try:
import importlib.resources as pkg_resources
except ImportError:
# Try backported to PY<37 `importlib_resources`.
import importlib_resources as pkg_resources
from dw_benchmark import templates
def run_warmup(model, engine) -> None:
    """Warm the warehouse caches by scanning every table in *model*'s schema.

    Reads the packaged warmup.sql template and runs it with the model's
    schema name substituted in.
    """
    print("Running warmup by selecting * from all tables in a model")
    template = pkg_resources.read_text(templates, "warmup.sql")
    _ = run_query(engine, template.format(schema=model.schema_name))
def read_query(sql_file_path):
    """Return the full text of the SQL file at *sql_file_path*."""
    with open(sql_file_path) as handle:
        return handle.read()
def run_query(engine, statement):
    """Execute *statement* on *engine* in autocommit mode and return the
    result. A fresh connection is opened (and closed) per call."""
    with engine.connect() as connection:
        return connection.execution_options(autocommit=True).execute(statement)
def create_user_with_permissions(user: str, engine, pw: str = "Password1"):
    """Create *user* (if missing) and grant it full access to every schema.

    Iterates all schemas from information_schema and grants usage plus all
    table privileges to the user.
    NOTE(review): identifiers are interpolated into SQL directly — callers
    must pass trusted user names only.
    """
    try:
        run_query(engine, f"create user if not exists {user} password '{pw}';")
    except Exception:
        # FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); the create is best-effort.
        print(f"User {user} already exists. Skipping user creation")
    schemas = run_query(
        engine, "select schema_name from information_schema.schemata;"
    ).fetchall()
    for schema in schemas:
        schema_name = schema[0]
        print(f"Granting all permissions to schema {schema_name} to user {user}")
        stmt = f"grant usage on schema {schema_name} to {user}; grant all privileges on all tables in schema {schema_name} to {user};"
        run_query(engine, stmt)
|
{"/redshift-experiments/optimized_vs_unoptimized.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py", "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py"], "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py"]}
|
21,357
|
stasSajin/dw-benchmarks
|
refs/heads/master
|
/redshift-experiments/optimized_vs_unoptimized.py
|
from dw_benchmark.ddl.shared.models import TCPDS100g, TCPDS100gUnoptimized
from dw_benchmark.ddl.shared.utils import run_warmup
from dw_benchmark.ddl.shared.utils import create_user_with_permissions
from dw_benchmark.ddl.redshift_ddls.redshift_utils import get_redshift_engine
from dw_benchmark.ddl.redshift_ddls.redshift_utils import run_copy_commands
from dw_benchmark.ddl.redshift_ddls.redshift_utils import run_ddl
from dw_benchmark.ddl.redshift_ddls.redshift_utils import run_analyze
import config
# Build the engine once, then load and analyze each model variant before
# warming caches and provisioning the benchmark user.
engine = get_redshift_engine(
    host=config.host, db=config.db, user=config.user, pwd=config.pw
)
for model in [TCPDS100g, TCPDS100gUnoptimized]:
    # BUG FIX: the loop previously ignored `model` and ran the hard-coded
    # classes (DDL for TCPDS100g, copy for TCPDS100gUnoptimized) twice,
    # so the unoptimized DDL was never created and the optimized tables
    # were never loaded.
    run_ddl(model, engine=engine)
    run_copy_commands(model, engine=engine, role=config.role)
    run_analyze(engine)
run_warmup(TCPDS100gUnoptimized, engine=engine)
run_warmup(TCPDS100g, engine=engine)
create_user_with_permissions("tcpds", engine=engine)
|
{"/redshift-experiments/optimized_vs_unoptimized.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py", "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py"], "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py"]}
|
21,358
|
stasSajin/dw-benchmarks
|
refs/heads/master
|
/dw_benchmark/ddl/shared/models.py
|
from enum import Enum
# Each enum bundles the constants needed to stage one TPC-DS dataset:
# its S3 source prefix, the target schema name, and which DDL template
# (optimized vs. not) to render.
# NOTE(review): these are (str, Enum) subclasses, so each annotated
# assignment defines an enum member; callers appear to format the members
# directly (e.g. f"{model.schema_name}") and expect the plain string
# value — confirm this still holds on Python 3.11+ where Enum formatting
# changed.
class TCPDS100g(str, Enum):
    # 100 GB fivetran dump, loaded with the tuned (optimized) DDL.
    s3_url: str = "s3://fivetran-benchmark/tpcds_100_dat"
    schema_name: str = "tcpds100g"
    ddl: str = "optimized.sql"
class TCPDS100gUnoptimized(str, Enum):
    # Same 100 GB dump, loaded with the baseline (untuned) DDL.
    s3_url: str = "s3://fivetran-benchmark/tpcds_100_dat"
    schema_name: str = "tcpds100g_unoptimized"
    ddl: str = "not_optimized.sql"
class TCPDS1T(str, Enum):
    # 1 TB fivetran dump, optimized DDL.
    s3_url: str = "s3://fivetran-benchmark/tpcds_1000_dat"
    schema_name: str = "tcpds1TB"
    ddl: str = "optimized.sql"
class TCPDS3T(str, Enum):
    # 3 TB AWS redshift-downloads dump, optimized DDL.
    s3_url: str = "s3://redshift-downloads/TPC-DS/3TB"
    schema_name: str = "tcpds3TB"
    ddl: str = "optimized.sql"
class TCPDS10T(str, Enum):
    # 10 TB AWS redshift-downloads dump, optimized DDL.
    s3_url: str = "s3://redshift-downloads/TPC-DS/10TB"
    schema_name: str = "tcpds10TB"
    ddl: str = "optimized.sql"
|
{"/redshift-experiments/optimized_vs_unoptimized.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py", "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py"], "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py"]}
|
21,359
|
stasSajin/dw-benchmarks
|
refs/heads/master
|
/dw_benchmark/ddl/redshift_ddls/redshift_utils.py
|
try:
import importlib.resources as pkg_resources
except ImportError:
# Try backported to PY<37 `importlib_resources`.
import importlib_resources as pkg_resources
from dw_benchmark import templates
from dw_benchmark.ddl.shared.models import TCPDS100g
from dw_benchmark.ddl.shared.models import TCPDS100gUnoptimized
from dw_benchmark.ddl.shared.models import TCPDS1T
from dw_benchmark.ddl.shared.models import TCPDS3T
from dw_benchmark.ddl.shared.models import TCPDS10T
from dw_benchmark.ddl.shared.utils import run_query
from sqlalchemy import create_engine
def get_redshift_engine(host, db, user, pwd):
    """Return a SQLAlchemy engine for a Redshift cluster (postgres driver, port 5439)."""
    url = "postgresql://" + f"{user}:{pwd}@{host}:5439/{db}"
    return create_engine(url)
def generate_ddl(model):
    """Load the model's DDL template and substitute its schema name."""
    schema = model.schema_name
    print(f"Generating DDL for {schema}")
    template_text = pkg_resources.read_text(templates, model.ddl)
    return template_text.format(schema=schema)
def run_copy_commands(model, engine, role):
    """Issue a Redshift COPY for every TPC-DS table in *model*'s schema.

    The COPY options depend on where the data lives: fivetran dumps are
    plain pipe-delimited text, the redshift-downloads dumps are gzipped.
    """
    fivetran_sources = (TCPDS100g, TCPDS100gUnoptimized, TCPDS1T)
    if model in fivetran_sources:
        copy_opts = (
            "format delimiter '|' acceptinvchars compupdate on region 'us-east-1'"
        )
    else:
        # AWS redshift-downloads data sources
        copy_opts = "gzip delimiter '|' compupdate on region 'us-east-1'"
    table_names = (
        "store_sales", "catalog_sales", "web_sales", "web_returns",
        "store_returns", "catalog_returns", "call_center", "catalog_page",
        "customer_address", "customer", "customer_demographics", "date_dim",
        "household_demographics", "income_band", "inventory", "item",
        "promotion", "reason", "ship_mode", "store", "time_dim",
        "warehouse", "web_page", "web_site",
    )
    for table in table_names:
        print(f"Running copy for table {model.schema_name}.{table}")
        stmt = f"copy {model.schema_name}.{table} from '{model.s3_url}/{table}/' iam_role '{role}' {copy_opts};"
        run_query(engine, stmt)
def run_ddl(model, engine) -> None:
    """Render *model*'s DDL template and execute it against the cluster."""
    run_query(engine, generate_ddl(model))
def run_analyze(engine) -> None:
    """Compute table statistics across the whole database."""
    run_query(engine, "analyze;")
|
{"/redshift-experiments/optimized_vs_unoptimized.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py", "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py"], "/dw_benchmark/ddl/redshift_ddls/redshift_utils.py": ["/dw_benchmark/ddl/shared/models.py", "/dw_benchmark/ddl/shared/utils.py"]}
|
21,360
|
meenasirisha145/python18
|
refs/heads/master
|
/chapter2.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 16 11:52:04 2018 by Meena Sirisha"""
name="meena sirisha"
name.title()
name.lower()
name.upper()
f_name="meena"
l_name="sirisha"
full_name=f_name+" "+l_name
full_name
print("Hello"+" "+full_name.title()+".")
print("Languages:\nPython\nC\nJavaScript")
print("Skills:\nC\tC++\tJava")
lan=" Machine Learning using Python "
lan
lan.rstrip()
lan
lan.lstrip()
lan.strip()
lan
lan=lan.strip()
lan
message = 'One of Python\'s strengths is its diverse community.'
print(message)
a="Machine Learning"
b=1
print(a+" "+str(b)+"st class")
import this
#%%---------------CHAPTER-3----------------------%%#
bicycles = ['trek', 'cannondale', 'redline', 'specialized']
print(bicycles)
print(bicycles[2].title())
bicycles[0]="hero"
print(bicycles)
bicycles.append("ladybird")
print(bicycles)
bicycles.pop()
bicycles
bicycles.sort()
bicycles
bicycles.insert(2,"ladybird")
bicycles
del(bicycles[1])
bicycles
bicycles.pop(1)
bicycles
bicycles.remove("redline")
bicycles
#%%%------------------Practise Exercise----------------------%%%#
guests=["apoorva","shruti","soumya","aadhya"]
for guest in guests:
print(guest.title() + ","+" " "I kindly invite you to the dinner at my home")
print("Task Completed : Inviting People")
print("oh Shruti is unable to attend the dinner")
#Replacing Shruti with Priya
guests[1]="Priya"
guests
for guest in guests:
print(guest.title() + ","+" "+ "I kindly invite you to the dinner at my home")
for guest in guests:
print(guest.title() + ","+" " +"I found a bigger dining table")
guests.insert(0,"Pragati")
guests.insert(2,"ramya")
guests.append("ujala")
guests
for guest in guests:
print(guest.title() + ","+" "+ "I kindly invite you to the dinner at my home")
print("I can invite only two people for the dinner")
for guest in guests[2:]:
print(guest.title()+","+" "+"I can't invite you to the dinner")
guests.pop()
guests
for guest in guests:
print(guest.title() + ","+" "+ "you are still invited to the dinner at my home")
del(guests)
places=["US","UK","Canada","Finland","Singapore"]
places
sorted(places)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,361
|
meenasirisha145/python18
|
refs/heads/master
|
/tweets.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 24 09:29:23 2018 by Meena Sirisha"""
import tweepy
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from tweepy import Stream
consumer_key = '9czpgrhLcCi6k3xzkkRrLXef4'
consumer_secret = '0wIhwcUnUyQWPScb5ndhdHBetXyu89ygVq0v33b9ffkbaVpP1U'
access_token = '964058868992086016-vjVnFGDqFF1wEtng3qfiWKQZjKuSY4A'
access_secret = ' o5I3NCIaHP49VoW7VzzpnhI7vlzfTA2khdqdFGwOM4b04'
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
class listener(StreamListener):
    """Tweepy stream listener that appends each raw tweet payload to a file."""

    def on_data(self, data):
        """Persist the raw JSON payload and keep the stream alive.

        BUG FIXES over the original:
        - the `try:` had no except/finally clause, which is a SyntaxError;
        - `f.write` and `f.flush` were bare attribute accesses (no-ops) —
          they are now actual calls writing the raw payload;
        - `json` was referenced but never imported; a function-local import
          keeps the fix self-contained.
        """
        import json
        try:
            with open('data/twitter_data.txt', 'a', encoding='utf8') as f:
                all_data = json.loads(data)
                print(all_data)
                f.write(data)
                f.flush()
            return True
        except Exception as e:
            # Per-tweet failures (bad JSON, missing directory) shouldn't
            # kill the stream; report and continue.
            print(e)
            return True

    def on_error(self, status):
        # Surface stream errors (e.g. HTTP status codes) without stopping.
        print(status)
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
twitterStream = Stream(auth, listener())
twitterStream.filter(track=["car"])
|
{"/mat.py": ["/matplotlib.py"]}
|
21,362
|
meenasirisha145/python18
|
refs/heads/master
|
/var.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 16 11:01:27 2018 by Meena Sirisha """
x=5
x
y=8
y
z=8
z
import os
import sys
os.path.dirname(sys.executable)
import keyword
print(keyword.kwlist)
a=b=c=1
a
b
c
a,b,c=1,2,"tom"
a
b
c
a;b;c
print(a,b,c)
for i in range(4):
print(i)
print(i+2)
for j in range(3):
print(j)
for i in range(4):
print(i,end=',')
for i in range(4):
print(i,i+2, sep='-',end=',')
p=1
q=2
r=3
total=p+\
q+\
r
total
w='''tom
boy'''
w
a=10
print("no of students:",a)
print(1,2,3,sep="*")
n=input("enter a number: ")
y=eval(input("enter a num"))
t=8+y
t
import math
math.pi
|
{"/mat.py": ["/matplotlib.py"]}
|
21,363
|
meenasirisha145/python18
|
refs/heads/master
|
/matplotlib.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 9 14:55:52 2018 by Meena Sirisha """
#importong matplotlib
import matplotlib as mpl
import matplotlib.pyplot as plt #this is used most often
#aesthetics style like aes in ggplot
plt.style.use('classic')
#%%plots
#plotting from script
import matplotlib.pyplot as plt
import numpy as np
x=np.linspace(0,10,100)
plt.plot(x,np.sin(x))
plt.plot(x,np.cos(x))
plt.show()
|
{"/mat.py": ["/matplotlib.py"]}
|
21,364
|
meenasirisha145/python18
|
refs/heads/master
|
/mat.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 9 15:50:03 2018
@author: Meena
"""
import matplotlib as mpl
import numpy as np
|
{"/mat.py": ["/matplotlib.py"]}
|
21,365
|
meenasirisha145/python18
|
refs/heads/master
|
/untitled0.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 15 14:29:00 2018 by Meena Sirisha"""
a=list(range(1,101))
a
import random
a=random.sample(range(1,101),100)
print(a)
print(min(a))
print(max(a))
b=sorted(a)
print(b)
len(b)
b[round(len(b)/2)]
len(b)%2==0
round((len(b)/2)-1)
(b[round((len(b)/2)-1)]+b[round(len(b)/2)])/2
def median(l):
    """Print the median of the sorted list *l*.

    BUG FIXES over the original:
    - the parity check was inverted (even-length lists printed a single
      middle element, odd-length lists averaged two);
    - `round(len(l)/2)` uses banker's rounding, which picks the wrong
      middle index for odd lengths (e.g. round(1.5) == 2). Integer
      division is used instead.
    """
    mid = len(l) // 2
    if len(l) % 2 == 0:
        # even count: average the two middle elements
        print((l[mid - 1] + l[mid]) / 2)
    else:
        # odd count: the single middle element
        print(l[mid])
median(b)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,366
|
meenasirisha145/python18
|
refs/heads/master
|
/py2.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 22 10:49:20 2018 by Meena Sirisha"""
|
{"/mat.py": ["/matplotlib.py"]}
|
21,367
|
meenasirisha145/python18
|
refs/heads/master
|
/np1/numpy1.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 31 11:19:48 2018 by Meena Sirisha """
import numpy as np
np.__version__
np.abs
np.array([1,4,2,5,3])
l=[i for i in range(5)]
l
l
np.full((3,5),3.14)
x=np.arange(0,20,2)
len(x)
np.shape(x)
np.linspace(0,1,5)
np.random.random((3,3))
np.random.normal(0,1,(3,3))
np.random.randint(0,10,(3,3))
np.eye(3)
np.empty(3)
np.zeros(10)
np.random.seed(0)
x1=np.random.randint(10,size=6)
x2=np.random.randint(10,size=(3,4))
x3=np.random.randint(10,size=(3,4,5))
x1
x2
x3
x1,x2,x3
x3.ndim
x3.shape
x3.size
x3.itemsize
x3.nbytes
x2[0][2]
x3[2][1][0]
x=np.arange(10)
x
x[:5]
x[5:]
x[4:7]
x[::2]
x[1::2]
x[0::2]
x[1::3]
x[::3]
x[::-1]
x[::-3]
x2
x2[:2,:3]#two rows and three columns
x2[:3,::2]
x2[:3,::3]
x2[::-1,::-1]
x2[:,0]
x2[0,:]
y=x2[:2,:2]
y[0][0]=25
y
x2
z=x2[:2,:2].copy()
z
z[0][0]=52
z
x2
|
{"/mat.py": ["/matplotlib.py"]}
|
21,368
|
meenasirisha145/python18
|
refs/heads/master
|
/Assignment/asgn1.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 18:58:50 2018 by Meena Sirisha"""
import urllib.request
import re
url = "https://www.sec.gov/Archives/edgar/data/3662/0000889812-99-003241.txt"
req = urllib.request.Request(url)
resp = urllib.request.urlopen(req)
respData = resp.read()
respData
theText = respData.decode()
theText
r = re.findall("\nITEM \d+\. MANAGEMENT'S DISCUSSION AND ANALYSIS .*?(?:\nITEM|\nPART)", theText,re.S)
r
import nltk
from nltk.tokenize import word_tokenize
w1=word_tokenize(r)
len(w1)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,369
|
meenasirisha145/python18
|
refs/heads/master
|
/charshift.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 10 22:14:18 2018
@author: Meena
"""
strs = 'abcdefghijklmnopqrstuvwxyz' #use a string like this, instead of ord()
def shifttext():
    """Prompt for a string and a shift, return the Caesar-shifted result.

    Characters present in `strs` (lowercase a-z) are rotated by the shift
    amount modulo 26; anything else (spaces, punctuation) passes through
    unchanged.
    """
    text = input('Input string here: ')
    amount = int(input('input shift here: '))
    shifted = []
    for ch in text:
        if ch in strs:
            shifted.append(strs[(strs.index(ch) + amount) % 26])
        else:
            # non-alphabet characters are copied verbatim
            shifted.append(ch)
    return ''.join(shifted)
shifttext()
|
{"/mat.py": ["/matplotlib.py"]}
|
21,370
|
meenasirisha145/python18
|
refs/heads/master
|
/list1.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 16 12:52:24 2018 by Meena Sirisha """
##lists
x=[1,2,3]
x
print(x)
x=[1,8,5,6,8,4,5,6,4]
x[0]
len(x)
sum(x)
max(x)
min(x)
for i in range(len(x)):
print(x[i],end=',')
if 3 in x:
print("yes")
else:
print("no")
x.append(3)
x
sorted(x)
x.index(5)
#lists are mutable
l=[1,2,"a"]
l
type(l)
l[1]=3
l
#tuples are immutable list but faster and consumes less memory
t=(1,2,"a")
type(t)
t[2]=3##does not support item assignment
#Dictonaries
d={"a":1,"b":2,3:"d"}
d
type(d)
d["b"]=6
d
##sets
s1={1,2,3}#unordered so slicing cannot be done
#frozen set
f=frozenset({3,8,4,6})
type(f)#immutable
###practise for lists
b="nice"
a="ls"
my_list=['my','list',a,b]
my_list
my_list2=[[4,5,6,7],[3,4,5,6]]
my_list2
my_list[1]
my_list[-3]
my_list[1:3]
len(my_list)
my_list[-len(my_list)]
my_list[1:]
my_list[:3]
my_list2[1][0]
my_list2[0][2]
my_list2[0][:3]
my_list+my_list
my_list+my_list2
my_list*2
my_list2[1][1]>4
my_list.index(a)
my_list.count(a)
my_list.append("!")
my_list
my_list.extend("!")
my_list
my_list.count("!")
my_list.sort()
my_list
my_list.remove("!")
my_list
my_list.pop()
my_list
x=[i for i in range(10)]
print(x)
squares=[]
for x in range(10):
squares.append(x**2)
print(squares)
l3=[1,2,"a",True]
sum(l3[0:2])
l1=[1,2,3,4,5]
l2=[1,2,3,"a",True]
l3=[i for i in range(5)]
l3
type(l1)
type(l2)
type(l3)
type(l3[4])
for i in range(len(l2)):
print(type(l2[i]))
for i in range(len(l2)):
print(type(l2[i]),end=' ')
l=l1+l2
l
sum(l)
sum(l[1:4])
l[len(l)-2].upper()
len(l)
#list inside a list
l4=[1,2,[l2]]
l4
print(l4)
l4[1]
l4[2]
l4[2][0]
l4[2][0][0]
l4=[1,2,l2]
l4[2][0]
#dictionaries
d1={1:'appu',2:'meenu',3:'hitesh',4:'lalit',5:'achal','dean':'dhiraj sir','l':[1,2,3]}
d1
d1.keys()
d1.values()
d1.items()
d1[1]
d1['dean']
d1['l']
d1['f']=d1.pop(4)
d1
d1['f']='lalit sahni'
d1
d1['l'][1]='z'
d1
for key in d1:
print(key,end=' ')
print(d1[key],end=' ')
#list to a set
l1=[1,2,3,4,5,5]
s1=set(l1)
type(s1)
s1
a=set([1,2,3,4,5,6])
a
b=set([1,2,"a",True,l4])
b
s2=set()
s2.add(4)
s2.add(5)
s2
a|b#union
a&b#intersection
a<b#subset
a.issubset(b)
a-b#difference
b-a
a.issuperset(b)
a^b
len(a)
c=set([4,5,3,2,1,6])
sorted(c)
s3=set([1,2,4,'apple','Tom',3])
s4=set([1,None,3])
s4
all(s4)
all(s3)
s3.remove(1)
s3
s3.remove(1)
s3.discard(1)
s3.pop()
s3
s5=s3|s4
s5
s3.update(s4)
s3
s3.isdisjoint(s4)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,371
|
meenasirisha145/python18
|
refs/heads/master
|
/pivot.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 16 15:43:36 2018 by Meena Sirisha"""
import numpy as np
import pandas as pd
rollnosL=[101,102,103,104,105,106,107,108,109,110,111]
namesL=["meena","apoorva","kaustav","shubham","goldie","hitesh","shruti","vijay","lalit","achal","varun"]
genderL=['F','F','M','M','M','M','F','M','M','M','M']
pythonL=np.random.randint(60,90,11)
sasL=np.random.randint(60,90,11)
hadoopL=np.random.randint(70,90,11)
feesL=np.random.randint(100000,150000,11)
courseL=["pg","pg","msc","msc","pg","pg","pg","pg","pg","pg","bsc"]
hadoopL=np.random.randint(60,90,11)
hostelL=[True,False,True,False,False,False,False,True,True,True,False]
df=pd.DataFrame({'rollno':rollnosL,'name':namesL,'gender':genderL,'hostel':hostelL,'python':pythonL,'sas':sasL,'hadoop':hadoopL,'course':courseL,'fees':feesL},columns=['rollno','name','gender','hostel','python','sas','hadoop','course','fees'])
df
df['total']=df['python']+df['sas']+df['hadoop']
df
df.to_csv("student.csv")
df.columns
df.groupby('gender').mean()
df.groupby('gender').size()
df.groupby('gender').sum()
from numpy import random
classes=['C1','C2','C3']
sclass = random.choice(classes,11)
sclass
df['sclass']=pd.Series(sclass)
df
pd.pivot_table(df,index=['name'])
pd.pivot_table(df,index=['name','sclass'])
pd.pivot_table(df,index=['name','sclass','hostel'])
pd.pivot_table(df,index=['course','sclass',],values=['total','python'])#default is mean
pd.pivot_table(df,index=['course','sclass',],values=['total','python'],aggfunc=np.sum)
pd.pivot_table(df,index=['course','sclass',],values=['total','python'],aggfunc=[np.sum,np.mean,len])
|
{"/mat.py": ["/matplotlib.py"]}
|
21,372
|
meenasirisha145/python18
|
refs/heads/master
|
/np1/pandas2.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 1 13:40:26 2018 by Meena Sirisha """
import pandas as pd
from pandas import *
s=Series([3,7,4,4,0.3],index=['a','b','c','d','e'])
s
df=DataFrame(np.arange(9).reshape(3,3),index=['b','a','c'],columns=['Paris','Berlin','madrid'])
df
df[:2]
df[1:2]
df[:2]
df[df['Paris']>1]
df['Paris']
df.Berlin[df['Berlin']>1]=0
df
df.ix['a','Berlin']
df.ix[['b','c'],'Berlin']
df.ix['a',['Berlin','madrid']]
s.drop('d')
df.drop('Berlin',axis=1)
df.drop('c')
df
s2=Series([0,1,2],index=['a','c','f'])
s2
s+s2
s.add(s2,fill_value=0)
s.subtract(s2,fill_value=0)
s.align(s2,join='outer')
s.align(s2,join='inner')
df2=DataFrame(np.arange(12).reshape(4,3),index=['b','e','a','c'],columns=['Paris','Lisbonne','madrid'])
df2
df+df2
df.add(df2,fill_value=0)
l1=[0,1,2,3,4,5,6]
type(l1)
l2=['b','b','a','c','a','a','b']
import numpy as np
from numpy import*
d1={'data1':arange(7),'keyleft':l2}
d1
df1=DataFrame(d1,columns=['data1','keyleft'])
df1
d2={'data2':arange(4),'key':['a','b','d','a']}
df2=DataFrame(d2,columns=['data2','key'])
df2
pd.merge(df1,df2,left_on='keyleft',right_on='key',how='inner')
pd.merge(df1,df2,left_on='keyleft',right_on='key',how='outer')
merge(df1,df2)
d3={'data1':arange(6),'key':['a','b','a','a','b','c']}
d3
df3=DataFrame(d3,columns=['data1','key'])
df3
d4={'data2':arange(5),'key':['a','b','a','b','d']}
df4=DataFrame(d4,columns=['data2','key'])
df4
pd.merge(df3,df4,on='key',how='left')
pd.merge(df3,df4,on='key',how='right')
s
s.rank()
s.rank(method='first')
s.rank(method='max',ascending=False)
df
df.rank()
df.rank(axis=1)#ranking row wise
s.sort_index(ascending=False)
s.sort_index()
df.sort_index()
df.sort_index(by='Berlin')
df.sort_index(axis=1)
df.max()
df+df.max()
f=lambda x: math.sqrt(x)
df.applymap(f)
df['Berlin'].map(f)
math.factorial(5)
###Computing Descriptive Statistics####
df.describe()
df.sum()
df.sum(axis=1)
df.cov
df.corr()
df.reindex(['c','b','a','g'])
df.reindex(['c','b','a','g'],fill_value=15)
df.reindex(columns=['Varsovie','Paris','madrid'])##works with only unique index values
import os
os.getcwd()
os.chdir("F:\pywork\pyWork\pyProjects\mspython18")
data=pd.read_csv("50_Startups.csv")
data
|
{"/mat.py": ["/matplotlib.py"]}
|
21,373
|
meenasirisha145/python18
|
refs/heads/master
|
/list2.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 31 10:22:49 2018 by Meena Sirisha """
l=[1,2,3]
for i in range(len(l)):
print(l[i],sep=" ",end=".")
def square(a):
""" This will square the value """
return(a**2)
square(3)
l.append(3)
l
%%timeit
l = []
for n in range(1000):
l.append(n**2)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,374
|
meenasirisha145/python18
|
refs/heads/master
|
/crawlown.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 31 10:20:37 2018 by Meena Sirisha"""
import requests #used for calling url
import csv
from bs4 import BeautifulSoup #converts the text into structured form
page=requests.get("https://www.fantasypros.com/nfl/reports/leaders/qb.php?year=2017")
page
soup = BeautifulSoup(page.text,'html.parser')
soup
tables=soup.find_all("table")
for table in tables:
print(table.get("id"))
if(table.get("id")=="data"):
for row in table.find_all("tr"):
for col in row.find_all("td"):
print(col.contents[0],end=" ")
|
{"/mat.py": ["/matplotlib.py"]}
|
21,375
|
meenasirisha145/python18
|
refs/heads/master
|
/BM1.py
|
import numpy as np
import pandas as pd
import csv
import math
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error
train = pd.read_csv('bigmarttrain.csv')
train.isnull().sum()
w = train.loc[train.Item_Weight.isnull(),'Item_Identifier'].unique()
w
w.shape
list(w)
for x in list(w):
train.loc[(train.Item_Weight.isnull()) & (train.Item_Identifier==x),'Item_Weight'] = train.loc[train.Item_Identifier==x,'Item_Weight'].median()
for x in list(w):
print(x)
train.isnull().sum()
train.loc[train.Item_Weight.isnull(),'Item_Weight'] = train.loc[:,'Item_Weight'].median()
z = train.loc[train.Outlet_Size.isnull(),'Outlet_Type']
for x in z:
train.loc[(train.Outlet_Size.isnull()) & (train.Outlet_Type==x),'Outlet_Size'] = train.loc[train.Outlet_Type==x,'Outlet_Size'].mode()[0]
train.isnull().sum()
train.Item_Fat_Content=train.Item_Fat_Content.astype("category").cat.codes
train.head()
train.Outlet_Type=train.Outlet_Type.astype("category").cat.codes
train.head()
train.Outlet_Location_Type=train.Outlet_Location_Type.astype("category").cat.codes
train.head()
train.Outlet_Establishment_Year=train.Outlet_Establishment_Year.astype("category").cat.codes
train.Outlet_Size=train.Outlet_Size.astype("category").cat.codes
train.Item_Type=train.Item_Type.astype("category").cat.codes
train.head()
target = train.Item_Outlet_Sales
train = train.drop(['Item_Outlet_Sales','Item_Identifier','Outlet Identifier'],axis=1)
train1 = train[0:7600]
train2 = train[7600:]
target1 = target[0:7600]
target2 = target[7600:]
tra1 = np.array(train1)
tra2 = np.array(train2)
tar1 = np.array(target1)
tar2 = np.array(target2)
model = RandomForestRegressor(n_estimators=200,criterion='mse',max_depth=None,min_samples_split=75,min_samples_leaf=1,max_features='auto',max_leaf_nodes=None,\
min_impurity_split=1e-07,bootstrap=True,oob_score=False,n_jobs=-1,random_state=79,verbose=1,warm_start=False)
model = model.fit(tra1,tar1)
scorer = mean_squared_error(tar2,model.predict(tra2))
print(math.sqrt(scorer))
test = pd.read_csv('test.csv')
test.isnull().sum()
wt = test.loc[test.Item_Weight.isnull(),'Item_Identifier']
test.loc[test.Item_Weight.isnull(),'Item_Weight'] = [test.loc[test.Item_Identifier==wt.values[i],'Item_Weight'].median() for i in range(len(wt))]
test.loc[test.Item_Weight.isnull(),'Item_Weight'] = test.loc[:,'Item_Weight'].median()
zt = test.loc[test.Outlet_Size.isnull(),'Outlet_Type']
test.loc[test.Outlet_Size.isnull(),'Outlet_Size'] = [test.loc[test.Outlet_Type==zt.values[i],'Outlet_Size'].mode()[0] for i in range(len(zt))]
itemsid = test.Item_Identifier
storeid = test.Outlet_Identifier
test.loc[:,'Item_Identifier'] = [dict1[test.Item_Identifier.values[i]] for i in range(len(test))]
test.loc[:,'Item_Fat_Content'] = [dict2[test.Item_Fat_Content.values[i]] for i in range(len(test))]
test.loc[:,'Item_Type'] = [dict3[test.Item_Type.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Identifier'] = [dict4[test.Outlet_Identifier.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Size'] = [dict5[test.Outlet_Size.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Location_Type'] = [dict6[test.Outlet_Location_Type.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Type'] = [dict7[test.Outlet_Type.values[i]] for i in range(len(test))]
tester = np.array(test)
pred = model.predict(tester)
submission = pd.DataFrame(itemsid,columns=['Item_Identifier'])
submission['Outlet_Identifier'] = storeid
submission['Item_Outlet_Sales'] = pred
submission.to_csv('BM1.csv',index=False)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,376
|
meenasirisha145/python18
|
refs/heads/master
|
/pivot1.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 19 12:40:36 2018 by Meena Sirisha"""
import numpy as np
import pandas as pd
data=pd.read_csv("F:\pywork\pyWork\pyProjects\mspython18/student.csv",header=0)
data
data.head()
data.columns
data.dtypes
data.select_dtypes(['object'])#only string
data['rollno'].dtype
del data['Unnamed: 0']
#data.drop(labels="Unnamed: 0",axis=1,inplace=True)
data.head()
data.describe()
data.groupby('course')['sclass'].describe()
data.groupby('course')['sclass'].describe().unstack()
data.groupby('sclass')#nothing
data.groupby('sclass').aggregate([min,np.median,max])
data[['sclass','python','sas']].groupby('sclass').aggregate([min,np.median,max,np.sum,np.std])
data[['python']]
data[['course','hadoop','sas']].groupby('course').aggregate([np.mean,np.median,min,max])
pd.pivot_table(data,index="course",values=["sas","hadoop"],aggfunc=[np.mean,np.median,min,max])
pd.pivot_table(data,index=["course","gender"],values=["sas","hadoop"],aggfunc=[np.mean,np.median,min,max])
pd.pivot_table(data,index="gender",columns="sclass",values='sas').plot(kind="bar")
aggregation={'sas':{'totalsas':'sum','avgsas':'mean'},'hadoop':{'meanhadoop':'mean','stdhadoop':'std'}}
data[data['sclass']=='C1'].groupby('gender').agg(aggregation)
data.groupby(['gender','sclass']).agg({'python':[min,max,np.mean]})
|
{"/mat.py": ["/matplotlib.py"]}
|
21,377
|
meenasirisha145/python18
|
refs/heads/master
|
/Assignment/asgn.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 10:22:54 2018 by Meena Sirisha"""
import numpy as np
import pandas as pd
import math
import requests #used for calling url
import csv
from bs4 import BeautifulSoup #converts the text into structured form
data=pd.read_csv('F:\pywork\pyWork\pyProjects\pythonbasic\Assignment\cik_list.csv')
data
data.columns.values
urls=data.SECFNAME.tolist()
urls
type(urls)
from urllib.parse import urlencode
url1="https://www.sec.gov/Archives/"
type(url1)
links=[]
for url in urls:
links.append(url1+url)
links
pages=[]
for link in links:
page=requests.get(link)
pages.append(page)
pages[0]
pages[0].content
soups=[]
for pag in pages:
soup = BeautifulSoup(pag.text,'html.parser')
soups.append(soup)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,378
|
meenasirisha145/python18
|
refs/heads/master
|
/plot.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 12 14:54:37 2018 by Meena Sirisha"""
x1=[1,2,3]
y1=[2,4,1]
x2=[2,5,6,7,8]
y2=[5,4,8,6,1]
import matplotlib.pyplot as plt
plt.plot(x1,y1,label="line1")
plt.plot(x2,y2,label="line2")
plt.xlabel("X axis")
plt.ylabel("Y axis")
plt.show()
x1,y1
tick_label=["one","two","three"]
plt.bar(x1,y1,tick_label=tick_label,width=0.8,color=["red","green"])
marks=np.random.uniform(30,100,1000)
marks
np.all(marks >= 30)
np.all(marks < 100)
range=(20,100)
bins=10
plt.hist(marks,bins,range,color="green",histtype="bar",rwidth=0.8)
plt.scatter(x1,y1)
x1,y1
activity = ['sleep','study','eat']
colors = ['red','green','yellow']
plt.pie(y1, labels=activity, colors = colors)
plt.pie(y1, labels=activity, colors = colors, startangle=45, shadow=True, radius=1.2, explode=(0.1,0.2,0.3), autopct = '%1.1f%%')
#rotate start of pie by 90deg, explode offset each wedge, autopct - label format
plt.legend()
plt.show()
|
{"/mat.py": ["/matplotlib.py"]}
|
21,379
|
meenasirisha145/python18
|
refs/heads/master
|
/np.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 16 10:51:20 2018
@author: Meena
"""
import numpy as np
|
{"/mat.py": ["/matplotlib.py"]}
|
21,380
|
meenasirisha145/python18
|
refs/heads/master
|
/groupby.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 19 11:14:31 2018 by Meena Sirisha"""
#%%Group by
import numpy as np
import pandas as pd
#Marks
rng=np.random.RandomState(42)
marks=pd.Series(rng.randint(50,100,11))
marks
marks.sum()
marks.std()
#Dictionary
dict(x=1,y=4)
#Groupwise
df=pd.DataFrame({'A':rng.randint(1,10,6),'B':rng.randint(1,10,6)})
df
df.sum(axis=0)
df.sum(axis=1)
df.mean()
df.mean(axis=0)
df.mean(axis='columns')
df.mean(axis='rows')
df.describe()
#GroupBy
# Split -Apply -Combine
#Repeat
['A','B','C']*2
np.repeat(['A','B','C'],2)
np.repeat(['A','B','C'],[1,2,3])
df1=pd.DataFrame({'key':['A','B','C']*2,'data1':range(6),'data2':rng.randint(0,10,6)},columns=['key','data1','data2'])
df1
df1.groupby('key').sum()
grouped=df1.groupby('key')
grouped.sum()
df1.groupby('key').aggregate(['min','max','median'])
df1.groupby('key').aggregate([np.median,'median'])#error they are repeating
df1.groupby('key').aggregate({'data1':'min','data2':'max'})
df1.groupby('key').aggregate([np.median])
df1.groupby('key').aggregate({'data1':['min','mean'],'data2':['min','max']})
#Filter :Select Column
df1.filter(items=['data1','key'])
df1.filter(like='0',axis=0)#row by position
df1.filter(like='2')
df1.filter(like='e',axis=1)
df1.filter(like='d',axis=1)#col by position
df1.groupby('key').std()
#Lambda
df1['data2'].mean()
df1['data1'].mean()
df1
grouped.filter(lambda x : x['data2'].mean()>4)#list the elements of group whose mean is >4
grouped.filter(lambda x : x['data2'].std()>4)
grouped.transform(lambda x : x-x.mean())
#Apply Method
grouped.apply(lambda x : x['data2']*2)
#Provide Group Keys
df1.groupby('key').sum()
df1.groupby(df1['key']).sum()
df2=df1.set_index('key')
df2
newmap={'A':'Post Graduate','B': 'MSc','C': 'BSc'}
df2.groupby(newmap).sum()
df2.groupby(str.lower).mean()
df2.groupby([str,str.lower,newmap]).mean()#str =index
#Stack
df2.groupby('key').sum().unstack()
|
{"/mat.py": ["/matplotlib.py"]}
|
21,381
|
meenasirisha145/python18
|
refs/heads/master
|
/myplot.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 9 15:02:04 2018 by Meena Sirisha """
# Demo: plot sine and cosine curves over a shared x-range and show them.
import matplotlib.pyplot as plt
import numpy as np
x=np.linspace(0,10,100)  # 100 evenly spaced samples over [0, 10]
plt.plot(x,np.sin(x))
plt.plot(x,np.cos(x))
plt.show()
|
{"/mat.py": ["/matplotlib.py"]}
|
21,382
|
meenasirisha145/python18
|
refs/heads/master
|
/pybasic1.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 15 15:02:27 2018 by Meena Sirisha"""
import random
a=random.sample(range(1,101),100)
print(a)
print(min(a))
print(max(a))
b=sorted(a)
print(b)
def median(l):
    """Print the median of the sorted list *l*.

    BUG FIXES over the original:
    - the parity check was inverted (even-length lists printed a single
      middle element, odd-length lists averaged two);
    - `round(len(l)/2)` uses banker's rounding, which picks the wrong
      middle index for odd lengths (e.g. round(1.5) == 2). Integer
      division is used instead.
    """
    mid = len(l) // 2
    if len(l) % 2 == 0:
        # even count: average the two middle elements
        print((l[mid - 1] + l[mid]) / 2)
    else:
        # odd count: the single middle element
        print(l[mid])
median(b)
mean=sum(b)/len(b)
mean
from random import randint
random_list = []
for i in range(1,10):
random_list.append(randint(1,10))
# if I use sort there will be error in case of duplicates
print(sorted(random_list))
print(str(sorted(random_list)[1]) + " " + str(sorted(random_list)[-2]))
#Without sort function ??
print(random_list)
max=0
sec_max = 0
min = 99999999
sec_min = 0
for number in random_list:
if(number>max):
sec_max=max
max = number
if number > sec_max and number < max:
sec_max = number
if(number<min):
sec_min=min
min = number
if number<sec_min and number > min:
sec_min = number
print(str(sec_min) + " " + str(sec_max))
age=input('how old are you?')
age
|
{"/mat.py": ["/matplotlib.py"]}
|
21,383
|
meenasirisha145/python18
|
refs/heads/master
|
/asgn2.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 17 12:39:36 2018 by Meena Sirisha"""
import random
from random import randint
random.seed(123)
random_list = []
for i in range(1,10):
random_list.append(randint(1,10))
random_list
range(len(random_list))
newlist=random_list[:]
newlist
num=input("enter a number:")
for i in range(len(random_list)):
newlist[i]=random_list[i-int(num)]
print(newlist)
#%%------------------second Question-------------------%%#
l1=[]
for i in range(0,6):
num=input("enter a number?")
l1.append(int(num))
l2=sorted(l1)
for i in l2:
print("*" * i)
# Print a right-aligned triangle of '*' with `row` rows.
# BUG FIX: `row` was left as the str returned by input(), so the comparison
# `i <= row` raised TypeError (int vs str); convert it to int once up front.
row = int(input("enter a number"))
i = 1
j = row
while i <= row:
    print((j * ' '), i * '* ')
    j = j - 1
    i = i + 1
print((5 * ' ') + 3 * "* ")
k=int(input("Enter the number of rows"))
for i in range(0,k):
print(' '*(k-i),'* '*(i))
num=int(input("enter length of pyramid"))
hidden_triangle = num-1
i=1
while(i<=num):
if(hidden_triangle > 0):
print(hidden_triangle * " ",end='')
hidden_triangle-=1
print(i* "* ")
i+=1
|
{"/mat.py": ["/matplotlib.py"]}
|
21,384
|
meenasirisha145/python18
|
refs/heads/master
|
/np1/pandas.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 31 14:09:02 2018 by Meena Sirisha """
#####-----PANDAS-----########
# NOTE(review): interactive practice transcript -- bare expressions only
# display in a REPL/Spyder cell, not when run as a plain script.
import pandas as pd
pd.__version__
import tensorflow as tf
tf.__version__
# Series with the default integer index
data=pd.Series([0.25,0.5,0.75,1.0])
data
data.values
data[1]
# Series with an explicit string index
data=pd.Series([0.25,0.5,0.75,1.0],index=['a','b','c','d'])
data[0]
data['a']
data
# Explicit, non-contiguous integer index
data = pd.Series([0.25, 0.5, 0.75, 1.0],index=[2, 5, 3, 7])
data
data[5]
population_dict = {'California': 38332521,'Texas': 26448193,'New York': 19651127,
'Florida': 19552860,'Illinois': 12882135}
population = pd.Series(population_dict)
population
pd.Series({2:'a', 1:'b', 3:'c'}, index=[3, 2])
#Notice that in this case, the Series is populated only with the explicitly identified keys
#data can be a dictionary, in which index defaults to the sorted dictionary keys
pd.Series({2:'a', 1:'b', 3:'c'})
area_dict = {'California': 423967, 'Texas': 695662, 'New York': 141297,
'Florida': 170312, 'Illinois': 149995}
area = pd.Series(area_dict)
area
# DataFrame from two aligned Series
states = pd.DataFrame({'population': population,'area': area})
states
pop=[100,200,300]
ar=[10,20,30]
state=pd.DataFrame({'pop':pop,'ar':ar})
state
states.columns
pd.DataFrame(population,columns=['population'])
rollno=[1,2,3]
names=['a','b','c']
df=pd.DataFrame(rollno,columns=['rollnos'])
df
df['names']=names
df
df1=pd.DataFrame({'rollno':rollno,'names':names},columns=['rollno','names'])
df1
gender=['f','m','m']
df2=pd.DataFrame({'rollno':rollno,'names':names,'gender':gender},columns=['rollno','names','gender'])
df2
# columns= acts as a selector: 'gender' is dropped here
df3=pd.DataFrame({'rollno':rollno,'names':names,'gender':gender},columns=['rollno','names'])
df3
df4=pd.DataFrame(list(zip(rollno,names,gender)))
df4
df4.columns=['rollno','NAMES','gender']
df4
pd.DataFrame([{'a': 1, 'b': 2}, {'b': 3, 'c': 4}])
# NOTE(review): NameError -- numpy is never imported in this file (`np` undefined)
l= np.random.rand(3,2)
l
pd.DataFrame(l,columns=['foo',"bar"], index=['a','b','c'])
ind=pd.Index([2,3,5,7,11])
ind
# NOTE(review): this line deliberately raises TypeError to demonstrate immutability
ind[1]=0#Index is immutable
ind[::2]
ind1=pd.Index([1,3,5,7,9])
ind&ind1#intersection
ind|ind1#union
data
data.loc[2]#loc-location-explicit indexing
data.loc[2:7]
data.iloc[1:3]#iloc-index location-implicit indexing
data.iloc[0:3]
states['area']
states.area
states.population
states['density']=states.population/states.area
states
states.values[0]
states.iloc[:3,:2]
states.loc[:'Illinois',:'population']
# NOTE(review): .ix was removed in pandas 1.0 -- use .loc/.iloc instead
states.ix[:3,:'area']
|
{"/mat.py": ["/matplotlib.py"]}
|
21,385
|
meenasirisha145/python18
|
refs/heads/master
|
/bigmart/BM1.py
|
import numpy as np
import pandas as pd
import csv
import math
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error
# BigMart sales pipeline: impute -> label-encode -> random forest -> submission.
# ---- load training data and inspect missingness ----
train = pd.read_csv('train.csv')
train.isnull().sum()
# Impute Item_Weight with the per-item median, then fall back to the global
# median for items whose weight is missing in every row.
w = train.loc[train.Item_Weight.isnull(),'Item_Identifier']
train.loc[train.Item_Weight.isnull(),'Item_Weight'] = [train.loc[train.Item_Identifier==w.values[i],'Item_Weight'].median() for i in range(len(w))]
train.loc[train.Item_Weight.isnull(),'Item_Weight'] = train.loc[:,'Item_Weight'].median()
# Impute Outlet_Size with the modal size of the row's Outlet_Type.
z = train.loc[train.Outlet_Size.isnull(),'Outlet_Type']
train.loc[train.Outlet_Size.isnull(),'Outlet_Size'] = [train.loc[train.Outlet_Type==z.values[i],'Outlet_Size'].mode()[0] for i in range(len(z))]
# ---- build label-encoding dicts (reused below so test encoding matches train) ----
dict1 = dict(zip(list(train.Item_Identifier.unique()),range(len(list(train.Item_Identifier.unique())))))
dict2 = dict(zip(list(train.Item_Fat_Content.unique()),range(len(list(train.Item_Fat_Content.unique())))))
dict3 = dict(zip(list(train.Item_Type.unique()),range(len(list(train.Item_Type.unique())))))
dict4 = dict(zip(list(train.Outlet_Identifier.unique()),range(len(list(train.Outlet_Identifier.unique())))))
dict5 = dict(zip(list(train.Outlet_Size.unique()),range(len(list(train.Outlet_Size.unique())))))
dict6 = dict(zip(list(train.Outlet_Location_Type.unique()),range(len(list(train.Outlet_Location_Type.unique())))))
dict7 = dict(zip(list(train.Outlet_Type.unique()),range(len(list(train.Outlet_Type.unique())))))
train.loc[:,'Item_Identifier'] = [dict1[train.Item_Identifier.values[i]] for i in range(len(train))]
train.loc[:,'Item_Fat_Content'] = [dict2[train.Item_Fat_Content.values[i]] for i in range(len(train))]
train.loc[:,'Item_Type'] = [dict3[train.Item_Type.values[i]] for i in range(len(train))]
train.loc[:,'Outlet_Identifier'] = [dict4[train.Outlet_Identifier.values[i]] for i in range(len(train))]
train.loc[:,'Outlet_Size'] = [dict5[train.Outlet_Size.values[i]] for i in range(len(train))]
train.loc[:,'Outlet_Location_Type'] = [dict6[train.Outlet_Location_Type.values[i]] for i in range(len(train))]
train.loc[:,'Outlet_Type'] = [dict7[train.Outlet_Type.values[i]] for i in range(len(train))]
# ---- hold-out split (first 7600 rows train, rest validate) ----
target = train.Item_Outlet_Sales
train = train.drop(['Item_Outlet_Sales'],axis=1)
train1 = train[0:7600]
train2 = train[7600:]
target1 = target[0:7600]
target2 = target[7600:]
tra1 = np.array(train1)
tra2 = np.array(train2)
tar1 = np.array(target1)
tar2 = np.array(target2)
# NOTE(review): min_impurity_split and criterion='mse' are deprecated/removed
# in recent scikit-learn releases -- verify the pinned sklearn version.
model = RandomForestRegressor(n_estimators=200,criterion='mse',max_depth=None,min_samples_split=75,min_samples_leaf=1,max_features='auto',max_leaf_nodes=None,\
min_impurity_split=1e-07,bootstrap=True,oob_score=False,n_jobs=-1,random_state=79,verbose=1,warm_start=False)
model = model.fit(tra1,tar1)
# RMSE on the hold-out slice
scorer = mean_squared_error(tar2,model.predict(tra2))
print(math.sqrt(scorer))
# ---- apply the same imputation + encoding to the test set ----
test = pd.read_csv('test.csv')
test.isnull().sum()
wt = test.loc[test.Item_Weight.isnull(),'Item_Identifier']
test.loc[test.Item_Weight.isnull(),'Item_Weight'] = [test.loc[test.Item_Identifier==wt.values[i],'Item_Weight'].median() for i in range(len(wt))]
test.loc[test.Item_Weight.isnull(),'Item_Weight'] = test.loc[:,'Item_Weight'].median()
zt = test.loc[test.Outlet_Size.isnull(),'Outlet_Type']
test.loc[test.Outlet_Size.isnull(),'Outlet_Size'] = [test.loc[test.Outlet_Type==zt.values[i],'Outlet_Size'].mode()[0] for i in range(len(zt))]
# keep the raw identifiers for the submission file before they are encoded
itemsid = test.Item_Identifier
storeid = test.Outlet_Identifier
# NOTE(review): dictN[...] raises KeyError for categories unseen in train -- verify.
test.loc[:,'Item_Identifier'] = [dict1[test.Item_Identifier.values[i]] for i in range(len(test))]
test.loc[:,'Item_Fat_Content'] = [dict2[test.Item_Fat_Content.values[i]] for i in range(len(test))]
test.loc[:,'Item_Type'] = [dict3[test.Item_Type.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Identifier'] = [dict4[test.Outlet_Identifier.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Size'] = [dict5[test.Outlet_Size.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Location_Type'] = [dict6[test.Outlet_Location_Type.values[i]] for i in range(len(test))]
test.loc[:,'Outlet_Type'] = [dict7[test.Outlet_Type.values[i]] for i in range(len(test))]
# ---- predict and write the submission ----
tester = np.array(test)
pred = model.predict(tester)
submission = pd.DataFrame(itemsid,columns=['Item_Identifier'])
submission['Outlet_Identifier'] = storeid
submission['Item_Outlet_Sales'] = pred
submission.to_csv('BM1.csv',index=False)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,386
|
meenasirisha145/python18
|
refs/heads/master
|
/pandasdata.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 13 15:08:17 2018 by Meena Sirisha"""
# NOTE(review): interactive pandas practice transcript -- bare expressions
# only display in a REPL/Spyder cell, not when run as a plain script.
#%% Data Creation----
import numpy as np
rollnosL=[101,102,103,104,105,106,107,108,109,110,111]
namesL=["meena","apoorva","kaustav","shubham","goldie","hitesh","shruti","vijay","lalit","achal","varun"]
genderL=['F','F','M','M','M','M','F','M','M','M','M']
pythonL=np.random.randint(60,90,11)
sasL=np.random.randint(60,90,11)
import pandas as pd
# Series of names indexed by roll number
series=pd.Series(namesL,index=rollnosL)
type(series)
series.index=rollnosL
series
# Membership tests look at the index (roll numbers), not the values.
111 in series
112 in series
print(series.index)
print(series.iteritems)
series.keys()
series.values
series.iteritems
list(series.items())
list(series.items())[1:5]
series[110]="achal kumar"
series
series=="shruti"
series[:5]
# label-based slice: both endpoints inclusive
series[101:105]
series.iloc[0:5]
series.iloc[0]##implicit indexing
series.loc[101]##explicit indexing
series[0:1]
series.loc[103:110]
# NOTE(review): .ix was removed in pandas 1.0 -- use .loc/.iloc instead
series.ix[108]
rollno=pd.Series(rollnosL)
gender=pd.Series(genderL)
python=pd.Series(pythonL)
sas=pd.Series(sasL)
name=pd.Series(namesL)
# column-wise concat of individual Series into a single DataFrame
df=pd.concat([name,gender,python,sas],axis=1)
df
df.index=rollno
df
df.columns=("name","gender","python","sas")
df
df1=pd.DataFrame({'rollno':rollnosL,'name':namesL,'gender':genderL,'python':pythonL,'sas':sasL})
df1
df1.index=rollno
df1
# columns= pins the column order explicitly
df2=pd.DataFrame({'rollno':rollnosL,'name':namesL,'gender':genderL,'python':pythonL,'sas':sasL},columns=['rollno','name','gender','python','sas'])
df2
df2.index=rollno
df2
df2.transpose()
df2.T
df2.loc[101]
df2.values[0]
df2.iloc[0:1]
df2.name
df2[0:3]
df2.iloc[0:3,0:2]
df2.loc[101:105,:"python"]
df2.iloc[0:5,0:2]
df2['total']=df2['python']+df2['sas']
df2
# boolean-mask row filter
df2[df2['total']>150]
hadoopL=np.random.randint(70,90,11)
feesL=np.random.randint(100000,150000,11)
courseL=["pg","pg","msc","msc","pg","pg","pg","pg","pg","pg","bsc"]
hadoopL=np.random.randint(60,90,11)
hostelL=[True,False,True,False,False,False,False,True,True,True,False]
df2
|
{"/mat.py": ["/matplotlib.py"]}
|
21,387
|
meenasirisha145/python18
|
refs/heads/master
|
/Assignment/asgn2.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 11:42:36 2018 by Meena Sirisha"""
import requests #used for calling url
import csv
from bs4 import BeautifulSoup #converts the text into structured form
# Download one EDGAR filing and locate its "Item 7" (MD&A) section.
page=requests.get("https://www.sec.gov/Archives/edgar/data/3662/0000889812-99-003241.txt")
type(page)
data=str(page.content)
type(data)
import re
# FIX: raw string -- "\d" and "\." are regex escapes, and un-raw "\d" is an
# invalid string escape (SyntaxWarning, and an error in future Python).
# Regex behavior is unchanged: the regex engine treats r"\n" as a newline too.
r = re.findall(r"\nITEM \d+\. MANAGEMENT'S DISCUSSION AND ANALYSIS .*?(?:\nITEM|\nPART)", data,re.S)
r
# Manual probes for the section boundaries past the 10000-char header.
data.find("ITEM 7")
data1=data[10000:]
data1.find("ITEM 7")
data1.find("ITEM 8")
data1
|
{"/mat.py": ["/matplotlib.py"]}
|
21,388
|
meenasirisha145/python18
|
refs/heads/master
|
/np1/numpy2.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 1 11:17:59 2018 by Meena Sirisha """
# NOTE(review): interactive NumPy practice transcript. Bare expressions only
# display in a REPL, and the %timeit lines below are IPython magics, so this
# file is not runnable as a plain Python script.
import numpy as np
from numpy import *
a=np.array([[0,1,2,3],[10,11,12,13]])
a
a.shape
np.shape(a)
#%%numpy arrays
type(a)
a.size
size(a)
a.ndim
a1=array([[1,2,3],[4,5,6]],float)
a1
a1.shape
type(a1[0,0]) is type(a1[1,2])
a1.dtype
a1.dtype.type
a1.itemsize
# basic slicing returns a view: writes through b are visible in a1
b=a1[:,::2]
b
b[0,1]=100
b
a1
# .copy() detaches the slice, so this write does not touch a1
b=a1[:,::2].copy()
b[0,1]=99
b
a1
#%%fancy indexing
a=arange(0,80,10)
a
y=a[[1,2,-3]]
y
z=take(a,[1,2,-3])
z
type([a[1],a[2],a[-3]])
type(z)
ind=[1,2,-3]
x=take(a,ind)
x
#%%masking
mask=array([0,1,1,0,1,1,0,0],dtype=bool)
mask1=array([True,False,True,False,True,False,False,False],dtype=bool)
x=a[mask]
x
y=compress(mask,a)
y
z=a[mask1]
z
x=arange(0,36)
x
x=x.reshape(6,6)
# paired row/column index arrays select the diagonal-offset elements
x[(0,1,2,3,4),(1,2,3,4,5)]
x[3:,[0,2,5]]
x
mask=array([1,0,1,0,0,1],dtype=bool)
x[mask,2]
#%%array calculation methods
a1
a1[0,2]=3
a1
sum(a)
%timeit sum(a1)
%timeit a1.sum()
sum(a1,axis=0)
sum(a1,axis=-1)
prod(a1,axis=0)#columnwise for axis=0
prod(a1,axis=1)#rowwise for axis=1
a1.min(axis=0)
a1.min(axis=1)
amin(a1,axis=0)
argmin(a1,axis=0)
argmin(a1,axis=1)
a1.max(axis=None)
a1.max(axis=1)
a1.argmax(axis=0)
a1.argmax(axis=1)
a1.mean(axis=0)
a1.mean(axis=1)
average(a1,axis=None)
average(a1,weights=[1,2],axis=0)
a1.std(axis=0)
a1.clip(3,5)
a1
a1.ptp(axis=0)#max-min=range
a1.ptp(axis=None)#range for entire array
#%%Comparison and Logical Operators
a=array([1,2,3,4])
b=array([4,5,6,7])
a+b
a==b
a<=b
a>=b
a!=b
a1=array([[1,2,3],[4,5,6],[7,8,9]],float)
a1
average(a1,weights=[1,2,3],axis=0)
#%%Shape Operations
a1.flatten()#converts multi-dimensional array to 1D array
a1
a1.ravel()#same as flatten but returns a view if possible
id(a1)
b=a1
id(b)
c=a1.copy()
id(c)
a1.swapaxes(0,1)#transpose
a1
# resize mutates `a` in place to shape (4, 2)
a.resize(4,2)
a
np.array([1,2,3,4,5,6]).reshape(3,2)
np.array([1,2,3,4,5,6]).reshape(6,1).squeeze()
np.array([1,2,3,4,5,6]).reshape(1,6).squeeze()
a.T ##transpose
a.squeeze()#removes length-1 axes (no-op here, not a transpose)
a
a.T
a[2,0]=0
a
a.nonzero()
a.cumsum(axis=None)
a1.cumprod(axis=None)
|
{"/mat.py": ["/matplotlib.py"]}
|
21,392
|
oswaldoneto/trokeybe
|
refs/heads/master
|
/landing/models.py
|
from django.db import models
class Registro(models.Model):
    """Landing-page lead: contact data captured by the registration form."""
    nome = models.CharField(max_length=150)
    telefone = models.CharField(max_length=150)
    email = models.EmailField()
class Anuncio(models.Model):
    """A car ad attached to a Registro: either an offer to sell or to buy."""
    VENDER = 'V'
    COMPRAR = 'C'
    TIPO = (
        (VENDER, 'VENDER'),
        (COMPRAR, 'COMPRAR'),
    )
    # FIX: on_delete is mandatory from Django 2.0 on; CASCADE mirrors what the
    # generated initial migration already uses for this relation.
    registro = models.ForeignKey(Registro, on_delete=models.CASCADE)
    tipo = models.CharField(max_length=1, choices=TIPO)
    marca = models.CharField(max_length=100)
    modelo = models.CharField(max_length=100)
    # NOTE(review): stored as CharField here but IntegerField in the initial
    # migration -- model and schema disagree; verify which is intended.
    ano = models.CharField(max_length=10)
    valor = models.CharField(max_length=100)
|
{"/landing/admin.py": ["/landing/models.py"], "/landing/views.py": ["/landing/forms.py", "/landing/mailing.py", "/landing/models.py"]}
|
21,393
|
oswaldoneto/trokeybe
|
refs/heads/master
|
/landing/admin.py
|
from django.contrib import admin
from landing.models import Registro, Anuncio
class VendoAdmin(admin.TabularInline):
    # Inline editor so a Registro's Anuncio rows are edited on the Registro page.
    model = Anuncio
class RegistroAdmin(admin.ModelAdmin):
    # Columns shown in the Registro change list.
    list_display = ('nome', 'telefone', 'email',)
    inlines = [VendoAdmin, ]
# Register Registro with its customized admin; Anuncio is managed inline only.
admin.site.register(Registro, RegistroAdmin)
|
{"/landing/admin.py": ["/landing/models.py"], "/landing/views.py": ["/landing/forms.py", "/landing/mailing.py", "/landing/models.py"]}
|
21,394
|
oswaldoneto/trokeybe
|
refs/heads/master
|
/landing/mailing.py
|
import sendgrid
from sendgrid.helpers.mail.mail import Mail, Email, Personalization, Substitution, Content
# SECURITY(review): live SendGrid API key committed to source control --
# this credential should be revoked and loaded from an environment variable.
API_KEY = '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'
def send_welcome(email, nome):
    """Send the SendGrid welcome template to *email*, greeting *nome*.

    Also BCCs the site owners. Uses the module-level API_KEY.
    """
    def prepare_data(email, nome):
        # Build the SendGrid v3 mail payload for the welcome template.
        # NOTE(review): the set_* style belongs to an older sendgrid-python
        # release -- pin the dependency version or this breaks on upgrade.
        mail = Mail()
        mail.set_from(Email('contato@trokey.com.br'))
        mail.set_subject('Seu registro no trokey.com.br')
        mail.set_template_id('582e8167-1737-4482-afd8-3145cacbd3dd')
        personalization = Personalization()
        mails_bcc = ['picobasa@gmail.com', 'oswaldo.neto@gmail.com',]
        for mail_bcc in mails_bcc:
            personalization.add_bcc(Email(mail_bcc))
        personalization.add_to(Email(email))
        # 'nome' placeholder inside the template gets the user's name
        personalization.add_substitution(Substitution('nome', nome ))
        mail.add_personalization(personalization)
        # template requires a (dummy) content part
        mail.add_content(Content("text/html", " "))
        return mail.get()
    sg = sendgrid.SendGridAPIClient(apikey=API_KEY)
    # prepare api send request data
    data = prepare_data(email, nome)
    # api send call
    # NOTE(review): response is never checked -- send failures go unnoticed.
    response = sg.client.mail.send.post(request_body=data)
|
{"/landing/admin.py": ["/landing/models.py"], "/landing/views.py": ["/landing/forms.py", "/landing/mailing.py", "/landing/models.py"]}
|
21,395
|
oswaldoneto/trokeybe
|
refs/heads/master
|
/landing/views.py
|
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
from landing import forms
from landing.forms import RegistroForm
from landing.mailing import send_welcome
from landing.models import Registro, Anuncio
class IndexView(TemplateView):
    """Landing page; tells the template whether the visitor just registered."""
    template_name = 'index.html'

    def get_context_data(self, **kwargs):
        context = super(IndexView, self).get_context_data(**kwargs)
        # Query parameter 'registered' defaults to the string 'false' when absent.
        context['registered'] = self.request.GET.get('registered', 'false')
        return context
class RegistroView(FormView):
    """Handle the registration form: create the Registro and its two ads."""
    template_name = 'register.html'
    form_class = RegistroForm
    success_url = '/?registered=true'

    def form_valid(self, form):
        # super() builds the redirect response; renamed from the misleading
        # 'cleaned_data' -- the value is an HttpResponse, not form data.
        response = super(RegistroView, self).form_valid(form)
        dados = form.cleaned_data
        registro = Registro.objects.create(
            nome=dados['nome'], email=dados['email'], telefone=dados['telefone'])
        # One "sell" ad and one "buy" ad per registration.
        Anuncio.objects.create(
            marca=dados['marca_vender'], modelo=dados['modelo_vender'],
            ano=dados['ano_vender'], valor=dados['valor_vender'],
            tipo=Anuncio.VENDER, registro=registro)
        Anuncio.objects.create(
            marca=dados['marca_comprar'], modelo=dados['modelo_comprar'],
            ano=dados['ano_comprar'], valor=dados['valor_comprar'],
            tipo=Anuncio.COMPRAR, registro=registro)
        # send_welcome(dados['email'], dados['nome'])  -- mailing currently disabled
        return response
|
{"/landing/admin.py": ["/landing/models.py"], "/landing/views.py": ["/landing/forms.py", "/landing/mailing.py", "/landing/models.py"]}
|
21,396
|
oswaldoneto/trokeybe
|
refs/heads/master
|
/landing/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-11 02:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the landing app (Registro + Anuncio).

    Do not edit by hand; create a new migration for schema changes.
    NOTE(review): 'ano' is an IntegerField here but a CharField in models.py --
    the schema and the model disagree; verify which is intended.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Anuncio',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tipo', models.CharField(choices=[('V', 'VENDER'), ('C', 'COMPRAR')], max_length=1)),
                ('marca', models.CharField(max_length=100)),
                ('modelo', models.CharField(max_length=100)),
                ('ano', models.IntegerField()),
                ('valor', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Registro',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=150)),
                ('telefone', models.CharField(max_length=150)),
                ('email', models.EmailField(max_length=254)),
            ],
        ),
        migrations.AddField(
            model_name='anuncio',
            name='registro',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='landing.Registro'),
        ),
    ]
|
{"/landing/admin.py": ["/landing/models.py"], "/landing/views.py": ["/landing/forms.py", "/landing/mailing.py", "/landing/models.py"]}
|
21,397
|
oswaldoneto/trokeybe
|
refs/heads/master
|
/landing/forms.py
|
from django import forms
class RegistroForm(forms.Form):
    """Landing-page registration form: contact data plus two optional car
    blocks -- "vender" (the car offered) and "comprar" (the car wanted).

    Each block must be either completely empty or completely filled, and at
    least one block must be filled.
    """
    nome = forms.CharField(required=True)
    email = forms.CharField(required=False)
    telefone = forms.CharField(required=True)
    marca_vender = forms.CharField(required=False)
    modelo_vender = forms.CharField(required=False)
    ano_vender = forms.CharField(required=False)
    valor_vender = forms.CharField(required=False)
    marca_comprar = forms.CharField(required=False)
    modelo_comprar = forms.CharField(required=False)
    ano_comprar = forms.CharField(required=False)
    valor_comprar = forms.CharField(required=False)

    def _campos_anuncio(self, cleaned_data, sufixo):
        # Collect the four car fields of one block; '' guards against keys
        # missing from cleaned_data after earlier field errors.
        return [cleaned_data.get(campo + '_' + sufixo) or ''
                for campo in ('marca', 'modelo', 'ano', 'valor')]

    def clean(self):
        cleaned_data = super(RegistroForm, self).clean()
        telefone = cleaned_data.get('telefone')
        # A formatted Brazilian phone number is at least 14 characters.
        if telefone and len(telefone) < 14:
            raise forms.ValidationError('Número de telefone inválido.')
        vender = self._campos_anuncio(cleaned_data, 'vender')
        comprar = self._campos_anuncio(cleaned_data, 'comprar')
        # Each block is valid only when untouched or fully filled in.
        if any(vender) and not all(vender):
            raise forms.ValidationError('Preencha os dados do seu carro ou deixe os campos em branco.')
        if any(comprar) and not all(comprar):
            raise forms.ValidationError('Preencha os dados do carro que deseja ou deixe os campos em branco.')
        # At least one of the two blocks must be provided.
        if not any(vender) and not any(comprar):
            raise forms.ValidationError(
                'Você precisa preencher os dados do seu carro, do carro que deseja ou de ambos para continuar')
        return cleaned_data
|
{"/landing/admin.py": ["/landing/models.py"], "/landing/views.py": ["/landing/forms.py", "/landing/mailing.py", "/landing/models.py"]}
|
21,415
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/admin.py
|
from django.contrib import admin
from .models import Task, Page
# Expose both crawler models in the Django admin with default options.
admin.site.register([Task, Page])
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,416
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/urls.py
|
from django.urls import path
from .views import get_task, post_task
# Spider endpoints: 'post' creates a crawl task, 'get' fetches a task's state.
urlpatterns = [
    path('post', post_task, name='post task'),
    path('get', get_task, name='get task')
]
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,417
|
Circles24/watson
|
refs/heads/master
|
/watson/authentication/views.py
|
from rest_framework.response import Response
from rest_framework.decorators import api_view
from .serializers import RegisterSerializer
@api_view(['POST'])
def register(request):
    """Create a user account from the posted registration payload.

    Returns 200 on success, 400 with serializer errors on invalid input,
    and 500 on any unexpected failure.
    """
    try:
        serializer = RegisterSerializer(data=request.data)
        # Guard clause: invalid payloads are rejected immediately.
        if not serializer.is_valid():
            return Response(serializer.errors, 400)
        serializer.save()
        return Response({'msg':'operation successful'})
    except Exception as ex:
        # Boundary handler: never leak a stack trace to the client.
        print('internal server error -> ', ex)
        return Response({'msg':'internal server error'}, 500)
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,418
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/migrations/0003_page_urls.py
|
# Generated by Django 3.1.4 on 2020-12-06 14:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds Page.urls, the JSON list of links found on the page.

    Do not edit by hand; create a new migration for schema changes.
    """
    dependencies = [
        ('spider', '0002_task_freq_data'),
    ]
    operations = [
        migrations.AddField(
            model_name='page',
            name='urls',
            field=models.JSONField(default=list),
        ),
    ]
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,419
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/crawl_engine.py
|
from .models import Task, Page
from django.utils import timezone
from django.conf import settings
from queue import Queue
from datetime import timedelta
from bs4 import BeautifulSoup
from bs4.element import Comment
from celery import shared_task
from .stop_words import stop_words
import time
import random
import logging
import requests
import os
import re
# Dedicated crawler logger writing INFO+ records to the configured log file.
logger = logging.getLogger('crawler')
logger.setLevel(logging.INFO)
fh = logging.FileHandler(filename=settings.CRAWLER_LOG_FILE)
fh.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)
# Browser-like request headers so target sites serve the crawler normal pages.
headers = {
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
    'referrer': 'https://google.com',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'en-US,en;q=0.9',
    'Pragma': 'no-cache',
}
def is_url_valid(url):
    """Return True when *url* answers a HEAD request with 200 and an HTML body.

    Best-effort probe: any network or parsing failure counts as invalid.
    """
    try:
        res = requests.head(url, headers=headers)
        # requests exposes response headers through a case-insensitive mapping,
        # so one lookup replaces the original loop over four spellings of
        # Content-Type (which also forgot to break on a match).
        content_type = res.headers.get('content-type', '')
        return res.status_code == 200 and content_type.startswith('text/html')
    except Exception:
        return False
def tag_visible(element):
    """Return False for text nodes that never render: children of
    style/script/head/title/meta/[document], and HTML comments."""
    hidden_parents = ['style', 'script', 'head', 'title', 'meta', '[document]']
    return not (element.parent.name in hidden_parents
                or isinstance(element, Comment))
def get_text_from_soup(soup):
    """Join all visible text nodes of *soup* into one space-separated string."""
    visible = (node.strip() for node in soup.findAll(text=True) if tag_visible(node))
    return u" ".join(visible)
def random_delay():
    """Sleep a randomly chosen number of seconds to pace crawler requests."""
    time.sleep(random.choice([7, 4, 6, 2, 10, 19]))
def crawl_data(url, levels, token_file):
    """BFS-crawl from *url* up to *levels* deep, appending page text to *token_file*.

    Pages crawled within the last 3 days are reused from the Page cache
    instead of being fetched again.
    """
    process_queue = Queue()
    process_queue.put({'level': 0, 'url': url})
    while not process_queue.empty():
        data = process_queue.get()
        try:
            level = data['level']
            url = data['url']
            page = None
            urls = []
            text = None
            need_to_crawl = True
            # Serve from the cache when the stored page is at most 3 days old.
            if Page.objects.filter(url=url).count() > 0:
                page = Page.objects.filter(url=url).first()
                delta = timezone.now() - page.last_updated_at
                if delta.days <= 3:
                    text = page.text
                    urls = page.urls
                    need_to_crawl = False
            logger.info(f'level:::{level} url:::{url} need_to_crawl:::{need_to_crawl}')
            if need_to_crawl == True:
                random_delay()
                html_content = requests.get(url, headers=headers).content
                soup = BeautifulSoup(html_content, 'html.parser')
                text = get_text_from_soup(soup)
                links = soup.findAll('a')
                for link in links:
                    try:
                        random_delay()
                        # BUG FIX: validate the linked URL, not the page we are
                        # already on (original checked is_url_valid(url)).
                        if link['href'].startswith('http') and is_url_valid(link['href']) == True:
                            urls.append(link['href'])
                        elif link['href'].startswith('/'):
                            # NOTE(review): naive join breaks when `url` already
                            # has a path -- consider urllib.parse.urljoin.
                            temp_url = url + link['href']
                            if is_url_valid(temp_url) == True:
                                urls.append(temp_url)
                    except Exception as ex:
                        logger.error(f'level:::{level} url:::{url} error while processing links::{ex}')
                logger.info(f'level:::{level} url:::{url} processed webpage')
            # BUG FIX: write() instead of writelines() -- text is one string,
            # and writelines iterated it character by character.
            token_file.write(text)
            if level + 1 <= levels:
                # BUG FIX: distinct loop variable -- the original reused `url`,
                # so the Page row below was saved under the LAST child link.
                for child_url in urls:
                    if is_url_valid(child_url):
                        logger.info(f'level:::{level+1} url:::{child_url} added url')
                        process_queue.put({'level': level + 1, 'url': child_url})
            if page is None:
                page = Page.objects.create(url=url, text=text, urls=urls, last_updated_at=timezone.now())
            elif need_to_crawl == True:
                page.text = text
                page.urls = urls
                page.last_updated_at = timezone.now()
                page.save()
        except Exception as ex:
            # BUG FIX: include the exception so failures are diagnosable.
            logger.error(f'level:::{level} url:::{url} error while processing given web page::{ex}')
def digest_token_file(token_file):
    """Build a word -> frequency dict from *token_file*, skipping stop words.

    Words are whitespace-separated tokens; no normalization is applied.
    """
    logger.info('digesting tokens')
    freq_data = {}
    # Iterate the file directly instead of materializing readlines().
    for line in token_file:
        for word in line.split():
            if word not in stop_words:
                # dict.get collapses the original if/else counting branches.
                freq_data[word] = freq_data.get(word, 0) + 1
    logger.info('done with token digestion')
    return freq_data
@shared_task
def start_processing(task_id):
    """Celery task: crawl the task's URL tree, digest word frequencies,
    store them on the Task and mark it completed.

    Status transitions: 'i' (initiated) on start, 'c' (completed) on finish.
    """
    task = Task.objects.filter(id=task_id).first()
    token_path = os.path.join(settings.DATA_DUMP, f'{task_id}.txt')
    task.status = 'i'
    task.save()
    # FIX: context managers guarantee the token file is closed even when the
    # crawl or the digestion raises (original leaked the handle on error).
    with open(token_path, 'w') as token_file:
        crawl_data(task.url, task.level, token_file)
    with open(token_path, 'r') as token_file:
        task.freq_data = digest_token_file(token_file)
    task.finished_at = timezone.now()
    task.status = 'c'
    task.save()
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,420
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/migrations/0001_initial.py
|
# Generated by Django 3.1.4 on 2020-12-05 18:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial schema for the spider app (Page + Task).

    Do not edit by hand; create a new migration for schema changes.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Page',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('last_updated_at', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.CharField(max_length=200)),
                ('level', models.IntegerField()),
                ('status', models.CharField(choices=[('p', 'pending'), ('i', 'initiated'), ('c', 'completed')], default='p', max_length=1)),
                ('started_at', models.DateTimeField(default=django.utils.timezone.now)),
                ('finished_at', models.DateTimeField(null=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,421
|
Circles24/watson
|
refs/heads/master
|
/watson/authentication/serializers.py
|
from rest_framework import serializers
from django.contrib.auth.models import User
class RegisterSerializer(serializers.Serializer):
    """Registration payload: rejects duplicate username/email, creates the user."""
    username = serializers.CharField()
    email = serializers.EmailField()
    password = serializers.CharField()

    def validate_username(self, username):
        # exists() is cheaper than count() for a pure membership test.
        if User.objects.filter(username=username).exists():
            raise serializers.ValidationError(f'username {username} already present')
        return username

    def validate_email(self, email):
        if User.objects.filter(email=email).exists():
            raise serializers.ValidationError(f'email {email} already present')
        return email

    def save(self):
        # create_user hashes the password; returning the instance follows the
        # DRF save() convention (original returned None -- backward compatible).
        return User.objects.create_user(username=self.validated_data['username'],
                                        email=self.validated_data['email'],
                                        password=self.validated_data['password'])
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,422
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/serializers.py
|
from rest_framework import serializers
from .models import Task
from .crawl_engine import start_processing
import requests
def is_url_valid(url):
    """Return True when *url* answers a HEAD request with 200 and an HTML body.

    NOTE(review): near-duplicate of crawl_engine.is_url_valid (which also sends
    browser headers) -- consider sharing one implementation.
    """
    try:
        res = requests.head(url)
        # requests headers are case-insensitive, so one lookup replaces the
        # original loop over four spellings of Content-Type.
        content_type = res.headers.get('content-type', '')
        return res.status_code == 200 and content_type.startswith('text/html')
    except Exception:
        # Best-effort probe: any network failure means "not valid".
        return False
class TaskSerializer(serializers.ModelSerializer):
    """Full-field serializer for crawl Tasks; can also enqueue the crawl."""
    class Meta:
        model = Task
        fields = '__all__'

    def validate_url(self, url):
        # Idiom fix: `not ...` instead of `== False`.
        if not is_url_valid(url):
            raise serializers.ValidationError('invalid url')
        return url

    def save_and_start_processing(self):
        # Persist first so the celery worker can look the task up by id.
        self.save()
        start_processing.delay(self.data['id'])
class GetTaskSerializer(serializers.Serializer):
    """Validates a task id and fetches the corresponding Task."""
    id = serializers.IntegerField()

    def validate_id(self, id):
        # exists() avoids counting every row just to test membership.
        if not Task.objects.filter(id=id).exists():
            raise serializers.ValidationError(f'no task with id::{id} exists')
        return id

    def get_task(self):
        return Task.objects.filter(id=self.validated_data['id']).first()
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,423
|
Circles24/watson
|
refs/heads/master
|
/watson/authentication/urls.py
|
from django.urls import path
from rest_framework_simplejwt.views import TokenObtainPairView
from .views import register
# Authentication routes: JWT login plus local registration.
urlpatterns = [
    # Issues an access/refresh token pair via django-rest-framework-simplejwt.
    path('login', TokenObtainPairView.as_view(), name='login'),
    # Account creation; implemented in .views.register.
    path('register', register, name='register')
]
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,424
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/migrations/0002_task_freq_data.py
|
# Generated by Django 3.1.4 on 2020-12-05 18:58
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds Task.freq_data, a JSON column (presumably for word-frequency
    # results produced by the crawler — confirm against crawl_engine).
    dependencies = [
        ('spider', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='task',
            name='freq_data',
            field=models.JSONField(default=dict),
        ),
    ]
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,425
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/views.py
|
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from .models import Task
from .serializers import TaskSerializer, GetTaskSerializer
@api_view(['POST'])
@permission_classes([IsAuthenticated])
def post_task(request):
    """Create a crawl Task for the authenticated user and start processing it."""
    try:
        payload = dict(request.data)
        # Attach the requesting user's pk; the serializer's Task FK needs it.
        payload['user'] = request.user.pk
        task_serializer = TaskSerializer(data=payload)
        if not task_serializer.is_valid():
            return Response(task_serializer.errors, 400)
        task_serializer.save_and_start_processing()
        return Response({'msg':'operation successful'})
    except Exception as ex:
        print('internal server error ->', ex)
        return Response({'msg':'internal server error'}, 500)
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def get_task(request):
    """Return the serialized Task named in the request body.

    Responds 400 on validation errors, 401 when the task belongs to another
    user, and 500 on unexpected failures.
    """
    try:
        serializer = GetTaskSerializer(data=request.data)
        if serializer.is_valid():
            # Fetch once and reuse; the original queried the DB twice.
            task = serializer.get_task()
            # Authorize before doing any serialization work.
            if task.user != request.user:
                return Response({'msg':'unauthorized operation'}, 401)
            return Response(TaskSerializer(task).data)
        else :
            return Response(serializer.errors, 400)
    except Exception as ex:
        print('internal server error ->', ex)
        return Response({'msg':'internal server error'}, 500)
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,426
|
Circles24/watson
|
refs/heads/master
|
/watson/spider/models.py
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
class Task(models.Model):
    """A crawl job submitted by a user."""
    user = models.ForeignKey(to=User, on_delete=models.CASCADE)  # owner; tasks die with the user
    url = models.CharField(max_length=200)  # root URL to crawl
    level = models.IntegerField()  # presumably the crawl depth — confirm in crawl_engine
    status_choices = (('p', 'pending'), ('i', 'initiated'), ('c', 'completed'))
    status = models.CharField(default='p', max_length=1, choices=status_choices)
    started_at = models.DateTimeField(default=timezone.now)
    finished_at = models.DateTimeField(null=True)  # stays NULL until the crawl completes
    freq_data = models.JSONField(default=dict)  # presumably word-frequency results — confirm
class Page(models.Model):
    """A fetched page cached by the crawler."""
    url = models.CharField(max_length=200)
    text = models.TextField()  # page text content
    last_updated_at = models.DateTimeField()
    urls = models.JSONField(default=list)  # presumably links extracted from the page — confirm
|
{"/watson/spider/admin.py": ["/watson/spider/models.py"], "/watson/spider/urls.py": ["/watson/spider/views.py"], "/watson/authentication/views.py": ["/watson/authentication/serializers.py"], "/watson/spider/crawl_engine.py": ["/watson/spider/models.py"], "/watson/spider/serializers.py": ["/watson/spider/models.py", "/watson/spider/crawl_engine.py"], "/watson/authentication/urls.py": ["/watson/authentication/views.py"], "/watson/spider/views.py": ["/watson/spider/models.py", "/watson/spider/serializers.py"]}
|
21,490
|
ericflo/awesomestream
|
refs/heads/master
|
/tests/test_utils.py
|
import unittest
from awesomestream.utils import permutations
class PermutationsTest(unittest.TestCase):
    """Behavioural checks for awesomestream.utils.permutations."""

    def test_multiple_permutations(self):
        source = [[1, 2, 3], [4, 5], [6, 7]]
        # Odometer order: the first axis varies fastest.
        expected = [
            [1, 4, 6], [2, 4, 6], [3, 4, 6],
            [1, 5, 6], [2, 5, 6], [3, 5, 6],
            [1, 4, 7], [2, 4, 7], [3, 4, 7],
            [1, 5, 7], [2, 5, 7], [3, 5, 7],
        ]
        self.assertEqual(list(permutations(source)), expected)

    def test_single_permutation(self):
        # A single one-element list yields exactly one row.
        self.assertEqual(list(permutations([['asdf']])), [['asdf']])

    def test_double_permutation(self):
        # One two-element list yields two single-item rows.
        self.assertEqual(list(permutations([['1', '2']])), [['1'], ['2']])
|
{"/tests/test_utils.py": ["/awesomestream/utils.py"], "/examples/sqlite_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"], "/tests/test_backend.py": ["/awesomestream/backends.py"], "/awesomestream/repl.py": ["/awesomestream/jsonrpc.py"], "/awesomestream/backends.py": ["/awesomestream/utils.py"], "/examples/redis_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"]}
|
21,491
|
ericflo/awesomestream
|
refs/heads/master
|
/awesomestream/jsonrpc.py
|
import sys
import traceback
from uuid import uuid1
from urlparse import urlparse
from httplib import HTTPConnection
from simplejson import dumps, loads
from werkzeug import Request, Response
from werkzeug.exceptions import HTTPException, NotFound, BadRequest
def create_app(backend):
    """Wrap *backend* in a WSGI app speaking a JSON-RPC-style protocol.

    Each request is a POST whose JSON body looks like
    ``{"id": ..., "method": name, "args": [...], "kwargs": {...}}``; the
    response echoes the id and carries either ``result`` or a formatted
    ``error`` traceback.  NOTE: Python 2 only (``except E, e`` syntax,
    ``iteritems``; the module imports ``httplib``/``urlparse``).
    """
    @Request.application
    def application(request):
        try:
            # Parse the JSON in the request
            try:
                data = loads(request.stream.read())
            except ValueError:
                raise BadRequest()
            # Grab the function to execute
            try:
                method = getattr(backend, data['method'])
            except (KeyError, IndexError):
                raise BadRequest()
            if method is None:
                raise NotFound()
            # Get the args and kwargs
            args = data.get('args', [])
            kwargs = data.get('kwargs', {})
            # JSON keys arrive as unicode; Python 2 keyword names must be
            # byte strings, hence the utf-8 encode.
            kwargs = dict(((k.encode('utf-8'), v) for k, v in kwargs.iteritems()))
            # Attempt to call the method with the params, or catch the
            # exception and pass that back to the client
            try:
                response = Response(dumps({
                    'id': data.get('id'),
                    'result': method(*args, **kwargs),
                    'error': None,
                }))
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception, e:
                print e
                response = Response(dumps({
                    'id': data.get('id'),
                    'result': None,
                    'error': ''.join(traceback.format_exception(*sys.exc_info())),
                }))
            # Finish up and return the response
            response.headers['Content-Type'] = 'application/json'
            response.headers['Content-Length'] = len(response.data)
            response.status_code = 200
            return response
        except HTTPException, e:
            # If an http exception is caught we can return it as response
            # because those exceptions render standard error messages when
            # called as wsgi application.
            return e
    return application
def run_server(app, port, numthreads=10):
    """Serve *app* forever on 0.0.0.0:*port* with CherryPy's threaded WSGI server.

    Ctrl-C stops the server cleanly instead of leaking worker threads.
    """
    from cherrypy import wsgiserver
    server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', port), app,
                                           numthreads=numthreads)
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()
class Client(object):
    """Thin HTTP client for a server built by create_app().

    Any attribute access becomes a remote method call:
    ``Client('http://host:port/').items(user=1)``.
    """
    def __init__(self, server):
        self.server = server
        parts = urlparse(server)
        self.port = parts.port
        # netloc is "host:port"; partition keeps just the host part.
        self.host, _, _ = parts.netloc.partition(':')
        self.headers = {'Content-Type': 'application/json'}
    def __getattr__(self, obj):
        # Unknown attributes turn into remote-method proxies.
        return self._request(obj)
    def _request(self, method):
        def _inner(*args, **kwargs):
            payload = dumps({
                'id': str(uuid1()),
                'method': method,
                'args': args,
                'kwargs': kwargs,
            })
            connection = HTTPConnection(self.host, self.port)
            connection.request('POST', '/', payload, self.headers)
            raw = connection.getresponse().read()
            decoded = loads(raw)
            connection.close()
            # A non-null 'error' carries the server-side traceback text.
            remote_error = decoded.get('error')
            if remote_error is not None:
                raise ValueError(remote_error)
            return decoded.get('result')
        return _inner
|
{"/tests/test_utils.py": ["/awesomestream/utils.py"], "/examples/sqlite_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"], "/tests/test_backend.py": ["/awesomestream/backends.py"], "/awesomestream/repl.py": ["/awesomestream/jsonrpc.py"], "/awesomestream/backends.py": ["/awesomestream/utils.py"], "/examples/redis_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"]}
|
21,492
|
ericflo/awesomestream
|
refs/heads/master
|
/examples/sqlite_server.py
|
import os
import sys
# Munge the path a bit to make this work from directly within the examples dir
sys.path.insert(0, os.path.abspath(os.path.join(__file__, '..', '..')))
from awesomestream.backends import SQLBackend
from awesomestream.jsonrpc import create_app, run_server
if __name__ == '__main__':
    # Example server: SQLite-backed stream indexed on these three keys,
    # exposed over HTTP on port 9997.
    backend = SQLBackend(
        dsn='sqlite:////tmp/stream.db',
        keys=['kind', 'user', 'game'],
    )
    app = create_app(backend)
    run_server(app, 9997)
|
{"/tests/test_utils.py": ["/awesomestream/utils.py"], "/examples/sqlite_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"], "/tests/test_backend.py": ["/awesomestream/backends.py"], "/awesomestream/repl.py": ["/awesomestream/jsonrpc.py"], "/awesomestream/backends.py": ["/awesomestream/utils.py"], "/examples/redis_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"]}
|
21,493
|
ericflo/awesomestream
|
refs/heads/master
|
/tests/test_backend.py
|
import unittest
from awesomestream.backends import MemoryBackend
class MemoryBackendTest(unittest.TestCase):
    """Exercise MemoryBackend insertion and filtered retrieval."""

    def setUp(self):
        self.backend = MemoryBackend(keys=['kind', 'user', 'game'])

    def test_basic(self):
        events = [
            {'kind': 'play', 'user': 1, 'game': 'bloons'},
            {'kind': 'play', 'user': 1, 'game': 'ryokan'},
            {'kind': 'high-score', 'user': 1, 'game': 'ryokan', 'score': 10},
            {'kind': 'high-score', 'user': 2, 'game': 'ryokan', 'score': 20},
        ]
        # Load the fixture in chronological order.
        for event in events:
            self.backend.insert(event)
        # Everything comes back newest-first.
        self.assertEqual(self.backend.items(), list(reversed(events)))
        # Single-key filters.
        self.assertEqual(self.backend.items(kind='play'), [events[1], events[0]])
        self.assertEqual(self.backend.items(user=1),
                         [events[2], events[1], events[0]])
        self.assertEqual(self.backend.items(user=2), [events[3]])
        # Combined filters intersect.
        self.assertEqual(self.backend.items(user=1, kind='play'),
                         [events[1], events[0]])
        self.assertEqual(self.backend.items(user=1, kind='high-score'),
                         [events[2]])
        # A list value unions over its members.
        self.assertEqual(self.backend.items(user=[1, 2]), list(reversed(events)))
# Allow running this file directly: python tests/test_backend.py
if __name__ == '__main__':
    unittest.main()
|
{"/tests/test_utils.py": ["/awesomestream/utils.py"], "/examples/sqlite_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"], "/tests/test_backend.py": ["/awesomestream/backends.py"], "/awesomestream/repl.py": ["/awesomestream/jsonrpc.py"], "/awesomestream/backends.py": ["/awesomestream/utils.py"], "/examples/redis_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"]}
|
21,494
|
ericflo/awesomestream
|
refs/heads/master
|
/awesomestream/utils.py
|
import time
import datetime
def coerce_ts(value=None):
    '''
    Given a variety of inputs, this function will return the proper
    timestamp (a float). If None or no value is given, then it will
    return the current timestamp.
    '''
    # The isinstance chain below deliberately falls through: a timedelta
    # becomes a datetime, and since datetime subclasses date, it also hits
    # the date branch (rebuilt at midnight) before the final mktime step.
    # NOTE(review): an int input returns a plain float, while date/datetime
    # inputs are scaled by 1e6 (microsecond units) -- the units are
    # inconsistent; confirm which convention callers depend on.
    if value is None:
        return time.time()
    if isinstance(value, int):
        value = float(value)
    if isinstance(value, datetime.timedelta):
        value = datetime.datetime.now() + value
    if isinstance(value, datetime.date):
        value = datetime.datetime(year=value.year, month=value.month,
                                  day=value.day)
    if isinstance(value, datetime.datetime):
        value = float(time.mktime(value.timetuple()) * 1e6)
    return value
def coerce_dt(value=None):
    '''
    Given a variety of inputs, this function will return the proper
    ``datetime.datetime`` instance. If None or no value is given, then
    it will return ``datetime.datetime.now()``.
    '''
    # Branch order matters: ints are promoted to floats and return early via
    # fromtimestamp.  Because datetime subclasses date, a datetime argument
    # hits the date branch and is truncated to midnight.
    # NOTE(review): that midnight truncation of datetime inputs looks
    # unintentional -- confirm whether any caller passes a datetime.
    if value is None:
        return datetime.datetime.now()
    if isinstance(value, int):
        value = float(value)
    if isinstance(value, float):
        return datetime.datetime.fromtimestamp(value)
    if isinstance(value, datetime.date):
        return datetime.datetime(year=value.year, month=value.month,
                                 day=value.day)
    if isinstance(value, datetime.timedelta):
        value = datetime.datetime.now() + value
    return value
# TODO: The following are not the proper function names. Should figure out
# exactly what we want to call these so that it's less ambiguous to
# someone new to the code.
def combinations(iterable, r):
    """Yield r-length tuples drawn from *iterable* in sorted index order.

    Pure-Python equivalent of ``itertools.combinations``; yields nothing
    when r exceeds the number of elements.
    """
    pool = tuple(iterable)
    n = len(pool)
    if r > n:
        return
    # list(range(...)) so the indices can be mutated in place; a bare range
    # object is immutable on Python 3 and made `indices[i] += 1` crash.
    indices = list(range(r))
    yield tuple(pool[i] for i in indices)
    while True:
        for i in reversed(range(r)):
            if indices[i] != i + n - r:
                break
        else:
            return
        indices[i] += 1
        for j in range(i+1, r):
            indices[j] = indices[j-1] + 1
        yield tuple(pool[i] for i in indices)
def all_combinations(iterable):
    """Yield every non-empty combination of *iterable*, shortest first."""
    total = len(iterable)
    for size in range(1, total + 1):
        for combo in combinations(iterable, size):
            yield combo
def permutations(lst):
    """Yield one row per element of the cartesian product of the lists in *lst*.

    Rows come out in odometer order with the first list varying fastest,
    e.g. permutations([[1, 2], [3]]) yields [1, 3] then [2, 3].
    """
    current = [-1] + ([0] * (len(lst) - 1))
    maxes = [len(item) - 1 for item in lst]
    while True:
        # Advance the odometer: bump the first position with room left,
        # zeroing the positions passed over before it.
        for i, (c, m) in enumerate(zip(current, maxes)):
            if i > 0:
                current[i - 1] = 0
            if c < m:
                current[i] = c + 1
                break
        yield [lst[i][idx] for i, idx in enumerate(current)]
        if current == maxes:
            # PEP 479: `raise StopIteration` inside a generator becomes a
            # RuntimeError on Python 3.7+, so terminate with a plain return.
            return
|
{"/tests/test_utils.py": ["/awesomestream/utils.py"], "/examples/sqlite_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"], "/tests/test_backend.py": ["/awesomestream/backends.py"], "/awesomestream/repl.py": ["/awesomestream/jsonrpc.py"], "/awesomestream/backends.py": ["/awesomestream/utils.py"], "/examples/redis_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"]}
|
21,495
|
ericflo/awesomestream
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
version = '0.1'
LONG_DESCRIPTION = """
AwesomeStream
=============
AwesomeStream is a set of tools for creating a "stream server". That is, a
server which can store information about events that happen, and can query back
those events in reverse-chronological order, sliced in interesting ways.
Example and Use Case
====================
Say that you run a website like GitHub, where people interact in various
different ways. People can create repositories, fork them, watch or unwatch
repositories, add friends, etc. There are all kinds of things that a user
can do on the site. Let's look at how AwesomeStream can help.
First, we'll set up a simple redis-based server:
>>> from awesomestream.backends import RedisBackend
>>> from awesomestream.jsonrpc import create_app, run_server
>>> backend = RedisBackend(
... keys=['user', 'kind', 'repo'],
... host='127.0.0.1',
... port=6379
... )
>>>
>>> app = create_app(backend)
>>> run_server(app, 8080)
This simple script sets up a Redis-based AwesomeStream server--one that pays
special attention to the 'user', 'kind', and 'repo' keys. This will make a
bit more sense in a bit.
In another console, we're going to instantiate a client.
>>> from awesomestream.jsonrpc import Client
>>> c = Client('http://127.0.0.1:8080/')
OK, now that we've set up our client, lets start logging user actions. Look,
a user has just created a new repo!
>>> c.insert({
... 'kind': 'create-repo',
... 'repo': 17,
... 'user': 291,
... 'name': 'frist',
... 'description': 'This is my first repo ever!',
... })
>>>
But the user made a mistake, and named it 'frist' instead of 'first'. So they
go ahead and delete it:
>>> c.insert({
... 'kind': 'delete-repo',
... 'repo': 17,
... 'user': 291,
... 'reason': 'Made a typo :(',
... })
>>>
Then they give up and decide to watch another user's repo instead:
>>> c.insert({'kind': 'watch', 'repo': 2842, 'user': 291, 'owner': 23})
And finally they add that user as a friend:
>>> c.insert({'kind': 'friend', 'user': 291, 'friend': 23})
That second user notices that someone is following them, and follows back:
>>> c.insert({'kind': 'friend', 'user': 23, 'friend': 291})
Now that we have data inserted into the stream server, we can query it to get
back the full stream. Here's how something like that might look:
>>> c.items()
[{'kind': 'friend', 'user': 23, 'friend': 291},
{'kind': 'friend', 'user': 291, 'friend': 23},
{'repo': 2842, 'owner': 23, 'kind': 'watch', 'user': 291},
{'repo': 17, 'kind': 'delete-repo', 'reason': 'Made a typo :(', 'user': 291},
{'repo': 17, 'kind': 'create-repo', 'user': 291, 'name': 'frist', 'description': 'This is my first repo ever!'}
]
As you can see, we got the entire stream back, in reverse chronological order.
But let's say we want to filter this out, to only see 'friend' requests. We
can do that easily:
>>> c.items(kind='friend')
[{'kind': 'friend', 'user': 23, 'friend': 291},
{'kind': 'friend', 'user': 291, 'friend': 23}
]
Notice that they are still in reverse chronological order. We can also combine
our predicates, to get only friend requests made by a specific user.
>>> c.items(kind='friend', user=23)
[{'kind': 'friend', 'user': 23, 'friend': 291}]
But an extremely common case is that you want to see only your activity
that is generated by your friends. With AwesomeStream, that's simple:
>>> c.items(user=[23, 291])
[{'kind': 'friend', 'user': 23, 'friend': 291},
{'kind': 'friend', 'user': 291, 'friend': 23},
{'repo': 2842, 'owner': 23, 'kind': 'watch', 'user': 291},
{'repo': 17, 'kind': 'delete-repo', 'reason': 'Made a typo :(', 'user': 291},
{'repo': 17, 'kind': 'create-repo', 'user': 291, 'name': 'frist', 'description': 'This is my first repo ever!'}
]
As you can see, every user ID passed into that list is retrieved. By default,
the items() function retrieves 20 items, but often times we'll need to
customize that. Here's how that would look:
>>> c.items(user=[23, 291], start=1, end=3)
[{'kind': 'friend', 'user': 291, 'friend': 23},
{'repo': 2842, 'owner': 23, 'kind': 'watch', 'user': 291}
]
Supported Backends
==================
* In-Memory (mostly for testing)
* SQL
* Redis
Planned Support
===============
* CouchDB
* Cassandra
Maturity
========
I'm writing this for eventual deployment on http://radiosox.com/, but have not
yet deployed it in production. Do so at your own risk.
Requirements
============
Short Summary:
Use pip, and do `pip install -U -r requirements.txt`
Longer Summary:
Strictly speaking, the only requirement is *simplejson*. That being said,
if you want redis support, you need *redis* installed. If you want SQL
support, you need *SQLAlchemy* installed. If you want support for creating
a WSGI app to expose this over HTTP, you'll need *werkzeug* installed.
Finally, if you want a simple, pure-python way of running that WSGI app,
you'll want to install *cherrypy*.
"""
# Standard setuptools packaging metadata; install with `pip install .`.
# Runtime extras (redis, SQLAlchemy, werkzeug, cherrypy) are documented in
# LONG_DESCRIPTION rather than pinned in install_requires.
setup(
    name='awesomestream',
    version=version,
    description="AwesomeStream makes awesome streams",
    long_description=LONG_DESCRIPTION,
    classifiers=[
        "Programming Language :: Python",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Environment :: Web Environment",
    ],
    keywords='json-rpc,stream,feed,werkzeug,cherrypy,sqlalchemy,redis',
    author='Eric Florenzano',
    author_email='floguy@gmail.com',
    url='http://github.com/ericflo/awesomestream',
    license='BSD',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=['setuptools'],
)
|
{"/tests/test_utils.py": ["/awesomestream/utils.py"], "/examples/sqlite_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"], "/tests/test_backend.py": ["/awesomestream/backends.py"], "/awesomestream/repl.py": ["/awesomestream/jsonrpc.py"], "/awesomestream/backends.py": ["/awesomestream/utils.py"], "/examples/redis_server.py": ["/awesomestream/backends.py", "/awesomestream/jsonrpc.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.