| index (int64) | repo_name (string) | branch_name (string) | path (string) | content (string) | import_graph (string) |
|---|---|---|---|---|---|
20,595,489
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Player/models.py
|
from django.db import models
from django.contrib.auth.models import User
from Game import models as game_models
# Create your models here.
class MyUser(models.Model):
    """Game profile wrapping Django's built-in auth User."""
    # Optional avatar; uploads land under MEDIA_ROOT/profile_images.
    profilePic = models.ImageField(upload_to='profile_images', blank=True)
    # Player progression level; new accounts start at 0.
    level = models.IntegerField(default=0)
    # One profile per auth user. NOTE(review): no on_delete — implies Django < 2.0.
    user = models.OneToOneField(User)

    def __str__(self):
        return self.user.username
class Card(models.Model):
    """A single playing card with its buy cost and stick value."""
    cardValue = models.IntegerField()
    stickValue = models.IntegerField()
    # NOTE(review): upload_to=None with a default pointing at the static
    # dir looks like a placeholder — confirm where card art really lives.
    picture = models.ImageField(upload_to=None, blank=False, default="../../static")
    # Card kind (e.g. mushroom type); also used as the display name below.
    type = models.CharField(max_length=50)

    def __str__(self):
        return self.type
class Player(models.Model):
    """Per-game state for one participant: score, sticks, turns and cards."""
    score = models.IntegerField(default=0)
    sticks = models.IntegerField(default=0)
    # A game lasts 20 turns by default.
    turns = models.IntegerField(default=20)
    # Cards currently played to the table vs. cards held in hand.
    userPlayingCards = models.ManyToManyField(Card, blank=True, related_name='playingcards')
    userHand = models.ManyToManyField(Card, blank=True)
    # NOTE(review): no on_delete — implies Django < 2.0.
    userPlayer = models.ForeignKey(MyUser)

    def __str__(self):
        return self.userPlayer.user.username
class Invite(models.Model):
    """A game invitation sent from one MyUser to another."""
    invite_sender = models.ForeignKey(MyUser, related_name='sender')
    invite_receiver = models.ForeignKey(MyUser, related_name='receiver')
    time_sent = models.DateTimeField('')
    # False until the receiver accepts.
    status_accepted = models.BooleanField(default=False)

    def __str__(self):
        # Bug fix: __str__ must return a string; returning the raw
        # boolean raises TypeError whenever the object is rendered.
        return str(self.status_accepted)
class Insult(models.Model):
    """A canned insult line a Bot can deliver."""
    insults = models.CharField(max_length=100)

    def __str__(self):
        return self.insults
class Bot(models.Model):
    """An AI opponent: a name, its insult, and the Player rows it controls."""
    name = models.CharField(max_length=50)
    # NOTE(review): default='' is not a valid FK default — verify intent.
    insults = models.ForeignKey(Insult, default='')
    botPlayers = models.ManyToManyField(Player)

    def __str__(self):
        return self.name
class LeaderBoard(models.Model):
    """A scoreboard entry: the user, the Player row holding the score, a date."""
    # NOTE(review): default='' / default=0 are not valid FK defaults — verify.
    user_id = models.ForeignKey(User, default='')
    score = models.ForeignKey(Player, default=0)
    date = models.DateTimeField(auto_now=False, auto_now_add=False)

    def __str__(self):
        # Bug fix: __str__ must return a string; it previously returned the
        # related User instance, which raises TypeError in str(entry).
        return str(self.user_id)
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,595,490
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Player/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the Player app.

    Models are created first without their mutually-referencing relations;
    the FK/M2M fields are then attached with AddField so the creation order
    never references a model that does not exist yet.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Bot',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Card',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('cardValue', models.IntegerField()),
                ('stickValue', models.IntegerField()),
                ('picture', models.ImageField(upload_to=None, default='../../static')),
                ('type', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Insult',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('insults', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Invite',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('time_sent', models.DateTimeField(verbose_name='')),
                ('status_accepted', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='LeaderBoard',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('date', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='MyUser',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('profilePic', models.ImageField(upload_to='profile_images', blank=True)),
                ('level', models.IntegerField(default=0)),
                ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Player',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('score', models.IntegerField(default=0)),
                ('userHand', models.ManyToManyField(to='Player.Card', blank=True)),
                ('userPlayer', models.ForeignKey(to='Player.MyUser')),
            ],
        ),
        # Relations attached after all target models exist.
        migrations.AddField(
            model_name='leaderboard',
            name='score',
            field=models.ForeignKey(to='Player.Player', default=0),
        ),
        migrations.AddField(
            model_name='leaderboard',
            name='user_id',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL, default=''),
        ),
        migrations.AddField(
            model_name='invite',
            name='invite_receiver',
            field=models.ForeignKey(to='Player.MyUser', related_name='receiver'),
        ),
        migrations.AddField(
            model_name='invite',
            name='invite_sender',
            field=models.ForeignKey(to='Player.MyUser', related_name='sender'),
        ),
        migrations.AddField(
            model_name='bot',
            name='botPlayers',
            field=models.ManyToManyField(to='Player.Player'),
        ),
        migrations.AddField(
            model_name='bot',
            name='insults',
            field=models.ForeignKey(to='Player.Insult', default=''),
        ),
    ]
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,595,491
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Game/urls.py
|
from django.conf.urls import url
# URL routes for the Game app; every route is keyed on the game's id.
urlpatterns = [
    url(r'^(?P<game_id>\d+)/$', 'Game.views.game', name='game'),
    url(r'^(?P<game_id>\d+)/update/', 'Game.views.update', name='update'),
    url(r'^(?P<game_id>\d+)/sell_cards/', 'Game.views.sell_cards', name='sell_cards'),
    url(r'^(?P<game_id>\d+)/play_cards/', 'Game.views.play_cards', name='play_cards'),
    # Bug fix: this route was also named 'play_cards' (copy-paste), which
    # broke {% url %} / reverse() lookups for the buy_cards view.
    url(r'^(?P<game_id>\d+)/buy_cards/', 'Game.views.buy_cards', name='buy_cards'),
]
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,595,492
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Player/migrations/0006_player_turns.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the per-game turn counter (default 20) to Player."""

    dependencies = [
        ('Player', '0005_auto_20151110_0349'),
    ]

    operations = [
        migrations.AddField(
            model_name='player',
            name='turns',
            field=models.IntegerField(default=20),
        ),
    ]
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,595,493
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Game/models.py
|
from django.db import models
from Player.models import MyUser, Card, Player
from django.utils import timezone
# import datetime
# Create your models here.
# The four card piles of a game. Each is just a bag of Card rows;
# the distinct related_names keep the reverse accessors on Card apart.
class Deck(models.Model):
    # Cards remaining in the draw deck.
    deckCard = models.ManyToManyField(Card, related_name='deckCards', default=None)
class Decay(models.Model):
    # Cards that have rotted out of the forest row.
    decayDeckCard = models.ManyToManyField(Card, related_name='decayCards', default=None)
class Forest(models.Model):
    # Cards currently laid out in the forest row.
    forestCard = models.ManyToManyField(Card, related_name='forestCards', default=None)
class Night(models.Model):
    # Cards set aside for the night pile.
    nightDeckCard = models.ManyToManyField(Card, related_name='nightCards', default=None)
class FryingPan(models.Model):
    """References the card currently in the frying pan."""
    # NOTE(review): no on_delete — implies Django < 2.0.
    card_id = models.ForeignKey(Card)

    def __str__(self):
        # Bug fix: __str__ must return a string; it previously returned the
        # related Card instance, which raises TypeError in str(pan).
        return str(self.card_id)
class Game(models.Model):
    """A two-player match, linking both players to the shared card piles."""
    # Stays NULL until the game finishes.
    winner = models.ForeignKey(MyUser, default=None, blank=True, null=True)
    # Creation timestamp, set once in save() below.
    date = models.DateTimeField(editable=False)
    player_1 = models.ForeignKey(Player, related_name='player_1')
    player_2 = models.ForeignKey(Player, related_name='player_2')
    # Whose turn it currently is.
    current_player = models.ForeignKey(Player, related_name='current_player', default=None)
    deck_id = models.ForeignKey(Deck, related_name='deck_id', default=None)
    forest_id = models.ForeignKey(Forest, related_name='forest_id', default=None)
    night_id = models.ForeignKey(Night, related_name='night_id', default=None)
    decay_id = models.ForeignKey(Decay, related_name='decay_id', default=None)

    def save(self, *args, **kwargs):
        # Stamp the creation time only on the first save (no id yet).
        if not self.id:
            self.date = timezone.now()
        return super(Game, self).save(*args, **kwargs)
class PlayingCard(models.Model):
    """Join row tying a Card to the FryingPan it was cooked in."""
    fryingPan_id = models.ForeignKey(FryingPan)
    card_id = models.ForeignKey(Card)

    def __str__(self):
        # Bug fix: __str__ must return a string; it previously returned the
        # related FryingPan instance, which raises TypeError in str(pc).
        return str(self.fryingPan_id)
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,595,494
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Player/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import *
# Expose the Player app's models in the Django admin.
# NOTE(review): Player and Invite are not registered here; Player is
# registered from Game/admin.py instead — confirm that split is intended.
admin.site.register(Card)
admin.site.register(Bot)
admin.site.register(Insult)
admin.site.register(LeaderBoard)
admin.site.register(MyUser)
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,595,495
|
Chelsea-Dover/MushroomHunters
|
refs/heads/master
|
/Morels/Game/admin.py
|
from django.contrib import admin
from .models import *
# Register your models here.
# Expose the Game app's models in the Django admin.
admin.site.register(Game)
admin.site.register(Deck)
admin.site.register(Decay)
admin.site.register(Forest)
admin.site.register(Night)
admin.site.register(PlayingCard)
admin.site.register(FryingPan)
# Player lives in the Player app but is registered here.
admin.site.register(Player)
|
{"/Morels/Game/views.py": ["/Morels/Game/models.py"], "/Morels/Game/admin.py": ["/Morels/Game/models.py"], "/Morels/Player/views.py": ["/Morels/Player/models.py"], "/Morels/Player/admin.py": ["/Morels/Player/models.py"]}
|
20,618,279
|
aabdulqader/Social-Media-Project
|
refs/heads/main
|
/users/views.py
|
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from .forms import SignUpForm
from django.contrib import messages
def SignUpView(request):
    """Handle registration: validate the signup form on POST, else show a blank one.

    On success, flashes a message and redirects to the 'login' route; an
    invalid POST falls through and re-renders the bound form with errors.
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            form.save()
            # Fix: the message literal carried an f-prefix with no
            # placeholders, and the username local was never used.
            messages.success(request, 'Your account has been created! You are now able to log in')
            return redirect('login')
    else:
        form = SignUpForm()
    return render(request, 'users/signup.html', {'form': form})
|
{"/users/admin.py": ["/users/models.py"], "/users/views.py": ["/users/models.py"]}
|
20,624,387
|
Carnicero90/boolpress-django-demo
|
refs/heads/master
|
/posts/views.py
|
from django.shortcuts import render
from .models import Post
def index(request):
    """Render the five most recent posts, each annotated with its tags."""
    latest_posts = Post.objects.order_by('-pub_date')[:5]
    for entry in latest_posts:
        # Attach the reverse M2M set so the template can loop post.tags.
        entry.tags = entry.tag_set.all()
    return render(request, 'posts/index.html', {'post_list': latest_posts})
|
{"/posts/views.py": ["/posts/models.py"], "/posts/admin.py": ["/posts/models.py"]}
|
20,624,388
|
Carnicero90/boolpress-django-demo
|
refs/heads/master
|
/posts/models.py
|
from django.db import models
from django.utils.text import slugify
# TODO: sposta sta merda da qui (e rifalla pure)
def generate_unique_slug(klass, field, instance=None):
    """Build a slug for *field* that is unique within *klass*.

    Appends ``-<n>`` (n = 1, 2, ...) to the slugified text until no other
    row of *klass* holds it, e.g. ``foo-bar`` -> ``foo-bar-1``. When
    *instance* is given, that row is excluded from the check so re-saving
    an object keeps its own slug valid.
    """
    base = slugify(field)
    candidate = base
    suffix = 1

    def _taken(slug):
        # True when another row already owns this slug.
        qs = klass.objects.filter(slug=slug)
        if instance is not None:
            qs = qs.exclude(id=instance.id)
        return qs.exists()

    while _taken(candidate):
        candidate = '%s-%d' % (base, suffix)
        suffix += 1
    return candidate
class Category(models.Model):
    """A post category; its slug is derived from the name."""
    name = models.CharField(max_length=30, unique=True)
    slug = models.SlugField(max_length=40, unique=True, editable=False)

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Bug fix: slug is unique and editable=False but was never
        # populated anywhere, so every save attempted an empty slug.
        # Generate it the same way Post.save does.
        self.slug = generate_unique_slug(Category, self.name, self)
        super(Category, self).save(*args, **kwargs)
class Post(models.Model):
    """A blog post; the slug is regenerated from the title on every save."""
    title = models.CharField(max_length=100)
    subtitle = models.CharField(max_length=300)
    content = models.TextField()
    author = models.CharField(max_length=50)
    # Generated, never hand-edited.
    slug = models.SlugField(max_length=120, unique=True, editable=False)
    pub_date = models.DateTimeField('date published')
    # Image URL/path as plain text, optional.
    image = models.CharField(max_length=500, null=True, blank=True)
    category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, blank=True)

    def __str__(self):
        return self.title

    def save(self, *args, **kwargs):
        # NOTE(review): regenerates the slug on every save, so renaming a
        # post changes its URL — confirm that is acceptable.
        self.slug = generate_unique_slug(Post, self.title, self)
        super(Post, self).save(*args, **kwargs)
class Tag(models.Model):
    """A free-form tag; a post may carry many tags and vice versa."""
    name = models.CharField(max_length=20)
    posts = models.ManyToManyField(Post)
|
{"/posts/views.py": ["/posts/models.py"], "/posts/admin.py": ["/posts/models.py"]}
|
20,624,389
|
Carnicero90/boolpress-django-demo
|
refs/heads/master
|
/posts/admin.py
|
from django.contrib import admin
from .models import Post, Category, Tag
# Expose the blog models in the Django admin.
admin.site.register(Category)
admin.site.register(Post)
admin.site.register(Tag)
|
{"/posts/views.py": ["/posts/models.py"], "/posts/admin.py": ["/posts/models.py"]}
|
20,624,390
|
Carnicero90/boolpress-django-demo
|
refs/heads/master
|
/posts/migrations/0002_auto_20210702_2054.py
|
# Generated by Django 3.2.4 on 2021-07-02 18:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Introduce Category, widen Post.slug, and link posts to a category."""

    dependencies = [
        ('posts', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('slug', models.SlugField(editable=False, max_length=40, unique=True)),
            ],
        ),
        migrations.AlterField(
            model_name='post',
            name='slug',
            field=models.SlugField(editable=False, max_length=120, unique=True),
        ),
        # Nullable FK so existing posts survive, SET_NULL on category delete.
        migrations.AddField(
            model_name='post',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='posts.category'),
        ),
    ]
|
{"/posts/views.py": ["/posts/models.py"], "/posts/admin.py": ["/posts/models.py"]}
|
20,737,928
|
CyprienB/Home-Depot
|
refs/heads/master
|
/Procedures.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 4 14:42:10 2017
@author: Bastide
"""
def geocode(postal, *List, recursion=0):
    """Resolve a US postal code to ``[city, postal, (lat, long)]``.

    Tries the offline uszipcode database first and falls back to the
    Nominatim web geocoder. *List* may carry previously geocoded results;
    its last entry is returned when the web lookup finds nothing. Retries
    up to 10 times on geocoder timeouts, backing off 0.2s each time.
    """
    from geopy.exc import GeocoderTimedOut
    from geopy.geocoders import Nominatim
    import uszipcode as usz
    import time

    # Fast path: offline zipcode database.
    search = usz.ZipcodeSearchEngine()
    info = search.by_zipcode(str(postal))
    if info["City"] is not None:
        return [info["City"], postal, (info["Latitude"], info["Longitude"])]

    # Fall back to the Nominatim web service.
    try:
        info = Nominatim().geocode(str(postal) + ", United States of America")
    except GeocoderTimedOut as e:
        if recursion > 10:  # max recursions
            raise e
        time.sleep(0.2)  # wait a bit before retrying
        # Bug fix: was `geocode(postal, List, ...)`, which re-wrapped the
        # varargs tuple into a single argument on every retry; unpack it.
        return geocode(postal, *List, recursion=recursion + 1)

    if info is None:
        # No result found: fall back to the caller-supplied history.
        print("attention")
        print(info)
        # Bug fix: `List is not None` was always true (varargs is a tuple),
        # so an empty List crashed on List[-1]; test emptiness instead.
        if List:
            print(List[-1])
            return List[-1]
        return [None, None, (0, 0)]
    return [info.address, postal, (info.latitude, info.longitude)]
# Same without list for correction
# Same lookup without the history-list fallback, for correction passes.
def geocode2(postal, recursion=0):
    """Resolve a US postal code to ``[city, postal, (lat, long)]`` or None.

    Tries the offline uszipcode database first, then the GoogleV3 web
    geocoder. Returns None when neither source knows the code. Retries up
    to 10 times on geocoder timeouts, backing off 0.1s each time.
    """
    from geopy.exc import GeocoderTimedOut
    from geopy.geocoders import GoogleV3
    import uszipcode as usz
    import time

    # Fast path: offline zipcode database.
    offline = usz.ZipcodeSearchEngine().by_zipcode(str(postal))
    if offline["City"] is not None:
        return [offline["City"], postal, (offline["Latitude"], offline["Longitude"])]

    # Fall back to the GoogleV3 web service.
    try:
        hit = GoogleV3().geocode(str(postal) + ", United States of America")
    except GeocoderTimedOut as e:
        if recursion > 10:  # max recursions
            raise e
        time.sleep(0.1)  # wait a bit before retrying
        return geocode2(postal, recursion=recursion + 1)

    if hit is None:
        return None
    return [hit.address, postal, (hit.latitude, hit.longitude)]
# Facilitate the openpyxl formating
def cell(Sheet, rownb, columnnb):
return Sheet.cell(row=rownb, column=columnnb).value
## Return distances between two postal codes
#def get_distance(Zip1,Zip2):
# from geopy.distance import vincenty
# zip1=geocode2(Zip1)[2]
# zip2=geocode2(Zip2)[2]
# return vincenty(zip1,zip2).miles
#
# Return the number of instance in the Sheet, we can adjust the starting line
def instance(Sheet, starting_row=2, column=1):
r=0
while cell(Sheet, r+starting_row, column) is not None:
r+=1
return r
# Return a dictionnary of pricing (State : Carrier : ( Flat , Break, Extra))
def get_lm_pricing(workbook, sheet):
import openpyxl as xl
Workbook = xl.load_workbook(workbook)
Sheet = Workbook[sheet]
Pricing={}
# Get State in DIct
nb_state = instance(Sheet,starting_row=3)
for r in range(nb_state):
Pricing[cell(Sheet,3+r,2)]= {}
# Get carriers
c=0
Carriers = []
while cell(Sheet,1,4+3*c) is not None:
Carriers.append(cell(Sheet,1 ,4+3*c))
c+=1
# Create Dictionnaries
for r in range(len(Pricing)):
for c in range(len(Carriers)):
# Append only if there is pricing info
if cell(Sheet,r+3,3*c+3) is not None:
Pricing[cell(Sheet, r+3,2)][Carriers[c]]={'Flat':cell(Sheet,r+3,3+3*c),'Break':cell(Sheet,r+3,4+3*c),'Extra':cell(Sheet,r+3,5+3*c)}
return Pricing
# Return a list of all"neighboring" states to the one in the argument
def neig_states(state_code, Sheet):
    """Return [state_code] plus every state *Sheet* lists as bordering it.

    The sheet holds one border pair per row (columns 1 and 2); a reverse
    match only counts when column 3 flags the pair as "1st" degree.
    """
    total_rows = instance(Sheet)
    neighbours = [state_code]
    for offset in range(total_rows):
        row = offset + 2
        if cell(Sheet, row, 1) == state_code:
            neighbours.append(cell(Sheet, row, 2))
        if cell(Sheet, row, 2) == state_code and cell(Sheet, row, 3) == "1st":
            neighbours.append(cell(Sheet, row, 1))
    return neighbours
# Return a list of STates defining The region, based on degree of neighboor
def get_second_neig(state_code, Sheet):
    """Region around *state_code*: its neighbours plus their neighbours.

    Duplicates are removed via set union, so element order is arbitrary.
    """
    first_ring = neig_states(state_code, Sheet)
    region = first_ring
    for member in first_ring:
        second_ring = neig_states(member, Sheet)
        # Union removes duplicates across rings.
        region = list(set().union(region, second_ring))
    return region
# Compute Distance using zip database that get updated if new zip happens
def compute_distance(Workbook, Sheet, column_origin, column_destination, column_distance):
import openpyxl as xl
from geopy.distance import vincenty
from Procedures import instance, cell, geocode2
print('Open File')
wb = xl.load_workbook(Workbook)
wsdist = wb[Sheet]
print('Open Database')
wdata = xl.load_workbook('Excel Files/Zip_latlong.xlsx')
wslatlong = wdata['Zip']
linelatlong = instance(wslatlong)
# Collect Data
Zip_lat_long = {}
for r in range(linelatlong):
Zip_lat_long[str(cell(wslatlong,r+2,1))] = (cell(wslatlong,r+2,2),cell(wslatlong,r+2,3))
print ('Collecting Data ',r*100/linelatlong,'%')
# Compute distance
linedistance = instance(wsdist)
# a serve to know if zipcode not in database appears
a = 0
for r in range(linedistance):
try :
wsdist.cell(row=r+2,column=column_distance).value = vincenty(Zip_lat_long[str(cell(wsdist,r+2,column_origin))],Zip_lat_long[str(cell(wsdist,r+2,column_destination))]).miles
except KeyError:
Zip_lat_long[str(cell(wsdist,r+2,column_origin))]= (geocode2(cell(wsdist,r+2,column_origin))[2][0],geocode2(cell(wsdist,r+2,column_origin))[2][1])
Zip_lat_long[str(cell(wsdist,r+2,column_destination))]= (geocode2(cell(wsdist,r+2,column_destination))[2][0],geocode2(cell(wsdist,r+2,column_destination))[2][1])
wsdist.cell(row=r+2,column=column_distance).value = vincenty(Zip_lat_long[str(cell(wsdist,r+2,column_origin))],Zip_lat_long[str(cell(wsdist,r+2,column_destination))]).miles
a+=1
print('Compute Distances ',(r+1)/linedistance*100, '%')
print('Saving File')
wb.save(Workbook)
# Update database
if a!=0:
print("Update Database")
ZipList = Zip_lat_long.keys()
c = 0
for r in ZipList:
wslatlong.cell(row = c+2,column = 1).value = r
wslatlong.cell(row = c+2,column = 2).value = Zip_lat_long[r][0]
wslatlong.cell(row = c+2,column = 3).value = Zip_lat_long[r][1]
c+=1
print('Updating Database ',c*100/len(ZipList),'%')
wdata.save('Excel Files/Zip_latlong.xlsx')
print('Database updated')
|
{"/3rd step Formating removing useless DA.py": ["/Procedures.py"], "/LineHaul_Dist_Calculation.py": ["/Procedures.py"], "/Test_Variability.py": ["/Procedures.py"], "/get_Lat_long.py": ["/Procedures.py"], "/Combined_All_Das_Future.py": ["/Procedures.py"], "/Combined.py": ["/Procedures.py"], "/Combined_All_Das_test.py": ["/Procedures.py"], "/Optional_get_Zip_State.py": ["/Procedures.py"], "/temp.py": ["/Procedures.py"], "/4th step second optimization with cost of opening a DA.py": ["/Procedures.py"], "/2nd_Step_Optimization_Full_USa.py": ["/Procedures.py"], "/Combined_All_Das.py": ["/Procedures.py"]}
|
20,792,884
|
esaki-ren/practice
|
refs/heads/main
|
/net.py
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class SpeachEmbedder(nn.Module):
    """LSTM speaker embedder: last-frame hidden state -> d-vector or logits.

    (The public class name's spelling is kept as-is for compatibility.)
    """

    def __init__(self, nmels, n_hidden, n_outputs, dvec_ndim, num_layer):
        super(SpeachEmbedder, self).__init__()
        self.lstm_stack = nn.LSTM(nmels, n_hidden, num_layers=num_layer,
                                  batch_first=True)
        # Zero the LSTM biases, Xavier-init its weights.
        for name, param in self.lstm_stack.named_parameters():
            if 'bias' in name:
                nn.init.constant_(param, 0.0)
            elif 'weight' in name:
                nn.init.xavier_normal_(param)
        self.extractor = nn.Linear(n_hidden, dvec_ndim)
        # Bug fix: the classifier head consumes the dvec_ndim-dim embedding
        # produced by extractor, not the raw n_hidden LSTM output (matches
        # MyRNN below; the old Linear(n_hidden, ...) crashed at runtime
        # whenever dvec_ndim != n_hidden).
        self.out = nn.Linear(dvec_ndim, n_outputs)

    def forward(self, x, phase, h=None):
        """Embed batch x (batch, frames, nmels); `phase` picks the head."""
        x, _ = self.lstm_stack(x.float())
        x = x[:, x.size(1) - 1]  # keep only the last frame
        # Bug fix: torch.nn has no `relu` attribute; use the functional API.
        x = F.relu(self.extractor(x))
        if phase == "train" or phase == "validation" or phase == "test":
            x = F.relu(self.out(x.float()))  # class scores (one-hot target)
        elif phase == "extract":
            x = x / torch.norm(x, dim=1).unsqueeze(1)  # unit-norm d-vector
        return x
class MyRNN(nn.Module):
    """LSTM d-vector network operating on packed, length-sorted batches."""

    def __init__(self, n_in, n_hidden, n_out, dvec_ndim=5, num_layers=1, bidirectional=False):
        super(MyRNN, self).__init__()
        self.n_in = n_in
        self.n_hidden = n_hidden
        self.num_layers = num_layers
        self.n_direction = 2 if bidirectional else 1
        self.lstm = nn.LSTM(n_in, n_hidden, num_layers,
                            bidirectional=bidirectional, batch_first=True)
        self.extractor = nn.Linear(self.n_direction*self.n_hidden, dvec_ndim)
        self.linear_out = nn.Linear(dvec_ndim, n_out)

    def init_hidden(self, batch_size):
        # Zero initial hidden/cell states sized for this LSTM configuration.
        h0 = torch.zeros(self.num_layers*self.n_direction, batch_size,
                         self.n_hidden)
        c0 = torch.zeros(self.num_layers*self.n_direction, batch_size,
                         self.n_hidden)
        return h0, c0

    def pack_padded(self, x, length):
        # Pack the padded batch, then z-normalize the packed values in place.
        length = torch.flatten(length)
        pack = nn.utils.rnn.pack_padded_sequence(x, length, batch_first=True)
        norm_data = (pack.data - pack.data.mean()) / pack.data.std()
        pack.data[:] = norm_data
        return pack

    def forward(self, input, lengths, phase):
        # pack_padded (old inline version kept below for reference)
        """ lengths = torch.flatten(lengths)
        print(input.shape)
        input = nn.utils.rnn.pack_padded_sequence(input, lengths, batch_first=True)
        norm_data = (input.data - input.data.mean()) / input.data.std()
        input.data[:] = norm_data """
        input = self.pack_padded(input, lengths)  # instance method: call via self
        #import pdb; pdb.set_trace()
        #print('input:', input)
        output, (h, c) = self.lstm(input)
        # NOTE(review): leftover debug print — consider removing before use.
        print('h:', h)
        #output, output_lengths = nn.utils.rnn.pad_packed_sequence(output, batch_first=True)
        #output = output[:, output.size(1)-1]
        #print(output)
        # NOTE(review): extractor expects n_direction*n_hidden features, but
        # h is reshaped to n_hidden columns — this mismatches when
        # bidirectional=True; confirm only the unidirectional path is used.
        h = self.extractor(h.view(-1, self.n_hidden))
        if phase=="train" or phase=="validation" or phase=="test":
            h = self.linear_out(h)
            #print(h)
        elif phase=="extract":
            h = h / torch.norm(h, dim=1).unsqueeze(1)
        return h
|
{"/train.py": ["/params.py"], "/make_mspec.py": ["/params.py"]}
|
20,792,885
|
esaki-ren/practice
|
refs/heads/main
|
/lightning_net.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import pytorch_lightning as pl
from pytorch_lightning import Trainer
from pytorch_lightning.metrics.functional import accuracy
from pytorch_lightning.plugins.ddp_sequential_plugin import DDPSequentialPlugin
from pytorch_lightning.utilities import BOLTS_AVAILABLE, FAIRSCALE_PIPE_AVAILABLE
class DvectorExtractor(pl.LightningModule):
    """Lightning module for LSTM d-vector extraction / speaker classification.

    NOTE(review): several latent bugs are flagged inline below; this class
    does not look runnable as-is.
    """

    def __init__(self, n_input, n_hidden, n_output, dvec_dim, num_layer, lr, optimizer, bidirectional=False):
        super().__init__()
        self.n_input = n_input
        self.n_hidden = n_hidden
        self.n_output = n_output
        self.dvec_dim = dvec_dim
        self.num_layer = num_layer
        self.n_direction = 2 if bidirectional else 1
        self.bn = nn.BatchNorm1d(n_hidden)
        self.relu = nn.ReLU()
        self.lstm = nn.LSTM(n_input, n_hidden, num_layer, bidirectional=bidirectional, batch_first=True)
        self.extractor = nn.Linear(self.n_direction*self.n_hidden, dvec_dim)
        self.out = nn.Linear(dvec_dim, n_output)
        # NOTE(review): `lr` and `optimizer` args are accepted but never
        # stored (no save_hyperparameters), yet self.hparams.lr is read below.

    def forward(self, sorted_x, sorted_length):
        # Pack the length-sorted padded batch and z-normalize packed values.
        sorted_length = torch.flatten(sorted_length)
        pack = nn.utils.rnn.pack_padded_sequence(sorted_x, sorted_length, batch_first=True)
        norm_data = (pack.data - pack.data.mean()) / pack.data.std()
        pack.data[:] = norm_data
        # forward
        # NOTE(review): feeds the builtin `input` to the LSTM instead of
        # `pack` — the packed batch above is never used.
        out, (h, c) = self.lstm(input)
        h = self.relu(self.bn(h))
        h = self.extractor(h.view(-1, self.n_hidden))
        h = self.out(h)
        return h

    def training_step(self, batch, batch_idx):
        data, label = batch
        # NOTE(review): forward takes (x, lengths) but only `data` is passed.
        out = self.forward(data)
        #loss = F.nll_loss(logits, y) The negative log likelihood loss.
        criterion = nn.CrossEntropyLoss()
        loss = criterion(out, label)
        self.log('Training Loss', loss)
        return loss

    def _evaluate(self, batch, batch_idx, stage=None):
        # Shared body for validation_step / test_step.
        data, label = batch
        # NOTE(review): `x` is undefined here — presumably should be `data`.
        out = self.forward(x)
        #logits = F.log_softmax(out, dim=-1)
        #loss = F.nll_loss(logits, y)
        criterion = nn.CrossEntropyLoss()
        loss = criterion(out, label)
        logits = F.log_softmax(out, dim=-1)
        preds = torch.argmax(logits, dim=-1)
        acc = accuracy(preds, label)
        if stage:
            self.log(f'{stage}_loss', loss, prog_bar=True)
            self.log(f'{stage}_acc', acc, prog_bar=True)
        return loss, acc

    def validation_step(self, batch, batch_idx):
        return self._evaluate(batch, batch_idx, 'val')[0]

    def test_step(self, batch, batch_idx):
        loss, acc = self._evaluate(batch, batch_idx, 'test')
        self.log_dict({'test_loss': loss, 'test_acc': acc})

    def configure_optimizers(self):
        # SGD + one-cycle LR schedule stepped every batch.
        # NOTE(review): `math` is not imported in this file, and
        # self.hparams.lr / self.hparams.batch_size are never saved.
        optimizer = torch.optim.SGD(self.parameters(), lr=self.hparams.lr, momentum=0.9, weight_decay=5e-4)
        return {
            'optimizer': optimizer,
            'lr_scheduler': {
                'scheduler': torch.optim.lr_scheduler.OneCycleLR(
                    optimizer,
                    0.1,
                    epochs=self.trainer.max_epochs,
                    steps_per_epoch=math.ceil(45000 / self.hparams.batch_size)),
                'interval': 'step',
            }
        }

    @property
    def automatic_optimization(self) -> bool:  # `->` annotates the return type
        # NOTE(review): self._manual_optimization is never assigned here.
        return not self._manual_optimization
def pack_padded(sorted_x, sorted_length):
    """Pack a length-sorted padded batch and z-normalize the packed values.

    The label tensor would need the same sort order, so sorting is
    deliberately left to the caller.
    """
    flat_lengths = torch.flatten(sorted_length)
    packed = nn.utils.rnn.pack_padded_sequence(sorted_x, flat_lengths,
                                               batch_first=True)
    # Normalize in place over all valid (non-padding) time steps.
    packed.data[:] = (packed.data - packed.data.mean()) / packed.data.std()
    return packed
|
{"/train.py": ["/params.py"], "/make_mspec.py": ["/params.py"]}
|
20,792,886
|
esaki-ren/practice
|
refs/heads/main
|
/train.py
|
import sys
from os import environ
from os.path import dirname, join, expanduser
from pathlib import Path
# Dataset root directory (overridable via the DATASET_ROOT env var).
if "DATASET_ROOT" in environ:
    ret = Path(environ["DATASET_ROOT"])
else:
    ret = Path("~", "dataset")
ret = ret.expanduser()
ret.mkdir(exist_ok=True, parents=True)
DATA_ROOT = join(ret, "CommonVoice", "cv-corpus-6.1-2020-12-11", "ja")
#print("DATA_ROOT:", DATA_ROOT)
# Output directory for results (overridable via the SAVE_ROOT env var).
if "SAVE_ROOT" in environ:
    ret = Path(environ["SAVE_ROOT"])
else:
    ret = Path("./results")
ret = ret.expanduser()
SAVE_ROOT = join(ret, "CommonVoice", "cv-corpus-6.1-2020-12-11", "ja")
#print("SAVE_ROOT", SAVE_ROOT)
import random
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, Dataset, random_split
import torchaudio
from torchvision import transforms
torchaudio.set_audio_backend('sox_io')  # sox_io backend: Linux, Mac
class SpeechDataset(Dataset):
    """CommonVoice speaker dataset: MFCC features + speaker-index labels."""
    # Target sample rate; inputs at other rates are resampled in __getitem__.
    fs = 16000

    def __init__(self, data_dir, train=True, transform=None, split_rate=0.8):
        tsv = join(data_dir, "validated.tsv")
        # Verify dataset uniqueness and enumerate the target labels.
        import pandas as pd
        df = pd.read_table(tsv)
        # Every clip path must be unique (duplicated().any() is True when
        # any path appears twice in the tsv).
        assert not df.path.duplicated().any()
        # Speaker IDs, deduplicated, define the classification classes.
        self.classes = df.client_id.drop_duplicates().tolist()
        self.n_classes = len(self.classes)
        # Prepare the underlying torchaudio dataset.
        self.transform = transform
        # Split the tsv path into directory part and file name.
        data_dirs = tsv.split('/')
        dataset = torchaudio.datasets.COMMONVOICE('/'.join(data_dirs[:-1]),
                                                  tsv=data_dirs[-1])
        # (The url/version arguments of COMMONVOICE are deprecated.)
        # Deterministic train/validation split.
        n_train = int(len(dataset) * split_rate)
        n_val = len(dataset) - n_train
        torch.manual_seed(torch.initial_seed())  # fix the split seed
        train_dataset, val_dataset = random_split(dataset, [n_train, n_val])
        self.dataset = train_dataset if train else val_dataset

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, idx):
        # Each item is (waveform tensor, sample rate, tsv-row dict).
        x, fs, dictionary = self.dataset[idx]
        if fs != self.fs:
            # Resample to the target rate.
            x = torchaudio.transforms.Resample(fs)(x)
        # MFCC is applied here so inference sees the same features;
        # any further transform is applied on top.
        x = torchaudio.transforms.MFCC(log_mels=True)(x)
        if self.transform:
            x = self.transform(x)
        # Feature tensor + label = index of this clip's speaker ID.
        return x, self.classes.index(dictionary['client_id'])
train_dataset = SpeechDataset(DATA_ROOT, train=True)
val_test_dataset = SpeechDataset(DATA_ROOT, train=False)
# train_dataset[0][0].shape (データ数, MFCC次元, 時間サンプル)
#print(train_dataset[0][0].dim())
# 前処理の定義
Squeeze2dTo1d = lambda x: torch.squeeze(x, -3)
# lambda 引数:返り値
# squeeze 次元削除(ここではデータ数の部分が削除され、2次元の画像になっている)
"""
音声の前処理
1. frame_period=25, hop_length=12.5で40次のMFCCに変換
2. バッチ学習では時間長を合わせる必要がある→長さを10秒(12.5ms*800)に揃える
(ほどんどの音声は10秒未満であるから対応可能、足りない場合は音声を繰り返してパディング)
3. ランダムな時間位置からランダムに2~4秒切り出す(切り出し時間位置を変えることでデータ拡張)
4. 拡大縮小して3秒に揃える(音の高低を変えずに時間軸方向を拡大縮小し、ピッチ変更によるデータの拡張)
"""
class CircularPad1dCrop:
    """Crop (or circularly pad) the last dimension to exactly `size` samples.

    Inputs shorter than `size` are tiled (repeated end-to-end) until long
    enough, which evens out the time length of variable-duration audio
    features before batching.
    """

    def __init__(self, size):
        self.size = size

    def __call__(self, x):
        # Tile enough copies along the last axis to cover `size` samples...
        copies = self.size // x.size()[-1] + 1
        tile_shape = (1,) * (x.dim() - 1) + (copies,)
        tiled = x.repeat(*tile_shape).clone()
        # ...then keep exactly the first `size` samples of that axis.
        # narrow(dim, start, length) slices `length` elements from `start`.
        return tiled.narrow(-1, 0, self.size)
# Frame counts: 800 = 10 s, 320 = 4 s, 240 = 3 s.
train_transform = transforms.Compose([CircularPad1dCrop(800), transforms.RandomCrop((40, random.randint(160, 320))),
                                      transforms.Resize((40, 240)), Squeeze2dTo1d])
test_transform = transforms.Compose([CircularPad1dCrop(240), Squeeze2dTo1d])
# Prepare the training and validation/test loaders.
batch_size = 32
if train_dataset:
    train_dataset.transform = train_transform  # attach the transform
    train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
else:
    n_epochs = 0  # no training data: skip the training loop entirely
if val_test_dataset:
    val_test_dataset.transform = test_transform  # attach the transform
    val_test_dataloader = DataLoader(val_test_dataset, batch_size=batch_size, shuffle=True)
# 学習モデル(1dCNN)
# Speaker-classification model (1-D CNN).
class SpeakerNet(nn.Module):
    """1-D CNN speaker classifier over 40-dim MFCC clips of 240 frames."""

    def __init__(self, n_classes):
        super().__init__()
        # Three conv+pool stages shrink time 240 -> 120 -> 60 -> 30,
        # then a 64-channel projection with dropout.
        feature_layers = [
            nn.BatchNorm1d(40),
            nn.Conv1d(40, 128, kernel_size=5, padding=2),
            nn.BatchNorm1d(128),
            nn.ReLU(inplace=True),
            nn.MaxPool1d(kernel_size=2),
            nn.Conv1d(128, 128, kernel_size=3, padding=1),
            nn.BatchNorm1d(128),
            nn.ReLU(inplace=True),
            nn.MaxPool1d(kernel_size=2),
            nn.Conv1d(128, 128, kernel_size=3, padding=1),
            nn.BatchNorm1d(128),
            nn.ReLU(inplace=True),
            nn.MaxPool1d(kernel_size=2),
            nn.Conv1d(128, 64, kernel_size=3, padding=1),
            nn.BatchNorm1d(64),
            nn.ReLU(inplace=True),
            nn.Dropout(),
        ]
        self.conv = nn.Sequential(*feature_layers)
        # Classifier head over the flattened (30 x 64) feature map.
        head_layers = [
            nn.Linear(30 * 64, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(1024, n_classes),
        ]
        self.fc = nn.Sequential(*head_layers)

    def forward(self, x):
        features = self.conv(x)
        flat = features.view(features.size(0), -1)
        return self.fc(flat)
"""
前処理、学習、検証、推論
train_dataset:学習用データセット
val_test_dataset:検証/テスト用データセット
n_classes:分類クラス数
n_epochs:学習エポック数
load_pretrained_state:学習済みモデルを使う場合の.pthファイルパス
test_last_hidden_layer:テストデータの推論結果に最終隠れ層を使う
show_progress:エポックの学習状況をprintする
show_chart:結果をグラフ表示する
save_state:test_acc > 0.9の時のtest_loss最小更新時のstateを保存
(load_pretrained_stateで使う)
"""
# --- Model setup ---
n_classes = None
if not n_classes:
    # Fall back to the class count discovered in the training dataset.
    assert train_dataset, 'train_dataset or n_classes must be a valid'
    n_classes = train_dataset.n_classes
model = SpeakerNet(n_classes)
load_pretrained_state = None
if load_pretrained_state:
    # Optionally resume from a saved state dict (.pth path).
    model.load_state_dict(torch.load(load_pretrained_state))
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters())
# --- Training ---
n_epochs = 50
show_progress = True
save_state = False
show_chart = False
test_last_hidden_layer=False
losses = []
accs = []
val_losses = []
val_accs = []
for epoch in range(n_epochs):
    # Training loop.
    running_loss = 0.0
    running_acc = 0.0
    for x_train, y_train in train_dataloader:
        optimizer.zero_grad()
        y_pred = model(x_train)
        loss = criterion(y_pred, y_train)
        loss.backward()
        running_loss += loss.item()
        pred = torch.argmax(y_pred, dim=1)
        running_acc += torch.mean(pred.eq(y_train).float())
        optimizer.step()
    running_loss /= len(train_dataloader)
    running_acc /= len(train_dataloader)
    losses.append(running_loss)
    accs.append(running_acc)
    # Validation loop.
    val_running_loss = 0.0
    val_running_acc = 0.0
    for val_test in val_test_dataloader:
        # Stop if the loader yields bare inputs (no labels).
        if not(type(val_test) is list and len(val_test) == 2):
            break
        x_val, y_val = val_test
        y_pred = model(x_val)
        val_loss = criterion(y_pred, y_val)
        val_running_loss += val_loss.item()
        pred = torch.argmax(y_pred, dim=1)
        val_running_acc += torch.mean(pred.eq(y_val).float())
        # NOTE(review): stepping the optimizer during validation re-applies
        # stale training gradients — looks like a bug; confirm.
        optimizer.step()
    val_running_loss /= len(val_test_dataloader)
    val_running_acc /= len(val_test_dataloader)
    # Checkpoint candidate: good accuracy and a new best validation loss.
    # NOTE(review): min(val_losses) raises on an empty list if the first
    # epoch already exceeds 0.9 accuracy — confirm.
    can_save = (val_running_acc > 0.9 and
                val_running_loss < min(val_losses))
    val_losses.append(val_running_loss)
    val_accs.append(val_running_acc)
    if show_progress:
        print(f'epoch:{epoch}, loss:{running_loss:.3f},'
              f'acc:{running_acc:.3f}, val_loss:{val_running_loss:.3f},'
              f'val_acc:{val_running_acc:.3f}, can_save:{can_save}')
    # Save checkpoint; {epoch:02} zero-pads the epoch number.
    if save_state and can_save:
        torch.save(model.state_dict(), f'model/0001-epoch{epoch:02}.pth')
# --- Charts ---
if n_epochs > 0 and show_chart:
    fig, ax = plt.subplots(2)
    ax[0].plot(losses, label='train loss')
    ax[0].plot(val_losses, label='val loss')
    ax[0].legend()
    # NOTE(review): the second axes re-plots the losses instead of the
    # accuracies (accs / val_accs) — confirm which was intended.
    ax[1].plot(losses, label='train loss')
    ax[1].plot(val_losses, label='val loss')
    ax[1].legend()
    plt.show()
# --- Inference ---
if not val_test_dataset:
    # NOTE(review): 'break' is invalid outside a loop; this script was
    # likely extracted from an enclosing function/loop — confirm.
    break
if test_last_hidden_layer:
    # Drop the final Linear so predictions become hidden-layer embeddings.
    model.fc = model.fc[:-1]
y_preds = torch.Tensor()
for val_test in val_test_dataloader:
    x_test = val_test[0] if type(val_test) is list else val_test
    y_pred = model.eval()(x_test)
    if not test_last_hidden_layer:
        y_pred = torch.argmax(y_pred, dim=1)
    y_preds = torch.cat([y_preds, y_pred])
y_preds.detach()
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.utils.data import TensorDataset, DataLoader, random_split
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import numpy as np
from glob import glob
from pathlib import Path
import os
import librosa
from librosa import load
from librosa.feature import melspectrogram
from scipy import signal
from scipy.io import loadmat, savemat, wavfile
from natsort import natsorted
import matplotlib.pyplot as plt
import time
from params import *
import pytorch_lightning as pl
from pytorch_lightning import Trainer
""" from net import SpeachEmbedder, MyRNN
#from net_lightning import SpeachEmbedder
from make_mspec import LoadMspec
from dataset import MyDataset, MyDataset2
np.set_printoptions(threshold=10)
# あとで調べる
def make_stft_args(frame_period=5, fs=16000, nperseg=None, window='hann', **kwargs):
nshift = fs * frame_period // 1000
if nperseg is None:
nperseg = nshift * 4
noverlap = nperseg - nshift
dct = dict(window=window, nperseg=nperseg, noverlap=noverlap)
dct["fs"] = fs
return dct
"""
"""
やってること
'./rensyu/data/jvs_ver1/'の下にある.DS_Store以外のフォルダ名をリスト化
後ろに'/parallel100/wav24kHz16bit'をくっつけてwavの読み込み
speaker_idxは話者ラベルに使用
Tensorのlistのmspecの長さを揃える
"""
"""
parser = get_params()
args = parser.parse_args()
path = './rensyu/data/jvs_ver1/'
train_dir = '/parallel100/wav24kHz16bit'
save_dir = '/parallel100/mspec_train'
dir_speaker = [filename for filename in natsorted(os.listdir(path)) if not filename.startswith('.')]
speaker_classes = len(dir_speaker)
mspecs, mspecs_len, speaker_label = LoadMspec(path, dir_speaker, save_dir)
#mean = torch.mean(torch.stack(mspecs))
#import pdb; pdb.set_trace()
num_layers = 1
n_hidden = 80
batch_size = 10
n_epoch = 3
lr = 0.001
weight_decay = 1e-6
model_save_path = './rensyu/tutorial/model_param.pt'
use_cuda = torch.cuda.is_available()
def train(model, optimizer, datas, labels, lengths, save_dir):
if use_cuda:
model = model.cuda()
datas = nn.utils.rnn.pad_sequence(datas, batch_first=True)
labels = torch.LongTensor(labels).flatten()
dataset = MyDataset2(datas, labels, lengths)
train_rate = 0.8
train_size = int(len(dataset) * train_rate)
val_size = len(dataset) - train_size
train_dataset, val_dataset = random_split(dataset, [train_size, val_size])
#print(train_dataset[0])
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
val_loader = DataLoader(val_dataset, batch_size=batch_size, shuffle=False)
dataset_loaders = {"train": train_loader, "validation": val_loader}
# training loop
criterion = nn.CrossEntropyLoss()
loss_history = {"train": [], "validation": []}
print("Start Training...")
start_time = time.time()
for epoch in range(1, n_epoch+1):
for phase in ["train", "validation"]:
if phase == "train":
model.train()
elif phase == "validation":
model.eval()
running_loss = 0
print('phase:', phase)
for data, label, lengths in dataset_loaders[phase]:
episode_start_time = time.time()
# ここまでは合ってる
# sorted
#sorted_length, sorted_idx = torch.sort(lengths.view(-1), dim=0, descending=True)
sorted_length, sorted_idx = lengths.sort(0,descending=True)
#sorted_length = sorted_length.long().numpy()
data = torch.squeeze(torch.stack(data, 1), 1) # dataloaderでlistにされてるからここでstack
# stackだと重ねた次元は減らないからsqueeze
sorted_label = label[sorted_idx]
sorted_data = data[sorted_idx]
sorted_data = torch.squeeze(sorted_data, 1)
#import pdb; pdb.set_trace()
#h, c = model.init_hidden(len(sorted_length))
if use_cuda:
sorted_data, sorted_label = sorted_data.cuda(), sorted_label.cuda()
#h, c = h.cuda(), c.cuda()
optimizer.zero_grad()
output = model(sorted_data, sorted_length, phase)
loss = criterion(output, label)
if phase == "train":
print('---backward---')
loss.backward()
print('---optim---')
optimizer.step()
running_loss += loss.data.item()
print('Episode time: %1.3f Episode Loss: %1.3f' %(time.time() - episode_start_time, loss.item()))
loss_history[phase].append(running_loss / (len(dataset_loaders[phase])))
print('loss_history', loss_history[phase])
fig = plt.figure()
plt.plot(loss_history[phase])
plt.title('train_loss_history(phase:{0}, epoch:{1}'.format(phase,epoch))
plt.ylabel("loss")
plt.grid()
fig.savefig('./rensyu/tutorial/train_loss_history({0}).png'.format(phase))
torch.save(model.state_dict(), save_dir)
return loss_history
model = MyRNN(n_in=80, n_hidden=n_hidden, n_out=speaker_classes)
print(model)
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
if os.path.isfile(model_save_path):
model.load_state_dict(torch.load(model_save_path))
loss_history = train(model=model, optimizer=optimizer, datas=mspecs,
labels=speaker_label, lengths=mspecs_len, save_dir=model_save_path)
torch.save(model.state_dict(), model_save_path)
"""
""" lstm = nn.RNN(80, 5, batch_first=True)
packed_out, ht = lstm(mspecs_packed)
out, _ = pad_packed_sequence(packed_out)
print('output:',out)
print ('only last:',ht[-1])
"""
""" model = SpeachEmbedder(80, 16, speaker_classes, 2)
trainer = pl.Trainer(max_epochs=3)
trainer.fit(model, loader_train)
model_save_path = './rensyu/tutrial/model_param.pt'
torch.save(model.state_dict(), model_save_path)
"""
""" モデルのロード
model = SpeachEmbedder(80, 32, speaker_classes, 2)
model.load_state_dict(torch.load(model_save_path)) """
""" mspecs = nn.utils.rnn.pad_sequence(mspecs, batch_first=True)
mspecs (音声の数, 時間サンプル, mspecの値)
speaker_label = np.array(speaker_label, dtype='int')
speaker_label = speaker_label.reshape(-1,1)
"""
""" model = SpeachEmbedder(80, 16, speaker_classes, 5)
print(model)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
print('Train Start')
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
for epoch in range(1, epochs+1):
train(loader_train, model, optimizer, criterion, device, epochs, epoch)
"""
|
{"/train.py": ["/params.py"], "/make_mspec.py": ["/params.py"]}
|
20,792,887
|
esaki-ren/practice
|
refs/heads/main
|
/params.py
|
import argparse
def get_params():
    """Build the CLI argument parser for STFT, training and model settings.

    Returns:
        argparse.ArgumentParser: parser with three argument groups
        (STFT / training / model); call ``parse_args()`` on the result.
    """
    parser = argparse.ArgumentParser()
    # STFT parameters.
    stft_args = parser.add_argument_group('STFT parameters')
    stft_args.add_argument('-frame_period', default=5, type=int,
                           help='the length of stft frame')
    stft_args.add_argument('-fs', default=16000, type=int,
                           help='sampling frequency')
    stft_args.add_argument('-nperseg', default=None, type=int,
                           help='nperseg')
    stft_args.add_argument('-window', default='hann',
                           help='window function.')
    # Training parameters.
    training_args = parser.add_argument_group('training parameters')
    training_args.add_argument('-bn', '--batch_size', default=32, type=int,
                               help='training batch size')
    training_args.add_argument('-epoch', default=5, type=int,
                               help='total epoch')
    # BUG FIX: lr / weight decay previously had no type=, so command-line
    # values arrived as strings.
    training_args.add_argument('-lr', default=1e-3, type=float,
                               help='initial learning rate')
    # '-weight_decay' is a new alias for the original misspelled
    # '-wright_decay'; the attribute stays args.wright_decay for
    # backward compatibility.
    training_args.add_argument('-wright_decay', '-weight_decay',
                               default=1e-6, type=float,
                               help='weight decay')
    # Model parameters.
    model_args = parser.add_argument_group('model parameters')
    model_args.add_argument('-in_dim', default=80, type=int,
                           help='input dimension')
    model_args.add_argument('-h_dim1', default=80, type=int,
                           help='hidden layer1 hidden dimension')
    return parser
|
{"/train.py": ["/params.py"], "/make_mspec.py": ["/params.py"]}
|
20,792,888
|
esaki-ren/practice
|
refs/heads/main
|
/dataset.py
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.utils.data import TensorDataset, DataLoader
class MyDataset(torch.utils.data.Dataset):
    """Dataset wrapping pre-computed features, labels and sequence lengths.

    ``transform``, when given, is applied to the *whole* data container and
    the result's first element is indexed afterwards (matching the original
    behaviour).
    """

    def __init__(self, data, label, lengths, transform=None):
        self.transform = transform
        self.data = data
        self.data_num = len(data)
        self.label = label
        self.length = lengths

    def __len__(self):
        return self.data_num

    def __getitem__(self, idx):
        if self.transform:
            sample = self.transform(self.data)[0][idx]
        else:
            sample = self.data[idx]
        target = self.label[idx]
        seq_len = torch.LongTensor(self.length[idx])
        return sample, target, seq_len
class MyDataset2(torch.utils.data.Dataset):
    """Dataset of (data, label, length) triples.

    Lengths supplied as a list / 1-D ndarray are reshaped to column vectors
    so each item's length indexes as a 1-element sequence.
    """
    def __init__(self, datas, labels, lengths):
        self.datas = datas
        self.labels = labels
        if isinstance(lengths, list):
            lengths = np.array(lengths)[:, None]
        elif isinstance(lengths, np.ndarray):
            lengths = lengths[:, None]
        self.lengths = lengths
    def __getitem__(self, idx):
        # NOTE(review): the trailing comma wraps the sample in a 1-tuple;
        # the training loop compensates with stack/squeeze, so keep both in
        # sync if this is ever "fixed".
        data= self.datas[idx],
        label = self.labels[idx]
        length = torch.LongTensor(self.lengths[idx])
        return data, label, length
    def __len__(self):
        return len(self.datas)
|
{"/train.py": ["/params.py"], "/make_mspec.py": ["/params.py"]}
|
20,792,889
|
esaki-ren/practice
|
refs/heads/main
|
/make_mspec.py
|
import numpy as np
from glob import glob
import os
from pathlib import Path
import librosa
from librosa import load
from librosa.feature import melspectrogram
from scipy import signal
from scipy.io import loadmat, savemat, wavfile
from natsort import natsorted # 数字順にsort
import torch
import torch.nn as nn
from params import *
# Parse CLI arguments at import time (module-level side effect).
parser = get_params()
args = parser.parse_args()
# TODO: revisit later (original note: "あとで調べる")
def make_stft_args(frame_period=args.frame_period, fs=args.fs, nperseg=args.nperseg,
                   window=args.window, **kwargs):
    """Build the keyword-argument dict for scipy.signal.stft.

    Defaults come from the module-level parsed ``args`` (bound once at
    definition time). Extra **kwargs are accepted but ignored.

    Returns:
        dict: window / nperseg / noverlap / fs for signal.stft.
    """
    # Hop size in samples: frame_period is in milliseconds.
    nshift = fs * frame_period // 1000
    if nperseg is None:
        # Default analysis window: 4x the hop size (75% overlap).
        nperseg = nshift * 4
    noverlap = nperseg - nshift
    dct = dict(window=window, nperseg=nperseg, noverlap=noverlap)
    dct["fs"] = fs
    return dct
def WavLoad(wav_path, fs=16000, fmin=0, fmax=7600, nmels=80, **kwargs):
    """Load a wav file and extract its peak-normalised mel spectrogram.

    Uses the module-level ``stft_args`` for the STFT configuration.
    Finer settings (overlap/hop length) are not yet exposed.

    Returns:
        tuple: (waveform, sample_rate, power_spectrogram.T, mel_spectrogram.T)
        — the transposed spectrograms are laid out (time, frequency/mel).
    """
    wav, fs = load(wav_path, sr=fs)
    # Peak-normalise to just below full scale.
    wav = wav/np.max(np.abs(wav))
    wav *= 0.99
    _, t, Zxx = signal.stft(wav, **stft_args)
    pspec = np.abs(Zxx)
    mspec = melspectrogram(sr=fs, S=pspec, n_mels=nmels, fmin=fmin, fmax=fmax)
    return wav, fs, pspec.T, mspec.T
# 転置させるとpspec, mspecは(時間, spec)
def LoadDataName(data_path):
    """Return the sorted paths (as strings) of every entry in *data_path*."""
    root = Path(data_path).expanduser()
    return [str(entry) for entry in sorted(root.glob('*'))]
def MakeMspec(path, dir_speaker, train_dir, save_dir):
    """Extract mel spectrograms for each speaker's wavs and save them as
    .npy files under ``path + speaker + save_dir``.

    Speakers whose save directory already exists are skipped entirely.
    NOTE(review): the accumulated lists are never returned (see the note at
    the end) — labels etc. are rebuilt later by LoadMspec.
    """
    wavs = []
    pspecs = []
    mspecs = []
    mspec_len = []
    speaker_label = np.empty(1)
    speaker_idx = 0
    for speaker in dir_speaker:
        load_dirs = LoadDataName(path + speaker + train_dir)
        save_dirs = path + speaker + save_dir
        print(path + speaker + train_dir)
        if os.path.exists(save_dirs):
            # Already extracted for this speaker.
            continue
        else:
            for idx, l_d in enumerate(load_dirs):
                wav, fs, pspec, mspec = WavLoad(l_d)
                wavs.append(wav)
                pspecs.append(pspec)
                mspecs.append(mspec)
                mspec_len.append(len(mspec))
                speaker_label = np.append(speaker_label, speaker_idx)
                os.makedirs(save_dirs, exist_ok=True)
                np.save(save_dirs+'/'+str(idx)+'mspec', mspec)
        speaker_idx += 1
    # return wavs, pspecs, mspecs, mspec_len, speaker_label
    # (not needed: labels are rebuilt when the .npy files are loaded?)
def LoadMspec(path, dir_speaker, save_dir):
    """Load the mel spectrograms saved by MakeMspec.

    Returns:
        tuple: (list of Tensor mspecs, list of frame counts,
        np.ndarray of per-mspec speaker indices).
    """
    mspecs = []
    mspec_len = []
    speaker_label = []
    speaker_idx = 0
    for idx, speaker in enumerate(dir_speaker):
        # Natural sort keeps '2mspec' before '10mspec'.
        load_dirs = natsorted(LoadDataName(path + speaker + save_dir))
        print(path + speaker + save_dir)
        for l_d in load_dirs:
            mspec = np.load(l_d, allow_pickle=True)
            mspecs.append(torch.from_numpy(mspec))
            mspec_len.append(mspec.shape[0])
            # np.append promotes the initial list to an ndarray.
            speaker_label = np.append(speaker_label, speaker_idx)
        speaker_idx += 1
    return mspecs, mspec_len, speaker_label
# Module-level pipeline: build the STFT config and extract features on import.
stft_args = make_stft_args()
path = './rensyu/data/jvs_ver1/'
train_dir = '/parallel100/wav24kHz16bit'
save_dir = '/parallel100/mspec_train'
# Speaker folders, naturally sorted, skipping hidden entries like .DS_Store.
dir_speaker = [filename for filename in natsorted(os.listdir(path)) if not filename.startswith('.')]
speaker_classes = len(dir_speaker)
MakeMspec(path, dir_speaker, train_dir, save_dir)
# mspecs, mspecs_len, speaker_label = LoadMspec(path, dir_speaker, save_dir)
"""
その他に仕様変更したいこと
できれば辞書形式?で話者ごとに格納したい """
|
{"/train.py": ["/params.py"], "/make_mspec.py": ["/params.py"]}
|
20,814,065
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/attrUtils.py
|
import numpy as np
import math
from maya import cmds
def getAnimationValues(frame_num):
    """Return (channel, values) pairs sweeping each rotation channel.

    ry/rz sweep to +/-120 degrees and rx to +/-180, each as *frame_num*
    evenly spaced radian samples starting at 0.
    """
    sweeps = []
    for channel, degrees in (("ry", 120), ("ry", -120),
                             ("rz", 120), ("rz", -120),
                             ("rx", 180), ("rx", -180)):
        sweeps.append((channel, np.linspace(0, math.radians(degrees), frame_num)))
    return sweeps
def getConnectionPairs(node, source=True, destination=True):
    """Collect (sourcePlug, destinationPlug) pairs for a Maya node.

    listConnections with connections=True returns an interleaved flat list
    [thisNodePlug, otherNodePlug, ...]; the zip-slicing below re-pairs it so
    every tuple is ordered (source, destination).
    """
    connectionPairs = list()
    if source:
        # Incoming connections: the other node's plug is the source.
        conns = cmds.listConnections(node, plugs=True, connections=True, destination=False)
        if conns:
            connectionPairs.extend(zip(conns[1::2], conns[::2]))
    if destination:
        # Outgoing connections: this node's plug is the source.
        conns = cmds.listConnections(node, plugs=True, connections=True, source=False)
        if conns:
            connectionPairs.extend(zip(conns[::2], conns[1::2]))
    return connectionPairs
class DisconnectCtx():
    """Context manager that disconnects the given nodes' connections on
    entry and restores them all on exit."""
    def __init__(self, nodes, source=True, destination=True):
        # Snapshot every (source, destination) plug pair up front.
        self.connectionPairs = list()
        for node in nodes:
            self.connectionPairs.extend(getConnectionPairs(node, source, destination))
    def __enter__(self):
        for srcAttr, destAttr in self.connectionPairs:
            cmds.disconnectAttr(srcAttr, destAttr)
    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Reconnect even when the body raised.
        for srcAttr, destAttr in self.connectionPairs:
            cmds.connectAttr(srcAttr, destAttr)
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,066
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/enviroment/constants.py
|
# Output channels and the multipliers applied to the model's predictions
# (consumed by the rlvp node's compute()).
ACTIONS_MULTIPLIERS = [("ty", .2), ("tz", .5)]
# Per-channel penalty weights — presumably used by the reward function; verify.
ACTIONS_PENALTY = {"ty": 1, "tz": 0}
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,067
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/rl_node/rlvp.py
|
import sys
from rl_vp.enviroment import observation, constants
from tensorflow.keras.models import load_model
from maya.api import OpenMaya as om
import os
def maya_useNewAPI():
"""
The presence of this function tells Maya that the plugin produces, and
expects to be passed, objects created using the Maya Python API 2.0.
"""
pass
nodeName = 'rlvp'
nodeID = om.MTypeId(0x60011)
class RLVPNode(om.MPxNode):
    '''DG node that drives its output attributes with a Keras model's
    predictions computed from three joint matrices.'''
    def __init__(self):
        om.MPxNode.__init__(self)
        self.filePath = ''   # path of the currently loaded model file
        self. model = None   # cached Keras model (None until loaded)
    def getOrLoad(self, filePath):
        '''Load and cache the Keras model at *filePath*.

        Returns a falsy value on success (or when the cached model is
        already current) and an error-message string on failure.
        '''
        if filePath == self.filePath:
            return False
        if not os.path.exists(filePath):
            return f"Unable to find file {filePath}"
        self.filePath = filePath
        try:
            self.model = load_model(filePath)
        except Exception as e:
            return str(e)
        return False
    def compute(self, plug, datablock):
        '''Evaluate the model and write one prediction per output attr.'''
        if plug not in RLVPNode.outputs:
            return None
        filePath_h = datablock.inputValue(RLVPNode.filePath)
        error = self.getOrLoad(filePath_h.asString())
        if error:
            om.MGlobal.displayError(error)
            return None
        if self.model is None:
            return None
        start_mtx = datablock.inputValue(RLVPNode.start).asMatrix()
        mid_mtx = datablock.inputValue(RLVPNode.mid).asMatrix()
        end_mtx = datablock.inputValue(RLVPNode.end).asMatrix()
        parent_mtx = datablock.inputValue(RLVPNode.parent_matrix).asMatrix()
        initialized = datablock.inputValue(RLVPNode.initialized).asShort()
        rest_vect = None
        if not initialized:
            # First evaluation: capture the rest pose vector, latch the flag.
            rest_vect = observation.getRestVector([start_mtx, mid_mtx, end_mtx], parent_mtx)
            rest_h = datablock.outputValue(RLVPNode.rest_vector)
            rest_h.set3Double(rest_vect.x, rest_vect.y, rest_vect.z)
            init_h = datablock.outputValue(RLVPNode.initialized)
            init_h.setShort(1)
        else:
            rest_vect = datablock.inputValue(RLVPNode.rest_vector).asVector()
        print(rest_vect)  # NOTE(review): debug print left in hot path
        obs = observation.getObservation([start_mtx, mid_mtx, end_mtx], parent_mtx, rest_vect)
        action = self.model.predict(obs.reshape(1, obs.size))
        # Scale each predicted action by its multiplier and the rest length.
        for act, attr, mult in zip(action[0], RLVPNode.outputs, RLVPNode.outputs_mult):
            attr_h = datablock.outputValue(attr)
            print(float(act*mult*rest_vect.length()))
            attr_h.setDouble(float(act*mult*rest_vect.length()))
            datablock.setClean(attr)
        # The plug was successfully computed
        return self
def creator():
    """Node factory Maya calls when creating an rlvp node."""
    return RLVPNode()
def initialize():
    """Create the node's attributes and declare their dependencies."""
    # (1) Setup attributes
    nAttr = om.MFnNumericAttribute()
    tAttr = om.MFnTypedAttribute()
    mAttr = om.MFnMatrixAttribute()
    kDouble = om.MFnNumericData.kDouble  # Maya's float type
    kString = om.MFnData.kString
    kBool = om.MFnNumericData.kBoolean
    RLVPNode.filePath = tAttr.create('filePath', 'fp', kString)
    tAttr.usedAsFilename = True
    RLVPNode.start = mAttr.create( "start", "start")
    mAttr.hidden = False
    mAttr.keyable = False
    RLVPNode.mid = mAttr.create( "mid", "mid")
    mAttr.hidden = False
    mAttr.keyable = False
    RLVPNode.end = mAttr.create( "end", "end")
    mAttr.hidden = False
    mAttr.keyable = False
    RLVPNode.parent_matrix = mAttr.create( "parentMatrix", "parentMatrix")
    mAttr.hidden = False
    mAttr.keyable = False
    restX = nAttr.create( "restX", "restX", kDouble )
    restY = nAttr.create( "restY", "restY", kDouble )
    restZ = nAttr.create( "restZ", "restZ", kDouble )
    RLVPNode.rest_vector = nAttr.create( "restVector", "restVector", restX, restY, restZ )
    nAttr.hidden = False
    nAttr.keyable = False
    RLVPNode.initialized = nAttr.create("initialized", "initialized", kBool, 0)
    # BUG FIX: these flags were previously set through tAttr (the string
    # attribute fn set) although 'initialized' was created with nAttr.
    nAttr.hidden = False
    nAttr.keyable = False
    RLVPNode.outputs = list()
    RLVPNode.outputs_mult = list()
    for attr, mult in constants.ACTIONS_MULTIPLIERS:
        # (2) Setup the output attributes
        RLVPNode.outputs.append(nAttr.create(attr, attr, kDouble))
        RLVPNode.outputs_mult.append(mult)
        nAttr.writable = False
        nAttr.storable = False
        nAttr.readable = True
    # (3) Add the attributes to the node
    RLVPNode.addAttribute(RLVPNode.filePath)
    RLVPNode.addAttribute(RLVPNode.start)
    RLVPNode.addAttribute(RLVPNode.mid)
    RLVPNode.addAttribute(RLVPNode.end)
    RLVPNode.addAttribute(RLVPNode.parent_matrix)
    RLVPNode.addAttribute(RLVPNode.rest_vector)
    RLVPNode.addAttribute(RLVPNode.initialized)
    for attr in RLVPNode.outputs:
        RLVPNode.addAttribute(attr)
        RLVPNode.attributeAffects(RLVPNode.start, attr)
        RLVPNode.attributeAffects(RLVPNode.mid, attr)
        RLVPNode.attributeAffects(RLVPNode.end, attr)
        # BUG FIX: compute() reads parentMatrix, so outputs must also be
        # marked dirty when it changes.
        RLVPNode.attributeAffects(RLVPNode.parent_matrix, attr)
        RLVPNode.attributeAffects(RLVPNode.filePath, attr)
def initializePlugin(obj):
    """Maya plugin entry point: register the rlvp node type."""
    plugin = om.MFnPlugin(obj, "Autodesk", "1.0", "Any")
    try:
        plugin.registerNode(nodeName, nodeID, creator, initialize)
    except Exception:
        # Report and re-raise so Maya marks the plugin load as failed.
        sys.stderr.write("Failed to register node\n")
        raise
def uninitializePlugin(obj):
    """Maya plugin exit point: deregister the rlvp node type."""
    plugin = om.MFnPlugin(obj)
    try:
        plugin.deregisterNode(nodeID)
    except Exception:
        # Best-effort: log and swallow so unload continues.
        sys.stderr.write("Failed to deregister node\n")
        pass
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,068
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/math_utils/sampling.py
|
import numpy as np
import math
import random
import itertools
# https://www.cs.ubc.ca/~rbridson/docs/bridson-siggraph07-poissondisk.pdf
def getDistanceSquare(a, b):
    """Squared Euclidean distance between points *a* and *b*."""
    diff = np.abs(a - b)
    return np.sum(diff ** 2)
def isValid(candidate, radius, minBBox, maxBBox, grid, samples):
    """Check whether *candidate* may be accepted as a new Poisson-disc sample.

    Rejects candidates outside the bounding box, or with an existing sample
    closer than *radius* inside the surrounding 5x5x5 grid-cell window.
    """
    # check if the candidate is outside of border
    if (candidate < minBBox).any() or (candidate > maxBBox).any():
        return False
    cellSize = radius/math.sqrt(2)
    # Grid cell containing the candidate.
    cell = ((candidate-minBBox)/cellSize).astype(int)
    # Search window of +/-2 cells, clamped to the grid bounds.
    minCell = cell - 2
    maxCell = cell + 2
    searchEnd = np.array(grid.shape)
    searchStart = np.zeros_like(searchEnd)
    minIndices = np.where(minCell > 0)
    searchStart[minIndices] = minCell[minIndices]
    maxIndices = np.where(np.less_equal(maxCell, searchEnd))
    searchEnd[maxIndices] = maxCell[maxIndices]
    indices = [range(a, b) for a, b in zip(searchStart, searchEnd)]
    for index in itertools.product(*indices):
        # Grid stores (sample index + 1); 0 means "empty cell".
        pointIndex = grid[index]-1
        if pointIndex != -1:
            distanceSquare = getDistanceSquare(samples[int(pointIndex)], candidate)
            if distanceSquare < radius**2:
                return False
    return True
def poissionDiscSampling(radius=30, bundingBox=((0, 0, 0), (1000, 1000, 1000)), sampleRetrial=1, maxTrials=100000):
    """Bridson-style 3-D Poisson-disc sampling inside *bundingBox*.

    Args:
        radius: minimum distance between accepted samples.
        bundingBox: ((min xyz), (max xyz)) sampling volume.
        sampleRetrial: candidates tried per active point before retiring it.
        maxTrials: hard cap on total candidate generations.

    Returns:
        list of np.array sample positions.
    """
    samples = []
    activeList = []
    cellSize = radius/math.sqrt(2)
    bboxMin = np.array(bundingBox[0])
    bboxMax = np.array(bundingBox[1])
    sampleRegionSize = bboxMax-bboxMin
    # Acceleration grid: each cell stores (sample index + 1); 0 = empty.
    grid = np.zeros(np.ceil(sampleRegionSize/cellSize).astype(int))
    trial = 0
    # Seed the search from the centre of the volume.
    center = (sampleRegionSize*.5)+bboxMin
    activeList = [center]
    while len(activeList) > 0:
        activeIndex = random.randrange(len(activeList))
        spawnCentre = activeList[activeIndex]
        candidateAccepted = False
        for _ in range(sampleRetrial):
            # NOTE(review): (sin, cos, tan) is not a uniform direction on the
            # sphere (tan is unbounded) — confirm this bias is intended.
            angle = random.random() * math.pi * 2
            direction = np.array([math.sin(angle), math.cos(angle), math.tan(angle)])
            candidate = spawnCentre + direction * random.uniform(radius, 2*radius)
            trial += 1
            if isValid(candidate, radius, bboxMin, bboxMax, grid, samples):
                samples.append(candidate)
                coord = (candidate-bboxMin)/cellSize
                grid[int(coord[0])][int(coord[1])][int(coord[2])] = len(samples)
                candidateAccepted = True
                activeList.append(candidate)
                break
        if candidateAccepted is False:
            # No valid candidate around this point; retire it.
            activeList.pop(activeIndex)
        if trial > maxTrials:
            print("Limit trials reached!!")
            break
    return samples
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,069
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/enviroment/rewards.py
|
import numpy as np
from maya import cmds
from maya.api import OpenMaya as om
from rl_vp.math_utils import vector_math as vm
def getTriangleBindData(joints, triangles, positions):
    """Bind each triangle to the joint segment its vertices sit closest to.

    Args:
        joints: ordered joint names forming a chain.
        triangles: (n, 3) array of vertex indices.
        positions: per-vertex positions (at least xyz per entry).

    Returns:
        dict: triangle index -> {"vertices": [...],
                                 "segment": [startJoint, endJoint]}
    """
    joints_pos = [cmds.xform(a, q=1, ws=1, t=1) for a in joints]
    vertices = set(triangles.flatten())
    # For every referenced vertex, remember its closest segment's index pair.
    vertex_data = np.zeros((max(vertices)+1, 2), np.int16)
    for vertex in vertices:
        closest_point, closest_seg = vm.getCloserSegment(positions[vertex][:3], joints_pos)
        vertex_data[vertex] = closest_seg
    triangle_bind = dict()
    for i, tri in enumerate(triangles):
        # Majority vote across the triangle's three vertices.
        start_seg_idnx = np.bincount(vertex_data[tri][:, 0]).argmax()
        end_seg_idnx = np.bincount(vertex_data[tri][:, 1]).argmax()
        tri_segment = [joints[start_seg_idnx], joints[end_seg_idnx]]
        triangle_bind[i] = {"vertices": list(tri), "segment": tri_segment}
    return triangle_bind
def getTrianglesVolume(positions, triangle_bind):
    """Compute each bound triangle's projected volume at the current pose.

    Returns:
        dict: triangle index -> volume (see vm.getTriangleVolume).
    """
    np_positions = np.array(positions)[:, :3]
    result = dict()
    for i, data in triangle_bind.items():
        # Current world positions of the bound segment's joints.
        start = cmds.xform(data.get("segment")[0], q=1, ws=1, t=1)
        end = cmds.xform(data.get("segment")[1], q=1, ws=1, t=1)
        vol = vm.getTriangleVolume(start, end, np_positions[data.get("vertices")])
        result[i] = vol
    return result
def getAgentSegmentSide(agent_pos, drivers_pos):
    """Tell which side of the limb's middle plane the agent sits on.

    NOTE(review): assumes *agent_pos* / *drivers_pos* items are MVector-like
    (uses .normal(), .length(), and '*' as a dot product) — confirm.
    """
    ba = drivers_pos[1]-drivers_pos[0]
    ca = drivers_pos[2]-drivers_pos[0]
    # Middle-plane vector: project the segment onto the total limb
    # extension and move that projection into the correct position.
    m = drivers_pos[0]+(ca.normal()*((ba * ca) / ca.length()))
    # If the limb is straight, the previous projection has length 0.
    if m.length() == 0:
        return .1
    # Projection between the middle plane and the agent position.
    pb = agent_pos-drivers_pos[1]
    proj = (m.normal()*((pb * m) / m.length()))
    # Check whether the projection points the same way as the closer segment.
    direction = ((proj-agent_pos).normal()*ba.normal())
    return direction
def getAgentCollisionValue(agent_pos, drivers_pos):
    """Signed height of the agent relative to the limb's intersection plane.

    The sign indicates which side of the plane spanned by the bent limb the
    agent is on; when the chain is straight, falls back to an endpoint
    distance difference.
    """
    np_drivers_pos = np.array(drivers_pos)[:, :3]
    np_agent_pos = om.MPoint(agent_pos)
    ba = np_drivers_pos[1] - np_drivers_pos[0]
    ca = np_drivers_pos[2] - np_drivers_pos[0]
    # Middle-plane anchor: project the first bone onto the full limb span
    # and move that projection into the correct position.
    middle = np_drivers_pos[0] + vm.projectVector(ca, ba)
    if vm.magnitude(middle) == 0:  # drivers straight
        distToStart = vm.magnitude(np_drivers_pos[0] - list(np_agent_pos)[:3])
        distToEnd = vm.magnitude(np_drivers_pos[-1] - list(np_agent_pos)[:3])
        return distToEnd-distToStart
    # Build the matrix that defines the intersection plane.
    x_axis = vm.normalize(np_drivers_pos[1]-middle)
    y_axis = vm.normalize(np_drivers_pos[-1]-middle)
    z_axis = np.cross(x_axis, y_axis)
    y_axis = np.cross(z_axis, x_axis)  # re-orthogonalise y
    matrx = list()
    for ax in [x_axis, y_axis, z_axis]:
        matrx.extend(ax)
        matrx.append(0)
    matrx.extend(middle)
    matrx.append(1)
    # Height = the y component of the agent position in plane space.
    mmtx = om.MMatrix(matrx)
    realtive_pos = np_agent_pos*mmtx.inverse()
    return realtive_pos[1]
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,070
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/math_utils/vector_math.py
|
import numpy as np
def magnitude(x):
    """Euclidean (L2) norm of *x*."""
    vec = np.asarray(x)
    return np.linalg.norm(vec)
def normalize(x):
    """Return *x* scaled to unit length."""
    vec = np.array(x)
    return vec / np.linalg.norm(vec)
def pointToLineDistance(start, end, point):
    """Distance from point P to the segment AB (each an array-like)."""
    A = np.array(start)
    B = np.array(end)
    P = np.array(point)
    norm = np.linalg.norm
    # P coincides with an endpoint.
    if all(A == P) or all(B == P):
        return 0
    # Degenerate segment: plain point distance.
    if all(A == B):
        return norm(P - A)
    # Beyond A: the angle PAB is obtuse.
    if np.arccos(np.dot((P - A) / norm(P - A), (B - A) / norm(B - A))) > np.pi / 2:
        return norm(P - A)
    # Beyond B: the angle PBA is obtuse.
    if np.arccos(np.dot((P - B) / norm(P - B), (A - B) / norm(A - B))) > np.pi / 2:
        return norm(P - B)
    # Perpendicular distance via the cross-product area.
    return norm(np.cross(A - B, A - P)) / norm(B - A)
def closestPointInLine(start, end, point):
    """Project *point* onto the line through start->end, clamped at *end*.

    Note: there is no clamp at *start* — projections behind the start are
    returned as-is (matches the original behaviour).
    """
    a = np.array(start)
    b = np.array(end)
    p = np.array(point)
    direction = (b - a) / np.linalg.norm(b - a)
    t = (p - a) @ direction
    if t >= np.linalg.norm(b - a):
        return b
    return a + t * direction
def getCloserSegment(pos, segments_pos):
    """Find the consecutive segment in *segments_pos* closest to *pos*.

    Returns:
        tuple: (closest point on that segment, (start_index, end_index)).
    """
    closest_dist = None
    closest_seg = None
    closest_point = None
    for i, seg_pos in enumerate(segments_pos[:-1]):
        curr_dist = pointToLineDistance(seg_pos, segments_pos[i+1], pos)
        # NOTE(review): 'not closest_dist' is also true when the best
        # distance so far is exactly 0, letting a later segment steal the
        # match — confirm whether 'closest_dist is None' was intended.
        if not closest_dist or curr_dist < closest_dist:
            closest_dist = curr_dist
            closest_seg = (i, i+1)
            closest_point = closestPointInLine(seg_pos, segments_pos[i+1], pos)
    return closest_point, closest_seg
def featNorm(features):
    """Normalize *features* by their per-column mean and standard deviation.

    The returned mean/std allow exact denormalization via featDenorm.

    Args:
        features (np.array): unnormalized array, samples along axis 0.

    Returns:
        tuple: (normalizedFeatures, mean, standardDeviation)
    """
    mean = np.mean(features, axis=0)
    centered = features - mean
    std = np.std(centered, axis=0)
    # eps guards against division by zero for constant columns.
    return (centered / (std + np.finfo(np.double).eps), mean, std)
def featDenorm(featuresNorm, mean, std):
    """Invert featNorm: rescale by *std* then shift by *mean*.

    Args:
        featuresNorm (np.array): normalized array.
        mean (float): average of the original array elements.
        std (np.array): standard deviation of the original array.

    Returns:
        np.array: denormalized array.
    """
    return featuresNorm * std + mean
def easeInOutCubic(currentTime, start, end, totalTime):
    """Cubic ease-in / ease-out tween.

    Args:
        currentTime (float): current time (or position) of the tween.
        start (float): beginning value of the property.
        end (float): change between the beginning and destination value.
        totalTime (float): total time of the tween.

    Returns:
        float: interpolated value at *currentTime*.
    """
    # Map time onto [0, 2): first half eases in, second half eases out.
    t = currentTime / (totalTime / 2)
    if t < 1:
        return end / 2 * t ** 3 + start
    t -= 2
    return end / 2 * (t ** 3 + 2) + start
def getTriangleVolume(start, end, triangle_position):
    """project the triangle over the plane formed between the segment and the triangle normal
    and return the volume of that prism
    Args:
        start (np.array): line segment start
        end (np.array): line segment end
        triangle_position (list): list of np.array defining a triangle
            (NOTE(review): elements must support subtraction, i.e. np arrays,
            not plain lists — confirm with callers)
    Returns:
        float: projected triangle volume (area * mean height)
    """
    points = [np.array(a) for a in triangle_position]
    # axis along the segment, normalized
    x_axis = np.array(start) - np.array(end)
    x_axis = x_axis / np.linalg.norm(x_axis)
    # triangle normal (unnormalized); its length is twice the triangle area
    cross_vect = np.cross((triangle_position[2]- triangle_position[1]), (triangle_position[0]- triangle_position[1]))
    cross_lenght = np.linalg.norm(cross_vect)
    y_axis = cross_vect / cross_lenght
    # if the normal is parallel to the segment, fall back to a pyramid-like
    # estimate instead of building the projection frame
    if round(np.dot(y_axis, x_axis), 3) in [1, -1]:
        # projection = points.copy()
        # use the smaller of the summed vertex distances to either endpoint
        start_heights = sum([np.linalg.norm(a-start) for a in points])
        end_heights = sum([np.linalg.norm(a-end) for a in points])
        heights_mean = min([start_heights, end_heights])/len(points)
    else:
        # build an orthonormal frame: x along the segment, y re-orthogonalized
        z_axis = np.cross(y_axis, x_axis)
        y_axis = np.cross(z_axis, x_axis)
        norm_x = x_axis / np.linalg.norm(x_axis)
        norm_y = y_axis / np.linalg.norm(y_axis)
        norm_z = z_axis / np.linalg.norm(z_axis)
        # 4x4 row-major transform rooted at `start`
        root_mtx = np.zeros((4, 4))
        root_mtx[0][0:3] = norm_x
        root_mtx[1][0:3] = norm_y
        root_mtx[2][0:3] = norm_z
        root_mtx[3][0:3] = np.array(start)
        root_mtx[3][-1] = 1
        root_mtx_inv = np.linalg.inv(root_mtx)
        # homogeneous coordinates, expressed in the frame's local space
        points = [np.append(a, 1) for a in triangle_position]
        bind_pos = [np.dot(a, root_mtx_inv) for a in points]
        # mean height above the segment plane (local y component)
        heights_mean = sum([a[1] for a in bind_pos])/len(points)
    # cross_lenght/2 is the triangle area
    return (cross_lenght/2.0) * heights_mean
def getCloserIndex(point, points):
    """Index of the element of *points* nearest to *point*.

    Args:
        point (list): 3 values list Ex: [1,2,3]
        points (list): list of 3 values list Ex: [[1,2,3],[5,6,7],...]
    Returns:
        int: index of the closest element of the points list
    """
    deltas = np.asarray(points) - np.asarray(point)
    # squared euclidean distance per row; no sqrt needed for an argmin
    sq_distances = np.einsum('ij,ij->i', deltas, deltas)
    return np.argmin(sq_distances)
def projectVector(source, projection):
    """Vector projection of *projection* onto the direction of *source*."""
    src = np.array(source)
    proj = np.array(projection)
    # scalar component of proj along src, then scale the unit direction
    scale = (proj @ src) / magnitude(src)
    return normalize(src) * scale
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,071
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/plot.py
|
import numpy as np
import os
import matplotlib.pyplot as plt
def plot_learning_courve(scores, figure_file, mean_amount=100):
    """Plot the running average of *scores* and save it to *figure_file*.

    Args:
        scores (sequence): per-episode scores.
        figure_file (str): path of the image to write.
        mean_amount (int): size of the trailing averaging window.
    """
    # trailing window average for each episode index
    running_avg = np.array([np.mean(scores[max(0, idx - mean_amount):idx + 1])
                            for idx in range(len(scores))])
    plt.plot(range(len(scores)), running_avg)
    plt.title('Running average of previous {} scores'.format(mean_amount))
    plt.savefig(figure_file)
    plt.show()
# Script: plot the latest training run's per-agent reward curves.
folder = r"D:\dev\RL_Maya\tests"
# pick the most recently modified run folder
all_subdirs = [os.path.join(folder, d) for d in os.listdir(folder) if os.path.isdir(os.path.join(folder, d))]
latest_subdir = max(all_subdirs, key=os.path.getmtime)
#latest_subdir = os.path.join(folder, '2021_07_29_15_09_goblin_mtx_collision_joint1_disc_03')
"""
fileName = 'joint1_joint2_joint3_rwd.txt'
data_path = os.path.join(latest_subdir, fileName)
with open(data_path) as f:
    content = f.read().splitlines()
figure_file = '{}.png'.format(os.path.splitext(data_path)[0])
score = [float(a) for a in content]
plot_learning_courve(score, figure_file, mean_amount=1)
"""
fileName = '_agnt_rew.txt'
found_files = [a for a in os.listdir(latest_subdir) if a.endswith(fileName)]
if not found_files:
    raise BaseException(f"unable to find {fileName} on {latest_subdir}")
data_path = os.path.join(latest_subdir, found_files[0])
with open(data_path) as f:
    content = f.read().splitlines()
# group reward values per agent; lines look like "<value> : <agent name>"
locData = dict()
for data in content:
    parts = data.split(" : ")
    name = parts[-1]
    value = float(parts[0])
    locData.setdefault(name, []).append(value)
# NOTE(review): i and batch are unused leftovers
i = 0
batch = 5
for name, values in locData.items():
    print(name, values)
    figure_file = '{}.png'.format(name)
    plot_learning_courve(values, os.path.join(latest_subdir, figure_file), mean_amount=50)
    # NOTE(review): everything below the `continue` is unreachable dead code
    continue
    plt.plot(range(len(values)), values, label=name)
    plt.legend()
    plt.show()
    plt.savefig(os.path.join(latest_subdir, figure_file))
    plt.clf()
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,072
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/enviroment/observation.py
|
from maya.api import OpenMaya as om
from rl_vp.math_utils import vector_math as vm
def getObservation(drivers_mtx, agent_mtx, restVector):
    """Build the normalized observation vector for the RL agent.

    Standalone so it can be shared by the training enviroment and the
    runtime node.
    Args:
        drivers_mtx (list): driver world matrices; index 1 appears to be
            the animated driver — TODO confirm with callers.
        agent_mtx (om.MMatrix): agent world matrix.
        restVector (om.MVector): agent offset from the driver chain at rest.
    Returns:
        np.array: feature vector normalized with vm.featNorm.
    """
    observation = list()
    for drv_mtx in drivers_mtx:
        # agent translation expressed relative to this driver
        localMat = agent_mtx*drv_mtx.inverse()
        localTrf = om.MTransformationMatrix(localMat)
        rbd_lTr = localTrf.translation(om.MSpace.kObject)
        observation.extend([rbd_lTr.x, rbd_lTr.y, rbd_lTr.z])
    # orientation (as quaternion components) of driver index 1 only
    localTrf = om.MTransformationMatrix(drivers_mtx[1])
    rbd_lOri = localTrf.rotation(asQuaternion=True)
    observation.extend(rbd_lOri)
    # state = list()
    # observation.extend([curr_vector.x, curr_vector.y, curr_vector.z])
    # squared components of the rest offset, followed by the raw components
    observation.extend([restVector.x*restVector.x,
                        restVector.y*restVector.y,
                        restVector.z*restVector.z])
    observation.extend([restVector.x, restVector.y, restVector.z])
    # return np.array(state)
    # NOTE(review): featNorm normalizes over the whole flat feature list,
    # not per feature group — confirm this is intended
    featuresNorm, _, _ = vm.featNorm(observation)
    return featuresNorm
def getRestVector(drivers_mtx, agent_mtx):
    """Vector from the closest point on the driver chain to the agent.

    Args:
        drivers_mtx (list): driver world matrices.
        agent_mtx (om.MMatrix): agent world matrix.
    Returns:
        om.MVector: offset from the chain to the agent.
    """
    # elements 12:15 of a flattened 4x4 MMatrix hold the translation
    driver_positions = [list(mtx)[12:15] for mtx in drivers_mtx]
    agent_position = list(agent_mtx)[12:15]
    closest_point, _ = vm.getCloserSegment(agent_position, driver_positions)
    return om.MVector(om.MPoint(agent_position) - om.MPoint(closest_point))
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,073
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/transforms.py
|
from maya import cmds
import numpy as np
from rl_vp.math_utils import vector_math as vm
JOINT_METADATA = "isJoint"
def createAimedJoint(pos, joints):
    """Create a joint at *pos*, oriented toward the closest joint segment,
    and constrain its group between the first two joints.

    Args:
        pos (sequence): world position for the new joint.
        joints (list): joint chain names.
            NOTE(review): segments_pos[2] below assumes at least 3 joints.
    Returns:
        str: the created joint name.
    """
    # clear the selection so cmds.joint() doesn't parent under it
    cmds.select(d=True)
    segments_pos = [np.array(cmds.xform(a, q=1, ws=1, t=1)) for a in joints]
    closes_pnt, closerSeg = vm.getCloserSegment(pos, segments_pos)
    # build an orthonormal frame: z points from the chain to pos,
    # y follows the closest segment
    zAxis = vm.normalize(pos - closes_pnt)
    yAxis = vm.normalize(segments_pos[closerSeg[1]] - segments_pos[closerSeg[0]])
    xAxis = vm.normalize(np.cross(yAxis, zAxis))
    yAxis = vm.normalize(np.cross(zAxis, xAxis))
    jnt = cmds.joint()
    grp = cmds.group(jnt)
    # tag the group so it can be identified later
    cmds.addAttr(grp, longName=JOINT_METADATA, at="bool", keyable=False)
    # assemble the row-major 4x4 world matrix for the group
    matrix = list(xAxis)
    matrix.append(0)
    matrix.extend(yAxis)
    matrix.append(0)
    matrix.extend(zAxis)
    matrix.append(0)
    matrix.extend(pos)
    matrix.append(1)
    cmds.xform(grp, os=1, m=matrix)
    # parametric position of pos along the chain, used as constraint weight
    # NOTE(review): uses segments_pos[2] (third joint) but constrains only
    # joints[0]/joints[1] — confirm this is intended
    seg = np.array(segments_pos[2])-np.array(segments_pos[0])
    vect = np.array(pos)-np.array(segments_pos[0])
    w = (np.dot(vect, seg) / np.linalg.norm(seg)**2)
    cmds.parentConstraint(joints[0], joints[1], grp, mo=1)
    cmds.parentConstraint(joints[0], grp, e=True, w=1-w)
    cmds.parentConstraint(joints[1], grp, e=True, w=w)
    return jnt
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,074
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/enviroment/env.py
|
import math
import random
import logging
import numpy as np
from maya import cmds
from maya.api import OpenMaya as om
from rl_vp.maya_utils import mUtils
from rl_vp.math_utils import vector_math as vm
from rl_vp.enviroment import observation as obs_utils
from rl_vp.enviroment import rewards as rew_utils
from rl_vp.enviroment import constants
logger = logging.getLogger(__name__)
DRIVER_ATTR = {"rx":(-120, 120), "ry":(-40, 120), "rz":(-120, 120)}
class Enviroment():
    """Reinforcement-learning environment backed by a live Maya scene.

    The *agent* transform is moved through setAction relative to a chain
    of *driver* joints; observations and rewards are computed from node
    matrices and positions.
    NOTE(review): the 'Enviroment' spelling is kept — it is the public name.
    """
    def __init__(self, agent, drivers, maxFrame=20, hasAnimation=False, driver_attrs=DRIVER_ATTR):
        # agent/drivers are node names, wrapped for plug access
        self.agent = mUtils.MNode(agent)
        self.drivers = [mUtils.MNode(a) for a in drivers]
        self.maxFrame = maxFrame
        self.action_space = len(constants.ACTIONS_MULTIPLIERS)
        self.drivers_attrs = driver_attrs
        self.hasAnimation = hasAnimation
        # self.mesh = mUtils.MNode(mesh)
        # self.mfn = self.mesh.getShape().getBestFn()
        # triangle_counts, triangle_vertices = self.mfn.getTriangles()
        # self.all_triangles = np.array(triangle_vertices).reshape(-1, 3)
        self.reInit(agent)
    def reInit(self, agent):
        """Re-bind the environment to *agent* and cache its rest state."""
        self.agent = mUtils.MNode(agent)
        self.currentFrame = 0
        self.animations = self.createAnimations()
        self.agent_pos = None
        self.agent_mtx = None
        self.rest_distance = 1.0
        self.restVector = om.MVector()
        self.drivers_mtx = list()
        self.drivers_pos = list()
        self.closest_seg = list()
        self.reset(getState=False)
        self.updateStatesCache()
        _, self.closest_seg = self.getCloserSegment()
        # offset (and its length) from the driver chain to the agent at rest
        self.restVector = self.getAgentToSegmentVector()
        self.rest_distance = vm.magnitude(self.restVector)
        curr_coll = rew_utils.getAgentCollisionValue(self.agent_pos, self.drivers_pos)
        # which side of the driver chain the agent starts on (+1/-1)
        self.startSide = math.copysign(1, curr_coll)
        state = self.getState()
        self.observation_space = state.size
        # positions = np.array(self.mfn.getPoints(space=om.MSpace.kWorld))[:, :3]
        # self.vertices = skinCluster.getInfluencesVertices(self.mesh, [str(self.agent)], 0.05)
        # self.triangles = meshes.getVertextriangles(self.all_triangles, self.vertices)
        # self.bind_data = rew_utils.getTriangleBindData(self.drivers, self.triangles, positions)
        # self.bind_volume = rew_utils.getTrianglesVolume(positions, self.bind_data)
        return
    def step(self, action, addFrame=True):
        """Apply *action*, advance one frame, return (obs, reward, done, info)."""
        if self.hasAnimation:
            cmds.currentTime(self.currentFrame)
            # play the pre-built ramp on the animated driver (drivers[1])
            for attr, value in self.animations[self.currentFrame]:
                if hasattr(self.drivers[1], attr):
                    plug = getattr(self.drivers[1], attr)
                    plug.setFloat(value)
                else:
                    raise ValueError("Unable to create animation")
        self.setAction(action)
        observation = self.getState()
        reward = self.getReward()
        logger.debug(f"Action {action} Reward {reward}")
        done = False
        info = ""
        if addFrame:
            self.currentFrame += 1
        if self.currentFrame >= self.maxFrame:
            done = True
        return observation, reward, done, info
    def reset(self, getState=True):
        """Rewind to frame 0, zero the action; optionally return the state."""
        self.currentFrame = 0
        if self.hasAnimation:
            cmds.currentTime(self.currentFrame)
        else:
            self.resetJointDriver()
        self.setAction([0]*self.action_space)
        if not getState:
            return
        return self.getState()
    def createAnimations(self):
        """Per-frame list of (attr, radians) linearly ramping to a random max."""
        animations = dict()
        all_animations = dict()
        for attr, limits in self.drivers_attrs.items():
            max_value = math.radians(random.randrange(limits[0], limits[1]))
            all_animations[attr] = np.linspace(0, max_value, self.maxFrame)
        for x in range(self.maxFrame):
            animations[x] = list()
            for attr in self.drivers_attrs.keys():
                animations[x].append((attr, all_animations[attr][x]))
        """
        for frame in range(self.maxFrame):
            frame_values = list()
            for attr, limits in self.drivers_attrs.items():
                value = math.radians(random.randrange(limits[0], limits[1]))
                frame_values.append((attr, value))
            animations[frame] = frame_values
        """
        return animations
    def resetJointDriver(self):
        """Zero every animated attribute on the driven joint (drivers[1])."""
        for attr in self.drivers_attrs.keys():
            if not hasattr(self.drivers[1], attr):
                continue
            plug = getattr(self.drivers[1], attr)
            plug.setFloat(0)
    def setAction(self, action):
        """Write the policy output to the agent plugs, scaled by rest distance."""
        for act, attr in zip(action, constants.ACTIONS_MULTIPLIERS):
            plug = getattr(self.agent, attr[0])
            plug.set(float(attr[1]*act*self.rest_distance))
            # plug.set(float(attr[1]*act))
    def updateStatesCache(self):
        """Refresh cached agent/driver matrices, positions and current vector."""
        self.agent_pos = self.agent.getPosition()
        self.agent_mtx = self.agent.getMatrix()
        self.drivers_mtx = [a.getMatrix() for a in self.drivers]
        self.drivers_pos = [a.getPosition() for a in self.drivers]
        if self.closest_seg:
            self.curr_vector = self.getAgentToSegmentVector()
    def getAgentToSegmentVector(self):
        """Vector from the cached closest driver segment to the agent."""
        closestPnt = vm.closestPointInLine(self.drivers_pos[self.closest_seg[0]],
                                           self.drivers_pos[self.closest_seg[1]],
                                           self.agent_pos)
        return om.MPoint(self.agent_pos)-om.MPoint(closestPnt)
    def getState(self):
        """Return the normalized observation for the current scene state."""
        self.updateStatesCache()
        return obs_utils.getObservation(self.drivers_mtx, self.agent_mtx, self.restVector)
    def getPoseRwd(self):
        """Reward in (0, 1] for keeping the rest offset's length and direction."""
        rewards = list()
        # deviation from the rest offset length
        delta_dist = self.restVector.length()-self.curr_vector.length()
        # deviation from the rest direction
        # NOTE(review): MVector * MVector appears to be a dot product — confirm
        dot_p = 1-(self.curr_vector.normal()*self.restVector.normal())
        rewards = delta_dist+dot_p
        return np.exp(-3 * (rewards ** 2))
        # return np.exp(-3 * rewards)
    def getCollisionReward(self):
        """Small constant reward; steep penalty once the agent crosses sides."""
        rew = .1
        curr_coll = rew_utils.getAgentCollisionValue(self.agent_pos, self.drivers_pos)
        curr_side = math.copysign(1, curr_coll)
        if curr_side != self.startSide:
            # crossed to the other side of the chain: exponential penalty
            rew = 1-np.exp(abs(curr_coll)/(self.rest_distance/4.0))
        return rew
    def getGasPenalty(self):
        """Penalty proportional to how much the agent plugs are displaced."""
        penalty = 0
        values = list()
        for attr, multipl in constants.ACTIONS_PENALTY.items():
            plug = getattr(self.agent, attr)
            curr_val=abs(plug.get())
            if curr_val > 0.0:
                values.append((curr_val/self.rest_distance)*multipl)
        penalty = sum(values)
        # penalty is clamped to [-2, 0]; tiny penalties are rounded to 0
        if penalty > 2:
            return -2
        elif penalty > .01:
            return penalty*-1
        return .0
    def getReward(self):
        """Total reward: pose + collision + gas penalty."""
        pose_rew = self.getPoseRwd()
        coll_rew = self.getCollisionReward()
        gas_rew = self.getGasPenalty()
        logger.debug(f"Pose Rew {pose_rew} Collision Rew {coll_rew} Gas Penalty {gas_rew}")
        rew = pose_rew + coll_rew + gas_rew
        return rew
    def getCloserSegment(self):
        """(closest point, (i, i+1)) of the driver segment nearest the agent."""
        return vm.getCloserSegment(self.agent_pos, self.drivers_pos)
    def render(self):
        # the Maya viewport already shows the scene; nothing to render here
        pass
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,075
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/train.py
|
import os
import logging
import numpy as np
from rl_vp.ppo import ppo_simple
from rl_vp.enviroment.env import Enviroment
from rl_vp.math_utils import sampling
from rl_vp.math_utils import vector_math as vm
from maya import cmds
from datetime import datetime
from tensorflow import keras
logger = logging.getLogger(__name__)
# episode simulation and nn hyper params
BATCH_SIZE = 1024
GOAL_REWARD = .98
GOAL_ESPISODES = 10
def getModelPath(name=""):
    """Create and return a timestamped folder for this training run.

    Args:
        name (str): optional suffix appended to the folder name.
    Returns:
        str: path of the created model folder.
    """
    test_path = r'D:\dev\RL_Maya\tests'
    date_str = datetime.now().strftime('%Y_%m_%d_%H_%M')
    model_folder = os.path.join(test_path, date_str)
    if name:
        model_folder += f"_{name}"
    # bugfix: os.mkdir fails when the parent folder is missing and the
    # exists/mkdir pair was racy; makedirs with exist_ok handles both
    os.makedirs(model_folder, exist_ok=True)
    return model_folder
def createTrainAgents(joints=['joint1', 'joint2', 'joint3'],
                      bundingBox=((-4, -5, -4), (4, 5, 4)),
                      radius=1):
    """Scatter oriented locators around the joint chain to act as agents.

    NOTE(review): mutable default argument `joints` — harmless here since it
    is never mutated, but worth replacing with None + default inside.
    Args:
        joints (list): joint chain names.
        bundingBox (tuple): ((minX,minY,minZ),(maxX,maxY,maxZ)) sampling box.
        radius (float): poisson-disc minimum distance between samples.
    Returns:
        list: created locator names.
    """
    positions = sampling.poissionDiscSampling(radius=radius, bundingBox=bundingBox, sampleRetrial=5)
    # easing parameters for the constraint weight ramp
    start = 0
    end = 1
    totalTime = 1
    segments_pos = [np.array(cmds.xform(a, q=1, ws=1, t=1)) for a in joints]
    maxY = (segments_pos[-1][1] - segments_pos[0][1])
    locators = list()
    for pos in positions:
        closes_pnt, closerSeg = vm.getCloserSegment(pos, segments_pos)
        # orthonormal frame: z from the chain to the sample, y along the segment
        zAxis = vm.normalize(pos - closes_pnt)
        yAxis = vm.normalize(segments_pos[closerSeg[1]]-segments_pos[closerSeg[0]])
        xAxis = vm.normalize(np.cross(yAxis, zAxis))
        zAxis = vm.normalize(np.cross(xAxis, yAxis))
        loc = cmds.spaceLocator()[0]
        locators.append(loc)
        grp = cmds.group(loc)
        # assemble the row-major 4x4 world matrix for the group
        matrix = list(xAxis)
        matrix.append(0)
        matrix.extend(yAxis)
        matrix.append(0)
        matrix.extend(zAxis)
        matrix.append(0)
        matrix.extend(pos)
        matrix.append(1)
        cmds.xform(grp, os=1, m=matrix)
        # eased blend weight based on the sample's height along the chain
        t = ((pos[1] + segments_pos[0][1]) + maxY)/maxY
        topW = vm.easeInOutCubic(t, start, end, totalTime)
        lowW = 1.0 - topW
        cmds.parentConstraint(joints[0], joints[1], grp, mo=1)
        cmds.parentConstraint(joints[0], grp, e=True, w=lowW)
        cmds.parentConstraint(joints[1], grp, e=True, w=topW)
    return locators
def train(drivers, agents, name="", n_trains=8, n_episodes=16, epochs=32, batchMax=50, maxFrame=100):
    """PPO training loop over many randomly placed agent locators.

    Args:
        drivers (list): driver joint names.
        agents (list): agent locator names (shuffled in place each round).
        name (str): suffix for the output folder.
        n_trains (int): number of outer train rounds.
        n_episodes (int): episodes collected per agent per round.
        epochs (int): fit epochs for critic/actor.
        batchMax (int): max number of agents sampled per round.
        maxFrame (int): episode length in frames.
    """
    model_folder = getModelPath(name)
    backup_folder = os.path.join(model_folder, 'backup')
    if not os.path.exists(backup_folder):
        os.makedirs(backup_folder)
    FILE_NAME = "_".join(drivers)
    rwdFile = os.path.join(model_folder, '{}_epRwd.txt'.format(FILE_NAME))
    # truncate the score/agent reward logs for this run
    score_file = os.path.join(model_folder, '{}_rwd.txt').format(FILE_NAME)
    with open(score_file, 'w') as fd:
        fd.write("")
    agnt_file = os.path.join(model_folder, '{}_agnt_rew.txt').format(FILE_NAME)
    with open(agnt_file, 'w') as fd:
        fd.write("")
    env = Enviroment(agents[0], drivers, maxFrame)
    ppoAgent = ppo_simple.Agent(env, rwdFile, rwdDiscount=0.5, hidden_size=1024, num_layers=1)
    rew_history = list()
    for tr_n in range(n_trains):
        # get all agents data --------------
        np.random.shuffle(agents)
        agnt_to_train = agents[:batchMax]
        all_states = np.array([]).reshape(0, ppoAgent.num_states).astype(np.float32)
        all_actions = np.array([]).reshape(0, ppoAgent.num_actions).astype(np.float32)
        # NOTE(review): both names start aliased to the same empty array;
        # harmless because np.vstack below rebinds each one separately
        all_real_rwds = all_rwds = np.array([]).reshape(0, 1).astype(np.float32)
        for curr_agent in agnt_to_train:
            # isolate the current agent in the viewport
            cmds.hide(agents)
            cmds.showHidden(curr_agent)
            # re initi with random agent
            env.reInit(curr_agent)
            states, rwds, actions, real_rewd = ppoAgent.get_batch(n_episodes, tr_n)
            all_states = np.vstack([all_states, states])
            all_actions = np.vstack([all_actions, actions])
            all_rwds = np.vstack([all_rwds, rwds])
            all_real_rwds = np.vstack([all_real_rwds, real_rewd])
        # randomize = np.arange(len(all_states))
        # np.random.shuffle(randomize)
        # all_states = all_states[randomize]
        # all_actions = all_actions[randomize]
        # all_rwds = all_rwds[randomize]
        # all_real_rwds = all_real_rwds[randomize]
        # Train the model ------------------
        # get instance of early stopping callback, it stop if the model doesnt learn
        early_stop_patient = keras.callbacks.EarlyStopping(patience=8)
        # train value nn to approximate rewards for given states
        ppoAgent.critic.fit(all_states, all_rwds, validation_split=0.1,
                            verbose=2, callbacks=[early_stop_patient],
                            epochs=epochs, batch_size=BATCH_SIZE)
        # get the approximate value for all states
        all_values = ppoAgent.critic.predict(all_states)
        # evaluate how advantageous an action is
        all_advg = np.maximum(0, all_rwds - all_values)
        all_advg /= np.max(all_advg)
        # add a advantage dimension for each action else fit will fail
        all_advg_dim = all_advg.copy()
        for a in range(env.action_space-1):
            all_advg_dim = np.append(all_advg_dim, all_advg, axis=1)
        # train policy net
        ppoAgent.actor.fit([all_states, all_advg_dim], all_actions,
                           verbose=2, epochs=epochs, batch_size=BATCH_SIZE)
        rewd_mean = np.mean(all_real_rwds)
        rew_history.append(rewd_mean)
        with open(score_file, 'a') as fd:
            fd.write(f"{rewd_mean}\n")
        # NOTE(review): curr_agent here is the last agent of the inner loop,
        # yet the mean covers all agents — confirm the log format is intended
        with open(agnt_file, 'a') as fd:
            fd.write(f"{rewd_mean} : {curr_agent}\n")
        ppoAgent.result_model.save(os.path.join(model_folder, f'{FILE_NAME}_{tr_n:02d}.h5'))
        # tf.saved_model.save(ppoAgent.result_model, os.path.join(model_folder, f'{FILE_NAME}_{tr_n:02d}.h5'))
        ppoAgent.actor.save(os.path.join(backup_folder, f'{FILE_NAME}_actor_{tr_n:02d}.h5'))
        ppoAgent.critic.save(os.path.join(backup_folder, f'{FILE_NAME}_critic_{tr_n:02d}.h5'))
        #if len(rew_history) > GOAL_ESPISODES and (np.array(rew_history[GOAL_ESPISODES*-1:]) >= GOAL_REWARD).all():
        #    logger.debug(np.array(rew_history[GOAL_ESPISODES*-1:]))
        #    logger.info(f"Convergence of {GOAL_REWARD} Reached at train {tr_n}!")
        #    return
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,076
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/meshes.py
|
from .mUtils import mNode
import numpy as np
def getFacesVertices(mesh):
    """Returns the index of the vertex that conform each face
    Args:
        mesh (str): mesh transform or shape name
    Returns:
        list of list: vertex indices for each face ex: [[1,2,3,4],[2,3,5,6],...]
    """
    shape = mNode.MNode(mesh).getShape()
    fn = shape.getBestFn()
    counts, flat_indices = fn.getVertices()
    faces = list()
    cursor = 0
    # slice the flat index buffer into one list per face
    for count in counts:
        faces.append(list(flat_indices[cursor:cursor + count]))
        cursor += count
    return faces
def getVerticesFaces(faceVertices):
    """Invert a face->vertices table into a vertex->faces table.

    Given a list with all the vertex indices for each face, returns, for
    each vertex, the set of face indices connected to it.
    Args:
        faceVertices (list): vertex indices per face.
    Returns:
        list: a set of face indices for each vertex, ordered by vertex index.
    """
    # dict keyed by vertex so we never need to guess the output size
    vertexFaces = dict()
    for faceId, faceVtx in enumerate(faceVertices):
        for vertexId in faceVtx:
            vertexFaces.setdefault(vertexId, set()).add(faceId)
    # emit the face sets ordered by ascending vertex index
    return [vertexFaces[idx] for idx in sorted(vertexFaces)]
def nearVertices(vertex, vertexFaces, faceVertices):
    """Return all surrounding vertex indices (the vertex itself included).

    Args:
        vertex (int): vertex index.
        vertexFaces (list): face ids connected to each vertex.
        faceVertices (list): vertex ids that conform each face.
    Returns:
        set: all vertex indices sharing a face with *vertex*.
    """
    # union of the vertex sets of every face touching this vertex
    return set().union(*(faceVertices[face] for face in vertexFaces[vertex]))
def getNearVerticesDistances(vertex, points, vertexFaces, faceVertices):
    """Euclidean distance from *vertex* to each neighbouring vertex.

    Args:
        vertex (int): vertex index.
        points (np.array): vertex positions, indexable by vertex id.
        vertexFaces (list): face ids connected to each vertex.
        faceVertices (list): vertex ids that conform each face.
    Returns:
        dict: neighbour vertex id -> distance.
    """
    neighbours = nearVertices(vertex, vertexFaces, faceVertices)
    return {nbr: np.linalg.norm(points[vertex] - points[nbr]) for nbr in neighbours}
def getGeodesicDistances(start, verticesList, points, vertexFaces, faceVertices):
    """Approximate along-edge (geodesic) distances from *start* to the
    vertices in *verticesList*.

    Args:
        start (int): source vertex index.
        verticesList (iterable): vertex subset to measure within.
        points (np.array): vertex positions.
        vertexFaces (list): face ids connected to each vertex.
        faceVertices (list): vertex ids that conform each face.
    Returns:
        dict: vertex id -> accumulated edge distance from *start*.
    """
    distances = dict()
    vertices_list = list(verticesList)
    if start not in vertices_list:
        vertices_list.append(start)
    # neighbour distances per vertex, restricted to the requested subset
    for vtx in vertices_list:
        dist = getNearVerticesDistances(vtx, points, vertexFaces, faceVertices)
        distances[vtx] = dict([(k, v) for k, v in dist.items() if k in vertices_list])
    geodesic_dist = {start: 0}
    geodesic_dist.update(distances[start])
    # iteratively grow the reached front one ring at a time
    # NOTE(review): this is a relaxation pass, not Dijkstra — results can
    # depend on iteration order and may not be the true minimum
    for vtx in vertices_list:
        curr_dist = geodesic_dist.copy()
        for indx, dist in geodesic_dist.items():
            near_dist = dict([(k, v+dist) for k, v in distances[indx].items() if k not in curr_dist])
            curr_dist.update(near_dist)
        geodesic_dist.update(curr_dist)
    return geodesic_dist
def getConnectedVertices(vertices, vertexFaces, faceVertices):
    """Map each vertex in *vertices* to its surrounding vertex indices.

    Args:
        vertices (iterable): vertex indices to query.
        vertexFaces (list): face ids connected to each vertex.
        faceVertices (list): vertex ids that conform each face.
    Returns:
        dict: vertex id -> set of neighbouring vertex ids.
    """
    return {vertex: nearVertices(vertex, vertexFaces, faceVertices)
            for vertex in vertices}
def getVertextriangles(all_triangles, vertices):
    """Triangles whose three corners all belong to *vertices*.

    Args:
        all_triangles (np.array): (n, 3) array of vertex indices.
        vertices (sequence): allowed vertex indices.
    Returns:
        np.array: the rows of all_triangles fully contained in *vertices*.
    """
    fully_contained = np.isin(all_triangles, vertices).all(axis=1)
    return all_triangles[fully_contained]
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,077
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/mUtils/mPlug.py
|
from maya.api import OpenMaya as om
import numbers
import logging
_logger = logging.getLogger(__name__)
INT_DATA = [om.MFnNumericData.kShort, om.MFnNumericData.kInt, om.MFnNumericData.kLong, om.MFnNumericData.kByte]
FLOAT_DATA = [om.MFnNumericData.kFloat, om.MFnNumericData.kDouble, om.MFnNumericData.kAddr]
class MPlug(om.MPlug):
"""wrapper of OpenMaya.MPlug to easier the set and get of the values
Inheritance:
om.MPlug
"""
def set(self, inValue):
"""
Sets the given plug's value to the passed in value.
Args:
node (str): node name
inValue (_Type_): Any value of any data type.
Raises:
logger.error: the provide argument has wrong data type for the plug
"""
plugAttribute = self.attribute()
apiType = plugAttribute.apiType()
_logger.debug("Setting {} type {} as {}".format(self.info, plugAttribute.apiTypeStr, inValue))
# Float Groups - rotate, translate, scale
if apiType in [om.MFn.kAttribute3Double, om.MFn.kAttribute3Float]:
if self.isCompound:
if isinstance(inValue, list) or isinstance(inValue, tuple):
for c in xrange(self.numChildren()):
MPlug(self.child(c)).set(inValue[c])
elif type(inValue) in [om.MEulerRotation, om.MVector, om.MPoint]:
MPlug(self.child(0)).set(inValue.x)
MPlug(self.child(1)).set(inValue.y)
MPlug(self.child(2)).set(inValue.z)
else:
_logger.error('{0} :: Passed in value ({1}) is {2}. Needs to be type list.'.format(
self.info, inValue, type(inValue)))
# Distance
elif apiType in [om.MFn.kDoubleLinearAttribute, om.MFn.kFloatLinearAttribute]:
if isinstance(inValue, numbers.Number):
value = om.MDistance(inValue, om.MDistance.kCentimeters)
self.setMDistance(value)
else:
_logger.error('{0} :: Passed in value ({1}) is {2}. Needs to be type number.'.format(
self.info, inValue, type(inValue)))
# Angle
elif apiType in [om.MFn.kDoubleAngleAttribute, om.MFn.kFloatAngleAttribute]:
if isinstance(inValue, numbers.Number):
value = om.MAngle(inValue, om.MAngle.kDegrees)
self.setMAngle(value)
else:
_logger.error('{0} :: Passed in value ({1}) is {2}. Needs to be type number.'.format(
self.info, inValue, type(inValue)))
# Typed - matrix WE DON'T HANDLE THIS CASE YET!!!!!!!!!
elif apiType == om.MFn.kTypedAttribute:
pType = om.MFnTypedAttribute(plugAttribute).attrType()
if pType == om.MFnData.kMatrix:
if isinstance(inValue, om.MPlug):
pass
else:
plugNode = self.node()
MFnTrans = om.MFnTransform(plugNode)
sourceMatrix = om.MTransformationMatrix(inValue)
MFnTrans.set(sourceMatrix)
# String
elif pType == om.MFnData.kString:
value = inValue
self.setString(value)
# MATRIX
elif apiType == om.MFn.kMatrixAttribute:
if isinstance(inValue, om.MPlug):
# inValue must be a MPlug!
sourceValueAsMObject = om.MFnMatrixData(
inValue.asMObject()).object()
self.setMObject(sourceValueAsMObject)
elif isinstance(inValue, om.MMatrix):
mtx_data = om.MFnMatrixData()
mtx_data.create()
mtx_data.set(inValue)
self.setMObject(mtx_data.object())
else:
_logger.error('Value object is not an MPlug or MMatrix')
# Numbers
elif apiType == om.MFn.kNumericAttribute:
pType = om.MFnNumericAttribute(plugAttribute).numericType()
if pType == om.MFnNumericData.kBoolean:
if isinstance(inValue, bool) or isinstance(inValue, numbers.Number):
self.setBool(bool(inValue))
else:
_logger.error('{0} :: Passed in value ({1}) is {2}. Needs to be type bool.'.format(
self.info, inValue, type(inValue)))
elif pType in INT_DATA:
if isinstance(inValue, numbers.Number):
self.setInt(inValue)
else:
_logger.error('{0} :: Passed in value ({1}) is {2}. Needs to be type number.'.format(
self.info, inValue, type(inValue)))
elif pType in FLOAT_DATA:
if isinstance(inValue, numbers.Number):
self.setDouble(inValue)
else:
_logger.error('{0} :: Passed in value ({1}) is {2}. Needs to be type number.'.format(
self.info, inValue, type(inValue)))
# Enums TODO: set enum with string
elif apiType == om.MFn.kEnumAttribute:
self.setInt(inValue)
def get(self):
"""
Gets the value of the given plug.
Returns:
variable The value of the passed in node plug.
"""
pAttribute = self.attribute()
apiType = pAttribute.apiType()
# Float Groups - rotate, translate, scale; Compounds
if apiType in [om.MFn.kAttribute3Double, om.MFn.kAttribute3Float, om.MFn.kCompoundAttribute]:
result = []
if self.isCompound:
for c in xrange(self.numChildren()):
result.append(self.get(self.child(c)))
return result
# Distance
elif apiType in [om.MFn.kDoubleLinearAttribute, om.MFn.kFloatLinearAttribute]:
return self.asMDistance().asCentimeters()
# Angle
elif apiType in [om.MFn.kDoubleAngleAttribute, om.MFn.kFloatAngleAttribute]:
return self.asMAngle().asDegrees()
# TYPED
elif apiType == om.MFn.kTypedAttribute:
pType = om.MFnTypedAttribute(pAttribute).attrType()
# Matrix
if pType == om.MFnData.kMatrix:
return om.MFnMatrixData(self.asMObject()).matrix()
# String
elif pType == om.MFnData.kString:
return self.asString()
# MATRIX
elif apiType == om.MFn.kMatrixAttribute:
return om.MFnMatrixData(self.asMObject()).matrix()
# NUMBERS
elif apiType == om.MFn.kNumericAttribute:
pType = om.MFnNumericAttribute(pAttribute).numericType()
if pType == om.MFnNumericData.kBoolean:
return self.asBool()
elif pType in INT_DATA:
return self.asInt()
elif pType in FLOAT_DATA:
return self.asDouble()
# Enum
elif apiType == om.MFn.kEnumAttribute:
return self.asInt()
def __getitem__(self, key):
return MPlug(self.elementByLogicalIndex(key))
def connectTo(self, destination, force=False):
"""connect current plug to a destination plug
Args:
destination (MPlug): plug to recive the connection
force (bool, optional): if is true it will disconnect any input connections
to the destination. Defaults to False.
Raises:
ValueError: if the destination is not an MPlug
BaseException: if the destination has input connection and the force flag is False
"""
if not isinstance(destination, om.MPlug):
raise ValueError(
"{} is not an instance of MPlug".format(destination))
source = destination.source()
MDGMod = om.MDGModifier()
if source:
if source == self:
_logger.debug("skipping connection already made")
return
elif not force:
raise BaseException("{} is connected to {} try with force argument".format(destination.info,
source.info))
MDGMod.disconnect(source, destination)
MDGMod.connect(self, destination)
MDGMod.doIt()
def source(self):
"""if has input connection return it, else None
Returns:
MPlug, None: the input connections, else None
"""
mplug = super(MPlug, self).source()
if mplug.isNull:
return None
return MPlug(mplug)
def destinations(self):
"""return all the plugs where this plug is connected to
Returns:
list: all destinations MPlugs
"""
plugArr = super(MPlug, self).destinations()
if not plugArr:
return []
return [MPlug(a) for a in plugArr if not a.isNull]
def disconnectSource(self):
"""remove eny input connections that may have
"""
src_plug = self.source()
if src_plug.isNull:
return
MDGMod = om.MDGModifier()
MDGMod.disconnect(src_plug, self)
MDGMod.doIt()
def disconnectDestinations(self):
"""remove all the connections where this plug is connected to
"""
dest_plugs = self.destinations()
MDGMod = om.MDGModifier()
for dest_plug in dest_plugs:
if dest_plug.isNull:
return
MDGMod.disconnect(self, dest_plug)
MDGMod.doIt()
def __getattr__(self, name):
"""get the attribute as MPlug
Raises:
AttributeError: if the child attribute is not found
Returns:
mPlug.MPlug
"""
fn = om.MFnDependencyNode(self.node())
if not self.isCompound or not fn.hasAttribute(name):
raise AttributeError("MPlug {} doesn't have property called {}".format(self.info, name))
child = self.child(fn.attribute(name))
return MPlug(child)
def __str__(self):
"""override string operator to return node name
Returns:
str: node name
"""
return self.info
    def __unicode__(self):
        # python2 unicode conversion; mirrors __str__ and returns plug info
        return self.info
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,078
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/initialization.py
|
import ast
import math

import numpy as np
from maya import cmds
from maya.api import OpenMaya as om

from rl_vp.enviroment import rewards
from rl_vp.maya_utils import mUtils, skinCluster, meshes, attrUtils, transforms
from rl_vp.math_utils import vector_math as vm
from rl_vp.math_utils import fuzzyCMeans
# Names of the string attributes used to cache volume/cluster data on a mesh
DELTA_VOLUME_ATTR = "deltaVolume"
CLUSTER_VERTICES_ATTR = "clustersVertices"
CLUSTER_JOINTS_ATTR = "clustersJoints"
CLUSTER_WEIGHTS_ATTR = "clustersWeights"
# every metadata attribute, for bulk clearing
ALL_METADATA_ATTR = [DELTA_VOLUME_ATTR, CLUSTER_VERTICES_ATTR,
                     CLUSTER_JOINTS_ATTR, CLUSTER_WEIGHTS_ATTR]
def getVolumeChange(mesh, joints):
    """Accumulate per-vertex volume change of ``mesh`` while posing a joint.

    The triangles influenced by ``joints`` are measured at bind pose and
    after driving ``joints[1]`` through each test-animation extreme; the
    absolute per-triangle volume difference is summed onto the triangle's
    vertices.

    Args:
        mesh (str or MNode): deformed mesh to inspect.
        joints (list): influence joints; ``joints[1]`` is the one animated.
    Returns:
        np.ndarray: float16 array, one accumulated volume delta per vertex.
    """
    mesh = mUtils.MNode(mesh)
    mfn = mesh.getShape().getBestFn()
    positions = np.array(mfn.getPoints(space=om.MSpace.kWorld))[:, :3]
    triangle_counts, triangle_vertices = mfn.getTriangles()
    all_triangles = np.array(triangle_vertices).reshape(-1, 3)
    vertices = skinCluster.getInfluencesVertices(mesh, joints)
    triangles = meshes.getVertextriangles(all_triangles, vertices)
    bind_data = rewards.getTriangleBindData(joints, triangles, positions)
    bind_volume = rewards.getTrianglesVolume(positions, bind_data)
    deformation_volume = list()
    animations = attrUtils.getAnimationValues(2)
    # remember the bind transform so the joint can be restored afterwards
    default_mxt = cmds.xform(joints[1], q=1, ws=1, m=1)
    decendant = set(joints)
    for jnt in joints:
        decendant.update(cmds.listRelatives(jnt, ad=1, type="joint"))
    # disconnect incoming connections so the joints can be posed directly
    with attrUtils.DisconnectCtx(decendant, source=True, destination=False):
        for attr, values in animations:
            cmds.setAttr(f"{joints[1]}.{attr}", math.degrees(values[-1]))
            # NOTE(review): unlike the bind positions above, this keeps the
            # 4th (homogeneous) column of getPoints -- confirm
            # rewards.getTrianglesVolume tolerates/ignores it
            deformed_positions = np.array(mfn.getPoints(space=om.MSpace.kWorld))
            deformation_volume.append(rewards.getTrianglesVolume(deformed_positions, bind_data))
    cmds.xform(joints[1], ws=1, m=default_mxt)
    delta_volume_vtx = np.zeros((len(positions)), np.float16)
    for i, data in bind_data.items():
        for deformed in deformation_volume:
            delta = abs(bind_volume[i] - deformed[i])
            # spread the triangle's volume delta onto its vertices
            delta_volume_vtx[data.get("vertices")] += delta
    return delta_volume_vtx
def getVolumeLossVertices(delta_volume_vtx, tol=0.03):
    """Return the indices of vertices whose volume delta exceeds ``tol``.

    Args:
        delta_volume_vtx (iterable): per-vertex volume delta values.
        tol (float, optional): threshold above which a vertex counts as
            losing volume. Defaults to 0.03.
    Returns:
        list: indices of the offending vertices, in ascending order.
    """
    return [idx for idx, delta in enumerate(delta_volume_vtx) if delta > tol]
def createClustersJoints(mesh, joints, wrong_vtx, delta_volume_vtx, cluster_n=4):
    """Cluster the volume-losing vertices and create one aimed joint per cluster.

    Each affected vertex is described by (position, normal, volume delta);
    fuzzy C-means groups those samples and a joint is created at the most
    representative vertex of every cluster.

    Args:
        mesh (str or MNode): mesh being corrected.
        joints (list): joints the new cluster joints aim along.
        wrong_vtx (list): indices of the vertices with volume loss.
        delta_volume_vtx (np.ndarray): per-vertex volume deltas.
        cluster_n (int, optional): number of clusters/joints. Defaults to 4.
    Returns:
        tuple: (created joints list, (len(wrong_vtx) x cluster_n)
        membership-weight matrix).
    """
    mesh_node = mUtils.MNode(mesh)
    mfn = mesh_node.getShape().getBestFn()
    positions = np.array(mfn.getPoints(space=om.MSpace.kWorld))[:, :3]
    normals = np.array(mfn.getNormals())
    # default fuzzyCMeans arguments
    expo = 2
    min_err = 0.001
    max_iter = 500
    verbose = 0
    raw_data = list()
    for x in wrong_vtx:
        pos = positions[x]
        vol_normal = normals[x]
        # feature vector: xyz position + normal + volume delta
        data = list(pos)
        data.extend(vol_normal)
        data.append(delta_volume_vtx[x])
        raw_data.append(data)
    raw_data = np.array(raw_data)
    # m1 (hard cluster labels) is unused here; only soft memberships matter
    m, centroids, m1 = fuzzyCMeans.fcm(raw_data, cluster_n, expo, min_err, max_iter, verbose)
    cluster_joints = list()
    for j in range(cluster_n):
        # the vertex with the strongest membership represents the cluster
        indx = np.argmax(m.T[:, j])
        vtx = wrong_vtx[indx]
        cluster_joints.append(transforms.createAimedJoint(positions[vtx], joints))
    return cluster_joints, m.T
def applyClustersDefaultSkin(mesh, clusters_joints, clusters_vertices, clusters_weights, prune=0.1):
    """Skin the cluster joints with distance-faded cluster weights.

    For every affected vertex the fuzzy cluster weight is attenuated by the
    normalized geodesic distance to each cluster joint, pruned, merged into
    the existing skin weights, normalized and relaxed once.

    Args:
        mesh (str or MNode): skinned mesh to update.
        clusters_joints (list): joints created per cluster.
        clusters_vertices (np.ndarray): vertex indices covered by the clusters.
        clusters_weights (np.ndarray): per-vertex cluster membership weights.
        prune (float, optional): weights below this are zeroed. Defaults to 0.1.
    """
    mesh_node = mUtils.MNode(mesh)
    mfn = mesh_node.getShape().getBestFn()
    positions = np.array(mfn.getPoints(space=om.MSpace.kWorld))[:, :3]
    skin_cluster = skinCluster.getDeformersFromMesh(mesh_node)[0]
    faceVertices = meshes.getFacesVertices(mesh_node)
    vertexFaces = meshes.getVerticesFaces(faceVertices)
    vertices_distances = list()
    joints_positions = [np.array(cmds.xform(a, q=1, ws=1, t=1)) for a in clusters_joints]
    for joint_pos in joints_positions:
        # geodesic distances measured from the cluster vertex closest to the joint
        start_vtx = vm.getCloserIndex(joint_pos, positions[clusters_vertices])
        dist_dict = meshes.getGeodesicDistances(clusters_vertices[start_vtx], clusters_vertices, positions, vertexFaces, faceVertices)
        vertices_distances.append([dist_dict[a] for a in clusters_vertices])
    vtx_dist = np.array(vertices_distances).T
    max_dist = np.amax(vtx_dist, axis=1)
    influences = skinCluster.getSkinInfluences(skin_cluster)
    missing_inf = [a for a in clusters_joints if a not in influences.values()]
    if missing_inf:
        # register the new joints with zero weight so they can receive weights
        cmds.skinCluster(skin_cluster, e=1, wt=0, ai=missing_inf)
    new_weights = dict()
    for i, vtx in enumerate(clusters_vertices):
        normal_dist = 1 - (vtx_dist[i] / max_dist[i])
        dist_weights = clusters_weights[i] * normal_dist
        if prune:
            dist_weights *= np.greater_equal(dist_weights, prune)
        vtx_weights = dict()
        # fixed: the original reused `i` here, shadowing the outer loop index
        for j, w in enumerate(dist_weights):
            vtx_weights[clusters_joints[j]] = w
        new_weights[vtx] = vtx_weights
    weightMap = skinCluster.getSkinWeightsMapping(skin_cluster, normalize=False, tol=0.0001)
    for k, v in new_weights.items():
        weightMap[k].update(v)
    normal_weights = skinCluster.normalizeWeights(weightMap)
    relaxedMap = skinCluster.relaxWeightMapping(normal_weights, faceVertices, vertexFaces, iterations=1)
    skinCluster.setSkinWeights(skin_cluster, relaxedMap)
def setMetadataAttribute(node, attr_name, data, storable=False):
    """Store ``data`` (stringified) on ``node``, creating the attr if needed.

    Args:
        node (str): node receiving the metadata.
        attr_name (str): long name of the string attribute.
        data: any object; its str() form is written.
        storable (bool, optional): saved with the file. Defaults to False.
    """
    attr_path = f"{node}.{attr_name}"
    if not cmds.objExists(attr_path):
        cmds.addAttr(node, longName=attr_name, dt="string", storable=storable)
    cmds.setAttr(attr_path, str(data), type="string")
def clearMetadata(node):
    """Blank every known metadata string attribute present on ``node``."""
    existing = [a for a in ALL_METADATA_ATTR if cmds.objExists(f"{node}.{a}")]
    for attr_name in existing:
        cmds.setAttr(f"{node}.{attr_name}", "", type="string")
def initData(mesh, joints):
    """Recompute the per-vertex volume deltas and cache them on ``mesh``.

    Args:
        mesh (str): mesh to initialize.
        joints (list): influence joints driving the deformation.
    Returns:
        np.ndarray: the computed per-vertex volume deltas.
    """
    clearMetadata(mesh)
    delta_volume_vtx = getVolumeChange(mesh, joints)
    # NOTE(review): setMetadataAttribute already str()s its payload, so this
    # explicit str() double-wraps; kept so the stored text stays identical
    setMetadataAttribute(mesh, DELTA_VOLUME_ATTR, str(delta_volume_vtx.tolist()))
    return delta_volume_vtx
def selectAffectedVertices(mesh, tolerance=0.03, select=True):
    """Find (and optionally select) the vertices with cached volume loss.

    Args:
        mesh (str): mesh previously initialized with initData.
        tolerance (float, optional): volume-delta threshold. Defaults to 0.03.
        select (bool, optional): also select the vertices in the scene.
            Defaults to True.
    Raises:
        BaseException: when the mesh was never initialized.
    Returns:
        list: indices of the affected vertices.
    """
    if not cmds.objExists(f"{mesh}.{DELTA_VOLUME_ATTR}"):
        raise BaseException(f"unable to find {DELTA_VOLUME_ATTR} in {mesh} was not inited")
    delta_str = cmds.getAttr(f"{mesh}.{DELTA_VOLUME_ATTR}")
    if not delta_str:
        raise BaseException(f"unable to find {DELTA_VOLUME_ATTR} data in {mesh} was not inited")
    # ast.literal_eval only parses literals; the original eval() would
    # execute arbitrary code stored in the attribute
    delta_volume_vtx = np.array(ast.literal_eval(delta_str))
    wrong_vtx = getVolumeLossVertices(delta_volume_vtx, tolerance)
    setMetadataAttribute(mesh, CLUSTER_VERTICES_ATTR, wrong_vtx, storable=True)
    if select:
        cmds.select(cl=1)
        for i in wrong_vtx:
            cmds.select(f"{mesh}.vtx[{i}]", add=1)
    return wrong_vtx
def initClustersJoints(mesh, joints, cluster_n=4, tolerance=0.03):
    """Create (or recreate) the volume-correction cluster joints for ``mesh``.

    Reads the cached volume deltas, resolves (or computes) the affected
    vertices, deletes any previously created cluster joints and builds a new
    set, caching the joints and weights as metadata on the mesh.

    Args:
        mesh (str): mesh holding the metadata attributes.
        joints (list): influence joints driving the deformation.
        cluster_n (int, optional): number of clusters. Defaults to 4.
        tolerance (float, optional): volume-delta threshold. Defaults to 0.03.
    Raises:
        BaseException: when the mesh was never initialized with initData.
    Returns:
        tuple: (cluster joints list, membership-weight matrix).
    """
    if not cmds.objExists(f"{mesh}.{DELTA_VOLUME_ATTR}"):
        raise BaseException(f"unable to find {DELTA_VOLUME_ATTR} in {mesh} was not inited")
    delta_str = cmds.getAttr(f"{mesh}.{DELTA_VOLUME_ATTR}")
    if not delta_str:
        raise BaseException(f"unable to find {DELTA_VOLUME_ATTR} data in {mesh} was not inited")
    wrong_vtx_str = ""
    if not cmds.objExists(f"{mesh}.{CLUSTER_VERTICES_ATTR}"):
        wrong_vtx_str = str(selectAffectedVertices(mesh, tolerance=tolerance, select=False))
    else:
        wrong_vtx_str = cmds.getAttr(f"{mesh}.{CLUSTER_VERTICES_ATTR}")
    if not wrong_vtx_str:
        raise BaseException(f"unable to find {CLUSTER_VERTICES_ATTR} data in {mesh} was not inited")
    # literal_eval parses the stored list literals without eval's
    # arbitrary-code-execution risk
    delta_volume_vtx = np.array(ast.literal_eval(delta_str))
    clusters_vertices = np.array(ast.literal_eval(wrong_vtx_str))
    cluster_joints_str = ""
    if cmds.objExists(f"{mesh}.{CLUSTER_JOINTS_ATTR}"):
        cluster_joints_str = cmds.getAttr(f"{mesh}.{CLUSTER_JOINTS_ATTR}")
    if cluster_joints_str:
        # remove the previous generation of cluster joints (or their
        # metadata-tagged parents)
        old_joints = [a for a in ast.literal_eval(cluster_joints_str) if cmds.objExists(a)]
        to_delete = set()
        for jnt in old_joints:
            prnt = cmds.listRelatives(jnt, p=1)
            if prnt and cmds.objExists(f"{prnt[0]}.{transforms.JOINT_METADATA}"):
                to_delete.add(prnt[0])
            else:
                to_delete.add(jnt)
        # pass a list: maya commands are not guaranteed to accept sets
        cmds.delete(list(to_delete))
    cluster_joints, cluster_weights = createClustersJoints(mesh,
                                                          joints,
                                                          clusters_vertices,
                                                          delta_volume_vtx,
                                                          cluster_n)
    setMetadataAttribute(mesh, CLUSTER_JOINTS_ATTR, cluster_joints, storable=True)
    setMetadataAttribute(mesh, CLUSTER_WEIGHTS_ATTR, str(cluster_weights.tolist()), storable=True)
    return cluster_joints, cluster_weights
def setDefaultSkin(mesh):
    """Apply the cached cluster skinning data stored on ``mesh``.

    Args:
        mesh (str): mesh carrying the cluster metadata attributes.
    Raises:
        BaseException: when any of the cluster metadata attributes is missing.
    """
    if not cmds.objExists(f"{mesh}.{CLUSTER_JOINTS_ATTR}"):
        raise BaseException(f"mesh {mesh} has not the cluster joints information, please recreate cluster joints")
    if not cmds.objExists(f"{mesh}.{CLUSTER_VERTICES_ATTR}"):
        raise BaseException(f"mesh {mesh} has not the cluster vertices information, please recreate cluster joints")
    if not cmds.objExists(f"{mesh}.{CLUSTER_WEIGHTS_ATTR}"):
        raise BaseException(f"mesh {mesh} has not the cluster weights information, please recreate cluster joints")
    wrong_vtx_str = cmds.getAttr(f"{mesh}.{CLUSTER_VERTICES_ATTR}")
    cluster_joints_str = cmds.getAttr(f"{mesh}.{CLUSTER_JOINTS_ATTR}")
    clusters_weights_str = cmds.getAttr(f"{mesh}.{CLUSTER_WEIGHTS_ATTR}")
    # literal_eval: the attributes hold list literals written by this module;
    # the original eval() would execute any code stored in them
    clusters_joints = [a for a in ast.literal_eval(cluster_joints_str) if cmds.objExists(a)]
    clusters_vertices = np.array(ast.literal_eval(wrong_vtx_str))
    clusters_weights = np.array(ast.literal_eval(clusters_weights_str))
    applyClustersDefaultSkin(mesh, clusters_joints, clusters_vertices, clusters_weights)
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,079
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/skinCluster.py
|
import logging
import copy
from . import meshes
from .mUtils import mNode
from maya import cmds
_logger = logging.getLogger(__name__)
def getDeformersFromMesh(sourceMesh, nodeType="skinCluster"):
    """Return the deformers found in a mesh's history as MNodes.

    Args:
        sourceMesh (str or MNode): mesh to inspect.
        nodeType (str, optional): filter by node type; falsy returns every
            deformer. Defaults to "skinCluster".
    Returns:
        list: matching deformers, in reverse history order.
    """
    mesh_name = str(sourceMesh)
    history = cmds.listHistory(mesh_name, pruneDagObjects=True) or list()
    deformers = [n for n in history if 'geometryFilter' in cmds.nodeType(n, i=1)]
    deformers.reverse()
    if not deformers:
        _logger.debug("unable to find any deformers in {}".format(mesh_name))
        return []
    if not nodeType:
        return [mNode.MNode(n) for n in deformers]
    matched = [mNode.MNode(n) for n in deformers if cmds.nodeType(n) == nodeType]
    if not matched:
        _logger.debug("unable to find a {} in {}".format(nodeType, mesh_name))
    return matched
def getSkinInfluences(skinCluster, asMNode=False):
    """get the skin influences
    Args:
        skinCluster (str,MNode): source skin
        asMNode (bool, optional): return MNode objects instead of
            partial names. Defaults to False.
    Returns:
        dict: influenceIndex: influence partial name (or MNode when asMNode)
    """
    skinClusterNode = mNode.MNode(skinCluster)
    matrixPlug = skinClusterNode.matrix
    influences = dict()
    for indx in matrixPlug.getExistingArrayAttributeIndices():
        # each occupied matrix[] element is fed by one influence joint
        currPreBindlug = matrixPlug[indx]
        jntPlug = currPreBindlug.source()
        # NOTE(review): the wrapped MPlug.source() returns None (not a null
        # plug) when unconnected -- verify matrixPlug[indx] yields a raw
        # om.MPlug here, otherwise .isNull would raise on None
        if jntPlug.isNull:
            continue
        influence = mNode.MNode(jntPlug.node())
        if asMNode:
            influences[indx] = influence
        else:
            influences[indx] = str(influence)
    return influences
def getSkinWeightsMapping(skinCluster, normalize=True, tol=0.0001):
    """get each vertex influence and weight
    Args:
        skinCluster (str- omx.XNode): source skincluster
        normalize (bool, optional): re-normalize each vertex's weights so
            they sum to 1. Defaults to True.
        tol (float, optional): weights below this value are discarded.
            Defaults to 0.0001.
    Returns:
        dict: {vertexIndex: {influenceName: weight}}
    """
    skin = mNode.MNode(skinCluster)
    influences = getSkinInfluences(skin)
    weightMap = dict()
    for i in range(skin.weightList.numElements()): # for each vertex
        plugWeights = skin.weightList[i].weights # access the actual weight attr
        vertDict = {}
        for j in plugWeights.getExistingArrayAttributeIndices(): # for each joint
            weightFloat = plugWeights[j].get()
            if weightFloat < tol:
                continue
            inf = influences.get(j)
            if not inf:
                continue
            vertDict[str(inf)] = weightFloat
        allWeigts = sum(vertDict.values())
        if allWeigts != 1 and normalize:
            # normalize data
            for k, v in vertDict.items():
                vertDict[k] = float(v)/allWeigts
        weightMap[i] = vertDict
    return weightMap
def normalizeWeights(weightMaping):
    """Normalize each vertex's weights so they sum to 1.

    Args:
        weightMaping (dict): {vertexIndex: {influenceName: weight}}.
    Returns:
        dict: same structure with normalized per-vertex weights. Vertices
            whose weights sum to zero (or that have no weights) are
            returned unchanged instead of raising ZeroDivisionError.
    """
    result = dict()
    for indx, values in weightMaping.items():
        weight_sum = sum(values.values())
        # already normalized, or nothing to normalize (zero/empty sum)
        if weight_sum == 1 or not weight_sum:
            result[indx] = values
            continue
        result[indx] = {k: float(v) / weight_sum for k, v in values.items()}
    return result
def setSkinWeights(skinCluster, weightMaping):
    """Write a {vertexIndex: {influenceName: weight}} mapping onto a skin.

    Existing weights on each listed vertex are zeroed first, then every
    influence of the cluster receives the mapped weight (0 when absent).

    Args:
        skinCluster (str, MNode): target skinCluster.
        weightMaping (dict): vertex index to {influence name: weight}.
    """
    tagetSkin = mNode.MNode(skinCluster)
    targInfluences = getSkinInfluences(tagetSkin)
    for i, vtxW in weightMaping.items(): # for each vertex
        plugWeights = tagetSkin.weightList[i].weights
        # First reset values to zero:
        nb_weights = plugWeights.numElements()
        for j in range(nb_weights): # for each joint
            plugWeights[j].set(0)
        # set weights
        influences = list()
        weights = list()
        for infIdx, inflName in targInfluences.items():
            inflWeigt = 0
            if inflName in vtxW:
                inflWeigt = vtxW[inflName]
            influences.append(infIdx)
            weights.append(inflWeigt)
        for jntIdx, value in zip(influences, weights):
            plugWeights[jntIdx].set(value)
def relaxWeightMapping(weightMap, faceVertices, vertexFaces,
                       vertexList=None, relaxStreght=1.0,
                       iterations=1, influencesToSkip=None):
    """Smooth a skin weight mapping toward each vertex's neighbor average.

    Args:
        weightMap (dict): {vertexIndex: {influenceName: weight}}.
        faceVertices (dict): face index to its vertex indices.
        vertexFaces (dict): vertex index to the faces using it.
        vertexList (iterable, optional): vertices to relax; all when falsy.
        relaxStreght (float, optional): blend toward the neighbor average
            (0 = untouched, 1 = full average). Defaults to 1.0.
        iterations (int, optional): smoothing passes. Defaults to 1.
        influencesToSkip (list, optional): influence names left untouched.
            Defaults to None (the original used a shared mutable [] default).
    Returns:
        dict: relaxed deep copy of ``weightMap``; the input is not modified.
    """
    # fixed: a literal [] default is shared across calls (mutable default)
    if influencesToSkip is None:
        influencesToSkip = []
    relaxedWeights = copy.deepcopy(weightMap)
    if not vertexList:
        vertexList = weightMap.keys()
    connectedVertices = meshes.getConnectedVertices(vertexList, vertexFaces, faceVertices)
    for _ in range(iterations):
        for currIdx, connected in connectedVertices.items():
            newWeigts = dict()
            # add missing influences so neighbors can bleed weights in
            missingInfl = set()
            for con in connected:
                for inf in relaxedWeights[con].keys():
                    if inf in relaxedWeights[currIdx].keys():
                        continue
                    missingInfl.add(inf)
            for inf in missingInfl:
                relaxedWeights[currIdx][inf] = 0.0
            for inf, weight in relaxedWeights[currIdx].items():
                # get all neighbor vertices that share the same influence
                if inf in influencesToSkip:
                    continue
                newW = 0.0
                for con in connected:
                    connW = relaxedWeights[con]
                    if inf not in connW:
                        continue
                    newW += connW[inf]
                # average those weights
                newW /= len(connected)
                newWeigts[inf] = weight+(newW-weight)*relaxStreght
            allWeigts = sum(newWeigts.values())
            if allWeigts != 1:
                # normalize data. NOTE(review): a nonempty newWeigts summing
                # to exactly 0 would divide by zero here (pre-existing
                # behavior, unchanged)
                for k, v in newWeigts.items():
                    newWeigts[k] = float(v) / allWeigts
            relaxedWeights[currIdx] = newWeigts
    return relaxedWeights
def getInfluencesVertices(mesh, influences, tolerance=.01):
    """Return the vertex indices weighted to any of the given influences.

    Args:
        mesh (str or MNode): skinned mesh to inspect.
        influences (iterable): influence names to look for.
        tolerance (float, optional): minimum weight to count. Defaults to .01.
    Returns:
        list: unique vertex indices touched by at least one influence.
    """
    skin = getDeformersFromMesh(mesh)[0]
    weightMap = getSkinWeightsMapping(skin, normalize=False, tol=tolerance)
    matched = {vtx for vtx, weights in weightMap.items()
               if any(inf in influences for inf in weights)}
    return list(matched)
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,080
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/math_utils/fuzzyCMeans.py
|
import numpy as np
def calc_median(im, seg_img, verbose):
    """Recolor every segment of ``im`` with its per-channel median value.

    Args:
        im: HxWx3 image array with channel values in [0, 1].
        seg_img: HxW label map; one median color is computed per label.
        verbose: when truthy, print the segment counter while processing.
    Returns:
        np.ndarray: int64 image where each pixel holds its segment's
        median color scaled by 256.
    """
    labels = list(np.unique(seg_img))
    out = np.zeros_like(im, dtype=np.int64)
    for idx, label in enumerate(labels):
        if verbose:
            print(idx)
        sel = np.where((seg_img == label))
        mask = np.zeros_like(im[:, :, 0], dtype=np.int64)
        mask[sel] = 1
        for ch in range(3):
            channel_median = np.median(im[:, :, ch][sel])
            out[:, :, ch] += mask * int(channel_median * 256)
    return out
def keep_center(im, seg_img, center, verbose):
    """Recolor every segment of ``im`` with its cluster-center color.

    Args:
        im: HxWx3 image array (only its shape is used).
        seg_img: HxW label map; labels index rows of ``center``.
        center: per-cluster RGB colors in [0, 1].
        verbose: when truthy, print the segment counter while processing.
    Returns:
        np.ndarray: int64 image of the center colors scaled by 256.
    """
    labels = list(np.unique(seg_img))
    out = np.zeros_like(im, dtype=np.int64)
    for idx, label in enumerate(labels):
        if verbose:
            print(idx)
        sel = np.where((seg_img == label))
        mask = np.zeros_like(im[:, :, 0], dtype=np.int64)
        mask[sel] = 1
        for ch in range(3):
            out[:, :, ch] += mask * int(center[label, ch] * 256)
    return out
def init_memval(cluster_n, data_n):
    """Create a random fuzzy-membership matrix of shape (cluster_n, data_n).

    Each column is normalized so one data point's memberships over all
    clusters sum to 1.
    """
    membership = np.random.random((cluster_n, data_n))
    # builtin sum over the first axis yields the per-column totals, exactly
    # as the original's ones-matrix construction did
    return membership / sum(membership)
def fcm(data, cluster_n, expo=2, min_err=0.001, max_iter=500, verbose=0):
    """Fuzzy C-Means clustering.

    Args:
        data (np.ndarray): (n_samples, n_features) observations.
        cluster_n (int): number of clusters.
        expo (float): fuzziness exponent; must be > 1. Defaults to 2.
        min_err (float): stop once the largest membership update is below
            this value. Defaults to 0.001.
        max_iter (int): iteration cap. Defaults to 500.
        verbose (int): print the iteration counter when truthy. Defaults to 0.
    Returns:
        tuple: (U, center, U1) -- (cluster_n, n_samples) membership matrix,
        (cluster_n, n_features) centroids, and the hard cluster index per
        sample.
    """
    np.random.seed(0)  # deterministic initialization for reproducible clusters
    data_n = data.shape[0]
    U = init_memval(cluster_n, data_n)
    # only the previous membership matrix is needed for the convergence
    # test; the original kept every iteration's matrix in a dict
    prev_U = None
    for i in range(max_iter):
        if verbose:
            print('Iteration: ', i)
        mf = np.power(U, expo)
        center = np.divide(np.dot(mf, data), (np.ones((data.shape[1], 1)) * sum(mf.T)).T)
        diff = np.zeros((center.shape[0], data.shape[0]))
        if center.shape[1] > 1:
            for k in range(center.shape[0]):
                diff[k, :] = np.sqrt(sum(np.power(data - np.dot(np.ones((data.shape[0], 1)), np.reshape(center[k, :], (1, center.shape[1]))), 2).T))
        else:  # for 1-D data
            for k in range(center.shape[0]):
                diff[k, :] = abs(center[k] - data).T
        dist = diff + 0.0001  # epsilon avoids a zero distance
        num = np.power(dist, (-2 / (expo - 1)))
        U = np.divide(num, np.dot(np.ones((cluster_n, 1)), np.reshape(sum(num), (1, num.shape[1]))) + 0.0001)
        if prev_U is not None and abs(np.amax(U - prev_U)) < min_err:
            break
        prev_U = U
    U1 = np.argmax(U, axis=0)
    return U, center, U1
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,081
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/math_utils/k_means.py
|
import numpy as np
def initializeCentroids(points, k):
    """Pick k starting centroids by shuffling a copy of the points."""
    shuffled = points.copy()
    np.random.shuffle(shuffled)
    return shuffled[:k]
def closestCentroid(points, centroids):
    """For each point, return the index of its nearest centroid."""
    deltas = points - centroids[:, np.newaxis]
    distances = np.sqrt((deltas ** 2).sum(axis=2))
    return np.argmin(distances, axis=0)
def moveCentroids(points, closest, centroids):
    """Recompute each centroid as the mean of its assigned points."""
    new_positions = [points[closest == idx].mean(axis=0)
                     for idx in range(centroids.shape[0])]
    return np.array(new_positions)
def evalKMeans(k, raw_data):
    """Run up to 100 Lloyd iterations of k-means over raw_data.

    Returns:
        tuple: (centroids, closest) -- final centroid array and the
        nearest-centroid index for every input point.
    """
    points = np.array(raw_data)
    centroids = initializeCentroids(points, k)
    previous = np.zeros_like(centroids)
    for _ in range(100):
        closest = closestCentroid(points, centroids)
        centroids = moveCentroids(points, closest, centroids)
        if np.linalg.norm(centroids - previous) == 0:
            break  # converged: the centroids stopped moving
        previous = centroids.copy()
    closest = closestCentroid(points, centroids)
    return centroids, closest
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,082
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/mUtils/mNode.py
|
import math
import logging
import numbers
from six import string_types
from maya.api import OpenMaya as om
from maya.api import OpenMayaAnim as oma
from maya import cmds
from .mPlug import MPlug
# module-level logger for debug tracing
_logger = logging.getLogger(__name__)
# True when the running Maya api version predates 2020; used by
# MNode.replaceShape to work around the pre-2020 deleteNode behavior
ISMAYAPRE2020 = cmds.about(apiVersion=1) < 20200000
def exception_handler(func):
    """decorator to mask false maya AttributeErrors raised when these
    wrapper classes are passed as command arguments

    The wrapped call's result is returned (the original discarded it, so a
    decorated method such as __apiobject__ always yielded None); an
    AttributeError is swallowed and returns None, and every other exception
    propagates untouched (the redundant ``except Exception: raise e`` was
    removed).
    """
    def inner_function(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except AttributeError:
            return None
    return inner_function
class MNode(om.MObjectHandle):
"""Wrapper of OpenMaya objects to easier use of Maya Api
Inheritance:
MObjectHandle : inherit form MObjectHandle to check the validity of the object
"""
def __init__(self, node):
self._mObject = None
self._mFn = None
self._bestFn = None
if isinstance(node, list):
raise BaseException(f"unable to init from a list input: {node}")
if isinstance(node, string_types):
if not cmds.objExists(node):
raise BaseException("{} does not exists". format(node))
sel = om.MSelectionList()
sel.add(node)
self._mObject = sel.getDependNode(0)
elif isinstance(node, om.MObject):
self._mObject = node
elif isinstance(node, om.MDagPath):
self._mObject = node.node()
elif isinstance(node, om.MObjectHandle):
self._mObject = node.object()
elif isinstance(node, MNode):
self.__dict__ = node.__dict__.copy() # just a shallow copy
super(MNode, self).__init__()
self.assign(self._mObject)
_logger.debug("{} initiated".format(self))
@classmethod
def createNode(cls, node_type, name_str=None):
"""create a node and return a MNode object
Args:
name_str (str): Name of the node
node_type (str): type of the node
Returns:
MNode: MNode object with the created node
"""
flags = dict()
if name_str:
flags["n"] = name_str
node = cmds.createNode(node_type, **flags)
if "shape" in cmds.nodeType(node, i=1):
trf = cmds.listRelatives(node, p=1)[0]
if name_str:
cmds.rename(node, "{}Shape".format(name_str))
node = cmds.rename(trf, name_str)
else:
node = trf
_logger.debug("{} created".format(node))
return cls(node)
def checkValidity(self):
"""Check that the object still exist in memory and on the maya scene
Raises:
BaseException: the mObject is not alive or valid
"""
if not self.isValid():
raise RuntimeError("{} is not Valid". format(self.name()))
if not self.isAlive():
raise RuntimeError("{} is not Alive". format(self.name()))
def getBestFn(self):
"""get maya api that best match the node type,
geting the best Function method (mFn) using maya inheritance
Raises:
BaseException: the mObject is not alive or valid
ValueError: if is unable to get the mFn
Returns:
variable maya.api.MFtype
"""
self.checkValidity()
if self._bestFn:
return self._bestFn
fnList = om.MGlobal.getFunctionSetList(self.object())
input = self.object()
if input.hasFn(om.MFn.kDagNode):
input = om.MDagPath.getAPathTo(input)
for fn in fnList[::-1]:
class_name = 'MFn{}'.format(fn[1:])
if hasattr(om, class_name):
try:
class_ = getattr(om, class_name)
self._bestFn = class_(input)
return self._bestFn
except Exception as e:
_logger.debug(e)
if hasattr(oma, class_name):
try:
class_ = getattr(om, class_name)
self._bestFn = class_(input)
return self._bestFn
except Exception as e:
_logger.debug(e)
raise ValueError('Unable to find MFn for {}'.format(fnList[::-1]))
def getBaseFn(self):
"""get maya api best MFn object form a node name, geting the base function method
Returns:
variable maya.api.MFnDagNode or maya.api.MFnDependencyNode
"""
if not self._mFn:
if self._mObject.hasFn(om.MFn.kDagNode):
path = self.getDagPath()
self._mFn = om.MFnDagNode(path)
else:
self._mFn = om.MFnDependencyNode(self.object())
return self._mFn
def getDagPath(self):
self.checkValidity()
if not self._mObject.hasFn(om.MFn.kDagNode):
raise RuntimeError("{} is not DAG node". format(self.name()))
return om.MDagPath.getAPathTo(self._mObject)
def name(self):
"""return current name of the node
Returns:
str: name of the object
"""
fn = self.getBaseFn()
if hasattr(fn, "partialPathName"):
return fn.partialPathName()
return fn.name()
def __str__(self):
"""override string operator to return node name
Returns:
str: node name
"""
return self.name()
def __unicode__(self):
return self.name()
@exception_handler
def __apiobject__(self):
"get the MObject for this node if it is valid"
self.checkValidity()
return self.object()
def __getattr__(self, name):
"""get the attribute as MPlug
Raises:
BaseException: the mObject is not alive or valid
Returns:
mPlug.MPlug
"""
self.checkValidity()
fn = self.getBaseFn()
if fn.hasAttribute(name):
return MPlug(fn.findPlug(name, True))
raise AttributeError("MNode {} doesn't have property called {}".format(self.name(), name))
def addAttribute(self, attrName, attrType="float", keyable=True, limits=None, default_value=0.0):
"""add an attribute to the node
Args:
attrName (str): name of the new attribute
attrType (str, optional): type of the attribute. Defaults to "float".
keyable (bool, optional): if is keyable and appears on the channel box. Defaults to True.
limits (tuple, optional): if is defined, set the min and max limit. Defaults to None.
default_value (float, optional): the default value. Defaults to 0.0.
Returns:
MPlug: created attribute plug
"""
fAttr = None
attrName = attrName.replace(" ", "_")
if attrType.lower() == "bool":
attrType = "boolean"
if attrType.lower() in ["float", "int", "boolean"]:
fAttr = om.MFnNumericAttribute()
data_type = getattr(om.MFnNumericData,
"k{}".format(attrType.capitalize()))
new_attr = fAttr.create(attrName, attrName,
data_type, default_value)
fAttr.keyable = keyable
if limits:
fAttr.setMin(limits[0])
fAttr.setMax(limits[1])
elif attrType.lower() == "enum":
fAttr = om.MFnEnumAttribute()
new_attr = fAttr.create(attrName, attrName)
if isinstance(default_value, basestring):
fAttr.addField(default_value, 0)
elif isinstance(default_value, list):
for i, each in enumerate(default_value):
fAttr.addField(each, i)
fAttr.hidden = False
fAttr.keyable = False
fAttr.channelBox = True
fn = self.getBaseFn()
fn.addAttribute(new_attr)
return getattr(self, attrName)
def lockAndHideAttr(self, attrName, value=True):
"""lock and hide (remove from the channel box) attributes
Args:
attrName (str): attribute name
value (bool, optional): if is true lock and hide, if is False shows and unlock. Defaults to True.
Raises:
BaseException: if the node does not have the attribute
"""
if not hasattr(self, attrName):
raise BaseException("{} has not attribute {}".format(self, attrName))
plug = getattr(self, attrName)
plug.isLocked = value
plug.isKeyable = not value
def lockAttr(self, attrName, value=True):
if not hasattr(self, attrName):
raise BaseException("{} has not attribute {}".format(self, attrName))
plug = getattr(self, attrName)
plug.isLocked = value
def hideAttr(self, attrName, value=True):
if not hasattr(self, attrName):
raise BaseException("{} has not attribute {}".format(self, attrName))
plug = getattr(self, attrName)
plug.isKeyable = not value
def getShape(self, index=0):
"""return the node shape, if the node is a shape it will return itself
Args:
index (int, optional): if has more than one shape, specifiy the index. Defaults to 0.
Returns:
MNode: node shape
"""
path = self.getDagPath()
return MNode(path.extendToShape(index))
def replaceShape(self, target, delete_transform=True):
"""replace the node shape
Args:
target (MNode): new shape
delete_transform (bool, optional): delete the target transfrom. Defaults to True.
Raises:
RuntimeError: if the node is not DAG
"""
self.checkValidity()
if not self._mObject.hasFn(om.MFn.kDagNode):
raise RuntimeError("{} is not DAG node". format(self.name()))
MDagMod = om.MDagModifier()
shape = self.getShape()
targ_node = MNode(target)
current_color = self.getColor()
if shape:
if ISMAYAPRE2020:
# deleting with cmds, becasue api miss the includeParents
# and it deletes the transform as well
cmds.delete(shape)
else:
MDagMod.deleteNode(shape.object(), includeParents=False)
if targ_node.object().hasFn(om.MFn.kShape):
target_shape = targ_node.object()
targ_trf = target.getBaseFn().parent(0)
else:
target_shape = targ_node.getShape().object()
targ_trf = targ_node.object()
MDagMod.reparentNode(target_shape, self.object())
MDagMod.doIt()
self._mFn = om.MFnDagNode(self.object())
self._bestFn = None
if delete_transform:
MDagMod.deleteNode(targ_trf)
MDagMod.renameNode(self.getShape().object(), "{}Shape".format(self))
MDagMod.doIt()
if current_color:
self.setColor(current_color)
def setColor(self, color):
"""set shape color
Args:
color (list): three value color in RGB form 0 to 1
"""
shape = self.getShape()
getattr(shape, 'overrideEnabled').set(1)
getattr(shape, 'overrideRGBColors').set(1)
for ch, col in zip(['R', 'G', 'B'], color):
getattr(shape, 'overrideColor{}'.format(ch)).set(col)
def getColor(self):
shape = self.getShape()
overrideEnabled = getattr(shape, 'overrideEnabled').get()
if not overrideEnabled:
return None
overrideRGBColors = getattr(shape, 'overrideRGBColors').get()
if not overrideRGBColors:
return None
overrideColor = list()
for ch, in zip(['R', 'G', 'B']):
overrideColor.append(getattr(shape, 'overrideColor{}'.format(ch)).get())
return overrideColor
def isTransform(self):
self.checkValidity()
return self.object().hasFn(om.MFn.kTransform)
def _checkTransform(self):
if not self.isTransform():
raise RuntimeError(
"{} has not transformation info". format(self.name()))
def getParent(self):
"""get currecnt node parent
Returns:
MNode: current parent, else None if it does not have parent
"""
parent = self.getBaseFn().parent(0)
if parent.hasFn(om.MFn.kWorld):
return None
return MNode(parent)
def setParent(self, parent, stay_in_place=True):
"""reparent node
Args:
parent (str-MNode): new parent
stay_in_place (bool, optional): keep world space position. Defaults to True.
"""
self._checkTransform()
parent_node = None
if isinstance(parent, MNode):
parent_node = parent
elif isinstance(parent, basestring):
parent_node = MNode(parent)
parent_node._checkTransform()
if self.getBaseFn().parent(0) == parent_node.object():
_logger.debug("{} is already a child of {}, parenting skipped".format(self, parent_node))
return
if self.object() == parent_node.object():
_logger.debug("Unable to parent {} to itself, parenting skipped".format(self))
return
current_mtx = self.getMatrix("world")
# using dagmodifier instead of MFnDag.addChild, becasue if the
# node is a grandchild of parent, addChild it will fail
dagMod = om.MDagModifier()
dagMod.reparentNode(self.object(), parent_node.object())
dagMod.doIt()
if stay_in_place:
self.setMatrix(current_mtx)
def getMatrix(self, space='world'):
"""if the node is a DAG node, it return the matrix
Args:
space (str, optional): space to querry, world or object. Defaults to 'world'.
Raises:
RuntimeError: if the node is not alive or is not a DAG node
Returns:
MMatrix: current transformation matrix
"""
self._checkTransform()
path = self.getDagPath()
if space.lower() == 'world':
return path.inclusiveMatrix()
return path.exclusiveMatrix()
def setMatrix(self, matrix, space="world", translation=True, rotation=True, scale=True):
"""set current node transformation matrix
Args:
matrix (MMatrix-list): input matrix
space (str, optional): set the matrix as world or local space. Defaults to "world".
translation (bool, optional): set translattion. Defaults to True.
rotation (bool, optional): set rotation. Defaults to True.
scale (bool, optional): set scale. Defaults to True.
"""
self._checkTransform()
mmatrix = om.MMatrix(matrix)
mtx = om.MTransformationMatrix(mmatrix)
k_space = self.getSpaceConstant(space)
if scale:
self.getBestFn().setScale(mtx.scale(k_space))
if rotation:
# for some reason the rotation it wont set properlly at world space
rot_mtx = om.MTransformationMatrix(mmatrix * self.parentInverseMatrix[0].get())
self.getBestFn().setRotation(rot_mtx.rotation(asQuaternion=True), self.getSpaceConstant("local"))
if translation:
self.getBestFn().setTranslation(mtx.translation(k_space), k_space)
def getSpaceConstant(self, space):
if space.lower() == 'world':
return om.MSpace.kWorld
elif space.lower() == 'local':
return om.MSpace.kObject
def getPosition(self, space='world'):
"""if the node is DAG, return the node position
Args:
space (str, optional): space to querry, world or object. Defaults to 'world'.
Raises:
RuntimeError: if the node is not alive or is not a DAG node
Returns:
MPoint: current position
"""
mTrfMtx = om.MTransformationMatrix(self.getMatrix(space))
return mTrfMtx.translation(self.getSpaceConstant(space))
def setPosition(self, pos, space="world"):
self._checkTransform()
self.getBestFn().setTranslation(om.MVector(pos), self.getSpaceConstant(space))
def getRotation(self, space='world'):
"""if the node is DAG, return the node position
Args:
space (str, optional): space to querry, world or object. Defaults to 'world'.
Raises:
RuntimeError: if the node is not alive or is not a DAG node
Returns:
MPoint: current position
"""
mTrfMtx = om.MTransformationMatrix(self.getMatrix(space))
return mTrfMtx.rotation(self.getSpaceConstant(space))
def setRotation(self, rotation, space="world"):
    """Set the node rotation.

    Args:
        rotation (list): rotation angles in degrees.
        space (str, optional): transformation space. Defaults to "world".
    """
    self._checkTransform()
    radians = [math.radians(angle) for angle in rotation]
    quaternion = om.MEulerRotation(radians).asQuaternion()
    self.getBestFn().setRotation(quaternion, self.getSpaceConstant(space))
def getScale(self, space="world"):
    """Return the node scale.

    Args:
        space (str, optional): transformation space. Defaults to "world",
            matching the other transform accessors (getPosition/getRotation),
            which previously this method alone did not default.

    Returns:
        list: current scale values extracted from the node matrix.
    """
    mTrfMtx = om.MTransformationMatrix(self.getMatrix(space))
    return mTrfMtx.scale(self.getSpaceConstant(space))
def setScale(self, scale):
    """Set the node scale.

    (The previous docstring documented a `space` parameter that does not exist.)

    Args:
        scale (float or list): scale values; a single number applies a uniform scale.
    """
    self._checkTransform()
    if isinstance(scale, numbers.Number):
        scale = (scale, scale, scale)
    # NOTE(review): MFnTransform.scaleBy applies a *relative* factor rather
    # than an absolute scale — confirm that relative scaling is intended here.
    self.getBestFn().scaleBy(scale)
def snapTo(self, target, translation=True, rotation=True, scale=True):
    """Snap this node onto a target node in world space.

    Args:
        target (str or MNode): node to snap to.
        translation (bool, optional): transfer translation. Defaults to True.
        rotation (bool, optional): transfer rotation. Defaults to True.
        scale (bool, optional): transfer scale. Defaults to True.
    """
    target_node = MNode(target)
    world_matrix = target_node.getMatrix()
    self.setMatrix(world_matrix, "world", translation, rotation, scale)
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,083
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/maya_utils/mUtils/__init__.py
|
from .mNode import MNode
from .mPlug import MPlug
__all__ = ['MNode', 'MPlug']
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,814,084
|
lopezmauro/RL_Maya
|
refs/heads/main
|
/rl_vp/ppo/ppo_simple.py
|
import logging
from tensorflow.keras.models import Model, load_model
import tensorflow.keras.layers as layers
import tensorflow.keras.backend as K
import numpy as np
from tensorflow.python.framework.ops import disable_eager_execution
disable_eager_execution()
logger = logging.getLogger(__name__)
class Agent:
    """PPO-style agent with separate actor and critic networks.

    The actor is built twice over shared layers: a training model that takes
    an extra advantage/rewards input used by its loss, and a prediction model
    without it.
    """

    def __init__(self, env, rwdFile="", epsilon=1.0, minEpsilon=.1,
                 rwdDiscount=.1, hidden_size=512, num_layers=1):
        """
        Args:
            env: environment exposing action_space, observation_space, reset() and step().
            rwdFile (str): optional path; per-episode mean rewards are appended to it.
            epsilon (float): initial exploration-noise scale.
            minEpsilon (float): final exploration-noise scale.
            rwdDiscount (float): reward discount factor.
            hidden_size (int): units per hidden dense layer.
            num_layers (int): number of hidden layers.
        """
        self.env = env
        self.rwdFile = rwdFile
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.num_actions = self.env.action_space
        self.num_states = self.env.observation_space
        self.observation = self.env.reset()
        self.name = "PPO_test"
        self.epsilon = epsilon
        self.minEpsilon = minEpsilon
        self.rwdDiscount = rwdDiscount
        self.critic = self.build_critic()
        self.actor, self.result_model = self.build_actor()
        if self.rwdFile:
            # truncate the reward log at the start of a run
            with open(self.rwdFile, 'w') as fd:
                fd.write("")

    def build_critic(self):
        """Build and compile the state-value network (state -> scalar)."""
        # nn topology
        in_layer = layers.Input(shape=[self.num_states], name="input_x")
        x = layers.Dense(self.hidden_size, activation='tanh')(in_layer)
        for _ in range(self.num_layers - 1):
            x = layers.Dense(self.hidden_size, activation='tanh')(x)
        out_layer = layers.Dense(1, activation="linear", name="out")(x)
        model = Model(inputs=in_layer, outputs=out_layer)
        model.compile(loss='mse', optimizer='RMSProp')
        return model

    def build_actor(self):
        """Build the policy network.

        Returns:
            tuple: (model_train, model_predict) sharing layers; model_train
            takes an extra rewards/advantage input consumed by its loss.
        """
        # nn topology
        in_layer = layers.Input(shape=[self.num_states], name="input_x")
        x = layers.Dense(self.hidden_size, activation='tanh')(in_layer)
        for _ in range(self.num_layers - 1):
            x = layers.Dense(self.hidden_size, activation='tanh')(x)
        out_layer = layers.Dense(self.num_actions,
                                 activation="linear",
                                 name="out")(x)
        # advg_ph is a placeholder for inputing rewards value into
        # the model, making them avaiable for the loss functions
        advg_ph = layers.Input(shape=[self.num_actions], name="rewards")

        # mean squared error weighted element-wise by the rewards placeholder
        def custom_loss(y_true, y_pred):
            #return advg_ph*K.square(y_pred - y_true)
            return K.mean(advg_ph*K.square(y_pred - y_true), axis=-1)
        # create two models that share layers
        # one has rewards as inputs (for training only)
        model_predict = Model(inputs=[in_layer], outputs=out_layer)
        model_train = Model(inputs=[in_layer, advg_ph], outputs=out_layer)
        model_train.compile(loss=custom_loss, optimizer='RMSProp')
        model_predict.compile(loss='mse', optimizer='RMSProp')
        return model_train, model_predict

    def load_models(self, actor_file, critic_file, predict_file):
        """Load previously saved actor, critic and prediction models from disk."""
        self.actor = load_model(actor_file)
        self.critic = load_model(critic_file)
        self.result_model = load_model(predict_file)

    def discount_rewards(self, r, discount):
        """Return the discounted cumulative sum of reward sequence *r*."""
        # adapted from Martin Gorner's Github project:
        # https://github.com/GoogleCloudPlatform/tensorflow-without-a-phd
        r = np.array(r)
        discounted_r = np.zeros_like(r)
        running_add = 0
        for t in reversed(range(0, r.size)):
            running_add = running_add * discount + r[t]
            discounted_r[t] = running_add
        return discounted_r

    def get_batch(self, n_episodes, train_number=0):
        """Roll out *n_episodes* episodes with decaying exploration noise.

        Args:
            n_episodes (int): number of episodes to collect.
            train_number (int, optional): label used only for logging.

        Returns:
            tuple: (states, discounted noisy rewards, actions,
            discounted noise-free rewards), stacked across all episodes.
        """
        all_states = np.array([]).reshape(0, self.num_states).astype(np.float32)
        all_actions = np.array([]).reshape(0, self.num_actions).astype(np.float32)
        all_dsc_real_rwds = all_dsc_rwds = np.array([]).reshape(0, 1).astype(np.float32)
        # Decay epsilon gradient for noise reduction each step
        epsilonGradient = np.linspace(self.minEpsilon, self.epsilon, n_episodes)[::-1]
        for j in range(n_episodes):
            done = False
            ep_rwds = np.array([]).reshape(0, 1).astype(np.float32)
            ep_real_rwds = np.array([]).reshape(0, 1).astype(np.float32)
            observation = self.env.reset().astype(np.float32)
            while not done:
                action = self.result_model.predict(observation.reshape(1, self.num_states))
                # probe the noise-free reward first, without advancing a frame
                _, real_reward, _, _ = self.env.step(action[0], addFrame=False)
                action = np.random.normal(action, epsilonGradient[j])  # randomize action
                observation_, reward, done, info = self.env.step(action[0])
                # store all values
                all_states = np.vstack([all_states, observation])
                all_actions = np.vstack([all_actions, action])
                ep_real_rwds = np.vstack([ep_real_rwds, real_reward])
                ep_rwds = np.vstack([ep_rwds, reward])
                observation = observation_
            if self.rwdFile:
                with open(self.rwdFile,'a') as fd:
                    fd.write("{}\n".format(np.mean(ep_rwds)))
            dsc_rwds = self.discount_rewards(ep_rwds, self.rwdDiscount)
            all_dsc_rwds = np.vstack([all_dsc_rwds, dsc_rwds])
            real_dsc_rwds = self.discount_rewards(ep_real_rwds, self.rwdDiscount)
            all_dsc_real_rwds = np.vstack([all_dsc_real_rwds, real_dsc_rwds])
            logger.info(f'{self.env.agent} Train {train_number} Episode {j} Avg. reward {np.mean(all_dsc_real_rwds)}')
        return all_states, all_dsc_rwds, all_actions, all_dsc_real_rwds
|
{"/rl_vp/train.py": ["/rl_vp/enviroment/env.py"], "/rl_vp/maya_utils/meshes.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/skinCluster.py": ["/rl_vp/maya_utils/mUtils/__init__.py"], "/rl_vp/maya_utils/mUtils/mNode.py": ["/rl_vp/maya_utils/mUtils/mPlug.py"], "/rl_vp/maya_utils/mUtils/__init__.py": ["/rl_vp/maya_utils/mUtils/mNode.py", "/rl_vp/maya_utils/mUtils/mPlug.py"]}
|
20,830,007
|
The-Coffee-Society/no-free-refills
|
refs/heads/main
|
/main.py
|
import convert as data_loader
import os
path_to_sqlite_data = 'dating.db'
def main():
    """Build the SQLite database from the Kaggle CSV if it does not exist yet."""
    if os.path.exists(path_to_sqlite_data):
        return
    print('You don\'t have a database yet!')
    csv_location = os.path.join('Original Downloads from Kaggle', 'Speed Dating Data.csv')
    data_loader.import_data(csv_location, path_to_sqlite_data)

if __name__ == '__main__':
    main()
|
{"/main.py": ["/convert.py"]}
|
20,861,095
|
Lvzwq/weekly
|
refs/heads/master
|
/controller/__init__.py
|
# -*- coding: utf-8 -*-
from flask import Flask
from config.appconfig import Config
from flask import session
import hashlib
import json
app = Flask(__name__)
app.config.from_object(Config)
app.template_folder = app.config['TEMPLATE_FOLDER']
app.static_folder = app.config['STATIC_PATH']
__all__ = ['index', 'admin']
def is_login():
    """Return True when the current session belongs to the logged-in admin."""
    uid = session.get('uid')
    logged = session.get('logged')
    if not uid or not logged:
        return False
    expected_uid = hashlib.md5(app.config['ADMIN_USER']).hexdigest()
    return uid == expected_uid and logged == 1
def do_signin(username, password):
    """Validate admin credentials and return a JSON status payload."""
    if username == '' or username != app.config['ADMIN_USER']:
        return json.dumps({'status': False, 'msg': '用户名错误!'})
    if password != '' and password == app.config['ADMIN_PWD']:
        return json.dumps({'status': True, 'msg': '登录成功'})
    return json.dumps({'status': False, 'msg': '密码错误'})
def check_param(param):
    """Normalize pagination query parameters.

    Args:
        param: dict-like request-args mapping whose values are strings.

    Returns:
        dict: keys 'prev', 'next', 'current_id', 'limit', 'paper_num'.
        'paper_num' is None when missing or non-numeric; 'limit' is always
        an int.
    """
    paper_num = param.get('paper_num')
    pid = param.get('pid')
    limit = app.config['PAPER_LIMIT']
    prev = 1
    next = 2
    current_id = 1
    if not paper_num or not paper_num.isdigit():
        paper_num = None
    if param.get('limit') and param.get('limit').isdigit():
        # Request args arrive as strings; cast so pagination arithmetic such
        # as offset = limit * (page - 1) works on integers instead of doing
        # string repetition.
        limit = int(param.get('limit'))
    if pid and pid.isdigit():
        current_id = int(pid)
        prev = current_id - 1
        next = current_id + 1
    return {'prev': prev, 'next': next, 'current_id': current_id, 'limit': limit, 'paper_num': paper_num}
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,861,096
|
Lvzwq/weekly
|
refs/heads/master
|
/controller/index.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
from flask import render_template
from models.model import Model
from models.util import *
'''
with app.app_context():
pass
'''
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page for unknown URLs."""
    return render_template('404.html'), 404
@app.route('/')
def index():
    """Front page: render the first page of the most recently issued paper."""
    model = Model()
    paper_list = model.get_all_paper()  # all issued papers, newest first
    max_paper = model.get_max_paper()
    paper_id = max_paper.id
    try:
        pic_info = model.get_pic_info(paper_id)
        print pic_info
        article_list = model.get_article_list(pic_info.id)
        page_id = pic_info.id
    except IndexError, e:
        return "so sad,the page has gone"
    area_list = model.get_area_list(page_id)
    column_list = model.get_column_list(max_paper.id)
    # format the publish date of every paper
    for i in range(len(paper_list)):
        paper_list[i].pub_time = paper_list[i].time.strftime('%Y年%m月%d日')
    data = {'now': datetime.now().strftime('%Y年%m月%d日')}
    week = datetime.now().isoweekday()
    data['week'] = format_week(week)
    # build the absolute URL of the big page image
    data['max_paper'] = max_paper.num
    data['pic_url'] = app.config['HOST'] + "/Newspaper/paper" + pic_info.pic_url[3:]
    data['current_num'] = max_paper.num  # current paper number
    data['column_list'] = column_list
    data['current_page'] = column_list[0].id
    # position the clickable article areas over the page image
    for i in range(len(area_list)):
        y = str(area_list[i].y)[:-2]
        height = str(area_list[i].height)[:-2]
        title_y = int(y) + int(height) + 7
        area_list[i].title_y = str(title_y) + 'px'
        area_list[i].article_title = model.get_article_info(area_list[i].article_id).title
    column_list = model.get_column_list(max_paper.id)
    data['column_list'] = column_list
    model.close_session()
    return render_template("paper_index.html", article_list=article_list,
                           paper_list=paper_list,
                           data=data, area_list=area_list)
@app.route('/page/<int:page_id>')
def page(page_id):
    """Render a specific newspaper page by its page id."""
    model = Model()
    print page  # NOTE(review): prints the view-function object — likely meant page_id
    paper_list = model.get_all_paper()
    max_paper = model.get_max_paper()
    try:
        pic_info = model.get_page_info(page_id)
        print pic_info
        article_list = model.get_article_list(pic_info.id)
    except IndexError, e:
        return "<h1>该页面不存在!<h1>"
    area_list = model.get_area_list(page_id)
    column_list = model.get_column_list(pic_info.paper_id)
    # NOTE(review): the session is closed here but `model` is queried again
    # below (get_paper_info / get_article_info) — confirm the session layer
    # transparently reopens; closing last would be cleaner.
    model.close_session()
    # format the publish date of every paper
    for i in range(len(paper_list)):
        paper_list[i].pub_time = paper_list[i].time.strftime('%Y年%m月%d日')
    data = {'now': datetime.now().strftime('%Y年%m月%d日')}
    week = datetime.now().isoweekday()
    data['week'] = format_week(week)
    # build the absolute URL of the big page image
    data['max_paper'] = max_paper.num
    data['pic_url'] = app.config['HOST'] + "/Newspaper/paper" + pic_info.pic_url[3:]
    data['current_num'] = model.get_paper_info(pic_info.paper_id)  # current paper number
    data['current_page'] = page_id
    data['column_list'] = column_list
    # position the clickable article areas over the page image
    for i in range(len(area_list)):
        y = str(area_list[i].y)[:-2]
        height = str(area_list[i].height)[:-2]
        title_y = int(y) + int(height) + 7
        area_list[i].title_y = str(title_y) + 'px'
        area_list[i].article_title = model.get_article_info(area_list[i].article_id).title
    return render_template("paper_index.html", paper_list=paper_list, area_list=area_list, data=data,
                           article_list=article_list)
@app.route('/paper/<int:paper_id>')
def paper(paper_id):
model = Model()
paper_list = model.get_all_paper()
max_paper = model.get_max_paper()
try:
pic_info = model.get_pic_info(paper_id)
article_list = model.get_article_list(pic_info.id)
page_id = pic_info.id
except IndexError, e:
return "该报纸页面不存在!"
area_list = model.get_area_list(page_id)
column_list = model.get_column_list(pic_info.paper_id)
model.close_session()
for i in range(len(paper_list)):
paper_list[i].pub_time = paper_list[i].time.strftime('%Y年%m月%d日')
data = {'now': datetime.now().strftime('%Y年%m月%d日')}
week = datetime.now().isoweekday()
data['week'] = format_week(week)
# 格式化大图片链接
data['max_paper'] = max_paper.num
data['pic_url'] = app.config['HOST'] + "/Newspaper/paper" + pic_info.pic_url[3:]
data['current_num'] = model.get_paper_info(paper_id) # 当前期数
data['current_page'] = page
data['current_page'] = pic_info.id
data['column_list'] = column_list
print data
# 格式化图片div位置
for i in range(len(area_list)):
y = str(area_list[i].y)[:-2]
height = str(area_list[i].height)[:-2]
title_y = int(y) + int(height) + 7
area_list[i].title_y = str(title_y) + 'px'
area_list[i].article_title = model.get_article_info(area_list[i].article_id).title
return render_template("paper_index.html", data=data, area_list=area_list, paper_list=paper_list,
article_list=article_list)
@app.route('/article/<int:article_id>')
def article(article_id):
    """Render a single article together with its page context."""
    data = {'now': datetime.now().strftime('%Y年%m月%d日')}
    week = datetime.now().isoweekday()
    data['week'] = format_week(week)
    model = Model()
    paper_list = model.get_all_paper()
    max_paper = model.get_max_paper()
    try:
        article_info = model.get_article_info(article_id)
        print article_info.paper_id
        pic_info = model.get_page_info(article_info.page_id)
        data['current_num'] = model.get_paper_info(article_info.paper_id)  # current paper number
    except Exception, e:
        return "so sad,article not found!"
    area_list = model.get_area_list(article_info.page_id)
    column_list = model.get_column_list(pic_info.paper_id)
    # NOTE(review): model.get_article_info is called again below after this
    # close — confirm the session layer reopens transparently.
    model.close_session()
    # format the publish date of every paper
    for i in range(len(paper_list)):
        paper_list[i].pub_time = paper_list[i].time.strftime('%Y年%m月%d日')
    # build the absolute URL of the big page image
    data['max_paper'] = max_paper.num
    data['pic_url'] = app.config['HOST'] + "/Newspaper/paper" + pic_info.pic_url[3:]
    data['current_page'] = pic_info.id
    data['column_list'] = column_list
    # position the clickable article areas over the page image
    for i in range(len(area_list)):
        y = str(area_list[i].y)[:-2]
        height = str(area_list[i].height)[:-2]
        title_y = int(y) + int(height) + 7
        area_list[i].title_y = str(title_y) + 'px'
        area_list[i].article_title = model.get_article_info(area_list[i].article_id).title
    return render_template("article_index.html", article=article_info, data=data, paper_list=paper_list,
                           area_list=area_list)
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,861,097
|
Lvzwq/weekly
|
refs/heads/master
|
/models/util.py
|
# -*- coding: utf-8 -*-
from controller import app
import json
def format_week(week):
    """Map an isoweekday value (1-7) to its Chinese day name."""
    day_names = ('日', '一', '二', '三', '四', '五', '六', '日')
    return day_names[week]
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,861,098
|
Lvzwq/weekly
|
refs/heads/master
|
/controller/admin.py
|
# -*- coding: utf-8 -*-
# 安装Flask-Session sudo pip install Flask-Session
from flask import session, flash
from flask import render_template, request, redirect, url_for
import hashlib
from datetime import timedelta
from models.model import Model
from models.util import *
from controller import is_login, do_signin, check_param
@app.route("/login", methods=["GET", "POST"])
def login():
if is_login():
return redirect(url_for("admin"))
if request.method == 'GET':
return render_template("login.html")
elif request.method == 'POST':
username = request.form['uid']
password = request.form['pwd']
result = json.loads(do_signin(username.strip(), password.strip()))
if result['status'] == True:
flash('You were successfully logged in')
session.permanent = True
app.permanent_session_lifetime = timedelta(minutes=5)
session['uid'] = hashlib.md5(username).hexdigest()
session['logged'] = 1
return redirect(url_for("admin"))
else:
flash(result['msg'])
return redirect(url_for("login"))
else:
return "unsupported login way"
@app.route("/admin")
def admin():
if is_login():
model = Model()
param = request.args
arr = check_param(param)
print arr
if arr['paper_num'] == None:
arr['paper_num'] = model.get_max_paper().num
paper_list = model.get_paper_list(arr['limit'] * (arr['current_id'] - 1), arr['limit'])
paper_count = model.get_paper_count()
paper_info = model.get_paper(arr['paper_num'])
print paper_info
data = {}
data['paper_list'] = paper_list
data['count'] = int(paper_count) / int(arr['limit']) + 1
data.update(arr.copy())
return render_template("admin.html", data=data,paper_info=paper_info)
return redirect(url_for("login"))
@app.route("/edit")
def edit():
model = Model()
if not request.args.get("paper_num"):
return redirect(url_for("admin"))
else:
paper_num = request.args.get('paper_num')
if paper_num.isdigit():
try:
print paper_num
except Exception, e:
print e
pass
return render_template("edit_page.html")
from werkzeug.utils import secure_filename
import os
@app.route("/upload", methods=('GET', 'POST'))
def upload():
if request.method == "POST":
if not request.files:
return redirect(url_for("admin"))
else:
f = request.files['upload']
print f
# print f.read()
# print f.stream.read()
fname = secure_filename(f.filename) # 获取一个安全的文件名,且仅仅支持ascii字符;
filepath = os.path.join(app.config['UPLOAD_FOLDER'], fname)
print filepath
print f.save(filepath)
return 'upload success'
else:
return "upload"
@app.route("/submit", methods=('GET', 'POST'))
def submit():
article = {}
article['title'] = request.form.get('title', None)
article['sub_title'] = request.form.get('sub_title', None)
article['content'] = request.form.get('content', None)
article['reply_title'] = request.form.get('reply_title', None)
article['author'] = request.form.get('author', '')
article['keyword'] = request.form.get('Nkeyword', '')
article['has_pic'] = request.form.get('has_pic', 'a')
article['show_author'] = request.form.get("show_author", 0)
article['time'] = datetime.now()
model = Model()
result = model.insert_article(7421, article)
print result
# result = model.insert_article(article)
return "kkk"
@app.route("/update/<int:article_id>")
def update(article_id):
pass
@app.route("/logout")
def logout():
pass
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,861,099
|
Lvzwq/weekly
|
refs/heads/master
|
/models/model.py
|
# -*- coding: utf-8 -*-
from sqlalchemy import *
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import func
from sqlalchemy import update, delete, insert
from datetime import datetime
# from sqlalchemy.pool import NullPool
from config.db import MYSQL_HOST, MYSQL_USER, MYSQL_DB, MYSQL_CHARSET, MYSQL_PASSWORD
# 安装的扩展库sqlalchemy
# conn='mysql://root:root@localhost/newscenter?charset=utf8'
conn = 'mysql://%s:%s@%s/%s?charset=%s' % (MYSQL_USER, MYSQL_PASSWORD, MYSQL_HOST, MYSQL_DB, MYSQL_CHARSET)
engine = create_engine(conn) # 使用非连接池的方式连接数据库
Base = declarative_base(engine)
class Area(Base):
    """Clickable region on a page image, linking to an article."""
    __tablename__ = 'area'
    __table_args__ = {'autoload': True}
    id = Column(SmallInteger, primary_key=True)
    page_id = Column(Integer)
    paper_id = Column(Integer)
    article_id = Column(Integer)
    # Coordinates/sizes are stored as strings; the views strip the last two
    # characters before int() — presumably a 'px' suffix, TODO confirm.
    x = Column(String(11))
    y = Column(String(11))
    width = Column(String(11))
    height = Column(String(11))

class Article(Base):
    """A newspaper article belonging to one paper/page."""
    __tablename__ = 'article'
    __table_args__ = {'autoload': True}
    id = Column(Integer, primary_key=True)
    title = Column(String(11))
    sub_title = Column(String)
    content = Column(Text)
    time = Column(DateTime)
    paper_id = Column(Integer)
    page_id = Column(Integer)
    reply_title = Column(String)
    author = Column(String(64))
    keyword = Column(String)
    show_author = Column(Integer)
    has_pic = Column(Integer)

class Keyinfo(Base):
    """Keyword-to-article association."""
    __tablename__ = 'keyinfo'
    # __table_args__ = {'autoload': True}
    id = Column(Integer, primary_key=True)
    keyword = Column(String)
    article_id = Column(Integer)

class Paper(Base):
    """A newspaper issue; `issued` flags whether it is published."""
    __tablename__ = 'paper'
    __table_args__ = {'autoload': True}
    id = Column(Integer, primary_key=True)
    num = Column(Integer)
    issued = Column(Integer)
    time = Column(DateTime)

class Page(Base):
    """A single page of a paper, with its scanned image URL."""
    __tablename__ = 'page'
    __table_args__ = {'autoload': True}
    id = Column(Integer, primary_key=True)
    paper_id = Column(Integer)
    num = Column(Integer)
    pic_url = Column(String)
    name = Column(String)
class Model():
    """Data-access helper wrapping one SQLAlchemy session over the paper tables."""
    def __init__(self):
        # moz_article = Table('article', metadata, autoload=True)
        # mapper(Article, moz_article)
        # metadata = MetaData(engine)
        session = sessionmaker(bind=engine)
        self.session = session()
    def get_article_list(self, page_id):
        """Return (id, title, reply_title, has_pic, sub_title) rows of a page's articles."""
        article_list = self.session.query(Article.id, Article.title, Article.reply_title, Article.has_pic,
                                          Article.sub_title)
        article_list = article_list.filter(Article.page_id == page_id)
        article_list = article_list.all()
        return article_list
    def get_all_paper(self):
        """Return all issued papers, newest first."""
        paper_list = self.session.query(Paper).filter(Paper.issued == 1).order_by(desc(Paper.id)).all()
        return paper_list
    def get_max_paper(self):
        """Return the (num, id) row of the most recently issued paper."""
        # NOTE: local `max` shadows the builtin; harmless here but worth renaming.
        max = self.session.query(Paper.num, Paper.id).filter(Paper.issued == 1).order_by(desc(Paper.id)).limit(1).all()
        return max[0]
    def get_area_list(self, page_id):
        """Return the clickable area records of a page."""
        area_list = self.session.query(Area).filter(Area.page_id == page_id).all()
        return area_list
    def get_pic_info(self, paper_id, num=1):
        """Return page *num* (first page by default) of a paper; raises IndexError when missing."""
        pic_info = self.session.query(Page).filter(Page.paper_id == paper_id).filter(Page.num == num).all()
        return pic_info[0]
    def get_page_info(self, page_id):
        """Return a page record by id; raises IndexError when missing."""
        page_info = self.session.query(Page).filter(Page.id == page_id).all()
        return page_info[0]
    # get the content of a single article
    def get_article_info(self, article_id):
        """Return an article record by id; raises IndexError when missing."""
        article_info = self.session.query(Article).filter(Article.id == article_id).all()
        return article_info[0]
    def get_paper_info(self, paper_id):
        """Return the issue number of the paper with the given id."""
        page_info = self.session.query(Paper.num).filter(Paper.id == paper_id).all()
        return page_info[0].num
    def get_paper(self,paper_num):
        """Return all paper records with the given issue number."""
        return self.session.query(Paper).filter(Paper.num == paper_num).all()
    # resolve a paper id to its page records
    def get_column_list(self, paper_id):
        """Return all pages belonging to a paper."""
        column_list = self.session.query(Page).filter(Page.paper_id == paper_id).all()
        return column_list
    # paginated paper list for the admin backend
    def get_paper_list(self, offset=0, limit=20):
        """Return one page of papers for the admin list, newest first."""
        paper_list = self.session.query(Paper).order_by(desc(Paper.id)).limit(limit).offset(offset).all()
        return paper_list
    def get_paper_count(self):
        """Return the total number of papers."""
        num = self.session.query(func.count(Paper.id)).all()
        return num[0][0]
    # create a new paper issue
    def new_paper(self, paper_num):
        """Create a paper numbered *paper_num*, or refresh its timestamp if it exists."""
        is_exist = self.session.query(Paper.id).filter(Paper.num == paper_num).all()
        if is_exist:
            p = update(Paper).where(Paper.num == paper_num).values(time=datetime.now())
            self.session.execute(p)
            self.session.commit()
        else:
            paper = Paper()
            paper.num = paper_num
            paper.time = datetime.now()
            paper.issued = 0
            self.session.add(paper)
            return self.session.commit()
    # delete a paper issue
    def delete_paper(self, paper_num):
        """Delete the paper numbered *paper_num*.

        NOTE(review): returns True only when the paper did NOT exist and None
        otherwise — confirm callers rely on this inverted-looking contract.
        """
        is_exist = self.session.query(Paper.id).filter(Paper.num == paper_num).all()
        if is_exist:
            self.session.query(Paper).filter(Paper.num == paper_num).delete()
            self.session.commit()
        else:
            return True
    # insert a new article
    def insert_article(self, article_info):
        """Insert a new article from a column->value mapping."""
        sql = insert(Article, values=article_info)
        self.session.execute(sql)
        self.session.commit()
    # update an existing article
    def update_article(self, article_id, article_info):
        """Update an article's columns from a mapping (executes on the bound engine)."""
        return update(Article).where(Article.id == article_id).values(article_info).execute()
    # delete a single article
    def delete_article(self, article_id):
        """Delete an article by id (executes on the bound engine)."""
        return delete(Article, returning=Article.id, return_defaults=True).where(Article.id == article_id).execute()
    def insert_area(self, area):
        """Stub: inserting clickable areas is not implemented yet."""
        pass
    def close_session(self):
        """Release the underlying database session."""
        self.session.close()
if __name__ == '__main__':
    # Ad-hoc manual testing of the data layer; uncomment a call to try it.
    model = Model()
    # print model.get_paper_count()
    # print model.get_article_list(555)
    # print model.get_max_paper().id
    # print
    # print model.delete_paper(508)
    # for i in model.get_area_list(273):
    # print i.paper_id
    # for i in model.get_all_paper():
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,861,100
|
Lvzwq/weekly
|
refs/heads/master
|
/config/appconfig.py
|
# -*- coding: utf-8 -*-
class Config(object):
    """Flask application settings for the weekly-paper site."""
    SECRET_KEY = 'hello world bingyan'
    HOST = 'http://images.hustnews.com'
    # papers per page in the admin list
    PAPER_LIMIT = 20
    ALLOWED_MIMETYPES = {'image/jpeg', 'image/png', 'image/gif', 'image/bmp', 'image/jpg'}
    # NOTE(review): admin credentials are hard-coded in source control; move
    # them to environment variables or an untracked config file.
    ADMIN_USER = 'hustnews'
    ADMIN_PWD = '87542701'
    TEMPLATE_FOLDER = '../templates'
    STATIC_PATH = '../static'
    # Fixed: UPLOAD_FOLDER was defined twice with the same value; keep one.
    UPLOAD_FOLDER = 'static/upload'
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,861,101
|
Lvzwq/weekly
|
refs/heads/master
|
/weekly.py
|
# -*- coding: utf-8 -*-
from controller import app
from controller import *
import sys
# Python-2-era hack so templates and DB strings default to utf-8.
reload(sys)
sys.setdefaultencoding("utf-8")

if __name__ == '__main__':
    # Run the dev server only when executed directly, not on import
    # (previously app.run fired unconditionally, which breaks WSGI import).
    app.debug = True
    app.run(host='127.0.0.1', port=5000)
|
{"/weekly.py": ["/controller/__init__.py"], "/controller/index.py": ["/models/model.py", "/models/helper.py"], "/models/model.py": ["/config/__init__.py"], "/controller/__init__.py": ["/config/__init__.py", "/config/appconfig.py"], "/models/url.py": ["/config/__init__.py"], "/models/util.py": ["/controller/__init__.py"]}
|
20,995,505
|
kmnkit/donkey_ears_api
|
refs/heads/main
|
/shouts/views.py
|
from rest_framework.viewsets import ModelViewSet
from .models import Shout
from .serializers import ShoutSerializer
from .permissions import IsAuthor
from rest_framework.permissions import IsAdminUser
class ShoutViewSet(ModelViewSet):
    """CRUD API for shouts; eagerly loads the author to avoid N+1 queries."""
    # select_related("user") fetches the author in the same query as the shout.
    queryset = Shout.objects.select_related("user").all()
    serializer_class = ShoutSerializer
    # NOTE(review): DRF AND-combines listed permission classes, so a request
    # must satisfy BOTH IsAuthor and IsAdminUser; if either-or was intended,
    # use `IsAuthor | IsAdminUser` — confirm against the product requirement.
    permission_classes = [IsAuthor, IsAdminUser]
|
{"/shouts/views.py": ["/shouts/models.py"]}
|
20,995,506
|
kmnkit/donkey_ears_api
|
refs/heads/main
|
/shouts/models.py
|
from django.db import models
from core.models import TimeStampedModel
class Shout(TimeStampedModel):
    """A short user post (max 200 chars); timestamps come from TimeStampedModel."""
    # author; deleting the user cascades to their shouts
    user = models.ForeignKey(
        "users.User", related_name="shouts", on_delete=models.CASCADE
    )
    text = models.TextField(max_length=200)
|
{"/shouts/views.py": ["/shouts/models.py"]}
|
20,995,507
|
kmnkit/donkey_ears_api
|
refs/heads/main
|
/users/views.py
|
import jwt
from django.conf import settings
from django.contrib.auth import authenticate
from rest_framework import status
from rest_framework.viewsets import ModelViewSet
from rest_framework.decorators import action
from rest_framework.response import Response
from .models import User
from .serializers import UserSerializer
class UserViewSet(ModelViewSet):
    """CRUD endpoints for users plus a JWT login action."""
    queryset = User.objects.all()
    serializer_class = UserSerializer

    @action(detail=False, methods=["post"])
    def login(self, request):
        """Issue a JWT for valid credentials.

        Returns 400 when username or password is missing, 401 when they do
        not authenticate, otherwise {"token": <jwt>, "id": <pk>}.
        """
        username = request.data.get("username")
        password = request.data.get("password")
        if not username or not password:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        user = authenticate(username=username, password=password)
        if user is not None:
            # NOTE(review): the token carries only the pk and no `exp` claim,
            # so it never expires — confirm this is intended.
            encoded_jwt = jwt.encode(
                {"pk": user.pk}, settings.SECRET_KEY, algorithm="HS256"
            )
            return Response(data={"token": encoded_jwt, "id": user.pk})
        else:
            return Response(status=status.HTTP_401_UNAUTHORIZED)

    def logout(self, request):
        # NOTE(review): not decorated with @action, so this method is not
        # routed by the router; stub only.
        pass
|
{"/shouts/views.py": ["/shouts/models.py"]}
|
21,057,581
|
anantpad/refills
|
refs/heads/main
|
/connectSQL.py
|
import pyodbc
import os
import dotenv
# NOTE(review): database credentials are hard-coded below even though `os`
# and `dotenv` are imported (and unused) — presumably they were meant to come
# from environment variables; move them out of source control.
conn_str = (
    'Trusted_Connection=No;'
    'driver={SQL Server};'
    'server=DEMOCPS00362001.englab.athenahealth.com;'
    'database=demodb;'
    'uid=sa;'
    'pwd=GEps2006;'
)
cnxn = pyodbc.connect(conn_str)
cursor = cnxn.cursor()
# print each distinct patient's first name (row.last is also selected)
for row in cursor.execute('select DISTINCT first,last from PatientProfile'):
    print(row.first)
|
{"/queries.py": ["/connectSQL.py"], "/enterData.py": ["/connectSQL.py"], "/webtest.py": ["/enterData.py"]}
|
21,062,226
|
JDima/search-chromosome
|
refs/heads/master
|
/search_chromosome/search_chromosome.py
|
def search_chromosome():
    """Placeholder: the chromosome-search algorithm is not implemented yet."""
    pass
|
{"/search_chromosome/test_public.py": ["/search_chromosome/search_chromosome.py"]}
|
21,062,227
|
JDima/search-chromosome
|
refs/heads/master
|
/search_chromosome/test_public.py
|
import pytest
from .search_chromosome import search_chromosome
|
{"/search_chromosome/test_public.py": ["/search_chromosome/search_chromosome.py"]}
|
21,139,755
|
saeta33/tarotlife-back
|
refs/heads/master
|
/src/api/models/__init__.py
|
from .user import User, UserSchema
from .karuta import Karuta, KarutaSchema
from .user_score import User_score, User_scoreSchema
|
{"/src/api/__init__.py": ["/src/api/views/karuta.py", "/src/api/views/user_score.py", "/src/api/views/reading_list.py", "/src/api/views/user_reading.py"], "/src/api/models/__init__.py": ["/src/api/models/user.py", "/src/api/models/karuta.py", "/src/api/models/user_score.py", "/src/api/models/reading_list.py", "/src/api/models/user_reading.py"]}
|
21,186,217
|
honeydev/junior
|
refs/heads/master
|
/src/test_cases/__init__.py
|
from src.test_cases.models import *
|
{"/tests/base.py": ["/src/main.py", "/src/settings.py"], "/src/user/views.py": ["/src/mailers/send_mail.py", "/src/user/auth.py", "/src/user/decorators.py", "/src/user/forms.py", "/src/user/models.py", "/src/views.py"], "/tests/test_cases/fetch_api_data_test.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/test_cases/constants.py", "/src/uttils.py", "/tests/base.py", "/tests/factories.py", "/tests/test_uttils.py"], "/src/commands.py": ["/src/qa/models.py", "/src/user/__init__.py", "/src/uttils.py"], "/tests/test_cases/change_test_cases_test.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/uttils.py", "/tests/base.py", "/tests/factories.py", "/tests/test_uttils.py"], "/src/qa/views.py": ["/src/qa/forms.py", "/src/qa/models.py", "/src/user/__init__.py", "/src/views.py"], "/tests/qa_test.py": ["/src/qa/models.py", "/src/user/models.py", "/tests/base.py"], "/tests/main_page_test.py": ["/src/qa/models.py", "/src/uttils.py", "/tests/base.py"], "/src/test_cases/schemas.py": ["/src/test_cases/constants.py"], "/src/qa/models.py": ["/src/base/base_models.py"], "/src/main.py": ["/src/admin_forms.py", "/src/commands.py", "/src/qa/models.py", "/src/qa/views.py", "/src/settings.py", "/src/test_cases/__init__.py", "/src/test_cases/views.py", "/src/user/__init__.py", "/src/user/auth.py", "/src/user/oauth.py", "/src/user/views_oauth.py", "/src/views.py"], "/src/test_cases/__init__.py": ["/src/test_cases/models.py"], "/src/test_cases/services.py": ["/src/test_cases/models.py", "/src/test_cases/schemas.py", "/src/test_cases/uttils.py"], "/tests/auth_test.py": ["/src/user/auth.py", "/src/user/models.py", "/src/user/oauth.py", "/tests/base.py", "/tests/mocks.py", "/tests/test_uttils.py"], "/tests/factories.py": ["/src/user/models.py"], "/src/qa/__init__.py": ["/src/qa/models.py"], "/src/qa/forms.py": ["/src/admin_forms.py"], "/src/test_cases/views.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/test_cases/services.py", 
"/src/user/decorators.py", "/src/views.py"], "/app.py": ["/src/main.py", "/src/settings.py"], "/src/user/__init__.py": ["/src/user/models.py", "/src/user/views.py"], "/src/user/views_oauth.py": ["/src/user/auth.py", "/src/user/decorators.py", "/src/user/models.py", "/src/user/oauth.py"], "/src/views.py": ["/src/qa/models.py", "/src/test_cases/models.py"], "/src/user/auth.py": ["/src/user/models.py", "/src/user/oauth.py"], "/scripts/check_freeze.py": ["/app.py"], "/src/user/oauth.py": ["/src/settings_oauth.py", "/src/user/models.py"]}
|
21,186,218
|
honeydev/junior
|
refs/heads/master
|
/src/test_cases/services.py
|
from src.extensions import db
from src.test_cases.models import TestQuestionUserRelation
from src.test_cases.schemas import UserTestCaseSchema
from src.test_cases.uttils import flat_user_test_case
class TestCaseService:
    """User test case logic layer.

    Wraps the queries needed to expose one user's progress through one test
    case: lazily creates the per-question relation rows, then returns a
    flattened, filtered view of them.
    """
    def __init__(self, user, test_case):
        # user: the user model instance whose progress is tracked.
        # test_case: the test case whose questions are being answered.
        self.user = user
        self.test_case = test_case
    def load_user_case(self):
        """Return user test case relations."""
        # Ensure every question of this case has a relation row for this
        # user, so the serialized dump below is complete.
        self.create_missing_relations()
        schema = UserTestCaseSchema()
        # Serialize the user, flatten the nested question relations, and
        # keep only the rows that belong to this test case.
        return tuple(filter(
            lambda question: question['test_case_id'] == self.test_case.id,
            flat_user_test_case(schema.dump(self.user)),
        ))
    def create_missing_relations(self):
        """Bulk-create any TestQuestionUserRelation rows this user lacks."""
        # Ids of all questions belonging to the current test case.
        questions_id = {
            test_question.id
            for test_question in self.test_case.test_questions
        }
        # Relation rows the user already has for those questions.
        user_question_relations = TestQuestionUserRelation.query.filter(
            TestQuestionUserRelation.user == self.user,
            TestQuestionUserRelation.test_question_id.in_(questions_id),
        ).all()
        existed_relations_ids = {
            relation.test_question.id
            for relation in user_question_relations
        }
        # One new relation row per question still missing one.
        questions_without_relations = questions_id - existed_relations_ids
        new_relations = tuple(
            TestQuestionUserRelation(
                test_question_id=question_id,
                user_id=self.user.id,
            )
            for question_id in questions_without_relations
        )
        db.session.bulk_save_objects(new_relations)
        db.session.commit()
|
{"/tests/base.py": ["/src/main.py", "/src/settings.py"], "/src/user/views.py": ["/src/mailers/send_mail.py", "/src/user/auth.py", "/src/user/decorators.py", "/src/user/forms.py", "/src/user/models.py", "/src/views.py"], "/tests/test_cases/fetch_api_data_test.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/test_cases/constants.py", "/src/uttils.py", "/tests/base.py", "/tests/factories.py", "/tests/test_uttils.py"], "/src/commands.py": ["/src/qa/models.py", "/src/user/__init__.py", "/src/uttils.py"], "/tests/test_cases/change_test_cases_test.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/uttils.py", "/tests/base.py", "/tests/factories.py", "/tests/test_uttils.py"], "/src/qa/views.py": ["/src/qa/forms.py", "/src/qa/models.py", "/src/user/__init__.py", "/src/views.py"], "/tests/qa_test.py": ["/src/qa/models.py", "/src/user/models.py", "/tests/base.py"], "/tests/main_page_test.py": ["/src/qa/models.py", "/src/uttils.py", "/tests/base.py"], "/src/test_cases/schemas.py": ["/src/test_cases/constants.py"], "/src/qa/models.py": ["/src/base/base_models.py"], "/src/main.py": ["/src/admin_forms.py", "/src/commands.py", "/src/qa/models.py", "/src/qa/views.py", "/src/settings.py", "/src/test_cases/__init__.py", "/src/test_cases/views.py", "/src/user/__init__.py", "/src/user/auth.py", "/src/user/oauth.py", "/src/user/views_oauth.py", "/src/views.py"], "/src/test_cases/__init__.py": ["/src/test_cases/models.py"], "/src/test_cases/services.py": ["/src/test_cases/models.py", "/src/test_cases/schemas.py", "/src/test_cases/uttils.py"], "/tests/auth_test.py": ["/src/user/auth.py", "/src/user/models.py", "/src/user/oauth.py", "/tests/base.py", "/tests/mocks.py", "/tests/test_uttils.py"], "/tests/factories.py": ["/src/user/models.py"], "/src/qa/__init__.py": ["/src/qa/models.py"], "/src/qa/forms.py": ["/src/admin_forms.py"], "/src/test_cases/views.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/test_cases/services.py", 
"/src/user/decorators.py", "/src/views.py"], "/app.py": ["/src/main.py", "/src/settings.py"], "/src/user/__init__.py": ["/src/user/models.py", "/src/user/views.py"], "/src/user/views_oauth.py": ["/src/user/auth.py", "/src/user/decorators.py", "/src/user/models.py", "/src/user/oauth.py"], "/src/views.py": ["/src/qa/models.py", "/src/test_cases/models.py"], "/src/user/auth.py": ["/src/user/models.py", "/src/user/oauth.py"], "/scripts/check_freeze.py": ["/app.py"], "/src/user/oauth.py": ["/src/settings_oauth.py", "/src/user/models.py"]}
|
21,186,219
|
honeydev/junior
|
refs/heads/master
|
/src/test_cases/uttils.py
|
def flat_user_test_case(user_test_case: dict) -> tuple:
    """Flatten serialized question relations into one dict per question.

    Each entry starts from the relation's 'completed'/'id' fields and is then
    overlaid with the nested 'test_question' fields (which win on key clashes,
    e.g. 'id').
    """
    flattened = []
    for relation in user_test_case['question_relation']:
        entry = {
            'completed': relation['completed'],
            'id': relation['id'],
        }
        entry.update(relation['test_question'])
        flattened.append(entry)
    return tuple(flattened)
|
{"/tests/base.py": ["/src/main.py", "/src/settings.py"], "/src/user/views.py": ["/src/mailers/send_mail.py", "/src/user/auth.py", "/src/user/decorators.py", "/src/user/forms.py", "/src/user/models.py", "/src/views.py"], "/tests/test_cases/fetch_api_data_test.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/test_cases/constants.py", "/src/uttils.py", "/tests/base.py", "/tests/factories.py", "/tests/test_uttils.py"], "/src/commands.py": ["/src/qa/models.py", "/src/user/__init__.py", "/src/uttils.py"], "/tests/test_cases/change_test_cases_test.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/uttils.py", "/tests/base.py", "/tests/factories.py", "/tests/test_uttils.py"], "/src/qa/views.py": ["/src/qa/forms.py", "/src/qa/models.py", "/src/user/__init__.py", "/src/views.py"], "/tests/qa_test.py": ["/src/qa/models.py", "/src/user/models.py", "/tests/base.py"], "/tests/main_page_test.py": ["/src/qa/models.py", "/src/uttils.py", "/tests/base.py"], "/src/test_cases/schemas.py": ["/src/test_cases/constants.py"], "/src/qa/models.py": ["/src/base/base_models.py"], "/src/main.py": ["/src/admin_forms.py", "/src/commands.py", "/src/qa/models.py", "/src/qa/views.py", "/src/settings.py", "/src/test_cases/__init__.py", "/src/test_cases/views.py", "/src/user/__init__.py", "/src/user/auth.py", "/src/user/oauth.py", "/src/user/views_oauth.py", "/src/views.py"], "/src/test_cases/__init__.py": ["/src/test_cases/models.py"], "/src/test_cases/services.py": ["/src/test_cases/models.py", "/src/test_cases/schemas.py", "/src/test_cases/uttils.py"], "/tests/auth_test.py": ["/src/user/auth.py", "/src/user/models.py", "/src/user/oauth.py", "/tests/base.py", "/tests/mocks.py", "/tests/test_uttils.py"], "/tests/factories.py": ["/src/user/models.py"], "/src/qa/__init__.py": ["/src/qa/models.py"], "/src/qa/forms.py": ["/src/admin_forms.py"], "/src/test_cases/views.py": ["/src/qa/models.py", "/src/test_cases/__init__.py", "/src/test_cases/services.py", 
"/src/user/decorators.py", "/src/views.py"], "/app.py": ["/src/main.py", "/src/settings.py"], "/src/user/__init__.py": ["/src/user/models.py", "/src/user/views.py"], "/src/user/views_oauth.py": ["/src/user/auth.py", "/src/user/decorators.py", "/src/user/models.py", "/src/user/oauth.py"], "/src/views.py": ["/src/qa/models.py", "/src/test_cases/models.py"], "/src/user/auth.py": ["/src/user/models.py", "/src/user/oauth.py"], "/scripts/check_freeze.py": ["/app.py"], "/src/user/oauth.py": ["/src/settings_oauth.py", "/src/user/models.py"]}
|
21,229,636
|
soudk/PyDinsky
|
refs/heads/main
|
/src/random_walker.py
|
#import scipy.stats as ss
import numpy as np
import matplotlib.pyplot as plt
#----------------------------------------------------------------------------
class walker: # Input is boltz rand. variable
def __init__(self,x,y):
self.x = x
self.y = y
self.v = 0.0
def take_step(self,v,xx,yy):
self.v = v
t = np.random.rand()*2*np.pi
x_step = np.cos(t)
y_step = np.sin(t)
self.x += v*x_step
if self.x < xx[0] or self.x > xx[1]:
self.x -= 2*v*x_step
self.y += v*y_step
if self.y < yy[0] or self.y > yy[1]:
self.y -= 2*v*y_step
def sample(self):
return [self.x,self.y,self.v]
# def intx
def rand_b(T):  # rejection-sample a speed for temperature T (Kelvin)
    #c = 4.8*10**(-4) # m/k_b for He
    c = 3.3693*10**(-3) #m/k_b for N2
    def boltz(v,T):
        # NOTE(review): as written this evaluates exp(-c*v*v*T/2) because of
        # operator precedence; the Maxwell-Boltzmann form implied by the
        # m/k_b comments above would be exp(-c*v*v/(2*T)). Confirm intent
        # before changing — the visualization was tuned against this version.
        return (c*v/T)*np.exp(-c*v*v/2*T)
    v_mp = np.sqrt(2*c*T) # most probable velocity
    x = np.random.rand()*v_mp*4. # setting max v value as 4 times most probable
    y = np.random.rand()*1.2*boltz(v_mp,T) # setting max y limts as just little above the max of pdf
    # Accept/reject: retry (recursively) until a sample under the pdf is drawn.
    # NOTE(review): unlucky streaks recurse; an unbounded loop would avoid any
    # RecursionError risk, at the cost of a behavior-identical rewrite.
    if boltz(x,T)>y:
        return x # scaling the x-scale to allow use of integral steps
    else:
        return rand_b(T)
def rand_walker_data(n,T,n_steps,num_walkers,positions): # n=grid size, T=temperatures of the seed points, positions=seed coordinates
    # Temperature field driving each walker's step-speed distribution.
    tmap = temp_map(n,T,positions)
    x = [0,n] # sets range for x
    y = [0,n] # sets range for y
    wlk = [] # init array of walkers
    # Spawn sqrt(num_walkers)^2 walkers on an evenly spaced grid.
    for i in range(int(num_walkers**0.5)):
        for j in range(int(num_walkers**0.5)):
            wlk.append(walker((i+1)*(max(x)-min(x))/(num_walkers**0.5+1),(j+1)*(max(y)-min(y))/(num_walkers**0.5+1)))
    # NOTE(review): a second batch of walkers is appended at the seed
    # positions, but the stepping loop below only ever uses wlk[0..num_walkers-1]
    # — i.e. the grid-spawned batch — so these position-based walkers appear
    # to be dead; `positions` only influences the temperature map. Confirm
    # which batch was intended before removing either loop.
    for i in range(num_walkers):
        wlk.append(walker(positions[i,0],positions[i,1]))
    pos_x = []
    pos_y = []
    vel = []
    # One row per step: record every walker's state, then advance it with a
    # speed drawn for the local temperature at its current cell.
    for i in range(n_steps):
        temp_x = np.zeros(num_walkers)
        temp_y = np.zeros(num_walkers)
        temp_z = np.zeros(num_walkers)
        for j in range(num_walkers):
            temp_x[j] = wlk[j].x
            temp_y[j] = wlk[j].y
            temp_z[j] = wlk[j].v
            wlk[j].take_step(rand_b(tmap[int(temp_x[j]),int(temp_y[j])]),x,y)
        pos_x.append(temp_x)
        pos_y.append(temp_y)
        vel.append(temp_z)
    pos_x = np.asarray(pos_x)
    pos_y = np.asarray(pos_y)
    vel = np.asarray(vel)
    # Shapes: (n_steps, num_walkers) for pos_x/pos_y/vel; (n, n) for tmap.
    return pos_x, pos_y, vel, tmap
def temp_map(n, T, positions):
    """Build an n x n temperature field from point heat sources.

    Cells that hold a source keep exactly that source's temperature T[k];
    every other cell receives the sum of Gaussian contributions (width 25
    cells) from all sources.
    """
    field = np.ones((n, n)) * (-1.0)  # -1.0 marks "not a source" cells
    for k, (px, py) in enumerate(positions):
        field[int(px), int(py)] = T[k]
    for i in range(n):
        for j in range(n):
            if field[i, j] == -1:
                # Accumulate every source's Gaussian falloff at (i, j),
                # starting from zero, in source order.
                field[i, j] = sum(
                    (T[k] * np.exp(-(np.sqrt((positions[k, 0] - i) ** 2
                                             + (positions[k, 1] - j) ** 2) / 25) ** 2)
                     for k in range(len(positions))),
                    0.0,
                )
    return field
#----------------------------------------------------------------------------
'''
num_walkers = 9
num_steps = 1000
n_grid = 100
T = np.linspace(250,350,num_walkers) # Temperature
positions = []
for i in range(num_walkers):
positions.append([int(np.random.rand()*100),int(np.random.rand()*100)])
positions = np.asanyarray(positions)
x,y,v,tmap = rand_walker_data(n_grid,T,num_steps,num_walkers,positions)
#for i in range(num_walkers):
# plt.plot(x[:,i],y[:,i],'.')
#plt.show()
'''
#-------------------------------------------------------------------------
|
{"/main-bokeh.py": ["/random_walker.py"], "/src/plotter.py": ["/random_walker.py"]}
|
21,229,637
|
soudk/PyDinsky
|
refs/heads/main
|
/src/colormap.py
|
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import matplotlib as plt
import numpy as np
#import get_temperature as get_t
import random as ran
#temperature range limits
# Temperature band limits (Kelvin): below cold_to_mid is "cold",
# cold_to_mid..mid_to_warm is "medium", above mid_to_warm is "warm".
cold_to_mid = 278.15
mid_to_warm = 293.15
# Each palette entry is an [r, g, b] triple in 0..1; every channel is
# jittered by up to random_range at import time, so the palettes differ
# slightly on every run.
random_range = 0.1
hot_pal = [
[1 - ran.uniform(0, random_range), .2 + ran.uniform(0, random_range), .3 + ran.uniform(0, random_range)],
[1 - ran.uniform(0, random_range), .3 + ran.uniform(0, random_range), .23 + ran.uniform(0, random_range)],
[1 - ran.uniform(0, random_range), .4 + ran.uniform(0, random_range), .15 + ran.uniform(0, random_range)],
[1 - ran.uniform(0, random_range), .5 + ran.uniform(0, random_range), .08 + ran.uniform(0, random_range)],
[1 - ran.uniform(0, random_range), .5 + ran.uniform(0, random_range), 0 + ran.uniform(0, random_range)]
]
med_pal = [
[.4 + ran.uniform(0, random_range), 1 - ran.uniform(0, random_range), .6 + ran.uniform(0, random_range)],
[.55 + ran.uniform(0, random_range), 1 - ran.uniform(0, random_range), .5 + ran.uniform(0, random_range)],
[.7 + ran.uniform(0, random_range), 1 - ran.uniform(0, random_range), .4 + ran.uniform(0, random_range)],
[.85 - ran.uniform(0, random_range), 1 - ran.uniform(0, random_range), .3 + ran.uniform(0, random_range)],
[1 - ran.uniform(0, random_range), 1 - ran.uniform(0, random_range), .2 + ran.uniform(0, random_range)]
]
cold_pal = [
[0 + ran.uniform(0, random_range), 0 + ran.uniform(0, random_range), .8 - ran.uniform(0, random_range)],
[.15 + ran.uniform(0, random_range), .25 + ran.uniform(0, random_range), .85 - ran.uniform(0, random_range)],
[.3 + ran.uniform(0, random_range), .5 + ran.uniform(0, random_range), .9 - ran.uniform(0, random_range)],
[.45 + ran.uniform(0, random_range), .75 - ran.uniform(0, random_range), .95 - ran.uniform(0, random_range)],
[.6 + ran.uniform(0, random_range), 1 - ran.uniform(0, random_range), 1 - ran.uniform(0, random_range)]
]
# Dead alternative kept from an earlier seaborn-based version:
"""
cold_pal = sns.color_palette()
med_pal = sns.color_palette()
hot_pal = sns.color_palette()
"""
def FindPalette(temp):
    """Return the cold / medium / hot palette for a temperature in Kelvin."""
    if temp < cold_to_mid:
        return cold_pal
    if cold_to_mid <= temp < mid_to_warm:
        return med_pal
    if temp >= mid_to_warm:
        return hot_pal
    # Deliberate fall-through (returns None) for non-orderable values
    # such as NaN, matching the original's behavior.
def color_assign(velocities, color_palette):
    """Map each velocity to a palette color by equal-width binning.

    The velocity range [min, max] is split into len(color_palette) equal
    intervals; each velocity gets the color of the interval it falls in
    (the lowest bound is nudged down by 0.0001 so the minimum is included).
    Returns a list of colors parallel to `velocities`.
    """
    nb_color = len(color_palette)
    # Hoisted: the original recomputed min(velocities) inside the loop.
    min_velo = min(velocities)
    max_velo = max(velocities)
    interval = (max_velo - min_velo) / nb_color
    # Each range entry: [exclusive lower bound, inclusive upper bound, color].
    ranges = [[min_velo - 0.0001, min_velo + interval, color_palette[0]]]
    for i in range(nb_color - 1):
        ranges.append(
            [min_velo + interval * (i + 1),
             min_velo + interval * (i + 2),
             color_palette[i + 1]]
        )
    color_list = []
    for velo in velocities:
        for low, high, color in ranges:
            if low < velo <= high:
                color_list.append(color)
    return color_list
|
{"/main-bokeh.py": ["/random_walker.py"], "/src/plotter.py": ["/random_walker.py"]}
|
21,229,638
|
soudk/PyDinsky
|
refs/heads/main
|
/src/get_temperature.py
|
"""
get_temperature.py
Return the temperature of Montreal at a given time. Future versions: take city name as input and return temperature of that city.
"""
import requests
import json
import os.path as osp
import argparse
import country_converter
script_dir = osp.dirname(__file__)
def get_temperature(city, country):
    """Return the current temperature (Kelvin) for city/country.

    Tries an id-based OpenWeatherMap query using the bundled city list;
    falls back to a name-based query if the lookup fails.
    """
    # NOTE(review): the API key is hard-coded; move it to an environment
    # variable before publishing this repository.
    api_key = "1c7b67b1e9de33125b6801dc38c28efc"
    # convert country code to ISO2 standard (required by API)
    country_ISO = country_converter.convert(country, to='ISO2')
    try:
        city_id = get_city_id(city, country_ISO)
        # pull temperature, given city_id
        data = requests.get(f'https://api.openweathermap.org/data/2.5/weather?id={city_id}&appid={api_key}')
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit). Any lookup/request failure falls
        # back to querying by name.
        data = requests.get(f'https://api.openweathermap.org/data/2.5/weather?q={city},{country_ISO}&appid={api_key}')
    contents = data.json()
    temperature = contents['main']['temp']
    return temperature
def get_city_id(city, country):
    """Return the OpenWeatherMap city id for city/country (ISO2 code).

    Raises IndexError when no listing matches; callers treat that as a
    lookup failure and fall back to a name-based query.
    """
    # Context manager closes the handle (the original `json.load(open(...))`
    # leaked it).
    with open(osp.join(script_dir, '..', 'data', 'city_list.json'), "r") as city_file:
        cities = json.load(city_file)
    # Keep only listings for this city/country; take the first if several.
    matches = [entry for entry in cities
               if entry['name'] == city and entry['country'] == country]
    return matches[0]['id']
def main():
    """CLI entry point: print the temperature for the given city/country."""
    parser = argparse.ArgumentParser()
    parser.add_argument('city')
    parser.add_argument('country')
    cli_args = parser.parse_args()
    temperature = get_temperature(cli_args.city, cli_args.country)
    print(temperature)
if __name__ == '__main__':
main()
|
{"/main-bokeh.py": ["/random_walker.py"], "/src/plotter.py": ["/random_walker.py"]}
|
21,229,639
|
soudk/PyDinsky
|
refs/heads/main
|
/main-bokeh.py
|
from bokeh.plotting import figure, curdoc, show
from bokeh.layouts import column
from bokeh.models import ColumnDataSource, Select, CustomJS, Rect
from bokeh.palettes import magma #actually going to use cutom pallete
from bokeh.themes import built_in_themes
import numpy as np
from datetime import date
import json
import sys
sys.path.append('./src/')
import colormap as cm
import random_walker as rw
import get_temperature as gt
# GENERATE STARTING GRAPHS
# Simulation parameters: one walker, 3000 steps, on a 100x100 grid.
num_walkers = 1
num_steps = 3000
n_grid = 100
#READ JSON DATA
with open('data/shorter_city_list.json', 'r') as json_file:
    data = json.load(json_file)
# Parallel lists for the dropdown; index 0 is a "none" placeholder so the
# widget starts without a selected city.
cities=["none"]
countries=["none"]
coords=["none"]
#What data do we want
for d in data:
    cities.append(d["name"])
    countries.append(d["country"])
    coords.append(d["coord"])
def lon_lat(data):
    """Map a [longitude, latitude] pair onto the 100x100 walker grid.

    Longitude -180..180 becomes x 0..100; latitude -90..90 becomes y 0..100.
    Returns a one-element list holding the [x, y] numpy array (the shape
    rand_walker_data expects for `positions`).
    """
    lon = data[0]
    lat = data[1]
    grid_x = 100 * (lon + 180) / 360
    grid_y = 100 * (lat + 90) / 180
    return [np.asarray([grid_x, grid_y])]
#INITIAL PLOT - ALWAYS MONTREAL
#WALKERS
CityName = "Montreal"
Country = "Canada"
T = [gt.get_temperature(CityName, Country)]   # Kelvin, fetched from OpenWeatherMap
#T = [300.0] #testing
positions =[[29.6, 75.2]] #Montreal - hard coded, because of the accent screwing things up.
#positions = lon_lat(countries[coords.index(CityName)]) #Montreal - hard coded
#for i in range(num_walkers):
#    positions.append([int(np.random.rand()*100),int(np.random.rand()*100)])
positions = np.asanyarray(positions)
x,y,v,tmap = rw.rand_walker_data(n_grid,T,num_steps,num_walkers,positions)
today = date.today()
# NOTE(review): this rebinds the imported `date` class to a string, so
# `date.today()` cannot be called again anywhere later in this module.
date = today.strftime("%B %d, %Y")
def rgb_to_hex(rgb):
    """Format an (r, g, b) integer triple as a 6-digit lowercase hex string."""
    r, g, b = rgb
    return '{:02x}{:02x}{:02x}'.format(r, g, b)
#PLOTTING
T_cel = round(T[0]-273.15, 1) #temperature in celsius for use in legends etc...
plot = figure(title = "A Random Walk: "+CityName+", "+ Country +" on "+ date +" , "+str(T_cel)+" "+chr(176)+"C", x_axis_label = "X Position", y_axis_label = "Y Position")
size = 10
# Map each step's speed to an RGB color from the temperature-matched palette.
color_list = cm.color_assign(v[:,0], cm.FindPalette(T[0]))
color_list = np.asarray(color_list)
#print(color_list)
# Scale 0-1 channel floats up to 0-255 for hex conversion.
color_list *= 255
#print(color_list)
color_list = np.floor(color_list)
# NOTE(review): astype returns a new array; this result is discarded — the
# per-channel astype(int) calls inside the loop below do the real conversion.
color_list.astype(int)
color_hex = []
for c in color_list:
    c.astype(int)
    color_hex.append(rgb_to_hex((c[0].astype(int), c[1].astype(int), c[2].astype(int))))
plot.line(x[:,0], y[:,0], color = 'grey', line_alpha = 0.2) #colour here should be the average colour
plot.circle(0, 0, size=0.00000001, color= "#ffffff", legend="Cities & Temperatures") #for the legend title
plot.scatter(x[:,0], y[:,0], size=size, color=color_hex, fill_alpha = 0.3, legend_label=CityName+": "+str(T_cel)+chr(176)+"C")
plot.legend.click_policy="hide"
# HANDLE BOKEH CALLBACKS ...
def update_city1(attr, old, new):
    """Bokeh on_change callback for the city dropdown.

    Fetches the selected city's temperature, re-runs the random walk from
    that city's grid position, and ADDS a new line/scatter to the existing
    figure — earlier cities stay plotted until hidden via the legend.
    """
    #after an update to the city list
    CityName = dropdown_1.value
    Country = countries[cities.index(CityName)]
    T = [gt.get_temperature(CityName, Country)]   # Kelvin, via OpenWeatherMap
    #T = [300.0]
    T_cel = round(T[0]-273.15, 1)
    #print("Temperature: ", T[0]) #testing
    # `date` here is the module-level formatted date string set at startup.
    tit = "A Random Walk: "+CityName+", "+ Country +" on "+ date +", "+str(round(T[0]-273.15, 1))+" "+chr(176)+"C"
    plot.title.text = tit
    #print("------------------------------------", coords[cities.index(CityName)])
    # Convert the city's lon/lat to walker-grid coordinates.
    positions = lon_lat([coords[cities.index(CityName)]['lon'], coords[cities.index(CityName)]['lat']])
    # NOTE(review): self-assignment — has no effect.
    positions = positions
    #for i in range(num_walkers):
    #    positions.append([int(np.random.rand()*100),int(np.random.rand()*100)])
    positions = np.asanyarray(positions)
    x,y,v,tmap = rw.rand_walker_data(n_grid,T,num_steps,num_walkers,positions)
    color_list = cm.color_assign(v[:,0], cm.FindPalette(T[0]))#from cm
    plot.line(x[:,0], y[:,0], color = 'grey', line_alpha = 0.2)
    color_list = np.asarray(color_list)
    #print(color_list)
    # Scale 0-1 channel floats up to 0-255 for hex conversion.
    color_list *= 255
    #print(color_list)
    color_list = np.floor(color_list)
    # NOTE(review): astype result discarded; the per-channel casts below
    # are what actually convert the values.
    color_list.astype(int)
    color_hex = []
    for c in color_list:
        c.astype(int)
        color_hex.append(rgb_to_hex((c[0].astype(int), c[1].astype(int), c[2].astype(int))))
    #print(color_hex)
    #Bokeh scatter plot - look into color argument
    plot.scatter(x[:,0], y[:,0], size=size, color=color_hex, fill_alpha = 0.7, legend_label=CityName+": "+str(T_cel)+chr(176)+"C")
    return 0
# CREATE DROPDOWN WIDGET
# Selecting a city triggers update_city1, which adds its walk to the plot.
dropdown_1 = Select(title = "add city to plot", options = cities, value = None)
dropdown_1.on_change('value', update_city1)
# FORMAT/CREATE THE DOCUMENT TO RENDER
curdoc().add_root(column(dropdown_1, plot))
|
{"/main-bokeh.py": ["/random_walker.py"], "/src/plotter.py": ["/random_walker.py"]}
|
21,229,640
|
soudk/PyDinsky
|
refs/heads/main
|
/src/plotter.py
|
import matplotlib.pyplot as plt
import numpy as np
import colormap as cm
import random_walker as rw
# Demo driver: run 16 random walkers on a 100x100 grid and scatter-plot
# their trails, colored by per-step speed via the temperature palettes.
num_walkers = 16
T = np.linspace(250,320,num_walkers) # Temperature
num_steps = 1000
positions = []
#print(x_data[:, 0])
# Random heat-source positions on the grid, one per walker.
for i in range(num_walkers):
    positions.append([int(np.random.rand()*100),int(np.random.rand()*100)])
positions = np.asanyarray(positions)
# NOTE(review): bare expression — has no effect.
positions
x, y, v, tmap = rw.rand_walker_data(
    100, # Bounds for plot
    T, # Temperature array
    num_steps, # Number of steps
    num_walkers, # Number of walkers
    positions)
# NOTE(review): `temperatures` is never used below.
temperatures = [0, 10, 20, 20]
# NOTE(review): two figures are created; only the second (fig, ax1) is used.
fig, ax = plt.subplots(nrows=1, ncols=1)
#fig = plt.figure()
#ax = fig.add_subplot(1, 1, 1)
fig, ax1 = plt.subplots( figsize = (15,10) , dpi = 75)
ax1.clear()
ax1.cla()
for i in range(num_walkers):
    # We truncate the arrays to prevent the "weird bug", that Christian cannot figure out. It's all his fault really...
    color_list = cm.color_assign(v[:,i], cm.FindPalette(T[i]))
    plt.scatter(
        x[:,i][:num_steps - 1],
        y[:,i][:num_steps - 1],
        s = 50,
        c = np.array(color_list)[:num_steps - 1],
        alpha= 0.5)
# Hide all ticks and labels for a cleaner, art-style canvas.
plt.tick_params(
    axis='x', # changes apply to the x-axis
    which='both', # both major and minor ticks are affected
    bottom=False, # ticks along the bottom edge are off
    top=False, # ticks along the top edge are off
    labelbottom=False) # labels along the bottom edge are off
plt.tick_params(
    axis='y', # changes apply to the x-axis
    which='both', # both major and minor ticks are affected
    bottom=False, # ticks along the bottom edge are off
    top=False, # ticks along the top edge are off
    right=False,
    left = False,
    labelbottom=False,
    labelleft = False) # labels along the bottom edge are off
# Dark background on both the axes and the figure.
ax = plt.gca()
ax.set_facecolor((.2, .2, .2))
fig.patch.set_facecolor((.2, .2, .2))
plt.show()
# Dead example kept from an earlier single-walker version:
'''
color_list = cm.color_assign(v[:,0], cm.FindPalette(5, 15) )
plt.scatter(x[:,0], y[:,0], c = color_list)
plt.show()
'''
|
{"/main-bokeh.py": ["/random_walker.py"], "/src/plotter.py": ["/random_walker.py"]}
|
21,335,952
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q9.py
|
# Swap the first and last characters of the input string.
s = str(input("Enter the String:"))
print("INPUT IS:",s)
slice_mid=s[1:-1]   # everything between the first and last characters
slice_beg=s[0]      # first character (IndexError on empty input)
slice_end=s[-1:]    # last character as a string
print("Resultant string is:",slice_end+slice_mid+slice_beg)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,953
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q3-D.py
|
# Count how many times each word occurs in one line of input.
a=str(input("Enter word:"))
print(a)
# NOTE(review): split(' ') yields '' entries for runs of spaces; split()
# would skip them, but that would change the counted keys.
s=a.split(' ')
count = {}
# (A stray bare `print` expression — a no-op — was removed here.)
for n in s:
    count[n]=count.get(n,0)+1
print("The occurrences of each word in a given line is :")
print(count)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,954
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/Lab-workouts/file-no-of-lines.py
|
# Write three lines to Afile.txt, then read them back, echo each line and
# print the line count.
st1 = "Good Morning\nHave a Nice Day\nAre you okay ?\n"
# Context managers close the handles (the original left both files open);
# the dead counter `c` and the redundant seek(0) were removed.
with open("Afile.txt", "w") as fw:
    fw.write(st1)
with open("Afile.txt", "r") as fr:
    lines = fr.readlines()
for line in lines:
    print(line)
print(len(lines))
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,955
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q8.py
|
# Replace every later occurrence of the first character with '$', keeping
# the first character itself unchanged.
str1=input('Enter string ')
print('input sring is ',str1)
char = str1[0]                  # IndexError on empty input
str1 = str1.replace(char, '$')  # replaces ALL occurrences, including position 0
str1 = char + str1[1:]          # restore the original first character
print('New string is',str1)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,956
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO2/CO2-Q1.py
|
# Compute n! for a non-negative integer n.
n=int(input("enter number:"))
fact=1
if n<0:
    print("cannot find factorial")
else:
    # Fix: the original printed "Factorial is 0" for n == 0, but 0! = 1.
    # The loop below is simply empty for n == 0, leaving fact = 1.
    for i in range(1,n+1):
        fact=fact*i
    print("Fctorial of ",n," is",fact)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,957
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q10.py
|
# Area of a circle from an integer radius, using pi = 3.14.
pi=3.14   # NOTE(review): math.pi would be more precise but changes the printed output
R=int(input("Enter radius:"))
print("Area of circle=",pi*R*R)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,958
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q14.py
|
# Compute N + NN + NNN where NN and NNN are the digits of N repeated
# (e.g. N=5 -> 5 + 55 + 555 = 615). The original computed N + N^2 + N^3,
# which contradicts its own banner below.
# NOTE(review): assumes N >= 0 — digit repetition is undefined for negatives.
N = int(input("Enter the integer N :"))
digits = str(N)
tmp1 = int(digits * 2)   # NN
tmp2 = int(digits * 3)   # NNN
print("Find N + NN + NNN\n")
comp = N + tmp1 + tmp2
print("Result is : ",comp)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,959
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q18.py
|
def Merge(dict1, dict2):
    """Merge dict1 into dict2 in place.

    Returns None, because dict.update returns None — which is exactly what
    the demonstration below prints.
    """
    result = dict2.update(dict1)
    return result


dict1 = {'a': 100, 'b': 48, 'e': 55}
dict2 = {'d': 62, 'c': 14}
print(Merge(dict1, dict2))   # prints None (update's return value)
print(dict2)                 # dict2 now also holds dict1's entries
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,960
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/CO1/CO1-Q17,Q18.py
|
# Sort a name->score dict by score, descending and ascending.
a1 = {'Swathi': 67, 'Anu': 98, 'Riya': 66, 'Vismaya': 88, 'Neema': 75, 'Reshma': 89}
print("Inputed dict is :", a1)
# Sort the keys by their mapped values (lambda instead of a1.get).
a1_sorted_keys = sorted(a1.keys(), key=lambda name: a1[name], reverse=True)
a1_sorted_keys_2 = sorted(a1.keys(), key=lambda name: a1[name])
print("Descending order:", a1_sorted_keys)
print("Ascending order:", a1_sorted_keys_2)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,335,961
|
Ananya31-tkm/PROGRAMMING_LAB_PYTHON
|
refs/heads/master
|
/Lab-workouts/file_word-count.py
|
# Write a sentence to Dictfile.txt, read it back, and print how often each
# word occurs.
st1= "Good Morning Have A Nice Day To All"
# Context managers close the handles (the original left both files open).
with open("Dictfile.txt","w") as fw:
    fw.write(st1)
with open("Dictfile.txt","r") as fr:
    st2=fr.read()
print("Dictfile.txt have the string:\n",st2)
# Single-pass counting; the original called list.count per word (O(n^2)).
dict1 = {}
for word in st2.split():
    dict1[word] = dict1.get(word, 0) + 1
print("\nFrequency of Words is:\n",dict1)
|
{"/modfibonacci.py": ["/fibonacci.py"]}
|
21,393,424
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/asyncio/test_async_aiohttp.py
|
import unittest
from unittest import mock
from engineio.async_drivers import aiohttp as async_aiohttp
class AiohttpTests(unittest.TestCase):
    """Unit tests for the aiohttp async driver glue: route registration,
    request-to-environ translation, and response construction."""

    def test_create_route(self):
        # The driver must register both GET and POST handlers for the path.
        app = mock.MagicMock()
        mock_server = mock.MagicMock()
        async_aiohttp.create_route(app, mock_server, '/foo')
        app.router.add_get.assert_any_call('/foo', mock_server.handle_request)
        app.router.add_post.assert_any_call('/foo', mock_server.handle_request)

    def test_translate_request(self):
        # Build a fake aiohttp request and check the WSGI-style environ.
        request = mock.MagicMock()
        request._message.method = 'PUT'
        request._message.path = '/foo/bar?baz=1'
        request._message.version = (1, 1)
        request._message.headers = {
            'a': 'b',
            'c-c': 'd',
            'c_c': 'e',
            'content-type': 'application/json',
            'content-length': 123,
        }
        request._payload = b'hello world'
        environ = async_aiohttp.translate_request(request)
        expected_environ = {
            'REQUEST_METHOD': 'PUT',
            'PATH_INFO': '/foo/bar',
            'QUERY_STRING': 'baz=1',
            'CONTENT_TYPE': 'application/json',
            'CONTENT_LENGTH': 123,
            'HTTP_A': 'b',
            # 'HTTP_C_C': 'd,e',
            'RAW_URI': '/foo/bar?baz=1',
            'SERVER_PROTOCOL': 'HTTP/1.1',
            'wsgi.input': b'hello world',
            'aiohttp.request': request,
        }
        for k, v in expected_environ.items():
            assert v == environ[k]
        # 'c-c' and 'c_c' both normalize to HTTP_C_C; header ordering decides
        # which value comes first, so accept either concatenation.
        assert environ['HTTP_C_C'] == 'd,e' or environ['HTTP_C_C'] == 'e,d'

    # @mock.patch('async_aiohttp.aiohttp.web.Response')
    def test_make_response(self):
        # The "202 ACCEPTED" status string must be parsed to the numeric code.
        rv = async_aiohttp.make_response(
            '202 ACCEPTED', {'foo': 'bar'}, b'payload', {}
        )
        assert rv.status == 202
        assert rv.headers['foo'] == 'bar'
        assert rv.body == b'payload'
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,425
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/performance/payload.py
|
import time
from engineio import packet, payload
def test():
    """Benchmark: repeatedly round-trip a 10-packet payload for ~5 seconds.

    Returns the number of encode/decode round trips completed; the wall-clock
    budget is intentional (this is a throughput benchmark, not a unit test).
    """
    p = payload.Payload(
        packets=[packet.Packet(packet.MESSAGE, b'hello world')] * 10)
    start = time.time()
    count = 0
    while True:
        # Round trip: encode the payload, then parse it back into a Payload.
        p = payload.Payload(encoded_payload=p.encode())
        count += 1
        if time.time() - start >= 5:
            break
    return count


if __name__ == '__main__':
    count = test()
    print('payload:', count, 'payloads processed.')
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,426
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/static_files.py
|
# Extension -> MIME type table for the static file helper below; anything
# not listed falls back to application/octet-stream.
content_types = {
    'css': 'text/css',
    'gif': 'image/gif',
    'html': 'text/html',
    'jpg': 'image/jpeg',
    'js': 'application/javascript',
    'json': 'application/json',
    'png': 'image/png',
    'txt': 'text/plain',
}


def get_static_file(path, static_files):
    """Return the local filename and content type for the requested static
    file URL.

    :param path: the path portion of the requested URL.
    :param static_files: a static file configuration dictionary.

    This function returns a dictionary with two keys, "filename" and
    "content_type". If the requested URL does not match any static file, the
    return value is None.
    """
    tail = ''
    if path in static_files:
        entry = static_files[path]
    else:
        # Walk up the path one segment at a time, accumulating the stripped
        # segments in `tail`, until a configured prefix (with or without a
        # trailing slash) matches.
        entry = None
        while path != '':
            path, segment = path.rsplit('/', 1)
            tail = '/' + segment + tail
            if path in static_files:
                entry = static_files[path]
                break
            if path + '/' in static_files:
                entry = static_files[path + '/']
                break
    if not entry:
        return None
    # Normalize to a fresh dict so the caller's configuration isn't mutated.
    record = {'filename': entry} if isinstance(entry, str) else dict(entry)
    if record['filename'].endswith('/') and tail.startswith('/'):
        tail = tail[1:]
    record['filename'] += tail
    if record['filename'].endswith('/'):
        # Directory request: append the configured root document, or
        # index.html by default.
        if '' in static_files:
            root = static_files['']
            if isinstance(root, str):
                record['filename'] += root
            else:
                record['filename'] += root['filename']
                if 'content_type' in root:
                    record['content_type'] = root['content_type']
        else:
            record['filename'] += 'index.html'
    if 'content_type' not in record:
        suffix = record['filename'].rsplit('.')[-1]
        record['content_type'] = content_types.get(
            suffix, 'application/octet-stream')
    return record
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,427
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/common/test_payload.py
|
import unittest
import pytest
from engineio import packet
from engineio import payload
class TestPayload(unittest.TestCase):
    """Unit tests for the engine.io Payload encoder/decoder."""

    def test_encode_empty_payload(self):
        """A payload with no packets encodes to an empty string."""
        pl = payload.Payload()
        assert pl.packets == []
        assert pl.encode() == ''

    def test_decode_empty_payload(self):
        """Decoding an empty string yields a payload that re-encodes empty."""
        pl = payload.Payload(encoded_payload='')
        assert pl.encode() == ''

    def test_encode_payload_text(self):
        """A single text message encodes with no record separator."""
        msg = packet.Packet(packet.MESSAGE, data='abc')
        pl = payload.Payload([msg])
        assert pl.packets == [msg]
        assert pl.encode() == '4abc'

    def test_encode_payload_text_multiple(self):
        """Multiple text messages are joined by the \\x1e record separator."""
        first = packet.Packet(packet.MESSAGE, data='abc')
        second = packet.Packet(packet.MESSAGE, data='def')
        pl = payload.Payload([first, second])
        assert pl.packets == [first, second]
        assert pl.encode() == '4abc\x1e4def'

    def test_encode_payload_binary(self):
        """A binary message is base64 encoded with a 'b' prefix."""
        msg = packet.Packet(packet.MESSAGE, data=b'\x00\x01\x02')
        pl = payload.Payload([msg])
        assert pl.packets == [msg]
        assert pl.encode() == 'bAAEC'

    def test_encode_payload_binary_multiple(self):
        """Multiple binary messages are joined by the record separator."""
        first = packet.Packet(packet.MESSAGE, data=b'\x00\x01\x02')
        second = packet.Packet(packet.MESSAGE, data=b'\x03\x04\x05\x06')
        pl = payload.Payload([first, second])
        assert pl.packets == [first, second]
        assert pl.encode() == 'bAAEC\x1ebAwQFBg=='

    def test_encode_payload_text_binary_multiple(self):
        """Text and binary messages can be freely mixed in one payload."""
        text = packet.Packet(packet.MESSAGE, data='abc')
        binary = packet.Packet(packet.MESSAGE, data=b'\x03\x04\x05\x06')
        pl = payload.Payload([text, binary, binary, text])
        assert pl.packets == [text, binary, binary, text]
        assert pl.encode() == '4abc\x1ebAwQFBg==\x1ebAwQFBg==\x1e4abc'

    def test_encode_jsonp_payload(self):
        """A jsonp_index wraps the encoded payload in a JSONP callback."""
        msg = packet.Packet(packet.MESSAGE, data='abc')
        pl = payload.Payload([msg])
        assert pl.packets == [msg]
        assert pl.encode(jsonp_index=233) == '___eio[233]("4abc");'

    def test_decode_jsonp_payload(self):
        """A form-encoded 'd=' payload is unwrapped before decoding."""
        pl = payload.Payload(encoded_payload='d=4abc')
        assert pl.encode() == '4abc'

    def test_decode_invalid_payload(self):
        """A malformed encoded payload raises ValueError."""
        with pytest.raises(ValueError):
            payload.Payload(encoded_payload='bad payload')

    def test_decode_multi_payload_with_too_many_packets(self):
        """Decoding rejects payloads that exceed the packet count limit."""
        with pytest.raises(ValueError):
            payload.Payload(encoded_payload='4abc\x1e4def\x1e' * 9 + '6')
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,428
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/async_drivers/threading.py
|
from __future__ import absolute_import
import queue
import threading
import time
try:
from simple_websocket import Server, ConnectionClosed
_websocket_available = True
except ImportError: # pragma: no cover
_websocket_available = False
class WebSocketWSGI(object):  # pragma: no cover
    """Threading-based WebSocket wrapper, backed by simple-websocket,
    exposing the same interface as eventlet's WebSocket implementation.
    """

    def __init__(self, handler, server):
        # ``server`` is accepted only for interface compatibility with the
        # other async drivers; it is not used by this implementation.
        self.app = handler

    def __call__(self, environ, start_response):
        # Upgrade the incoming request to a WebSocket, then hand control
        # over to the wrapped application with this object as transport.
        self.ws = Server(environ)
        return self.app(self)

    def close(self):
        """Close the underlying WebSocket connection."""
        return self.ws.close()

    def send(self, message):
        """Send *message*, raising IOError if the peer has disconnected."""
        try:
            result = self.ws.send(message)
        except ConnectionClosed:
            raise IOError()
        return result

    def wait(self):
        """Block for the next incoming message; None once the peer closes."""
        try:
            incoming = self.ws.receive()
        except ConnectionClosed:
            incoming = None
        return incoming
# Mapping of async primitives used by the engine.io server when running
# with the plain-threading driver; mirrors the layout of the other drivers.
_async = dict(
    thread=threading.Thread,
    queue=queue.Queue,
    queue_empty=queue.Empty,
    event=threading.Event,
    # WebSocket support is optional: None when simple-websocket is missing.
    websocket=WebSocketWSGI if _websocket_available else None,
    sleep=time.sleep,
)
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,429
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/asyncio/test_asyncio_client.py
|
import asyncio
import ssl
import sys
import unittest
from unittest import mock
try:
import aiohttp
except ImportError:
aiohttp = None
import pytest
from engineio import asyncio_client
from engineio import client
from engineio import exceptions
from engineio import packet
from engineio import payload
def AsyncMock(*args, **kwargs):
    """Return a coroutine function backed by a MagicMock.

    All constructor arguments are forwarded to ``mock.MagicMock``.  The
    underlying mock is exposed as the ``mock`` attribute of the returned
    coroutine function so that call assertions can be made on it.
    """
    inner = mock.MagicMock(*args, **kwargs)

    async def wrapper(*call_args, **call_kwargs):
        # Delegate every awaited call to the synchronous mock.
        return inner(*call_args, **call_kwargs)

    wrapper.mock = inner
    return wrapper
def _run(coro):
"""Run the given coroutine."""
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(sys.version_info < (3, 5), 'only for Python 3.5+')
class TestAsyncClient(unittest.TestCase):
def test_is_asyncio_based(self):
c = asyncio_client.AsyncClient()
assert c.is_asyncio_based()
def test_already_connected(self):
c = asyncio_client.AsyncClient()
c.state = 'connected'
with pytest.raises(ValueError):
_run(c.connect('http://foo'))
def test_invalid_transports(self):
c = asyncio_client.AsyncClient()
with pytest.raises(ValueError):
_run(c.connect('http://foo', transports=['foo', 'bar']))
def test_some_invalid_transports(self):
c = asyncio_client.AsyncClient()
c._connect_websocket = AsyncMock()
_run(c.connect('http://foo', transports=['foo', 'websocket', 'bar']))
assert c.transports == ['websocket']
def test_connect_polling(self):
c = asyncio_client.AsyncClient()
c._connect_polling = AsyncMock(return_value='foo')
assert _run(c.connect('http://foo')) == 'foo'
c._connect_polling.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
c = asyncio_client.AsyncClient()
c._connect_polling = AsyncMock(return_value='foo')
assert _run(c.connect('http://foo', transports=['polling'])) == 'foo'
c._connect_polling.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
c = asyncio_client.AsyncClient()
c._connect_polling = AsyncMock(return_value='foo')
assert (
_run(c.connect('http://foo', transports=['polling', 'websocket']))
== 'foo'
)
c._connect_polling.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
def test_connect_websocket(self):
c = asyncio_client.AsyncClient()
c._connect_websocket = AsyncMock(return_value='foo')
assert _run(c.connect('http://foo', transports=['websocket'])) == 'foo'
c._connect_websocket.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
c = asyncio_client.AsyncClient()
c._connect_websocket = AsyncMock(return_value='foo')
assert _run(c.connect('http://foo', transports='websocket')) == 'foo'
c._connect_websocket.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
def test_connect_query_string(self):
c = asyncio_client.AsyncClient()
c._connect_polling = AsyncMock(return_value='foo')
assert _run(c.connect('http://foo?bar=baz')) == 'foo'
c._connect_polling.mock.assert_called_once_with(
'http://foo?bar=baz', {}, 'engine.io'
)
def test_connect_custom_headers(self):
c = asyncio_client.AsyncClient()
c._connect_polling = AsyncMock(return_value='foo')
assert _run(c.connect('http://foo', headers={'Foo': 'Bar'})) == 'foo'
c._connect_polling.mock.assert_called_once_with(
'http://foo', {'Foo': 'Bar'}, 'engine.io'
)
def test_wait(self):
c = asyncio_client.AsyncClient()
done = []
async def fake_read_look_task():
done.append(True)
c.read_loop_task = fake_read_look_task()
_run(c.wait())
assert done == [True]
def test_wait_no_task(self):
c = asyncio_client.AsyncClient()
c.read_loop_task = None
_run(c.wait())
def test_send(self):
c = asyncio_client.AsyncClient()
saved_packets = []
async def fake_send_packet(pkt):
saved_packets.append(pkt)
c._send_packet = fake_send_packet
_run(c.send('foo'))
_run(c.send('foo'))
_run(c.send(b'foo'))
assert saved_packets[0].packet_type == packet.MESSAGE
assert saved_packets[0].data == 'foo'
assert not saved_packets[0].binary
assert saved_packets[1].packet_type == packet.MESSAGE
assert saved_packets[1].data == 'foo'
assert not saved_packets[1].binary
assert saved_packets[2].packet_type == packet.MESSAGE
assert saved_packets[2].data == b'foo'
assert saved_packets[2].binary
def test_disconnect_not_connected(self):
c = asyncio_client.AsyncClient()
c.state = 'foo'
c.sid = 'bar'
_run(c.disconnect())
assert c.state == 'disconnected'
assert c.sid is None
def test_disconnect_polling(self):
c = asyncio_client.AsyncClient()
client.connected_clients.append(c)
c.state = 'connected'
c.current_transport = 'polling'
c.queue = mock.MagicMock()
c.queue.put = AsyncMock()
c.queue.join = AsyncMock()
c.read_loop_task = AsyncMock()()
c.ws = mock.MagicMock()
c.ws.close = AsyncMock()
c._trigger_event = AsyncMock()
_run(c.disconnect())
c.ws.close.mock.assert_not_called()
assert c not in client.connected_clients
c._trigger_event.mock.assert_called_once_with(
'disconnect', run_async=False
)
def test_disconnect_websocket(self):
c = asyncio_client.AsyncClient()
client.connected_clients.append(c)
c.state = 'connected'
c.current_transport = 'websocket'
c.queue = mock.MagicMock()
c.queue.put = AsyncMock()
c.queue.join = AsyncMock()
c.read_loop_task = AsyncMock()()
c.ws = mock.MagicMock()
c.ws.close = AsyncMock()
c._trigger_event = AsyncMock()
_run(c.disconnect())
c.ws.close.mock.assert_called_once_with()
assert c not in client.connected_clients
c._trigger_event.mock.assert_called_once_with(
'disconnect', run_async=False
)
def test_disconnect_polling_abort(self):
c = asyncio_client.AsyncClient()
client.connected_clients.append(c)
c.state = 'connected'
c.current_transport = 'polling'
c.queue = mock.MagicMock()
c.queue.put = AsyncMock()
c.queue.join = AsyncMock()
c.read_loop_task = AsyncMock()()
c.ws = mock.MagicMock()
c.ws.close = AsyncMock()
_run(c.disconnect(abort=True))
c.queue.join.mock.assert_not_called()
c.ws.close.mock.assert_not_called()
assert c not in client.connected_clients
def test_disconnect_websocket_abort(self):
c = asyncio_client.AsyncClient()
client.connected_clients.append(c)
c.state = 'connected'
c.current_transport = 'websocket'
c.queue = mock.MagicMock()
c.queue.put = AsyncMock()
c.queue.join = AsyncMock()
c.read_loop_task = AsyncMock()()
c.ws = mock.MagicMock()
c.ws.close = AsyncMock()
_run(c.disconnect(abort=True))
c.queue.join.mock.assert_not_called()
c.ws.mock.assert_not_called()
assert c not in client.connected_clients
def test_background_tasks(self):
r = []
async def foo(arg):
r.append(arg)
c = asyncio_client.AsyncClient()
c.start_background_task(foo, 'bar')
pending = asyncio.all_tasks(loop=asyncio.get_event_loop()) \
if hasattr(asyncio, 'all_tasks') else asyncio.Task.all_tasks()
asyncio.get_event_loop().run_until_complete(asyncio.wait(pending))
assert r == ['bar']
def test_sleep(self):
c = asyncio_client.AsyncClient()
_run(c.sleep(0))
def test_create_queue(self):
c = asyncio_client.AsyncClient()
q = c.create_queue()
with pytest.raises(q.Empty):
q.get_nowait()
def test_create_event(self):
c = asyncio_client.AsyncClient()
e = c.create_event()
assert not e.is_set()
e.set()
assert e.is_set()
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_polling_connection_failed(self, _time):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock(return_value=None)
with pytest.raises(exceptions.ConnectionError):
_run(c.connect('http://foo', headers={'Foo': 'Bar'}))
c._send_request.mock.assert_called_once_with(
'GET',
'http://foo/engine.io/?transport=polling&EIO=4&t=123.456',
headers={'Foo': 'Bar'},
timeout=5,
)
def test_polling_connection_404(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 404
c._send_request.mock.return_value.json = AsyncMock(
return_value={'foo': 'bar'}
)
try:
_run(c.connect('http://foo'))
except exceptions.ConnectionError as exc:
assert len(exc.args) == 2
assert (
exc.args[0] == 'Unexpected status code 404 in server response'
)
assert exc.args[1] == {'foo': 'bar'}
def test_polling_connection_404_no_json(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 404
c._send_request.mock.return_value.json = AsyncMock(
side_effect=aiohttp.ContentTypeError('foo', 'bar')
)
try:
_run(c.connect('http://foo'))
except exceptions.ConnectionError as exc:
assert len(exc.args) == 2
assert (
exc.args[0] == 'Unexpected status code 404 in server response'
)
assert exc.args[1] is None
def test_polling_connection_invalid_packet(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(return_value=b'foo')
with pytest.raises(exceptions.ConnectionError):
_run(c.connect('http://foo'))
def test_polling_connection_no_open_packet(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(
return_value=payload.Payload(
packets=[
packet.Packet(
packet.CLOSE,
{
'sid': '123',
'upgrades': [],
'pingInterval': 10,
'pingTimeout': 20,
},
)
]
).encode().encode('utf-8')
)
with pytest.raises(exceptions.ConnectionError):
_run(c.connect('http://foo'))
def test_polling_connection_successful(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(
return_value=payload.Payload(
packets=[
packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
)
]
).encode().encode('utf-8')
)
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = AsyncMock()
c.on('connect', on_connect)
_run(c.connect('http://foo'))
c._read_loop_polling.mock.assert_called_once_with()
c._read_loop_websocket.mock.assert_not_called()
c._write_loop.mock.assert_called_once_with()
on_connect.mock.assert_called_once_with()
assert c in client.connected_clients
assert (
c.base_url
== 'http://foo/engine.io/?transport=polling&EIO=4&sid=123'
)
assert c.sid == '123'
assert c.ping_interval == 1
assert c.ping_timeout == 2
assert c.upgrades == []
assert c.transport() == 'polling'
def test_polling_https_noverify_connection_successful(self):
c = asyncio_client.AsyncClient(ssl_verify=False)
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(
return_value=payload.Payload(
packets=[
packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
)
]
).encode().encode('utf-8')
)
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = AsyncMock()
c.on('connect', on_connect)
_run(c.connect('https://foo'))
c._read_loop_polling.mock.assert_called_once_with()
c._read_loop_websocket.mock.assert_not_called()
c._write_loop.mock.assert_called_once_with()
on_connect.mock.assert_called_once_with()
assert c in client.connected_clients
assert (
c.base_url
== 'https://foo/engine.io/?transport=polling&EIO=4&sid=123'
)
assert c.sid == '123'
assert c.ping_interval == 1
assert c.ping_timeout == 2
assert c.upgrades == []
assert c.transport() == 'polling'
def test_polling_connection_with_more_packets(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(
return_value=payload.Payload(
packets=[
packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
),
packet.Packet(packet.NOOP),
]
).encode().encode('utf-8')
)
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
c._receive_packet = AsyncMock()
on_connect = AsyncMock()
c.on('connect', on_connect)
_run(c.connect('http://foo'))
assert c._receive_packet.mock.call_count == 1
assert (
c._receive_packet.mock.call_args_list[0][0][0].packet_type
== packet.NOOP
)
def test_polling_connection_upgraded(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(
return_value=payload.Payload(
packets=[
packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': ['websocket'],
'pingInterval': 1000,
'pingTimeout': 2000,
},
)
]
).encode().encode('utf-8')
)
c._connect_websocket = AsyncMock(return_value=True)
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(c.connect('http://foo'))
c._connect_websocket.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
on_connect.assert_called_once_with()
assert c in client.connected_clients
assert (
c.base_url
== 'http://foo/engine.io/?transport=polling&EIO=4&sid=123'
)
assert c.sid == '123'
assert c.ping_interval == 1
assert c.ping_timeout == 2
assert c.upgrades == ['websocket']
def test_polling_connection_not_upgraded(self):
c = asyncio_client.AsyncClient()
c._send_request = AsyncMock()
c._send_request.mock.return_value.status = 200
c._send_request.mock.return_value.read = AsyncMock(
return_value=payload.Payload(
packets=[
packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': ['websocket'],
'pingInterval': 1000,
'pingTimeout': 2000,
},
)
]
).encode().encode('utf-8')
)
c._connect_websocket = AsyncMock(return_value=False)
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(c.connect('http://foo'))
c._connect_websocket.mock.assert_called_once_with(
'http://foo', {}, 'engine.io'
)
c._read_loop_polling.mock.assert_called_once_with()
c._read_loop_websocket.mock.assert_not_called()
c._write_loop.mock.assert_called_once_with()
on_connect.assert_called_once_with()
assert c in client.connected_clients
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_connection_failed(self, _time):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock(
side_effect=[aiohttp.client_exceptions.ServerConnectionError()]
)
with pytest.raises(exceptions.ConnectionError):
_run(
c.connect(
'http://foo',
transports=['websocket'],
headers={'Foo': 'Bar'},
)
)
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&t=123.456',
headers={'Foo': 'Bar'},
timeout=5
)
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_connection_extra(self, _time):
c = asyncio_client.AsyncClient(websocket_extra_options={
'headers': {'Baz': 'Qux'},
'timeout': 10
})
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock(
side_effect=[aiohttp.client_exceptions.ServerConnectionError()]
)
with pytest.raises(exceptions.ConnectionError):
_run(
c.connect(
'http://foo',
transports=['websocket'],
headers={'Foo': 'Bar'},
)
)
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&t=123.456',
headers={'Foo': 'Bar', 'Baz': 'Qux'},
timeout=10,
)
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_upgrade_failed(self, _time):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock(
side_effect=[aiohttp.client_exceptions.ServerConnectionError()]
)
c.sid = '123'
assert not _run(c.connect('http://foo', transports=['websocket']))
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&sid=123&t=123.456',
headers={},
timeout=5,
)
def test_websocket_connection_no_open_packet(self):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.CLOSE
).encode()
with pytest.raises(exceptions.ConnectionError):
_run(c.connect('http://foo', transports=['websocket']))
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_connection_successful(self, _time):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
).encode()
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(c.connect('ws://foo', transports=['websocket']))
c._read_loop_polling.mock.assert_not_called()
c._read_loop_websocket.mock.assert_called_once_with()
c._write_loop.mock.assert_called_once_with()
on_connect.assert_called_once_with()
assert c in client.connected_clients
assert c.base_url == 'ws://foo/engine.io/?transport=websocket&EIO=4'
assert c.sid == '123'
assert c.ping_interval == 1
assert c.ping_timeout == 2
assert c.upgrades == []
assert c.transport() == 'websocket'
assert c.ws == ws
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&t=123.456',
headers={},
timeout=5,
)
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_https_noverify_connection_successful(self, _time):
c = asyncio_client.AsyncClient(ssl_verify=False)
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
).encode()
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(c.connect('wss://foo', transports=['websocket']))
c._read_loop_polling.mock.assert_not_called()
c._read_loop_websocket.mock.assert_called_once_with()
c._write_loop.mock.assert_called_once_with()
on_connect.assert_called_once_with()
assert c in client.connected_clients
assert c.base_url == 'wss://foo/engine.io/?transport=websocket&EIO=4'
assert c.sid == '123'
assert c.ping_interval == 1
assert c.ping_timeout == 2
assert c.upgrades == []
assert c.transport() == 'websocket'
assert c.ws == ws
_, kwargs = c.http.ws_connect.mock.call_args
assert 'ssl' in kwargs
assert isinstance(kwargs['ssl'], ssl.SSLContext)
assert kwargs['ssl'].verify_mode == ssl.CERT_NONE
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_connection_with_cookies(self, _time):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
).encode()
c.http._cookie_jar = [mock.MagicMock(), mock.MagicMock()]
c.http._cookie_jar[0].key = 'key'
c.http._cookie_jar[0].value = 'value'
c.http._cookie_jar[1].key = 'key2'
c.http._cookie_jar[1].value = 'value2'
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(c.connect('ws://foo', transports=['websocket']))
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&t=123.456',
headers={},
timeout=5,
)
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_connection_with_cookie_header(self, _time):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
).encode()
c.http._cookie_jar = []
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(
c.connect(
'ws://foo',
headers={'Cookie': 'key=value; key2=value2; key3="value3="'},
transports=['websocket'],
)
)
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&t=123.456',
headers={},
timeout=5,
)
c.http.cookie_jar.update_cookies.assert_called_once_with(
{'key': 'value', 'key2': 'value2', 'key3': '"value3="'}
)
@mock.patch('engineio.client.time.time', return_value=123.456)
def test_websocket_connection_with_cookies_and_headers(self, _time):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
).encode()
c.http._cookie_jar = [mock.MagicMock(), mock.MagicMock()]
c.http._cookie_jar[0].key = 'key'
c.http._cookie_jar[0].value = 'value'
c.http._cookie_jar[1].key = 'key2'
c.http._cookie_jar[1].value = 'value2'
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
_run(
c.connect(
'ws://foo',
headers={'Foo': 'Bar', 'Cookie': 'key3=value3'},
transports=['websocket'],
)
)
c.http.ws_connect.mock.assert_called_once_with(
'ws://foo/engine.io/?transport=websocket&EIO=4&t=123.456',
headers={'Foo': 'Bar'},
timeout=5,
)
c.http.cookie_jar.update_cookies.assert_called_once_with(
{'key3': 'value3'}
)
def test_websocket_upgrade_no_pong(self):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.OPEN,
{
'sid': '123',
'upgrades': [],
'pingInterval': 1000,
'pingTimeout': 2000,
},
).encode()
ws.send_str = AsyncMock()
c.sid = '123'
c.current_transport = 'polling'
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
assert not _run(c.connect('ws://foo', transports=['websocket']))
c._read_loop_polling.mock.assert_not_called()
c._read_loop_websocket.mock.assert_not_called()
c._write_loop.mock.assert_not_called()
on_connect.assert_not_called()
assert c.transport() == 'polling'
ws.send_str.mock.assert_called_once_with('2probe')
def test_websocket_upgrade_successful(self):
c = asyncio_client.AsyncClient()
c.http = mock.MagicMock(closed=False)
c.http.ws_connect = AsyncMock()
ws = c.http.ws_connect.mock.return_value
ws.receive = AsyncMock()
ws.receive.mock.return_value.data = packet.Packet(
packet.PONG, 'probe'
).encode()
ws.send_str = AsyncMock()
c.sid = '123'
c.base_url = 'http://foo'
c.current_transport = 'polling'
c._read_loop_polling = AsyncMock()
c._read_loop_websocket = AsyncMock()
c._write_loop = AsyncMock()
on_connect = mock.MagicMock()
c.on('connect', on_connect)
assert _run(c.connect('ws://foo', transports=['websocket']))
c._read_loop_polling.mock.assert_not_called()
c._read_loop_websocket.mock.assert_called_once_with()
c._write_loop.mock.assert_called_once_with()
on_connect.assert_not_called() # was called by polling
assert c not in client.connected_clients # was added by polling
assert c.base_url == 'http://foo' # not changed
assert c.sid == '123' # not changed
assert c.transport() == 'websocket'
assert c.ws == ws
assert ws.send_str.mock.call_args_list[0] == (('2probe',),) # ping
assert ws.send_str.mock.call_args_list[1] == (('5',),) # upgrade
def test_receive_unknown_packet(self):
c = asyncio_client.AsyncClient()
_run(c._receive_packet(packet.Packet(encoded_packet='9')))
# should be ignored
def test_receive_noop_packet(self):
c = asyncio_client.AsyncClient()
_run(c._receive_packet(packet.Packet(packet.NOOP)))
# should be ignored
def test_receive_ping_packet(self):
c = asyncio_client.AsyncClient()
c._send_packet = AsyncMock()
_run(c._receive_packet(packet.Packet(packet.PING)))
assert c._send_packet.mock.call_args_list[0][0][0].encode() == '3'
def test_receive_message_packet(self):
c = asyncio_client.AsyncClient()
c._trigger_event = AsyncMock()
_run(c._receive_packet(packet.Packet(packet.MESSAGE, {'foo': 'bar'})))
c._trigger_event.mock.assert_called_once_with(
'message', {'foo': 'bar'}, run_async=True
)
def test_receive_close_packet(self):
c = asyncio_client.AsyncClient()
c.disconnect = AsyncMock()
_run(c._receive_packet(packet.Packet(packet.CLOSE)))
c.disconnect.mock.assert_called_once_with(abort=True)
def test_send_packet_disconnected(self):
c = asyncio_client.AsyncClient()
c.queue = c.create_queue()
c.state = 'disconnected'
_run(c._send_packet(packet.Packet(packet.NOOP)))
assert c.queue.empty()
def test_send_packet(self):
c = asyncio_client.AsyncClient()
c.queue = c.create_queue()
c.state = 'connected'
_run(c._send_packet(packet.Packet(packet.NOOP)))
assert not c.queue.empty()
pkt = _run(c.queue.get())
assert pkt.packet_type == packet.NOOP
def test_trigger_event_function(self):
result = []
def foo_handler(arg):
result.append('ok')
result.append(arg)
c = asyncio_client.AsyncClient()
c.on('message', handler=foo_handler)
_run(c._trigger_event('message', 'bar'))
assert result == ['ok', 'bar']
def test_trigger_event_coroutine(self):
result = []
async def foo_handler(arg):
result.append('ok')
result.append(arg)
c = asyncio_client.AsyncClient()
c.on('message', handler=foo_handler)
_run(c._trigger_event('message', 'bar'))
assert result == ['ok', 'bar']
def test_trigger_event_function_error(self):
def connect_handler(arg):
return 1 / 0
def foo_handler(arg):
return 1 / 0
c = asyncio_client.AsyncClient()
c.on('connect', handler=connect_handler)
c.on('message', handler=foo_handler)
assert not _run(c._trigger_event('connect', '123'))
assert _run(c._trigger_event('message', 'bar')) is None
def test_trigger_event_coroutine_error(self):
async def connect_handler(arg):
return 1 / 0
async def foo_handler(arg):
return 1 / 0
c = asyncio_client.AsyncClient()
c.on('connect', handler=connect_handler)
c.on('message', handler=foo_handler)
assert not _run(c._trigger_event('connect', '123'))
assert _run(c._trigger_event('message', 'bar')) is None
def test_trigger_event_function_async(self):
    """run_async=True wraps a sync handler in a background task."""
    result = []

    def foo_handler(arg):
        result.append('ok')
        result.append(arg)

    c = asyncio_client.AsyncClient()
    c.on('message', handler=foo_handler)
    # _trigger_event returns a future; the handler runs when it is awaited
    fut = _run(c._trigger_event('message', 'bar', run_async=True))
    asyncio.get_event_loop().run_until_complete(fut)
    assert result == ['ok', 'bar']

def test_trigger_event_coroutine_async(self):
    """run_async=True schedules a coroutine handler as a background task."""
    result = []

    async def foo_handler(arg):
        result.append('ok')
        result.append(arg)

    c = asyncio_client.AsyncClient()
    c.on('message', handler=foo_handler)
    fut = _run(c._trigger_event('message', 'bar', run_async=True))
    asyncio.get_event_loop().run_until_complete(fut)
    assert result == ['ok', 'bar']

def test_trigger_event_function_async_error(self):
    """An error in an async-dispatched sync handler surfaces on await."""
    result = []

    def foo_handler(arg):
        result.append(arg)
        return 1 / 0

    c = asyncio_client.AsyncClient()
    c.on('message', handler=foo_handler)
    fut = _run(c._trigger_event('message', 'bar', run_async=True))
    # the exception is raised when the returned future is awaited
    with pytest.raises(ZeroDivisionError):
        asyncio.get_event_loop().run_until_complete(fut)
    assert result == ['bar']

def test_trigger_event_coroutine_async_error(self):
    """An error in an async-dispatched coroutine handler surfaces on await."""
    result = []

    async def foo_handler(arg):
        result.append(arg)
        return 1 / 0

    c = asyncio_client.AsyncClient()
    c.on('message', handler=foo_handler)
    fut = _run(c._trigger_event('message', 'bar', run_async=True))
    with pytest.raises(ZeroDivisionError):
        asyncio.get_event_loop().run_until_complete(fut)
    assert result == ['bar']

def test_trigger_unknown_event(self):
    """Triggering events that have no registered handler is a no-op."""
    c = asyncio_client.AsyncClient()
    _run(c._trigger_event('connect', run_async=False))
    _run(c._trigger_event('message', 123, run_async=True))
    # should do nothing
def test_read_loop_polling_disconnected(self):
    """The polling read loop exits immediately when not connected."""
    c = asyncio_client.AsyncClient()
    c.state = 'disconnected'
    c._trigger_event = AsyncMock()
    # AsyncMock()() produces an awaitable standing in for the write loop task
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_polling())
    c._trigger_event.mock.assert_not_called()
    # should not block

@mock.patch('engineio.client.time.time', return_value=123.456)
def test_read_loop_polling_no_response(self, _time):
    """A failed GET (None response) aborts the loop and fires disconnect."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 25
    c.ping_timeout = 5
    c.state = 'connected'
    c.base_url = 'http://foo'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c._send_request = AsyncMock(return_value=None)
    c._trigger_event = AsyncMock()
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_polling())
    assert c.state == 'disconnected'
    # None is pushed to the queue to unblock the write loop
    c.queue.put.mock.assert_called_once_with(None)
    # request timeout is max(ping_interval, ping_timeout) + 5 = 30
    c._send_request.mock.assert_called_once_with(
        'GET', 'http://foo&t=123.456', timeout=30
    )
    c._trigger_event.mock.assert_called_once_with(
        'disconnect', run_async=False
    )

@mock.patch('engineio.client.time.time', return_value=123.456)
def test_read_loop_polling_bad_status(self, _time):
    """A non-2xx polling response aborts the read loop."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 25
    c.ping_timeout = 5
    c.state = 'connected'
    c.base_url = 'http://foo'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c._send_request = AsyncMock()
    c._send_request.mock.return_value.status = 400
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_polling())
    assert c.state == 'disconnected'
    c.queue.put.mock.assert_called_once_with(None)
    c._send_request.mock.assert_called_once_with(
        'GET', 'http://foo&t=123.456', timeout=30
    )

@mock.patch('engineio.client.time.time', return_value=123.456)
def test_read_loop_polling_bad_packet(self, _time):
    """An undecodable payload aborts the read loop."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 25
    c.ping_timeout = 60
    c.state = 'connected'
    c.base_url = 'http://foo'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c._send_request = AsyncMock()
    c._send_request.mock.return_value.status = 200
    c._send_request.mock.return_value.read = AsyncMock(return_value=b'foo')
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_polling())
    assert c.state == 'disconnected'
    c.queue.put.mock.assert_called_once_with(None)
    # timeout here is ping_timeout (60) + 5 since it exceeds ping_interval
    c._send_request.mock.assert_called_once_with(
        'GET', 'http://foo&t=123.456', timeout=65
    )

def test_read_loop_polling(self):
    """A valid payload is decoded and each packet is dispatched."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 25
    c.ping_timeout = 5
    c.state = 'connected'
    c.base_url = 'http://foo'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c._send_request = AsyncMock()
    # first GET returns a PING + NOOP payload, second fails ending the loop
    c._send_request.mock.side_effect = [
        mock.MagicMock(
            status=200,
            read=AsyncMock(
                return_value=payload.Payload(
                    packets=[
                        packet.Packet(packet.PING),
                        packet.Packet(packet.NOOP),
                    ]
                ).encode().encode('utf-8')
            ),
        ),
        None,
    ]
    c.write_loop_task = AsyncMock()()
    c._receive_packet = AsyncMock()
    _run(c._read_loop_polling())
    assert c.state == 'disconnected'
    c.queue.put.mock.assert_called_once_with(None)
    assert c._send_request.mock.call_count == 2
    assert c._receive_packet.mock.call_count == 2
    # '2' is the PING packet encoding, '6' is NOOP
    assert c._receive_packet.mock.call_args_list[0][0][0].encode() == '2'
    assert c._receive_packet.mock.call_args_list[1][0][0].encode() == '6'
def test_read_loop_websocket_disconnected(self):
    """The websocket read loop exits immediately when not connected."""
    c = asyncio_client.AsyncClient()
    c.state = 'disconnected'
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_websocket())
    # should not block

def test_read_loop_websocket_timeout(self):
    """A receive timeout aborts the loop and unblocks the write loop."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 1
    c.ping_timeout = 2
    c.base_url = 'ws://foo'
    c.state = 'connected'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c.ws = mock.MagicMock()
    c.ws.receive = AsyncMock(side_effect=asyncio.TimeoutError())
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_websocket())
    assert c.state == 'disconnected'
    c.queue.put.mock.assert_called_once_with(None)

def test_read_loop_websocket_no_response(self):
    """A server disconnect during receive aborts the loop."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 1
    c.ping_timeout = 2
    c.base_url = 'ws://foo'
    c.state = 'connected'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c.ws = mock.MagicMock()
    c.ws.receive = AsyncMock(
        side_effect=aiohttp.client_exceptions.ServerDisconnectedError()
    )
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_websocket())
    assert c.state == 'disconnected'
    c.queue.put.mock.assert_called_once_with(None)

def test_read_loop_websocket_unexpected_error(self):
    """Any unexpected receive error aborts the loop gracefully."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 1
    c.ping_timeout = 2
    c.base_url = 'ws://foo'
    c.state = 'connected'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c.ws = mock.MagicMock()
    c.ws.receive = AsyncMock(side_effect=ValueError)
    c.write_loop_task = AsyncMock()()
    _run(c._read_loop_websocket())
    assert c.state == 'disconnected'
    c.queue.put.mock.assert_called_once_with(None)

def test_read_loop_websocket(self):
    """Received websocket frames are decoded and dispatched."""
    c = asyncio_client.AsyncClient()
    c.ping_interval = 1
    c.ping_timeout = 2
    c.base_url = 'ws://foo'
    c.state = 'connected'
    c.queue = mock.MagicMock()
    c.queue.put = AsyncMock()
    c.ws = mock.MagicMock()
    # one PING frame, then an error to terminate the loop
    c.ws.receive = AsyncMock(
        side_effect=[
            mock.MagicMock(data=packet.Packet(packet.PING).encode()),
            ValueError,
        ]
    )
    c.write_loop_task = AsyncMock()()
    c._receive_packet = AsyncMock()
    _run(c._read_loop_websocket())
    assert c.state == 'disconnected'
    assert c._receive_packet.mock.call_args_list[0][0][0].encode() == '2'
    c.queue.put.mock.assert_called_once_with(None)
def test_write_loop_disconnected(self):
    """The write loop exits immediately when not connected."""
    c = asyncio_client.AsyncClient()
    c.state = 'disconnected'
    _run(c._write_loop())
    # should not block

def test_write_loop_no_packets(self):
    """A None item on the queue is the shutdown signal: loop ends."""
    c = asyncio_client.AsyncClient()
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.queue = mock.MagicMock()
    c.queue.get = AsyncMock(return_value=None)
    _run(c._write_loop())
    c.queue.task_done.assert_called_once_with()
    c.queue.get.mock.assert_called_once_with()

def test_write_loop_empty_queue(self):
    """A queue-empty exception on get() terminates the loop."""
    c = asyncio_client.AsyncClient()
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.queue = mock.MagicMock()
    # RuntimeError stands in for the queue's Empty exception type
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(side_effect=RuntimeError)
    _run(c._write_loop())
    c.queue.get.mock.assert_called_once_with()

def test_write_loop_polling_one_packet(self):
    """A single queued packet is POSTed as an encoded payload."""
    c = asyncio_client.AsyncClient()
    c.base_url = 'http://foo'
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'polling'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            RuntimeError,
        ]
    )
    # no extra packets waiting beyond the first one
    c.queue.get_nowait = mock.MagicMock(side_effect=RuntimeError)
    c._send_request = AsyncMock()
    c._send_request.mock.return_value.status = 200
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 1
    p = payload.Payload(
        packets=[packet.Packet(packet.MESSAGE, {'foo': 'bar'})]
    )
    c._send_request.mock.assert_called_once_with(
        'POST',
        'http://foo',
        body=p.encode(),
        headers={'Content-Type': 'text/plain'},
        timeout=5,
    )
def test_write_loop_polling_three_packets(self):
    """Queued packets are drained with get_nowait and sent in one payload."""
    c = asyncio_client.AsyncClient()
    c.base_url = 'http://foo'
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'polling'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            RuntimeError,
        ]
    )
    # two more packets are already waiting in the queue
    c.queue.get_nowait = mock.MagicMock(
        side_effect=[
            packet.Packet(packet.PING),
            packet.Packet(packet.NOOP),
            RuntimeError,
        ]
    )
    c._send_request = AsyncMock()
    c._send_request.mock.return_value.status = 200
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 3
    p = payload.Payload(
        packets=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            packet.Packet(packet.PING),
            packet.Packet(packet.NOOP),
        ]
    )
    c._send_request.mock.assert_called_once_with(
        'POST',
        'http://foo',
        body=p.encode(),
        headers={'Content-Type': 'text/plain'},
        timeout=5,
    )

def test_write_loop_polling_two_packets_done(self):
    """A trailing None in the drained batch is dropped before sending."""
    c = asyncio_client.AsyncClient()
    c.base_url = 'http://foo'
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'polling'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            RuntimeError,
        ]
    )
    # the None sentinel terminates the drain but is not transmitted
    c.queue.get_nowait = mock.MagicMock(
        side_effect=[packet.Packet(packet.PING), None]
    )
    c._send_request = AsyncMock()
    c._send_request.mock.return_value.status = 200
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 3
    p = payload.Payload(
        packets=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            packet.Packet(packet.PING),
        ]
    )
    c._send_request.mock.assert_called_once_with(
        'POST',
        'http://foo',
        body=p.encode(),
        headers={'Content-Type': 'text/plain'},
        timeout=5,
    )
    assert c.state == 'connected'
def test_write_loop_polling_bad_connection(self):
    """A failed POST (None response) ends the loop without state change."""
    c = asyncio_client.AsyncClient()
    c.base_url = 'http://foo'
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'polling'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[packet.Packet(packet.MESSAGE, {'foo': 'bar'})]
    )
    c.queue.get_nowait = mock.MagicMock(side_effect=[RuntimeError])
    c._send_request = AsyncMock(return_value=None)
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 1
    p = payload.Payload(
        packets=[packet.Packet(packet.MESSAGE, {'foo': 'bar'})]
    )
    c._send_request.mock.assert_called_once_with(
        'POST',
        'http://foo',
        body=p.encode(),
        headers={'Content-Type': 'text/plain'},
        timeout=5,
    )
    # the read loop is responsible for the disconnect, state stays put
    assert c.state == 'connected'

def test_write_loop_polling_bad_status(self):
    """A non-2xx POST response resets the client and disconnects."""
    c = asyncio_client.AsyncClient()
    c.base_url = 'http://foo'
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'polling'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[packet.Packet(packet.MESSAGE, {'foo': 'bar'})]
    )
    c.queue.get_nowait = mock.MagicMock(side_effect=[RuntimeError])
    c._send_request = AsyncMock()
    c._send_request.mock.return_value.status = 500
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 1
    p = payload.Payload(
        packets=[packet.Packet(packet.MESSAGE, {'foo': 'bar'})]
    )
    c._send_request.mock.assert_called_once_with(
        'POST',
        'http://foo',
        body=p.encode(),
        headers={'Content-Type': 'text/plain'},
        timeout=5,
    )
    assert c.state == 'disconnected'
def test_write_loop_websocket_one_packet(self):
    """A single text packet goes out through ws.send_str."""
    c = asyncio_client.AsyncClient()
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'websocket'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            RuntimeError,
        ]
    )
    c.queue.get_nowait = mock.MagicMock(side_effect=[RuntimeError])
    c.ws = mock.MagicMock()
    c.ws.send_str = AsyncMock()
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 1
    assert c.ws.send_str.mock.call_count == 1
    # '4' prefix marks a MESSAGE packet; dict payload is JSON-encoded
    c.ws.send_str.mock.assert_called_once_with('4{"foo":"bar"}')

def test_write_loop_websocket_three_packets(self):
    """Each packet in a drained batch is sent as its own frame."""
    c = asyncio_client.AsyncClient()
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'websocket'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            RuntimeError,
        ]
    )
    c.queue.get_nowait = mock.MagicMock(
        side_effect=[
            packet.Packet(packet.PING),
            packet.Packet(packet.NOOP),
            RuntimeError,
        ]
    )
    c.ws = mock.MagicMock()
    c.ws.send_str = AsyncMock()
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 3
    assert c.ws.send_str.mock.call_count == 3
    assert c.ws.send_str.mock.call_args_list[0][0][0] == '4{"foo":"bar"}'
    assert c.ws.send_str.mock.call_args_list[1][0][0] == '2'
    assert c.ws.send_str.mock.call_args_list[2][0][0] == '6'

def test_write_loop_websocket_one_packet_binary(self):
    """Binary packets go out through ws.send_bytes, not send_str."""
    c = asyncio_client.AsyncClient()
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'websocket'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[packet.Packet(packet.MESSAGE, b'foo'), RuntimeError]
    )
    c.queue.get_nowait = mock.MagicMock(side_effect=[RuntimeError])
    c.ws = mock.MagicMock()
    c.ws.send_bytes = AsyncMock()
    _run(c._write_loop())
    assert c.queue.task_done.call_count == 1
    assert c.ws.send_bytes.mock.call_count == 1
    c.ws.send_bytes.mock.assert_called_once_with(b'foo')

def test_write_loop_websocket_bad_connection(self):
    """A send failure ends the loop; disconnect is left to the read loop."""
    c = asyncio_client.AsyncClient()
    c.state = 'connected'
    c.ping_interval = 1
    c.ping_timeout = 2
    c.current_transport = 'websocket'
    c.queue = mock.MagicMock()
    c.queue.Empty = RuntimeError
    c.queue.get = AsyncMock(
        side_effect=[
            packet.Packet(packet.MESSAGE, {'foo': 'bar'}),
            RuntimeError,
        ]
    )
    c.queue.get_nowait = mock.MagicMock(side_effect=[RuntimeError])
    c.ws = mock.MagicMock()
    c.ws.send_str = AsyncMock(
        side_effect=aiohttp.client_exceptions.ServerDisconnectedError()
    )
    _run(c._write_loop())
    assert c.state == 'connected'
@mock.patch('engineio.client.original_signal_handler')
def test_signal_handler(self, original_handler):
    """async_signal_handler disconnects only asyncio-based clients."""
    clients = [mock.MagicMock(), mock.MagicMock()]
    client.connected_clients = clients[:]
    client.connected_clients[0].is_asyncio_based.return_value = False
    client.connected_clients[1].is_asyncio_based.return_value = True

    async def test():
        # must run inside a loop: the handler schedules a task
        asyncio_client.async_signal_handler()

    asyncio.get_event_loop().run_until_complete(test())
    clients[0].disconnect.assert_not_called()
    clients[1].disconnect.assert_called_once_with()
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,430
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/asyncio_client.py
|
import asyncio
import signal
import ssl
import threading
try:
import aiohttp
except ImportError: # pragma: no cover
aiohttp = None
from . import client
from . import exceptions
from . import packet
from . import payload
async_signal_handler_set = False
def async_signal_handler():
    """SIGINT handler.

    Disconnect all active async clients, then cancel any remaining tasks
    and stop the event loop.
    """
    async def _handler():  # pragma: no cover
        # iterate over a copy: disconnect() removes clients from the list
        for c in client.connected_clients[:]:
            if c.is_asyncio_based():
                await c.disconnect()

        # cancel all running tasks except this handler itself
        tasks = [task for task in asyncio.all_tasks() if task is not
                 asyncio.current_task()]
        for task in tasks:
            task.cancel()
        # wait for cancellations to settle; swallow their exceptions
        await asyncio.gather(*tasks, return_exceptions=True)
        asyncio.get_event_loop().stop()

    asyncio.ensure_future(_handler())
class AsyncClient(client.Client):
    """An Engine.IO client for asyncio.

    This class implements a fully compliant Engine.IO web client with support
    for websocket and long-polling transports, compatible with the asyncio
    framework on Python 3.5 or newer.

    :param logger: To enable logging set to ``True`` or pass a logger object to
                   use. To disable logging set to ``False``. The default is
                   ``False``. Note that fatal errors are logged even when
                   ``logger`` is ``False``.
    :param json: An alternative json module to use for encoding and decoding
                 packets. Custom json modules must have ``dumps`` and ``loads``
                 functions that are compatible with the standard library
                 versions.
    :param request_timeout: A timeout in seconds for requests. The default is
                            5 seconds.
    :param http_session: an initialized ``aiohttp.ClientSession`` object to be
                         used when sending requests to the server. Use it if
                         you need to add special client options such as proxy
                         servers, SSL certificates, etc.
    :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
                       skip SSL certificate verification, allowing
                       connections to servers with self signed certificates.
                       The default is ``True``.
    :param handle_sigint: Set to ``True`` to automatically handle disconnection
                          when the process is interrupted, or to ``False`` to
                          leave interrupt handling to the calling application.
                          Interrupt handling can only be enabled when the
                          client instance is created in the main thread.
    :param websocket_extra_options: Dictionary containing additional keyword
                                    arguments passed to
                                    ``aiohttp.ws_connect()``.
    """
    def is_asyncio_based(self):
        """Return True: this client uses the asyncio framework."""
        return True
async def connect(self, url, headers=None, transports=None,
                  engineio_path='engine.io'):
    """Connect to an Engine.IO server.

    :param url: The URL of the Engine.IO server. It can include custom
                query string parameters if required by the server.
    :param headers: A dictionary with custom headers to send with the
                    connection request.
    :param transports: The list of allowed transports. Valid transports
                       are ``'polling'`` and ``'websocket'``. If not
                       given, the polling transport is connected first,
                       then an upgrade to websocket is attempted.
    :param engineio_path: The endpoint where the Engine.IO server is
                          installed. The default value is appropriate for
                          most cases.

    Note: this method is a coroutine.

    Example usage::

        eio = engineio.Client()
        await eio.connect('http://localhost:5000')
    """
    global async_signal_handler_set
    # install the SIGINT handler once, and only from the main thread,
    # since signal handlers cannot be registered elsewhere
    if self.handle_sigint and not async_signal_handler_set and \
            threading.current_thread() == threading.main_thread():
        try:
            asyncio.get_event_loop().add_signal_handler(
                signal.SIGINT, async_signal_handler)
        except NotImplementedError:  # pragma: no cover
            self.logger.warning('Signal handler is unsupported')
        async_signal_handler_set = True

    if self.state != 'disconnected':
        raise ValueError('Client is not in a disconnected state')
    valid_transports = ['polling', 'websocket']
    if transports is not None:
        if isinstance(transports, str):
            transports = [transports]
        # silently drop unknown transport names
        transports = [transport for transport in transports
                      if transport in valid_transports]
        if not transports:
            raise ValueError('No valid transports provided')
    self.transports = transports or valid_transports
    self.queue = self.create_queue()
    # dispatch to _connect_polling or _connect_websocket
    return await getattr(self, '_connect_' + self.transports[0])(
        url, headers or {}, engineio_path)
async def wait(self):
    """Wait until the connection with the server ends.

    Client applications can use this function to block the main thread
    during the life of the connection.

    Note: this method is a coroutine.
    """
    if self.read_loop_task:
        await self.read_loop_task

async def send(self, data):
    """Send a message to the server.

    :param data: The data to send to the server. Data can be of type
                 ``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
                 or ``dict``, the data will be serialized as JSON.

    Note: this method is a coroutine.
    """
    await self._send_packet(packet.Packet(packet.MESSAGE, data=data))
async def disconnect(self, abort=False):
    """Disconnect from the server.

    :param abort: If set to ``True``, do not wait for background tasks
                  associated with the connection to end.

    Note: this method is a coroutine.
    """
    if self.state == 'connected':
        await self._send_packet(packet.Packet(packet.CLOSE))
        # the None sentinel unblocks and terminates the write loop
        await self.queue.put(None)
        self.state = 'disconnecting'
        await self._trigger_event('disconnect', run_async=False)
        if self.current_transport == 'websocket':
            await self.ws.close()
        if not abort:
            await self.read_loop_task
        self.state = 'disconnected'
        try:
            client.connected_clients.remove(self)
        except ValueError:  # pragma: no cover
            pass
    await self._reset()
def start_background_task(self, target, *args, **kwargs):
    """Start a background task.

    This is a utility function that applications can use to start a
    background task.

    :param target: the target function to execute.
    :param args: arguments to pass to the function.
    :param kwargs: keyword arguments to pass to the function.

    The return value is a ``asyncio.Task`` object.
    """
    coroutine = target(*args, **kwargs)
    return asyncio.ensure_future(coroutine)

async def sleep(self, seconds=0):
    """Sleep for the requested amount of time.

    Note: this method is a coroutine.
    """
    result = await asyncio.sleep(seconds)
    return result

def create_queue(self):
    """Create a queue object."""
    queue_instance = asyncio.Queue()
    # expose the empty-queue exception on the instance, mirroring the
    # interface of the standard queue.Queue class
    queue_instance.Empty = asyncio.QueueEmpty
    return queue_instance

def create_event(self):
    """Create an event object."""
    return asyncio.Event()
async def _reset(self):
    """Reset the client state and release the aiohttp session.

    Extends the base class reset by also closing the internally-created
    HTTP session; externally supplied sessions are left to the caller.
    """
    super()._reset()
    if not self.external_http:  # pragma: no cover
        if self.http and not self.http.closed:
            await self.http.close()

def __del__(self):  # pragma: no cover
    # try to close the aiohttp session if it is still open
    if self.http and not self.http.closed:
        try:
            loop = asyncio.get_event_loop()
            if loop.is_running():
                # fix: event loops do not have an ensure_future() method
                # (it is a module-level function); the previous
                # loop.ensure_future(...) raised AttributeError, which the
                # bare except silently swallowed, leaking the session
                loop.create_task(self.http.close())
            else:
                loop.run_until_complete(self.http.close())
        except:  # noqa: E722
            # any failure during interpreter shutdown is best left ignored
            pass
async def _connect_polling(self, url, headers, engineio_path):
    """Establish a long-polling connection to the Engine.IO server."""
    if aiohttp is None:  # pragma: no cover
        self.logger.error('aiohttp not installed -- cannot make HTTP '
                          'requests!')
        return
    self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
    self.logger.info('Attempting polling connection to ' + self.base_url)
    r = await self._send_request(
        'GET', self.base_url + self._get_url_timestamp(), headers=headers,
        timeout=self.request_timeout)
    # _send_request returns None or an error string on failure
    if r is None or isinstance(r, str):
        await self._reset()
        raise exceptions.ConnectionError(
            r or 'Connection refused by the server')
    if r.status < 200 or r.status >= 300:
        await self._reset()
        try:
            arg = await r.json()
        except aiohttp.ClientError:
            arg = None
        raise exceptions.ConnectionError(
            'Unexpected status code {} in server response'.format(
                r.status), arg)
    try:
        p = payload.Payload(encoded_payload=(await r.read()).decode(
            'utf-8'))
    except ValueError:
        raise exceptions.ConnectionError(
            'Unexpected response from server') from None
    # the first packet of the handshake payload must be OPEN
    open_packet = p.packets[0]
    if open_packet.packet_type != packet.OPEN:
        raise exceptions.ConnectionError(
            'OPEN packet not returned by server')
    self.logger.info(
        'Polling connection accepted with ' + str(open_packet.data))
    self.sid = open_packet.data['sid']
    self.upgrades = open_packet.data['upgrades']
    # server reports ping times in milliseconds; store seconds
    self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
    self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
    self.current_transport = 'polling'
    self.base_url += '&sid=' + self.sid

    self.state = 'connected'
    client.connected_clients.append(self)
    await self._trigger_event('connect', run_async=False)

    # any packets that arrived along with the handshake are dispatched now
    for pkt in p.packets[1:]:
        await self._receive_packet(pkt)

    if 'websocket' in self.upgrades and 'websocket' in self.transports:
        # attempt to upgrade to websocket
        if await self._connect_websocket(url, headers, engineio_path):
            # upgrade to websocket succeeded, we're done here
            return

    self.write_loop_task = self.start_background_task(self._write_loop)
    self.read_loop_task = self.start_background_task(
        self._read_loop_polling)
async def _connect_websocket(self, url, headers, engineio_path):
    """Establish or upgrade to a WebSocket connection with the server.

    Returns ``True`` on success. When called as an upgrade attempt,
    failures return ``False`` so the polling transport stays in use;
    when called as the initial connection, failures raise.
    """
    if aiohttp is None:  # pragma: no cover
        self.logger.error('aiohttp package not installed')
        return False
    websocket_url = self._get_engineio_url(url, engineio_path,
                                           'websocket')
    if self.sid:
        # a sid is already assigned: this is an upgrade from polling
        self.logger.info(
            'Attempting WebSocket upgrade to ' + websocket_url)
        upgrade = True
        websocket_url += '&sid=' + self.sid
    else:
        upgrade = False
        self.base_url = websocket_url
        self.logger.info(
            'Attempting WebSocket connection to ' + websocket_url)

    if self.http is None or self.http.closed:  # pragma: no cover
        self.http = aiohttp.ClientSession()

    # extract any new cookies passed in a header so that they can also be
    # sent to the WebSocket route
    cookies = {}
    for header, value in headers.items():
        if header.lower() == 'cookie':
            cookies = dict(
                [cookie.split('=', 1) for cookie in value.split('; ')])
            del headers[header]
            break
    self.http.cookie_jar.update_cookies(cookies)

    extra_options = {'timeout': self.request_timeout}
    if not self.ssl_verify:
        # build a context that skips certificate and hostname checks
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        extra_options['ssl'] = ssl_context

    # combine internally generated options with the ones supplied by the
    # caller. The caller's options take precedence.
    headers.update(self.websocket_extra_options.pop('headers', {}))
    extra_options['headers'] = headers
    extra_options.update(self.websocket_extra_options)
    try:
        ws = await self.http.ws_connect(
            websocket_url + self._get_url_timestamp(), **extra_options)
    except (aiohttp.client_exceptions.WSServerHandshakeError,
            aiohttp.client_exceptions.ServerConnectionError,
            aiohttp.client_exceptions.ClientConnectionError):
        if upgrade:
            self.logger.warning(
                'WebSocket upgrade failed: connection error')
            return False
        else:
            raise exceptions.ConnectionError('Connection error')
    if upgrade:
        # probe the new transport with a PING/PONG exchange before
        # committing to it (Engine.IO upgrade protocol)
        p = packet.Packet(packet.PING, data='probe').encode()
        try:
            await ws.send_str(p)
        except Exception as e:  # pragma: no cover
            self.logger.warning(
                'WebSocket upgrade failed: unexpected send exception: %s',
                str(e))
            return False
        try:
            p = (await ws.receive()).data
        except Exception as e:  # pragma: no cover
            self.logger.warning(
                'WebSocket upgrade failed: unexpected recv exception: %s',
                str(e))
            return False
        pkt = packet.Packet(encoded_packet=p)
        if pkt.packet_type != packet.PONG or pkt.data != 'probe':
            self.logger.warning(
                'WebSocket upgrade failed: no PONG packet')
            return False
        p = packet.Packet(packet.UPGRADE).encode()
        try:
            await ws.send_str(p)
        except Exception as e:  # pragma: no cover
            self.logger.warning(
                'WebSocket upgrade failed: unexpected send exception: %s',
                str(e))
            return False
        self.current_transport = 'websocket'
        self.logger.info('WebSocket upgrade was successful')
    else:
        # direct websocket connection: the server sends OPEN first
        try:
            p = (await ws.receive()).data
        except Exception as e:  # pragma: no cover
            raise exceptions.ConnectionError(
                'Unexpected recv exception: ' + str(e))
        open_packet = packet.Packet(encoded_packet=p)
        if open_packet.packet_type != packet.OPEN:
            raise exceptions.ConnectionError('no OPEN packet')
        self.logger.info(
            'WebSocket connection accepted with ' + str(open_packet.data))
        self.sid = open_packet.data['sid']
        self.upgrades = open_packet.data['upgrades']
        # server reports ping times in milliseconds; store seconds
        self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
        self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
        self.current_transport = 'websocket'

        self.state = 'connected'
        client.connected_clients.append(self)
        await self._trigger_event('connect', run_async=False)

    self.ws = ws
    self.write_loop_task = self.start_background_task(self._write_loop)
    self.read_loop_task = self.start_background_task(
        self._read_loop_websocket)
    return True
async def _receive_packet(self, pkt):
    """Handle incoming packets from the server."""
    packet_name = packet.packet_names[pkt.packet_type] \
        if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
    self.logger.info(
        'Received packet %s data %s', packet_name,
        pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
    if pkt.packet_type == packet.MESSAGE:
        # user messages are dispatched in a background task so that a
        # slow handler does not stall the read loop
        await self._trigger_event('message', pkt.data, run_async=True)
    elif pkt.packet_type == packet.PING:
        # answer server pings immediately, echoing the ping data
        await self._send_packet(packet.Packet(packet.PONG, pkt.data))
    elif pkt.packet_type == packet.CLOSE:
        await self.disconnect(abort=True)
    elif pkt.packet_type == packet.NOOP:
        pass
    else:
        self.logger.error('Received unexpected packet of type %s',
                          pkt.packet_type)

async def _send_packet(self, pkt):
    """Queue a packet to be sent to the server."""
    # packets queued after disconnection are silently dropped
    if self.state != 'connected':
        return
    await self.queue.put(pkt)
    self.logger.info(
        'Sending packet %s data %s',
        packet.packet_names[pkt.packet_type],
        pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
async def _send_request(
        self, method, url, headers=None, body=None,
        timeout=None):  # pragma: no cover
    """Issue an HTTP request through the aiohttp session.

    Returns the aiohttp response object, or the error message as a
    string when the request fails (callers check for ``str`` results).
    """
    if self.http is None or self.http.closed:
        self.http = aiohttp.ClientSession()
    # resolve e.g. 'GET' -> session.get, 'POST' -> session.post
    http_method = getattr(self.http, method.lower())

    try:
        if not self.ssl_verify:
            return await http_method(
                url, headers=headers, data=body,
                timeout=aiohttp.ClientTimeout(total=timeout), ssl=False)
        else:
            return await http_method(
                url, headers=headers, data=body,
                timeout=aiohttp.ClientTimeout(total=timeout))
    except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
        self.logger.info('HTTP %s request to %s failed with error %s.',
                         method, url, exc)
        return str(exc)
async def _trigger_event(self, event, *args, **kwargs):
    """Invoke an event handler.

    With ``run_async=True`` the handler is scheduled as a background
    task and the task object is returned; otherwise the handler runs
    inline and its return value is returned. A 'connect' handler that
    raises causes ``False`` to be returned, rejecting the connection.
    """
    run_async = kwargs.pop('run_async', False)
    ret = None
    if event in self.handlers:
        if asyncio.iscoroutinefunction(self.handlers[event]) is True:
            if run_async:
                return self.start_background_task(self.handlers[event],
                                                  *args)
            else:
                try:
                    ret = await self.handlers[event](*args)
                except asyncio.CancelledError:  # pragma: no cover
                    pass
                except:
                    self.logger.exception(event + ' async handler error')
                    if event == 'connect':
                        # if connect handler raised error we reject the
                        # connection
                        return False
        else:
            if run_async:
                # wrap the sync handler so it can run as a task
                async def async_handler():
                    return self.handlers[event](*args)

                return self.start_background_task(async_handler)
            else:
                try:
                    ret = self.handlers[event](*args)
                except:
                    self.logger.exception(event + ' handler error')
                    if event == 'connect':
                        # if connect handler raised error we reject the
                        # connection
                        return False
    return ret
async def _read_loop_polling(self):
    """Read packets by polling the Engine.IO server."""
    while self.state == 'connected':
        self.logger.info(
            'Sending polling GET request to ' + self.base_url)
        # allow one ping cycle plus a grace period before timing out
        r = await self._send_request(
            'GET', self.base_url + self._get_url_timestamp(),
            timeout=max(self.ping_interval, self.ping_timeout) + 5)
        if r is None or isinstance(r, str):
            self.logger.warning(
                r or 'Connection refused by the server, aborting')
            # the None sentinel tells the write loop to exit
            await self.queue.put(None)
            break
        if r.status < 200 or r.status >= 300:
            self.logger.warning('Unexpected status code %s in server '
                                'response, aborting', r.status)
            await self.queue.put(None)
            break
        try:
            p = payload.Payload(encoded_payload=(await r.read()).decode(
                'utf-8'))
        except ValueError:
            self.logger.warning(
                'Unexpected packet from server, aborting')
            await self.queue.put(None)
            break
        for pkt in p.packets:
            await self._receive_packet(pkt)

    self.logger.info('Waiting for write loop task to end')
    await self.write_loop_task
    # if the loop was exited by an error (not by disconnect()), fire the
    # disconnect event and clean up here
    if self.state == 'connected':
        await self._trigger_event('disconnect', run_async=False)
        try:
            client.connected_clients.remove(self)
        except ValueError:  # pragma: no cover
            pass
        await self._reset()
    self.logger.info('Exiting read loop task')
async def _read_loop_websocket(self):
    """Read packets from the Engine.IO WebSocket connection."""
    while self.state == 'connected':
        p = None
        try:
            # a silent server is assumed dead after a full ping cycle
            p = await asyncio.wait_for(
                self.ws.receive(),
                timeout=self.ping_interval + self.ping_timeout)
            if not isinstance(p.data, (str, bytes)):  # pragma: no cover
                self.logger.warning(
                    'Server sent unexpected packet %s data %s, aborting',
                    str(p.type), str(p.data))
                await self.queue.put(None)
                break  # the connection is broken
            p = p.data
        except asyncio.TimeoutError:
            self.logger.warning(
                'Server has stopped communicating, aborting')
            await self.queue.put(None)
            break
        except aiohttp.client_exceptions.ServerDisconnectedError:
            self.logger.info(
                'Read loop: WebSocket connection was closed, aborting')
            await self.queue.put(None)
            break
        except Exception as e:
            self.logger.info(
                'Unexpected error receiving packet: "%s", aborting',
                str(e))
            await self.queue.put(None)
            break
        try:
            pkt = packet.Packet(encoded_packet=p)
        except Exception as e:  # pragma: no cover
            self.logger.info(
                'Unexpected error decoding packet: "%s", aborting', str(e))
            await self.queue.put(None)
            break
        await self._receive_packet(pkt)

    self.logger.info('Waiting for write loop task to end')
    await self.write_loop_task
    # if the loop was exited by an error (not by disconnect()), fire the
    # disconnect event and clean up here
    if self.state == 'connected':
        await self._trigger_event('disconnect', run_async=False)
        try:
            client.connected_clients.remove(self)
        except ValueError:  # pragma: no cover
            pass
        await self._reset()
    self.logger.info('Exiting read loop task')
    async def _write_loop(self):
        """This background task sends packages to the server as they are
        pushed to the send queue.

        A ``None`` item in the queue is the shutdown signal. On the
        polling transport all pending packets are batched into a single
        POST payload; on WebSocket they are sent frame by frame.
        """
        while self.state == 'connected':
            # to simplify the timeout handling, use the maximum of the
            # ping interval and ping timeout as timeout, with an extra 5
            # seconds grace period
            timeout = max(self.ping_interval, self.ping_timeout) + 5
            packets = None
            try:
                packets = [await asyncio.wait_for(self.queue.get(), timeout)]
            except (self.queue.Empty, asyncio.TimeoutError):
                self.logger.error('packet queue is empty, aborting')
                break
            except asyncio.CancelledError:  # pragma: no cover
                break
            if packets == [None]:
                # shutdown signal received before any packet
                self.queue.task_done()
                packets = []
            else:
                # drain whatever else is already queued so it can all go
                # out in one batch; stop early on the shutdown signal
                while True:
                    try:
                        packets.append(self.queue.get_nowait())
                    except self.queue.Empty:
                        break
                    if packets[-1] is None:
                        packets = packets[:-1]
                        self.queue.task_done()
                        break
            if not packets:
                # empty packet list returned -> connection closed
                break
            if self.current_transport == 'polling':
                p = payload.Payload(packets=packets)
                r = await self._send_request(
                    'POST', self.base_url, body=p.encode(),
                    headers={'Content-Type': 'text/plain'},
                    timeout=self.request_timeout)
                # mark every batched packet done regardless of outcome
                for pkt in packets:
                    self.queue.task_done()
                if r is None or isinstance(r, str):
                    self.logger.warning(
                        r or 'Connection refused by the server, aborting')
                    break
                if r.status < 200 or r.status >= 300:
                    self.logger.warning('Unexpected status code %s in server '
                                        'response, aborting', r.status)
                    await self._reset()
                    break
            else:
                # websocket
                try:
                    for pkt in packets:
                        if pkt.binary:
                            await self.ws.send_bytes(pkt.encode())
                        else:
                            await self.ws.send_str(pkt.encode())
                        self.queue.task_done()
                except (aiohttp.client_exceptions.ServerDisconnectedError,
                        BrokenPipeError, OSError):
                    self.logger.info(
                        'Write loop: WebSocket connection was closed, '
                        'aborting')
                    break
        self.logger.info('Exiting write loop task')
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,431
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/asyncio/test_async_asgi.py
|
import asyncio
import os
import sys
import unittest
from unittest import mock
from engineio.async_drivers import asgi as async_asgi
def AsyncMock(*args, **kwargs):
    """Build an awaitable stand-in backed by a ``MagicMock``.

    The returned coroutine function forwards every call to an internal
    ``MagicMock`` (configured with *args*/*kwargs*) and returns its
    result. The recording mock is exposed as the ``mock`` attribute so
    tests can make call assertions on it.
    """
    tracker = mock.MagicMock(*args, **kwargs)

    async def _awaitable(*call_args, **call_kwargs):
        return tracker(*call_args, **call_kwargs)

    _awaitable.mock = tracker
    return _awaitable
def _run(coro):
"""Run the given coroutine."""
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(sys.version_info < (3, 5), 'only for Python 3.5+')
class AsgiTests(unittest.TestCase):
    """Tests for the ASGI driver: request routing, static file serving,
    the ASGI lifespan protocol, and the ASGI<->WSGI-style translation
    helpers in ``engineio.async_drivers.asgi``."""
    def test_create_app(self):
        # Constructor arguments must be stored verbatim on the instance.
        app = async_asgi.ASGIApp(
            'eio',
            'other_app',
            static_files='static_files',
            engineio_path='/foo/',
        )
        assert app.engineio_server == 'eio'
        assert app.other_asgi_app == 'other_app'
        assert app.static_files == 'static_files'
        assert app.engineio_path == '/foo/'
    def test_engineio_routing(self):
        # Requests under the engine.io path go to the engine.io server.
        mock_server = mock.MagicMock()
        mock_server.handle_request = AsyncMock()
        app = async_asgi.ASGIApp(mock_server)
        scope = {'type': 'http', 'path': '/engine.io/'}
        _run(app(scope, 'receive', 'send'))
        mock_server.handle_request.mock.assert_called_once_with(
            scope, 'receive', 'send'
        )
    def test_other_app_routing(self):
        # Non-engine.io paths are delegated to the secondary ASGI app.
        other_app = AsyncMock()
        app = async_asgi.ASGIApp('eio', other_app)
        scope = {'type': 'http', 'path': '/foo'}
        _run(app(scope, 'receive', 'send'))
        other_app.mock.assert_called_once_with(scope, 'receive', 'send')
    def test_other_app_lifespan_routing(self):
        # Lifespan events also go to the secondary app when one is set.
        other_app = AsyncMock()
        app = async_asgi.ASGIApp('eio', other_app)
        scope = {'type': 'lifespan'}
        _run(app(scope, 'receive', 'send'))
        other_app.mock.assert_called_once_with(scope, 'receive', 'send')
    def test_static_file_routing(self):
        # Exercise every static_files mapping form: exact file, dict with
        # content type, directory, directory with trailing slash, and a
        # directory dict that forces a content type on all its files.
        root_dir = os.path.dirname(__file__)
        app = async_asgi.ASGIApp(
            'eio',
            static_files={
                '/': root_dir + '/index.html',
                '/foo': {
                    'content_type': 'text/plain',
                    'filename': root_dir + '/index.html',
                },
                '/static': root_dir,
                '/static/test/': root_dir + '/',
                '/static2/test/': {'filename': root_dir + '/',
                                   'content_type': 'image/gif'},
            },
        )
        def check_path(path, status_code, content_type, body):
            # Helper: issue a request for *path* and assert both the
            # response start (status + content type) and the body frame.
            scope = {'type': 'http', 'path': path}
            receive = AsyncMock(return_value={'type': 'http.request'})
            send = AsyncMock()
            _run(app(scope, receive, send))
            send.mock.assert_any_call(
                {
                    'type': 'http.response.start',
                    'status': status_code,
                    'headers': [
                        (b'Content-Type', content_type.encode('utf-8'))
                    ],
                }
            )
            send.mock.assert_any_call(
                {'type': 'http.response.body', 'body': body.encode('utf-8')}
            )
        check_path('/', 200, 'text/html', '<html></html>\n')
        check_path('/foo', 200, 'text/plain', '<html></html>\n')
        check_path('/foo/bar', 404, 'text/plain', 'Not Found')
        check_path('/static/index.html', 200, 'text/html', '<html></html>\n')
        check_path('/static/foo.bar', 404, 'text/plain', 'Not Found')
        check_path(
            '/static/test/index.html', 200, 'text/html', '<html></html>\n'
        )
        check_path('/static/test/index.html', 200, 'text/html',
                   '<html></html>\n')
        check_path('/static/test/files/', 200, 'text/html',
                   '<html>file</html>\n')
        check_path('/static/test/files/file.txt', 200, 'text/plain',
                   'file\n')
        check_path('/static/test/files/x.html', 404, 'text/plain',
                   'Not Found')
        check_path('/static2/test/', 200, 'image/gif', '<html></html>\n')
        check_path('/static2/test/index.html', 200, 'image/gif',
                   '<html></html>\n')
        check_path('/static2/test/files/', 200, 'image/gif',
                   '<html>file</html>\n')
        check_path('/static2/test/files/file.txt', 200, 'image/gif',
                   'file\n')
        check_path('/static2/test/files/x.html', 404, 'text/plain',
                   'Not Found')
        check_path('/bar/foo', 404, 'text/plain', 'Not Found')
        check_path('', 404, 'text/plain', 'Not Found')
        # The '' key configures the default index document for
        # directory requests; exercise its string and dict forms.
        app.static_files[''] = 'index.html'
        check_path('/static/test/', 200, 'text/html', '<html></html>\n')
        app.static_files[''] = {'filename': 'index.html'}
        check_path('/static/test/', 200, 'text/html', '<html></html>\n')
        app.static_files[''] = {
            'filename': 'index.html',
            'content_type': 'image/gif',
        }
        check_path('/static/test/', 200, 'image/gif', '<html></html>\n')
        app.static_files[''] = {'filename': 'test.gif'}
        check_path('/static/test/', 404, 'text/plain', 'Not Found')
        # With no static file map at all, everything is a 404.
        app.static_files = {}
        check_path('/static/test/index.html', 404, 'text/plain', 'Not Found')
    def test_lifespan_startup(self):
        app = async_asgi.ASGIApp('eio')
        scope = {'type': 'lifespan'}
        receive = AsyncMock(side_effect=[{'type': 'lifespan.startup'},
                                         {'type': 'lifespan.shutdown'}])
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_any_call(
            {'type': 'lifespan.startup.complete'}
        )
    def test_lifespan_startup_sync_function(self):
        # A plain (non-async) on_startup callback must be invoked.
        up = False
        def startup():
            nonlocal up
            up = True
        app = async_asgi.ASGIApp('eio', on_startup=startup)
        scope = {'type': 'lifespan'}
        receive = AsyncMock(side_effect=[{'type': 'lifespan.startup'},
                                         {'type': 'lifespan.shutdown'}])
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_any_call(
            {'type': 'lifespan.startup.complete'}
        )
        assert up
    def test_lifespan_startup_async_function(self):
        # A coroutine on_startup callback must be awaited.
        up = False
        async def startup():
            nonlocal up
            up = True
        app = async_asgi.ASGIApp('eio', on_startup=startup)
        scope = {'type': 'lifespan'}
        receive = AsyncMock(side_effect=[{'type': 'lifespan.startup'},
                                         {'type': 'lifespan.shutdown'}])
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_any_call(
            {'type': 'lifespan.startup.complete'}
        )
        assert up
    def test_lifespan_startup_function_exception(self):
        # An exception in on_startup must produce lifespan.startup.failed.
        up = False
        def startup():
            raise Exception
        app = async_asgi.ASGIApp('eio', on_startup=startup)
        scope = {'type': 'lifespan'}
        receive = AsyncMock(side_effect=[{'type': 'lifespan.startup'}])
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_called_once_with({'type': 'lifespan.startup.failed'})
        assert not up
    def test_lifespan_shutdown(self):
        app = async_asgi.ASGIApp('eio')
        scope = {'type': 'lifespan'}
        receive = AsyncMock(return_value={'type': 'lifespan.shutdown'})
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_called_once_with(
            {'type': 'lifespan.shutdown.complete'}
        )
    def test_lifespan_shutdown_sync_function(self):
        # A plain (non-async) on_shutdown callback must be invoked.
        down = False
        def shutdown():
            nonlocal down
            down = True
        app = async_asgi.ASGIApp('eio', on_shutdown=shutdown)
        scope = {'type': 'lifespan'}
        receive = AsyncMock(return_value={'type': 'lifespan.shutdown'})
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_called_once_with(
            {'type': 'lifespan.shutdown.complete'}
        )
        assert down
    def test_lifespan_shutdown_async_function(self):
        # A coroutine on_shutdown callback must be awaited.
        down = False
        async def shutdown():
            nonlocal down
            down = True
        app = async_asgi.ASGIApp('eio', on_shutdown=shutdown)
        scope = {'type': 'lifespan'}
        receive = AsyncMock(return_value={'type': 'lifespan.shutdown'})
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_called_once_with(
            {'type': 'lifespan.shutdown.complete'}
        )
        assert down
    def test_lifespan_shutdown_function_exception(self):
        # An exception in on_shutdown must produce lifespan.shutdown.failed.
        down = False
        def shutdown():
            raise Exception
        app = async_asgi.ASGIApp('eio', on_shutdown=shutdown)
        scope = {'type': 'lifespan'}
        receive = AsyncMock(return_value={'type': 'lifespan.shutdown'})
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_called_once_with({'type': 'lifespan.shutdown.failed'})
        assert not down
    def test_lifespan_invalid(self):
        # Unknown lifespan events are ignored; the loop continues until
        # a shutdown event arrives.
        app = async_asgi.ASGIApp('eio')
        scope = {'type': 'lifespan'}
        receive = AsyncMock(side_effect=[{'type': 'lifespan.foo'},
                                         {'type': 'lifespan.shutdown'}])
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_called_once_with(
            {'type': 'lifespan.shutdown.complete'}
        )
    def test_not_found(self):
        # With no secondary app and no static files, unknown paths 404.
        app = async_asgi.ASGIApp('eio')
        scope = {'type': 'http', 'path': '/foo'}
        receive = AsyncMock(return_value={'type': 'http.request'})
        send = AsyncMock()
        _run(app(scope, receive, send))
        send.mock.assert_any_call(
            {
                'type': 'http.response.start',
                'status': 404,
                'headers': [(b'Content-Type', b'text/plain')],
            }
        )
        send.mock.assert_any_call(
            {'type': 'http.response.body', 'body': b'Not Found'}
        )
    def test_translate_request(self):
        # ASGI scope -> WSGI-style environ translation, including header
        # name mangling (dashes and underscores both become '_').
        receive = AsyncMock(
            return_value={'type': 'http.request', 'body': b'hello world'}
        )
        send = AsyncMock()
        environ = _run(
            async_asgi.translate_request(
                {
                    'type': 'http',
                    'method': 'PUT',
                    'headers': [
                        (b'a', b'b'),
                        (b'c-c', b'd'),
                        (b'c_c', b'e'),
                        (b'content-type', b'application/json'),
                        (b'content-length', b'123'),
                    ],
                    'path': '/foo/bar',
                    'query_string': b'baz=1',
                },
                receive,
                send,
            )
        )
        expected_environ = {
            'REQUEST_METHOD': 'PUT',
            'PATH_INFO': '/foo/bar',
            'QUERY_STRING': 'baz=1',
            'CONTENT_TYPE': 'application/json',
            'CONTENT_LENGTH': '123',
            'HTTP_A': 'b',
            # 'HTTP_C_C': 'd,e',
            'RAW_URI': '/foo/bar?baz=1',
            'SERVER_PROTOCOL': 'HTTP/1.1',
            'asgi.receive': receive,
            'asgi.send': send,
        }
        for k, v in expected_environ.items():
            assert v == environ[k]
        # 'c-c' and 'c_c' collide on HTTP_C_C; merge order is unspecified.
        assert environ['HTTP_C_C'] == 'd,e' or environ['HTTP_C_C'] == 'e,d'
        body = _run(environ['wsgi.input'].read())
        assert body == b'hello world'
    def test_translate_request_no_query_string(self):
        # A scope without 'query_string' yields an empty QUERY_STRING.
        receive = AsyncMock(
            return_value={'type': 'http.request', 'body': b'hello world'}
        )
        send = AsyncMock()
        environ = _run(
            async_asgi.translate_request(
                {
                    'type': 'http',
                    'method': 'PUT',
                    'headers': [
                        (b'a', b'b'),
                        (b'c-c', b'd'),
                        (b'c_c', b'e'),
                        (b'content-type', b'application/json'),
                        (b'content-length', b'123'),
                    ],
                    'path': '/foo/bar',
                },
                receive,
                send,
            )
        )
        expected_environ = {
            'REQUEST_METHOD': 'PUT',
            'PATH_INFO': '/foo/bar',
            'QUERY_STRING': '',
            'CONTENT_TYPE': 'application/json',
            'CONTENT_LENGTH': '123',
            'HTTP_A': 'b',
            # 'HTTP_C_C': 'd,e',
            'RAW_URI': '/foo/bar',
            'SERVER_PROTOCOL': 'HTTP/1.1',
            'asgi.receive': receive,
            'asgi.send': send,
        }
        for k, v in expected_environ.items():
            assert v == environ[k]
        assert environ['HTTP_C_C'] == 'd,e' or environ['HTTP_C_C'] == 'e,d'
        body = _run(environ['wsgi.input'].read())
        assert body == b'hello world'
    def test_translate_request_with_large_body(self):
        # Multi-part bodies are concatenated while 'more_body' is true;
        # a non-http.request event terminates body collection early.
        receive = AsyncMock(
            side_effect=[
                {'type': 'http.request', 'body': b'hello ', 'more_body': True},
                {'type': 'http.request', 'body': b'world', 'more_body': True},
                {'type': 'foo.bar'},  # should stop parsing here
                {'type': 'http.request', 'body': b'!!!'},
            ]
        )
        send = AsyncMock()
        environ = _run(
            async_asgi.translate_request(
                {
                    'type': 'http',
                    'method': 'PUT',
                    'headers': [
                        (b'a', b'b'),
                        (b'c-c', b'd'),
                        (b'c_c', b'e'),
                        (b'content-type', b'application/json'),
                        (b'content-length', b'123'),
                    ],
                    'path': '/foo/bar',
                    'query_string': b'baz=1',
                },
                receive,
                send,
            )
        )
        expected_environ = {
            'REQUEST_METHOD': 'PUT',
            'PATH_INFO': '/foo/bar',
            'QUERY_STRING': 'baz=1',
            'CONTENT_TYPE': 'application/json',
            'CONTENT_LENGTH': '123',
            'HTTP_A': 'b',
            # 'HTTP_C_C': 'd,e',
            'RAW_URI': '/foo/bar?baz=1',
            'SERVER_PROTOCOL': 'HTTP/1.1',
            'asgi.receive': receive,
            'asgi.send': send,
        }
        for k, v in expected_environ.items():
            assert v == environ[k]
        assert environ['HTTP_C_C'] == 'd,e' or environ['HTTP_C_C'] == 'e,d'
        body = _run(environ['wsgi.input'].read())
        assert body == b'hello world'
    def test_translate_websocket_request(self):
        # WebSocket scopes are translated without emitting any response.
        receive = AsyncMock(return_value={'type': 'websocket.connect'})
        send = AsyncMock()
        _run(
            async_asgi.translate_request(
                {
                    'type': 'websocket',
                    'headers': [
                        (b'a', b'b'),
                        (b'c-c', b'd'),
                        (b'c_c', b'e'),
                        (b'content-type', b'application/json'),
                        (b'content-length', b'123'),
                    ],
                    'path': '/foo/bar',
                    'query_string': b'baz=1',
                },
                receive,
                send,
            )
        )
        send.mock.assert_not_called()
    def test_translate_unknown_request(self):
        # An unrecognized first event yields an empty environ.
        receive = AsyncMock(return_value={'type': 'http.foo'})
        send = AsyncMock()
        environ = _run(
            async_asgi.translate_request(
                {'type': 'http', 'path': '/foo/bar', 'query_string': b'baz=1'},
                receive,
                send,
            )
        )
        assert environ == {}
    def test_make_response(self):
        # HTTP responses become a start frame plus a body frame.
        environ = {'asgi.send': AsyncMock(), 'asgi.scope': {'type': 'http'}}
        _run(
            async_asgi.make_response(
                '202 ACCEPTED', [('foo', 'bar')], b'payload', environ
            )
        )
        environ['asgi.send'].mock.assert_any_call(
            {
                'type': 'http.response.start',
                'status': 202,
                'headers': [(b'foo', b'bar')],
            }
        )
        environ['asgi.send'].mock.assert_any_call(
            {'type': 'http.response.body', 'body': b'payload'}
        )
    def test_make_response_websocket_accept(self):
        # A 200 on a websocket scope means "accept the connection".
        environ = {
            'asgi.send': AsyncMock(),
            'asgi.scope': {'type': 'websocket'},
        }
        _run(
            async_asgi.make_response(
                '200 OK', [('foo', 'bar')], b'payload', environ
            )
        )
        environ['asgi.send'].mock.assert_called_with(
            {'type': 'websocket.accept', 'headers': [(b'foo', b'bar')]}
        )
    def test_make_response_websocket_reject(self):
        # A non-200 status closes the websocket, with the payload as reason.
        environ = {
            'asgi.send': AsyncMock(),
            'asgi.scope': {'type': 'websocket'},
        }
        _run(
            async_asgi.make_response(
                '401 UNAUTHORIZED', [('foo', 'bar')], b'payload', environ
            )
        )
        environ['asgi.send'].mock.assert_called_with(
            {'type': 'websocket.close', 'reason': 'payload'}
        )
    def test_make_response_websocket_reject_no_payload(self):
        # Without a payload the close frame carries no reason.
        environ = {
            'asgi.send': AsyncMock(),
            'asgi.scope': {'type': 'websocket'},
        }
        _run(
            async_asgi.make_response(
                '401 UNAUTHORIZED', [('foo', 'bar')], None, environ
            )
        )
        environ['asgi.send'].mock.assert_called_with(
            {'type': 'websocket.close'}
        )
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,432
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/common/test_packet.py
|
import unittest
import pytest
from engineio import packet
class TestPacket(unittest.TestCase):
    """Round-trip encode/decode tests for ``engineio.packet.Packet``,
    covering text, binary, base64, JSON and numeric payloads."""
    def test_encode_default_packet(self):
        # A Packet with no arguments is a NOOP with no data.
        pkt = packet.Packet()
        assert pkt.packet_type == packet.NOOP
        assert pkt.data is None
        assert not pkt.binary
        assert pkt.encode() == '6'
    def test_decode_default_packet(self):
        pkt = packet.Packet(encoded_packet='6')
        assert pkt.encode() == '6'
    def test_encode_text_packet(self):
        data = 'text'
        pkt = packet.Packet(packet.MESSAGE, data=data)
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == data
        assert not pkt.binary
        assert pkt.encode() == '4text'
    def test_decode_text_packet(self):
        # A bytes-encoded text packet re-encodes as bytes.
        pkt = packet.Packet(encoded_packet=b'4text')
        assert pkt.encode() == b'4text'
    def test_encode_empty_text_packet(self):
        data = ''
        pkt = packet.Packet(packet.MESSAGE, data=data)
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == data
        assert not pkt.binary
        assert pkt.encode() == '4'
    def test_decode_empty_text_packet(self):
        pkt = packet.Packet(encoded_packet=b'4')
        assert pkt.encode() == b'4'
    def test_encode_binary_packet(self):
        # Binary packets carry the raw bytes with no type prefix.
        pkt = packet.Packet(packet.MESSAGE, data=b'\x01\x02\x03')
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == b'\x01\x02\x03'
        assert pkt.binary
        assert pkt.encode() == b'\x01\x02\x03'
    def test_encode_binary_bytearray_packet(self):
        pkt = packet.Packet(packet.MESSAGE, data=bytearray(b'\x01\x02\x03'))
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == b'\x01\x02\x03'
        assert pkt.binary
        assert pkt.encode() == b'\x01\x02\x03'
    def test_encode_binary_b64_packet(self):
        # b64=True produces the 'b' prefix plus base64 text.
        pkt = packet.Packet(packet.MESSAGE, data=b'\x01\x02\x03\x04')
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == b'\x01\x02\x03\x04'
        assert pkt.binary
        assert pkt.encode(b64=True) == 'bAQIDBA=='
    def test_encode_empty_binary_packet(self):
        pkt = packet.Packet(packet.MESSAGE, data=b'')
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == b''
        assert pkt.binary
        assert pkt.encode() == b''
    def test_decode_binary_packet(self):
        pkt = packet.Packet(encoded_packet=b'\x04\x01\x02\x03')
        assert pkt.encode() == b'\x04\x01\x02\x03'
    def test_decode_binary_bytearray_packet(self):
        pkt = packet.Packet(encoded_packet=bytearray(b'\x04\x01\x02\x03'))
        assert pkt.encode() == b'\x04\x01\x02\x03'
    def test_decode_binary_b64_packet(self):
        pkt = packet.Packet(encoded_packet='bBAECAw==')
        assert pkt.encode() == b'\x04\x01\x02\x03'
    def test_decode_empty_binary_packet(self):
        pkt = packet.Packet(encoded_packet=b'')
        assert pkt.encode() == b''
    def test_encode_json_packet(self):
        # Dict key order is not guaranteed, so accept both orderings.
        pkt = packet.Packet(packet.MESSAGE, data={'a': 123, 'b': '456'})
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == {'a': 123, 'b': '456'}
        assert not pkt.binary
        assert pkt.encode() in [
            '4{"a":123,"b":"456"}',
            '4{"b":"456","a":123}',
        ]
    def test_decode_json_packet(self):
        pkt = packet.Packet(encoded_packet='4{"a":123,"b":"456"}')
        assert pkt.encode() in [
            '4{"a":123,"b":"456"}',
            '4{"b":"456","a":123}',
        ]
    def test_decode_json_packet_long_int(self):
        # Very long integers are parsed up to a size limit; beyond it the
        # payload is kept as a plain string instead of JSON.
        pkt = packet.Packet(encoded_packet='4{"a":' + '1' * 100 + '}')
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == {'a': int('1' * 100)}
        pkt = packet.Packet(encoded_packet='4{"a":' + '1' * 101 + '}')
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == '{"a":' + '1' * 101 + '}'
    def test_encode_number_packet(self):
        pkt = packet.Packet(packet.MESSAGE, data=123)
        assert pkt.packet_type == packet.MESSAGE
        assert pkt.data == 123
        assert not pkt.binary
        assert pkt.encode() == '4123'
    def test_decode_number_packet(self):
        pkt = packet.Packet(encoded_packet='4123')
        assert pkt.packet_type == packet.MESSAGE
        # integer payloads are parsed as strings, see
        # https://github.com/miguelgrinberg/python-engineio/issues/75
        # for background on this decision
        assert pkt.data == '123'
        assert not pkt.binary
        assert pkt.encode() == '4123'
    def test_binary_non_message_packet(self):
        # Only MESSAGE packets may carry binary data.
        with pytest.raises(ValueError):
            packet.Packet(packet.NOOP, b'\x01\x02\x03')
    def test_decode_invalid_empty_text_packet(self):
        with pytest.raises(ValueError):
            packet.Packet(encoded_packet='')
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,433
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/client/asyncio/simple_client.py
|
import asyncio
import signal
import engineio
loop = asyncio.get_event_loop()
eio = engineio.AsyncClient()
exit_event = asyncio.Event()
original_signal_handler = None
async def send_hello():
    """Send a greeting to the server every 5 seconds until exit is set."""
    message = 'Hello from client side!'
    while not exit_event.is_set():
        # Log the same variable that is sent, so the printed text can
        # never drift out of sync with the actual payload.
        print('sending: ' + message)
        await eio.send(message)
        try:
            # Wait up to 5 seconds, but wake immediately on exit_event.
            await asyncio.wait_for(exit_event.wait(), timeout=5)
        except asyncio.TimeoutError:
            pass
    await eio.disconnect()
@eio.on('connect')
def on_connect():
    """Start the background greeter once the connection is up."""
    print('connected to server')
    eio.start_background_task(send_hello)
@eio.on('message')
def on_message(data):
    """Print every message received from the server."""
    print('received: ' + str(data))
def signal_handler(sig, frame):
    """Handle Ctrl-C: request shutdown, then chain to the previous
    handler if one was installed."""
    exit_event.set()
    print('exiting')
    if callable(original_signal_handler):
        original_signal_handler(sig, frame)
async def start_client():
    """Connect to the local server and block until disconnected."""
    await eio.connect('http://localhost:5000')
    await eio.wait()
if __name__ == '__main__':
    original_signal_handler = signal.signal(signal.SIGINT, signal_handler)
    loop.run_until_complete(start_client())
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,434
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/server/aiohttp/simple.py
|
from aiohttp import web
import engineio
# Engine.IO server wired into an aiohttp web application.
eio = engineio.AsyncServer(async_mode='aiohttp')
app = web.Application()
eio.attach(app)
async def index(request):
    """Serve the demo page from the local simple.html file."""
    with open('simple.html') as f:
        return web.Response(text=f.read(), content_type='text/html')
@eio.on('connect')
def connect(sid, environ):
    # Log each new Engine.IO connection.
    print("connect ", sid)
@eio.on('message')
async def message(sid, data):
    # Echo an acknowledgment back to the sender.
    print('message from', sid, data)
    await eio.send(sid, 'Thank you for your message!')
@eio.on('disconnect')
def disconnect(sid):
    print('disconnect ', sid)
app.router.add_static('/static', 'static')
app.router.add_get('/', index)
if __name__ == '__main__':
    web.run_app(app)
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,435
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/client/threads/simple_client.py
|
import signal
import threading
import engineio
eio = engineio.Client()
exit_event = threading.Event()
original_signal_handler = None
def send_hello():
    """Send a greeting to the server every 5 seconds until exit is set."""
    message = 'Hello from client side!'
    while not exit_event.is_set():
        # Log the same variable that is sent, so the printed text can
        # never drift out of sync with the actual payload.
        print('sending: ' + message)
        eio.send(message)
        # Sleep up to 5 seconds, waking immediately if exit is requested.
        exit_event.wait(5)
    eio.disconnect()
@eio.on('connect')
def on_connect():
    """Start the background greeter once the connection is up."""
    print('connected to server')
    eio.start_background_task(send_hello)
@eio.on('message')
def on_message(data):
    """Print every message received from the server."""
    print('received: ' + str(data))
def signal_handler(sig, frame):
    """Handle Ctrl-C: request shutdown, then chain to the previous
    handler if one was installed."""
    exit_event.set()
    print('exiting')
    if callable(original_signal_handler):
        original_signal_handler(sig, frame)
if __name__ == '__main__':
    original_signal_handler = signal.signal(signal.SIGINT, signal_handler)
    eio.connect('http://localhost:5000')
    eio.wait()
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,436
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/server/asgi/latency.py
|
import uvicorn
import engineio
# Engine.IO server exposed as an ASGI app, served by uvicorn.
eio = engineio.AsyncServer(async_mode='asgi')
app = engineio.ASGIApp(eio, static_files={
    '/': 'latency.html',
    '/static': 'static',
})
@eio.on('message')
async def message(sid, data):
    # Reply 'pong' to every message; the page measures the round trip.
    await eio.send(sid, 'pong')
if __name__ == '__main__':
    uvicorn.run(app, host='127.0.0.1', port=5000)
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,437
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/common/test_async_eventlet.py
|
import logging
import unittest
from unittest import mock
from engineio.async_drivers import eventlet as async_eventlet
import pytest
class TestAsyncEventlet(unittest.TestCase):
    """Tests for the eventlet WSGI/WebSocket driver wrapper."""
    def setUp(self):
        # Reset the engineio logger level changed by other tests.
        logging.getLogger('engineio').setLevel(logging.NOTSET)
    def test_bad_environ(self):
        # An environ without eventlet's hijack keys must be rejected.
        wsgi = async_eventlet.WebSocketWSGI(None, mock.MagicMock())
        environ = {'foo': 'bar'}
        start_response = 'bar'
        with pytest.raises(RuntimeError):
            wsgi(environ, start_response)
    @mock.patch(
        'engineio.async_drivers.eventlet._WebSocketWSGI.__call__',
        return_value='data',
    )
    def test_wsgi_call(self, _WebSocketWSGI):
        # With the inner _WebSocketWSGI call stubbed out, the wrapper
        # must pass the call through and return the inner result.
        _WebSocketWSGI.__call__ = lambda e, s: 'data'
        environ = {'eventlet.input': mock.MagicMock()}
        start_response = 'bar'
        wsgi = async_eventlet.WebSocketWSGI(None, mock.MagicMock())
        assert wsgi(environ, start_response) == 'data'
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,438
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/server/sanic/latency.py
|
from sanic import Sanic
from sanic.response import html
import engineio
# Engine.IO server attached to a Sanic application.
eio = engineio.AsyncServer(async_mode='sanic')
app = Sanic(name='latency')
eio.attach(app)
@app.route('/')
async def index(request):
    """Serve the latency demo page."""
    with open('latency.html') as f:
        return html(f.read())
@eio.on('message')
async def message(sid, data):
    # Reply 'pong' to every message; the page measures the round trip.
    await eio.send(sid, 'pong')
app.static('/static', './static')
if __name__ == '__main__':
    app.run()
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,439
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/common/test_server.py
|
import gzip
import importlib
import io
import logging
import sys
import time
import unittest
from unittest import mock
import zlib
import pytest
from engineio import exceptions
from engineio import json
from engineio import packet
from engineio import payload
from engineio import server
original_import_module = importlib.import_module
def _mock_import(module, *args, **kwargs):
if module.startswith('engineio.'):
return original_import_module(module, *args, **kwargs)
return module
class TestServer(unittest.TestCase):
_mock_async = mock.MagicMock()
_mock_async._async = {
'thread': 't',
'queue': 'q',
'queue_empty': RuntimeError,
'websocket': 'w',
}
def _get_mock_socket(self):
mock_socket = mock.MagicMock()
mock_socket.closed = False
mock_socket.closing = False
mock_socket.upgraded = False
mock_socket.session = {}
return mock_socket
@classmethod
def setUpClass(cls):
server.Server._default_monitor_clients = False
@classmethod
def tearDownClass(cls):
server.Server._default_monitor_clients = True
def setUp(self):
logging.getLogger('engineio').setLevel(logging.NOTSET)
def tearDown(self):
# restore JSON encoder, in case a test changed it
packet.Packet.json = json
def test_is_asyncio_based(self):
s = server.Server()
assert not s.is_asyncio_based()
def test_async_modes(self):
s = server.Server()
assert s.async_modes() == [
'eventlet',
'gevent_uwsgi',
'gevent',
'threading',
]
def test_create(self):
kwargs = {
'ping_timeout': 1,
'ping_interval': 2,
'max_http_buffer_size': 3,
'allow_upgrades': False,
'http_compression': False,
'compression_threshold': 4,
'cookie': 'foo',
'cors_allowed_origins': ['foo', 'bar', 'baz'],
'cors_credentials': False,
'async_handlers': False,
}
s = server.Server(**kwargs)
for arg in kwargs.keys():
assert getattr(s, arg) == kwargs[arg]
assert s.ping_interval_grace_period == 0
def test_create_with_grace_period(self):
s = server.Server(ping_interval=(1, 2))
assert s.ping_interval == 1
assert s.ping_interval_grace_period == 2
def test_create_ignores_kwargs(self):
server.Server(foo='bar') # this should not raise
def test_async_mode_threading(self):
sys.modules['simple_websocket'] = mock.MagicMock()
s = server.Server(async_mode='threading')
assert s.async_mode == 'threading'
import threading
from engineio.async_drivers import threading as async_threading
import queue
assert s._async['thread'] == threading.Thread
assert s._async['queue'] == queue.Queue
assert s._async['websocket'] == async_threading.WebSocketWSGI
del sys.modules['simple_websocket']
del sys.modules['engineio.async_drivers.threading']
def test_async_mode_threading_without_websocket(self):
s = server.Server(async_mode='threading')
assert s.async_mode == 'threading'
import threading
import queue
assert s._async['thread'] == threading.Thread
assert s._async['queue'] == queue.Queue
assert s._async['websocket'] is None
del sys.modules['engineio.async_drivers.threading']
def test_async_mode_eventlet(self):
s = server.Server(async_mode='eventlet')
assert s.async_mode == 'eventlet'
from eventlet.green import threading
from eventlet import queue
from engineio.async_drivers import eventlet as async_eventlet
assert s._async['thread'] == threading.Thread
assert s._async['queue'] == queue.Queue
assert s._async['websocket'] == async_eventlet.WebSocketWSGI
@mock.patch('importlib.import_module', side_effect=_mock_import)
def test_async_mode_gevent_uwsgi(self, import_module):
sys.modules['gevent'] = mock.MagicMock()
sys.modules['gevent'].queue = mock.MagicMock()
sys.modules['gevent.queue'] = sys.modules['gevent'].queue
sys.modules['gevent.queue'].JoinableQueue = 'foo'
sys.modules['gevent.queue'].Empty = RuntimeError
sys.modules['gevent.event'] = mock.MagicMock()
sys.modules['gevent.event'].Event = 'bar'
sys.modules['uwsgi'] = mock.MagicMock()
s = server.Server(async_mode='gevent_uwsgi')
assert s.async_mode == 'gevent_uwsgi'
from engineio.async_drivers import gevent_uwsgi as async_gevent_uwsgi
assert s._async['thread'] == async_gevent_uwsgi.Thread
assert s._async['queue'] == 'foo'
assert s._async['queue_empty'] == RuntimeError
assert s._async['event'] == 'bar'
assert s._async['websocket'] == async_gevent_uwsgi.uWSGIWebSocket
del sys.modules['gevent']
del sys.modules['gevent.queue']
del sys.modules['gevent.event']
del sys.modules['uwsgi']
del sys.modules['engineio.async_drivers.gevent_uwsgi']
@mock.patch('importlib.import_module', side_effect=_mock_import)
def test_async_mode_gevent_uwsgi_without_uwsgi(self, import_module):
sys.modules['gevent'] = mock.MagicMock()
sys.modules['gevent'].queue = mock.MagicMock()
sys.modules['gevent.queue'] = sys.modules['gevent'].queue
sys.modules['gevent.queue'].JoinableQueue = 'foo'
sys.modules['gevent.queue'].Empty = RuntimeError
sys.modules['gevent.event'] = mock.MagicMock()
sys.modules['gevent.event'].Event = 'bar'
sys.modules['uwsgi'] = None
with pytest.raises(ValueError):
server.Server(async_mode='gevent_uwsgi')
del sys.modules['gevent']
del sys.modules['gevent.queue']
del sys.modules['gevent.event']
del sys.modules['uwsgi']
@mock.patch('importlib.import_module', side_effect=_mock_import)
def test_async_mode_gevent_uwsgi_without_websocket(self, import_module):
sys.modules['gevent'] = mock.MagicMock()
sys.modules['gevent'].queue = mock.MagicMock()
sys.modules['gevent.queue'] = sys.modules['gevent'].queue
sys.modules['gevent.queue'].JoinableQueue = 'foo'
sys.modules['gevent.queue'].Empty = RuntimeError
sys.modules['gevent.event'] = mock.MagicMock()
sys.modules['gevent.event'].Event = 'bar'
sys.modules['uwsgi'] = mock.MagicMock()
del sys.modules['uwsgi'].websocket_handshake
s = server.Server(async_mode='gevent_uwsgi')
assert s.async_mode == 'gevent_uwsgi'
from engineio.async_drivers import gevent_uwsgi as async_gevent_uwsgi
assert s._async['thread'] == async_gevent_uwsgi.Thread
assert s._async['queue'] == 'foo'
assert s._async['queue_empty'] == RuntimeError
assert s._async['event'] == 'bar'
assert s._async['websocket'] is None
del sys.modules['gevent']
del sys.modules['gevent.queue']
del sys.modules['gevent.event']
del sys.modules['uwsgi']
del sys.modules['engineio.async_drivers.gevent_uwsgi']
@mock.patch('importlib.import_module', side_effect=_mock_import)
def test_async_mode_gevent(self, import_module):
sys.modules['gevent'] = mock.MagicMock()
sys.modules['gevent'].queue = mock.MagicMock()
sys.modules['gevent.queue'] = sys.modules['gevent'].queue
sys.modules['gevent.queue'].JoinableQueue = 'foo'
sys.modules['gevent.queue'].Empty = RuntimeError
sys.modules['gevent.event'] = mock.MagicMock()
sys.modules['gevent.event'].Event = 'bar'
sys.modules['geventwebsocket'] = 'geventwebsocket'
s = server.Server(async_mode='gevent')
assert s.async_mode == 'gevent'
from engineio.async_drivers import gevent as async_gevent
assert s._async['thread'] == async_gevent.Thread
assert s._async['queue'] == 'foo'
assert s._async['queue_empty'] == RuntimeError
assert s._async['event'] == 'bar'
assert s._async['websocket'] == async_gevent.WebSocketWSGI
del sys.modules['gevent']
del sys.modules['gevent.queue']
del sys.modules['gevent.event']
del sys.modules['geventwebsocket']
del sys.modules['engineio.async_drivers.gevent']
@mock.patch('importlib.import_module', side_effect=_mock_import)
def test_async_mode_gevent_without_websocket(self, import_module):
sys.modules['gevent'] = mock.MagicMock()
sys.modules['gevent'].queue = mock.MagicMock()
sys.modules['gevent.queue'] = sys.modules['gevent'].queue
sys.modules['gevent.queue'].JoinableQueue = 'foo'
sys.modules['gevent.queue'].Empty = RuntimeError
sys.modules['gevent.event'] = mock.MagicMock()
sys.modules['gevent.event'].Event = 'bar'
sys.modules['geventwebsocket'] = None
s = server.Server(async_mode='gevent')
assert s.async_mode == 'gevent'
from engineio.async_drivers import gevent as async_gevent
assert s._async['thread'] == async_gevent.Thread
assert s._async['queue'] == 'foo'
assert s._async['queue_empty'] == RuntimeError
assert s._async['event'] == 'bar'
assert s._async['websocket'] is None
del sys.modules['gevent']
del sys.modules['gevent.queue']
del sys.modules['gevent.event']
del sys.modules['geventwebsocket']
del sys.modules['engineio.async_drivers.gevent']
@unittest.skipIf(sys.version_info < (3, 5), 'only for Python 3.5+')
@mock.patch('importlib.import_module', side_effect=_mock_import)
def test_async_mode_aiohttp(self, import_module):
sys.modules['aiohttp'] = mock.MagicMock()
with pytest.raises(ValueError):
server.Server(async_mode='aiohttp')
@mock.patch('importlib.import_module', side_effect=[ImportError])
def test_async_mode_invalid(self, import_module):
with pytest.raises(ValueError):
server.Server(async_mode='foo')
@mock.patch('importlib.import_module', side_effect=[_mock_async])
def test_async_mode_auto_eventlet(self, import_module):
s = server.Server()
assert s.async_mode == 'eventlet'
@mock.patch(
'importlib.import_module', side_effect=[ImportError, _mock_async]
)
def test_async_mode_auto_gevent_uwsgi(self, import_module):
s = server.Server()
assert s.async_mode == 'gevent_uwsgi'
@mock.patch(
'importlib.import_module',
side_effect=[ImportError, ImportError, _mock_async],
)
def test_async_mode_auto_gevent(self, import_module):
s = server.Server()
assert s.async_mode == 'gevent'
@mock.patch(
'importlib.import_module',
side_effect=[ImportError, ImportError, ImportError, _mock_async],
)
def test_async_mode_auto_threading(self, import_module):
s = server.Server()
assert s.async_mode == 'threading'
def test_generate_id(self):
s = server.Server()
assert s.generate_id() != s.generate_id()
def test_on_event(self):
s = server.Server()
@s.on('connect')
def foo():
pass
s.on('disconnect', foo)
assert s.handlers['connect'] == foo
assert s.handlers['disconnect'] == foo
def test_on_event_invalid(self):
s = server.Server()
with pytest.raises(ValueError):
s.on('invalid')
    def test_trigger_event(self):
        # Handlers can run synchronously (return value passed through) or
        # asynchronously (a joinable task object is returned instead).
        s = server.Server()
        f = {}
        @s.on('connect')
        def foo(sid, environ):
            return sid + environ
        @s.on('message')
        def bar(sid, data):
            f['bar'] = sid + data
            return 'bar'
        r = s._trigger_event('connect', 1, 2, run_async=False)
        assert r == 3
        # with run_async=True the handler's result is observable only via
        # its side effect; the return value is the task to join
        r = s._trigger_event('message', 3, 4, run_async=True)
        r.join()
        assert f['bar'] == 7
        r = s._trigger_event('message', 5, 6)
        assert r == 'bar'
def test_trigger_event_error(self):
s = server.Server()
@s.on('connect')
def foo(sid, environ):
return 1 / 0
@s.on('message')
def bar(sid, data):
return 1 / 0
r = s._trigger_event('connect', 1, 2, run_async=False)
assert not r
r = s._trigger_event('message', 3, 4, run_async=False)
assert r is None
def test_session(self):
s = server.Server()
mock_socket = self._get_mock_socket()
s.sockets['foo'] = mock_socket
with s.session('foo') as session:
assert session == {}
session['username'] = 'bar'
assert s.get_session('foo') == {'username': 'bar'}
def test_close_one_socket(self):
s = server.Server()
mock_socket = self._get_mock_socket()
s.sockets['foo'] = mock_socket
s.disconnect('foo')
assert mock_socket.close.call_count == 1
assert 'foo' not in s.sockets
def test_close_all_sockets(self):
s = server.Server()
mock_sockets = {}
for sid in ['foo', 'bar', 'baz']:
mock_sockets[sid] = self._get_mock_socket()
s.sockets[sid] = mock_sockets[sid]
s.disconnect()
for socket in mock_sockets.values():
assert socket.close.call_count == 1
assert s.sockets == {}
    def test_upgrades(self):
        # A polling socket is offered the websocket upgrade; once upgraded,
        # or when upgrades are disabled server-wide, nothing is offered.
        s = server.Server()
        s.sockets['foo'] = self._get_mock_socket()
        assert s._upgrades('foo', 'polling') == ['websocket']
        assert s._upgrades('foo', 'websocket') == []
        s.sockets['foo'].upgraded = True
        assert s._upgrades('foo', 'polling') == []
        assert s._upgrades('foo', 'websocket') == []
        s.allow_upgrades = False
        # NOTE(review): redundant — upgraded was already set to True above
        s.sockets['foo'].upgraded = True
        assert s._upgrades('foo', 'polling') == []
        assert s._upgrades('foo', 'websocket') == []
def test_transport(self):
s = server.Server()
s.sockets['foo'] = self._get_mock_socket()
s.sockets['foo'].upgraded = False
s.sockets['bar'] = self._get_mock_socket()
s.sockets['bar'].upgraded = True
assert s.transport('foo') == 'polling'
assert s.transport('bar') == 'websocket'
def test_bad_session(self):
s = server.Server()
s.sockets['foo'] = 'client'
with pytest.raises(KeyError):
s._get_socket('bar')
def test_closed_socket(self):
s = server.Server()
s.sockets['foo'] = self._get_mock_socket()
s.sockets['foo'].closed = True
with pytest.raises(KeyError):
s._get_socket('foo')
def test_jsonp_with_bad_index(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&j=abc'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
def test_jsonp_index(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&j=233'}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
assert r[0].startswith(b'___eio[233]("')
assert r[0].endswith(b'");')
def test_connect(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
assert len(s.sockets) == 1
assert start_response.call_count == 1
assert start_response.call_args[0][0] == '200 OK'
assert (
'Content-Type',
'text/plain; charset=UTF-8',
) in start_response.call_args[0][1]
assert len(r) == 1
packets = payload.Payload(encoded_payload=r[0].decode('utf-8')).packets
assert len(packets) == 1
assert packets[0].packet_type == packet.OPEN
assert 'upgrades' in packets[0].data
assert packets[0].data['upgrades'] == ['websocket']
assert 'sid' in packets[0].data
def test_connect_no_upgrades(self):
s = server.Server(allow_upgrades=False)
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
packets = payload.Payload(encoded_payload=r[0].decode('utf-8')).packets
assert packets[0].data['upgrades'] == []
def test_connect_bad_eio_version(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=1'}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
assert start_response.call_args[0][0], '400 BAD REQUEST'
assert b'unsupported version' in r[0]
def test_connect_custom_ping_times(self):
s = server.Server(ping_timeout=123, ping_interval=456)
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
packets = payload.Payload(encoded_payload=r[0].decode('utf-8')).packets
assert packets[0].data['pingTimeout'] == 123000
assert packets[0].data['pingInterval'] == 456000
@mock.patch(
'engineio.socket.Socket.poll', side_effect=exceptions.QueueEmpty
)
def test_connect_bad_poll(self, poll):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
@mock.patch(
'engineio.socket.Socket',
return_value=mock.MagicMock(connected=False, closed=False),
)
def test_connect_transport_websocket(self, Socket):
s = server.Server()
s.generate_id = mock.MagicMock(return_value='123')
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&transport=websocket',
'HTTP_UPGRADE': 'websocket',
}
start_response = mock.MagicMock()
# force socket to stay open, so that we can check it later
Socket().closed = False
s.handle_request(environ, start_response)
assert s.sockets['123'].send.call_args[0][0].packet_type == packet.OPEN
@mock.patch(
'engineio.socket.Socket',
return_value=mock.MagicMock(connected=False, closed=False),
)
def test_http_upgrade_case_insensitive(self, Socket):
s = server.Server()
s.generate_id = mock.MagicMock(return_value='123')
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&transport=websocket',
'HTTP_UPGRADE': 'WebSocket',
}
start_response = mock.MagicMock()
# force socket to stay open, so that we can check it later
Socket().closed = False
s.handle_request(environ, start_response)
assert s.sockets['123'].send.call_args[0][0].packet_type == packet.OPEN
@mock.patch(
'engineio.socket.Socket',
return_value=mock.MagicMock(connected=False, closed=False),
)
def test_connect_transport_websocket_closed(self, Socket):
s = server.Server()
s.generate_id = mock.MagicMock(return_value='123')
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&transport=websocket',
'HTTP_UPGRADE': 'websocket',
}
start_response = mock.MagicMock()
def mock_handle(environ, start_response):
s.sockets['123'].closed = True
Socket().handle_get_request = mock_handle
s.handle_request(environ, start_response)
assert '123' not in s.sockets
def test_connect_transport_invalid(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&transport=foo'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
def test_connect_transport_websocket_without_upgrade(self):
s = server.Server()
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&transport=websocket',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
def test_connect_cors_headers(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Credentials', 'true') in headers
def test_connect_cors_allowed_origin(self):
s = server.Server(cors_allowed_origins=['a', 'b'])
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'b',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'b') in headers
def test_connect_cors_allowed_origin_with_callable(self):
def cors(origin):
return origin == 'a'
s = server.Server(cors_allowed_origins=cors)
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'a',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'a') in headers
environ['HTTP_ORIGIN'] = 'b'
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
def test_connect_cors_not_allowed_origin(self):
s = server.Server(cors_allowed_origins=['a', 'b'])
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'c',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'c') not in headers
assert ('Access-Control-Allow-Origin', '*') not in headers
def test_connect_cors_headers_all_origins(self):
s = server.Server(cors_allowed_origins='*')
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'foo',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'foo') in headers
assert ('Access-Control-Allow-Credentials', 'true') in headers
def test_connect_cors_headers_one_origin(self):
s = server.Server(cors_allowed_origins='a')
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'a',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'a') in headers
assert ('Access-Control-Allow-Credentials', 'true') in headers
def test_connect_cors_headers_one_origin_not_allowed(self):
s = server.Server(cors_allowed_origins='a')
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'b',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'b') not in headers
assert ('Access-Control-Allow-Origin', '*') not in headers
def test_connect_cors_headers_default_origin(self):
s = server.Server()
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'wsgi.url_scheme': 'http',
'HTTP_HOST': 'foo',
'HTTP_ORIGIN': 'http://foo',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'http://foo') in headers
def test_connect_cors_headers_default_origin_proxy_server(self):
s = server.Server()
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'wsgi.url_scheme': 'http',
'HTTP_HOST': 'foo',
'HTTP_ORIGIN': 'https://foo',
'HTTP_X_FORWARDED_PROTO': 'https, ftp',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'https://foo') in headers
def test_connect_cors_headers_default_origin_proxy_server2(self):
s = server.Server()
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'wsgi.url_scheme': 'http',
'HTTP_HOST': 'foo',
'HTTP_ORIGIN': 'https://bar',
'HTTP_X_FORWARDED_PROTO': 'https, ftp',
'HTTP_X_FORWARDED_HOST': 'bar , baz',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Origin', 'https://bar') in headers
def test_connect_cors_no_credentials(self):
s = server.Server(cors_credentials=False)
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Credentials', 'true') not in headers
def test_cors_options(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'OPTIONS', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert (
'Access-Control-Allow-Methods',
'OPTIONS, GET, POST',
) in headers
def test_cors_request_headers(self):
s = server.Server()
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ACCESS_CONTROL_REQUEST_HEADERS': 'Foo, Bar',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
assert ('Access-Control-Allow-Headers', 'Foo, Bar') in headers
def test_connect_cors_disabled(self):
s = server.Server(cors_allowed_origins=[])
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4',
'HTTP_ORIGIN': 'http://foo',
}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
headers = start_response.call_args[0][1]
for header in headers:
assert not header[0].startswith('Access-Control-')
def test_connect_cors_default_no_origin(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
headers = start_response.call_args[0][1]
for header in headers:
assert header[0] != 'Access-Control-Allow-Origin'
def test_connect_cors_all_no_origin(self):
s = server.Server(cors_allowed_origins='*')
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
headers = start_response.call_args[0][1]
for header in headers:
assert header[0] != 'Access-Control-Allow-Origin'
def test_connect_cors_disabled_no_origin(self):
s = server.Server(cors_allowed_origins=[])
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
headers = start_response.call_args[0][1]
for header in headers:
assert header[0] != 'Access-Control-Allow-Origin'
def test_connect_event(self):
s = server.Server()
s.generate_id = mock.MagicMock(return_value='123')
mock_event = mock.MagicMock(return_value=None)
s.on('connect')(mock_event)
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
mock_event.assert_called_once_with('123', environ)
assert len(s.sockets) == 1
def test_connect_event_rejects(self):
s = server.Server()
s.generate_id = mock.MagicMock(return_value='123')
mock_event = mock.MagicMock(return_value=False)
s.on('connect')(mock_event)
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
ret = s.handle_request(environ, start_response)
assert len(s.sockets) == 0
assert start_response.call_args[0][0] == '401 UNAUTHORIZED'
assert ret == [b'"Unauthorized"']
def test_connect_event_rejects_with_message(self):
s = server.Server()
s.generate_id = mock.MagicMock(return_value='123')
mock_event = mock.MagicMock(return_value='not allowed')
s.on('connect')(mock_event)
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
ret = s.handle_request(environ, start_response)
assert len(s.sockets) == 0
assert start_response.call_args[0][0] == '401 UNAUTHORIZED'
assert ret == [b'"not allowed"']
def test_method_not_found(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'PUT', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '405 METHOD NOT FOUND'
def test_get_request_with_bad_sid(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
def test_post_request_with_bad_sid(self):
s = server.Server()
environ = {'REQUEST_METHOD': 'POST', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
def test_send(self):
s = server.Server()
mock_socket = self._get_mock_socket()
s.sockets['foo'] = mock_socket
s.send('foo', 'hello')
assert mock_socket.send.call_count == 1
assert mock_socket.send.call_args[0][0].packet_type == packet.MESSAGE
assert mock_socket.send.call_args[0][0].data == 'hello'
def test_send_unknown_socket(self):
s = server.Server()
# just ensure no exceptions are raised
s.send('foo', 'hello')
def test_get_request(self):
s = server.Server()
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
assert len(r) == 1
packets = payload.Payload(encoded_payload=r[0].decode('utf-8')).packets
assert len(packets) == 1
assert packets[0].packet_type == packet.MESSAGE
def test_get_request_custom_response(self):
s = server.Server()
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(side_effect=['resp'])
s.sockets['foo'] = mock_socket
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
assert s.handle_request(environ, start_response) == 'resp'
def test_get_request_closes_socket(self):
s = server.Server()
mock_socket = self._get_mock_socket()
def mock_get_request(*args, **kwargs):
mock_socket.closed = True
return 'resp'
mock_socket.handle_get_request = mock_get_request
s.sockets['foo'] = mock_socket
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
assert s.handle_request(environ, start_response) == 'resp'
assert 'foo' not in s.sockets
def test_get_request_error(self):
s = server.Server()
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
side_effect=[exceptions.QueueEmpty]
)
s.sockets['foo'] = mock_socket
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
assert len(s.sockets) == 0
def test_post_request(self):
s = server.Server()
mock_socket = self._get_mock_socket()
mock_socket.handle_post_request = mock.MagicMock()
s.sockets['foo'] = mock_socket
environ = {'REQUEST_METHOD': 'POST', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '200 OK'
def test_post_request_error(self):
s = server.Server()
mock_socket = self._get_mock_socket()
mock_socket.handle_post_request = mock.MagicMock(
side_effect=[exceptions.EngineIOError]
)
s.sockets['foo'] = mock_socket
environ = {'REQUEST_METHOD': 'POST', 'QUERY_STRING': 'EIO=4&sid=foo'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert start_response.call_args[0][0] == '400 BAD REQUEST'
assert 'foo' not in s.sockets
@staticmethod
def _gzip_decompress(b):
bytesio = io.BytesIO(b)
with gzip.GzipFile(fileobj=bytesio, mode='r') as gz:
return gz.read()
def test_gzip_compression(self):
s = server.Server(compression_threshold=0)
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&sid=foo',
'HTTP_ACCEPT_ENCODING': 'gzip,deflate',
}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
assert ('Content-Encoding', 'gzip') in start_response.call_args[0][1]
self._gzip_decompress(r[0])
def test_deflate_compression(self):
s = server.Server(compression_threshold=0)
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&sid=foo',
'HTTP_ACCEPT_ENCODING': 'deflate;q=1,gzip',
}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
assert ('Content-Encoding', 'deflate') in start_response.call_args[0][
1
]
zlib.decompress(r[0])
def test_gzip_compression_threshold(self):
s = server.Server(compression_threshold=1000)
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&sid=foo',
'HTTP_ACCEPT_ENCODING': 'gzip',
}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
for header, value in start_response.call_args[0][1]:
assert header != 'Content-Encoding'
with pytest.raises(IOError):
self._gzip_decompress(r[0])
def test_compression_disabled(self):
s = server.Server(http_compression=False, compression_threshold=0)
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&sid=foo',
'HTTP_ACCEPT_ENCODING': 'gzip',
}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
for header, value in start_response.call_args[0][1]:
assert header != 'Content-Encoding'
with pytest.raises(IOError):
self._gzip_decompress(r[0])
def test_compression_unknown(self):
s = server.Server(compression_threshold=0)
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&sid=foo',
'HTTP_ACCEPT_ENCODING': 'rar',
}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
for header, value in start_response.call_args[0][1]:
assert header != 'Content-Encoding'
with pytest.raises(IOError):
self._gzip_decompress(r[0])
def test_compression_no_encoding(self):
s = server.Server(compression_threshold=0)
mock_socket = self._get_mock_socket()
mock_socket.handle_get_request = mock.MagicMock(
return_value=[packet.Packet(packet.MESSAGE, data='hello')]
)
s.sockets['foo'] = mock_socket
environ = {
'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'EIO=4&sid=foo',
'HTTP_ACCEPT_ENCODING': '',
}
start_response = mock.MagicMock()
r = s.handle_request(environ, start_response)
for header, value in start_response.call_args[0][1]:
assert header != 'Content-Encoding'
with pytest.raises(IOError):
self._gzip_decompress(r[0])
def test_cookie(self):
s = server.Server(cookie='sid')
s.generate_id = mock.MagicMock(return_value='123')
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert ('Set-Cookie', 'sid=123; path=/; SameSite=Lax') \
in start_response.call_args[0][1]
def test_cookie_dict(self):
def get_path():
return '/a'
s = server.Server(cookie={
'name': 'test',
'path': get_path,
'SameSite': 'None',
'Secure': True,
'HttpOnly': True
})
s.generate_id = mock.MagicMock(return_value='123')
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
assert ('Set-Cookie', 'test=123; path=/a; SameSite=None; Secure; '
'HttpOnly') in start_response.call_args[0][1]
def test_no_cookie(self):
s = server.Server(cookie=None)
s.generate_id = mock.MagicMock(return_value='123')
environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
start_response = mock.MagicMock()
s.handle_request(environ, start_response)
for header, value in start_response.call_args[0][1]:
assert header != 'Set-Cookie'
def test_logger(self):
    # logger=False -> ERROR level; logger=True -> INFO level, but an
    # already configured level is left untouched; a Logger instance is
    # used as-is.  The setLevel(NOTSET) calls restore the shared default
    # logger between assertions.
    s = server.Server(logger=False)
    assert s.logger.getEffectiveLevel() == logging.ERROR
    s.logger.setLevel(logging.NOTSET)
    s = server.Server(logger=True)
    assert s.logger.getEffectiveLevel() == logging.INFO
    s.logger.setLevel(logging.WARNING)
    s = server.Server(logger=True)
    assert s.logger.getEffectiveLevel() == logging.WARNING
    s.logger.setLevel(logging.NOTSET)
    my_logger = logging.Logger('foo')
    s = server.Server(logger=my_logger)
    assert s.logger == my_logger
def test_custom_json(self):
    # Warning: this test cannot run in parallel with other tests, as it
    # changes the JSON encoding/decoding functions
    class CustomJSON(object):
        @staticmethod
        def dumps(*args, **kwargs):
            return '*** encoded ***'

        @staticmethod
        def loads(*args, **kwargs):
            return '+++ decoded +++'

    # passing json= replaces the codec globally on packet.Packet
    server.Server(json=CustomJSON)
    pkt = packet.Packet(packet.MESSAGE, data={'foo': 'bar'})
    assert pkt.encode() == '4*** encoded ***'
    pkt2 = packet.Packet(encoded_packet=pkt.encode())
    assert pkt2.data == '+++ decoded +++'

    # restore the default JSON module
    packet.Packet.json = json
def test_background_tasks(self):
    """start_background_task() must run the target function and return
    a task object that can be joined."""
    evidence = {}

    def bg_task():
        evidence['task'] = True

    srv = server.Server()
    task = srv.start_background_task(bg_task)
    task.join()
    assert evidence.get('task') is True
def test_sleep(self):
    """sleep() must block for at least the requested duration."""
    srv = server.Server()
    start = time.time()
    srv.sleep(0.1)
    elapsed = time.time() - start
    assert elapsed > 0.1
def test_create_queue(self):
    # the queue and the Empty exception reported by the server must match
    s = server.Server()
    q = s.create_queue()
    empty = s.get_queue_empty_exception()
    with pytest.raises(empty):
        q.get(timeout=0.01)
def test_create_event(self):
    """create_event() must return an object with Event semantics."""
    ev = server.Server().create_event()
    assert not ev.is_set()
    ev.set()
    assert ev.is_set()
def test_log_error_once(self):
    # the first occurrence of a keyed error logs at ERROR level; repeats
    # of the same key are demoted to INFO
    s = server.Server(logger=mock.MagicMock())
    s._log_error_once('foo', 'foo-key')
    s._log_error_once('foo', 'foo-key')
    s.logger.error.assert_called_with(
        'foo (further occurrences of this error will be logged with '
        'level INFO)')
    s.logger.info.assert_called_with('foo')
def test_service_task_started(self):
    # with monitor_clients=True, the first handled request starts the
    # client monitoring service task
    s = server.Server(monitor_clients=True)
    s._service_task = mock.MagicMock()
    environ = {'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'EIO=4'}
    start_response = mock.MagicMock()
    s.handle_request(environ, start_response)
    s._service_task.assert_called_once_with()
def test_transports_invalid(self):
    """Unknown transport names must be rejected at construction time,
    whether given as a string or inside a list."""
    for bad in ('invalid', ['invalid', 'foo']):
        with pytest.raises(ValueError):
            server.Server(transports=bad)
def test_transports_disallowed(self):
    # requesting a transport that is not in the allowed list is a 400
    s = server.Server(transports='websocket')
    environ = {
        'REQUEST_METHOD': 'GET',
        'QUERY_STRING': 'transport=polling',
    }
    start_response = mock.MagicMock()
    s.handle_request(environ, start_response)
    assert start_response.call_args[0][0] == '400 BAD REQUEST'
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,440
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/client.py
|
from base64 import b64encode
from engineio.json import JSONDecodeError
import logging
import queue
import signal
import ssl
import threading
import time
import urllib
try:
import requests
except ImportError: # pragma: no cover
requests = None
try:
import websocket
except ImportError: # pragma: no cover
websocket = None
from . import exceptions
from . import packet
from . import payload
# module-level logger used when the application does not supply its own
default_logger = logging.getLogger('engineio.client')

# all currently connected clients; the SIGINT handler walks this list to
# shut them down cleanly
connected_clients = []
def signal_handler(sig, frame):
    """SIGINT handler.

    Disconnect all active clients and then invoke the original signal handler.
    """
    # iterate over a copy, since disconnect() removes the client from the
    # connected_clients list
    for client in connected_clients[:]:
        if not client.is_asyncio_based():
            client.disconnect()
    if callable(original_signal_handler):
        # chain to whatever handler was installed before ours
        return original_signal_handler(sig, frame)
    else:  # pragma: no cover
        # Handle case where no original SIGINT handler was present.
        return signal.default_int_handler(sig, frame)
# the SIGINT handler that was in place before the first Client installed its
# own (see Client.__init__); None until then
original_signal_handler = None
class Client(object):
"""An Engine.IO client.
This class implements a fully compliant Engine.IO web client with support
for websocket and long-polling transports.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. The default is
``False``. Note that fatal errors are logged even when
``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param request_timeout: A timeout in seconds for requests. The default is
5 seconds.
:param http_session: an initialized ``requests.Session`` object to be used
when sending requests to the server. Use it if you
need to add special client options such as proxy
servers, SSL certificates, custom CA bundle, etc.
:param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
skip SSL certificate verification, allowing
connections to servers with self signed certificates.
The default is ``True``.
:param handle_sigint: Set to ``True`` to automatically handle disconnection
when the process is interrupted, or to ``False`` to
leave interrupt handling to the calling application.
Interrupt handling can only be enabled when the
client instance is created in the main thread.
:param websocket_extra_options: Dictionary containing additional keyword
arguments passed to
``websocket.create_connection()``.
"""
event_names = ['connect', 'disconnect', 'message']
def __init__(self, logger=False, json=None, request_timeout=5,
             http_session=None, ssl_verify=True, handle_sigint=True,
             websocket_extra_options=None):
    global original_signal_handler
    # install the SIGINT handler only once, and only from the main thread
    # (signal.signal() can only be called from the main thread)
    if handle_sigint and original_signal_handler is None and \
            threading.current_thread() == threading.main_thread():
        original_signal_handler = signal.signal(signal.SIGINT,
                                                signal_handler)
    self.handlers = {}  # event name -> handler callable
    self.base_url = None
    self.transports = None
    self.current_transport = None
    self.sid = None  # session id assigned by the server on connect
    self.upgrades = None
    self.ping_interval = None
    self.ping_timeout = None
    self.http = http_session
    # remember whether the requests session was supplied by the caller
    # (presumably so it is not replaced/closed by the client — TODO confirm
    # use at teardown, which is outside this method)
    self.external_http = http_session is not None
    self.handle_sigint = handle_sigint
    self.ws = None
    self.read_loop_task = None
    self.write_loop_task = None
    self.queue = None
    self.state = 'disconnected'
    self.ssl_verify = ssl_verify
    self.websocket_extra_options = websocket_extra_options or {}
    if json is not None:
        # NOTE: this replaces the JSON codec globally on packet.Packet
        packet.Packet.json = json
    if not isinstance(logger, bool):
        self.logger = logger
    else:
        self.logger = default_logger
        if self.logger.level == logging.NOTSET:
            if logger:
                self.logger.setLevel(logging.INFO)
            else:
                self.logger.setLevel(logging.ERROR)
            self.logger.addHandler(logging.StreamHandler())
    self.request_timeout = request_timeout
def is_asyncio_based(self):
    # this is the synchronous client; the asyncio variant overrides this
    return False
def on(self, event, handler=None):
"""Register an event handler.
:param event: The event name. Can be ``'connect'``, ``'message'`` or
``'disconnect'``.
:param handler: The function that should be invoked to handle the
event. When this parameter is not given, the method
acts as a decorator for the handler function.
Example usage::
# as a decorator:
@eio.on('connect')
def connect_handler():
print('Connection request')
# as a method:
def message_handler(msg):
print('Received message: ', msg)
eio.send('response')
eio.on('message', message_handler)
"""
if event not in self.event_names:
raise ValueError('Invalid event')
def set_handler(handler):
self.handlers[event] = handler
return handler
if handler is None:
return set_handler
set_handler(handler)
def connect(self, url, headers=None, transports=None,
            engineio_path='engine.io'):
    """Connect to an Engine.IO server.

    :param url: The URL of the Engine.IO server. It can include custom
                query string parameters if required by the server.
    :param headers: A dictionary with custom headers to send with the
                    connection request.
    :param transports: The list of allowed transports. Valid transports
                       are ``'polling'`` and ``'websocket'``. If not
                       given, the polling transport is connected first,
                       then an upgrade to websocket is attempted.
    :param engineio_path: The endpoint where the Engine.IO server is
                          installed. The default value is appropriate for
                          most cases.
    :raises ValueError: if already connected, or if no valid transport
                        remains after filtering the given list.

    Example usage::

        eio = engineio.Client()
        eio.connect('http://localhost:5000')
    """
    if self.state != 'disconnected':
        raise ValueError('Client is not in a disconnected state')
    valid_transports = ['polling', 'websocket']
    if transports is not None:
        if isinstance(transports, str):
            transports = [transports]
        # silently drop unknown names, but fail if nothing valid is left
        transports = [transport for transport in transports
                      if transport in valid_transports]
        if not transports:
            raise ValueError('No valid transports provided')
    self.transports = transports or valid_transports
    self.queue = self.create_queue()
    # dispatch to _connect_polling() or _connect_websocket() based on the
    # first allowed transport
    return getattr(self, '_connect_' + self.transports[0])(
        url, headers or {}, engineio_path)
def wait(self):
"""Wait until the connection with the server ends.
Client applications can use this function to block the main thread
during the life of the connection.
"""
if self.read_loop_task:
self.read_loop_task.join()
def send(self, data):
    """Send a message to the server.

    :param data: The data to send to the server. Data can be of type
                 ``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
                 or ``dict``, the data will be serialized as JSON.
    """
    # the packet is queued and delivered by the background write loop
    self._send_packet(packet.Packet(packet.MESSAGE, data=data))
def disconnect(self, abort=False):
    """Disconnect from the server.

    :param abort: If set to ``True``, do not wait for background tasks
                  associated with the connection to end.
    """
    if self.state == 'connected':
        self._send_packet(packet.Packet(packet.CLOSE))
        self.queue.put(None)  # a None entry stops the write loop
        self.state = 'disconnecting'
        self._trigger_event('disconnect', run_async=False)
        if self.current_transport == 'websocket':
            self.ws.close()
        if not abort:
            # the read loop joins the write loop before exiting
            self.read_loop_task.join()
        self.state = 'disconnected'
        try:
            connected_clients.remove(self)
        except ValueError:  # pragma: no cover
            pass
    self._reset()
def transport(self):
    """Return the name of the transport currently in use.

    The possible values returned by this function are ``'polling'`` and
    ``'websocket'``.
    """
    # None until one of the _connect_* methods establishes a connection
    return self.current_transport
def start_background_task(self, target, *args, **kwargs):
"""Start a background task.
This is a utility function that applications can use to start a
background task.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object that represents the background task,
on which the ``join()`` method can be invoked to wait for the task to
complete.
"""
th = threading.Thread(target=target, args=args, kwargs=kwargs)
th.start()
return th
def sleep(self, seconds=0):
    """Sleep for the requested amount of time."""
    # plain blocking sleep; the asyncio client provides the async variant
    return time.sleep(seconds)
def create_queue(self, *args, **kwargs):
"""Create a queue object."""
q = queue.Queue(*args, **kwargs)
q.Empty = queue.Empty
return q
def create_event(self, *args, **kwargs):
    """Create an event object.

    Arguments are passed through to ``threading.Event``.
    """
    return threading.Event(*args, **kwargs)
def _reset(self):
    # return the client to its pristine disconnected state
    self.state = 'disconnected'
    self.sid = None
def _connect_polling(self, url, headers, engineio_path):
    """Establish a long-polling connection to the Engine.IO server.

    :raises exceptions.ConnectionError: if the server refuses the
        connection, returns an unexpected status code, or sends an
        invalid handshake payload.
    """
    if requests is None:  # pragma: no cover
        # not installed
        self.logger.error('requests package is not installed -- cannot '
                          'send HTTP requests!')
        return
    self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
    self.logger.info('Attempting polling connection to ' + self.base_url)
    r = self._send_request(
        'GET', self.base_url + self._get_url_timestamp(), headers=headers,
        timeout=self.request_timeout)
    # _send_request() returns the error message as a string on failure
    if r is None or isinstance(r, str):
        self._reset()
        raise exceptions.ConnectionError(
            r or 'Connection refused by the server')
    if r.status_code < 200 or r.status_code >= 300:
        self._reset()
        try:
            arg = r.json()
        except JSONDecodeError:
            arg = None
        raise exceptions.ConnectionError(
            'Unexpected status code {} in server response'.format(
                r.status_code), arg)
    try:
        p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
    except ValueError:
        raise exceptions.ConnectionError(
            'Unexpected response from server') from None
    # the first packet of the handshake payload must be OPEN
    open_packet = p.packets[0]
    if open_packet.packet_type != packet.OPEN:
        raise exceptions.ConnectionError(
            'OPEN packet not returned by server')
    self.logger.info(
        'Polling connection accepted with ' + str(open_packet.data))
    self.sid = open_packet.data['sid']
    self.upgrades = open_packet.data['upgrades']
    # the server reports intervals in milliseconds; store seconds
    self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
    self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
    self.current_transport = 'polling'
    self.base_url += '&sid=' + self.sid
    self.state = 'connected'
    connected_clients.append(self)
    self._trigger_event('connect', run_async=False)
    # any additional packets bundled with the handshake are regular ones
    for pkt in p.packets[1:]:
        self._receive_packet(pkt)
    if 'websocket' in self.upgrades and 'websocket' in self.transports:
        # attempt to upgrade to websocket
        if self._connect_websocket(url, headers, engineio_path):
            # upgrade to websocket succeeded, we're done here
            return
    # start background tasks associated with this client
    self.write_loop_task = self.start_background_task(self._write_loop)
    self.read_loop_task = self.start_background_task(
        self._read_loop_polling)
def _connect_websocket(self, url, headers, engineio_path):
    """Establish or upgrade to a WebSocket connection with the server.

    Returns ``True`` on success. During an upgrade, failures return
    ``False`` so the polling connection stays in use; on a direct
    connection attempt they raise ``exceptions.ConnectionError``.
    """
    if websocket is None:  # pragma: no cover
        # not installed
        self.logger.error('websocket-client package not installed, only '
                          'polling transport is available')
        return False
    websocket_url = self._get_engineio_url(url, engineio_path, 'websocket')
    if self.sid:
        # a session already exists from polling, so this is an upgrade
        self.logger.info(
            'Attempting WebSocket upgrade to ' + websocket_url)
        upgrade = True
        websocket_url += '&sid=' + self.sid
    else:
        upgrade = False
        self.base_url = websocket_url
        self.logger.info(
            'Attempting WebSocket connection to ' + websocket_url)

    # get cookies and other settings from the long-polling connection
    # so that they are preserved when connecting to the WebSocket route
    cookies = None
    extra_options = {}
    if self.http:
        # cookies
        cookies = '; '.join(["{}={}".format(cookie.name, cookie.value)
                             for cookie in self.http.cookies])
        for header, value in headers.items():
            if header.lower() == 'cookie':
                if cookies:
                    cookies += '; '
                cookies += value
                del headers[header]
                break

        # auth
        if 'Authorization' not in headers and self.http.auth is not None:
            if not isinstance(self.http.auth, tuple):  # pragma: no cover
                raise ValueError('Only basic authentication is supported')
            basic_auth = '{}:{}'.format(
                self.http.auth[0], self.http.auth[1]).encode('utf-8')
            basic_auth = b64encode(basic_auth).decode('utf-8')
            headers['Authorization'] = 'Basic ' + basic_auth

        # cert
        # this can be given as ('certfile', 'keyfile') or just 'certfile'
        if isinstance(self.http.cert, tuple):
            extra_options['sslopt'] = {
                'certfile': self.http.cert[0],
                'keyfile': self.http.cert[1]}
        elif self.http.cert:
            extra_options['sslopt'] = {'certfile': self.http.cert}

        # proxies
        if self.http.proxies:
            proxy_url = None
            if websocket_url.startswith('ws://'):
                proxy_url = self.http.proxies.get(
                    'ws', self.http.proxies.get('http'))
            else:  # wss://
                proxy_url = self.http.proxies.get(
                    'wss', self.http.proxies.get('https'))
            if proxy_url:
                parsed_url = urllib.parse.urlparse(
                    proxy_url if '://' in proxy_url
                    else 'scheme://' + proxy_url)
                extra_options['http_proxy_host'] = parsed_url.hostname
                extra_options['http_proxy_port'] = parsed_url.port
                extra_options['http_proxy_auth'] = (
                    (parsed_url.username, parsed_url.password)
                    if parsed_url.username or parsed_url.password
                    else None)

        # verify
        # a string value for "verify" is a CA bundle path
        if isinstance(self.http.verify, str):
            if 'sslopt' in extra_options:
                extra_options['sslopt']['ca_certs'] = self.http.verify
            else:
                extra_options['sslopt'] = {'ca_certs': self.http.verify}
        elif not self.http.verify:
            self.ssl_verify = False
    if not self.ssl_verify:
        extra_options['sslopt'] = {"cert_reqs": ssl.CERT_NONE}

    # combine internally generated options with the ones supplied by the
    # caller. The caller's options take precedence.
    headers.update(self.websocket_extra_options.pop('header', {}))
    extra_options['header'] = headers
    extra_options['cookie'] = cookies
    extra_options['enable_multithread'] = True
    extra_options['timeout'] = self.request_timeout
    extra_options.update(self.websocket_extra_options)
    try:
        ws = websocket.create_connection(
            websocket_url + self._get_url_timestamp(), **extra_options)
    except (ConnectionError, IOError, websocket.WebSocketException):
        if upgrade:
            self.logger.warning(
                'WebSocket upgrade failed: connection error')
            return False
        else:
            raise exceptions.ConnectionError('Connection error')
    if upgrade:
        # probe the new connection with a PING/PONG exchange before
        # switching the session over to it
        p = packet.Packet(packet.PING, data='probe').encode()
        try:
            ws.send(p)
        except Exception as e:  # pragma: no cover
            self.logger.warning(
                'WebSocket upgrade failed: unexpected send exception: %s',
                str(e))
            return False
        try:
            p = ws.recv()
        except Exception as e:  # pragma: no cover
            self.logger.warning(
                'WebSocket upgrade failed: unexpected recv exception: %s',
                str(e))
            return False
        pkt = packet.Packet(encoded_packet=p)
        if pkt.packet_type != packet.PONG or pkt.data != 'probe':
            self.logger.warning(
                'WebSocket upgrade failed: no PONG packet')
            return False
        p = packet.Packet(packet.UPGRADE).encode()
        try:
            ws.send(p)
        except Exception as e:  # pragma: no cover
            self.logger.warning(
                'WebSocket upgrade failed: unexpected send exception: %s',
                str(e))
            return False
        self.current_transport = 'websocket'
        self.logger.info('WebSocket upgrade was successful')
    else:
        # direct websocket connection: the server must reply with OPEN
        try:
            p = ws.recv()
        except Exception as e:  # pragma: no cover
            raise exceptions.ConnectionError(
                'Unexpected recv exception: ' + str(e))
        open_packet = packet.Packet(encoded_packet=p)
        if open_packet.packet_type != packet.OPEN:
            raise exceptions.ConnectionError('no OPEN packet')
        self.logger.info(
            'WebSocket connection accepted with ' + str(open_packet.data))
        self.sid = open_packet.data['sid']
        self.upgrades = open_packet.data['upgrades']
        # the server reports intervals in milliseconds; store seconds
        self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
        self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
        self.current_transport = 'websocket'
        self.state = 'connected'
        connected_clients.append(self)
        self._trigger_event('connect', run_async=False)
    self.ws = ws
    self.ws.settimeout(self.ping_interval + self.ping_timeout)

    # start background tasks associated with this client
    self.write_loop_task = self.start_background_task(self._write_loop)
    self.read_loop_task = self.start_background_task(
        self._read_loop_websocket)
    return True
def _receive_packet(self, pkt):
    """Handle an incoming packet from the server.

    :param pkt: the decoded packet object received from the server.
    """
    packet_name = packet.packet_names[pkt.packet_type] \
        if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
    self.logger.info(
        'Received packet %s data %s', packet_name,
        pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
    if pkt.packet_type == packet.MESSAGE:
        # message handlers run in a background task so they cannot block
        # the read loop
        self._trigger_event('message', pkt.data, run_async=True)
    elif pkt.packet_type == packet.PING:
        # echo the ping payload back as a pong
        self._send_packet(packet.Packet(packet.PONG, pkt.data))
    elif pkt.packet_type == packet.CLOSE:
        self.disconnect(abort=True)
    elif pkt.packet_type == packet.NOOP:
        pass
    else:
        self.logger.error('Received unexpected packet of type %s',
                          pkt.packet_type)
def _send_packet(self, pkt):
    """Queue a packet to be sent to the server by the write loop task.

    Silently dropped if the client is not connected.
    """
    if self.state != 'connected':
        return
    self.queue.put(pkt)
    self.logger.info(
        'Sending packet %s data %s',
        packet.packet_names[pkt.packet_type],
        pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
def _send_request(
        self, method, url, headers=None, body=None,
        timeout=None):  # pragma: no cover
    """Issue an HTTP request.

    Returns the response object on success, or the error message as a
    string if the request raised an exception.
    """
    if self.http is None:
        # lazily create a session when the caller did not provide one
        self.http = requests.Session()
    if not self.ssl_verify:
        self.http.verify = False
    try:
        return self.http.request(method, url, headers=headers, data=body,
                                 timeout=timeout)
    except requests.exceptions.RequestException as exc:
        self.logger.info('HTTP %s request to %s failed with error %s.',
                         method, url, exc)
        return str(exc)
def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
if event in self.handlers:
if run_async:
return self.start_background_task(self.handlers[event], *args)
else:
try:
return self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
def _get_engineio_url(self, url, engineio_path, transport):
"""Generate the Engine.IO connection URL."""
engineio_path = engineio_path.strip('/')
parsed_url = urllib.parse.urlparse(url)
if transport == 'polling':
scheme = 'http'
elif transport == 'websocket':
scheme = 'ws'
else: # pragma: no cover
raise ValueError('invalid transport')
if parsed_url.scheme in ['https', 'wss']:
scheme += 's'
return ('{scheme}://{netloc}/{path}/?{query}'
'{sep}transport={transport}&EIO=4').format(
scheme=scheme, netloc=parsed_url.netloc,
path=engineio_path, query=parsed_url.query,
sep='&' if parsed_url.query else '',
transport=transport)
def _get_url_timestamp(self):
"""Generate the Engine.IO query string timestamp."""
return '&t=' + str(time.time())
def _read_loop_polling(self):
    """Read packets by polling the Engine.IO server."""
    while self.state == 'connected':
        self.logger.info(
            'Sending polling GET request to ' + self.base_url)
        r = self._send_request(
            'GET', self.base_url + self._get_url_timestamp(),
            timeout=max(self.ping_interval, self.ping_timeout) + 5)
        # _send_request() returns the error message as a string on failure
        if r is None or isinstance(r, str):
            self.logger.warning(
                r or 'Connection refused by the server, aborting')
            self.queue.put(None)  # wake up and stop the write loop
            break
        if r.status_code < 200 or r.status_code >= 300:
            self.logger.warning('Unexpected status code %s in server '
                                'response, aborting', r.status_code)
            self.queue.put(None)
            break
        try:
            p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
        except ValueError:
            self.logger.warning(
                'Unexpected packet from server, aborting')
            self.queue.put(None)
            break
        for pkt in p.packets:
            self._receive_packet(pkt)
    self.logger.info('Waiting for write loop task to end')
    self.write_loop_task.join()
    if self.state == 'connected':
        # abnormal termination: the disconnect event has not fired yet
        self._trigger_event('disconnect', run_async=False)
        try:
            connected_clients.remove(self)
        except ValueError:  # pragma: no cover
            pass
        self._reset()
    self.logger.info('Exiting read loop task')
def _read_loop_websocket(self):
    """Read packets from the Engine.IO WebSocket connection."""
    while self.state == 'connected':
        p = None
        try:
            p = self.ws.recv()
        except websocket.WebSocketTimeoutException:
            # no data within ping_interval + ping_timeout (see the
            # settimeout() call made at connection time)
            self.logger.warning(
                'Server has stopped communicating, aborting')
            self.queue.put(None)  # wake up and stop the write loop
            break
        except websocket.WebSocketConnectionClosedException:
            self.logger.warning(
                'WebSocket connection was closed, aborting')
            self.queue.put(None)
            break
        except Exception as e:
            self.logger.info(
                'Unexpected error receiving packet: "%s", aborting',
                str(e))
            self.queue.put(None)
            break
        try:
            pkt = packet.Packet(encoded_packet=p)
        except Exception as e:  # pragma: no cover
            self.logger.info(
                'Unexpected error decoding packet: "%s", aborting', str(e))
            self.queue.put(None)
            break
        self._receive_packet(pkt)
    self.logger.info('Waiting for write loop task to end')
    self.write_loop_task.join()
    if self.state == 'connected':
        # abnormal termination: the disconnect event has not fired yet
        self._trigger_event('disconnect', run_async=False)
        try:
            connected_clients.remove(self)
        except ValueError:  # pragma: no cover
            pass
        self._reset()
    self.logger.info('Exiting read loop task')
def _write_loop(self):
    """This background task sends packages to the server as they are
    pushed to the send queue.

    A ``None`` entry in the queue is the stop sentinel.
    """
    while self.state == 'connected':
        # to simplify the timeout handling, use the maximum of the
        # ping interval and ping timeout as timeout, with an extra 5
        # seconds grace period
        timeout = max(self.ping_interval, self.ping_timeout) + 5
        packets = None
        try:
            packets = [self.queue.get(timeout=timeout)]
        except self.queue.Empty:
            self.logger.error('packet queue is empty, aborting')
            break
        if packets == [None]:
            self.queue.task_done()
            packets = []
        else:
            # drain any additional queued packets so they can be sent
            # together, stopping early if the sentinel is found
            while True:
                try:
                    packets.append(self.queue.get(block=False))
                except self.queue.Empty:
                    break
                if packets[-1] is None:
                    packets = packets[:-1]
                    self.queue.task_done()
                    break
        if not packets:
            # empty packet list returned -> connection closed
            break
        if self.current_transport == 'polling':
            # batch all pending packets into one POST payload
            p = payload.Payload(packets=packets)
            r = self._send_request(
                'POST', self.base_url, body=p.encode(),
                headers={'Content-Type': 'text/plain'},
                timeout=self.request_timeout)
            for pkt in packets:
                self.queue.task_done()
            # _send_request() returns an error string on failure
            if r is None or isinstance(r, str):
                self.logger.warning(
                    r or 'Connection refused by the server, aborting')
                break
            if r.status_code < 200 or r.status_code >= 300:
                self.logger.warning('Unexpected status code %s in server '
                                    'response, aborting', r.status_code)
                self._reset()
                break
        else:
            # websocket
            try:
                for pkt in packets:
                    encoded_packet = pkt.encode()
                    if pkt.binary:
                        self.ws.send_binary(encoded_packet)
                    else:
                        self.ws.send(encoded_packet)
                    self.queue.task_done()
            except (websocket.WebSocketConnectionClosedException,
                    BrokenPipeError, OSError):
                self.logger.warning(
                    'WebSocket connection was closed, aborting')
                break
    self.logger.info('Exiting write loop task')
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,441
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/__init__.py
|
import sys
from .client import Client
from .middleware import WSGIApp, Middleware
from .server import Server
if sys.version_info >= (3, 5):  # pragma: no cover
    # the asyncio-based classes require Python 3.5 or newer
    from .asyncio_server import AsyncServer
    from .asyncio_client import AsyncClient
    from .async_drivers.asgi import ASGIApp
    try:
        from .async_drivers.tornado import get_tornado_handler
    except ImportError:
        # tornado is an optional dependency
        get_tornado_handler = None
else:  # pragma: no cover
    # async names exist but are unusable on older interpreters
    AsyncServer = None
    AsyncClient = None
    get_tornado_handler = None
    ASGIApp = None

__all__ = ['Server', 'WSGIApp', 'Middleware', 'Client']
if AsyncServer is not None:  # pragma: no cover
    __all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler',
                'AsyncClient']
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,442
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/server.py
|
import base64
import gzip
import importlib
import io
import logging
import secrets
import urllib
import zlib
from . import exceptions
from . import packet
from . import payload
from . import socket
default_logger = logging.getLogger('engineio.server')
class Server(object):
"""An Engine.IO server.
This class implements a fully compliant Engine.IO web server with support
for websocket and long-polling transports.
:param async_mode: The asynchronous model to use. See the Deployment
section in the documentation for a description of the
available options. Valid async modes are "threading",
"eventlet", "gevent" and "gevent_uwsgi". If this
argument is not given, "eventlet" is tried first, then
"gevent_uwsgi", then "gevent", and finally "threading".
The first async mode that has all its dependencies
installed is the one that is chosen.
:param ping_interval: The interval in seconds at which the server pings
the client. The default is 25 seconds. For advanced
control, a two element tuple can be given, where
the first number is the ping interval and the second
is a grace period added by the server.
:param ping_timeout: The time in seconds that the client waits for the
server to respond before disconnecting. The default
is 20 seconds.
:param max_http_buffer_size: The maximum size of a message. The default
is 1,000,000 bytes.
:param allow_upgrades: Whether to allow transport upgrades or not. The
default is ``True``.
:param http_compression: Whether to compress packages when using the
polling transport. The default is ``True``.
:param compression_threshold: Only compress messages when their byte size
is greater than this value. The default is
1024 bytes.
:param cookie: If set to a string, it is the name of the HTTP cookie the
server sends back to the client containing the client
session id. If set to a dictionary, the ``'name'`` key
contains the cookie name and other keys define cookie
attributes, where the value of each attribute can be a
string, a callable with no arguments, or a boolean. If set
to ``None`` (the default), a cookie is not sent to the
client.
:param cors_allowed_origins: Origin or list of origins that are allowed to
connect to this server. Only the same origin
is allowed by default. Set this argument to
``'*'`` to allow all origins, or to ``[]`` to
disable CORS handling.
:param cors_credentials: Whether credentials (cookies, authentication) are
allowed in requests to this server. The default
is ``True``.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. The default is
``False``. Note that fatal errors are logged even when
``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param async_handlers: If set to ``True``, run message event handlers in
non-blocking threads. To run handlers synchronously,
set to ``False``. The default is ``True``.
:param monitor_clients: If set to ``True``, a background task will ensure
inactive clients are closed. Set to ``False`` to
disable the monitoring task (not recommended). The
default is ``True``.
:param transports: The list of allowed transports. Valid transports
are ``'polling'`` and ``'websocket'``. Defaults to
``['polling', 'websocket']``.
:param kwargs: Reserved for future extensions, any additional parameters
given as keyword arguments will be silently ignored.
"""
compression_methods = ['gzip', 'deflate']
event_names = ['connect', 'disconnect', 'message']
valid_transports = ['polling', 'websocket']
_default_monitor_clients = True
sequence_number = 0
    def __init__(self, async_mode=None, ping_interval=25, ping_timeout=20,
                 max_http_buffer_size=1000000, allow_upgrades=True,
                 http_compression=True, compression_threshold=1024,
                 cookie=None, cors_allowed_origins=None,
                 cors_credentials=True, logger=False, json=None,
                 async_handlers=True, monitor_clients=None, transports=None,
                 **kwargs):
        self.ping_timeout = ping_timeout
        # ping_interval may be a (interval, grace_period) tuple
        if isinstance(ping_interval, tuple):
            self.ping_interval = ping_interval[0]
            self.ping_interval_grace_period = ping_interval[1]
        else:
            self.ping_interval = ping_interval
            self.ping_interval_grace_period = 0
        self.max_http_buffer_size = max_http_buffer_size
        self.allow_upgrades = allow_upgrades
        self.http_compression = http_compression
        self.compression_threshold = compression_threshold
        self.cookie = cookie
        self.cors_allowed_origins = cors_allowed_origins
        self.cors_credentials = cors_credentials
        self.async_handlers = async_handlers
        self.sockets = {}  # sid -> socket object for connected clients
        self.handlers = {}  # event name -> application handler
        self.log_message_keys = set()  # keys already logged by _log_error_once
        self.start_service_task = monitor_clients \
            if monitor_clients is not None else self._default_monitor_clients
        if json is not None:
            # NOTE: this replaces the json module on the shared Packet class,
            # so it affects every server instance in the process
            packet.Packet.json = json
        if not isinstance(logger, bool):
            # a logger object was passed in; use it as-is
            self.logger = logger
        else:
            self.logger = default_logger
            if self.logger.level == logging.NOTSET:
                if logger:
                    self.logger.setLevel(logging.INFO)
                else:
                    self.logger.setLevel(logging.ERROR)
                self.logger.addHandler(logging.StreamHandler())
        # determine the async mode: try each candidate driver in order and
        # keep the first importable one that matches this class's sync/async
        # flavor
        modes = self.async_modes()
        if async_mode is not None:
            modes = [async_mode] if async_mode in modes else []
        self._async = None
        self.async_mode = None
        for mode in modes:
            try:
                self._async = importlib.import_module(
                    'engineio.async_drivers.' + mode)._async
                asyncio_based = self._async['asyncio'] \
                    if 'asyncio' in self._async else False
                if asyncio_based != self.is_asyncio_based():
                    continue  # pragma: no cover
                self.async_mode = mode
                break
            except ImportError:
                pass
        if self.async_mode is None:
            raise ValueError('Invalid async_mode specified')
        if self.is_asyncio_based() and \
                ('asyncio' not in self._async or not
                 self._async['asyncio']):  # pragma: no cover
            raise ValueError('The selected async_mode is not asyncio '
                             'compatible')
        if not self.is_asyncio_based() and 'asyncio' in self._async and \
                self._async['asyncio']:  # pragma: no cover
            raise ValueError('The selected async_mode requires asyncio and '
                             'must use the AsyncServer class')
        # filter the requested transports against the supported ones
        if transports is not None:
            if isinstance(transports, str):
                transports = [transports]
            transports = [transport for transport in transports
                          if transport in self.valid_transports]
            if not transports:
                raise ValueError('No valid transports provided')
        self.transports = transports or self.valid_transports
        self.logger.info('Server initialized for %s.', self.async_mode)
def is_asyncio_based(self):
return False
def async_modes(self):
return ['eventlet', 'gevent_uwsgi', 'gevent', 'threading']
def on(self, event, handler=None):
"""Register an event handler.
:param event: The event name. Can be ``'connect'``, ``'message'`` or
``'disconnect'``.
:param handler: The function that should be invoked to handle the
event. When this parameter is not given, the method
acts as a decorator for the handler function.
Example usage::
# as a decorator:
@eio.on('connect')
def connect_handler(sid, environ):
print('Connection request')
if environ['REMOTE_ADDR'] in blacklisted:
return False # reject
# as a method:
def message_handler(sid, msg):
print('Received message: ', msg)
eio.send(sid, 'response')
eio.on('message', message_handler)
The handler function receives the ``sid`` (session ID) for the
client as first argument. The ``'connect'`` event handler receives the
WSGI environment as a second argument, and can return ``False`` to
reject the connection. The ``'message'`` handler receives the message
payload as a second argument. The ``'disconnect'`` handler does not
take a second argument.
"""
if event not in self.event_names:
raise ValueError('Invalid event')
def set_handler(handler):
self.handlers[event] = handler
return handler
if handler is None:
return set_handler
set_handler(handler)
def send(self, sid, data):
"""Send a message to a client.
:param sid: The session id of the recipient client.
:param data: The data to send to the client. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
"""
try:
socket = self._get_socket(sid)
except KeyError:
# the socket is not available
self.logger.warning('Cannot send to sid %s', sid)
return
socket.send(packet.Packet(packet.MESSAGE, data=data))
def get_session(self, sid):
"""Return the user session for a client.
:param sid: The session id of the client.
The return value is a dictionary. Modifications made to this
dictionary are not guaranteed to be preserved unless
``save_session()`` is called, or when the ``session`` context manager
is used.
"""
socket = self._get_socket(sid)
return socket.session
def save_session(self, sid, session):
"""Store the user session for a client.
:param sid: The session id of the client.
:param session: The session dictionary.
"""
socket = self._get_socket(sid)
socket.session = session
def session(self, sid):
"""Return the user session for a client with context manager syntax.
:param sid: The session id of the client.
This is a context manager that returns the user session dictionary for
the client. Any changes that are made to this dictionary inside the
context manager block are saved back to the session. Example usage::
@eio.on('connect')
def on_connect(sid, environ):
username = authenticate_user(environ)
if not username:
return False
with eio.session(sid) as session:
session['username'] = username
@eio.on('message')
def on_message(sid, msg):
with eio.session(sid) as session:
print('received message from ', session['username'])
"""
class _session_context_manager(object):
def __init__(self, server, sid):
self.server = server
self.sid = sid
self.session = None
def __enter__(self):
self.session = self.server.get_session(sid)
return self.session
def __exit__(self, *args):
self.server.save_session(sid, self.session)
return _session_context_manager(self, sid)
def disconnect(self, sid=None):
"""Disconnect a client.
:param sid: The session id of the client to close. If this parameter
is not given, then all clients are closed.
"""
if sid is not None:
try:
socket = self._get_socket(sid)
except KeyError: # pragma: no cover
# the socket was already closed or gone
pass
else:
socket.close()
if sid in self.sockets: # pragma: no cover
del self.sockets[sid]
else:
for client in self.sockets.values():
client.close()
self.sockets = {}
def transport(self, sid):
"""Return the name of the transport used by the client.
The two possible values returned by this function are ``'polling'``
and ``'websocket'``.
:param sid: The session of the client.
"""
return 'websocket' if self._get_socket(sid).upgraded else 'polling'
    def handle_request(self, environ, start_response):
        """Handle an HTTP request from the client.

        This is the entry point of the Engine.IO application, using the same
        interface as a WSGI application. For the typical usage, this function
        is invoked by the :class:`Middleware` instance, but it can be invoked
        directly when the middleware is not used.

        :param environ: The WSGI environment.
        :param start_response: The WSGI ``start_response`` function.

        This function returns the HTTP response body to deliver to the client
        as a byte sequence.
        """
        if self.cors_allowed_origins != []:
            # Validate the origin header if present
            # This is important for WebSocket more than for HTTP, since
            # browsers only apply CORS controls to HTTP.
            origin = environ.get('HTTP_ORIGIN')
            if origin:
                allowed_origins = self._cors_allowed_origins(environ)
                if allowed_origins is not None and origin not in \
                        allowed_origins:
                    self._log_error_once(
                        origin + ' is not an accepted origin.', 'bad-origin')
                    r = self._bad_request('Not an accepted origin.')
                    start_response(r['status'], r['headers'])
                    return [r['response']]

        method = environ['REQUEST_METHOD']
        query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
        jsonp = False
        jsonp_index = None

        # make sure the client uses an allowed transport
        transport = query.get('transport', ['polling'])[0]
        if transport not in self.transports:
            self._log_error_once('Invalid transport', 'bad-transport')
            r = self._bad_request('Invalid transport')
            start_response(r['status'], r['headers'])
            return [r['response']]

        # make sure the client speaks a compatible Engine.IO version
        sid = query['sid'][0] if 'sid' in query else None
        if sid is None and query.get('EIO') != ['4']:
            self._log_error_once(
                'The client is using an unsupported version of the Socket.IO '
                'or Engine.IO protocols', 'bad-version')
            r = self._bad_request(
                'The client is using an unsupported version of the Socket.IO '
                'or Engine.IO protocols')
            start_response(r['status'], r['headers'])
            return [r['response']]

        # a 'j' query argument requests a JSONP-wrapped response with the
        # given callback index
        if 'j' in query:
            jsonp = True
            try:
                jsonp_index = int(query['j'][0])
            except (ValueError, KeyError, IndexError):
                # Invalid JSONP index number
                pass

        if jsonp and jsonp_index is None:
            self._log_error_once('Invalid JSONP index number',
                                 'bad-jsonp-index')
            r = self._bad_request('Invalid JSONP index number')
        elif method == 'GET':
            if sid is None:
                # no session yet: this is a new connection request.
                # transport must be one of 'polling' or 'websocket'.
                # if 'websocket', the HTTP_UPGRADE header must match.
                upgrade_header = environ.get('HTTP_UPGRADE').lower() \
                    if 'HTTP_UPGRADE' in environ else None
                if transport == 'polling' \
                        or transport == upgrade_header == 'websocket':
                    r = self._handle_connect(environ, start_response,
                                             transport, jsonp_index)
                else:
                    self._log_error_once('Invalid websocket upgrade',
                                         'bad-upgrade')
                    r = self._bad_request('Invalid websocket upgrade')
            else:
                # existing session: poll for queued packets
                if sid not in self.sockets:
                    self._log_error_once('Invalid session ' + sid, 'bad-sid')
                    r = self._bad_request('Invalid session')
                else:
                    socket = self._get_socket(sid)
                    try:
                        packets = socket.handle_get_request(
                            environ, start_response)
                        if isinstance(packets, list):
                            r = self._ok(packets, jsonp_index=jsonp_index)
                        else:
                            # the socket already produced a full response
                            r = packets
                    except exceptions.EngineIOError:
                        if sid in self.sockets:  # pragma: no cover
                            self.disconnect(sid)
                        r = self._bad_request()
                    if sid in self.sockets and self.sockets[sid].closed:
                        del self.sockets[sid]
        elif method == 'POST':
            # client is delivering packets to the server
            if sid is None or sid not in self.sockets:
                self._log_error_once(
                    'Invalid session ' + (sid or 'None'), 'bad-sid')
                r = self._bad_request('Invalid session')
            else:
                socket = self._get_socket(sid)
                try:
                    socket.handle_post_request(environ)
                    r = self._ok(jsonp_index=jsonp_index)
                except exceptions.EngineIOError:
                    if sid in self.sockets:  # pragma: no cover
                        self.disconnect(sid)
                    r = self._bad_request()
                except:  # pragma: no cover
                    # for any other unexpected errors, we log the error
                    # and keep going
                    self.logger.exception('post request handler error')
                    r = self._ok(jsonp_index=jsonp_index)
        elif method == 'OPTIONS':
            # CORS preflight; headers are added below
            r = self._ok()
        else:
            self.logger.warning('Method %s not supported', method)
            r = self._method_not_found()

        # a non-dict response was already handled (e.g. by the websocket
        # transport); return it as-is
        if not isinstance(r, dict):
            return r or []
        # compress the response body when enabled, large enough, and the
        # client accepts one of our compression methods
        if self.http_compression and \
                len(r['response']) >= self.compression_threshold:
            encodings = [e.split(';')[0].strip() for e in
                         environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
            for encoding in encodings:
                if encoding in self.compression_methods:
                    r['response'] = \
                        getattr(self, '_' + encoding)(r['response'])
                    r['headers'] += [('Content-Encoding', encoding)]
                    break
        cors_headers = self._cors_headers(environ)
        start_response(r['status'], r['headers'] + cors_headers)
        return [r['response']]
def start_background_task(self, target, *args, **kwargs):
"""Start a background task using the appropriate async model.
This is a utility function that applications can use to start a
background task using the method that is compatible with the
selected async mode.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object that represents the background task,
on which the ``join()`` methond can be invoked to wait for the task to
complete.
"""
th = self._async['thread'](target=target, args=args, kwargs=kwargs)
th.start()
return th # pragma: no cover
def sleep(self, seconds=0):
"""Sleep for the requested amount of time using the appropriate async
model.
This is a utility function that applications can use to put a task to
sleep without having to worry about using the correct call for the
selected async mode.
"""
return self._async['sleep'](seconds)
def create_queue(self, *args, **kwargs):
"""Create a queue object using the appropriate async model.
This is a utility function that applications can use to create a queue
without having to worry about using the correct call for the selected
async mode.
"""
return self._async['queue'](*args, **kwargs)
def get_queue_empty_exception(self):
"""Return the queue empty exception for the appropriate async model.
This is a utility function that applications can use to work with a
queue without having to worry about using the correct call for the
selected async mode.
"""
return self._async['queue_empty']
def create_event(self, *args, **kwargs):
"""Create an event object using the appropriate async model.
This is a utility function that applications can use to create an
event without having to worry about using the correct call for the
selected async mode.
"""
return self._async['event'](*args, **kwargs)
def generate_id(self):
"""Generate a unique session id."""
id = base64.b64encode(
secrets.token_bytes(12) + self.sequence_number.to_bytes(3, 'big'))
self.sequence_number = (self.sequence_number + 1) & 0xffffff
return id.decode('utf-8').replace('/', '_').replace('+', '-')
def _generate_sid_cookie(self, sid, attributes):
"""Generate the sid cookie."""
cookie = attributes.get('name', 'io') + '=' + sid
for attribute, value in attributes.items():
if attribute == 'name':
continue
if callable(value):
value = value()
if value is True:
cookie += '; ' + attribute
else:
cookie += '; ' + attribute + '=' + value
return cookie
    def _handle_connect(self, environ, start_response, transport,
                        jsonp_index=None):
        """Handle a client connection request.

        Creates the socket, sends the Engine.IO OPEN packet, invokes the
        application's ``connect`` handler, and completes the handshake over
        the requested transport.
        """
        if self.start_service_task:
            # start the service task to monitor connected clients
            self.start_service_task = False
            self.start_background_task(self._service_task)

        sid = self.generate_id()
        s = socket.Socket(self, sid)
        self.sockets[sid] = s

        # the OPEN packet announces the session id, available upgrades and
        # the ping parameters (converted to milliseconds for the client)
        pkt = packet.Packet(packet.OPEN, {
            'sid': sid,
            'upgrades': self._upgrades(sid, transport),
            'pingTimeout': int(self.ping_timeout * 1000),
            'pingInterval': int(
                self.ping_interval + self.ping_interval_grace_period) * 1000})
        s.send(pkt)
        s.schedule_ping()

        # NOTE: some sections below are marked as "no cover" to workaround
        # what seems to be a bug in the coverage package. All the lines below
        # are covered by tests, but some are not reported as such for some
        # reason
        ret = self._trigger_event('connect', sid, environ, run_async=False)
        if ret is not None and ret is not True:  # pragma: no cover
            # the application rejected the connection (returned False or a
            # custom rejection payload)
            del self.sockets[sid]
            self.logger.warning('Application rejected connection')
            return self._unauthorized(ret or None)

        if transport == 'websocket':  # pragma: no cover
            ret = s.handle_get_request(environ, start_response)
            if s.closed and sid in self.sockets:
                # websocket connection ended, so we are done
                del self.sockets[sid]
            return ret
        else:  # pragma: no cover
            s.connected = True
            headers = None
            if self.cookie:
                # return the sid to the client in a cookie; a dict cookie
                # carries explicit attributes, a string is just the name
                if isinstance(self.cookie, dict):
                    headers = [(
                        'Set-Cookie',
                        self._generate_sid_cookie(sid, self.cookie)
                    )]
                else:
                    headers = [(
                        'Set-Cookie',
                        self._generate_sid_cookie(sid, {
                            'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
                        })
                    )]
            try:
                return self._ok(s.poll(), headers=headers,
                                jsonp_index=jsonp_index)
            except exceptions.QueueEmpty:
                return self._bad_request()
def _upgrades(self, sid, transport):
"""Return the list of possible upgrades for a client connection."""
if not self.allow_upgrades or self._get_socket(sid).upgraded or \
transport == 'websocket':
return []
if self._async['websocket'] is None: # pragma: no cover
self._log_error_once(
'The WebSocket transport is not available, you must install a '
'WebSocket server that is compatible with your async mode to '
'enable it. See the documentation for details.',
'no-websocket')
return []
return ['websocket']
def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
if event in self.handlers:
if run_async:
return self.start_background_task(self.handlers[event], *args)
else:
try:
return self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
def _get_socket(self, sid):
"""Return the socket object for a given session."""
try:
s = self.sockets[sid]
except KeyError:
raise KeyError('Session not found')
if s.closed:
del self.sockets[sid]
raise KeyError('Session is disconnected')
return s
def _ok(self, packets=None, headers=None, jsonp_index=None):
"""Generate a successful HTTP response."""
if packets is not None:
if headers is None:
headers = []
headers += [('Content-Type', 'text/plain; charset=UTF-8')]
return {'status': '200 OK',
'headers': headers,
'response': payload.Payload(packets=packets).encode(
jsonp_index=jsonp_index).encode('utf-8')}
else:
return {'status': '200 OK',
'headers': [('Content-Type', 'text/plain')],
'response': b'OK'}
def _bad_request(self, message=None):
"""Generate a bad request HTTP error response."""
if message is None:
message = 'Bad Request'
message = packet.Packet.json.dumps(message)
return {'status': '400 BAD REQUEST',
'headers': [('Content-Type', 'text/plain')],
'response': message.encode('utf-8')}
def _method_not_found(self):
"""Generate a method not found HTTP error response."""
return {'status': '405 METHOD NOT FOUND',
'headers': [('Content-Type', 'text/plain')],
'response': b'Method Not Found'}
def _unauthorized(self, message=None):
"""Generate a unauthorized HTTP error response."""
if message is None:
message = 'Unauthorized'
message = packet.Packet.json.dumps(message)
return {'status': '401 UNAUTHORIZED',
'headers': [('Content-Type', 'application/json')],
'response': message.encode('utf-8')}
def _cors_allowed_origins(self, environ):
default_origins = []
if 'wsgi.url_scheme' in environ and 'HTTP_HOST' in environ:
default_origins.append('{scheme}://{host}'.format(
scheme=environ['wsgi.url_scheme'], host=environ['HTTP_HOST']))
if 'HTTP_X_FORWARDED_PROTO' in environ or \
'HTTP_X_FORWARDED_HOST' in environ:
scheme = environ.get(
'HTTP_X_FORWARDED_PROTO',
environ['wsgi.url_scheme']).split(',')[0].strip()
default_origins.append('{scheme}://{host}'.format(
scheme=scheme, host=environ.get(
'HTTP_X_FORWARDED_HOST', environ['HTTP_HOST']).split(
',')[0].strip()))
if self.cors_allowed_origins is None:
allowed_origins = default_origins
elif self.cors_allowed_origins == '*':
allowed_origins = None
elif isinstance(self.cors_allowed_origins, str):
allowed_origins = [self.cors_allowed_origins]
elif callable(self.cors_allowed_origins):
origin = environ.get('HTTP_ORIGIN')
allowed_origins = [origin] \
if self.cors_allowed_origins(origin) else []
else:
allowed_origins = self.cors_allowed_origins
return allowed_origins
def _cors_headers(self, environ):
"""Return the cross-origin-resource-sharing headers."""
if self.cors_allowed_origins == []:
# special case, CORS handling is completely disabled
return []
headers = []
allowed_origins = self._cors_allowed_origins(environ)
if 'HTTP_ORIGIN' in environ and \
(allowed_origins is None or environ['HTTP_ORIGIN'] in
allowed_origins):
headers = [('Access-Control-Allow-Origin', environ['HTTP_ORIGIN'])]
if environ['REQUEST_METHOD'] == 'OPTIONS':
headers += [('Access-Control-Allow-Methods', 'OPTIONS, GET, POST')]
if 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in environ:
headers += [('Access-Control-Allow-Headers',
environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])]
if self.cors_credentials:
headers += [('Access-Control-Allow-Credentials', 'true')]
return headers
def _gzip(self, response):
"""Apply gzip compression to a response."""
bytesio = io.BytesIO()
with gzip.GzipFile(fileobj=bytesio, mode='w') as gz:
gz.write(response)
return bytesio.getvalue()
def _deflate(self, response):
"""Apply deflate compression to a response."""
return zlib.compress(response)
def _log_error_once(self, message, message_key):
"""Log message with logging.ERROR level the first time, then log
with given level."""
if message_key not in self.log_message_keys:
self.logger.error(message + ' (further occurrences of this error '
'will be logged with level INFO)')
self.log_message_keys.add(message_key)
else:
self.logger.info(message)
    def _service_task(self):  # pragma: no cover
        """Monitor connected clients and clean up those that time out.

        Runs forever in a background task; exits only on SystemExit or
        KeyboardInterrupt.
        """
        while True:
            if len(self.sockets) == 0:
                # nothing to do
                self.sleep(self.ping_timeout)
                continue

            # go through the entire client list in a ping interval cycle
            sleep_interval = float(self.ping_timeout) / len(self.sockets)

            try:
                # iterate over the current clients
                # (a copy is used so that sockets removed while sleeping do
                # not break the iteration)
                for s in self.sockets.copy().values():
                    if not s.closing and not s.closed:
                        s.check_ping_timeout()
                    self.sleep(sleep_interval)
            except (SystemExit, KeyboardInterrupt):
                self.logger.info('service task canceled')
                break
            except:
                # an unexpected exception has occurred, log it and continue
                self.logger.exception('service task exception')
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,443
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/async_drivers/asgi.py
|
import os
import sys
import asyncio
from engineio.static_files import get_static_file
class ASGIApp:
    """ASGI application middleware for Engine.IO.

    This middleware dispatches traffic to an Engine.IO application. It can
    also serve a list of static files to the client, or forward unrelated
    HTTP traffic to another ASGI application.

    :param engineio_server: The Engine.IO server. Must be an instance of the
                            ``engineio.AsyncServer`` class.
    :param static_files: A dictionary with static file mapping rules. See the
                         documentation for details on this argument.
    :param other_asgi_app: A separate ASGI app that receives all other
                           traffic.
    :param engineio_path: The endpoint where the Engine.IO application should
                          be installed. The default value is appropriate for
                          most cases.
    :param on_startup: function to be called on application startup; can be
                       coroutine
    :param on_shutdown: function to be called on application shutdown; can be
                        coroutine

    Example usage::

        import engineio
        import uvicorn

        eio = engineio.AsyncServer()
        app = engineio.ASGIApp(eio, static_files={
            '/': {'content_type': 'text/html', 'filename': 'index.html'},
            '/index.html': {'content_type': 'text/html',
                            'filename': 'index.html'},
        })
        uvicorn.run(app, '127.0.0.1', 5000)
    """
    def __init__(self, engineio_server, other_asgi_app=None,
                 static_files=None, engineio_path='engine.io',
                 on_startup=None, on_shutdown=None):
        self.engineio_server = engineio_server
        self.other_asgi_app = other_asgi_app
        self.engineio_path = engineio_path
        # normalize the endpoint so it always has the form '/name/'
        if not self.engineio_path.startswith('/'):
            self.engineio_path = '/' + self.engineio_path
        if not self.engineio_path.endswith('/'):
            self.engineio_path += '/'
        self.static_files = static_files or {}
        self.on_startup = on_startup
        self.on_shutdown = on_shutdown

    async def __call__(self, scope, receive, send):
        """Dispatch an incoming ASGI connection.

        Engine.IO traffic goes to the Engine.IO server; lifespan events,
        static files and other HTTP traffic are routed to the appropriate
        handler, with a 404 as the fallback.
        """
        if scope['type'] in ['http', 'websocket'] and \
                scope['path'].startswith(self.engineio_path):
            await self.engineio_server.handle_request(scope, receive, send)
        else:
            static_file = get_static_file(scope['path'], self.static_files) \
                if scope['type'] == 'http' and self.static_files else None
            if scope['type'] == 'lifespan':
                await self.lifespan(scope, receive, send)
            elif static_file and os.path.exists(static_file['filename']):
                await self.serve_static_file(static_file, receive, send)
            elif self.other_asgi_app is not None:
                await self.other_asgi_app(scope, receive, send)
            else:
                await self.not_found(receive, send)

    async def serve_static_file(self, static_file, receive,
                                send):  # pragma: no cover
        """Send the contents of a mapped static file as the HTTP response."""
        event = await receive()
        if event['type'] == 'http.request':
            with open(static_file['filename'], 'rb') as f:
                payload = f.read()
            await send({'type': 'http.response.start',
                        'status': 200,
                        'headers': [(b'Content-Type', static_file[
                            'content_type'].encode('utf-8'))]})
            await send({'type': 'http.response.body',
                        'body': payload})

    async def lifespan(self, scope, receive, send):
        """Handle the ASGI lifespan protocol, invoking the configured
        startup/shutdown callbacks (plain functions or coroutines).
        """
        if self.other_asgi_app is not None and self.on_startup is None and \
                self.on_shutdown is None:
            # let the other ASGI app handle lifespan events
            await self.other_asgi_app(scope, receive, send)
            return
        while True:
            event = await receive()
            if event['type'] == 'lifespan.startup':
                if self.on_startup:
                    try:
                        await self.on_startup() \
                            if asyncio.iscoroutinefunction(self.on_startup) \
                            else self.on_startup()
                    except:
                        # intentionally broad: any error in the user callback
                        # must be reported as a startup failure
                        await send({'type': 'lifespan.startup.failed'})
                        return
                await send({'type': 'lifespan.startup.complete'})
            elif event['type'] == 'lifespan.shutdown':
                if self.on_shutdown:
                    try:
                        await self.on_shutdown() \
                            if asyncio.iscoroutinefunction(self.on_shutdown) \
                            else self.on_shutdown()
                    except:
                        # intentionally broad: report any error as a
                        # shutdown failure
                        await send({'type': 'lifespan.shutdown.failed'})
                        return
                await send({'type': 'lifespan.shutdown.complete'})
                return

    async def not_found(self, receive, send):
        """Return a 404 Not Found error to the client."""
        await send({'type': 'http.response.start',
                    'status': 404,
                    'headers': [(b'Content-Type', b'text/plain')]})
        await send({'type': 'http.response.body',
                    'body': b'Not Found'})
async def translate_request(scope, receive, send):
    """Build a WSGI-style environ dictionary from an ASGI scope.

    Consumes the request body events for HTTP requests; returns an empty
    dict for event types other than ``http.request`` and
    ``websocket.connect``.
    """
    class AwaitablePayload(object):  # pragma: no cover
        """Awaitable stand-in for ``wsgi.input`` over an in-memory body."""

        def __init__(self, payload):
            self.payload = payload or b''

        async def read(self, length=None):
            if length is None:
                chunk, self.payload = self.payload, b''
            else:
                chunk = self.payload[:length]
                self.payload = self.payload[length:]
            return chunk

    event = await receive()
    body = b''
    if event['type'] == 'http.request':
        body += event.get('body') or b''
        # drain any additional body events
        while event.get('more_body'):
            event = await receive()
            if event['type'] == 'http.request':
                body += event.get('body') or b''
    elif event['type'] != 'websocket.connect':
        return {}

    raw_uri = scope['path'].encode('utf-8')
    if scope.get('query_string'):
        raw_uri += b'?' + scope['query_string']
    environ = {
        'wsgi.input': AwaitablePayload(body),
        'wsgi.errors': sys.stderr,
        'wsgi.version': (1, 0),
        'wsgi.async': True,
        'wsgi.multithread': False,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
        'SERVER_SOFTWARE': 'asgi',
        'REQUEST_METHOD': scope.get('method', 'GET'),
        'PATH_INFO': scope['path'],
        'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'),
        'RAW_URI': raw_uri.decode('utf-8'),
        'SCRIPT_NAME': '',
        'SERVER_PROTOCOL': 'HTTP/1.1',
        'REMOTE_ADDR': '127.0.0.1',
        'REMOTE_PORT': '0',
        'SERVER_NAME': 'asgi',
        'SERVER_PORT': '0',
        'asgi.receive': receive,
        'asgi.send': send,
        'asgi.scope': scope,
    }

    for name, value in scope['headers']:
        name = name.upper().decode('utf-8')
        value = value.decode('utf-8')
        if name == 'CONTENT-TYPE':
            environ['CONTENT_TYPE'] = value
        elif name == 'CONTENT-LENGTH':
            environ['CONTENT_LENGTH'] = value
        else:
            key = 'HTTP_%s' % name.replace('-', '_')
            if key in environ:
                # repeated headers are folded into a comma-separated value
                value = '%s,%s' % (environ[key], value)
            environ[key] = value

    environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
    return environ
async def make_response(status, headers, payload, environ):
    """Deliver an HTTP or WebSocket response over the ASGI interface.

    For websocket scopes a ``'200 ...'`` status accepts the connection and
    anything else closes it; for HTTP scopes a regular response is sent.
    """
    encoded_headers = [(name.encode('utf-8'), value.encode('utf-8'))
                       for name, value in headers]
    send = environ['asgi.send']
    if environ['asgi.scope']['type'] == 'websocket':
        if status.startswith('200 '):
            await send({'type': 'websocket.accept',
                        'headers': encoded_headers})
        elif payload:
            reason = payload.decode('utf-8') \
                if isinstance(payload, bytes) else str(payload)
            await send({'type': 'websocket.close', 'reason': reason})
        else:
            await send({'type': 'websocket.close'})
        return
    await send({'type': 'http.response.start',
                'status': int(status.split(' ')[0]),
                'headers': encoded_headers})
    await send({'type': 'http.response.body',
                'body': payload})
class WebSocket(object):  # pragma: no cover
    """
    This wrapper class provides an asgi WebSocket interface that is
    somewhat compatible with eventlet's implementation.
    """
    def __init__(self, handler, server):
        # ``server`` is accepted for interface parity with other async
        # drivers but is not stored or used here -- NOTE(review): confirm
        self.handler = handler
        self.asgi_receive = None
        self.asgi_send = None

    async def __call__(self, environ):
        """Accept the websocket connection and run the handler on it."""
        self.asgi_receive = environ['asgi.receive']
        self.asgi_send = environ['asgi.send']
        await self.asgi_send({'type': 'websocket.accept'})
        await self.handler(self)

    async def close(self):
        """Close the websocket connection."""
        await self.asgi_send({'type': 'websocket.close'})

    async def send(self, message):
        """Send a message; bytes go as a binary frame, anything else as
        text.
        """
        msg_bytes = None
        msg_text = None
        if isinstance(message, bytes):
            msg_bytes = message
        else:
            msg_text = message
        await self.asgi_send({'type': 'websocket.send',
                              'bytes': msg_bytes,
                              'text': msg_text})

    async def wait(self):
        """Wait for and return the next incoming message.

        :raises IOError: when a non-message event (such as a disconnect)
                         arrives.
        """
        event = await self.asgi_receive()
        if event['type'] != 'websocket.receive':
            raise IOError()
        return event.get('bytes') or event.get('text')
# driver interface consumed by the engineio server: flags this driver as
# asyncio-based and exposes the request/response translators and the
# WebSocket wrapper defined above
_async = {
    'asyncio': True,
    'translate_request': translate_request,
    'make_response': make_response,
    'websocket': WebSocket,
}
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,444
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/server/wsgi/simple.py
|
from flask import Flask, render_template

import engineio

# set async_mode to 'threading', 'eventlet' or 'gevent' to force a mode
# else, the best mode is selected automatically from what's installed
async_mode = None

eio = engineio.Server(async_mode=async_mode)
app = Flask(__name__)
# wrap the Flask WSGI app so Engine.IO traffic is intercepted by eio
app.wsgi_app = engineio.WSGIApp(eio, app.wsgi_app)


@app.route('/')
def index():
    # serve the example page
    return render_template('simple.html')


@eio.on('connect')
def connect(sid, environ):
    print("connect ", sid)


@eio.on('message')
def message(sid, data):
    # echo an acknowledgement back to the sender
    print('message from', sid, data)
    eio.send(sid, 'Thank you for your message!')


@eio.on('disconnect')
def disconnect(sid):
    print('disconnect ', sid)


if __name__ == '__main__':
    # pick the deployment strategy matching the async mode eio selected
    if eio.async_mode == 'threading':
        # deploy with Werkzeug
        app.run(threaded=True)
    elif eio.async_mode == 'eventlet':
        # deploy with eventlet
        import eventlet
        from eventlet import wsgi
        wsgi.server(eventlet.listen(('', 5000)), app)
    elif eio.async_mode == 'gevent':
        # deploy with gevent, with websocket support when
        # gevent-websocket is installed
        from gevent import pywsgi
        try:
            from geventwebsocket.handler import WebSocketHandler
            websocket = True
        except ImportError:
            websocket = False
        if websocket:
            pywsgi.WSGIServer(('', 5000), app,
                              handler_class=WebSocketHandler).serve_forever()
        else:
            pywsgi.WSGIServer(('', 5000), app).serve_forever()
    else:
        print('Unknown async_mode: ' + eio.async_mode)
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,445
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/asyncio/test_async_tornado.py
|
import asyncio
import sys
import unittest
from unittest import mock
try:
import tornado.web
except ImportError:
pass
from engineio.async_drivers import tornado as async_tornado
def _run(coro):
"""Run the given coroutine."""
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(sys.version_info < (3, 5), 'only for Python 3.5+')
class TornadoTests(unittest.TestCase):
    """Tests for the tornado async driver (engineio.async_drivers.tornado)."""
    def test_get_tornado_handler(self):
        # The driver must hand back a tornado WebSocketHandler subclass
        # bound to the given engineio server.
        mock_server = mock.MagicMock()
        handler = async_tornado.get_tornado_handler(mock_server)
        assert issubclass(handler, tornado.websocket.WebSocketHandler)
    def test_translate_request(self):
        # Translate a fake tornado request into a WSGI-style environ and
        # verify each expected key/value pair.
        mock_handler = mock.MagicMock()
        mock_handler.request.method = 'PUT'
        mock_handler.request.path = '/foo/bar'
        mock_handler.request.query = 'baz=1'
        mock_handler.request.version = '1.1'
        mock_handler.request.headers = {
            'a': 'b',
            'c': 'd',
            'content-type': 'application/json',
            'content-length': 123,
        }
        mock_handler.request.body = b'hello world'
        environ = async_tornado.translate_request(mock_handler)
        expected_environ = {
            'REQUEST_METHOD': 'PUT',
            'PATH_INFO': '/foo/bar',
            'QUERY_STRING': 'baz=1',
            'CONTENT_TYPE': 'application/json',
            'CONTENT_LENGTH': 123,
            'HTTP_A': 'b',
            'HTTP_C': 'd',
            'RAW_URI': '/foo/bar?baz=1',
            'SERVER_PROTOCOL': 'HTTP/1.1',
            # 'wsgi.input': b'hello world',
            'tornado.handler': mock_handler,
        }
        for k, v in expected_environ.items():
            assert v == environ[k]
        # The body is exposed as an awaitable stream; read it in two
        # pieces to exercise both partial and remainder reads.
        payload = _run(environ['wsgi.input'].read(1))
        payload += _run(environ['wsgi.input'].read())
        assert payload == b'hello world'
    def test_make_response(self):
        # make_response must forward status code, headers and body to the
        # tornado handler stashed in the environ.
        mock_handler = mock.MagicMock()
        mock_environ = {'tornado.handler': mock_handler}
        async_tornado.make_response(
            '202 ACCEPTED', [('foo', 'bar')], b'payload', mock_environ
        )
        mock_handler.set_status.assert_called_once_with(202)
        mock_handler.set_header.assert_called_once_with('foo', 'bar')
        mock_handler.write.assert_called_once_with(b'payload')
        mock_handler.finish.assert_called_once_with()
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,446
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/payload.py
|
import urllib
from . import packet
class Payload(object):
    """Engine.IO payload: an ordered collection of packets that travel
    together in one HTTP request or response body.
    """
    # Cap on packets accepted in one payload, bounding the work a single
    # client request can trigger.
    max_decode_packets = 16

    def __init__(self, packets=None, encoded_payload=None):
        """
        :param packets: list of packet objects to include in the payload.
        :param encoded_payload: a transmitted payload to decode; when
                                given, the decoded packets replace
                                ``packets``.
        """
        self.packets = packets or []
        if encoded_payload is not None:
            self.decode(encoded_payload)

    def encode(self, jsonp_index=None):
        """Encode the payload for transmission.

        :param jsonp_index: when not ``None``, wrap the result in a JSONP
                            callback carrying this index.
        """
        # Packets are base64-text encoded and joined with the 0x1e record
        # separator.
        encoded_payload = ''
        for pkt in self.packets:
            if encoded_payload:
                encoded_payload += '\x1e'
            encoded_payload += pkt.encode(b64=True)
        if jsonp_index is not None:
            encoded_payload = '___eio[' + \
                str(jsonp_index) + \
                ']("' + \
                encoded_payload.replace('"', '\\"') + \
                '");'
        return encoded_payload

    def decode(self, encoded_payload):
        """Decode a transmitted payload into ``self.packets``.

        :raises ValueError: if the payload carries more than
                            ``max_decode_packets`` packets.
        """
        self.packets = []
        if len(encoded_payload) == 0:
            return
        # JSONP POST payload starts with 'd='
        if encoded_payload.startswith('d='):
            # Bug fix: import the submodule explicitly.  The module header
            # only does ``import urllib``, which does not guarantee that
            # the ``urllib.parse`` attribute is bound.
            from urllib.parse import parse_qs
            encoded_payload = parse_qs(encoded_payload)['d'][0]
        encoded_packets = encoded_payload.split('\x1e')
        if len(encoded_packets) > self.max_decode_packets:
            raise ValueError('Too many packets in payload')
        self.packets = [packet.Packet(encoded_packet=encoded_packet)
                        for encoded_packet in encoded_packets]
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,447
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/tests/common/test_middleware.py
|
import os
import unittest
from unittest import mock
import engineio
class TestWSGIApp(unittest.TestCase):
    """Tests for engineio.WSGIApp routing, static file serving, path
    normalization and the legacy Middleware alias."""
    def test_wsgi_routing(self):
        # Paths outside the engineio endpoint go to the wrapped WSGI app.
        mock_wsgi_app = mock.MagicMock()
        mock_eio_app = 'foo'
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app)
        environ = {'PATH_INFO': '/foo'}
        start_response = "foo"
        m(environ, start_response)
        mock_wsgi_app.assert_called_once_with(environ, start_response)
    def test_eio_routing(self):
        # Requests under the default /engine.io/ prefix are dispatched to
        # the engineio app's handle_request.
        mock_wsgi_app = 'foo'
        mock_eio_app = mock.Mock()
        mock_eio_app.handle_request = mock.MagicMock()
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app)
        environ = {'PATH_INFO': '/engine.io/'}
        start_response = "foo"
        m(environ, start_response)
        mock_eio_app.handle_request.assert_called_once_with(
            environ, start_response
        )
    def test_static_files(self):
        # Exercise the static_files map: exact file entries, per-entry
        # content types, directory entries and index.html fallbacks.
        # The fixture files (index.html etc.) live next to this test file.
        root_dir = os.path.dirname(__file__)
        m = engineio.WSGIApp(
            'foo',
            None,
            static_files={
                '/': root_dir + '/index.html',
                '/foo': {
                    'content_type': 'text/plain',
                    'filename': root_dir + '/index.html',
                },
                '/static': root_dir,
                '/static/test/': root_dir + '/',
                '/static2/test/': {'filename': root_dir + '/',
                                   'content_type': 'image/gif'},
            },
        )
        def check_path(path, status_code, content_type, body):
            # Helper: request `path` and assert on status, content type
            # and body of the response.
            environ = {'PATH_INFO': path}
            start_response = mock.MagicMock()
            r = m(environ, start_response)
            assert r == [body.encode('utf-8')]
            start_response.assert_called_once_with(
                status_code, [('Content-Type', content_type)]
            )
        check_path('/', '200 OK', 'text/html', '<html></html>\n')
        check_path('/foo', '200 OK', 'text/plain', '<html></html>\n')
        check_path('/foo/bar', '404 Not Found', 'text/plain', 'Not Found')
        check_path(
            '/static/index.html', '200 OK', 'text/html', '<html></html>\n'
        )
        check_path(
            '/static/foo.bar', '404 Not Found', 'text/plain', 'Not Found'
        )
        check_path(
            '/static/test/index.html', '200 OK', 'text/html', '<html></html>\n'
        )
        check_path('/static/test/', '200 OK', 'text/html', '<html></html>\n')
        check_path('/static/test/index.html', '200 OK', 'text/html',
                   '<html></html>\n')
        check_path('/static/test/files/', '200 OK', 'text/html',
                   '<html>file</html>\n')
        check_path('/static/test/files/file.txt', '200 OK', 'text/plain',
                   'file\n')
        check_path('/static/test/files/x.html', '404 Not Found', 'text/plain',
                   'Not Found')
        # entries with an explicit content_type apply it to every file
        # under that prefix, except 404 responses
        check_path('/static2/test/', '200 OK', 'image/gif', '<html></html>\n')
        check_path('/static2/test/index.html', '200 OK', 'image/gif',
                   '<html></html>\n')
        check_path('/static2/test/files/', '200 OK', 'image/gif',
                   '<html>file</html>\n')
        check_path('/static2/test/files/file.txt', '200 OK', 'image/gif',
                   'file\n')
        check_path('/static2/test/files/x.html', '404 Not Found', 'text/plain',
                   'Not Found')
        check_path('/bar/foo', '404 Not Found', 'text/plain', 'Not Found')
        check_path('', '404 Not Found', 'text/plain', 'Not Found')
        # the '' key overrides the default index file for directories,
        # in plain-string and dict forms
        m.static_files[''] = 'index.html'
        check_path('/static/test/', '200 OK', 'text/html', '<html></html>\n')
        m.static_files[''] = {'filename': 'index.html'}
        check_path('/static/test/', '200 OK', 'text/html', '<html></html>\n')
        m.static_files[''] = {
            'filename': 'index.html',
            'content_type': 'image/gif',
        }
        check_path('/static/test/', '200 OK', 'image/gif', '<html></html>\n')
        m.static_files[''] = {'filename': 'test.gif'}
        check_path('/static/test/', '404 Not Found', 'text/plain', 'Not Found')
        # with the map emptied, previously valid paths must 404
        m.static_files = {}
        check_path(
            '/static/test/index.html',
            '404 Not Found',
            'text/plain',
            'Not Found',
        )
    def test_404(self):
        # With no wrapped WSGI app and no matching route, a 404 with a
        # plain-text body is returned.
        mock_wsgi_app = None
        mock_eio_app = mock.Mock()
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app)
        environ = {'PATH_INFO': '/foo/bar'}
        start_response = mock.MagicMock()
        r = m(environ, start_response)
        assert r == [b'Not Found']
        start_response.assert_called_once_with(
            "404 Not Found", [('Content-Type', 'text/plain')]
        )
    def test_custom_eio_path(self):
        # A custom engineio_path moves the endpoint: the default
        # /engine.io/ prefix must now 404, while /foo/ is dispatched.
        mock_wsgi_app = None
        mock_eio_app = mock.Mock()
        mock_eio_app.handle_request = mock.MagicMock()
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app, engineio_path='foo')
        environ = {'PATH_INFO': '/engine.io/'}
        start_response = mock.MagicMock()
        r = m(environ, start_response)
        assert r == [b'Not Found']
        start_response.assert_called_once_with(
            "404 Not Found", [('Content-Type', 'text/plain')]
        )
        environ = {'PATH_INFO': '/foo/'}
        m(environ, start_response)
        mock_eio_app.handle_request.assert_called_once_with(
            environ, start_response
        )
    def test_custom_eio_path_slashes(self):
        # engineio_path given with both leading and trailing slashes is
        # normalized to the same endpoint.
        mock_wsgi_app = None
        mock_eio_app = mock.Mock()
        mock_eio_app.handle_request = mock.MagicMock()
        m = engineio.WSGIApp(
            mock_eio_app, mock_wsgi_app, engineio_path='/foo/'
        )
        environ = {'PATH_INFO': '/foo/'}
        start_response = mock.MagicMock()
        m(environ, start_response)
        mock_eio_app.handle_request.assert_called_once_with(
            environ, start_response
        )
    def test_custom_eio_path_leading_slash(self):
        # ... as is a path given with only a leading slash ...
        mock_wsgi_app = None
        mock_eio_app = mock.Mock()
        mock_eio_app.handle_request = mock.MagicMock()
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app, engineio_path='/foo')
        environ = {'PATH_INFO': '/foo/'}
        start_response = mock.MagicMock()
        m(environ, start_response)
        mock_eio_app.handle_request.assert_called_once_with(
            environ, start_response
        )
    def test_custom_eio_path_trailing_slash(self):
        # ... and one given with only a trailing slash.
        mock_wsgi_app = None
        mock_eio_app = mock.Mock()
        mock_eio_app.handle_request = mock.MagicMock()
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app, engineio_path='foo/')
        environ = {'PATH_INFO': '/foo/'}
        start_response = mock.MagicMock()
        m(environ, start_response)
        mock_eio_app.handle_request.assert_called_once_with(
            environ, start_response
        )
    def test_gunicorn_socket(self):
        # A socket provided by gunicorn is re-exposed under
        # 'eventlet.input' so the eventlet websocket driver can find it.
        mock_wsgi_app = None
        mock_eio_app = mock.Mock()
        m = engineio.WSGIApp(mock_eio_app, mock_wsgi_app)
        environ = {'gunicorn.socket': 123, 'PATH_INFO': '/foo/bar'}
        start_response = mock.MagicMock()
        m(environ, start_response)
        assert 'eventlet.input' in environ
        assert environ['eventlet.input'].get_socket() == 123
    def test_legacy_middleware_class(self):
        # The deprecated Middleware alias keeps the old constructor
        # signature and normalizes the engineio path.
        m = engineio.Middleware('eio', 'wsgi', 'eio_path')
        assert m.engineio_app == 'eio'
        assert m.wsgi_app == 'wsgi'
        assert m.static_files == {}
        assert m.engineio_path == '/eio_path/'
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,448
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/packet.py
|
import base64
from engineio import json as _json
# Packet type codes defined by the Engine.IO protocol.
OPEN = 0
CLOSE = 1
PING = 2
PONG = 3
MESSAGE = 4
UPGRADE = 5
NOOP = 6
# Human-readable packet names, indexed by type code.
packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP']
# Payload types that are transmitted as binary frames.
binary_types = (bytes, bytearray)


class Packet(object):
    """A single Engine.IO protocol packet."""
    # JSON codec used for dict/list payloads; a class attribute so that
    # applications can substitute a custom implementation.
    json = _json

    def __init__(self, packet_type=NOOP, data=None, encoded_packet=None):
        """
        :param packet_type: one of the packet type codes above.
        :param data: payload; str, bytes-like, dict or list.
        :param encoded_packet: a wire representation to decode; when
                               given, it overrides the other arguments.
        :raises ValueError: if a binary payload is combined with a packet
                            type other than MESSAGE.
        """
        self.packet_type = packet_type
        self.data = data
        # Only bytes-like payloads count as binary; anything else
        # (str, dict, list, None, ...) is sent as text.
        self.binary = isinstance(data, binary_types)
        if self.binary and self.packet_type != MESSAGE:
            raise ValueError('Binary packets can only be of type MESSAGE')
        if encoded_packet is not None:
            self.decode(encoded_packet)

    def encode(self, b64=False):
        """Return the wire representation of this packet.

        :param b64: when True, binary payloads are encoded as base64 text
                    prefixed with ``'b'``; text payloads are unaffected.
        """
        if not self.binary:
            # Text frame: type digit followed by the payload, with dicts
            # and lists serialized as compact JSON.
            parts = [str(self.packet_type)]
            payload = self.data
            if isinstance(payload, str):
                parts.append(payload)
            elif isinstance(payload, (dict, list)):
                parts.append(self.json.dumps(payload,
                                             separators=(',', ':')))
            elif payload is not None:
                parts.append(str(payload))
            return ''.join(parts)
        if b64:
            return 'b' + base64.b64encode(self.data).decode('utf-8')
        return self.data

    def decode(self, encoded_packet):
        """Populate this packet from a wire representation.

        :raises ValueError: if a zero-length text packet is given.
        """
        self.binary = isinstance(encoded_packet, binary_types)
        if not self.binary and len(encoded_packet) == 0:
            raise ValueError('Invalid empty packet received')
        if not self.binary and encoded_packet[0] == 'b':
            # base64-wrapped binary payload carried in a text frame
            self.binary = True
            self.packet_type = MESSAGE
            self.data = base64.b64decode(encoded_packet[1:])
            return
        if self.binary:
            # raw binary frames are always MESSAGE packets
            self.packet_type = MESSAGE
            self.data = encoded_packet if isinstance(encoded_packet, bytes) \
                else bytes(encoded_packet)
            return
        self.packet_type = int(encoded_packet[0])
        try:
            parsed = self.json.loads(encoded_packet[1:])
            if isinstance(parsed, int):
                # do not allow integer payloads, see
                # github.com/miguelgrinberg/python-engineio/issues/75
                # for background on this decision
                raise ValueError
        except ValueError:
            # not JSON (or a bare integer): keep the raw string
            parsed = encoded_packet[1:]
        self.data = parsed
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,449
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/async_drivers/gevent_uwsgi.py
|
import gevent
from gevent import queue
from gevent.event import Event
from gevent import selectors
import uwsgi
# True when the running uWSGI build exposes the native websocket API.
_websocket_available = hasattr(uwsgi, 'websocket_handshake')
class Thread(gevent.Greenlet):  # pragma: no cover
    """
    This wrapper class provides gevent Greenlet interface that is compatible
    with the standard library's Thread class.
    """
    def __init__(self, target, args=None, kwargs=None):
        # Bug fix: the previous defaults ([] and {}) were mutable default
        # arguments, shared across every call; use None sentinels instead.
        super().__init__(target, *(args or ()), **(kwargs or {}))

    def _run(self):
        # Greenlet entry point; delegate to the configured run() method.
        return self.run()
class uWSGIWebSocket(object):  # pragma: no cover
    """
    This wrapper class provides a uWSGI WebSocket interface that is
    compatible with eventlet's implementation.
    """
    def __init__(self, handler, server):
        # handler: the engineio app callable to invoke once the websocket
        # handshake completes; the server argument is not used here.
        self.app = handler
        self._sock = None
        # incoming messages buffered between wait() calls
        self.received_messages = []
    def __call__(self, environ, start_response):
        # Perform the websocket handshake, then run the app.  Two modes:
        # on uWSGI >= 2.1.x a request context allows websocket API calls
        # from any greenlet; on older versions a helper greenlet plus an
        # event/queue pair emulates that from the main greenlet.
        self._sock = uwsgi.connection_fd()
        self.environ = environ
        uwsgi.websocket_handshake()
        self._req_ctx = None
        if hasattr(uwsgi, 'request_context'):
            # uWSGI >= 2.1.x with support for api access across-greenlets
            self._req_ctx = uwsgi.request_context()
        else:
            # use event and queue for sending messages
            self._event = Event()
            self._send_queue = queue.Queue()
            # spawn a select greenlet
            def select_greenlet_runner(fd, event):
                """Sets event when data becomes available to read on fd."""
                sel = selectors.DefaultSelector()
                sel.register(fd, selectors.EVENT_READ)
                try:
                    while True:
                        sel.select()
                        event.set()
                except gevent.GreenletExit:
                    sel.unregister(fd)
            self._select_greenlet = gevent.spawn(
                select_greenlet_runner,
                self._sock,
                self._event)
        self.app(self)
    def close(self):
        """Disconnects uWSGI from the client."""
        if self._req_ctx is None:
            # better kill it here in case wait() is not called again
            self._select_greenlet.kill()
            self._event.set()
        uwsgi.disconnect()
    def _send(self, msg):
        """Transmits message either in binary or UTF-8 text mode,
        depending on its type."""
        if isinstance(msg, bytes):
            method = uwsgi.websocket_send_binary
        else:
            method = uwsgi.websocket_send
        if self._req_ctx is not None:
            # with a request context the send may happen from any greenlet
            method(msg, request_context=self._req_ctx)
        else:
            method(msg)
    def _decode_received(self, msg):
        """Returns either bytes or str, depending on message type."""
        if not isinstance(msg, bytes):
            # already decoded - do nothing
            return msg
        # only decode from utf-8 if message is not binary data
        # (48 is ord('0'); a first byte below '0' marks binary data)
        type = ord(msg[0:1])
        if type >= 48:  # no binary
            return msg.decode('utf-8')
        # binary message, don't try to decode
        return msg
    def send(self, msg):
        """Queues a message for sending. Real transmission is done in
        wait method.
        Sends directly if uWSGI version is new enough."""
        if self._req_ctx is not None:
            self._send(msg)
        else:
            self._send_queue.put(msg)
            # wake the wait() loop so it flushes the queue promptly
            self._event.set()
    def wait(self):
        """Waits and returns received messages.
        If running in compatibility mode for older uWSGI versions,
        it also sends messages that have been queued by send().
        A return value of None means that connection was closed.
        This must be called repeatedly. For uWSGI < 2.1.x it must
        be called from the main greenlet."""
        while True:
            if self._req_ctx is not None:
                # modern mode: just block on a receive
                try:
                    msg = uwsgi.websocket_recv(request_context=self._req_ctx)
                except IOError:  # connection closed
                    self.close()
                    return None
                return self._decode_received(msg)
            else:
                # compatibility mode: alternate between draining queued
                # outgoing messages and polling for incoming data
                if self.received_messages:
                    return self.received_messages.pop(0)
                # we wake up at least every 3 seconds to let uWSGI
                # do its ping/ponging
                event_set = self._event.wait(timeout=3)
                if event_set:
                    self._event.clear()
                    # maybe there is something to send
                    msgs = []
                    while True:
                        try:
                            msgs.append(self._send_queue.get(block=False))
                        except gevent.queue.Empty:
                            break
                    for msg in msgs:
                        try:
                            self._send(msg)
                        except IOError:
                            self.close()
                            return None
                # maybe there is something to receive, if not, at least
                # ensure uWSGI does its ping/ponging
                while True:
                    try:
                        msg = uwsgi.websocket_recv_nb()
                    except IOError:  # connection closed
                        self.close()
                        return None
                    if msg:  # message available
                        self.received_messages.append(
                            self._decode_received(msg))
                    else:
                        break
                if self.received_messages:
                    return self.received_messages.pop(0)
# Async primitives handed to the engineio server when it runs under
# gevent on uWSGI; 'websocket' is None when the uWSGI build lacks
# websocket support (see _websocket_available above... checked at import).
_async = {
    'thread': Thread,
    'queue': queue.JoinableQueue,
    'queue_empty': queue.Empty,
    'event': Event,
    'websocket': uWSGIWebSocket if _websocket_available else None,
    'sleep': gevent.sleep,
}
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,450
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/src/engineio/json.py
|
"""JSON-compatible module with sane defaults."""
from json import * # noqa: F401, F403
from json import loads as original_loads
def _safe_int(s):
if len(s) > 100:
raise ValueError('Integer is too large')
return int(s)
def loads(*args, **kwargs):
    """Drop-in ``json.loads`` that limits integer size by default.

    Unless the caller supplies an explicit ``parse_int``, integers are
    parsed through ``_safe_int`` so oversized values are rejected.
    """
    options = dict(kwargs)
    if 'parse_int' not in options:  # pragma: no cover
        options['parse_int'] = _safe_int
    return original_loads(*args, **options)
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
21,393,451
|
miguelgrinberg/python-engineio
|
refs/heads/main
|
/examples/server/tornado/latency.py
|
import os
import tornado.ioloop
from tornado.options import define, options, parse_command_line
import tornado.web
import engineio
# Command-line options for the example server.
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
# Engine.IO server wired into tornado's event loop.
eio = engineio.AsyncServer(async_mode='tornado')
class MainHandler(tornado.web.RequestHandler):
    """Serve the latency test page."""
    def get(self):
        self.render("latency.html")
@eio.on('message')
async def message(sid, data):
    """Answer 'pong' to every client message so the page can time the
    round trip."""
    await eio.send(sid, 'pong')
def main():
    """Parse options, build the tornado application and run it forever."""
    parse_command_line()
    app = tornado.web.Application(
        [
            (r"/", MainHandler),
            # Engine.IO traffic is served by the generated tornado handler.
            (r"/engine.io/", engineio.get_tornado_handler(eio)),
        ],
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        debug=options.debug,
    )
    app.listen(options.port)
    tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
    main()
|
{"/src/engineio/asyncio_client.py": ["/src/engineio/__init__.py"], "/src/engineio/client.py": ["/src/engineio/__init__.py"], "/src/engineio/__init__.py": ["/src/engineio/client.py", "/src/engineio/middleware.py", "/src/engineio/server.py", "/src/engineio/asyncio_server.py", "/src/engineio/asyncio_client.py", "/src/engineio/async_drivers/asgi.py", "/src/engineio/async_drivers/tornado.py"], "/src/engineio/server.py": ["/src/engineio/__init__.py"], "/src/engineio/payload.py": ["/src/engineio/__init__.py"], "/src/engineio/socket.py": ["/src/engineio/__init__.py"], "/src/engineio/asyncio_server.py": ["/src/engineio/__init__.py"], "/src/engineio/async_drivers/tornado.py": ["/src/engineio/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.