index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
17,885
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/tarfile.py
|
/home/jose/anaconda3/lib/python3.6/tarfile.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,886
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/warnings.py
|
/home/jose/anaconda3/lib/python3.6/warnings.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,887
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/tokenize.py
|
/home/jose/anaconda3/lib/python3.6/tokenize.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,888
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/genericpath.py
|
/home/jose/anaconda3/lib/python3.6/genericpath.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,889
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/_bootlocale.py
|
/home/jose/anaconda3/lib/python3.6/_bootlocale.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,890
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/reprlib.py
|
/home/jose/anaconda3/lib/python3.6/reprlib.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,891
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/ntpath.py
|
/home/jose/anaconda3/lib/python3.6/ntpath.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,892
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/os.py
|
/home/jose/anaconda3/lib/python3.6/os.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,893
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/base64.py
|
/home/jose/anaconda3/lib/python3.6/base64.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,894
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/io.py
|
/home/jose/anaconda3/lib/python3.6/io.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,895
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/tempfile.py
|
/home/jose/anaconda3/lib/python3.6/tempfile.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,896
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/token.py
|
/home/jose/anaconda3/lib/python3.6/token.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,897
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/abc.py
|
/home/jose/anaconda3/lib/python3.6/abc.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,898
|
JoseEvanan/frikr
|
refs/heads/master
|
/Frikr/urls.py
|
"""Frikr URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.decorators import login_required
from photos.api import PhotoDetailAPI, PhotoListAPI
from photos.views import CreateView, DetailView, HomeView, PhotoListView, \
UserPhotosView
from users.api import UserDetailAPI, UserListAPI
from users.views import LoginView, LogoutView
# r'' marks a raw-string regex: ^ anchors the start of the URL path, $ the end.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # Photo page URLs
    url(r'^$', HomeView.as_view(), name="photos_home"),
    url(r'^photos/(?P<pk>[0-9]+)/$', DetailView.as_view(), name="photo_detail"),
    url(r'^photos/new/$', CreateView.as_view(), name="photo_create"),
    url(r'^photos/$', PhotoListView.as_view(), name="photos_list"),
    url(r'^my-photos/$', login_required(UserPhotosView.as_view()), name="user_photos"),
    # Photo API URLs
    url(r'^api/1.0/photos/$', PhotoListAPI.as_view(), name='photo_list_api'),
    url(r'^api/1.0/photos/(?P<pk>[0-9]+)/$',
        PhotoDetailAPI.as_view(), name='photo_detail_api'),
    # User page URLs
    url(r'^login$', LoginView.as_view(), name='users_login'),
    url(r'^logout$', LogoutView.as_view(), name='users_logout'),
    # User API URLs
    url(r'^api/1.0/users/$', UserListAPI.as_view(), name='user_list_api'),
    url(r'^api/1.0/users/(?P<pk>[0-9]+)/$', UserDetailAPI.as_view(), name='user_detail_api')
]
# path('accounts/', include('accounts.urls')),
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,899
|
JoseEvanan/frikr
|
refs/heads/master
|
/env/lib/python3.6/copyreg.py
|
/home/jose/anaconda3/lib/python3.6/copyreg.py
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,900
|
JoseEvanan/frikr
|
refs/heads/master
|
/users/serializers.py
|
from django.contrib.auth.models import User
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
    """Manual (non-ModelSerializer) serializer for django.contrib.auth User."""
    id = serializers.ReadOnlyField()  # read-only: assigned by the database
    first_name = serializers.CharField()
    last_name = serializers.CharField()
    username = serializers.CharField()
    email = serializers.EmailField()
    password = serializers.CharField()

    def create(self, validated_data):
        """
        Create a User instance from deserialized data.

        :param validated_data: dict of deserialized user fields
        :return: the saved User object
        """
        # Creating is just updating a brand-new instance.
        return self.update(User(), validated_data)

    def update(self, instance, validated_data):
        """
        Update a User instance from deserialized data.

        :param instance: User object to update
        :param validated_data: dict of deserialized user fields
        :return: the saved User object
        """
        instance.first_name = validated_data.get('first_name')
        instance.last_name = validated_data.get('last_name')
        instance.username = validated_data.get('username')
        instance.email = validated_data.get('email')
        # set_password hashes the raw password before it is stored.
        instance.set_password(validated_data.get('password'))
        instance.save()
        return instance

    def validate_username(self, data):
        """
        Reject a username that is already taken.

        On create (no instance) any existing user with this name is a clash;
        on update, only a clash with a *different* user counts.
        """
        # FIX: dropped the debug print(data); .exists() instead of
        # len(queryset) avoids fetching rows just to count them.
        taken = User.objects.filter(username=data).exists()
        name_is_new = self.instance is None or self.instance.username != data
        if taken and name_is_new:
            raise serializers.ValidationError(
                " Ya existe un usuario con ese username ")
        return data
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,901
|
JoseEvanan/frikr
|
refs/heads/master
|
/photos/views.py
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.http import HttpResponse, HttpResponseNotFound
from django.shortcuts import render, redirect
from django.urls import reverse
from django.views.generic import View, ListView
from django.db.models import Q
from photos.forms import PhotoForm
from photos.models import PUBLIC, Photo
class OnlyAuthenticatedView(View):
    """Gate view: delegate to the parent handler only for logged-in users,
    otherwise redirect to the login page."""

    def get(self, request):
        if request.user.is_authenticated:
            return super(OnlyAuthenticatedView, self).get(request)
        # Not authenticated: send the visitor to the login view.
        return redirect('users_login')

    def post(self, request):
        if request.user.is_authenticated:
            # FIX: was super().get(request) — a POST must reach the POST handler.
            return super(OnlyAuthenticatedView, self).post(request)
        # FIX: the redirect was computed but never returned (the view returned
        # None); also target the login page, as the original comment intended
        # ('home' is not a registered URL name in this project).
        return redirect('users_login')
#####
class PhotosQueryset(object):
    """Mixin that yields the photos the requesting user may see."""

    def get_photos_queryset(self, request):
        # Anonymous visitors: public photos only.  Superusers: everything.
        # Regular users: their own photos plus anything public.
        user = request.user
        if not user.is_authenticated:
            return Photo.objects.filter(visibility=PUBLIC)
        if user.is_superuser:
            return Photo.objects.all()
        return Photo.objects.filter(Q(owner=user) | Q(visibility=PUBLIC))
# Create your views here.
class HomeView(View):
    """Landing page showing the five most recent public photos."""

    def get(self, request):
        """Render the site home page."""
        latest_public = Photo.objects.filter(
            visibility=PUBLIC).order_by('-created_at')
        context = {'photos': latest_public[:5]}
        return render(request, 'photos/home.html', context)
class DetailView(View, PhotosQueryset):
    """Detail page for a single photo the current user is allowed to see."""

    @method_decorator(login_required())
    def get(self, request, pk):
        """
        Render the detail page of one photo.

        :param request: HttpRequest
        :param pk: primary key of the photo
        :return: HttpResponse (404 body when missing or not visible)
        """
        # select_related('owner') turns photo+owner into one JOINed query
        # instead of a second query when the template touches photo.owner.
        possible_photos = self.get_photos_queryset(
            request).filter(pk=pk).select_related('owner')
        photo = possible_photos[0] if len(possible_photos) == 1 else None
        if photo is None:
            return HttpResponseNotFound("NO existe la foto")
        # FIX: removed the debug print(photo.owner) that leaked to stdout.
        context = {
            'photo': photo
        }
        return render(request, 'photos/detail.html', context)
class CreateView(View):
    """Form page to create a new Photo owned by the logged-in user."""

    @method_decorator(login_required())
    def get(self, request):
        """
        Show an empty photo-creation form.

        :param request: HttpRequest
        :return: HttpResponse
        """
        context = {
            'form': PhotoForm()
        }
        return render(request, 'photos/new_photo.html', context)

    @method_decorator(login_required())
    def post(self, request):
        """
        Validate the submitted form and create the photo.

        :param request: HttpRequest
        :return: HttpResponse (same template, with errors or a success link)
        """
        error_messages = []
        success_message = ''
        # Bind the authenticated user as owner *before* validating, so the
        # owner can never come from user input.
        photo_with_owner = Photo()
        photo_with_owner.owner = request.user
        form = PhotoForm(request.POST, instance=photo_with_owner)
        if form.is_valid():
            # FIX: removed the three debug print() calls.
            new_photo = form.save()  # persists and returns the object
            form = PhotoForm()       # show a fresh form after saving
            success_message = "Guardado con éxito!"
            success_message += "<a href='{0}'>".format(
                reverse('photo_detail', args=[new_photo.pk]))
            success_message += "Ver foto"
            success_message += "</a>"
        context = {
            'errors': error_messages,
            'success_message': success_message,
            'form': form
        }
        return render(request, 'photos/new_photo.html', context)
class PhotoListView(View, PhotosQueryset):
    """List every photo the requesting user is allowed to see."""

    def get(self, request):
        """
        Respond with:
          - only public photos for anonymous visitors,
          - the user's own photos plus public ones for authenticated users,
          - every photo for superusers.

        :param request: HttpRequest
        :return: HttpResponse
        """
        visible = self.get_photos_queryset(request)
        return render(request, 'photos/photos_list.html', {'photos': visible})
class UserPhotosView(ListView):
    """Generic ListView narrowed to photos owned by the requesting user."""
    model = Photo
    template_name = 'photos/user_photos.html'

    def get_queryset(self):
        base = super(UserPhotosView, self).get_queryset()
        return base.filter(owner=self.request.user)
|
{"/users/permissions.py": ["/users/api.py"], "/photos/migrations/0003_auto_20181022_0740.py": ["/photos/validators.py"], "/users/api.py": ["/users/serializers.py", "/users/permissions.py"], "/photos/api.py": ["/photos/views.py"], "/Frikr/urls.py": ["/photos/api.py", "/photos/views.py", "/users/api.py"]}
|
17,902
|
mhall119/wallpaper_contest
|
refs/heads/master
|
/submissions/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Contest(models.Model):
    """A wallpaper contest; categories and submissions hang off it."""
    name = models.CharField(max_length=64, blank=False, null=False)

    def __str__(self):
        return self.name
class Category(models.Model):
    """A named grouping of submissions within one contest."""
    name = models.CharField(max_length=64, blank=False, null=False)
    contest = models.ForeignKey(Contest, on_delete=models.CASCADE)

    def __str__(self):
        return self.name
class Submission(models.Model):
    """A wallpaper entry in a contest (imported by the from_flickr command)."""
    image_url = models.URLField(blank=False, null=False)
    title = models.CharField(max_length=128)
    author = models.CharField(max_length=128)
    contest = models.ForeignKey(Contest, on_delete=models.CASCADE)
    # Category is optional; deleting a category detaches its submissions.
    category = models.ForeignKey(Category, on_delete=models.SET_NULL, blank=True, null=True)

    def __str__(self):
        return self.title
class Vote(models.Model):
    """One user's score for one submission."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    submission = models.ForeignKey(Submission, on_delete=models.CASCADE)
    score = models.SmallIntegerField()

    def __str__(self):
        return "%s: %s" % (self.user, self.submission)
|
{"/submissions/management/commands/from_flickr.py": ["/submissions/models.py"], "/submissions/admin.py": ["/submissions/models.py"], "/submissions/views.py": ["/submissions/models.py"]}
|
17,903
|
mhall119/wallpaper_contest
|
refs/heads/master
|
/submissions/management/commands/from_flickr.py
|
from django.core.management.base import BaseCommand, CommandError
from rest_framework.parsers import JSONParser
import requests
import datetime
import simplejson
from submissions.models import Contest, Submission
class Command(BaseCommand):
    """Import images from a Flickr group API URL into a Contest.

    Usage: manage.py from_flickr <contest_id> <url>
    Failures are printed and signalled with small int return codes.
    """
    help = 'Imports images from Flickr group'

    def add_arguments(self, parser):
        parser.add_argument('contest_id', type=int)
        parser.add_argument('url', type=str)

    def handle(self, *args, **options):
        if 'contest_id' not in options:
            print("No contest id in options!")
            return 1
        if 'url' not in options:
            print("No URL in options!")
            return 1
        try:
            contest = Contest.objects.get(id=options['contest_id'])
        except Contest.DoesNotExist:
            # FIX: was a bare `except:` which also swallowed typos and
            # KeyboardInterrupt; only "no such contest" is expected here.
            print("Could not find contest with id %s" % options['contest_id'])
            return 2
        # FIX: removed the unused `more_data` and `photos` locals.
        print("Calling: " + options['url'])
        resp = requests.get(options['url'])
        if resp.status_code != 200:
            print("Request failed: %s" % resp.status_code)
            return 1
        data = simplejson.loads(resp.text)
        if data['stat'] != 'ok':
            print(data['message'])
            return 1
        print("Adding %s photos" % len(data['photos']['photo']))
        for photo in data['photos']['photo']:
            photo_url = "https://c1.staticflickr.com/%(farm)s/%(server)s/%(id)s_%(secret)s_b.jpg" % photo
            # FIX: update_or_create returns (object, created); the original
            # unpacked them swapped, so `if created:` tested the object and
            # the "Added" line printed for every photo, new or updated.
            submission, created = Submission.objects.update_or_create(image_url=photo_url, defaults={
                'title': photo['title'],
                'author': photo['ownername'],
                'contest': contest
            })
            if created:
                print("Added '%s' %s" % (photo['title'], photo_url))
|
{"/submissions/management/commands/from_flickr.py": ["/submissions/models.py"], "/submissions/admin.py": ["/submissions/models.py"], "/submissions/views.py": ["/submissions/models.py"]}
|
17,904
|
mhall119/wallpaper_contest
|
refs/heads/master
|
/submissions/migrations/0001_initial.py
|
# Generated by Django 2.0 on 2018-02-15 15:43
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the submissions app: Category, Contest, Submission, Vote."""

    initial = True

    dependencies = [
        # Vote.user points at whatever AUTH_USER_MODEL is configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
            ],
        ),
        migrations.CreateModel(
            name='Contest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
            ],
        ),
        migrations.CreateModel(
            name='Submission',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image_url', models.URLField()),
                ('title', models.CharField(max_length=128)),
                ('author', models.CharField(max_length=128)),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='submissions.Category')),
                ('contest', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='submissions.Contest')),
            ],
        ),
        migrations.CreateModel(
            name='Vote',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('score', models.SmallIntegerField()),
                ('submission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='submissions.Submission')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Added separately because Category is created before Contest above.
        migrations.AddField(
            model_name='category',
            name='contest',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='submissions.Contest'),
        ),
    ]
|
{"/submissions/management/commands/from_flickr.py": ["/submissions/models.py"], "/submissions/admin.py": ["/submissions/models.py"], "/submissions/views.py": ["/submissions/models.py"]}
|
17,905
|
mhall119/wallpaper_contest
|
refs/heads/master
|
/submissions/admin.py
|
from django.contrib import admin
from django.utils.safestring import mark_safe
from submissions.models import Contest, Category, Submission, Vote
# Register your models here.
# Default ModelAdmin is enough for these two.
admin.site.register(Contest)
admin.site.register(Category)


class SubmissionAdmin(admin.ModelAdmin):
    """Submission changelist with an inline [view] link to the image."""

    def view(self, photo):
        # mark_safe: the <a> markup must not be HTML-escaped in the changelist.
        return mark_safe('<a href="%s" target="_blank">[view]</a>' % photo.image_url)

    list_display = ('title', 'view', 'author', 'contest', 'category')
    list_filter = ('contest', 'category', 'author')


admin.site.register(Submission, SubmissionAdmin)
class VoteAdmin(admin.ModelAdmin):
    """Vote changelist with a computed contest column."""
    list_display = ('submission', 'user', 'score', 'contest')
    list_filter = ('user',)

    def contest(self, vote):
        # Computed column: reach through submission to its contest name.
        return vote.submission.contest.name


admin.site.register(Vote, VoteAdmin)
|
{"/submissions/management/commands/from_flickr.py": ["/submissions/models.py"], "/submissions/admin.py": ["/submissions/models.py"], "/submissions/views.py": ["/submissions/models.py"]}
|
17,906
|
mhall119/wallpaper_contest
|
refs/heads/master
|
/submissions/views.py
|
from django.shortcuts import render, redirect
from submissions.models import Contest, Category, Submission, Vote
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def list_contests(request):
    """Show every contest to a logged-in user."""
    ctx = {'contests': Contest.objects.all()}
    return render(request, 'submissions/list_contests.html', ctx)
@login_required
def show_contest(request, contest_id):
    """Contest dashboard: participation stats, per-voter coverage, and scores."""
    contest = Contest.objects.get(id=contest_id)
    submission_count = contest.submission_set.count()

    # Distinct submitting authors.
    submitter = {s.author for s in contest.submission_set.all()}

    voter = {}
    results = {}
    for vote in Vote.objects.filter(submission__contest=contest):
        name = vote.user.username
        if name not in voter:
            voter[name] = {'count': 0, 'average': 0, 'name': name}
        entry = voter[name]
        # Running mean, folded in one vote at a time.
        entry['average'] = ((entry['average'] * entry['count']) + vote.score) / (entry['count'] + 1)
        entry['count'] += 1
        entry['percent'] = 100 * entry['count'] / submission_count
        sub_id = vote.submission.id
        if sub_id not in results:
            results[sub_id] = {'score': 0, 'photo': vote.submission}
        results[sub_id]['score'] += vote.score

    context = {
        'contest': contest,
        'submission_count': submission_count,
        'submitter_count': len(submitter),
        'vote_count': len(voter),
        'voters': voter,
        'results': results
    }
    return render(request, 'submissions/show_contest.html', context)
@login_required
def vote(request, contest_id):
    """
    GET: show the submissions in *contest_id* the user has not voted on yet.
    POST: record one Vote per submitted `vote_<submission_id>` field, then
    redirect to the contest dashboard.
    """
    contest = Contest.objects.get(id=contest_id)
    if request.method == 'GET':
        submissions = Submission.objects.filter(contest=contest)
        needvotes = []
        for photo in submissions:
            if photo.vote_set.filter(user=request.user).count() < 1:
                needvotes.append(photo)
                photo.index = len(needvotes)  # 1-based position for the template
        context = {
            'contest': contest,
            'submissions': needvotes,
        }
        return render(request, 'submissions/vote.html', context)
    elif request.method == 'POST':
        for entry in request.POST.keys():
            if not entry.startswith('vote_'):
                continue
            submission_id = entry.split('_')[1]
            try:
                # FIX: int() was outside any try, so a malformed POST value
                # raised ValueError and 500'd the whole request.
                score = int(request.POST.get(entry, 0))
            except ValueError:
                continue
            if score < 1:
                continue  # 0 / unset means "no vote"
            try:
                submission = Submission.objects.get(id=submission_id)
                Vote.objects.create(user=request.user, submission=submission, score=score)
            except Exception:
                # FIX: narrowed from a bare `except:` (which also caught
                # KeyboardInterrupt/SystemExit); still best-effort per entry.
                continue
        return redirect('show-contest', contest_id)
|
{"/submissions/management/commands/from_flickr.py": ["/submissions/models.py"], "/submissions/admin.py": ["/submissions/models.py"], "/submissions/views.py": ["/submissions/models.py"]}
|
17,908
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/serializers.py
|
from rest_framework import serializers
from .models import PlayList,Movie,Profile
from django.contrib.auth import get_user_model
User = get_user_model()
class MovieSerializer(serializers.ModelSerializer):
    """Straight ModelSerializer exposing every Movie field."""
    class Meta:
        model = Movie
        fields = '__all__'
class PlayListSerializer(serializers.ModelSerializer):
    """PlayList with its movies nested read-only via MovieSerializer."""
    movies = serializers.SerializerMethodField()

    class Meta:
        model = PlayList
        fields = (
            'id',
            'name',
            'movies'
        )

    def get_movies(self, obj):
        # SerializerMethodField resolver: serialize the full M2M movie set.
        return MovieSerializer(obj.movies.all(), many=True).data
class ProfileSerializer(serializers.ModelSerializer):
    """Minimal Profile representation: id and bio only."""
    class Meta:
        model = Profile
        fields = (
            "id",
            "bio",
        )
# class UserSerializer(serializers.HyperlinkedModelSerializer):
# profile = ProfileSerializer()
# class Meta:
# model = User
# depth = 1
# fields = ('url', 'id', 'username', 'first_name', 'last_name', 'email',
# 'is_superuser', 'is_staff', 'profile')
class UserSerializer(serializers.ModelSerializer):
    """User plus nested Profile; creates both on POST."""
    profile = ProfileSerializer()

    class Meta:
        model = User
        fields = (
            'username',
            'email',
            'password',
            'profile'
        )

    def create(self, validated_data):
        """Create the User (hashed password), then fill in the profile bio."""
        # Pop the nested data first so it never reaches the user manager.
        profile_data = validated_data.pop('profile')
        # FIX: was User.objects.create(..., password=...), which stores the
        # raw password unhashed; create_user() hashes it properly.
        user = User.objects.create_user(
            username=validated_data['username'],
            email=validated_data['email'],
            password=validated_data['password'],
        )
        # FIX: a post_save signal on User already creates the Profile, so a
        # blind Profile.objects.create() would violate the OneToOne unique
        # constraint; fetch-or-create works with or without the signal.
        profile, _ = Profile.objects.get_or_create(user=user)
        profile.bio = profile_data['bio']
        profile.save()
        return user
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,909
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/migrations/0002_playlist_name.py
|
# Generated by Django 3.0.3 on 2020-02-10 17:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional `name` column to PlayList."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='playlist',
            name='name',
            field=models.CharField(blank=True, max_length=40, null=True),
        ),
    ]
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,910
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/api.py
|
from .serializers import MovieSerializer,PlayListSerializer,UserSerializer,ProfileSerializer
from .models import PlayList,Movie
from rest_framework import viewsets, mixins, permissions
from rest_framework import status
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.response import Response
from django.contrib.auth import get_user_model
User = get_user_model()
class MovieAPI(generics.ListAPIView):
    """GET: list every Movie."""
    queryset = Movie.objects.all()
    serializer_class = MovieSerializer
class MoviesAPI(generics.CreateAPIView):
    """POST: create a Movie."""
    queryset = Movie.objects.all()
    serializer_class = MovieSerializer
class PlayListAPI(generics.ListAPIView):
    """GET: list every PlayList with its movies nested."""
    queryset = PlayList.objects.all()
    serializer_class = PlayListSerializer
class MovieCreateAPI(generics.CreateAPIView):
    # NOTE(review): despite the name this endpoint creates PlayList objects,
    # not Movies (both queryset and serializer are PlayList's) — confirm intent.
    queryset = PlayList.objects.all()
    serializer_class = PlayListSerializer
# class UserList(generics.ListCreateAPIView):
class UserList(viewsets.ModelViewSet):
    """
    This viewset automatically provides `list` and `detail` actions.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
    # NOTE(review): no permission_classes are active (earlier attempts are
    # commented out below), so every action here is unauthenticated — confirm
    # that is intentional before deploying.
    # permission_classes = (permissions.IsAuthenticatedOrReadOnly,
    #                       IsSameUserAllowEditionOrReadOnly,)
    # permission_classes = (IsAuthenticatedOrWriteOnly,)
    # serializer_class = UserSerializer
    # queryset = User.objects.all()
    # def post(self, request, format=None):
    #     serializer = UserSerializer(data=request.data)
    #     if serializer.is_valid():
    #         serializer.save()
    #         return Response(serializer.data, status=status.HTTP_201_CREATED)
    #     return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,911
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/models.py
|
from django.db import models
from django.contrib.auth import get_user_model
from django.db.models.signals import post_save
from django.dispatch import receiver
User = get_user_model()
# Create your models here.
class BaseModel(models.Model):
    """Timestamp base for the app's models.

    NOTE(review): there is no `class Meta: abstract = True`, so children use
    multi-table inheritance onto a concrete basemodel table — confirm that is
    intended; an abstract base is the usual pattern for shared timestamps.
    """
    updated_at = models.DateTimeField(auto_now=True)      # touched on every save()
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
class Profile(BaseModel):
    """Extra per-user data, linked one-to-one to the auth user."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # NOTE(review): TextField(default=0) uses an integer default for a text
    # column — presumably default="" was meant; confirm before changing,
    # since altering it generates a migration.
    bio = models.TextField(default=0)

    def __str__(self):
        return self.user.username
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    # Auto-provision a Profile the first time a User row is inserted.
    if created:
        Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    # Keep the related profile persisted whenever the user is saved.
    instance.profile.save()
class Movie(BaseModel):
    """A YouTube video cached locally (populated by core.views.youtube)."""
    movie_id = models.CharField(max_length=50)          # YouTube video id
    movie_url = models.URLField()                       # watch URL
    movie_thumbnail = models.CharField(max_length=200)  # thumbnail image URL
    movie_duration = models.PositiveIntegerField()      # duration in whole minutes
    movie_title = models.CharField(max_length=200)

    def __str__(self):
        return self.movie_title
class PlayList(BaseModel):
    """A named collection of movies."""
    name = models.CharField(max_length=40, null=True, blank=True)
    movies = models.ManyToManyField(Movie)

    def __str__(self):
        # FIX: `name` is nullable, and __str__ must return a str — returning
        # None raised TypeError in the admin and shell for unnamed playlists.
        return self.name or ""
class Category(BaseModel):
    """A named, described grouping of movies."""
    name = models.CharField(max_length=100)
    desc = models.TextField()
    movies = models.ManyToManyField(Movie)

    def __str__(self):
        return self.name
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,912
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/views.py
|
from django.shortcuts import render
import requests
from django.http import HttpResponse
from isodate import parse_duration
from .models import Movie
from django.shortcuts import get_object_or_404
# Create your views here.
# SECURITY(review): API keys are hard-coded in source; move them to settings
# or environment variables and rotate the exposed keys.
API_KEY = "AIzaSyDQnTT1O4JNvLBEWTaCj-65aAU4vQd7A_o"


def index(request):
    """Smoke-test view: query the TMDB company-search API, answer plain text."""
    api_key = '517b0afadd7811dcb5094755c338f7aa'
    # r = requests.get(f'https://api.themoviedb.org/3/movie/550?api_key={ api_key}')
    r = requests.get(f'https://api.themoviedb.org/3/search/company?api_key={api_key}&query=brooklyn&page=1')
    # FIX: removed the debug print(requests) / print(r.json()) calls.
    return HttpResponse("Hello world")  # FIX: typo "wolrd"
def youtube(request):
    """Fetch 9 'flask' videos from the YouTube Data API and cache them as Movie rows.

    NOTE(review): save_youtube() below duplicates this almost line for line —
    consider extracting a shared helper.
    """
    search_url = 'https://www.googleapis.com/youtube/v3/search'
    video_url = 'https://www.googleapis.com/youtube/v3/videos'
    # First call: search returns only video ids.
    search_params = {
        "key": API_KEY,
        'q': 'flask',
        'part': 'snippet',
        'maxResults': 9,
        'type': 'video'
    }
    r = requests.get(search_url, params=search_params)
    results = r.json()['items']
    video_ids = []
    for result in results:
        video_ids.append(result['id']['videoId'])
    # print(video_ids)
    # Second call: fetch snippet + duration details for all ids at once.
    video_params = {
        "key": API_KEY,
        'id': ','.join(video_ids),
        'part': 'snippet,contentDetails',
        'maxResults': 9
    }
    r1 = requests.get(video_url, params=video_params)
    res1 = r1.json()['items']
    videos = []
    for res in res1:
        video_data = {
            'id': res['id'],
            'url': f"https://www.youtube.com/watch?v={ res['id']}",
            'thumbnail': res['snippet']['thumbnails']['high']['url'],
            # ISO-8601 duration string -> whole minutes.
            'duration': int(parse_duration(res['contentDetails']['duration']).total_seconds() // 60),
            'title': res['snippet']['title'],
        }
        try:
            # Reuse the cached row when this video id was imported before.
            obj = Movie.objects.get(movie_id=res['id'])
            print(obj)
        except Movie.DoesNotExist:
            print("not there")
            obj = Movie.objects.create(
                movie_id=res['id'],
                movie_url=f"https://www.youtube.com/watch?v={ res['id']}",
                movie_thumbnail=res['snippet']['thumbnails']['high']['url'],
                movie_duration=int(parse_duration(res['contentDetails']['duration']).total_seconds() // 60),
                movie_title=res['snippet']['title'],
            )
            obj.save()
        # print(video_data)
        videos.append(video_data)
    print(videos)
    return HttpResponse("cd")
def add_to_playlist(request):
    """Demo view: look up one hard-coded Movie and put it into a throwaway
    in-memory list. The response body is a placeholder."""
    movie = Movie.objects.get(movie_id='FW1LOP09RM8')
    print(movie.movie_url, movie.movie_title)
    playlist = [movie]
    # print(playlist)
    return HttpResponse("cd")
def save_youtube():
    """Fetch up to 9 'flask' videos from the YouTube Data API and cache them
    as Movie rows; Celery-callable twin of the youtube() view (no request /
    response involved).

    Fixes: the redundant obj.save() after Movie.objects.create() (create()
    already persists the row) is removed, and each cached field is computed
    once per item.
    """
    search_url = 'https://www.googleapis.com/youtube/v3/search'
    video_url = 'https://www.googleapis.com/youtube/v3/videos'
    search_params = {
        "key": API_KEY,
        'q': 'flask',
        'part': 'snippet',
        'maxResults': 9,
        'type': 'video'
    }
    r = requests.get(search_url, params=search_params)
    video_ids = [result['id']['videoId'] for result in r.json()['items']]
    video_params = {
        "key": API_KEY,
        'id': ','.join(video_ids),
        'part': 'snippet,contentDetails',
        'maxResults': 9
    }
    r1 = requests.get(video_url, params=video_params)
    videos = []
    for res in r1.json()['items']:
        watch_url = f"https://www.youtube.com/watch?v={ res['id']}"
        thumbnail = res['snippet']['thumbnails']['high']['url']
        duration = int(parse_duration(res['contentDetails']['duration']).total_seconds() // 60)
        title = res['snippet']['title']
        video_data = {
            'id': res['id'],
            'url': watch_url,
            'thumbnail': thumbnail,
            'duration': duration,
            'title': title,
        }
        try:
            obj = Movie.objects.get(movie_id=res['id'])
            print(obj)
        except Movie.DoesNotExist:
            print("not there")
            # create() already saves; no extra save() call needed.
            Movie.objects.create(
                movie_id=res['id'],
                movie_url=watch_url,
                movie_thumbnail=thumbnail,
                movie_duration=duration,
                movie_title=title,
            )
        videos.append(video_data)
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,913
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/migrations/0005_profile_bio.py
|
# Generated by Django 3.0.3 on 2020-02-11 14:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Profile.bio.

    NOTE(review): default=0 on a TextField populates existing rows with the
    string "0"; an empty-string default was probably intended. Left unchanged
    because altering an applied migration requires a follow-up migration —
    confirm before editing.
    """
    dependencies = [
        ('core', '0004_profile'),
    ]
    operations = [
        migrations.AddField(
            model_name='profile',
            name='bio',
            field=models.TextField(default=0),
        ),
    ]
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,914
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Movie,PlayList,Category,Profile
# Expose all core models in the Django admin.
for _model in (Movie, PlayList, Category, Profile):
    admin.site.register(_model)
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,915
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/migrations/0001_initial.py
|
# Generated by Django 3.0.3 on 2020-02-10 17:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: BaseModel (created/updated timestamps) plus Movie and
    PlayList as multi-table-inheritance children of BaseModel (each carries a
    OneToOne parent link used as its primary key)."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='BaseModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='Movie',
            fields=[
                ('basemodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseModel')),
                ('movie_id', models.CharField(max_length=50)),
                ('movie_url', models.URLField()),
                ('movie_thumbnail', models.CharField(max_length=200)),
                ('movie_duration', models.PositiveIntegerField()),
                ('movie_title', models.CharField(max_length=200)),
            ],
            bases=('core.basemodel',),
        ),
        migrations.CreateModel(
            name='PlayList',
            fields=[
                ('basemodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseModel')),
                ('movies', models.ManyToManyField(to='core.Movie')),
            ],
            bases=('core.basemodel',),
        ),
    ]
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,916
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/urls.py
|
from django.urls import path
from .views import index,youtube,add_to_playlist
from .api import MovieAPI,PlayListAPI,MovieCreateAPI,MoviesAPI,UserList
from rest_framework import routers
# DRF SimpleRouter supplies the /user/ viewset routes.
router = routers.SimpleRouter()
router.register(r'user', UserList)
urlpatterns = [
    path('',index),
    path('data/',youtube),
    path('play/',add_to_playlist),
    path('movie/',MovieAPI.as_view(),name='movies'),
    # path('user/',UserList,name='user'),
    path('movies/',MoviesAPI.as_view(),name='movies_create'),
    path('movie/create/',MovieCreateAPI.as_view(),name='movie_create'),
    path('playlist/',PlayListAPI.as_view(),name='playlist'),
]
# Append the router-generated user routes after the explicit ones.
urlpatterns += router.urls
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,917
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/tests.py
|
from django.test import TestCase
from datetime import datetime
import json
from .serializers import PlayListSerializer,MovieSerializer
from django.utils import timezone
from .models import BaseModel,Movie,PlayList
import pytest
from rest_framework.test import APITestCase
from rest_framework import status
from django.urls import reverse
# return APIClient()
class TestPlaylist(APITestCase):
    """API and model tests for PlayList."""
    def setUp(self):
        """Create one PlayList with a single attached Movie.

        NOTE(review): self.movie is constructed but never saved — only the
        movie created via self.plays.movies.create() reaches the database;
        confirm the unsaved instance is intentional.
        """
        self.name = "manu"
        self.movie = Movie(
            movie_id = 'id',
            movie_url = "https://www.youtube.com/watch?v={ res['id']}",
            movie_thumbnail = 'snippet',
            movie_duration = 60,
            movie_title = 'snippet',
        )
        self.plays = PlayList(name=self.name)
        self.plays.save()
        # create() on the M2M manager both saves the movie and links it.
        mov = self.plays.movies.create(movie_id = 'id',
            movie_url = "https://www.youtube.com/watch?v={ res['id']}",
            movie_thumbnail = 'snippet',
            movie_duration = 60,
            movie_title = 'snippet',)
        mov.save()
    @pytest.mark.django_db
    def test_can_get_playlist(self):
        # GET /playlist/ should return 200.
        url = reverse('playlist')
        response = self.client.get(url)
        self.assertEqual(response.status_code,status.HTTP_200_OK)
    def test_instance(self):
        self.assertTrue(isinstance(self.plays,PlayList))
    def test_save_method(self):
        # Saving again must still leave at least one row in the table.
        self.plays.save()
        sd = PlayList.objects.all()
        self.assertTrue(len(sd) > 0)
class MovieTestCase(APITestCase):
    """CRUD tests for the Movie API endpoints."""

    def setUp(self):
        """Persist one Movie and prepare valid/invalid POST payloads."""
        self.movie = Movie(
            movie_id='id',
            movie_url="https://www.youtube.com/watch?v={ res['id']}",
            movie_thumbnail='snippet',
            movie_duration=60,
            movie_title='snippet',
        )
        self.valid_payload = {
            'movie_id': 'id',
            'movie_url': "https://www.youtube.com/watch?v=asmvv8w8JY0",
            'movie_thumbnail': 'snippet',
            'movie_duration': 60,
            'movie_title': 'snippet',
        }
        self.invalid_payload = {
            'movie_id': '',
            'movie_url': "https://www.youtube.com/watch?v:{ res['id']}",
            'movie_thumbnail': 'snippet',
            'movie_duration': 60,
            'movie_title': 'snippet',
        }
        self.movie.save()

    def test_instance(self):
        self.assertTrue(isinstance(self.movie, Movie))

    def test_can_save(self):
        Mov = Movie.objects.all()
        self.assertTrue(len(Mov) > 0)

    def test_can_get_all(self):
        """GET must return exactly the serialized contents of the table."""
        url = reverse('movies')
        response = self.client.get(url)
        movies = Movie.objects.all()
        serializer = MovieSerializer(movies, many=True)
        self.assertEqual(response.data, serializer.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create_movie(self):
        """POSTing a valid payload creates a movie (201)."""
        url = reverse('movies_create')
        response = self.client.post(
            url,
            self.valid_payload,
            format='json'
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_create_invalid_puppy(self):
        """POSTing an invalid payload must 400.

        Fixed: the dict is now JSON-encoded — previously it was passed raw
        with content_type='application/json', so the request body was not
        valid JSON at all.
        """
        response = self.client.post(
            reverse('movies_create'),
            json.dumps(self.invalid_payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,918
|
manulangat1/Netflix-clone
|
refs/heads/master
|
/core/tasks.py
|
from celery.task.schedules import crontab
from celery.decorators import periodic_task
import requests
from django.http import HttpResponse
from isodate import parse_duration
from .models import Movie
from django.shortcuts import get_object_or_404
from celery.utils.log import get_task_logger
from .views import save_youtube
logger = get_task_logger(__name__)
API_KEY = "AIzaSyDQnTT1O4JNvLBEWTaCj-65aAU4vQd7A_o"
@periodic_task(run_every=(crontab(minute='*/1')), name="youtube", ignore_result=True)
def youtube():
    """Periodic task: refresh the cached YouTube videos every minute via
    save_youtube().

    Fixed: the docstring and log line previously said "image from Flickr" —
    text copied from a tutorial; this task saves YouTube videos.
    """
    save_youtube()
    logger.info("Saved latest YouTube videos")
|
{"/core/serializers.py": ["/core/models.py"], "/core/api.py": ["/core/serializers.py", "/core/models.py"], "/core/views.py": ["/core/models.py"], "/core/admin.py": ["/core/models.py"], "/core/urls.py": ["/core/views.py", "/core/api.py"], "/core/tests.py": ["/core/serializers.py", "/core/models.py"], "/core/tasks.py": ["/core/models.py", "/core/views.py"]}
|
17,920
|
primal100/serverperformance
|
refs/heads/master
|
/lib/handlers.py
|
import asyncio
from functools import partial
import socketserver
import time
def get_handler(handler_cls, executor, action_cls, done_event):
    """Bind a handler class to its collaborators.

    Returns a factory (functools.partial) that the server calls with the
    remaining per-connection arguments. The first bound argument is a
    human-readable label built from the three collaborators' names.
    """
    label = ' '.join(part.__name__ for part in (handler_cls, executor, action_cls))
    return partial(handler_cls, label, executor, action_cls, done_event)
class SomeMessagesNotProcessed(Exception):
    """Raised at shutdown when fewer messages were processed than received."""
    pass
class Tracker:
    """Counts received/processed messages and reports elapsed processing time.

    Fixes: the counters were class attributes initialised to None, so the
    first received()/processed() call raised TypeError (None += 1); they are
    now per-instance integers. check_finish() also no longer crashes when a
    connection closes before any message arrived (both timestamps None).
    """

    def __init__(self, description):
        self.description = description
        self.first_message_time = None   # set by the first received()
        self.last_message_time = None    # refreshed by every processed()
        self.messages_received = 0
        self.messages_processed = 0

    def received(self):
        """Record an incoming message; start the clock on the first one."""
        self.messages_received += 1
        if not self.first_message_time:
            self.first_message_time = time.time()

    def processed(self):
        """Record a completed message and when it finished."""
        self.messages_processed += 1
        self.last_message_time = time.time()

    def check_finish(self):
        """Return True (printing the timing) when every received message has
        been processed."""
        finished = self.messages_received == self.messages_processed
        if finished and self.last_message_time is not None:
            self._print_time_taken()
        return finished

    def finish(self):
        """Like check_finish(), but raise if messages are still outstanding."""
        finished = self.check_finish()
        if not finished:
            raise SomeMessagesNotProcessed

    def _print_time_taken(self):
        time_taken = self.last_message_time - self.first_message_time
        print(self.description, time_taken)
class SyncServerHandler(socketserver.BaseRequestHandler):
    """Blocking socketserver handler: feeds each received chunk to an executor
    and tracks completion with a Tracker.

    Fixes: BaseRequestHandler.__init__ dispatches setup()/handle()/finish()
    synchronously before returning, so the collaborators must be assigned
    *before* super().__init__ — previously handle() ran with self.executor,
    self.action and self.tracker unset. The read loop also compared bytes to
    '' (str), which never matches the b'' EOF sentinel; it now uses b'' like
    the async handlers. The unused class-level counters (duplicating Tracker)
    were removed.
    """

    sender = None  # peer address, populated by setup()

    def __init__(self, description, executor, action_cls, done_event, *args, **kwargs):
        self.executor = executor
        self.done_event = done_event
        self.action = action_cls()
        self.tracker = Tracker(description)
        # This call runs setup()/handle()/finish() before it returns.
        super(SyncServerHandler, self).__init__(*args, **kwargs)

    def setup(self):
        # client_address only exists once BaseRequestHandler has started.
        super(SyncServerHandler, self).setup()
        self.sender = self.client_address[0]

    def handle(self):
        """Read until the peer closes, dispatching each chunk to the executor."""
        data = None
        while data != b'':
            data = self.request.recv(1024).strip()
            if data:
                self.tracker.received()
                self.executor(self.tracker.processed, self.action.do, data)

    def finish(self):
        self.tracker.finish()
        self.done_event.set()
async def async_server_sync_handler(description, executor, action_cls, done_event, reader, writer):
    """asyncio-streams handler that runs a synchronous action via *executor*
    for every received chunk, then reports completion."""
    tracker = Tracker(description)
    action = action_cls()
    peer = writer.get_extra_info('peername')
    while True:
        chunk = await reader.read(1024)
        if chunk == b'':
            break
        tracker.received()
        executor(tracker.processed, action.do, chunk)
    tracker.finish()
    done_event.set()
async def async_server_async_handler(description, executor, action, done_event, reader, writer):
    """asyncio-streams handler that dispatches the action's coroutine form
    (action.async_do) for every received chunk."""
    tracker = Tracker(description)
    peer = writer.get_extra_info('peername')
    while True:
        chunk = await reader.read(1024)
        if chunk == b'':
            break
        tracker.received()
        executor(tracker.processed, action.async_do, chunk)
    tracker.finish()
    done_event.set()
class AsyncProtocol(asyncio.Protocol):
    """asyncio transport protocol: forwards incoming data to an executor and
    tracks completion with a Tracker.

    NOTE(review): data_received() runs do_action through the executor, and the
    subclasses run the executor *again* inside do_action, so action_done (and
    hence tracker.processed) may fire twice per data_received — confirm the
    intended accounting before relying on the timing output.
    """
    transport = None  # set by connection_made
    def __init__(self, description, executor, action, done_event):
        self.executor = executor
        self.action = action
        self.tracker = Tracker(description)
        self.done_event = done_event
    def connection_made(self, transport):
        self.transport = transport
    def connection_lost(self, exc):
        # Best-effort: report timing if all messages happened to complete.
        self.tracker.check_finish()
    def data_received(self, data):
        self.tracker.received()
        self.executor(self.action_done, self.do_action, data)
    def action_done(self):
        self.tracker.processed()
        if self.transport.is_closing():
            finished = self.tracker.check_finish()
            if finished:
                self.done_event.set()
    def do_action(self, data):
        # Subclasses decide how the action is executed.
        raise NotImplementedError
class AsyncProtocolSyncHandler(AsyncProtocol):
    """Protocol variant that runs the synchronous action (action.do) through
    the executor. See the double-dispatch note on AsyncProtocol."""
    def do_action(self, data):
        self.executor(self.action_done, self.action.do, data)
class AsyncProtocolTaskHandler(AsyncProtocol):
    """Protocol variant that schedules the executor coroutine (wrapping the
    async action) as an asyncio task. See the note on AsyncProtocol."""
    def do_action(self, data):
        asyncio.create_task(self.executor(self.action_done, self.action.async_do, data))
# Handler registries consumed by the test matrix in tests.py.
sync_handlers = (SyncServerHandler,)
async_handlers = (async_server_async_handler, AsyncProtocolTaskHandler)
async_handlers_sync = (async_server_sync_handler, AsyncProtocolSyncHandler)
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,921
|
primal100/serverperformance
|
refs/heads/master
|
/lib/testcases.py
|
import unittest
import os
import socket
import multiprocessing
class BaseTestCase(unittest.TestCase):
    """Shared harness: run a server in a child process, drive it with a simple
    TCP client, and coordinate shutdown via multiprocessing events.

    Fixed: run_client looped over a hard-coded range(0, 100) instead of
    honouring the num_to_send class attribute.
    """

    num_to_send = 100   # messages sent per client run
    msg_size = 100      # bytes per message
    # NOTE: class-level events are shared by every test in the class.
    stop_event = multiprocessing.Event()
    done_event = multiprocessing.Event()

    def run_server(self, *args):
        """Child-process entry point; subclasses must implement."""
        raise NotImplementedError

    def start_server_process(self, s, h, e, a):
        process = multiprocessing.Process(target=self.run_server,
                                          args=(s, h, e, a))
        process.start()

    def wait_done(self):
        """Block (max 30s) until the server signals it processed everything."""
        self.done_event.wait(timeout=30)

    def close_server_process(self):
        self.stop_event.set()

    def stop_server(self):
        self.stop_event.set()

    def run_client(self):
        """Send num_to_send random messages of msg_size bytes to the server."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            # Connect to server and send data
            sock.connect(('localhost', 9999))
            for _ in range(self.num_to_send):
                sock.sendall(os.urandom(self.msg_size))
        finally:
            sock.close()
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,922
|
primal100/serverperformance
|
refs/heads/master
|
/lib/actions.py
|
import aiofile
import asyncio
import binascii
import os
from pathlib import Path
import time
# Tunables shared by the actions below.
chunk_length = 16
sleep_for = 0.001
def bytes_to_chunks(b):
    """Yield successive chunk_length-byte slices of *b*; the last chunk may be
    shorter. Empty input yields nothing."""
    for start in range(0, len(b), chunk_length):
        yield b[start:start + chunk_length]
class Action:
    """Base per-message workload: split a payload into fixed-size chunks and
    process each one. Subclasses implement do_one / async_do_one."""
    def do(self, data):
        """Process *data* synchronously, chunk by chunk."""
        for msg in bytes_to_chunks(data):
            self.do_one(msg)
    async def async_do(self, data):
        """Process *data* on the event loop, awaiting each chunk."""
        for msg in bytes_to_chunks(data):
            await self.async_do_one(msg)
    def do_one(self, data):
        raise NotImplementedError
    def async_do_one(self, data):
        # NOTE(review): async_do awaits this, so overrides must be
        # coroutines; this placeholder itself is not declared async.
        raise NotImplementedError
class SleepAction(Action):
    """Action that just sleeps per chunk — simulates fixed-cost work."""
    def do_one(self, data):
        time.sleep(sleep_for)
    async def async_do_one(self, data):
        await asyncio.sleep(sleep_for)
class WriteFileAction(Action):
    """Action that writes each chunk to a uniquely named file under ./data.

    Fixes: get_file_path() previously joined the raw bytes returned by
    binascii.hexlify onto a Path (TypeError) — the name is now decoded to
    str; do_one() opens the file in binary mode because the incoming chunks
    are bytes, not text (the async variant is adjusted to match).
    """

    base_dir = Path(__file__).parent.parent.joinpath('data')
    i = 0  # number of paths handed out by this instance

    def __init__(self):
        self.base_dir.mkdir(parents=True, exist_ok=True)

    def get_file_path(self):
        """Return a fresh random path inside base_dir and bump the counter."""
        filename = binascii.hexlify(os.urandom(8)).decode('ascii')
        file_path = self.base_dir.joinpath(filename)
        self.i += 1
        return file_path

    def do_one(self, data):
        # Chunks originate from os.urandom upstream -> binary mode.
        with open(self.get_file_path(), 'wb') as f:
            f.write(data)

    async def async_do_one(self, data):
        filename = self.get_file_path()
        async with aiofile.AIOFile(str(filename), 'wb') as f:
            await f.write(data)
# Action registry consumed by the test matrix.
actions = (SleepAction, WriteFileAction)
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,923
|
primal100/serverperformance
|
refs/heads/master
|
/tests.py
|
from lib.testcases import BaseTestCase
from lib.actions import actions
from lib.servers import sync_servers
from lib.executors import sync_executors
from lib.handlers import sync_handlers
class TestSyncServersSleeping(BaseTestCase):
    """Matrix test: every (server, handler, executor, action) combination."""

    def run_server(self, s, h, e, a):
        """Child-process entry point; re-imports because it runs after
        fork/spawn.

        Fixed: server_cls, executor_cls and handler_cls were all selected
        with ``a`` (the action index); each registry is now indexed with its
        own loop variable.
        """
        from lib.actions import actions
        from lib.servers import sync_servers
        from lib.executors import sync_executors
        from lib.handlers import sync_handlers
        action_cls = actions[a]
        server_cls = sync_servers[s]
        executor_cls = sync_executors[e]
        handler_cls = sync_handlers[h]
        server_cls(handler_cls, executor_cls, action_cls, self.stop_event, self.done_event)

    def test_run(self):
        for s in range(0, len(sync_servers)):
            for h in range(0, len(sync_handlers)):
                for e in range(0, len(sync_executors)):
                    for a in range(0, len(actions)):
                        self.start_server_process(s, h, e, a)
                        self.run_client()
                        self.wait_done()
                        self.stop_server()
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,924
|
primal100/serverperformance
|
refs/heads/master
|
/lib/servers.py
|
import asyncio
import threading
import socketserver
from lib.handlers import get_handler
class BaseServer:
    """Common server plumbing: stores endpoint config and builds the
    per-connection handler factory.

    Fixed: get_handler's signature is (handler_cls, executor, action_cls,
    done_event) — the first two arguments were previously passed swapped.
    """

    server = None

    def __init__(self, handler_cls, executor_cls, action_cls, stop_event, done_event, host="localhost", port=9999):
        self.stop_event = stop_event
        self.host = host
        self.port = port
        self.handler = get_handler(handler_cls, executor_cls, action_cls, done_event)

    def start_server(self):
        raise NotImplementedError

    def stop_server(self):
        raise NotImplementedError
class BaseSyncServer(BaseServer):
    """Blocking socketserver wrapper with a background shutdown watcher."""

    server_cls = None  # concrete socketserver class, supplied by subclasses

    def monitor_event(self):
        """Block until the stop event fires, then shut the server down."""
        self.stop_event.wait()
        self.stop_server()

    def start_monitor_thread(self):
        watcher = threading.Thread(target=self.monitor_event)
        watcher.start()

    def start_server(self):
        """Start the shutdown watcher, then serve until stopped."""
        self.start_monitor_thread()
        self.server = self.server_cls((self.host, self.port), self.handler)
        self.server.serve_forever()

    def stop_server(self):
        self.server.shutdown()
class TCPServer(BaseSyncServer, socketserver.TCPServer):
    # NOTE(review): this class both *is* a socketserver.TCPServer and names
    # one as server_cls, yet BaseSyncServer.start_server instantiates a
    # second, plain socketserver.TCPServer rather than using this instance;
    # the two bases also have incompatible __init__ signatures — confirm
    # which role is intended.
    server_cls = socketserver.TCPServer
class AsyncioCallbackServer(BaseServer):
    """asyncio.start_server-based server with an executor-backed stop watcher.

    Fixes: run_in_executor was given the *result* of stop_event.wait() —
    calling it immediately and blocking the event loop — instead of the
    callable itself; start_server awaited the monitor before the server
    existed, so the server never started and stop_server() would have acted
    on self.server = None; the duplicate wait_closed() after stop_server()
    (which already waits) is gone.
    """

    async def start_monitor_task(self):
        # Wait for the (threading/multiprocessing) stop event off-loop.
        await asyncio.get_running_loop().run_in_executor(None, self.stop_event.wait)
        await self.stop_server()

    async def start_server(self):
        self.server = await asyncio.start_server(self.handler, host=self.host, port=self.port)
        await self.start_monitor_task()

    async def stop_server(self):
        self.server.close()
        await self.server.wait_closed()
class AsyncioProtocolServer(AsyncioCallbackServer):
    """Protocol-factory variant of AsyncioCallbackServer.

    Fixed to match the parent: create the server first, then await the stop
    monitor (previously the monitor was awaited before the server existed).
    """

    async def start_server(self):
        self.server = await asyncio.get_event_loop().create_server(self.handler, host=self.host, port=self.port)
        await self.start_monitor_task()
# Server registries consumed by the test matrix.
sync_servers = (TCPServer,)
async_cb_servers = (AsyncioCallbackServer,)
async_protocol_servers = (AsyncioProtocolServer,)
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,925
|
primal100/serverperformance
|
refs/heads/master
|
/lib/executors.py
|
import asyncio
import os
from concurrent import futures
# Shared pools used by the thread/process executors below.
thread_executor = futures.ThreadPoolExecutor()
process_executor = futures.ProcessPoolExecutor()


def run(cb, func, *args):
    """Run *func* inline, then invoke the no-argument completion callback."""
    func(*args)
    cb()


def run_in_thread(cb, func, *args):
    """Run *func* on the shared thread pool; call cb() when it completes.

    Fixed: Future.add_done_callback passes the future to its callback, but
    the callbacks used here (e.g. Tracker.processed) take no arguments — the
    callback is now wrapped accordingly.
    """
    future = thread_executor.submit(func, *args)
    future.add_done_callback(lambda _future: cb())


def run_in_process(cb, func, *args):
    """Process-pool variant of run_in_thread (same callback fix); func/args
    must be picklable."""
    future = process_executor.submit(func, *args)
    future.add_done_callback(lambda _future: cb())
async def async_task(cb, coro):
    """Await *coro* to completion, then fire the no-argument callback."""
    await coro
    cb()


def async_run_task(cb, func, *args):
    """Schedule func(*args) as a task on the running loop; cb() runs once the
    coroutine finishes. Must be called from within a running event loop."""
    asyncio.create_task(async_task(cb, func(*args)))
async def async_run_sync(cb, func, *args):
    """Coroutine wrapper: execute *func* synchronously on the event loop
    thread, then invoke the no-argument callback."""
    func(*args)
    cb()
async def async_run_in_thread(cb, func, *args):
    """Run blocking *func* on the shared thread pool without blocking the
    loop, then call the no-argument callback."""
    await asyncio.get_event_loop().run_in_executor(thread_executor, func, *args)
    cb()
async def async_run_in_process(cb, func, *args):
    """Process-pool variant of async_run_in_thread; func/args must be
    picklable."""
    await asyncio.get_event_loop().run_in_executor(process_executor, func, *args)
    cb()
# Executor registries consumed by the test matrix.
# Fixed: both branches of the former `if os.name == 'posix'` conditional were
# byte-identical, so the platform check was dead weight and is removed. If a
# platform split is ever needed, reintroduce it with genuinely different
# tuples.
sync_executors = (run, run_in_thread, run_in_process)
coro_executors = (async_run_task,)
asyncio_sync_executors = (async_run_sync, async_run_in_thread, async_run_in_process)
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,926
|
primal100/serverperformance
|
refs/heads/master
|
/lib/__init__.py
|
import asyncio
import os
# NOTE(review): os.name is 'posix' on Linux — it is never the string 'linux' —
# so the uvloop branch below can never execute; `sys.platform == 'linux'` was
# probably intended. Left unchanged because "fixing" it would make uvloop a
# hard runtime dependency; confirm before changing.
if os.name=='linux':
    import uvloop
    loop_policy = uvloop.EventLoopPolicy
else:
    # NOTE(review): ProactorEventLoop exists only on Windows; on macOS or
    # other POSIX systems this branch would raise — confirm target platforms.
    class WindowsEventLoopPolicy(asyncio.DefaultEventLoopPolicy):
        def new_event_loop(self):
            return asyncio.ProactorEventLoop()
    loop_policy = WindowsEventLoopPolicy
def set_loop_policy():
    """Install the platform-selected event loop policy process-wide."""
    asyncio.set_event_loop_policy(loop_policy())
|
{"/tests.py": ["/lib/testcases.py", "/lib/actions.py", "/lib/servers.py", "/lib/executors.py", "/lib/handlers.py"], "/lib/servers.py": ["/lib/handlers.py"]}
|
17,928
|
vgrangep/cooperative-agents-simulation
|
refs/heads/main
|
/agent.py
|
import random
class Agent:
    """Base prisoner's-dilemma player.

    Payoff table, (our answer, their answer) -> our score increment, with
    cooperate = True (T) and betray = False (F):
        (T,T): 1   (T,F): 20   (F,T): 0   (F,F): 5
    Agents never know how many rounds remain, so strategies cannot depend on
    the horizon.
    """

    def __init__(self, identifier, description="unamed agent"):
        """Create an agent with an id, a short label and empty history/score.

        Attributes: agent_id is the opponent currently faced; cooperate is our
        current answer; memory maps opponent id -> list of (their answer, our
        answer) tuples; score collects per-round increments.
        """
        self.id = identifier
        self.description = description
        self.agent_id = None
        self.cooperate = True
        self.memory = {}
        self.score = []

    def __str__(self):
        return 'ID=' + str(self.id) + '; ' + self.description

    def decide(self):
        """Choose self.cooperate for the current opponent.

        Base strategy: unconditional cooperation. Subclasses override this
        using self.agent_id and self.memory.
        """
        self.cooperate = True

    def initiate_interraction(self, agent_id):
        """Remember which opponent the coming decision applies to."""
        self.agent_id = agent_id

    def update_memory(self, agent_decision):
        """Append (their answer, our answer) to this opponent's history."""
        entry = (agent_decision, self.cooperate)
        self.memory.setdefault(self.agent_id, []).append(entry)

    def update_score(self, delta):
        """Record this round's score increment."""
        self.score.append(delta)
class CollaborativeAgent(Agent):
    """Always cooperates, regardless of history."""
    def decide(self):
        self.cooperate = True
class ReciprocalAgent(Agent):
    """Tit-for-tat: mirror the opponent's previous answer; cooperate on the
    first encounter.

    Fixed: memory entries are (their answer, our answer) tuples — the old
    code assigned the whole tuple to self.cooperate, which is always truthy,
    so the agent effectively always cooperated. Only the opponent's half of
    the last entry is used now.
    """
    def decide(self):
        self.cooperate = True
        if self.agent_id in self.memory:
            their_last, _our_last = self.memory[self.agent_id][-1]
            self.cooperate = their_last
class TraitorAgent(Agent):
    """Always betrays.

    Fixed: the old inner check (`False in self.memory[...]`) compared False
    against (their, ours) tuples, could never match, and re-assigned the same
    value anyway — dead code, removed. Observable behavior is unchanged:
    the agent always betrays.
    """
    def decide(self):
        self.cooperate = False
class UnforgivingAgent(Agent):
    """Cooperates until the opponent betrays once, then betrays forever.

    Fixed: memory holds (their answer, our answer) tuples, so the old
    membership test `False in self.memory[...]` never matched and the agent
    behaved as an unconditional cooperator. It now inspects the opponent's
    half of each remembered round.
    """
    def decide(self):
        self.cooperate = True
        history = self.memory.get(self.agent_id, [])
        if any(not their_answer for their_answer, _ours in history):
            self.cooperate = False
class ChaoticAgent(Agent):
    """Cooperates or betrays uniformly at random each round."""
    def decide(self):
        self.cooperate = random.choice((True, False))
|
{"/app.py": ["/agent.py"]}
|
17,929
|
vgrangep/cooperative-agents-simulation
|
refs/heads/main
|
/app.py
|
from agent import *
import random
def display_results(agents):
    """Print each agent's total score, one line per agent."""
    for participant in agents:
        print(sum(participant.score))
def pair_agents(agents, verbose=False):
    """Shuffle the agents and pair the first half with the second half.

    With an odd count one agent sits the round out. Returns a list of
    2-tuples; the input sequence is not modified.
    """
    shuffled = list(agents)
    random.shuffle(shuffled)
    half = len(shuffled) // 2
    pairings = list(zip(shuffled[:half], shuffled[half:]))
    if verbose:
        for left, right in pairings:
            print("Pair : " + str(left) + "|" + str(right))
    return pairings
if __name__ == "__main__":
    verbose = False
    # Tournament roster: a mix of strategies (ids are arbitrary but unique).
    agents = []
    agents.append(ReciprocalAgent(0, "ReciprocalAgent"))
    agents.append(ReciprocalAgent(1, "ReciprocalAgent"))
    agents.append(ReciprocalAgent(2, "ReciprocalAgent"))
    agents.append(CollaborativeAgent(3, "CollaborativeAgent"))
    agents.append(CollaborativeAgent(4, "CollaborativeAgent"))
    agents.append(CollaborativeAgent(5, "CollaborativeAgent"))
    agents.append(CollaborativeAgent(9, "CollaborativeAgent"))
    agents.append(TraitorAgent(6, "TraitorAgent"))
    agents.append(TraitorAgent(7, "TraitorAgent"))
    agents.append(TraitorAgent(8, "TraitorAgent"))
    nb_rounds = 10000 # random.range(100)
    if verbose:
        print("nb of rounds", nb_rounds)
    for r in range(nb_rounds):
        pairings = pair_agents(agents, verbose)
        for pair in pairings:
            # Evaluate stretegie
            # NOTE(review): initiate_interraction is handed the opposing
            # Agent *object*, not its id; memory is then keyed by the object.
            # This works (agents are hashable) but disagrees with the
            # docstring on Agent — confirm which key is intended.
            pair[0].initiate_interraction(pair[1])
            pair[0].decide()
            pair[1].initiate_interraction(pair[0])
            pair[1].decide()
            # resolve match
            """ Table format :
    (our answer, other agent's answer) : our score increment
    cooperate : True (T), betray : False (F)
    (T,T) : 1
    (T,F) : 20
    (F,T) : 0
    (F,F) : 5 """
            a = pair[0].cooperate
            b = pair[1].cooperate
            if a and b:
                pair[0].update_score(1)
                pair[1].update_score(1)
            if not a and not b:
                pair[0].update_score(5)
                pair[1].update_score(5)
            if not a and b:
                pair[0].update_score(0)
                pair[1].update_score(20)
            if a and not b:
                pair[0].update_score(20)
                pair[1].update_score(0)
            if verbose:
                print("({ad},{bd}): ({a},{b})".format(
                    ad=pair[0].description + "_" + str(pair[0].id), bd=pair[1].description + "_" + str(pair[1].id), a=a, b=b))
                print("update is a:{a}, b:{b}".format(
                    a=pair[0].score[-1], b=pair[1].score[-1]))
            # Update memories
            pair[0].update_memory(b)
            pair[1].update_memory(a)
    # display_results(agents)
    # Report each agent's mean score per round.
    result = [sum(i.score) / nb_rounds for i in agents]
    print(result)
|
{"/app.py": ["/agent.py"]}
|
17,937
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/test/check_privoxy_http_port.py
|
import requests
# privoxy proxy port
# privoxy proxy port
proxy_port = 8118
# generate http session
s = requests.Session()
# Route plain-HTTP traffic through the local Privoxy instance.
# NOTE(review): only the "http" scheme is proxied; the https request below
# therefore bypasses Privoxy — confirm that is intended.
s.proxies = {
    "http": "http://127.0.0.1:%d" % proxy_port
}
# make http request
#r = s.get("http://www.google.com")
r = s.get("https://www.atagar.com/echo.php")
print(r.text)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,938
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/middlewares.py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
import os
from scrapy import signals
from scrapy.http import HtmlResponse
from selenium import webdriver
class EjobSpiderMiddleware(object):
    """Scrapy spider middleware (pass-through template hooks).

    Fixed: the process_* hooks were missing ``self``; Scrapy invokes them as
    bound methods, so ``response``/``result`` previously shifted into the
    wrong parameters.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy factory hook; also wires the spider_opened signal.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        # Called for each response entering the spider; None = continue.
        return None

    def process_spider_output(self, response, result, spider):
        # Pass the spider's results through unchanged.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # No special handling; let other middlewares / Scrapy deal with it.
        pass

    def process_start_requests(self, start_requests, spider):
        # Forward the start requests untouched (requests only, no items).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class BaseJsRequestMiddleware(object):
    """Downloader middleware that renders pages with a JS-capable webdriver
    before handing the HTML to the spider.

    Fixed: ``driver_path`` was validated but never stored on the instance, so
    get_dirver() crashed with AttributeError on first use.
    """

    def __init__(self, driver_path, extra_script_file=None):
        if not os.path.isfile(driver_path):
            raise Exception('driver path [%s] not exists' % driver_path)
        if extra_script_file and not os.path.isfile(extra_script_file):
            raise Exception('extra script file [%s] not exists' % extra_script_file)
        self.driver_path = driver_path  # fix: required by get_dirver()
        self.driver = None              # lazily created webdriver instance
        self.script = None              # optional JS to run after page load
        if extra_script_file:
            with open(extra_script_file, 'r', encoding='utf8') as f:
                self.script = f.read()

    def get_dirver(self, **kwargs):
        """Return the shared PhantomJS driver, creating it on first use.
        (Method-name typo kept for backward compatibility with callers.)"""
        if not self.driver:
            self.driver = webdriver.PhantomJS(executable_path=self.driver_path, **kwargs)
        return self.driver

    def process_request(self, request, spider):
        """Render request.url in the driver and return the resulting HTML,
        short-circuiting any later process_request methods."""
        driver = self.get_dirver()
        driver.get(request.url)
        url = driver.current_url
        encoding = request.encoding
        if self.script:
            driver.execute_script(self.script)
        body = driver.page_source.encode(encoding)
        response = HtmlResponse(url=url, body=body, encoding=encoding)
        return response  # end any process_request methods
class PhantomjsRequestMiddleware(BaseJsRequestMiddleware):
    """Concrete JS-rendering middleware configured from Scrapy settings
    (PHANTOMJS_PATH, EXTRA_SCRIPT_FILE)."""
    def __init__(self, phantomjs_path=None, extra_script_file=None):
        super(PhantomjsRequestMiddleware, self).__init__(phantomjs_path, extra_script_file)
    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy entry point: pull the paths from the project settings.
        phantomjs_path = crawler.settings.get('PHANTOMJS_PATH')
        extra_script_file = crawler.settings.get('EXTRA_SCRIPT_FILE')
        return cls(phantomjs_path, extra_script_file)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,939
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/test/check_socks_to_tor.py
|
import socks # SocksiPy module
import socket
import urllib.request
import stem.process
from stem.util import term
TOR_SOCKS_PORT = 9150
def set_socks_proxy():
    """Globally monkey-patch the socket module so all connections (including
    DNS lookups) go through Tor's SOCKS5 port."""
    try:
        # Set socks proxy
        socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', TOR_SOCKS_PORT)
        socket.socket = socks.socksocket
        # Perform DNS resolution through the socket
        def getaddrinfo(*args):
            return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
        socket.getaddrinfo = getaddrinfo
    except:
        # NOTE(review): the bare except hides the real failure (and even
        # catches KeyboardInterrupt); consider
        # `except Exception as e: raise Exception(...) from e`.
        raise Exception("Socks proxy is incorrect")
# Uses urllib to fetch a site using SocksiPy for Tor over the TOR_SOCKS_PORT
def query(url):
    """Fetch *url* (through whatever socket.socket currently is, i.e. the
    SOCKS-patched one if set_socks_proxy ran) and return the response body as
    bytes; on any failure return a human-readable message string instead of
    raising.

    Fixed: the bare ``except:`` also swallowed KeyboardInterrupt/SystemExit;
    narrowed to Exception.
    """
    try:
        return urllib.request.urlopen(url).read()
    except Exception:
        return "Unable to reach %s " % url
# Start an instance of Tor configured to only exit through Russia. This prints
# Tor's bootstrap information as it starts. Note that this likely will not
# work if you have another Tor instance running.
def launch_tor():
    """Launch a local Tor on TOR_SOCKS_PORT (exit nodes limited to Russia)
    and return the process handle."""
    print(term.format("Starting Tor:\n", term.Attr.BOLD))
    def print_bootstrap_lines(line):
        # Echo only the "Bootstrapped N%" progress messages, in blue.
        if "Bootstrapped " in line:
            print(term.format(line, term.Color.BLUE))
    return stem.process.launch_tor_with_config(
        config={
            'SocksPort': str(TOR_SOCKS_PORT),
            'ExitNodes': '{ru}', # tor proxy exit node in the country Russia
        },
        init_msg_handler = print_bootstrap_lines,
    )
def check_endpoint():
    """Print what the remote echo endpoint sees (verifies the proxy path)."""
    print(term.format("\nChecking our endpoint:\n", term.Attr.BOLD))
    print(term.format(query("https://www.atagar.com/echo.php"), term.Color.BLUE))
def kill_tor(p):
    # Terminate the Tor subprocess returned by launch_tor().
    p.kill()
if __name__ == '__main__':
    # Route sockets through an already-running Tor on TOR_SOCKS_PORT and
    # print the exit node's public IP; the launch/kill steps are disabled.
    set_socks_proxy()
    #process = launch_tor()
    #check_endpoint()
    #kill_tor(process)
    # https://www.atagar.com/echo.php
    print(query("http://icanhazip.com/"))
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,940
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/db.py
|
import sqlite3, sys
import settings
sqlite_file = settings.SQLITE_FILE
sqlite_table = settings.SQLITE_TABLE
sqlite_table_desc = settings.SQLITE_TABLE_DESC
def create_table():
    """Create the configured sqlite table (an `id` PK plus fields from settings).

    Field dtype codes from SQLITE_TABLE_DESC: 'i' -> INTEGER, 'S' -> TEXT,
    anything else -> VARCHAR(255).
    """
    client = sqlite3.connect(sqlite_file)
    try:
        cursor = client.cursor()
        sql = 'CREATE TABLE IF NOT EXISTS {table} ({fields})'
        fields = ['id INTEGER PRIMARY KEY AUTOINCREMENT']
        for field, dtype in sqlite_table_desc.items():
            if dtype == 'i':
                field = '%s INTEGER' % field
            elif dtype == 'S':
                field = '%s TEXT' % field
            else:
                field = '%s VARCHAR(255)' % field
            fields.append(field)
        sql = sql.format(table=sqlite_table, fields=', '.join(fields))
        print('SQL:', sql)
        cursor.execute(sql)
        client.commit()
    finally:
        # Bug fix: release the connection even if execute() raises.
        client.close()
def select_table():
    """Print every row of the configured sqlite table."""
    client = sqlite3.connect(sqlite_file)
    cursor = client.cursor()
    sql = 'SELECT * FROM {table}'.format(table=sqlite_table)
    results = cursor.execute(sql)
    print('SQL:', sql)
    # The cursor must be consumed before the connection is closed.
    for row in results:
        print(row)
    client.close()
def delete_table():
    """Drop the configured sqlite table if it exists."""
    client = sqlite3.connect(sqlite_file)
    try:
        cursor = client.cursor()
        sql = 'DROP TABLE IF EXISTS {table}'.format(table=sqlite_table)
        cursor.execute(sql)
        print('SQL:', sql)
        # Bug fix: the DROP was never committed, so closing the connection
        # could roll it back depending on the open transaction state.
        client.commit()
    finally:
        client.close()
if __name__ == '__main__':
    # Tiny CLI: `create` / `delete` manage the table; anything else dumps it.
    if len(sys.argv)<2:
        select_table()
    elif sys.argv[1] == 'create':
        create_table()
    elif sys.argv[1] == 'delete':
        delete_table()
    else:
        select_table()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,941
|
wikty/Spiders
|
refs/heads/master
|
/netmusic/test/login.py
|
import os
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
import config
# NOTE(security/review): credentials are committed in plain text; they
# should come from a config file or environment variables instead.
username = '15620161152193'
password = '160318'
driver = webdriver.Firefox(executable_path=os.path.abspath(config.GECKODRIVER_PATH))
# WebDriver will wait until the page has fully loaded (that is, the “onload” event has fired)
driver.get("http://event.wisesoe.com/")
# find element by its name attribute
user_elem = driver.find_element_by_name("UserName")
user_elem.clear()
user_elem.send_keys(username)
pass_elem = driver.find_element_by_name('Password')
pass_elem.clear()
pass_elem.send_keys(password)
# Submit the login form by "pressing" Enter on the logon button.
driver.find_element_by_class_name('click-logon').send_keys(Keys.RETURN)
# control_elem = driver.find_element_by_id('default-menu-control')
# control_elem = control_elem.find_element_by_link_text('My reservations').click()
# select_elem = driver.find_element_by_id('ctl00_MainContent_termddl')
# select_elem = Select(select_elem)
# select_elem.select_by_value('2016-2017学年秋季学期')
# driver.close()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,942
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/spiders/lagou_job_spider_test.py
|
# -*- coding: utf-8 -*-
import os, json
from urllib.parse import quote
import scrapy
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from ejob.items import JobItem
from ejob.item_loaders import LagouJobItemLoader
class LagouJobSpiderSpider(scrapy.spiders.CrawlSpider):
    """Crawl job postings from lagou.com and yield populated JobItems."""
    name = "lagou_job_spider_test"
    allowed_domains = ["lagou.com"]
    # (category display name, category listing URL, category id)
    urls = [
        ('理财顾问', 'https://www.lagou.com/zhaopin/licaiguwen/', '123'),
    ]
    start_urls = ['https://www.lagou.com/']
    rules = [
        # Follow pagination links inside the pager container.
        Rule(LinkExtractor(allow=('/zhaopin/[^/]+/\d+/$', ), restrict_xpaths=('//*[@class="pager_container"]', )), process_request='preprocess_request', follow=True),
        # Job detail pages within the position list go to parse_job.
        Rule(LinkExtractor(allow=('/jobs/\d+\.html$', ), restrict_xpaths=('//*[@id="s_position_list"]')), callback='parse_job')
    ]
    # urls = [('', 'https://www.lagou.com/jobs/2123649.html', ''), ('', 'https://www.lagou.com/jobs/3248331.html', '')]
    site = '拉勾网(https://www.lagou.com/)'
    query_str = '&'.join(['{}'.format(quote('city=全国'))])

    def start_requests(self):
        """Seed one listing request per configured job category."""
        for category_name, category_url, category_id in self.urls:
            category_url = '?'.join([category_url, self.query_str])
            request = scrapy.Request(category_url, dont_filter=False)
            request.meta['dont_redirect'] = True
            request.meta['category_name'] = category_name
            request.meta['category_id'] = category_id
            yield request

    def preprocess_request(self, request):
        """Hook for mutating pagination requests before they are sent."""
        # request.replace(cookies={'index_location_city': '%E4%B8%8A%E6%B5%B7'})
        # request.replace(url='?'.join(request.url, self.query_str))
        return request

    def parse_job(self, response):
        """Extract one JobItem from a job-detail page via LagouJobItemLoader."""
        # Bug fix: removed an unused `item = JobItem()` local — the loader
        # below already constructs its own JobItem.
        l = LagouJobItemLoader(item=JobItem(), response=response)
        l.add_value('url', response.url)
        l.add_value('site', self.site)
        l.add_value('requirements', '')
        l.add_value('description', '')
        xpath = '//*[contains(@class, "position-content")]/*[contains(@class, "position-content-l")]'
        cl = response.xpath(xpath)
        # Job name / employer.
        jn = cl.xpath('*[@class="job-name"]')
        l.add_value('position', jn.xpath('*[@class="name"]/text()').extract_first())
        l.add_value('department', jn.xpath('*[@class="company"]/text()').extract_first())
        # The request block lists salary/city/experience/education/jobtype
        # as consecutive <span> texts, in that order.
        jr = cl.xpath('*[@class="job_request"]')
        t = jr.xpath('p/span/text()').extract()
        l.add_value('salary', t[0])
        l.add_value('city', t[1])
        l.add_value('exprience', t[2])
        l.add_value('education', t[3])
        l.add_value('jobtype', t[4])
        l.add_value('tags', jr.xpath('ul[contains(@class, "position-label")]/li/text()').extract())
        l.add_value('postdate', jr.xpath('*[@class="publish_time"]/text()').re_first(r'(\d{4}-\d{2}-\d{2})'))
        # Job description, perks and work address.
        jd = response.xpath('//*[@id="job_detail"]')
        l.add_value('temptation', jd.xpath('*[contains(@class, "job-advantage")]/p/text()').extract())
        l.add_value('rawpost', jd.xpath('*[contains(@class, "job_bt")]//p/text()').extract())
        ja = jd.xpath('*[contains(@class, "job-address")]')
        address = ja.xpath('*[contains(@class, "work_addr")]/a[contains(@href, "https://www.lagou.com/")]/text()').extract()
        address += ja.xpath('*[contains(@class, "work_addr")]/text()').extract()
        l.add_value('address', address)
        longitude = ja.xpath('*[@name="positionLng"]/@value').extract_first(default='')
        latitude = ja.xpath('*[@name="positionLat"]/@value').extract_first(default='')
        l.add_value('location', ','.join([longitude, latitude]))
        # Company side panel.
        xpath = '//*[@id="job_company"]'
        jc = response.xpath(xpath)
        l.add_value('company_name', jc.xpath('.//h2/text()').extract_first())
        for li in jc.xpath('.//ul[contains(@class, "c_feature")]/li'):
            feature = li.xpath('*[contains(@class, "hovertips")]/text()').extract_first()
            value = ''.join([s.strip() for s in li.xpath('text()').extract() if s.strip()])
            if '领域' in feature:
                l.add_value('company_brief', '领域: {}'.format(value))
            elif '发展阶段' in feature:
                l.add_value('company_brief', '发展阶段: {}'.format(value))
            elif '规模' in feature:
                l.add_value('company_brief', '规模: {}'.format(value))
            elif '公司主页' in feature:
                l.add_value('company_url', li.xpath('a/@href').extract_first())
        yield l.load_item()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,943
|
wikty/Spiders
|
refs/heads/master
|
/ejob/db/sqlite_db.py
|
import sqlite3
from .base_db import BaseDb
class SqliteDb(BaseDb):
    """Thin convenience wrapper around a sqlite3 connection.

    Field dtype codes used by create_table: 'i' INTEGER, 'f' FLOAT,
    's' TEXT, 'b' BLOB, 'n' NULL; anything else falls back to TEXT.
    When self.debug is truthy, every generated statement is echoed.
    """

    def __init__(self, sqlite_file=None, sqlite_mode=None):
        self.sqlite_file = sqlite_file if sqlite_file else 'dump.db'
        # Bug fix: the original `if sqlite_mode: ... else: True` made it
        # impossible to disable debug by passing False; only None now
        # selects the default.
        self.debug = True if sqlite_mode is None else sqlite_mode
        self.client = sqlite3.connect(self.sqlite_file)

    def __del__(self):
        # Bug fix: guard against __init__ having failed before
        # self.client was assigned (AttributeError during GC).
        client = getattr(self, 'client', None)
        if client is not None:
            client.close()

    def close(self):
        self.client.close()

    def sql(self, q):
        """Execute a raw SQL statement, commit, and return the cursor."""
        cursor = self.client.cursor()
        results = cursor.execute(q)
        self.client.commit()
        return results

    def create_table(self, tbl_name, fields, extra=''):
        '''
        fields = {'field_name': 'field_type'}
        '''
        cursor = self.client.cursor()
        sql = 'CREATE TABLE IF NOT EXISTS {table} ({fields})'
        fds = ['id INTEGER PRIMARY KEY AUTOINCREMENT']
        # dtype code -> SQL column type (replaces the if/elif ladder).
        type_map = {'i': 'INTEGER', 'f': 'FLOAT', 's': 'TEXT', 'b': 'BLOB', 'n': 'NULL'}
        for field_name, field_type in fields.items():
            fds.append('`%s` %s' % (field_name, type_map.get(field_type, 'TEXT')))
        if extra:
            fds.append(extra)
        sql = sql.format(table=tbl_name, fields=', '.join(fds))
        if self.debug:
            print('SQL:', sql)
        cursor.execute(sql)
        self.client.commit()

    def select_table(self, tbl_name, fields=None, where_condition=''):
        '''
        fields = [name1, name2,...]
        '''
        # Bug fix: `fields=[]` was a mutable default argument.
        cursor = self.client.cursor()
        if fields:
            fields = ', '.join('`{}`'.format(field) for field in fields)
        else:
            fields = '*'
        sql = 'SELECT {fields} FROM {table}'.format(table=tbl_name, fields=fields)
        if where_condition:
            sql += ' WHERE {condition}'.format(condition=where_condition)
        if self.debug:
            print('SQL:', sql)
        return list(cursor.execute(sql))

    def count_table(self, tbl_name, where=None):
        '''
        where = {'field_name': '>30'}
        '''
        # Bug fix: `where={}` was a mutable default argument.
        cursor = self.client.cursor()
        if where:
            where = ' '.join(field_name + where[field_name] for field_name in where)
        else:
            where = ''
        sql = 'SELECT COUNT(*) FROM {table}'.format(table=tbl_name)
        if where:
            sql += ' WHERE {where}'.format(where=where)
        if self.debug:
            print('SQL:', sql)
        cursor.execute(sql)
        result = cursor.fetchone()
        return result[0] if result else None

    def insert_table(self, tbl_name, fields=None):
        '''
        fields = {'field_name': 'field_value'}
        '''
        # Bug fix: `fields={}` was a mutable default argument.
        fields = fields if fields is not None else {}
        cursor = self.client.cursor()
        keys = ['`{}`'.format(key) for key in fields.keys()]
        sql = 'INSERT INTO {table} ({keys}) VALUES ({values})'.format(
            table=tbl_name,
            keys=', '.join(keys),
            # Parameterized placeholders keep values safely quoted.
            values=', '.join(['?'] * len(keys))
        )
        if self.debug:
            print('SQL:', sql)
        cursor.execute(sql, list(fields.values()))
        self.client.commit()
        return cursor.lastrowid

    def insert_many_table(self, tbl_name, keys, values):
        '''
        values is generator or iterator
        '''
        cursor = self.client.cursor()
        sql = 'INSERT INTO {table} ({keys}) VALUES ({values})'.format(
            table=tbl_name,
            keys=', '.join('`{}`'.format(key) for key in keys),
            # Named-style placeholders: each row in *values* is a mapping.
            values=', '.join(':{}'.format(key) for key in keys)
        )
        if self.debug:
            print('SQL:', sql)
        cursor.executemany(sql, values)
        self.client.commit()
        return cursor.rowcount

    def delete_table(self, tbl_name):
        """Drop *tbl_name* if it exists."""
        cursor = self.client.cursor()
        sql = 'DROP TABLE IF EXISTS {table}'.format(table=tbl_name)
        if self.debug:
            print('SQL:', sql)
        cursor.execute(sql)
        self.client.commit()

    def update_table(self, tbl_name, fields, where_condition=''):
        '''
        fields = {
            'field_name': 'field_value',  # assign to the field
            'field_name': '++field_value' # add into the field
        }
        where_condition = 'username="wikty"'
        '''
        cursor = self.client.cursor()
        fds = []
        for field_name, field_value in fields.items():
            field_name = str(field_name)
            field_value = str(field_value)
            if field_value.startswith('++'):
                # '++N' means increment the column by N.
                fds.append(field_name + '=' + field_name + '+' + field_value[2:])
            else:
                fds.append(field_name + '=' + field_value)
        # Bug fix: multiple SET assignments must be comma-separated; the
        # original joined them with ' ', producing invalid SQL whenever
        # more than one field was updated.
        sql = 'UPDATE {table} SET {fields}'.format(
            table=tbl_name,
            fields=', '.join(fds)
        )
        if where_condition:
            sql += ' WHERE {condition}'.format(condition=where_condition)
        if self.debug:
            print('SQL:', sql)
        cursor.execute(sql)
        self.client.commit()
        return cursor.rowcount
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,944
|
wikty/Spiders
|
refs/heads/master
|
/notification/notification/spiders/event_wisesoe_spider.py
|
# -*- coding: utf-8 -*-
import time, os, json
import requests
import scrapy
from scrapy.mail import MailSender
def get_email_body(title, speaker, address, showtime, reservetime):
    """Return a plain-text email body describing one lecture reservation."""
    template = """
    Title: {title}
    Speaker: {speaker}
    Address: {address}
    ShowTime: {showtime}
    ReserveTime: {reservetime}
    """
    return template.format(title=title, speaker=speaker, address=address,
                           showtime=showtime, reservetime=reservetime)
def send_simple_message(title, body, receivers):
    """Send a plain-text email via the Mailgun HTTP API; returns the Response.

    NOTE(security/review): the Mailgun sandbox domain and API key are
    hard-coded in the repository — move them to configuration and rotate
    the key.
    """
    return requests.post(
        "https://api.mailgun.net/v3/sandboxdd4b279d71df4a03ad2388f4af5c81d8.mailgun.org/messages",
        auth=("api", "key-fd8e348bdec8df0586d1a4801aada0e4"),
        data={
            "from": "xiaowenbin@wikty.com",
            "to": receivers,
            "subject": title,
            "text": body}
    )
class EventWisesoeSpiderSpider(scrapy.Spider):
    """Log in to event.wisesoe.com, scrape "My reservations", and email any
    reservation newer than the last-seen timestamp from the config file."""
    name = "event_wisesoe_spider"
    login_url = 'http://account.wisesoe.com/WcfServices/SSOService.svc/Account/Logon?callback=jQuery180047063062154941493_1492137595375&UserName={username}&Password={password}&_={timestamp}'
    home_url = 'http://event.wisesoe.com/Authenticate.aspx?returnUrl=Default.aspx'

    def start_requests(self):
        """Validate config (credentials, receivers) and issue the login request.

        Bug fix in this method: log messages had typos ('emtpy' -> 'empty',
        'receciver' -> 'receiver').
        """
        config_file = self.settings.get('EVENT_WISESOE_COM_CONFIG')
        if not os.path.exists(config_file):
            self.logger.error('wisesoe config file not exists')
            return None
        self.config = {}
        with open(config_file, 'r', encoding='utf8') as f:
            self.config = json.loads(f.read())
        if not self.config:
            self.logger.error('wisesoe config file is empty')
            return None
        self.username = self.config['username']
        if not self.username:
            self.logger.error('wisesoe username is empty')
            return None
        self.password = self.config['password']
        if not self.password:
            self.logger.error('wisesoe password is empty')
            return None
        self.receivers = self.config['receivers']
        if not self.receivers:
            self.logger.error('wisesoe receiver is empty')
            return None
        # The cookiejar meta key keeps the SSO session cookies across requests.
        yield scrapy.Request(
            self.login_url.format(
                username=self.username,
                password=self.password,
                timestamp=int(time.time())),
            callback=self.parse,
            meta={'cookiejar': 1}
        )

    def parse(self, response):
        """After login, fetch the authenticated home page."""
        # self.logger.error(response.body.decode('utf-8'))
        yield scrapy.Request(
            self.home_url,
            callback=self.home_parse,
            meta={'cookiejar': response.meta['cookiejar']}
        )

    def home_parse(self, response):
        """Follow the "My reservations" link from the site menu."""
        # self.logger.error(response.body.decode('utf-8'))
        xpath = '//*[@id="default-menu-control"]//a[contains(text(), "My reservations")]'
        url = response.xpath(xpath).xpath('@href').extract_first()
        url = response.urljoin(url)
        yield scrapy.Request(
            url,
            callback=self.parse_my_reservations,
            meta={'cookiejar': response.meta['cookiejar']}
        )

    def parse_my_reservations(self, response):
        """Collect reservations newer than config['timestamp'] and email them."""
        # self.logger.error(response.body.decode('utf-8'))
        xpath = '//table[@id="ctl00_MainContent_GridView1"]/tbody/tr[position() > 1]'
        msg = []
        # NOTE(review): max_timestamp is computed but never persisted back to
        # the config file, so the same reservations may be re-sent next run.
        max_timestamp = self.config['timestamp']
        for tr in response.xpath(xpath):
            title = tr.xpath('td[2]/text()').extract_first(default='').strip()
            speaker = tr.xpath('td[3]/text()').extract_first(default='').strip()
            address = tr.xpath('td[4]/text()').extract_first(default='').strip()
            showtime = tr.xpath('td[5]/text()').extract_first(default='').strip()
            reservation_time = tr.xpath('td[6]/text()').extract_first(default='').strip()
            if reservation_time:
                timestamp = int(time.mktime(time.strptime(reservation_time, '%m/%d/%Y %I:%M:%S %p')))
                if timestamp > max_timestamp:
                    max_timestamp = timestamp
                if timestamp > self.config['timestamp']:
                    msg.append(get_email_body(title, speaker, address, showtime, reservation_time))
        if msg:
            send_simple_message('讲座通知', '\n'.join(msg), self.receivers)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,945
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/test/check_pycurl_to_tor.py
|
import io
import pycurl
import stem.process
from stem.util import term
TOR_SOCKS_PORT = 9150
# Uses pycurl to fetch a site using the proxy on the SOCKS_PORT
def query(url):
    """Fetch *url* through the local Tor SOCKS5 proxy and return the body.

    Returns an error string (never raises) on failure.
    """
    output = io.BytesIO()
    query = pycurl.Curl()
    query.setopt(pycurl.URL, url)
    query.setopt(pycurl.PROXY, 'localhost')
    query.setopt(pycurl.PROXYPORT, TOR_SOCKS_PORT)
    # SOCKS5_HOSTNAME resolves DNS on the proxy side (no local DNS leak).
    query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
    query.setopt(pycurl.WRITEFUNCTION, output.write)
    try:
        query.perform()
        return output.getvalue().decode('utf8')
    except pycurl.error as e:
        return "Unable to reach %s (%s)" % (url, e)
# Start an instance of Tor configured to only exit through Russia. This prints
# Tor's bootstrap information as it starts. Note that this likely will not
# work if you have another Tor instance running.
def launch_tor():
    """Launch a local Tor on TOR_SOCKS_PORT (exit nodes limited to Russia)
    and return the process handle."""
    print(term.format("Starting Tor:\n", term.Attr.BOLD))
    def print_bootstrap_lines(line):
        # Echo only "Bootstrapped N%" progress lines in blue.
        if "Bootstrapped " in line:
            print(term.format(line, term.Color.BLUE))
    return stem.process.launch_tor_with_config(
        config={
            'SocksPort': str(TOR_SOCKS_PORT),
            'ExitNodes': '{ru}', # tor proxy exit node in the country Russia
        },
        init_msg_handler = print_bootstrap_lines,
    )
def kill_tor(p):
    # Terminate the Tor subprocess returned by launch_tor().
    p.kill()
def check_endpoint():
    """Print the response seen through the proxy (here: our public IP)."""
    print(term.format("\nChecking our endpoint:\n", term.Attr.BOLD))
    #https://www.atagar.com/echo.php
    print(term.format(query("http://ip.cn/"), term.Color.BLUE))
if __name__ == '__main__':
    # Assumes an external Tor / Tor Browser is already listening on
    # TOR_SOCKS_PORT; the launch/kill steps are commented out.
    #process = launch_tor()
    check_endpoint()
    #kill_tor(process)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,946
|
wikty/Spiders
|
refs/heads/master
|
/ejob/parser/parse_jobs_description.py
|
from scrapy.selector import Selector
def parse(filename):
    """Load *filename* into a Scrapy Selector.

    NOTE(review): incomplete stub — `sl` is built but nothing is extracted
    or returned yet.
    """
    content = ''
    with open(filename, 'r', encoding='utf8') as f:
        content = f.read()
    sl = Selector(text=content)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,947
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/test/check_selenium.py
|
# Python language bindings for Selenium WebDriver
# https://pypi.python.org/pypi/selenium
from selenium.webdriver.phantomjs.webdriver import WebDriver

# Smoke test: render baidu.com in headless PhantomJS and print its title.
browser = WebDriver(executable_path="../example/phantomjs-2.1.1-windows/bin/phantomjs.exe")
browser.get('http://www.baidu.com')
print(browser.title)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,948
|
wikty/Spiders
|
refs/heads/master
|
/ejob/db/__init__.py
|
from .sqlite_db import SqliteDb as SQLiteDB
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,949
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/example/example/spiders/showip.py
|
# -*- coding: utf-8 -*-
import scrapy
class ShowIpSpider(scrapy.Spider):
    """Fetch icanhazip.com and log the body (shows the crawler's exit IP)."""
    name = "showip"
    #start_urls = ['http://icanhazip.com']
    url = 'http://icanhazip.com'
    def start_requests(self):
        # dont_filter allows re-requesting the same URL repeatedly.
        # for i in range(200):
        #     yield scrapy.Request(self.url, callback=self.parse, dont_filter=True)
        yield scrapy.Request(self.url, callback=self.parse, dont_filter=True)
    def parse(self, response):
        # The response body is just the requester's public IP address.
        self.logger.info(response.body)
        print(response.body)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,950
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/items.py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class StarsItem(scrapy.Item):
    # Default placeholder item generated by `scrapy startproject`.
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
class StarInfoItem(scrapy.Item):
    """One celebrity profile scraped from the catalog.

    `image_urls`/`images` are the standard field pair consumed by Scrapy's
    ImagesPipeline (enabled in settings.py).
    """
    starid = scrapy.Field()
    url = scrapy.Field()
    capital = scrapy.Field()     # catalog bucket: initial letter or '0-9'
    name = scrapy.Field()
    another_name = scrapy.Field()
    english_name = scrapy.Field()
    gender = scrapy.Field()
    birthyear = scrapy.Field()
    birthday = scrapy.Field()
    constellation = scrapy.Field()
    nationality = scrapy.Field()
    area = scrapy.Field()
    profession = scrapy.Field()
    height = scrapy.Field()
    bloodtype = scrapy.Field()
    brief = scrapy.Field()
    avatar = scrapy.Field()
    album = scrapy.Field()
    image_urls = scrapy.Field()
    images = scrapy.Field()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,951
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/items.py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class EjobItem(scrapy.Item):
    # Default placeholder item generated by `scrapy startproject`.
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
class CatalogItem(scrapy.Item):
    """A job-category entry (id, display name, listing URL, parent category)."""
    id = scrapy.Field()
    name = scrapy.Field()
    url = scrapy.Field()
    category = scrapy.Field()
class JobItem(scrapy.Item):
    """A single job posting scraped from a job site.

    NOTE(review): `exprience` is a typo for `experience`, but renaming the
    field would break every spider/loader that references it.
    """
    position = scrapy.Field()
    department = scrapy.Field()
    description = scrapy.Field()
    tags = scrapy.Field()
    salary = scrapy.Field()
    temptation = scrapy.Field()
    jobtype = scrapy.Field()
    exprience = scrapy.Field()
    education = scrapy.Field()
    requirements = scrapy.Field()
    city = scrapy.Field()
    address = scrapy.Field()
    location = scrapy.Field()    # "longitude,latitude"
    url = scrapy.Field()
    site = scrapy.Field()
    rawpost = scrapy.Field()
    postdate = scrapy.Field()
    company_name = scrapy.Field()
    company_url = scrapy.Field()
    company_brief = scrapy.Field()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,952
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import os, json, codecs, hashlib
from urllib.parse import quote
import scrapy
from scrapy.exceptions import DropItem
class EjobPipeline(object):
    # Default no-op pipeline generated by `scrapy startproject`.
    def process_item(self, item, spider):
        return item
class JsonStorePipeline(object):
    """Store Scrapy item into a json line file that is named by spider name."""
    def __init__(self, datafile):
        # self.f = codecs.open(datafile, 'a+', encoding='utf-8')
        self.f = codecs.open(datafile, 'w', encoding='utf-8')
    def process_item(self, item, spider):
        # One JSON object per line (JSON Lines). Items that fail to
        # serialize are dropped rather than crashing the crawl.
        try:
            item_dict = dict(item)
            self.f.write(json.dumps(item_dict, ensure_ascii=False)+'\n')
        except Exception as e:
            raise DropItem(e)
        return item
    @classmethod
    def from_crawler(cls, crawler):
        # Pick a fresh file name (<spider>_<i><ext>) rather than clobbering
        # output from a previous run.
        datadir = crawler.settings['DATA_DIR']
        dataext = crawler.settings['DATA_EXT']
        datafile = os.path.join(datadir, crawler.spider.name + dataext)
        i = 1
        while os.path.isfile(datafile):
            datafile = os.path.join(datadir, crawler.spider.name + '_%d' % i + dataext)
            i += 1
        return cls(datafile)
    def open_spider(self, spider):
        pass
    def close_spider(self, spider):
        if not self.f.closed:
            self.f.close()
class ScreenshotBySplashPipeline(object):
    """Use Splash to render a screenshot of every Scrapy item.

    The screenshot request is downloaded through the crawler engine as a
    Deferred; the item is returned once the image has been written.
    """
    def __init__(self, splash_url, screenshot_dir, screenshot_format, screenshot_url_field, screenshot_file_field):
        self.splash_url = splash_url
        self.screenshot_dir = screenshot_dir
        self.screenshot_format = screenshot_format
        self.screenshot_url_field = screenshot_url_field
        self.screenshot_file_field = screenshot_file_field

    @classmethod
    def from_crawler(cls, crawler):
        # URL like this: "http://localhost:8050/render.png?url={}"
        splash_url = crawler.settings['SPLASH_URL']
        screenshot_dir = crawler.settings['SCREENSHOT_DIR']  # screenshot files' storage directory
        # may be is "url", the page to be screenshot
        screenshot_url_field = crawler.settings['SCREENSHOT_URL_FIELD']
        # may be is "screenshot", the generated screenshot location
        # NOTE(review): 'SCRAEENSHOT_FILE_FIELD' looks misspelled, but it must
        # match whatever key projects put in settings.py, so it is kept.
        screenshot_file_field = crawler.settings['SCRAEENSHOT_FILE_FIELD']
        # png, jpg, gif and so on
        screenshot_format = crawler.settings['SCREENSHOT_FORMAT']
        return cls(splash_url, screenshot_dir, screenshot_format, screenshot_url_field, screenshot_file_field)

    def process_item(self, item, spider):
        """Schedule a Splash screenshot download for this item's URL."""
        try:
            url_field = self.screenshot_url_field
            splash_url = self.splash_url
            screenshot_ext = self.screenshot_format
            encoded_item_url = quote(item[url_field])
            url = splash_url.format(encoded_item_url)
            # Name the file after the hash of the render URL so repeated
            # renders of the same page reuse one file name.
            url_hash = hashlib.md5(url.encode("utf8")).hexdigest()
            filename = "{}.{}".format(url_hash, screenshot_ext)
            request = scrapy.Request(url)
            request.meta['screenshot_filename'] = filename
            # Deferred Process item
            dfd = spider.crawler.engine.download(request, spider)
            dfd.addBoth(self.return_item, item)
        except Exception as e:
            raise DropItem(e)
        return dfd

    def return_item(self, response, item):
        """Deferred callback: write the screenshot to disk and return the item."""
        screenshot_dir = self.screenshot_dir
        file_field = self.screenshot_file_field
        if response.status != 200:
            # Error happened, return item.
            return item
        # Save screenshot to file.
        # Bug fix: process_item stores the name under 'screenshot_filename';
        # the original read the nonexistent meta key 'filename' (KeyError).
        filename = response.meta['screenshot_filename']
        with open(os.path.join(screenshot_dir, filename), 'wb') as f:
            f.write(response.body)
        item[file_field] = filename
        return item
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,953
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/item_loaders.py
|
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst, MapCompose, Join, Compose, Identity
from w3lib.html import remove_tags
def take_first_nonempty_from_iterable():
    # Output processor: return the first non-empty extracted value.
    return TakeFirst()
def str_strip(chars=' '):
    # Input processor: strip *chars* from each extracted string.
    return MapCompose(lambda s: s.strip(chars))
def list_strip(chars=' '):
    # NOTE(review): MapCompose calls the lambda once per extracted value, so
    # `l` here is a single string; joining over it iterates its characters,
    # effectively deleting every character in *chars* from each value.
    # Presumably intentional for tags/address — verify before reuse.
    return MapCompose(lambda l: ''.join([s.strip(chars) for s in l]))
# def str_strip(chars=' '):
#     return Compose(lambda l:l[0].strip(chars))
def html_strip():
    # Input processor: drop HTML tags from each value.
    return MapCompose(remove_tags)
def comma_join():
    # Output processor: join values with ','.
    return Join(separator=',')
def newline_join():
    # Output processor: join values with newlines.
    return Join(separator='\n')
def list_join_if_nonempty(separator=''):
    """Output processor: join stripped, non-blank values with *separator*."""
    def _join(loader, values):
        cleaned = (v.strip() for v in values)
        return separator.join(v for v in cleaned if v)
    return _join
def filter_word(word):
    # Input processor: drop values exactly equal to *word* (MapCompose
    # discards None results).
    return MapCompose(lambda s: None if s == word else s)
class LagouJobItemLoader(ItemLoader):
    """Field processors for lagou.com job pages (see the lagou spiders)."""
    default_input_processor = str_strip()
    default_output_processor = Join()
    # Bug fix: `salary = str_strip(' /')` was missing the `_in` suffix, so
    # ItemLoader never applied the processor to the salary field.
    salary_in = str_strip(' /')
    city_in = str_strip(' /')
    exprience_in = str_strip(' /')
    education_in = str_strip(' /')
    jobtype_in = str_strip(' /')
    tags_in = list_strip()
    tags_out = list_join_if_nonempty(',')
    temptation_in = Identity()
    temptation_out = newline_join()
    rawpost_in = Identity()
    rawpost_out = newline_join()
    address_in = list_strip(' -\n')
    address_out = list_join_if_nonempty(',')
    company_name_in = str_strip(' \n')
    company_brief_out = newline_join()
class RongypJobItemLoader(ItemLoader):
    """Field processors for rongyp job pages; unlisted fields use the defaults."""
    default_input_processor = str_strip()
    default_output_processor = Join()
    position_in = str_strip(' \n')
    temptation_out = comma_join()
    company_brief_out = newline_join()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,954
|
wikty/Spiders
|
refs/heads/master
|
/netmusic/test/config.py
|
PHANTOMJS_PATH = '../bin/phantomjs-2.1.1-windows/phantomjs.exe'
GECKODRIVER_PATH = '../bin/geckodriver-v0.17.0-win32/geckodriver.exe'
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,955
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/spiders/stars_catalog_spider.py
|
import re, string, json
import scrapy
class StarsSpider(scrapy.Spider):
    """Collect the star catalog (name + profile URL) grouped by initial letter."""
    name = "stars-catalog-spider"
    start_urls = [
        'http://ent.qq.com/c/dalu_star.shtml',
    ]
    # 'A'..'Z' plus the bucket for names starting with digits.
    stars_capital = [c for c in string.ascii_uppercase]
    stars_capital.append('0-9')

    def start_requests(self):
        for url in self.start_urls:
            request = scrapy.Request(url, callback=self.parse)
            yield request

    def parse(self, response):
        """Scrape every <tr id="<capital><n>"> row and dump the catalog to JSON."""
        stars = {}
        for capital in self.stars_capital:
            stars[capital] = []
            count = 1
            while True:
                # Row ids run "A1", "A2", ...; stop at the first missing row.
                rowid = capital + ('%d' % count)
                count += 1
                links = response.xpath('//tr[@id="%s"]//a' % rowid)
                if not links:
                    break
                for link in links:
                    url = link.xpath('@href').extract_first()
                    name = link.xpath('@title').extract_first()
                    if url is None:
                        self.logger.error('url is empty')
                    elif name:
                        stars[capital].append({
                            'name': name,
                            'url': url
                        })
        # Bug fix: the output file was opened but never closed; a context
        # manager guarantees the JSON is flushed to disk.
        with open('stars_catalog.json', 'w', encoding='utf-8') as f:
            f.write(json.dumps(stars, ensure_ascii=False))
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,956
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/Scrapy-sqlite-item-exporter-master/exporters.py
|
"""
Item Exporters are used to export/serialize items into sqlite3 database.
"""
from scrapy.contrib.exporter import BaseItemExporter
import sqlite3
class SqliteItemExporter(BaseItemExporter):
    """Export scraped items into per-item-class sqlite3 tables."""

    def __init__(self, file, **kwargs):
        self._configure(kwargs)
        self.conn = sqlite3.connect(file.name)
        self.conn.text_factory = str
        self.created_tables = []

    def export_item(self, item):
        """Insert *item* into a table named after its class (created on first use)."""
        item_class_name = type(item).__name__
        if item_class_name not in self.created_tables:
            # Bug fix: dict.iterkeys() is Python 2 only; the rest of this
            # project targets Python 3, where it raises AttributeError.
            self._create_table(item_class_name, item.fields.keys())
            self.created_tables.append(item_class_name)
        field_list = []
        value_list = []
        for field_name in item.keys():
            field_list.append('[%s]' % field_name)
            field = item.fields[field_name]
            value_list.append(self.serialize_field(field, field_name, item[field_name]))
        sql = 'insert into [%s] (%s) values (%s)' % (item_class_name, ', '.join(field_list), ', '.join(['?' for f in field_list]))
        self.conn.execute(sql, value_list)
        self.conn.commit()

    def _create_table(self, table_name, columns):
        # All columns are stored as TEXT; serialization happens in export_item.
        sql = 'create table if not exists [%s] ' % table_name
        column_define = ', '.join(['[%s] text' % column for column in columns])
        sql += '(%s)' % column_define
        self.conn.execute(sql)
        self.conn.commit()

    def __del__(self):
        # NOTE(review): relies on GC for cleanup; an explicit finish/close
        # hook would be more deterministic.
        self.conn.close()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,957
|
wikty/Spiders
|
refs/heads/master
|
/netmusic/test/firefox.py
|
import os
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import config
# Selenium smoke test: search python.org for "pycon" via a real Firefox.
print(os.path.abspath(config.GECKODRIVER_PATH))
driver = webdriver.Firefox(executable_path=os.path.abspath(config.GECKODRIVER_PATH))
# WebDriver will wait until the page has fully loaded (that is, the “onload” event has fired)
driver.get("http://www.python.org")
assert "Python" in driver.title
# find element by its name attribute
elem = driver.find_element_by_name("q")
elem.clear()
# sending keys, this is similar to entering keys using your keyboard.
elem.send_keys("pycon")
elem.send_keys(Keys.RETURN)
# The results page should contain at least one hit for "pycon".
assert "No results found." not in driver.page_source
# The quit will exit entire browser whereas close` will close one tab, but if just one tab was open
# driver.quit()
# driver.close()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,958
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/settings.py
|
# -*- coding: utf-8 -*-

# Scrapy settings for stars project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     http://doc.scrapy.org/en/latest/topics/settings.html
#     http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#     http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'stars'

# Fixed desktop-browser UA string sent with every request.
USER_AGENT = 'Mozilla/5.0 (compatible; U; ABrowse 0.6; Syllable) AppleWebKit/420+ (KHTML, like Gecko)'

SPIDER_MODULES = ['stars.spiders']
NEWSPIDER_MODULE = 'stars.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'stars (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = True

# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
DOWNLOAD_DELAY = 5  # seconds between requests to the same site
# The download delay setting will honor only one of:
CONCURRENT_REQUESTS_PER_DOMAIN = 16
CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#  'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#  'Accept-Language': 'en',
#}

# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
#    'stars.middlewares.MyCustomSpiderMiddleware': 543,
#}

# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
#    'stars.middlewares.MyCustomDownloaderMiddleware': 543,
#}

# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
# Lower number = earlier in the chain: image download (10) runs before the
# StarInfo (301) and SQLite (302) stages.
ITEM_PIPELINES = {
    'scrapy.pipelines.images.ImagesPipeline': 10,
    #'stars.pipelines.StarsCatalogPipeline': 300,
    'stars.pipelines.StarInfoPipeline': 301,
    'stars.pipelines.Sqlite3Pipeline': 302,
}

# Directory where ImagesPipeline stores downloaded files.
IMAGES_STORE = 'images'
# thumbnails
# IMAGES_THUMBS = {
#     'small': (50, 50),
#     'big': (270, 270),
# }
# filter out small images
# IMAGES_MIN_HEIGHT = 110
# IMAGES_MIN_WIDTH = 110

# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
HTTPCACHE_ENABLED = True
HTTPCACHE_EXPIRATION_SECS = 0  # 0 = cached responses never expire
HTTPCACHE_DIR = 'httpcache'
HTTPCACHE_IGNORE_HTTP_CODES = []
HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'

LOG_LEVEL = 'DEBUG'
LOG_FILE = 'logs'

# Custom settings read by stars.pipelines.Sqlite3Pipeline:
SQLITE_FILE = 'dump.db'
SQLITE_TABLE = 'stars'
# Column type codes for SQLITE_TABLE_DESC:
# i -> int
# s -> small string
# S -> big string
SQLITE_TABLE_DESC = {
    'starid': 'i',
    'url': 's',
    'capital': 's',
    'name': 's',
    'another_name': 's',
    'english_name': 's',
    'gender': 's',
    'birthyear': 's',
    'birthday': 's',
    'constellation': 's',
    'nationality': 's',
    'area': 's',
    'profession': 's',
    'height': 's',
    'bloodtype': 's',
    'brief': 'S',
    'avatar': 'S',
    'album': 'S'
}
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,959
|
wikty/Spiders
|
refs/heads/master
|
/ejob/db/base_db.py
|
import abc
class BaseDb(metaclass=abc.ABCMeta):
    """Abstract interface every concrete database backend must implement.

    Subclasses (e.g. a SQLite backend) provide table creation, query,
    insert/update/delete and connection management.

    Note: mutable default arguments (``fields=[]`` / ``fields={}``) were
    replaced with ``None`` sentinels — concrete implementations should
    substitute an empty list/dict when the argument is omitted.
    """

    @abc.abstractmethod
    def close(self):
        """Close the underlying connection and release resources."""
        pass

    @abc.abstractmethod
    def sql(self, q):
        """Execute a raw SQL statement ``q``."""
        pass

    @abc.abstractmethod
    def create_table(self, tbl_name, fields, extra=None):
        """Create table ``tbl_name`` with the given field definitions."""
        pass

    @abc.abstractmethod
    def select_table(self, tbl_name, fields=None, where_condition=None):
        """Select ``fields`` (all when None/empty) from ``tbl_name``."""
        pass

    @abc.abstractmethod
    def count_table(self, tbl_name, where_condition=None):
        """Return the number of rows matching ``where_condition``."""
        pass

    @abc.abstractmethod
    def insert_table(self, tbl_name, fields=None):
        """Insert one row given a column -> value mapping."""
        pass

    @abc.abstractmethod
    def insert_many_table(self, tbl_name, keys, values):
        """Bulk-insert rows: ``keys`` are column names, ``values`` the rows."""
        pass

    @abc.abstractmethod
    def delete_table(self, tbl_name):
        """Delete rows from (or drop) table ``tbl_name``."""
        pass

    @abc.abstractmethod
    def update_table(self, tbl_name, fields, where_condition):
        """Update matching rows with the column -> value mapping ``fields``."""
        pass
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,960
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/spiders/rongyp_catalog_spider.py
|
# -*- coding: utf-8 -*-
import scrapy
from ejob.items import CatalogItem
class RongypCatalogSpiderSpider(scrapy.Spider):
    """Collect job-category names and links from rongyp.com's job search page."""

    name = "rongyp_catalog_spider"
    allowed_domains = ["rongyp.com"]
    start_urls = ['https://www.rongyp.com/index.php?m=Home&c=Job&a=jobSearch']

    def start_requests(self):
        # One non-redirecting request per seed URL.
        for seed in self.start_urls:
            req = scrapy.Request(seed, callback=self.parse)
            req.meta['dont_redirect'] = True
            yield req

    def parse(self, response):
        # Each <li> in #tabBox holds one top-level category (h2) plus its sub-links.
        for entry in response.xpath('//ul[@id="tabBox"]/li'):
            category = entry.xpath('.//h2/text()').extract_first()
            for anchor in entry.xpath('.//dl/dd/a'):
                label = anchor.xpath('text()').extract_first()
                href = anchor.xpath('@href').extract_first()
                # Skip incomplete links and the catch-all "其它" bucket.
                if href and label and label != '其它':
                    item = CatalogItem()
                    item['id'] = ''
                    item['category'] = category
                    item['name'] = label
                    item['url'] = response.urljoin(href)
                    yield item
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,961
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/test/check_tor_control_port.py
|
import getpass
import sys
import stem
import stem.connection
from stem.control import Controller
# Tor Browser's default ControlPort.
CONTROL_PORT = 9151

if __name__ == '__main__':
    # Connect to the Tor control socket.
    try:
        controller = Controller.from_port(port=CONTROL_PORT)
    except stem.SocketError as e:
        print("Unable to connect to tor on port %d: %s" % (CONTROL_PORT, e))
        sys.exit(1)
    # Authenticate; prompt for a password only when Tor asks for one.
    try:
        controller.authenticate()
    except stem.connection.MissingPassword:
        pw = getpass.getpass("Tor ControllerPort Password: ")
        try:
            controller.authenticate(password=pw)
        except stem.connection.PasswordAuthFailed:
            print("Unable to authenticate, password is incorrect")
            sys.exit(1)
    except stem.connection.AuthenticationFailure as e:
        print("Unable to authenticate: %s" % e)
        sys.exit(1)
    except Exception as e:
        # BUG FIX: original printed the uninformative literal "Wrong" and
        # discarded the exception; report what actually went wrong.
        print("Unexpected error while authenticating: %s" % e)
        sys.exit(1)
    print("Tor is running version %s" % controller.get_version())
    controller.close()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,962
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/spiders/lagou_catalog_spider.py
|
# -*- coding: utf-8 -*-
import scrapy
from ejob.items import CatalogItem
class LagouCatalogSpiderSpider(scrapy.Spider):
    """Scrape the '金融' (finance) job-category links from lagou.com's home sidebar."""

    name = "lagou_catalog_spider"
    allowed_domains = ["lagou.com"]
    start_urls = ['http://www.lagou.com/']

    def _link_to_item(self, response, link, category):
        # Build one CatalogItem from an <a> element of the sidebar menu.
        item = CatalogItem()
        item['category'] = category
        item['id'] = link.xpath('@data-lg-tj-no').extract_first()
        item['name'] = link.xpath('text()').extract_first()
        item['url'] = response.urljoin(link.xpath('@href').extract_first())
        return item

    def parse(self, response):
        # Find the menu box whose heading contains '金融'.
        xpath = '//*[@id="sidebar"]//*[@class="menu_box"][.//h2[contains(text(), "金融")]]'
        menu_box = response.xpath(xpath)
        if not menu_box:
            self.logger.error('Menu Element Cannot be found: %s', response.url)
            return
        menu_main = menu_box.xpath('*[contains(@class, "menu_main")]')
        menu_sub = menu_box.xpath('*[contains(@class, "menu_sub")]')
        if not (menu_main and menu_sub):
            self.logger.error('Menu Element Cannot be found: %s', response.url)
            return
        # BUG FIX: the original instantiated an undefined `LagouCatalogItem`
        # here (only CatalogItem is imported), which raised NameError at the
        # first main-menu link.
        for link in menu_main.xpath('a'):
            yield self._link_to_item(response, link, '金融')
        # Sub-menu: one <dl> per sub-category, labeled by its <dt><span>.
        for dl in menu_sub.xpath('dl'):
            category = dl.xpath('dt/span/text()').extract_first()
            for link in dl.xpath('dd/a'):
                yield self._link_to_item(response, link, category)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,963
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/pipelines.py
|
# -*- coding: utf-8 -*-
import codecs, json, sqlite3
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class StarsCatalogPipeline(object):
    """Accumulate a {capital letter -> [{'url', 'name'}, ...]} index of stars
    and dump it to stars_catalog.json when the spider closes."""

    def open_spider(self, spider):
        # Fresh output file and in-memory index for each crawl.
        self.f = codecs.open('stars_catalog.json', 'w', encoding='utf-8')
        self.catalog = {}

    def close_spider(self, spider):
        # Serialize the whole catalog once, at the end of the crawl.
        self.f.write(json.dumps(self.catalog, ensure_ascii=False))
        self.f.close()

    def process_item(self, item, spider):
        # Group entries under the star's capital letter.
        bucket = self.catalog.setdefault(item['capital'], [])
        bucket.append({
            'url': item['url'],
            'name': item['name']
        })
        return item
class StarInfoPipeline(object):
    """No-op pipeline stage: passes every item through unchanged."""

    def process_item(self, item, spider):
        return item
class Sqlite3Pipeline(object):
    """Persist each scraped star item as one row of a SQLite table.

    The table layout comes from the SQLITE_TABLE_DESC setting
    ('i' -> INTEGER, 'S' -> TEXT, anything else -> VARCHAR(255)).
    """

    def __init__(self, sqlite_file, sqlite_table, sqlite_table_desc, image_dir):
        self.sqlite_file = sqlite_file
        self.sqlite_table = sqlite_table
        self.sqlite_table_desc = sqlite_table_desc
        self.image_dir = image_dir

    @classmethod
    def from_crawler(cls, crawler):
        # Pull configuration from the project settings, validating the schema.
        file = crawler.settings.get('SQLITE_FILE', 'sqlite3.db')
        table = crawler.settings.get('SQLITE_TABLE', 'items')
        tbl_desc = crawler.settings.get('SQLITE_TABLE_DESC')
        image_dir = crawler.settings.get('IMAGES_STORE')
        if not tbl_desc:
            raise Exception('SQLITE_TABLE_DESC is missed in the settings file')
        if not isinstance(tbl_desc, dict):
            raise Exception('SQLITE_TABLE_DESC must be a dictionary')
        return cls(
            sqlite_file=file,
            sqlite_table=table,
            sqlite_table_desc=tbl_desc,
            image_dir=image_dir
        )

    def open_spider(self, spider):
        # Create the table on first use: autoincrement id plus one column
        # per described field.
        self.client = sqlite3.connect(self.sqlite_file)
        self.cursor = self.client.cursor()
        sql = 'CREATE TABLE IF NOT EXISTS {table} ({fields})'
        fields = ['id INTEGER PRIMARY KEY AUTOINCREMENT']
        for field, dtype in self.sqlite_table_desc.items():
            if dtype == 'i':
                field = '%s INTEGER' % field
            elif dtype == 'S':
                field = '%s TEXT' % field
            else:
                field = '%s VARCHAR(255)' % field
            fields.append(field)
        sql = sql.format(table=self.sqlite_table, fields=', '.join(fields))
        self.cursor.execute(sql)
        self.client.commit()

    def close_spider(self, spider):
        self.client.close()

    def item2dict(self, item):
        """Flatten a star item into a column -> value dict.

        'avatar' becomes a JSON [local_path, url] pair; 'album' becomes a
        JSON list of such pairs for every non-avatar image.
        """
        # BUG FIX: 'avatar' is pre-initialized so that an item whose images
        # list does not contain the avatar URL no longer raises KeyError at
        # the json.dumps call below.
        d = {'avatar': [], 'album': []}
        avatar_url = item['avatar']
        for key in self.sqlite_table_desc:
            if key == 'avatar':
                for image in item['images']:
                    if image['url'] == avatar_url:
                        d['avatar'] = [self.image_dir + '/' + image['path'], avatar_url]
            elif key == 'album':
                for image in item['images']:
                    if image['url'] != avatar_url:
                        d['album'].append([self.image_dir + '/' + image['path'], image['url']])
            else:
                d[key] = item[key]
        d['avatar'] = json.dumps(d['avatar'], ensure_ascii=False)
        d['album'] = json.dumps(d['album'], ensure_ascii=False)
        return d

    def process_item(self, item, spider):
        # Parameterized INSERT keeps values safely escaped.
        d = self.item2dict(item)
        sql = 'INSERT INTO {table} ({keys}) VALUES ({values})'
        sql = sql.format(
            table=self.sqlite_table,
            keys=', '.join(d.keys()),
            values=', '.join(['?'] * len(d.keys()))
        )
        self.cursor.execute(sql, list(d.values()))
        self.client.commit()
        return item
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,964
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/spiders/stars_spider.py
|
import re, string, json, urllib
import scrapy
from stars.items import StarInfoItem
class StarsSpider(scrapy.Spider):
    """Walk QQ entertainment's mainland-star index and yield one StarInfoItem
    (profile fields, photo URLs, biography) per star."""

    name = "stars-spider"
    start_urls = [
        'http://ent.qq.com/c/dalu_star.shtml',
    ]
    # Index rows are grouped by initial letter: 'A'..'Z' plus a '0-9' bucket.
    stars_capital = [c for c in string.ascii_uppercase]
    stars_capital.append('0-9')
    # item field name -> Chinese label shown on the profile page
    starinfo_fields = {
        'name': '姓名',
        'another_name': '原名',
        'gender': '性别',
        'english_name': '英文名',
        'birthyear': '出生年',
        'birthday': '生日',
        'constellation': '星座',
        'nationality': '国籍',
        'area': '地域',
        'profession': '职业',
        'height': '身高',
        'bloodtype': '血型'
    }
    starinfo_url = 'http://datalib.ent.qq.com/star/%d/starinfo.shtml'

    def start_requests(self):
        for url in self.start_urls:
            request = scrapy.Request(url, callback=self.parse)
            yield request

    def parse(self, response):
        """Scan index rows ('A1', 'A2', ...) until a capital runs out of rows,
        issuing one profile request per star link found."""
        for capital in self.stars_capital:
            count = 1
            while True:
                rowid = capital + ('%d' % count)
                count += 1
                links = response.xpath('//tr[@id="%s"]//a' % rowid)
                if not links:
                    break
                for link in links:
                    url = link.xpath('@href').extract_first()
                    name = link.xpath('@title').extract_first()
                    if url is None:
                        self.logger.error('url is empty')
                    elif name:
                        request = scrapy.Request(url, callback=self.parse_star)
                        request.meta['capital'] = capital
                        request.meta['name'] = name
                        # The star id is the second-to-last URL path segment.
                        request.meta['id'] = int(url.split('/')[-2])
                        yield request

    def parse_star(self, response):
        """Extract profile fields, photo URLs and the biography for one star."""
        # BUG FIX: the module only does `import urllib`, which does not load
        # the `urllib.request` submodule; import it explicitly before use.
        import urllib.request
        starinfo = StarInfoItem()
        starinfo['starid'] = response.meta['id']
        starinfo['capital'] = response.meta['capital']
        starinfo['name'] = response.meta['name']
        starinfo['url'] = response.url
        avatar_url = response.xpath('//div[@id="star_face"]/a/img/@src').extract_first(default='')
        starinfo['avatar'] = avatar_url
        starinfo['album'] = []
        image_urls = [avatar_url]
        # Photo strips sit in consecutive #demo1, #demo2, ... containers.
        count = 1
        while True:
            imgs = response.xpath('//*[@id="demo%d"]//img/@src' % count).extract()
            count += 1
            if not imgs:
                break
            image_urls += imgs
        starinfo['image_urls'] = image_urls
        xpath = '//div[@id="infos"]//td[strong[contains(text(), "{field}")]]/text()'
        for k, field in self.starinfo_fields.items():
            value = response.xpath(xpath.format(field=field)).extract_first()
            starinfo[k] = value.strip() if value else ''
        # The biography lives on a separate page, fetched synchronously here.
        starinfo_url = self.starinfo_url % starinfo['starid']
        body = urllib.request.urlopen(starinfo_url).read().decode('gbk').encode('utf-8').decode('utf-8')
        r = response.replace(body=body)
        xpath = '//div[@id="left"]/table[2]//td[@class="line22"]/text()'
        starinfo['brief'] = r.xpath(xpath).extract_first('').strip()
        yield starinfo
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,965
|
wikty/Spiders
|
refs/heads/master
|
/netmusic/test/api.py
|
##
# selenium.webdriver module provides all the WebDriver implementations.
# Currently supported WebDriver implementations are Firefox, Chrome, IE and Remote.
##
from selenium import webdriver
# webdriver.Firefox(
# firefox_profile=None,
# firefox_binary=None,
# timeout=30,
# capabilities=None,
# proxy=None,
# executable_path='geckodriver',
# firefox_options=None,
# log_path='geckodriver.log')
# webdriver.FirefoxProfile
# webdriver.Chrome(
# executable_path='chromedriver',
# port=0,
# chrome_options=None,
# service_args=None,
# desired_capabilities=None,
# service_log_path=None)
# webdriver.ChromeOptions
# webdriver.Ie
# webdriver.Opera
# webdriver.PhantomJS
# webdriver.Remote
# webdriver.DesiredCapabilities
# webdriver.ActionChains
# webdriver.TouchActions
# webdriver.Proxy
##
# Keys class provide keys in the keyboard like RETURN, F1, ALT etc.
##
from selenium.webdriver.common.keys import Keys
# exceptions
# from selenium.common.exceptions import [TheNameOfTheExceptionClass]
##
# ActionChains are a way to automate low level interactions such as mouse movements,
# mouse button actions, key press, and context menu interactions.
# This is useful for doing more complex actions like hover over and drag and drop.
##
from selenium.webdriver.common.action_chains import ActionChains
# chain pattern
# menu = driver.find_element_by_css_selector(".nav")
# hidden_submenu = driver.find_element_by_css_selector(".nav #submenu1")
# ActionChains(driver).move_to_element(menu).click(hidden_submenu).perform()
# queue pattern
# menu = driver.find_element_by_css_selector(".nav")
# hidden_submenu = driver.find_element_by_css_selector(".nav #submenu1")
# actions = ActionChains(driver)
# actions.move_to_element(menu)
# actions.click(hidden_submenu)
# actions.perform()
##
# Use this class to interact with alert prompts.
# It contains methods for dismissing, accepting,
# inputting, and getting text from alert prompts.
##
from selenium.webdriver.common.alert import Alert
##
# utils
##
# selenium.webdriver.common.utils.find_connectable_ip
# selenium.webdriver.common.utils.free_port
# selenium.webdriver.common.utils.join_host_port
# selenium.webdriver.common.utils.keys_to_typing
##
# Color conversion support class
##
from selenium.webdriver.support.color import Color
# print(Color.from_string('#00ff33').rgba)
# print(Color.from_string('rgb(1, 255, 3)').hex)
# print(Color.from_string('blue').rgba)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,966
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/spiders/rongyp_job_spider_test.py
|
# -*- coding: utf-8 -*-
import os, json, re
from urllib.parse import quote
import scrapy
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from ejob.items import JobItem
from ejob.item_loaders import RongypJobItemLoader
def filter_job_url(url):
    """Keep only job-detail URLs, i.e. those ending in 'openings_id=<digits>'."""
    return url if re.search(r'openings_id=\d+$', url) else None
def process_page_request(request):
    """Pagination-rule hook: pass the request through unchanged."""
    return request
class RongypJobSpiderSpider(CrawlSpider):
    """Crawl rongyp.com job listings: follow pagination and job-detail links
    from a set of category search URLs, loading each posting into a JobItem."""

    name = "rongyp_job_spider_test"
    allowed_domains = ["rongyp.com"]
    start_urls = ['http://www.rongyp.com/']
    # (category_name, search_url) seeds; extended from `urlfile` at init time.
    urls = [('category', 'https://www.rongyp.com/index.php?m=Home&c=Job&a=jobSearch&tb_city=&tb_jobtype=&tb_jobtype_two=2132&tb_salary=&tb_workyear=&tb_degree=&tb_worknature=&dayscope=&keyword=&orderby=&company_size=')]
    rules = [
        Rule(LinkExtractor(tags=('a', ), restrict_xpaths=('//*[class="rightmember-page"]', )), process_request=process_page_request, follow=True),
        Rule(LinkExtractor(tags=('a', ), attrs=('href', ), unique=True, restrict_xpaths=('//*[@class="ryp-search-list"]/*[@class="ryp-search-li"]/p', ), process_value=filter_job_url), callback='parse_job'),
    ]

    def __init__(self, urlfile=None, *args, **kwargs):
        # Passed on the command line: scrapy crawl ... -a urlfile=<path>
        super(RongypJobSpiderSpider, self).__init__(*args, **kwargs)
        # BUG FIX: copy the class-level list so instances do not mutate the
        # shared class attribute via append below.
        self.urls = list(self.urls)
        if urlfile:
            with open(urlfile, 'r', encoding='utf8') as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    # BUG FIX: `item` was never assigned in the original
                    # (NameError). Each non-empty line is expected to be a
                    # JSON object with 'name' and 'url' keys — confirm the
                    # url-file format against its producer.
                    item = json.loads(line)
                    self.urls.append((item['name'], item['url']))
        else:
            print('URL file is missed')

    def start_requests(self):
        # One non-redirecting request per category, tagged with its name.
        for category_name, category_url in self.urls:
            request = scrapy.Request(category_url, dont_filter=False)
            request.meta['dont_redirect'] = True
            request.meta['category_name'] = category_name
            yield request

    def preprocess_request(self, request):
        # Hook kept for future use; currently passes the request through.
        return request

    def parse_job(self, response):
        """Populate a JobItem from one job-detail page via the item loader."""
        l = RongypJobItemLoader(item=JobItem(), response=response)
        info = response.xpath('//*[contains(@class, "ryp-info")]/*[contains(@class, "ryp-mask")]')
        l.add_value('position', info.xpath('h1/text()').extract_first())
        l.add_value('salary', info.xpath('h6/*[@class="salary"]/text()').extract_first())
        # h6 free text holds "jobtype | education | experience"; pad to 3.
        t = ''.join(info.xpath('h6/text()').extract())
        t = [s.strip() for s in t.split('|') if s.strip()]
        if len(t) < 3:
            t += [''] * (3 - len(t))
        l.add_value('jobtype', t[0])
        l.add_value('education', t[1])
        l.add_value('exprience', t[2])
        l.add_value('temptation', response.xpath('//*[contains(@class, "ryp-weals")]/a/text()').extract())
        l.add_value('rawpost', response.xpath('//*[contains(@class, "ryp-detail-content")]/p/text()').extract())
        company = response.xpath('//*[contains(@class, "ryp-detail-right")]//*[@class="company"]')
        l.add_value('company_name', company.xpath('h3/a/text()').extract_first())
        # Company facts: region (区域), industry (行业), size (规模).
        for detail in company.xpath('*[@class="detail"]'):
            detail_name = ''.join(detail.xpath('text()').extract())
            detail_value = detail.xpath('span/text()').extract_first(default='')
            if '区域' in detail_name:
                l.add_value('company_brief', '区域: {}'.format(detail_value))
            elif '行业' in detail_name:
                l.add_value('company_brief', '行业: {}'.format(detail_value))
            elif '规模' in detail_name:
                l.add_value('company_brief', '规模: {}'.format(detail_value))
        l.add_value('address', response.xpath('//*[contains(@class, "ryp-map")]//*[contains(@class, "company-adress")]/text()').extract_first())
        yield l.load_item()
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,967
|
wikty/Spiders
|
refs/heads/master
|
/ejob/ejob/spiders/pinggu_catalog_spider.py
|
# -*- coding: utf-8 -*-
import scrapy
class PingguCatalogSpiderSpider(scrapy.Spider):
    """Placeholder spider for bbs.pinggu.org's job board; parsing not implemented yet."""

    name = "pinggu_catalog_spider"
    allowed_domains = ["pinggu.org"]
    start_urls = ['http://bbs.pinggu.org/z_rc.php']

    def parse(self, response):
        # No extraction logic yet — intentionally a no-op.
        pass
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,968
|
wikty/Spiders
|
refs/heads/master
|
/china_stars/stars/stars/spiders/starinfo_spier.py
|
import re, string, json, urllib
import scrapy
from stars.items import StarInfoItem
class StarsTestSpider(scrapy.Spider):
    """Single-star test spider: scrapes one hard-coded profile page using the
    same extraction logic as StarsSpider."""

    name = "starinfo-spider"
    start_urls = [
        'http://datalib.ent.qq.com/star/2829/index.shtml',
    ]
    # item field name -> Chinese label shown on the profile page
    starinfo_fields = {
        'name': '姓名',
        'another_name': '原名',
        'gender': '性别',
        'english_name': '英文名',
        'birthyear': '出生年',
        'birthday': '生日',
        'constellation': '星座',
        'nationality': '国籍',
        'area': '地域',
        'profession': '职业',
        'height': '身高',
        'bloodtype': '血型'
    }
    starinfo_url = 'http://datalib.ent.qq.com/star/%d/starinfo.shtml'

    def start_requests(self):
        # Fixed test fixture: one known star, hard-coded metadata.
        for url in self.start_urls:
            request = scrapy.Request(url, callback=self.parse_star)
            request.meta['name'] = '张一山'
            request.meta['id'] = int(url.split('/')[-2])
            request.meta['capital'] = 'Z'
            yield request

    def parse_star(self, response):
        """Extract profile fields, photo URLs and the biography for one star."""
        # BUG FIX: the module only does `import urllib`, which does not load
        # the `urllib.request` submodule; import it explicitly before use.
        import urllib.request
        starinfo = StarInfoItem()
        starinfo['starid'] = response.meta['id']
        starinfo['capital'] = response.meta['capital']
        starinfo['name'] = response.meta['name']
        starinfo['url'] = response.url
        avatar_url = response.xpath('//div[@id="star_face"]/a/img/@src').extract_first(default='')
        starinfo['avatar'] = avatar_url
        starinfo['album'] = []
        image_urls = [avatar_url]
        # Photo strips sit in consecutive #demo1, #demo2, ... containers.
        count = 1
        while True:
            imgs = response.xpath('//*[@id="demo%d"]//img/@src' % count).extract()
            count += 1
            if not imgs:
                break
            image_urls += imgs
        starinfo['image_urls'] = image_urls
        xpath = '//div[@id="infos"]//td[strong[contains(text(), "{field}")]]/text()'
        for k, field in self.starinfo_fields.items():
            value = response.xpath(xpath.format(field=field)).extract_first()
            starinfo[k] = value.strip() if value else ''
        # The biography lives on a separate page, fetched synchronously here.
        starinfo_url = self.starinfo_url % starinfo['starid']
        body = urllib.request.urlopen(starinfo_url).read().decode('gbk').encode('utf-8').decode('utf-8')
        r = response.replace(body=body)
        xpath = '//div[@id="left"]/table[2]//td[@class="line22"]/text()'
        starinfo['brief'] = r.xpath(xpath).extract_first('').strip()
        yield starinfo
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,969
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/test/renew_tor_connection.py
|
# -*- coding: utf-8 -*-
import time
import urllib
import requests
import stem
import stem.connection
from stem import Signal
from stem.control import Controller
PROXY_PORT = 8118 # privoxy proxy port
TOR_CONTROL_PORT = 9151
TOR_CONTROL_PASSWORD = '123456'
def create_http_session(proxy_port):
    """Build a requests session whose HTTP traffic goes through the local
    privoxy proxy on ``proxy_port`` (which forwards to Tor)."""
    session = requests.Session()
    session.proxies = {
        "http": "http://127.0.0.1:%d" % proxy_port
    }
    session.headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7'
    })
    return session
def query(session, url):
    """GET ``url`` through the given session and return the response body text."""
    return session.get(url).text
# signal TOR for a new connection
def renew_tor_connection(control_port, password):
    """Signal the local Tor process (via its control port) to build a fresh
    circuit, so subsequent traffic exits from a new IP address."""
    with Controller.from_port(port=control_port) as controller:
        controller.authenticate(password=password)
        controller.signal(Signal.NEWNYM)
    # FIX: the explicit controller.close() inside the with-block was removed —
    # the context manager already closes the controller on exit.
if __name__ == '__main__':
    # Demo: rotate the Tor exit IP 7 times, waiting each round until the
    # externally visible address actually changes.
    interval = 2  # seconds between IP re-checks
    oldIP = "0.0.0.0"
    newIP = "0.0.0.0"
    http_session = create_http_session(PROXY_PORT)
    for i in range(7):
        renew_tor_connection(TOR_CONTROL_PORT, TOR_CONTROL_PASSWORD)
        if newIP != "0.0.0.0":
            oldIP = newIP
        newIP = query(http_session, "http://icanhazip.com/")
        seconds = 0
        # loop until the "new" IP address
        # is different than the "old" IP address,
        # as it may take the TOR network some
        # time to effect a different IP address
        while oldIP == newIP:
            time.sleep(interval)
            seconds += interval
            newIP = query(http_session, "http://icanhazip.com/")
            print ("%d seconds elapsed awaiting a different IP address." % seconds)
        # new IP address
        print ("newIP: %s" % newIP)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,970
|
wikty/Spiders
|
refs/heads/master
|
/notification/notification/middlewares.py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
from scrapy.http import HtmlResponse
from selenium import webdriver
from selenium.webdriver.common.proxy import Proxy, ProxyType
class NotificationSpiderMiddleware(object):
    """Pass-through Scrapy spider middleware.

    BUG FIX: every process_* hook was missing its ``self`` parameter, so when
    Scrapy invoked them as bound methods the first real argument would have
    been bound to ``self`` and each call would fail on argument count.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        # Called for each response entering the spider; None = continue.
        return None

    def process_spider_output(self, response, result, spider):
        # Pass the spider's results (Request/dict/Item) through unchanged.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # Called when the spider (or a later middleware) raises;
        # None = fall back to default exception handling.
        pass

    def process_start_requests(self, start_requests, spider):
        # Like process_spider_output, but for start requests (no response).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class PhantomjsMiddleware(object):
    """Render requests with headless PhantomJS, optionally running an extra
    JavaScript snippet on each page before handing the DOM back to Scrapy."""

    def __init__(self, phantomjs_path=None, extra_script=None):
        if not phantomjs_path:
            raise Exception('phantomjs path should not be empty')
        self.script = None
        if extra_script:
            # Load the JS snippet once; it is executed on every rendered page.
            with open(extra_script, 'r', encoding='utf8') as f:
                self.script = f.read()
        self.driver = webdriver.PhantomJS(phantomjs_path)

    @classmethod
    def from_crawler(cls, crawler):
        settings = crawler.settings
        return cls(settings.get('PHANTOMJS_PATH'), settings.get('EXTRA_SCRIPT'))

    def process_request(self, request, spider):
        # Let PhantomJS fetch and render, then short-circuit the downloader
        # by returning a ready response (ends process_request processing).
        self.driver.get(request.url)
        if self.script:
            self.driver.execute_script(self.script)
        rendered = self.driver.page_source.encode('utf8')
        return HtmlResponse(url=self.driver.current_url, body=rendered)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,971
|
wikty/Spiders
|
refs/heads/master
|
/scrapy_tor/example/example/middlewares.py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
import random
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
from stem import Signal
from stem.control import Controller
from scrapy.http import HtmlResponse
from selenium import webdriver
class RandomUserAgentMiddleware(UserAgentMiddleware):
    """Set a random User-Agent header on every outgoing request."""

    def __init__(self, agents=None):
        # FIX: default was a mutable list literal (``agents=[]``); use None
        # as sentinel. Behavior is unchanged: any falsy value falls back to
        # the single built-in UA string.
        super(RandomUserAgentMiddleware, self).__init__()
        if not agents:
            agents = ['Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)']
        self.agents = agents

    @classmethod
    def from_crawler(cls, crawler):
        # Load one UA per line from the file named by USER_AGENT_LIST.
        with open(crawler.settings.get('USER_AGENT_LIST'), 'r') as f:
            ua_list = [ua.strip() for ua in f.readlines()]
        return cls(ua_list)

    def process_request(self, request, spider):
        ua = random.choice(self.agents)
        request.headers.setdefault('User-Agent', ua)
class ProxyMiddleware(object):
    """Periodically renew the Tor circuit.

    Every ``times`` (50) calls to process_request, signal Tor's control port
    with NEWNYM so subsequent traffic exits from a fresh IP.
    """

    def __init__(self, http_proxy=None, tor_control_port=None, tor_password=None):
        if not http_proxy:
            raise Exception('http proxy setting should not be empty')
        if not tor_control_port:
            raise Exception('tor control port setting should not be empty')
        if not tor_password:
            raise Exception('tor password setting should not be empty')
        self.http_proxy = http_proxy
        self.tor_control_port = tor_control_port
        self.tor_password = tor_password
        self.count = 1
        self.times = 50

    @classmethod
    def from_crawler(cls, crawler):
        settings = crawler.settings
        return cls(
            settings.get('HTTP_PROXY'),
            settings.get('TOR_CONTROL_PORT'),
            settings.get('TOR_PASSWORD'),
        )

    def process_request(self, request, spider):
        self.count = (self.count + 1) % self.times
        if not self.count:
            # Counter wrapped to zero: ask Tor for a brand-new circuit.
            with Controller.from_port(port=self.tor_control_port) as controller:
                controller.authenticate(password=self.tor_password)
                controller.signal(Signal.NEWNYM)
        # scrapy support http proxy
        #request.meta['proxy'] = self.http_proxy
class PhantomjsRequestMiddleware(object):
    """Fetch pages with headless PhantomJS so JS-rendered DOM reaches spiders."""

    def __init__(self, phantomjs_path=None):
        if not phantomjs_path:
            raise Exception('phantomjs path should not be empty')
        self.driver = webdriver.PhantomJS(phantomjs_path)

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.settings.get('PHANTOMJS_PATH'))

    def process_request(self, request, spider):
        # Render in PhantomJS and return a ready response, which ends
        # further process_request handling in the middleware chain.
        self.driver.get(request.url)
        rendered = self.driver.page_source.encode('utf8')
        return HtmlResponse(url=self.driver.current_url, body=rendered)
class DynamicPageProxyRequestMiddleware(object):
    """PhantomJS rendering routed through an HTTP proxy (e.g. privoxy->Tor)."""

    def __init__(self, phantomjs_path=None, proxy=None):
        if not phantomjs_path:
            raise Exception('phantomjs path should not be empty')
        if not proxy:
            raise Exception('proxy should not be empty')
        service_args = [
            '--proxy=%s' % proxy,
            '--proxy-type=http',
            '--ignore-ssl-errors=true',
        ]
        self.driver = webdriver.PhantomJS(phantomjs_path, service_args=service_args)

    @classmethod
    def from_crawler(cls, crawler):
        settings = crawler.settings
        return cls(settings.get('PHANTOMJS_PATH'), settings.get('HTTP_PROXY'))

    def process_request(self, request, spider):
        # Render the page through the proxied browser and short-circuit the
        # downloader by returning a finished response.
        self.driver.get(request.url)
        rendered = self.driver.page_source.encode('utf8')
        return HtmlResponse(url=self.driver.current_url, body=rendered)
|
{"/ejob/db/sqlite_db.py": ["/ejob/db/base_db.py"], "/ejob/db/__init__.py": ["/ejob/db/sqlite_db.py"]}
|
17,972
|
zzhacked/securityMonitor
|
refs/heads/master
|
/connect_db.py
|
from pymongo import MongoClient
def enqury_data():
    """Connect to the configured MongoDB server and return the authenticated
    database handle.

    NOTE: the name keeps its original (misspelled) form for backward
    compatibility with existing callers. The connection settings below are
    empty placeholders and must be filled in before use.
    """
    DBNAME = ''      # database name
    DBUSERNAME = ''  # database user
    DBPASSWORD = ''  # database password
    DB = ''          # database host/IP
    PORT = 27017     # default MongoDB port
    db_conn = MongoClient(DB, PORT)
    na_db = getattr(db_conn, DBNAME)
    na_db.authenticate(DBUSERNAME, DBPASSWORD)
    # FIX: removed the unused local `wordlist` the original built and dropped.
    return na_db

if __name__ == '__main__':
    enqury_data()
|
{"/flask_demo.py": ["/connect_db.py"]}
|
17,973
|
zzhacked/securityMonitor
|
refs/heads/master
|
/spider/get_wechat_info.py
|
import requests
from bs4 import BeautifulSoup as bs
from lxml import etree
import time
from PIL import Image
from fateadm_api import FateadmApi
import datetime
from selenium import webdriver
from pymongo import MongoClient
import random
import re
# Pool of User-Agent strings to rotate through when scraping.
# BUG FIX: a missing comma after the QQDownload/MSIE 6.0 entry caused Python's
# implicit string concatenation to fuse two UA strings into one bogus entry.
user_agent = [
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
    'Opera/8.0 (Windows NT 5.1; U; en)',
    'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50'
]
def cookie_init():
    """Solve Sogou's anti-spider captcha with headless Chrome and return a fresh SNUID.

    Makes up to two attempts; when both fail the loop exits and the function
    implicitly returns None — callers should be prepared for that.
    """
    retries = 1
    while retries < 3:
        cookie = {}
        # NOTE(review): ``headers`` is assigned but never used in this function.
        headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) HeadlessChrome/65.0.3325.181 Safari/537.36'}
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-gpu')
        chrome_options.add_argument('--no-sandbox')
        chrome_options.add_argument('--disable-dev-shm-usage')
        # NOTE(review): the Chrome instance is never quit; each retry leaks a
        # browser process.
        client = webdriver.Chrome(options=chrome_options)
        client.get("https://weixin.sogou.com/antispider/?from=%2fweixin%3Ftype%3d2%26query%3d360CERT")
        path = './1.png'
        imgpath = './yzm.png'
        client.get_screenshot_as_file(path)
        im = Image.open(path)
        box = (705, 598, 900, 680)  # crop box for the captcha area of the screenshot
        region = im.crop(box)
        region.save(imgpath)
        capt = client.find_element_by_xpath('//*[@id="seccodeInput"]')
        test = FateadmApi('','','','')  # captcha-solving service client (credentials blank)
        code = test.PredictFromFile('30600','./yzm.png')  # recognize the captcha image
        print(code)
        capt.send_keys(code)
        time.sleep(1)
        client.find_element_by_xpath('//*[@id="submit"]').click()
        time.sleep(2)
        for item in client.get_cookies():
            cookie[item["name"]] = item["value"]
        try:
            print(cookie['SNUID'])
        except Exception:
            # A missing SNUID cookie means the captcha was rejected; retry.
            print ("解锁失败。重试次数:{0:d}".format(3-retries))
            retries += 1
            continue
        time.sleep(5)
        return cookie['SNUID']
def get_info(url,table,a,tb_msg,headers):
    """Fetch the newest article for one WeChat account from Sogou results.

    Appends a summary dict to ``tb_msg`` and upserts it into MongoDB.

    url     -- Sogou WeChat search URL for the account
    table   -- list of monitored account names
    a       -- index of the current account in ``table``
    tb_msg  -- accumulator list of result dicts (mutated in place)
    headers -- HTTP headers carrying the anti-spider SNUID cookie
    """
    r = requests.get(url=url,headers=headers)
    r.encoding='utf-8'
    try:
        soup = bs(r.text,'html.parser')
        content = soup.find('a',{'uigs':'account_article_0'}).text
        send_time = soup.find_all('span')[-1].find('script').text
        # Publication time is embedded as timeConvert('<epoch>') in a script tag.
        re_time = re.findall(r"timeConvert\('(.*?)'\)",send_time)[0]
        print_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        timeArray = time.localtime(int(re_time))
        otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
        demo = {'time':otherStyleTime,'from':table[a],'content':content,'link':url}
        tb_msg.append(demo)
        DBNAME = ''
        DBUSERNAME = ''
        DBPASSWORD = ''  # BUG FIX: was missing entirely, so the authenticate()
                         # call raised NameError — silently swallowed below —
                         # and nothing was ever written to the database.
        DB = ''          # database host/IP
        PORT = 27017
        db_conn = MongoClient(DB, PORT)
        na_db = getattr(db_conn, DBNAME)
        na_db.authenticate(DBUSERNAME, DBPASSWORD)
        c = na_db.wechatdatas
        # Upsert keyed on article title so re-runs do not create duplicates.
        c.update_one({"content": demo['content']}, {'$set': demo}, True)
        with open('wechat_log.txt','a+') as f:
            f.write(print_time+'\n')
    except Exception:
        # Best-effort: a missing element on the page simply skips this account.
        pass
def wechat_info():
    """Poll Sogou's WeChat search for each monitored account.

    Returns the list of article dicts collected by ``get_info``. When Sogou
    serves its anti-spider captcha page (detected by an unusually short
    response body), a fresh SNUID cookie is obtained via ``cookie_init`` and
    cached in ``snuid.txt`` for subsequent runs.
    """
    table = ['360CERT','长亭安全课堂','千里目实验室','云鼎实验室','ADLab']
    tb_msg = []
    a = -1
    with open('./snuid.txt') as f:
        snuid = f.readline().strip()
    headers = {
        'Referer': 'http://weixin.sogou.com/weixin?type=1&query=python&ie=utf8&s_from=input&_sug_=n&_sug_type_=1&w=01015002&oq=&ri=5&sourceid=sugg&sut=0&sst0=1540733222633&lkt=0%2C0%2C0&p=40040108',
        'User-Agent': random.choice(user_agent),
        'Cookie': 'SUV=00D80B85458CAE4B5B299A407EA3A580;SNUID=' + snuid,
    }
    rr = requests.get(url='https://weixin.sogou.com/weixin?type=1&s_from=input&query=360CERT&ie=utf8&_sug_=n&_sug_type_=',headers=headers)
    # A real results page is several KB; a short body means we were blocked.
    if len(rr.text) > 6000:
        pass
    else:
        uid = cookie_init()
        headers = {
            'Referer': 'http://weixin.sogou.com/weixin?type=1&query=python&ie=utf8&s_from=input&_sug_=n&_sug_type_=1&w=01015002&oq=&ri=5&sourceid=sugg&sut=0&sst0=1540733222633&lkt=0%2C0%2C0&p=40040108',
            'User-Agent': random.choice(user_agent),
            'Cookie': 'SUV=00D80B85458CAE4B5B299A407EA3A580;SNUID=' + uid,
        }
        # FIX: use a context manager so the cookie file is always closed
        # (the original left the file handle open).
        with open('./snuid.txt','w+') as ff:
            ff.write(uid)
    for i in table:
        a += 1
        url = 'https://weixin.sogou.com/weixin?type=1&s_from=input&query=' + i + '&ie=utf8&_sug_=n&_sug_type_='
        get_info(url,table,a,tb_msg,headers)
    print(tb_msg)
    return tb_msg
if __name__ == '__main__':
    # Allow running this spider as a standalone script.
    wechat_info()
|
{"/flask_demo.py": ["/connect_db.py"]}
|
17,974
|
zzhacked/securityMonitor
|
refs/heads/master
|
/spider/get_cve_info.py
|
import re
import time
import requests
import os
from pymongo import MongoClient
from bs4 import BeautifulSoup as bs
# Timestamp captured once at import; every record from this run shares it.
nowtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
def get_CVE_urls():
    """Scrape today's CVE change page and return the links for new entries."""
    res = requests.get('https://cassandra.cerias.purdue.edu/CVE_changes/today.html')
    # Only the section between "New entries:" and "Graduations" lists new CVEs.
    sections = re.findall(r"New entries:(.*?)Graduations", res.text, re.S | re.M)
    collected = []
    for section in sections:
        anchors = bs(section, 'html.parser').find_all('a')
        collected.extend(anchor['href'] for anchor in anchors)
    return collected
def CVE_info():
    """Fetch details for each new CVE and upsert them into MongoDB.

    Returns a message string when there are no new CVEs today, otherwise a
    list of per-CVE dicts (mixed return types kept for existing callers).
    """
    urls = get_CVE_urls()
    wordlist = []
    if len(urls) == 0:
        msg = nowtime + '<p>今日CVE_today风和日丽,无大事发生!!!</p>'
        return msg
    # FIX: connect to the database once instead of reconnecting per CVE
    # (the original rebuilt the MongoClient inside the loop).
    DBNAME = ''
    DBUSERNAME = ''
    DBPASSWORD = ''
    DB = ''  # database host/IP
    PORT = 27017
    db_conn = MongoClient(DB, PORT)
    na_db = getattr(db_conn, DBNAME)
    na_db.authenticate(DBUSERNAME, DBPASSWORD)
    c = na_db.cvedatas
    for url in urls:
        res = requests.get(url, timeout=60)
        soup = bs(res.text, 'html.parser')
        cveId = soup.find(nowrap='nowrap').find('h2').string
        table = soup.find(id='GeneratedTable').find('table')
        content = table.find_all('tr')[3].find('td').text
        data = {'time':nowtime,'from':'CVE-Today-'+cveId,'content':content,'link':url}
        # Upsert keyed on the description so re-runs do not duplicate entries.
        c.update_one({"content": data['content']}, {'$set': data}, True)
        wordlist.append(data)
    return wordlist
if __name__ == '__main__':
    # Allow running this spider as a standalone script.
    CVE_info()
|
{"/flask_demo.py": ["/connect_db.py"]}
|
17,975
|
zzhacked/securityMonitor
|
refs/heads/master
|
/flask_demo.py
|
import re
import time
from flask import Flask
from flask import request
from flask import render_template
from connect_db import enqury_data
# Flask application serving the security-monitoring dashboard.
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def home():
    """Landing page: overall and per-day document counts for each collection."""
    na_db = enqury_data()
    now_time = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    today = re.compile(now_time)  # matches timestamps beginning with today's date
    data_len = {
        'cve_len': na_db.cvedatas.count(),
        'wechat_len': na_db.wechatdatas.count(),
        'deepweb_len': na_db.deepwebdatas.count(),
        'anquanke_len': na_db.anquankedatas.count(),
        'seebug_len': na_db.seebugdatas.count(),
    }
    today_len = {
        'cve_today': na_db.cvedatas.find({'time': today}).count(),
        'wechat_today': na_db.wechatdatas.find({'time': today}).count(),
        'deepweb_today': na_db.deepwebdatas.find({'time': today}).count(),
        'anquanke_today': na_db.anquankedatas.find({'time': today}).count(),
        'seebug_today': na_db.seebugdatas.find({'time': today}).count(),
    }
    return render_template('home.html', data_len=data_len, today_len=today_len)
@app.route('/wechat', methods=['GET'])
def wechat():
    """WeChat view: today's articles plus the full history.

    BUG FIX: the first list previously duplicated the full history and the
    computed ``now_time`` was never used; every sibling view (deepweb,
    spider, anquanke, seebug) filters the first list to today's entries, so
    this view now does the same.
    """
    ress = []
    all_ress = []
    now_time = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    na_db = enqury_data()
    collections = na_db.wechatdatas
    for res in collections.find({'time': re.compile(now_time)}).sort('time', -1):
        ress.append(res)
    for i in collections.find().sort('time', -1):
        all_ress.append(i)
    return render_template('wechat.html', ress=ress, all_ress=all_ress)
@app.route('/deepweb', methods=['GET'])
def deepweb():
    """Deep-web view: today's entries and the full history."""
    now_time = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    na_db = enqury_data()
    collection = na_db.deepwebdatas
    deepdatas = list(collection.find({'time': re.compile(now_time)}).sort('time', -1))
    all_deepdatas = list(collection.find().sort('time', -1))
    return render_template('deepweb.html', deepdatas=deepdatas, all_deepdatas=all_deepdatas)
@app.route('/spider', methods=['GET'])
def spider():
    """CVE view: today's entries and the full history."""
    now_time = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    na_db = enqury_data()
    collection = na_db.cvedatas
    datas = list(collection.find({'time': re.compile(now_time)}).sort('time', -1))
    all_datas = list(collection.find().sort('time', -1))
    return render_template('spider.html', datas=datas, all_datas=all_datas)
@app.route('/anquanke', methods=['GET'])
def anquanke():
    """Anquanke view: today's entries and the full history."""
    now_time = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    na_db = enqury_data()
    collection = na_db.anquankedatas
    responses = list(collection.find({'time': re.compile(now_time)}).sort('time', -1))
    all_responses = list(collection.find().sort('time', -1))
    return render_template('anquanke.html', responses=responses, all_responses=all_responses)
@app.route('/seebug', methods=['GET'])
def seebug():
    """Seebug view: today's entries and the full history."""
    now_time = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    na_db = enqury_data()
    collection = na_db.seebugdatas
    seebugs = list(collection.find({'time': re.compile(now_time)}).sort('time', -1))
    all_seebugs = list(collection.find().sort('time', -1))
    return render_template('seebug.html', seebugs=seebugs, all_seebugs=all_seebugs)
@app.route('/search', methods=['GET','POST'])
def search():
    """Keyword search across all collections (case-insensitive substring)."""
    if request.method == 'POST':
        wordlists = []
        content = request.form['content']
        na_db = enqury_data()
        # BUG FIX: 'deepdatas' was a nonexistent collection — the deep-web
        # documents are stored in 'deepwebdatas' (see the /deepweb view), so
        # search silently returned no deep-web results.
        collections = [na_db.cvedatas, na_db.wechatdatas, na_db.deepwebdatas,
                       na_db.anquankedatas, na_db.seebugdatas]
        for collection in collections:
            for res in collection.find({'content': re.compile(content, re.IGNORECASE)}).sort('time', -1):
                wordlists.append(res)
        return render_template('result.html', wordlists=wordlists)
    else:
        return render_template('search.html')
@app.route('/dashboard', methods=['GET'])
def dashboard():
    """Render the dashboard template."""
    return render_template('dashboard.html')
if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug debugger (remote code
    # execution) — disable before exposing this on 0.0.0.0 in production.
    app.run(host='0.0.0.0',port=5000,debug=True)
|
{"/flask_demo.py": ["/connect_db.py"]}
|
17,976
|
zzhacked/securityMonitor
|
refs/heads/master
|
/spider/get_anquanke_info.py
|
import requests
from pymongo import MongoClient
from bs4 import BeautifulSoup as bs
import time
import os
# Timestamp captured once at import time (currently not referenced below).
nowtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
def anquanke_info():
    """Scrape the anquanke.com front page and upsert the latest posts.

    Returns the list of per-article dicts that were written to MongoDB.
    """
    url = 'https://www.anquanke.com/'
    wordlist = []
    res = requests.get(url,timeout=60)
    soup = bs(res.text,'html.parser')
    divs = soup.find_all('div',{'class':'title'})[0:9]
    spans = soup.find_all('span',{'class':'date'})
    # Publication dates appear in the same order as the title divs.
    list_date = [span.find('span').text for span in spans]
    # FIX: connect once instead of reconnecting per article (the original
    # rebuilt the MongoClient inside the loop). Also removed unused locals.
    DBNAME = ''
    DBUSERNAME = ''
    DBPASSWORD = ''
    DB = ''  # database host/IP
    PORT = 27017
    db_conn = MongoClient(DB, PORT)
    na_db = getattr(db_conn, DBNAME)
    na_db.authenticate(DBUSERNAME, DBPASSWORD)
    c = na_db.anquankedatas
    for i, div in enumerate(divs):
        content = div.find('a').string
        site = 'https://www.anquanke.com' + div.find('a')['href']
        data = {'time':list_date[i],'from':'安全客','content':content,'link':site}
        # Upsert keyed on title so re-runs do not duplicate entries.
        c.update_one({"content": data['content']}, {'$set': data}, True)
        wordlist.append(data)
    return wordlist
if __name__ == '__main__':
    # Allow running this spider as a standalone script.
    anquanke_info()
|
{"/flask_demo.py": ["/connect_db.py"]}
|
17,977
|
zzhacked/securityMonitor
|
refs/heads/master
|
/spider/get_seebug_info.py
|
#! /usr/bin/env python3
# encoding: utf-8
import asyncio
import requests
from pymongo import MongoClient
from bs4 import BeautifulSoup as bs
from pyppeteer import launch
async def main():
    """Crawl seebug.org's front page and upsert the ten newest vulnerabilities."""
    browser = await launch()
    page = await browser.newPage()
    await page.goto('http://www.seebug.org')
    # Wait for the footer so the vulnerability table has rendered.
    await page.waitFor("body > div.footer-up")
    wordlist = []
    vuln_time_elements = await page.xpath('//td[@class="text-center datetime hidden-sm hidden-xs td-time"]')
    vuln_post_time = [await (await item.getProperty('textContent')).jsonValue() for item in vuln_time_elements][:10]
    vulns_elements = await page.xpath('//td[@class="vul-title-wrapper"]')
    vuln_content = [await(await item.getProperty('textContent')).jsonValue() for item in vulns_elements][:10]
    vuln_link_elements = await page.xpath('//td[@class="vul-title-wrapper"]/a')
    vuln_link = [await(await item.getProperty('href')).jsonValue() for item in vuln_link_elements][:10]
    # FIX: connect once instead of reconnecting for every vulnerability
    # (the original rebuilt the MongoClient inside the loop). Also removed
    # the unused PoC timestamp extraction.
    DBNAME = ''
    DBUSERNAME = ''
    DBPASSWORD = ''
    DB = ''  # database host/IP
    PORT = 27017
    db_conn = MongoClient(DB, PORT)
    na_db = getattr(db_conn, DBNAME)
    na_db.authenticate(DBUSERNAME, DBPASSWORD)
    c = na_db.seebugdatas
    for i in range(10):
        vuln_data = {'time':vuln_post_time[i],'link':vuln_link[i],'from':'Seebug','content':vuln_content[i]}
        # Upsert keyed on title so re-runs do not duplicate entries.
        c.update_one({"content": vuln_data['content']}, {'$set': vuln_data}, True)
        wordlist.append(vuln_data)
    print(wordlist)
    await browser.close()
if __name__ == '__main__':
    # FIX: run the crawler only when executed as a script. The module used to
    # kick off the event loop at import time, which made it unsafe to import
    # and inconsistent with the other spider modules' __main__ guards.
    asyncio.get_event_loop().run_until_complete(main())
|
{"/flask_demo.py": ["/connect_db.py"]}
|
17,987
|
honoriovega/cst205-proj2
|
refs/heads/master
|
/app.py
|
"""
Course : CST205
Title : app.py
Authors: Javar Alexander, Honorio Vega, Antonio Villagomez
Abstract : This program is the driver of the program. It sets up
the server. It also broadcasts and sends new messages.
aswell as saving the messages to a database. It pulls
pictures from Getty and Giphy API's and sends them to
the users
Date : 03/15/2017
Who worked on what: Javar and Honorio worked on this file. Javar
wrote the spotify feature. Honorio worked on the
database and received and sending images. All other
feautures in this file were a combination of
work from Javar and Honorio. For example, Javar
worked on parts of the BOT and Honorio worked
on it also
GITHUB LINK : https://github.com/honoriovega/cst205-proj2
"""
import random, os, flask, flask_socketio, flask_sqlalchemy,requests, time
from random import randint, choice
from flask_socketio import send
import gettyApi
import botcommands
import urlparse
import json
app = flask.Flask(__name__)
#app.config[ 'SQLALCHEMY_DATABASE_URI' ] = 'postgresql://potato:potatosareawesome@localhost/postgres'
# Database URL is read from the environment (e.g. a hosting platform's DATABASE_URL).
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = flask_sqlalchemy.SQLAlchemy(app)
class Message(db.Model):
    """A single chat message persisted via SQLAlchemy.

    Stores the sender's name and picture URL, the message text, and an
    optional API link (set when the message is an image/GIF).
    """
    id = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.String(300))     # message body (empty for picture messages)
    picture = db.Column(db.String(200))  # sender's profile picture URL
    name = db.Column(db.String(100))     # sender's display name
    apiLink = db.Column(db.String(500))  # image/GIF URL for picture messages
    def __init__(self, p,n,t,al=''):
        # p=picture, n=name, t=text, al=apiLink — note the argument order.
        self.text = t
        self.picture = p
        self.name = n
        self.apiLink = al
    def __repr__(self):
        return '<%s %s: %s>' % (self.picture, self.name, self.text)
socketio = flask_socketio.SocketIO(app)
all_messages = []            # in-memory cache, refreshed from the DB on broadcast
all_connected_users = { };   # NOTE(review): not referenced by the visible handlers
all_numbers = []             # used only by the 'new number' test handler
# Fetch all messages from the database as plain dicts for the client.
def fetchAllMessages():
    """Load every stored Message and return it as a list of dicts."""
    return [
        {
            'name': message.name,
            'picture': message.picture,
            'msg': message.text,
            'link': message.apiLink
        }
        for message in Message.query.all()
    ]
# broadcast the messages
def fetchAndEmit():
    """Refresh the in-memory message cache from the DB and broadcast it."""
    all_messages[:] = fetchAllMessages()
    socketio.emit('all messages', {'messages': all_messages})
# add a plain text message to our database
def addMessage(userPicture, name, msg):
    """Persist a text chat message."""
    db.session.add(Message(userPicture, name, msg))
    db.session.commit()
# add a bot text message to our database
def addBotMessage(msg):
    """Persist *msg* as a text message from the bot persona."""
    addMessage('/static/bot.jpg', 'Bender_from_futurama', msg)
# add a bot picture message to our database
def addBotMessageAPI(link):
    """Persist *link* as a picture message from the bot persona."""
    addPictureMessage('/static/bot.jpg', 'Bender_from_futurama', link)
# add a picture message to our database
def addPictureMessage(userPicture, name, apiLink):
    """Persist a picture/GIF message (empty text, URL stored in apiLink)."""
    db.session.add(Message(userPicture, name, '', apiLink))
    db.session.commit()
# this is where the app starts
@app.route('/')
def hello():
    """Serve the chat page with a randomly chosen Getty background image."""
    background = gettyApi.initBackground(choice(['technology','forest','background']))
    return flask.render_template('index.html', back=background)
# When a user connects, pull the messages from the database and broadcast
# them so the new client sees the full history.
@socketio.on('connect')
def on_connect():
    """On socket connect, broadcast the current message history."""
    fetchAndEmit()
# this function was used for testing purposes
@socketio.on('new number')
def on_new_number(data):
    """Test handler: append a constant and rebroadcast the number list."""
    all_numbers.append(100)
    socketio.emit('all numbers', {'numbers': all_numbers})
# Fetches data from the Spotify API and emits a random matching track link.
@socketio.on('Spotify')
def spotify(data):
    """Search Spotify for data['searchQuery'] and emit a random track embed URL.

    FIXES: removed a dead second request that embedded a hard-coded OAuth
    bearer token in source (its response was never used), and guarded
    against an empty result set, where random.choice raised IndexError.
    """
    tracks = []
    searchType = data['searchType']
    searchQuery = data['searchQuery']
    searchQuery1 = searchQuery.replace("+", "%20")
    response = requests.get("https://api.spotify.com/v1/search?q="+searchQuery1+"&type="+searchType)
    json = response.json()
    if 'tracks' in json and 'items' in json['tracks']:
        for item in json['tracks']['items']:
            print(item['uri'])
            tracks.append(item['uri'])
    # No matches: emit nothing rather than crashing the handler.
    if not tracks:
        return
    random_track = random.choice(tracks)
    random_track_link = "https://embed.spotify.com/?uri="+random_track
    socketio.emit('fromSpotify', random_track_link)
# This function was meant as a feature to greet the user on log in. The
# feature was not fully wired up, but it is kept because removing it might
# break the client-side event flow.
@socketio.on('greet user')
def greet_user(data):
    """Look up the user's profile (Facebook or Google token) and post a greeting.

    data carries 'google_user_token' and 'facebook_user_token'; an empty
    string marks the provider as unused. With neither token, just ack.
    """
    picture = ''
    USERNAME = ''
    greet = ''
    if(data['google_user_token'] == '' and data['facebook_user_token'] == ''):
        send('greeting user')
    else:
        if(data['google_user_token'] == ''):
            # Facebook login: resolve name and profile picture via Graph API.
            response = requests.get('https://graph.facebook.com/v2.8/me?fields=id%2Cname%2Cpicture&access_token=' + data['facebook_user_token'])
            json = response.json()
            USERNAME = json['name']
            picture = json['picture']['data']['url']
            greet = 'Hello ' + USERNAME + ' logged in from Facebook'
        else:
            # Google login: resolve profile from the ID-token info endpoint.
            response = requests.get('https://www.googleapis.com/oauth2/v3/tokeninfo?id_token=' + data['google_user_token'])
            json = response.json()
            picture = json['picture']
            USERNAME = json['name']
            greet = 'Hello ' + USERNAME + ' logged in from Google'
        addBotMessage(greet)
        fetchAndEmit()
# When a new message is received this function stores it in the database,
# checks whether it is a bot command or a URL, and rebroadcasts.
@socketio.on('new msg')
def on_new_msg(data):
    """Handle an incoming chat message.

    Resolves the sender's profile from their Facebook/Google token, stores
    the message (as text or, when it parses as a URL, as a picture message),
    then runs '!!' bot commands and broadcasts the refreshed history.
    """
    facebookAPI = 'https://graph.facebook.com/v2.8/me?fields=id%2Cname%2Cpicture&access_token='
    googleAPI = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='
    msg = data['msg']
    USERNAME = ''
    picture = ''
    msg = msg.strip()
    if(data['google_user_token'] == '' and data['facebook_user_token'] == '' ):
        # Anonymous sender: just acknowledge, nothing is stored.
        send('received message')
    else:
        if(data['google_user_token'] == ''):
            response = requests.get( facebookAPI + data['facebook_user_token'])
            json = response.json()
            USERNAME = json['name']
            picture = json['picture']['data']['url']
        else:
            response = requests.get(googleAPI + data['google_user_token'])
            json = response.json()
            picture = json['picture']
            USERNAME = json['name']
        url = msg
        parts = urlparse.urlsplit(url)
        # it is not a url so add it and emit
        if not parts.scheme or not parts.netloc:
            if('!! say' in msg):
                # '!! say' is deliberately NOT stored as a user message; the
                # bot echoes it below. x is a throwaway no-op.
                x = 10
            else:
                addMessage(picture,USERNAME, msg)
        else:
            print "yes an url"
            addPictureMessage(picture,USERNAME,url)
        # handle bot command: replies that look like URLs become pictures
        if(msg[:2] == '!!'):
            response = botcommands.processBotCommand(msg)
            if(len(response) > 4):
                if(response[:4] == 'http'):
                    addBotMessageAPI( response )
                else:
                    addBotMessage(response)
            else:
                addBotMessage(response)
        fetchAndEmit()
# this gets the server up and running
if __name__ == '__main__':
    # NOTE(review): debug=True should be disabled outside development.
    socketio.run(
        app,
        host=os.getenv('IP', '0.0.0.0'),
        port=int(os.getenv('PORT', 8080)),
        debug=True
    )
|
{"/models.py": ["/app.py"], "/botcommands.py": ["/gettyApi.py"]}
|
17,988
|
honoriovega/cst205-proj2
|
refs/heads/master
|
/models.py
|
"""
Course : CST205
Title : models.py
Author: Honorio Vega
Abstract : This file is our database model. It defines our fields and
table. This file will be used to create the database
in postgresql.
Date : 03/15/2017
Who worked on what: Honorio wrote this file. Consulted with Antonio
and Javar for their input on what fields should
be included
GITHUB LINK : https://github.com/honoriovega/cst205-proj2
"""
from app import *
# NOTE(review): wildcard import pulls in the whole app module; this file then
# rebinds the DB with a local connection string (used for schema creation).
app.config[ 'SQLALCHEMY_DATABASE_URI' ] = 'postgresql://potato:potatosareawesome@localhost/postgres'
#app.app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL')
db = flask_sqlalchemy.SQLAlchemy(app)
class Message(db.Model):
    """Chat message model used to create the database schema.

    NOTE(review): this duplicates the Message class defined in app.py; keep
    the two definitions in sync.
    """
    # The structure of our database
    id = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.String(300))     # message body (empty for picture messages)
    picture = db.Column(db.String(200))  # sender's profile picture URL
    name = db.Column(db.String(100))     # sender's display name
    apiLink = db.Column(db.String(500))  # image/GIF URL for picture messages
    def __init__(self, p,n,t,al=''):
        # p=picture, n=name, t=text, al=apiLink — note the argument order.
        self.text = t
        self.picture = p
        self.name = n
        self.apiLink = al
    def __repr__(self):
        return '<%s %s: %s>' % (self.picture, self.name, self.text)
|
{"/models.py": ["/app.py"], "/botcommands.py": ["/gettyApi.py"]}
|
17,989
|
honoriovega/cst205-proj2
|
refs/heads/master
|
/gettyApi.py
|
"""
Course : CST205
Title : botcommands.py
Authors: Javar Alexander, Honorio Vega
Abstract : This file contains the commands for the getty api.
It contains two functions. One to fetch the background for
the chat app and the other to retrieve images based on a users
request. For example if a user types "!! getty dogs" the
the getImages command will take that string and parse it
It will extract the search term and make an api call to
Getty. It will response a result and pick a random image
and return it.
Date : 03/15/2017
Who worked on what: Honorio generated the API keys and wrote the
getImages function. Javar wrote the initbackground
function
GITHUB LINK : https://github.com/honoriovega/cst205-proj2
"""
import requests
from random import randint,choice
def getImages(search_term):
    """Return the URI of a random Getty image matching *search_term*."""
    base = "https://api.gettyimages.com/v3/search/images?fields=detail_set&sort_order=best&phrase="
    url = base + search_term + "&page_size=100"
    # NOTE(review): the API key is hard-coded in source; move it to config/env.
    response = requests.get(url, headers={ "Api-Key" : 'qwj5pp6xrv4td7djmab3jeec' })
    images = response.json()['images']
    return images[randint(0, len(images) - 1)]['display_sizes'][0]['uri']
def initBackground(search_term):
    """Return a Getty image URL to use as the chat page background."""
    base = "https://api.gettyimages.com/v3/search/images?fields=detail_set&sort_order=best&phrase="
    url = base + search_term + "&page_size=100"
    # NOTE(review): the API key is hard-coded in source; move it to config/env.
    response = requests.get(url, headers={ "Api-Key" : 'qwj5pp6xrv4td7djmab3jeec' })
    json_body = response.json()
    # Choose a random image id from the result set.
    getty_images_id = choice([photoID['id'] for photoID in json_body['images']])
    # NOTE(review): the "-id" path segment looks suspicious — confirm the
    # media URL format against Getty's documentation.
    return "http://media.gettyimages.com/photos/-id" + getty_images_id
|
{"/models.py": ["/app.py"], "/botcommands.py": ["/gettyApi.py"]}
|
17,990
|
honoriovega/cst205-proj2
|
refs/heads/master
|
/botcommands.py
|
"""
Course : CST205
Title : botcommands.py
Authors: Javar Alexander, Honorio Vega
Abstract : This contains the possible commands that the bot can do.
It can be made to repeat what a user said. It can also be
made to fetch pictures and gif's from Getty and Giffy
respectively.
Date : 03/15/2017
Who worked on what: Honorio worked on parsing and processing the text.
Javar wrote on the functions called that called
external API's
GITHUB LINK : https://github.com/honoriovega/cst205-proj2
"""
from random import randint,choice
import gettyApi
import json
import urllib
def processBotCommand(userSubmitted):
    """Parse a '!!' chat command and return the bot's textual reply.

    Unknown commands yield 'command not recognized'. The getty/giffy
    branches return an image URL fetched from the respective API.
    """
    recognizedCommands = ['say','about','help','backwards','doMath','getty','giffy']
    if '!! about' in userSubmitted:
        return 'website created by Honorio Vega, Javar Alexander, Antonio Villagomez'
    if '!! say' in userSubmitted:
        # Echo back everything after the command token.
        return userSubmitted.split('!! say')[1].strip()
    if '!! backwards' in userSubmitted:
        tail = userSubmitted.split('!! backwards')[1]
        return tail[::-1].strip()
    if '!! doMath' in userSubmitted:
        left = randint(1,100)
        right = randint(1,100)
        return ("%d + %d = %d" % (left, right, left + right)).strip()
    if '!! help' in userSubmitted:
        return 'I recognize these commands: ' + ", ".join(recognizedCommands)
    if '!! getty' in userSubmitted:
        return gettyApi.getImages(userSubmitted.split('!! getty')[1])
    if '!! giffy' in userSubmitted:
        query = userSubmitted.split('!! giffy')[1].replace(' ','+')
        link = "http://api.giphy.com/v1/gifs/search?q=" + query + "&api_key=dc6zaTOxFJmzC&limit=5"
        data = json.loads(urllib.urlopen(link).read())
        # no results :-*(
        if(len(data['data']) == 0 ):
            return "Sorry I didn't find any gif's with that search term"
        return data['data'][randint(0,len(data['data']) -1)]['images']['downsized_medium']['url']
    return 'command not recognized'
def sayBye(name):
    """Return a random snarky farewell with *name* appended."""
    farewells = ["Leave and don't come back ", "Get out of here ", "Good ridance ",
                 "Be gone "]
    return choice(farewells) + name
def greetNewUser(name):
    """Return a random greeting with *name* appended."""
    openers = ['Wassup, ', 'YO ',
               "How's it going ", "Hey there ", "Howdy "]
    return choice(openers) + name
def randomPhrase():
    """Return one of the bot's canned filler phrases."""
    options = ['there you happy ? ',
               'i am at your command for now ...',
               'computers will take over',
               'i will be issuing commands to you soon...']
    return choice(options)
|
{"/models.py": ["/app.py"], "/botcommands.py": ["/gettyApi.py"]}
|
17,991
|
fanghuiz/ufc-stats-crawler
|
refs/heads/master
|
/ufcStats/utils.py
|
import datetime
import re
from dateparser import parse
def get_element_atk(stat, element):
    """Extract landed or attempted counts from 'X of Y' stat strings.

    stat    -- two-element list like ['10 of 20', '5 of 8'], or None
    element -- 'attempt' or 'landed'
    Returns [fighter1, fighter2] as ints, or None when stat is missing.
    """
    if stat is None:
        return None
    f1_landed, f1_attempted = [part.strip() for part in stat[0].split('of')]
    f2_landed, f2_attempted = [part.strip() for part in stat[1].split('of')]
    if element == 'attempt':
        element = [int(f1_attempted), int(f2_attempted)]
    if element == 'landed':
        element = [int(f1_landed), int(f2_landed)]
    return element
def get_element_dmg(stat, element):
    """Derive absorbed/defended counts from both fighters' 'X of Y' strings.

    absorbed = strikes landed by the opponent;
    defended = opponent's attempts minus opponent's landed.
    Returns [fighter1, fighter2] as ints, or None when stat is missing.
    """
    if stat is None:
        return None
    f1_landed, f1_attempted = [part.strip() for part in stat[0].split('of')]
    f2_landed, f2_attempted = [part.strip() for part in stat[1].split('of')]
    if element == 'absorbed':
        element = [int(f2_landed), int(f1_landed)]
    if element == 'defended':
        element = [int(f2_attempted) - int(f2_landed),
                   int(f1_attempted) - int(f1_landed)]
    return element
def IS_Active(last_fight_date):
    """Return True when the last fight was within the past 365 days.

    A missing date (None) is treated as active.
    """
    if last_fight_date is None:
        return True
    elapsed = parse('today') - parse(last_fight_date)
    return elapsed < datetime.timedelta(days=365)
def print_time(time):
    """Parse *time* and return a filename-safe ISO timestamp (':' -> '-')."""
    # Parameter name shadows the ``time`` module; kept for interface compatibility.
    parsed = parse(time).replace(microsecond=0)
    return parsed.isoformat().replace(':', '-')
|
{"/ufcStats/spiders/spider.py": ["/ufcStats/items.py", "/ufcStats/utils.py"], "/ufcStats/pipelines.py": ["/ufcStats/utils.py"]}
|
17,992
|
fanghuiz/ufc-stats-crawler
|
refs/heads/master
|
/ufcStats/spiders/spider.py
|
import scrapy
from scrapy.loader import ItemLoader
from ufcStats.items import *
from ufcStats.utils import *
class FightsSpider(scrapy.Spider):
    """Crawl ufcstats.com: events listing -> event pages -> individual fights."""
    name = 'ufcFights'
    start_urls = ['http://ufcstats.com/statistics/events/completed?page=all']
    # Route scraped items through the fight-summary and fight-stats pipelines.
    custom_settings = {
        'ITEM_PIPELINES': {
            'ufcStats.pipelines.FightSummaryPipeline': 400,
            'ufcStats.pipelines.FightStatsPipeline': 410
        }
    }
def parse(self, response):
"""
Parse the event listing page, follow link to individual events page
"""
events_url = response.css(
'tbody .b-statistics__table-row ::attr(href)')
for event in events_url:
yield response.follow(event, callback=self.parse_event_link)
def parse_event_link(self, response):
"""
Parse the event page, follow link to each individual fight page
"""
event_info = response.css('.b-list__box-list-item')
date = event_info[0].css('::text').getall()[-1]
location = event_info[1].css('::text').getall()[-1]
fights_url = response.css(
'.b-fight-details__table-row ::attr(data-link)')
for fight in fights_url:
yield response.follow(fight,
callback=self.parse_fight_info,
cb_kwargs=dict(date=date, location=location))
def parse_fight_info(self, response, date, location):
"""
Parse fight info - fight level summary, and fighter stats
"""
##### Fight summary ######
fight_id = response.url.split('/')[-1]
# date and location carry over from events page
date = date.strip()
location = location.strip()
status = response.css(
'.b-fight-details__person-status ::text').getall()
# Fighter names
names = response.css(
'.b-fight-details__person-name :not(p)::text').getall()
try:
fighter_1 = names[0].strip()
fighter_2 = names[1].strip()
except:
fighter_1 = None
fighter_2 = None
# IDs - Handle errors due to missing fighter link
ids = response.css('.b-fight-details__person-name')
fighter_1_id = ids[0].css('::attr(href)').get()
fighter_2_id = ids[1].css('::attr(href)').get()
if fighter_1_id is not None:
fighter_1_id = fighter_1_id.split('/')[-1]
if fighter_2_id is not None:
fighter_2_id = fighter_2_id.split('/')[-1]
# Winner name
if status[0].strip() == 'W':
winner = fighter_1
elif status[1].strip() == 'W':
winner = fighter_2
elif status[0].strip() == 'D':
winner = 'Draw'
else:
winner = 'NC'
weight_class = response.css(
'.b-fight-details__fight-title ::text').getall()
if len(weight_class) > 1:
weight_class = weight_class[-1].strip()
if len(weight_class) == 1:
weight_class = weight_class[0].strip()
decision_method = response.css(
"i.b-fight-details__text-item_first [style='font-style: normal'] ::text"
).get()
fight_details = response.css('.b-fight-details__text-item')
time_format = fight_details[2].css('::text').getall()[-1]
fight_duration_lastrnd = fight_details[0].css('::text').getall()[-1]
fight_duration_lastrnd_time = fight_details[1].css(
'::text').getall()[-1]
l = ItemLoader(item=FightsItem(), response=response)
l.add_value('fight_id', fight_id)
l.add_value('date', date)
l.add_value('location', location)
l.add_value('fighter_1', fighter_1)
l.add_value('fighter_1_id', fighter_1_id)
l.add_value('fighter_2', fighter_2)
l.add_value('fighter_2_id', fighter_2_id)
l.add_value('winner', winner)
l.add_value('weight_class', weight_class)
l.add_value('decision_method', decision_method.strip())
l.add_value('time_format', time_format.strip())
l.add_value('fight_duration_lastrnd', fight_duration_lastrnd.strip())
l.add_value('fight_duration_lastrnd_time',
fight_duration_lastrnd_time.strip())
##### Fighter Stats ######
fighter_status = [i.strip() for i in status]
fighter_id = list([fighter_1_id, fighter_2_id])
fighter_name = list([fighter_1, fighter_2])
stats = response.css('table:not(.js-fight-table)')
# Fight stats - handle missing values
if len(stats) == 2:
stats_total = stats[0].css(
'.b-fight-details__table-body .b-fight-details__table-col')
stats_str = stats[1].css(
'.b-fight-details__table-body .b-fight-details__table-col')
## Totals
kd = stats_total[1].css('p ::text').getall()
kd = [int(i.strip()) for i in kd]
sig_str = stats_total[2].css('p ::text').getall()
total_str = stats_total[4].css('p ::text').getall()
td = stats_total[5].css('p ::text').getall()
n_sub = stats_total[7].css('p ::text').getall()
n_sub = [int(i.strip()) for i in n_sub]
n_pass = stats_total[8].css('p ::text').getall()
n_pass = [int(i.strip()) for i in n_pass]
n_rev = stats_total[9].css('p ::text').getall()
n_rev = [int(i.strip()) for i in n_rev]
## Significant strikes
head = stats_str[3].css('p ::text').getall()
body = stats_str[4].css('p ::text').getall()
leg = stats_str[5].css('p ::text').getall()
distance = stats_str[6].css('p ::text').getall()
clinch = stats_str[7].css('p ::text').getall()
ground = stats_str[8].css('p ::text').getall()
else:
kd = None
sig_str = None
total_str = None
td = None
n_sub = None
n_pass = None
n_rev = None
head = None
body = None
leg = None
distance = None
clinch = None
ground = None
#l.add_value('fight_id', fight_id)
l.add_value('fighter_id', fighter_id)
l.add_value('fighter_name', fighter_name)
l.add_value('fighter_status', fighter_status)
l.add_value('kd', kd)
l.add_value('sig_str_land', get_element_atk(sig_str, 'landed'))
l.add_value('sig_str_att', get_element_atk(sig_str, 'attempt'))
l.add_value('total_str_land', get_element_atk(total_str, 'landed'))
l.add_value('total_str_att', get_element_atk(total_str, 'attempt'))
l.add_value('td_land', get_element_atk(td, 'landed'))
l.add_value('td_att', get_element_atk(td, 'attempt'))
l.add_value('n_sub', n_sub)
l.add_value('n_pass', n_pass)
l.add_value('n_rev', n_rev)
l.add_value('head_land', get_element_atk(head, 'landed'))
l.add_value('head_att', get_element_atk(head, 'attempt'))
l.add_value('body_land', get_element_atk(body, 'landed'))
l.add_value('body_att', get_element_atk(body, 'attempt'))
l.add_value('leg_land', get_element_atk(leg, 'landed'))
l.add_value('leg_att', get_element_atk(leg, 'attempt'))
l.add_value('distance_land', get_element_atk(distance, 'landed'))
l.add_value('distance_att', get_element_atk(distance, 'attempt'))
l.add_value('clinch_land', get_element_atk(clinch, 'landed'))
l.add_value('clinch_att', get_element_atk(clinch, 'attempt'))
l.add_value('ground_land', get_element_atk(ground, 'landed'))
l.add_value('ground_att', get_element_atk(ground, 'attempt'))
l.add_value('sig_str_abs', get_element_dmg(sig_str, 'absorbed'))
l.add_value('sig_str_def', get_element_dmg(sig_str, 'defended'))
l.add_value('total_str_abs', get_element_dmg(total_str, 'absorbed'))
l.add_value('total_str_def', get_element_dmg(total_str, 'defended'))
l.add_value('td_abs', get_element_dmg(td, 'absorbed'))
l.add_value('td_def', get_element_dmg(td, 'defended'))
l.add_value('head_abs', get_element_dmg(head, 'absorbed'))
l.add_value('head_def', get_element_dmg(head, 'defended'))
l.add_value('body_abs', get_element_dmg(body, 'absorbed'))
l.add_value('body_def', get_element_dmg(body, 'defended'))
l.add_value('leg_abs', get_element_dmg(leg, 'absorbed'))
l.add_value('leg_def', get_element_dmg(leg, 'defended'))
l.add_value('distance_abs', get_element_dmg(distance, 'absorbed'))
l.add_value('distance_def', get_element_dmg(distance, 'defended'))
l.add_value('clinch_abs', get_element_dmg(clinch, 'absorbed'))
l.add_value('clinch_def', get_element_dmg(clinch, 'defended'))
l.add_value('ground_abs', get_element_dmg(ground, 'absorbed'))
l.add_value('ground_def', get_element_dmg(ground, 'defended'))
yield l.load_item()
class FightersSpider(scrapy.Spider):
name = 'ufcFighters'
start_urls = ['http://ufcstats.com/statistics/fighters']
custom_settings = {
'FEED_FORMAT': 'csv',
'FEED_URI': 'data/fighter_stats/%(time)s.csv'
}
def parse(self, response):
"""
Parse the fighter listing page, follow link to each alphabetical page
"""
by_alphabets = response.css(
'.b-statistics__nav-link ::attr(href)').getall()
pages_by_alphabets = []
for alphabet in by_alphabets:
link = alphabet + '&page=all'
pages_by_alphabets.append(link)
for page in pages_by_alphabets:
yield response.follow(page, callback=self.parse_fighter_link)
def parse_fighter_link(self, response):
"""
Parse each alphabetical listing, find links to each fighter
"""
rows = response.css('tbody .b-statistics__table-row')
rows.pop(0)
for row in rows:
fighter_link = row.css('.b-statistics__table-col ::attr(href)').get()
yield response.follow(fighter_link, callback=self.parse_fighter_stat)
def parse_fighter_stat(self, response):
"""
Parse fighter summary stats
"""
fighter_id = response.url.split('/')[-1]
name = response.css('.b-content__title-highlight ::text').get()
record = response.css('.b-content__title-record ::text').get()
record = re.findall(r'[0-9]+', record)
stat_box = response.css('.b-list__box-list')
stat_box_1 = stat_box[0].css('.b-list__box-list-item')
stat_box_2 = stat_box[1].css('.b-list__box-list-item')
stat_box_3 = stat_box[2].css('.b-list__box-list-item')
height = stat_box_1[0].css('li::text').getall()
weight = stat_box_1[1].css('li::text').getall()
reach = stat_box_1[2].css('li::text').getall()
stance = stat_box_1[3].css('li::text').getall()
dob = stat_box_1[4].css('li::text').getall()
sig_str_land_pM = stat_box_2[0].css('li::text').getall()
sig_str_land_pct = stat_box_2[1].css('li::text').getall()
sig_str_abs_pM = stat_box_2[2].css('li::text').getall()
sig_str_def_pct = stat_box_2[3].css('li::text').getall()
td_avg = stat_box_3[1].css('li::text').getall()
td_land_pct = stat_box_3[2].css('li::text').getall()
td_def_pct = stat_box_3[3].css('li::text').getall()
sub_avg = stat_box_3[4].css('li::text').getall()
l = ItemLoader(item=FighterSummaryItem(), response=response)
l.add_value('fighter_id', fighter_id)
l.add_value('name', name.strip())
l.add_value('height', height[1].strip())
l.add_value('weight', weight[1].strip())
l.add_value('reach', reach[1].strip())
l.add_value('stance', stance[1].strip())
l.add_value('dob', dob[1].strip())
l.add_value('n_win', record[0])
l.add_value('n_loss', record[1])
l.add_value('n_draw', record[2])
l.add_value('sig_str_land_pM', sig_str_land_pM[1].strip())
l.add_value('sig_str_land_pct', sig_str_land_pct[1].strip())
l.add_value('sig_str_abs_pM', sig_str_abs_pM[1].strip())
l.add_value('sig_str_def_pct', sig_str_def_pct[1].strip())
l.add_value('td_avg', td_avg[1].strip())
l.add_value('td_land_pct', td_land_pct[1].strip())
l.add_value('td_def_pct', td_def_pct[1].strip())
l.add_value('sub_avg', sub_avg[1].strip())
yield l.load_item()
class UpcomingFightsSpider(scrapy.Spider):
name = 'upcoming'
start_urls = ['http://ufcstats.com/statistics/events/completed']
time_created = print_time('now')
custom_settings = {
'FEED_FORMAT': 'csv',
'FEED_URI': f'data/upcoming/{time_created}.csv'
}
def parse(self, response):
"""
Parse the event listing page, follow link to individual events page
"""
event_url = response.css(
'tbody .b-statistics__table-row_type_first ::attr(href)').get()
yield response.follow(event_url, callback=self.parse_upcoming_event)
def parse_upcoming_event(self, response):
"""
Parse the event page, follow link to each individual fight page
"""
event_info = response.css('.b-list__box-list-item')
date = event_info[0].css('::text').getall()[-1]
location = event_info[1].css('::text').getall()[-1]
fights_url = response.css(
'.b-fight-details__table-row ::attr(data-link)')
for fight in fights_url:
yield response.follow(fight,
callback=self.parse_upcoming_fight,
cb_kwargs=dict(date=date, location=location))
def parse_upcoming_fight(self, response, date, location):
"""
Parse fight info - fight level summary, and fighter stats
"""
##### Fight summary ######
fight_id = response.url.split('/')[-1]
# date and location carry over from events page
date = date.strip()
location = location.strip()
# Fighter names
names = response.css(
'.b-fight-details__person-name :not(p)::text').getall()
try:
fighter_1 = names[0].strip()
fighter_2 = names[1].strip()
except:
fighter_1 = None
fighter_2 = None
# IDs - Handle errors due to missing fighter link
ids = response.css('.b-fight-details__person-name')
fighter_1_id = ids[0].css('::attr(href)').get()
fighter_2_id = ids[1].css('::attr(href)').get()
if fighter_1_id is not None:
fighter_1_id = fighter_1_id.split('/')[-1]
if fighter_2_id is not None:
fighter_2_id = fighter_2_id.split('/')[-1]
weight_class = response.css(
'.b-fight-details__fight-title ::text').getall()
if len(weight_class) > 1:
weight_class = weight_class[-1].strip()
if len(weight_class) == 1:
weight_class = weight_class[0].strip()
l = ItemLoader(item=UpcomingFightsItem(), response=response)
l.add_value('fight_id', fight_id)
l.add_value('date', date)
l.add_value('location', location)
l.add_value('fighter_1', fighter_1)
l.add_value('fighter_1_id', fighter_1_id)
l.add_value('fighter_2', fighter_2)
l.add_value('fighter_2_id', fighter_2_id)
l.add_value('weight_class', weight_class)
yield l.load_item()
|
{"/ufcStats/spiders/spider.py": ["/ufcStats/items.py", "/ufcStats/utils.py"], "/ufcStats/pipelines.py": ["/ufcStats/utils.py"]}
|
17,993
|
fanghuiz/ufc-stats-crawler
|
refs/heads/master
|
/ufcStats/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exporters import JsonLinesItemExporter, CsvItemExporter
import pathlib
from ufcStats.utils import print_time
fields_fight_info = [
'fight_id', 'fighter_1', 'fighter_1_id', 'fighter_2', 'fighter_2_id',
'winner', 'decision_method', 'fight_duration_lastrnd',
'fight_duration_lastrnd_time', 'time_format', 'weight_class', 'date',
'location'
]
fields_fight_stats = [
'fight_id',
'fighter_id',
'fighter_name',
'fighter_status',
'kd',
'n_pass',
'n_rev',
'n_sub',
'sig_str_abs',
'sig_str_att',
'sig_str_def',
'sig_str_land',
'total_str_abs',
'total_str_att',
'total_str_def',
'total_str_land',
'td_abs',
'td_att',
'td_def',
'td_land',
'head_abs',
'head_att',
'head_def',
'head_land',
'body_abs',
'body_att',
'body_def',
'body_land',
'leg_abs',
'leg_att',
'leg_def',
'leg_land',
'distance_abs',
'distance_att',
'distance_def',
'distance_land',
'clinch_abs',
'clinch_att',
'clinch_def',
'clinch_land',
'ground_abs',
'ground_att',
'ground_def',
'ground_land',
]
class FightSummaryPipeline(object):
"""
Save Fight level summary to csv file
"""
def __init__(self):
self.files = {}
def open_spider(self, spider):
time_created = print_time('now')
# Create directory
path_fight_info = f'data/fight_info'
pathlib.Path(path_fight_info).mkdir(parents=True, exist_ok=True)
# Write to folder
file = open(f'{path_fight_info}/{time_created}.csv', 'wb')
self.files[spider] = file
self.exporter = CsvItemExporter(file)
self.exporter.fields_to_export = fields_fight_info
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
file = self.files.pop(spider)
file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
class FightStatsPipeline(object):
"""
Save Fight stats to jl file
"""
def __init__(self):
self.files = {}
def open_spider(self, spider):
time_created = print_time('now')
# Create directory
path_fight_stats = f'data/fight_stats'
pathlib.Path(path_fight_stats).mkdir(parents=True, exist_ok=True)
# Write to folder
file = open(f'{path_fight_stats}/{time_created}.jl', 'wb')
self.files[spider] = file
self.exporter = JsonLinesItemExporter(file)
self.exporter.fields_to_export = fields_fight_stats
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
file = self.files.pop(spider)
file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
|
{"/ufcStats/spiders/spider.py": ["/ufcStats/items.py", "/ufcStats/utils.py"], "/ufcStats/pipelines.py": ["/ufcStats/utils.py"]}
|
17,994
|
fanghuiz/ufc-stats-crawler
|
refs/heads/master
|
/ufcStats/items.py
|
# -*coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader.processors import Identity, TakeFirst, Compose, MapCompose, Join
STR_toInt = Compose(TakeFirst(), int)
STR_toFloat = Compose(TakeFirst(), float)
def stripPercent(str_input):
number = str_input.strip('%')
return float(number) / 100
class FightsItem(scrapy.Item):
fight_id = scrapy.Field(output_processor=TakeFirst())
## Fight summary, yields to csv
date = scrapy.Field(output_processor=TakeFirst())
location = scrapy.Field(output_processor=TakeFirst())
fighter_1 = scrapy.Field(output_processor=TakeFirst())
fighter_1_id = scrapy.Field(output_processor=TakeFirst())
fighter_2 = scrapy.Field(output_processor=TakeFirst())
fighter_2_id = scrapy.Field(output_processor=TakeFirst())
winner = scrapy.Field(output_processor=TakeFirst())
weight_class = scrapy.Field(output_processor=TakeFirst())
decision_method = scrapy.Field(output_processor=TakeFirst())
time_format = scrapy.Field(output_processor=TakeFirst())
fight_duration_lastrnd = scrapy.Field(output_processor=STR_toInt)
fight_duration_lastrnd_time = scrapy.Field(output_processor=TakeFirst())
## Fighter stats, yields to json
fighter_id = scrapy.Field()
fighter_name = scrapy.Field()
fighter_status = scrapy.Field()
kd = scrapy.Field()
sig_str_land = scrapy.Field()
sig_str_att = scrapy.Field()
total_str_land = scrapy.Field()
total_str_att = scrapy.Field()
td_land = scrapy.Field()
td_att = scrapy.Field()
n_sub = scrapy.Field()
n_pass = scrapy.Field()
n_rev = scrapy.Field()
head_land = scrapy.Field()
head_att = scrapy.Field()
body_land = scrapy.Field()
body_att = scrapy.Field()
leg_land = scrapy.Field()
leg_att = scrapy.Field()
distance_land = scrapy.Field()
distance_att = scrapy.Field()
clinch_land = scrapy.Field()
clinch_att = scrapy.Field()
ground_land = scrapy.Field()
ground_att = scrapy.Field()
sig_str_abs = scrapy.Field()
sig_str_def = scrapy.Field()
total_str_abs = scrapy.Field()
total_str_def = scrapy.Field()
td_abs = scrapy.Field()
td_def = scrapy.Field()
head_abs = scrapy.Field()
head_def = scrapy.Field()
body_abs = scrapy.Field()
body_def = scrapy.Field()
leg_abs = scrapy.Field()
leg_def = scrapy.Field()
distance_abs = scrapy.Field()
distance_def = scrapy.Field()
clinch_abs = scrapy.Field()
clinch_def = scrapy.Field()
ground_abs = scrapy.Field()
ground_def = scrapy.Field()
class UpcomingFightsItem(scrapy.Item):
fight_id = scrapy.Field(output_processor=TakeFirst())
date = scrapy.Field(output_processor=TakeFirst())
location = scrapy.Field(output_processor=TakeFirst())
fighter_1 = scrapy.Field(output_processor=TakeFirst())
fighter_1_id = scrapy.Field(output_processor=TakeFirst())
fighter_2 = scrapy.Field(output_processor=TakeFirst())
fighter_2_id = scrapy.Field(output_processor=TakeFirst())
weight_class = scrapy.Field(output_processor=TakeFirst())
class FighterSummaryItem(scrapy.Item):
# define the fields for your item here like:
fighter_id = scrapy.Field(output_processor=TakeFirst())
name = scrapy.Field(output_processor=TakeFirst())
height = scrapy.Field(output_processor=TakeFirst())
weight = scrapy.Field(output_processor=TakeFirst())
reach = scrapy.Field(output_processor=TakeFirst())
stance = scrapy.Field(output_processor=TakeFirst())
dob = scrapy.Field(output_processor=TakeFirst())
#active = scrapy.Field(output_processor=TakeFirst())
n_win = scrapy.Field(output_processor=STR_toInt)
n_loss = scrapy.Field(output_processor=STR_toInt)
n_draw = scrapy.Field(output_processor=STR_toInt)
sig_str_land_pM = scrapy.Field(output_processor=STR_toFloat)
sig_str_land_pct = scrapy.Field(
output_processor=Compose(TakeFirst(), stripPercent))
sig_str_abs_pM = scrapy.Field(output_processor=STR_toFloat)
sig_str_def_pct = scrapy.Field(
output_processor=Compose(TakeFirst(), stripPercent))
td_avg = scrapy.Field(output_processor=STR_toFloat)
td_land_pct = scrapy.Field(
output_processor=Compose(TakeFirst(), stripPercent))
td_def_pct = scrapy.Field(
output_processor=Compose(TakeFirst(), stripPercent))
sub_avg = scrapy.Field(output_processor=STR_toFloat)
|
{"/ufcStats/spiders/spider.py": ["/ufcStats/items.py", "/ufcStats/utils.py"], "/ufcStats/pipelines.py": ["/ufcStats/utils.py"]}
|
18,022
|
useakat/cfqcd
|
refs/heads/master
|
/particles.py
|
# This file was automatically created by FeynRules $Revision: 302 $
# Mathematica version: 7.0 for Mac OS X x86 (64-bit) (November 11, 2008)
# Date: Tue 31 Aug 2010 16:54:46
from __future__ import division
from object_library import all_particles, Particle
ve = Particle(pdg_code = 12,
name = 've',
antiname = 've~',
spin = 2,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 've',
antitexname = 've',
line = 'straight',
charge = 0,
LeptonNumber = 1,
GhostNumber = 0)
ve__tilde__ = ve.anti()
vm = Particle(pdg_code = 14,
name = 'vm',
antiname = 'vm~',
spin = 2,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'vm',
antitexname = 'vm',
line = 'straight',
charge = 0,
LeptonNumber = 1,
GhostNumber = 0)
vm__tilde__ = vm.anti()
vt = Particle(pdg_code = 16,
name = 'vt',
antiname = 'vt~',
spin = 2,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'vt',
antitexname = 'vt',
line = 'straight',
charge = 0,
LeptonNumber = 1,
GhostNumber = 0)
vt__tilde__ = vt.anti()
e__minus__ = Particle(pdg_code = 11,
name = 'e-',
antiname = 'e+',
spin = 2,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'e-',
antitexname = 'e-',
line = 'straight',
charge = -1,
LeptonNumber = 1,
GhostNumber = 0)
e__plus__ = e__minus__.anti()
m__minus__ = Particle(pdg_code = 13,
name = 'm-',
antiname = 'm+',
spin = 2,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'm-',
antitexname = 'm-',
line = 'straight',
charge = -1,
LeptonNumber = 1,
GhostNumber = 0)
m__plus__ = m__minus__.anti()
tt__minus__ = Particle(pdg_code = 15,
name = 'tt-',
antiname = 'tt+',
spin = 2,
color = 1,
mass = 'MTA',
width = 'ZERO',
texname = 'tt-',
antitexname = 'tt-',
line = 'straight',
charge = -1,
LeptonNumber = 1,
GhostNumber = 0)
tt__plus__ = tt__minus__.anti()
u = Particle(pdg_code = 2,
name = 'u',
antiname = 'u~',
spin = 2,
color = 3,
mass = 'ZERO',
width = 'ZERO',
texname = 'u',
antitexname = 'u',
line = 'straight',
charge = 2/3,
LeptonNumber = 0,
GhostNumber = 0)
u__tilde__ = u.anti()
c = Particle(pdg_code = 4,
name = 'c',
antiname = 'c~',
spin = 2,
color = 3,
mass = 'ZERO',
width = 'ZERO',
texname = 'c',
antitexname = 'c',
line = 'straight',
charge = 2/3,
LeptonNumber = 0,
GhostNumber = 0)
c__tilde__ = c.anti()
t = Particle(pdg_code = 6,
name = 't',
antiname = 't~',
spin = 2,
color = 3,
mass = 'MT',
width = 'WT',
texname = 't',
antitexname = 't',
line = 'straight',
charge = 2/3,
LeptonNumber = 0,
GhostNumber = 0)
t__tilde__ = t.anti()
d = Particle(pdg_code = 1,
name = 'd',
antiname = 'd~',
spin = 2,
color = 3,
mass = 'ZERO',
width = 'ZERO',
texname = 'd',
antitexname = 'd',
line = 'straight',
charge = -1/3,
LeptonNumber = 0,
GhostNumber = 0)
d__tilde__ = d.anti()
s = Particle(pdg_code = 3,
name = 's',
antiname = 's~',
spin = 2,
color = 3,
mass = 'ZERO',
width = 'ZERO',
texname = 's',
antitexname = 's',
line = 'straight',
charge = -1/3,
LeptonNumber = 0,
GhostNumber = 0)
s__tilde__ = s.anti()
b = Particle(pdg_code = 5,
name = 'b',
antiname = 'b~',
spin = 2,
color = 3,
mass = 'MB',
width = 'ZERO',
texname = 'b',
antitexname = 'b',
line = 'straight',
charge = -1/3,
LeptonNumber = 0,
GhostNumber = 0)
b__tilde__ = b.anti()
ghA = Particle(pdg_code = 9000001,
name = 'ghA',
antiname = 'ghA~',
spin = -1,
color = 1,
mass = 'ZERO',
width = 'WghA',
texname = 'ghA',
antitexname = 'ghA',
line = 'dotted',
charge = 0,
LeptonNumber = 0,
GhostNumber = 1)
ghA__tilde__ = ghA.anti()
ghZ = Particle(pdg_code = 9000002,
name = 'ghZ',
antiname = 'ghZ~',
spin = -1,
color = 1,
mass = 'MZ',
width = 'WghZ',
texname = 'ghZ',
antitexname = 'ghZ',
line = 'dotted',
charge = 0,
LeptonNumber = 0,
GhostNumber = 1)
ghZ__tilde__ = ghZ.anti()
ghWp = Particle(pdg_code = 9000003,
name = 'ghWp',
antiname = 'ghWp~',
spin = -1,
color = 1,
mass = 'MW',
width = 'WghWp',
texname = 'ghWp',
antitexname = 'ghWp',
line = 'dotted',
charge = 1,
LeptonNumber = 0,
GhostNumber = 1)
ghWp__tilde__ = ghWp.anti()
ghWm = Particle(pdg_code = 9000004,
name = 'ghWm',
antiname = 'ghWm~',
spin = -1,
color = 1,
mass = 'MW',
width = 'WghWm',
texname = 'ghWm',
antitexname = 'ghWm',
line = 'dotted',
charge = -1,
LeptonNumber = 0,
GhostNumber = 1)
ghWm__tilde__ = ghWm.anti()
ghG = Particle(pdg_code = 9000005,
name = 'ghG',
antiname = 'ghG~',
spin = -1,
color = 8,
mass = 'ZERO',
width = 'WghG',
texname = 'ghG',
antitexname = 'ghG',
line = 'dotted',
charge = 0,
LeptonNumber = 0,
GhostNumber = 1)
ghG__tilde__ = ghG.anti()
A = Particle(pdg_code = 22,
name = 'A',
antiname = 'A',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'A',
antitexname = 'A',
line = 'wavy',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
Z = Particle(pdg_code = 23,
name = 'Z',
antiname = 'Z',
spin = 3,
color = 1,
mass = 'MZ',
width = 'WZ',
texname = 'Z',
antitexname = 'Z',
line = 'wavy',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
W__plus__ = Particle(pdg_code = 24,
name = 'W+',
antiname = 'W-',
spin = 3,
color = 1,
mass = 'MW',
width = 'WW',
texname = 'W+',
antitexname = 'W+',
line = 'wavy',
charge = 1,
LeptonNumber = 0,
GhostNumber = 0)
W__minus__ = W__plus__.anti()
G = Particle(pdg_code = 21,
name = 'G',
antiname = 'G',
spin = 3,
color = 8,
mass = 'ZERO',
width = 'ZERO',
texname = 'G',
antitexname = 'G',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
H = Particle(pdg_code = 25,
name = 'H',
antiname = 'H',
spin = 1,
color = 1,
mass = 'MH',
width = 'WH',
texname = '\\phi',
antitexname = '\\phi',
line = 'dashed',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
phi0 = Particle(pdg_code = 250,
name = 'phi0',
antiname = 'phi0',
spin = 1,
color = 1,
mass = 'MZ',
width = 'Wphi',
texname = 'phi0',
antitexname = 'phi0',
line = 'dashed',
GoldstoneBoson = True,
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
phi__plus__ = Particle(pdg_code = 251,
name = 'phi+',
antiname = 'phi-',
spin = 1,
color = 1,
mass = 'MW',
width = 'Wphi2',
texname = '\\phi^+',
antitexname = '\\phi^+',
line = 'dashed',
GoldstoneBoson = True,
charge = 1,
LeptonNumber = 0,
GhostNumber = 0)
phi__minus__ = phi__plus__.anti()
#For gg>8g, uu~>8g and uu>uu6g
g13 = Particle(pdg_code = 9013,
name = 'g13',
antiname = 'g13',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g13',
antitexname = 'g13',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g14 = Particle(pdg_code = 9014,
name = 'g14',
antiname = 'g14',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g14',
antitexname = 'g14',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g15 = Particle(pdg_code = 9015,
name = 'g15',
antiname = 'g15',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g15',
antitexname = 'g15',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g16 = Particle(pdg_code = 9016,
name = 'g16',
antiname = 'g16',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g16',
antitexname = 'g16',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g17 = Particle(pdg_code = 9017,
name = 'g17',
antiname = 'g17',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g17',
antitexname = 'g17',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g18 = Particle(pdg_code = 9018,
name = 'g18',
antiname = 'g18',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g18',
antitexname = 'g18',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g19 = Particle(pdg_code = 9019,
name = 'g19',
antiname = 'g19',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g19',
antitexname = 'g19',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g1a = Particle(pdg_code = 90110,
name = 'g1a',
antiname = 'g1a',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g1a',
antitexname = 'g1a',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g21 = Particle(pdg_code = 9021,
name = 'g21',
antiname = 'g21',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g21',
antitexname = 'g21',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g24 = Particle(pdg_code = 9024,
name = 'g24',
antiname = 'g24',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g24',
antitexname = 'g24',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g25 = Particle(pdg_code = 9025,
name = 'g25',
antiname = 'g25',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g25',
antitexname = 'g25',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g26 = Particle(pdg_code = 9026,
name = 'g26',
antiname = 'g26',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g26',
antitexname = 'g26',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g27 = Particle(pdg_code = 9027,
name = 'g27',
antiname = 'g27',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g27',
antitexname = 'g27',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g28 = Particle(pdg_code = 9028,
name = 'g28',
antiname = 'g28',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g28',
antitexname = 'g28',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g29 = Particle(pdg_code = 9029,
name = 'g29',
antiname = 'g29',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g29',
antitexname = 'g29',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g2a = Particle(pdg_code = 90210,
name = 'g2a',
antiname = 'g2a',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g2a',
antitexname = 'g2a',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g31 = Particle(pdg_code = 9031,
name = 'g31',
antiname = 'g31',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g31',
antitexname = 'g31',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g32 = Particle(pdg_code = 9032,
name = 'g32',
antiname = 'g32',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g32',
antitexname = 'g32',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g35 = Particle(pdg_code = 9035,
name = 'g35',
antiname = 'g35',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g35',
antitexname = 'g35',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g36 = Particle(pdg_code = 9036,
name = 'g36',
antiname = 'g36',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g36',
antitexname = 'g36',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g37 = Particle(pdg_code = 9037,
name = 'g37',
antiname = 'g37',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g37',
antitexname = 'g37',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g38 = Particle(pdg_code = 9038,
name = 'g38',
antiname = 'g38',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g38',
antitexname = 'g38',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g39 = Particle(pdg_code = 9039,
name = 'g39',
antiname = 'g39',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g39',
antitexname = 'g39',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g3a = Particle(pdg_code = 90310,
name = 'g3a',
antiname = 'g3a',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g3a',
antitexname = 'g3a',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g41 = Particle(pdg_code = 9041,
name = 'g41',
antiname = 'g41',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g41',
antitexname = 'g41',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g42 = Particle(pdg_code = 9042,
name = 'g42',
antiname = 'g42',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g42',
antitexname = 'g42',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g43 = Particle(pdg_code = 9043,
name = 'g43',
antiname = 'g43',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g43',
antitexname = 'g43',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g46 = Particle(pdg_code = 9046,
name = 'g46',
antiname = 'g46',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g46',
antitexname = 'g46',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g47 = Particle(pdg_code = 9047,
name = 'g47',
antiname = 'g47',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g47',
antitexname = 'g47',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g48 = Particle(pdg_code = 9048,
name = 'g48',
antiname = 'g48',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g48',
antitexname = 'g48',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g49 = Particle(pdg_code = 9049,
name = 'g49',
antiname = 'g49',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g49',
antitexname = 'g49',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g4a = Particle(pdg_code = 90410,
name = 'g4a',
antiname = 'g4a',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g4a',
antitexname = 'g4a',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g51 = Particle(pdg_code = 9051,
name = 'g51',
antiname = 'g51',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g51',
antitexname = 'g51',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g52 = Particle(pdg_code = 9052,
name = 'g52',
antiname = 'g52',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g52',
antitexname = 'g52',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g53 = Particle(pdg_code = 9053,
name = 'g53',
antiname = 'g53',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g53',
antitexname = 'g53',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g54 = Particle(pdg_code = 9054,
name = 'g54',
antiname = 'g54',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g54',
antitexname = 'g54',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g57 = Particle(pdg_code = 9057,
name = 'g57',
antiname = 'g57',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g57',
antitexname = 'g57',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g58 = Particle(pdg_code = 9058,
name = 'g58',
antiname = 'g58',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g58',
antitexname = 'g58',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g59 = Particle(pdg_code = 9059,
name = 'g59',
antiname = 'g59',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g59',
antitexname = 'g59',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g5a = Particle(pdg_code = 90510,
name = 'g5a',
antiname = 'g5a',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g5a',
antitexname = 'g5a',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
g61 = Particle(pdg_code = 9061,
name = 'g61',
antiname = 'g61',
spin = 3,
color = 1,
mass = 'ZERO',
width = 'ZERO',
texname = 'g61',
antitexname = 'g61',
line = 'curly',
charge = 0,
LeptonNumber = 0,
GhostNumber = 0)
# ---------------------------------------------------------------------------
# Auxiliary colour-flow particles for the gg>8g, uu~>8g and uu>uu6g processes.
# Every gNM entry is a massless, self-conjugate, colour-singlet spin-1
# placeholder and every uN entry a massless charge-2/3 fermion placeholder;
# the entries differ only in PDG code, display name and (for g0) line style.
# They are therefore built through the two local factories below instead of
# repeating the full Particle(...) call for each one.
# ---------------------------------------------------------------------------

def _flow_gluon(label, pdg, line = 'curly'):
    """Return a massless self-conjugate colour-flow gluon placeholder.

    label -- particle name; reused for antiname, texname and antitexname
    pdg   -- model-internal PDG code (9xxx / 9xxx0 range)
    line  -- diagram line style; 'wavy' only for the special g0 entry
    """
    return Particle(pdg_code = pdg,
                    name = label,
                    antiname = label,
                    spin = 3,
                    color = 1,
                    mass = 'ZERO',
                    width = 'ZERO',
                    texname = label,
                    antitexname = label,
                    line = line,
                    charge = 0,
                    LeptonNumber = 0,
                    GhostNumber = 0)

g62 = _flow_gluon('g62', 9062)
g63 = _flow_gluon('g63', 9063)
g64 = _flow_gluon('g64', 9064)
g65 = _flow_gluon('g65', 9065)
g68 = _flow_gluon('g68', 9068)
g69 = _flow_gluon('g69', 9069)
g6a = _flow_gluon('g6a', 90610)   # 'a' suffix encodes index 10
g71 = _flow_gluon('g71', 9071)
g72 = _flow_gluon('g72', 9072)
g73 = _flow_gluon('g73', 9073)
g74 = _flow_gluon('g74', 9074)
g75 = _flow_gluon('g75', 9075)
g76 = _flow_gluon('g76', 9076)
g79 = _flow_gluon('g79', 9079)
g7a = _flow_gluon('g7a', 90710)
g81 = _flow_gluon('g81', 9081)
g82 = _flow_gluon('g82', 9082)
g83 = _flow_gluon('g83', 9083)
g84 = _flow_gluon('g84', 9084)
g85 = _flow_gluon('g85', 9085)
g86 = _flow_gluon('g86', 9086)
g87 = _flow_gluon('g87', 9087)
g8a = _flow_gluon('g8a', 90810)
g91 = _flow_gluon('g91', 9091)
g92 = _flow_gluon('g92', 9092)
g93 = _flow_gluon('g93', 9093)
g94 = _flow_gluon('g94', 9094)
g95 = _flow_gluon('g95', 9095)
g96 = _flow_gluon('g96', 9096)
g97 = _flow_gluon('g97', 9097)
g98 = _flow_gluon('g98', 9098)
ga2 = _flow_gluon('ga2', 9102)
ga3 = _flow_gluon('ga3', 9103)
ga4 = _flow_gluon('ga4', 9104)
ga5 = _flow_gluon('ga5', 9105)
ga6 = _flow_gluon('ga6', 9106)
ga7 = _flow_gluon('ga7', 9107)
ga8 = _flow_gluon('ga8', 9108)
ga9 = _flow_gluon('ga9', 9109)
# g0 is the only flow gluon drawn with a wavy line in the original listing.
g0 = _flow_gluon('g0', 9000, line = 'wavy')

def _flow_uquark(label, pdg):
    """Return a massless up-type colour-flow quark placeholder.

    The antiparticle name is label + '~'; texname/antitexname both reuse
    the bare label, matching the original generated listing.
    """
    return Particle(pdg_code = pdg,
                    name = label,
                    antiname = label + '~',
                    spin = 2,
                    color = 1,
                    mass = 'ZERO',
                    width = 'ZERO',
                    texname = label,
                    antitexname = label,
                    line = 'straight',
                    charge = 2/3,
                    LeptonNumber = 0,
                    GhostNumber = 0)

u1 = _flow_uquark('u1', 9001)
u1__tilde__ = u1.anti()
u2 = _flow_uquark('u2', 9002)
u2__tilde__ = u2.anti()
u3 = _flow_uquark('u3', 9003)
u3__tilde__ = u3.anti()
u4 = _flow_uquark('u4', 9004)
u4__tilde__ = u4.anti()
u5 = _flow_uquark('u5', 9005)
u5__tilde__ = u5.anti()
u6 = _flow_uquark('u6', 9006)
u6__tilde__ = u6.anti()
u7 = _flow_uquark('u7', 9007)
u7__tilde__ = u7.anti()
u8 = _flow_uquark('u8', 9008)
u8__tilde__ = u8.anti()
u9 = _flow_uquark('u9', 9009)
u9__tilde__ = u9.anti()
ua = _flow_uquark('ua', 90010)
ua__tilde__ = ua.anti()
|
{"/vertices.py": ["/particles.py"]}
|
18,023
|
useakat/cfqcd
|
refs/heads/master
|
/vertices.py
|
# This file was automatically created by FeynRules $Revision: 302 $
# Mathematica version: 7.0 for Mac OS X x86 (64-bit) (November 11, 2008)
# Date: Tue 31 Aug 2010 16:54:46
from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L
# Standard-model interaction vertices V_1..V_55, written one per line through
# a thin local factory so the physics content (particles, colour structure,
# Lorentz structures, couplings) of each vertex is visible at a glance.
def _sm_vertex(name, particles, color, lorentz, couplings):
    """Thin wrapper around Vertex; exists only to compact the listing below."""
    return Vertex(name = name,
                  particles = particles,
                  color = color,
                  lorentz = lorentz,
                  couplings = couplings)

# Scalar and gauge-boson self-interactions.
V_1 = _sm_vertex('V_1', [ P.H, P.H, P.H ], [ '1' ], [ L.SSS1 ], {(0,0):C.GC_21})
V_2 = _sm_vertex('V_2', [ P.G, P.G, P.G ], [ 'f(1,2,3)' ], [ L.VVV1 ], {(0,0):C.GC_4})
V_3 = _sm_vertex('V_3', [ P.G, P.G, P.G, P.G ], [ 'f(2,3,\'a1\')*f(\'a1\',1,4)', 'f(2,4,\'a1\')*f(\'a1\',1,3)', 'f(3,4,\'a1\')*f(\'a1\',1,2)' ], [ L.VVVV1, L.VVVV3, L.VVVV4 ], {(1,1):C.GC_6,(2,0):C.GC_6,(0,2):C.GC_6})
V_4 = _sm_vertex('V_4', [ P.A, P.W__minus__, P.W__plus__ ], [ '1' ], [ L.VVV1 ], {(0,0):C.GC_16})
V_5 = _sm_vertex('V_5', [ P.W__minus__, P.W__plus__, P.H, P.H ], [ '1' ], [ L.VVSS1 ], {(0,0):C.GC_10})
V_6 = _sm_vertex('V_6', [ P.W__minus__, P.W__plus__, P.H ], [ '1' ], [ L.VVS1 ], {(0,0):C.GC_22})
V_7 = _sm_vertex('V_7', [ P.A, P.A, P.W__minus__, P.W__plus__ ], [ '1' ], [ L.VVVV2 ], {(0,0):C.GC_18})
V_8 = _sm_vertex('V_8', [ P.W__minus__, P.W__plus__, P.Z ], [ '1' ], [ L.VVV1 ], {(0,0):C.GC_7})
V_9 = _sm_vertex('V_9', [ P.W__minus__, P.W__minus__, P.W__plus__, P.W__plus__ ], [ '1' ], [ L.VVVV2 ], {(0,0):C.GC_8})
V_10 = _sm_vertex('V_10', [ P.A, P.W__minus__, P.W__plus__, P.Z ], [ '1' ], [ L.VVVV5 ], {(0,0):C.GC_17})
V_11 = _sm_vertex('V_11', [ P.Z, P.Z, P.H, P.H ], [ '1' ], [ L.VVSS1 ], {(0,0):C.GC_20})
V_12 = _sm_vertex('V_12', [ P.Z, P.Z, P.H ], [ '1' ], [ L.VVS1 ], {(0,0):C.GC_23})
V_13 = _sm_vertex('V_13', [ P.W__minus__, P.W__plus__, P.Z, P.Z ], [ '1' ], [ L.VVVV2 ], {(0,0):C.GC_9})
# Photon couplings to fermions.
V_14 = _sm_vertex('V_14', [ P.d__tilde__, P.d, P.A ], [ 'Identity(1,2)' ], [ L.FFV1 ], {(0,0):C.GC_1})
V_15 = _sm_vertex('V_15', [ P.s__tilde__, P.s, P.A ], [ 'Identity(1,2)' ], [ L.FFV1 ], {(0,0):C.GC_1})
V_16 = _sm_vertex('V_16', [ P.b__tilde__, P.b, P.A ], [ 'Identity(1,2)' ], [ L.FFV1 ], {(0,0):C.GC_1})
V_17 = _sm_vertex('V_17', [ P.e__plus__, P.e__minus__, P.A ], [ '1' ], [ L.FFV1 ], {(0,0):C.GC_3})
V_18 = _sm_vertex('V_18', [ P.m__plus__, P.m__minus__, P.A ], [ '1' ], [ L.FFV1 ], {(0,0):C.GC_3})
V_19 = _sm_vertex('V_19', [ P.tt__plus__, P.tt__minus__, P.A ], [ '1' ], [ L.FFV1 ], {(0,0):C.GC_3})
V_20 = _sm_vertex('V_20', [ P.u__tilde__, P.u, P.A ], [ 'Identity(1,2)' ], [ L.FFV1 ], {(0,0):C.GC_2})
V_21 = _sm_vertex('V_21', [ P.c__tilde__, P.c, P.A ], [ 'Identity(1,2)' ], [ L.FFV1 ], {(0,0):C.GC_2})
V_22 = _sm_vertex('V_22', [ P.t__tilde__, P.t, P.A ], [ 'Identity(1,2)' ], [ L.FFV1 ], {(0,0):C.GC_2})
# Gluon couplings to down-type quarks and Higgs/Z couplings.
V_23 = _sm_vertex('V_23', [ P.d__tilde__, P.d, P.G ], [ 'T(3,2,1)' ], [ L.FFV1 ], {(0,0):C.GC_5})
V_24 = _sm_vertex('V_24', [ P.s__tilde__, P.s, P.G ], [ 'T(3,2,1)' ], [ L.FFV1 ], {(0,0):C.GC_5})
V_25 = _sm_vertex('V_25', [ P.b__tilde__, P.b, P.G ], [ 'T(3,2,1)' ], [ L.FFV1 ], {(0,0):C.GC_5})
V_26 = _sm_vertex('V_26', [ P.b__tilde__, P.b, P.H ], [ 'Identity(1,2)' ], [ L.FFS1 ], {(0,0):C.GC_24})
V_27 = _sm_vertex('V_27', [ P.d__tilde__, P.d, P.Z ], [ 'Identity(1,2)' ], [ L.FFV2, L.FFV3 ], {(0,0):C.GC_12,(0,1):C.GC_14})
V_28 = _sm_vertex('V_28', [ P.s__tilde__, P.s, P.Z ], [ 'Identity(1,2)' ], [ L.FFV2, L.FFV3 ], {(0,0):C.GC_12,(0,1):C.GC_14})
V_29 = _sm_vertex('V_29', [ P.b__tilde__, P.b, P.Z ], [ 'Identity(1,2)' ], [ L.FFV2, L.FFV3 ], {(0,0):C.GC_12,(0,1):C.GC_14})
# Charged-current quark vertices.
V_30 = _sm_vertex('V_30', [ P.d__tilde__, P.u, P.W__minus__ ], [ 'Identity(1,2)' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_31 = _sm_vertex('V_31', [ P.s__tilde__, P.c, P.W__minus__ ], [ 'Identity(1,2)' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_32 = _sm_vertex('V_32', [ P.b__tilde__, P.t, P.W__minus__ ], [ 'Identity(1,2)' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_33 = _sm_vertex('V_33', [ P.u__tilde__, P.d, P.W__plus__ ], [ 'Identity(1,2)' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_34 = _sm_vertex('V_34', [ P.c__tilde__, P.s, P.W__plus__ ], [ 'Identity(1,2)' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_35 = _sm_vertex('V_35', [ P.t__tilde__, P.b, P.W__plus__ ], [ 'Identity(1,2)' ], [ L.FFV2 ], {(0,0):C.GC_11})
# Gluon couplings to up-type quarks; Yukawa couplings.
V_36 = _sm_vertex('V_36', [ P.u__tilde__, P.u, P.G ], [ 'T(3,2,1)' ], [ L.FFV1 ], {(0,0):C.GC_5})
V_37 = _sm_vertex('V_37', [ P.c__tilde__, P.c, P.G ], [ 'T(3,2,1)' ], [ L.FFV1 ], {(0,0):C.GC_5})
V_38 = _sm_vertex('V_38', [ P.t__tilde__, P.t, P.G ], [ 'T(3,2,1)' ], [ L.FFV1 ], {(0,0):C.GC_5})
V_39 = _sm_vertex('V_39', [ P.tt__plus__, P.tt__minus__, P.H ], [ '1' ], [ L.FFS1 ], {(0,0):C.GC_26})
V_40 = _sm_vertex('V_40', [ P.t__tilde__, P.t, P.H ], [ 'Identity(1,2)' ], [ L.FFS1 ], {(0,0):C.GC_25})
# Z couplings to charged leptons.
V_41 = _sm_vertex('V_41', [ P.e__plus__, P.e__minus__, P.Z ], [ '1' ], [ L.FFV2, L.FFV4 ], {(0,0):C.GC_12,(0,1):C.GC_15})
V_42 = _sm_vertex('V_42', [ P.m__plus__, P.m__minus__, P.Z ], [ '1' ], [ L.FFV2, L.FFV4 ], {(0,0):C.GC_12,(0,1):C.GC_15})
V_43 = _sm_vertex('V_43', [ P.tt__plus__, P.tt__minus__, P.Z ], [ '1' ], [ L.FFV2, L.FFV4 ], {(0,0):C.GC_12,(0,1):C.GC_15})
# Charged-current lepton vertices.
V_44 = _sm_vertex('V_44', [ P.e__plus__, P.ve, P.W__minus__ ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_45 = _sm_vertex('V_45', [ P.m__plus__, P.vm, P.W__minus__ ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_46 = _sm_vertex('V_46', [ P.tt__plus__, P.vt, P.W__minus__ ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_47 = _sm_vertex('V_47', [ P.ve__tilde__, P.e__minus__, P.W__plus__ ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_48 = _sm_vertex('V_48', [ P.vm__tilde__, P.m__minus__, P.W__plus__ ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_11})
V_49 = _sm_vertex('V_49', [ P.vt__tilde__, P.tt__minus__, P.W__plus__ ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_11})
# Z couplings to up-type quarks and neutrinos.
V_50 = _sm_vertex('V_50', [ P.u__tilde__, P.u, P.Z ], [ 'Identity(1,2)' ], [ L.FFV2, L.FFV5 ], {(0,0):C.GC_13,(0,1):C.GC_14})
V_51 = _sm_vertex('V_51', [ P.c__tilde__, P.c, P.Z ], [ 'Identity(1,2)' ], [ L.FFV2, L.FFV5 ], {(0,0):C.GC_13,(0,1):C.GC_14})
V_52 = _sm_vertex('V_52', [ P.t__tilde__, P.t, P.Z ], [ 'Identity(1,2)' ], [ L.FFV2, L.FFV5 ], {(0,0):C.GC_13,(0,1):C.GC_14})
V_53 = _sm_vertex('V_53', [ P.ve__tilde__, P.ve, P.Z ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_19})
V_54 = _sm_vertex('V_54', [ P.vm__tilde__, P.vm, P.Z ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_19})
V_55 = _sm_vertex('V_55', [ P.vt__tilde__, P.vt, P.Z ], [ '1' ], [ L.FFV2 ], {(0,0):C.GC_19})
#For gg>8g, uu~>8g and uu>uu6g
# Three-point colour-flow gluon vertices V_56..V_92.  Every one of them has
# trivial colour ('1'), the VVV1 Lorentz structure and coupling GC_4; only
# the three flow-gluon legs differ, so they are built through a local factory.
def _flow3(name, a, b, c):
    """Three-point flow-gluon vertex with colour '1', L.VVV1 and C.GC_4."""
    return Vertex(name = name,
                  particles = [ a, b, c ],
                  color = [ '1' ],
                  lorentz = [ L.VVV1 ],
                  couplings = {(0,0):C.GC_4})

V_56 = _flow3('V_56', P.g21, P.g32, P.g13)
V_57 = _flow3('V_57', P.g21, P.g42, P.g14)
V_58 = _flow3('V_58', P.g21, P.g52, P.g15)
V_59 = _flow3('V_59', P.g21, P.g62, P.g16)
V_60 = _flow3('V_60', P.g21, P.g72, P.g17)
V_61 = _flow3('V_61', P.g21, P.g82, P.g18)
V_62 = _flow3('V_62', P.g31, P.g43, P.g14)
V_63 = _flow3('V_63', P.g41, P.g54, P.g15)
V_64 = _flow3('V_64', P.g51, P.g65, P.g16)
V_65 = _flow3('V_65', P.g61, P.g76, P.g17)
V_66 = _flow3('V_66', P.g71, P.g87, P.g18)
V_67 = _flow3('V_67', P.g32, P.g43, P.g24)
V_68 = _flow3('V_68', P.g32, P.g53, P.g25)
V_69 = _flow3('V_69', P.g32, P.g63, P.g26)
V_70 = _flow3('V_70', P.g32, P.g73, P.g27)
V_71 = _flow3('V_71', P.g32, P.g83, P.g28)
V_72 = _flow3('V_72', P.g42, P.g54, P.g25)
V_73 = _flow3('V_73', P.g52, P.g65, P.g26)
V_74 = _flow3('V_74', P.g62, P.g76, P.g27)
V_75 = _flow3('V_75', P.g72, P.g87, P.g28)
V_76 = _flow3('V_76', P.g43, P.g54, P.g35)
V_77 = _flow3('V_77', P.g43, P.g64, P.g36)
V_78 = _flow3('V_78', P.g43, P.g74, P.g37)
V_79 = _flow3('V_79', P.g43, P.g84, P.g38)
V_80 = _flow3('V_80', P.g53, P.g65, P.g36)
V_81 = _flow3('V_81', P.g63, P.g76, P.g37)
V_82 = _flow3('V_82', P.g73, P.g87, P.g38)
V_83 = _flow3('V_83', P.g54, P.g65, P.g46)
V_84 = _flow3('V_84', P.g54, P.g75, P.g47)
V_85 = _flow3('V_85', P.g54, P.g85, P.g48)
V_86 = _flow3('V_86', P.g64, P.g76, P.g47)
V_87 = _flow3('V_87', P.g74, P.g87, P.g48)
V_88 = _flow3('V_88', P.g65, P.g76, P.g57)
V_89 = _flow3('V_89', P.g65, P.g86, P.g58)
V_90 = _flow3('V_90', P.g75, P.g87, P.g58)
V_91 = _flow3('V_91', P.g76, P.g87, P.g68)
V_92 = _flow3('V_92', P.g87, P.g98, P.g79)
# 4-point
# Four-point colour-flow gluon vertices V_93..V_128.  All share colour '1',
# the GLUON4 Lorentz structure and coupling G2; only the legs vary.
def _flow4(name, a, b, c, d):
    """Four-point flow-gluon vertex with colour '1', L.GLUON4 and C.G2."""
    return Vertex(name = name,
                  particles = [ a, b, c, d ],
                  color = [ '1' ],
                  lorentz = [ L.GLUON4 ],
                  couplings = {(0,0):C.G2})

V_93 = _flow4('V_93', P.g21, P.g32, P.g43, P.g14)
V_94 = _flow4('V_94', P.g21, P.g32, P.g53, P.g15)
V_95 = _flow4('V_95', P.g21, P.g32, P.g63, P.g16)
V_96 = _flow4('V_96', P.g21, P.g32, P.g73, P.g17)
V_97 = _flow4('V_97', P.g21, P.g32, P.g83, P.g18)
V_98 = _flow4('V_98', P.g21, P.g42, P.g54, P.g15)
V_99 = _flow4('V_99', P.g21, P.g52, P.g65, P.g16)
V_100 = _flow4('V_100', P.g21, P.g62, P.g76, P.g17)
V_101 = _flow4('V_101', P.g21, P.g72, P.g87, P.g18)
V_102 = _flow4('V_102', P.g31, P.g43, P.g54, P.g15)
V_103 = _flow4('V_103', P.g41, P.g54, P.g65, P.g16)
V_104 = _flow4('V_104', P.g51, P.g65, P.g76, P.g17)
V_105 = _flow4('V_105', P.g61, P.g76, P.g87, P.g18)
V_106 = _flow4('V_106', P.g32, P.g43, P.g54, P.g25)
V_107 = _flow4('V_107', P.g32, P.g43, P.g64, P.g26)
V_108 = _flow4('V_108', P.g32, P.g43, P.g74, P.g27)
V_109 = _flow4('V_109', P.g32, P.g43, P.g84, P.g28)
V_110 = _flow4('V_110', P.g32, P.g53, P.g65, P.g26)
V_111 = _flow4('V_111', P.g32, P.g63, P.g76, P.g27)
V_112 = _flow4('V_112', P.g32, P.g73, P.g87, P.g28)
V_113 = _flow4('V_113', P.g42, P.g54, P.g65, P.g26)
V_114 = _flow4('V_114', P.g52, P.g65, P.g76, P.g27)
V_115 = _flow4('V_115', P.g62, P.g76, P.g87, P.g28)
V_116 = _flow4('V_116', P.g43, P.g54, P.g65, P.g36)
V_117 = _flow4('V_117', P.g43, P.g54, P.g75, P.g37)
V_118 = _flow4('V_118', P.g43, P.g54, P.g85, P.g38)
V_119 = _flow4('V_119', P.g43, P.g64, P.g76, P.g37)
V_120 = _flow4('V_120', P.g43, P.g74, P.g87, P.g38)
V_121 = _flow4('V_121', P.g53, P.g65, P.g76, P.g37)
V_122 = _flow4('V_122', P.g63, P.g76, P.g87, P.g38)
V_123 = _flow4('V_123', P.g54, P.g65, P.g76, P.g47)
V_124 = _flow4('V_124', P.g54, P.g65, P.g86, P.g48)
V_125 = _flow4('V_125', P.g54, P.g75, P.g87, P.g48)
V_126 = _flow4('V_126', P.g64, P.g76, P.g87, P.g48)
V_127 = _flow4('V_127', P.g65, P.g76, P.g87, P.g58)
V_128 = _flow4('V_128', P.g76, P.g87, P.g98, P.g69)
# 5-point
# Five-point colour-flow gluon vertices V_129..V_157.  All share colour '1',
# the GLUON5 Lorentz structure and coupling G2.
# NOTE: the generated listing labelled these 'V129'..'V157' (no underscore),
# inconsistent with the 'V_1'..'V_128' convention used everywhere else in
# this file; the names are normalized to 'V_129'.. below.
def _flow5(name, a, b, c, d, e):
    """Five-point flow-gluon vertex with colour '1', L.GLUON5 and C.G2."""
    return Vertex(name = name,
                  particles = [ a, b, c, d, e ],
                  color = [ '1' ],
                  lorentz = [ L.GLUON5 ],
                  couplings = {(0,0):C.G2})

V_129 = _flow5('V_129', P.g21, P.g32, P.g43, P.g54, P.g15)
V_130 = _flow5('V_130', P.g21, P.g32, P.g43, P.g64, P.g16)
V_131 = _flow5('V_131', P.g21, P.g32, P.g43, P.g74, P.g17)
V_132 = _flow5('V_132', P.g21, P.g32, P.g43, P.g84, P.g18)
V_133 = _flow5('V_133', P.g21, P.g32, P.g53, P.g65, P.g16)
V_134 = _flow5('V_134', P.g21, P.g32, P.g63, P.g76, P.g17)
V_135 = _flow5('V_135', P.g21, P.g32, P.g73, P.g87, P.g18)
V_136 = _flow5('V_136', P.g21, P.g42, P.g54, P.g65, P.g16)
V_137 = _flow5('V_137', P.g21, P.g52, P.g65, P.g76, P.g17)
V_138 = _flow5('V_138', P.g21, P.g62, P.g76, P.g87, P.g18)
V_139 = _flow5('V_139', P.g31, P.g43, P.g54, P.g65, P.g16)
V_140 = _flow5('V_140', P.g41, P.g54, P.g65, P.g76, P.g17)
V_141 = _flow5('V_141', P.g51, P.g65, P.g76, P.g87, P.g18)
V_142 = _flow5('V_142', P.g32, P.g43, P.g54, P.g65, P.g26)
V_143 = _flow5('V_143', P.g32, P.g43, P.g54, P.g75, P.g27)
V_144 = _flow5('V_144', P.g32, P.g43, P.g54, P.g85, P.g28)
V_145 = _flow5('V_145', P.g32, P.g43, P.g64, P.g76, P.g27)
V_146 = _flow5('V_146', P.g32, P.g43, P.g74, P.g87, P.g28)
V_147 = _flow5('V_147', P.g32, P.g53, P.g65, P.g76, P.g27)
V_148 = _flow5('V_148', P.g32, P.g63, P.g76, P.g87, P.g28)
V_149 = _flow5('V_149', P.g42, P.g54, P.g65, P.g76, P.g27)
V_150 = _flow5('V_150', P.g52, P.g65, P.g76, P.g87, P.g28)
V_151 = _flow5('V_151', P.g43, P.g54, P.g65, P.g76, P.g37)
V_152 = _flow5('V_152', P.g43, P.g54, P.g65, P.g86, P.g38)
V_153 = _flow5('V_153', P.g43, P.g54, P.g75, P.g87, P.g38)
V_154 = _flow5('V_154', P.g43, P.g64, P.g76, P.g87, P.g38)
V_155 = _flow5('V_155', P.g53, P.g65, P.g76, P.g87, P.g38)
V_156 = _flow5('V_156', P.g54, P.g65, P.g76, P.g87, P.g48)
V_157 = _flow5('V_157', P.g65, P.g76, P.g87, P.g98, P.g59)
# 6-point
V_158 = Vertex(name = 'V158',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g65, P.g16 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_159 = Vertex(name = 'V159',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g75, P.g17 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_160 = Vertex(name = 'V160',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g85, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_161 = Vertex(name = 'V161',
particles = [ P.g21, P.g32, P.g43, P.g64, P.g76, P.g17 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_162 = Vertex(name = 'V162',
particles = [ P.g21, P.g32, P.g43, P.g74, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_163 = Vertex(name = 'V163',
particles = [ P.g21, P.g32, P.g53, P.g65, P.g76, P.g17 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_164 = Vertex(name = 'V164',
particles = [ P.g21, P.g32, P.g63, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_165 = Vertex(name = 'V165',
particles = [ P.g21, P.g42, P.g54, P.g65, P.g76, P.g17 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_166 = Vertex(name = 'V166',
particles = [ P.g21, P.g52, P.g65, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_167 = Vertex(name = 'V167',
particles = [ P.g31, P.g43, P.g54, P.g65, P.g76, P.g17 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_168 = Vertex(name = 'V168',
particles = [ P.g41, P.g54, P.g65, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_169 = Vertex(name = 'V169',
particles = [ P.g32, P.g43, P.g54, P.g65, P.g76, P.g27 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_170 = Vertex(name = 'V170',
particles = [ P.g32, P.g43, P.g54, P.g65, P.g86, P.g28 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_171 = Vertex(name = 'V171',
particles = [ P.g32, P.g43, P.g54, P.g75, P.g87, P.g28 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_172 = Vertex(name = 'V172',
particles = [ P.g32, P.g43, P.g64, P.g76, P.g87, P.g28 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_173 = Vertex(name = 'V173',
particles = [ P.g32, P.g53, P.g65, P.g76, P.g87, P.g28 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_174 = Vertex(name = 'V174',
particles = [ P.g42, P.g54, P.g65, P.g76, P.g87, P.g28 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_175 = Vertex(name = 'V175',
particles = [ P.g43, P.g54, P.g65, P.g76, P.g87, P.g38 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
V_176 = Vertex(name = 'V176',
particles = [ P.g54, P.g65, P.g76, P.g87, P.g98, P.g49 ],
color = [ '1' ],
lorentz = [ L.GLUON6 ],
couplings = {(0,0):C.G2})
# 7-point
V_177 = Vertex(name = 'V177',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g65, P.g76, P.g17 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_178 = Vertex(name = 'V178',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g65, P.g86, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_179 = Vertex(name = 'V179',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g75, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_180 = Vertex(name = 'V180',
particles = [ P.g21, P.g32, P.g43, P.g64, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_181 = Vertex(name = 'V181',
particles = [ P.g21, P.g32, P.g53, P.g65, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_182 = Vertex(name = 'V182',
particles = [ P.g21, P.g42, P.g54, P.g65, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_183 = Vertex(name = 'V183',
particles = [ P.g31, P.g43, P.g54, P.g65, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_184 = Vertex(name = 'V184',
particles = [ P.g32, P.g43, P.g54, P.g65, P.g76, P.g87, P.g28 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
V_185 = Vertex(name = 'V185',
particles = [ P.g43, P.g54, P.g65, P.g76, P.g87, P.g98, P.g39 ],
color = [ '1' ],
lorentz = [ L.GLUON7 ],
couplings = {(0,0):C.G2})
# 8-point
V_186 = Vertex(name = 'V186',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g65, P.g76, P.g87, P.g18 ],
color = [ '1' ],
lorentz = [ L.GLUON8 ],
couplings = {(0,0):C.G2})
V_187 = Vertex(name = 'V187',
particles = [ P.g32, P.g43, P.g54, P.g65, P.g76, P.g87, P.g98, P.g29 ],
color = [ '1' ],
lorentz = [ L.GLUON8 ],
couplings = {(0,0):C.G2})
# 9-point
V_188 = Vertex(name = 'V188',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g65, P.g76, P.g87, P.g98, P.g19 ],
color = [ '1' ],
lorentz = [ L.GLUON9 ],
couplings = {(0,0):C.G2})
# 10-point
V_189 = Vertex(name = 'V189',
particles = [ P.g21, P.g32, P.g43, P.g54, P.g65, P.g76, P.g87, P.g98, P.ga9, P.g1a ],
color = [ '1' ],
lorentz = [ L.GLUON10 ],
couplings = {(0,0):C.G2})
# qqg
V_201 = Vertex(name = 'V_201',
particles = [ P.u3__tilde__, P.u1, P.g13 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_202 = Vertex(name = 'V_202',
particles = [ P.u1__tilde__, P.u3, P.g13 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_203 = Vertex(name = 'V_203',
particles = [ P.u4__tilde__, P.u1, P.g14 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_204 = Vertex(name = 'V_204',
particles = [ P.u1__tilde__, P.u4, P.g14 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_205 = Vertex(name = 'V_205',
particles = [ P.u5__tilde__, P.u1, P.g15 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_206 = Vertex(name = 'V_206',
particles = [ P.u1__tilde__, P.u5, P.g15 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_207 = Vertex(name = 'V_207',
particles = [ P.u6__tilde__, P.u1, P.g16 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_208 = Vertex(name = 'V_208',
particles = [ P.u1__tilde__, P.u6, P.g16 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_209 = Vertex(name = 'V_209',
particles = [ P.u7__tilde__, P.u1, P.g17 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_210 = Vertex(name = 'V_210',
particles = [ P.u1__tilde__, P.u7, P.g17 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_211 = Vertex(name = 'V_211',
particles = [ P.u8__tilde__, P.u1, P.g18 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_212 = Vertex(name = 'V_212',
particles = [ P.u1__tilde__, P.u8, P.g18 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_213 = Vertex(name = 'V_213',
particles = [ P.u9__tilde__, P.u1, P.g19 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_214 = Vertex(name = 'V_214',
particles = [ P.u1__tilde__, P.u9, P.g19 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_215 = Vertex(name = 'V_215',
particles = [ P.u1__tilde__, P.u2, P.g21 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_216 = Vertex(name = 'V_216',
particles = [ P.u2__tilde__, P.u1, P.g21 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_217 = Vertex(name = 'V_217',
particles = [ P.u4__tilde__, P.u2, P.g24 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_218 = Vertex(name = 'V_218',
particles = [ P.u2__tilde__, P.u4, P.g24 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_219 = Vertex(name = 'V_219',
particles = [ P.u5__tilde__, P.u2, P.g25 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_220 = Vertex(name = 'V_220',
particles = [ P.u2__tilde__, P.u5, P.g25 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_221 = Vertex(name = 'V_221',
particles = [ P.u6__tilde__, P.u2, P.g26 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_222 = Vertex(name = 'V_222',
particles = [ P.u2__tilde__, P.u6, P.g26 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_223 = Vertex(name = 'V_223',
particles = [ P.u7__tilde__, P.u2, P.g27 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_224 = Vertex(name = 'V_224',
particles = [ P.u2__tilde__, P.u7, P.g27 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_225 = Vertex(name = 'V_225',
particles = [ P.u8__tilde__, P.u2, P.g28 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_226 = Vertex(name = 'V_226',
particles = [ P.u2__tilde__, P.u8, P.g28 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_227 = Vertex(name = 'V_227',
particles = [ P.u9__tilde__, P.u2, P.g29 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_228 = Vertex(name = 'V_228',
particles = [ P.u2__tilde__, P.u9, P.g29 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_229 = Vertex(name = 'V_229',
particles = [ P.u3__tilde__, P.u1, P.g31 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_230 = Vertex(name = 'V_230',
particles = [ P.u2__tilde__, P.u3, P.g32 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_231 = Vertex(name = 'V_231',
particles = [ P.u3__tilde__, P.u2, P.g32 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_232 = Vertex(name = 'V_232',
particles = [ P.u5__tilde__, P.u3, P.g35 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_233 = Vertex(name = 'V_233',
particles = [ P.u3__tilde__, P.u5, P.g35 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_234 = Vertex(name = 'V_234',
particles = [ P.u6__tilde__, P.u3, P.g36 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_235 = Vertex(name = 'V_235',
particles = [ P.u3__tilde__, P.u6, P.g36 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_236 = Vertex(name = 'V_236',
particles = [ P.u7__tilde__, P.u3, P.g37 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_237 = Vertex(name = 'V_237',
particles = [ P.u3__tilde__, P.u7, P.g37 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_238 = Vertex(name = 'V_238',
particles = [ P.u8__tilde__, P.u3, P.g38 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_239 = Vertex(name = 'V_239',
particles = [ P.u3__tilde__, P.u8, P.g38 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_240 = Vertex(name = 'V_240',
particles = [ P.u9__tilde__, P.u3, P.g39 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_241 = Vertex(name = 'V_241',
particles = [ P.u3__tilde__, P.u9, P.g39 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_242 = Vertex(name = 'V_242',
particles = [ P.u4__tilde__, P.u1, P.g41 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_243 = Vertex(name = 'V_243',
particles = [ P.u4__tilde__, P.u2, P.g42 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_244 = Vertex(name = 'V_244',
particles = [ P.u3__tilde__, P.u4, P.g43 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_245 = Vertex(name = 'V_245',
particles = [ P.u4__tilde__, P.u3, P.g43 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_246 = Vertex(name = 'V_246',
particles = [ P.u6__tilde__, P.u4, P.g46 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_247 = Vertex(name = 'V_247',
particles = [ P.u4__tilde__, P.u6, P.g46 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_248 = Vertex(name = 'V_248',
particles = [ P.u7__tilde__, P.u4, P.g47 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_249 = Vertex(name = 'V_249',
particles = [ P.u4__tilde__, P.u7, P.g47 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_251 = Vertex(name = 'V_251',
particles = [ P.u8__tilde__, P.u4, P.g48 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_252 = Vertex(name = 'V_252',
particles = [ P.u4__tilde__, P.u8, P.g48 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_253 = Vertex(name = 'V_253',
particles = [ P.u9__tilde__, P.u4, P.g49 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_254 = Vertex(name = 'V_254',
particles = [ P.u4__tilde__, P.u9, P.g49 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_255 = Vertex(name = 'V_255',
particles = [ P.u5__tilde__, P.u1, P.g51 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_256 = Vertex(name = 'V_256',
particles = [ P.u5__tilde__, P.u2, P.g52 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_257 = Vertex(name = 'V_257',
particles = [ P.u5__tilde__, P.u3, P.g53 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_258 = Vertex(name = 'V_258',
particles = [ P.u4__tilde__, P.u5, P.g54 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_259 = Vertex(name = 'V_259',
particles = [ P.u5__tilde__, P.u4, P.g54 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_260 = Vertex(name = 'V_260',
particles = [ P.u7__tilde__, P.u5, P.g57 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_261 = Vertex(name = 'V_261',
particles = [ P.u5__tilde__, P.u7, P.g57 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_262 = Vertex(name = 'V_262',
particles = [ P.u8__tilde__, P.u5, P.g58 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_263 = Vertex(name = 'V_263',
particles = [ P.u5__tilde__, P.u8, P.g58 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_264 = Vertex(name = 'V_264',
particles = [ P.u9__tilde__, P.u5, P.g59 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_265 = Vertex(name = 'V_265',
particles = [ P.u5__tilde__, P.u9, P.g59 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_266 = Vertex(name = 'V_266',
particles = [ P.u6__tilde__, P.u1, P.g61 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_267 = Vertex(name = 'V_267',
particles = [ P.u6__tilde__, P.u2, P.g62 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_268 = Vertex(name = 'V_268',
particles = [ P.u6__tilde__, P.u3, P.g63 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_269 = Vertex(name = 'V_269',
particles = [ P.u6__tilde__, P.u4, P.g64 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_270 = Vertex(name = 'V_270',
particles = [ P.u5__tilde__, P.u6, P.g65 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_271 = Vertex(name = 'V_271',
particles = [ P.u6__tilde__, P.u5, P.g65 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_272 = Vertex(name = 'V_272',
particles = [ P.u8__tilde__, P.u6, P.g68 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_273 = Vertex(name = 'V_273',
particles = [ P.u6__tilde__, P.u8, P.g68 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_274 = Vertex(name = 'V_274',
particles = [ P.u9__tilde__, P.u6, P.g69 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_275 = Vertex(name = 'V_275',
particles = [ P.u6__tilde__, P.u9, P.g69 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_276 = Vertex(name = 'V_276',
particles = [ P.u7__tilde__, P.u1, P.g71 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_277 = Vertex(name = 'V_277',
particles = [ P.u7__tilde__, P.u2, P.g72 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_278 = Vertex(name = 'V_278',
particles = [ P.u7__tilde__, P.u3, P.g73 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_279 = Vertex(name = 'V_279',
particles = [ P.u7__tilde__, P.u4, P.g74 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_280 = Vertex(name = 'V_280',
particles = [ P.u7__tilde__, P.u5, P.g75 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_281 = Vertex(name = 'V_281',
particles = [ P.u6__tilde__, P.u7, P.g76 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_282 = Vertex(name = 'V_282',
particles = [ P.u7__tilde__, P.u6, P.g76 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_283 = Vertex(name = 'V_283',
particles = [ P.u9__tilde__, P.u7, P.g79 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_284 = Vertex(name = 'V_284',
particles = [ P.u7__tilde__, P.u9, P.g79 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_285 = Vertex(name = 'V_285',
particles = [ P.u8__tilde__, P.u1, P.g81 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_286 = Vertex(name = 'V_286',
particles = [ P.u8__tilde__, P.u2, P.g82 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_287 = Vertex(name = 'V_287',
particles = [ P.u8__tilde__, P.u3, P.g83 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_288 = Vertex(name = 'V_288',
particles = [ P.u8__tilde__, P.u4, P.g84 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_289 = Vertex(name = 'V_289',
particles = [ P.u8__tilde__, P.u5, P.g85 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_290 = Vertex(name = 'V_290',
particles = [ P.u8__tilde__, P.u6, P.g86 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_291 = Vertex(name = 'V_291',
particles = [ P.u7__tilde__, P.u8, P.g87 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_292 = Vertex(name = 'V_292',
particles = [ P.u8__tilde__, P.u7, P.g87 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_293 = Vertex(name = 'V_293',
particles = [ P.u9__tilde__, P.u1, P.g91 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_294 = Vertex(name = 'V_294',
particles = [ P.u9__tilde__, P.u2, P.g92 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_295 = Vertex(name = 'V_295',
particles = [ P.u9__tilde__, P.u3, P.g93 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_296 = Vertex(name = 'V_296',
particles = [ P.u9__tilde__, P.u4, P.g94 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_297 = Vertex(name = 'V_297',
particles = [ P.u9__tilde__, P.u5, P.g95 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_298 = Vertex(name = 'V_298',
particles = [ P.u9__tilde__, P.u6, P.g96 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_299 = Vertex(name = 'V_299',
particles = [ P.u9__tilde__, P.u7, P.g97 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_300 = Vertex(name = 'V_300',
particles = [ P.u8__tilde__, P.u9, P.g98 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_301 = Vertex(name = 'V_301',
particles = [ P.u9__tilde__, P.u8, P.g98 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG2})
V_401 = Vertex(name = 'V_401',
particles = [ P.u1__tilde__, P.u1, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_402 = Vertex(name = 'V_402',
particles = [ P.u2__tilde__, P.u2, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_403 = Vertex(name = 'V_403',
particles = [ P.u3__tilde__, P.u3, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_404 = Vertex(name = 'V_404',
particles = [ P.u4__tilde__, P.u4, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_405 = Vertex(name = 'V_405',
particles = [ P.u5__tilde__, P.u5, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_406 = Vertex(name = 'V_406',
particles = [ P.u6__tilde__, P.u6, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_407 = Vertex(name = 'V_407',
particles = [ P.u7__tilde__, P.u7, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_408 = Vertex(name = 'V_408',
particles = [ P.u8__tilde__, P.u8, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
V_409 = Vertex(name = 'V_409',
particles = [ P.u9__tilde__, P.u9, P.g0 ],
color = [ '1' ],
lorentz = [ L.FFV1 ],
couplings = {(0,0):C.GG0})
|
{"/vertices.py": ["/particles.py"]}
|
18,024
|
umluizlima/ac318-turma1-grupo1
|
refs/heads/master
|
/app/controller/main.py
|
from flask import (
Blueprint, redirect, url_for, session, render_template, request, flash
)
from app.model import User
bp = Blueprint('main', __name__, url_prefix='')
@bp.route('/', methods=["GET", "POST"])
def index():
    """Render the landing page; on POST, jump to the submitted user's profile."""
    if request.method == "POST":
        submitted = request.form.to_dict()
        username = submitted.get('username')
        if username is not None:
            return redirect(url_for('user.profile', username=username))
        flash('Nome de usuário inválido.')
    return render_template('main/index.html', title="Início")
|
{"/app/controller/main.py": ["/app/model.py"], "/app/controller/auth.py": ["/app/model.py"], "/app/controller/user.py": ["/app/model.py", "/app/controller/auth.py"]}
|
18,025
|
umluizlima/ac318-turma1-grupo1
|
refs/heads/master
|
/app/controller/auth.py
|
import functools
from flask import (
Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from werkzeug.security import check_password_hash, generate_password_hash
from app.model import db, User, Telephone, Email
bp = Blueprint('auth', __name__, url_prefix='')
@bp.route('/signup', methods=('GET', 'POST'))
def signup():
    """Register a new user together with a primary email and telephone.

    GET renders the signup form; POST validates that the username is free,
    persists the User plus its contact rows in one transaction, and
    redirects to the login page on success.
    """
    if request.method == 'POST':
        error = None
        username = request.form['username']
        if User.query.filter_by(username=username).first() is not None:
            error = f'Usuário {username} já cadastrado.'
        if error is None:
            user = User(username=username,
                        password=generate_password_hash(request.form['password']),
                        first_name=request.form['firstname'],
                        last_name=request.form['lastname'])
            db.session.add(user)
            # Flush explicitly (instead of re-querying by username, which only
            # worked via implicit autoflush) so the database assigns user.id
            # before the dependent contact rows reference it.
            db.session.flush()
            email = Email(tag="main",
                          email=request.form['email'],
                          user_id=user.id)
            db.session.add(email)
            telephone = Telephone(tag="main",
                                  telephone=request.form['telephone'],
                                  user_id=user.id)
            db.session.add(telephone)
            db.session.commit()
            return redirect(url_for('auth.login'))
        flash(error)
    return render_template('auth/signup.html', title='Cadastrar-se')
@bp.route('/login', methods=('GET', 'POST'))
def login():
    """Authenticate a user and store their id in the session."""
    if request.method == 'POST':
        form = request.form
        account = User.query.filter_by(username=form['username']).first()
        if account is None:
            error = 'Nome de usuário incorreto.'
        elif not check_password_hash(account.password, form['password']):
            error = 'Senha incorreta.'
        else:
            error = None
        if error is None:
            # Start from a clean session so no stale state survives login.
            session.clear()
            session['user_id'] = account.id
            return redirect(url_for('main.index'))
        flash(error)
    return render_template('auth/login.html', title='Entrar')
@bp.route('/logout')
def logout():
    """Drop all session state and send the visitor back to the login page."""
    target = url_for('auth.login')
    session.clear()
    return redirect(target)
def login_required(view):
    """Decorator: redirect anonymous visitors (g.user is None) to the login page."""
    @functools.wraps(view)
    def guarded(**kwargs):
        if g.user is None:
            return redirect(url_for('auth.login'))
        return view(**kwargs)
    return guarded
@bp.before_app_request
def load_logged_in_user():
    """Before each request, attach the logged-in User (or None) to g.user."""
    user_id = session.get('user_id')
    g.user = (None if user_id is None
              else User.query.filter_by(id=user_id).first())
|
{"/app/controller/main.py": ["/app/model.py"], "/app/controller/auth.py": ["/app/model.py"], "/app/controller/user.py": ["/app/model.py", "/app/controller/auth.py"]}
|
18,026
|
umluizlima/ac318-turma1-grupo1
|
refs/heads/master
|
/app/model.py
|
import os
import vobject
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
db = SQLAlchemy()
migrate = Migrate()
class User(db.Model):
    """Account record with related Email and Telephone contact rows.

    Session usage cheat-sheet:
    CREATE: db.session.add(item) -> db.session.commit()
    READ: Item.query.all() or Item.query.filter_by(key=value).first()
    UPDATE: item = Item.query.filter_by(key=value).first() -> item.key = value\
    -> db.session.commit()
    DELETE: db.session.delete(item) -> db.session.commit()
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.Text, unique=True, nullable=False)
    # Stored hashed — signup passes generate_password_hash output here.
    password = db.Column(db.Text, nullable=False)
    first_name = db.Column(db.Text, nullable=False)
    last_name = db.Column(db.Text, nullable=False)
    # cascade='delete': removing a User also removes their contact rows.
    emails = db.relationship('Email', cascade='delete', backref='user')
    telephones = db.relationship('Telephone', cascade='delete', backref='user')

    def to_dict(self):
        """Return a plain-dict view of the user including contact rows.

        Keys: id, username, first_name, last_name, full_name, emails,
        telephones (the last two are lists of row dicts).
        """
        user = {'id': self.id,
                'username': self.username,
                'first_name': self.first_name,
                'last_name': self.last_name,
                'full_name': f'{self.first_name} {self.last_name}',
                'emails': [email.to_dict() for email in Email.query.filter_by(user_id=self.id).all()],
                'telephones': [phone.to_dict() for phone in Telephone.query.filter_by(user_id=self.id).all()]}
        return user

    def to_vcard(self):
        """Serialize this user as a vCard file and return its filename.

        Side effect: writes '<first>_<last>_contact.vcf' under
        <project>/instance/vcf/ (directory must already exist — TODO confirm).
        """
        vcard = vobject.vCard()
        user = self.to_dict()
        name = vcard.add('fn')
        name.value = user['full_name']
        name = vcard.add('n')
        name.value = vobject.vcard.Name(family=user['last_name'],
                                        given=user['first_name'])
        for email in user['emails']:
            e = vcard.add('email')
            e.value = email['email']
            e.type_param = email['tag']
        for telephone in user['telephones']:
            t = vcard.add('tel')
            t.value = telephone['telephone']
            t.type_param = telephone['tag']
        filename = '_'.join([self.first_name, self.last_name, 'contact.vcf'])
        filepath = os.path.join(os.path.abspath(''), 'instance', 'vcf', filename)
        with open(filepath, 'w+') as f:
            f.write(vcard.serialize())
        return filename

    def __repr__(self):
        return f"<User {self.username}>"
class Email(db.Model):
    """An email address belonging to a User, labelled with a free-form tag."""
    id = db.Column(db.Integer, primary_key=True)
    tag = db.Column(db.Text, nullable=True)
    email = db.Column(db.Text, nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)

    def to_dict(self):
        """Return a plain-dict view of this row (id, tag, email)."""
        email = {'id': self.id,
                 'tag': self.tag,
                 'email': self.email}
        return email

    def __repr__(self):
        # Bug fix: the original repr began with a stray apostrophe ("'<Email ...").
        return f"<Email {self.email}>"
class Telephone(db.Model):
    """A telephone number belonging to a User, labelled with a free-form tag."""
    id = db.Column(db.Integer, primary_key=True)
    tag = db.Column(db.Text, nullable=True)
    # 15 chars — presumably sized for E.164 international numbers; confirm.
    telephone = db.Column(db.String(15), nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)

    def to_dict(self):
        """Return a plain-dict view of this row (id, tag, telephone)."""
        telephone = {'id': self.id,
                     'tag': self.tag,
                     'telephone': self.telephone}
        return telephone

    def __repr__(self):
        return f"<Telephone {self.telephone}>"
|
{"/app/controller/main.py": ["/app/model.py"], "/app/controller/auth.py": ["/app/model.py"], "/app/controller/user.py": ["/app/model.py", "/app/controller/auth.py"]}
|
18,027
|
umluizlima/ac318-turma1-grupo1
|
refs/heads/master
|
/app/controller/user.py
|
from flask import (
Blueprint, render_template, abort, send_from_directory, request, session,
jsonify, current_app, redirect, url_for, flash
)
from app.model import db, User, Email, Telephone
from .auth import login_required
bp = Blueprint('user', __name__, url_prefix='')
@bp.route("/<username>")
def profile(username):
    """Show a user's public profile, or bounce to the index when unknown."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('Nome de usuário inválido.')
        return redirect(url_for('main.index'))
    return render_template('user/profile.html', user=user.to_dict(),
                           title=user.username)
@bp.route("/settings", methods=["GET", "POST"])
@login_required
def settings():
    """Let the logged-in user edit their profile or delete their account.

    POST with 'delete' removes the account (contact rows cascade) and logs
    the user out; POST with 'update' rewrites name/email/telephone and
    returns to the profile page. GET renders the settings form.
    """
    user = User.query.filter_by(id=session.get('user_id')).first()
    if request.method == "POST":
        data = request.form.to_dict()
        if 'delete' in data:
            db.session.delete(user)
            db.session.commit()
            # Bug fix: the account no longer exists, so clear the session and
            # redirect instead of falling through and re-rendering the
            # now-deleted user (which raised on attribute refresh).
            session.clear()
            return redirect(url_for('main.index'))
        if 'update' in data:
            # Removed leftover debug print from the original.
            user.first_name = data['first_name']
            user.last_name = data['last_name']
            email = Email.query.filter_by(id=data['email_id']).first()
            if email is not None:
                email.email = data['email']
            telephone = Telephone.query.filter_by(id=data['telephone_id']).first()
            if telephone is not None:
                telephone.telephone = data['telephone']
            db.session.commit()
            return redirect(url_for('user.profile', username=user.username))
    return render_template('user/settings.html', user=user.to_dict(),
                           title='Editar')
@bp.route("/download/<username>")
def download(username):
    """Serve the user's generated vCard as a download; 404 for unknown users."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        abort(404)
    return send_from_directory(current_app.config['VCARD_FOLDER'],
                               user.to_vcard(),
                               as_attachment=True)
|
{"/app/controller/main.py": ["/app/model.py"], "/app/controller/auth.py": ["/app/model.py"], "/app/controller/user.py": ["/app/model.py", "/app/controller/auth.py"]}
|
18,028
|
umluizlima/ac318-turma1-grupo1
|
refs/heads/master
|
/app/controller/pwa.py
|
from flask import (
Blueprint, send_from_directory
)
bp = Blueprint('pwa', __name__, url_prefix='')
@bp.route('/manifest.json')
def manifest():
    """Serve the PWA web-app manifest from the static folder."""
    filename = 'manifest.json'
    return send_from_directory('static', filename)
@bp.route('/sw.js')
def service_worker():
    """Serve the service-worker script from the static folder."""
    filename = 'sw.js'
    return send_from_directory('static', filename)
|
{"/app/controller/main.py": ["/app/model.py"], "/app/controller/auth.py": ["/app/model.py"], "/app/controller/user.py": ["/app/model.py", "/app/controller/auth.py"]}
|
18,031
|
omipareja/Gestor_Empleados
|
refs/heads/master
|
/empleados/aplicaciones/departamento/migrations/0003_auto_20210219_1414.py
|
# Generated by Django 3.1.6 on 2021-02-19 14:14
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: update Meta options on Departamento.

    Sets default ordering by name and Spanish verbose names; no schema
    (column/table) changes are involved.
    """
    dependencies = [
        ('departamento', '0002_auto_20210218_0125'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='departamento',
            options={'ordering': ['name'], 'verbose_name': 'Mi departamento', 'verbose_name_plural': 'Mis departamentos'},
        ),
    ]
|
{"/empleados/aplicaciones/departamento/views.py": ["/empleados/aplicaciones/departamento/models.py"], "/empleados/aplicaciones/persona/admin.py": ["/empleados/aplicaciones/persona/models.py"], "/empleados/aplicaciones/departamento/urls.py": ["/empleados/aplicaciones/departamento/views.py"], "/empleados/aplicaciones/persona/urls.py": ["/empleados/aplicaciones/persona/views.py"], "/empleados/aplicaciones/home/urls.py": ["/empleados/aplicaciones/home/views.py"]}
|
18,032
|
omipareja/Gestor_Empleados
|
refs/heads/master
|
/empleados/aplicaciones/departamento/views.py
|
from django.shortcuts import render
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.edit import FormView
from django.views.generic import ListView, DetailView, CreateView, TemplateView, UpdateView, DeleteView
from .forms import *
from aplicaciones.persona.models import *
from .models import *
# Create your views here.
class NewDepartamento(FormView):
    """Create a Departamento plus an initial Empleado from one submitted form.

    NOTE(review): this view class shadows a form class of the same name pulled
    in by ``from .forms import *``. The line ``form_class = NewDepartamento``
    runs while the class body executes — before this class is bound — so it
    still resolves to the *form*; after this definition the name refers to the
    view. Fragile; consider renaming one of them.
    """
    # This view does not work directly with models (plain FormView).
    template_name = "departamento.html"
    form_class = NewDepartamento  # resolves to the form from .forms (see NOTE)
    success_url = reverse_lazy('departamento:list_departamento')

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        # CSRF protection is disabled for this endpoint — TODO confirm intent.
        return super().dispatch(request, *args, **kwargs)

    def form_valid(self, form):
        """Persist the new department and a seed employee, then redirect."""
        nombre = form.cleaned_data['nombre']
        apellido = form.cleaned_data['apellidos']
        depa = Departamento.objects.create(
            name= form.cleaned_data['departamento'],
            short_name= form.cleaned_data['shorname']
        )
        Empleado.objects.create( # create record
            first_name= nombre,
            last_name= apellido,
            job = '1',  # hard-coded job id — presumably a default; confirm
            departamento= depa
        )
        return super(NewDepartamento, self).form_valid(form)
class DepartamentoListView(ListView):
    """Paginated, id-ordered list of every Departamento.

    Template context: 'consulta' (the page of departments) plus 'title' and
    'place_holder' added below. Removed a commented-out get_queryset stub
    (dead code) from the original.
    """
    template_name = 'departamento_list.html'
    model = Departamento
    context_object_name = 'consulta'
    # ListView creates a pagination object for the template.
    paginate_by = 2
    ordering = 'id'

    def get_context_data(self, **kwargs):
        """Add the page title and search placeholder to the context."""
        context = super().get_context_data(**kwargs)
        context['title'] = 'Lista Departamentos'
        context['place_holder'] = 'Buscar Departamento'
        return context
|
{"/empleados/aplicaciones/departamento/views.py": ["/empleados/aplicaciones/departamento/models.py"], "/empleados/aplicaciones/persona/admin.py": ["/empleados/aplicaciones/persona/models.py"], "/empleados/aplicaciones/departamento/urls.py": ["/empleados/aplicaciones/departamento/views.py"], "/empleados/aplicaciones/persona/urls.py": ["/empleados/aplicaciones/persona/views.py"], "/empleados/aplicaciones/home/urls.py": ["/empleados/aplicaciones/home/views.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.